From fb678f87d41a88a2015651b18afe391a84f1cd7c Mon Sep 17 00:00:00 2001
From: Wolf Vollprecht
Date: Thu, 28 Nov 2024 17:22:59 +0100
Subject: [PATCH 01/16] start working on caching src as well

---
 src/build.rs | 14 +++++++------
 src/cache.rs | 57 ++++++++++++++++++++++++++++++++++++++--------------
 2 files changed, 50 insertions(+), 21 deletions(-)

diff --git a/src/build.rs b/src/build.rs
index da5e32fc..4bfd3a20 100644
--- a/src/build.rs
+++ b/src/build.rs
@@ -114,12 +114,14 @@ pub async fn run_build(
     let directories = output.build_configuration.directories.clone();
 
-    let output = output
-        .fetch_sources(tool_configuration)
-        .await
-        .into_diagnostic()?;
-
-    let output = output.build_or_fetch_cache(tool_configuration).await?;
+    let output = if output.recipe.cache.is_some() {
+        output.build_or_fetch_cache(tool_configuration).await?
+    } else {
+        output
+            .fetch_sources(tool_configuration)
+            .await
+            .into_diagnostic()?
+    };
 
     let output = output
         .resolve_dependencies(tool_configuration)
diff --git a/src/cache.rs b/src/cache.rs
index cdaebb17..5acd028a 100644
--- a/src/cache.rs
+++ b/src/cache.rs
@@ -19,7 +19,7 @@ use crate::{
     render::resolved_dependencies::{
         install_environments, resolve_dependencies, FinalizedDependencies,
     },
-    source::copy_dir::{copy_file, create_symlink, CopyOptions},
+    source::copy_dir::{copy_file, create_symlink, CopyDir, CopyOptions},
 };
 
 /// Error type for cache key generation
@@ -39,10 +39,16 @@ pub enum CacheKeyError {
 pub struct Cache {
     /// The requirements that were used to build the cache
     pub requirements: Requirements,
+
     /// The finalized dependencies
     pub finalized_dependencies: FinalizedDependencies,
+
     /// The prefix files that are included in the cache
     pub prefix_files: Vec<(PathBuf, bool)>,
+
+    /// The (dirty) source files that are included in the cache
+    pub work_dir_files: Vec<PathBuf>,
+
     /// The prefix that was used at build time (needs to be replaced when
     /// restoring the files)
     pub prefix: PathBuf,
@@ -135,26 +141,30 @@ impl Output {
             }
         }
 
+        // restore the work dir files
+        let cache_dir_work = cache_dir.join("work_dir");
+        CopyDir::new(&cache_dir_work, &self.build_configuration.directories.work_dir)
+            .run()
+            .into_diagnostic()?;
+
         Ok(Output {
             finalized_cache_dependencies: Some(cache.finalized_dependencies.clone()),
             ..self.clone()
         })
     }
 
+    /// This will fetch sources and build the cache if it doesn't exist
+    /// Note: this modifies the output in place
     pub(crate) async fn build_or_fetch_cache(
-        &self,
+        mut self,
         tool_configuration: &crate::tool_configuration::Configuration,
     ) -> Result<Output, miette::Error> {
-        // if we don't have a cache, we need to run the cache build with our current
-        // workdir, and then return the cache
-        let span = tracing::info_span!("Running cache build");
-        let _enter = span.enter();
-
-        let target_platform = self.build_configuration.target_platform;
-        let mut env_vars = env_vars::vars(self, "BUILD");
-        env_vars.extend(env_vars::os_vars(self.prefix(), &target_platform));
+        if let Some(cache) = self.recipe.cache.clone() {
+            // if we don't have a cache, we need to run the cache build with our current
+            // workdir, and then return the cache
+            let span = tracing::info_span!("Running cache build");
+            let _enter = span.enter();
 
-        if let Some(cache) = &self.recipe.cache {
             tracing::info!("Cache key: {:?}", self.cache_key().into_diagnostic()?);
             let cache_key = format!("bld_{}", self.cache_key().into_diagnostic()?);
@@ -170,17 +180,26 @@ impl Output {
                 return self.restore_cache(cache_dir).await;
             }
 
+            self = self
+                .fetch_sources(tool_configuration)
+                .await
+                .into_diagnostic()?;
+
+            let target_platform = self.build_configuration.target_platform;
+            let mut env_vars = env_vars::vars(&self, "BUILD");
+            env_vars.extend(env_vars::os_vars(self.prefix(), &target_platform));
+
             // Reindex the channels
             let channels = build_reindexed_channels(&self.build_configuration, tool_configuration)
                 .into_diagnostic()
                 .context("failed to reindex output channel")?;
 
             let finalized_dependencies =
-                resolve_dependencies(&cache.requirements, self, &channels, tool_configuration)
+                resolve_dependencies(&cache.requirements, &self, &channels, tool_configuration)
                     .await
                     .unwrap();
 
-            install_environments(self, &finalized_dependencies, tool_configuration)
+            install_environments(&self, &finalized_dependencies, tool_configuration)
                 .await
                 .into_diagnostic()?;
@@ -213,6 +232,7 @@ impl Output {
         let mut creation_cache = HashSet::new();
         let mut copied_files = Vec::new();
         let copy_options = CopyOptions::default();
+
         for file in &new_files.new_files {
             // skip directories (if they are not a symlink)
             // directories are implicitly created by the files
@@ -241,11 +261,18 @@ impl Output {
             }
         }
 
+        // We also need to copy the work dir files to the cache
+        let work_dir_files = CopyDir::new(
+            &self.build_configuration.directories.work_dir.clone(),
+            &cache_dir.join("work_dir"),
+        ).run().into_diagnostic()?;
+
         // save the cache
         let cache = Cache {
             requirements: cache.requirements.clone(),
             finalized_dependencies: finalized_dependencies.clone(),
             prefix_files: copied_files,
+            work_dir_files: work_dir_files.copied_paths().to_vec(),
             prefix: self.prefix().to_path_buf(),
         };
@@ -254,10 +281,10 @@ impl Output {
 
             Ok(Output {
                 finalized_cache_dependencies: Some(finalized_dependencies),
-                ..self.clone()
+                ..self
             })
         } else {
-            Ok(self.clone())
+            Ok(self)
         }
     }
 }
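For orientation before the follow-up patches: the cache written above is just a directory holding copied `prefix/` and `work_dir/` trees plus a serde-serialized `cache.json` manifest that is read back on restore. A minimal sketch of that round-trip, using an assumed stand-in struct (`CacheManifest`) rather than the real rattler-build types:

    use std::path::{Path, PathBuf};
    use serde::{Deserialize, Serialize};

    #[derive(Serialize, Deserialize)]
    struct CacheManifest {
        prefix_files: Vec<(PathBuf, bool)>, // (relative path, contains-prefix flag)
        work_dir_files: Vec<PathBuf>,       // source files captured from the work dir
        prefix: PathBuf,                    // build-time prefix, replaced on restore
    }

    fn save(cache_dir: &Path, manifest: &CacheManifest) -> std::io::Result<()> {
        let json = serde_json::to_string(manifest).expect("manifest serializes");
        std::fs::write(cache_dir.join("cache.json"), json)
    }

    fn load(cache_dir: &Path) -> std::io::Result<CacheManifest> {
        let text = std::fs::read_to_string(cache_dir.join("cache.json"))?;
        Ok(serde_json::from_str(&text).expect("valid cache.json"))
    }

The later patches in this series mostly refine exactly this manifest: what it stores, how its key is hashed, and how it is restored.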
From cca8fbe459d1fe6f2d16bcb9e29b72345fd86019 Mon Sep 17 00:00:00 2001
From: Wolf Vollprecht
Date: Fri, 29 Nov 2024 10:05:02 +0100
Subject: [PATCH 02/16] fix global name when caching source as well

---
 src/cache.rs | 15 +++++++++++----
 src/lib.rs   | 18 +++++++++++++++---
 2 files changed, 26 insertions(+), 7 deletions(-)

diff --git a/src/cache.rs b/src/cache.rs
index 5acd028a..dfd611a6 100644
--- a/src/cache.rs
+++ b/src/cache.rs
@@ -143,9 +143,14 @@ impl Output {
 
         // restore the work dir files
         let cache_dir_work = cache_dir.join("work_dir");
-        CopyDir::new(&cache_dir_work, &self.build_configuration.directories.work_dir)
-            .run()
-            .into_diagnostic()?;
+        let result = CopyDir::new(
+            &cache_dir_work,
+            &self.build_configuration.directories.work_dir,
+        )
+        .run()
+        .into_diagnostic()?;
+
+        tracing::info!("Restored source files from cache");
 
         Ok(Output {
             finalized_cache_dependencies: Some(cache.finalized_dependencies.clone()),
             ..self.clone()
         })
@@ -265,7 +270,9 @@ impl Output {
         let work_dir_files = CopyDir::new(
             &self.build_configuration.directories.work_dir.clone(),
             &cache_dir.join("work_dir"),
-        ).run().into_diagnostic()?;
+        )
+        .run()
+        .into_diagnostic()?;
 
         // save the cache
         let cache = Cache {
diff --git a/src/lib.rs b/src/lib.rs
index 42052880..22a9cd65 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -164,8 +164,10 @@ pub async fn get_build_output(
     }
 
     let mut host_platform = args.host_platform;
+    // If target_platform is not set, we default to the host platform
     let target_platform = args.target_platform.unwrap_or(host_platform);
+
     // If target_platform is set and host_platform is not, then we default
     // host_platform to the target_platform
     if let Some(target_platform) = args.target_platform {
@@ -268,6 +270,12 @@ pub async fn get_build_output(
     let mut subpackages = BTreeMap::new();
     let mut outputs = Vec::new();
+
+    let global_build_name = outputs_and_variants
+        .first()
+        .map(|o| o.name.clone())
+        .unwrap_or_default();
+
     for discovered_output in outputs_and_variants {
         let hash = HashInfo::from_variant(&discovered_output.used_vars, &discovered_output.noarch_type);
@@ -313,9 +321,13 @@ pub async fn get_build_output(
             },
         );
 
-        let name = recipe.package().name().clone();
-        // Add the channels from the args and by default always conda-forge
+        let build_name = if recipe.cache.is_some() {
+            global_build_name.clone()
+        } else {
+            recipe.package().name().as_normalized().to_string()
+        };
+
+        // Add the channels from the args and by default always conda-forge
         let channels = args
             .channel
             .clone()
@@ -341,7 +353,7 @@ pub async fn get_build_output(
             hash,
             variant: discovered_output.used_vars.clone(),
             directories: Directories::setup(
-                name.as_normalized(),
+                &build_name,
                 recipe_path,
                 &output_dir,
                 args.no_build_id,
From 7cf80a05b023296afb8b8c31d99ca349e710dba4 Mon Sep 17 00:00:00 2001
From: Wolf Vollprecht
Date: Fri, 29 Nov 2024 11:00:00 +0100
Subject: [PATCH 03/16] fix up serialization of `PathSource`

---
 src/build.rs                |  2 +-
 src/cache.rs                | 39 +++++++++++++++++++++++++++----------
 src/recipe/parser/source.rs | 36 +++++++++++++++++++++++++++-------
 src/source/mod.rs           | 14 ++++++++++++-
 4 files changed, 72 insertions(+), 19 deletions(-)

diff --git a/src/build.rs b/src/build.rs
index 4bfd3a20..2513cc25 100644
--- a/src/build.rs
+++ b/src/build.rs
@@ -118,7 +118,7 @@ pub async fn run_build(
         output.build_or_fetch_cache(tool_configuration).await?
     } else {
         output
-            .fetch_sources(tool_configuration)
+            .fetch_sources(None, tool_configuration)
             .await
             .into_diagnostic()?
     };
diff --git a/src/cache.rs b/src/cache.rs
index dfd611a6..0c05fa02 100644
--- a/src/cache.rs
+++ b/src/cache.rs
@@ -15,7 +15,7 @@ use crate::{
     env_vars,
     metadata::{build_reindexed_channels, Output},
     packaging::{contains_prefix_binary, contains_prefix_text, content_type, Files},
-    recipe::parser::{Dependency, Requirements},
+    recipe::parser::{Dependency, Requirements, Source},
     render::resolved_dependencies::{
         install_environments, resolve_dependencies, FinalizedDependencies,
     },
     source::copy_dir::{copy_file, create_symlink, CopyDir, CopyOptions},
 };
@@ -52,6 +52,9 @@ pub struct Cache {
     /// The prefix that was used at build time (needs to be replaced when
     /// restoring the files)
     pub prefix: PathBuf,
+
+    /// The sources that were already present in the `work_dir`
+    pub sources: Vec<Source>,
 }
@@ -107,11 +110,11 @@ impl Output {
     }
 
     /// Restore an existing cache from a cache directory
-    async fn restore_cache(&self, cache_dir: PathBuf) -> Result<Output, miette::Error> {
-        let cache: Cache = serde_json::from_str(
-            &fs::read_to_string(cache_dir.join("cache.json")).into_diagnostic()?,
-        )
-        .into_diagnostic()?;
+    async fn restore_cache(
+        &self,
+        cache: Cache,
+        cache_dir: PathBuf,
+    ) -> Result<Output, miette::Error> {
         let copy_options = CopyOptions {
             skip_exist: true,
             ..Default::default()
         };
@@ -143,7 +146,7 @@ impl Output {
         // restore the work dir files
         let cache_dir_work = cache_dir.join("work_dir");
-        let result = CopyDir::new(
+        CopyDir::new(
             &cache_dir_work,
             &self.build_configuration.directories.work_dir,
         )
         .run()
         .into_diagnostic()?;
@@ -181,12 +184,27 @@ impl Output {
             // restore the cache if it exists by copying the files to the prefix
             if cache_dir.exists() {
-                tracing::info!("Restoring cache from {:?}", cache_dir);
-                return self.restore_cache(cache_dir).await;
+                let text = fs::read_to_string(cache_dir.join("cache.json")).into_diagnostic()?;
+                match serde_json::from_str::<Cache>(&text) {
+                    Ok(cache) => {
+                        tracing::info!("Restoring cache from {:?}", cache_dir);
+                        self = self
+                            .fetch_sources(Some(cache.sources.clone()), tool_configuration)
+                            .await
+                            .into_diagnostic()?;
+                        return self.restore_cache(cache, cache_dir).await;
+                    }
+                    Err(e) => {
+                        tracing::error!("Failed to parse cache: {:?} - rebuilding", e);
+                        tracing::info!("JSON: {}", text);
+                        // remove the cache dir and run as normal
+                        fs::remove_dir_all(&cache_dir).into_diagnostic()?;
+                    }
+                }
             }
 
             self = self
-                .fetch_sources(tool_configuration)
+                .fetch_sources(None, tool_configuration)
                 .await
                 .into_diagnostic()?;
@@ -296,6 +314,7 @@ impl Output {
             prefix_files: copied_files,
             work_dir_files: work_dir_files.copied_paths().to_vec(),
             prefix: self.prefix().to_path_buf(),
+            sources: self.recipe.source.clone(),
         };
 
         let cache_file = cache_dir.join("cache.json");
diff --git a/src/recipe/parser/source.rs b/src/recipe/parser/source.rs
index 70c894e4..3506193c 100644
--- a/src/recipe/parser/source.rs
+++ b/src/recipe/parser/source.rs
@@ -20,7 +20,7 @@ use crate::{
 use super::FlattenErrors;
 
 /// Source information.
-#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
+#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
 #[serde(untagged)]
 pub enum Source {
     /// Git source pointing to a Git repository to retrieve the source from
@@ -167,7 +167,7 @@ where
 }
 
 /// Git source information.
-#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
+#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
 pub struct GitSource {
     /// Url to the git repository
     #[serde(rename = "git")]
@@ -358,7 +358,7 @@ impl TryConvertNode<GitSource> for RenderedMappingNode {
 }
 
 /// A Git repository URL or a local path to a Git repository
-#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
+#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
 #[serde(untagged)]
 pub enum GitUrl {
     /// A remote Git repository URL
@@ -382,7 +382,7 @@ impl fmt::Display for GitUrl {
 /// A url source (usually a tar.gz or tar.bz2 archive). A compressed file
 /// will be extracted to the `work` (or `work/` directory).
 #[serde_as]
-#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
+#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
 pub struct UrlSource {
     /// Url to the source code (usually a tar.gz or tar.bz2 etc. file)
     #[serde_as(as = "OneOrMany<_, PreferOne>")]
@@ -509,7 +509,7 @@ impl TryConvertNode<UrlSource> for RenderedMappingNode {
 /// A local path source. The source code will be copied to the `work`
 /// (or `work/` directory).
 #[serde_as]
-#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
+#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
 pub struct PathSource {
     /// Path to the local source code
     pub path: PathBuf,
@@ -531,10 +531,14 @@ pub struct PathSource {
     #[serde(skip_serializing_if = "Option::is_none")]
     pub file_name: Option<String>,
     /// Whether to use the `.gitignore` file in the source directory. Defaults to `true`.
-    #[serde(skip_serializing_if = "should_not_serialize_use_gitignore")]
+    #[serde(default = "default_gitignore", skip_serializing_if = "should_not_serialize_use_gitignore")]
     pub use_gitignore: bool,
 }
 
+fn default_gitignore() -> bool {
+    true
+}
+
 /// Helper method to skip serializing the use_gitignore flag if it is true.
 fn should_not_serialize_use_gitignore(use_gitignore: &bool) -> bool {
     *use_gitignore
@@ -677,4 +681,22 @@ mod tests {
 
         assert_eq!(parsed_git.url, git.url);
     }
-}
+
+    // test serde json round trip for path source "../"
+    #[test]
+    fn test_path_source_round_trip() {
+        let path_source = PathSource {
+            path: "../".into(),
+            sha256: None,
+            md5: None,
+            patches: Vec::new(),
+            target_directory: None,
+            file_name: None,
+            use_gitignore: true,
+        };
+
+        let json = serde_json::to_string(&path_source).unwrap();
+        serde_json::from_str::<PathSource>(&json).unwrap();
+    }
+
+}
\ No newline at end of file
diff --git a/src/source/mod.rs b/src/source/mod.rs
index 156cae24..9080cbf8 100644
--- a/src/source/mod.rs
+++ b/src/source/mod.rs
@@ -283,6 +283,7 @@ impl Output {
     /// Fetches the sources for the given output and returns a new output with the finalized sources attached
     pub async fn fetch_sources(
         self,
+        already_fetched: Option<Vec<Source>>,
         tool_configuration: &tool_configuration::Configuration,
     ) -> Result<Output, SourceError> {
         let span = tracing::info_span!("Fetching source code");
@@ -299,8 +300,19 @@ impl Output {
 
             Ok(self)
         } else {
+            let sources_to_fetch = if let Some(already_fetched) = already_fetched {
+                self.recipe
+                    .sources()
+                    .iter()
+                    .cloned()
+                    .filter(|s| !already_fetched.iter().any(|fetched| fetched == s))
+                    .collect()
+            } else {
+                self.recipe.sources().to_vec()
+            };
+
             let rendered_sources = fetch_sources(
-                self.recipe.sources(),
+                &sources_to_fetch,
                 &self.build_configuration.directories,
                 &self.system_tools,
                 tool_configuration,
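The serialization fix above is an instance of a general serde pitfall: a field annotated with `skip_serializing_if` must also carry a `default`, otherwise a value that was skipped during serialization cannot be deserialized back. A minimal, self-contained sketch of the failure mode this patch fixes (hypothetical `Opts` struct, not the real `PathSource`):

    use serde::{Deserialize, Serialize};

    fn is_true(v: &bool) -> bool {
        *v
    }

    fn yes() -> bool {
        true
    }

    #[derive(Serialize, Deserialize)]
    struct Opts {
        // skipped when `true`, so deserialization must fall back to a default
        #[serde(default = "yes", skip_serializing_if = "is_true")]
        use_gitignore: bool,
    }

    fn main() {
        let json = serde_json::to_string(&Opts { use_gitignore: true }).unwrap();
        assert_eq!(json, "{}"); // the field was skipped entirely
        // without `default = "yes"` this would fail with "missing field"
        let back: Opts = serde_json::from_str(&json).unwrap();
        assert!(back.use_gitignore);
    }

This is exactly why the new `test_path_source_round_trip` test serializes a `PathSource` with `use_gitignore: true` and parses it back.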
From 211f49a57844df038cf40825b864b636e2cbc2a4 Mon Sep 17 00:00:00 2001
From: Wolf Vollprecht
Date: Fri, 29 Nov 2024 11:16:35 +0100
Subject: [PATCH 04/16] clippy and format

---
 src/recipe/parser/source.rs | 8 +++++---
 src/source/mod.rs           | 2 +-
 2 files changed, 6 insertions(+), 4 deletions(-)

diff --git a/src/recipe/parser/source.rs b/src/recipe/parser/source.rs
index 3506193c..ac5d49f8 100644
--- a/src/recipe/parser/source.rs
+++ b/src/recipe/parser/source.rs
@@ -531,7 +531,10 @@ pub struct PathSource {
     #[serde(skip_serializing_if = "Option::is_none")]
     pub file_name: Option<String>,
     /// Whether to use the `.gitignore` file in the source directory. Defaults to `true`.
-    #[serde(default = "default_gitignore", skip_serializing_if = "should_not_serialize_use_gitignore")]
+    #[serde(
+        default = "default_gitignore",
+        skip_serializing_if = "should_not_serialize_use_gitignore"
+    )]
     pub use_gitignore: bool,
 }
@@ -698,5 +701,4 @@ mod tests {
         let json = serde_json::to_string(&path_source).unwrap();
         serde_json::from_str::<PathSource>(&json).unwrap();
     }
-
-}
\ No newline at end of file
+}
diff --git a/src/source/mod.rs b/src/source/mod.rs
index 9080cbf8..47411370 100644
--- a/src/source/mod.rs
+++ b/src/source/mod.rs
@@ -304,8 +304,8 @@ impl Output {
                 self.recipe
                     .sources()
                     .iter()
+                    .filter(|&s| !already_fetched.iter().any(|fetched| fetched == s))
                     .cloned()
-                    .filter(|s| !already_fetched.iter().any(|fetched| fetched == s))
                     .collect()
             } else {
                 self.recipe.sources().to_vec()
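The iterator reordering in the last hunk is the usual clippy guidance: filter by reference first, then clone, so that sources which are filtered out are never cloned at all. As a generic sketch of the same pattern (assumed `PartialEq + Clone` bounds, not the real `Source` type):

    // Keep only the items that were not already fetched, cloning only the survivors.
    fn remaining_sources<T: PartialEq + Clone>(all: &[T], already_fetched: &[T]) -> Vec<T> {
        all.iter()
            // compare by reference first ...
            .filter(|s| !already_fetched.iter().any(|fetched| fetched == *s))
            // ... and clone only what is actually kept
            .cloned()
            .collect()
    }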
From a4fc7848d32a3c57487bb421997569525491ce18 Mon Sep 17 00:00:00 2001
From: Wolf Vollprecht
Date: Sat, 30 Nov 2024 08:09:15 +0100
Subject: [PATCH 05/16] add source to cache

---
 src/cache.rs               | 3 ---
 src/recipe/parser/cache.rs | 6 +++++-
 2 files changed, 5 insertions(+), 4 deletions(-)

diff --git a/src/cache.rs b/src/cache.rs
index 0c05fa02..fb7dc642 100644
--- a/src/cache.rs
+++ b/src/cache.rs
@@ -52,9 +52,6 @@ pub struct Cache {
     /// The prefix that was used at build time (needs to be replaced when
     /// restoring the files)
     pub prefix: PathBuf,
-
-    /// The sources that were already present in the `work_dir`
-    pub sources: Vec<Source>,
 }
diff --git a/src/recipe/parser/cache.rs b/src/recipe/parser/cache.rs
index 7696bab0..4da1f7c4 100644
--- a/src/recipe/parser/cache.rs
+++ b/src/recipe/parser/cache.rs
@@ -9,11 +9,14 @@ use crate::{
 };
 use serde::{Deserialize, Serialize};
 
-use super::{Build, Requirements};
+use super::{Build, Requirements, Source};
 
 /// A cache build that can be used to split up a build into multiple outputs
 #[derive(Debug, Default, Clone, Serialize, Deserialize)]
 pub struct Cache {
+    /// Sources that are used in the cache build and subsequent output builds
+    #[serde(default, skip_serializing_if = "Vec::is_empty")]
+    pub source: Vec<Source>,
     /// The build configuration for the cache
     pub build: Build,
     /// The requirements for building the cache
@@ -35,6 +38,7 @@ impl TryConvertNode<Cache> for RenderedMappingNode {
         validate_keys! {
             cache,
             self.iter(),
+            source,
            build,
            requirements
        };
From 2acc0038bcf82fa01b1f3f42d27cd7e4ebe71eb7 Mon Sep 17 00:00:00 2001
From: Wolf Vollprecht
Date: Sat, 30 Nov 2024 11:05:58 +0100
Subject: [PATCH 06/16] use source from cache key

---
 src/build.rs      |  2 +-
 src/cache.rs      | 24 ++++++++++++++++--------
 src/source/mod.rs | 14 +-------------
 3 files changed, 18 insertions(+), 22 deletions(-)

diff --git a/src/build.rs b/src/build.rs
index 2513cc25..4bfd3a20 100644
--- a/src/build.rs
+++ b/src/build.rs
@@ -118,7 +118,7 @@ pub async fn run_build(
         output.build_or_fetch_cache(tool_configuration).await?
     } else {
         output
-            .fetch_sources(None, tool_configuration)
+            .fetch_sources(tool_configuration)
             .await
             .into_diagnostic()?
     };
diff --git a/src/cache.rs b/src/cache.rs
index fb7dc642..a6cf4d5e 100644
--- a/src/cache.rs
+++ b/src/cache.rs
@@ -15,11 +15,14 @@ use crate::{
     env_vars,
     metadata::{build_reindexed_channels, Output},
     packaging::{contains_prefix_binary, contains_prefix_text, content_type, Files},
-    recipe::parser::{Dependency, Requirements, Source},
+    recipe::parser::{Dependency, Requirements},
     render::resolved_dependencies::{
         install_environments, resolve_dependencies, FinalizedDependencies,
     },
-    source::copy_dir::{copy_file, create_symlink, CopyDir, CopyOptions},
+    source::{
+        copy_dir::{copy_file, create_symlink, CopyDir, CopyOptions},
+        fetch_sources,
+    },
 };
@@ -186,7 +189,7 @@ impl Output {
                     Ok(cache) => {
                         tracing::info!("Restoring cache from {:?}", cache_dir);
                         self = self
-                            .fetch_sources(Some(cache.sources.clone()), tool_configuration)
+                            .fetch_sources(tool_configuration)
                             .await
                             .into_diagnostic()?;
                         return self.restore_cache(cache, cache_dir).await;
                     }
@@ -200,10 +203,16 @@ impl Output {
                 }
             }
 
-            self = self
-                .fetch_sources(None, tool_configuration)
-                .await
-                .into_diagnostic()?;
+            // fetch the sources for the `cache` section
+            // TODO store them as finalized?!
+            fetch_sources(
+                &cache.source,
+                &self.build_configuration.directories,
+                &self.system_tools,
+                tool_configuration,
+            )
+            .await
+            .into_diagnostic()?;
 
             let target_platform = self.build_configuration.target_platform;
             let mut env_vars = env_vars::vars(&self, "BUILD");
diff --git a/src/source/mod.rs b/src/source/mod.rs
index 47411370..6e231ba7 100644
--- a/src/source/mod.rs
+++ b/src/source/mod.rs
@@ -283,7 +283,6 @@ impl Output {
     /// Fetches the sources for the given output and returns a new output with the finalized sources attached
     pub async fn fetch_sources(
         self,
-        already_fetched: Option<Vec<Source>>,
         tool_configuration: &tool_configuration::Configuration,
     ) -> Result<Output, SourceError> {
         let span = tracing::info_span!("Fetching source code");
@@ -300,19 +299,8 @@ impl Output {
 
             Ok(self)
         } else {
-            let sources_to_fetch = if let Some(already_fetched) = already_fetched {
-                self.recipe
-                    .sources()
-                    .iter()
-                    .filter(|&s| !already_fetched.iter().any(|fetched| fetched == s))
-                    .cloned()
-                    .collect()
-            } else {
-                self.recipe.sources().to_vec()
-            };
-
             let rendered_sources = fetch_sources(
-                &sources_to_fetch,
+                &self.recipe.sources(),
                 &self.build_configuration.directories,
                 &self.system_tools,
                 tool_configuration,
From fcf15af7ee7c71332a3500a8efce5aaf976e8999 Mon Sep 17 00:00:00 2001
From: Wolf Vollprecht
Date: Sat, 30 Nov 2024 18:50:46 +0100
Subject: [PATCH 07/16] store finalized sources

---
 src/cache.rs      |  8 +++++---
 src/lib.rs        |  3 ++-
 src/metadata.rs   | 11 ++++++++---
 src/source/mod.rs | 38 ++++++++++++++------------------------
 4 files changed, 29 insertions(+), 31 deletions(-)

diff --git a/src/cache.rs b/src/cache.rs
index a6cf4d5e..dccb77e2 100644
--- a/src/cache.rs
+++ b/src/cache.rs
@@ -204,9 +204,10 @@ impl Output {
             // fetch the sources for the `cache` section
-            // TODO store them as finalized?!
-            fetch_sources(
-                &cache.source,
+            let rendered_sources = fetch_sources(
+                self.finalized_cache_sources
+                    .as_ref()
+                    .unwrap_or(&cache.source),
                 &self.build_configuration.directories,
                 &self.system_tools,
                 tool_configuration,
             )
             .await
             .into_diagnostic()?;
@@ -310,6 +313,7 @@ impl Output {
 
             Ok(Output {
                 finalized_cache_dependencies: Some(finalized_dependencies),
+                finalized_cache_sources: Some(rendered_sources),
                 ..self
             })
         } else {
diff --git a/src/lib.rs b/src/lib.rs
index 22a9cd65..4d8972a3 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -373,8 +373,9 @@ pub async fn get_build_output(
                 force_colors: args.color_build_log && console::colors_enabled(),
             },
             finalized_dependencies: None,
-            finalized_cache_dependencies: None,
             finalized_sources: None,
+            finalized_cache_dependencies: None,
+            finalized_cache_sources: None,
             system_tools: SystemTools::new(),
             build_summary: Arc::new(Mutex::new(BuildSummary::default())),
             extra_meta: Some(
diff --git a/src/metadata.rs b/src/metadata.rs
index 799a10c1..1fce87d1 100644
--- a/src/metadata.rs
+++ b/src/metadata.rs
@@ -404,13 +404,18 @@ pub struct Output {
     /// dependencies have not been resolved yet. During the `run_build`
     /// functions, the dependencies are resolved and this field is filled.
     pub finalized_dependencies: Option<FinalizedDependencies>,
-    /// The finalized dependencies from the cache (if there is a cache
-    /// instruction)
-    pub finalized_cache_dependencies: Option<FinalizedDependencies>,
     /// The finalized sources for this output. Contain the exact git hashes for
     /// the sources that are used to build this output.
     pub finalized_sources: Option<Vec<Source>>,
+
+    /// The finalized dependencies from the cache (if there is a cache
+    /// instruction)
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub finalized_cache_dependencies: Option<FinalizedDependencies>,
+    /// The finalized sources from the cache (if there is a cache instruction)
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub finalized_cache_sources: Option<Vec<Source>>,
+
     /// Summary of the build
     #[serde(skip)]
     pub build_summary: Arc<Mutex<BuildSummary>>,
diff --git a/src/source/mod.rs b/src/source/mod.rs
index 6e231ba7..c3f44a56 100644
--- a/src/source/mod.rs
+++ b/src/source/mod.rs
@@ -288,29 +288,19 @@ impl Output {
         let span = tracing::info_span!("Fetching source code");
         let _enter = span.enter();
 
-        if let Some(finalized_sources) = &self.finalized_sources {
-            fetch_sources(
-                finalized_sources,
-                &self.build_configuration.directories,
-                &self.system_tools,
-                tool_configuration,
-            )
-            .await?;
-
-            Ok(self)
-        } else {
-            let rendered_sources = fetch_sources(
-                &self.recipe.sources(),
-                &self.build_configuration.directories,
-                &self.system_tools,
-                tool_configuration,
-            )
-            .await?;
-
-            Ok(Output {
-                finalized_sources: Some(rendered_sources),
-                ..self
-            })
-        }
+        let rendered_sources = fetch_sources(
+            self.finalized_sources
+                .as_deref()
+                .unwrap_or(self.recipe.sources()),
+            &self.build_configuration.directories,
+            &self.system_tools,
+            tool_configuration,
+        )
+        .await?;
+
+        Ok(Output {
+            finalized_sources: Some(rendered_sources),
+            ..self
+        })
     }
 }
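The `as_deref().unwrap_or(...)` shape above is the core of the "finalize once, reuse afterwards" design: the first fetch pins exact sources (e.g. resolved git hashes), and every later fetch prefers those pinned sources over the recipe's looser spec. A small generic sketch of just that selection step (assumed types, not the real `Source`):

    // Prefer the exact sources pinned on a previous run over the recipe's spec.
    fn sources_to_fetch<'a, T>(finalized: Option<&'a [T]>, recipe_sources: &'a [T]) -> &'a [T] {
        finalized.unwrap_or(recipe_sources)
    }

Because the same finalized list is fed back in on rebuilds, re-running the fetch becomes idempotent rather than re-resolving moving targets like branch heads.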
From 6adeea75d1012722511232ff489fd3d6d5833d30 Mon Sep 17 00:00:00 2001
From: Wolf Vollprecht
Date: Sat, 30 Nov 2024 19:13:34 +0100
Subject: [PATCH 08/16] simplify as we assume same prefix for every output

---
 src/cache.rs | 81 ++++++----------------------------------------------
 1 file changed, 8 insertions(+), 73 deletions(-)

diff --git a/src/cache.rs b/src/cache.rs
index dccb77e2..b5acb9a2 100644
--- a/src/cache.rs
+++ b/src/cache.rs
@@ -1,12 +1,10 @@
 //! Functions to deal with the build cache
 use std::{
     collections::{BTreeMap, HashSet},
-    path::{Path, PathBuf},
+    path::PathBuf,
 };
 
 use fs_err as fs;
-use memchr::memmem;
-use memmap2::Mmap;
 use miette::{Context, IntoDiagnostic};
 use serde::{Deserialize, Serialize};
 use sha2::{Digest, Sha256};
@@ -14,7 +12,7 @@
 use crate::{
     env_vars,
     metadata::{build_reindexed_channels, Output},
-    packaging::{contains_prefix_binary, contains_prefix_text, content_type, Files},
+    packaging::Files,
     recipe::parser::{Dependency, Requirements},
     render::resolved_dependencies::{
         install_environments, resolve_dependencies, FinalizedDependencies,
@@ -47,7 +45,7 @@ pub struct Cache {
     /// The finalized dependencies
     pub finalized_dependencies: FinalizedDependencies,
 
     /// The prefix files that are included in the cache
-    pub prefix_files: Vec<(PathBuf, bool)>,
+    pub prefix_files: Vec<PathBuf>,
 
     /// The (dirty) source files that are included in the cache
     pub work_dir_files: Vec<PathBuf>,
@@ -97,13 +95,11 @@ impl Output {
                 self.build_configuration.build_platform.platform.to_string(),
             );
 
-            let cache_key = (cache, selected_variant);
+            let cache_key = (cache, selected_variant, self.prefix());
             // serialize to json and hash
             let mut hasher = Sha256::new();
-            let serialized = serde_json::to_string(&cache_key)?;
-            hasher.update(serialized.as_bytes());
-            let result = hasher.finalize();
-            Ok(format!("{:x}", result))
+            cache_key.serialize(&mut serde_json::Serializer::new(&mut hasher))?;
+            Ok(format!("{:x}", hasher.finalize()))
         } else {
             Err(CacheKeyError::NoCacheKeyAvailable)
         }
@@ -122,7 +118,7 @@ impl Output {
         let cache_prefix = cache.prefix;
 
         let mut paths_created = HashSet::new();
-        for (file, has_prefix) in &cache.prefix_files {
+        for file in &cache.prefix_files {
             tracing::info!("Restoring from cache: {:?}", file);
             let dest = self.prefix().join(file);
             let source = &cache_dir.join("prefix").join(file);
             copy_file(source, &dest, &mut paths_created, &copy_options).into_diagnostic()?;
 
             // check if the symlink starts with the old prefix, and if yes, make the symlink
             // absolute with the new prefix
             if source.is_symlink() {
                 let symlink_target = fs::read_link(source).into_diagnostic()?;
                 if let Ok(rest) = symlink_target.strip_prefix(&cache_prefix) {
                     let new_symlink_target = self.prefix().join(rest);
                     fs::remove_file(&dest).into_diagnostic()?;
                     create_symlink(&new_symlink_target, &dest).into_diagnostic()?;
                 }
             }
-
-            if *has_prefix {
-                replace_prefix(&dest, &cache_prefix, self.prefix())?;
-            }
         }
@@ -274,21 +266,7 @@ impl Output {
                 .expect("File should be in prefix");
             let dest = &prefix_cache_dir.join(stripped);
             copy_file(file, dest, &mut creation_cache, &copy_options).into_diagnostic()?;
-
-            // Defend against broken symlinks here!
-            if !file.is_symlink() {
-                // check if the file contains the prefix
-                let content_type = content_type(file).into_diagnostic()?;
-                let has_prefix = if content_type.map(|c| c.is_text()).unwrap_or(false) {
-                    contains_prefix_text(file, self.prefix(), self.target_platform())
-                } else {
-                    contains_prefix_binary(file, self.prefix())
-                }
-                .into_diagnostic()?;
-                copied_files.push((stripped.to_path_buf(), has_prefix));
-            } else {
-                copied_files.push((stripped.to_path_buf(), false));
-            }
+            copied_files.push(stripped.to_path_buf());
         }
 
         // We also need to copy the work dir files to the cache
@@ -254,10 +281,10 @@ impl Output {
         } else {
             Ok(self)
         }
     }
 }
-
-/// Simple replace prefix function that does a direct replacement without any
-/// padding considerations because we know that the prefix is the same length as
-/// the original prefix.
-fn replace_prefix(file: &Path, old_prefix: &Path, new_prefix: &Path) -> Result<(), miette::Error> {
-    // mmap the file, and use the fast string search to find the prefix
-    let output = {
-        let map_file = fs::File::open(file).into_diagnostic()?;
-        let mmap = unsafe { Mmap::map(&map_file).into_diagnostic()? };
-        let new_prefix_bytes = new_prefix.as_os_str().as_encoded_bytes();
-        let old_prefix_bytes = old_prefix.as_os_str().as_encoded_bytes();
-
-        // if the prefix is the same, we don't need to do anything
-        if old_prefix == new_prefix {
-            return Ok(());
-        }
-
-        assert_eq!(
-            new_prefix_bytes.len(),
-            old_prefix_bytes.len(),
-            "Prefixes must have the same length: {:?} != {:?}",
-            new_prefix,
-            old_prefix
-        );
-
-        let mut output = Vec::with_capacity(mmap.len());
-        let mut last_match_end = 0;
-        let finder = memmem::Finder::new(old_prefix_bytes);
-
-        while let Some(index) = finder.find(&mmap[last_match_end..]) {
-            let absolute_index = last_match_end + index;
-            output.extend_from_slice(&mmap[last_match_end..absolute_index]);
-            output.extend_from_slice(new_prefix_bytes);
-            last_match_end = absolute_index + new_prefix_bytes.len();
-        }
-        output.extend_from_slice(&mmap[last_match_end..]);
-        output
-        // close file & mmap at end of scope
-    };
-
-    // overwrite the file
-    fs::write(file, output).into_diagnostic()
-}
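The new cache-key code relies on `Sha256` implementing `std::io::Write` (sha2's digests do, with the default `std` feature), so serde_json can stream the serialized key tuple straight into the hasher without materializing an intermediate `String`. A standalone sketch of the same idea:

    use serde::Serialize;
    use sha2::{Digest, Sha256};

    // Serialize any key directly into the hasher and return the hex digest.
    fn hash_key<T: Serialize>(key: &T) -> String {
        let mut hasher = Sha256::new();
        key.serialize(&mut serde_json::Serializer::new(&mut hasher))
            .expect("key is serializable");
        format!("{:x}", hasher.finalize())
    }

Including `self.prefix()` in the hashed tuple is what makes the simplification safe: a cache built under one prefix can never be keyed the same as a build under another, so the removed prefix-rewriting machinery is no longer needed.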
From 0602a72b9a0f76e2f4be02e1f95468017847b044 Mon Sep 17 00:00:00 2001
From: Wolf Vollprecht
Date: Sun, 1 Dec 2024 08:21:31 +0100
Subject: [PATCH 09/16] simplify code

---
 src/cache.rs        | 47 +++++++++++++++++----------------------------
 src/opt.rs          | 12 ++++++++----
 src/system_tools.rs |  3 ++-
 3 files changed, 28 insertions(+), 34 deletions(-)

diff --git a/src/cache.rs b/src/cache.rs
index b5acb9a2..fca05a5c 100644
--- a/src/cache.rs
+++ b/src/cache.rs
@@ -13,12 +13,12 @@ use crate::{
     env_vars,
     metadata::{build_reindexed_channels, Output},
     packaging::Files,
-    recipe::parser::{Dependency, Requirements},
+    recipe::parser::{Dependency, Requirements, Source},
     render::resolved_dependencies::{
         install_environments, resolve_dependencies, FinalizedDependencies,
     },
     source::{
-        copy_dir::{copy_file, create_symlink, CopyDir, CopyOptions},
+        copy_dir::{copy_file, CopyDir, CopyOptions},
         fetch_sources,
     },
 };
@@ -44,6 +44,9 @@ pub struct Cache {
     /// The finalized dependencies
     pub finalized_dependencies: FinalizedDependencies,
 
+    /// The finalized sources
+    pub finalized_sources: Vec<Source>,
+
     /// The prefix files that are included in the cache
     pub prefix_files: Vec<PathBuf>,
@@ -111,44 +114,29 @@ impl Output {
         cache: Cache,
         cache_dir: PathBuf,
     ) -> Result<Output, miette::Error> {
-        let copy_options = CopyOptions {
-            skip_exist: true,
-            ..Default::default()
-        };
-        let cache_prefix = cache.prefix;
-
-        let mut paths_created = HashSet::new();
-        for file in &cache.prefix_files {
-            tracing::info!("Restoring from cache: {:?}", file);
-            let dest = self.prefix().join(file);
-            let source = &cache_dir.join("prefix").join(file);
-            copy_file(source, &dest, &mut paths_created, &copy_options).into_diagnostic()?;
-
-            // check if the symlink starts with the old prefix, and if yes, make the symlink
-            // absolute with the new prefix
-            if source.is_symlink() {
-                let symlink_target = fs::read_link(source).into_diagnostic()?;
-                if let Ok(rest) = symlink_target.strip_prefix(&cache_prefix) {
-                    let new_symlink_target = self.prefix().join(rest);
-                    fs::remove_file(&dest).into_diagnostic()?;
-                    create_symlink(&new_symlink_target, &dest).into_diagnostic()?;
-                }
-            }
-        }
+        let cache_prefix_dir = cache_dir.join("prefix");
+        let copied_prefix = CopyDir::new(&cache_prefix_dir, self.prefix())
+            .run()
+            .into_diagnostic()?;
 
         // restore the work dir files
         let cache_dir_work = cache_dir.join("work_dir");
-        CopyDir::new(
+        let copied_cache = CopyDir::new(
             &cache_dir_work,
             &self.build_configuration.directories.work_dir,
         )
         .run()
         .into_diagnostic()?;
 
-        tracing::info!("Restored source files from cache");
+        let combined_files = copied_prefix.copied_paths().len() + copied_cache.copied_paths().len();
+        tracing::info!(
+            "Restored {} source and prefix files from cache",
+            combined_files
+        );
 
         Ok(Output {
             finalized_cache_dependencies: Some(cache.finalized_dependencies.clone()),
+            finalized_cache_sources: Some(cache.finalized_sources.clone()),
             ..self.clone()
         })
     }
@@ -153,7 +141,7 @@ impl Output {
             let span = tracing::info_span!("Running cache build");
             let _enter = span.enter();
 
-            tracing::info!("Cache key: {:?}", self.cache_key().into_diagnostic()?);
+            tracing::info!("using cache key: {:?}", self.cache_key().into_diagnostic()?);
             let cache_key = format!("bld_{}", self.cache_key().into_diagnostic()?);
 
             let cache_dir = self
@@ -269,6 +257,7 @@ impl Output {
         let cache = Cache {
             requirements: cache.requirements.clone(),
             finalized_dependencies: finalized_dependencies.clone(),
+            finalized_sources: rendered_sources.clone(),
             prefix_files: copied_files,
             work_dir_files: work_dir_files.copied_paths().to_vec(),
             prefix: self.prefix().to_path_buf(),
         };
diff --git a/src/opt.rs b/src/opt.rs
index b7ac2b5b..8d6161bc 100644
--- a/src/opt.rs
+++ b/src/opt.rs
@@ -114,7 +114,7 @@ pub fn get_rattler_build_version() -> &'static str {
     env!("CARGO_PKG_VERSION")
 }
 
-#[allow(missing_docs)]
+/// The main application.
 #[derive(Parser)]
 #[clap(version = crate_version!())]
 pub struct App {
@@ -494,20 +494,24 @@ pub struct UploadOpts {
     pub common: CommonOpts,
 }
 
-/// Server type.
+/// The server type to upload to.
 #[derive(Clone, Debug, PartialEq, Parser)]
-#[allow(missing_docs)]
 pub enum ServerType {
+    /// A Quetz server
     Quetz(QuetzOpts),
+    /// An Artifactory server
     Artifactory(ArtifactoryOpts),
+    /// A prefix.dev server
     Prefix(PrefixOpts),
+    /// An anaconda.org server
    Anaconda(AnacondaOpts),
+    /// A conda-forge server
     #[clap(hide = true)]
     CondaForge(CondaForgeOpts),
 }
 
 #[derive(Clone, Debug, PartialEq, Parser)]
-/// Upload to aQuetz server.
+/// Upload to a Quetz server.
 /// Authentication is used from the keychain / auth-file.
 pub struct QuetzOpts {
     /// The URL to your Quetz server
     #[arg(short, long, env = "QUETZ_SERVER_URL")]
diff --git a/src/system_tools.rs b/src/system_tools.rs
index 8842404e..97e51151 100644
--- a/src/system_tools.rs
+++ b/src/system_tools.rs
@@ -11,9 +11,10 @@ use std::{
 };
 use thiserror::Error;
 
+/// Errors that can occur when working with system tools
 #[derive(Error, Debug)]
-#[allow(missing_docs)]
 pub enum ToolError {
+    /// The tool was not found on the system
     #[error("failed to find `{0}` ({1})")]
     ToolNotFound(Tool, which::Error),
 }
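`CopyDir` is rattler-build's internal helper; conceptually the simplified restore is now two recursive tree copies plus a count of what was actually copied. As a rough, std-only stand-in for that "copy and report" idea (not the real `CopyDir` API):

    use std::{fs, io, path::Path};

    // Copy a directory tree and return how many files were copied.
    fn copy_tree(src: &Path, dst: &Path) -> io::Result<usize> {
        let mut copied = 0;
        fs::create_dir_all(dst)?;
        for entry in fs::read_dir(src)? {
            let entry = entry?;
            let target = dst.join(entry.file_name());
            if entry.file_type()?.is_dir() {
                copied += copy_tree(&entry.path(), &target)?;
            } else {
                fs::copy(entry.path(), &target)?;
                copied += 1;
            }
        }
        Ok(copied)
    }

The real helper additionally tracks the copied relative paths (`copied_paths()`), which is what gets persisted as `work_dir_files` in the manifest.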
cache"); + let combined_files = copied_prefix.copied_paths().len() + copied_cache.copied_paths().len(); + tracing::info!( + "Restored {} source and prefix files from cache", + combined_files + ); Ok(Output { finalized_cache_dependencies: Some(cache.finalized_dependencies.clone()), + finalized_cache_sources: Some(cache.finalized_sources.clone()), ..self.clone() }) } @@ -165,7 +153,7 @@ impl Output { let span = tracing::info_span!("Running cache build"); let _enter = span.enter(); - tracing::info!("Cache key: {:?}", self.cache_key().into_diagnostic()?); + tracing::info!("using cache key: {:?}", self.cache_key().into_diagnostic()?); let cache_key = format!("bld_{}", self.cache_key().into_diagnostic()?); let cache_dir = self @@ -281,6 +269,7 @@ impl Output { let cache = Cache { requirements: cache.requirements.clone(), finalized_dependencies: finalized_dependencies.clone(), + finalized_sources: rendered_sources.clone(), prefix_files: copied_files, work_dir_files: work_dir_files.copied_paths().to_vec(), prefix: self.prefix().to_path_buf(), diff --git a/src/opt.rs b/src/opt.rs index b7ac2b5b..8d6161bc 100644 --- a/src/opt.rs +++ b/src/opt.rs @@ -114,7 +114,7 @@ pub fn get_rattler_build_version() -> &'static str { env!("CARGO_PKG_VERSION") } -#[allow(missing_docs)] +/// The main application. #[derive(Parser)] #[clap(version = crate_version!())] pub struct App { @@ -494,20 +494,24 @@ pub struct UploadOpts { pub common: CommonOpts, } -/// Server type. +/// The server type to upload to. #[derive(Clone, Debug, PartialEq, Parser)] -#[allow(missing_docs)] pub enum ServerType { + /// A Quetz server Quetz(QuetzOpts), + /// An Artifactory server Artifactory(ArtifactoryOpts), + /// A prefix.dev server Prefix(PrefixOpts), + /// An anaconda.org server Anaconda(AnacondaOpts), + /// A conda-forge server #[clap(hide = true)] CondaForge(CondaForgeOpts), } #[derive(Clone, Debug, PartialEq, Parser)] -/// Upload to aQuetz server. +/// Upload to a Quetz server. /// Authentication is used from the keychain / auth-file. 
From 48aff40d43fdf7d27ab666dda153d1a2b986679a Mon Sep 17 00:00:00 2001
From: Wolf Vollprecht
Date: Sun, 1 Dec 2024 08:57:57 +0100
Subject: [PATCH 11/16] add a test

---
 py-rattler-build/rattler_build/__init__.py    |  3 ++-
 test-data/recipes/cache/recipe-symlinks.yaml  |  1 +
 .../test_symlinks/test_symlink_cache.json     |  7 +++++++
 test/end-to-end/test_symlinks.py              | 16 ++++++++++++++--
 4 files changed, 24 insertions(+), 3 deletions(-)

diff --git a/py-rattler-build/rattler_build/__init__.py b/py-rattler-build/rattler_build/__init__.py
index ec70e0e0..c4062e10 100644
--- a/py-rattler-build/rattler_build/__init__.py
+++ b/py-rattler-build/rattler_build/__init__.py
@@ -1,4 +1,5 @@
-from .rattler_build import get_rattler_build_version_py as _get_rattler_build_version_py
+from .rattler_build import \
+    get_rattler_build_version_py as _get_rattler_build_version_py
 
 
 def rattler_build_version() -> str:
diff --git a/test-data/recipes/cache/recipe-symlinks.yaml b/test-data/recipes/cache/recipe-symlinks.yaml
index 8c9ec28f..ee39622f 100644
--- a/test-data/recipes/cache/recipe-symlinks.yaml
+++ b/test-data/recipes/cache/recipe-symlinks.yaml
@@ -14,6 +14,7 @@ cache:
       ln -s $PREFIX/foo.txt $PREFIX/absolute-symlink.txt
       ln -s $PREFIX/non-existent-file $PREFIX/broken-symlink.txt
       ln -s ./foo.txt $PREFIX/relative-symlink.txt
+      echo ${{ PREFIX }} > $PREFIX/prefix.txt
 
 outputs:
   - package:
diff --git a/test/end-to-end/__snapshots__/test_symlinks/test_symlink_cache.json b/test/end-to-end/__snapshots__/test_symlinks/test_symlink_cache.json
index d2d735b8..9e062586 100644
--- a/test/end-to-end/__snapshots__/test_symlinks/test_symlink_cache.json
+++ b/test/end-to-end/__snapshots__/test_symlinks/test_symlink_cache.json
@@ -24,6 +24,13 @@
       "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
       "size_in_bytes": 0
     },
+    {
+      "_path": "prefix.txt",
+      "file_mode": "text",
+      "path_type": "hardlink",
+      "sha256": "1f7c780c5a35eeab2ffd8a7d38f9db34974e84ca569db0ff99af21159b5d02d6",
+      "size_in_bytes": 256
+    },
     {
       "_path": "relative-symlink.txt",
       "path_type": "softlink",
diff --git a/test/end-to-end/test_symlinks.py b/test/end-to-end/test_symlinks.py
index e269a37f..804445c8 100644
--- a/test/end-to-end/test_symlinks.py
+++ b/test/end-to-end/test_symlinks.py
@@ -4,6 +4,7 @@
 
 import pytest
 from helpers import RattlerBuild, get_extracted_package
+from syrupy.filters import paths as filter_paths
 
 
 @pytest.mark.skipif(
@@ -34,10 +35,11 @@ def test_symlink_cache(
 
     paths_json = pkg / "info/paths.json"
     j = json.loads(paths_json.read_text())
-    assert snapshot_json == j
+    # prefix placeholder always changes, and we test it later
+    assert snapshot_json(exclude=filter_paths("paths.4.prefix_placeholder")) == j
 
     paths = j["paths"]
-    assert len(paths) == 5
+    assert len(paths) == 6
     for p in paths:
         if "symlink" in p["_path"]:
             assert p["path_type"] == "softlink"
@@ -61,3 +63,13 @@ def test_symlink_cache(
     relative_symlink = pkg / "bin/exe-symlink"
     assert relative_symlink.is_symlink()
     assert relative_symlink.readlink() == Path("exe")
+
+    prefix_txt = pkg / "prefix.txt"
+    assert prefix_txt.exists()
+    contents = prefix_txt.read_text()
+    assert contents.len() > 0
+    # find the path in paths.json for the prefix.txt
+    for p in paths:
+        if p["_path"] == "prefix.txt":
+            assert p["path_type"] == "hardlink"
+            assert p["prefix_placeholder"] == contents.strip()
From a472f745713232ae554146aba50746dc1fbbb5cc Mon Sep 17 00:00:00 2001
From: Wolf Vollprecht
Date: Sun, 1 Dec 2024 09:12:41 +0100
Subject: [PATCH 12/16] update snapshots

---
 .../rattler_build__metadata__test__curl_recipe.yaml.snap          | 1 -
 .../rattler_build__metadata__test__read_recipe_with_sources.snap  | 1 -
 .../rattler_build__metadata__test__rich_recipe.yaml.snap          | 1 -
 3 files changed, 3 deletions(-)

diff --git a/src/snapshots/rattler_build__metadata__test__curl_recipe.yaml.snap b/src/snapshots/rattler_build__metadata__test__curl_recipe.yaml.snap
index 6dc4e031..305fdce8 100644
--- a/src/snapshots/rattler_build__metadata__test__curl_recipe.yaml.snap
+++ b/src/snapshots/rattler_build__metadata__test__curl_recipe.yaml.snap
@@ -640,7 +640,6 @@ finalized_dependencies:
   run:
     depends: []
     constraints: []
-finalized_cache_dependencies: ~
 finalized_sources: ~
 system_tools:
   rattler-build: 0.12.1
diff --git a/src/snapshots/rattler_build__metadata__test__read_recipe_with_sources.snap b/src/snapshots/rattler_build__metadata__test__read_recipe_with_sources.snap
index 00c23e8a..f8f59145 100644
--- a/src/snapshots/rattler_build__metadata__test__read_recipe_with_sources.snap
+++ b/src/snapshots/rattler_build__metadata__test__read_recipe_with_sources.snap
@@ -62,7 +62,6 @@ finalized_dependencies:
   run:
     depends: []
    constraints: []
-finalized_cache_dependencies: ~
 finalized_sources:
   - git: "https://github.com/prefix-dev/rattler-build"
     rev: 4ae7bd207b437c3a5955788e6516339a84358e1c
diff --git a/src/snapshots/rattler_build__metadata__test__rich_recipe.yaml.snap b/src/snapshots/rattler_build__metadata__test__rich_recipe.yaml.snap
index 556a4602..9d541d92 100644
--- a/src/snapshots/rattler_build__metadata__test__rich_recipe.yaml.snap
+++ b/src/snapshots/rattler_build__metadata__test__rich_recipe.yaml.snap
@@ -383,7 +383,6 @@ finalized_dependencies:
       from: host
       run_export: python
     constraints: []
-finalized_cache_dependencies: ~
 finalized_sources: ~
 system_tools:
   rattler-build: 0.12.1
From 6cdc2c538c74609107d840bfbe5f419b6dc25f25 Mon Sep 17 00:00:00 2001
From: Wolf Vollprecht
Date: Sun, 1 Dec 2024 09:14:32 +0100
Subject: [PATCH 13/16] correct a typo, revert changes from before

---
 src/opt.rs | 12 ++++--------
 1 file changed, 4 insertions(+), 8 deletions(-)

diff --git a/src/opt.rs b/src/opt.rs
index 8d6161bc..9cd66e28 100644
--- a/src/opt.rs
+++ b/src/opt.rs
@@ -114,7 +114,7 @@ pub fn get_rattler_build_version() -> &'static str {
     env!("CARGO_PKG_VERSION")
 }
 
-/// The main application.
+#[allow(missing_docs)]
 #[derive(Parser)]
 #[clap(version = crate_version!())]
 pub struct App {
@@ -494,25 +494,21 @@ pub struct UploadOpts {
     pub common: CommonOpts,
 }
 
-/// The server type to upload to.
+/// Server type.
 #[derive(Clone, Debug, PartialEq, Parser)]
+#[allow(missing_docs)]
 pub enum ServerType {
-    /// A Quetz server
     Quetz(QuetzOpts),
-    /// An Artifactory server
     Artifactory(ArtifactoryOpts),
-    /// A prefix.dev server
     Prefix(PrefixOpts),
-    /// An anaconda.org server
     Anaconda(AnacondaOpts),
-    /// A conda-forge server
     #[clap(hide = true)]
     CondaForge(CondaForgeOpts),
 }
 
-#[derive(Clone, Debug, PartialEq, Parser)]
 /// Upload to a Quetz server.
 /// Authentication is used from the keychain / auth-file.
+#[derive(Clone, Debug, PartialEq, Parser)]
 pub struct QuetzOpts {
     /// The URL to your Quetz server
     #[arg(short, long, env = "QUETZ_SERVER_URL")]
From bd3f0d7b16de0af777bccb89deb4296c77c07ce9 Mon Sep 17 00:00:00 2001
From: Wolf Vollprecht
Date: Sun, 1 Dec 2024 10:44:54 +0100
Subject: [PATCH 14/16] ignore sha

---
 .../__snapshots__/test_symlinks/test_symlink_cache.json | 1 -
 test/end-to-end/test_symlinks.py                        | 4 ++--
 2 files changed, 2 insertions(+), 3 deletions(-)

diff --git a/test/end-to-end/__snapshots__/test_symlinks/test_symlink_cache.json b/test/end-to-end/__snapshots__/test_symlinks/test_symlink_cache.json
index 9e062586..7dc0a0fc 100644
--- a/test/end-to-end/__snapshots__/test_symlinks/test_symlink_cache.json
+++ b/test/end-to-end/__snapshots__/test_symlinks/test_symlink_cache.json
@@ -28,7 +28,6 @@
       "_path": "prefix.txt",
       "file_mode": "text",
       "path_type": "hardlink",
-      "sha256": "1f7c780c5a35eeab2ffd8a7d38f9db34974e84ca569db0ff99af21159b5d02d6",
       "size_in_bytes": 256
     },
     {
diff --git a/test/end-to-end/test_symlinks.py b/test/end-to-end/test_symlinks.py
index 804445c8..537dadd8 100644
--- a/test/end-to-end/test_symlinks.py
+++ b/test/end-to-end/test_symlinks.py
@@ -36,7 +36,7 @@ def test_symlink_cache(
     paths_json = pkg / "info/paths.json"
     j = json.loads(paths_json.read_text())
     # prefix placeholder always changes, and we test it later
-    assert snapshot_json(exclude=filter_paths("paths.4.prefix_placeholder")) == j
+    assert snapshot_json(exclude=filter_paths("paths.4.prefix_placeholder", "paths.4.sha256")) == j
 
     paths = j["paths"]
     assert len(paths) == 6
@@ -67,7 +67,7 @@ def test_symlink_cache(
     prefix_txt = pkg / "prefix.txt"
     assert prefix_txt.exists()
     contents = prefix_txt.read_text()
-    assert contents.len() > 0
+    assert len(contents) > 0
     # find the path in paths.json for the prefix.txt
     for p in paths:
         if p["_path"] == "prefix.txt":
From 7c568ef8c496213eb3943beeabe545e80d87e46f Mon Sep 17 00:00:00 2001
From: Wolf Vollprecht
Date: Sun, 1 Dec 2024 10:54:26 +0100
Subject: [PATCH 15/16] use ruff / ruff-format

---
 py-rattler-build/rattler_build/__init__.py | 3 +--
 test/end-to-end/test_symlinks.py           | 7 ++++++-
 2 files changed, 7 insertions(+), 3 deletions(-)

diff --git a/py-rattler-build/rattler_build/__init__.py b/py-rattler-build/rattler_build/__init__.py
index c4062e10..ec70e0e0 100644
--- a/py-rattler-build/rattler_build/__init__.py
+++ b/py-rattler-build/rattler_build/__init__.py
@@ -1,5 +1,4 @@
-from .rattler_build import \
-    get_rattler_build_version_py as _get_rattler_build_version_py
+from .rattler_build import get_rattler_build_version_py as _get_rattler_build_version_py
 
 
 def rattler_build_version() -> str:
diff --git a/test/end-to-end/test_symlinks.py b/test/end-to-end/test_symlinks.py
index 537dadd8..bec38229 100644
--- a/test/end-to-end/test_symlinks.py
+++ b/test/end-to-end/test_symlinks.py
@@ -36,7 +36,12 @@ def test_symlink_cache(
     paths_json = pkg / "info/paths.json"
     j = json.loads(paths_json.read_text())
     # prefix placeholder always changes, and we test it later
-    assert snapshot_json(exclude=filter_paths("paths.4.prefix_placeholder", "paths.4.sha256")) == j
+    assert (
+        snapshot_json(
+            exclude=filter_paths("paths.4.prefix_placeholder", "paths.4.sha256")
+        )
+        == j
+    )
 
     paths = j["paths"]
     assert len(paths) == 6
From 347be2a4e99c3669c738152430926a528438a967 Mon Sep 17 00:00:00 2001
From: Wolf Vollprecht
Date: Wed, 4 Dec 2024 13:08:17 +0100
Subject: [PATCH 16/16] add warning about moving the source

---
 src/recipe/parser/output.rs | 28 ++++++++++++++++++++++++++++
 1 file changed, 28
 insertions(+)

diff --git a/src/recipe/parser/output.rs b/src/recipe/parser/output.rs
index cceb9066..4f517eaa 100644
--- a/src/recipe/parser/output.rs
+++ b/src/recipe/parser/output.rs
@@ -4,6 +4,8 @@
 //! each mapping can have its own `package`, `source`, `build`, `requirements`,
 //! `test`, and `about` fields.
 
+use marked_yaml::types::MarkedMappingNode;
+
 use crate::{
     _partialerror,
     recipe::{
@@ -18,6 +20,30 @@ static ALLOWED_KEYS_MULTI_OUTPUTS: [&str; 8] = [
     "context", "recipe", "source", "build", "outputs", "about", "extra", "cache",
 ];
 
+// Check if the `cache` top-level key is present. If it does not contain a source, but there is a
+// top-level `source` key, then we should warn the user because this key was moved to the `cache`
+fn check_src_cache(root: &MarkedMappingNode) -> Result<(), ParsingError> {
+    if let Some(cache) = root.get("cache") {
+        let has_top_level_source = root.contains_key("source");
+        let cache_map = cache.as_mapping().ok_or_else(|| {
+            ParsingError::from_partial(
+                "",
+                _partialerror!(
+                    *cache.span(),
+                    ErrorKind::ExpectedMapping,
+                    help = "`cache` must always be a mapping"
+                ),
+            )
+        })?;
+
+        if !cache_map.contains_key("source") && has_top_level_source {
+            tracing::warn!("The cache has its own `source` key now. You probably want to move the top-level `source` key into the `cache` key.");
+        }
+    }
+
+    Ok(())
+}
+
 /// Retrieve all outputs from the recipe source (YAML)
 pub fn find_outputs_from_src(src: &str) -> Result<Vec<Node>, ParsingError> {
     let root_node = parse_yaml(0, src)?;
@@ -32,6 +58,8 @@
         )
     })?;
 
+    check_src_cache(root_map)?;
+
     if root_map.contains_key("outputs") {
         if root_map.contains_key("package") {
             let key = root_map