diff --git a/src/build.rs b/src/build.rs
index da5e32fc4..4bfd3a20a 100644
--- a/src/build.rs
+++ b/src/build.rs
@@ -114,12 +114,14 @@ pub async fn run_build(
     let directories = output.build_configuration.directories.clone();
 
-    let output = output
-        .fetch_sources(tool_configuration)
-        .await
-        .into_diagnostic()?;
-
-    let output = output.build_or_fetch_cache(tool_configuration).await?;
+    let output = if output.recipe.cache.is_some() {
+        output.build_or_fetch_cache(tool_configuration).await?
+    } else {
+        output
+            .fetch_sources(tool_configuration)
+            .await
+            .into_diagnostic()?
+    };
 
     let output = output
         .resolve_dependencies(tool_configuration)
diff --git a/src/cache.rs b/src/cache.rs
index cdaebb174..fc2491ead 100644
--- a/src/cache.rs
+++ b/src/cache.rs
@@ -1,25 +1,30 @@
 //! Functions to deal with the build cache
 use std::{
     collections::{BTreeMap, HashSet},
-    path::{Path, PathBuf},
+    path::PathBuf,
 };
 
 use fs_err as fs;
-use memchr::memmem;
-use memmap2::Mmap;
 use miette::{Context, IntoDiagnostic};
+use minijinja::Value;
 use serde::{Deserialize, Serialize};
 use sha2::{Digest, Sha256};
 
 use crate::{
     env_vars,
     metadata::{build_reindexed_channels, Output},
-    packaging::{contains_prefix_binary, contains_prefix_text, content_type, Files},
-    recipe::parser::{Dependency, Requirements},
+    packaging::Files,
+    recipe::{
+        parser::{Dependency, Requirements, Source},
+        Jinja,
+    },
     render::resolved_dependencies::{
         install_environments, resolve_dependencies, FinalizedDependencies,
     },
-    source::copy_dir::{copy_file, create_symlink, CopyOptions},
+    source::{
+        copy_dir::{copy_file, CopyDir, CopyOptions},
+        fetch_sources,
+    },
 };
 
 /// Error type for cache key generation
@@ -39,10 +44,19 @@ pub enum CacheKeyError {
 pub struct Cache {
     /// The requirements that were used to build the cache
     pub requirements: Requirements,
+    /// The finalized dependencies
     pub finalized_dependencies: FinalizedDependencies,
+
+    /// The finalized sources
+    pub finalized_sources: Vec<Source>,
+
     /// The prefix files that are included in the cache
-    pub prefix_files: Vec<(PathBuf, bool)>,
+    pub prefix_files: Vec<PathBuf>,
+
+    /// The (dirty) source files that are included in the cache
+    pub work_dir_files: Vec<PathBuf>,
+
     /// The prefix that was used at build time (needs to be replaced when
     /// restoring the files)
     pub prefix: PathBuf,
@@ -88,74 +102,62 @@ impl Output {
                 self.build_configuration.build_platform.platform.to_string(),
             );
 
-            let cache_key = (cache, selected_variant);
+            let cache_key = (cache, selected_variant, self.prefix());
             // serialize to json and hash
             let mut hasher = Sha256::new();
-            let serialized = serde_json::to_string(&cache_key)?;
-            hasher.update(serialized.as_bytes());
-            let result = hasher.finalize();
-            Ok(format!("{:x}", result))
+            cache_key.serialize(&mut serde_json::Serializer::new(&mut hasher))?;
+            Ok(format!("{:x}", hasher.finalize()))
         } else {
             Err(CacheKeyError::NoCacheKeyAvailable)
         }
     }
 
     /// Restore an existing cache from a cache directory
-    async fn restore_cache(&self, cache_dir: PathBuf) -> Result<Output, miette::Error> {
-        let cache: Cache = serde_json::from_str(
-            &fs::read_to_string(cache_dir.join("cache.json")).into_diagnostic()?,
+    async fn restore_cache(
+        &self,
+        cache: Cache,
+        cache_dir: PathBuf,
+    ) -> Result<Output, miette::Error> {
+        let cache_prefix_dir = cache_dir.join("prefix");
+        let copied_prefix = CopyDir::new(&cache_prefix_dir, self.prefix())
+            .run()
+            .into_diagnostic()?;
+
+        // restore the work dir files
+        let cache_dir_work = cache_dir.join("work_dir");
+        let copied_cache = CopyDir::new(
+            &cache_dir_work,
+            &self.build_configuration.directories.work_dir,
         )
+        .run()
         .into_diagnostic()?;
 
-        let copy_options = CopyOptions {
-            skip_exist: true,
-            ..Default::default()
-        };
-        let cache_prefix = cache.prefix;
-
-        let mut paths_created = HashSet::new();
-        for (file, has_prefix) in &cache.prefix_files {
-            tracing::info!("Restoring from cache: {:?}", file);
-            let dest = self.prefix().join(file);
-            let source = &cache_dir.join("prefix").join(file);
-            copy_file(source, &dest, &mut paths_created, &copy_options).into_diagnostic()?;
-
-            // check if the symlink starts with the old prefix, and if yes, make the symlink
-            // absolute with the new prefix
-            if source.is_symlink() {
-                let symlink_target = fs::read_link(source).into_diagnostic()?;
-                if let Ok(rest) = symlink_target.strip_prefix(&cache_prefix) {
-                    let new_symlink_target = self.prefix().join(rest);
-                    fs::remove_file(&dest).into_diagnostic()?;
-                    create_symlink(&new_symlink_target, &dest).into_diagnostic()?;
-                }
-            }
-            if *has_prefix {
-                replace_prefix(&dest, &cache_prefix, self.prefix())?;
-            }
-        }
+        let combined_files = copied_prefix.copied_paths().len() + copied_cache.copied_paths().len();
+        tracing::info!(
+            "Restored {} source and prefix files from cache",
+            combined_files
+        );
 
         Ok(Output {
             finalized_cache_dependencies: Some(cache.finalized_dependencies.clone()),
+            finalized_cache_sources: Some(cache.finalized_sources.clone()),
             ..self.clone()
         })
     }
 
+    /// This will fetch sources and build the cache if it doesn't exist
+    /// Note: this modifies the output in place
     pub(crate) async fn build_or_fetch_cache(
-        &self,
+        mut self,
         tool_configuration: &crate::tool_configuration::Configuration,
     ) -> Result<Output, miette::Error> {
-        // if we don't have a cache, we need to run the cache build with our current
-        // workdir, and then return the cache
-        let span = tracing::info_span!("Running cache build");
-        let _enter = span.enter();
-
-        let target_platform = self.build_configuration.target_platform;
-        let mut env_vars = env_vars::vars(self, "BUILD");
-        env_vars.extend(env_vars::os_vars(self.prefix(), &target_platform));
+        if let Some(cache) = self.recipe.cache.clone() {
+            // if we don't have a cache, we need to run the cache build with our current
+            // workdir, and then return the cache
+            let span = tracing::info_span!("Running cache build");
+            let _enter = span.enter();
 
-        if let Some(cache) = &self.recipe.cache {
-            tracing::info!("Cache key: {:?}", self.cache_key().into_diagnostic()?);
+            tracing::info!("using cache key: {:?}", self.cache_key().into_diagnostic()?);
 
             let cache_key = format!("bld_{}", self.cache_key().into_diagnostic()?);
 
             let cache_dir = self
@@ -166,24 +168,66 @@ impl Output {
 
             // restore the cache if it exists by copying the files to the prefix
             if cache_dir.exists() {
-                tracing::info!("Restoring cache from {:?}", cache_dir);
-                return self.restore_cache(cache_dir).await;
+                let text = fs::read_to_string(cache_dir.join("cache.json")).into_diagnostic()?;
+                match serde_json::from_str::<Cache>(&text) {
+                    Ok(cache) => {
+                        tracing::info!("Restoring cache from {:?}", cache_dir);
+                        self = self
+                            .fetch_sources(tool_configuration)
+                            .await
+                            .into_diagnostic()?;
+                        return self.restore_cache(cache, cache_dir).await;
+                    }
+                    Err(e) => {
+                        tracing::error!(
+                            "Failed to parse cache at {}: {:?} - rebuilding",
+                            cache_dir.join("cache.json").display(),
+                            e
+                        );
+                        // remove the cache dir and run as normal
+                        fs::remove_dir_all(&cache_dir).into_diagnostic()?;
+                    }
+                }
             }
 
+            // fetch the sources for the `cache` section
+            let rendered_sources = fetch_sources(
+                self.finalized_cache_sources
+                    .as_ref()
+                    .unwrap_or(&cache.source),
+                &self.build_configuration.directories,
+                &self.system_tools,
+                tool_configuration,
+            )
+            .await
+            .into_diagnostic()?;
+
+            let target_platform = self.build_configuration.target_platform;
+            let mut env_vars = env_vars::vars(&self, "BUILD");
+            env_vars.extend(env_vars::os_vars(self.prefix(), &target_platform));
+
             // Reindex the channels
             let channels = build_reindexed_channels(&self.build_configuration, tool_configuration)
                 .into_diagnostic()
                 .context("failed to reindex output channel")?;
 
             let finalized_dependencies =
-                resolve_dependencies(&cache.requirements, self, &channels, tool_configuration)
+                resolve_dependencies(&cache.requirements, &self, &channels, tool_configuration)
                     .await
                     .unwrap();
 
-            install_environments(self, &finalized_dependencies, tool_configuration)
+            install_environments(&self, &finalized_dependencies, tool_configuration)
                 .await
                 .into_diagnostic()?;
 
+            let selector_config = self.build_configuration.selector_config();
+            let mut jinja = Jinja::new(selector_config.clone());
+            for (k, v) in self.recipe.context.iter() {
+                jinja
+                    .context_mut()
+                    .insert(k.clone(), Value::from_safe_string(v.clone()));
+            }
+
             cache
                 .build
                 .script()
@@ -193,7 +237,7 @@ impl Output {
                     &self.build_configuration.directories.recipe_dir,
                     &self.build_configuration.directories.host_prefix,
                     Some(&self.build_configuration.directories.build_prefix),
-                    None, // TODO fix this to be proper Jinja context
+                    Some(jinja),
                 )
                 .await
                 .into_diagnostic()?;
@@ -213,6 +257,7 @@ impl Output {
             let mut creation_cache = HashSet::new();
             let mut copied_files = Vec::new();
             let copy_options = CopyOptions::default();
+
             for file in &new_files.new_files {
                 // skip directories (if they are not a symlink)
                 // directories are implicitly created by the files
@@ -224,28 +269,24 @@ impl Output {
                     .expect("File should be in prefix");
                 let dest = &prefix_cache_dir.join(stripped);
                 copy_file(file, dest, &mut creation_cache, &copy_options).into_diagnostic()?;
-
-                // Defend against broken symlinks here!
-                if !file.is_symlink() {
-                    // check if the file contains the prefix
-                    let content_type = content_type(file).into_diagnostic()?;
-                    let has_prefix = if content_type.map(|c| c.is_text()).unwrap_or(false) {
-                        contains_prefix_text(file, self.prefix(), self.target_platform())
-                    } else {
-                        contains_prefix_binary(file, self.prefix())
-                    }
-                    .into_diagnostic()?;
-                    copied_files.push((stripped.to_path_buf(), has_prefix));
-                } else {
-                    copied_files.push((stripped.to_path_buf(), false));
-                }
+                copied_files.push(stripped.to_path_buf());
             }
 
+            // We also need to copy the work dir files to the cache
+            let work_dir_files = CopyDir::new(
+                &self.build_configuration.directories.work_dir.clone(),
+                &cache_dir.join("work_dir"),
+            )
+            .run()
+            .into_diagnostic()?;
 
             // save the cache
             let cache = Cache {
                 requirements: cache.requirements.clone(),
                 finalized_dependencies: finalized_dependencies.clone(),
+                finalized_sources: rendered_sources.clone(),
                 prefix_files: copied_files,
+                work_dir_files: work_dir_files.copied_paths().to_vec(),
                 prefix: self.prefix().to_path_buf(),
             };
 
@@ -254,53 +295,11 @@ impl Output {
 
             Ok(Output {
                 finalized_cache_dependencies: Some(finalized_dependencies),
-                ..self.clone()
+                finalized_cache_sources: Some(rendered_sources),
+                ..self
             })
         } else {
-            Ok(self.clone())
+            Ok(self)
         }
     }
 }
-
-/// Simple replace prefix function that does a direct replacement without any
-/// padding considerations because we know that the prefix is the same length as
-/// the original prefix.
-fn replace_prefix(file: &Path, old_prefix: &Path, new_prefix: &Path) -> Result<(), miette::Error> {
-    // mmap the file, and use the fast string search to find the prefix
-    let output = {
-        let map_file = fs::File::open(file).into_diagnostic()?;
-        let mmap = unsafe { Mmap::map(&map_file).into_diagnostic()? };
-        let new_prefix_bytes = new_prefix.as_os_str().as_encoded_bytes();
-        let old_prefix_bytes = old_prefix.as_os_str().as_encoded_bytes();
-
-        // if the prefix is the same, we don't need to do anything
-        if old_prefix == new_prefix {
-            return Ok(());
-        }
-
-        assert_eq!(
-            new_prefix_bytes.len(),
-            old_prefix_bytes.len(),
-            "Prefixes must have the same length: {:?} != {:?}",
-            new_prefix,
-            old_prefix
-        );
-
-        let mut output = Vec::with_capacity(mmap.len());
-        let mut last_match_end = 0;
-        let finder = memmem::Finder::new(old_prefix_bytes);
-
-        while let Some(index) = finder.find(&mmap[last_match_end..]) {
-            let absolute_index = last_match_end + index;
-            output.extend_from_slice(&mmap[last_match_end..absolute_index]);
-            output.extend_from_slice(new_prefix_bytes);
-            last_match_end = absolute_index + new_prefix_bytes.len();
-        }
-        output.extend_from_slice(&mmap[last_match_end..]);
-        output
-        // close file & mmap at end of scope
-    };
-
-    // overwrite the file
-    fs::write(file, output).into_diagnostic()
-}
diff --git a/src/lib.rs b/src/lib.rs
index 42052880c..4d8972a32 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -164,8 +164,10 @@ pub async fn get_build_output(
     }
 
     let mut host_platform = args.host_platform;
+    // If target_platform is not set, we default to the host platform
     let target_platform = args.target_platform.unwrap_or(host_platform);
+
     // If target_platform is set and host_platform is not, then we default
     // host_platform to the target_platform
     if let Some(target_platform) = args.target_platform {
@@ -268,6 +270,12 @@ pub async fn get_build_output(
 
     let mut subpackages = BTreeMap::new();
     let mut outputs = Vec::new();
+
+    let global_build_name = outputs_and_variants
+        .first()
+        .map(|o| o.name.clone())
+        .unwrap_or_default();
+
     for discovered_output in outputs_and_variants {
         let hash =
             HashInfo::from_variant(&discovered_output.used_vars, &discovered_output.noarch_type);
@@ -313,9 +321,13 @@ pub async fn get_build_output(
             },
         );
 
-        let name = recipe.package().name().clone();
-        // Add the channels from the args and by default always conda-forge
+        let build_name = if recipe.cache.is_some() {
+            global_build_name.clone()
+        } else {
+            recipe.package().name().as_normalized().to_string()
+        };
+
+        // Add the channels from the args and by default always conda-forge
         let channels = args
            .channel
            .clone()
@@ -341,7 +353,7 @@ pub async fn get_build_output(
                 hash,
                 variant: discovered_output.used_vars.clone(),
                 directories: Directories::setup(
-                    name.as_normalized(),
+                    &build_name,
                     recipe_path,
                     &output_dir,
                     args.no_build_id,
@@ -361,8 +373,9 @@ pub async fn get_build_output(
                     force_colors: args.color_build_log && console::colors_enabled(),
                 },
                 finalized_dependencies: None,
-                finalized_cache_dependencies: None,
                 finalized_sources: None,
+                finalized_cache_dependencies: None,
+                finalized_cache_sources: None,
                 system_tools: SystemTools::new(),
                 build_summary: Arc::new(Mutex::new(BuildSummary::default())),
                 extra_meta: Some(
diff --git a/src/metadata.rs b/src/metadata.rs
index 799a10c10..1fce87d11 100644
--- a/src/metadata.rs
+++ b/src/metadata.rs
@@ -404,13 +404,18 @@ pub struct Output {
     /// dependencies have not been resolved yet. During the `run_build`
     /// functions, the dependencies are resolved and this field is filled.
     pub finalized_dependencies: Option<FinalizedDependencies>,
-    /// The finalized dependencies from the cache (if there is a cache
-    /// instruction)
-    pub finalized_cache_dependencies: Option<FinalizedDependencies>,
     /// The finalized sources for this output. Contain the exact git hashes for
     /// the sources that are used to build this output.
     pub finalized_sources: Option<Vec<Source>>,
+    /// The finalized dependencies from the cache (if there is a cache
+    /// instruction)
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub finalized_cache_dependencies: Option<FinalizedDependencies>,
+    /// The finalized sources from the cache (if there is a cache instruction)
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub finalized_cache_sources: Option<Vec<Source>>,
+
     /// Summary of the build
     #[serde(skip)]
     pub build_summary: Arc<Mutex<BuildSummary>>,
diff --git a/src/opt.rs b/src/opt.rs
index b7ac2b5b6..9cd66e285 100644
--- a/src/opt.rs
+++ b/src/opt.rs
@@ -506,9 +506,9 @@ pub enum ServerType {
     CondaForge(CondaForgeOpts),
 }
 
-#[derive(Clone, Debug, PartialEq, Parser)]
-/// Upload to aQuetz server.
+/// Upload to a Quetz server.
 /// Authentication is used from the keychain / auth-file.
+#[derive(Clone, Debug, PartialEq, Parser)]
 pub struct QuetzOpts {
     /// The URL to your Quetz server
     #[arg(short, long, env = "QUETZ_SERVER_URL")]
diff --git a/src/recipe/parser/cache.rs b/src/recipe/parser/cache.rs
index 7696bab0e..4da1f7c47 100644
--- a/src/recipe/parser/cache.rs
+++ b/src/recipe/parser/cache.rs
@@ -9,11 +9,14 @@ use crate::{
 };
 
 use serde::{Deserialize, Serialize};
-use super::{Build, Requirements};
+use super::{Build, Requirements, Source};
 
 /// A cache build that can be used to split up a build into multiple outputs
 #[derive(Debug, Default, Clone, Serialize, Deserialize)]
 pub struct Cache {
+    /// Sources that are used in the cache build and subsequent output builds
+    #[serde(default, skip_serializing_if = "Vec::is_empty")]
+    pub source: Vec<Source>,
     /// The build configuration for the cache
     pub build: Build,
     /// The requirements for building the cache
@@ -35,6 +38,7 @@ impl TryConvertNode<Cache> for RenderedMappingNode {
         validate_keys! {
             cache,
             self.iter(),
+            source,
             build,
             requirements
         };
diff --git a/src/recipe/parser/output.rs b/src/recipe/parser/output.rs
index cceb90665..4f517eaae 100644
--- a/src/recipe/parser/output.rs
+++ b/src/recipe/parser/output.rs
@@ -4,6 +4,8 @@
 //! each mapping can have its own `package`, `source`, `build`, `requirements`,
 //! `test`, and `about` fields.
 
+use marked_yaml::types::MarkedMappingNode;
+
 use crate::{
     _partialerror,
     recipe::{
@@ -18,6 +20,30 @@ static ALLOWED_KEYS_MULTI_OUTPUTS: [&str; 8] = [
     "context", "recipe", "source", "build", "outputs", "about", "extra", "cache",
 ];
 
+// Check if the `cache` top-level key is present. If it does not contain a source, but there is a
+// top-level `source` key, then we should warn the user because this key was moved to the `cache`
+fn check_src_cache(root: &MarkedMappingNode) -> Result<(), ParsingError> {
+    if let Some(cache) = root.get("cache") {
+        let has_top_level_source = root.contains_key("source");
+        let cache_map = cache.as_mapping().ok_or_else(|| {
+            ParsingError::from_partial(
+                "",
+                _partialerror!(
+                    *cache.span(),
+                    ErrorKind::ExpectedMapping,
+                    help = "`cache` must always be a mapping"
+                ),
+            )
+        })?;
+
+        if !cache_map.contains_key("source") && has_top_level_source {
+            tracing::warn!("The cache has its own `source` key now. You probably want to move the top-level `source` key into the `cache` key.");
+        }
+    }
+
+    Ok(())
+}
+
 /// Retrieve all outputs from the recipe source (YAML)
 pub fn find_outputs_from_src(src: &str) -> Result<Vec<Node>, ParsingError> {
     let root_node = parse_yaml(0, src)?;
@@ -32,6 +58,8 @@ pub fn find_outputs_from_src(src: &str) -> Result<Vec<Node>, ParsingError> {
         )
     })?;
 
+    check_src_cache(root_map)?;
+
     if root_map.contains_key("outputs") {
         if root_map.contains_key("package") {
             let key = root_map
diff --git a/src/recipe/parser/source.rs b/src/recipe/parser/source.rs
index 70c894e49..ac5d49f8b 100644
--- a/src/recipe/parser/source.rs
+++ b/src/recipe/parser/source.rs
@@ -20,7 +20,7 @@ use crate::{
 use super::FlattenErrors;
 
 /// Source information.
-#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
+#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
 #[serde(untagged)]
 pub enum Source {
     /// Git source pointing to a Git repository to retrieve the source from
@@ -167,7 +167,7 @@ where
 }
 
 /// Git source information.
-#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
+#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
 pub struct GitSource {
     /// Url to the git repository
     #[serde(rename = "git")]
@@ -358,7 +358,7 @@ impl TryConvertNode<GitSource> for RenderedMappingNode {
 }
 
 /// A Git repository URL or a local path to a Git repository
-#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
+#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
 #[serde(untagged)]
 pub enum GitUrl {
     /// A remote Git repository URL
@@ -382,7 +382,7 @@ impl fmt::Display for GitUrl {
 /// A url source (usually a tar.gz or tar.bz2 archive). A compressed file
 /// will be extracted to the `work` (or `work/` directory).
 #[serde_as]
-#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
+#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
 pub struct UrlSource {
     /// Url to the source code (usually a tar.gz or tar.bz2 etc. file)
     #[serde_as(as = "OneOrMany<_, PreferOne>")]
@@ -509,7 +509,7 @@ impl TryConvertNode<UrlSource> for RenderedMappingNode {
 /// A local path source. The source code will be copied to the `work`
 /// (or `work/` directory).
 #[serde_as]
-#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
+#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
 pub struct PathSource {
     /// Path to the local source code
     pub path: PathBuf,
@@ -531,10 +531,17 @@ pub struct PathSource {
     #[serde(skip_serializing_if = "Option::is_none")]
     pub file_name: Option<String>,
     /// Whether to use the `.gitignore` file in the source directory. Defaults to `true`.
-    #[serde(skip_serializing_if = "should_not_serialize_use_gitignore")]
+    #[serde(
+        default = "default_gitignore",
+        skip_serializing_if = "should_not_serialize_use_gitignore"
+    )]
     pub use_gitignore: bool,
 }
 
+fn default_gitignore() -> bool {
+    true
+}
+
 /// Helper method to skip serializing the use_gitignore flag if it is true.
 fn should_not_serialize_use_gitignore(use_gitignore: &bool) -> bool {
     *use_gitignore
@@ -677,4 +684,21 @@ mod tests {
 
         assert_eq!(parsed_git.url, git.url);
     }
+
+    // test serde json round trip for path source "../"
+    #[test]
+    fn test_path_source_round_trip() {
+        let path_source = PathSource {
+            path: "../".into(),
+            sha256: None,
+            md5: None,
+            patches: Vec::new(),
+            target_directory: None,
+            file_name: None,
+            use_gitignore: true,
+        };
+
+        let json = serde_json::to_string(&path_source).unwrap();
+        serde_json::from_str::<PathSource>(&json).unwrap();
+    }
 }
diff --git a/src/snapshots/rattler_build__metadata__test__curl_recipe.yaml.snap b/src/snapshots/rattler_build__metadata__test__curl_recipe.yaml.snap
index 6dc4e031e..305fdce89 100644
--- a/src/snapshots/rattler_build__metadata__test__curl_recipe.yaml.snap
+++ b/src/snapshots/rattler_build__metadata__test__curl_recipe.yaml.snap
@@ -640,7 +640,6 @@ finalized_dependencies:
   run:
     depends: []
     constraints: []
-finalized_cache_dependencies: ~
 finalized_sources: ~
 system_tools:
   rattler-build: 0.12.1
diff --git a/src/snapshots/rattler_build__metadata__test__read_recipe_with_sources.snap b/src/snapshots/rattler_build__metadata__test__read_recipe_with_sources.snap
index 00c23e8ab..f8f59145b 100644
--- a/src/snapshots/rattler_build__metadata__test__read_recipe_with_sources.snap
+++ b/src/snapshots/rattler_build__metadata__test__read_recipe_with_sources.snap
@@ -62,7 +62,6 @@ finalized_dependencies:
   run:
     depends: []
     constraints: []
-finalized_cache_dependencies: ~
 finalized_sources:
   - git: "https://github.com/prefix-dev/rattler-build"
     rev: 4ae7bd207b437c3a5955788e6516339a84358e1c
diff --git a/src/snapshots/rattler_build__metadata__test__rich_recipe.yaml.snap b/src/snapshots/rattler_build__metadata__test__rich_recipe.yaml.snap
index 556a4602a..9d541d920 100644
--- a/src/snapshots/rattler_build__metadata__test__rich_recipe.yaml.snap
+++ b/src/snapshots/rattler_build__metadata__test__rich_recipe.yaml.snap
@@ -383,7 +383,6 @@ finalized_dependencies:
         from: host
         run_export: python
     constraints: []
-finalized_cache_dependencies: ~
 finalized_sources: ~
 system_tools:
   rattler-build: 0.12.1
diff --git a/src/source/mod.rs b/src/source/mod.rs
index 156cae245..c3f44a567 100644
--- a/src/source/mod.rs
+++ b/src/source/mod.rs
@@ -288,29 +288,19 @@ impl Output {
         let span = tracing::info_span!("Fetching source code");
         let _enter = span.enter();
 
-        if let Some(finalized_sources) = &self.finalized_sources {
-            fetch_sources(
-                finalized_sources,
-                &self.build_configuration.directories,
-                &self.system_tools,
-                tool_configuration,
-            )
-            .await?;
-
-            Ok(self)
-        } else {
-            let rendered_sources = fetch_sources(
-                self.recipe.sources(),
-                &self.build_configuration.directories,
-                &self.system_tools,
-                tool_configuration,
-            )
-            .await?;
-
-            Ok(Output {
-                finalized_sources: Some(rendered_sources),
-                ..self
-            })
-        }
+        let rendered_sources = fetch_sources(
+            self.finalized_sources
+                .as_deref()
+                .unwrap_or(self.recipe.sources()),
+            &self.build_configuration.directories,
+            &self.system_tools,
+            tool_configuration,
+        )
+        .await?;
+
+        Ok(Output {
+            finalized_sources: Some(rendered_sources),
+            ..self
+        })
     }
 }
diff --git a/src/system_tools.rs b/src/system_tools.rs
index 8842404e4..97e511516 100644
--- a/src/system_tools.rs
+++ b/src/system_tools.rs
@@ -11,9 +11,10 @@ use std::{
 };
 use thiserror::Error;
 
+/// Errors that can occur when working with system tools
 #[derive(Error, Debug)]
-#[allow(missing_docs)]
 pub enum ToolError {
+    /// The tool was not found on the system
    #[error("failed to find `{0}` ({1})")]
    ToolNotFound(Tool, which::Error),
 }
diff --git a/test-data/recipes/cache/recipe-symlinks.yaml b/test-data/recipes/cache/recipe-symlinks.yaml
index 8c9ec28fc..ee39622f6 100644
--- a/test-data/recipes/cache/recipe-symlinks.yaml
+++ b/test-data/recipes/cache/recipe-symlinks.yaml
@@ -14,6 +14,7 @@ cache:
       ln -s $PREFIX/foo.txt $PREFIX/absolute-symlink.txt
       ln -s $PREFIX/non-existent-file $PREFIX/broken-symlink.txt
       ln -s ./foo.txt $PREFIX/relative-symlink.txt
+      echo ${{ PREFIX }} > $PREFIX/prefix.txt
 
 outputs:
   - package:
diff --git a/test/end-to-end/__snapshots__/test_symlinks/test_symlink_cache.json b/test/end-to-end/__snapshots__/test_symlinks/test_symlink_cache.json
index d2d735b8a..7dc0a0fc4 100644
--- a/test/end-to-end/__snapshots__/test_symlinks/test_symlink_cache.json
+++ b/test/end-to-end/__snapshots__/test_symlinks/test_symlink_cache.json
@@ -24,6 +24,12 @@
       "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
       "size_in_bytes": 0
     },
+    {
+      "_path": "prefix.txt",
+      "file_mode": "text",
+      "path_type": "hardlink",
+      "size_in_bytes": 256
+    },
     {
       "_path": "relative-symlink.txt",
       "path_type": "softlink",
diff --git a/test/end-to-end/test_symlinks.py b/test/end-to-end/test_symlinks.py
index e269a37ff..bec38229d 100644
--- a/test/end-to-end/test_symlinks.py
+++ b/test/end-to-end/test_symlinks.py
@@ -4,6 +4,7 @@
 
 import pytest
 from helpers import RattlerBuild, get_extracted_package
+from syrupy.filters import paths as filter_paths
 
 
 @pytest.mark.skipif(
@@ -34,10 +35,16 @@ def test_symlink_cache(
     paths_json = pkg / "info/paths.json"
     j = json.loads(paths_json.read_text())
 
-    assert snapshot_json == j
+    # prefix placeholder always changes, and we test it later
+    assert (
+        snapshot_json(
+            exclude=filter_paths("paths.4.prefix_placeholder", "paths.4.sha256")
+        )
+        == j
+    )
 
     paths = j["paths"]
-    assert len(paths) == 5
+    assert len(paths) == 6
     for p in paths:
         if "symlink" in p["_path"]:
             assert p["path_type"] == "softlink"
@@ -61,3 +68,13 @@ def test_symlink_cache(
     relative_symlink = pkg / "bin/exe-symlink"
     assert relative_symlink.is_symlink()
     assert relative_symlink.readlink() == Path("exe")
+
+    prefix_txt = pkg / "prefix.txt"
+    assert prefix_txt.exists()
+    contents = prefix_txt.read_text()
+    assert len(contents) > 0
+    # find the path in paths.json for the prefix.txt
+    for p in paths:
+        if p["_path"] == "prefix.txt":
+            assert p["path_type"] == "hardlink"
+            assert p["prefix_placeholder"] == contents.strip()