diff --git a/.github/workflows/typos.yml b/.github/workflows/typos.yml
index d21926517..3d649176d 100644
--- a/.github/workflows/typos.yml
+++ b/.github/workflows/typos.yml
@@ -10,4 +10,4 @@ jobs:
         uses: actions/checkout@v4.1.7
       - name: Check spelling
-        uses: crate-ci/typos@v1.28.1
+        uses: crate-ci/typos@v1.28.2
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 4849d39a9..20510442c 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,33 +1,20 @@
 exclude: '(\.patch|\.diff|\.snap|\.ambr|test-data/recipes/test-parsing/.+)$'
 repos:
-  - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v4.4.0
-    hooks:
-      - id: check-yaml
-      - id: end-of-file-fixer
-      - id: trailing-whitespace
-  - repo: https://github.com/psf/black
-    rev: 23.9.1
-    hooks:
-      - id: black
-        args: [--safe, --quiet]
-  - repo: https://github.com/pre-commit/mirrors-isort
-    rev: v5.10.1
-    hooks:
-      - id: isort
-        exclude: tests/data
-  - repo: https://github.com/PyCQA/flake8
-    rev: 6.1.0
-    hooks:
-      - id: flake8
-        args: ['--max-line-length=120']
-        language_version: python3
-        additional_dependencies:
-          - flake8-typing-imports==1.15.0
-          - flake8-builtins==2.1.0
-          - flake8-bugbear==23.9.16
-          - flake8-isort==6.1.0
+  - repo: https://github.com/pre-commit/pre-commit-hooks
+    rev: v4.4.0
+    hooks:
+      - id: check-yaml
+      - id: end-of-file-fixer
+      - id: trailing-whitespace
+
+  - repo: https://github.com/astral-sh/ruff-pre-commit
+    rev: v0.1.6
+    hooks:
+      - id: ruff
+        args: [--fix, --exit-non-zero-on-fix]
+      - id: ruff-format
+
   - repo: local
     hooks:
       - id: rustfmt
diff --git a/docs/multiple_output_cache.md b/docs/multiple_output_cache.md
index c4ba94b93..7ebe11a50 100644
--- a/docs/multiple_output_cache.md
+++ b/docs/multiple_output_cache.md
@@ -19,9 +19,14 @@ recipe:
   version: '0.1.0'

 cache:
+  source:
+    - url: https://example.com/library.tar.gz
+      sha256: 1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef
+
   requirements:
     build:
       - ${{ compiler('c') }}
+
   build:
     script:
       - mkdir -p $PREFIX/lib
@@ -56,15 +61,85 @@ You can use a list of globs to select only the files that you want.
 For something more complicated you can also use `include` and `exclude` fields in the
 `files` selector. Please refer to [the build options documentation](build_options.md#include-only-certain-files-in-the-package).

-## Run exports from the cache
+### Run exports from the cache

 Since the cache output also has build- and host requirements, we additionally need to
 take care of any "run exports" coming from the cache output. Run exports from the
 cache dependencies are handled very similarly to the run exports of a given output:
 we append any run exports to the outputs.

 If the cache has an "ignore run exports" section, then we apply those filters at the
 cache level. If the output ignores any run exports, then we also ignore the run
 exports that would come from the cache.

-## Caching in the $SRC_DIR
+### Source code in the cache
+
+The cache output has its own `source` section. For every output, the (dirty) source
+is restored from the cache directory. Outputs can layer additional files on top of
+the cache source.
+However, if you already ran `cmake` in the cache output, you can continue from where
+the build left off. This is useful when you want to e.g. build additional components
+(such as Python bindings) on top of the already-built library.
+

-If you used `conda-build` a lot, you might have noticed that a top-level build is also caching the changes in the `$SRC_DIR`. This is not the case for `rattler-build` yet.
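+As mentioned in the run exports section above, the cache can also filter its own run
+exports. A minimal sketch of what that looks like (the `libzlib` entry is purely
+illustrative):
+
+```yaml
+cache:
+  requirements:
+    build:
+      - ${{ compiler('c') }}
+    ignore_run_exports:
+      by_name:
+        - libzlib
+```
+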
+## C++ Example that builds Python bindings on top of a library
+
+You can find an example (with source code) here: [Link](https://github.com/wolfv/rattler-build-cache-test/).
+
+```yaml title="variants.yaml"
+python:
+  - "3.12.*"
+  - "3.11.*"
+```
+
+And the corresponding recipe:
+
+```yaml title="recipe.yaml"
+recipe:
+  name: calculator
+  version: 1.0.0
+
+cache:
+  source:
+    path: ../
-You could try to work around by e.g. copying files into the `$PREFIX` and restoring them in each output.
+
+  requirements:
+    build:
+      - ${{ compiler('cxx') }}
+      - cmake
+      - ninja
+
+  build:
+    script:
+      # make sure that `alternative_name.md` is not present
+      - test ! -f ./alternative_name.md
+      - mkdir build
+      - cd build
+      - cmake $SRC_DIR -GNinja ${CMAKE_ARGS}
+      - ninja install
+
+outputs:
+  # this first output will include all files installed during the cache build
+  - package:
+      name: libcalculator
+
+    requirements:
+      run_exports:
+        - ${{ pin_subpackage('libcalculator') }}
+
+  # This output will build the Python bindings using CMake and then create a new
+  # package with the Python bindings
+  - package:
+      name: py-calculator
+    source:
+      - path: ../README.md
+        file_name: alternative_name.md
+
+    requirements:
+      build:
+        - ${{ compiler('cxx') }}
+        - cmake
+        - ninja
+      host:
+        - pybind11
+        - python
+        - libcalculator
+
+    build:
+      script:
+        # assert that the README.md file is present
+        - test -f ./alternative_name.md
+        - cd build
+        - cmake $SRC_DIR -GNinja ${CMAKE_ARGS} -DBUILD_PYTHON_BINDINGS=ON
+        - ninja install
+```
diff --git a/docs/reference/cli.md b/docs/reference/cli.md
index 61fcec163..87d3c5e7d 100644
--- a/docs/reference/cli.md
+++ b/docs/reference/cli.md
@@ -186,6 +186,12 @@ Build a package from a recipe

   Path to an auth-file to read authentication information from

+- `--channel-priority <CHANNEL_PRIORITY>`
+
+  Channel priority to use when solving
+
+  - Default value: `strict`
+
 - `--tui`

   Launch the terminal user interface
@@ -334,6 +340,12 @@ These test files are written at "package creation time" and are part of the pack

   Path to an auth-file to read authentication information from

+- `--channel-priority <CHANNEL_PRIORITY>`
+
+  Channel priority to use when solving
+
+  - Default value: `strict`
+
 ###### **Modifying result**

 - `--output-dir <OUTPUT_DIR>`
@@ -399,6 +411,12 @@ Rebuild a package from a package file instead of a recipe

   Path to an auth-file to read authentication information from

+- `--channel-priority <CHANNEL_PRIORITY>`
+
+  Channel priority to use when solving
+
+  - Default value: `strict`
+
 ###### **Modifying result**

 - `--test <TEST>`
@@ -431,7 +449,7 @@ Upload a package

 ##### **Subcommands:**

-* `quetz` — Upload to aQuetz server. Authentication is used from the keychain / auth-file
+* `quetz` — Upload to a Quetz server. Authentication is used from the keychain / auth-file
 * `artifactory` — Options for uploading to an Artifactory channel. Authentication is used from the keychain / auth-file
 * `prefix` — Options for uploading to a prefix.dev server. Authentication is used from the keychain / auth-file
 * `anaconda` — Options for uploading to an Anaconda.org server
@@ -474,6 +492,12 @@ Upload a package

   Path to an auth-file to read authentication information from

+- `--channel-priority <CHANNEL_PRIORITY>`
+
+  Channel priority to use when solving
+
+  - Default value: `strict`
+
 ###### **Modifying result**

 - `--output-dir <OUTPUT_DIR>`
@@ -487,7 +511,7 @@ Upload a package

 #### `quetz`

-Upload to aQuetz server. Authentication is used from the keychain / auth-file
Authentication is used from the keychain / auth-file **Usage:** `rattler-build upload quetz [OPTIONS] --url --channel ` diff --git a/docs/tutorials/perl.md b/docs/tutorials/perl.md new file mode 100644 index 000000000..a63735716 --- /dev/null +++ b/docs/tutorials/perl.md @@ -0,0 +1,93 @@ +# Packaging a Perl (CPAM) package + +Packaging a Perl package is similar to packaging a Python package! + +## Building a Perl Package + +### A perl `noarch: generic` package + +The following recipe is for the Perl package `Call::Context`. We use `perl` in the `host` requirements, and install the package using `make`. +The `noarch: generic` is used to indicate that the package is architecture-independent - since this is a pure Perl package, it can be installed and run on any platform (`noarch`). + +```yaml title="recipe.yaml" +context: + version: 0.03 + +package: + name: perl-call-context + version: ${{ version }} + +source: + url: https://cpan.metacpan.org/authors/id/F/FE/FELIPE/Call-Context-${{ version }}.tar.gz + sha256: 0ee6bf46bc72755adb7a6b08e79d12e207de5f7809707b3c353b58cb2f0b5a26 + +build: + number: 0 + noarch: generic + script: + - perl Makefile.PL INSTALLDIRS=vendor NO_PERLLOCAL=1 NO_PACKLIST=1 + - make + - make test + - make install + +requirements: + build: + - make + host: + - perl + +tests: + - perl: + uses: + - Call::Context + +about: + license: GPL-1.0-or-later OR Artistic-1.0-Perl + summary: Sanity-check calling context + homepage: http://metacpan.org/pod/Call-Context +``` + +### A perl package with a C extension + +Some `perl` packages have native code extensions. In this example, we will build a package for the Perl package `Data::Dumper` using the `C` compiler. +The `c` compiler and `make` are required at build time in the `build` requirements to compile the native code extension. +We use `perl` in the `host` requirements, and install the package using `make`. 
+
+```yaml title="recipe.yaml"
+context:
+  version: "2.183"
+
+package:
+  name: "perl-data-dumper"
+  version: ${{ version }}
+
+source:
+  url: https://cpan.metacpan.org/authors/id/N/NW/NWCLARK/Data-Dumper-${{ version }}.tar.gz
+  sha256: e42736890b7dae1b37818d9c5efa1f1fdc52dec04f446a33a4819bf1d4ab5ad3
+
+build:
+  number: 0
+  script:
+    - perl Makefile.PL INSTALLDIRS=vendor NO_PERLLOCAL=1 NO_PACKLIST=1
+    - make
+    - make test
+    - make install VERBINST=1
+
+requirements:
+  build:
+    - ${{ compiler('c') }}
+    - make
+  host:
+    - perl
+    - perl-extutils-makemaker
+
+tests:
+  - perl:
+      uses:
+        - Data::Dumper
+
+about:
+  homepage: https://metacpan.org/pod/Data::Dumper
+  license: GPL-1.0-or-later OR Artistic-1.0-Perl
+  summary: 'seeds germane, yet not germinated'
+```
diff --git a/mkdocs.yml b/mkdocs.yml
index bb161864f..38018ad73 100644
--- a/mkdocs.yml
+++ b/mkdocs.yml
@@ -103,6 +103,7 @@ nav:
       - "Javascript": tutorials/javascript.md
       - "Rust": tutorials/rust.md
       - "Go": tutorials/go.md
+      - "Perl": tutorials/perl.md
       - "Converting from conda-build": converting_from_conda_build.md

   - Build options:
diff --git a/py-rattler-build/README.md b/py-rattler-build/README.md
index 822727f9f..f03ae8b78 100644
--- a/py-rattler-build/README.md
+++ b/py-rattler-build/README.md
@@ -1 +1 @@
-# Python bindings to rattler-build
\ No newline at end of file
+# Python bindings to rattler-build
diff --git a/py-rattler-build/tests/unit/test_basic.py b/py-rattler-build/tests/unit/test_basic.py
index 39f146b4f..9a59d162d 100644
--- a/py-rattler-build/tests/unit/test_basic.py
+++ b/py-rattler-build/tests/unit/test_basic.py
@@ -1,6 +1,7 @@
-import rattler_build
 from pathlib import Path

+import rattler_build
+

 def test_basic() -> None:
     parent_cargo_toml = Path(__file__).parent.parent.parent.parent / "Cargo.toml"
diff --git a/src/build.rs b/src/build.rs
index 2275e78de..f9ea6a900 100644
--- a/src/build.rs
+++ b/src/build.rs
@@ -112,12 +112,14 @@ pub async fn run_build(

     let directories = output.build_configuration.directories.clone();

-    let output = output
-        .fetch_sources(tool_configuration)
-        .await
-        .into_diagnostic()?;
-
-    let output = output.build_or_fetch_cache(tool_configuration).await?;
+    let output = if output.recipe.cache.is_some() {
+        output.build_or_fetch_cache(tool_configuration).await?
+    } else {
+        output
+            .fetch_sources(tool_configuration)
+            .await
+            .into_diagnostic()?
+    };

     let output = output
         .resolve_dependencies(tool_configuration)
diff --git a/src/cache.rs b/src/cache.rs
index 7b9cbb31a..bd9d5e5fb 100644
--- a/src/cache.rs
+++ b/src/cache.rs
@@ -1,25 +1,30 @@
 //! Functions to deal with the build cache
 use std::{
     collections::{BTreeMap, HashSet},
-    path::{Path, PathBuf},
+    path::PathBuf,
 };

 use fs_err as fs;
-use memchr::memmem;
-use memmap2::Mmap;
 use miette::{Context, IntoDiagnostic};
+use minijinja::Value;
 use serde::{Deserialize, Serialize};
 use sha2::{Digest, Sha256};

 use crate::{
     env_vars,
     metadata::{build_reindexed_channels, Output},
-    packaging::{contains_prefix_binary, contains_prefix_text, content_type, Files},
-    recipe::parser::{Dependency, Requirements},
+    packaging::Files,
+    recipe::{
+        parser::{Dependency, Requirements, Source},
+        Jinja,
+    },
     render::resolved_dependencies::{
         install_environments, resolve_dependencies, FinalizedDependencies,
     },
-    source::copy_dir::{copy_file, create_symlink, CopyOptions},
+    source::{
+        copy_dir::{copy_file, CopyDir, CopyOptions},
+        fetch_sources,
+    },
 };

 /// Error type for cache key generation
@@ -39,10 +44,19 @@ pub enum CacheKeyError {
 pub struct Cache {
     /// The requirements that were used to build the cache
     pub requirements: Requirements,
+
     /// The finalized dependencies
     pub finalized_dependencies: FinalizedDependencies,
+
+    /// The finalized sources
+    pub finalized_sources: Vec<Source>,
+
     /// The prefix files that are included in the cache
-    pub prefix_files: Vec<(PathBuf, bool)>,
+    pub prefix_files: Vec<PathBuf>,
+
+    /// The (dirty) source files that are included in the cache
+    pub work_dir_files: Vec<PathBuf>,
+
     /// The prefix that was used at build time (needs to be replaced when
     /// restoring the files)
     pub prefix: PathBuf,
@@ -88,74 +102,62 @@ impl Output {
             self.build_configuration.build_platform.platform.to_string(),
         );

-        let cache_key = (cache, selected_variant);
+        let cache_key = (cache, selected_variant, self.prefix());
         // serialize to json and hash
         let mut hasher = Sha256::new();
-        let serialized = serde_json::to_string(&cache_key)?;
-        hasher.update(serialized.as_bytes());
-        let result = hasher.finalize();
-        Ok(format!("{:x}", result))
+        cache_key.serialize(&mut serde_json::Serializer::new(&mut hasher))?;
+        Ok(format!("{:x}", hasher.finalize()))
     } else {
         Err(CacheKeyError::NoCacheKeyAvailable)
     }
 }

 /// Restore an existing cache from a cache directory
-    async fn restore_cache(&self, cache_dir: PathBuf) -> Result<Output, miette::Error> {
-        let cache: Cache = serde_json::from_str(
-            &fs::read_to_string(cache_dir.join("cache.json")).into_diagnostic()?,
+    async fn restore_cache(
+        &self,
+        cache: Cache,
+        cache_dir: PathBuf,
+    ) -> Result<Output, miette::Error> {
+        let cache_prefix_dir = cache_dir.join("prefix");
+        let copied_prefix = CopyDir::new(&cache_prefix_dir, self.prefix())
+            .run()
+            .into_diagnostic()?;
+
+        // restore the work dir files
+        let cache_dir_work = cache_dir.join("work_dir");
+        let copied_cache = CopyDir::new(
+            &cache_dir_work,
+            &self.build_configuration.directories.work_dir,
         )
+        .run()
         .into_diagnostic()?;

-        let copy_options = CopyOptions {
-            skip_exist: true,
-            ..Default::default()
-        };
-        let cache_prefix = cache.prefix;
-
-        let mut paths_created = HashSet::new();
-        for (file, has_prefix) in &cache.prefix_files {
-            tracing::info!("Restoring from cache: {:?}", file);
-            let dest = self.prefix().join(file);
-            let source = &cache_dir.join("prefix").join(file);
-            copy_file(source, &dest, &mut paths_created, &copy_options).into_diagnostic()?;
-
-            // check if the symlink starts with the old prefix, and if yes, make the symlink
-            // absolute with the new prefix
-            if source.is_symlink() {
-                let symlink_target = fs::read_link(source).into_diagnostic()?;
-                if let Ok(rest) = symlink_target.strip_prefix(&cache_prefix) {
-                    let new_symlink_target = self.prefix().join(rest);
-                    fs::remove_file(&dest).into_diagnostic()?;
-                    create_symlink(&new_symlink_target, &dest).into_diagnostic()?;
-                }
-            }

-            if *has_prefix {
-                replace_prefix(&dest, &cache_prefix, self.prefix())?;
-            }
-        }
+        let combined_files = copied_prefix.copied_paths().len() + copied_cache.copied_paths().len();
+        tracing::info!(
+            "Restored {} source and prefix files from cache",
+            combined_files
+        );

         Ok(Output {
             finalized_cache_dependencies: Some(cache.finalized_dependencies.clone()),
+            finalized_cache_sources: Some(cache.finalized_sources.clone()),
             ..self.clone()
         })
     }

+    /// This will fetch sources and build the cache if it doesn't exist
+    /// Note: this modifies the output in place
     pub(crate) async fn build_or_fetch_cache(
-        &self,
+        mut self,
         tool_configuration: &crate::tool_configuration::Configuration,
     ) -> Result<Output, miette::Error> {
-        // if we don't have a cache, we need to run the cache build with our current
-        // workdir, and then return the cache
-        let span = tracing::info_span!("Running cache build");
-        let _enter = span.enter();
-
-        let target_platform = self.build_configuration.target_platform;
-        let mut env_vars = env_vars::vars(self, "BUILD");
-        env_vars.extend(env_vars::os_vars(self.prefix(), &target_platform));
+        if let Some(cache) = self.recipe.cache.clone() {
+            // if we don't have a cache, we need to run the cache build with our current
+            // workdir, and then return the cache
+            let span = tracing::info_span!("Running cache build");
+            let _enter = span.enter();

-        if let Some(cache) = &self.recipe.cache {
-            tracing::info!("Cache key: {:?}", self.cache_key().into_diagnostic()?);
+            tracing::info!("using cache key: {:?}", self.cache_key().into_diagnostic()?);
             let cache_key = format!("bld_{}", self.cache_key().into_diagnostic()?);

             let cache_dir = self
@@ -166,24 +168,66 @@ impl Output {

             // restore the cache if it exists by copying the files to the prefix
             if cache_dir.exists() {
-                tracing::info!("Restoring cache from {:?}", cache_dir);
-                return self.restore_cache(cache_dir).await;
+                let text = fs::read_to_string(cache_dir.join("cache.json")).into_diagnostic()?;
+                match serde_json::from_str::<Cache>(&text) {
+                    Ok(cache) => {
+                        tracing::info!("Restoring cache from {:?}", cache_dir);
+                        self = self
+                            .fetch_sources(tool_configuration)
+                            .await
+                            .into_diagnostic()?;
+                        return self.restore_cache(cache, cache_dir).await;
+                    }
+                    Err(e) => {
+                        tracing::error!(
+                            "Failed to parse cache at {}: {:?} - rebuilding",
+                            cache_dir.join("cache.json").display(),
+                            e
+                        );
+                        // remove the cache dir and run as normal
+                        fs::remove_dir_all(&cache_dir).into_diagnostic()?;
+                    }
+                }
             }

+            // fetch the sources for the `cache` section
+            let rendered_sources = fetch_sources(
+                self.finalized_cache_sources
+                    .as_ref()
+                    .unwrap_or(&cache.source),
+                &self.build_configuration.directories,
+                &self.system_tools,
+                tool_configuration,
+            )
+            .await
+            .into_diagnostic()?;
+
+            let target_platform = self.build_configuration.target_platform;
+            let mut env_vars = env_vars::vars(&self, "BUILD");
+            env_vars.extend(env_vars::os_vars(self.prefix(), &target_platform));
+
             // Reindex the channels
             let channels = build_reindexed_channels(&self.build_configuration, tool_configuration)
                 .into_diagnostic()
                 .context("failed to reindex output channel")?;

             let finalized_dependencies =
-                resolve_dependencies(&cache.requirements, self, &channels, tool_configuration)
+                resolve_dependencies(&cache.requirements, &self, &channels, tool_configuration)
                     .await
                     .unwrap();

-            install_environments(self, &finalized_dependencies, tool_configuration)
+            install_environments(&self, &finalized_dependencies, tool_configuration)
                 .await
                 .into_diagnostic()?;

+            let selector_config = self.build_configuration.selector_config();
+            let mut jinja = Jinja::new(selector_config.clone());
+            for (k, v) in self.recipe.context.iter() {
+                jinja
+                    .context_mut()
+                    .insert(k.clone(), Value::from_safe_string(v.clone()));
+            }
+
             cache
                 .build
                 .script()
@@ -193,7 +237,7 @@ impl Output {
                     &self.build_configuration.directories.recipe_dir,
                     &self.build_configuration.directories.host_prefix,
                     Some(&self.build_configuration.directories.build_prefix),
-                    None, // TODO fix this to be proper Jinja context
+                    Some(jinja),
                 )
                 .await
                 .into_diagnostic()?;
@@ -213,6 +257,7 @@ impl Output {
             let mut creation_cache = HashSet::new();
             let mut copied_files = Vec::new();
             let copy_options = CopyOptions::default();
+
             for file in &new_files.new_files {
                 // skip directories (if they are not a symlink)
                 // directories are implicitly created by the files
@@ -224,28 +269,24 @@ impl Output {
                     .expect("File should be in prefix");
                 let dest = &prefix_cache_dir.join(stripped);
                 copy_file(file, dest, &mut creation_cache, &copy_options).into_diagnostic()?;
-
-                // Defend against broken symlinks here!
-                if !file.is_symlink() {
-                    // check if the file contains the prefix
-                    let content_type = content_type(file).into_diagnostic()?;
-                    let has_prefix = if content_type.map(|c| c.is_text()).unwrap_or(false) {
-                        contains_prefix_text(file, self.prefix(), self.target_platform())
-                    } else {
-                        contains_prefix_binary(file, self.prefix())
-                    }
-                    .into_diagnostic()?;
-                    copied_files.push((stripped.to_path_buf(), has_prefix));
-                } else {
-                    copied_files.push((stripped.to_path_buf(), false));
-                }
+                copied_files.push(stripped.to_path_buf());
             }

+            // We also need to copy the work dir files to the cache
+            let work_dir_files = CopyDir::new(
+                &self.build_configuration.directories.work_dir.clone(),
+                &cache_dir.join("work_dir"),
+            )
+            .run()
+            .into_diagnostic()?;
+
             // save the cache
             let cache = Cache {
                 requirements: cache.requirements.clone(),
                 finalized_dependencies: finalized_dependencies.clone(),
+                finalized_sources: rendered_sources.clone(),
                 prefix_files: copied_files,
+                work_dir_files: work_dir_files.copied_paths().to_vec(),
                 prefix: self.prefix().to_path_buf(),
             };

@@ -254,53 +295,11 @@ impl Output {

             Ok(Output {
                 finalized_cache_dependencies: Some(finalized_dependencies),
-                ..self.clone()
+                finalized_cache_sources: Some(rendered_sources),
+                ..self
             })
         } else {
-            Ok(self.clone())
+            Ok(self)
         }
     }
 }
-
-/// Simple replace prefix function that does a direct replacement without any
-/// padding considerations because we know that the prefix is the same length as
-/// the original prefix.
-fn replace_prefix(file: &Path, old_prefix: &Path, new_prefix: &Path) -> Result<(), miette::Error> {
-    // mmap the file, and use the fast string search to find the prefix
-    let output = {
-        let map_file = fs::File::open(file).into_diagnostic()?;
-        let mmap = unsafe { Mmap::map(&map_file).into_diagnostic()? };
-        let new_prefix_bytes = new_prefix.as_os_str().as_encoded_bytes();
-        let old_prefix_bytes = old_prefix.as_os_str().as_encoded_bytes();
-
-        // if the prefix is the same, we don't need to do anything
-        if old_prefix == new_prefix {
-            return Ok(());
-        }
-
-        assert_eq!(
-            new_prefix_bytes.len(),
-            old_prefix_bytes.len(),
-            "Prefixes must have the same length: {:?} != {:?}",
-            new_prefix,
-            old_prefix
-        );
-
-        let mut output = Vec::with_capacity(mmap.len());
-        let mut last_match_end = 0;
-        let finder = memmem::Finder::new(old_prefix_bytes);
-
-        while let Some(index) = finder.find(&mmap[last_match_end..]) {
-            let absolute_index = last_match_end + index;
-            output.extend_from_slice(&mmap[last_match_end..absolute_index]);
-            output.extend_from_slice(new_prefix_bytes);
-            last_match_end = absolute_index + new_prefix_bytes.len();
-        }
-        output.extend_from_slice(&mmap[last_match_end..]);
-        output
-        // close file & mmap at end of scope
-    };
-
-    // overwrite the file
-    fs::write(file, output).into_diagnostic()
-}
diff --git a/src/lib.rs b/src/lib.rs
index 63f3a466f..46f88245d 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -166,8 +166,10 @@ pub async fn get_build_output(
     }

     let mut host_platform = args.host_platform;
+
     // If target_platform is not set, we default to the host platform
     let target_platform = args.target_platform.unwrap_or(host_platform);
+
     // If target_platform is set and host_platform is not, then we default
     // host_platform to the target_platform
     if let Some(target_platform) = args.target_platform {
@@ -270,6 +272,12 @@ pub async fn get_build_output(

     let mut subpackages = BTreeMap::new();
     let mut outputs = Vec::new();
+
+    let global_build_name = outputs_and_variants
+        .first()
+        .map(|o| o.name.clone())
+        .unwrap_or_default();
+
     for discovered_output in outputs_and_variants {
         let recipe = &discovered_output.recipe;

@@ -290,7 +298,11 @@ pub async fn get_build_output(
             },
         );

-        let name = recipe.package().name().clone();
+        let build_name = if recipe.cache.is_some() {
+            global_build_name.clone()
+        } else {
+            recipe.package().name().as_normalized().to_string()
+        };

         // Add the channels from the args and by default always conda-forge
         let channels = args
@@ -318,7 +330,7 @@ pub async fn get_build_output(
                 hash: discovered_output.hash.clone(),
                 variant: discovered_output.used_vars.clone(),
                 directories: Directories::setup(
-                    name.as_normalized(),
+                    &build_name,
                     recipe_path,
                     &output_dir,
                     args.no_build_id,
@@ -338,8 +350,9 @@ pub async fn get_build_output(
                     force_colors: args.color_build_log && console::colors_enabled(),
                 },
                 finalized_dependencies: None,
-                finalized_cache_dependencies: None,
                 finalized_sources: None,
+                finalized_cache_dependencies: None,
+                finalized_cache_sources: None,
                 system_tools: SystemTools::new(),
                 build_summary: Arc::new(Mutex::new(BuildSummary::default())),
                 extra_meta: Some(
diff --git a/src/linux/link.rs b/src/linux/link.rs
index e7a215b9c..9d40f4b8e 100644
--- a/src/linux/link.rs
+++ b/src/linux/link.rs
@@ -15,6 +15,7 @@ use std::path::{Path, PathBuf};
 use crate::post_process::relink::{RelinkError, Relinker};
 use crate::recipe::parser::GlobVec;
 use crate::system_tools::{SystemTools, Tool};
+use crate::unix::permission_guard::{PermissionGuard, READ_WRITE};
 use crate::utils::to_lexical_absolute;

 /// A linux shared object (ELF)
@@ -214,6 +215,8 @@ impl Relinker for SharedObject {
             // keep only first unique item
             final_rpaths = final_rpaths.into_iter().unique().collect();

+            let _permission_guard = PermissionGuard::new(&self.path, READ_WRITE)?;
+
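+            // The permission guard adds user read/write bits so that relinking can
+            // modify the file; the original mode is restored when the guard is dropped.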
             // run builtin relink. if it fails, try patchelf
             if builtin_relink(&self.path, &final_rpaths).is_err() {
                 call_patchelf(&self.path, &final_rpaths, system_tools)?;
diff --git a/src/macos/link.rs b/src/macos/link.rs
index 68db59453..a0e24d1e6 100644
--- a/src/macos/link.rs
+++ b/src/macos/link.rs
@@ -12,6 +12,7 @@ use std::path::{Path, PathBuf};
 use crate::post_process::relink::{RelinkError, Relinker};
 use crate::recipe::parser::GlobVec;
 use crate::system_tools::{SystemTools, Tool};
+use crate::unix::permission_guard::{PermissionGuard, READ_WRITE};
 use crate::utils::to_lexical_absolute;

 /// A macOS dylib (Mach-O)
@@ -257,8 +258,11 @@ impl Relinker for Dylib {
         }

         if modified {
-            // run builtin relink. if it fails, try install_name_tool
-            if relink(&self.path, &changes).is_err() {
+            let _permission_guard = PermissionGuard::new(&self.path, READ_WRITE)?;
+            // run builtin relink. If it fails, try install_name_tool
+            if let Err(e) = relink(&self.path, &changes) {
+                assert!(self.path.exists());
+                tracing::warn!("Builtin relink failed {:?}, trying install_name_tool", e);
                 install_name_tool(&self.path, &changes, system_tools)?;
             }
             codesign(&self.path, system_tools)?;
@@ -402,7 +406,7 @@ fn relink(dylib_path: &Path, changes: &DylibChanges) -> Result<(), RelinkError>
             let new_path = new_path.as_bytes();

             if new_path.len() > old_path.len() {
-                tracing::debug!(
+                tracing::info!(
                     "new path is longer than old path: {} > {}",
                     new_path.len(),
                     old_path.len()
diff --git a/src/metadata.rs b/src/metadata.rs
index fc4ed4594..4b942180c 100644
--- a/src/metadata.rs
+++ b/src/metadata.rs
@@ -405,13 +405,17 @@ pub struct Output {
     /// dependencies have not been resolved yet. During the `run_build`
     /// functions, the dependencies are resolved and this field is filled.
     pub finalized_dependencies: Option<FinalizedDependencies>,
+
+    /// The finalized sources for this output. Contain the exact git hashes for
+    /// the sources that are used to build this output.
+    pub finalized_sources: Option<Vec<Source>>,
+
     /// The finalized dependencies from the cache (if there is a cache
     /// instruction)
     #[serde(skip_serializing_if = "Option::is_none")]
     pub finalized_cache_dependencies: Option<FinalizedDependencies>,
-    /// The finalized sources for this output. Contain the exact git hashes for
-    /// the sources that are used to build this output.
-    pub finalized_sources: Option<Vec<Source>>,
+
+    /// The finalized sources from the cache (if there is a cache instruction)
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub finalized_cache_sources: Option<Vec<Source>>,

     /// Summary of the build
     #[serde(skip)]
diff --git a/src/opt.rs b/src/opt.rs
index b7ac2b5b6..9cd66e285 100644
--- a/src/opt.rs
+++ b/src/opt.rs
@@ -506,9 +506,9 @@ pub enum ServerType {
     CondaForge(CondaForgeOpts),
 }

-#[derive(Clone, Debug, PartialEq, Parser)]
-/// Upload to aQuetz server.
+/// Upload to a Quetz server.
 /// Authentication is used from the keychain / auth-file.
+#[derive(Clone, Debug, PartialEq, Parser)]
 pub struct QuetzOpts {
     /// The URL to your Quetz server
     #[arg(short, long, env = "QUETZ_SERVER_URL")]
diff --git a/src/package_test/run_test.rs b/src/package_test/run_test.rs
index 7dc546343..0a9fbdf1a 100644
--- a/src/package_test/run_test.rs
+++ b/src/package_test/run_test.rs
@@ -33,7 +33,8 @@ use crate::{
     env_vars,
     metadata::PlatformWithVirtualPackages,
     recipe::parser::{
-        CommandsTest, DownstreamTest, PythonTest, PythonVersion, Script, ScriptContent, TestType,
+        CommandsTest, DownstreamTest, PerlTest, PythonTest, PythonVersion, Script, ScriptContent,
+        TestType,
     },
     render::solver::create_environment,
     source::copy_dir::CopyDir,
@@ -425,6 +426,10 @@ pub async fn run_test(
                     .run_test(&pkg, &package_folder, &prefix, &config)
                     .await?
             }
+            TestType::Perl { perl } => {
+                perl.run_test(&pkg, &package_folder, &prefix, &config)
+                    .await?
+            }
             TestType::Downstream(downstream) if downstream_package.is_none() => {
                 downstream
                     .run_test(&pkg, package_file, &prefix, &config)
@@ -591,6 +596,69 @@ impl PythonTest {
     }
 }

+impl PerlTest {
+    /// Execute the Perl test
+    pub async fn run_test(
+        &self,
+        pkg: &ArchiveIdentifier,
+        path: &Path,
+        prefix: &Path,
+        config: &TestConfiguration,
+    ) -> Result<(), TestError> {
+        let span = tracing::info_span!("Running perl test");
+        let _guard = span.enter();
+
+        let match_spec = MatchSpec::from_str(
+            format!("{}={}={}", pkg.name, pkg.version, pkg.build_string).as_str(),
+            ParseStrictness::Lenient,
+        )?;
+
+        let dependencies = vec![
+            MatchSpec::from_str("perl", ParseStrictness::Strict).unwrap(),
+            match_spec,
+        ];
+
+        create_environment(
+            "test",
+            &dependencies,
+            config
+                .host_platform
+                .as_ref()
+                .unwrap_or(&config.current_platform),
+            prefix,
+            &config.channels,
+            &config.tool_configuration,
+            config.channel_priority,
+            config.solve_strategy,
+        )
+        .await
+        .map_err(TestError::TestEnvironmentSetup)?;
+
+        let mut imports = String::new();
+        tracing::info!("Testing perl imports:\n");
+
+        for module in &self.uses {
+            writeln!(imports, "use {};", module)?;
+            tracing::info!("  use {};", module);
+        }
+        tracing::info!("\n");
+
+        let script = Script {
+            content: ScriptContent::Command(imports.clone()),
+            interpreter: Some("perl".into()),
+            ..Script::default()
+        };
+
+        let tmp_dir = tempfile::tempdir()?;
+        script
+            .run_script(Default::default(), tmp_dir.path(), path, prefix, None, None)
+            .await
+            .map_err(|e| TestError::TestFailed(e.to_string()))?;
+
+        Ok(())
+    }
+}
+
 impl CommandsTest {
     /// Execute the command test
     pub async fn run_test(
diff --git a/src/recipe/parser.rs b/src/recipe/parser.rs
index 249f156a2..f43c7856f 100644
--- a/src/recipe/parser.rs
+++ b/src/recipe/parser.rs
@@ -35,7 +35,7 @@ mod test;

 pub use self::{
     about::About,
-    build::{Build, BuildString, DynamicLinking, PrefixDetection},
+    build::{Build, BuildString, DynamicLinking, PrefixDetection, Python},
     cache::Cache,
     glob_vec::GlobVec,
     output::find_outputs_from_src,
@@ -49,7 +49,7 @@ pub use self::{
     source::{GitRev, GitSource, GitUrl, PathSource, Source, UrlSource},
     test::{
         CommandsTest, CommandsTestFiles, CommandsTestRequirements, DownstreamTest,
-        PackageContentsTest, PythonTest, PythonVersion, TestType,
+        PackageContentsTest, PerlTest, PythonTest, PythonVersion, TestType,
     },
 };
diff --git a/src/recipe/parser/cache.rs b/src/recipe/parser/cache.rs
index 7696bab0e..4da1f7c47 100644
--- a/src/recipe/parser/cache.rs
+++ b/src/recipe/parser/cache.rs
@@ -9,11 +9,14 @@ use crate::{
 };
 use serde::{Deserialize, Serialize};

-use super::{Build, Requirements};
+use super::{Build, Requirements, Source};

 /// A cache build that can be used to split up a build into multiple outputs
 #[derive(Debug, Default, Clone, Serialize, Deserialize)]
 pub struct Cache {
+    /// Sources that are used in the cache build and subsequent output builds
+    #[serde(default, skip_serializing_if = "Vec::is_empty")]
+    pub source: Vec<Source>,
     /// The build configuration for the cache
     pub build: Build,
     /// The requirements for building the cache
@@ -35,6 +38,7 @@ impl TryConvertNode<Cache> for RenderedMappingNode {
         validate_keys! {
             cache,
             self.iter(),
+            source,
             build,
             requirements
         };
diff --git a/src/recipe/parser/output.rs b/src/recipe/parser/output.rs
index cceb90665..4f517eaae 100644
--- a/src/recipe/parser/output.rs
+++ b/src/recipe/parser/output.rs
@@ -4,6 +4,8 @@
 //! each mapping can have its own `package`, `source`, `build`, `requirements`,
 //! `test`, and `about` fields.

+use marked_yaml::types::MarkedMappingNode;
+
 use crate::{
     _partialerror,
     recipe::{
@@ -18,6 +20,30 @@ static ALLOWED_KEYS_MULTI_OUTPUTS: [&str; 8] = [
     "context", "recipe", "source", "build", "outputs", "about", "extra", "cache",
 ];

+// Check if the `cache` top-level key is present. If it does not contain a source, but there is a
+// top-level `source` key, then we should warn the user because this key was moved to the `cache`
+fn check_src_cache(root: &MarkedMappingNode) -> Result<(), ParsingError> {
+    if let Some(cache) = root.get("cache") {
+        let has_top_level_source = root.contains_key("source");
+        let cache_map = cache.as_mapping().ok_or_else(|| {
+            ParsingError::from_partial(
+                "",
+                _partialerror!(
+                    *cache.span(),
+                    ErrorKind::ExpectedMapping,
+                    help = "`cache` must always be a mapping"
+                ),
+            )
+        })?;
+
+        if !cache_map.contains_key("source") && has_top_level_source {
+            tracing::warn!("The cache has its own `source` key now. You probably want to move the top-level `source` key into the `cache` key.");
+        }
+    }
+
+    Ok(())
+}
+
 /// Retrieve all outputs from the recipe source (YAML)
 pub fn find_outputs_from_src(src: &str) -> Result<Vec<Node>, ParsingError> {
     let root_node = parse_yaml(0, src)?;
@@ -32,6 +58,8 @@ pub fn find_outputs_from_src(src: &str) -> Result<Vec<Node>, ParsingError> {
         )
     })?;

+    check_src_cache(root_map)?;
+
     if root_map.contains_key("outputs") {
         if root_map.contains_key("package") {
             let key = root_map
diff --git a/src/recipe/parser/source.rs b/src/recipe/parser/source.rs
index 70c894e49..ac5d49f8b 100644
--- a/src/recipe/parser/source.rs
+++ b/src/recipe/parser/source.rs
@@ -20,7 +20,7 @@ use crate::{
 use super::FlattenErrors;

 /// Source information.
-#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
+#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
 #[serde(untagged)]
 pub enum Source {
     /// Git source pointing to a Git repository to retrieve the source from
@@ -167,7 +167,7 @@ where
 }

 /// Git source information.
-#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
+#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
 pub struct GitSource {
     /// Url to the git repository
     #[serde(rename = "git")]
@@ -358,7 +358,7 @@ impl TryConvertNode<GitSource> for RenderedMappingNode {
 }

 /// A Git repository URL or a local path to a Git repository
-#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
+#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
 #[serde(untagged)]
 pub enum GitUrl {
     /// A remote Git repository URL
@@ -382,7 +382,7 @@ impl fmt::Display for GitUrl {

 /// A url source (usually a tar.gz or tar.bz2 archive). A compressed file
 /// will be extracted to the `work` (or `work/<target-directory>`) directory.
 #[serde_as]
-#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
+#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
 pub struct UrlSource {
     /// Url to the source code (usually a tar.gz or tar.bz2 etc. file)
     #[serde_as(as = "OneOrMany<_, PreferOne>")]
@@ -509,7 +509,7 @@ impl TryConvertNode<UrlSource> for RenderedMappingNode {

 /// A local path source. The source code will be copied to the `work`
 /// (or `work/<target-directory>`) directory.
 #[serde_as]
-#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
+#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
 pub struct PathSource {
     /// Path to the local source code
     pub path: PathBuf,
@@ -531,10 +531,17 @@ pub struct PathSource {
     #[serde(skip_serializing_if = "Option::is_none")]
     pub file_name: Option<String>,
     /// Whether to use the `.gitignore` file in the source directory. Defaults to `true`.
-    #[serde(skip_serializing_if = "should_not_serialize_use_gitignore")]
+    #[serde(
+        default = "default_gitignore",
+        skip_serializing_if = "should_not_serialize_use_gitignore"
+    )]
     pub use_gitignore: bool,
 }

+fn default_gitignore() -> bool {
+    true
+}
+
 /// Helper method to skip serializing the use_gitignore flag if it is true.
 fn should_not_serialize_use_gitignore(use_gitignore: &bool) -> bool {
     *use_gitignore
@@ -677,4 +684,21 @@ mod tests {

         assert_eq!(parsed_git.url, git.url);
     }
+
+    // test serde json round trip for path source "../"
+    #[test]
+    fn test_path_source_round_trip() {
+        let path_source = PathSource {
+            path: "../".into(),
+            sha256: None,
+            md5: None,
+            patches: Vec::new(),
+            target_directory: None,
+            file_name: None,
+            use_gitignore: true,
+        };
+
+        let json = serde_json::to_string(&path_source).unwrap();
+        serde_json::from_str::<PathSource>(&json).unwrap();
+    }
 }
diff --git a/src/recipe/parser/test.rs b/src/recipe/parser/test.rs
index 4f940ceea..03587b96e 100644
--- a/src/recipe/parser/test.rs
+++ b/src/recipe/parser/test.rs
@@ -121,6 +121,13 @@ impl Default for PythonTest {
     }
 }

+/// A special Perl test that checks if the imports are available and runs `cpanm check`.
+#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
+pub struct PerlTest {
+    /// List of perl `uses` to test
+    pub uses: Vec<String>,
+}
+
 /// A test that runs the tests of a downstream package.
 #[derive(Debug, Default, Clone, PartialEq, Serialize, Deserialize)]
 pub struct DownstreamTest {
@@ -137,6 +144,11 @@ pub enum TestType {
         /// The imports to test and the `pip check` flag
         python: PythonTest,
     },
+    /// A Perl test that will test if the modules are available
+    Perl {
+        /// The modules to test
+        perl: PerlTest,
+    },
     /// A test that executes multiple commands in a freshly created environment
     Command(CommandsTest),
     /// A test that runs the tests of a downstream package
@@ -247,10 +259,14 @@ impl TryConvertNode<TestType> for RenderedMappingNode {
                 let package_contents = as_mapping(value, key_str)?.try_convert(key_str)?;
                 test = TestType::PackageContents { package_contents };
             }
+            "perl" => {
+                let perl = as_mapping(value, key_str)?.try_convert(key_str)?;
+                test = TestType::Perl { perl };
+            }
             invalid => Err(vec![_partialerror!(
                 *key.span(),
                 ErrorKind::InvalidField(invalid.to_string().into()),
-                help = format!("expected fields for {name} is one of `python`, `script`, `downstream`, `package_contents`")
+                help = format!("expected fields for {name} is one of `python`, `perl`, `script`, `downstream`, `package_contents`")
             )])?
         }

         Ok(())
@@ -383,6 +399,18 @@ impl TryConvertNode<PythonTest> for RenderedMappingNode {
     }
 }

+///////////////////////////
+/// Perl Test           ///
+///////////////////////////
+
+impl TryConvertNode<PerlTest> for RenderedMappingNode {
+    fn try_convert(&self, _name: &str) -> Result<PerlTest, Vec<PartialParsingError>> {
+        let mut perl_test = PerlTest::default();
+        validate_keys!(perl_test, self.iter(), uses);
+        Ok(perl_test)
+    }
+}
+
 ///////////////////////////
 /// Package Contents    ///
 ///////////////////////////
diff --git a/src/source/mod.rs b/src/source/mod.rs
index 156cae245..c3f44a567 100644
--- a/src/source/mod.rs
+++ b/src/source/mod.rs
@@ -288,29 +288,19 @@ impl Output {
         let span = tracing::info_span!("Fetching source code");
         let _enter = span.enter();

-        if let Some(finalized_sources) = &self.finalized_sources {
-            fetch_sources(
-                finalized_sources,
-                &self.build_configuration.directories,
-                &self.system_tools,
-                tool_configuration,
-            )
-            .await?;
-
-            Ok(self)
-        } else {
-            let rendered_sources = fetch_sources(
-                self.recipe.sources(),
-                &self.build_configuration.directories,
-                &self.system_tools,
-                tool_configuration,
-            )
-            .await?;
-
-            Ok(Output {
-                finalized_sources: Some(rendered_sources),
-                ..self
-            })
-        }
+        let rendered_sources = fetch_sources(
+            self.finalized_sources
+                .as_deref()
+                .unwrap_or(self.recipe.sources()),
+            &self.build_configuration.directories,
+            &self.system_tools,
+            tool_configuration,
+        )
+        .await?;
+
+        Ok(Output {
+            finalized_sources: Some(rendered_sources),
+            ..self
+        })
     }
 }
diff --git a/src/system_tools.rs b/src/system_tools.rs
index 8842404e4..97e511516 100644
--- a/src/system_tools.rs
+++ b/src/system_tools.rs
@@ -11,9 +11,10 @@ use std::{
 };
 use thiserror::Error;

+/// Errors that can occur when working with system tools
 #[derive(Error, Debug)]
-#[allow(missing_docs)]
 pub enum ToolError {
+    /// The tool was not found on the system
     #[error("failed to find `{0}` ({1})")]
     ToolNotFound(Tool, which::Error),
 }
diff --git a/src/unix/mod.rs b/src/unix/mod.rs
index 3d7924f6a..119cb1b35 100644
--- a/src/unix/mod.rs
+++ b/src/unix/mod.rs
@@ -1 +1,2 @@
 pub mod env;
+pub mod permission_guard;
diff --git a/src/unix/permission_guard.rs b/src/unix/permission_guard.rs
new file mode 100644
index 000000000..a3edb4e64
--- /dev/null
+++ b/src/unix/permission_guard.rs
@@ -0,0 +1,112 @@
+//! Implementation of the `PermissionGuard` struct.
+
+/// User read/write permissions (0o600).
+pub const READ_WRITE: u32 = 0o600;
+
+#[cfg(unix)]
+mod unix {
+    use std::fs::Permissions;
+    use std::io;
+    use std::os::unix::fs::PermissionsExt;
+    use std::path::{Path, PathBuf};
+
+    /// A guard that modifies the permissions of a file and restores them when dropped.
+    pub struct PermissionGuard {
+        /// The path to the file.
+        path: PathBuf,
+        /// The original permissions of the file.
+        original_permissions: Permissions,
+    }
+
+    impl PermissionGuard {
+        /// Create a new `PermissionGuard` for the given path with the given permissions.
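+        /// The requested bits are OR-ed into the file's current mode, so existing
+        /// permissions are only widened; `Drop` restores the original mode.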
+        pub fn new<P: AsRef<Path>>(path: P, permissions: u32) -> io::Result<Self> {
+            let path = path.as_ref().to_path_buf();
+            let metadata = std::fs::metadata(&path)?;
+            let original_permissions = metadata.permissions();
+
+            let new_permissions =
+                Permissions::from_mode(original_permissions.mode() | permissions);
+
+            // Set new permissions
+            std::fs::set_permissions(&path, new_permissions)?;
+
+            Ok(Self {
+                path,
+                original_permissions,
+            })
+        }
+    }
+
+    impl Drop for PermissionGuard {
+        fn drop(&mut self) {
+            if self.path.exists() {
+                if let Err(e) =
+                    std::fs::set_permissions(&self.path, self.original_permissions.clone())
+                {
+                    eprintln!("Failed to restore file permissions: {}", e);
+                }
+            }
+        }
+    }
+
+    #[cfg(test)]
+    mod tests {
+        use super::*;
+        use std::fs::{self, File};
+        use tempfile::tempdir;
+
+        #[test]
+        fn test_permission_guard_modifies_and_restores() -> io::Result<()> {
+            let dir = tempdir()?;
+            let test_file = dir.path().join("test-restore.txt");
+            File::create(&test_file)?;
+
+            // Set initial permissions to 0o002 so we can check if the guard modifies them
+            fs::set_permissions(&test_file, Permissions::from_mode(0o002))?;
+            let initial_mode = fs::metadata(&test_file)?.permissions().mode();
+
+            // Create scope for PermissionGuard
+            {
+                let _guard = PermissionGuard::new(&test_file, 0o200)?; // Write permission
+
+                // Check permissions were modified
+                let modified_mode = fs::metadata(&test_file)?.permissions().mode();
+                assert_ne!(initial_mode, modified_mode);
+                assert_eq!(modified_mode & 0o200, 0o200);
+            }
+
+            // Check permissions were restored after guard dropped
+            let final_mode = fs::metadata(&test_file)?.permissions().mode();
+            assert_eq!(initial_mode, final_mode);
+
+            Ok(())
+        }
+
+        #[test]
+        fn test_permission_guard_nonexistent_file() {
+            let result = PermissionGuard::new("nonexistent_file", 0o777);
+            assert!(result.is_err());
+        }
+    }
+}
+
+#[cfg(windows)]
+mod windows {
+    use std::io;
+    use std::path::Path;
+
+    pub struct PermissionGuard;
+
+    impl PermissionGuard {
+        /// Create a new `PermissionGuard` for the given path with the given permissions. Does nothing on Windows.
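+        /// (Windows does not use Unix mode bits, so the guard is a no-op there.)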
+        pub fn new<P: AsRef<Path>>(_path: P, _permissions: u32) -> io::Result<Self> {
+            Ok(Self)
+        }
+    }
+}
+
+#[cfg(unix)]
+pub use self::unix::PermissionGuard;
+
+#[cfg(windows)]
+pub use self::windows::PermissionGuard;
diff --git a/test-data/recipes/cache/recipe-symlinks.yaml b/test-data/recipes/cache/recipe-symlinks.yaml
index 8c9ec28fc..ee39622f6 100644
--- a/test-data/recipes/cache/recipe-symlinks.yaml
+++ b/test-data/recipes/cache/recipe-symlinks.yaml
@@ -14,6 +14,7 @@ cache:
       ln -s $PREFIX/foo.txt $PREFIX/absolute-symlink.txt
       ln -s $PREFIX/non-existent-file $PREFIX/broken-symlink.txt
      ln -s ./foo.txt $PREFIX/relative-symlink.txt
+      echo ${{ PREFIX }} > $PREFIX/prefix.txt

 outputs:
   - package:
diff --git a/test-data/recipes/perl-test/recipe.yaml b/test-data/recipes/perl-test/recipe.yaml
new file mode 100644
index 000000000..bb4e939d3
--- /dev/null
+++ b/test-data/recipes/perl-test/recipe.yaml
@@ -0,0 +1,40 @@
+context:
+  version: 0.03
+
+package:
+  name: perl-call-context
+  version: ${{ version }}
+
+source:
+  url: https://cpan.metacpan.org/authors/id/F/FE/FELIPE/Call-Context-${{ version }}.tar.gz
+  sha256: 0ee6bf46bc72755adb7a6b08e79d12e207de5f7809707b3c353b58cb2f0b5a26
+
+build:
+  number: 0
+  noarch: generic
+  script:
+    - perl Makefile.PL INSTALLDIRS=site NO_PERLLOCAL=1 NO_PACKLIST=1
+    - make
+    - make test
+    - make install
+    - echo "LICENSE-ARTISTIC" > LICENSE-ARTISTIC
+    - echo "LICENSE-GPL" > LICENSE-GPL
+
+requirements:
+  build:
+    - make
+  host:
+    - perl
+
+tests:
+  - perl:
+      uses:
+        - Call::Context
+
+about:
+  license: GPL-1.0-or-later OR Artistic-1.0-Perl
+  license_file:
+    - LICENSE-ARTISTIC
+    - LICENSE-GPL
+  summary: Sanity-check calling context
+  homepage: http://metacpan.org/pod/Call-Context
diff --git a/test/end-to-end/__snapshots__/test_symlinks/test_symlink_cache.json b/test/end-to-end/__snapshots__/test_symlinks/test_symlink_cache.json
index d2d735b8a..7dc0a0fc4 100644
--- a/test/end-to-end/__snapshots__/test_symlinks/test_symlink_cache.json
+++ b/test/end-to-end/__snapshots__/test_symlinks/test_symlink_cache.json
@@ -24,6 +24,12 @@
       "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
       "size_in_bytes": 0
     },
+    {
+      "_path": "prefix.txt",
+      "file_mode": "text",
+      "path_type": "hardlink",
+      "size_in_bytes": 256
+    },
     {
       "_path": "relative-symlink.txt",
       "path_type": "softlink",
diff --git a/test/end-to-end/__snapshots__/test_tests.ambr b/test/end-to-end/__snapshots__/test_tests.ambr
new file mode 100644
index 000000000..0d2edb459
--- /dev/null
+++ b/test/end-to-end/__snapshots__/test_tests.ambr
@@ -0,0 +1,9 @@
+# serializer version: 1
+# name: test_perl_tests
+  '''
+  - perl:
+      uses:
+        - Call::Context
+
+  '''
+# ---
diff --git a/test/end-to-end/test_simple.py b/test/end-to-end/test_simple.py
index 1d9787373..4e0a1f764 100644
--- a/test/end-to-end/test_simple.py
+++ b/test/end-to-end/test_simple.py
@@ -8,8 +8,7 @@ import pytest
 import requests
 import yaml
-from helpers import (RattlerBuild, check_build_output, get_extracted_package,
-                     get_package)
+from helpers import RattlerBuild, check_build_output, get_extracted_package, get_package


 def test_functionality(rattler_build: RattlerBuild):
diff --git a/test/end-to-end/test_symlinks.py b/test/end-to-end/test_symlinks.py
index e269a37ff..bec38229d 100644
--- a/test/end-to-end/test_symlinks.py
+++ b/test/end-to-end/test_symlinks.py
@@ -4,6 +4,7 @@ import pytest

 from helpers import RattlerBuild, get_extracted_package
+from syrupy.filters import paths as filter_paths


 @pytest.mark.skipif(
@@ -34,10 +35,16 @@ def test_symlink_cache(
     paths_json = pkg / "info/paths.json"
     j = json.loads(paths_json.read_text())

-    assert snapshot_json == j
+    # prefix placeholder always changes, and we test it later
+    assert (
+        snapshot_json(
+            exclude=filter_paths("paths.4.prefix_placeholder", "paths.4.sha256")
+        )
+        == j
+    )

     paths = j["paths"]
-    assert len(paths) == 5
+    assert len(paths) == 6
     for p in paths:
         if "symlink" in p["_path"]:
             assert p["path_type"] == "softlink"
@@ -61,3 +68,13 @@ def test_symlink_cache(
     relative_symlink = pkg / "bin/exe-symlink"
     assert relative_symlink.is_symlink()
     assert relative_symlink.readlink() == Path("exe")
+
+    prefix_txt = pkg / "prefix.txt"
+    assert prefix_txt.exists()
+    contents = prefix_txt.read_text()
+    assert len(contents) > 0
+    # find the path in paths.json for the prefix.txt
+    for p in paths:
+        if p["_path"] == "prefix.txt":
+            assert p["path_type"] == "hardlink"
+            assert p["prefix_placeholder"] == contents.strip()
diff --git a/test/end-to-end/test_tests.py b/test/end-to-end/test_tests.py
new file mode 100644
index 000000000..f403b13bf
--- /dev/null
+++ b/test/end-to-end/test_tests.py
@@ -0,0 +1,20 @@
+import os
+from pathlib import Path
+
+import pytest
+from helpers import RattlerBuild, get_extracted_package
+
+
+@pytest.mark.skipif(
+    os.name == "nt", reason="recipe does not support execution on windows"
+)
+def test_perl_tests(
+    rattler_build: RattlerBuild, recipes: Path, tmp_path: Path, snapshot
+):
+    rattler_build.build(recipes / "perl-test", tmp_path)
+    pkg = get_extracted_package(tmp_path, "perl-call-context")
+
+    assert (pkg / "info" / "tests" / "tests.yaml").exists()
+    content = (pkg / "info" / "tests" / "tests.yaml").read_text()
+
+    assert snapshot == content