From 148704068da846ae45028331cb6f4b6712a784c9 Mon Sep 17 00:00:00 2001 From: nichmor Date: Mon, 9 Dec 2024 11:38:56 +0200 Subject: [PATCH 01/20] feat: add matchspec caching (#2637) Co-authored-by: Tim de Jager --- crates/pixi_build_frontend/Cargo.toml | 3 +- crates/pixi_build_frontend/src/tool/cache.rs | 418 ++++++++++++++++-- .../pixi_build_frontend/src/tool/installer.rs | 33 +- ...ntend__tool__cache__tests__find_specs.snap | 36 ++ .../conda-meta/bat-0.24.0-h3bba108_1.json | 42 ++ crates/pixi_consts/src/consts.rs | 4 +- examples/boltons/pixi.toml | 2 + src/cli/clean.rs | 10 + 8 files changed, 516 insertions(+), 32 deletions(-) create mode 100644 crates/pixi_build_frontend/src/tool/snapshots/pixi_build_frontend__tool__cache__tests__find_specs.snap create mode 100644 crates/pixi_build_frontend/tests/data/conda-meta/bat-0.24.0-h3bba108_1.json diff --git a/crates/pixi_build_frontend/Cargo.toml b/crates/pixi_build_frontend/Cargo.toml index 442fad106..11a49d871 100644 --- a/crates/pixi_build_frontend/Cargo.toml +++ b/crates/pixi_build_frontend/Cargo.toml @@ -11,7 +11,7 @@ version = "0.1.0" [dependencies] dashmap = { workspace = true } -fs-err = { workspace = true } +fs-err = { workspace = true, features = ["tokio"] } futures = { workspace = true } itertools = { workspace = true } jsonrpsee = { workspace = true, features = ["client"] } @@ -42,6 +42,7 @@ tracing = { workspace = true } url = "2.5.0" which = { workspace = true } + pixi_build_types = { path = "../pixi_build_types" } diff --git a/crates/pixi_build_frontend/src/tool/cache.rs b/crates/pixi_build_frontend/src/tool/cache.rs index dc150f04f..e00bda4f4 100644 --- a/crates/pixi_build_frontend/src/tool/cache.rs +++ b/crates/pixi_build_frontend/src/tool/cache.rs @@ -1,11 +1,20 @@ use std::{ + ffi::OsStr, fmt::Debug, - path::PathBuf, + path::{Path, PathBuf}, sync::{Arc, Weak}, }; use dashmap::{DashMap, Entry}; -use rattler_conda_types::ChannelConfig; +use itertools::Itertools; +use miette::{Context, IntoDiagnostic}; +use rattler_conda_types::{ChannelConfig, Matches, Platform, PrefixRecord}; +use rattler_shell::{ + activation::{ActivationVariables, Activator}, + shell::ShellEnum, +}; + +use fs_err::tokio as tokio_fs; use tokio::sync::broadcast; use super::{installer::ToolInstaller, IsolatedTool}; @@ -18,7 +27,7 @@ enum PendingOrFetched { Fetched(T), } -/// A [`ToolCache`] maintains a cache of environments for isolated build tools. +/// A [`ToolCache`] maintains a cache of environments for build tools. /// /// This is useful to ensure that if we need to build multiple packages that use /// the same tool, we can reuse their environments. @@ -31,6 +40,56 @@ pub struct ToolCache { cache: DashMap>>, } +/// Finds the `PrefixRecord`s from `conda-meta` directory which starts with `Matchspec` names. +pub(crate) async fn find_spec_records( + conda_meta: &Path, + name_to_match: Vec, +) -> miette::Result>> { + let mut read_dir = tokio_fs::read_dir(conda_meta).await.into_diagnostic()?; + let mut records = Vec::new(); + + // Set to keep track of which names have matching files + let mut matched_names = std::collections::HashSet::new(); + + while let Some(entry) = read_dir.next_entry().await.into_diagnostic()? 
{
+        let path = entry.path();
+
+        // Check if the entry is a file and has a .json extension
+        if path.is_file() && path.extension().and_then(OsStr::to_str) == Some("json") {
+            if let Some(file_name) = path.file_name().and_then(OsStr::to_str) {
+                // Check if the file name starts with any of the names in name_to_match
+                for name in &name_to_match {
+                    // Filename is in the form of: <name>-<version>-<build>
+                    // this part is taken from ArchiveIdentifier
+                    // https://github.com/conda/rattler/blob/b90daf5032e5c83ead9f9623576105ee08be837b/crates/rattler_conda_types/src/package/archive_identifier.rs#L11
+                    let Some((_, _, filename)) = file_name.rsplitn(3, '-').next_tuple() else {
+                        continue;
+                    };
+
+                    if name == filename {
+                        matched_names.insert(name.clone());
+
+                        let prefix_record = PrefixRecord::from_path(&path)
+                            .into_diagnostic()
+                            .wrap_err_with(|| {
+                                format!("Couldn't parse JSON from {}", path.display())
+                            })?;
+
+                        records.push(prefix_record);
+                    }
+                }
+            }
+        }
+    }
+
+    // Check if all names in name_to_match were matched
+    if matched_names.len() == name_to_match.len() {
+        return Ok(Some(records));
+    }
+
+    Ok(None)
+}
+
 #[derive(thiserror::Error, Debug)]
 pub enum ToolCacheError {
     #[error("could not resolve '{}', {1}", .0.display())]
@@ -53,6 +112,7 @@ impl ToolCache {
         &self,
         spec: IsolatedToolSpec,
         context: &impl ToolInstaller,
+        cache_dir: &Path,
         channel_config: &ChannelConfig,
     ) -> miette::Result<Arc<IsolatedTool>> {
         let sender = match self.cache.entry(spec.clone()) {
@@ -84,13 +144,13 @@ impl ToolCache {
                 // Explicitly drop the entry, so we don't block any other tasks.
                 drop(entry);
 
-                // Drop the sender
+                // Drop the sender
                 drop(sender);
 
                 return match receiver.recv().await {
                     Ok(tool) => Ok(tool),
-                    Err(_) => miette::bail!(
-                        "a coalesced tool {} request install failed",
+                    Err(err) => miette::bail!(
+                        "installation of the {} tool failed. Reason: {err}",
                         spec.command
                     ),
                 };
@@ -115,11 +175,29 @@ impl ToolCache {
         // other tasks will find a pending entry and will wait for the tool
         // to become available.
        //
-        // Let's start by installing tool. If an error occurs we immediately return
-        // the error. This will drop the sender and all other waiting tasks will
-        // receive an error.
-        // Installation happens outside the critical section
-        let tool = Arc::new(context.install(&spec, channel_config).await?);
+
+        // Let's start by looking for an already existing environment that matches the spec
+        let tool = match self.get_file_system_cached(&spec, cache_dir).await? {
+            // No cached environment was found, so install the tool. If an error occurs we
+            // immediately return the error. This will drop the sender and all other waiting
+            // tasks will receive an error.
+            // Installation happens outside the critical section
+            None => {
+                tracing::debug!("no existing environment found for {:?}", spec.specs);
+                context.install(&spec, channel_config).await?
+            }
+
+            Some(tool) => {
+                tracing::debug!(
+                    "reusing existing environment in {} for {:?}",
+                    tool.prefix.display(),
+                    spec.specs
+                );
+                tool
+            }
+        };
+
+        let tool = Arc::new(tool);
 
         // Store the fetched files in the entry.
         self.cache
@@ -131,6 +209,83 @@ impl ToolCache {
 
         Ok(tool)
     }
+
+    /// Try to find an already existing environment with the same tool spec
+    /// in the cache directory.
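+    ///
+    /// Returns `Ok(None)` if the cache directory does not exist, if any spec has no
+    /// package name, or if no cached environment satisfies all of the given specs.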
+ pub async fn get_file_system_cached( + &self, + spec: &IsolatedToolSpec, + cache_dir: &Path, + ) -> miette::Result> { + // check if the cache directory exists + if !cache_dir.exists() { + return Ok(None); + } + + let specs: Vec = spec + .specs + .iter() + .filter_map(|match_spec| match_spec.name.as_ref()) + .map(|name| name.as_normalized().to_string()) + .collect(); + + if specs.len() != spec.specs.len() { + return Ok(None); + } + + // verify if we have a similar environment that match our matchspec + // we need to load all prefix record from all folders in the cache + // load all package records + let mut entries = tokio_fs::read_dir(&cache_dir).await.into_diagnostic()?; + let mut directories = Vec::new(); + + while let Some(entry) = entries.next_entry().await.into_diagnostic()? { + let path = entry.path(); + if path.is_dir() { + directories.push(path); + } + } + + // let's find existing package records + let mut records_of_records = Vec::new(); + + for dir in directories.iter() { + let records = find_spec_records(&dir.join("conda-meta"), specs.clone()).await?; + + if let Some(records) = records { + records_of_records.push((dir, records)); + } + } + + // Find the first set of records where all specs in the manifest are present + let matching_record = records_of_records.iter().find(|records| { + spec.specs.iter().all(|spec| { + records + .1 + .iter() + .any(|record| spec.matches(&record.repodata_record.package_record)) + }) + }); + + if let Some(records) = matching_record { + // Get the activation scripts + let activator = + Activator::from_path(records.0, ShellEnum::default(), Platform::current()).unwrap(); + + let activation_scripts = activator + .run_activation(ActivationVariables::from_env().unwrap_or_default(), None) + .unwrap(); + + let cached_tool = IsolatedTool::new( + spec.command.clone(), + records.0.to_path_buf(), + activation_scripts, + ); + + return Ok(Some(cached_tool)); + } + Ok(None) + } } #[cfg(test)] @@ -142,16 +297,56 @@ mod tests { ChannelConfig, MatchSpec, NamedChannelOrUrl, ParseStrictness, Platform, }; use reqwest_middleware::ClientWithMiddleware; - use tokio::sync::{Barrier, Mutex}; + use tokio::sync::{Barrier, Mutex, Semaphore}; use crate::{ tool::{ + cache::{find_spec_records, ToolCache}, installer::{ToolContext, ToolInstaller}, IsolatedTool, ToolSpec, }, IsolatedToolSpec, }; + const BAT_META_JSON: &str = "bat-0.24.0-h3bba108_1.json"; + + /// A test helper to create a temporary directory and write conda meta files. + /// This is used to simulate already installed tools. + struct CondaMetaWriter { + pub tmp_dir: PathBuf, + } + + impl CondaMetaWriter { + async fn new() -> Self { + let tempdir = tempfile::tempdir().unwrap(); + let tmp_dir = tempdir.path().to_path_buf(); + + tokio::fs::create_dir_all(&tmp_dir).await.unwrap(); + Self { tmp_dir } + } + + /// Write a meta-json file to the conda-meta directory. + /// If `override_name` is provided, the file will be written with that name. 
+        async fn write_meta_json(
+            &self,
+            meta_json: &str,
+            env_dir_name: &str,
+            override_name: Option<&str>,
+        ) {
+            let bat_conda_meta = self.tmp_dir.join(env_dir_name).join("conda-meta");
+            tokio::fs::create_dir_all(&bat_conda_meta).await.unwrap();
+
+            let meta_file = PathBuf::from(env!("CARGO_MANIFEST_DIR"))
+                .join("tests/data/conda-meta")
+                .join(meta_json);
+            // copy the file and override the original name if necessary
+            let name = override_name.unwrap_or(meta_json);
+            tokio::fs::copy(meta_file, bat_conda_meta.join(name))
+                .await
+                .unwrap();
+        }
+    }
+
     /// A test installer that will count how many times a tool was installed.
     /// This is used to verify that we only install a tool once.
     #[derive(Default, Clone)]
@@ -191,7 +386,7 @@ mod tests {
         let auth_client = ClientWithMiddleware::default();
         let channel_config = config.global_channel_config();
 
-        let tool_context = ToolContext::builder()
+        let tool_context = ToolContext::for_tests()
             .with_platform(compatible_target_platform())
             .with_client(auth_client.clone())
             .with_gateway(config.gateway(auth_client))
@@ -223,7 +418,7 @@ mod tests {
         let channel_config = ChannelConfig::default_with_root_dir(PathBuf::new());
 
         let tool_context = Arc::new(
-            ToolContext::builder()
+            ToolContext::for_tests()
                 .with_client(auth_client.clone())
                 .build(),
        );
@@ -257,7 +452,12 @@ mod tests {
 
             tool_context
                 .cache
-                .get_or_install_tool(tool_spec, &tool_installer, &channel_config)
+                .get_or_install_tool(
+                    tool_spec,
+                    &tool_installer,
+                    &tool_context.cache_dir,
+                    &channel_config,
+                )
                 .await
         });
 
@@ -318,7 +518,7 @@ mod tests {
         let channel_config = ChannelConfig::default_with_root_dir(PathBuf::new());
 
         let tool_context = Arc::new(
-            ToolContext::builder()
+            ToolContext::for_tests()
                 .with_client(auth_client.clone())
                 .build(),
        );
@@ -331,31 +531,67 @@ mod tests {
             channels: Vec::from([NamedChannelOrUrl::Name("conda-forge".to_string())]),
         };
 
-        // Let's imitate that we have 4 requests to install a tool
-        // we will use a barrier to ensure all tasks start at the same time.
-        let num_tasks = 2;
-        let barrier = Arc::new(Barrier::new(num_tasks));
         let mut handles = Vec::new();
 
-        for _ in 0..num_tasks {
-            let barrier = barrier.clone();
+        // We need to test that the failure of one task does not block other tasks
+        // from executing.
+        // To test this we synchronize the installation of the tool
+        // in the following way:
+        // the first task fails and releases the semaphore,
+        // so that the other task can proceed.
+        // In this way we can verify that a task failure is handled correctly
+        // and the other tasks can still proceed to install the tool.
 
-            let tool_context = tool_context.clone();
+        // It is necessary to do it in this way because,
+        // without synchronization, all tasks will be blocked on the waiting stage
+        // and the failure of one task will be propagated to all the others.
+ + let semaphore = Arc::new(Semaphore::new(1)); + { + let semaphore = semaphore.clone(); + let tool_context = tool_context.clone(); let tool_installer = tool_installer.clone(); let channel_config = channel_config.clone(); let tool_spec = tool_spec.clone(); let handle = tokio::spawn(async move { - barrier.wait().await; + let _sem = semaphore.acquire().await.unwrap(); tool_context .cache - .get_or_install_tool(tool_spec, &tool_installer, &channel_config) + .get_or_install_tool( + tool_spec, + &tool_installer, + &tool_context.cache_dir, + &channel_config, + ) .await }); + handles.push(handle); + } + { + let semaphore = semaphore.clone(); + + let tool_context = tool_context.clone(); + let tool_installer = tool_installer.clone(); + + let channel_config = channel_config.clone(); + let tool_spec = tool_spec.clone(); + let handle = tokio::spawn(async move { + let _sem = semaphore.acquire().await.unwrap(); + tool_context + .cache + .get_or_install_tool( + tool_spec, + &tool_installer, + &tool_context.cache_dir, + &channel_config, + ) + .await + }); handles.push(handle); } @@ -374,4 +610,136 @@ mod tests { let install_count = lock.get(&tool_spec).unwrap(); assert_eq!(install_count, &2); } + + #[tokio::test] + async fn test_can_find_from_filesystem() { + let config = Config::for_tests(); + + let tool_cache = ToolCache::new(); + + let conda_meta_builder = CondaMetaWriter::new().await; + + conda_meta_builder + .write_meta_json(BAT_META_JSON, "bat-somehash", None) + .await; + + let tool_spec = IsolatedToolSpec { + specs: vec![MatchSpec::from_str("bat", ParseStrictness::Strict).unwrap()], + command: "bat".into(), + channels: config.default_channels.clone(), + }; + + let tool = tool_cache + .get_file_system_cached(&tool_spec, &conda_meta_builder.tmp_dir) + .await + .unwrap() + .unwrap(); + + assert_eq!( + tool.prefix + .file_name() + .unwrap() + .to_string_lossy() + .to_string(), + "bat-somehash" + ); + assert_eq!(tool.command, "bat"); + } + + #[tokio::test] + async fn test_missing_from_filesystem() { + let config = Config::for_tests(); + + let tool_cache = ToolCache::new(); + + let conda_meta_builder = CondaMetaWriter::new().await; + + conda_meta_builder + .write_meta_json(BAT_META_JSON, "bat-somehash", None) + .await; + + let tool_spec = IsolatedToolSpec { + specs: vec![MatchSpec::from_str("bat==1.0.0", ParseStrictness::Strict).unwrap()], + command: "bat".into(), + channels: config.default_channels.clone(), + }; + + let tool = tool_cache + .get_file_system_cached(&tool_spec, &conda_meta_builder.tmp_dir) + .await + .unwrap(); + + assert!(tool.is_none()); + } + + #[tokio::test] + async fn test_find_specs() { + let conda_meta_builder = CondaMetaWriter::new().await; + + conda_meta_builder + .write_meta_json(BAT_META_JSON, "one-env", None) + .await; + + // we have there bat and batt. We need to find only bat + + let records = find_spec_records( + &conda_meta_builder + .tmp_dir + .join("one-env") + .join("conda-meta"), + vec!["bat".to_string()], + ) + .await + .unwrap() + .unwrap(); + + insta::assert_yaml_snapshot!(records); + } + + #[tokio::test] + async fn test_find_more_specs() { + let conda_meta_builder = CondaMetaWriter::new().await; + + // write only one meta-json file, but ask for more specs + conda_meta_builder + .write_meta_json(BAT_META_JSON, "one-env", None) + .await; + + // we have there bat and batt. 
We need to find only bat
+
+        let records = find_spec_records(
+            &conda_meta_builder
+                .tmp_dir
+                .join("one-env")
+                .join("conda-meta"),
+            vec!["bat".to_string(), "boltons".to_string()],
+        )
+        .await
+        .unwrap();
+
+        assert!(records.is_none());
+    }
+
+    #[tokio::test]
+    async fn test_skip_wrong_json() {
+        let conda_meta_builder = CondaMetaWriter::new().await;
+
+        // verify that even when we have a wrongly named json file, we will skip reading it.
+        conda_meta_builder
+            .write_meta_json(BAT_META_JSON, "one-env", Some("wrong.json"))
+            .await;
+
+        // we ask only for bat, but the wrongly named file should not be picked up
+
+        let records = find_spec_records(
+            &conda_meta_builder
+                .tmp_dir
+                .join("one-env")
+                .join("conda-meta"),
+            vec!["bat".to_string()],
+        )
+        .await
+        .unwrap();
+        assert!(records.is_none());
+    }
 }
diff --git a/crates/pixi_build_frontend/src/tool/installer.rs b/crates/pixi_build_frontend/src/tool/installer.rs
index 08b36c2d5..3147bb341 100644
--- a/crates/pixi_build_frontend/src/tool/installer.rs
+++ b/crates/pixi_build_frontend/src/tool/installer.rs
@@ -2,7 +2,7 @@ use std::fmt::Debug;
 use std::future::Future;
 use std::path::PathBuf;
 
-use pixi_consts::consts::{CACHED_BUILD_ENVS_DIR, CONDA_REPODATA_CACHE_DIR};
+use pixi_consts::consts::CACHED_BUILD_TOOL_ENVS_DIR;
 use pixi_progress::wrap_in_progress;
 use pixi_utils::{EnvironmentHash, PrefixGuard};
 use rattler::{install::Installer, package_cache::PackageCache};
@@ -93,8 +93,8 @@ impl ToolContextBuilder {
     pub fn build(self) -> ToolContext {
         let gateway = self.gateway.unwrap_or_else(|| {
             Gateway::builder()
+                .with_cache_dir(self.cache_dir.clone())
                 .with_client(self.client.clone())
-                .with_cache_dir(self.cache_dir.join(CONDA_REPODATA_CACHE_DIR))
                 .finish()
         });
 
@@ -114,7 +114,8 @@ impl ToolContextBuilder {
 pub struct ToolContext {
     // Authentication client to use for fetching repodata.
     pub client: ClientWithMiddleware,
-    // The cache directory to use for the tools.
+    // The cache directory to use while installing tools.
+    // This cache directory is also passed to the Gateway and Installer.
     pub cache_dir: PathBuf,
     // The gateway to use for fetching repodata.
    pub gateway: Gateway,
@@ -148,6 +149,16 @@ impl ToolContext {
         ToolContextBuilder::new()
     }
 
+    /// Create a new tool context builder to be used for tests
+    ///
+    /// The main difference is that it uses a tmp cache directory
+    /// instead of the rattler one
+    #[cfg(test)]
+    pub fn for_tests() -> ToolContextBuilder {
+        let cache_dir = tempfile::tempdir().unwrap().into_path();
+        ToolContextBuilder::new().with_cache_dir(cache_dir)
+    }
+
     /// Instantiate a tool from a specification.
     ///
     /// If the tool is not already cached, it will be created, installed and cached.
@@ -174,7 +185,12 @@ impl ToolContext { let installed = self .cache - .get_or_install_tool(spec, self, channel_config) + .get_or_install_tool( + spec, + self, + &self.cache_dir.join(CACHED_BUILD_TOOL_ENVS_DIR), + channel_config, + ) .await .map_err(ToolCacheError::Install)?; @@ -233,9 +249,16 @@ impl ToolInstaller for ToolContext { self.platform, ); + // ensure that the cache directory exists + if !self.cache_dir.exists() { + tokio::fs::create_dir(&self.cache_dir) + .await + .into_diagnostic()?; + } + let cached_dir = self .cache_dir - .join(CACHED_BUILD_ENVS_DIR) + .join(CACHED_BUILD_TOOL_ENVS_DIR) .join(cache.name()); let mut prefix_guard = PrefixGuard::new(&cached_dir).into_diagnostic()?; diff --git a/crates/pixi_build_frontend/src/tool/snapshots/pixi_build_frontend__tool__cache__tests__find_specs.snap b/crates/pixi_build_frontend/src/tool/snapshots/pixi_build_frontend__tool__cache__tests__find_specs.snap new file mode 100644 index 000000000..8ab61707d --- /dev/null +++ b/crates/pixi_build_frontend/src/tool/snapshots/pixi_build_frontend__tool__cache__tests__find_specs.snap @@ -0,0 +1,36 @@ +--- +source: crates/pixi_build_frontend/src/tool/cache.rs +expression: records +--- +- build: h3bba108_1 + build_number: 1 + constrains: + - __osx >=11.0 + depends: + - __osx >=11.0 + license: MIT + license_family: MIT + md5: d85cac82d09d751449e5f9ae718ff429 + name: bat + sha256: 20efa2746790fd3da66cd897fe2bdf4a66221b9ddaa54bb1eb9bd4a4fff4ea99 + size: 2335050 + subdir: osx-arm64 + timestamp: 1729864776947 + version: 0.24.0 + fn: bat-0.24.0-h3bba108_1.conda + url: "https://conda.anaconda.org/conda-forge/osx-arm64/bat-0.24.0-h3bba108_1.conda" + channel: "https://conda.anaconda.org/conda-forge/" + extracted_package_dir: /Users/graf/Library/Caches/rattler/cache/pkgs/bat-0.24.0-h3bba108_1 + files: + - bin/bat + paths_data: + paths_version: 1 + paths: + - _path: bin/bat + path_type: hardlink + sha256: fb935cb88e82cde24a0a1847d3acd614adf7050bcafa9a47a44e7d06fff2d1ed + sha256_in_prefix: fb935cb88e82cde24a0a1847d3acd614adf7050bcafa9a47a44e7d06fff2d1ed + size_in_bytes: 4847920 + link: + source: /Users/graf/Library/Caches/rattler/cache/pkgs/bat-0.24.0-h3bba108_1 + type: 1 diff --git a/crates/pixi_build_frontend/tests/data/conda-meta/bat-0.24.0-h3bba108_1.json b/crates/pixi_build_frontend/tests/data/conda-meta/bat-0.24.0-h3bba108_1.json new file mode 100644 index 000000000..beffae966 --- /dev/null +++ b/crates/pixi_build_frontend/tests/data/conda-meta/bat-0.24.0-h3bba108_1.json @@ -0,0 +1,42 @@ +{ + "build": "h3bba108_1", + "build_number": 1, + "constrains": [ + "__osx >=11.0" + ], + "depends": [ + "__osx >=11.0" + ], + "license": "MIT", + "license_family": "MIT", + "md5": "d85cac82d09d751449e5f9ae718ff429", + "name": "bat", + "sha256": "20efa2746790fd3da66cd897fe2bdf4a66221b9ddaa54bb1eb9bd4a4fff4ea99", + "size": 2335050, + "subdir": "osx-arm64", + "timestamp": 1729864776947, + "version": "0.24.0", + "fn": "bat-0.24.0-h3bba108_1.conda", + "url": "https://conda.anaconda.org/conda-forge/osx-arm64/bat-0.24.0-h3bba108_1.conda", + "channel": "https://conda.anaconda.org/conda-forge/", + "extracted_package_dir": "/Users/graf/Library/Caches/rattler/cache/pkgs/bat-0.24.0-h3bba108_1", + "files": [ + "bin/bat" + ], + "paths_data": { + "paths_version": 1, + "paths": [ + { + "_path": "bin/bat", + "path_type": "hardlink", + "sha256": "fb935cb88e82cde24a0a1847d3acd614adf7050bcafa9a47a44e7d06fff2d1ed", + "sha256_in_prefix": "fb935cb88e82cde24a0a1847d3acd614adf7050bcafa9a47a44e7d06fff2d1ed", + "size_in_bytes": 4847920 + } + ] + 
},
+  "link": {
+    "source": "/Users/graf/Library/Caches/rattler/cache/pkgs/bat-0.24.0-h3bba108_1",
+    "type": 1
+  }
+}
diff --git a/crates/pixi_consts/src/consts.rs b/crates/pixi_consts/src/consts.rs
index e74b8dcc9..3d59bc23d 100644
--- a/crates/pixi_consts/src/consts.rs
+++ b/crates/pixi_consts/src/consts.rs
@@ -31,7 +31,9 @@ pub const CONDA_META_DIR: &str = "conda-meta";
 pub const PYPI_CACHE_DIR: &str = "uv-cache";
 pub const CONDA_PYPI_MAPPING_CACHE_DIR: &str = "conda-pypi-mapping";
 pub const CACHED_ENVS_DIR: &str = "cached-envs-v0";
-pub const CACHED_BUILD_ENVS_DIR: &str = "cached-build-envs-v0";
+// TODO: CACHED_BUILD_ENVS_DIR was deprecated in favor of CACHED_BUILD_TOOL_ENVS_DIR. This constant will be removed in a future release.
+pub const _CACHED_BUILD_ENVS_DIR: &str = "cached-build-envs-v0";
+pub const CACHED_BUILD_TOOL_ENVS_DIR: &str = "cached-build-tool-envs-v0";
 
 pub const CONDA_INSTALLER: &str = "conda";
 
diff --git a/examples/boltons/pixi.toml b/examples/boltons/pixi.toml
index 5700ee66d..e4d606627 100644
--- a/examples/boltons/pixi.toml
+++ b/examples/boltons/pixi.toml
@@ -21,6 +21,8 @@ description = "Add a short description here"
 name = "boltons"
 version = "0.1.0"
 
+[host-dependencies]
+hatchling = "*"
 
 [build-system]
 build-backend = { name = "pixi-build-rattler-build", version = "*" }
diff --git a/src/cli/clean.rs b/src/cli/clean.rs
index 483c1849d..110ffcd26 100644
--- a/src/cli/clean.rs
+++ b/src/cli/clean.rs
@@ -65,6 +65,10 @@ pub struct CacheArgs {
     #[arg(long)]
     pub repodata: bool,
 
+    /// Clean only the build backend tools cache.
+    #[arg(long)]
+    pub tool: bool,
+
     /// Answer yes to all questions.
     #[clap(short = 'y', long = "yes", alias = "assume-yes")]
     assume_yes: bool,
@@ -154,6 +158,12 @@ async fn clean_cache(args: CacheArgs) -> miette::Result<()> {
     if args.exec {
         dirs.push(cache_dir.join(consts::CACHED_ENVS_DIR));
     }
+    if args.tool {
+        dirs.push(cache_dir.join(consts::CACHED_BUILD_TOOL_ENVS_DIR));
+        // TODO: Also clean the deprecated cache directory.
+        // This will be removed in a future release.
+        dirs.push(cache_dir.join(consts::_CACHED_BUILD_ENVS_DIR));
+    }
     if dirs.is_empty() && (args.assume_yes || dialoguer::Confirm::new()
         .with_prompt("No cache types specified using the flags.\nDo you really want to remove all cache directories from your machine?")
         .interact_opt()
From 26a6392ee0d9c6e45bd4859d176caf10c4f455d7 Mon Sep 17 00:00:00 2001
From: Ruben Arts
Date: Mon, 9 Dec 2024 13:14:05 +0100
Subject: [PATCH 02/20] chore: version to 0.39.1 (#2666)

---
 CHANGELOG.md                           | 40 ++++++++++++++++
 CITATION.cff                           |  4 +-
 Cargo.lock                             |  2 +-
 Cargo.toml                             |  2 +-
 crates/pixi_consts/src/consts.rs       |  2 +-
 crates/pixi_trampoline/Cargo.lock      | 63 ++++++++++++--------------
 docs/advanced/github_actions.md        |  2 +-
 docs/advanced/production_deployment.md |  2 +-
 docs/ide_integration/devcontainer.md   |  2 +-
 install/install.ps1                    |  2 +-
 install/install.sh                     |  2 +-
 schema/schema.json                     |  4 +-
 tbump.toml                             |  4 +-
 tests/integration_python/common.py     |  2 +-
 14 files changed, 84 insertions(+), 49 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 034acca35..71367cfa1 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -5,6 +5,46 @@ All notable changes to this project will be documented in this file.
 
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
+### [0.39.1] - 2024-12-09 +#### Added + +- Add proper unit testing for PyPI installation and fix re-installation issues by @tdejager in [#2617](https://github.com/prefix-dev/pixi/pull/2617) +- Add detailed json output for task list by @jjjermiah in [#2608](https://github.com/prefix-dev/pixi/pull/2608) +- Add `pixi project name` CLI by @LiamConnors in [#2649](https://github.com/prefix-dev/pixi/pull/2649) + +#### Changed + +- Use `fs-err` in more places by @Hofer-Julian in [#2636](https://github.com/prefix-dev/pixi/pull/2636) + +#### Documentation + +- Remove `tclf` from community.md📑 by @KarelZe in [#2619](https://github.com/prefix-dev/pixi/pull/2619) +- Update contributing guide by @LiamConnors in [#2650](https://github.com/prefix-dev/pixi/pull/2650) +- Update clean cache CLI doc by @LiamConnors in [#2657](https://github.com/prefix-dev/pixi/pull/2657) + +#### Fixed + +- Color formatting detection on stdout by @blmaier in [#2613](https://github.com/prefix-dev/pixi/pull/2613) +- Use correct dependency location for `pixi upgrade` by @Hofer-Julian in [#2472](https://github.com/prefix-dev/pixi/pull/2472) +- Regression `detached-environments` not used by @ruben-arts in [#2627](https://github.com/prefix-dev/pixi/pull/2627) +- Allow configuring pypi insecure host by @zen-xu in [#2521](https://github.com/prefix-dev/pixi/pull/2521)[#2622](https://github.com/prefix-dev/pixi/pull/2622) + +#### Refactor + +- Rework CI and use `cargo-dist` for releases by @baszalmstra in [#2566](https://github.com/prefix-dev/pixi/pull/2566) + +#### `pixi build` Preview work +- Refactor to `[build-system.build-backend]` by @baszalmstra in [#2601](https://github.com/prefix-dev/pixi/pull/2601) +- Remove ipc override from options and give it manually to test by @wolfv in [#2629](https://github.com/prefix-dev/pixi/pull/2629) +- Pixi build trigger rebuild by @Hofer-Julian in [#2641](https://github.com/prefix-dev/pixi/pull/2641) +- Add variant config to `[workspace.build-variants]` by @wolfv in [#2634](https://github.com/prefix-dev/pixi/pull/2634) +- Add request coalescing for isolated tools by @nichmor in [#2589](https://github.com/prefix-dev/pixi/pull/2589) +- Add example using `rich` and `pixi-build-python` and remove flask by @Hofer-Julian in [#2638](https://github.com/prefix-dev/pixi/pull/2638) +- (simple) build tool override by @wolfv in [#2620](https://github.com/prefix-dev/pixi/pull/2620) +- Add caching of build tool installation by @nichmor in [#2637](https://github.com/prefix-dev/pixi/pull/2637) +#### New Contributors +* @blmaier made their first contribution in [#2613](https://github.com/prefix-dev/pixi/pull/2613) + ### [0.39.0] - 2024-12-02 #### ✨ Highlights diff --git a/CITATION.cff b/CITATION.cff index 2dd4f6bfd..6622caae3 100644 --- a/CITATION.cff +++ b/CITATION.cff @@ -30,8 +30,8 @@ authors: - given-names: Julian family-names: Hofer email: julian.hofer@protonmail.com -repository-code: 'https://github.com/prefix-dev/pixi/releases/tag/v0.39.0' -url: 'https://pixi.sh/v0.39.0' +repository-code: 'https://github.com/prefix-dev/pixi/releases/tag/v0.39.1' +url: 'https://pixi.sh/v0.39.1' abstract: >- A cross-platform, language agnostic, package/project management tool for development in virtual environments. 
diff --git a/Cargo.lock b/Cargo.lock index 9e0ca9de0..1c904bc40 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3590,7 +3590,7 @@ dependencies = [ [[package]] name = "pixi" -version = "0.39.0" +version = "0.39.1" dependencies = [ "ahash", "assert_matches", diff --git a/Cargo.toml b/Cargo.toml index bd8f64ba8..0a4deeda5 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -165,7 +165,7 @@ license.workspace = true name = "pixi" readme.workspace = true repository.workspace = true -version = "0.39.0" +version = "0.39.1" [features] default = ["rustls-tls"] diff --git a/crates/pixi_consts/src/consts.rs b/crates/pixi_consts/src/consts.rs index 3d59bc23d..ae9f0a2d7 100644 --- a/crates/pixi_consts/src/consts.rs +++ b/crates/pixi_consts/src/consts.rs @@ -14,7 +14,7 @@ pub const CONFIG_FILE: &str = "config.toml"; pub const PIXI_DIR: &str = ".pixi"; pub const PIXI_VERSION: &str = match option_env!("PIXI_VERSION") { Some(v) => v, - None => "0.39.0", + None => "0.39.1", }; pub const PREFIX_FILE_NAME: &str = "pixi_env_prefix"; pub const ENVIRONMENTS_DIR: &str = "envs"; diff --git a/crates/pixi_trampoline/Cargo.lock b/crates/pixi_trampoline/Cargo.lock index 0315f8119..becd132f1 100644 --- a/crates/pixi_trampoline/Cargo.lock +++ b/crates/pixi_trampoline/Cargo.lock @@ -869,9 +869,9 @@ dependencies = [ [[package]] name = "file_url" -version = "0.1.7" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eff487eda48708def359958613c6c9762d9c4f8396db240e37083758ccb01c79" +checksum = "c2789b7b3e160530d89d1e126aff9811c3421bb77ebb9b62ffa3abbeba69f12d" dependencies = [ "itertools 0.13.0", "percent-encoding", @@ -1854,9 +1854,9 @@ dependencies = [ [[package]] name = "miette" -version = "7.2.0" +version = "7.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4edc8853320c2a0dab800fbda86253c8938f6ea88510dc92c5f1ed20e794afc1" +checksum = "317f146e2eb7021892722af37cf1b971f0a70c8406f487e24952667616192c64" dependencies = [ "backtrace", "backtrace-ext", @@ -1874,9 +1874,9 @@ dependencies = [ [[package]] name = "miette-derive" -version = "7.2.0" +version = "7.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dcf09caffaac8068c346b6df2a7fc27a177fd20b39421a39ce0a211bde679a6c" +checksum = "23c9b935fbe1d6cbd1dac857b54a688145e2d93f48db36010514d0f612d0ad67" dependencies = [ "proc-macro2", "quote", @@ -2548,9 +2548,9 @@ dependencies = [ [[package]] name = "rattler" -version = "0.28.3" +version = "0.28.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4df153e466ba59551f1967e46b312bc9743cec6f424691f20c45c99553d97b0d" +checksum = "238dd1b6ca1f2e622e438092e6523a37c6b018e25b236406a6aa182d13885b39" dependencies = [ "anyhow", "digest", @@ -2586,14 +2586,15 @@ dependencies = [ [[package]] name = "rattler_cache" -version = "0.2.11" +version = "0.2.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "465972d151a672bc000b64c19a67c4c3b4ffeb11bd433eaf4dfe4c9aa04d748a" +checksum = "63a90b8eb4a8406619d0685a18d0f55ffae762399258ca5eb422c55ba1fe7282" dependencies = [ "anyhow", "dashmap", "digest", "dirs", + "fs-err 3.0.0", "fs4", "futures", "fxhash", @@ -2614,9 +2615,9 @@ dependencies = [ [[package]] name = "rattler_conda_types" -version = "0.29.2" +version = "0.29.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "157b9dca7f9ed5bf7f04202fdd9fda5d8a76f76e937a3b6fd94ed8b2d55565bc" +checksum = "fa6e2010c1a639982d9c22766598159dbeda9b5701ab01a863c66e55520c1ba1" 
dependencies = [ "chrono", "dirs", @@ -2677,9 +2678,9 @@ dependencies = [ [[package]] name = "rattler_networking" -version = "0.21.6" +version = "0.21.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b547cd28190f62c7f580493e41b7f6c9abc6bbef755e8703e8faea890ca2397f" +checksum = "40f5ad1da789b5bbe9585b4d255f2df82c676a951e2f002a76bf9fa7389c4962" dependencies = [ "anyhow", "async-trait", @@ -2706,12 +2707,13 @@ dependencies = [ [[package]] name = "rattler_package_streaming" -version = "0.22.14" +version = "0.22.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef13aafa7b14262517b9b2ccce8a96f821c27e52489e2e9c67e57dbbfb932031" +checksum = "7b881c9f633407c171a62809e754315e09d273edcf4e9217d2cc4b102721e65c" dependencies = [ "bzip2", "chrono", + "fs-err 3.0.0", "futures-util", "num_cpus", "rattler_conda_types", @@ -2734,9 +2736,9 @@ dependencies = [ [[package]] name = "rattler_redaction" -version = "0.1.3" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa822f7a897914ff30e372814234047d556c98f3813fad616c93147b38dab7e7" +checksum = "575cd5c830c5c2d25412531c5a3d307a0ca66ddccc466baaa5219cfa9e90c60e" dependencies = [ "reqwest", "reqwest-middleware", @@ -2745,9 +2747,9 @@ dependencies = [ [[package]] name = "rattler_repodata_gateway" -version = "0.21.23" +version = "0.21.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7999914f20afeeca1019c61956a27707f6258020e343975f979e25b172e6252f" +checksum = "315d710364bd4ca46ed37fbb06f50d3e4774f5b7775fb77f1f232d35632fa149" dependencies = [ "anyhow", "async-compression", @@ -2800,9 +2802,9 @@ dependencies = [ [[package]] name = "rattler_shell" -version = "0.22.7" +version = "0.22.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "47fbaf13fcf46e10c72c266f975f9d979e13bfe4ff159e4778b2357cf0ac2530" +checksum = "070b851b93cd8973a6e9377c06323aca1d8faeeeb5b59f80f3cd1e2c8a7684bf" dependencies = [ "enum_dispatch", "fs-err 3.0.0", @@ -2946,9 +2948,9 @@ dependencies = [ [[package]] name = "reqwest-middleware" -version = "0.3.3" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "562ceb5a604d3f7c885a792d42c199fd8af239d0a51b2fa6a78aafa092452b04" +checksum = "d1ccd3b55e711f91a9885a2fa6fbbb2e39db1776420b062efc058c6410f7e5e3" dependencies = [ "anyhow", "async-trait", @@ -3424,12 +3426,6 @@ dependencies = [ "version_check", ] -[[package]] -name = "smawk" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b7c388c1b5e93756d0c740965c41e8822f866621d41acbdf6336a6a168f8840c" - [[package]] name = "socket2" version = "0.5.7" @@ -3576,12 +3572,12 @@ dependencies = [ [[package]] name = "terminal_size" -version = "0.3.0" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "21bebf2b7c9e0a515f6e0f8c51dc0f8e4696391e6f1ff30379559f8365fb0df7" +checksum = "5352447f921fda68cf61b4101566c0bdb5104eff6804d0678e5227580ab6a4e9" dependencies = [ "rustix", - "windows-sys 0.48.0", + "windows-sys 0.59.0", ] [[package]] @@ -3590,7 +3586,6 @@ version = "0.16.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "23d434d3f8967a09480fb04132ebe0a3e088c173e6d0ee7897abbdf4eab0f8b9" dependencies = [ - "smawk", "unicode-linebreak", "unicode-width 0.1.14", ] diff --git a/docs/advanced/github_actions.md b/docs/advanced/github_actions.md index 5edfc5752..e173e0b3f 100644 --- 
a/docs/advanced/github_actions.md +++ b/docs/advanced/github_actions.md @@ -15,7 +15,7 @@ We created [prefix-dev/setup-pixi](https://github.com/prefix-dev/setup-pixi) to ```yaml - uses: prefix-dev/setup-pixi@v0.8.0 with: - pixi-version: v0.39.0 + pixi-version: v0.39.1 cache: true auth-host: prefix.dev auth-token: ${{ secrets.PREFIX_DEV_TOKEN }} diff --git a/docs/advanced/production_deployment.md b/docs/advanced/production_deployment.md index e5428b7d1..fcbe16f4f 100644 --- a/docs/advanced/production_deployment.md +++ b/docs/advanced/production_deployment.md @@ -33,7 +33,7 @@ It also makes use of `pixi shell-hook` to not rely on pixi being installed in th For more examples, take a look at [pavelzw/pixi-docker-example](https://github.com/pavelzw/pixi-docker-example). ```Dockerfile -FROM ghcr.io/prefix-dev/pixi:0.39.0 AS build +FROM ghcr.io/prefix-dev/pixi:0.39.1 AS build # copy source code, pixi.toml and pixi.lock to the container WORKDIR /app diff --git a/docs/ide_integration/devcontainer.md b/docs/ide_integration/devcontainer.md index 303f9ed45..9b1b81d53 100644 --- a/docs/ide_integration/devcontainer.md +++ b/docs/ide_integration/devcontainer.md @@ -11,7 +11,7 @@ Then, create the following two files in the `.devcontainer` directory: ```dockerfile title=".devcontainer/Dockerfile" FROM mcr.microsoft.com/devcontainers/base:jammy -ARG PIXI_VERSION=v0.39.0 +ARG PIXI_VERSION=v0.39.1 RUN curl -L -o /usr/local/bin/pixi -fsSL --compressed "https://github.com/prefix-dev/pixi/releases/download/${PIXI_VERSION}/pixi-$(uname -m)-unknown-linux-musl" \ && chmod +x /usr/local/bin/pixi \ diff --git a/install/install.ps1 b/install/install.ps1 index 986172d36..4872675a8 100644 --- a/install/install.ps1 +++ b/install/install.ps1 @@ -18,7 +18,7 @@ .LINK https://github.com/prefix-dev/pixi .NOTES - Version: v0.39.0 + Version: v0.39.1 #> param ( [string] $PixiVersion = 'latest', diff --git a/install/install.sh b/install/install.sh index a55e4f73d..fc0987ee8 100644 --- a/install/install.sh +++ b/install/install.sh @@ -1,6 +1,6 @@ #!/usr/bin/env bash set -euo pipefail -# Version: v0.39.0 +# Version: v0.39.1 __wrap__() { diff --git a/schema/schema.json b/schema/schema.json index 5bc182e36..0625850d2 100644 --- a/schema/schema.json +++ b/schema/schema.json @@ -1,6 +1,6 @@ { "$schema": "http://json-schema.org/draft-07/schema#", - "$id": "https://pixi.sh/v0.39.0/schema/manifest/schema.json", + "$id": "https://pixi.sh/v0.39.1/schema/manifest/schema.json", "title": "`pixi.toml` manifest file", "description": "The configuration for a [`pixi`](https://pixi.sh) project.", "type": "object", @@ -22,7 +22,7 @@ "title": "Schema", "description": "The schema identifier for the project's configuration", "type": "string", - "default": "https://pixi.sh/v0.39.0/schema/manifest/schema.json", + "default": "https://pixi.sh/v0.39.1/schema/manifest/schema.json", "format": "uri-reference" }, "activation": { diff --git a/tbump.toml b/tbump.toml index 9be24f4e1..d0f2f1112 100644 --- a/tbump.toml +++ b/tbump.toml @@ -1,7 +1,7 @@ github_url = "https://github.com/prefix-dev/pixi" [version] -current = "0.39.0" +current = "0.39.1" # Example of a semver regexp. 
# Make sure this matches current_version before @@ -19,7 +19,7 @@ regex = ''' [git] # The current version will get updated when tbump is run -message_template = "Bump version: 0.39.0 → {new_version}" +message_template = "Bump version: 0.39.1 → {new_version}" tag_template = "v{new_version}" # For each file to patch, add a [[file]] config diff --git a/tests/integration_python/common.py b/tests/integration_python/common.py index efb7aba23..8cdde8194 100644 --- a/tests/integration_python/common.py +++ b/tests/integration_python/common.py @@ -4,7 +4,7 @@ import subprocess import os -PIXI_VERSION = "0.39.0" +PIXI_VERSION = "0.39.1" ALL_PLATFORMS = '["linux-64", "osx-64", "win-64", "linux-ppc64le", "linux-aarch64"]' From 815c9aed8e32a5ef91da7d7b4de65187b89eebf4 Mon Sep 17 00:00:00 2001 From: Ruben Arts Date: Tue, 10 Dec 2024 11:40:17 +0100 Subject: [PATCH 03/20] fix: release workflow (#2673) Co-authored-by: nichmor --- .github/workflows/release.yml | 23 ++++++--- Cargo.toml | 6 ++- install/install.sh | 13 ++++- src/cli/self_update.rs | 81 ++++++++++++++++++++++++++++++- tests/pixi_flat_archive.tar.gz | Bin 0 -> 338 bytes tests/pixi_nested_archive.tar.gz | Bin 0 -> 420 bytes 6 files changed, 112 insertions(+), 11 deletions(-) create mode 100644 tests/pixi_flat_archive.tar.gz create mode 100644 tests/pixi_nested_archive.tar.gz diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 059269836..c54d53068 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -62,11 +62,22 @@ jobs: - uses: actions/checkout@v4 with: submodules: recursive - - name: Install dist - # we specify bash to get pipefail; it guards against the `curl` command - # failing. otherwise `sh` won't catch that `curl` returned non-0 - shell: bash - run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.25.1/cargo-dist-installer.sh | sh" +# Turn back on when we're on the released version of dist +# - name: Install dist +# # we specify bash to get pipefail; it guards against the `curl` command +# # failing. 
otherwise `sh` won't catch that `curl` returned non-0 +# shell: bash +# run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.25.1/cargo-dist-installer.sh | sh" + # Install fork of dist to allow for binaries in the root of the tarball + + # Use fork of dist to allow for binaries in the root of the tarball + - name: Install cargo-dist from git + uses: baptiste0928/cargo-install@v3 + with: + crate: cargo-sort + git: https://github.com/ruben-arts/cargo-dist + branch: feature/allow_binaries_in_root_of_tar + - name: Cache dist uses: actions/upload-artifact@v4 with: @@ -288,4 +299,4 @@ jobs: # Write and read notes from a file to avoid quoting breaking things echo "$ANNOUNCEMENT_BODY" > $RUNNER_TEMP/notes.txt - gh release create "${{ needs.plan.outputs.tag }}" --target "$RELEASE_COMMIT" $PRERELEASE_FLAG --title "$ANNOUNCEMENT_TITLE" --notes-file "$RUNNER_TEMP/notes.txt" artifacts/* + gh release create "${{ needs.plan.outputs.tag }}" --target "$RELEASE_COMMIT" $PRERELEASE_FLAG --draft true --title "$ANNOUNCEMENT_TITLE" --notes-file "$RUNNER_TEMP/notes.txt" artifacts/* diff --git a/Cargo.toml b/Cargo.toml index 0a4deeda5..30e80637a 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -362,7 +362,7 @@ tokio = { workspace = true, features = ["rt"] } # Config for 'dist' [workspace.metadata.dist] # The preferred dist version to use in CI (Cargo.toml SemVer syntax) -cargo-dist-version = "0.25.1" +cargo-dist-version = "0.26.0-prerelease.3" # CI backends to support ci = "github" # The installers to generate for each app @@ -386,10 +386,12 @@ dispatch-releases = true github-release = "announce" # Whether to auto-include files like READMEs, LICENSEs, and CHANGELOGs (default true) auto-includes = false +# Whether to put the binaries in the root of the archive or not, this only applies to unix archives +binaries-in-root = true # Which actions to run on pull requests pr-run-mode = "skip" # Skip checking whether the specified configuration files are up to date -allow-dirty = ["msi"] +allow-dirty = ["msi", "ci"] # Whether to sign macOS executables macos-sign = true diff --git a/install/install.sh b/install/install.sh index fc0987ee8..295a4e423 100644 --- a/install/install.sh +++ b/install/install.sh @@ -93,8 +93,19 @@ mkdir -p "$BIN_DIR" if [[ "$(uname -o)" == "Msys" ]]; then unzip "$TEMP_FILE" -d "$BIN_DIR" else - tar -xzf "$TEMP_FILE" -C "$BIN_DIR" + # Extract to a temporary directory first + TEMP_DIR=$(mktemp -d) + tar -xzf "$TEMP_FILE" -C "$TEMP_DIR" + + # Find and move the `pixi` binary, making sure to handle the case where it's in a subdirectory + if [[ -f "$TEMP_DIR/pixi" ]]; then + mv "$TEMP_DIR/pixi" "$BIN_DIR/" + else + mv "$(find "$TEMP_DIR" -type f -name pixi)" "$BIN_DIR/" + fi + chmod +x "$BIN_DIR/pixi" + rm -rf "$TEMP_DIR" fi echo "The 'pixi' binary is installed into '${BIN_DIR}'" diff --git a/src/cli/self_update.rs b/src/cli/self_update.rs index fb6bc1949..a990ddf3a 100644 --- a/src/cli/self_update.rs +++ b/src/cli/self_update.rs @@ -7,6 +7,7 @@ use miette::{Context, IntoDiagnostic}; use pixi_consts::consts; use reqwest::Client; use serde::Deserialize; +use tempfile::{NamedTempFile, TempDir}; /// Update pixi to the latest version or a specific version. 
#[derive(Debug, clap::Parser)] @@ -133,8 +134,7 @@ pub async fn execute(args: Args) -> miette::Result<()> { // Uncompress the archive if archive_name.ends_with(".tar.gz") { - let mut archive = Archive::new(GzDecoder::new(archived_tempfile.as_file())); - archive.unpack(binary_tempdir).into_diagnostic()?; + unpack_tar_gz(&archived_tempfile, binary_tempdir)?; } else if archive_name.ends_with(".zip") { let mut archive = zip::ZipArchive::new(archived_tempfile.as_file()).into_diagnostic()?; archive.extract(binary_tempdir).into_diagnostic()?; @@ -163,6 +163,34 @@ pub async fn execute(args: Args) -> miette::Result<()> { Ok(()) } +/// Unpack files from a tar.gz archive to a target directory. +fn unpack_tar_gz( + archived_tempfile: &NamedTempFile, + binary_tempdir: &TempDir, +) -> miette::Result<()> { + let mut archive = Archive::new(GzDecoder::new(archived_tempfile.as_file())); + + for entry in archive.entries().into_diagnostic()? { + let mut entry = entry.into_diagnostic()?; + let path = entry.path().into_diagnostic()?; + + // Skip directories; we only care about files. + if entry.header().entry_type().is_file() { + // Create a flat path by stripping any directory components. + let stripped_path = path + .file_name() + .ok_or_else(|| miette::miette!("Failed to extract file name from {:?}", path))?; + + // Construct the final path in the target directory. + let final_path = binary_tempdir.path().join(stripped_path); + + // Unpack the file to the destination. + entry.unpack(final_path).into_diagnostic()?; + } + } + Ok(()) +} + async fn retrieve_target_version(version: &Option) -> miette::Result { // Fetch the target version from github. // The target version is: @@ -218,3 +246,52 @@ pub async fn execute_stub(_: Args) -> miette::Result<()> { message.unwrap_or("This version of pixi was built without self-update support. 
Please use your package manager to update pixi.") ) } + +#[cfg(test)] +mod tests { + use std::path::PathBuf; + + #[test] + pub fn test_unarchive_flat_structure() { + // This archive contains a single file named "a_file" + // So we expect the file to be extracted to the target directory + + let manifest_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR")); + let archive_path = manifest_dir.join("tests").join("pixi_flat_archive.tar.gz"); + + let named_tempfile = tempfile::NamedTempFile::new().unwrap(); + let binary_tempdir = tempfile::tempdir().unwrap(); + + fs_err::copy(archive_path, named_tempfile.path()).unwrap(); + + super::unpack_tar_gz(&named_tempfile, &binary_tempdir).unwrap(); + + let binary_path = binary_tempdir.path().join("a_file"); + assert!(binary_path.exists()); + } + + #[test] + pub fn test_unarchive_nested_structure() { + // This archive contains following nested structure + // pixi_nested_archive.tar.gz + // ├── some_pixi (directory) + // │ └── some_pixi (file) + // So we want to test that we can extract only the file to the target directory + // without parent directory + let manifest_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR")); + let archive_path = manifest_dir + .join("tests") + .join("pixi_nested_archive.tar.gz"); + + let named_tempfile = tempfile::NamedTempFile::new().unwrap(); + let binary_tempdir = tempfile::tempdir().unwrap(); + + fs_err::copy(archive_path, named_tempfile.path()).unwrap(); + + super::unpack_tar_gz(&named_tempfile, &binary_tempdir).unwrap(); + + let binary_path = binary_tempdir.path().join("some_pixi"); + assert!(binary_path.exists()); + assert!(binary_path.is_file()); + } +} diff --git a/tests/pixi_flat_archive.tar.gz b/tests/pixi_flat_archive.tar.gz new file mode 100644 index 0000000000000000000000000000000000000000..757b20e2635b1f6f36a19a1e7575be7f63c3ac5b GIT binary patch literal 338 zcmV-Y0j>TYiwFQW5m;vc1MQW~O2a@9$5%nbibp-lK0%!QNMa5>L{VC(RNEl+RF=4r zK$FH8H7B3LgW$ne@xk0R^@E_P2ZKoSgJE`;|17hCf5sS#aS%=aOx>69#}5)@Ct@A<#5vbBnq5jqk+ z?ho+@&(LRRO$AkPsY36ces-nnd4t(fSKO;uFG-d@&NuyOWQZi0NFzz7H!>EnFSX!# z^zeAM-QW0yM>c%NvB*18oXd2lzW=~n{}U+rpHt2e&g*EV%!2!$0gkdTl5Ju$woAF~ zn1;iESwzt?ZQSebxr6;q_w>NXMV6(;S9Wc?C$5_xhJMVr^lqWW9XwP;y|b#KPabdm k^>Yx7e{jdE}n*Y>>X8t!cG&I!73(LAYo&XL202TA8c>n+a literal 0 HcmV?d00001 diff --git a/tests/pixi_nested_archive.tar.gz b/tests/pixi_nested_archive.tar.gz new file mode 100644 index 0000000000000000000000000000000000000000..951ee14f39fe93ff3aa19b973c29c179a957427a GIT binary patch literal 420 zcmV;V0bBkbiwFRA6If>e1MQbjOT#c2#;YLW#G@W1pP=S%vmJJ*qHHjkZbRm2l$uGw zZnc}*lb^$b;K8rr2Q#%}9d&jJ%S4+Wyd+KEv`Na7JT-fxp)Y!Y7kP#%AcUMT@KIVx z2{BE|q)q^$99s|}Mmd0p5XcMwm5peIyV2MQ*A)bXt>Y+~Y~cb|z&lZQpf=zUc(PjWC07crOOd zz!h=ATle|palg5?L^!nqRRfx3NQq5dJy=$}D~3=l1SBy%XZ{&5BPW6u{= z#0e>5M#{u2vkJ2y#wEr5EbV4hR*U2UwyN(3YJV zmwBCLBwqUMWhg9D)gP#P$0Gg5ddZysh!O)Zbwf%ixc<}cZz^+way=|cf8zfb=9cz< zD?$6eLX+xWofBNBf0W(-xxBCa-zp$*zW<{XTiO8re^UJ`{r}6=|7ZWlAcOk+*Jv~v OmAnHUC0q;u7ytlMkI@eR literal 0 HcmV?d00001 From a6928b7ba7f4e4752730ba77d2513c3726014ceb Mon Sep 17 00:00:00 2001 From: Ruben Arts Date: Tue, 10 Dec 2024 11:44:54 +0100 Subject: [PATCH 04/20] fix: type in release.yml (#2682) --- .github/workflows/release.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index c54d53068..923efe91b 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -74,7 +74,7 @@ jobs: - name: Install cargo-dist from git uses: baptiste0928/cargo-install@v3 with: - crate: cargo-sort + 
crate: cargo-dist git: https://github.com/ruben-arts/cargo-dist branch: feature/allow_binaries_in_root_of_tar From 1b3f99ec4040abc7e3b7d0f184ffb477423af005 Mon Sep 17 00:00:00 2001 From: Tim de Jager Date: Tue, 10 Dec 2024 15:34:42 +0100 Subject: [PATCH 05/20] fix: removed duplication PyPI install test (#2669) --- src/install_pypi/plan/test/harness.rs | 35 ++++++++++++--------------- 1 file changed, 16 insertions(+), 19 deletions(-) diff --git a/src/install_pypi/plan/test/harness.rs b/src/install_pypi/plan/test/harness.rs index b64ee84d5..5e4f31a1e 100644 --- a/src/install_pypi/plan/test/harness.rs +++ b/src/install_pypi/plan/test/harness.rs @@ -325,12 +325,17 @@ impl<'a> InstalledDistProvider<'a> for MockedSitePackages { /// Builder to create pypi package data, this is essentially the locked data struct PyPIPackageDataBuilder; +enum UrlType { + Direct, + Other, +} + impl PyPIPackageDataBuilder { fn registry>(name: S, version: S) -> PypiPackageData { PypiPackageData { name: pep508_rs::PackageName::new(name.as_ref().to_owned()).unwrap(), version: pep440_rs::Version::from_str(version.as_ref()).unwrap(), - // We dont check these fields, for determining the installation from a registry + // We don't check these fields, for determining the installation from a registry // requires_dist: vec![], requires_python: None, @@ -364,21 +369,13 @@ impl PyPIPackageDataBuilder { } } - fn direct_url>(name: S, version: S, url: Url) -> PypiPackageData { + fn url>(name: S, version: S, url: Url, url_type: UrlType) -> PypiPackageData { // Create new url with direct+ in the scheme - let url = Url::parse(&format!("direct+{}", url)).unwrap(); - PypiPackageData { - name: pep508_rs::PackageName::new(name.as_ref().to_owned()).unwrap(), - version: pep440_rs::Version::from_str(version.as_ref()).unwrap(), - requires_dist: vec![], - requires_python: None, - location: UrlOrPath::Url(url), - hash: None, - editable: false, - } - } - - fn git>(name: S, version: S, url: Url) -> PypiPackageData { + let url = if matches!(url_type, UrlType::Direct) { + Url::parse(&format!("direct+{}", url)).unwrap() + } else { + url + }; PypiPackageData { name: pep508_rs::PackageName::new(name.as_ref().to_owned()).unwrap(), version: pep440_rs::Version::from_str(version.as_ref()).unwrap(), @@ -463,7 +460,7 @@ impl RequiredPackages { pub fn add_archive>(mut self, name: S, version: S, url: Url) -> Self { let package_name = uv_normalize::PackageName::new(name.as_ref().to_owned()).expect("should be correct"); - let data = PyPIPackageDataBuilder::direct_url(name, version, url); + let data = PyPIPackageDataBuilder::url(name, version, url, UrlType::Direct); self.required.insert(package_name, data); self } @@ -471,12 +468,12 @@ impl RequiredPackages { pub fn add_git>(mut self, name: S, version: S, url: Url) -> Self { let package_name = uv_normalize::PackageName::new(name.as_ref().to_owned()).expect("should be correct"); - let data = PyPIPackageDataBuilder::git(name, version, url); + let data = PyPIPackageDataBuilder::url(name, version, url, UrlType::Other); self.required.insert(package_name, data); self } - /// Convert the required packages where it the data is borrowed + /// Convert the required packages where the data is borrowed /// this is needed to pass it into the [`InstallPlanner`] pub fn to_borrowed(&self) -> HashMap { self.required.iter().map(|(k, v)| (k.clone(), v)).collect() @@ -491,7 +488,7 @@ fn python_version() -> uv_pep440::Version { uv_pep440::Version::from_str(TEST_PYTHON_VERSION).unwrap() } -/// Simple function to create an install 
planner +/// Simple function to create an installation planner pub fn install_planner() -> InstallPlanner { InstallPlanner::new( uv_cache::Cache::temp().unwrap(), From 52e80bcc0bea82662193c48bb7d95d438f6c51bd Mon Sep 17 00:00:00 2001 From: Ruben Arts Date: Wed, 11 Dec 2024 15:23:23 +0100 Subject: [PATCH 06/20] fix: also install cargo-dist on build-local (#2683) --- .github/workflows/release.yml | 81 +++++++++++++++++++++++++++++++++-- Cargo.toml | 2 +- 2 files changed, 78 insertions(+), 5 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 923efe91b..49a6ec20e 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -82,7 +82,8 @@ jobs: uses: actions/upload-artifact@v4 with: name: cargo-dist-cache - path: ~/.cargo/bin/dist + # TODO: revert after switching back to released dist + path: ~/.cargo-install/cargo-dist/bin/dist # sure would be cool if github gave us proper conditionals... # so here's a doubly-nested ternary-via-truthiness to try to provide the best possible # functionality based on whether this is a pull_request, and whether it's from a fork. @@ -127,6 +128,10 @@ jobs: CODESIGN_CERTIFICATE: ${{ secrets.CODESIGN_CERTIFICATE }} CODESIGN_CERTIFICATE_PASSWORD: ${{ secrets.CODESIGN_CERTIFICATE_PASSWORD }} CODESIGN_IDENTITY: ${{ secrets.CODESIGN_IDENTITY }} + CODESIGN_ADDITIONAL_ARGS: ${{ vars.CODESIGN_ADDITIONAL_ARGS }} + APPLEID_TEAMID: ${{ secrets.APPLEID_TEAMID }} + APPLEID_USERNAME: ${{ secrets.APPLEID_USERNAME }} + APPLEID_PASSWORD: ${{ secrets.APPLEID_PASSWORD }} steps: - name: enable windows longpaths run: | @@ -134,8 +139,13 @@ jobs: - uses: actions/checkout@v4 with: submodules: recursive - - name: Install dist - run: ${{ matrix.install_dist }} + # Install fork of dist to allow for binaries in the root of the tarball + - name: Install cargo-dist from git + uses: baptiste0928/cargo-install@v3 + with: + crate: cargo-dist + git: https://github.com/ruben-arts/cargo-dist + branch: feature/allow_binaries_in_root_of_tar # Get the dist-manifest - name: Fetch local artifacts uses: actions/download-artifact@v4 @@ -262,6 +272,69 @@ jobs: name: artifacts-dist-manifest path: dist-manifest.json + # Unpack binaries, required for setup-pixi + - name: Unpack binaries tar + run: | + set -e pipefail # fail if any command fails + mkdir -p unpacked-artifacts + for artifact in target/distrib/pixi-*.tar.gz; do + echo artifact: $artifact + # Extract the base name without the extension + base_name=$(basename "$artifact" .tar.gz) + + # Extract the artifact contents + tar -xvf "$artifact" -O pixi > unpacked-artifacts/$base_name + done + - name: Unpack binaries zip + run: | + set -e pipefail # fail if any command fails + for artifact in target/distrib/pixi-*.zip; do + echo artifact: $artifact + # Extract the base name without the extension + base_name=$(basename "$artifact" .zip) + + # Extract the artifact contents + # Because zip is only used on windows we can assume the binary is an exe + unzip -p "$artifact" pixi.exe > unpacked-artifacts/$base_name.exe + done + + # Upload unpacked artifacts, not sure how to do this in one go as you have to name the artifact + - name: Upload unpacked artifact for pixi-aarch64-unknown-linux-musl + uses: actions/upload-artifact@v4 + with: + name: artifacts-pixi-aarch64-unknown-linux-musl + path: unpacked-artifacts/pixi-aarch64-unknown-linux-musl + + - name: Upload unpacked artifact for pixi-x86_64-unknown-linux-musl + uses: actions/upload-artifact@v4 + with: + name: 
artifacts-pixi-x86_64-unknown-linux-musl + path: unpacked-artifacts/pixi-x86_64-unknown-linux-musl + + - name: Upload unpacked artifact for pixi-aarch64-apple-darwin + uses: actions/upload-artifact@v4 + with: + name: artifacts-pixi-aarch64-apple-darwin + path: unpacked-artifacts/pixi-aarch64-apple-darwin + + - name: Upload unpacked artifact for pixi-x86_64-apple-darwin + uses: actions/upload-artifact@v4 + with: + name: artifacts-pixi-x86_64-apple-darwin + path: unpacked-artifacts/pixi-x86_64-apple-darwin + + - name: Upload unpacked artifact for pixi-x86_64-pc-windows-msvc.exe + uses: actions/upload-artifact@v4 + with: + name: artifacts-pixi-x86_64-pc-windows-msvc.exe + path: unpacked-artifacts/pixi-x86_64-pc-windows-msvc.exe + + - name: Upload unpacked artifact for pixi-aarch64-pc-windows-msvc.exe + uses: actions/upload-artifact@v4 + with: + name: artifacts-pixi-aarch64-pc-windows-msvc.exe + path: unpacked-artifacts/pixi-aarch64-pc-windows-msvc.exe + # Create a GitHub Release while uploading all files to it announce: needs: @@ -299,4 +372,4 @@ jobs: # Write and read notes from a file to avoid quoting breaking things echo "$ANNOUNCEMENT_BODY" > $RUNNER_TEMP/notes.txt - gh release create "${{ needs.plan.outputs.tag }}" --target "$RELEASE_COMMIT" $PRERELEASE_FLAG --draft true --title "$ANNOUNCEMENT_TITLE" --notes-file "$RUNNER_TEMP/notes.txt" artifacts/* + gh release create "${{ needs.plan.outputs.tag }}" --target "$RELEASE_COMMIT" $PRERELEASE_FLAG --draft --title "$ANNOUNCEMENT_TITLE" --notes-file "$RUNNER_TEMP/notes.txt" artifacts/* diff --git a/Cargo.toml b/Cargo.toml index 30e80637a..97dd243d5 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -362,7 +362,7 @@ tokio = { workspace = true, features = ["rt"] } # Config for 'dist' [workspace.metadata.dist] # The preferred dist version to use in CI (Cargo.toml SemVer syntax) -cargo-dist-version = "0.26.0-prerelease.3" +cargo-dist-version = "0.25.1" # CI backends to support ci = "github" # The installers to generate for each app From 5561cc388355b4cc807a9895fc2703b11c8710da Mon Sep 17 00:00:00 2001 From: Ruben Arts Date: Wed, 11 Dec 2024 15:27:23 +0100 Subject: [PATCH 07/20] chore: version to 0.39.2 (#2690) --- CHANGELOG.md | 3 +++ CITATION.cff | 4 ++-- Cargo.lock | 2 +- Cargo.toml | 2 +- crates/pixi_consts/src/consts.rs | 2 +- docs/advanced/github_actions.md | 2 +- docs/advanced/production_deployment.md | 2 +- docs/ide_integration/devcontainer.md | 2 +- install/install.ps1 | 2 +- install/install.sh | 2 +- schema/schema.json | 4 ++-- tbump.toml | 4 ++-- tests/integration_python/common.py | 2 +- 13 files changed, 18 insertions(+), 15 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 71367cfa1..11778d777 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,9 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). +### [0.39.2] - 2024-12-11 +Patch release to fix the binary generation in CI. 
+ ### [0.39.1] - 2024-12-09 #### Added diff --git a/CITATION.cff b/CITATION.cff index 6622caae3..7119cf6e6 100644 --- a/CITATION.cff +++ b/CITATION.cff @@ -30,8 +30,8 @@ authors: - given-names: Julian family-names: Hofer email: julian.hofer@protonmail.com -repository-code: 'https://github.com/prefix-dev/pixi/releases/tag/v0.39.1' -url: 'https://pixi.sh/v0.39.1' +repository-code: 'https://github.com/prefix-dev/pixi/releases/tag/v0.39.2' +url: 'https://pixi.sh/v0.39.2' abstract: >- A cross-platform, language agnostic, package/project management tool for development in virtual environments. diff --git a/Cargo.lock b/Cargo.lock index 1c904bc40..0120c17ca 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3590,7 +3590,7 @@ dependencies = [ [[package]] name = "pixi" -version = "0.39.1" +version = "0.39.2" dependencies = [ "ahash", "assert_matches", diff --git a/Cargo.toml b/Cargo.toml index 97dd243d5..8c52b1b9b 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -165,7 +165,7 @@ license.workspace = true name = "pixi" readme.workspace = true repository.workspace = true -version = "0.39.1" +version = "0.39.2" [features] default = ["rustls-tls"] diff --git a/crates/pixi_consts/src/consts.rs b/crates/pixi_consts/src/consts.rs index ae9f0a2d7..ddd720040 100644 --- a/crates/pixi_consts/src/consts.rs +++ b/crates/pixi_consts/src/consts.rs @@ -14,7 +14,7 @@ pub const CONFIG_FILE: &str = "config.toml"; pub const PIXI_DIR: &str = ".pixi"; pub const PIXI_VERSION: &str = match option_env!("PIXI_VERSION") { Some(v) => v, - None => "0.39.1", + None => "0.39.2", }; pub const PREFIX_FILE_NAME: &str = "pixi_env_prefix"; pub const ENVIRONMENTS_DIR: &str = "envs"; diff --git a/docs/advanced/github_actions.md b/docs/advanced/github_actions.md index e173e0b3f..9be6474cb 100644 --- a/docs/advanced/github_actions.md +++ b/docs/advanced/github_actions.md @@ -15,7 +15,7 @@ We created [prefix-dev/setup-pixi](https://github.com/prefix-dev/setup-pixi) to ```yaml - uses: prefix-dev/setup-pixi@v0.8.0 with: - pixi-version: v0.39.1 + pixi-version: v0.39.2 cache: true auth-host: prefix.dev auth-token: ${{ secrets.PREFIX_DEV_TOKEN }} diff --git a/docs/advanced/production_deployment.md b/docs/advanced/production_deployment.md index fcbe16f4f..cda9bb547 100644 --- a/docs/advanced/production_deployment.md +++ b/docs/advanced/production_deployment.md @@ -33,7 +33,7 @@ It also makes use of `pixi shell-hook` to not rely on pixi being installed in th For more examples, take a look at [pavelzw/pixi-docker-example](https://github.com/pavelzw/pixi-docker-example). 
```Dockerfile -FROM ghcr.io/prefix-dev/pixi:0.39.1 AS build +FROM ghcr.io/prefix-dev/pixi:0.39.2 AS build # copy source code, pixi.toml and pixi.lock to the container WORKDIR /app diff --git a/docs/ide_integration/devcontainer.md b/docs/ide_integration/devcontainer.md index 9b1b81d53..d8d695025 100644 --- a/docs/ide_integration/devcontainer.md +++ b/docs/ide_integration/devcontainer.md @@ -11,7 +11,7 @@ Then, create the following two files in the `.devcontainer` directory: ```dockerfile title=".devcontainer/Dockerfile" FROM mcr.microsoft.com/devcontainers/base:jammy -ARG PIXI_VERSION=v0.39.1 +ARG PIXI_VERSION=v0.39.2 RUN curl -L -o /usr/local/bin/pixi -fsSL --compressed "https://github.com/prefix-dev/pixi/releases/download/${PIXI_VERSION}/pixi-$(uname -m)-unknown-linux-musl" \ && chmod +x /usr/local/bin/pixi \ diff --git a/install/install.ps1 b/install/install.ps1 index 4872675a8..9de9f2fe1 100644 --- a/install/install.ps1 +++ b/install/install.ps1 @@ -18,7 +18,7 @@ .LINK https://github.com/prefix-dev/pixi .NOTES - Version: v0.39.1 + Version: v0.39.2 #> param ( [string] $PixiVersion = 'latest', diff --git a/install/install.sh b/install/install.sh index 295a4e423..fe9dee837 100644 --- a/install/install.sh +++ b/install/install.sh @@ -1,6 +1,6 @@ #!/usr/bin/env bash set -euo pipefail -# Version: v0.39.1 +# Version: v0.39.2 __wrap__() { diff --git a/schema/schema.json b/schema/schema.json index 0625850d2..f624df24c 100644 --- a/schema/schema.json +++ b/schema/schema.json @@ -1,6 +1,6 @@ { "$schema": "http://json-schema.org/draft-07/schema#", - "$id": "https://pixi.sh/v0.39.1/schema/manifest/schema.json", + "$id": "https://pixi.sh/v0.39.2/schema/manifest/schema.json", "title": "`pixi.toml` manifest file", "description": "The configuration for a [`pixi`](https://pixi.sh) project.", "type": "object", @@ -22,7 +22,7 @@ "title": "Schema", "description": "The schema identifier for the project's configuration", "type": "string", - "default": "https://pixi.sh/v0.39.1/schema/manifest/schema.json", + "default": "https://pixi.sh/v0.39.2/schema/manifest/schema.json", "format": "uri-reference" }, "activation": { diff --git a/tbump.toml b/tbump.toml index d0f2f1112..d5205e0b6 100644 --- a/tbump.toml +++ b/tbump.toml @@ -1,7 +1,7 @@ github_url = "https://github.com/prefix-dev/pixi" [version] -current = "0.39.1" +current = "0.39.2" # Example of a semver regexp. 
# Make sure this matches current_version before @@ -19,7 +19,7 @@ regex = ''' [git] # The current version will get updated when tbump is run -message_template = "Bump version: 0.39.1 → {new_version}" +message_template = "Bump version: 0.39.2 → {new_version}" tag_template = "v{new_version}" # For each file to patch, add a [[file]] config diff --git a/tests/integration_python/common.py b/tests/integration_python/common.py index 8cdde8194..543d1c9c2 100644 --- a/tests/integration_python/common.py +++ b/tests/integration_python/common.py @@ -4,7 +4,7 @@ import subprocess import os -PIXI_VERSION = "0.39.1" +PIXI_VERSION = "0.39.2" ALL_PLATFORMS = '["linux-64", "osx-64", "win-64", "linux-ppc64le", "linux-aarch64"]' From 32b98a1a9fc759d6525ef49abf7840bc42cf3475 Mon Sep 17 00:00:00 2001 From: Matt McCormick Date: Wed, 11 Dec 2024 10:43:29 -0500 Subject: [PATCH 08/20] docs: add broken curl version check in install.sh (#2686) Co-authored-by: Ruben Arts --- install/install.sh | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/install/install.sh b/install/install.sh index fe9dee837..0af329f5c 100644 --- a/install/install.sh +++ b/install/install.sh @@ -69,6 +69,16 @@ else fi if hash curl 2> /dev/null; then + # Check that the curl version is not 8.8.0, which is broken for --write-out + # https://github.com/curl/curl/issues/13845 + if [[ "$(curl --version | head -n 1 | cut -d ' ' -f 2)" == "8.8.0" ]]; then + echo "error: curl 8.8.0 is known to be broken, please use a different version" + if [[ $(uname -o) == "Msys" ]]; then + echo "A common way to get an updated version of curl is to upgrade Git for Windows:" + echo " https://gitforwindows.org/" + fi + exit 1 + fi HTTP_CODE="$(curl -SL $CURL_OPTIONS "$DOWNLOAD_URL" --output "$TEMP_FILE" --write-out "%{http_code}")" if [[ "${HTTP_CODE}" -lt 200 || "${HTTP_CODE}" -gt 299 ]]; then echo "error: '${DOWNLOAD_URL}' is not available" From 311bd35caf10d793696de3e616499a524635c263 Mon Sep 17 00:00:00 2001 From: Hofer-Julian <30049909+Hofer-Julian@users.noreply.github.com> Date: Wed, 11 Dec 2024 16:44:21 +0100 Subject: [PATCH 09/20] docs: Add whitespace between text and images (#2689) --- docs/examples/cpp-sdl.md | 1 + docs/examples/opencv.md | 5 ++++- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/docs/examples/cpp-sdl.md b/docs/examples/cpp-sdl.md index 10bcfbcfb..1f3f97114 100644 --- a/docs/examples/cpp-sdl.md +++ b/docs/examples/cpp-sdl.md @@ -5,6 +5,7 @@ description: How to build and run an SDL application in C++ --- ![](https://storage.googleapis.com/prefix-cms-images/docs/sdl_examle.png) + The `cpp-sdl` example is located in the pixi repository. ```shell diff --git a/docs/examples/opencv.md b/docs/examples/opencv.md index 690d46caf..1703e5ba1 100644 --- a/docs/examples/opencv.md +++ b/docs/examples/opencv.md @@ -25,6 +25,7 @@ pixi run start ``` The screen that starts should look like this: + ![](https://storage.googleapis.com/prefix-cms-images/docs/opencv_face_recognition.png) Check out the `webcame_capture.py` to see how we detect a face. @@ -34,7 +35,9 @@ Check out the `webcame_capture.py` to see how we detect a face. Next to face recognition, a camera calibration example is also included. You'll need a checkerboard for this to work. 
-Print this: [![chessboard](https://github.com/opencv/opencv/blob/4.x/doc/pattern.png?raw=true)](https://github.com/opencv/opencv/blob/4.x/doc/pattern.png) +Print this: + +[![chessboard](https://github.com/opencv/opencv/blob/4.x/doc/pattern.png?raw=true)](https://github.com/opencv/opencv/blob/4.x/doc/pattern.png) Then run From 7487cb6df4f58b0cfc4eb62691fb88516468f814 Mon Sep 17 00:00:00 2001 From: Ruben Arts Date: Thu, 12 Dec 2024 11:14:44 +0100 Subject: [PATCH 10/20] feat: help user with lockfile update error (#2684) --- src/lock_file/mod.rs | 58 +++++++++++++++++++++++++++++++++++++++++--- 1 file changed, 54 insertions(+), 4 deletions(-) diff --git a/src/lock_file/mod.rs b/src/lock_file/mod.rs index d38ce8b1e..c0fe1c2e6 100644 --- a/src/lock_file/mod.rs +++ b/src/lock_file/mod.rs @@ -11,7 +11,7 @@ use crate::Project; use miette::{IntoDiagnostic, WrapErr}; pub(crate) use package_identifier::PypiPackageIdentifier; use pixi_record::PixiRecord; -use rattler_lock::{LockFile, PypiPackageData, PypiPackageEnvironmentData}; +use rattler_lock::{LockFile, ParseCondaLockError, PypiPackageData, PypiPackageEnvironmentData}; pub(crate) use records_by_name::{PixiRecordsByName, PypiRecordsByName}; pub(crate) use resolve::{ conda::resolve_conda, pypi::resolve_pypi, uv_resolution_context::UvResolutionContext, @@ -42,7 +42,16 @@ pub async fn load_lock_file(project: &Project) -> miette::Result { // Spawn a background task because loading the file might be IO bound. tokio::task::spawn_blocking(move || { LockFile::from_path(&lock_file_path) - .into_diagnostic() + .map_err(|err| match err { + ParseCondaLockError::IncompatibleVersion{ lock_file_version, max_supported_version} => { + miette::miette!( + help="Please update pixi to the latest version and try again.", + "The lock file version is {}, but only up to including version {} is supported by the current version.", + lock_file_version, max_supported_version + ) + } + _ => miette::miette!(err), + }) .wrap_err_with(|| { format!( "Failed to load lock file from `{}`", @@ -50,9 +59,50 @@ pub async fn load_lock_file(project: &Project) -> miette::Result { ) }) }) - .await - .unwrap_or_else(|e| Err(e).into_diagnostic()) + .await + .unwrap_or_else(|e| Err(e).into_diagnostic()) } else { Ok(LockFile::default()) } } + +#[cfg(test)] +mod tests { + use crate::{load_lock_file, Project}; + + #[tokio::test] + async fn test_load_newer_lock_file() { + // Test that loading a lock file with a newer version than the current + // version of pixi will return an error. + let temp_dir = tempfile::tempdir().unwrap(); + let project = r#" + [project] + name = "pixi" + channels = [] + platforms = [] + "#; + let project = + Project::from_str(temp_dir.path().join("pixi.toml").as_path(), project).unwrap(); + + let lock_file_path = project.lock_file_path(); + let raw_lock_file = r#" + version: 9999 + environments: + default: + channels: + - url: https://conda.anaconda.org/conda-forge/ + packages: {} + packages: [] + "#; + fs_err::tokio::write(&lock_file_path, raw_lock_file) + .await + .unwrap(); + + let err = load_lock_file(&project).await.unwrap_err(); + let dbg_err = format!("{:?}", err); + // Test that the error message contains the correct information. + assert!(dbg_err.contains("The lock file version is 9999, but only up to including version")); + // Also test that we try to help user by suggesting to update pixi. 
+ assert!(dbg_err.contains("Please update pixi to the latest version and try again.")); + } +} From b6d8407775b2153b2407beb9dfc2e8d6698ddd94 Mon Sep 17 00:00:00 2001 From: Bas Zalmstra Date: Thu, 12 Dec 2024 17:04:24 +0100 Subject: [PATCH 11/20] chore: bump rattler (#2700) --- Cargo.lock | 36 ++++++++++++++++++------------------ Cargo.toml | 16 ++++++++-------- 2 files changed, 26 insertions(+), 26 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 0120c17ca..ac77f6801 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4330,9 +4330,9 @@ dependencies = [ [[package]] name = "rattler" -version = "0.28.5" +version = "0.28.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "238dd1b6ca1f2e622e438092e6523a37c6b018e25b236406a6aa182d13885b39" +checksum = "7401c660efdc73b2c617b19458d8da7252870223fc01bcde9f74fc796bd7f9ec" dependencies = [ "anyhow", "clap", @@ -4371,9 +4371,9 @@ dependencies = [ [[package]] name = "rattler_cache" -version = "0.2.13" +version = "0.2.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "63a90b8eb4a8406619d0685a18d0f55ffae762399258ca5eb422c55ba1fe7282" +checksum = "a81013e4d652c9925652e1a131f3076bf8c68d09749d0fca02673370221b4326" dependencies = [ "anyhow", "dashmap", @@ -4400,9 +4400,9 @@ dependencies = [ [[package]] name = "rattler_conda_types" -version = "0.29.3" +version = "0.29.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa6e2010c1a639982d9c22766598159dbeda9b5701ab01a863c66e55520c1ba1" +checksum = "2ecb8083d6e91a3f45cc740430a6e5caba7bbf4eb20a51923a20d274ef146ced" dependencies = [ "chrono", "dirs", @@ -4453,9 +4453,9 @@ dependencies = [ [[package]] name = "rattler_lock" -version = "0.22.32" +version = "0.22.33" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8bddb02b5eb7bbf245438f1b5eb7feb44c0186bf7d8750b51c4cdf046e0dcff" +checksum = "575afe3b691918cca3d80fd4a9666ba61b984a22f2af14c5e3534cbd5092c6aa" dependencies = [ "chrono", "file_url", @@ -4517,9 +4517,9 @@ dependencies = [ [[package]] name = "rattler_package_streaming" -version = "0.22.16" +version = "0.22.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b881c9f633407c171a62809e754315e09d273edcf4e9217d2cc4b102721e65c" +checksum = "a858e5c43dcc64d42fdd617887b4b1a24930761f39654587bfe0ee44cf361fd6" dependencies = [ "bzip2", "chrono", @@ -4557,9 +4557,9 @@ dependencies = [ [[package]] name = "rattler_repodata_gateway" -version = "0.21.25" +version = "0.21.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "315d710364bd4ca46ed37fbb06f50d3e4774f5b7775fb77f1f232d35632fa149" +checksum = "8b6466b07cb03d75ed85f667cac3ab0e781daeb2ebbfce0b7fab8f83d2fd316f" dependencies = [ "anyhow", "async-compression", @@ -4612,9 +4612,9 @@ dependencies = [ [[package]] name = "rattler_shell" -version = "0.22.8" +version = "0.22.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "070b851b93cd8973a6e9377c06323aca1d8faeeeb5b59f80f3cd1e2c8a7684bf" +checksum = "36c0ab95a3fd48f3287545ca356abd51cbafd9433c901e9a6d6ffb07416e5d0d" dependencies = [ "enum_dispatch", "fs-err 3.0.0", @@ -4631,9 +4631,9 @@ dependencies = [ [[package]] name = "rattler_solve" -version = "1.2.4" +version = "1.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "948f7a8d90cfe3cd48637724d2112a82928cdedb1f17027ce1019927cc8ad977" +checksum = "8773a2993c1ab0d3517a19cc947d026a7ab33de4404681f8e72074c52b162670" dependencies = [ "chrono", 
"futures", @@ -4650,9 +4650,9 @@ dependencies = [ [[package]] name = "rattler_virtual_packages" -version = "1.1.11" +version = "1.1.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7567e46d8ad302bbc3c5d657843c957d481a2c6b7c45397d95e0cd4b8ae47a17" +checksum = "5d7693577efc6a7ad0717ecb31d411701275eb6626c94a163c2fdf9596ea4100" dependencies = [ "archspec", "libloading", diff --git a/Cargo.toml b/Cargo.toml index 8c52b1b9b..728b75918 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -106,18 +106,18 @@ which = "6.0.3" # Rattler crates file_url = "0.2.0" -rattler = { version = "0.28.5", default-features = false } -rattler_cache = { version = "0.2.13", default-features = false } -rattler_conda_types = { version = "0.29.3", default-features = false } +rattler = { version = "0.28.6", default-features = false } +rattler_cache = { version = "0.2.14", default-features = false } +rattler_conda_types = { version = "0.29.4", default-features = false } rattler_digest = { version = "1.0.3", default-features = false } -rattler_lock = { version = "0.22.32", default-features = false } +rattler_lock = { version = "0.22.33", default-features = false } rattler_networking = { version = "0.21.8", default-features = false, features = [ "google-cloud-auth", ] } -rattler_repodata_gateway = { version = "0.21.25", default-features = false } -rattler_shell = { version = "0.22.8", default-features = false } -rattler_solve = { version = "1.2.4", default-features = false } -rattler_virtual_packages = { version = "1.1.11", default-features = false } +rattler_repodata_gateway = { version = "0.21.26", default-features = false } +rattler_shell = { version = "0.22.9", default-features = false } +rattler_solve = { version = "1.2.5", default-features = false } +rattler_virtual_packages = { version = "1.1.12", default-features = false } # Bumping this to a higher version breaks the Windows path handling. From ab627716615ff3af6883214869c5a0398a96624e Mon Sep 17 00:00:00 2001 From: Hofer-Julian <30049909+Hofer-Julian@users.noreply.github.com> Date: Thu, 12 Dec 2024 17:57:08 +0100 Subject: [PATCH 12/20] fix: `pixi global sync` reports after each handled environment (#2698) --- src/cli/global/sync.rs | 22 ++++++++++----- .../pixi_global/test_global.py | 28 +++++++++++++++++++ 2 files changed, 43 insertions(+), 7 deletions(-) diff --git a/src/cli/global/sync.rs b/src/cli/global/sync.rs index f20fbbc08..31eb061fa 100644 --- a/src/cli/global/sync.rs +++ b/src/cli/global/sync.rs @@ -1,4 +1,4 @@ -use crate::global::{self, StateChanges}; +use crate::global; use clap::Parser; use fancy_display::FancyDisplay; use pixi_config::{Config, ConfigCli}; @@ -17,10 +17,15 @@ pub async fn execute(args: Args) -> miette::Result<()> { .await? 
.with_cli_config(config.clone()); - let mut state_changes = StateChanges::default(); + let mut has_changed = false; // Prune environments that are not listed - state_changes |= project.prune_old_environments().await?; + let state_change = project.prune_old_environments().await?; + + if state_change.has_changed() { + has_changed = true; + state_change.report(); + } // Remove broken files if let Err(err) = project.remove_broken_files().await { @@ -30,14 +35,17 @@ pub async fn execute(args: Args) -> miette::Result<()> { let mut errors = Vec::new(); for env_name in project.environments().keys() { match project.sync_environment(env_name, None).await { - Ok(state_change) => state_changes |= state_change, + Ok(state_change) => { + if state_change.has_changed() { + has_changed = true; + state_change.report(); + } + } Err(err) => errors.push((env_name, err)), } } - if state_changes.has_changed() { - state_changes.report(); - } else { + if !has_changed { eprintln!( "{}Nothing to do. The pixi global installation is already up-to-date.", console::style(console::Emoji("✔ ", "")).green() diff --git a/tests/integration_python/pixi_global/test_global.py b/tests/integration_python/pixi_global/test_global.py index c3b99362f..11c5ede25 100644 --- a/tests/integration_python/pixi_global/test_global.py +++ b/tests/integration_python/pixi_global/test_global.py @@ -129,6 +129,34 @@ def test_sync_change_expose(pixi: Path, tmp_pixi_workspace: Path, dummy_channel_ assert not dummy_in_disguise.is_file() +def test_sync_prune(pixi: Path, tmp_pixi_workspace: Path, dummy_channel_1: str) -> None: + env = {"PIXI_HOME": str(tmp_pixi_workspace)} + manifests = tmp_pixi_workspace.joinpath("manifests") + manifests.mkdir() + manifest = manifests.joinpath("pixi-global.toml") + toml = f""" + [envs.test] + channels = ["{dummy_channel_1}"] + dependencies = {{ dummy-a = "*" }} + exposed = {{ dummy-a = "dummy-a"}} + """ + parsed_toml = tomllib.loads(toml) + manifest.write_text(toml) + dummy_a = tmp_pixi_workspace / "bin" / exec_extension("dummy-a") + + # Test basic commands + verify_cli_command([pixi, "global", "sync"], env=env) + assert dummy_a.is_file() + + # Remove environment + del parsed_toml["envs"]["test"] + manifest.write_text(tomli_w.dumps(parsed_toml)) + verify_cli_command( + [pixi, "global", "sync"], env=env, stderr_contains="Removed environment test" + ) + assert not dummy_a.is_file() + + def test_sync_manually_remove_binary( pixi: Path, tmp_pixi_workspace: Path, dummy_channel_1: str ) -> None: From 8a67cd6983be28a22cd842327fcf4dbd21ab02af Mon Sep 17 00:00:00 2001 From: Tim de Jager Date: Fri, 13 Dec 2024 09:47:48 +0100 Subject: [PATCH 13/20] refactor: renamed some manifest types for clarity (#2704) --- .../diagnostics__missing_backend.snap | 5 -- .../pixi_manifest/src/manifests/manifest.rs | 61 ++++++++----------- crates/pixi_manifest/src/manifests/mod.rs | 2 +- ..._table_name__tests__nameless_to_toml.snap} | 2 +- ...space__tests__build_invalid_matchspec.snap | 5 -- ..._tests__build_section_deserialization.snap | 8 --- ...ifests__workspace__tests__invalid_key.snap | 7 --- crates/pixi_manifest/src/manifests/source.rs | 2 +- .../manifests/{project.rs => table_name.rs} | 0 crates/pixi_manifest/src/pyproject.rs | 4 +- ...ifest__test__run_dependencies_feature.snap | 12 ---- src/cli/upgrade.rs | 4 +- ...ts__dependency_set_with_build_section.snap | 5 -- 13 files changed, 34 insertions(+), 83 deletions(-) delete mode 100644 crates/pixi_build_frontend/tests/snapshots/diagnostics__missing_backend.snap rename 
crates/pixi_manifest/src/manifests/snapshots/{pixi_manifest__manifests__project__tests__nameless_to_toml.snap => pixi_manifest__manifests__table_name__tests__nameless_to_toml.snap} (84%) delete mode 100644 crates/pixi_manifest/src/manifests/snapshots/pixi_manifest__manifests__workspace__tests__build_invalid_matchspec.snap delete mode 100644 crates/pixi_manifest/src/manifests/snapshots/pixi_manifest__manifests__workspace__tests__build_section_deserialization.snap delete mode 100644 crates/pixi_manifest/src/manifests/snapshots/pixi_manifest__manifests__workspace__tests__invalid_key.snap rename crates/pixi_manifest/src/manifests/{project.rs => table_name.rs} (100%) delete mode 100644 crates/pixi_manifest/src/toml/snapshots/pixi_manifest__toml__manifest__test__run_dependencies_feature.snap delete mode 100644 src/project/snapshots/pixi__project__tests__dependency_set_with_build_section.snap diff --git a/crates/pixi_build_frontend/tests/snapshots/diagnostics__missing_backend.snap b/crates/pixi_build_frontend/tests/snapshots/diagnostics__missing_backend.snap deleted file mode 100644 index 95cbce92f..000000000 --- a/crates/pixi_build_frontend/tests/snapshots/diagnostics__missing_backend.snap +++ /dev/null @@ -1,5 +0,0 @@ ---- -source: crates/pixi_build_frontend/tests/diagnostics.rs -expression: snapshot ---- - × failed to setup a build backend, the backend tool could not be installed: No build match specs provided for 'non-existing' command. diff --git a/crates/pixi_manifest/src/manifests/manifest.rs b/crates/pixi_manifest/src/manifests/manifest.rs index e3fbcb4da..fbf3c6422 100644 --- a/crates/pixi_manifest/src/manifests/manifest.rs +++ b/crates/pixi_manifest/src/manifests/manifest.rs @@ -64,8 +64,9 @@ pub struct Manifest { /// Note that if the document is edited, this field will not be updated. pub contents: Option, - /// Editable toml document - pub document: ManifestSource, + /// Reference to the original toml source + /// used for modification + pub source: ManifestSource, /// The parsed workspace manifest pub workspace: WorkspaceManifest, @@ -90,7 +91,7 @@ impl Manifest { /// Return the toml manifest file name ('pixi.toml' or 'pyproject.toml') pub fn file_name(&self) -> &str { - match self.document { + match self.source { ManifestSource::PixiToml(_) => consts::PROJECT_MANIFEST, ManifestSource::PyProjectToml(_) => consts::PYPROJECT_MANIFEST, } @@ -153,7 +154,7 @@ impl Manifest { Ok(Self { path: manifest_path.to_path_buf(), contents: Some(contents), - document: source, + source, workspace: workspace_manifest, package: package_manifest, }) @@ -161,7 +162,7 @@ impl Manifest { /// Save the manifest to the file and update the contents pub fn save(&mut self) -> miette::Result<()> { - let contents = self.document.to_string(); + let contents = self.source.to_string(); fs_err::write(&self.path, &contents).into_diagnostic()?; self.contents = Some(contents); Ok(()) @@ -203,7 +204,7 @@ impl Manifest { } // Add the task to the Toml manifest - self.document + self.source .add_task(name.as_str(), task.clone(), platform, feature_name)?; // Add the task to the manifest @@ -230,7 +231,7 @@ impl Manifest { } } - self.document.add_environment( + self.source.add_environment( name.clone(), features.clone(), solve_group.clone(), @@ -257,7 +258,7 @@ impl Manifest { /// Removes an environment from the project. pub fn remove_environment(&mut self, name: &str) -> miette::Result { // Remove the environment from the TOML document - if !self.document.remove_environment(name)? { + if !self.source.remove_environment(name)? 
{ return Ok(false); } @@ -291,7 +292,7 @@ impl Manifest { .ok_or_else(|| miette::miette!("task {} does not exist", name))?; // Remove the task from the Toml manifest - self.document + self.source .remove_task(name.as_str(), platform, feature_name)?; // Remove the task from the internal manifest @@ -321,7 +322,7 @@ impl Manifest { current.extend(new.clone()); // Then to the TOML document - let platforms = self.document.get_array_mut("platforms", feature_name)?; + let platforms = self.source.get_array_mut("platforms", feature_name)?; for platform in new.iter() { platforms.push(platform.to_string()); } @@ -359,7 +360,7 @@ impl Manifest { // And from the TOML document let retained = retained.iter().map(|p| p.to_string()).collect_vec(); - let platforms = self.document.get_array_mut("platforms", feature_name)?; + let platforms = self.source.get_array_mut("platforms", feature_name)?; platforms.retain(|x| retained.contains(&x.to_string())); Ok(()) @@ -388,13 +389,8 @@ impl Manifest { .try_add_dependency(&name, &spec, spec_type, overwrite_behavior) { Ok(true) => { - self.document.add_dependency( - &name, - &spec, - spec_type, - platform, - feature_name, - )?; + self.source + .add_dependency(&name, &spec, spec_type, platform, feature_name)?; any_added = true; } Ok(false) => {} @@ -422,7 +418,7 @@ impl Manifest { .try_add_pep508_dependency(requirement, editable, overwrite_behavior) { Ok(true) => { - self.document.add_pypi_dependency( + self.source.add_pypi_dependency( requirement, platform, feature_name, @@ -466,7 +462,7 @@ impl Manifest { Err(e) => return Err(e.into()), }; // Remove the dependency from the TOML document - self.document + self.source .remove_dependency(dep, spec_type, platform, feature_name)?; } Ok(()) @@ -499,7 +495,7 @@ impl Manifest { Err(e) => return Err(e.into()), }; // Remove the dependency from the TOML document - self.document + self.source .remove_pypi_dependency(dep, platform, feature_name)?; } Ok(()) @@ -589,7 +585,7 @@ impl Manifest { *current = final_channels.clone(); // Update the TOML document - let channels = self.document.get_array_mut("channels", feature_name)?; + let channels = self.source.get_array_mut("channels", feature_name)?; channels.clear(); for channel in final_channels { channels.push(Value::from(channel)); @@ -632,7 +628,7 @@ impl Manifest { let current_clone = current.clone(); // And from the TOML document - let channels = self.document.get_array_mut("channels", feature_name)?; + let channels = self.source.get_array_mut("channels", feature_name)?; // clear and recreate from current list channels.clear(); for channel in current_clone.iter() { @@ -645,7 +641,7 @@ impl Manifest { /// Set the project name pub fn set_name(&mut self, name: &str) -> miette::Result<()> { self.workspace.workspace.name = name.to_string(); - self.document.set_name(name); + self.source.set_name(name); Ok(()) } @@ -654,7 +650,7 @@ impl Manifest { pub fn set_description(&mut self, description: &str) -> miette::Result<()> { // Update in both the manifest and the toml self.workspace.workspace.description = Some(description.to_string()); - self.document.set_description(description); + self.source.set_description(description); Ok(()) } @@ -667,7 +663,7 @@ impl Manifest { .into_diagnostic() .context("could not convert version to a valid project version")?, ); - self.document.set_version(version); + self.source.set_version(version); Ok(()) } @@ -1049,10 +1045,7 @@ mod tests { } // Write the toml to string and verify the content - assert_snapshot!( - format!("test_remove_{}", name), - 
manifest.document.to_string() - ); + assert_snapshot!(format!("test_remove_{}", name), manifest.source.to_string()); } fn test_remove_pypi( @@ -1103,7 +1096,7 @@ mod tests { // Write the toml to string and verify the content assert_snapshot!( format!("test_remove_pypi_{}", name), - manifest.document.to_string() + manifest.source.to_string() ); } @@ -1661,7 +1654,7 @@ platforms = ["linux-64", "win-64"] .iter() .any(|c| c.channel == prioritized_channel2.channel && c.priority == Some(-12i32))); - assert_snapshot!(manifest.document.to_string()); + assert_snapshot!(manifest.source.to_string()); } #[test] @@ -2012,7 +2005,7 @@ test = "test initial" &FeatureName::Named("test".to_string()), ) .unwrap(); - assert_snapshot!(manifest.document.to_string()); + assert_snapshot!(manifest.source.to_string()); } #[test] @@ -2135,7 +2128,7 @@ bar = "*" ">=2.3".to_string() ); - assert_snapshot!(manifest.document.to_string()); + assert_snapshot!(manifest.source.to_string()); } #[test] diff --git a/crates/pixi_manifest/src/manifests/mod.rs b/crates/pixi_manifest/src/manifests/mod.rs index b73f75b14..d6dfed811 100644 --- a/crates/pixi_manifest/src/manifests/mod.rs +++ b/crates/pixi_manifest/src/manifests/mod.rs @@ -8,7 +8,7 @@ //! manifest file which allows relating certain parts of the manifest back to //! the original source code. -pub mod project; +pub mod table_name; mod manifest; mod package; diff --git a/crates/pixi_manifest/src/manifests/snapshots/pixi_manifest__manifests__project__tests__nameless_to_toml.snap b/crates/pixi_manifest/src/manifests/snapshots/pixi_manifest__manifests__table_name__tests__nameless_to_toml.snap similarity index 84% rename from crates/pixi_manifest/src/manifests/snapshots/pixi_manifest__manifests__project__tests__nameless_to_toml.snap rename to crates/pixi_manifest/src/manifests/snapshots/pixi_manifest__manifests__table_name__tests__nameless_to_toml.snap index 3bbe1c518..e7fa187e5 100644 --- a/crates/pixi_manifest/src/manifests/snapshots/pixi_manifest__manifests__project__tests__nameless_to_toml.snap +++ b/crates/pixi_manifest/src/manifests/snapshots/pixi_manifest__manifests__table_name__tests__nameless_to_toml.snap @@ -1,5 +1,5 @@ --- -source: crates/pixi_manifest/src/manifests/project.rs +source: crates/pixi_manifest/src/manifests/table_name.rs expression: table --- "rattler >=1" = ">=1" diff --git a/crates/pixi_manifest/src/manifests/snapshots/pixi_manifest__manifests__workspace__tests__build_invalid_matchspec.snap b/crates/pixi_manifest/src/manifests/snapshots/pixi_manifest__manifests__workspace__tests__build_invalid_matchspec.snap deleted file mode 100644 index 49b456e6e..000000000 --- a/crates/pixi_manifest/src/manifests/snapshots/pixi_manifest__manifests__workspace__tests__build_invalid_matchspec.snap +++ /dev/null @@ -1,5 +0,0 @@ ---- -source: crates/pixi_manifest/src/manifests/workspace.rs -expression: err.unwrap().to_string() ---- -"TOML parse error at line 8, column 25\n |\n8 | dependencies = [\"python-build-backend > > 12\"]\n | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\nunable to parse version spec: > > 12\n" diff --git a/crates/pixi_manifest/src/manifests/snapshots/pixi_manifest__manifests__workspace__tests__build_section_deserialization.snap b/crates/pixi_manifest/src/manifests/snapshots/pixi_manifest__manifests__workspace__tests__build_section_deserialization.snap deleted file mode 100644 index f55db85c1..000000000 --- a/crates/pixi_manifest/src/manifests/snapshots/pixi_manifest__manifests__workspace__tests__build_section_deserialization.snap +++ /dev/null @@ -1,8 +0,0 @@ 
---- -source: crates/pixi_manifest/src/manifests/workspace.rs -expression: manifest.build.clone().unwrap() ---- -dependencies: - - python-build-backend >12 -build-backend: python-build-backend -channels: [] diff --git a/crates/pixi_manifest/src/manifests/snapshots/pixi_manifest__manifests__workspace__tests__invalid_key.snap b/crates/pixi_manifest/src/manifests/snapshots/pixi_manifest__manifests__workspace__tests__invalid_key.snap deleted file mode 100644 index 38f7bb4ff..000000000 --- a/crates/pixi_manifest/src/manifests/snapshots/pixi_manifest__manifests__workspace__tests__invalid_key.snap +++ /dev/null @@ -1,7 +0,0 @@ ---- -source: crates/pixi_manifest/src/manifests/workspace.rs -expression: "examples.into_iter().map(|example|\nWorkspaceManifest::from_toml_str(&example).unwrap_err().to_string()).collect::>().join(\"\\n\")" ---- -unknown field `foobar`, expected one of `project`, `workspace`, `package`, `system-requirements`, `target`, `dependencies`, `host-dependencies`, `build-dependencies`, `pypi-dependencies`, `activation`, `tasks`, `feature`, `environments`, `pypi-options`, `build-system`, `$schema`, `tool` -unknown field `hostdependencies`, expected one of `dependencies`, `host-dependencies`, `build-dependencies`, `pypi-dependencies`, `activation`, `tasks` -Failed to parse environment name 'INVALID', please use only lowercase letters, numbers and dashes diff --git a/crates/pixi_manifest/src/manifests/source.rs b/crates/pixi_manifest/src/manifests/source.rs index 69389a983..d92d83720 100644 --- a/crates/pixi_manifest/src/manifests/source.rs +++ b/crates/pixi_manifest/src/manifests/source.rs @@ -7,7 +7,7 @@ use toml_edit::{value, Array, Item, Table, Value}; use crate::toml::TomlDocument; use crate::{ - manifests::project::TableName, pypi::PyPiPackageName, FeatureName, PyPiRequirement, + manifests::table_name::TableName, pypi::PyPiPackageName, FeatureName, PyPiRequirement, PypiDependencyLocation, SpecType, Task, TomlError, }; diff --git a/crates/pixi_manifest/src/manifests/project.rs b/crates/pixi_manifest/src/manifests/table_name.rs similarity index 100% rename from crates/pixi_manifest/src/manifests/project.rs rename to crates/pixi_manifest/src/manifests/table_name.rs diff --git a/crates/pixi_manifest/src/pyproject.rs b/crates/pixi_manifest/src/pyproject.rs index 58b3fcc04..e84bce2e7 100644 --- a/crates/pixi_manifest/src/pyproject.rs +++ b/crates/pixi_manifest/src/pyproject.rs @@ -618,7 +618,7 @@ mod tests { .get(&PyPiPackageName::from_normalized(requirement.name.clone())) .is_some()); - assert_snapshot!(manifest.document.to_string()); + assert_snapshot!(manifest.source.to_string()); } #[test] @@ -643,7 +643,7 @@ mod tests { .get(&name) .is_none()); - assert_snapshot!(manifest.document.to_string()); + assert_snapshot!(manifest.source.to_string()); } #[test] diff --git a/crates/pixi_manifest/src/toml/snapshots/pixi_manifest__toml__manifest__test__run_dependencies_feature.snap b/crates/pixi_manifest/src/toml/snapshots/pixi_manifest__toml__manifest__test__run_dependencies_feature.snap deleted file mode 100644 index 703307914..000000000 --- a/crates/pixi_manifest/src/toml/snapshots/pixi_manifest__toml__manifest__test__run_dependencies_feature.snap +++ /dev/null @@ -1,12 +0,0 @@ ---- -source: crates/pixi_manifest/src/toml/manifest.rs -expression: "expect_parse_failure(r#\"\n [workspace]\n channels = []\n platforms = []\n\n [feature.foobar.run-dependencies]\n \"#,)" ---- - × unknown field `run-dependencies`, expected one of `platforms`, `channels`, `channel-priority`, 
`system-requirements`, `target`, `dependencies`, `host-dependencies`, `build-dependencies`, `pypi- - │ dependencies`, `activation`, `tasks`, `pypi-options` - ╭─[pixi.toml:6:25] - 5 │ - 6 │ [feature.foobar.run-dependencies] - · ──────────────── - 7 │ - ╰──── diff --git a/src/cli/upgrade.rs b/src/cli/upgrade.rs index 179ee7834..ce914f179 100644 --- a/src/cli/upgrade.rs +++ b/src/cli/upgrade.rs @@ -199,7 +199,7 @@ fn parse_specs( .filter(|(name, _)| { if name.as_normalized() == "python" { if let pixi_manifest::ManifestSource::PyProjectToml(document) = - project.manifest.document.clone() + project.manifest.source.clone() { if document .get_nested_table("[tool.pixi.dependencies.python]") @@ -251,7 +251,7 @@ fn parse_specs( _ => None, }) .map(|(name, req)| { - let location = project.manifest.document.pypi_dependency_location( + let location = project.manifest.source.pypi_dependency_location( &name, None, // TODO: add support for platforms &args.specs.feature, diff --git a/src/project/snapshots/pixi__project__tests__dependency_set_with_build_section.snap b/src/project/snapshots/pixi__project__tests__dependency_set_with_build_section.snap deleted file mode 100644 index 21821c8df..000000000 --- a/src/project/snapshots/pixi__project__tests__dependency_set_with_build_section.snap +++ /dev/null @@ -1,5 +0,0 @@ ---- -source: src/project/mod.rs -expression: "format_dependencies(project.default_environment().environment_dependencies(Some(Platform::Linux64)))" ---- -foo = "==1.0" From 7ce7d3464f6f74bcfb089bf928911e497b8a8c11 Mon Sep 17 00:00:00 2001 From: Wolf Vollprecht Date: Fri, 13 Dec 2024 05:59:35 -0500 Subject: [PATCH 14/20] feat: add target to workspace (#2655) Adds support for the "`.target`" on the workspace object. We then merge the variants (more specific one should win). 
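A minimal sketch of that merge rule, assuming the variant tables have already been collected from most specific (a `workspace.target.<platform>` table) to least specific (the workspace default), which is what the new `resolve_variant` helper does further down; `merge_variants` and its types here are illustrative only:

```rust
use std::collections::HashMap;

/// Hypothetical helper: visit variant tables from most specific to least
/// specific and let the first table that defines a key win.
fn merge_variants(ordered: &[HashMap<String, Vec<String>>]) -> HashMap<String, Vec<String>> {
    let mut merged: HashMap<String, Vec<String>> = HashMap::new();
    for table in ordered {
        for (key, values) in table {
            // `or_insert_with` keeps the value from the more specific table
            // if the key was already filled in.
            merged.entry(key.clone()).or_insert_with(|| values.clone());
        }
    }
    merged
}

fn main() {
    let win64_target = HashMap::from([("foo".to_string(), vec!["2.0.* *_win".to_string()])]);
    let workspace_default = HashMap::from([("foo".to_string(), vec!["1.0".to_string()])]);

    // On win-64 the target-specific table is visited first, so it wins.
    let resolved = merge_variants(&[win64_target, workspace_default.clone()]);
    assert_eq!(resolved["foo"], vec!["2.0.* *_win".to_string()]);

    // On every other platform only the workspace default applies.
    let resolved = merge_variants(&[workspace_default]);
    assert_eq!(resolved["foo"], vec!["1.0".to_string()]);
}
```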
E.g.: ``` [workspace.build-variants] foo = ["1.0"] [workspace.win-64.build-variants] foo = ["2.0.* *_win"] # wins on Windows ``` --- ...s__workspace__tests__build_variants-2.snap | 21 +++++++++ ...sts__workspace__tests__build_variants.snap | 14 ++++++ .../pixi_manifest/src/manifests/workspace.rs | 33 +++++++++++++- crates/pixi_manifest/src/pyproject.rs | 1 - crates/pixi_manifest/src/toml/workspace.rs | 23 ++++++++-- crates/pixi_manifest/src/workspace.rs | 4 +- src/build/mod.rs | 43 ++++++++++++++----- 7 files changed, 121 insertions(+), 18 deletions(-) create mode 100644 crates/pixi_manifest/src/manifests/snapshots/pixi_manifest__manifests__workspace__tests__build_variants-2.snap create mode 100644 crates/pixi_manifest/src/manifests/snapshots/pixi_manifest__manifests__workspace__tests__build_variants.snap diff --git a/crates/pixi_manifest/src/manifests/snapshots/pixi_manifest__manifests__workspace__tests__build_variants-2.snap b/crates/pixi_manifest/src/manifests/snapshots/pixi_manifest__manifests__workspace__tests__build_variants-2.snap new file mode 100644 index 000000000..e8178332e --- /dev/null +++ b/crates/pixi_manifest/src/manifests/snapshots/pixi_manifest__manifests__workspace__tests__build_variants-2.snap @@ -0,0 +1,21 @@ +--- +source: crates/pixi_manifest/src/manifests/workspace.rs +expression: resolved_win +--- +[ + Some( + { + "python": [ + "1.0.*", + ], + }, + ), + Some( + { + "python": [ + "3.10.*", + "3.11.*", + ], + }, + ), +] diff --git a/crates/pixi_manifest/src/manifests/snapshots/pixi_manifest__manifests__workspace__tests__build_variants.snap b/crates/pixi_manifest/src/manifests/snapshots/pixi_manifest__manifests__workspace__tests__build_variants.snap new file mode 100644 index 000000000..202556d4f --- /dev/null +++ b/crates/pixi_manifest/src/manifests/snapshots/pixi_manifest__manifests__workspace__tests__build_variants.snap @@ -0,0 +1,14 @@ +--- +source: crates/pixi_manifest/src/manifests/workspace.rs +expression: resolved_linux +--- +[ + Some( + { + "python": [ + "3.10.*", + "3.11.*", + ], + }, + ), +] diff --git a/crates/pixi_manifest/src/manifests/workspace.rs b/crates/pixi_manifest/src/manifests/workspace.rs index e4faef076..0b1c88f17 100644 --- a/crates/pixi_manifest/src/manifests/workspace.rs +++ b/crates/pixi_manifest/src/manifests/workspace.rs @@ -82,7 +82,7 @@ impl WorkspaceManifest { #[cfg(test)] mod tests { - use insta::{assert_snapshot, assert_yaml_snapshot}; + use insta::{assert_debug_snapshot, assert_snapshot, assert_yaml_snapshot}; use itertools::Itertools; use rattler_conda_types::{NamedChannelOrUrl, Platform}; @@ -476,4 +476,35 @@ mod tests { "#; let _manifest = WorkspaceManifest::from_toml_str(contents).unwrap(); } + + #[test] + fn test_build_variants() { + let contents = r#" + [workspace] + name = "foo" + channels = [] + platforms = [] + + [workspace.build-variants] + python = ["3.10.*", "3.11.*"] + + [workspace.target.win-64.build-variants] + python = ["1.0.*"] + "#; + let manifest = WorkspaceManifest::from_toml_str(contents).unwrap(); + println!("{:?}", manifest.workspace.build_variants); + let resolved_linux = manifest + .workspace + .build_variants + .resolve(Some(Platform::Linux64)) + .collect::>(); + assert_debug_snapshot!(resolved_linux); + + let resolved_win = manifest + .workspace + .build_variants + .resolve(Some(Platform::Win64)) + .collect::>(); + assert_debug_snapshot!(resolved_win); + } } diff --git a/crates/pixi_manifest/src/pyproject.rs b/crates/pixi_manifest/src/pyproject.rs index e84bce2e7..7764dfe66 100644 --- 
a/crates/pixi_manifest/src/pyproject.rs +++ b/crates/pixi_manifest/src/pyproject.rs @@ -267,7 +267,6 @@ impl PyProjectManifest { homepage: None, repository: None, documentation: None, - build_variants: None, })?; // Add python as dependency based on the `project.requires_python` property diff --git a/crates/pixi_manifest/src/toml/workspace.rs b/crates/pixi_manifest/src/toml/workspace.rs index 63aeea360..e96348528 100644 --- a/crates/pixi_manifest/src/toml/workspace.rs +++ b/crates/pixi_manifest/src/toml/workspace.rs @@ -1,6 +1,6 @@ use std::{collections::HashMap, path::PathBuf}; -use indexmap::IndexSet; +use indexmap::{IndexMap, IndexSet}; use rattler_conda_types::{NamedChannelOrUrl, Platform, Version}; use rattler_solve::ChannelPriority; use serde::Deserialize; @@ -10,9 +10,15 @@ use url::Url; use crate::{ preview::Preview, pypi::pypi_options::PypiOptions, utils::PixiSpanned, PrioritizedChannel, - Workspace, + TargetSelector, Targets, Workspace, }; +#[derive(Debug, Clone, Deserialize)] +#[serde(deny_unknown_fields, rename_all = "kebab-case")] +pub struct TomlWorkspaceTarget { + build_variants: Option>>, +} + /// The TOML representation of the `[[workspace]]` section in a pixi manifest. #[serde_as] #[derive(Debug, Clone, Deserialize)] @@ -46,6 +52,9 @@ pub struct TomlWorkspace { #[serde(default)] pub preview: Preview, + #[serde(default)] + pub target: IndexMap, TomlWorkspaceTarget>, + pub build_variants: Option>>, } @@ -65,7 +74,6 @@ pub struct ExternalWorkspaceProperties { pub homepage: Option, pub repository: Option, pub documentation: Option, - pub build_variants: Option>>, } #[derive(Debug, Error)] @@ -99,7 +107,14 @@ impl TomlWorkspace { conda_pypi_map: self.conda_pypi_map, pypi_options: self.pypi_options, preview: self.preview, - build_variants: self.build_variants.or(external.build_variants), + build_variants: Targets::from_default_and_user_defined( + self.build_variants, + self.target + .clone() + .into_iter() + .map(|(k, v)| (k, v.build_variants)) + .collect(), + ), }) } } diff --git a/crates/pixi_manifest/src/workspace.rs b/crates/pixi_manifest/src/workspace.rs index cac3c3d8e..55837acb5 100644 --- a/crates/pixi_manifest/src/workspace.rs +++ b/crates/pixi_manifest/src/workspace.rs @@ -6,7 +6,7 @@ use rattler_solve::ChannelPriority; use url::Url; use super::pypi::pypi_options::PypiOptions; -use crate::{preview::Preview, utils::PixiSpanned, PrioritizedChannel}; +use crate::{preview::Preview, utils::PixiSpanned, PrioritizedChannel, Targets}; /// Describes the contents of the `[workspace]` section of the project manifest. 
#[derive(Debug, Clone)] @@ -62,5 +62,5 @@ pub struct Workspace { pub preview: Preview, /// Build variants - pub build_variants: Option>>, + pub build_variants: Targets>>>, } diff --git a/src/build/mod.rs b/src/build/mod.rs index 80dcdf56f..c0a11c923 100644 --- a/src/build/mod.rs +++ b/src/build/mod.rs @@ -26,6 +26,7 @@ use pixi_build_types::{ use pixi_config::get_cache_dir; pub use pixi_glob::{GlobHashCache, GlobHashError}; use pixi_glob::{GlobHashKey, GlobModificationTime, GlobModificationTimeError}; +use pixi_manifest::Targets; use pixi_record::{InputHash, PinnedPathSpec, PinnedSourceSpec, SourceRecord}; use pixi_spec::SourceSpec; use rattler_conda_types::{ @@ -54,7 +55,7 @@ pub struct BuildContext { cache_dir: PathBuf, work_dir: PathBuf, tool_context: Arc, - variant_config: Option>>, + variant_config: Targets>>>, } #[derive(Debug, Error, Diagnostic)] @@ -117,7 +118,7 @@ impl BuildContext { cache_dir: PathBuf, dot_pixi_dir: PathBuf, channel_config: ChannelConfig, - variant_config: Option>>, + variant_config: Targets>>>, tool_context: Arc, ) -> Result { Ok(Self { @@ -133,16 +134,18 @@ impl BuildContext { } pub fn from_project(project: &crate::project::Project) -> miette::Result { + let variant = project + .manifest() + .workspace + .workspace + .build_variants + .clone(); + Self::new( get_cache_dir()?, project.pixi_dir(), project.channel_config(), - project - .manifest() - .workspace - .workspace - .build_variants - .clone(), + variant, Arc::new(ToolContext::default()), ) .into_diagnostic() @@ -163,6 +166,26 @@ impl BuildContext { } } + fn resolve_variant(&self, platform: Platform) -> HashMap> { + let mut result = HashMap::new(); + + // Resolves from most specific to least specific. + for variants in self.variant_config.resolve(Some(platform)).flatten() { + // Update the hash map, but only items that are not already in the map. + for (key, value) in variants { + result.entry(key.clone()).or_insert_with(|| value.clone()); + } + } + + tracing::info!( + "resolved variant configuration for {}: {:?}", + platform, + result + ); + + result + } + /// Extracts the metadata for a package from the given source specification. 
#[allow(clippy::too_many_arguments)] pub async fn extract_source_metadata( @@ -280,7 +303,7 @@ impl BuildContext { } .key(), ), - variant_configuration: self.variant_config.clone(), + variant_configuration: Some(self.resolve_variant(host_platform)), }, build_reporter.as_conda_build_reporter(), ) @@ -519,7 +542,7 @@ impl BuildContext { } .key(), ), - variant_configuration: self.variant_config.clone(), + variant_configuration: Some(self.resolve_variant(host_platform)), }, metadata_reporter.as_conda_metadata_reporter().clone(), ) From 11624e7a435f9b5d9813b3cce4fda2e0c7592223 Mon Sep 17 00:00:00 2001 From: Hofer-Julian <30049909+Hofer-Julian@users.noreply.github.com> Date: Fri, 13 Dec 2024 14:57:47 +0100 Subject: [PATCH 15/20] fix: config search order (#2702) --- Cargo.lock | 20 ++--- crates/pixi_config/src/lib.rs | 80 +++++++++++++++---- ...config__tests__config_merge_multiple.snap} | 3 +- tests/integration_python/conftest.py | 18 ++++- 4 files changed, 94 insertions(+), 27 deletions(-) rename crates/pixi_config/src/snapshots/{pixi_config__tests__config_merge.snap => pixi_config__tests__config_merge_multiple.snap} (98%) diff --git a/Cargo.lock b/Cargo.lock index ac77f6801..536b44da0 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4330,9 +4330,9 @@ dependencies = [ [[package]] name = "rattler" -version = "0.28.6" +version = "0.28.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7401c660efdc73b2c617b19458d8da7252870223fc01bcde9f74fc796bd7f9ec" +checksum = "fdd552c29726ad42d6023060dc3f42c6bad97af00b479b4c1fb77d3c3e1f6db4" dependencies = [ "anyhow", "clap", @@ -4371,9 +4371,9 @@ dependencies = [ [[package]] name = "rattler_cache" -version = "0.2.14" +version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a81013e4d652c9925652e1a131f3076bf8c68d09749d0fca02673370221b4326" +checksum = "2523385739abca920f8b9ac5c292b0835f003ee4fa3d5da89b44fa2e4be68961" dependencies = [ "anyhow", "dashmap", @@ -4400,9 +4400,9 @@ dependencies = [ [[package]] name = "rattler_conda_types" -version = "0.29.4" +version = "0.29.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2ecb8083d6e91a3f45cc740430a6e5caba7bbf4eb20a51923a20d274ef146ced" +checksum = "f4fe3619a8d2903b0adfb2889fa58f906855fc5dd260c17d5a4dc2c447a701d1" dependencies = [ "chrono", "dirs", @@ -4517,9 +4517,9 @@ dependencies = [ [[package]] name = "rattler_package_streaming" -version = "0.22.17" +version = "0.22.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a858e5c43dcc64d42fdd617887b4b1a24930761f39654587bfe0ee44cf361fd6" +checksum = "37e5d3a43c996a379bcc3c68bb8322d31baa11d6170f3746fc5667317cf10f8d" dependencies = [ "bzip2", "chrono", @@ -4612,9 +4612,9 @@ dependencies = [ [[package]] name = "rattler_shell" -version = "0.22.9" +version = "0.22.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "36c0ab95a3fd48f3287545ca356abd51cbafd9433c901e9a6d6ffb07416e5d0d" +checksum = "86d2b039c5e575929d91f62364cd84c13c115a705e4a4d634d852b77f1fcb5af" dependencies = [ "enum_dispatch", "fs-err 3.0.0", diff --git a/crates/pixi_config/src/lib.rs b/crates/pixi_config/src/lib.rs index f077049f4..09e88618a 100644 --- a/crates/pixi_config/src/lib.rs +++ b/crates/pixi_config/src/lib.rs @@ -138,7 +138,7 @@ impl ConfigCliPrompt { } } -#[derive(Clone, Default, Debug, Serialize, Deserialize)] +#[derive(Clone, Default, Debug, Serialize, Deserialize, PartialEq, Eq)] #[serde(rename_all = "kebab-case")] pub struct RepodataConfig 
{ #[serde(flatten)] @@ -155,6 +155,7 @@ impl RepodataConfig { /// Merge the given RepodataConfig into the current one. /// `other` is mutable to allow for moving the values out of it. + /// The given config will have higher priority pub fn merge(&self, mut other: Self) -> Self { let mut per_channel: HashMap<_, _> = self .per_channel @@ -198,7 +199,7 @@ impl From for Config { } } } -#[derive(Clone, Default, Debug, Deserialize, Serialize)] +#[derive(Clone, Default, Debug, Deserialize, Serialize, PartialEq, Eq)] #[serde(deny_unknown_fields, rename_all = "kebab-case")] pub struct RepodataChannelConfig { /// Disable JLAP compression for repodata. @@ -256,7 +257,7 @@ pub enum KeyringProvider { Subprocess, } -#[derive(Clone, Debug, Deserialize, Serialize, Default)] +#[derive(Clone, Debug, Deserialize, Serialize, Default, PartialEq, Eq)] #[serde(rename_all = "kebab-case")] pub struct PyPIConfig { /// The default index URL for PyPI packages. @@ -436,7 +437,7 @@ impl PyPIConfig { } /// The strategy for that will be used for pinning a version of a package. -#[derive(Serialize, Deserialize, Debug, Clone, Default, PartialEq, Copy)] +#[derive(Serialize, Deserialize, Debug, Clone, Default, PartialEq, Eq, Copy)] #[serde(rename_all = "kebab-case")] pub enum PinningStrategy { /// Default semver strategy e.g. "1.2.3" becomes ">=1.2.3, <2" but "0.1.0" @@ -544,7 +545,7 @@ impl PinningStrategy { } } -#[derive(Clone, Debug, Deserialize, Serialize)] +#[derive(Clone, Debug, Deserialize, Serialize, PartialEq, Eq)] #[serde(rename_all = "kebab-case")] pub struct Config { #[serde(default)] @@ -909,10 +910,11 @@ impl Config { } /// Merge the given config into the current one. + /// The given config will have higher priority #[must_use] - pub fn merge_config(mut self, other: Config) -> Self { - self.mirrors.extend(other.mirrors); - self.loaded_from.extend(other.loaded_from); + pub fn merge_config(self, mut other: Config) -> Self { + other.mirrors.extend(self.mirrors); + other.loaded_from.extend(self.loaded_from); Self { default_channels: if other.default_channels.is_empty() { @@ -925,16 +927,16 @@ impl Config { authentication_override_file: other .authentication_override_file .or(self.authentication_override_file), - mirrors: self.mirrors, - loaded_from: self.loaded_from, + mirrors: other.mirrors, + loaded_from: other.loaded_from, // currently this is always the default so just use the other value channel_config: other.channel_config, - repodata_config: other.repodata_config.merge(self.repodata_config), - pypi_config: other.pypi_config.merge(self.pypi_config), + repodata_config: self.repodata_config.merge(other.repodata_config), + pypi_config: self.pypi_config.merge(other.pypi_config), detached_environments: other.detached_environments.or(self.detached_environments), pinning_strategy: other.pinning_strategy.or(self.pinning_strategy), force_activate: other.force_activate, - experimental: other.experimental.merge(self.experimental), + experimental: self.experimental.merge(other.experimental), // Make other take precedence over self to allow for setting the value through the CLI concurrency: self.concurrency.merge(other.concurrency), } @@ -1385,7 +1387,57 @@ UNUSED = "unused" } #[test] - fn test_config_merge() { + fn test_config_merge_priority() { + // If I set every config key, ensure that `other wins` + let mut config = Config::default(); + let other = Config { + default_channels: vec![NamedChannelOrUrl::from_str("conda-forge").unwrap()], + channel_config: ChannelConfig::default_with_root_dir(PathBuf::from("/root/dir")), 
+ tls_no_verify: Some(true), + detached_environments: Some(DetachedEnvironments::Path(PathBuf::from("/path/to/envs"))), + concurrency: ConcurrencyConfig { + solves: 5, + ..ConcurrencyConfig::default() + }, + change_ps1: Some(false), + authentication_override_file: Some(PathBuf::default()), + mirrors: HashMap::from([( + Url::parse("https://conda.anaconda.org/conda-forge").unwrap(), + Vec::default(), + )]), + pinning_strategy: Some(PinningStrategy::NoPin), + experimental: ExperimentalConfig { + use_environment_activation_cache: Some(true), + }, + loaded_from: Vec::from([PathBuf::from_str("test").unwrap()]), + force_activate: Some(true), + pypi_config: PyPIConfig { + allow_insecure_host: Vec::from(["test".to_string()]), + extra_index_urls: Vec::from([ + Url::parse("https://conda.anaconda.org/conda-forge").unwrap() + ]), + index_url: Some(Url::parse("https://conda.anaconda.org/conda-forge").unwrap()), + keyring_provider: Some(KeyringProvider::Subprocess), + }, + repodata_config: RepodataConfig { + default: RepodataChannelConfig { + disable_bzip2: Some(true), + disable_jlap: Some(true), + disable_sharded: Some(true), + disable_zstd: Some(true), + }, + per_channel: HashMap::from([( + Url::parse("https://conda.anaconda.org/conda-forge").unwrap(), + RepodataChannelConfig::default(), + )]), + }, + }; + let original_other = other.clone(); + config = config.merge_config(other); + assert_eq!(config, original_other); + } + #[test] + fn test_config_merge_multiple() { let mut config = Config::default(); let other = Config { default_channels: vec![NamedChannelOrUrl::from_str("conda-forge").unwrap()], diff --git a/crates/pixi_config/src/snapshots/pixi_config__tests__config_merge.snap b/crates/pixi_config/src/snapshots/pixi_config__tests__config_merge_multiple.snap similarity index 98% rename from crates/pixi_config/src/snapshots/pixi_config__tests__config_merge.snap rename to crates/pixi_config/src/snapshots/pixi_config__tests__config_merge_multiple.snap index e8d53846d..105f882d0 100644 --- a/crates/pixi_config/src/snapshots/pixi_config__tests__config_merge.snap +++ b/crates/pixi_config/src/snapshots/pixi_config__tests__config_merge_multiple.snap @@ -1,7 +1,6 @@ --- source: crates/pixi_config/src/lib.rs expression: debug -snapshot_kind: text --- Config { default_channels: [ @@ -25,8 +24,8 @@ Config { mirrors: {}, pinning_strategy: None, loaded_from: [ - "path/config_1.toml", "path/config_2.toml", + "path/config_1.toml", ], channel_config: ChannelConfig { channel_alias: Url { diff --git a/tests/integration_python/conftest.py b/tests/integration_python/conftest.py index 9b963831b..d3988f05c 100644 --- a/tests/integration_python/conftest.py +++ b/tests/integration_python/conftest.py @@ -21,10 +21,26 @@ def pixi(request: pytest.FixtureRequest) -> Path: @pytest.fixture def tmp_pixi_workspace(tmp_path: Path) -> Path: pixi_config = """ +# Reset to defaults +default-channels = ["conda-forge"] +change-ps1 = true +tls-no-verify = false +detached-environments = false +pinning-strategy = "semver" + +[concurrency] +downloads = 50 + +[experimental] +use-environment-activation-cache = false + +# Enable sharded repodata [repodata-config."https://prefix.dev/"] disable-sharded = false """ - tmp_path.joinpath("config.toml").write_text(pixi_config) + dot_pixi = tmp_path.joinpath(".pixi") + dot_pixi.mkdir() + dot_pixi.joinpath("config.toml").write_text(pixi_config) return tmp_path From 4a4b10c892c4ec149d58b939e0e5a77526281071 Mon Sep 17 00:00:00 2001 From: Tim de Jager Date: Fri, 13 Dec 2024 15:32:45 +0100 Subject: [PATCH 
16/20] fix: modified example to be a bit more logical for my taste. (#2674) --- examples/rich_example/pixi.lock | 3 ++- examples/rich_example/pyproject.toml | 23 ++++++++++--------- .../pixi_build/test_build.py | 12 ---------- 3 files changed, 14 insertions(+), 24 deletions(-) diff --git a/examples/rich_example/pixi.lock b/examples/rich_example/pixi.lock index 2a21ce057..63f626a48 100644 --- a/examples/rich_example/pixi.lock +++ b/examples/rich_example/pixi.lock @@ -737,9 +737,10 @@ packages: build: pyhbf21a9e_0 subdir: noarch depends: + - rich >=13.9.4,<14 - python input: - hash: 838c2144d5cbf29b8080a4f13fe011a48a1dfda18faf9dc1137319d65f603db4 + hash: bfdfc32d8617e2c3448398649d982ae510f7c27fe6e90067aefb1f965d40df8a globs: - pyproject.toml - conda: https://prefix.dev/conda-forge/linux-64/tk-8.6.13-noxft_h4845f30_101.conda diff --git a/examples/rich_example/pyproject.toml b/examples/rich_example/pyproject.toml index 83a974ee6..8394e0119 100644 --- a/examples/rich_example/pyproject.toml +++ b/examples/rich_example/pyproject.toml @@ -6,11 +6,14 @@ name = "rich_example" requires-python = ">= 3.11" version = "0.1.0" +[project.scripts] +rich_example = "rich_example:main" + [build-system] build-backend = "hatchling.build" requires = ["hatchling"] -[tool.pixi.project] +[tool.pixi.workspace] channels = ["https://prefix.dev/conda-forge"] platforms = ["win-64", "linux-64", "osx-64", "osx-arm64"] preview = ["pixi-build"] @@ -24,9 +27,12 @@ hatchling = "==1.26.3" # This way uv is used instead of pip uv = "*" - -[project.scripts] -rich_example = "rich_example:main" +# This section marks the project as a pixi package. +# +# Normally a number of fields would be set here, like the name, version, etc. +# However, since all these fields are already defined in the [project] section +# at the top of this file they are not required. +[tool.pixi.package] # The build-system section defines the build system that will be used to turn # the source code of this package into a conda package. Similarly to the above @@ -53,12 +59,7 @@ channels = [ test = "rich_example" [tool.pixi.dependencies] -rich = ">=13.9.4,<14" rich_example = { path = "." } -# This section marks the project as a pixi package. -# -# Normally a number of fields would be set here, like the name, version, etc. -# However, since all these fields are already defined in the [project] section -# at the top of this file they are not required. 
-[tool.pixi.package] +[tool.pixi.run-dependencies] +rich = ">=13.9.4,<14" diff --git a/tests/integration_python/pixi_build/test_build.py b/tests/integration_python/pixi_build/test_build.py index 01d6cc030..ddec05597 100644 --- a/tests/integration_python/pixi_build/test_build.py +++ b/tests/integration_python/pixi_build/test_build.py @@ -2,7 +2,6 @@ import shutil import json - from ..common import verify_cli_command @@ -17,17 +16,6 @@ def test_build_conda_package(pixi: Path, examples_dir: Path, tmp_pixi_workspace: manifest_path = target_dir / "pyproject.toml" - # Add a boltons package to it - verify_cli_command( - [ - pixi, - "add", - "boltons", - "--manifest-path", - manifest_path, - ], - ) - # build it verify_cli_command( [pixi, "build", "--manifest-path", manifest_path, "--output-dir", manifest_path.parent] From e061747c15d863dd2e7c2993ad892615c6513a72 Mon Sep 17 00:00:00 2001 From: Hofer-Julian <30049909+Hofer-Julian@users.noreply.github.com> Date: Fri, 13 Dec 2024 15:49:05 +0100 Subject: [PATCH 17/20] feat: support editable installs for `pixi build` (#2661) --- .../src/procedures/conda_build.rs | 4 + src/build/mod.rs | 2 + src/cli/build.rs | 1 + src/lock_file/update.rs | 2 +- .../pixi_build/editable-pyproject/.gitignore | 2 + .../pixi_build/editable-pyproject/pixi.lock | 778 ++++++++++++++++++ .../editable-pyproject/pyproject.toml | 54 ++ .../src/editable_pyproject/__init__.py | 23 + .../pixi_build/test_build.py | 42 +- 9 files changed, 905 insertions(+), 3 deletions(-) create mode 100644 tests/data/pixi_build/editable-pyproject/.gitignore create mode 100644 tests/data/pixi_build/editable-pyproject/pixi.lock create mode 100644 tests/data/pixi_build/editable-pyproject/pyproject.toml create mode 100644 tests/data/pixi_build/editable-pyproject/src/editable_pyproject/__init__.py diff --git a/crates/pixi_build_types/src/procedures/conda_build.rs b/crates/pixi_build_types/src/procedures/conda_build.rs index f629d168f..63e33dfd5 100644 --- a/crates/pixi_build_types/src/procedures/conda_build.rs +++ b/crates/pixi_build_types/src/procedures/conda_build.rs @@ -43,6 +43,10 @@ pub struct CondaBuildParams { /// /// The directory may not yet exist. pub work_directory: PathBuf, + + /// Whether we want to install the package as editable + // TODO: remove this parameter as soon as we have profiles + pub editable: bool, } /// Identifier of an output. 
diff --git a/src/build/mod.rs b/src/build/mod.rs index c0a11c923..6b98bf9df 100644 --- a/src/build/mod.rs +++ b/src/build/mod.rs @@ -289,6 +289,8 @@ impl BuildContext { channel_configuration: ChannelConfiguration { base_url: self.channel_config.channel_alias.clone(), }, + // only use editable for build path dependencies + editable: source_spec.source.as_path().is_some(), outputs: Some(vec![CondaOutputIdentifier { name: Some(source_spec.package_record.name.as_normalized().to_string()), version: Some(source_spec.package_record.version.version().to_string()), diff --git a/src/cli/build.rs b/src/cli/build.rs index 8760f5623..c309925f7 100644 --- a/src/cli/build.rs +++ b/src/cli/build.rs @@ -165,6 +165,7 @@ pub async fn execute(args: Args) -> miette::Result<()> { base_url: channel_config.channel_alias, }, outputs: None, + editable: false, work_directory: work_dir.path().to_path_buf(), variant_configuration: Some(Default::default()), }, diff --git a/src/lock_file/update.rs b/src/lock_file/update.rs index 3021e99e8..1fdc30529 100644 --- a/src/lock_file/update.rs +++ b/src/lock_file/update.rs @@ -74,7 +74,7 @@ impl Project { &self, options: UpdateLockFileOptions, ) -> miette::Result> { - self::update_lock_file(self, options).await + update_lock_file(self, options).await } /// Get lockfile without checking diff --git a/tests/data/pixi_build/editable-pyproject/.gitignore b/tests/data/pixi_build/editable-pyproject/.gitignore new file mode 100644 index 000000000..e1dac7338 --- /dev/null +++ b/tests/data/pixi_build/editable-pyproject/.gitignore @@ -0,0 +1,2 @@ +.pixi +#*.egg-info diff --git a/tests/data/pixi_build/editable-pyproject/pixi.lock b/tests/data/pixi_build/editable-pyproject/pixi.lock new file mode 100644 index 000000000..515668e88 --- /dev/null +++ b/tests/data/pixi_build/editable-pyproject/pixi.lock @@ -0,0 +1,778 @@ +version: 6 +environments: + default: + channels: + - url: https://prefix.dev/conda-forge/ + packages: + linux-64: + - conda: https://prefix.dev/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2 + - conda: https://prefix.dev/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2 + - conda: https://prefix.dev/conda-forge/linux-64/bzip2-1.0.8-h4bc722e_7.conda + - conda: https://prefix.dev/conda-forge/linux-64/ca-certificates-2024.8.30-hbcca054_0.conda + - conda: https://prefix.dev/conda-forge/linux-64/ld_impl_linux-64-2.43-h712a8e2_2.conda + - conda: https://prefix.dev/conda-forge/linux-64/libexpat-2.6.4-h5888daf_0.conda + - conda: https://prefix.dev/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2 + - conda: https://prefix.dev/conda-forge/linux-64/libgcc-14.2.0-h77fa898_1.conda + - conda: https://prefix.dev/conda-forge/linux-64/libgcc-ng-14.2.0-h69a702a_1.conda + - conda: https://prefix.dev/conda-forge/linux-64/libgomp-14.2.0-h77fa898_1.conda + - conda: https://prefix.dev/conda-forge/linux-64/liblzma-5.6.3-hb9d3cd8_1.conda + - conda: https://prefix.dev/conda-forge/linux-64/libmpdec-4.0.0-h4bc722e_0.conda + - conda: https://prefix.dev/conda-forge/linux-64/libsqlite-3.47.2-hee588c1_0.conda + - conda: https://prefix.dev/conda-forge/linux-64/libuuid-2.38.1-h0b41bf4_0.conda + - conda: https://prefix.dev/conda-forge/linux-64/libzlib-1.3.1-hb9d3cd8_2.conda + - conda: https://prefix.dev/conda-forge/linux-64/ncurses-6.5-he02047a_1.conda + - conda: https://prefix.dev/conda-forge/linux-64/openssl-3.4.0-hb9d3cd8_0.conda + - conda: https://prefix.dev/conda-forge/linux-64/python-3.13.1-ha99a958_102_cp313.conda + - conda: 
https://prefix.dev/conda-forge/linux-64/python_abi-3.13-5_cp313.conda + - conda: https://prefix.dev/conda-forge/linux-64/readline-8.2-h8228510_1.conda + - conda: https://prefix.dev/conda-forge/linux-64/tk-8.6.13-noxft_h4845f30_101.conda + - conda: https://prefix.dev/conda-forge/noarch/tzdata-2024b-hc8b5060_0.conda + - conda: . + osx-64: + - conda: https://prefix.dev/conda-forge/osx-64/bzip2-1.0.8-hfdf4475_7.conda + - conda: https://prefix.dev/conda-forge/osx-64/ca-certificates-2024.8.30-h8857fd0_0.conda + - conda: https://prefix.dev/conda-forge/osx-64/libexpat-2.6.4-h240833e_0.conda + - conda: https://prefix.dev/conda-forge/osx-64/libffi-3.4.2-h0d85af4_5.tar.bz2 + - conda: https://prefix.dev/conda-forge/osx-64/liblzma-5.6.3-hd471939_1.conda + - conda: https://prefix.dev/conda-forge/osx-64/libmpdec-4.0.0-hfdf4475_0.conda + - conda: https://prefix.dev/conda-forge/osx-64/libsqlite-3.47.2-hdb6dae5_0.conda + - conda: https://prefix.dev/conda-forge/osx-64/libzlib-1.3.1-hd23fc13_2.conda + - conda: https://prefix.dev/conda-forge/osx-64/ncurses-6.5-hf036a51_1.conda + - conda: https://prefix.dev/conda-forge/osx-64/openssl-3.4.0-hd471939_0.conda + - conda: https://prefix.dev/conda-forge/osx-64/python-3.13.1-h2334245_102_cp313.conda + - conda: https://prefix.dev/conda-forge/osx-64/python_abi-3.13-5_cp313.conda + - conda: https://prefix.dev/conda-forge/osx-64/readline-8.2-h9e318b2_1.conda + - conda: https://prefix.dev/conda-forge/osx-64/tk-8.6.13-h1abcd95_1.conda + - conda: https://prefix.dev/conda-forge/noarch/tzdata-2024b-hc8b5060_0.conda + - conda: . + osx-arm64: + - conda: https://prefix.dev/conda-forge/osx-arm64/bzip2-1.0.8-h99b78c6_7.conda + - conda: https://prefix.dev/conda-forge/osx-arm64/ca-certificates-2024.8.30-hf0a4a13_0.conda + - conda: https://prefix.dev/conda-forge/osx-arm64/libexpat-2.6.4-h286801f_0.conda + - conda: https://prefix.dev/conda-forge/osx-arm64/libffi-3.4.2-h3422bc3_5.tar.bz2 + - conda: https://prefix.dev/conda-forge/osx-arm64/liblzma-5.6.3-h39f12f2_1.conda + - conda: https://prefix.dev/conda-forge/osx-arm64/libmpdec-4.0.0-h99b78c6_0.conda + - conda: https://prefix.dev/conda-forge/osx-arm64/libsqlite-3.47.2-h3f77e49_0.conda + - conda: https://prefix.dev/conda-forge/osx-arm64/libzlib-1.3.1-h8359307_2.conda + - conda: https://prefix.dev/conda-forge/osx-arm64/ncurses-6.5-h7bae524_1.conda + - conda: https://prefix.dev/conda-forge/osx-arm64/openssl-3.4.0-h39f12f2_0.conda + - conda: https://prefix.dev/conda-forge/osx-arm64/python-3.13.1-h4f43103_102_cp313.conda + - conda: https://prefix.dev/conda-forge/osx-arm64/python_abi-3.13-5_cp313.conda + - conda: https://prefix.dev/conda-forge/osx-arm64/readline-8.2-h92ec313_1.conda + - conda: https://prefix.dev/conda-forge/osx-arm64/tk-8.6.13-h5083fa2_1.conda + - conda: https://prefix.dev/conda-forge/noarch/tzdata-2024b-hc8b5060_0.conda + - conda: . 
+ win-64: + - conda: https://prefix.dev/conda-forge/win-64/bzip2-1.0.8-h2466b09_7.conda + - conda: https://prefix.dev/conda-forge/win-64/ca-certificates-2024.8.30-h56e8100_0.conda + - conda: https://prefix.dev/conda-forge/win-64/libexpat-2.6.4-he0c23c2_0.conda + - conda: https://prefix.dev/conda-forge/win-64/libffi-3.4.2-h8ffe710_5.tar.bz2 + - conda: https://prefix.dev/conda-forge/win-64/liblzma-5.6.3-h2466b09_1.conda + - conda: https://prefix.dev/conda-forge/win-64/libmpdec-4.0.0-h2466b09_0.conda + - conda: https://prefix.dev/conda-forge/win-64/libsqlite-3.47.2-h67fdade_0.conda + - conda: https://prefix.dev/conda-forge/win-64/libzlib-1.3.1-h2466b09_2.conda + - conda: https://prefix.dev/conda-forge/win-64/openssl-3.4.0-h2466b09_0.conda + - conda: https://prefix.dev/conda-forge/win-64/python-3.13.1-h071d269_102_cp313.conda + - conda: https://prefix.dev/conda-forge/win-64/python_abi-3.13-5_cp313.conda + - conda: https://prefix.dev/conda-forge/win-64/tk-8.6.13-h5226925_1.conda + - conda: https://prefix.dev/conda-forge/noarch/tzdata-2024b-hc8b5060_0.conda + - conda: https://prefix.dev/conda-forge/win-64/ucrt-10.0.22621.0-h57928b3_1.conda + - conda: https://prefix.dev/conda-forge/win-64/vc-14.3-ha32ba9b_23.conda + - conda: https://prefix.dev/conda-forge/win-64/vc14_runtime-14.42.34433-he29a5d6_23.conda + - conda: https://prefix.dev/conda-forge/win-64/vs2015_runtime-14.42.34433-hdffcdeb_23.conda + - conda: . +packages: +- conda: https://prefix.dev/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2 + sha256: fe51de6107f9edc7aa4f786a70f4a883943bc9d39b3bb7307c04c41410990726 + md5: d7c89558ba9fa0495403155b64376d81 + license: None + size: 2562 + timestamp: 1578324546067 +- conda: https://prefix.dev/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2 + build_number: 16 + sha256: fbe2c5e56a653bebb982eda4876a9178aedfc2b545f25d0ce9c4c0b508253d22 + md5: 73aaf86a425cc6e73fcf236a5a46396d + depends: + - _libgcc_mutex 0.1 conda_forge + - libgomp >=7.5.0 + constrains: + - openmp_impl 9999 + license: BSD-3-Clause + license_family: BSD + size: 23621 + timestamp: 1650670423406 +- conda: https://prefix.dev/conda-forge/linux-64/bzip2-1.0.8-h4bc722e_7.conda + sha256: 5ced96500d945fb286c9c838e54fa759aa04a7129c59800f0846b4335cee770d + md5: 62ee74e96c5ebb0af99386de58cf9553 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc-ng >=12 + license: bzip2-1.0.6 + license_family: BSD + size: 252783 + timestamp: 1720974456583 +- conda: https://prefix.dev/conda-forge/osx-64/bzip2-1.0.8-hfdf4475_7.conda + sha256: cad153608b81fb24fc8c509357daa9ae4e49dfc535b2cb49b91e23dbd68fc3c5 + md5: 7ed4301d437b59045be7e051a0308211 + depends: + - __osx >=10.13 + license: bzip2-1.0.6 + license_family: BSD + size: 134188 + timestamp: 1720974491916 +- conda: https://prefix.dev/conda-forge/osx-arm64/bzip2-1.0.8-h99b78c6_7.conda + sha256: adfa71f158cbd872a36394c56c3568e6034aa55c623634b37a4836bd036e6b91 + md5: fc6948412dbbbe9a4c9ddbbcfe0a79ab + depends: + - __osx >=11.0 + license: bzip2-1.0.6 + license_family: BSD + size: 122909 + timestamp: 1720974522888 +- conda: https://prefix.dev/conda-forge/win-64/bzip2-1.0.8-h2466b09_7.conda + sha256: 35a5dad92e88fdd7fc405e864ec239486f4f31eec229e31686e61a140a8e573b + md5: 276e7ffe9ffe39688abc665ef0f45596 + depends: + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 + license: bzip2-1.0.6 + license_family: BSD + size: 54927 + timestamp: 1720974860185 +- conda: https://prefix.dev/conda-forge/linux-64/ca-certificates-2024.8.30-hbcca054_0.conda + sha256: 
afee721baa6d988e27fef1832f68d6f32ac8cc99cdf6015732224c2841a09cea + md5: c27d1c142233b5bc9ca570c6e2e0c244 + license: ISC + size: 159003 + timestamp: 1725018903918 +- conda: https://prefix.dev/conda-forge/osx-64/ca-certificates-2024.8.30-h8857fd0_0.conda + sha256: 593f302d0f44c2c771e1614ee6d56fffdc7d616e6f187669c8b0e34ffce3e1ae + md5: b7e5424e7f06547a903d28e4651dbb21 + license: ISC + size: 158665 + timestamp: 1725019059295 +- conda: https://prefix.dev/conda-forge/osx-arm64/ca-certificates-2024.8.30-hf0a4a13_0.conda + sha256: 2db1733f4b644575dbbdd7994a8f338e6ef937f5ebdb74acd557e9dda0211709 + md5: 40dec13fd8348dbe303e57be74bd3d35 + license: ISC + size: 158482 + timestamp: 1725019034582 +- conda: https://prefix.dev/conda-forge/win-64/ca-certificates-2024.8.30-h56e8100_0.conda + sha256: 0fcac3a7ffcc556649e034a1802aedf795e64227eaa7194d207b01eaf26454c4 + md5: 4c4fd67c18619be5aa65dc5b6c72e490 + license: ISC + size: 158773 + timestamp: 1725019107649 +- conda: . + name: editable-pyproject + version: 0.1.0 + build: pyhbf21a9e_0 + subdir: noarch + depends: + - python + input: + hash: e2aeabaf6b81a2830a9736cab58074fa349bf18244acf4ef82050e4459199f36 + globs: + - pyproject.toml +- conda: https://prefix.dev/conda-forge/linux-64/ld_impl_linux-64-2.43-h712a8e2_2.conda + sha256: 7c91cea91b13f4314d125d1bedb9d03a29ebbd5080ccdea70260363424646dbe + md5: 048b02e3962f066da18efe3a21b77672 + depends: + - __glibc >=2.17,<3.0.a0 + constrains: + - binutils_impl_linux-64 2.43 + license: GPL-3.0-only + license_family: GPL + size: 669211 + timestamp: 1729655358674 +- conda: https://prefix.dev/conda-forge/linux-64/libexpat-2.6.4-h5888daf_0.conda + sha256: 56541b98447b58e52d824bd59d6382d609e11de1f8adf20b23143e353d2b8d26 + md5: db833e03127376d461e1e13e76f09b6c + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 + constrains: + - expat 2.6.4.* + license: MIT + license_family: MIT + size: 73304 + timestamp: 1730967041968 +- conda: https://prefix.dev/conda-forge/osx-64/libexpat-2.6.4-h240833e_0.conda + sha256: d10f43d0c5df6c8cf55259bce0fe14d2377eed625956cddce06f58827d288c59 + md5: 20307f4049a735a78a29073be1be2626 + depends: + - __osx >=10.13 + constrains: + - expat 2.6.4.* + license: MIT + license_family: MIT + size: 70758 + timestamp: 1730967204736 +- conda: https://prefix.dev/conda-forge/osx-arm64/libexpat-2.6.4-h286801f_0.conda + sha256: e42ab5ace927ee7c84e3f0f7d813671e1cf3529f5f06ee5899606630498c2745 + md5: 38d2656dd914feb0cab8c629370768bf + depends: + - __osx >=11.0 + constrains: + - expat 2.6.4.* + license: MIT + license_family: MIT + size: 64693 + timestamp: 1730967175868 +- conda: https://prefix.dev/conda-forge/win-64/libexpat-2.6.4-he0c23c2_0.conda + sha256: 0c0447bf20d1013d5603499de93a16b6faa92d7ead870d96305c0f065b6a5a12 + md5: eb383771c680aa792feb529eaf9df82f + depends: + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 + constrains: + - expat 2.6.4.* + license: MIT + license_family: MIT + size: 139068 + timestamp: 1730967442102 +- conda: https://prefix.dev/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2 + sha256: ab6e9856c21709b7b517e940ae7028ae0737546122f83c2aa5d692860c3b149e + md5: d645c6d2ac96843a2bfaccd2d62b3ac3 + depends: + - libgcc-ng >=9.4.0 + license: MIT + license_family: MIT + size: 58292 + timestamp: 1636488182923 +- conda: https://prefix.dev/conda-forge/osx-64/libffi-3.4.2-h0d85af4_5.tar.bz2 + sha256: 7a2d27a936ceee6942ea4d397f9c7d136f12549d86f7617e8b6bad51e01a941f + md5: ccb34fb14960ad8b125962d3d79b31a9 + license: MIT + license_family: MIT + size: 51348 + timestamp: 
1636488394370 +- conda: https://prefix.dev/conda-forge/osx-arm64/libffi-3.4.2-h3422bc3_5.tar.bz2 + sha256: 41b3d13efb775e340e4dba549ab5c029611ea6918703096b2eaa9c015c0750ca + md5: 086914b672be056eb70fd4285b6783b6 + license: MIT + license_family: MIT + size: 39020 + timestamp: 1636488587153 +- conda: https://prefix.dev/conda-forge/win-64/libffi-3.4.2-h8ffe710_5.tar.bz2 + sha256: 1951ab740f80660e9bc07d2ed3aefb874d78c107264fd810f24a1a6211d4b1a5 + md5: 2c96d1b6915b408893f9472569dee135 + depends: + - vc >=14.1,<15.0a0 + - vs2015_runtime >=14.16.27012 + license: MIT + license_family: MIT + size: 42063 + timestamp: 1636489106777 +- conda: https://prefix.dev/conda-forge/linux-64/libgcc-14.2.0-h77fa898_1.conda + sha256: 53eb8a79365e58849e7b1a068d31f4f9e718dc938d6f2c03e960345739a03569 + md5: 3cb76c3f10d3bc7f1105b2fc9db984df + depends: + - _libgcc_mutex 0.1 conda_forge + - _openmp_mutex >=4.5 + constrains: + - libgomp 14.2.0 h77fa898_1 + - libgcc-ng ==14.2.0=*_1 + license: GPL-3.0-only WITH GCC-exception-3.1 + license_family: GPL + size: 848745 + timestamp: 1729027721139 +- conda: https://prefix.dev/conda-forge/linux-64/libgcc-ng-14.2.0-h69a702a_1.conda + sha256: 3a76969c80e9af8b6e7a55090088bc41da4cffcde9e2c71b17f44d37b7cb87f7 + md5: e39480b9ca41323497b05492a63bc35b + depends: + - libgcc 14.2.0 h77fa898_1 + license: GPL-3.0-only WITH GCC-exception-3.1 + license_family: GPL + size: 54142 + timestamp: 1729027726517 +- conda: https://prefix.dev/conda-forge/linux-64/libgomp-14.2.0-h77fa898_1.conda + sha256: 1911c29975ec99b6b906904040c855772ccb265a1c79d5d75c8ceec4ed89cd63 + md5: cc3573974587f12dda90d96e3e55a702 + depends: + - _libgcc_mutex 0.1 conda_forge + license: GPL-3.0-only WITH GCC-exception-3.1 + license_family: GPL + size: 460992 + timestamp: 1729027639220 +- conda: https://prefix.dev/conda-forge/linux-64/liblzma-5.6.3-hb9d3cd8_1.conda + sha256: e6e425252f3839e2756e4af1ea2074dffd3396c161bf460629f9dfd6a65f15c6 + md5: 2ecf2f1c7e4e21fcfe6423a51a992d84 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 + license: 0BSD + size: 111132 + timestamp: 1733407410083 +- conda: https://prefix.dev/conda-forge/osx-64/liblzma-5.6.3-hd471939_1.conda + sha256: c70639ff3cb034a8e31cb081c907879b6a639bb12b0e090069a68eb69125b10e + md5: f9e9205fed9c664421c1c09f0b90ce6d + depends: + - __osx >=10.13 + license: 0BSD + size: 103745 + timestamp: 1733407504892 +- conda: https://prefix.dev/conda-forge/osx-arm64/liblzma-5.6.3-h39f12f2_1.conda + sha256: d863b8257406918ffdc50ae65502f2b2d6cede29404d09a094f59509d6a0aaf1 + md5: b2553114a7f5e20ccd02378a77d836aa + depends: + - __osx >=11.0 + license: 0BSD + size: 99129 + timestamp: 1733407496073 +- conda: https://prefix.dev/conda-forge/win-64/liblzma-5.6.3-h2466b09_1.conda + sha256: 24d04bd55adfa44c421c99ce169df38cb1ad2bba5f43151bc847fc802496a1fa + md5: 015b9c0bd1eef60729ab577a38aaf0b5 + depends: + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 + license: 0BSD + size: 104332 + timestamp: 1733407872569 +- conda: https://prefix.dev/conda-forge/linux-64/libmpdec-4.0.0-h4bc722e_0.conda + sha256: d02d1d3304ecaf5c728e515eb7416517a0b118200cd5eacbe829c432d1664070 + md5: aeb98fdeb2e8f25d43ef71fbacbeec80 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc-ng >=12 + license: BSD-2-Clause + license_family: BSD + size: 89991 + timestamp: 1723817448345 +- conda: https://prefix.dev/conda-forge/osx-64/libmpdec-4.0.0-hfdf4475_0.conda + sha256: 791be3d30d8e37ec49bcc23eb8f1e1415d911a7c023fa93685f2ea485179e258 + md5: ed625b2e59dff82859c23dd24774156b + depends: + - __osx >=10.13 + 
license: BSD-2-Clause + license_family: BSD + size: 76561 + timestamp: 1723817691512 +- conda: https://prefix.dev/conda-forge/osx-arm64/libmpdec-4.0.0-h99b78c6_0.conda + sha256: f7917de9117d3a5fe12a39e185c7ce424f8d5010a6f97b4333e8a1dcb2889d16 + md5: 7476305c35dd9acef48da8f754eedb40 + depends: + - __osx >=11.0 + license: BSD-2-Clause + license_family: BSD + size: 69263 + timestamp: 1723817629767 +- conda: https://prefix.dev/conda-forge/win-64/libmpdec-4.0.0-h2466b09_0.conda + sha256: fc529fc82c7caf51202cc5cec5bb1c2e8d90edbac6d0a4602c966366efe3c7bf + md5: 74860100b2029e2523cf480804c76b9b + depends: + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 + license: BSD-2-Clause + license_family: BSD + size: 88657 + timestamp: 1723861474602 +- conda: https://prefix.dev/conda-forge/linux-64/libsqlite-3.47.2-hee588c1_0.conda + sha256: 48af21ebc2cbf358976f1e0f4a0ab9e91dfc83d0ef337cf3837c6f5bc22fb352 + md5: b58da17db24b6e08bcbf8fed2fb8c915 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 + - libzlib >=1.3.1,<2.0a0 + license: Unlicense + size: 873551 + timestamp: 1733761824646 +- conda: https://prefix.dev/conda-forge/osx-64/libsqlite-3.47.2-hdb6dae5_0.conda + sha256: 4d5e188d921f93c97ce172fc8c4341e8171670ec98d76f9961f65f6306fcda77 + md5: 44d9799fda97eb34f6d88ac1e3eb0ea6 + depends: + - __osx >=10.13 + - libzlib >=1.3.1,<2.0a0 + license: Unlicense + size: 923167 + timestamp: 1733761860127 +- conda: https://prefix.dev/conda-forge/osx-arm64/libsqlite-3.47.2-h3f77e49_0.conda + sha256: f192f3c8973de9ec4c214990715f13b781965247a5cedf9162e7f9e699cfc3c4 + md5: 122d6f29470f1a991e85608e77e56a8a + depends: + - __osx >=11.0 + - libzlib >=1.3.1,<2.0a0 + license: Unlicense + size: 850553 + timestamp: 1733762057506 +- conda: https://prefix.dev/conda-forge/win-64/libsqlite-3.47.2-h67fdade_0.conda + sha256: ecfc0182c3b2e63c870581be1fa0e4dbdfec70d2011cb4f5bde416ece26c41df + md5: ff00095330e0d35a16bd3bdbd1a2d3e7 + depends: + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 + license: Unlicense + size: 891292 + timestamp: 1733762116902 +- conda: https://prefix.dev/conda-forge/linux-64/libuuid-2.38.1-h0b41bf4_0.conda + sha256: 787eb542f055a2b3de553614b25f09eefb0a0931b0c87dbcce6efdfd92f04f18 + md5: 40b61aab5c7ba9ff276c41cfffe6b80b + depends: + - libgcc-ng >=12 + license: BSD-3-Clause + license_family: BSD + size: 33601 + timestamp: 1680112270483 +- conda: https://prefix.dev/conda-forge/linux-64/libzlib-1.3.1-hb9d3cd8_2.conda + sha256: d4bfe88d7cb447768e31650f06257995601f89076080e76df55e3112d4e47dc4 + md5: edb0dca6bc32e4f4789199455a1dbeb8 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 + constrains: + - zlib 1.3.1 *_2 + license: Zlib + license_family: Other + size: 60963 + timestamp: 1727963148474 +- conda: https://prefix.dev/conda-forge/osx-64/libzlib-1.3.1-hd23fc13_2.conda + sha256: 8412f96504fc5993a63edf1e211d042a1fd5b1d51dedec755d2058948fcced09 + md5: 003a54a4e32b02f7355b50a837e699da + depends: + - __osx >=10.13 + constrains: + - zlib 1.3.1 *_2 + license: Zlib + license_family: Other + size: 57133 + timestamp: 1727963183990 +- conda: https://prefix.dev/conda-forge/osx-arm64/libzlib-1.3.1-h8359307_2.conda + sha256: ce34669eadaba351cd54910743e6a2261b67009624dbc7daeeafdef93616711b + md5: 369964e85dc26bfe78f41399b366c435 + depends: + - __osx >=11.0 + constrains: + - zlib 1.3.1 *_2 + license: Zlib + license_family: Other + size: 46438 + timestamp: 1727963202283 +- conda: https://prefix.dev/conda-forge/win-64/libzlib-1.3.1-h2466b09_2.conda + sha256: 
ba945c6493449bed0e6e29883c4943817f7c79cbff52b83360f7b341277c6402 + md5: 41fbfac52c601159df6c01f875de31b9 + depends: + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 + constrains: + - zlib 1.3.1 *_2 + license: Zlib + license_family: Other + size: 55476 + timestamp: 1727963768015 +- conda: https://prefix.dev/conda-forge/linux-64/ncurses-6.5-he02047a_1.conda + sha256: 6a1d5d8634c1a07913f1c525db6455918cbc589d745fac46d9d6e30340c8731a + md5: 70caf8bb6cf39a0b6b7efc885f51c0fe + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc-ng >=12 + license: X11 AND BSD-3-Clause + size: 889086 + timestamp: 1724658547447 +- conda: https://prefix.dev/conda-forge/osx-64/ncurses-6.5-hf036a51_1.conda + sha256: b0b3180039ef19502525a2abd5833c00f9624af830fd391f851934d57bffb9af + md5: e102bbf8a6ceeaf429deab8032fc8977 + depends: + - __osx >=10.13 + license: X11 AND BSD-3-Clause + size: 822066 + timestamp: 1724658603042 +- conda: https://prefix.dev/conda-forge/osx-arm64/ncurses-6.5-h7bae524_1.conda + sha256: 27d0b9ff78ad46e1f3a6c96c479ab44beda5f96def88e2fe626e0a49429d8afc + md5: cb2b0ea909b97b3d70cd3921d1445e1a + depends: + - __osx >=11.0 + license: X11 AND BSD-3-Clause + size: 802321 + timestamp: 1724658775723 +- conda: https://prefix.dev/conda-forge/linux-64/openssl-3.4.0-hb9d3cd8_0.conda + sha256: 814b9dff1847b132c676ee6cc1a8cb2d427320779b93e1b6d76552275c128705 + md5: 23cc74f77eb99315c0360ec3533147a9 + depends: + - __glibc >=2.17,<3.0.a0 + - ca-certificates + - libgcc >=13 + license: Apache-2.0 + license_family: Apache + size: 2947466 + timestamp: 1731377666602 +- conda: https://prefix.dev/conda-forge/osx-64/openssl-3.4.0-hd471939_0.conda + sha256: ba7e068ed469d6625e32ae60e6ad893e655b6695280dadf7e065ed0b6f3b885c + md5: ec99d2ce0b3033a75cbad01bbc7c5b71 + depends: + - __osx >=10.13 + - ca-certificates + license: Apache-2.0 + license_family: Apache + size: 2590683 + timestamp: 1731378034404 +- conda: https://prefix.dev/conda-forge/osx-arm64/openssl-3.4.0-h39f12f2_0.conda + sha256: bd1d58ced46e75efa3b842c61642fd12272c69e9fe4d7261078bc082153a1d53 + md5: df307bbc703324722df0293c9ca2e418 + depends: + - __osx >=11.0 + - ca-certificates + license: Apache-2.0 + license_family: Apache + size: 2935176 + timestamp: 1731377561525 +- conda: https://prefix.dev/conda-forge/win-64/openssl-3.4.0-h2466b09_0.conda + sha256: e03045a0837e01ff5c75e9273a572553e7522290799807f918c917a9826a6484 + md5: d0d805d9b5524a14efb51b3bff965e83 + depends: + - ca-certificates + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 + license: Apache-2.0 + license_family: Apache + size: 8491156 + timestamp: 1731379715927 +- conda: https://prefix.dev/conda-forge/linux-64/python-3.13.1-ha99a958_102_cp313.conda + build_number: 102 + sha256: b10f25c5edc203d15b3f54861bec4868b8200ebc16c8cbc82202e4c8da2b183e + md5: 6e7535f1d1faf524e9210d2689b3149b + depends: + - __glibc >=2.17,<3.0.a0 + - bzip2 >=1.0.8,<2.0a0 + - ld_impl_linux-64 >=2.36.1 + - libexpat >=2.6.4,<3.0a0 + - libffi >=3.4,<4.0a0 + - libgcc >=13 + - liblzma >=5.6.3,<6.0a0 + - libmpdec >=4.0.0,<5.0a0 + - libsqlite >=3.47.0,<4.0a0 + - libuuid >=2.38.1,<3.0a0 + - libzlib >=1.3.1,<2.0a0 + - ncurses >=6.5,<7.0a0 + - openssl >=3.4.0,<4.0a0 + - python_abi 3.13.* *_cp313 + - readline >=8.2,<9.0a0 + - tk >=8.6.13,<8.7.0a0 + - tzdata + license: Python-2.0 + size: 33263183 + timestamp: 1733436074842 +- conda: https://prefix.dev/conda-forge/osx-64/python-3.13.1-h2334245_102_cp313.conda + build_number: 102 + sha256: 8f424519d207379f0410d2783b257426f6d362edbc0b6c6b2a5ed61ff87821f9 + 
md5: bacdbf2fd86557ad1fb862cb2d30d821 + depends: + - __osx >=10.13 + - bzip2 >=1.0.8,<2.0a0 + - libexpat >=2.6.4,<3.0a0 + - libffi >=3.4,<4.0a0 + - liblzma >=5.6.3,<6.0a0 + - libmpdec >=4.0.0,<5.0a0 + - libsqlite >=3.47.0,<4.0a0 + - libzlib >=1.3.1,<2.0a0 + - ncurses >=6.5,<7.0a0 + - openssl >=3.4.0,<4.0a0 + - python_abi 3.13.* *_cp313 + - readline >=8.2,<9.0a0 + - tk >=8.6.13,<8.7.0a0 + - tzdata + license: Python-2.0 + size: 14067313 + timestamp: 1733434634823 +- conda: https://prefix.dev/conda-forge/osx-arm64/python-3.13.1-h4f43103_102_cp313.conda + build_number: 102 + sha256: 0379adf6bb35ca47036860983701e8f6fae89c028d422f2b9439f3110893bc24 + md5: 8c65c1dfc98312ef8666dbb7c7fc47ca + depends: + - __osx >=11.0 + - bzip2 >=1.0.8,<2.0a0 + - libexpat >=2.6.4,<3.0a0 + - libffi >=3.4,<4.0a0 + - liblzma >=5.6.3,<6.0a0 + - libmpdec >=4.0.0,<5.0a0 + - libsqlite >=3.47.0,<4.0a0 + - libzlib >=1.3.1,<2.0a0 + - ncurses >=6.5,<7.0a0 + - openssl >=3.4.0,<4.0a0 + - python_abi 3.13.* *_cp313 + - readline >=8.2,<9.0a0 + - tk >=8.6.13,<8.7.0a0 + - tzdata + license: Python-2.0 + size: 12905237 + timestamp: 1733433280639 +- conda: https://prefix.dev/conda-forge/win-64/python-3.13.1-h071d269_102_cp313.conda + build_number: 102 + sha256: ee41eda85ebc3a257a3b21a76d255d986b08a285d891e418cbfb70113ee14684 + md5: 70568ba8bbd5f0c7b830e690775eb8b7 + depends: + - bzip2 >=1.0.8,<2.0a0 + - libexpat >=2.6.4,<3.0a0 + - libffi >=3.4,<4.0a0 + - liblzma >=5.6.3,<6.0a0 + - libmpdec >=4.0.0,<5.0a0 + - libsqlite >=3.47.0,<4.0a0 + - libzlib >=1.3.1,<2.0a0 + - openssl >=3.4.0,<4.0a0 + - python_abi 3.13.* *_cp313 + - tk >=8.6.13,<8.7.0a0 + - tzdata + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 + license: Python-2.0 + size: 16753813 + timestamp: 1733433028707 +- conda: https://prefix.dev/conda-forge/linux-64/python_abi-3.13-5_cp313.conda + build_number: 5 + sha256: 438225b241c5f9bddae6f0178a97f5870a89ecf927dfca54753e689907331442 + md5: 381bbd2a92c863f640a55b6ff3c35161 + constrains: + - python 3.13.* *_cp313 + license: BSD-3-Clause + license_family: BSD + size: 6217 + timestamp: 1723823393322 +- conda: https://prefix.dev/conda-forge/osx-64/python_abi-3.13-5_cp313.conda + build_number: 5 + sha256: 075ad768648e88b78d2a94099563b43d3082e7c35979f457164f26d1079b7b5c + md5: 927a2186f1f997ac018d67c4eece90a6 + constrains: + - python 3.13.* *_cp313 + license: BSD-3-Clause + license_family: BSD + size: 6291 + timestamp: 1723823083064 +- conda: https://prefix.dev/conda-forge/osx-arm64/python_abi-3.13-5_cp313.conda + build_number: 5 + sha256: 4437198eae80310f40b23ae2f8a9e0a7e5c2b9ae411a8621eb03d87273666199 + md5: b8e82d0a5c1664638f87f63cc5d241fb + constrains: + - python 3.13.* *_cp313 + license: BSD-3-Clause + license_family: BSD + size: 6322 + timestamp: 1723823058879 +- conda: https://prefix.dev/conda-forge/win-64/python_abi-3.13-5_cp313.conda + build_number: 5 + sha256: 0c12cc1b84962444002c699ed21e815fb9f686f950d734332a1b74d07db97756 + md5: 44b4fe6f22b57103afb2299935c8b68e + constrains: + - python 3.13.* *_cp313 + license: BSD-3-Clause + license_family: BSD + size: 6716 + timestamp: 1723823166911 +- conda: https://prefix.dev/conda-forge/linux-64/readline-8.2-h8228510_1.conda + sha256: 5435cf39d039387fbdc977b0a762357ea909a7694d9528ab40f005e9208744d7 + md5: 47d31b792659ce70f470b5c82fdfb7a4 + depends: + - libgcc-ng >=12 + - ncurses >=6.3,<7.0a0 + license: GPL-3.0-only + license_family: GPL + size: 281456 + timestamp: 1679532220005 +- conda: https://prefix.dev/conda-forge/osx-64/readline-8.2-h9e318b2_1.conda + sha256: 
41e7d30a097d9b060037f0c6a2b1d4c4ae7e942c06c943d23f9d481548478568 + md5: f17f77f2acf4d344734bda76829ce14e + depends: + - ncurses >=6.3,<7.0a0 + license: GPL-3.0-only + license_family: GPL + size: 255870 + timestamp: 1679532707590 +- conda: https://prefix.dev/conda-forge/osx-arm64/readline-8.2-h92ec313_1.conda + sha256: a1dfa679ac3f6007362386576a704ad2d0d7a02e98f5d0b115f207a2da63e884 + md5: 8cbb776a2f641b943d413b3e19df71f4 + depends: + - ncurses >=6.3,<7.0a0 + license: GPL-3.0-only + license_family: GPL + size: 250351 + timestamp: 1679532511311 +- conda: https://prefix.dev/conda-forge/linux-64/tk-8.6.13-noxft_h4845f30_101.conda + sha256: e0569c9caa68bf476bead1bed3d79650bb080b532c64a4af7d8ca286c08dea4e + md5: d453b98d9c83e71da0741bb0ff4d76bc + depends: + - libgcc-ng >=12 + - libzlib >=1.2.13,<2.0.0a0 + license: TCL + license_family: BSD + size: 3318875 + timestamp: 1699202167581 +- conda: https://prefix.dev/conda-forge/osx-64/tk-8.6.13-h1abcd95_1.conda + sha256: 30412b2e9de4ff82d8c2a7e5d06a15f4f4fef1809a72138b6ccb53a33b26faf5 + md5: bf830ba5afc507c6232d4ef0fb1a882d + depends: + - libzlib >=1.2.13,<2.0.0a0 + license: TCL + license_family: BSD + size: 3270220 + timestamp: 1699202389792 +- conda: https://prefix.dev/conda-forge/osx-arm64/tk-8.6.13-h5083fa2_1.conda + sha256: 72457ad031b4c048e5891f3f6cb27a53cb479db68a52d965f796910e71a403a8 + md5: b50a57ba89c32b62428b71a875291c9b + depends: + - libzlib >=1.2.13,<2.0.0a0 + license: TCL + license_family: BSD + size: 3145523 + timestamp: 1699202432999 +- conda: https://prefix.dev/conda-forge/win-64/tk-8.6.13-h5226925_1.conda + sha256: 2c4e914f521ccb2718946645108c9bd3fc3216ba69aea20c2c3cedbd8db32bb1 + md5: fc048363eb8f03cd1737600a5d08aafe + depends: + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 + license: TCL + license_family: BSD + size: 3503410 + timestamp: 1699202577803 +- conda: https://prefix.dev/conda-forge/noarch/tzdata-2024b-hc8b5060_0.conda + sha256: 4fde5c3008bf5d2db82f2b50204464314cc3c91c1d953652f7bd01d9e52aefdf + md5: 8ac3367aafb1cc0a068483c580af8015 + license: LicenseRef-Public-Domain + size: 122354 + timestamp: 1728047496079 +- conda: https://prefix.dev/conda-forge/win-64/ucrt-10.0.22621.0-h57928b3_1.conda + sha256: db8dead3dd30fb1a032737554ce91e2819b43496a0db09927edf01c32b577450 + md5: 6797b005cd0f439c4c5c9ac565783700 + constrains: + - vs2015_runtime >=14.29.30037 + license: LicenseRef-MicrosoftWindowsSDK10 + size: 559710 + timestamp: 1728377334097 +- conda: https://prefix.dev/conda-forge/win-64/vc-14.3-ha32ba9b_23.conda + sha256: 986ddaf8feec2904eac9535a7ddb7acda1a1dfb9482088fdb8129f1595181663 + md5: 7c10ec3158d1eb4ddff7007c9101adb0 + depends: + - vc14_runtime >=14.38.33135 + track_features: + - vc14 + license: BSD-3-Clause + license_family: BSD + size: 17479 + timestamp: 1731710827215 +- conda: https://prefix.dev/conda-forge/win-64/vc14_runtime-14.42.34433-he29a5d6_23.conda + sha256: c483b090c4251a260aba6ff3e83a307bcfb5fb24ad7ced872ab5d02971bd3a49 + md5: 32b37d0cfa80da34548501cdc913a832 + depends: + - ucrt >=10.0.20348.0 + constrains: + - vs2015_runtime 14.42.34433.* *_23 + license: LicenseRef-MicrosoftVisualCpp2015-2022Runtime + license_family: Proprietary + size: 754247 + timestamp: 1731710681163 +- conda: https://prefix.dev/conda-forge/win-64/vs2015_runtime-14.42.34433-hdffcdeb_23.conda + sha256: 568ce8151eaae256f1cef752fc78651ad7a86ff05153cc7a4740b52ae6536118 + md5: 5c176975ca2b8366abad3c97b3cd1e83 + depends: + - vc14_runtime >=14.42.34433 + license: BSD-3-Clause + license_family: BSD + size: 17572 + 
timestamp: 1731710685291 diff --git a/tests/data/pixi_build/editable-pyproject/pyproject.toml b/tests/data/pixi_build/editable-pyproject/pyproject.toml new file mode 100644 index 000000000..2afbf4e02 --- /dev/null +++ b/tests/data/pixi_build/editable-pyproject/pyproject.toml @@ -0,0 +1,54 @@ +[project] +dependencies = [] +name = "editable-pyproject" +requires-python = ">= 3.11" +version = "0.1.0" + +[build-system] +build-backend = "hatchling.build" +requires = ["hatchling"] + +[tool.pixi.project] +channels = ["https://prefix.dev/conda-forge"] +platforms = ["win-64", "linux-64", "osx-64", "osx-arm64"] +preview = ["pixi-build"] + +[tool.pixi.host-dependencies] +# To be able to install this pyproject we need to install the dependencies of +# the python build-system defined above. Note that different from the +# pyproject build-system this refers to a conda package instead of a pypi +# package. +hatchling = "==1.26.3" + +# The build-system section defines the build system that will be used to turn +# the source code of this package into a conda package. Similarly to the above +# [build-system] section this section instructs pixi which build backend to +# use. The build-backend is an executable that is installed and invoked by +# pixi with the sole purpose to build the package. +[tool.pixi.build-system] +# The name of the build backend to use. This name refers both to the name of +# the package that provides the build backend and the name of the executable +# inside the package that is invoked. +# +# The `build-backend` key also functions as a dependency declaration. At least +# a version specifier must be added. +build-backend = { name = "pixi-build-python", version = "*" } +# These are the conda channels that are used to resolve the dependencies of the +# build backend package. +channels = [ + "https://prefix.dev/pixi-build-backends", + "https://prefix.dev/conda-forge", +] + +[tool.pixi.dependencies] +editable-pyproject = { path = "." } + +[tool.pixi.tasks] +check-editable = "python -c 'import editable_pyproject; editable_pyproject.check_editable()'" + +# This section marks the project as a pixi package. +# +# Normally a number of fields would be set here, like the name, version, etc. +# However, since all these fields are already defined in the [project] section +# at the top of this file they are not required. 
+[tool.pixi.package] diff --git a/tests/data/pixi_build/editable-pyproject/src/editable_pyproject/__init__.py b/tests/data/pixi_build/editable-pyproject/src/editable_pyproject/__init__.py new file mode 100644 index 000000000..9e8f428a7 --- /dev/null +++ b/tests/data/pixi_build/editable-pyproject/src/editable_pyproject/__init__.py @@ -0,0 +1,23 @@ +__version__ = "1.0.0" + +import sys +from pathlib import Path +import site + + +def is_editable() -> bool: + package_name = "editable_pyproject" + for site_package in site.getsitepackages(): + egg_link_path = Path(site_package).joinpath(f"_{package_name}.pth") + if egg_link_path.is_file(): + return True + return False + + +def check_editable() -> None: + if is_editable(): + print("The package is installed as editable.") + sys.exit(0) + else: + print("The package is not installed as editable.") + sys.exit(1) diff --git a/tests/integration_python/pixi_build/test_build.py b/tests/integration_python/pixi_build/test_build.py index ddec05597..a0dcc63d1 100644 --- a/tests/integration_python/pixi_build/test_build.py +++ b/tests/integration_python/pixi_build/test_build.py @@ -76,12 +76,13 @@ def test_smokey(pixi: Path, build_data: Path, tmp_pixi_workspace: Path) -> None: def test_source_change_trigger_rebuild( pixi: Path, build_data: Path, tmp_pixi_workspace: Path ) -> None: - test_data = build_data.joinpath("simple-pyproject") + project = "simple-pyproject" + test_data = build_data.joinpath(project) # TODO: Setting the cache dir shouldn't be necessary! env = {"PIXI_CACHE_DIR": str(tmp_pixi_workspace.joinpath("pixi_cache"))} - target_dir = tmp_pixi_workspace.joinpath("simple-pyproject") + target_dir = tmp_pixi_workspace.joinpath(project) shutil.copytree(test_data, target_dir) manifest_path = target_dir.joinpath("pyproject.toml") @@ -113,3 +114,40 @@ def test_source_change_trigger_rebuild( stdout_contains="The version of simple-pyproject is 2.0.0", env=env, ) + + +def test_editable_pyproject(pixi: Path, build_data: Path, tmp_pixi_workspace: Path) -> None: + project = "editable-pyproject" + test_data = build_data.joinpath(project) + + # TODO: Setting the cache dir shouldn't be necessary! 
+ env = { + "PIXI_CACHE_DIR": str(tmp_pixi_workspace.joinpath("pixi_cache")), + } + + target_dir = tmp_pixi_workspace.joinpath(project) + shutil.copytree(test_data, target_dir) + manifest_path = target_dir.joinpath("pyproject.toml") + + verify_cli_command( + [ + pixi, + "install", + "--manifest-path", + manifest_path, + ], + env=env, + ) + + # Verify that package is installed as editable + verify_cli_command( + [ + pixi, + "run", + "--manifest-path", + manifest_path, + "check-editable", + ], + env=env, + stdout_contains="The package is installed as editable.", + ) From 5856fdfff0651108dff8d1c32722635a45610638 Mon Sep 17 00:00:00 2001 From: Hofer-Julian <30049909+Hofer-Julian@users.noreply.github.com> Date: Fri, 13 Dec 2024 16:41:48 +0100 Subject: [PATCH 18/20] test: stop testing rich example (#2707) --- tests/scripts/test-examples.sh | 4 ---- 1 file changed, 4 deletions(-) diff --git a/tests/scripts/test-examples.sh b/tests/scripts/test-examples.sh index 81ee4c3d0..9f35bd05e 100644 --- a/tests/scripts/test-examples.sh +++ b/tests/scripts/test-examples.sh @@ -9,7 +9,6 @@ pixi run -v --manifest-path examples/polarify/pixi.toml -e pl020 test echo "Running the pypi example:" pixi run -v --manifest-path examples/pypi/pixi.toml test -# pixi run -v --manifest-path examples/pypi-source-deps/pixi.toml test echo "Running the conda_mapping example:" pixi run -v --manifest-path examples/conda_mapping/pixi.toml test @@ -17,6 +16,3 @@ pixi run -v --manifest-path examples/conda_mapping/pixi.toml test echo "Running the solve-groups example:" pixi run -v --manifest-path examples/solve-groups/pixi.toml -e min-py38 test pixi run -v --manifest-path examples/solve-groups/pixi.toml -e max-py310 test - -echo "Running the rich example:" -pixi run -v --manifest-path examples/rich_example/pyproject.toml test From d7ccc408b0c9e9811b31ef17f03839f7f1f605ee Mon Sep 17 00:00:00 2001 From: Tim de Jager Date: Mon, 16 Dec 2024 09:14:08 +0100 Subject: [PATCH 19/20] docs: introduction to pixi build (#2685) Co-authored-by: Ruben Arts Co-authored-by: Julian Hofer --- docs/build/dependency_types.md | 104 ++++++++++++++++++ docs/build/getting_started.md | 51 +++++++++ docs/reference/pixi_manifest.md | 68 +++++------- .../pixi_tomls/dependency_types.toml | 26 +++++ .../pixi_tomls/simple_pixi_build.toml | 34 ++++++ mkdocs.yml | 5 +- 6 files changed, 246 insertions(+), 42 deletions(-) create mode 100644 docs/build/dependency_types.md create mode 100644 docs/build/getting_started.md create mode 100644 docs/source_files/pixi_tomls/dependency_types.toml create mode 100644 docs/source_files/pixi_tomls/simple_pixi_build.toml diff --git a/docs/build/dependency_types.md b/docs/build/dependency_types.md new file mode 100644 index 000000000..23fb1ee9e --- /dev/null +++ b/docs/build/dependency_types.md @@ -0,0 +1,104 @@ +# Run, Host and Build Dependencies + +If you add a package to the [dependency table](../reference/pixi_manifest.md#dependencies) of a feature that dependency will be available in all environments that include that feature. +The dependencies of a package that is being built are a bit more granular. +Here you can see the three types of dependencies for a simple C++ package. + +```toml +--8<-- "docs/source_files/pixi_tomls/dependency_types.toml:dependencies" +``` + +Each dependency is used at a different step of the package building process. +`gxx` is used to build the package, `catch` will be linked into the package and `git` will be available during runtime. 
+
+Let's delve deeper into the various types of package dependencies and their specific roles in the build process.
+
+### [Build Dependencies](../reference/pixi_manifest.md#build-dependencies)
+!!! note "pixi-build-cmake"
+    When using the `pixi-build-cmake` backend you do not need to specify `cmake` or the compiler as a dependency.
+    The backend will install `cmake`, `ninja` and the C++ compilers by default.
+
+This table contains dependencies that are needed to build the project.
+Different from dependencies and host-dependencies, these packages are installed for the architecture of the build machine.
+This enables cross-compiling from one machine architecture to another.
+
+Typical examples of build dependencies are:
+
+- Compilers are invoked on the build machine, but they generate code for the target machine.
+  If the project is cross-compiled, the architecture of the build and target machine might differ.
+- `cmake` is invoked on the build machine to generate additional code- or project-files which are then included in the compilation process.
+
+!!! info
+    The _build_ target refers to the machine that will execute the build.
+    Programs and libraries installed by these dependencies will be executed on the build machine.
+
+    For example, if you compile on a MacBook with an Apple Silicon chip but target Linux x86_64 then your *build* platform is `osx-arm64` and your *host* platform is `linux-64`.
+
+### [Host Dependencies](../reference/pixi_manifest.md#host-dependencies)
+
+Host dependencies are the dependencies needed during build/link time that are specific to the host machine.
+The difference from build dependencies becomes important, for example, during cross-compilation.
+The compiler is a build dependency since it is specific to the build machine.
+In contrast, the libraries you link to are host dependencies since they are specific to the host machine.
+Typical examples of host dependencies are:
+
+- Base interpreters: a Python package would list `python` here and an R package would list `mro-base` or `r-base`.
+- Libraries your project links against like `openssl`, `rapidjson`, or `xtensor`.
+
+#### Python code
+Because of the way building currently works, dependencies like `hatchling`, `pip`, `uv`, etc. are host dependencies.
+Otherwise, the wrong Python prefix would be used during the build process.
+
+This is more of a technical limitation, and we are looking into ways to make this less of a hassle.
+But for now, you will need to add these dependencies to the `host-dependencies` section.
+
+So as an example, say we want to use `hatchling` and `uv` to build a Python package.
+You would need something like this in your manifest file:
+
+```toml
+[host-dependencies]
+hatchling = "*"
+uv = "*"
+```
+
+#### Native code
+When cross-compiling, you might need to specify host dependencies that should have the *target* machine architecture and are used during the build process,
+for example when linking a library.
+Let's recap an explanation that can be found in [A Master Guide To Linux Cross-Compiling](https://ruvi-d.medium.com/a-master-guide-to-linux-cross-compiling-b894bf909386):
+
+- *Build machine*: where the code is built.
+- *Host machine*: where the built code runs.
+- *Target machine*: where the binaries spit out by the built code run.
+
+Let's say we are using a Linux PC (linux-64) to cross-compile a CMake application called `Awesome` to run on a Linux ARM target machine (linux-aarch64).
+We would get the following table:
+
+| Component | Type        | Build  | Host    | Target  |
+|-----------|-------------|--------|---------|---------|
+| GCC       | Compiler    | x86_64 | x86_64  | aarch64 |
+| CMake     | Build tool  | x86_64 | x86_64  | N/A     |
+| Awesome   | Application | x86_64 | aarch64 | N/A     |
+
+So if you need to use a library like SDL2, you would need to add it to the `host-dependencies` table,
+as the machine running `Awesome` will have a different host architecture than the build architecture.
+
+Giving you something like this in your manifest file:
+
+```toml
+ # in our example these dependencies will use the aarch64 binaries
+[host-dependencies]
+sdl2 = "*"
+```
+
+#### Run-exports
+
+Conda packages can define `run-exports`: dependencies that, when specified in the `host-dependencies` section, are implicitly added to the `run-dependencies` section.
+This is useful to avoid having to specify the same dependencies in both sections,
+as most packages on conda-forge will have these `run-exports` defined.
+When using something like `zlib`, you would only need to specify it in the `host-dependencies` section, and it will be used as a run-dependency automatically.
+
+
+### [Dependencies (Run Dependencies)](../reference/pixi_manifest.md#dependencies)
+
+These are the dependencies that are required when running the package; they are the most common dependencies
+and are what you would usually use in a `workspace`.
diff --git a/docs/build/getting_started.md b/docs/build/getting_started.md
new file mode 100644
index 000000000..3a69bfc8d
--- /dev/null
+++ b/docs/build/getting_started.md
@@ -0,0 +1,51 @@
+
+## Introduction
+
+Next to managing workflows and environments, pixi can also build packages.
+This is useful for the following reasons:
+
+- Building and uploading a package to a conda channel
+- Allowing users to directly depend on the source and build it automatically
+- Managing multiple packages in a workspace
+
+We've been working to support these use-cases with the `build` feature in pixi.
+The vision is to enable building of packages from source, for any language, on any platform.
+
+
+!!! note "Known limitations"
+    Currently, the `build` feature has a number of limitations:
+
+    1. Limited set of [build-backends](https://github.com/prefix-dev/pixi-build-backends).
+    2. Build-backends are probably missing a lot of parameters/features.
+    3. Recursive source dependencies are not supported. (source dependencies that have source dependencies)
+    4. Workspace dependencies cannot be inherited.
+
+## Setting up the Manifest
+In this example, we are using `pixi-build-python` in order to build a Python package.
+If the package itself has dependencies, they need to be mentioned here.
+The different kinds of dependencies are explained in the [dependency types chapter](dependency_types.md).
+
+This is what the `pixi.toml` file looks like for a simple Python package:
+```toml
+--8<-- "docs/source_files/pixi_tomls/simple_pixi_build.toml:all"
+```
+
+1. Specifies workspace properties like the name, channels, and platforms. This is currently an alias for `project`.
+2. Since the build feature is still in preview, you have to add "pixi-build" to `workspace.preview`.
+3. We need to add our package as a dependency to the workspace.
+4. In `package` you specify properties specific to the package you want to build.
+5. Packages are built by using build backends.
+ By specifying `build-system.build-backend` and `build-system.channels` you determine which backend is used and from which channel it will be downloaded. +6. There are different build backends. + Pixi backends can describe how to build a conda package, for a certain language or build tool. + For example, `pixi-build-python`, allows building a Python package into a conda package. +7. `simple_python` uses `hatchling` as Python build backend so this needs to be mentioned in `host-dependencies`. + Read up on host-dependencies in the [Dependency Types](./dependency_types.md#host-dependencies) +8. Python PEP517 backends like `hatchling` know how to build a Python package. + So `hatchling` creates a Python package, and `pixi-build-python` turns the Python package into a conda package. + +## CLI Commands +Using the preview feature you can now build packages from source. + +- `pixi build` *has been addeded* and will build your source package into a `.conda` file. +- Other commands like `pixi install`, `pixi run` etc. automatically make use of the build feature. diff --git a/docs/reference/pixi_manifest.md b/docs/reference/pixi_manifest.md index d6bb7065e..006d1b72f 100644 --- a/docs/reference/pixi_manifest.md +++ b/docs/reference/pixi_manifest.md @@ -94,7 +94,7 @@ This should be a valid version based on the conda Version Spec. See the [version documentation](https://docs.rs/rattler_conda_types/latest/rattler_conda_types/struct.Version.html), for an explanation of what is allowed in a Version Spec. ```toml --8<-- "docs/source_files/pixi_tomls/main_pixi.toml:project_version" +--8<-- "docs/source_files/pixi_tomls/main_pixi.toml:project_version" ``` ### `authors` (optional) @@ -354,6 +354,8 @@ By default, `uv` and thus `pixi`, will stop at the first index on which a given The `index-strategy` only changes PyPI package resolution and not conda package resolution. ## The `dependencies` table(s) +??? info "Details regarding the dependencies" + For more detail regarding the dependency types, make sure to check the [Run, Host, Build](../build/dependency_types.md) dependency documentation. This section defines what dependencies you would like to use for your project. @@ -363,6 +365,7 @@ The default is `[dependencies]`, which are dependencies that are shared across p Dependencies are defined using a [VersionSpec](https://docs.rs/rattler_conda_types/latest/rattler_conda_types/version_spec/enum.VersionSpec.html). A `VersionSpec` combines a [Version](https://docs.rs/rattler_conda_types/latest/rattler_conda_types/struct.Version.html) with an optional operator. + Some examples are: ```toml @@ -403,6 +406,29 @@ rust = "1.72" pytorch-cpu = { version = "~=1.1", channel = "pytorch" } ``` + +### `host-dependencies` + +```toml +[host-dependencies] +python = "~=3.10.3" +``` +Typical examples of host dependencies are: + +- Base interpreters: a Python package would list `python` here and an R package would list `mro-base` or `r-base`. +- Libraries your project links against during compilation like `openssl`, `rapidjson`, or `xtensor`. + +### `build-dependencies` + +This table contains dependencies that are needed to build the project. +Different from `dependencies` and `host-dependencies` these packages are installed for the architecture of the _build_ machine. +This enables cross-compiling from one machine architecture to another. + +```toml +[build-dependencies] +cmake = "~=3.24" +``` + ### `pypi-dependencies` ??? 
info "Details regarding the PyPI integration" @@ -551,46 +577,6 @@ Sdists usually depend on system packages to be built, especially when compiling Think for example of Python SDL2 bindings depending on the C library: SDL2. To help built these dependencies we activate the conda environment that includes these pypi dependencies before resolving. This way when a source distribution depends on `gcc` for example, it's used from the conda environment instead of the system. - -### `host-dependencies` - -This table contains dependencies that are needed to build your project but which should not be included when your project is installed as part of another project. -In other words, these dependencies are available during the build but are no longer available when your project is installed. -Dependencies listed in this table are installed for the architecture of the target machine. - -```toml -[host-dependencies] -python = "~=3.10.3" -``` - -Typical examples of host dependencies are: - -- Base interpreters: a Python package would list `python` here and an R package would list `mro-base` or `r-base`. -- Libraries your project links against during compilation like `openssl`, `rapidjson`, or `xtensor`. - -### `build-dependencies` - -This table contains dependencies that are needed to build the project. -Different from `dependencies` and `host-dependencies` these packages are installed for the architecture of the _build_ machine. -This enables cross-compiling from one machine architecture to another. - -```toml -[build-dependencies] -cmake = "~=3.24" -``` - -Typical examples of build dependencies are: - -- Compilers are invoked on the build machine, but they generate code for the target machine. - If the project is cross-compiled, the architecture of the build and target machine might differ. -- `cmake` is invoked on the build machine to generate additional code- or project-files which are then include in the compilation process. - -!!! info - The _build_ target refers to the machine that will execute the build. - Programs and libraries installed by these dependencies will be executed on the build machine. - - For example, if you compile on a MacBook with an Apple Silicon chip but target Linux x86_64 then your *build* platform is `osx-arm64` and your *host* platform is `linux-64`. - ## The `activation` table The activation table is used for specialized activation operations that need to be run when the environment is activated. diff --git a/docs/source_files/pixi_tomls/dependency_types.toml b/docs/source_files/pixi_tomls/dependency_types.toml new file mode 100644 index 000000000..525342b68 --- /dev/null +++ b/docs/source_files/pixi_tomls/dependency_types.toml @@ -0,0 +1,26 @@ +[workspace] +channels = ["https://prefix.dev/conda-forge"] +platforms = ["win-64", "linux-64", "osx-arm64", "osx-64"] +preview = ["pixi-build"] + +[package] +name = "simple_cpp" +version = "0.1.0" + +[build-system] +build-backend = { name = "pixi-build-cmake", version = "*" } +channels = [ + "https://prefix.dev/pixi-build-backends", + "https://prefix.dev/conda-forge", +] +# --8<-- [start:dependencies] +[build-dependencies] +gxx = "*" + +[host-dependencies] +catch = "*" + +[run-dependencies] +git = "*" +# --8<-- [end:dependencies] +# simple_cpp = { path = "." 
} diff --git a/docs/source_files/pixi_tomls/simple_pixi_build.toml b/docs/source_files/pixi_tomls/simple_pixi_build.toml new file mode 100644 index 000000000..12d38c7c5 --- /dev/null +++ b/docs/source_files/pixi_tomls/simple_pixi_build.toml @@ -0,0 +1,34 @@ +# --8<-- [start:all] +# --8<-- [start:preview] +[workspace] # (1)! +preview = ["pixi-build"] # (2)! +# --8<-- [end:preview] +channels = ["https://prefix.dev/conda-forge"] +platforms = ["win-64", "linux-64", "osx-arm64", "osx-64"] + +# --8<-- [start:dependencies] +[dependencies] # (3)! +simple_python = { path = "." } +# --8<-- [end:dependencies] + +# --8<-- [start:package] +[package] # (4)! +name = "simple_python" +version = "0.1.0" +# --8<-- [end:package] + +# --8<-- [start:build-system] +[build-system] # (5)! +build-backend = { name = "pixi-build-python", version = "*" } # (6)! +channels = [ + "https://prefix.dev/pixi-build-backends", + "https://prefix.dev/conda-forge", +] +# --8<-- [end:build-system] + +# --8<-- [start:host-dependencies] +[host-dependencies] # (7)! +hatchling = "*" # (8)! +# --8<-- [end:host-dependencies] + +# --8<-- [start:all] diff --git a/mkdocs.yml b/mkdocs.yml index a9b1748d8..77d5b7d0e 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -123,11 +123,14 @@ nav: - Lockfile: features/lockfile.md - System Requirements: features/system_requirements.md - Global Tools: features/global_tools.md + - Building Packages: + - Getting started: build/getting_started.md + - Dependency Types: build/dependency_types.md - Advanced: - Authentication: advanced/authentication.md - - Info Command: advanced/explain_info_command.md - Channel Logic: advanced/channel_priority.md - GitHub Actions: advanced/github_actions.md + - Info Command: advanced/explain_info_command.md - Updates using GitHub Actions: advanced/updates_github_actions.md - Production Deployment: advanced/production_deployment.md - Pyproject.toml: advanced/pyproject_toml.md From 2e8ca9bc14c34899f02885d4cd14baaa815bf076 Mon Sep 17 00:00:00 2001 From: Hofer-Julian <30049909+Hofer-Julian@users.noreply.github.com> Date: Mon, 16 Dec 2024 10:31:23 +0100 Subject: [PATCH 20/20] docs: pixi build follow up (#2710) --- docs/build/getting_started.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/build/getting_started.md b/docs/build/getting_started.md index 3a69bfc8d..063377ab8 100644 --- a/docs/build/getting_started.md +++ b/docs/build/getting_started.md @@ -47,5 +47,5 @@ This is what the `pixi.toml` file looks like for a simple Python package: ## CLI Commands Using the preview feature you can now build packages from source. -- `pixi build` *has been addeded* and will build your source package into a `.conda` file. -- Other commands like `pixi install`, `pixi run` etc. automatically make use of the build feature. +- `pixi build` has been added and will build a `.conda` file out of your package. +- Other commands like `pixi install` and `pixi run` automatically make use of the build feature when a `path`, `git` or `url` dependency is present.