diff --git a/Cargo.lock b/Cargo.lock index 5738fd4b9..0278e6319 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -691,6 +691,12 @@ dependencies = [ "shlex", ] +[[package]] +name = "cesu8" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d43a04d8753f35258c91f8ec639f792891f748a1edbd759cf1dcea3382ad83c" + [[package]] name = "cfg-if" version = "1.0.0" @@ -814,6 +820,16 @@ version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d3fd119d74b830634cea2a0f58bbd0d54540518a14397557951e79340abc28c0" +[[package]] +name = "combine" +version = "4.6.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba5a308b75df32fe02788e748662718f03fde005016435c444eea572398219fd" +dependencies = [ + "bytes", + "memchr", +] + [[package]] name = "concurrent-queue" version = "2.5.0" @@ -842,6 +858,26 @@ dependencies = [ "windows-sys 0.52.0", ] +[[package]] +name = "const_format" +version = "0.2.33" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50c655d81ff1114fb0dcdea9225ea9f0cc712a6f8d189378e82bdf62a473a64b" +dependencies = [ + "const_format_proc_macros", +] + +[[package]] +name = "const_format_proc_macros" +version = "0.2.33" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eff1a44b93f47b1bac19a27932f5c591e43d1ba357ee4f61526c8a25603f0eb1" +dependencies = [ + "proc-macro2", + "quote", + "unicode-xid", +] + [[package]] name = "core-foundation" version = "0.9.4" @@ -1360,8 +1396,7 @@ dependencies = [ [[package]] name = "file_url" version = "0.1.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eff487eda48708def359958613c6c9762d9c4f8396db240e37083758ccb01c79" +source = "git+https://github.com/conda/rattler?branch=main#32eefc87ef0f1bc5bcc0bb65183b97e71808f54c" dependencies = [ "itertools 0.13.0", "percent-encoding", @@ -1589,6 +1624,10 @@ name = "futures-timer" version = "3.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f288b0a4f20f9a56b5d1da57e2227c661b7b16168e2f72365f57b63326e29b24" +dependencies = [ + "gloo-timers", + "send_wrapper", +] [[package]] name = "futures-util" @@ -1677,6 +1716,52 @@ dependencies = [ "walkdir", ] +[[package]] +name = "gloo-net" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c06f627b1a58ca3d42b45d6104bf1e1a03799df472df00988b6ba21accc10580" +dependencies = [ + "futures-channel", + "futures-core", + "futures-sink", + "gloo-utils", + "http", + "js-sys", + "pin-project", + "serde", + "serde_json", + "thiserror", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", +] + +[[package]] +name = "gloo-timers" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b995a66bb87bebce9a0f4a95aed01daca4872c050bfcb21653361c03bc35e5c" +dependencies = [ + "futures-channel", + "futures-core", + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "gloo-utils" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b5555354113b18c547c1d3a98fbf7fb32a9ff4f6fa112ce823a21641a0ba3aa" +dependencies = [ + "js-sys", + "serde", + "serde_json", + "wasm-bindgen", + "web-sys", +] + [[package]] name = "goblin" version = "0.9.2" @@ -2009,8 +2094,9 @@ dependencies = [ "http", "hyper", "hyper-util", + "log", "rustls", - "rustls-native-certs", + "rustls-native-certs 0.8.0", "rustls-pki-types", "tokio", "tokio-rustls", @@ -2214,6 +2300,15 @@ version = 
"1.70.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf" +[[package]] +name = "itertools" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b1c173a5686ce8bfa551b3563d0c2170bf24ca44da99c7ca4bfdab5418c3fe57" +dependencies = [ + "either", +] + [[package]] name = "itertools" version = "0.12.1" @@ -2264,6 +2359,26 @@ dependencies = [ "jiff-tzdb", ] +[[package]] +name = "jni" +version = "0.19.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c6df18c2e3db7e453d3c6ac5b3e9d5182664d28788126d39b91f2d1e22b017ec" +dependencies = [ + "cesu8", + "combine", + "jni-sys", + "log", + "thiserror", + "walkdir", +] + +[[package]] +name = "jni-sys" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8eaf4bc02d17cbdd7ff4c7438cafcdf7fb9a4613313ad11b4f8fefe7d3fa0130" + [[package]] name = "jobserver" version = "0.1.32" @@ -2304,6 +2419,131 @@ dependencies = [ "serde_json", ] +[[package]] +name = "jsonrpsee" +version = "0.24.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0a1d83ae9ed70d8e3440db663e343a82f93913104744cd543bbcdd1dbc0e35d3" +dependencies = [ + "jsonrpsee-client-transport", + "jsonrpsee-core", + "jsonrpsee-http-client", + "jsonrpsee-types", + "jsonrpsee-wasm-client", + "jsonrpsee-ws-client", +] + +[[package]] +name = "jsonrpsee-client-transport" +version = "0.24.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d80eccbd47a7b9f1e67663fd846928e941cb49c65236e297dd11c9ea3c5e3387" +dependencies = [ + "base64 0.22.1", + "futures-channel", + "futures-util", + "gloo-net", + "http", + "jsonrpsee-core", + "pin-project", + "rustls", + "rustls-pki-types", + "rustls-platform-verifier", + "soketto", + "thiserror", + "tokio", + "tokio-rustls", + "tokio-util", + "tracing", + "url", +] + +[[package]] +name = "jsonrpsee-core" +version = "0.24.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c2709a32915d816a6e8f625bf72cf74523ebe5d8829f895d6b041b1d3137818" +dependencies = [ + "async-trait", + "bytes", + "futures-timer", + "futures-util", + "http", + "http-body", + "http-body-util", + "jsonrpsee-types", + "pin-project", + "rustc-hash", + "serde", + "serde_json", + "thiserror", + "tokio", + "tokio-stream", + "tracing", + "wasm-bindgen-futures", +] + +[[package]] +name = "jsonrpsee-http-client" +version = "0.24.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc54db939002b030e794fbfc9d5a925aa2854889c5a2f0352b0bffa54681707e" +dependencies = [ + "async-trait", + "base64 0.22.1", + "http-body", + "hyper", + "hyper-rustls", + "hyper-util", + "jsonrpsee-core", + "jsonrpsee-types", + "rustls", + "rustls-platform-verifier", + "serde", + "serde_json", + "thiserror", + "tokio", + "tower", + "tracing", + "url", +] + +[[package]] +name = "jsonrpsee-types" +version = "0.24.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ca331cd7b3fe95b33432825c2d4c9f5a43963e207fdc01ae67f9fd80ab0930f" +dependencies = [ + "http", + "serde", + "serde_json", + "thiserror", +] + +[[package]] +name = "jsonrpsee-wasm-client" +version = "0.24.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c603d97578071dc44d79d3cfaf0775437638fd5adc33c6b622dfe4fa2ec812d" +dependencies = [ + "jsonrpsee-client-transport", + "jsonrpsee-core", + "jsonrpsee-types", +] + 
+[[package]] +name = "jsonrpsee-ws-client" +version = "0.24.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "755ca3da1c67671f1fae01cd1a47f41dfb2233a8f19a643e587ab0a663942044" +dependencies = [ + "http", + "jsonrpsee-client-transport", + "jsonrpsee-core", + "jsonrpsee-types", + "url", +] + [[package]] name = "jsonwebtoken" version = "9.3.0" @@ -2563,6 +2803,7 @@ dependencies = [ "cfg-if", "miette-derive 7.2.0", "owo-colors", + "serde", "supports-color", "supports-hyperlinks", "supports-unicode", @@ -3224,9 +3465,11 @@ version = "0.38.0" dependencies = [ "ahash", "assert_matches", + "async-fd-lock", "async-once-cell", "async-trait", "barrier_cell", + "base64 0.22.1", "chrono", "clap", "clap-verbosity-flag", @@ -3237,6 +3480,7 @@ dependencies = [ "csv", "deno_task_shell", "dialoguer", + "dirs", "dunce", "fake", "fancy_display", @@ -3262,12 +3506,16 @@ dependencies = [ "pep440_rs", "pep508_rs", "percent-encoding", + "pixi_build_frontend", + "pixi_build_types", "pixi_config", "pixi_consts", "pixi_default_versions", + "pixi_glob", "pixi_manifest", "pixi_progress", "pixi_pty", + "pixi_record", "pixi_spec", "pixi_utils", "pixi_uv_conversions", @@ -3303,6 +3551,7 @@ dependencies = [ "toml_edit", "tracing", "tracing-subscriber", + "typed-path", "url", "uv-auth", "uv-cache", @@ -3328,6 +3577,54 @@ dependencies = [ "zstd", ] +[[package]] +name = "pixi_build_frontend" +version = "0.1.0" +dependencies = [ + "dashmap", + "futures", + "insta", + "itertools 0.13.0", + "jsonrpsee", + "miette 7.2.0", + "pixi_build_types", + "pixi_config", + "pixi_consts", + "pixi_manifest", + "pixi_utils", + "rattler", + "rattler_conda_types", + "rattler_repodata_gateway", + "rattler_shell", + "rattler_solve", + "rattler_virtual_packages", + "regex", + "reqwest-middleware", + "rstest", + "serde", + "serde_json", + "serde_with", + "serde_yaml", + "sha1", + "tempfile", + "thiserror", + "tokio", + "tokio-util", + "tracing", + "url", + "which 6.0.3", +] + +[[package]] +name = "pixi_build_types" +version = "0.1.0" +dependencies = [ + "rattler_conda_types", + "serde", + "serde_with", + "url", +] + [[package]] name = "pixi_config" version = "0.1.0" @@ -3358,7 +3655,7 @@ version = "0.1.0" dependencies = [ "console", "lazy_static", - "rattler_cache", + "rattler_cache 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)", "url", ] @@ -3369,6 +3666,22 @@ dependencies = [ "rattler_conda_types", ] +[[package]] +name = "pixi_glob" +version = "0.1.0" +dependencies = [ + "dashmap", + "insta", + "itertools 0.13.0", + "memchr", + "rattler_digest", + "rstest", + "tempfile", + "thiserror", + "tokio", + "wax", +] + [[package]] name = "pixi_manifest" version = "0.1.0" @@ -3425,6 +3738,21 @@ dependencies = [ "signal-hook", ] +[[package]] +name = "pixi_record" +version = "0.1.0" +dependencies = [ + "file_url", + "pixi_spec", + "rattler_conda_types", + "rattler_digest", + "rattler_lock", + "serde", + "thiserror", + "typed-path", + "url", +] + [[package]] name = "pixi_spec" version = "0.1.0" @@ -3432,6 +3760,7 @@ dependencies = [ "dirs", "file_url", "insta", + "itertools 0.13.0", "rattler_conda_types", "rattler_digest", "serde", @@ -3546,6 +3875,15 @@ dependencies = [ "windows-sys 0.59.0", ] +[[package]] +name = "pori" +version = "0.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4a63d338dec139f56dacc692ca63ad35a6be6a797442479b55acd611d79e906" +dependencies = [ + "nom", +] + [[package]] name = "portable-atomic" version = "1.9.0" @@ -3849,9 +4187,8 @@ dependencies = [ 
[[package]] name = "rattler" -version = "0.28.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7365abb0dff1cc5dce7d5b54accfbaccaca512216557a860a59099b2e41872cd" +version = "0.28.3" +source = "git+https://github.com/conda/rattler?branch=main#32eefc87ef0f1bc5bcc0bb65183b97e71808f54c" dependencies = [ "anyhow", "clap", @@ -3868,7 +4205,7 @@ dependencies = [ "memmap2 0.9.5", "once_cell", "parking_lot 0.12.3", - "rattler_cache", + "rattler_cache 0.2.11 (git+https://github.com/conda/rattler?branch=main)", "rattler_conda_types", "rattler_digest", "rattler_networking", @@ -3878,7 +4215,7 @@ dependencies = [ "regex", "reqwest", "reqwest-middleware", - "simple_spawn_blocking", + "simple_spawn_blocking 1.0.0 (git+https://github.com/conda/rattler?branch=main)", "smallvec", "tempfile", "thiserror", @@ -3890,9 +4227,9 @@ dependencies = [ [[package]] name = "rattler_cache" -version = "0.2.9" +version = "0.2.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6fcc7980d8b4f0422145abbaec1f9f05ebd6e59f97fe4e5f9ba17fe2cf3846d" +checksum = "465972d151a672bc000b64c19a67c4c3b4ffeb11bd433eaf4dfe4c9aa04d748a" dependencies = [ "anyhow", "dashmap", @@ -3909,7 +4246,34 @@ dependencies = [ "rattler_package_streaming", "reqwest", "reqwest-middleware", - "simple_spawn_blocking", + "simple_spawn_blocking 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", + "thiserror", + "tokio", + "tracing", + "url", +] + +[[package]] +name = "rattler_cache" +version = "0.2.11" +source = "git+https://github.com/conda/rattler?branch=main#32eefc87ef0f1bc5bcc0bb65183b97e71808f54c" +dependencies = [ + "anyhow", + "dashmap", + "digest", + "dirs", + "fs4", + "futures", + "fxhash", + "itertools 0.13.0", + "parking_lot 0.12.3", + "rattler_conda_types", + "rattler_digest", + "rattler_networking", + "rattler_package_streaming", + "reqwest", + "reqwest-middleware", + "simple_spawn_blocking 1.0.0 (git+https://github.com/conda/rattler?branch=main)", "thiserror", "tokio", "tracing", @@ -3918,9 +4282,8 @@ dependencies = [ [[package]] name = "rattler_conda_types" -version = "0.29.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f70c3e619609920f0fc792743e427f012d449951a951c16154fea8ee2a957a48" +version = "0.29.2" +source = "git+https://github.com/conda/rattler?branch=main#32eefc87ef0f1bc5bcc0bb65183b97e71808f54c" dependencies = [ "chrono", "dirs", @@ -3955,8 +4318,7 @@ dependencies = [ [[package]] name = "rattler_digest" version = "1.0.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a6a97526971dd357657ea4c88f6d39b31b2875c87dfe9fd12aac305fec6c0f60" +source = "git+https://github.com/conda/rattler?branch=main#32eefc87ef0f1bc5bcc0bb65183b97e71808f54c" dependencies = [ "blake2", "digest", @@ -3971,9 +4333,8 @@ dependencies = [ [[package]] name = "rattler_lock" -version = "0.22.30" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0db0f03a38c93f291acc3df137fa5e4717da5de36ed4164f0dfdb657977619e0" +version = "0.22.31" +source = "git+https://github.com/conda/rattler?branch=main#32eefc87ef0f1bc5bcc0bb65183b97e71808f54c" dependencies = [ "chrono", "file_url", @@ -3985,18 +4346,19 @@ dependencies = [ "rattler_conda_types", "rattler_digest", "serde", + "serde-value", "serde_repr", "serde_with", "serde_yaml", "thiserror", + "typed-path", "url", ] [[package]] name = "rattler_macros" version = "1.0.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"19eadf6fea87bd67d9d4c372caa3c2bed33cd91cdc235ce86210d7bc513ae0a4" +source = "git+https://github.com/conda/rattler?branch=main#32eefc87ef0f1bc5bcc0bb65183b97e71808f54c" dependencies = [ "quote", "syn", @@ -4004,9 +4366,8 @@ dependencies = [ [[package]] name = "rattler_networking" -version = "0.21.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89bc7a2698db24e4849a527528ec2ccb8c5c8c88c4e821e46c9124530b89c407" +version = "0.21.6" +source = "git+https://github.com/conda/rattler?branch=main#32eefc87ef0f1bc5bcc0bb65183b97e71808f54c" dependencies = [ "anyhow", "async-trait", @@ -4033,9 +4394,8 @@ dependencies = [ [[package]] name = "rattler_package_streaming" -version = "0.22.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b31f201486e1fb6baec82edc39ccb87c75f00750357535031e261b11a091e738" +version = "0.22.14" +source = "git+https://github.com/conda/rattler?branch=main#32eefc87ef0f1bc5bcc0bb65183b97e71808f54c" dependencies = [ "bzip2", "chrono", @@ -4062,8 +4422,7 @@ dependencies = [ [[package]] name = "rattler_redaction" version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa822f7a897914ff30e372814234047d556c98f3813fad616c93147b38dab7e7" +source = "git+https://github.com/conda/rattler?branch=main#32eefc87ef0f1bc5bcc0bb65183b97e71808f54c" dependencies = [ "reqwest", "reqwest-middleware", @@ -4072,9 +4431,8 @@ dependencies = [ [[package]] name = "rattler_repodata_gateway" -version = "0.21.21" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8193bd9d22c286dd30226fe49590d51e2f1391112480f513d9a037b3df54eee" +version = "0.21.23" +source = "git+https://github.com/conda/rattler?branch=main#32eefc87ef0f1bc5bcc0bb65183b97e71808f54c" dependencies = [ "anyhow", "async-compression", @@ -4102,7 +4460,7 @@ dependencies = [ "ouroboros", "parking_lot 0.12.3", "pin-project-lite", - "rattler_cache", + "rattler_cache 0.2.11 (git+https://github.com/conda/rattler?branch=main)", "rattler_conda_types", "rattler_digest", "rattler_networking", @@ -4113,7 +4471,7 @@ dependencies = [ "serde", "serde_json", "serde_with", - "simple_spawn_blocking", + "simple_spawn_blocking 1.0.0 (git+https://github.com/conda/rattler?branch=main)", "superslice", "tempfile", "thiserror", @@ -4127,9 +4485,8 @@ dependencies = [ [[package]] name = "rattler_shell" -version = "0.22.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7bbe53732a38f9f60f88e90dc2daed73d487e02693d7326e6562cf58a484377" +version = "0.22.7" +source = "git+https://github.com/conda/rattler?branch=main#32eefc87ef0f1bc5bcc0bb65183b97e71808f54c" dependencies = [ "enum_dispatch", "fs-err 3.0.0", @@ -4146,9 +4503,8 @@ dependencies = [ [[package]] name = "rattler_solve" -version = "1.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "84aaa6a08621e4850aabf242e67ce5ee68f12137c1a6e9f976c00d93c3c7891a" +version = "1.2.3" +source = "git+https://github.com/conda/rattler?branch=main#32eefc87ef0f1bc5bcc0bb65183b97e71808f54c" dependencies = [ "chrono", "futures", @@ -4165,9 +4521,8 @@ dependencies = [ [[package]] name = "rattler_virtual_packages" -version = "1.1.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a0b824df805e5b5fd581f93770fba032cf6d2f95a7e8249972a6fcf5f464f46a" +version = "1.1.10" +source = "git+https://github.com/conda/rattler?branch=main#32eefc87ef0f1bc5bcc0bb65183b97e71808f54c" dependencies = [ "archspec", "libloading", @@ -4361,7 
+4716,7 @@ dependencies = [ "pin-project-lite", "quinn", "rustls", - "rustls-native-certs", + "rustls-native-certs 0.8.0", "rustls-pemfile", "rustls-pki-types", "serde", @@ -4633,6 +4988,7 @@ version = "0.23.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "415d9944693cb90382053259f89fbb077ea730ad7273047ec63b19bc9b160ba8" dependencies = [ + "log", "once_cell", "ring", "rustls-pki-types", @@ -4641,6 +4997,19 @@ dependencies = [ "zeroize", ] +[[package]] +name = "rustls-native-certs" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5bfb394eeed242e909609f56089eecfe5fda225042e8b171791b9c95f5931e5" +dependencies = [ + "openssl-probe", + "rustls-pemfile", + "rustls-pki-types", + "schannel", + "security-framework 2.11.1", +] + [[package]] name = "rustls-native-certs" version = "0.8.0" @@ -4669,6 +5038,33 @@ version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "16f1201b3c9a7ee8039bcadc17b7e605e2945b27eee7631788c1bd2b0643674b" +[[package]] +name = "rustls-platform-verifier" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "afbb878bdfdf63a336a5e63561b1835e7a8c91524f51621db870169eac84b490" +dependencies = [ + "core-foundation 0.9.4", + "core-foundation-sys", + "jni", + "log", + "once_cell", + "rustls", + "rustls-native-certs 0.7.3", + "rustls-platform-verifier-android", + "rustls-webpki", + "security-framework 2.11.1", + "security-framework-sys", + "webpki-roots", + "winapi", +] + +[[package]] +name = "rustls-platform-verifier-android" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f87165f0995f63a9fbeea62b64d10b4d9d8e78ec6d7d51fb2125fda7bb36788f" + [[package]] name = "rustls-webpki" version = "0.102.8" @@ -4796,6 +5192,7 @@ dependencies = [ "core-foundation 0.9.4", "core-foundation-sys", "libc", + "num-bigint", "security-framework-sys", ] @@ -4839,6 +5236,12 @@ version = "1.0.23" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "61697e0a1c7e512e84a621326239844a24d8207b4669b41bc18b32ea5cbf988b" +[[package]] +name = "send_wrapper" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f638d531eccd6e23b980caf34876660d38e265409d8e99b397ab71eb3612fad0" + [[package]] name = "serde" version = "1.0.214" @@ -5132,6 +5535,14 @@ dependencies = [ "tokio", ] +[[package]] +name = "simple_spawn_blocking" +version = "1.0.0" +source = "git+https://github.com/conda/rattler?branch=main#32eefc87ef0f1bc5bcc0bb65183b97e71808f54c" +dependencies = [ + "tokio", +] + [[package]] name = "siphasher" version = "0.3.11" @@ -5183,6 +5594,21 @@ dependencies = [ "windows-sys 0.52.0", ] +[[package]] +name = "soketto" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37468c595637c10857701c990f93a40ce0e357cedb0953d1c26c8d8027f9bb53" +dependencies = [ + "base64 0.22.1", + "bytes", + "futures", + "httparse", + "log", + "rand", + "sha1", +] + [[package]] name = "spdx" version = "0.10.6" @@ -5646,6 +6072,27 @@ dependencies = [ "winnow", ] +[[package]] +name = "tower" +version = "0.4.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8fa9be0de6cf49e536ce1851f987bd21a43b771b09473c3549a6c853db37c1c" +dependencies = [ + "futures-core", + "futures-util", + "pin-project", + "pin-project-lite", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = 
"tower-layer" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e" + [[package]] name = "tower-service" version = "0.3.3" @@ -5658,6 +6105,7 @@ version = "0.1.40" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef" dependencies = [ + "log", "pin-project-lite", "tracing-attributes", "tracing-core", @@ -5796,6 +6244,12 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1fc81956842c57dac11422a97c3b8195a1ff727f06e85c84ed2e8aa277c9a0fd" +[[package]] +name = "unicode-xid" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853" + [[package]] name = "unsafe-libyaml" version = "0.2.11" @@ -6028,7 +6482,7 @@ dependencies = [ "uv-platform-tags", "uv-pypi-types", "uv-static", - "which", + "which 7.0.0", ] [[package]] @@ -6235,7 +6689,7 @@ dependencies = [ "uv-cache-key", "uv-fs", "uv-static", - "which", + "which 7.0.0", ] [[package]] @@ -6480,7 +6934,7 @@ dependencies = [ "uv-static", "uv-trampoline-builder", "uv-warnings", - "which", + "which 7.0.0", "windows-registry 0.3.0", "windows-result 0.2.0", "windows-sys 0.59.0", @@ -6875,6 +7329,21 @@ dependencies = [ "web-sys", ] +[[package]] +name = "wax" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8d12a78aa0bab22d2f26ed1a96df7ab58e8a93506a3e20adb47c51a93b4e1357" +dependencies = [ + "const_format", + "itertools 0.11.0", + "nom", + "pori", + "regex", + "thiserror", + "walkdir", +] + [[package]] name = "web-sys" version = "0.3.69" @@ -6894,6 +7363,18 @@ dependencies = [ "rustls-pki-types", ] +[[package]] +name = "which" +version = "6.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b4ee928febd44d98f2f459a4a79bd4d928591333a494a10a868418ac1b39cf1f" +dependencies = [ + "either", + "home", + "rustix", + "winsafe 0.0.19", +] + [[package]] name = "which" version = "7.0.0" diff --git a/Cargo.toml b/Cargo.toml index 6001f8a2d..21d962898 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -16,8 +16,10 @@ repository = "https://github.com/prefix-dev/pixi" [workspace.dependencies] ahash = "0.8.11" assert_matches = "1.5.0" +async-fd-lock = "0.2.0" async-once-cell = "0.5.3" async-trait = "0.1.82" +base64 = "0.22.1" chrono = "0.4.38" clap = { version = "4.5.9", default-features = false } clap-verbosity-flag = "2.2.0" @@ -47,8 +49,10 @@ indicatif = "0.17.8" insta = "1.38.0" is_executable = "1.0.1" itertools = "0.13.0" +jsonrpsee = "=0.24.2" lazy_static = "1.4.0" libc = { version = "0.2.153", default-features = false } +memchr = "2.7.4" miette = { version = "7.2.0", features = ["fancy"] } minijinja = "2.1.1" nix = { version = "0.29.0", default-features = false } @@ -72,6 +76,7 @@ serde_ignored = "0.1.10" serde_json = "1.0.116" serde_with = "3.7.0" serde_yaml = "0.9.34" +sha1 = "0.10.6" shlex = "1.3.0" signal-hook = "0.3.17" spdx = "0.10.4" @@ -85,7 +90,7 @@ tokio-util = "0.7.10" toml_edit = "0.22.11" tracing = "0.1.40" tracing-subscriber = "0.3.18" -typed-path = "0.9.1" +typed-path = "0.9.2" uv-distribution-filename = { git = "https://github.com/astral-sh/uv", tag = "0.4.30" } uv-distribution-types = { git = "https://github.com/astral-sh/uv", tag = "0.4.30" } uv-install-wheel = { git = "https://github.com/astral-sh/uv", tag = "0.4.30" } @@ -94,6 
+99,8 @@ uv-pep508 = { git = "https://github.com/astral-sh/uv", tag = "0.4.30" } uv-platform-tags = { git = "https://github.com/astral-sh/uv", tag = "0.4.30" } uv-pypi-types = { git = "https://github.com/astral-sh/uv", tag = "0.4.30" } uv-requirements-txt = { git = "https://github.com/astral-sh/uv", tag = "0.4.30" } +wax = "0.6.0" +which = "6.0.3" # Rattler crates file_url = "0.1.4" @@ -110,6 +117,7 @@ rattler_shell = { version = "0.22.6", default-features = false } rattler_solve = { version = "1.2.2", default-features = false } rattler_virtual_packages = { version = "1.1.9", default-features = false } + # Bumping this to a higher version breaks the Windows path handling. url = "2.5.2" uv-auth = { git = "https://github.com/astral-sh/uv", tag = "0.4.30" } @@ -131,18 +139,21 @@ zip = { version = "2.2.0", default-features = false } zstd = { version = "0.13.2", default-features = false } fancy_display = { path = "crates/fancy_display" } +pixi_build_frontend = { path = "crates/pixi_build_frontend" } +pixi_build_types = { path = "crates/pixi_build_types" } pixi_config = { path = "crates/pixi_config" } pixi_consts = { path = "crates/pixi_consts" } pixi_default_versions = { path = "crates/pixi_default_versions" } +pixi_glob = { path = "crates/pixi_glob" } pixi_manifest = { path = "crates/pixi_manifest" } pixi_progress = { path = "crates/pixi_progress" } +pixi_record = { path = "crates/pixi_record" } pixi_spec = { path = "crates/pixi_spec" } pixi_utils = { path = "crates/pixi_utils", default-features = false } pixi_uv_conversions = { path = "crates/pixi_uv_conversions" } pypi_mapping = { path = "crates/pypi_mapping" } pypi_modifiers = { path = "crates/pypi_modifiers" } - [package] authors.workspace = true description = "A package management and workflow tool" @@ -176,8 +187,10 @@ slow_integration_tests = [] [dependencies] ahash = { workspace = true } assert_matches = { workspace = true } +async-fd-lock = { workspace = true } async-once-cell = { workspace = true } barrier_cell = { path = "crates/barrier_cell" } +base64 = { workspace = true } chrono = { workspace = true } clap = { workspace = true, features = [ "derive", @@ -219,6 +232,7 @@ miette = { workspace = true, features = [ minijinja = { workspace = true, features = ["builtins"] } once_cell = { workspace = true } parking_lot = { workspace = true } +rstest = { workspace = true } uv-distribution-filename = { workspace = true } uv-distribution-types = { workspace = true } uv-install-wheel = { workspace = true } @@ -241,12 +255,17 @@ uv-pep440 = { workspace = true } uv-pep508 = { workspace = true } uv-pypi-types = { workspace = true } +dirs = "5.0.1" fs-err = { workspace = true, features = ["tokio"] } +pixi_build_frontend = { workspace = true } +pixi_build_types = { workspace = true } pixi_config = { workspace = true } pixi_consts = { workspace = true } pixi_default_versions = { workspace = true } +pixi_glob = { workspace = true } pixi_manifest = { workspace = true } pixi_progress = { workspace = true } +pixi_record = { workspace = true } pixi_spec = { workspace = true } pixi_utils = { workspace = true, default-features = false } pixi_uv_conversions = { workspace = true } @@ -275,6 +294,7 @@ tokio-util = { workspace = true } toml_edit = { workspace = true, features = ["serde"] } tracing = { workspace = true } tracing-subscriber = { workspace = true, features = ["env-filter"] } +typed-path = { workspace = true } url = { workspace = true } uv-auth = { workspace = true } uv-cache = { workspace = true } @@ -293,7 +313,6 @@ xxhash-rust = { workspace = 
true } zip = { workspace = true, features = ["deflate", "time"] } zstd = { workspace = true } - [target.'cfg(unix)'.dependencies] libc = { workspace = true, default-features = false } nix = { workspace = true, features = ["poll", "term"] } @@ -335,18 +354,19 @@ reqwest-retry = { git = "https://github.com/TrueLayer/reqwest-middleware", rev = # pep508_rs = { git = "https://github.com/astral-sh/uv", tag = "0.4.30" } # deno_task_shell = { path = "../deno_task_shell" } -#rattler = { git = "https://github.com/baszalmstra/rattler", branch = "perf/prefix-always-sharded" } -#rattler_cache = { git = "https://github.com/baszalmstra/rattler", branch = "perf/prefix-always-sharded" } -#rattler_conda_types = { git = "https://github.com/baszalmstra/rattler", branch = "perf/prefix-always-sharded" } -#rattler_digest = { git = "https://github.com/baszalmstra/rattler", branch = "perf/prefix-always-sharded" } -#rattler_lock = { git = "https://github.com/baszalmstra/rattler", branch = "perf/prefix-always-sharded" } -#rattler_networking = { git = "https://github.com/baszalmstra/rattler", branch = "perf/prefix-always-sharded" } -#rattler_package_streaming = { git = "https://github.com/baszalmstra/rattler", branch = "perf/prefix-always-sharded" } -#rattler_repodata_gateway = { git = "https://github.com/baszalmstra/rattler", branch = "perf/prefix-always-sharded" } -#rattler_shell = { git = "https://github.com/baszalmstra/rattler", branch = "perf/prefix-always-sharded" } -#rattler_solve = { git = "https://github.com/baszalmstra/rattler", branch = "perf/prefix-always-sharded" } -#rattler_virtual_packages = { git = "https://github.com/baszalmstra/rattler", branch = "perf/prefix-always-sharded" } +file_url = { git = "https://github.com/conda/rattler", branch = "main" } +rattler = { git = "https://github.com/conda/rattler", branch = "main" } +rattler_conda_types = { git = "https://github.com/conda/rattler", branch = "main" } +rattler_digest = { git = "https://github.com/conda/rattler", branch = "main" } +rattler_lock = { git = "https://github.com/conda/rattler", branch = "main" } +rattler_networking = { git = "https://github.com/conda/rattler", branch = "main" } +rattler_package_streaming = { git = "https://github.com/conda/rattler", branch = "main" } +rattler_repodata_gateway = { git = "https://github.com/conda/rattler", branch = "main" } +rattler_shell = { git = "https://github.com/conda/rattler", branch = "main" } +rattler_solve = { git = "https://github.com/conda/rattler", branch = "main" } +rattler_virtual_packages = { git = "https://github.com/conda/rattler", branch = "main" } +#file_url = { path = "../rattler/crates/file_url" } #rattler = { path = "../rattler/crates/rattler" } #rattler_cache = { path = "../rattler/crates/rattler_cache" } #rattler_conda_types = { path = "../rattler/crates/rattler_conda_types" } diff --git a/crates/pixi_build_frontend/Cargo.toml b/crates/pixi_build_frontend/Cargo.toml new file mode 100644 index 000000000..4810f6d07 --- /dev/null +++ b/crates/pixi_build_frontend/Cargo.toml @@ -0,0 +1,47 @@ +[package] +authors.workspace = true +description = "Sets up environments to use for building of pixi.toml files" +edition.workspace = true +homepage.workspace = true +license.workspace = true +name = "pixi_build_frontend" +readme.workspace = true +repository.workspace = true +version = "0.1.0" + +[dependencies] +dashmap = { workspace = true } +futures = { workspace = true } +itertools = { workspace = true } +jsonrpsee = { workspace = true, features = ["client"] } +miette = { workspace = true, 
features = ["fancy", "serde"] } +pixi_config = { workspace = true } +pixi_consts = { workspace = true } +pixi_manifest = { workspace = true } +pixi_utils = { workspace = true, features = ["rustls-tls"] } +rattler = { workspace = true } +rattler_conda_types = { workspace = true } +rattler_repodata_gateway = { workspace = true, features = ["gateway"] } +rattler_shell = { workspace = true } +rattler_solve = { workspace = true, features = ["resolvo"] } +rattler_virtual_packages = { workspace = true } +regex = { workspace = true } +reqwest-middleware = { workspace = true } +serde = { workspace = true, features = ["derive"] } +serde_json = { workspace = true } +serde_with = { workspace = true } +serde_yaml = { workspace = true } +sha1 = { workspace = true } +thiserror = { workspace = true } +tokio = { workspace = true, features = ["process", "io-std"] } +tokio-util = { workspace = true, features = ["codec"] } +tracing = { workspace = true } +url = "2.5.0" +which = { workspace = true } + +pixi_build_types = { path = "../pixi_build_types" } + +[dev-dependencies] +insta = { workspace = true, features = ["yaml", "filters"] } +rstest = { workspace = true } +tempfile = { workspace = true } diff --git a/crates/pixi_build_frontend/src/build_frontend.rs b/crates/pixi_build_frontend/src/build_frontend.rs new file mode 100644 index 000000000..19363e932 --- /dev/null +++ b/crates/pixi_build_frontend/src/build_frontend.rs @@ -0,0 +1,125 @@ +//! This module is the main entry +use std::{path::PathBuf, sync::Arc}; + +use miette::Diagnostic; +use rattler_conda_types::ChannelConfig; + +use crate::{ + protocol, + protocol_builder::{EnabledProtocols, ProtocolBuilder}, + tool::{ToolCache, ToolContext}, + Protocol, SetupRequest, +}; + +/// The frontend for building packages. +pub struct BuildFrontend { + /// The cache for tools. This is used to avoid re-installing tools. + tool_cache: Arc, + + /// The channel configuration used by the frontend + channel_config: ChannelConfig, + + /// The cache directory to use or `None` to use the default cache directory. + cache_dir: Option, + + /// The configuration to use when enabling the protocols. + enabled_protocols: EnabledProtocols, +} + +impl Default for BuildFrontend { + fn default() -> Self { + Self { + tool_cache: Arc::new(ToolCache::new()), + channel_config: ChannelConfig::default_with_root_dir(PathBuf::new()), + cache_dir: None, + enabled_protocols: EnabledProtocols::default(), + } + } +} + +#[derive(thiserror::Error, Debug, Diagnostic)] +pub enum BuildFrontendError { + /// Error while discovering the pixi.toml + #[error(transparent)] + #[diagnostic(transparent)] + DiscoveringManifest(#[from] protocol::DiscoveryError), + /// Error from the build protocol. + #[error(transparent)] + #[diagnostic(transparent)] + Protocol(#[from] protocol::FinishError), + /// Error discovering system-tool + #[error("error discovering system-tool")] + ToolError(#[from] which::Error), +} + +impl BuildFrontend { + /// Specify the channel configuration + pub fn with_channel_config(self, channel_config: ChannelConfig) -> Self { + Self { + channel_config, + ..self + } + } + + /// Returns the channel config of the frontend + pub fn channel_config(&self) -> &ChannelConfig { + &self.channel_config + } + + /// Optionally sets the cache directory the backend should use. + pub fn with_opt_cache_dir(self, cache_dir: Option) -> Self { + Self { cache_dir, ..self } + } + + /// Sets the cache directory the backend should use. 
+ /// Sets the cache directory the backend should use. + pub fn with_cache_dir(self, cache_dir: PathBuf) -> Self { + Self { + cache_dir: Some(cache_dir), + ..self + } + } + + /// Sets the tool context + pub fn with_tool_context(self, context: ToolContext) -> Self { + let tool_cache = ToolCache { + cache: self.tool_cache.cache.clone(), + context, + }; + + Self { + tool_cache: tool_cache.into(), + ..self + } + } + + /// Sets the enabled protocols. + pub fn with_enabled_protocols(self, enabled_protocols: EnabledProtocols) -> Self { + Self { + enabled_protocols, + ..self + } + } + + /// Constructs a new [`Protocol`] for the given request. This object can be + /// used to build the package. + pub async fn setup_protocol( + &self, + request: SetupRequest, + ) -> Result<Protocol, BuildFrontendError> { + // Determine the build protocol to use for the source directory. + let protocol = ProtocolBuilder::discover(&request.source_dir, &self.enabled_protocols)? + .with_channel_config(self.channel_config.clone()) + .with_opt_cache_dir(self.cache_dir.clone()); + + tracing::info!( + "discovered a {} source package at {}", + protocol.name(), + request.source_dir.display() + ); + + protocol + .with_backend_override(request.build_tool_override) + .finish(&self.tool_cache, request.build_id) + .await + } +} diff --git a/crates/pixi_build_frontend/src/jsonrpc/mod.rs b/crates/pixi_build_frontend/src/jsonrpc/mod.rs new file mode 100644 index 000000000..59e3db4ca --- /dev/null +++ b/crates/pixi_build_frontend/src/jsonrpc/mod.rs @@ -0,0 +1,22 @@ +use jsonrpsee::core::traits::ToRpcParams; +use serde::Serialize; +use serde_json::value::RawValue; + +mod stdio; +pub(crate) use stdio::{stdio_transport, Receiver, Sender}; + +/// A helper struct to convert a serializable type into a JSON-RPC parameter. +pub struct RpcParams<T>(pub T); + +impl<T: Serialize> ToRpcParams for RpcParams<T> { + fn to_rpc_params(self) -> Result<Option<Box<RawValue>>, serde_json::Error> { + let json = serde_json::to_string(&self.0)?; + RawValue::from_string(json).map(Some) + } +} + +impl<T: Serialize> From<T> for RpcParams<T> { + fn from(value: T) -> Self { + Self(value) + } +}
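Because `RpcParams` implements `ToRpcParams`, any serializable struct can be handed directly to jsonrpsee's `ClientT::request`. A sketch of in-crate usage, assuming a hypothetical `ping` method and parameter struct:

```rust
use jsonrpsee::core::client::ClientT;
use serde::Serialize;

use crate::jsonrpc::RpcParams;

// Hypothetical parameter struct; any `Serialize` type works.
#[derive(Serialize)]
struct PingParams {
    build_id: usize,
}

async fn ping(client: &impl ClientT) -> Result<String, jsonrpsee::core::client::Error> {
    // `RpcParams` serializes the value into the raw JSON-RPC `params` field.
    client
        .request("ping", RpcParams(PingParams { build_id: 0 }))
        .await
}
```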
diff --git a/crates/pixi_build_frontend/src/jsonrpc/stdio.rs b/crates/pixi_build_frontend/src/jsonrpc/stdio.rs new file mode 100644 index 000000000..ad3e36189 --- /dev/null +++ b/crates/pixi_build_frontend/src/jsonrpc/stdio.rs @@ -0,0 +1,61 @@ +use futures::StreamExt; +use jsonrpsee::core::client::{MaybeSend, ReceivedMessage, TransportReceiverT, TransportSenderT}; +use tokio::{ + io::{AsyncRead, AsyncWrite, AsyncWriteExt}, + process::{ChildStdin, ChildStdout}, +}; +use tokio_util::codec::{FramedRead, LinesCodec}; + +/// Create new transport channels using stdin and stdout of a child process. +pub(crate) fn stdio_transport( + stdin: ChildStdin, + stdout: ChildStdout, +) -> (Sender<ChildStdin>, Receiver<ChildStdout>) { + ( + Sender(stdin), + Receiver(FramedRead::new(stdout, LinesCodec::new())), + ) +} + +pub(crate) struct Sender<T>(T); + +#[jsonrpsee::core::async_trait] +impl<T: AsyncWrite + MaybeSend + Unpin + 'static> TransportSenderT for Sender<T> { + type Error = std::io::Error; + + async fn send(&mut self, msg: String) -> Result<(), Self::Error> { + // Messages are framed as single lines, so strip any embedded newlines first. + let mut sanitized = msg.replace('\n', ""); + sanitized.push('\n'); + let _n = self.0.write_all(sanitized.as_bytes()).await?; + Ok(()) + } +} + +impl<T: AsyncWrite> From<T> for Sender<T> { + fn from(value: T) -> Self { + Self(value) + } +} + +pub(crate) struct Receiver<T>(FramedRead<T, LinesCodec>); + +#[jsonrpsee::core::async_trait] +impl<T: AsyncRead + MaybeSend + Unpin + 'static> TransportReceiverT for Receiver<T> { + type Error = std::io::Error; + + async fn receive(&mut self) -> Result<ReceivedMessage, Self::Error> { + let response = self + .0 + .next() + .await + .ok_or_else(|| std::io::Error::new(std::io::ErrorKind::UnexpectedEof, "EOF"))? + .map_err(|e| std::io::Error::new(std::io::ErrorKind::InvalidData, e))?; + Ok(ReceivedMessage::Text(response)) + } +} + +impl<T: AsyncRead> From<T> for Receiver<T> { + fn from(value: T) -> Self { + Self(FramedRead::new(value, LinesCodec::new())) + } +} diff --git a/crates/pixi_build_frontend/src/lib.rs b/crates/pixi_build_frontend/src/lib.rs new file mode 100644 index 000000000..0395e3c7f --- /dev/null +++ b/crates/pixi_build_frontend/src/lib.rs @@ -0,0 +1,87 @@ +mod build_frontend; +mod jsonrpc; +pub mod protocol; +mod protocols; + +use std::fmt::{Debug, Formatter}; + +pub(crate) use protocols::builders::{conda_protocol, pixi_protocol, rattler_build_protocol}; + +mod protocol_builder; +mod reporters; +mod tool; + +use std::path::PathBuf; + +pub use build_frontend::{BuildFrontend, BuildFrontendError}; +use rattler_conda_types::MatchSpec; +pub use reporters::{CondaBuildReporter, CondaMetadataReporter}; +pub use reporters::{NoopCondaBuildReporter, NoopCondaMetadataReporter}; +use tokio::io::{AsyncRead, AsyncWrite}; +pub use tool::{IsolatedToolSpec, SystemToolSpec, ToolContext, ToolSpec}; +use url::Url; + +pub use crate::protocol::Protocol; + +pub use protocol_builder::EnabledProtocols; + +#[derive(Debug)] +pub enum BackendOverride { + /// Override the backend with a specific tool spec. + Spec(MatchSpec), + + /// Override the backend with a specific system tool. + System(String), + + /// Use the given IO for the backend. + Io(InProcessBackend), +} + +impl From<InProcessBackend> for BackendOverride { + fn from(value: InProcessBackend) -> Self { + Self::Io(value) + } +} + +impl From<InProcessBackend> for Option<BackendOverride> { + fn from(value: InProcessBackend) -> Self { + Some(value.into()) + } +} + +/// A backend communication protocol that can run in the same process. +pub struct InProcessBackend { + pub rpc_in: Box<dyn AsyncRead + Send + Sync + Unpin>, + pub rpc_out: Box<dyn AsyncWrite + Send + Sync + Unpin>, +} + +impl Debug for InProcessBackend { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + f.debug_struct("InProcessBackend").finish() + } +} + +#[derive(Debug)] +pub struct SetupRequest { + /// The source directory that contains the source package. + pub source_dir: PathBuf, + + /// Overrides for the build tool. + pub build_tool_override: Option<BackendOverride>, + + /// Identifier for the rest of the requests. + /// This is used to identify the requests that belong to the same build. + pub build_id: usize, +} + +#[derive(Debug)] +pub struct BuildOutput { + /// Paths to the built artifacts. + pub artifacts: Vec<PathBuf>, +} + +#[derive(Debug)] +pub struct CondaMetadataRequest { + /// The base urls of the channels to use. + pub channel_base_urls: Vec<Url>, +}
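`InProcessBackend` makes the transport pluggable without spawning a child process. A sketch of wiring one up over in-memory pipes, assuming `rpc_in` is the stream the frontend reads from and `rpc_out` the stream it writes to (the buffer size is arbitrary):

```rust
use pixi_build_frontend::{BackendOverride, InProcessBackend};

// Returns the override plus the two stream ends the backend task should use.
fn in_memory_override() -> (
    BackendOverride,
    tokio::io::DuplexStream,
    tokio::io::DuplexStream,
) {
    let (frontend_reads, backend_writes) = tokio::io::duplex(64 * 1024);
    let (backend_reads, frontend_writes) = tokio::io::duplex(64 * 1024);
    let io = InProcessBackend {
        rpc_in: Box::new(frontend_reads),
        rpc_out: Box::new(frontend_writes),
    };
    // The backend task reads requests from `backend_reads` and writes
    // responses to `backend_writes`.
    (io.into(), backend_reads, backend_writes)
}
```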
diff --git a/crates/pixi_build_frontend/src/protocol.rs b/crates/pixi_build_frontend/src/protocol.rs new file mode 100644 index 000000000..4530b703e --- /dev/null +++ b/crates/pixi_build_frontend/src/protocol.rs @@ -0,0 +1,143 @@ +use std::{path::PathBuf, sync::Arc}; + +use miette::{Diagnostic, IntoDiagnostic}; +use pixi_build_types::procedures::{ + conda_build::{CondaBuildParams, CondaBuildResult}, + conda_metadata::{CondaMetadataParams, CondaMetadataResult}, +}; + +use crate::protocols::builders::{conda_protocol, pixi_protocol, rattler_build_protocol}; + +use crate::{protocols::JsonRPCBuildProtocol, CondaBuildReporter, CondaMetadataReporter}; + +/// Top-level error type for protocol errors. +#[derive(Debug, thiserror::Error, Diagnostic)] +pub enum FinishError { + #[error(transparent)] + #[diagnostic(transparent)] + Pixi(#[from] pixi_protocol::FinishError), + + #[error(transparent)] + #[diagnostic(transparent)] + CondaBuild(#[from] conda_protocol::FinishError), + + #[error(transparent)] + #[diagnostic(transparent)] + RattlerBuild(#[from] rattler_build_protocol::FinishError), +} + +#[derive(Debug, thiserror::Error, Diagnostic)] +pub enum DiscoveryError { + #[error( + "failed to discover a valid project manifest, the source does not refer to a directory" + )] + NotADirectory, + #[error("failed to discover a valid project manifest, the source path '{}' could not be found", .0.display())] + NotFound(PathBuf), + #[error("unable to discover communication protocol, the source directory does not contain a supported manifest")] + #[diagnostic(help( + "Ensure that the source directory contains a valid pixi.toml or meta.yaml file." + ))] + UnsupportedFormat, + + #[error(transparent)] + #[diagnostic(transparent)] + Pixi(#[from] pixi_protocol::ProtocolBuildError), + + #[error(transparent)] + #[diagnostic(transparent)] + RattlerBuild(#[from] rattler_build_protocol::ProtocolBuildError), +} + +/// A protocol describes how to communicate with a build backend. A build +/// backend is a tool that is invoked to generate certain output. +/// +/// The frontend can support multiple backends, and the protocol is used to +/// determine which backend to use for a given source directory and how to +/// communicate with it. +/// +/// The [`Protocol`] type is a generic implementation that uses a +/// client-server JSON-RPC interface to communicate with another tool. +/// +/// Using this JSON-RPC interface means we can evolve the backend and frontend +/// tools as long as both tools can establish a shared protocol. The JSON-RPC +/// protocol is engineered in such a way that this is possible. This allows a +/// much newer frontend to still be able to interact with a very old backend, +/// which is important if you want to be able to use very old packages in the +/// far future. +/// +/// The conda-build and rattler-build implementations are hard-coded and do +/// not use a client-server model. Although technically they could also be +/// implemented using the client-server model, it is more ergonomic to add +/// their implementation directly into the frontend because no bridge +/// executable is needed. We can always add this later too using the existing +/// protocol. +// I think because we mostly have a single variant in use, boxing does not make +// sense here. +#[allow(clippy::large_enum_variant)] +#[derive(Debug)] +pub enum Protocol { + PixiBuild(JsonRPCBuildProtocol), + // This should be more like a subprocess protocol, + // as we invoke the tool directly + CondaBuild(conda_protocol::Protocol), +} + +impl From<JsonRPCBuildProtocol> for Protocol { + fn from(value: JsonRPCBuildProtocol) -> Self { + Self::PixiBuild(value) + } +} + +impl From<conda_protocol::Protocol> for Protocol { + fn from(value: conda_protocol::Protocol) -> Self { + Self::CondaBuild(value) + } +}
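End to end, discovery flows from `BuildFrontend::setup_protocol` to one of these variants. A minimal sketch (reporters and error handling elided; the path is supplied by the caller):

```rust
use pixi_build_frontend::{BuildFrontend, SetupRequest};

async fn discover_manifests(source_dir: std::path::PathBuf) -> miette::Result<Vec<String>> {
    let protocol = BuildFrontend::default()
        .setup_protocol(SetupRequest {
            source_dir,
            build_tool_override: None,
            build_id: 0,
        })
        .await?;
    // Dispatches to the JSON-RPC or conda-build implementation.
    Ok(protocol.manifests())
}
```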
+impl Protocol { + /// Returns the root manifest files of the source directory. These indicate + /// the files that are used to determine the build configuration. + pub fn manifests(&self) -> Vec<String> { + match self { + Self::PixiBuild(protocol) => protocol.manifests(), + Self::CondaBuild(protocol) => protocol.manifests(), + } + } + + pub async fn get_conda_metadata( + &self, + request: &CondaMetadataParams, + reporter: Arc<dyn CondaMetadataReporter>, + ) -> miette::Result<CondaMetadataResult> { + match self { + Self::PixiBuild(protocol) => protocol + .get_conda_metadata(request, reporter.as_ref()) + .await + .into_diagnostic(), + Self::CondaBuild(protocol) => protocol.get_conda_metadata(request), + } + } + + pub async fn conda_build( + &self, + request: &CondaBuildParams, + reporter: Arc<dyn CondaBuildReporter>, + ) -> miette::Result<CondaBuildResult> { + match self { + Self::PixiBuild(protocol) => protocol + .conda_build(request, reporter.as_ref()) + .await + .into_diagnostic(), + Self::CondaBuild(_) => unreachable!(), + } + } + + pub fn identifier(&self) -> &str { + match self { + Self::PixiBuild(protocol) => protocol.backend_identifier(), + Self::CondaBuild(protocol) => protocol.backend_identifier(), + } + } +} diff --git a/crates/pixi_build_frontend/src/protocol_builder.rs b/crates/pixi_build_frontend/src/protocol_builder.rs new file mode 100644 index 000000000..ee355fffa --- /dev/null +++ b/crates/pixi_build_frontend/src/protocol_builder.rs @@ -0,0 +1,175 @@ +use std::path::{Path, PathBuf}; + +use rattler_conda_types::ChannelConfig; + +use crate::{ + conda_protocol, pixi_protocol, + protocol::{DiscoveryError, FinishError}, + rattler_build_protocol, + tool::ToolCache, + BackendOverride, BuildFrontendError, Protocol, +}; + +/// Configuration to enable or disable discovery of certain protocols. +#[derive(Debug)] +pub struct EnabledProtocols { + /// Enable the rattler-build protocol. + pub enable_rattler_build: bool, + /// Enable the pixi protocol. + pub enable_pixi: bool, + /// Enable the conda-build protocol. + pub enable_conda_build: bool, +} + +impl Default for EnabledProtocols { + /// Create a new `EnabledProtocols` with all protocols enabled. + fn default() -> Self { + Self { + enable_rattler_build: true, + enable_pixi: true, + enable_conda_build: true, + } + } +} + +#[derive(Debug)] +pub(crate) enum ProtocolBuilder { + /// A pixi project. + Pixi(pixi_protocol::ProtocolBuilder), + + /// A directory containing a `meta.yaml` that can be interpreted by + /// conda-build. + CondaBuild(conda_protocol::ProtocolBuilder), + + /// A directory containing a `recipe.yaml` that can be built with + /// rattler-build. + RattlerBuild(rattler_build_protocol::ProtocolBuilder), +} + +impl From<pixi_protocol::ProtocolBuilder> for ProtocolBuilder { + fn from(value: pixi_protocol::ProtocolBuilder) -> Self { + Self::Pixi(value) + } +} + +impl From<conda_protocol::ProtocolBuilder> for ProtocolBuilder { + fn from(value: conda_protocol::ProtocolBuilder) -> Self { + Self::CondaBuild(value) + } } + +impl From<rattler_build_protocol::ProtocolBuilder> for ProtocolBuilder { + fn from(value: rattler_build_protocol::ProtocolBuilder) -> Self { + Self::RattlerBuild(value) + } +} + +impl ProtocolBuilder { + /// Discovers the protocol for the given source directory. + pub fn discover( + source_dir: &Path, + enabled_protocols: &EnabledProtocols, + ) -> Result<Self, DiscoveryError> { + if source_dir.is_file() { + return Err(DiscoveryError::NotADirectory); + } else if !source_dir.is_dir() { + return Err(DiscoveryError::NotFound(source_dir.to_path_buf())); + } + + // Try to discover as a rattler-build recipe first, since the source + // may also be a `pixi` project + if enabled_protocols.enable_rattler_build { + if let Some(protocol) = rattler_build_protocol::ProtocolBuilder::discover(source_dir)?
{ + return Ok(protocol.into()); + } + } + + // Try to discover as a pixi project + if enabled_protocols.enable_pixi { + if let Some(protocol) = pixi_protocol::ProtocolBuilder::discover(source_dir)? { + return Ok(protocol.into()); + } + } + + // Try to discover as a conda build project + // Unwrap because error is Infallible + if enabled_protocols.enable_conda_build { + if let Some(protocol) = conda_protocol::ProtocolBuilder::discover(source_dir).unwrap() { + return Ok(protocol.into()); + } + } + + // TODO: Add additional formats later + Err(DiscoveryError::UnsupportedFormat) + } + + /// Sets the channel configuration used by the protocol. + pub fn with_channel_config(self, channel_config: ChannelConfig) -> Self { + match self { + Self::Pixi(protocol) => Self::Pixi(protocol.with_channel_config(channel_config)), + Self::CondaBuild(protocol) => { + Self::CondaBuild(protocol.with_channel_config(channel_config)) + } + Self::RattlerBuild(protocol) => { + Self::RattlerBuild(protocol.with_channel_config(channel_config)) + } + } + } + + pub(crate) fn with_backend_override(self, backend: Option<BackendOverride>) -> Self { + match self { + Self::Pixi(protocol) => Self::Pixi(protocol.with_backend_override(backend)), + Self::CondaBuild(protocol) => Self::CondaBuild(protocol.with_backend_override(backend)), + Self::RattlerBuild(protocol) => { + Self::RattlerBuild(protocol.with_backend_override(backend)) + } + } + } + + /// Sets the cache directory to use for any caching. + pub fn with_opt_cache_dir(self, cache_directory: Option<PathBuf>) -> Self { + match self { + Self::Pixi(protocol) => Self::Pixi(protocol.with_opt_cache_dir(cache_directory)), + Self::CondaBuild(protocol) => { + Self::CondaBuild(protocol.with_opt_cache_dir(cache_directory)) + } + Self::RattlerBuild(protocol) => { + Self::RattlerBuild(protocol.with_opt_cache_dir(cache_directory)) + } + } + } + + /// Returns the name of the protocol. + pub fn name(&self) -> &str { + match self { + Self::Pixi(_) => "pixi", + Self::CondaBuild(_) => "conda-build", + Self::RattlerBuild(_) => "rattler-build", + } + } + + /// Finish the construction of the protocol and return the protocol object. + pub async fn finish( + self, + tool_cache: &ToolCache, + build_id: usize, + ) -> Result<Protocol, FinishError> { + match self { + Self::Pixi(protocol) => Ok(protocol + .finish(tool_cache, build_id) + .await + .map_err(FinishError::Pixi)? + .into()), + Self::CondaBuild(protocol) => Ok(protocol + .finish(tool_cache, build_id) + .await + .map_err(FinishError::CondaBuild)? + .into()), + Self::RattlerBuild(protocol) => Ok(protocol + .finish(tool_cache, build_id) + .await + .map_err(FinishError::RattlerBuild)? + .into()), + } + } +} diff --git a/crates/pixi_build_frontend/src/protocols/builders/conda_build/mod.rs b/crates/pixi_build_frontend/src/protocols/builders/conda_build/mod.rs new file mode 100644 index 000000000..5cc57b591 --- /dev/null +++ b/crates/pixi_build_frontend/src/protocols/builders/conda_build/mod.rs @@ -0,0 +1,108 @@ +mod protocol; + +use std::{ + convert::Infallible, + path::{Path, PathBuf}, +}; + +use miette::Diagnostic; +pub use protocol::Protocol; +use rattler_conda_types::{ChannelConfig, MatchSpec, ParseStrictness::Strict}; +use thiserror::Error; + +use crate::{ + tool::{IsolatedToolSpec, ToolCache, ToolCacheError, ToolSpec}, + BackendOverride, +}; + +#[derive(Debug, Error, Diagnostic)] +pub enum FinishError { + #[error(transparent)] + Tool(#[from] ToolCacheError), +} +
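The discovery order above is fixed (rattler-build, then pixi, then conda-build), but `EnabledProtocols` can switch individual backends off. For example, a sketch that restricts discovery to pixi projects:

```rust
use pixi_build_frontend::{BuildFrontend, EnabledProtocols};

fn pixi_only_frontend() -> BuildFrontend {
    BuildFrontend::default().with_enabled_protocols(EnabledProtocols {
        enable_rattler_build: false,
        enable_pixi: true,
        enable_conda_build: false,
    })
}
```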
+/// A builder for constructing a [`protocol::Protocol`] instance. +#[derive(Debug)] +pub struct ProtocolBuilder { + /// The directory that contains the source files. + source_dir: PathBuf, + + /// The directory that contains the `meta.yaml` in the source directory. + recipe_dir: PathBuf, + + /// The backend tool to install. + backend_spec: ToolSpec, + + /// The channel configuration used by this instance. + channel_config: ChannelConfig, + + /// The cache directory the backend should use. (not currently used) + _cache_dir: Option<PathBuf>, +} + +impl ProtocolBuilder { + /// Discovers the protocol for the given source directory. + pub fn discover(source_dir: &Path) -> Result<Option<Self>, Infallible> { + let recipe_dir = source_dir.join("recipe"); + let protocol = if source_dir.join("meta.yaml").is_file() { + Self::new(source_dir, source_dir) + } else if recipe_dir.join("meta.yaml").is_file() { + Self::new(source_dir, &recipe_dir) + } else { + return Ok(None); + }; + + Ok(Some(protocol)) + } + + /// Constructs a new instance from a manifest. + pub fn new(source_dir: &Path, recipe_dir: &Path) -> Self { + let backend_spec = + IsolatedToolSpec::from_specs(vec![MatchSpec::from_str("conda-build", Strict).unwrap()]) + .into(); + + Self { + source_dir: source_dir.to_path_buf(), + recipe_dir: recipe_dir.to_path_buf(), + backend_spec, + channel_config: ChannelConfig::default_with_root_dir(PathBuf::new()), + _cache_dir: None, + } + } + + /// Sets an optional backend override. + pub fn with_backend_override(self, backend_override: Option<BackendOverride>) -> Self { + Self { + backend_spec: backend_override + .map(BackendOverride::into_spec) + .unwrap_or(self.backend_spec), + ..self + } + } + + /// Sets the channel configuration used by this instance. + pub fn with_channel_config(self, channel_config: ChannelConfig) -> Self { + Self { + channel_config, + ..self + } + } + + /// Sets the cache directory the backend should use. + pub fn with_opt_cache_dir(self, cache_dir: Option<PathBuf>) -> Self { + Self { + _cache_dir: cache_dir, + ..self + } + } + + /// Instantiates the backend tool and constructs the [`Protocol`]. + pub async fn finish(self, tool: &ToolCache, _build_id: usize) -> Result<Protocol, FinishError> { + let tool = tool.instantiate(self.backend_spec).await?; + Ok(Protocol { + _channel_config: self.channel_config, + tool, + source_dir: self.source_dir, + recipe_dir: self.recipe_dir, + }) + } +} diff --git a/crates/pixi_build_frontend/src/protocols/builders/conda_build/protocol.rs b/crates/pixi_build_frontend/src/protocols/builders/conda_build/protocol.rs new file mode 100644 index 000000000..136823cf2 --- /dev/null +++ b/crates/pixi_build_frontend/src/protocols/builders/conda_build/protocol.rs @@ -0,0 +1,278 @@ +use std::{path::PathBuf, sync::OnceLock}; + +use miette::{Context, IntoDiagnostic}; +use pixi_build_types::{ + procedures::conda_metadata::{CondaMetadataParams, CondaMetadataResult}, + CondaPackageMetadata, +}; + +use rattler_conda_types::{ChannelConfig, NoArchType, PackageName, Platform, VersionWithSource}; +use regex::Regex; +use serde::{Deserialize, Serialize}; +use serde_with::serde_as; +use sha1::{Digest, Sha1}; + +use crate::tool::Tool; + +#[derive(Debug)] +pub struct Protocol { + pub(super) _channel_config: ChannelConfig, + pub(super) tool: Tool, + pub(super) source_dir: PathBuf, + pub(super) recipe_dir: PathBuf, +} + +impl Protocol { + /// Returns a unique identifier for the backend. + pub fn backend_identifier(&self) -> &str { + "conda-build" + } +
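For reference, `get_conda_metadata` below shells out to `conda-render`. With a single channel the assembled invocation looks roughly like this (channel URL and recipe path illustrative):

```rust
// conda-render --override-channels \
//     --channel https://prefix.dev/conda-forge \
//     ./recipe
//
// stdout is captured and parsed; stderr is passed through to the user.
```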
+ /// Returns the relative path from the source directory to the recipe. + pub fn manifests(&self) -> Vec<String> { + self.recipe_dir + .strip_prefix(&self.source_dir) + .unwrap_or(&self.recipe_dir) + .join("meta.yaml") + .to_str() + .map(|s| s.to_string()) + .into_iter() + .collect() + } + + /// Extracts the metadata from the recipe. + pub fn get_conda_metadata( + &self, + request: &CondaMetadataParams, + ) -> miette::Result<CondaMetadataResult> { + let Some(tool) = self.tool.as_executable() else { + miette::bail!("Cannot use a non-executable tool to render conda metadata"); + }; + + // Construct a new tool that can be used to invoke conda-render instead of the + // original tool. + let conda_render_executable = String::from("conda-render"); + let conda_render_executable = if cfg!(windows) { + format!("{}.exe", conda_render_executable) + } else { + conda_render_executable + }; + + let conda_render_tool = tool.with_executable(conda_render_executable); + + // TODO: Properly pass channels + // TODO: Setup --exclusive-config-files + + let channels = request + .channel_base_urls + .iter() + .flatten() + .flat_map(|url| ["--channel", url.as_str()]); + + let output = conda_render_tool + .command() + // .arg("--verbose") + // This is currently apparently broken in conda-build.. + // .arg("--use-channeldata") + .arg("--override-channels") + .args(channels) + .arg(&self.recipe_dir) + .stderr(std::process::Stdio::inherit()) + .stdout(std::process::Stdio::piped()) + .output() + .into_diagnostic() + .context("failed to spawn conda-render executable")?; + + // Try to parse the contents of the output. + let stdout = String::from_utf8(output.stdout) + .into_diagnostic() + .context("failed to convert the output of conda-render to a valid utf-8 string")?; + + // Fail if the process did not exit successfully. + if !output.status.success() { + miette::bail!( + "conda-render returned with a non-zero exit code:\n{}", + stdout + ); + } + + // Parse the output of conda-render. + let rendered_recipes = extract_rendered_recipes(&stdout)?; + + let metadata = CondaMetadataResult { + packages: rendered_recipes + .into_iter() + .map(|(recipe, meta_yaml)| { + convert_conda_render_output(recipe).with_context(|| { + format!( + "failed to extract metadata from conda-render output:\n{}", + meta_yaml + ) + }) + }) + .collect::<miette::Result<_>>()?, + input_globs: None, + }; + + Ok(metadata) + } +} + +/// Given output from `conda-render`, parse it into one or more +/// [`CondaRenderRecipe`]s. +fn extract_rendered_recipes( + rendered_recipe: &str, +) -> miette::Result<Vec<(CondaRenderRecipe, &str)>> { + static OUTPUT_REGEX: OnceLock<Regex> = OnceLock::new(); + let output_regex = OUTPUT_REGEX.get_or_init(|| { + Regex::new(r#"(?sR)Hash contents:\r?\n-{14}\r?\n(.+?)-{10}\r?\nmeta.yaml:\r?\n-{10}\r?\n(.+?)(?:-{14}|$)"#) + .unwrap() + }); + + let mut iter = output_regex.captures_iter(rendered_recipe).peekable(); + if iter.peek().is_none() { + miette::bail!( + "could not find metadata in conda-render output:\n{}", + rendered_recipe + ) + } + + iter.map(|captures| { + let hash = captures.get(1).unwrap().as_str().trim(); + let meta_yaml = captures.get(2).unwrap().as_str().trim(); + serde_yaml::from_str(meta_yaml) + .map(|recipe| { + ( + CondaRenderRecipe { + hash_content: hash.to_string().replace("\r\n", "\n"), + recipe, + }, + meta_yaml, + ) + }) + .into_diagnostic() + .with_context(|| format!("failed to parse the rendered recipe:\n{meta_yaml}")) + }) + .collect() +}
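To make the expected framing concrete, here is an illustrative unit-test sketch (not part of this PR) that feeds `extract_rendered_recipes` a fabricated transcript; the separator widths (14 dashes under `Hash contents:`, 10 around `meta.yaml:`) must match the regex above:

```rust
#[test]
fn parses_a_minimal_render_transcript() {
    // Fabricated conda-render output; values are hypothetical.
    let transcript = "Hash contents:\n\
                      --------------\n\
                      {'target_platform': 'noarch'}\n\
                      ----------\n\
                      meta.yaml:\n\
                      ----------\n\
                      package:\n  name: demo\n  version: \"1.0\"\n\
                      build:\n  number: 0\n\
                      requirements:\n  run:\n    - python\n\
                      about:\n  license: MIT\n";
    let recipes = extract_rendered_recipes(transcript).unwrap();
    assert_eq!(recipes.len(), 1);
    assert_eq!(recipes[0].0.hash_content, "{'target_platform': 'noarch'}");
}
```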
+fn convert_conda_render_output(recipe: CondaRenderRecipe) -> miette::Result<CondaPackageMetadata> { + Ok(CondaPackageMetadata { + build: recipe.hash(), + name: recipe.recipe.package.name, + version: recipe.recipe.package.version, + build_number: recipe.recipe.build.number.unwrap_or(0), + subdir: if recipe.recipe.build.noarch.is_none() { + Platform::current() + } else { + Platform::NoArch + }, + depends: recipe.recipe.requirements.run, + constraints: recipe.recipe.requirements.run_constrained, + license: recipe.recipe.about.license, + license_family: recipe.recipe.about.license_family, + noarch: recipe.recipe.build.noarch, + }) +} + +#[derive(Debug, Deserialize, Serialize)] +struct CondaRenderRecipe { + hash_content: String, + recipe: RenderedRecipe, +} + +impl CondaRenderRecipe { + /// Determine the hash of the recipe. This is based on the user specified + /// hash or the hash computed from the hash content. + pub fn hash(&self) -> String { + // TODO: Verify if this logic is actually correct. + + if let Some(hash) = &self.recipe.build.string { + return hash.clone(); + } + + let mut hasher = Sha1::new(); + hasher.update(self.hash_content.as_bytes()); + let result = hasher.finalize(); + + const HASH_LENGTH: usize = 7; + + let res = format!("{:x}", result); + res[..HASH_LENGTH].to_string() + } +} + +#[derive(Debug, Deserialize, Serialize)] +struct RenderedRecipe { + package: RenderedPackage, + build: RenderedBuild, + requirements: RenderedRequirements, + about: RenderedAbout, +} + +#[derive(Debug, Deserialize, Serialize)] +struct RenderedPackage { + name: PackageName, + version: VersionWithSource, +} + +#[serde_as] +#[derive(Debug, Deserialize, Serialize)] +struct RenderedBuild { + #[serde_as(as = "Option<serde_with::PickFirst<(_, serde_with::DisplayFromStr)>>")] + #[serde(skip_serializing_if = "Option::is_none")] + number: Option<u64>, + #[serde(skip_serializing_if = "Option::is_none")] + string: Option<String>, + #[serde(default, skip_serializing_if = "NoArchType::is_none")] + noarch: NoArchType, +} + +#[derive(Debug, Deserialize, Serialize)] +struct RenderedRequirements { + #[serde(default)] + run: Vec<String>, + #[serde(default)] + run_constrained: Vec<String>, +} + +#[derive(Debug, Deserialize, Serialize)] +struct RenderedAbout { + #[serde(skip_serializing_if = "Option::is_none")] + license: Option<String>, + #[serde(skip_serializing_if = "Option::is_none")] + license_family: Option<String>, +} + +#[cfg(test)] +mod test { + use std::path::Path; + + use itertools::Itertools; + use rstest::*; + + use super::*; + + #[rstest] + #[case::pinject("conda-render/pinject.txt")] + #[case::microarch("conda-render/microarch-level.txt")] + fn test_extract_rendered_recipe(#[case] path: &str) { + let rendered_recipe = std::fs::read_to_string( + Path::new(env!("CARGO_MANIFEST_DIR")) + .join("test-data") + .join(path), + ) + .unwrap(); + let rendered_recipe = extract_rendered_recipes(&rendered_recipe) + .into_iter() + .flatten() + .format_with("\n===\n", |(recipe, meta_yaml), formatter| { + formatter(&format_args!( + "{meta_yaml}\n---\n{}", + serde_yaml::to_string(&recipe).unwrap() + )) + }) + .to_string(); + insta::assert_snapshot!(path, rendered_recipe); + } +} diff --git a/crates/pixi_build_frontend/src/protocols/builders/conda_build/snapshots/pixi_build_frontend__protocols__builders__conda_build__protocol__test__conda-render__microarch-level.txt.snap b/crates/pixi_build_frontend/src/protocols/builders/conda_build/snapshots/pixi_build_frontend__protocols__builders__conda_build__protocol__test__conda-render__microarch-level.txt.snap new file mode 100644 index 000000000..fe4a1aadc --- /dev/null +++
b/crates/pixi_build_frontend/src/protocols/builders/conda_build/snapshots/pixi_build_frontend__protocols__builders__conda_build__protocol__test__conda-render__microarch-level.txt.snap @@ -0,0 +1,144 @@ +--- +source: crates/pixi_build_frontend/src/protocols/builders/conda_build/protocol.rs +expression: rendered_recipe +--- +package: + name: ppc64le-microarch-level + version: '8' +build: + noarch: generic + number: 2 + run_exports: + strong: + - _ppc64le-microarch-level >=8 + string: '2' +requirements: + build: [] + run: + - __unix +about: + description: 'Use the meta-package ppc64le-microarch-level in requirements/build + in conda + + recipes to set up the compiler flags and set up the virtual package + + requirements in the run requirements. + + + When building packages on CI, level=4 will not be guaranteed, so + + you can only use level<=3 to build. + + + The run_exports only has a lower bound and therefore a level=2 + + build can be installed on a level=3 user system. A tighter bound + + is not added because we want to be able to test both level=2 and + + level=3 on a CI machine with level=3. + + Therefore in order to prioritise the highest level, use the build + + number to prioritise the level. + + + Only supported on Linux and macOS. + + ' + home: https://github.com/conda-forge/microarch-level-feedstock + license: BSD-3-Clause + license_file: LICENSE.txt + summary: Meta package to build conda recipes with microarchitecture levels +extra: + copy_test_source_files: true + feedstock-name: microarch-level + final: true + parent_recipe: + name: microarch-level-split + path: F:\projects\microarch-level-feedstock\recipe + version: '8' + recipe-maintainers: + - isuruf +--- +hash_content: |- + {'__unix': '__unix', + 'channel_targets': 'conda-forge main', + 'family': 'ppc64le', + 'level': '8'} +recipe: + package: + name: ppc64le-microarch-level + version: '8' + build: + number: 2 + string: '2' + noarch: generic + requirements: + run: + - __unix + run_constrained: [] + about: + license: BSD-3-Clause + +=== +package: + name: _x86_64-microarch-level + version: '1' +build: + noarch: generic + number: 2 + string: 2_x86_64 +requirements: + build: [] + run: + - __archspec 1=x86_64 +about: + description: 'The meta-package _x86_64-microarch-level enforces the microarchitecture + in the + + user system. + + + Note that a user would need the archspec conda package installed + + in the base environment where conda/mamba is run from. 
+ + + See x86_64-microarch-level for using this in conda recipes + + ' + home: https://github.com/conda-forge/microarch-level-feedstock + license: BSD-3-Clause + license_file: LICENSE.txt + summary: Meta package to build conda recipes with microarchitecture levels +extra: + copy_test_source_files: true + feedstock-name: microarch-level + final: true + parent_recipe: + name: microarch-level-split + path: F:\projects\microarch-level-feedstock\recipe + version: '1' + recipe-maintainers: + - isuruf +--- +hash_content: |- + {'__archspec': '__archspec 1=x86_64', + 'channel_targets': 'conda-forge main', + 'family': 'x86_64', + 'microarchitecture': 'x86_64'} +recipe: + package: + name: _x86_64-microarch-level + version: '1' + build: + number: 2 + string: 2_x86_64 + noarch: generic + requirements: + run: + - __archspec 1=x86_64 + run_constrained: [] + about: + license: BSD-3-Clause diff --git a/crates/pixi_build_frontend/src/protocols/builders/conda_build/snapshots/pixi_build_frontend__protocols__builders__conda_build__protocol__test__conda-render__pinject.txt.snap b/crates/pixi_build_frontend/src/protocols/builders/conda_build/snapshots/pixi_build_frontend__protocols__builders__conda_build__protocol__test__conda-render__pinject.txt.snap new file mode 100644 index 000000000..d90b4838f --- /dev/null +++ b/crates/pixi_build_frontend/src/protocols/builders/conda_build/snapshots/pixi_build_frontend__protocols__builders__conda_build__protocol__test__conda-render__pinject.txt.snap @@ -0,0 +1,73 @@ +--- +source: crates/pixi_build_frontend/src/protocols/builders/conda_build/protocol.rs +expression: rendered_recipe +--- +package: + name: pinject + version: 0.14.1 +source: + sha256: 0f0a0b14f9df87a85b529a21cdaf530269b1f24fb303d418583a12bb53f69382 + url: https://pypi.io/packages/source/p/pinject/pinject-0.14.1.tar.gz +build: + noarch: python + number: '0' + script: C:\\Users\\zalms\\conda-bld\\pinject_1723465624118\\_h_env\\python.exe -m + pip install . 
-vv +requirements: + host: + - ca-certificates 2024.7.4 h56e8100_0 + - libexpat 2.6.2 h63175ca_0 + - tzdata 2024a h0c530f3_0 + - ucrt 10.0.22621.0 h57928b3_0 + - vc14_runtime 14.40.33810 ha82c5b3_20 + - vc 14.3 h8a93ad2_20 + - vs2015_runtime 14.40.33810 h3bf8584_20 + - bzip2 1.0.8 h2466b09_7 + - libffi 3.4.2 h8ffe710_5 + - libsqlite 3.46.0 h2466b09_0 + - libzlib 1.3.1 h2466b09_1 + - openssl 3.3.1 h2466b09_2 + - tk 8.6.13 h5226925_1 + - xz 5.2.6 h8d14728_0 + - python 3.12.5 h889d299_0_cpython + - setuptools 72.1.0 pyhd8ed1ab_0 + - wheel 0.44.0 pyhd8ed1ab_0 + - pip 24.2 pyhd8ed1ab_0 + run: + - python >=3.4 + - six >=1.7.3 + - decorator >=4.3.0 +test: + commands: + - pip check + imports: + - pinject + requires: + - pip +about: + home: https://github.com/google/pinject + license: Apache-2.0 + license_file: LICENSE + summary: A pythonic dependency injection library +extra: + copy_test_source_files: true + final: true + recipe-maintainers: + - baszalmstra +--- +hash_content: '{}' +recipe: + package: + name: pinject + version: 0.14.1 + build: + number: 0 + noarch: python + requirements: + run: + - python >=3.4 + - six >=1.7.3 + - decorator >=4.3.0 + run_constrained: [] + about: + license: Apache-2.0 diff --git a/crates/pixi_build_frontend/src/protocols/builders/mod.rs b/crates/pixi_build_frontend/src/protocols/builders/mod.rs new file mode 100644 index 000000000..9392b4b8e --- /dev/null +++ b/crates/pixi_build_frontend/src/protocols/builders/mod.rs @@ -0,0 +1,7 @@ +pub mod conda_build; +pub mod pixi; +pub mod rattler_build; + +pub use conda_build as conda_protocol; +pub use pixi as pixi_protocol; +pub use rattler_build as rattler_build_protocol; diff --git a/crates/pixi_build_frontend/src/protocols/builders/pixi.rs b/crates/pixi_build_frontend/src/protocols/builders/pixi.rs new file mode 100644 index 000000000..77a2357e0 --- /dev/null +++ b/crates/pixi_build_frontend/src/protocols/builders/pixi.rs @@ -0,0 +1,190 @@ +use std::{ + fmt, + fmt::{Display, Formatter}, + path::{Path, PathBuf}, +}; + +use miette::Diagnostic; +use pixi_consts::consts; +use pixi_manifest::Manifest; +// pub use protocol::Protocol; +use rattler_conda_types::ChannelConfig; +use thiserror::Error; +use which::Error; + +use crate::{ + protocols::{InitializeError, JsonRPCBuildProtocol}, + tool::{IsolatedToolSpec, ToolCache, ToolCacheError, ToolSpec}, + BackendOverride, +}; + +// use super::{InitializeError, JsonRPCBuildProtocol}; + +/// A protocol that uses a pixi manifest to invoke a build backend.
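+/// +/// A minimal usage sketch (hedged; the path, `channel_config`, and `tool_cache` values are placeholders): +/// ```ignore +/// if let Some(builder) = ProtocolBuilder::discover(Path::new("/work/project"))? { +/// let protocol = builder +/// .with_channel_config(channel_config) +/// .finish(&tool_cache, 0) +/// .await?; +/// } +/// ```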
+#[derive(Debug)] +pub struct ProtocolBuilder { + source_dir: PathBuf, + manifest: Manifest, + backend_spec: Option<ToolSpec>, + _channel_config: ChannelConfig, + cache_dir: Option<PathBuf>, +} + +#[derive(thiserror::Error, Debug, Diagnostic)] +pub enum ProtocolBuildError { + #[error("failed to setup a build backend, the {} could not be parsed", .0.file_name().and_then(std::ffi::OsStr::to_str).unwrap_or("manifest"))] + #[diagnostic(help("Ensure that the manifest at '{}' is a valid pixi project manifest", .0.display()))] + FailedToParseManifest(PathBuf, #[diagnostic_source] miette::Report), + + #[error("the {} does not describe a package", .0.file_name().and_then(std::ffi::OsStr::to_str).unwrap_or("manifest"))] + #[diagnostic(help("A [package] section is missing in the manifest"))] + NotAPackage(PathBuf), +} + +#[derive(Debug, Error, Diagnostic)] +pub enum FinishError { + #[error(transparent)] + #[diagnostic(transparent)] + Init(#[from] InitializeError), + NoBuildSection(PathBuf), + Tool(ToolCacheError), +} + +impl Display for FinishError { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + match self { + FinishError::Init(init) => write!(f, "{init}"), + FinishError::NoBuildSection(_) => write!(f, "failed to setup a build backend, the project manifest does not contain a [build-system] section"), + FinishError::Tool(ToolCacheError::Instantiate(tool, err)) => match err { + Error::CannotGetCurrentDirAndPathListEmpty|Error::CannotFindBinaryPath => write!(f, "failed to setup a build backend, the backend tool '{}' could not be found", tool.display()), + Error::CannotCanonicalize => write!(f, "failed to setup a build backend, although the backend tool '{}' can be resolved it could not be canonicalized", tool.display()), + }, + FinishError::Tool(ToolCacheError::Install(report)) => write!(f, "failed to setup a build backend, the backend tool could not be installed: {}", report), + FinishError::Tool(ToolCacheError::CacheDir(report)) => write!(f, "failed to setup a build backend, the cache dir could not be discovered: {}", report), + } + } +} + +impl ProtocolBuilder { + /// Constructs a new instance from a manifest. + pub(crate) fn new(source_dir: PathBuf, manifest: Manifest) -> Result<Self, ProtocolBuildError> { + let backend_spec = manifest + .build_section() + .map(IsolatedToolSpec::from_build_section); + + Ok(Self { + source_dir, + manifest, + backend_spec: backend_spec.map(Into::into), + _channel_config: ChannelConfig::default_with_root_dir(PathBuf::new()), + cache_dir: None, + }) + } + + /// Sets an optional backend override. + pub fn with_backend_override(self, backend_override: Option<BackendOverride>) -> Self { + Self { + backend_spec: backend_override + .map(BackendOverride::into_spec) + .or(self.backend_spec), + ..self + } + } + + /// Sets the channel configuration used by this instance. + pub fn with_channel_config(self, channel_config: ChannelConfig) -> Self { + Self { + _channel_config: channel_config, + ..self + } + } + + /// Sets the cache directory the backend should use. + pub fn with_opt_cache_dir(self, cache_dir: Option<PathBuf>) -> Self { + Self { cache_dir, ..self } + } + + /// Discovers a pixi project in the given source directory. + pub fn discover(source_dir: &Path) -> Result<Option<Self>, ProtocolBuildError> { + if let Some(manifest_path) = find_pixi_manifest(source_dir) { + match Manifest::from_path(&manifest_path) { + Ok(manifest) => { + // Make sure the manifest describes a package.
+ if manifest.package.is_none() { + return Err(ProtocolBuildError::NotAPackage(manifest_path)); + } + + let builder = Self::new(source_dir.to_path_buf(), manifest)?; + return Ok(Some(builder)); + } + Err(e) => { + return Err(ProtocolBuildError::FailedToParseManifest( + manifest_path.to_path_buf(), + e, + )); + } + } + } + Ok(None) + } + + pub async fn finish( + self, + tool: &ToolCache, + build_id: usize, + ) -> Result<JsonRPCBuildProtocol, FinishError> { + let tool_spec = self + .backend_spec + .ok_or(FinishError::NoBuildSection(self.manifest.path.clone()))?; + + let tool = tool + .instantiate(tool_spec) + .await + .map_err(FinishError::Tool)?; + + Ok(JsonRPCBuildProtocol::setup( + self.source_dir, + self.manifest.path, + build_id, + self.cache_dir, + tool, + ) + .await?) + } + + /// Returns the pixi manifest + pub fn manifest(&self) -> &Manifest { + &self.manifest + } +} + +/// Try to find a pixi manifest in the given source directory. +fn find_pixi_manifest(source_dir: &Path) -> Option<PathBuf> { + let pixi_manifest_path = source_dir.join(consts::PROJECT_MANIFEST); + if pixi_manifest_path.exists() { + return Some(pixi_manifest_path); + } + + let pyproject_manifest_path = source_dir.join(consts::PYPROJECT_MANIFEST); + // TODO: Really check if this is a pixi project. + if pyproject_manifest_path.is_file() { + return Some(pyproject_manifest_path); + } + + None +} + +#[cfg(test)] +mod tests { + use std::path::PathBuf; + + use super::ProtocolBuilder; + + #[test] + pub fn discover_basic_pixi_manifest() { + let manifest_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("tests/basic"); + let manifest_path = super::find_pixi_manifest(&manifest_dir) + .unwrap_or_else(|| panic!("No manifest found at {}", manifest_dir.display())); + ProtocolBuilder::discover(&manifest_path).unwrap(); + } +} diff --git a/crates/pixi_build_frontend/src/protocols/builders/rattler_build.rs b/crates/pixi_build_frontend/src/protocols/builders/rattler_build.rs new file mode 100644 index 000000000..dcb4e30ba --- /dev/null +++ b/crates/pixi_build_frontend/src/protocols/builders/rattler_build.rs @@ -0,0 +1,155 @@ +use std::path::{Path, PathBuf}; + +use miette::Diagnostic; +use pixi_manifest::Manifest; + +// pub use protocol::Protocol; +use rattler_conda_types::ChannelConfig; +use thiserror::Error; + +use super::pixi::{self, ProtocolBuildError as PixiProtocolBuildError}; + +use crate::{ + protocols::{InitializeError, JsonRPCBuildProtocol}, + tool::{IsolatedToolSpec, ToolCache, ToolCacheError, ToolSpec}, + BackendOverride, +}; + +#[derive(Debug, Error, Diagnostic)] +pub enum FinishError { + #[error(transparent)] + Tool(#[from] ToolCacheError), + #[error(transparent)] + #[diagnostic(transparent)] + Init(#[from] InitializeError), + #[error("failed to setup a build backend, the project manifest at {0} does not contain a [build] section")] + NoBuildSection(PathBuf), +} + +/// Right now building a rattler-build protocol is *almost* infallible. +/// The only way it can fail is if the pixi protocol cannot be built. +/// This error for now is mostly a wrapper around the pixi protocol build error. +#[derive(thiserror::Error, Debug, Diagnostic)] +pub enum ProtocolBuildError { + #[error(transparent)] + FailedToBuildPixi(#[from] PixiProtocolBuildError), +} + +/// A builder for constructing a [`protocol::Protocol`] instance. +#[derive(Debug)] +pub struct ProtocolBuilder { + /// The directory that contains the source files. + source_dir: PathBuf, + + /// The directory that contains the `recipe.yaml` in the source directory.
+ recipe_dir: PathBuf, + + /// The path to the manifest file. + manifest_path: PathBuf, + + /// The backend tool to install. + backend_spec: Option<ToolSpec>, + + /// The channel configuration used by this instance. + _channel_config: ChannelConfig, + + /// The cache directory the backend should use. (currently unused) + cache_dir: Option<PathBuf>, +} + +impl ProtocolBuilder { + /// Discovers the protocol for the given source directory. + pub fn discover(source_dir: &Path) -> Result<Option<Self>, ProtocolBuildError> { + // First discover the pixi protocol; it is only used here to obtain the manifest. + // Ignore the error if we cannot find the pixi protocol. + let pixi_protocol = match pixi::ProtocolBuilder::discover(source_dir) { + Ok(inner_value) => inner_value, + Err(_) => return Ok(None), // Discovery failed, so no protocol can be built. + }; + + // Without a pixi manifest we cannot build the rattler-build protocol either. + let manifest = if let Some(pixi_protocol) = pixi_protocol { + pixi_protocol.manifest().clone() + } else { + return Ok(None); + }; + + let recipe_dir = source_dir.join("recipe"); + + let protocol = if source_dir.join("recipe.yaml").is_file() { + Self::new(source_dir, source_dir, &manifest) + } else if recipe_dir.join("recipe.yaml").is_file() { + Self::new(source_dir, &recipe_dir, &manifest) + } else { + return Ok(None); + }; + + Ok(Some(protocol)) + } + + /// Constructs a new instance from a manifest. + pub fn new(source_dir: &Path, recipe_dir: &Path, manifest: &Manifest) -> Self { + let backend_spec = manifest + .build_section() + .map(IsolatedToolSpec::from_build_section); + + Self { + source_dir: source_dir.to_path_buf(), + recipe_dir: recipe_dir.to_path_buf(), + manifest_path: manifest.path.clone(), + backend_spec: backend_spec.map(Into::into), + _channel_config: ChannelConfig::default_with_root_dir(PathBuf::new()), + cache_dir: None, + } + } + + /// Sets an optional backend override. + pub fn with_backend_override(self, backend_override: Option<BackendOverride>) -> Self { + Self { + backend_spec: backend_override + .map(BackendOverride::into_spec) + .or(self.backend_spec), + ..self + } + } + + /// Sets the channel configuration used by this instance. + pub fn with_channel_config(self, channel_config: ChannelConfig) -> Self { + Self { + _channel_config: channel_config, + ..self + } + } + + /// Sets the cache directory the backend should use. + pub fn with_opt_cache_dir(self, cache_dir: Option<PathBuf>) -> Self { + Self { cache_dir, ..self } + } + + /// Create the protocol instance. + pub async fn finish( + self, + tool: &ToolCache, + build_id: usize, + ) -> Result<JsonRPCBuildProtocol, FinishError> { + let tool_spec = self + .backend_spec + .ok_or(FinishError::NoBuildSection(self.manifest_path.clone()))?; + + let tool = tool + .instantiate(tool_spec) + .await + .map_err(FinishError::Tool)?; + + Ok(JsonRPCBuildProtocol::setup( + self.source_dir, + self.recipe_dir.join("recipe.yaml"), + build_id, + self.cache_dir, + tool, + ) + .await?)
+ } +} diff --git a/crates/pixi_build_frontend/src/protocols/error.rs b/crates/pixi_build_frontend/src/protocols/error.rs new file mode 100644 index 000000000..12fef091b --- /dev/null +++ b/crates/pixi_build_frontend/src/protocols/error.rs @@ -0,0 +1,97 @@ +use std::{ + error::Error, + fmt::{Display, Formatter}, +}; + +use jsonrpsee::types::ErrorObject; +use miette::{Diagnostic, Severity}; +use serde::{Deserialize, Deserializer}; + +#[derive(Debug)] +pub struct BackendError { + message: String, + source: Option<Box<BackendErrorCause>>, + severity: Severity, +} + +#[derive(Debug, Diagnostic)] +pub struct BackendErrorCause { + message: String, + cause: Option<Box<BackendErrorCause>>, +} + +impl Display for BackendErrorCause { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", &self.message) + } +} + +impl Error for BackendErrorCause { + fn source(&self) -> Option<&(dyn Error + 'static)> { + self.cause + .as_ref() + .map(|e| e.as_ref() as &(dyn Error + 'static)) + } +} + +impl<'e> From<ErrorObject<'e>> for BackendError { + fn from(value: ErrorObject<'e>) -> Self { + // Try to parse the error contained in the data field. + let error: RawErrorValue = value + .data() + .and_then(|value| serde_json::from_str(value.get()).ok()) + .unwrap_or_default(); + + let source = error.causes.0.into_iter().fold(None, |previous, cause| { + Some(Box::new(BackendErrorCause { + message: cause, + cause: previous, + })) + }); + + Self { + message: value.message().to_owned(), + source, + severity: error.severity.0, + } + } +} + +impl Display for BackendError { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", &self.message) + } +} + +impl Error for BackendError { + fn source(&self) -> Option<&(dyn Error + 'static)> { + self.source.as_ref().map(|e| e.as_ref() as &dyn Error) + } +} + +#[derive(Debug, Default, Deserialize)] +struct RawErrorValue { + severity: OrDefault<Severity>, + causes: OrDefault<Vec<String>>, +} + +#[derive(Debug, Default)] +struct OrDefault<T>(T); + +impl<'de, T: Default + Deserialize<'de>> Deserialize<'de> for OrDefault<T> { + fn deserialize<D: Deserializer<'de>>(d: D) -> Result<Self, D::Error> { + T::deserialize(d) + .or_else(|_| Ok(T::default())) + .map(OrDefault) + } +} + +impl Diagnostic for BackendError { + fn severity(&self) -> Option<Severity> { + Some(self.severity) + } + + fn diagnostic_source(&self) -> Option<&dyn Diagnostic> { + self.source.as_ref().map(|e| e.as_ref() as &dyn Diagnostic) + } +} diff --git a/crates/pixi_build_frontend/src/protocols/mod.rs b/crates/pixi_build_frontend/src/protocols/mod.rs new file mode 100644 index 000000000..8f71ec44d --- /dev/null +++ b/crates/pixi_build_frontend/src/protocols/mod.rs @@ -0,0 +1,399 @@ +//! Implementations of the [`crate::Protocol`] type for various backends.
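+//! +//! Rough shape of the flow (a hedged sketch, not normative): a builder discovers the +//! manifest, [`JsonRPCBuildProtocol::setup`] spawns the backend tool and performs the +//! `procedures::initialize::METHOD_NAME` handshake over stdin/stdout, after which +//! `procedures::conda_metadata::METHOD_NAME` and `procedures::conda_build::METHOD_NAME` +//! requests are exchanged over the same JSON-RPC connection.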
+ +use std::{path::PathBuf, sync::Arc}; + +use error::BackendError; +use futures::TryFutureExt; +use jsonrpsee::{ + async_client::{Client, ClientBuilder}, + core::{ + client::{ClientT, Error, TransportReceiverT, TransportSenderT}, + ClientError, + }, + types::ErrorCode, +}; + +use miette::Diagnostic; +use pixi_build_types::{ + procedures::{ + self, + conda_build::{CondaBuildParams, CondaBuildResult}, + conda_metadata::{CondaMetadataParams, CondaMetadataResult}, + initialize::{InitializeParams, InitializeResult}, + }, + BackendCapabilities, FrontendCapabilities, +}; +use stderr::{stderr_null, stderr_stream}; +use thiserror::Error; +use tokio::{ + io::{AsyncBufReadExt, BufReader, Lines}, + process::ChildStderr, + sync::{oneshot, Mutex}, +}; + +use crate::{ + jsonrpc::{stdio_transport, Receiver, RpcParams, Sender}, + tool::Tool, + CondaBuildReporter, CondaMetadataReporter, +}; + +pub mod builders; +mod error; +pub(super) mod stderr; + +#[derive(Debug, Error, Diagnostic)] +pub enum InitializeError { + #[error("failed to setup communication with the build backend, an unexpected io error occurred while communicating with the pixi build backend")] + #[diagnostic(help("Ensure that the project manifest contains a valid [build] section."))] + Io(#[from] std::io::Error), + #[error(transparent)] + #[diagnostic(transparent)] + Protocol(#[from] ProtocolError), +} + +#[derive(Debug, Error, Diagnostic)] +pub enum ProtocolError { + #[error("failed to communicate with the build backend ({0})")] + #[diagnostic(help( + "Ensure that the build backend implements the JSON-RPC protocol correctly." + ))] + JsonRpc(String, #[source] ClientError), + #[error("received invalid response from the build backend ({0}) when calling '{1}'")] + ParseError(String, String, #[source] serde_json::Error), + #[error(transparent)] + #[diagnostic( + transparent, + help("This error originates from the build backend specified in the project manifest.") + )] + BackendError( + #[from] + #[diagnostic_source] + BackendError, + ), + #[error("the build backend ({0}) does not implement the method '{1}'")] + #[diagnostic(help( + "This is often caused by the build backend incorrectly reporting certain capabilities. Consider contacting the build backend maintainers for a fix." + ))] + MethodNotImplemented(String, String), + + #[error("pipe of stderr stopped earlier than expected")] + StdErrPipeStopped, +} + +impl ProtocolError { + pub fn from_client_error(backend_identifier: String, err: ClientError, method: &str) -> Self { + match err { + Error::Call(err) if err.code() > -32001 => Self::BackendError(BackendError::from(err)), + Error::Call(err) if err.code() == ErrorCode::MethodNotFound.code() => { + Self::MethodNotImplemented(backend_identifier, method.to_string()) + } + Error::ParseError(err) => Self::ParseError(backend_identifier, method.to_string(), err), + e => Self::JsonRpc(backend_identifier, e), + } + } +} + +/// A protocol type that is responsible for setting up and communicating with the backend. +/// It allows us to hide the JSON-RPC communication behind this type. +/// The protocol is generic over the manifest that is passed to the build backend. +/// This means that, for rattler-build, the manifest is a recipe.yaml file, +/// and for pixi it's a pixi.toml or a pyproject.toml file. +#[derive(Debug)] +pub struct JsonRPCBuildProtocol { + backend_identifier: String, + + client: Client, + + build_id: usize, + + /// The directory that contains the source files.
+ source_dir: PathBuf, + + /// The path to the manifest (`recipe.yaml` or `pixi.toml`) within the source directory. + manifest_path: PathBuf, + + _backend_capabilities: BackendCapabilities, + + stderr: Option<Arc<Mutex<Lines<BufReader<ChildStderr>>>>>, +} + +impl JsonRPCBuildProtocol { + /// Create a new instance of the protocol. + #[allow(clippy::too_many_arguments)] + fn new( + client: Client, + backend_identifier: String, + source_dir: PathBuf, + manifest_path: PathBuf, + backend_capabilities: BackendCapabilities, + build_id: usize, + stderr: Option<Arc<Mutex<Lines<BufReader<ChildStderr>>>>>, + ) -> Self { + Self { + client, + backend_identifier, + source_dir, + manifest_path, + _backend_capabilities: backend_capabilities, + build_id, + stderr, + } + } + + /// Setup a new protocol instance. + /// This will spawn a new backend process and establish a JSON-RPC connection. + async fn setup( + source_dir: PathBuf, + manifest_path: PathBuf, + build_id: usize, + cache_dir: Option<PathBuf>, + tool: Tool, + ) -> Result<Self, InitializeError> { + match tool.try_into_executable() { + Ok(tool) => { + // Spawn the tool and capture stdin/stdout. + let mut process = tokio::process::Command::from(tool.command()) + .stdout(std::process::Stdio::piped()) + .stdin(std::process::Stdio::piped()) + .stderr(std::process::Stdio::piped()) + .spawn()?; + + let backend_identifier = tool.executable().clone(); + + // Acquire the stdin/stdout handles. + let stdin = process + .stdin + .take() + .expect("since we piped stdin we expect a valid value here"); + let stdout = process + .stdout + .expect("since we piped stdout we expect a valid value here"); + let stderr = process + .stderr + .map(|stderr| BufReader::new(stderr).lines()) + .expect("since we piped stderr we expect a valid value here"); + + // Construct a JSON-RPC client to communicate with the backend process. + let (tx, rx) = stdio_transport(stdin, stdout); + Self::setup_with_transport( + backend_identifier, + source_dir, + manifest_path, + build_id, + cache_dir, + tx, + rx, + Some(stderr), + ) + .await + } + Err(ipc) => { + Self::setup_with_transport( + "".to_string(), + source_dir, + manifest_path, + build_id, + cache_dir, + Sender::from(ipc.rpc_out), + Receiver::from(ipc.rpc_in), + None, + ) + .await + } + } + } + + /// Setup a new protocol instance with a given transport. + #[allow(clippy::too_many_arguments)] + async fn setup_with_transport( + backend_identifier: String, + source_dir: PathBuf, + // In case of rattler-build it's recipe.yaml + manifest_path: PathBuf, + build_id: usize, + cache_dir: Option<PathBuf>, + sender: impl TransportSenderT + Send, + receiver: impl TransportReceiverT + Send, + stderr: Option<Lines<BufReader<ChildStderr>>>, + ) -> Result<Self, InitializeError> { + let client: Client = ClientBuilder::default() + // Set a 24-hour request timeout because the backend may be long-running. + .request_timeout(std::time::Duration::from_secs(86400)) + .build_with_tokio(sender, receiver); + + // Invoke the initialize method on the backend to establish the connection. + let result: InitializeResult = client + .request( + procedures::initialize::METHOD_NAME, + RpcParams::from(InitializeParams { + manifest_path: manifest_path.clone(), + capabilities: FrontendCapabilities {}, + cache_directory: cache_dir, + }), + ) + .await + .map_err(|err| { + ProtocolError::from_client_error( + backend_identifier.clone(), + err, + procedures::initialize::METHOD_NAME, + ) + })?; + + Ok(JsonRPCBuildProtocol::new( + client, + backend_identifier, + source_dir, + manifest_path, + result.capabilities, + build_id, + stderr.map(Mutex::new).map(Arc::new), + )) + } + + /// Extracts metadata from the recipe.
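+ /// + /// A hedged usage sketch (`protocol` and `params` are placeholders): + /// ```ignore + /// let reporter = NoopCondaMetadataReporter::new(); + /// let result = protocol.get_conda_metadata(&params, &*reporter).await?; + /// for pkg in result.packages { /* inspect each CondaPackageMetadata */ } + /// ```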
+ pub async fn get_conda_metadata( + &self, + request: &CondaMetadataParams, + reporter: &dyn CondaMetadataReporter, + ) -> Result<CondaMetadataResult, ProtocolError> { + // Capture all of stderr and discard it + let stderr = self.stderr.as_ref().map(|stderr| { + // Cancellation signal + let (cancel_tx, cancel_rx) = oneshot::channel(); + // Spawn the stderr forwarding task + let handle = tokio::spawn(stderr_null(stderr.clone(), cancel_rx)); + (cancel_tx, handle) + }); + + // Start the metadata operation + let operation = reporter.on_metadata_start(self.build_id); + + let result = self + .client + .request( + procedures::conda_metadata::METHOD_NAME, + RpcParams::from(request), + ) + .await + .map_err(|err| { + ProtocolError::from_client_error( + self.backend_identifier.clone(), + err, + procedures::conda_metadata::METHOD_NAME, + ) + }); + + // Wait for the stderr sink to finish, by signaling it to stop + if let Some((cancel_tx, handle)) = stderr { + // Cancel the stderr forwarding + if cancel_tx.send(()).is_err() { + return Err(ProtocolError::StdErrPipeStopped); + } + handle.await.map_or_else( + |e| match e.try_into_panic() { + Ok(panic) => std::panic::resume_unwind(panic), + Err(_) => Err(ProtocolError::StdErrPipeStopped), + }, + |e| e.map_err(|_| ProtocolError::StdErrPipeStopped), + )?; + } + + reporter.on_metadata_end(operation); + result + } + + /// Build a specific conda package output + pub async fn conda_build( + &self, + request: &CondaBuildParams, + reporter: &dyn CondaBuildReporter, + ) -> Result<CondaBuildResult, ProtocolError> { + // Captures stderr output + let stderr = self.stderr.as_ref().map(|stderr| { + let (sender, receiver) = tokio::sync::mpsc::channel(100); + let (cancel_tx, cancel_rx) = oneshot::channel(); + let handle = tokio::spawn(stderr_stream(stderr.clone(), sender, cancel_rx)); + (cancel_tx, receiver, handle) + }); + + let operation = reporter.on_build_start(self.build_id); + let request = self + .client + .request( + procedures::conda_build::METHOD_NAME, + RpcParams::from(request), + ) + .map_err(|err| { + ProtocolError::from_client_error( + self.backend_identifier.clone(), + err, + procedures::conda_build::METHOD_NAME, + ) + }); + + // There can be two cases, the stderr is captured or is not captured + // In the case of capturing we need to select between the request and the stderr + // forwarding to drive these two futures concurrently + // + // In the other case we can just wait for the request to finish + let result = if let Some((cancel_tx, receiver, handle)) = stderr { + // This is the case where we capture stderr + + // Create a future that will forward stderr to the reporter + let send_stderr = async { + let mut receiver = receiver; + while let Some(line) = receiver.recv().await { + reporter.on_build_output(operation, line); + } + }; + + // Select between the request and the stderr forwarding + let result = tokio::select!
{ + result = request => result, + _ = send_stderr => { + Err(ProtocolError::StdErrPipeStopped) + } + }; + + // Cancel the stderr forwarding + if cancel_tx.send(()).is_err() { + return Err(ProtocolError::StdErrPipeStopped); + } + + // Wait for the stderr forwarding to finish; it should, because we cancelled it + handle.await.map_or_else( + |e| match e.try_into_panic() { + Ok(panic) => std::panic::resume_unwind(panic), + Err(_) => Err(ProtocolError::StdErrPipeStopped), + }, + |e| e.map_err(|_| ProtocolError::StdErrPipeStopped), + )?; + + // Return the result + result + } else { + // This is the case where we don't capture stderr + request.await + }; + + // Build has completed + reporter.on_build_end(operation); + result + } + + pub fn backend_identifier(&self) -> &str { + &self.backend_identifier + } + + pub fn manifests(&self) -> Vec<String> { + self.manifest_path + .strip_prefix(&self.source_dir) + .unwrap_or(&self.manifest_path) + .to_path_buf() + .to_str() + .into_iter() + .map(ToString::to_string) + .collect() + } +} diff --git a/crates/pixi_build_frontend/src/protocols/stderr.rs b/crates/pixi_build_frontend/src/protocols/stderr.rs new file mode 100644 index 000000000..a49fbc244 --- /dev/null +++ b/crates/pixi_build_frontend/src/protocols/stderr.rs @@ -0,0 +1,54 @@ +use std::sync::Arc; + +use tokio::{ + io::{BufReader, Lines}, + process::ChildStderr, + sync::{mpsc, oneshot, Mutex}, +}; + +/// Stderr sink that captures the stderr output of the backend +/// but does not do anything with it. +pub(crate) async fn stderr_null( + buffer: Arc<Mutex<Lines<BufReader<ChildStderr>>>>, + cancel: oneshot::Receiver<()>, +) -> Result<(), std::io::Error> { + tokio::select! { + // Please stop + _ = cancel => { + Ok(()) + } + // Please keep reading + result = async { + let mut lines = buffer.lock().await; + while let Some(_line) = lines.next_line().await? {} + Ok(()) + } => { + result + } + } +} + +/// Stderr stream that captures the stderr output of the backend +/// and sends it over the stream. +pub(crate) async fn stderr_stream( + buffer: Arc<Mutex<Lines<BufReader<ChildStderr>>>>, + sender: mpsc::Sender<String>, + cancel: oneshot::Receiver<()>, +) -> Result<(), std::io::Error> { + tokio::select! { + _ = cancel => { + Ok(()) + } + result = async { + let mut lines = buffer.lock().await; + while let Some(line) = lines.next_line().await? { + if let Err(err) = sender.send(line).await { + return Err(std::io::Error::new(std::io::ErrorKind::Other, err)); + } + } + Ok(()) + } => { + result + } + } +} diff --git a/crates/pixi_build_frontend/src/reporters.rs b/crates/pixi_build_frontend/src/reporters.rs new file mode 100644 index 000000000..ec96a15de --- /dev/null +++ b/crates/pixi_build_frontend/src/reporters.rs @@ -0,0 +1,60 @@ +use std::sync::Arc; + +/// Reporter trait for reporting the progress of metadata operations. +pub trait CondaMetadataReporter: Send + Sync { + /// Reports the start of the get_conda_metadata operation. + /// Returns a unique identifier for the operation. + fn on_metadata_start(&self, build_id: usize) -> usize; + + /// Reports the end of the get_conda_metadata operation. + fn on_metadata_end(&self, operation: usize); +} + +/// A no-op implementation of the CondaMetadataReporter trait.
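+/// +/// The `usize` returned by `on_metadata_start` is an operation token that is later +/// passed back to `on_metadata_end`; this no-op implementation always returns `0` and +/// ignores both callbacks.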
+#[derive(Clone)] +pub struct NoopCondaMetadataReporter; +impl CondaMetadataReporter for NoopCondaMetadataReporter { + fn on_metadata_start(&self, _build_id: usize) -> usize { + 0 + } + fn on_metadata_end(&self, _operation: usize) {} +} + +impl NoopCondaMetadataReporter { + pub fn new() -> Arc<Self> { + Arc::new(Self {}) + } +} + +/// Reporter trait for reporting the progress of build operations. +pub trait CondaBuildReporter: Send + Sync { + /// Reports the start of the build_conda operation. + /// Returns a unique identifier for the operation. + fn on_build_start(&self, build_id: usize) -> usize; + + /// Reports the end of the build_conda operation. + fn on_build_end(&self, operation: usize); + + /// Reports output from the build process. + fn on_build_output(&self, operation: usize, line: String); +} + +/// A no-op implementation of the CondaBuildReporter trait. +#[derive(Clone)] +pub struct NoopCondaBuildReporter; +impl CondaBuildReporter for NoopCondaBuildReporter { + fn on_build_start(&self, _build_id: usize) -> usize { + 0 + } + fn on_build_end(&self, _operation: usize) {} + + fn on_build_output(&self, _operation: usize, _line: String) { + todo!() + } +} + +impl NoopCondaBuildReporter { + pub fn new() -> Arc<Self> { + Arc::new(Self {}) + } +} diff --git a/crates/pixi_build_frontend/src/tool/cache.rs b/crates/pixi_build_frontend/src/tool/cache.rs new file mode 100644 index 000000000..713671bd3 --- /dev/null +++ b/crates/pixi_build_frontend/src/tool/cache.rs @@ -0,0 +1,256 @@ +use std::{fmt::Debug, hash::Hash, path::PathBuf}; + +use dashmap::{DashMap, Entry}; +use miette::miette; +use pixi_consts::consts::CONDA_REPODATA_CACHE_DIR; +use rattler_conda_types::Channel; +use rattler_repodata_gateway::Gateway; +use reqwest_middleware::ClientWithMiddleware; + +use super::IsolatedTool; +use crate::{ + tool::{SystemTool, Tool, ToolSpec}, + IsolatedToolSpec, SystemToolSpec, +}; + +pub struct ToolContextBuilder { + gateway: Option<Gateway>, + client: ClientWithMiddleware, + channels: Vec<Channel>, + cache_dir: PathBuf, +} + +impl ToolContextBuilder { + /// Create a new tool context builder. + pub fn new(channels: Vec<Channel>) -> Self { + Self { + channels, + gateway: None, + client: ClientWithMiddleware::default(), + cache_dir: pixi_config::get_cache_dir().expect("we should have a cache dir"), + } + } + + /// Set the gateway for the tool context. + pub fn with_gateway(mut self, gateway: Gateway) -> Self { + self.gateway = Some(gateway); + self + } + + /// Set the client for the tool context. + pub fn with_client(mut self, client: ClientWithMiddleware) -> Self { + self.client = client; + self + } + + /// Set the cache directory for the tool context. + pub fn with_cache_dir(mut self, cache_dir: PathBuf) -> Self { + self.cache_dir = cache_dir; + self + } + + /// Build the `ToolContext` using builder configuration. + pub fn build(self) -> ToolContext { + let gateway = self.gateway.unwrap_or_else(|| { + Gateway::builder() + .with_client(self.client.clone()) + .with_cache_dir(self.cache_dir.join(CONDA_REPODATA_CACHE_DIR)) + .finish() + }); + + ToolContext { + channels: self.channels, + cache_dir: self.cache_dir, + client: self.client, + gateway, + } + } +} + +/// The tool context, +/// containing client, channels and gateway configuration +/// that will be used to resolve and install tools. +#[derive(Default, Clone)] +pub struct ToolContext { + /// Authentication client to use for fetching repodata. + pub client: ClientWithMiddleware, + /// The channels to use for resolving tools.
+ pub channels: Vec<Channel>, + /// The cache directory to use for the tools. + pub cache_dir: PathBuf, + /// The gateway to use for fetching repodata. + pub gateway: Gateway, +} + +impl Debug for ToolContext { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("ToolContext") + .field("client", &self.client) + .field("channels", &self.channels) + .field("cache_dir", &self.cache_dir) + .finish() + } +} + +impl ToolContext { + /// Create a new tool context builder with the given channels. + pub fn builder(channels: Vec<Channel>) -> ToolContextBuilder { + ToolContextBuilder::new(channels) + } +} + +/// A [`ToolCache`] maintains a cache of environments for build tools. +/// +/// This is useful to ensure that if we need to build multiple packages that use +/// the same tool, we can reuse their environments. +/// It can also be seen as a way to create the tools themselves. +#[derive(Default, Debug)] +pub struct ToolCache { + /// The cache of tools. + pub cache: DashMap<CacheableToolSpec, CachedTool>, + /// The context for the tools. + /// It contains the necessary details + /// for the tools to be resolved and installed. + pub context: ToolContext, +} + +#[derive(thiserror::Error, Debug)] +pub enum ToolCacheError { + #[error("could not resolve '{}', {1}", .0.display())] + Instantiate(PathBuf, which::Error), + #[error("could not install isolated tool '{}'", .0.as_display())] + Install(miette::Report), + #[error("could not determine default cache dir '{}'", .0.as_display())] + CacheDir(miette::Report), +} + +/// Describes the specification of the tool. This can be used to cache tool +/// information. +#[derive(Debug, Clone, Hash, Eq, PartialEq)] +pub enum CacheableToolSpec { + Isolated(IsolatedToolSpec), + System(SystemToolSpec), +} + +/// A tool that can be invoked. +#[derive(Debug, Clone)] +pub enum CachedTool { + Isolated(IsolatedTool), + System(SystemTool), +} + +impl From<CachedTool> for Tool { + fn from(value: CachedTool) -> Self { + match value { + CachedTool::Isolated(tool) => Tool::Isolated(tool), + CachedTool::System(tool) => Tool::System(tool), + } + } +} + +impl From<IsolatedTool> for CachedTool { + fn from(value: IsolatedTool) -> Self { + Self::Isolated(value) + } +} + +impl From<SystemTool> for CachedTool { + fn from(value: SystemTool) -> Self { + Self::System(value) + } +} + +impl ToolCache { + /// Construct a new tool cache. + pub fn new() -> Self { + Self { + cache: DashMap::default(), + context: ToolContext::default(), + } + } + + /// Instantiate a tool from a specification. + /// + /// If the tool is not already cached, it will be created, installed and cached. + pub async fn instantiate(&self, spec: ToolSpec) -> Result<Tool, ToolCacheError> { + let spec = match spec { + ToolSpec::Io(ipc) => return Ok(Tool::Io(ipc)), + ToolSpec::Isolated(isolated) => CacheableToolSpec::Isolated(isolated), + ToolSpec::System(system) => CacheableToolSpec::System(system), + }; + + let cache_entry = match self.cache.entry(spec.clone()) { + Entry::Occupied(entry) => return Ok(entry.get().clone().into()), + Entry::Vacant(entry) => entry, + }; + + let tool: CachedTool = match spec { + CacheableToolSpec::Isolated(spec) => CachedTool::Isolated(if spec.specs.is_empty() { + return Err(ToolCacheError::Install(miette!( + "No build match specs provided for '{}' command.", + spec.command + ))); + } else { + spec.install(self.context.clone()) + .await + .map_err(ToolCacheError::Install)?
+ }), + CacheableToolSpec::System(spec) => SystemTool::new(spec.command).into(), + }; + + cache_entry.insert(tool.clone()); + Ok(tool.into()) + } +} + +#[cfg(test)] +mod tests { + use std::path::PathBuf; + + use pixi_config::Config; + use rattler_conda_types::{ChannelConfig, MatchSpec, NamedChannelOrUrl, ParseStrictness}; + use reqwest_middleware::ClientWithMiddleware; + + use super::ToolCache; + use crate::{ + tool::{ToolContext, ToolSpec}, + IsolatedToolSpec, + }; + + #[tokio::test] + async fn test_tool_cache() { + let mut cache = ToolCache::new(); + let mut config = Config::default(); + config.default_channels = vec![NamedChannelOrUrl::Name("conda-forge".to_string())]; + + let auth_client = ClientWithMiddleware::default(); + let channel_config = ChannelConfig::default_with_root_dir(PathBuf::new()); + + let channels = config + .default_channels + .iter() + .cloned() + .map(|c| c.into_channel(&channel_config).unwrap()) + .collect(); + + let tool_context = ToolContext::builder(channels) + .with_client(auth_client.clone()) + .build(); + + cache.context = tool_context; + + let tool_spec = IsolatedToolSpec { + specs: vec![MatchSpec::from_str("cowpy", ParseStrictness::Strict).unwrap()], + command: "cowpy".into(), + }; + + let tool = cache + .instantiate(ToolSpec::Isolated(tool_spec)) + .await + .unwrap(); + + let exec = tool.as_executable().unwrap(); + + exec.command().arg("hello").spawn().unwrap(); + } +} diff --git a/crates/pixi_build_frontend/src/tool/mod.rs b/crates/pixi_build_frontend/src/tool/mod.rs new file mode 100644 index 000000000..416274242 --- /dev/null +++ b/crates/pixi_build_frontend/src/tool/mod.rs @@ -0,0 +1,129 @@ +mod cache; +mod spec; + +use std::{collections::HashMap, path::PathBuf}; + +pub use cache::{ToolCache, ToolCacheError, ToolContext}; +pub use spec::{IsolatedToolSpec, SystemToolSpec, ToolSpec}; + +use crate::InProcessBackend; + +/// A tool that can be invoked. +#[derive(Debug)] +pub enum Tool { + Isolated(IsolatedTool), + System(SystemTool), + Io(InProcessBackend), +} + +#[derive(Debug)] +pub enum ExecutableTool { + Isolated(IsolatedTool), + System(SystemTool), +} + +/// A tool that is pre-installed on the system. +#[derive(Debug, Clone)] +pub struct SystemTool { + command: String, +} + +impl SystemTool { + /// Construct a new instance from a command. + pub(crate) fn new(command: impl Into<String>) -> Self { + Self { + command: command.into(), + } + } +} + +impl From<SystemTool> for Tool { + fn from(value: SystemTool) -> Self { + Self::System(value) + } +} + +/// A tool that is installed in its own isolated environment. +#[derive(Debug, Clone)] +pub struct IsolatedTool { + /// The command to invoke. + command: String, + /// The prefix to use for the isolated environment. + prefix: PathBuf, + /// Activation scripts + activation_scripts: HashMap<String, String>, +} + +impl IsolatedTool { + /// Construct a new instance from a command and prefix.
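+ /// + /// Hedged sketch (the prefix path and activation variables are made up): + /// ```ignore + /// let tool = IsolatedTool::new( + /// "conda-build", + /// "/cache/build-envs/conda-build", + /// HashMap::from([("PATH".to_string(), "/cache/build-envs/conda-build/bin".to_string())]), + /// ); + /// ```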
+ pub(crate) fn new( + command: impl Into<String>, + prefix: impl Into<PathBuf>, + activation: HashMap<String, String>, + ) -> Self { + Self { + command: command.into(), + prefix: prefix.into(), + activation_scripts: activation, + } + } +} + +impl From<IsolatedTool> for Tool { + fn from(value: IsolatedTool) -> Self { + Self::Isolated(value) + } +} + +impl Tool { + pub fn as_executable(&self) -> Option<ExecutableTool> { + match self { + Tool::Isolated(tool) => Some(ExecutableTool::Isolated(tool.clone())), + Tool::System(tool) => Some(ExecutableTool::System(tool.clone())), + Tool::Io(_) => None, + } + } + + pub fn try_into_executable(self) -> Result<ExecutableTool, InProcessBackend> { + match self { + Tool::Isolated(tool) => Ok(ExecutableTool::Isolated(tool)), + Tool::System(tool) => Ok(ExecutableTool::System(tool)), + Tool::Io(ipc) => Err(ipc), + } + } +} + +impl ExecutableTool { + /// Returns the full path to the executable to invoke. + pub fn executable(&self) -> &String { + match self { + ExecutableTool::Isolated(tool) => &tool.command, + ExecutableTool::System(tool) => &tool.command, + } + } + + /// Construct a new tool that calls another executable. + pub fn with_executable(&self, executable: impl Into<String>) -> Self { + match self { + ExecutableTool::Isolated(tool) => ExecutableTool::Isolated(IsolatedTool::new( + executable, + tool.prefix.clone(), + tool.activation_scripts.clone(), + )), + ExecutableTool::System(_) => ExecutableTool::System(SystemTool::new(executable)), + } + } + + /// Construct a new command that enables invocation of the tool. + pub fn command(&self) -> std::process::Command { + match self { + ExecutableTool::Isolated(tool) => { + let mut cmd = std::process::Command::new(&tool.command); + cmd.envs(tool.activation_scripts.clone()); + + cmd + } + ExecutableTool::System(tool) => std::process::Command::new(&tool.command), + } + } +} diff --git a/crates/pixi_build_frontend/src/tool/spec.rs b/crates/pixi_build_frontend/src/tool/spec.rs new file mode 100644 index 000000000..a4a6fbf3b --- /dev/null +++ b/crates/pixi_build_frontend/src/tool/spec.rs @@ -0,0 +1,165 @@ +use miette::IntoDiagnostic; +use pixi_consts::consts::CACHED_BUILD_ENVS_DIR; +use pixi_manifest::BuildSystem; +use pixi_utils::EnvironmentHash; +use rattler::{install::Installer, package_cache::PackageCache}; +use rattler_conda_types::{GenericVirtualPackage, MatchSpec, Platform}; +use rattler_shell::{ + activation::{ActivationVariables, Activator}, + shell::ShellEnum, +}; +use rattler_solve::{resolvo::Solver, SolverImpl, SolverTask}; +use rattler_virtual_packages::{VirtualPackage, VirtualPackageOverrides}; + +use crate::{BackendOverride, InProcessBackend}; + +use super::{IsolatedTool, ToolContext}; + +/// Describes the specification of the tool. This can be used to cache tool +/// information. +#[derive(Debug)] +pub enum ToolSpec { + Isolated(IsolatedToolSpec), + System(SystemToolSpec), + Io(InProcessBackend), +} + +/// A build tool that can be installed through a conda package. +#[derive(Debug, Clone, Hash, Eq, PartialEq)] +pub struct IsolatedToolSpec { + /// The specs used to instantiate the isolated build environment. + pub specs: Vec<MatchSpec>, + + /// The command to invoke in the isolated environment. + pub command: String, +} + +impl IsolatedToolSpec { + /// Construct a new instance from a list of match specs.
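+ /// + /// E.g. (hedged; the spec string is illustrative): + /// ```ignore + /// let spec = IsolatedToolSpec::from_specs([MatchSpec::from_str("conda-build", Strict)?]) + /// .with_command("conda-build"); + /// ```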
+ pub fn from_specs(specs: impl IntoIterator<Item = MatchSpec>) -> Self { + Self { + specs: specs.into_iter().collect(), + command: String::new(), + } + } + + /// Construct a new instance from a build section + pub fn from_build_section(build_section: &BuildSystem) -> Self { + Self { + specs: build_section.dependencies.clone(), + command: build_section.build_backend.clone(), + } + } + + /// Explicitly set the command to invoke. + pub fn with_command(self, command: impl Into<String>) -> Self { + Self { + command: command.into(), + ..self + } + } + + /// Installs the tool in the isolated environment. + pub async fn install(&self, context: ToolContext) -> miette::Result<IsolatedTool> { + let repodata = context + .gateway + .query( + context.channels.clone(), + [Platform::current(), Platform::NoArch], + self.specs.clone(), + ) + .recursive(true) + .execute() + .await + .into_diagnostic()?; + + // Determine virtual packages of the current platform + let virtual_packages = VirtualPackage::detect(&VirtualPackageOverrides::from_env()) + .unwrap() + .iter() + .cloned() + .map(GenericVirtualPackage::from) + .collect(); + + let solved_records = Solver + .solve(SolverTask { + specs: self.specs.clone(), + virtual_packages, + ..SolverTask::from_iter(&repodata) + }) + .into_diagnostic()?; + + let cache = EnvironmentHash::new( + self.command.clone(), + self.specs.clone(), + context + .channels + .iter() + .map(|c| c.base_url.to_string()) + .collect(), + ); + + let cached_dir = context + .cache_dir + .join(CACHED_BUILD_ENVS_DIR) + .join(cache.name()); + + // Install the environment + Installer::new() + .with_download_client(context.client.clone()) + .with_package_cache(PackageCache::new( + context + .cache_dir + .join(pixi_consts::consts::CONDA_PACKAGE_CACHE_DIR), + )) + .install(&cached_dir, solved_records) + .await + .into_diagnostic()?; + + // Get the activation scripts + let activator = + Activator::from_path(&cached_dir, ShellEnum::default(), Platform::current()) + .into_diagnostic()?; + + let activation_scripts = activator + .run_activation(ActivationVariables::from_env().unwrap_or_default(), None) + .into_diagnostic()?; + + Ok(IsolatedTool::new( + self.command.clone(), + cached_dir, + activation_scripts, + )) + } +} + +impl From<IsolatedToolSpec> for ToolSpec { + fn from(value: IsolatedToolSpec) -> Self { + Self::Isolated(value) + } +} + +/// A build tool that is installed on the system. +#[derive(Debug, Clone, Hash, Eq, PartialEq)] +pub struct SystemToolSpec { + /// The command to invoke.
+ pub command: String, +} + +impl From<SystemToolSpec> for ToolSpec { + fn from(value: SystemToolSpec) -> Self { + Self::System(value) + } +} + +impl BackendOverride { + pub fn into_spec(self) -> ToolSpec { + match self { + BackendOverride::Spec(spec) => { + ToolSpec::Isolated(IsolatedToolSpec::from_specs(vec![spec])) + } + BackendOverride::System(command) => ToolSpec::System(SystemToolSpec { command }), + BackendOverride::Io(process) => ToolSpec::Io(process), + } + } +} diff --git a/crates/pixi_build_frontend/test-data/conda-render/microarch-level.txt b/crates/pixi_build_frontend/test-data/conda-render/microarch-level.txt new file mode 100644 index 000000000..a2aaac58f --- /dev/null +++ b/crates/pixi_build_frontend/test-data/conda-render/microarch-level.txt @@ -0,0 +1,119 @@ +-------------- +Hash contents: +-------------- +{'__unix': '__unix', + 'channel_targets': 'conda-forge main', + 'family': 'ppc64le', + 'level': '8'} +---------- +meta.yaml: +---------- +package: + name: ppc64le-microarch-level + version: '8' +build: + noarch: generic + number: 2 + run_exports: + strong: + - _ppc64le-microarch-level >=8 + string: '2' +requirements: + build: [] + run: + - __unix +about: + description: 'Use the meta-package ppc64le-microarch-level in requirements/build + in conda + + recipes to set up the compiler flags and set up the virtual package + + requirements in the run requirements. + + + When building packages on CI, level=4 will not be guaranteed, so + + you can only use level<=3 to build. + + + The run_exports only has a lower bound and therefore a level=2 + + build can be installed on a level=3 user system. A tighter bound + + is not added because we want to be able to test both level=2 and + + level=3 on a CI machine with level=3. + + Therefore in order to prioritise the highest level, use the build + + number to prioritise the level. + + + Only supported on Linux and macOS. + + ' + home: https://github.com/conda-forge/microarch-level-feedstock + license: BSD-3-Clause + license_file: LICENSE.txt + summary: Meta package to build conda recipes with microarchitecture levels +extra: + copy_test_source_files: true + feedstock-name: microarch-level + final: true + parent_recipe: + name: microarch-level-split + path: F:\projects\microarch-level-feedstock\recipe + version: '8' + recipe-maintainers: + - isuruf + +-------------- +Hash contents: +-------------- +{'__archspec': '__archspec 1=x86_64', + 'channel_targets': 'conda-forge main', + 'family': 'x86_64', + 'microarchitecture': 'x86_64'} +---------- +meta.yaml: +---------- +package: + name: _x86_64-microarch-level + version: '1' +build: + noarch: generic + number: 2 + string: 2_x86_64 +requirements: + build: [] + run: + - __archspec 1=x86_64 +about: + description: 'The meta-package _x86_64-microarch-level enforces the microarchitecture + in the + + user system. + + + Note that a user would need the archspec conda package installed + + in the base environment where conda/mamba is run from.
+ + + See x86_64-microarch-level for using this in conda recipes + + ' + home: https://github.com/conda-forge/microarch-level-feedstock + license: BSD-3-Clause + license_file: LICENSE.txt + summary: Meta package to build conda recipes with microarchitecture levels +extra: + copy_test_source_files: true + feedstock-name: microarch-level + final: true + parent_recipe: + name: microarch-level-split + path: F:\projects\microarch-level-feedstock\recipe + version: '1' + recipe-maintainers: + - isuruf diff --git a/crates/pixi_build_frontend/test-data/conda-render/pinject.txt b/crates/pixi_build_frontend/test-data/conda-render/pinject.txt new file mode 100644 index 000000000..81970dc0f --- /dev/null +++ b/crates/pixi_build_frontend/test-data/conda-render/pinject.txt @@ -0,0 +1,59 @@ +-------------- +Hash contents: +-------------- +{} +---------- +meta.yaml: +---------- +package: + name: pinject + version: 0.14.1 +source: + sha256: 0f0a0b14f9df87a85b529a21cdaf530269b1f24fb303d418583a12bb53f69382 + url: https://pypi.io/packages/source/p/pinject/pinject-0.14.1.tar.gz +build: + noarch: python + number: '0' + script: C:\\Users\\zalms\\conda-bld\\pinject_1723465624118\\_h_env\\python.exe -m + pip install . -vv +requirements: + host: + - ca-certificates 2024.7.4 h56e8100_0 + - libexpat 2.6.2 h63175ca_0 + - tzdata 2024a h0c530f3_0 + - ucrt 10.0.22621.0 h57928b3_0 + - vc14_runtime 14.40.33810 ha82c5b3_20 + - vc 14.3 h8a93ad2_20 + - vs2015_runtime 14.40.33810 h3bf8584_20 + - bzip2 1.0.8 h2466b09_7 + - libffi 3.4.2 h8ffe710_5 + - libsqlite 3.46.0 h2466b09_0 + - libzlib 1.3.1 h2466b09_1 + - openssl 3.3.1 h2466b09_2 + - tk 8.6.13 h5226925_1 + - xz 5.2.6 h8d14728_0 + - python 3.12.5 h889d299_0_cpython + - setuptools 72.1.0 pyhd8ed1ab_0 + - wheel 0.44.0 pyhd8ed1ab_0 + - pip 24.2 pyhd8ed1ab_0 + run: + - python >=3.4 + - six >=1.7.3 + - decorator >=4.3.0 +test: + commands: + - pip check + imports: + - pinject + requires: + - pip +about: + home: https://github.com/google/pinject + license: Apache-2.0 + license_file: LICENSE + summary: A pythonic dependency injection library +extra: + copy_test_source_files: true + final: true + recipe-maintainers: + - baszalmstra diff --git a/crates/pixi_build_frontend/tests/basic/.gitattributes b/crates/pixi_build_frontend/tests/basic/.gitattributes new file mode 100644 index 000000000..07fe41c52 --- /dev/null +++ b/crates/pixi_build_frontend/tests/basic/.gitattributes @@ -0,0 +1,2 @@ +# GitHub syntax highlighting +pixi.lock linguist-language=YAML linguist-generated=true diff --git a/crates/pixi_build_frontend/tests/basic/.gitignore b/crates/pixi_build_frontend/tests/basic/.gitignore new file mode 100644 index 000000000..096b5eb54 --- /dev/null +++ b/crates/pixi_build_frontend/tests/basic/.gitignore @@ -0,0 +1,3 @@ +# pixi environments +.pixi +*.egg-info diff --git a/crates/pixi_build_frontend/tests/basic/pixi.toml b/crates/pixi_build_frontend/tests/basic/pixi.toml new file mode 100644 index 000000000..de35f78cf --- /dev/null +++ b/crates/pixi_build_frontend/tests/basic/pixi.toml @@ -0,0 +1,18 @@ +[project] +authors = ["Tim de Jager "] +channels = ["conda-forge"] +description = "Add a short description here" +name = "basic" +platforms = ["osx-arm64"] +preview = ["pixi-build"] +version = "0.1.0" + +[tasks] + +[build-system] +build-backend = "pixi-build-python" +dependencies = [] + +[host-dependencies] +pip = "24.*" +python = "3.12.*" diff --git a/crates/pixi_build_frontend/tests/basic/pyproject.toml b/crates/pixi_build_frontend/tests/basic/pyproject.toml new file mode 100644 index 
000000000..b8c56ebda --- /dev/null +++ b/crates/pixi_build_frontend/tests/basic/pyproject.toml @@ -0,0 +1 @@ +# This is empty diff --git a/crates/pixi_build_frontend/tests/diagnostics.rs b/crates/pixi_build_frontend/tests/diagnostics.rs new file mode 100644 index 000000000..b5f4b05b2 --- /dev/null +++ b/crates/pixi_build_frontend/tests/diagnostics.rs @@ -0,0 +1,220 @@ +use std::path::Path; + +use miette::{Diagnostic, GraphicalReportHandler, GraphicalTheme}; +use pixi_build_frontend::{BuildFrontend, InProcessBackend, SetupRequest}; + +fn error_to_snapshot(diag: &impl Diagnostic) -> String { + let mut report_str = String::new(); + GraphicalReportHandler::new_themed(GraphicalTheme::unicode_nocolor()) + .without_syntax_highlighting() + .with_width(160) + .render_report(&mut report_str, diag) + .unwrap(); + report_str +} + +#[tokio::test] +async fn test_non_existing_discovery() { + let err = BuildFrontend::default() + .setup_protocol(SetupRequest { + source_dir: "non/existing/path".into(), + build_tool_override: Default::default(), + build_id: 0, + }) + .await + .unwrap_err(); + + insta::assert_snapshot!(error_to_snapshot(&err)); +} + +#[tokio::test] +async fn test_source_dir_is_file() { + let source_file = tempfile::NamedTempFile::new().unwrap(); + let err = BuildFrontend::default() + .setup_protocol(SetupRequest { + source_dir: source_file.path().to_path_buf(), + build_tool_override: Default::default(), + build_id: 0, + }) + .await + .unwrap_err(); + + let snapshot = error_to_snapshot(&err); + let snapshot = snapshot.replace(&source_file.path().display().to_string(), "[SOURCE_FILE]"); + insta::assert_snapshot!(snapshot); +} + +#[tokio::test] +async fn test_source_dir_is_empty() { + let source_dir = tempfile::TempDir::new().unwrap(); + let err = BuildFrontend::default() + .setup_protocol(SetupRequest { + source_dir: source_dir.path().to_path_buf(), + build_tool_override: Default::default(), + build_id: 0, + }) + .await + .unwrap_err(); + + let snapshot = error_to_snapshot(&err); + let snapshot = replace_source_dir(&snapshot, source_dir.path()); + insta::assert_snapshot!(snapshot); +} + +#[tokio::test] +async fn test_invalid_manifest() { + let source_dir = tempfile::TempDir::new().unwrap(); + let manifest = source_dir + .path() + .join(pixi_consts::consts::PROJECT_MANIFEST); + tokio::fs::write(&manifest, "[workspace]").await.unwrap(); + let err = BuildFrontend::default() + .setup_protocol(SetupRequest { + source_dir: source_dir.path().to_path_buf(), + build_tool_override: Default::default(), + build_id: 0, + }) + .await + .unwrap_err(); + + let snapshot = error_to_snapshot(&err); + let snapshot = replace_source_dir(&snapshot, source_dir.path()); + + insta::assert_snapshot!(snapshot); +} + +fn replace_source_dir(snapshot: &str, source_dir: &Path) -> String { + snapshot.replace( + &(source_dir.display().to_string() + std::path::MAIN_SEPARATOR_STR), + "[SOURCE_DIR]/", + ) +} + +#[tokio::test] +async fn test_missing_backend() { + // Setup a temporary project + let source_dir = tempfile::TempDir::new().unwrap(); + let manifest = source_dir + .path() + .join(pixi_consts::consts::PROJECT_MANIFEST); + tokio::fs::write( + &manifest, + r#" + [workspace] + platforms = [] + channels = [] + preview = ['pixi-build'] + + [package] + name = "project" + version = "0.1.0" + + [build-system] + dependencies = [] + build-backend = "non-existing" + channels = [] + "#, + ) + .await + .unwrap(); + + let err = BuildFrontend::default() + .setup_protocol(SetupRequest { + source_dir: source_dir.path().to_path_buf(), + 
build_tool_override: Default::default(), + build_id: 0, + }) + .await + .unwrap_err(); + + let snapshot = error_to_snapshot(&err); + let snapshot = replace_source_dir(&snapshot, source_dir.path()); + insta::assert_snapshot!(snapshot); +} + +#[tokio::test] +async fn test_not_a_package() { + // Setup a temporary project + let source_dir = tempfile::TempDir::new().unwrap(); + let manifest = source_dir + .path() + .join(pixi_consts::consts::PROJECT_MANIFEST); + tokio::fs::write( + &manifest, + r#" + [workspace] + name = "some-workspace" + platforms = [] + channels = [] + preview = ['pixi-build'] + "#, + ) + .await + .unwrap(); + + let err = BuildFrontend::default() + .setup_protocol(SetupRequest { + source_dir: source_dir.path().to_path_buf(), + build_tool_override: Default::default(), + build_id: 0, + }) + .await + .unwrap_err(); + + let snapshot = error_to_snapshot(&err); + let snapshot = replace_source_dir(&snapshot, source_dir.path()); + insta::assert_snapshot!(snapshot); +} + +#[tokio::test] +async fn test_invalid_backend() { + // Setup a temporary project + let source_dir = tempfile::TempDir::new().unwrap(); + let manifest = source_dir + .path() + .join(pixi_consts::consts::PROJECT_MANIFEST); + tokio::fs::write( + &manifest, + r#" + [workspace] + platforms = [] + channels = [] + preview = ['pixi-build'] + + [package] + version = "0.1.0" + name = "project" + + [build-system] + dependencies = [] + channels = [] + build-backend = "ipc" + "#, + ) + .await + .unwrap(); + + let (in_tx, in_rx) = tokio::io::duplex(1024); + let (out_tx, _out_rx) = tokio::io::duplex(1024); + let ipc = InProcessBackend { + rpc_in: Box::new(in_rx), + rpc_out: Box::new(out_tx), + }; + + // Explicitly drop the sending end of the channel to simulate a closed + // connection. + drop(in_tx); + + let err = BuildFrontend::default() + .setup_protocol(SetupRequest { + source_dir: source_dir.path().to_path_buf(), + build_tool_override: ipc.into(), + build_id: 0, + }) + .await + .unwrap_err(); + + let snapshot = error_to_snapshot(&err); + let snapshot = replace_source_dir(&snapshot, source_dir.path()); + insta::assert_snapshot!(snapshot); +} diff --git a/crates/pixi_build_frontend/tests/snapshots/diagnostics__invalid_backend.snap b/crates/pixi_build_frontend/tests/snapshots/diagnostics__invalid_backend.snap new file mode 100644 index 000000000..3451b4fa4 --- /dev/null +++ b/crates/pixi_build_frontend/tests/snapshots/diagnostics__invalid_backend.snap @@ -0,0 +1,7 @@ +--- +source: crates/pixi_build_frontend/tests/diagnostics.rs +expression: snapshot +--- + × failed to communicate with the build backend () + ╰─▶ The background task closed EOF; restart required + help: Ensure that the build backend implements the JSON-RPC protocol correctly. 
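The `test_invalid_backend` test above drops the backend's write half immediately, which is exactly what produces the "failed to communicate with the build backend" snapshot that follows. For contrast, here is a rough sketch (not part of this diff) of the same `InProcessBackend` wiring with the backend side kept alive; the spawned task body and the `setup_with_ipc` helper are hypothetical:

```rust
// Sketch only: wire an in-process backend the way `test_invalid_backend` does,
// but keep the backend halves alive so a real backend task could answer
// JSON-RPC requests instead of signalling EOF.
use pixi_build_frontend::{BuildFrontend, InProcessBackend, SetupRequest};

async fn setup_with_ipc(source_dir: std::path::PathBuf) {
    // Two duplex pipes, one per direction, mirroring the test above.
    let (frontend_tx, backend_rx) = tokio::io::duplex(1024);
    let (backend_tx, frontend_rx) = tokio::io::duplex(1024);

    // The frontend reads backend responses from `rpc_in` and writes its
    // requests to `rpc_out`.
    let ipc = InProcessBackend {
        rpc_in: Box::new(frontend_rx),
        rpc_out: Box::new(frontend_tx),
    };

    // A real backend task would read requests from `backend_rx` and write
    // JSON-RPC responses to `backend_tx`; dropping them instead reproduces
    // the diagnostic snapshotted above.
    tokio::spawn(async move {
        let _keep_alive = (backend_rx, backend_tx);
        // ... serve `initialize`, `conda/getMetadata`, `conda/build` here ...
    });

    let _protocol = BuildFrontend::default()
        .setup_protocol(SetupRequest {
            source_dir,
            build_tool_override: ipc.into(),
            build_id: 0,
        })
        .await
        .expect("backend failed to initialize");
}
```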
diff --git a/crates/pixi_build_frontend/tests/snapshots/diagnostics__invalid_manifest.snap b/crates/pixi_build_frontend/tests/snapshots/diagnostics__invalid_manifest.snap new file mode 100644 index 000000000..ec6db2360 --- /dev/null +++ b/crates/pixi_build_frontend/tests/snapshots/diagnostics__invalid_manifest.snap @@ -0,0 +1,12 @@ +--- +source: crates/pixi_build_frontend/tests/diagnostics.rs +expression: snapshot +--- + × failed to setup a build backend, the pixi.toml could not be parsed + ╰─▶ × missing field `channels` + ╭─[pixi.toml:1:1] + 1 │ [workspace] + · ─────────── + ╰──── + + help: Ensure that the manifest at '[SOURCE_DIR]/pixi.toml' is a valid pixi project manifest diff --git a/crates/pixi_build_frontend/tests/snapshots/diagnostics__missing_backend.snap b/crates/pixi_build_frontend/tests/snapshots/diagnostics__missing_backend.snap new file mode 100644 index 000000000..95cbce92f --- /dev/null +++ b/crates/pixi_build_frontend/tests/snapshots/diagnostics__missing_backend.snap @@ -0,0 +1,5 @@ +--- +source: crates/pixi_build_frontend/tests/diagnostics.rs +expression: snapshot +--- + × failed to setup a build backend, the backend tool could not be installed: No build match specs provided for 'non-existing' command. diff --git a/crates/pixi_build_frontend/tests/snapshots/diagnostics__non_existing_discovery.snap b/crates/pixi_build_frontend/tests/snapshots/diagnostics__non_existing_discovery.snap new file mode 100644 index 000000000..16ecccb43 --- /dev/null +++ b/crates/pixi_build_frontend/tests/snapshots/diagnostics__non_existing_discovery.snap @@ -0,0 +1,5 @@ +--- +source: crates/pixi_build_frontend/tests/diagnostics.rs +expression: error_to_snapshot(&err) +--- + × failed to discover a valid project manifest, the source path 'non/existing/path' could not be found diff --git a/crates/pixi_build_frontend/tests/snapshots/diagnostics__not_a_package.snap b/crates/pixi_build_frontend/tests/snapshots/diagnostics__not_a_package.snap new file mode 100644 index 000000000..6d9d6f659 --- /dev/null +++ b/crates/pixi_build_frontend/tests/snapshots/diagnostics__not_a_package.snap @@ -0,0 +1,6 @@ +--- +source: crates/pixi_build_frontend/tests/diagnostics.rs +expression: snapshot +--- + × the pixi.toml does not describe a package + help: A [package] section is missing in the manifest diff --git a/crates/pixi_build_frontend/tests/snapshots/diagnostics__source_dir_is_empty.snap b/crates/pixi_build_frontend/tests/snapshots/diagnostics__source_dir_is_empty.snap new file mode 100644 index 000000000..94befbc23 --- /dev/null +++ b/crates/pixi_build_frontend/tests/snapshots/diagnostics__source_dir_is_empty.snap @@ -0,0 +1,6 @@ +--- +source: crates/pixi_build_frontend/tests/diagnostics.rs +expression: snapshot +--- + × unable to discover communication protocol, the source directory does not contain a supported manifest + help: Ensure that the source directory contains a valid pixi.toml or meta.yaml file. 
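The files below introduce the `pixi_build_types` crate, which defines the vocabulary for these JSON-RPC exchanges. As a rough illustration of the wire format implied by the serde derives (assuming `serde_json` as the serializer, which is not a dependency of the crate itself), an `initialize` request body would serialize like this:

```rust
// Illustration only: the `camelCase` rename on `InitializeParams` shapes the
// JSON-RPC `initialize` request body.
use pixi_build_types::{procedures::initialize::InitializeParams, FrontendCapabilities};

fn main() {
    let params = InitializeParams {
        manifest_path: "/path/to/source/pixi.toml".into(),
        capabilities: FrontendCapabilities {},
        cache_directory: None,
    };
    // Prints:
    // {"manifestPath":"/path/to/source/pixi.toml","capabilities":{},"cacheDirectory":null}
    println!("{}", serde_json::to_string(&params).unwrap());
}
```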
diff --git a/crates/pixi_build_frontend/tests/snapshots/diagnostics__source_dir_is_file.snap b/crates/pixi_build_frontend/tests/snapshots/diagnostics__source_dir_is_file.snap new file mode 100644 index 000000000..1a9575d3b --- /dev/null +++ b/crates/pixi_build_frontend/tests/snapshots/diagnostics__source_dir_is_file.snap @@ -0,0 +1,5 @@ +--- +source: crates/pixi_build_frontend/tests/diagnostics.rs +expression: snapshot +--- + × failed to discover a valid project manifest, the source does not refer to a directory diff --git a/crates/pixi_build_types/Cargo.toml b/crates/pixi_build_types/Cargo.toml new file mode 100644 index 000000000..e0925471e --- /dev/null +++ b/crates/pixi_build_types/Cargo.toml @@ -0,0 +1,16 @@ +[package] +authors.workspace = true +description = "The types for communicating between the pixi build frontend and backend" +edition.workspace = true +homepage.workspace = true +license.workspace = true +name = "pixi_build_types" +readme.workspace = true +repository.workspace = true +version = "0.1.0" + +[dependencies] +rattler_conda_types = { workspace = true } +serde = { workspace = true, features = ["derive"] } +serde_with = { workspace = true } +url = { workspace = true } diff --git a/crates/pixi_build_types/src/capabilities.rs b/crates/pixi_build_types/src/capabilities.rs new file mode 100644 index 000000000..958547c5d --- /dev/null +++ b/crates/pixi_build_types/src/capabilities.rs @@ -0,0 +1,17 @@ +//! Capabilities that the frontend and backend provide. + +use serde::{Deserialize, Serialize}; +#[derive(Debug, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +/// Capabilities that the backend provides. +pub struct BackendCapabilities { + /// Whether the backend provides the ability for just conda metadata. + pub provides_conda_metadata: Option, + + /// Whether the backend provides the ability to build conda packages. + pub provides_conda_build: Option, +} + +#[derive(Debug, Serialize, Deserialize)] +/// Capabilities that the frontend provides. +pub struct FrontendCapabilities {} diff --git a/crates/pixi_build_types/src/channel_configuration.rs b/crates/pixi_build_types/src/channel_configuration.rs new file mode 100644 index 000000000..350263f4c --- /dev/null +++ b/crates/pixi_build_types/src/channel_configuration.rs @@ -0,0 +1,11 @@ +use serde::{Deserialize, Serialize}; +use url::Url; + +/// Information about the channel configuration to use to resolve dependencies. +#[derive(Debug, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct ChannelConfiguration { + /// The default base URL to use for channels when the channel is not + /// specified as a full URL. + pub base_url: Url, +} diff --git a/crates/pixi_build_types/src/conda_package_metadata.rs b/crates/pixi_build_types/src/conda_package_metadata.rs new file mode 100644 index 000000000..2422ed45c --- /dev/null +++ b/crates/pixi_build_types/src/conda_package_metadata.rs @@ -0,0 +1,40 @@ +use rattler_conda_types::{NoArchType, PackageName, Platform, VersionWithSource}; +use serde::{Deserialize, Serialize}; +use serde_with::serde_as; + +#[serde_as] +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct CondaPackageMetadata { + /// The name of the package. + pub name: PackageName, + + /// The version of the package. + pub version: VersionWithSource, + + /// The build hash of the package. + pub build: String, + + /// The build number of the package. 
+    pub build_number: u64,
+
+    /// The subdir or platform
+    pub subdir: Platform,
+
+    /// The dependencies of the package
+    #[serde(default)]
+    pub depends: Vec<String>,
+
+    /// The constraints of the package
+    #[serde(default)]
+    pub constraints: Vec<String>,
+
+    /// The license of the package
+    pub license: Option<String>,
+
+    /// The license family of the package
+    pub license_family: Option<String>,
+
+    /// The noarch type of the package
+    pub noarch: NoArchType,
+}
diff --git a/crates/pixi_build_types/src/lib.rs b/crates/pixi_build_types/src/lib.rs
new file mode 100644
index 000000000..b1f68482a
--- /dev/null
+++ b/crates/pixi_build_types/src/lib.rs
@@ -0,0 +1,23 @@
+#[deny(missing_docs)]
+mod capabilities;
+mod channel_configuration;
+mod conda_package_metadata;
+pub mod procedures;
+
+pub use capabilities::{BackendCapabilities, FrontendCapabilities};
+pub use channel_configuration::ChannelConfiguration;
+pub use conda_package_metadata::CondaPackageMetadata;
+use rattler_conda_types::{GenericVirtualPackage, Platform};
+use serde::{Deserialize, Serialize};
+
+/// A platform and associated virtual packages
+#[derive(Clone, Debug, Serialize, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct PlatformAndVirtualPackages {
+    /// The platform
+    pub platform: Platform,
+
+    /// Virtual packages associated with the platform. Or `None` if the virtual
+    /// packages are not specified.
+    pub virtual_packages: Option<Vec<GenericVirtualPackage>>,
+}
diff --git a/crates/pixi_build_types/src/procedures/conda_build.rs b/crates/pixi_build_types/src/procedures/conda_build.rs
new file mode 100644
index 000000000..0f21c085e
--- /dev/null
+++ b/crates/pixi_build_types/src/procedures/conda_build.rs
@@ -0,0 +1,81 @@
+use std::path::PathBuf;
+
+use rattler_conda_types::GenericVirtualPackage;
+use serde::{Deserialize, Serialize};
+use url::Url;
+
+use crate::{ChannelConfiguration, PlatformAndVirtualPackages};
+
+pub const METHOD_NAME: &str = "conda/build";
+
+/// Parameters for the `conda/build` request.
+#[derive(Debug, Serialize, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct CondaBuildParams {
+    /// The build platform is always the current platform, but the virtual
+    /// packages used can be overridden.
+    ///
+    /// If this is not present, the virtual packages from the current platform
+    /// are used.
+    pub build_platform_virtual_packages: Option<Vec<GenericVirtualPackage>>,
+
+    /// The target platform that the metadata should be fetched for.
+    pub host_platform: Option<PlatformAndVirtualPackages>,
+
+    /// The channel base URLs for the conda channels to use to resolve dependencies.
+    pub channel_base_urls: Option<Vec<Url>>,
+
+    /// The channel configuration to use to resolve dependencies.
+    pub channel_configuration: ChannelConfiguration,
+
+    /// Information about the outputs to build. This information is previously
+    /// returned from a call to `conda/getMetadata`. Pass `None` to build all
+    /// outputs.
+    #[serde(default)]
+    pub outputs: Option<Vec<CondaOutputIdentifier>>,
+
+    /// A directory that can be used by the backend to store files for
+    /// subsequent requests. This directory is unique for each separate source
+    /// dependency.
+    ///
+    /// The directory may not yet exist.
+    pub work_directory: PathBuf,
+}
+
+/// Identifier of an output.
+#[derive(Default, Debug, Serialize, Deserialize)]
+pub struct CondaOutputIdentifier {
+    pub name: Option<String>,
+    pub version: Option<String>,
+    pub build: Option<String>,
+    pub subdir: Option<String>,
+}
+
+/// Contains the result of the `conda/build` request.
+#[derive(Debug, Serialize, Deserialize)]
+pub struct CondaBuildResult {
+    /// The packages that were built.
+    pub packages: Vec<CondaBuiltPackage>,
+}
+
+#[derive(Debug, Serialize, Deserialize)]
+pub struct CondaBuiltPackage {
+    /// The location on disk where the built package is located.
+    pub output_file: PathBuf,
+
+    /// The globs that were used as input to the build. Use these for
+    /// re-verifying the build.
+    pub input_globs: Vec<String>,
+
+    /// The name of the package.
+    pub name: String,
+
+    /// The version of the package.
+    pub version: String,
+
+    /// The build string of the package.
+    pub build: String,
+
+    /// The subdirectory of the package.
+    pub subdir: String,
+}
diff --git a/crates/pixi_build_types/src/procedures/conda_metadata.rs b/crates/pixi_build_types/src/procedures/conda_metadata.rs
new file mode 100644
index 000000000..9951177c0
--- /dev/null
+++ b/crates/pixi_build_types/src/procedures/conda_metadata.rs
@@ -0,0 +1,52 @@
+use serde::{Deserialize, Serialize};
+use std::path::PathBuf;
+use url::Url;
+
+use crate::{ChannelConfiguration, CondaPackageMetadata, PlatformAndVirtualPackages};
+
+pub const METHOD_NAME: &str = "conda/getMetadata";
+
+/// Parameters for the `conda/getMetadata` request.
+#[derive(Debug, Serialize, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct CondaMetadataParams {
+    /// The platform that will run the build.
+    ///
+    /// If this field is not present, the current platform should be used.
+    pub build_platform: Option<PlatformAndVirtualPackages>,
+
+    /// The platform where the package will run. In the conda world this might
+    /// be different from the "target" platform when targeting noarch.
+    ///
+    /// If this field is not present, the current platform should be used.
+    pub host_platform: Option<PlatformAndVirtualPackages>,
+
+    /// The channel base URLs that the metadata should be fetched from.
+    pub channel_base_urls: Option<Vec<Url>>,
+
+    /// The channel configuration to use to resolve dependencies.
+    pub channel_configuration: ChannelConfiguration,
+
+    /// A directory that can be used by the backend to store files for
+    /// subsequent requests. This directory is unique for each separate source
+    /// dependency.
+    ///
+    /// The directory may not yet exist.
+    pub work_directory: PathBuf,
+}
+
+/// Contains the result of the `conda/getMetadata` request.
+#[derive(Debug, Serialize, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct CondaMetadataResult {
+    /// Metadata of all the packages that can be built.
+    pub packages: Vec<CondaPackageMetadata>,
+
+    /// The files that were read as part of the computation. These files are
+    /// hashed and stored in the lock-file. If the files change, the
+    /// lock-file will be invalidated.
+    ///
+    /// If this field is not present, the input manifest will be used.
+    #[serde(default)]
+    pub input_globs: Option<Vec<String>>,
+}
diff --git a/crates/pixi_build_types/src/procedures/initialize.rs b/crates/pixi_build_types/src/procedures/initialize.rs
new file mode 100644
index 000000000..8124776a0
--- /dev/null
+++ b/crates/pixi_build_types/src/procedures/initialize.rs
@@ -0,0 +1,40 @@
+use std::path::PathBuf;
+
+use serde::{Deserialize, Serialize};
+
+use crate::capabilities::{BackendCapabilities, FrontendCapabilities};
+
+pub const METHOD_NAME: &str = "initialize";
+
+/// Parameters for the initialize request.
+///
+/// This request is the first request that the frontend sends to the backend and
+/// serves as a handshake between the two. The frontend provides its
+/// capabilities, which allows the backend to adapt its behavior to the frontend.
+/// Conversely, the backend provides its capabilities in the response, which
+/// allows the frontend to adapt its behavior to the capabilities of the
+/// backend.
+/// +/// This request is the only request that requires a schema that is forever +/// backwards and forwards compatible. All other requests can be negotiated +/// through the capabilities structs. To facilitate this compatibility we keep +/// the number of arguments in this struct to a bare minimum. +#[derive(Debug, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct InitializeParams { + /// The manifest that the build backend should use. + pub manifest_path: PathBuf, + /// The capabilities that the frontend provides. + pub capabilities: FrontendCapabilities, + + /// Optionally the cache directory to use for any caching activity. + pub cache_directory: Option, +} + +/// The result of the initialize request. +#[derive(Debug, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct InitializeResult { + /// The capabilities that the backend provides. + pub capabilities: BackendCapabilities, +} diff --git a/crates/pixi_build_types/src/procedures/mod.rs b/crates/pixi_build_types/src/procedures/mod.rs new file mode 100644 index 000000000..8b1db16a6 --- /dev/null +++ b/crates/pixi_build_types/src/procedures/mod.rs @@ -0,0 +1,3 @@ +pub mod conda_build; +pub mod conda_metadata; +pub mod initialize; diff --git a/crates/pixi_consts/src/consts.rs b/crates/pixi_consts/src/consts.rs index 5fbd8998d..bc755b717 100644 --- a/crates/pixi_consts/src/consts.rs +++ b/crates/pixi_consts/src/consts.rs @@ -31,6 +31,7 @@ pub const CONDA_META_DIR: &str = "conda-meta"; pub const PYPI_CACHE_DIR: &str = "uv-cache"; pub const CONDA_PYPI_MAPPING_CACHE_DIR: &str = "conda-pypi-mapping"; pub const CACHED_ENVS_DIR: &str = "cached-envs-v0"; +pub const CACHED_BUILD_ENVS_DIR: &str = "cached-build-envs-v0"; pub const CONDA_INSTALLER: &str = "conda"; diff --git a/crates/pixi_glob/Cargo.toml b/crates/pixi_glob/Cargo.toml new file mode 100644 index 000000000..5349323b8 --- /dev/null +++ b/crates/pixi_glob/Cargo.toml @@ -0,0 +1,24 @@ +[package] +authors.workspace = true +description = "A crate to deal with globs in pixi" +edition.workspace = true +homepage.workspace = true +license.workspace = true +name = "pixi_glob" +readme.workspace = true +repository.workspace = true +version = "0.1.0" + +[dependencies] +dashmap = { workspace = true } +itertools = { workspace = true } +memchr = { workspace = true } +rattler_digest = { workspace = true } +thiserror = { workspace = true } +tokio = { workspace = true, features = ["sync", "rt"] } +wax = { workspace = true } + +[dev-dependencies] +insta = { workspace = true } +rstest = { workspace = true } +tempfile = { workspace = true } diff --git a/crates/pixi_glob/src/glob_hash.rs b/crates/pixi_glob/src/glob_hash.rs new file mode 100644 index 000000000..ec9d5d70b --- /dev/null +++ b/crates/pixi_glob/src/glob_hash.rs @@ -0,0 +1,210 @@ +//! This module contains the `GlobHash` struct which is used to calculate a hash of the files that match the given glob patterns. +//! Use this if you want to calculate a hash of a set of files that match a glob pattern. +//! This is useful for finding out if you need to rebuild a target based on the files that match a glob pattern. +use std::{ + fs::File, + io, + io::{BufRead, Read, Write}, + path::{Path, PathBuf}, +}; + +use itertools::Itertools; +use rattler_digest::{digest::Digest, Sha256, Sha256Hash}; +use thiserror::Error; + +use crate::glob_set::{self, GlobSet}; + +/// Contains a hash of the files that match the given glob patterns. 
+#[derive(Debug, Clone, Default)]
+pub struct GlobHash {
+    /// The hash of the files that match the given glob patterns.
+    pub hash: Sha256Hash,
+    #[cfg(test)]
+    matching_files: Vec<String>,
+}
+
+#[derive(Error, Debug)]
+#[allow(missing_docs)]
+pub enum GlobHashError {
+    #[error(transparent)]
+    FilterGlobError(#[from] glob_set::GlobSetError),
+
+    #[error("during line normalization, failed to access {}", .0.display())]
+    NormalizeLineEnds(PathBuf, #[source] io::Error),
+
+    #[error("the operation was cancelled")]
+    Cancelled,
+}
+
+impl GlobHash {
+    /// Calculate a hash of the files that match the given glob patterns.
+    pub fn from_patterns<'a>(
+        root_dir: &Path,
+        globs: impl IntoIterator<Item = &'a str>,
+    ) -> Result<Self, GlobHashError> {
+        // If the root is not a directory or does not exist, return an empty hash.
+        if !root_dir.is_dir() {
+            return Ok(Self::default());
+        }
+
+        let glob_set = GlobSet::create(globs)?;
+        let mut entries = glob_set
+            .filter_directory(root_dir)
+            .collect::<Result<Vec<_>, _>>()?
+            .into_iter()
+            .map(|entry| entry.path().to_path_buf())
+            .collect_vec();
+        entries.sort();
+
+        #[cfg(test)]
+        let mut matching_files = Vec::new();
+
+        let mut hasher = Sha256::default();
+        for entry in entries {
+            // Construct a normalized file path to ensure consistent hashing across
+            // platforms. And add it to the hash.
+            let relative_path = entry.strip_prefix(root_dir).unwrap_or(&entry);
+            let normalized_file_path = relative_path.to_string_lossy().replace("\\", "/");
+            rattler_digest::digest::Update::update(&mut hasher, normalized_file_path.as_bytes());
+
+            #[cfg(test)]
+            matching_files.push(normalized_file_path);
+
+            // Concatenate the contents of the file to the hash.
+            File::open(&entry)
+                .and_then(|mut file| normalize_line_endings(&mut file, &mut hasher))
+                .map_err(move |e| GlobHashError::NormalizeLineEnds(entry, e))?;
+        }
+        let hash = hasher.finalize();
+
+        Ok(Self {
+            hash,
+            #[cfg(test)]
+            matching_files,
+        })
+    }
+}
+
+/// This function copies the contents of the reader to the writer but normalizes
+/// the line endings (e.g. replaces `\r\n` with `\n`) in text files.
+fn normalize_line_endings<R: Read, W: Write>(reader: &mut R, writer: &mut W) -> io::Result<()> {
+    let mut reader = io::BufReader::new(reader);
+
+    // Get the first few bytes of the file and check if there is a `0x0` byte in the
+    // input.
+    let mut buffer = reader.fill_buf()?;
+    if buffer.contains(&0) {
+        // This file is binary, compute the hash verbatim.
+        std::io::copy(&mut reader, writer)?;
+    } else {
+        // Read the contents of the file but ignore any `\r` characters.
+        let mut last_cr_pos = None;
+        let mut offset = 0;
+        while !buffer.is_empty() {
+            match memchr::memchr2(b'\r', b'\n', buffer) {
+                Some(pos) if buffer[pos] == b'\r' => {
+                    if last_cr_pos.is_some() {
+                        // We previously detected a `\r` character but did not encounter a newline
+                        writer.write_all(b"\r")?;
+                    }
+
+                    // Process everything up to the '\r' character. Effectively ignoring it.
+                    writer.write_all(&buffer[..pos])?;
+                    reader.consume(pos + 1);
+                    offset += pos + 1;
+                    last_cr_pos = Some(pos + offset);
+                }
+                Some(pos) => {
+                    // Encountered a newline character. If the last time we encountered the `\r` was
+                    // not the previous character, we have to process the last
+                    // `\r` character.
+                    match last_cr_pos {
+                        Some(last_cr_pos) if last_cr_pos + 1 == pos + offset => {
+                            writer.write_all(b"\r")?;
+                            offset += pos + 1;
+                        }
+                        _ => last_cr_pos = None,
+                    }
+
+                    // Process everything up-to and including the newline character.
+                    writer.write_all(&buffer[..=pos])?;
+                    reader.consume(pos + 1);
+                    offset += pos + 1;
+                }
+                None => {
+                    if last_cr_pos.is_some() {
+                        // We previously detected a `\r` character but did not encounter a newline
+                        writer.write_all(b"\r")?;
+                        last_cr_pos = None;
+                    }
+
+                    // This batch of data does not contain any `\r` or `\n` characters. Process the
+                    // entire chunk.
+                    writer.write_all(buffer)?;
+                    let buffer_len = buffer.len();
+                    reader.consume(buffer_len);
+                    offset += buffer_len
+                }
+            }
+            buffer = reader.fill_buf()?;
+        }
+
+        if last_cr_pos.is_some() {
+            // We detected a `\r` at the end of the input.
+            writer.write_all(b"\r")?;
+        }
+    }
+
+    Ok(())
+}
+
+#[cfg(test)]
+mod test {
+    use std::path::Path;
+
+    use itertools::Itertools;
+    use rstest::*;
+
+    use super::*;
+
+    #[fixture]
+    pub fn testname() -> String {
+        let thread_name = std::thread::current().name().unwrap().to_string();
+        let test_name = thread_name.rsplit("::").next().unwrap_or(&thread_name);
+        format!("glob_hash_{test_name}")
+    }
+
+    #[rstest]
+    #[case::satisfiability(vec!["tests/data/satisfiability/source-dependency/**/*"])]
+    #[case::satisfiability_ignore_lock(vec!["tests/data/satisfiability/source-dependency/**/*", "!tests/data/satisfiability/source-dependency/**/*.lock"])]
+    #[case::non_glob(vec!["tests/data/satisfiability/source-dependency/pixi.toml"])]
+    fn test_input_hash(testname: String, #[case] globs: Vec<&str>) {
+        let root_dir = Path::new(env!("CARGO_MANIFEST_DIR"))
+            .parent()
+            .and_then(Path::parent)
+            .unwrap();
+        let glob_hash = GlobHash::from_patterns(root_dir, globs.iter().copied()).unwrap();
+        let snapshot = format!(
+            "Globs:\n{}\nHash: {:x}\nMatched files:\n{}",
+            globs
+                .iter()
+                .format_with("\n", |glob, f| f(&format_args!("- {}", glob))),
+            glob_hash.hash,
+            glob_hash
+                .matching_files
+                .iter()
+                .format_with("\n", |glob, f| f(&format_args!("- {}", glob)))
+        );
+        insta::assert_snapshot!(testname, snapshot);
+    }
+
+    #[test]
+    fn test_normalize_line_endings() {
+        let input =
+            "\rHello\r\nWorld\r\nYou are the best\nThere is no-one\r\r \rlike you.\r".repeat(8196);
+        let mut normalized: Vec<u8> = Vec::new();
+        normalize_line_endings(&mut input.as_bytes(), &mut normalized).unwrap();
+        let output = String::from_utf8(normalized).unwrap();
+        assert_eq!(output, input.replace("\r\n", "\n"));
+    }
+}
diff --git a/crates/pixi_glob/src/glob_hash_cache.rs b/crates/pixi_glob/src/glob_hash_cache.rs
new file mode 100644
index 000000000..857aefbca
--- /dev/null
+++ b/crates/pixi_glob/src/glob_hash_cache.rs
@@ -0,0 +1,110 @@
+//! This module contains the `GlobHashCache` struct which is used to cache the computation of glob hashes. This cache is an in-process cache
+//! so its purpose is to reuse computed hashes across multiple calls to the same glob hash computation for the same set of input files.
+//! The input files are deemed not to change between calls.
+use std::{
+    convert::identity,
+    path::PathBuf,
+    sync::{Arc, Weak},
+};
+
+use dashmap::{DashMap, Entry};
+use tokio::sync::broadcast;
+
+use super::{GlobHash, GlobHashError};
+
+/// A key for the cache of glob hashes.
+#[derive(Debug, Clone, Hash, PartialEq, Eq)]
+pub struct GlobHashKey {
+    /// The root directory of the glob patterns.
+    pub root: PathBuf,
+    /// The glob patterns.
+    pub globs: Vec<String>,
+}
+
+#[derive(Debug)]
+enum HashCacheEntry {
+    /// The value is currently being computed.
+    Pending(Weak<broadcast::Sender<GlobHash>>),
+
+    /// We have a value for this key.
+    Done(GlobHash),
+}
+
+/// An object that caches the computation of glob hashes. It deduplicates
+/// requests for the same hash.
+///
+/// It is safe and efficient to use this object from multiple threads.
+#[derive(Debug, Default, Clone)]
+pub struct GlobHashCache {
+    cache: Arc<DashMap<GlobHashKey, HashCacheEntry>>,
+}
+
+impl GlobHashCache {
+    /// Computes the input hash of the given key. If the hash is already in the
+    /// cache, it will return the cached value. If the hash is not in the
+    /// cache, it will compute the hash (deduplicating any request) and return
+    /// it.
+    pub async fn compute_hash(
+        &self,
+        key: impl Into<GlobHashKey>,
+    ) -> Result<GlobHash, GlobHashError> {
+        let key = key.into();
+        match self.cache.entry(key.clone()) {
+            Entry::Vacant(entry) => {
+                // Construct a channel over which we will be sending the result and store it in
+                // the map. If another request comes in for the same hash it will find this
+                // entry.
+                let (tx, _) = broadcast::channel(1);
+                let tx = Arc::new(tx);
+                let weak_tx = Arc::downgrade(&tx);
+                entry.insert(HashCacheEntry::Pending(weak_tx));
+
+                // Spawn the computation of the hash
+                let computation_key = key.clone();
+                let result = tokio::task::spawn_blocking(move || {
+                    GlobHash::from_patterns(
+                        &computation_key.root,
+                        computation_key.globs.iter().map(String::as_str),
+                    )
+                })
+                .await
+                .map_or_else(
+                    |err| match err.try_into_panic() {
+                        Ok(panic) => std::panic::resume_unwind(panic),
+                        Err(_) => Err(GlobHashError::Cancelled),
+                    },
+                    identity,
+                )?;
+
+                // Store the result in the cache
+                self.cache.insert(key, HashCacheEntry::Done(result.clone()));
+
+                // Broadcast the result, ignore the error. If the receiver is dropped, we don't
+                // care.
+                let _ = tx.send(result.clone());
+
+                Ok(result)
+            }
+            Entry::Occupied(entry) => {
+                match entry.get() {
+                    HashCacheEntry::Pending(weak_tx) => {
+                        let sender = weak_tx.clone();
+                        let mut subscriber = sender
+                            .upgrade()
+                            .ok_or(GlobHashError::Cancelled)?
+                            .subscribe();
+                        drop(entry);
+                        subscriber
+                            .recv()
+                            .await
+                            .map_err(|_| GlobHashError::Cancelled)
+                    }
+                    HashCacheEntry::Done(hash) => {
+                        // We have a value for this key.
+                        Ok(hash.clone())
+                    }
+                }
+            }
+        }
+    }
+}
diff --git a/crates/pixi_glob/src/glob_mtime.rs b/crates/pixi_glob/src/glob_mtime.rs
new file mode 100644
index 000000000..4f98de760
--- /dev/null
+++ b/crates/pixi_glob/src/glob_mtime.rs
@@ -0,0 +1,166 @@
+use std::{
+    path::{Path, PathBuf},
+    time::SystemTime,
+};
+
+use thiserror::Error;
+
+use crate::glob_set::{self, GlobSet};
+
+/// Contains the newest modification time for the files that match the given glob patterns.
+#[derive(Debug, Clone)]
+pub enum GlobModificationTime {
+    /// No files matched the given glob patterns.
+    NoMatches,
+    /// Files matched the glob patterns, and this variant contains the newest modification time and designated file.
+    MatchesFound {
+        /// The newest modification time for the files that match the given glob patterns.
+        modified_at: SystemTime,
+        /// The designated file with the newest modification time.
+        designated_file: PathBuf,
+    },
+}
+
+#[derive(Error, Debug)]
+#[allow(missing_docs)]
+pub enum GlobModificationTimeError {
+    #[error("error calculating modification time for {}", .0.display())]
+    CalculateMTime(PathBuf, #[source] std::io::Error),
+    #[error(transparent)]
+    GlobSet(#[from] glob_set::GlobSetError),
+}
+
+impl GlobModificationTime {
+    /// Calculate the newest modification time for the files that match the given glob patterns.
+    pub fn from_patterns<'a>(
+        root_dir: &Path,
+        globs: impl IntoIterator<Item = &'a str>,
+    ) -> Result<Self, GlobModificationTimeError> {
+        // If the root is not a directory or does not exist, return NoMatches.
+ if !root_dir.is_dir() { + return Ok(Self::NoMatches); + } + + let glob_set = GlobSet::create(globs)?; + let entries: Vec<_> = glob_set + .filter_directory(root_dir) + .collect::, _>>()?; + + let mut latest = None; + let mut designated_file = PathBuf::new(); + + // Find the newest modification time and the designated file + for entry in entries { + let matched_path = entry.path().to_owned(); + let metadata = entry.metadata().map_err(|e| { + GlobModificationTimeError::CalculateMTime(matched_path.clone(), e.into()) + })?; + let modified_entry = metadata + .modified() + .map_err(|e| GlobModificationTimeError::CalculateMTime(matched_path.clone(), e))?; + + if let Some(ref current_latest) = latest { + if *current_latest >= modified_entry { + continue; + } + } + + latest = Some(modified_entry); + designated_file = matched_path.clone(); + } + + match latest { + Some(modified_at) => Ok(Self::MatchesFound { + modified_at, + designated_file, + }), + None => Ok(Self::NoMatches), + } + } + + /// Get the newest modification time, if any. + pub fn newest(&self) -> Option { + match self { + Self::MatchesFound { modified_at, .. } => Some(*modified_at), + Self::NoMatches => None, + } + } + + /// Get the designated file with the newest modification time, if any. + pub fn designated_file(&self) -> Option<&Path> { + match self { + Self::MatchesFound { + designated_file, .. + } => Some(designated_file.as_path()), + Self::NoMatches => None, + } + } + + /// Returns `true` if there have been any matches found. + pub fn has_matches(&self) -> bool { + matches!(self, Self::MatchesFound { .. }) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use std::fs::File; + use std::time::{Duration, SystemTime}; + use tempfile::tempdir; + + #[test] + fn test_glob_modification_time() { + // Create a temporary directory + let temp_dir = tempdir().unwrap(); + let dir_path = temp_dir.path(); + + // Two minutes ago + let now = SystemTime::now() - Duration::from_secs(120); + + // Create files with different modification times + let files = [ + // Three minutes ago + ("file1.txt", now - Duration::from_secs(60)), + // Two minutes ago + ("file2.txt", now), + // One minute ago <- should select this + ("file3.txt", now + Duration::from_secs(60)), + ]; + + // Create files with different modification times + for (name, mtime) in files { + let path = dir_path.join(name); + File::create(&path).unwrap().set_modified(mtime).unwrap(); + } + + // Use glob patterns to match `.txt` files + let glob_mod_time = GlobModificationTime::from_patterns(dir_path, ["*.txt"]).unwrap(); + + match glob_mod_time { + GlobModificationTime::MatchesFound { + modified_at, + designated_file, + } => { + // Assert that the designated file is `file3.txt` with the latest modification time + assert_eq!(designated_file, dir_path.join("file3.txt")); + assert_eq!(modified_at, now + Duration::from_secs(60)); + } + GlobModificationTime::NoMatches => panic!("Expected matches but found none"), + } + } + + #[test] + fn test_glob_modification_time_no_matches() { + // Create a temporary directory + let temp_dir = tempdir().unwrap(); + let dir_path = temp_dir.path(); + + // Use glob patterns that match no files + let glob_mod_time = GlobModificationTime::from_patterns(dir_path, ["*.md"]).unwrap(); + + assert!(matches!(glob_mod_time, GlobModificationTime::NoMatches)); + assert_eq!(glob_mod_time.newest(), None); + assert_eq!(glob_mod_time.designated_file(), None); + } +} diff --git a/crates/pixi_glob/src/glob_set.rs b/crates/pixi_glob/src/glob_set.rs new file mode 100644 index 
000000000..bba77bf6a --- /dev/null +++ b/crates/pixi_glob/src/glob_set.rs @@ -0,0 +1,139 @@ +use std::{ + io, + path::{Path, PathBuf}, +}; + +use itertools::{Either, Itertools}; +use thiserror::Error; +use wax::{Glob, WalkEntry}; + +pub(crate) struct GlobSet<'t> { + /// The globs to include in the filter. + pub include: Vec>, + /// The globs to exclude from the filter. + pub exclude: Vec>, +} + +#[derive(Error, Debug)] +pub enum GlobSetError { + #[error("failed to access {}", .0.display())] + Io(PathBuf, #[source] io::Error), + + #[error(transparent)] + DirWalk(#[from] io::Error), + + #[error("failed to read metadata for {0}")] + Metadata(PathBuf, #[source] wax::WalkError), + + #[error(transparent)] + Build(#[from] wax::BuildError), +} + +impl<'t> GlobSet<'t> { + pub fn create(globs: impl IntoIterator) -> Result, GlobSetError> { + // Split the globs into inclusion and exclusion globs based on whether they + // start with `!`. + let (inclusion_globs, exclusion_globs): (Vec<_>, Vec<_>) = + globs.into_iter().partition_map(|g| { + g.strip_prefix("!") + .map(Either::Right) + .unwrap_or(Either::Left(g)) + }); + + // Parse all globs + let inclusion_globs = inclusion_globs + .into_iter() + .map(Glob::new) + .collect::, _>>()?; + let exclusion_globs = exclusion_globs + .into_iter() + .map(Glob::new) + .collect::, _>>()?; + + Ok(Self { + include: inclusion_globs, + exclude: exclusion_globs, + }) + } + + /// Create a function that filters out files that match the globs. + pub fn filter_directory( + &'t self, + root_dir: &Path, + ) -> impl Iterator, GlobSetError>> + 't { + let root_dir = root_dir.to_path_buf(); + let entries = self + .include + .iter() + .flat_map(move |glob| { + glob.walk(root_dir.clone()) + .not(self.exclude.clone()) + .expect("since the globs are already parsed this should not error") + }) + .filter_map(|entry| { + match entry { + Ok(entry) if entry.file_type().is_dir() => None, + Ok(entry) => Some(Ok(entry)), + Err(e) => { + let path = e.path().map(Path::to_path_buf); + let io_err = std::io::Error::from(e); + match io_err.kind() { + // Ignore DONE and permission errors + io::ErrorKind::NotFound | io::ErrorKind::PermissionDenied => None, + _ => Some(Err(if let Some(path) = path { + GlobSetError::Io(path, io_err) + } else { + GlobSetError::DirWalk(io_err) + })), + } + } + } + }); + entries + } +} + +#[cfg(test)] +mod tests { + use super::GlobSet; + use std::{ + fs::{create_dir, File}, + path::PathBuf, + }; + use tempfile::tempdir; + + #[test] + fn test_filter_globs_inclusion_exclusion() { + let temp_dir = tempdir().unwrap(); + let root_path = temp_dir.path(); + + // Create files and directories + File::create(root_path.join("include1.txt")).unwrap(); + File::create(root_path.join("include2.log")).unwrap(); + File::create(root_path.join("exclude.txt")).unwrap(); + create_dir(root_path.join("subdir")).unwrap(); + File::create(root_path.join("subdir/include_subdir.txt")).unwrap(); + + // Test globs: include all .txt but exclude exclude.txt + let filter_globs = GlobSet::create(vec!["**/*.txt", "!exclude.txt"]).unwrap(); + + // Filter directory and get results as strings + let mut filtered_files: Vec<_> = filter_globs + .filter_directory(root_path) + .collect::, _>>() + .unwrap() + .into_iter() + .map(|p| p.path().strip_prefix(root_path).unwrap().to_path_buf()) + .collect(); + + // Assert the expected files are present + filtered_files.sort(); + + let mut expected = vec![ + "include1.txt".parse::().unwrap(), + "subdir/include_subdir.txt".parse().unwrap(), + ]; + expected.sort(); + 
assert_eq!(filtered_files, expected); + } +} diff --git a/crates/pixi_glob/src/lib.rs b/crates/pixi_glob/src/lib.rs new file mode 100644 index 000000000..16bf8df4e --- /dev/null +++ b/crates/pixi_glob/src/lib.rs @@ -0,0 +1,12 @@ +#![deny(missing_docs)] +//! A crate to deal with glob patterns in Pixi. +//! And the caching thereof. + +mod glob_hash; +mod glob_hash_cache; +mod glob_mtime; +mod glob_set; + +pub use glob_hash::{GlobHash, GlobHashError}; +pub use glob_hash_cache::{GlobHashCache, GlobHashKey}; +pub use glob_mtime::{GlobModificationTime, GlobModificationTimeError}; diff --git a/crates/pixi_glob/src/snapshots/pixi_glob__glob_hash__test__glob_hash_case_1_satisfiability.snap b/crates/pixi_glob/src/snapshots/pixi_glob__glob_hash__test__glob_hash_case_1_satisfiability.snap new file mode 100644 index 000000000..0d8512edf --- /dev/null +++ b/crates/pixi_glob/src/snapshots/pixi_glob__glob_hash__test__glob_hash_case_1_satisfiability.snap @@ -0,0 +1,11 @@ +--- +source: crates/pixi_glob/src/glob_hash.rs +expression: snapshot +--- +Globs: +- tests/data/satisfiability/source-dependency/**/* +Hash: bcec5beb36091c68ee58f72f1f4d33f3658b98732912f2cbe5827898d8a666f9 +Matched files: +- tests/data/satisfiability/source-dependency/child-package/pixi.toml +- tests/data/satisfiability/source-dependency/pixi.lock +- tests/data/satisfiability/source-dependency/pixi.toml diff --git a/crates/pixi_glob/src/snapshots/pixi_glob__glob_hash__test__glob_hash_case_2_satisfiability_ignore_lock.snap b/crates/pixi_glob/src/snapshots/pixi_glob__glob_hash__test__glob_hash_case_2_satisfiability_ignore_lock.snap new file mode 100644 index 000000000..713139dfa --- /dev/null +++ b/crates/pixi_glob/src/snapshots/pixi_glob__glob_hash__test__glob_hash_case_2_satisfiability_ignore_lock.snap @@ -0,0 +1,11 @@ +--- +source: crates/pixi_glob/src/glob_hash.rs +expression: snapshot +--- +Globs: +- tests/data/satisfiability/source-dependency/**/* +- !tests/data/satisfiability/source-dependency/**/*.lock +Hash: c0506e1c483578c78217015a4b19e90826c005bfe2ed9a59c83bc2054d4b5d4a +Matched files: +- tests/data/satisfiability/source-dependency/child-package/pixi.toml +- tests/data/satisfiability/source-dependency/pixi.toml diff --git a/crates/pixi_glob/src/snapshots/pixi_glob__glob_hash__test__glob_hash_case_3_non_glob.snap b/crates/pixi_glob/src/snapshots/pixi_glob__glob_hash__test__glob_hash_case_3_non_glob.snap new file mode 100644 index 000000000..d09a1a1ac --- /dev/null +++ b/crates/pixi_glob/src/snapshots/pixi_glob__glob_hash__test__glob_hash_case_3_non_glob.snap @@ -0,0 +1,9 @@ +--- +source: crates/pixi_glob/src/glob_hash.rs +expression: snapshot +--- +Globs: +- tests/data/satisfiability/source-dependency/pixi.toml +Hash: 0acde3549cf146ff910d07405f8146dea0659c19b25d6292048da9f2d47e667e +Matched files: +- tests/data/satisfiability/source-dependency/pixi.toml diff --git a/crates/pixi_manifest/src/build_system.rs b/crates/pixi_manifest/src/build_system.rs new file mode 100644 index 000000000..60958aa3c --- /dev/null +++ b/crates/pixi_manifest/src/build_system.rs @@ -0,0 +1,70 @@ +//! Defines the build section for the pixi manifest. +use rattler_conda_types::Channel; +use rattler_conda_types::ChannelConfig; +use rattler_conda_types::ChannelUrl; +use rattler_conda_types::MatchSpec; +use rattler_conda_types::NamedChannelOrUrl; +use rattler_conda_types::ParseChannelError; +use serde::{Deserialize, Serialize}; +use serde_with::serde_as; +use serde_with::DisplayFromStr; + +/// A build section in the pixi manifest. 
+/// that defines what backend is used to build the project. +#[serde_as] +#[derive(Serialize, Deserialize, Debug, Clone)] +#[serde(deny_unknown_fields, rename_all = "kebab-case")] +pub struct BuildSystem { + /// The dependencies for the build tools which will be installed in the build environment. + /// These need to be conda packages + #[serde_as(as = "Vec")] + pub dependencies: Vec, + + /// The command to start the build backend + pub build_backend: String, + + /// The channels to use for fetching build tools + pub channels: Vec, +} + +impl BuildSystem { + /// Returns the channels as URLs + pub fn channels_url( + &self, + config: &ChannelConfig, + ) -> Result, ParseChannelError> { + self.channels + .iter() + .map(|c| c.clone().into_base_url(config)) + .collect() + } + + /// Returns the channels as `Channel`s + pub fn channels(&self, config: &ChannelConfig) -> Result, ParseChannelError> { + self.channels + .iter() + .map(|c| c.clone().into_channel(config)) + .collect() + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn deserialize_build() { + let toml = r#" + channels = ["conda-forge"] + dependencies = ["pixi-build-python > 12"] + build-backend = "pixi-build-python" + "#; + + let build: BuildSystem = toml_edit::de::from_str(toml).unwrap(); + assert_eq!(build.dependencies.len(), 1); + assert_eq!( + build.dependencies[0].to_string(), + "pixi-build-python >12".to_string() + ); + } +} diff --git a/crates/pixi_manifest/src/channel.rs b/crates/pixi_manifest/src/channel.rs index 7841dd2d4..926d07155 100644 --- a/crates/pixi_manifest/src/channel.rs +++ b/crates/pixi_manifest/src/channel.rs @@ -1,8 +1,6 @@ -use std::str::FromStr; - use itertools::Itertools; use rattler_conda_types::NamedChannelOrUrl; -use serde::{de::Error, Deserialize, Deserializer, Serialize, Serializer}; +use serde::{Deserialize, Serialize}; use serde_with::serde_as; use toml_edit::{Table, Value}; @@ -67,88 +65,3 @@ impl From for Value { } } } - -pub enum TomlPrioritizedChannelStrOrMap { - Map(PrioritizedChannel), - Str(NamedChannelOrUrl), -} - -impl TomlPrioritizedChannelStrOrMap { - pub fn into_prioritized_channel(self) -> PrioritizedChannel { - match self { - TomlPrioritizedChannelStrOrMap::Map(prioritized_channel) => prioritized_channel, - TomlPrioritizedChannelStrOrMap::Str(channel) => PrioritizedChannel { - channel, - priority: None, - }, - } - } -} - -impl From for TomlPrioritizedChannelStrOrMap { - fn from(channel: PrioritizedChannel) -> Self { - if let Some(priority) = channel.priority { - TomlPrioritizedChannelStrOrMap::Map(PrioritizedChannel { - channel: channel.channel, - priority: Some(priority), - }) - } else { - TomlPrioritizedChannelStrOrMap::Str(channel.channel) - } - } -} - -impl<'de> Deserialize<'de> for TomlPrioritizedChannelStrOrMap { - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - serde_untagged::UntaggedEnumVisitor::new() - .map(|map| map.deserialize().map(TomlPrioritizedChannelStrOrMap::Map)) - .string(|str| { - NamedChannelOrUrl::from_str(str) - .map_err(serde_untagged::de::Error::custom) - .map(TomlPrioritizedChannelStrOrMap::Str) - }) - .expecting("either a map or a string") - .deserialize(deserializer) - } -} - -impl Serialize for TomlPrioritizedChannelStrOrMap { - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - match self { - TomlPrioritizedChannelStrOrMap::Map(map) => map.serialize(serializer), - TomlPrioritizedChannelStrOrMap::Str(str) => str.serialize(serializer), - } - } -} - -/// Helper so that we can deserialize 
-/// [`crate::channel::PrioritizedChannel`] from a string or a -/// map. -impl<'de> serde_with::DeserializeAs<'de, PrioritizedChannel> for TomlPrioritizedChannelStrOrMap { - fn deserialize_as(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - let prioritized_channel = TomlPrioritizedChannelStrOrMap::deserialize(deserializer)?; - Ok(prioritized_channel.into_prioritized_channel()) - } -} - -/// Helper so that we can serialize -/// [`crate::channel::PrioritizedChannel`] to a string or a -/// map. -impl serde_with::SerializeAs for TomlPrioritizedChannelStrOrMap { - fn serialize_as(source: &PrioritizedChannel, serializer: S) -> Result - where - S: Serializer, - { - let toml_prioritized_channel: TomlPrioritizedChannelStrOrMap = source.clone().into(); - toml_prioritized_channel.serialize(serializer) - } -} diff --git a/crates/pixi_manifest/src/dependencies.rs b/crates/pixi_manifest/src/dependencies.rs index ac21b6842..7ceb5a922 100644 --- a/crates/pixi_manifest/src/dependencies.rs +++ b/crates/pixi_manifest/src/dependencies.rs @@ -72,6 +72,16 @@ where } } +impl FromIterator<(N, D)> for Dependencies { + fn from_iter>(iter: T) -> Self { + let mut deps = Dependencies::default(); + for (name, spec) in iter { + deps.insert(name, spec); + } + deps + } +} + impl Dependencies { /// Adds a requirement to the list of dependencies. pub fn insert(&mut self, name: N, spec: D) { diff --git a/crates/pixi_manifest/src/environment.rs b/crates/pixi_manifest/src/environment.rs index a0e71bb24..cb08d7aa0 100644 --- a/crates/pixi_manifest/src/environment.rs +++ b/crates/pixi_manifest/src/environment.rs @@ -11,9 +11,14 @@ use serde::{self, Deserialize, Deserializer}; use serde_with::SerializeDisplay; use thiserror::Error; -use crate::consts::DEFAULT_ENVIRONMENT_NAME; -use crate::solve_group::SolveGroupIdx; -use crate::utils::PixiSpanned; +use crate::{consts::DEFAULT_ENVIRONMENT_NAME, solve_group::SolveGroupIdx}; + +#[derive(Debug, Clone, Error, Diagnostic, PartialEq)] +#[error("Failed to parse environment name '{attempted_parse}', please use only lowercase letters, numbers and dashes")] +pub struct ParseEnvironmentNameError { + /// The string that was attempted to be parsed. + pub attempted_parse: String, +} /// The name of an environment. This is either a string or default for the /// default environment. @@ -86,13 +91,6 @@ impl PartialEq for EnvironmentName { } } -#[derive(Debug, Clone, Error, Diagnostic, PartialEq)] -#[error("Failed to parse environment name '{attempted_parse}', please use only lowercase letters, numbers and dashes")] -pub struct ParseEnvironmentNameError { - /// The string that was attempted to be parsed. - pub attempted_parse: String, -} - impl FromStr for EnvironmentName { type Err = ParseEnvironmentNameError; fn from_str(s: &str) -> Result { @@ -151,36 +149,6 @@ pub struct Environment { pub no_default_feature: bool, } -/// Helper struct to deserialize the environment from TOML. -/// The environment description can only hold these values. 
-#[derive(Deserialize)] -#[serde(deny_unknown_fields, rename_all = "kebab-case")] -pub(super) struct TomlEnvironment { - #[serde(default)] - pub features: PixiSpanned>, - pub solve_group: Option, - #[serde(default)] - pub no_default_feature: bool, -} - -pub(super) enum TomlEnvironmentMapOrSeq { - Map(TomlEnvironment), - Seq(Vec), -} - -impl<'de> Deserialize<'de> for TomlEnvironmentMapOrSeq { - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - serde_untagged::UntaggedEnumVisitor::new() - .map(|map| map.deserialize().map(TomlEnvironmentMapOrSeq::Map)) - .seq(|seq| seq.deserialize().map(TomlEnvironmentMapOrSeq::Seq)) - .expecting("either a map or a sequence") - .deserialize(deserializer) - } -} - #[cfg(test)] mod tests { use super::*; diff --git a/crates/pixi_manifest/src/error.rs b/crates/pixi_manifest/src/error.rs index d169fb46c..e6009940e 100644 --- a/crates/pixi_manifest/src/error.rs +++ b/crates/pixi_manifest/src/error.rs @@ -1,12 +1,16 @@ -use std::{borrow::Borrow, fmt::Display}; +use std::{ + borrow::{Borrow, Cow}, + fmt::Display, + ops::Range, +}; use itertools::Itertools; -use miette::{Diagnostic, IntoDiagnostic, LabeledSpan, NamedSource, Report}; +use miette::{Diagnostic, LabeledSpan, SourceOffset, SourceSpan}; use rattler_conda_types::{version_spec::ParseVersionSpecError, InvalidPackageNameError}; use thiserror::Error; use super::pypi::pypi_requirement::Pep508ToPyPiRequirementError; -use crate::ParsedManifest; +use crate::{KnownPreviewFeature, WorkspaceManifest}; #[derive(Error, Debug, Clone, Diagnostic)] pub enum DependencyError { @@ -30,14 +34,18 @@ pub enum RequirementConversionError { InvalidVersion(#[from] ParseVersionSpecError), } -#[derive(Error, Debug, Clone, Diagnostic)] +#[derive(Error, Debug, Clone)] pub enum TomlError { - #[error(transparent)] - Error(#[from] toml_edit::TomlError), + #[error("{}", .0.message())] + Error(toml_edit::TomlError), #[error("Missing table `[tool.pixi.project]`. 
Try running `pixi init`")] NoPixiTable, - #[error("Missing field `name`")] - NoProjectName(Option>), + #[error("Missing field `{0}`")] + MissingField(Cow<'static, str>, Option>), + #[error("{0}")] + Generic(Cow<'static, str>, Option>), + #[error(transparent)] + FeatureNotEnabled(#[from] FeatureNotEnabled), #[error("Could not find or access the part '{part}' in the path '[{table_name}]'")] TableError { part: String, table_name: String }, #[error("Could not find or access array '{array_name}' in '[{table_name}]'")] @@ -47,36 +55,98 @@ pub enum TomlError { }, #[error("Could not convert pep508 to pixi pypi requirement")] Conversion(#[from] Box), + #[error(transparent)] + InvalidNonPackageDependencies(#[from] InvalidNonPackageDependencies), } -impl TomlError { - pub fn to_fancy(&self, file_name: &str, contents: impl Into) -> Result { - if let Some(span) = self.span() { - Err(miette::miette!( - labels = vec![LabeledSpan::at(span, self.message())], - "failed to parse project manifest" - ) - .with_source_code(NamedSource::new(file_name, contents.into()))) +impl From for TomlError { + fn from(e: toml_edit::TomlError) -> Self { + TomlError::Error(e) + } +} + +#[derive(Error, Debug, Clone)] +#[error("{message}")] +pub struct FeatureNotEnabled { + pub feature: Cow<'static, str>, + pub message: Cow<'static, str>, + pub span: Option>, +} + +impl FeatureNotEnabled { + pub fn new(message: impl Into>, feature: KnownPreviewFeature) -> Self { + Self { + feature: feature.as_str().into(), + message: message.into(), + span: None, + } + } + + pub fn with_opt_span(self, span: Option>) -> Self { + Self { span, ..self } + } +} + +impl Diagnostic for FeatureNotEnabled { + fn help<'a>(&'a self) -> Option> { + Some(Box::new(format!( + "Add `preview = [\"{}\"]` under [workspace] to enable the preview feature", + self.feature + ))) + } + + fn labels(&self) -> Option + '_>> { + if let Some(span) = self.span.clone() { + Some(Box::new(std::iter::once( + LabeledSpan::new_primary_with_span(None, span), + ))) } else { - Err(self.clone()).into_diagnostic() + None } } +} - fn span(&self) -> Option> { - match self { - TomlError::Error(e) => e.span(), - TomlError::NoPixiTable => Some(0..1), - TomlError::NoProjectName(span) => span.clone(), +impl Diagnostic for TomlError { + fn labels(&self) -> Option + '_>> { + let span = match self { + TomlError::Error(err) => err.span().map(SourceSpan::from), + TomlError::NoPixiTable => Some(SourceSpan::new(SourceOffset::from(0), 1)), + TomlError::Generic(_, span) | TomlError::MissingField(_, span) => { + span.clone().map(SourceSpan::from) + } + TomlError::FeatureNotEnabled(err) => return err.labels(), + TomlError::InvalidNonPackageDependencies(err) => return err.labels(), _ => None, + }; + + // This is here to make it easier to add more match arms in the future. 
+ #[allow(clippy::match_single_binding)] + let message = match self { + _ => None, + }; + + if let Some(span) = span { + Some(Box::new(std::iter::once( + LabeledSpan::new_primary_with_span(message, span), + ))) + } else { + None } } - fn message(&self) -> String { + + fn help<'a>(&'a self) -> Option> { match self { - TomlError::Error(e) => e.message().to_owned(), - _ => self.to_string(), + TomlError::NoPixiTable => { + Some(Box::new("Run `pixi init` to create a new project manifest")) + } + TomlError::FeatureNotEnabled(err) => err.help(), + TomlError::InvalidNonPackageDependencies(err) => err.help(), + _ => None, } } +} +impl TomlError { pub fn table_error(part: &str, table_name: &str) -> Self { Self::TableError { part: part.into(), @@ -105,7 +175,7 @@ pub struct UnknownFeature { } impl UnknownFeature { - pub fn new(feature: String, manifest: impl Borrow) -> Self { + pub fn new(feature: String, manifest: impl Borrow) -> Self { // Find the top 2 features that are closest to the feature name. let existing_features = manifest .borrow() @@ -148,3 +218,25 @@ impl miette::Diagnostic for UnknownFeature { } } } + +/// An error that indicates that some package sections are only valid when the +/// manifest describes a package instead of a workspace. +#[derive(Debug, Error, Clone)] +#[error("build-, host- and run-dependency sections are only valid for packages.")] +pub struct InvalidNonPackageDependencies { + pub invalid_dependency_sections: Vec>, +} + +impl Diagnostic for InvalidNonPackageDependencies { + fn help<'a>(&'a self) -> Option> { + Some(Box::new( + "These sections are only valid when the manifest describes a package instead of a workspace.\nAdd a `[package]` section to the manifest to fix this error or remove the offending sections.", + )) + } + + fn labels(&self) -> Option + '_>> { + Some(Box::new(self.invalid_dependency_sections.iter().map( + |range| LabeledSpan::new_with_span(None, SourceSpan::from(range.clone())), + ))) + } +} diff --git a/crates/pixi_manifest/src/feature.rs b/crates/pixi_manifest/src/feature.rs index 697647ed8..979a87274 100644 --- a/crates/pixi_manifest/src/feature.rs +++ b/crates/pixi_manifest/src/feature.rs @@ -1,6 +1,5 @@ use std::{ borrow::{Borrow, Cow}, - collections::HashMap, fmt, hash::{Hash, Hasher}, }; @@ -10,19 +9,16 @@ use itertools::Either; use pixi_spec::PixiSpec; use rattler_conda_types::{PackageName, Platform}; use rattler_solve::ChannelPriority; -use serde::{de::Error, Deserialize, Deserializer}; -use serde_with::{serde_as, SerializeDisplay}; +use serde::{de::Error, Deserialize}; +use serde_with::SerializeDisplay; use crate::{ - channel::{PrioritizedChannel, TomlPrioritizedChannelStrOrMap}, + channel::PrioritizedChannel, consts, - parsed_manifest::deserialize_opt_package_map, - parsed_manifest::deserialize_package_map, pypi::{pypi_options::PypiOptions, PyPiPackageName}, target::Targets, - task::{Task, TaskName}, utils::PixiSpanned, - Activation, PyPiRequirement, SpecType, SystemRequirements, Target, TargetSelector, + PyPiRequirement, SpecType, SystemRequirements, WorkspaceTarget, }; /// The name of a feature. This is either a string or default for the default @@ -147,7 +143,7 @@ pub struct Feature { pub pypi_options: Option, /// Target specific configuration. 
- pub targets: Targets, + pub targets: Targets<WorkspaceTarget>, } impl Feature { @@ -161,7 +157,7 @@ impl Feature { system_requirements: SystemRequirements::default(), pypi_options: None, - targets: <Targets as Default>::default(), + targets: <Targets<WorkspaceTarget> as Default>::default(), } } @@ -184,6 +180,48 @@ impl Feature { self.channels.get_or_insert_with(Default::default) } + /// Returns the run dependencies of the target for the given `platform`. + /// + /// If the platform is `None` no platform specific dependencies are + /// returned. + /// + /// This function returns `None` if not a single target of this feature + /// has any run dependencies defined. + pub fn run_dependencies( + &self, + platform: Option<Platform>, + ) -> Option<Cow<'_, IndexMap<PackageName, PixiSpec>>> { + self.dependencies(SpecType::Run, platform) + } + + /// Returns the host dependencies of the target for the given `platform`. + /// + /// If the platform is `None` no platform specific dependencies are + /// returned. + /// + /// This function returns `None` if not a single target of this feature + /// has any host dependencies defined. + pub fn host_dependencies( + &self, + platform: Option<Platform>, + ) -> Option<Cow<'_, IndexMap<PackageName, PixiSpec>>> { + self.dependencies(SpecType::Host, platform) + } + + /// Returns the build dependencies of the target for the given `platform`. + /// + /// If the platform is `None` no platform specific dependencies are + /// returned. + /// + /// This function returns `None` if not a single target of this feature + /// has any build dependencies defined. + pub fn build_dependencies( + &self, + platform: Option<Platform>, + ) -> Option<Cow<'_, IndexMap<PackageName, PixiSpec>>> { + self.dependencies(SpecType::Build, platform) + } + /// Returns the dependencies of the feature for a given `spec_type` and /// `platform`. /// @@ -193,18 +231,56 @@ impl Feature { /// /// Returns `None` if this feature does not define any target that has any /// of the requested dependencies. + /// + /// If the `platform` is `None` no platform specific dependencies are taken + /// into consideration. pub fn dependencies( &self, - spec_type: Option<SpecType>, + spec_type: SpecType, platform: Option<Platform>, ) -> Option<Cow<'_, IndexMap<PackageName, PixiSpec>>> { self.targets .resolve(platform) // Get the targets in reverse order, from least specific to most specific. // This is required because the extend function will overwrite existing keys. .rev() .filter_map(|t| t.dependencies(spec_type)) .filter(|deps| !deps.is_empty()) + .fold(None, |acc, deps| match acc { + None => Some(Cow::Borrowed(deps)), + Some(mut acc) => { + let deps_iter = deps.iter().map(|(name, spec)| (name.clone(), spec.clone())); + acc.to_mut().extend(deps_iter); + Some(acc) + } + }) + } + + /// Returns the combined dependencies of the feature and `platform`. + /// + /// The `build` dependencies overwrite the `host` dependencies which + /// overwrite the `run` dependencies. + /// + /// This function returns a [`Cow`]. If the dependencies are not combined or + /// overwritten by multiple targets, then this function returns a + /// reference to the internal dependencies. + /// + /// Returns `None` if this feature does not define any target that has any + /// of the requested dependencies. + /// + /// If the `platform` is `None` no platform specific dependencies are taken + /// into consideration. + pub fn combined_dependencies( + &self, + platform: Option<Platform>, + ) -> Option<Cow<'_, IndexMap<PackageName, PixiSpec>>> { + self.targets + .resolve(platform) + // Get the targets in reverse order, from least specific to most specific. + // This is required because the extend function will overwrite existing keys.
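The chain continues with `.rev()` below. As the comment notes, the fold depends on `IndexMap::extend` letting later (more specific) targets overwrite earlier ones key by key; a minimal standalone sketch of that overwrite rule, with made-up package names and specs:

```rust
use indexmap::IndexMap;

fn main() {
    // Least specific first, mirroring the reversed target order above.
    let mut combined: IndexMap<&str, &str> = IndexMap::new();
    combined.extend([("python", ">=3.9"), ("numpy", "*")]); // default target
    combined.extend([("python", ">=3.11")]); // platform-specific target wins

    assert_eq!(combined["python"], ">=3.11");
    assert_eq!(combined["numpy"], "*");
}
```

The same ordering argument is why the targets are resolved from least to most specific before folding.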
+ .rev() + .filter_map(|t| t.combined_dependencies()) + .filter(|deps| !deps.is_empty()) .fold(None, |acc, deps| match acc { None => Some(deps), Some(mut acc) => { @@ -214,7 +290,6 @@ impl Feature { ), Cow::Owned(deps) => Either::Right(deps.into_iter()), }; - acc.to_mut().extend(deps_iter); Some(acc) } @@ -299,84 +374,6 @@ impl Feature { } } -impl<'de> Deserialize<'de> for Feature { - fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> - where - D: Deserializer<'de>, - { - #[serde_as] - #[derive(Deserialize)] - #[serde(deny_unknown_fields, rename_all = "kebab-case")] - struct FeatureInner { - #[serde(default)] - platforms: Option<PixiSpanned<Vec<Platform>>>, - #[serde(default)] - channels: Option<Vec<TomlPrioritizedChannelStrOrMap>>, - #[serde(default)] - channel_priority: Option<ChannelPriority>, - #[serde(default)] - system_requirements: SystemRequirements, - #[serde(default)] - target: IndexMap<PixiSpanned<TargetSelector>, Target>, - - #[serde(default, deserialize_with = "deserialize_package_map")] - dependencies: IndexMap<PackageName, PixiSpec>, - - #[serde(default, deserialize_with = "deserialize_opt_package_map")] - host_dependencies: Option<IndexMap<PackageName, PixiSpec>>, - - #[serde(default, deserialize_with = "deserialize_opt_package_map")] - build_dependencies: Option<IndexMap<PackageName, PixiSpec>>, - - #[serde(default)] - pypi_dependencies: Option<IndexMap<PyPiPackageName, PyPiRequirement>>, - - /// Additional information to activate an environment. - #[serde(default)] - activation: Option<Activation>, - - /// Target specific tasks to run in the environment - #[serde(default)] - tasks: HashMap<TaskName, Task>, - - /// Additional options for PyPi dependencies. - #[serde(default)] - pypi_options: Option<PypiOptions>, - } - - let inner = FeatureInner::deserialize(deserializer)?; - let mut dependencies = HashMap::from_iter([(SpecType::Run, inner.dependencies)]); - if let Some(host_deps) = inner.host_dependencies { - dependencies.insert(SpecType::Host, host_deps); - } - if let Some(build_deps) = inner.build_dependencies { - dependencies.insert(SpecType::Build, build_deps); - } - - let default_target = Target { - dependencies, - pypi_dependencies: inner.pypi_dependencies, - activation: inner.activation, - tasks: inner.tasks, - }; - - Ok(Feature { - name: FeatureName::Default, - platforms: inner.platforms, - channels: inner.channels.map(|channels| { - channels - .into_iter() - .map(|channel| channel.into_prioritized_channel()) - .collect() - }), - channel_priority: inner.channel_priority, - system_requirements: inner.system_requirements, - pypi_options: inner.pypi_options, - targets: Targets::from_default_and_user_defined(default_target, inner.target), - }) - } -} - #[cfg(test)] mod tests { use std::path::Path; @@ -384,7 +381,7 @@ mod tests { use assert_matches::assert_matches; use super::*; - use crate::manifests::manifest::Manifest; + use crate::Manifest; #[test] fn test_dependencies_borrowed() { @@ -414,7 +411,7 @@ mod tests { assert_matches!( manifest .default_feature() - .dependencies(Some(SpecType::Host), None) + .dependencies(SpecType::Host, None) .unwrap(), Cow::Borrowed(_), "[host-dependencies] should be borrowed" ); @@ -423,31 +420,34 @@ assert_matches!( manifest .default_feature() - .dependencies(Some(SpecType::Run), None) + .dependencies(SpecType::Run, None) .unwrap(), Cow::Borrowed(_), "[dependencies] should be borrowed" ); assert_matches!( - manifest.default_feature().dependencies(None, None).unwrap(), + manifest + .default_feature() + .combined_dependencies(None) + .unwrap(), Cow::Owned(_), "combined dependencies should be owned" ); let bla_feature = manifest - .parsed + .workspace .features .get(&FeatureName::Named(String::from("bla"))) .unwrap(); assert_matches!( -
bla_feature.dependencies(SpecType::Run, None).unwrap(), Cow::Borrowed(_), "[feature.bla.dependencies] should be borrowed" ); assert_matches!( - bla_feature.dependencies(None, None).unwrap(), + bla_feature.combined_dependencies(None).unwrap(), Cow::Borrowed(_), "[feature.bla] combined dependencies should also be borrowed" ); @@ -510,6 +510,6 @@ mod tests { // and should now be none, previously this was added // to the default feature assert!(manifest.default_feature().pypi_options().is_some()); - assert!(manifest.parsed.project.pypi_options.is_some()); + assert!(manifest.workspace.workspace.pypi_options.is_some()); } } diff --git a/crates/pixi_manifest/src/features_ext.rs b/crates/pixi_manifest/src/features_ext.rs index a0723d80c..0b53b79f6 100644 --- a/crates/pixi_manifest/src/features_ext.rs +++ b/crates/pixi_manifest/src/features_ext.rs @@ -1,51 +1,73 @@ use std::collections::HashSet; use indexmap::IndexSet; -use rattler_conda_types::{NamedChannelOrUrl, Platform}; +use rattler_conda_types::{ + ChannelConfig, ChannelUrl, NamedChannelOrUrl, ParseChannelError, Platform, +}; use rattler_solve::ChannelPriority; -use crate::{HasManifestRef, PrioritizedChannel, SpecType}; +use crate::{ + has_features_iter::HasFeaturesIter, pypi::pypi_options::PypiOptions, CondaDependencies, + HasManifestRef, PrioritizedChannel, PyPiDependencies, SpecType, SystemRequirements, +}; -use crate::has_features_iter::HasFeaturesIter; -use crate::{pypi::pypi_options::PypiOptions, SystemRequirements}; -use crate::{CondaDependencies, PyPiDependencies}; - -/// ChannelPriorityCombination error, thrown when multiple channel priorities are set +/// ChannelPriorityCombination error, thrown when multiple channel priorities +/// are set #[derive(Debug, thiserror::Error)] #[error("Multiple channel priorities are not allowed in a single environment")] pub struct ChannelPriorityCombinationError; -/// A trait that implements various methods for collections that combine attributes of Features. -/// It is implemented by Environment, GroupedEnvironment and SolveGroup. -/// It removes some of the boilerplate of combining features and their derived data from multiple sources. +/// A trait that implements various methods for collections that combine +/// attributes of Features. It is implemented by Environment, GroupedEnvironment +/// and SolveGroup. It removes some of the boilerplate of combining features and +/// their derived data from multiple sources. /// -/// The lifetime parameter is named `'source` to indicate that the borrow comes from the source of the data; -/// for most implementations this will be the pixi project. +/// The lifetime parameter is named `'source` to indicate that the borrow comes +/// from the source of the data; for most implementations this will be the pixi +/// project. /// -/// There is a blanket implementation available for all types that implement [`HasManifestRef`] and [`HasFeaturesIter`] +/// There is a blanket implementation available for all types that implement +/// [`HasManifestRef`] and [`HasFeaturesIter`]. pub trait FeaturesExt<'source>: HasManifestRef<'source> + HasFeaturesIter<'source> { /// Returns the channels associated with this collection. /// - /// Users can specify custom channels on a per-feature basis. This method collects and - /// deduplicates all the channels from all the features in the order they are defined in the - /// manifest. + /// Users can specify custom channels on a per-feature basis.
This method + /// collects and deduplicates all the channels from all the features in + /// the order they are defined in the manifest. /// - /// If a feature does not specify any channel the default channels from the project metadata are - /// used instead. + /// If a feature does not specify any channel the default channels from the + /// project metadata are used instead. fn channels(&self) -> IndexSet<&'source NamedChannelOrUrl> { // Collect all the channels from the features in one set, // deduplicate them and sort them on feature index, default feature comes last. let channels = self.features().flat_map(|feature| match &feature.channels { Some(channels) => channels, - None => &self.manifest().parsed.project.channels, + None => &self.manifest().workspace.workspace.channels, }); PrioritizedChannel::sort_channels_by_priority(channels).collect() } - /// Returns the channel priority, error on multiple values, return None if no value is set. + /// Returns the channels associated with this collection. + /// + /// This function is similar to [`Self::channels`] but it resolves the + /// channel URLs using the provided channel config. + fn channel_urls( + &self, + channel_config: &ChannelConfig, + ) -> Result<Vec<ChannelUrl>, ParseChannelError> { + self.channels() + .into_iter() + .cloned() + .map(|channel| channel.into_base_url(channel_config)) + .collect() + } + + /// Returns the channel priority, error on multiple values, return None if + /// no value is set. /// - /// When using multiple channel priorities over different features we should error as the user should decide what they want. + /// When using multiple channel priorities over different features we should + /// error as the user should decide what they want. fn channel_priority(&self) -> Result<Option<ChannelPriority>, ChannelPriorityCombinationError> { let mut channel_priority = None; for feature in self.features() { @@ -61,18 +83,20 @@ pub trait FeaturesExt<'source>: HasManifestRef<'source> + HasFeaturesIter<'sourc /// Returns the platforms that this collection is compatible with. /// - /// Which platforms a collection supports depends on which platforms the selected features of - /// the collection support. The platforms that are supported by the collection are the - /// intersection of the platforms supported by its features. + /// Which platforms a collection supports depends on which platforms the + /// selected features of the collection support. The platforms that are + /// supported by the collection are the intersection of the platforms + /// supported by its features. /// - /// Features can specify which platforms they support through the `platforms` key. If a feature - /// does not specify any platforms the platforms defined by the project are used. + /// Features can specify which platforms they support through the + /// `platforms` key. If a feature does not specify any platforms the + /// platforms defined by the project are used. fn platforms(&self) -> HashSet<Platform> { self.features() .map(|feature| { match &feature.platforms { Some(platforms) => &platforms.value, - None => &self.manifest().parsed.project.platforms.value, + None => &self.manifest().workspace.workspace.platforms.value, } .iter() .copied() @@ -86,9 +110,10 @@ pub trait FeaturesExt<'source>: HasManifestRef<'source> + HasFeaturesIter<'sourc /// Returns the system requirements for this collection. /// - /// The system requirements of the collection are the union of the system requirements of all - /// the features in the collection.
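The doc text continues below. As a toy model of the platform rule just described (feature names and platform strings are invented for the example), the collection ends up supporting exactly the intersection of what its features support:

```rust
use std::collections::HashSet;

fn main() {
    let feature_a: HashSet<&str> = HashSet::from(["linux-64", "osx-arm64", "win-64"]);
    let feature_b: HashSet<&str> = HashSet::from(["linux-64", "win-64"]);

    // The collection supports only platforms every feature supports.
    let supported: HashSet<&str> = feature_a.intersection(&feature_b).copied().collect();
    assert_eq!(supported, HashSet::from(["linux-64", "win-64"]));
}
```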
If multiple features specify a - /// requirement for the same system package, the highest is chosen. + /// The system requirements of the collection are the union of the system + /// requirements of all the features in the collection. If multiple + /// features specify a requirement for the same system package, the + /// highest is chosen. fn local_system_requirements(&self) -> SystemRequirements { self.features() .map(|feature| &feature.system_requirements) @@ -98,16 +123,18 @@ pub trait FeaturesExt<'source>: HasManifestRef<'source> + HasFeaturesIter<'sourc }) } - /// Returns true if any of the features has any reference to a pypi dependency. + /// Returns true if any of the features has any reference to a pypi + /// dependency. fn has_pypi_dependencies(&self) -> bool { self.features().any(|f| f.has_pypi_dependencies()) } /// Returns the PyPi dependencies to install for this collection. /// - /// The dependencies of all features are combined. This means that if two features define a - /// requirement for the same package, both requirements are returned. The different - /// requirements per package are sorted in the same order as the features they came from. + /// The dependencies of all features are combined. This means that if two + /// features define a requirement for the same package, both + /// requirements are returned. The different requirements per package + /// are sorted in the same order as the features they came from. fn pypi_dependencies(&self, platform: Option<Platform>) -> PyPiDependencies { self.features() .filter_map(|f| f.pypi_dependencies(platform)) .into() } /// Returns the dependencies to install for this collection. /// - /// The dependencies of all features are combined. This means that if two features define a - /// requirement for the same package, both requirements are returned. The different - /// requirements per package are sorted in the same order as the features they came from. - fn dependencies( - &self, - kind: Option<SpecType>, - platform: Option<Platform>, - ) -> CondaDependencies { + /// The dependencies of all features are combined. This means that if two + /// features define a requirement for the same package, both + /// requirements are returned. The different requirements per package + /// are sorted in the same order as the features they came from. + /// + /// If the `platform` is `None` no platform specific dependencies are taken + /// into consideration. + fn dependencies(&self, kind: SpecType, platform: Option<Platform>) -> CondaDependencies { self.features() .filter_map(|f| f.dependencies(kind, platform)) .into() } + /// Returns the combined dependencies to install for this collection. + /// + /// The `build` dependencies overwrite the `host` dependencies which + /// overwrite the `run` dependencies. + /// + /// The dependencies of all features are combined. This means that if two + /// features define a requirement for the same package, both + /// requirements are returned. The different requirements per package + /// are sorted in the same order as the features they came from. + /// + /// If the `platform` is `None` no platform specific dependencies are taken + /// into consideration. + fn combined_dependencies(&self, platform: Option<Platform>) -> CondaDependencies { + self.features() + .filter_map(|f| f.combined_dependencies(platform)) + .into() + } + /// Returns the pypi options for this collection. /// - /// The pypi options of all features are combined.
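Before the doc comment continues below, note that this per-feature merge, unlike the target fold in `feature.rs`, keeps every requirement instead of overwriting. A toy model with invented names and specs:

```rust
use std::collections::HashMap;

fn main() {
    // Two features both constrain `numpy`; the merge keeps both constraints,
    // ordered by the features they came from, rather than letting one win.
    let feature_a = vec![("numpy", ">=1.21")];
    let feature_b = vec![("numpy", "<2")];

    let mut combined: HashMap<&str, Vec<&str>> = HashMap::new();
    for (name, spec) in feature_a.into_iter().chain(feature_b) {
        combined.entry(name).or_default().push(spec);
    }
    assert_eq!(combined["numpy"], vec![">=1.21", "<2"]);
}
```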
They will be combined in the order - /// that they are defined in the manifest. - /// The index-url is a special case and can only be defined once. This should have been - /// verified beforehand. + /// The pypi options of all features are combined. They will be combined in + /// the order that they are defined in the manifest. + /// The index-url is a special case and can only be defined once. This + /// should have been verified beforehand. fn pypi_options(&self) -> PypiOptions { // Collect all the pypi-options from the features in one set, // deduplicate them and sort them on feature index, default feature comes last. @@ -142,7 +187,7 @@ pub trait FeaturesExt<'source>: HasManifestRef<'source> + HasFeaturesIter<'sourc .features() .filter_map(|feature| { if feature.pypi_options().is_none() { - self.manifest().parsed.project.pypi_options.as_ref() + self.manifest().workspace.workspace.pypi_options.as_ref() } else { feature.pypi_options() } diff --git a/crates/pixi_manifest/src/lib.rs b/crates/pixi_manifest/src/lib.rs index 967558b4c..c178b5376 100644 --- a/crates/pixi_manifest/src/lib.rs +++ b/crates/pixi_manifest/src/lib.rs @@ -1,4 +1,5 @@ mod activation; +mod build_system; pub(crate) mod channel; mod dependencies; mod environment; @@ -9,8 +10,7 @@ mod features_ext; mod has_features_iter; mod has_manifest_ref; mod manifests; -mod metadata; -mod parsed_manifest; +mod package; mod preview; pub mod pypi; pub mod pyproject; @@ -19,37 +19,38 @@ mod spec_type; mod system_requirements; mod target; pub mod task; -mod utils; +pub mod toml; +pub mod utils; mod validation; +mod workspace; -pub use dependencies::{CondaDependencies, Dependencies, PyPiDependencies}; - -pub use manifests::manifest::{Manifest, ManifestKind}; -pub use manifests::TomlManifest; - -pub use crate::environments::Environments; -pub use crate::parsed_manifest::{deserialize_package_map, ParsedManifest}; -pub use crate::solve_group::{SolveGroup, SolveGroups}; pub use activation::Activation; -pub use channel::{PrioritizedChannel, TomlPrioritizedChannelStrOrMap}; +pub use build_system::BuildSystem; +pub use channel::PrioritizedChannel; +pub use dependencies::{CondaDependencies, Dependencies, PyPiDependencies}; pub use environment::{Environment, EnvironmentName}; pub use error::TomlError; pub use feature::{Feature, FeatureName}; +pub use features_ext::FeaturesExt; +pub use has_features_iter::HasFeaturesIter; +pub use has_manifest_ref::HasManifestRef; use itertools::Itertools; -pub use metadata::ProjectMetadata; +pub use manifests::{Manifest, ManifestKind, WorkspaceManifest}; use miette::Diagnostic; +pub use preview::{KnownPreviewFeature, Preview, PreviewFeature}; pub use pypi::pypi_requirement::PyPiRequirement; use rattler_conda_types::Platform; pub use spec_type::SpecType; pub use system_requirements::{LibCSystemRequirement, SystemRequirements}; -pub use target::{Target, TargetSelector, Targets}; +pub use target::{TargetSelector, Targets, WorkspaceTarget}; pub use task::{Task, TaskName}; use thiserror::Error; +pub use workspace::Workspace; -pub use features_ext::FeaturesExt; -pub use has_features_iter::HasFeaturesIter; -pub use has_manifest_ref::HasManifestRef; -pub use preview::{KnownPreviewFeature, Preview, PreviewFeature}; +pub use crate::{ + environments::Environments, + solve_group::{SolveGroup, SolveGroups}, +}; /// Errors that can occur when getting a feature. 
#[derive(Debug, Clone, Error, Diagnostic)] diff --git a/crates/pixi_manifest/src/manifests/manifest.rs b/crates/pixi_manifest/src/manifests/manifest.rs index b1dea4787..56e2d986f 100644 --- a/crates/pixi_manifest/src/manifests/manifest.rs +++ b/crates/pixi_manifest/src/manifests/manifest.rs @@ -10,7 +10,7 @@ use std::{ use indexmap::{Equivalent, IndexSet}; use itertools::Itertools; -use miette::{miette, IntoDiagnostic, NamedSource, WrapErr}; +use miette::{miette, IntoDiagnostic, NamedSource, Report, WrapErr}; use pixi_spec::PixiSpec; use rattler_conda_types::{ChannelConfig, MatchSpec, PackageName, Platform, Version}; use toml_edit::{DocumentMut, Value}; @@ -18,13 +18,15 @@ use toml_edit::{DocumentMut, Value}; use crate::{ consts, error::{DependencyError, TomlError, UnknownFeature}, - manifests::{ManifestSource, TomlManifest}, + manifests::{ManifestSource, PackageManifest}, preview::Preview, pypi::PyPiPackageName, - pyproject::PyProjectManifest, - to_options, DependencyOverwriteBehavior, Environment, EnvironmentName, Feature, FeatureName, - GetFeatureError, ParsedManifest, PrioritizedChannel, PypiDependencyLocation, SpecType, Target, - TargetSelector, Task, TaskName, + pyproject::{PyProjectManifest, PyProjectToManifestError}, + to_options, + toml::{ExternalWorkspaceProperties, TomlDocument, TomlManifest}, + BuildSystem, DependencyOverwriteBehavior, Environment, EnvironmentName, Feature, FeatureName, + GetFeatureError, PrioritizedChannel, PypiDependencyLocation, SpecType, TargetSelector, Task, + TaskName, WorkspaceManifest, WorkspaceTarget, }; #[derive(Debug, Clone)] @@ -45,28 +47,36 @@ impl ManifestKind { } /// Handles the project's manifest file. +/// /// This struct is responsible for reading, parsing, editing, and saving the /// manifest. It encapsulates all logic related to the manifest's TOML format -/// and structure. The manifest data is represented as a [`ParsedManifest`] +/// and structure. The manifest data is represented as a [`WorkspaceManifest`] /// struct for easy manipulation. #[derive(Debug, Clone)] pub struct Manifest { /// The path to the manifest file pub path: PathBuf, - /// The raw contents of the manifest file - pub contents: String, + /// The raw contents of the original manifest file. This field, in + /// conjunction with [`crate::utils::PixiSpanned`] is used to provide better + /// error messages. + /// + /// Note that if the document is edited, this field will not be updated. 
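The field itself follows below. A minimal sketch of why the manifest keeps an editable `toml_edit` document alongside the raw contents: the editable representation is lossless, so targeted edits leave untouched formatting and comments alone (the manifest text here is made up):

```rust
use toml_edit::DocumentMut;

fn main() {
    let src = "# project manifest\nname = \"demo\"\n";
    let mut doc: DocumentMut = src.parse().expect("valid TOML");

    // Read the current value...
    assert_eq!(doc["name"].as_str(), Some("demo"));

    // ...and append a key; the untouched comment and key survive verbatim.
    doc["version"] = toml_edit::value("0.1.0");
    assert!(doc.to_string().starts_with("# project manifest"));
}
```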
+ pub contents: Option<String>, /// Editable toml document pub document: ManifestSource, - /// The parsed manifest - pub parsed: ParsedManifest, + /// The parsed workspace manifest + pub workspace: WorkspaceManifest, + + /// Optionally a package manifest + pub package: Option<PackageManifest>, } -impl Borrow<ParsedManifest> for Manifest { - fn borrow(&self) -> &ParsedManifest { - &self.parsed +impl Borrow<WorkspaceManifest> for Manifest { + fn borrow(&self) -> &WorkspaceManifest { + &self.workspace } } @@ -97,48 +107,63 @@ impl Manifest { let contents = contents.into(); let (parsed, file_name) = match manifest_kind { - ManifestKind::Pixi => (ParsedManifest::from_toml_str(&contents), "pixi.toml"), + ManifestKind::Pixi => ( + TomlManifest::from_toml_str(&contents).and_then(|manifest| { + manifest.into_manifests(ExternalWorkspaceProperties::default()) + }), + "pixi.toml", + ), ManifestKind::Pyproject => { let manifest = match PyProjectManifest::from_toml_str(&contents) - .and_then(|m| m.ensure_pixi(&contents)) + .and_then(|m| m.ensure_pixi()) { - Ok(manifest) => Ok(manifest.try_into().into_diagnostic()?), + Ok(manifest) => match manifest.into_manifests() { + Ok(manifests) => Ok(manifests), + Err(PyProjectToManifestError::TomlError(err)) => Err(err), + Err(e) => return Err(Report::from(e)), }, Err(e) => Err(e), }; (manifest, "pyproject.toml") } }; - let (manifest, document) = match parsed.and_then(|manifest| { + let ((workspace_manifest, package_manifest), document) = match parsed.and_then(|manifest| { contents .parse::<DocumentMut>() .map(|doc| (manifest, doc)) .map_err(TomlError::from) }) { Ok(result) => result, - Err(e) => e.to_fancy(file_name, &contents)?, + Err(e) => { + return Err( + Report::from(e).with_source_code(NamedSource::new(file_name, contents.clone())) + ); + } }; // Validate the contents of the manifest - manifest.validate(NamedSource::new(file_name, contents.to_owned()), root)?; + workspace_manifest.validate(NamedSource::new(file_name, contents.to_owned()), root)?; let source = match manifest_kind { - ManifestKind::Pixi => ManifestSource::PixiToml(TomlManifest::new(document)), - ManifestKind::Pyproject => ManifestSource::PyProjectToml(TomlManifest::new(document)), + ManifestKind::Pixi => ManifestSource::PixiToml(TomlDocument::new(document)), + ManifestKind::Pyproject => ManifestSource::PyProjectToml(TomlDocument::new(document)), }; Ok(Self { path: manifest_path.to_path_buf(), - contents, + contents: Some(contents), document: source, - parsed: manifest, + workspace: workspace_manifest, + package: package_manifest, }) } /// Save the manifest to the file and update the contents pub fn save(&mut self) -> miette::Result<()> { - self.contents = self.document.to_string(); - std::fs::write(&self.path, self.contents.clone()).into_diagnostic()?; + let contents = self.document.to_string(); + std::fs::write(&self.path, &contents).into_diagnostic()?; + self.contents = Some(contents); Ok(()) } @@ -201,7 +226,7 @@ impl Manifest { // Make sure the features exist for feature in features.iter().flatten() { if self.feature(feature.as_str()).is_none() { - return Err(UnknownFeature::new(feature.to_string(), &self.parsed).into()); + return Err(UnknownFeature::new(feature.to_string(), &self.workspace).into()); } } @@ -212,7 +237,7 @@ no_default_feature, )?; - let environment_idx = self.parsed.environments.add(Environment { + let environment_idx = self.workspace.environments.add(Environment { name: EnvironmentName::Named(name), features: features.unwrap_or_default(), features_source_loc: None, @@ -221,7 +246,9 @@ }); if let
Some(solve_group) = solve_group { - self.parsed.solve_groups.add(solve_group, environment_idx); + self.workspace + .solve_groups + .add(solve_group, environment_idx); } Ok(()) @@ -236,14 +263,14 @@ impl Manifest { // Remove the environment from the internal manifest let environment_idx = self - .parsed + .workspace .environments .by_name .shift_remove(name) .expect("environment should exist"); // Remove the environment from the solve groups - self.parsed + self.workspace .solve_groups .iter_mut() .for_each(|group| group.environments.retain(|&idx| idx != environment_idx)); @@ -284,7 +311,7 @@ impl Manifest { ) -> miette::Result<()> { // Get current and new platforms for the feature let current = match feature_name { - FeatureName::Default => self.parsed.project.platforms.get_mut(), + FeatureName::Default => self.workspace.workspace.platforms.get_mut(), FeatureName::Named(_) => self.get_or_insert_feature_mut(feature_name).platforms_mut(), }; let to_add: IndexSet<_> = platforms.cloned().collect(); @@ -310,7 +337,7 @@ impl Manifest { ) -> miette::Result<()> { // Get current platforms and platform to remove for the feature let current = match feature_name { - FeatureName::Default => self.parsed.project.platforms.get_mut(), + FeatureName::Default => self.workspace.workspace.platforms.get_mut(), FeatureName::Named(_) => self.feature_mut(feature_name)?.platforms_mut(), }; // Get the platforms to remove, while checking if they exist @@ -514,7 +541,7 @@ impl Manifest { /// This also returns true if the `pypi-dependencies` key is defined but /// empty. pub fn has_pypi_dependencies(&self) -> bool { - self.parsed + self.workspace .features .values() .flat_map(|f| f.targets.targets()) @@ -533,7 +560,7 @@ impl Manifest { // Get the current channels and update them let current = match feature_name { - FeatureName::Default => &mut self.parsed.project.channels, + FeatureName::Default => &mut self.workspace.workspace.channels, FeatureName::Named(_) => self.get_or_insert_feature_mut(feature_name).channels_mut(), }; @@ -579,7 +606,7 @@ impl Manifest { ) -> miette::Result<()> { // Get current channels and channels to remove for the feature let current = match feature_name { - FeatureName::Default => &mut self.parsed.project.channels, + FeatureName::Default => &mut self.workspace.workspace.channels, FeatureName::Named(_) => self.feature_mut(feature_name)?.channels_mut(), }; // Get the channels to remove, while checking if they exist @@ -618,7 +645,7 @@ impl Manifest { /// Set the project description pub fn set_description(&mut self, description: &str) -> miette::Result<()> { // Update in both the manifest and the toml - self.parsed.project.description = Some(description.to_string()); + self.workspace.workspace.description = Some(description.to_string()); self.document.set_description(description); Ok(()) @@ -627,7 +654,7 @@ impl Manifest { /// Set the project version pub fn set_version(&mut self, version: &str) -> miette::Result<()> { // Update in both the manifest and the toml - self.parsed.project.version = Some( + self.workspace.workspace.version = Some( Version::from_str(version) .into_diagnostic() .context("could not convert version to a valid project version")?, @@ -641,7 +668,7 @@ impl Manifest { &mut self, platform: Option, name: Option<&FeatureName>, - ) -> &mut Target { + ) -> &mut WorkspaceTarget { let feature = match name { Some(feature) => self.get_or_insert_feature_mut(feature), None => self.default_feature_mut(), @@ -656,7 +683,7 @@ impl Manifest { &mut self, platform: Option, name: &FeatureName, 
- ) -> Option<&mut Target> { + ) -> Option<&mut WorkspaceTarget> { self.feature_mut(name) .unwrap() .targets @@ -668,12 +695,12 @@ /// This is the feature that is added implicitly by the tables at the root /// of the project manifest. pub fn default_feature(&self) -> &Feature { - self.parsed.default_feature() + self.workspace.default_feature() } /// Returns a mutable reference to the default feature. pub fn default_feature_mut(&mut self) -> &mut Feature { - self.parsed.default_feature_mut() + self.workspace.default_feature_mut() } /// Returns the mutable feature with the given name or `Err` if it does not @@ -682,7 +709,7 @@ where Q: ?Sized + Hash + Equivalent<FeatureName> + Display, { - self.parsed.features.get_mut(name).ok_or_else(|| { + self.workspace.features.get_mut(name).ok_or_else(|| { miette!( "Feature {} does not exist", consts::FEATURE_STYLE.apply_to(name) @@ -692,7 +719,7 @@ /// Returns the mutable feature with the given name pub fn get_or_insert_feature_mut(&mut self, name: &FeatureName) -> &mut Feature { - self.parsed + self.workspace .features .entry(name.clone()) .or_insert_with(|| Feature::new(name.clone())) @@ -703,7 +730,7 @@ where Q: ?Sized + Hash + Equivalent<FeatureName>, { - self.parsed.features.get(name) + self.workspace.features.get(name) } /// Returns the default environment @@ -712,7 +739,7 @@ /// only the default feature. The default environment can be overwritten /// by a environment named `default`. pub fn default_environment(&self) -> &Environment { - self.parsed.default_environment() + self.workspace.default_environment() } /// Returns the environment with the given name or `None` if it does not @@ -721,12 +748,17 @@ where Q: ?Sized + Hash + Equivalent<EnvironmentName>, { - self.parsed.environments.find(name) + self.workspace.environments.find(name) } /// Returns the preview field of the project - pub fn preview(&self) -> Option<&Preview> { - self.parsed.project.preview.as_ref() + pub fn preview(&self) -> &Preview { + &self.workspace.workspace.preview + } + + /// Return the build section from the parsed manifest + pub fn build_section(&self) -> Option<&BuildSystem> { + self.package.as_ref().map(|package| &package.build_system) } } @@ -734,6 +766,7 @@ mod tests { use std::str::FromStr; + use glob::glob; use indexmap::IndexMap; use insta::assert_snapshot; use miette::NarratableReportHandler; @@ -748,7 +781,6 @@ use super::*; use crate::channel::PrioritizedChannel; - use glob::glob; const PROJECT_BOILERPLATE: &str = r#" [project] @@ -777,9 +809,9 @@ // From PathBuf let manifest = Manifest::from_path(path).unwrap(); - assert_eq!(manifest.parsed.project.name.unwrap(), "foo"); + assert_eq!(manifest.workspace.workspace.name, "foo"); assert_eq!( - manifest.parsed.project.version, + manifest.workspace.workspace.version, Some(Version::from_str("0.1.0").unwrap()) ); } @@ -1227,14 +1259,26 @@ feature_target_dep = "*" let mut manifest = Manifest::from_str(Path::new("pixi.toml"), file_contents).unwrap(); assert_eq!( - manifest.parsed.project.version.as_ref().unwrap().clone(), + manifest + .workspace + .workspace + .version + .as_ref() + .unwrap() + .clone(), Version::from_str("0.1.0").unwrap() ); manifest.set_version(&String::from("1.2.3")).unwrap(); assert_eq!( - manifest.parsed.project.version.as_ref().unwrap().clone(), + manifest + .workspace + .workspace + .version + .as_ref() + .unwrap() + .clone(), Version::from_str("1.2.3").unwrap() ); } @@ -1256,8 +1300,8 @@
feature_target_dep = "*" assert_eq!( manifest - .parsed - .project + .workspace + .workspace .description .as_ref() .unwrap() @@ -1271,8 +1315,8 @@ feature_target_dep = "*" assert_eq!( manifest - .parsed - .project + .workspace + .workspace .description .as_ref() .unwrap() @@ -1297,7 +1341,7 @@ feature_target_dep = "*" let mut manifest = Manifest::from_str(Path::new("pixi.toml"), file_contents).unwrap(); assert_eq!( - manifest.parsed.project.platforms.value, + manifest.workspace.workspace.platforms.value, vec![Platform::Linux64, Platform::Win64] .into_iter() .collect::>() @@ -1308,7 +1352,7 @@ feature_target_dep = "*" .unwrap(); assert_eq!( - manifest.parsed.project.platforms.value, + manifest.workspace.workspace.platforms.value, vec![Platform::Linux64, Platform::Win64, Platform::OsxArm64] .into_iter() .collect::>() @@ -1377,7 +1421,7 @@ feature_target_dep = "*" let mut manifest = Manifest::from_str(Path::new("pixi.toml"), file_contents).unwrap(); assert_eq!( - manifest.parsed.project.platforms.value, + manifest.workspace.workspace.platforms.value, vec![Platform::Linux64, Platform::Win64] .into_iter() .collect::>() @@ -1388,7 +1432,7 @@ feature_target_dep = "*" .unwrap(); assert_eq!( - manifest.parsed.project.platforms.value, + manifest.workspace.workspace.platforms.value, vec![Platform::Win64].into_iter().collect::>() ); @@ -1449,7 +1493,7 @@ platforms = ["linux-64", "win-64"] let mut manifest = Manifest::from_str(Path::new("pixi.toml"), file_contents).unwrap(); - assert_eq!(manifest.parsed.project.channels, IndexSet::new()); + assert_eq!(manifest.workspace.workspace.channels, IndexSet::new()); let conda_forge = PrioritizedChannel::from(NamedChannelOrUrl::Name(String::from("conda-forge"))); @@ -1476,7 +1520,7 @@ platforms = ["linux-64", "win-64"] .unwrap(); assert_eq!( - manifest.parsed.project.channels, + manifest.workspace.workspace.channels, vec![PrioritizedChannel { channel: NamedChannelOrUrl::Name(String::from("conda-forge")), priority: None, @@ -1491,7 +1535,7 @@ platforms = ["linux-64", "win-64"] .unwrap(); assert_eq!( - manifest.parsed.project.channels, + manifest.workspace.workspace.channels, vec![PrioritizedChannel { channel: NamedChannelOrUrl::Name(String::from("conda-forge")), priority: None, @@ -1502,7 +1546,7 @@ platforms = ["linux-64", "win-64"] assert_eq!( manifest - .parsed + .workspace .features .get(&cuda_feature) .unwrap() @@ -1524,7 +1568,7 @@ platforms = ["linux-64", "win-64"] assert_eq!( manifest - .parsed + .workspace .features .get(&cuda_feature) .unwrap() @@ -1541,7 +1585,7 @@ platforms = ["linux-64", "win-64"] assert_eq!( manifest - .parsed + .workspace .features .get(&test_feature) .unwrap() @@ -1572,8 +1616,8 @@ platforms = ["linux-64", "win-64"] .unwrap(); assert!(manifest - .parsed - .project + .workspace + .workspace .channels .iter() .any(|c| c.channel == custom_channel.channel)); @@ -1588,8 +1632,8 @@ platforms = ["linux-64", "win-64"] .unwrap(); assert!(manifest - .parsed - .project + .workspace + .workspace .channels .iter() .any(|c| c.channel == prioritized_channel1.channel && c.priority == Some(12i32))); @@ -1603,8 +1647,8 @@ platforms = ["linux-64", "win-64"] .unwrap(); assert!(manifest - .parsed - .project + .workspace + .workspace .channels .iter() .any(|c| c.channel == prioritized_channel2.channel && c.priority == Some(-12i32))); @@ -1631,7 +1675,7 @@ platforms = ["linux-64", "win-64"] let mut manifest = Manifest::from_str(Path::new("pixi.toml"), file_contents).unwrap(); assert_eq!( - manifest.parsed.project.channels, + 
manifest.workspace.workspace.channels, vec![PrioritizedChannel::from(NamedChannelOrUrl::Name( String::from("conda-forge") ))] @@ -1649,7 +1693,7 @@ platforms = ["linux-64", "win-64"] ) .unwrap(); - assert_eq!(manifest.parsed.project.channels, IndexSet::new()); + assert_eq!(manifest.workspace.workspace.channels, IndexSet::new()); manifest .remove_channels( @@ -1727,7 +1771,7 @@ platforms = ["linux-64", "win-64"] assert_eq!( test1_env .solve_group - .map(|idx| manifest.parsed.solve_groups[idx].name.as_str()), + .map(|idx| manifest.workspace.solve_groups[idx].name.as_str()), Some("test") ); @@ -1738,7 +1782,7 @@ platforms = ["linux-64", "win-64"] assert_eq!( test2_env .solve_group - .map(|idx| manifest.parsed.solve_groups[idx].name.as_str()), + .map(|idx| manifest.workspace.solve_groups[idx].name.as_str()), Some("test") ); @@ -1771,7 +1815,7 @@ platforms = ["linux-64", "win-64"] let manifest = Manifest::from_str(Path::new("pixi.toml"), file_contents).unwrap(); let cuda_feature = manifest - .parsed + .workspace .features .get(&FeatureName::Named("cuda".to_string())) .unwrap(); @@ -2304,8 +2348,8 @@ bar = "*" // Verify pytorch is first in the list assert_eq!( manifest - .parsed - .project + .workspace + .workspace .channels .iter() .next() @@ -2322,12 +2366,29 @@ bar = "*" // Verify order is still pytorch, conda-forge, bioconda let channels: Vec<_> = manifest - .parsed - .project + .workspace + .workspace .channels .iter() .map(|c| c.channel.to_string()) .collect(); assert_eq!(channels, vec!["pytorch", "conda-forge", "bioconda"]); } + + #[test] + fn test_validation_failure_source_dependency() { + let toml = r#" + [project] + name = "test" + channels = ['conda-forge'] + platforms = ['linux-64'] + + [dependencies] + foo = { path = "./foo" } + "#; + + let manifest = Manifest::from_str(Path::new("pixi.toml"), toml); + let err = manifest.unwrap_err(); + insta::assert_snapshot!(err, @"source dependencies are used in the feature 'default', but the `pixi-build` preview feature is not enabled"); + } } diff --git a/crates/pixi_manifest/src/manifests/mod.rs b/crates/pixi_manifest/src/manifests/mod.rs index ac3cc3c5a..b73f75b14 100644 --- a/crates/pixi_manifest/src/manifests/mod.rs +++ b/crates/pixi_manifest/src/manifests/mod.rs @@ -1,211 +1,21 @@ -use std::fmt; - -use toml_edit::{self, Array, Item, Table, TableLike, Value}; +//! Manifests are the core of the Pixi system. They are the files that define +//! the structure of a project, and are used to access and manipulate the +//! workspace and package data. +//! +//! The main entry point into the manifest is the [`Manifest`] struct which +//! represents a parsed `pixi.toml`. This struct is used to both access and +//! manipulate the manifest data. It also holds the original source code of the +//! manifest file which allows relating certain parts of the manifest back to +//! the original source code. pub mod project; -pub mod manifest; - -pub use project::ManifestSource; - -use crate::error::TomlError; - -/// Represents a wrapper around a TOML document. -/// This struct is exposed to other crates -/// to allow for easy manipulation of the TOML document. -#[derive(Debug, Clone, Default)] -pub struct TomlManifest(toml_edit::DocumentMut); - -impl TomlManifest { - /// Create a new `TomlManifest` from a `toml_edit::DocumentMut` document. 
- pub fn new(document: toml_edit::DocumentMut) -> Self { - Self(document) - } - - /// Get or insert a top-level item - pub fn get_or_insert<'a>(&'a mut self, key: &str, item: Item) -> &'a Item { - self.0.entry(key).or_insert(item) - } - - /// Retrieve a mutable reference to a target table `table_name` - /// in dotted form (e.g. `table1.table2`) from the root of the document. - /// If the table is not found, it is inserted into the document. - pub fn get_or_insert_nested_table<'a>( - &'a mut self, - table_name: &str, - ) -> Result<&'a mut dyn TableLike, TomlError> { - let parts: Vec<&str> = table_name.split('.').collect(); - - let mut current_table = self.0.as_table_mut() as &mut dyn TableLike; - - for part in parts { - let entry = current_table.entry(part); - let item = entry.or_insert(Item::Table(Table::new())); - if let Some(table) = item.as_table_mut() { - // Avoid creating empty tables - table.set_implicit(true); - } - current_table = item - .as_table_like_mut() - .ok_or_else(|| TomlError::table_error(part, table_name))?; - } - Ok(current_table) - } - - /// Inserts a value into a certain table - /// If the innermost table doesn't exist, an inline table will be created. - /// If it already exists, the formatting of the table will be preserved - pub fn insert_into_inline_table<'a>( - &'a mut self, - table_name: &str, - key: &str, - value: Value, - ) -> Result<&'a mut dyn TableLike, TomlError> { - let mut parts: Vec<&str> = table_name.split('.').collect(); - - let last = parts.pop(); - - let mut current_table = self.0.as_table_mut() as &mut dyn TableLike; - - for part in parts { - let entry = current_table.entry(part); - let item = entry.or_insert(Item::Table(Table::new())); - if let Some(table) = item.as_table_mut() { - // Avoid creating empty tables - table.set_implicit(true); - } - current_table = item - .as_table_like_mut() - .ok_or_else(|| TomlError::table_error(part, table_name))?; - } - - // Add dependency as inline table if it doesn't exist - if let Some(last) = last { - if let Some(dependency) = current_table.get_mut(last) { - dependency - .as_table_like_mut() - .map(|table| table.insert(key, Item::Value(value))); - } else { - let mut dependency = toml_edit::InlineTable::new(); - dependency.insert(key, value); - current_table.insert(last, toml_edit::value(dependency)); - } - } - - Ok(current_table) - } - - /// Retrieves a mutable reference to a target array `array_name` - /// in table `table_name` in dotted form (e.g. `table1.table2.array`). - /// - /// If the array is not found, it is inserted into the document. - pub fn get_or_insert_toml_array<'a>( - &'a mut self, - table_name: &str, - array_name: &str, - ) -> Result<&'a mut Array, TomlError> { - self.get_or_insert_nested_table(table_name)? - .entry(array_name) - .or_insert(Item::Value(Value::Array(Array::new()))) - .as_array_mut() - .ok_or_else(|| TomlError::array_error(array_name, table_name.to_string().as_str())) - } - - /// Retrieves a mutable reference to a target array `array_name` - /// in table `table_name` in dotted form (e.g. `table1.table2.array`). - /// - /// If the array is not found, returns None. - pub fn get_toml_array<'a>( - &'a mut self, - table_name: &str, - array_name: &str, - ) -> Result<Option<&'a mut Array>, TomlError> { - let array = self - .get_or_insert_nested_table(table_name)?
- .get_mut(array_name) - .and_then(|a| a.as_array_mut()); - Ok(array) - } -} - -impl fmt::Display for TomlManifest { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "{}", self.0) - } -} - -#[cfg(test)] -mod tests { - use std::str::FromStr; - - use toml_edit::DocumentMut; - - use super::*; - - #[test] - fn test_get_or_insert_nested_table() { - let toml = r#" -[envs.python] -channels = ["dummy-channel"] -[envs.python.dependencies] -dummy = "3.11.*" -"#; - let dep_name = "test"; - let mut manifest = TomlManifest::new(DocumentMut::from_str(toml).unwrap()); - manifest - .get_or_insert_nested_table("envs.python.dependencies") - .unwrap() - .insert(dep_name, Item::Value(toml_edit::Value::from("6.6"))); - - let dep = manifest - .get_or_insert_nested_table("envs.python.dependencies") - .unwrap() - .get(dep_name); - - assert!(dep.is_some()); - } - - #[test] - fn test_get_or_insert_inline_table() { - let toml = r#" -[envs.python] -channels = ["dummy-channel"] -dependencies = { dummy = "3.11.*" } -"#; - let dep_name = "test"; - let mut manifest = TomlManifest::new(DocumentMut::from_str(toml).unwrap()); - manifest - .get_or_insert_nested_table("envs.python.dependencies") - .unwrap() - .insert(dep_name, Item::Value(toml_edit::Value::from("6.6"))); - - let dep = manifest - .get_or_insert_nested_table("envs.python.dependencies") - .unwrap() - .get(dep_name); - - assert!(dep.is_some()); - - // Existing entries are also still there - let dummy = manifest - .get_or_insert_nested_table("envs.python.dependencies") - .unwrap() - .get("dummy"); - - assert!(dummy.is_some()) - } - - #[test] - fn test_get_or_insert_nested_table_no_empty_tables() { - let toml = r#" -[envs.python] -channels = ["dummy-channel"] -"#; - let table_name = "test"; - let mut manifest = TomlManifest::new(DocumentMut::from_str(toml).unwrap()); - manifest.get_or_insert_nested_table(table_name).unwrap(); +mod manifest; +mod package; +mod source; +mod workspace; - // No empty table is being created - assert!(!manifest.0.to_string().contains("[test]")); - } -} +pub use manifest::{Manifest, ManifestKind}; +pub use package::PackageManifest; +pub use source::ManifestSource; +pub use workspace::WorkspaceManifest; diff --git a/crates/pixi_manifest/src/manifests/package.rs b/crates/pixi_manifest/src/manifests/package.rs new file mode 100644 index 000000000..2064560a4 --- /dev/null +++ b/crates/pixi_manifest/src/manifests/package.rs @@ -0,0 +1,16 @@ +use crate::target::PackageTarget; +use crate::{package::Package, BuildSystem, Targets}; + +/// Holds the parsed content of the package part of a pixi manifest. This +/// describes the part related to the package only. 
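The `get_or_insert_nested_table` helper removed above (and its `no_empty_tables` test) relies on `toml_edit` treating intermediate tables as implicit, so no empty `[table]` headers are rendered; a standalone sketch of just that mechanism, using only public `toml_edit` APIs:

```rust
use toml_edit::{DocumentMut, Item, Table};

fn main() {
    let mut doc: DocumentMut = "".parse().unwrap();

    // Create `envs` only as a path segment, then hang `python` beneath it.
    let item = doc.entry("envs").or_insert(Item::Table(Table::new()));
    if let Some(table) = item.as_table_mut() {
        table.set_implicit(true); // avoid rendering a bare `[envs]` header
        table.entry("python").or_insert(Item::Table(Table::new()));
    }

    // Only the leaf table appears in the output.
    assert_eq!(doc.to_string().trim(), "[envs.python]");
}
```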
+#[derive(Debug, Clone)] +pub struct PackageManifest { + /// Information about the package + pub package: Package, + + /// Information about the build system for the package + pub build_system: BuildSystem, + + /// Defines the dependencies of the package + pub targets: Targets<PackageTarget>, +} diff --git a/crates/pixi_manifest/src/manifests/project.rs b/crates/pixi_manifest/src/manifests/project.rs index f3e111305..f142ad5b2 100644 --- a/crates/pixi_manifest/src/manifests/project.rs +++ b/crates/pixi_manifest/src/manifests/project.rs @@ -1,13 +1,8 @@ use std::fmt::{self, Display, Formatter}; -use pixi_spec::PixiSpec; -use rattler_conda_types::{PackageName, Platform}; -use toml_edit::{value, Array, Item, Table, Value}; +use rattler_conda_types::Platform; -use super::TomlManifest; -use crate::PypiDependencyLocation; -use crate::{consts, error::TomlError, pypi::PyPiPackageName, PyPiRequirement}; -use crate::{consts::PYPROJECT_PIXI_PREFIX, FeatureName, SpecType, Task}; +use crate::FeatureName; /// Struct that is used to access a table in `pixi.toml` or `pyproject.toml`. pub struct TableName<'a> { @@ -60,8 +55,8 @@ impl<'a> TableName<'a> { } impl TableName<'_> { - /// Returns the name of the table in dotted form (e.g. `table1.table2.array`). - /// It is composed of + /// Returns the name of the table in dotted form (e.g. + /// `table1.table2.array`). It is composed of /// - the 'tool.pixi' prefix if the manifest is a 'pyproject.toml' file /// - the feature if it is not the default feature /// - the platform if it is not `None` @@ -97,394 +92,13 @@ impl TableName<'_> { } } -/// Discriminates between a 'pixi.toml' and a 'pyproject.toml' manifest -#[derive(Debug, Clone)] -pub enum ManifestSource { - PyProjectToml(TomlManifest), - PixiToml(TomlManifest), -} - -impl fmt::Display for ManifestSource { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self { - ManifestSource::PyProjectToml(document) => write!(f, "{}", document.0), - ManifestSource::PixiToml(document) => write!(f, "{}", document.0), - } - } -} - -impl ManifestSource { - /// Returns a new empty pixi manifest. - #[cfg(test)] - fn empty_pixi() -> Self { - ManifestSource::PixiToml(TomlManifest::default()) - } - - /// Returns a new empty pyproject manifest. - #[cfg(test)] - fn empty_pyproject() -> Self { - ManifestSource::PyProjectToml(TomlManifest::default()) - } - - /// Returns the file name of the manifest - #[cfg(test)] - fn file_name(&self) -> &'static str { - match self { - ManifestSource::PyProjectToml(_) => "pyproject.toml", - ManifestSource::PixiToml(_) => "pixi.toml", - } - } - - fn table_prefix(&self) -> Option<&'static str> { - match self { - ManifestSource::PyProjectToml(_) => Some(PYPROJECT_PIXI_PREFIX), - ManifestSource::PixiToml(_) => None, - } - } - - fn manifest(&mut self) -> &mut TomlManifest { - match self { - ManifestSource::PyProjectToml(document) => document, - ManifestSource::PixiToml(document) => document, - } - } - - /// Returns a mutable reference to the specified array either in project or - /// feature.
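Before the removed `get_array_mut` below, a toy re-implementation of the dotted-name composition `TableName` performs per the doc comment above; the `feature.<name>`/`target.<platform>` segment shapes follow pixi's manifest layout (for example `[feature.cuda.target.linux-64.dependencies]`), while the helper itself is a simplification:

```rust
fn table_name(
    prefix: Option<&str>,   // "tool.pixi" for pyproject.toml manifests
    feature: Option<&str>,  // None for the default feature
    platform: Option<&str>, // None for platform-independent tables
    table: &str,
) -> String {
    let mut parts = Vec::new();
    if let Some(prefix) = prefix {
        parts.push(prefix.to_string());
    }
    if let Some(feature) = feature {
        parts.push(format!("feature.{feature}"));
    }
    if let Some(platform) = platform {
        parts.push(format!("target.{platform}"));
    }
    parts.push(table.to_string());
    parts.join(".")
}

fn main() {
    assert_eq!(
        table_name(Some("tool.pixi"), Some("cuda"), Some("linux-64"), "dependencies"),
        "tool.pixi.feature.cuda.target.linux-64.dependencies"
    );
}
```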
- pub fn get_array_mut( - &mut self, - array_name: &str, - feature_name: &FeatureName, - ) -> Result<&mut Array, TomlError> { - let table = match feature_name { - FeatureName::Default => Some("project"), - FeatureName::Named(_) => None, - }; - - let table_name = TableName::new() - .with_prefix(self.table_prefix()) - .with_feature_name(Some(feature_name)) - .with_table(table); - - self.manifest() - .get_or_insert_toml_array(table_name.to_string().as_str(), array_name) - } - - fn as_table_mut(&mut self) -> &mut Table { - match self { - ManifestSource::PyProjectToml(document) => document.0.as_table_mut(), - ManifestSource::PixiToml(document) => document.0.as_table_mut(), - } - } - - /// Removes a pypi dependency from the TOML manifest from - /// native pyproject arrays and/or pixi tables as required - /// - /// It will be a no-op if the dependency is not found - pub fn remove_pypi_dependency( - &mut self, - dep: &PyPiPackageName, - platform: Option<Platform>, - feature_name: &FeatureName, - ) -> Result<(), TomlError> { - // For 'pyproject.toml' manifest, try and remove the dependency from native - // arrays - let remove_requirement = - |source: &mut ManifestSource, table, array_name| -> Result<(), TomlError> { - let array = source.manifest().get_toml_array(table, array_name)?; - if let Some(array) = array { - array.retain(|x| { - let req: pep508_rs::Requirement = x - .as_str() - .unwrap_or("") - .parse() - .expect("should be a valid pep508 dependency"); - let name = PyPiPackageName::from_normalized(req.name); - name != *dep - }); - if array.is_empty() { - source - .manifest() - .get_or_insert_nested_table(table)? - .remove(array_name); - } - } - Ok(()) - }; - - match self { - ManifestSource::PyProjectToml(_) if feature_name.is_default() => { - remove_requirement(self, "project", "dependencies")?; - } - ManifestSource::PyProjectToml(_) => { - let name = feature_name.to_string(); - remove_requirement(self, "project.optional-dependencies", &name)?; - remove_requirement(self, "dependency-groups", &name)?; - } - _ => (), - }; - - // For both 'pyproject.toml' and 'pixi.toml' manifest, - // try and remove the dependency from pixi native tables - let table_name = TableName::new() - .with_prefix(self.table_prefix()) - .with_feature_name(Some(feature_name)) - .with_platform(platform.as_ref()) - .with_table(Some(consts::PYPI_DEPENDENCIES)); - - self.manifest() - .get_or_insert_nested_table(table_name.to_string().as_str()) - .map(|t| t.remove(dep.as_source()))?; - Ok(()) - } - - /// Removes a conda or pypi dependency from the TOML manifest's pixi table - /// for either a 'pyproject.toml' and 'pixi.toml' - /// - /// It will be a no-op if the dependency is not found - pub fn remove_dependency( - &mut self, - dep: &PackageName, - spec_type: SpecType, - platform: Option<Platform>, - feature_name: &FeatureName, - ) -> Result<(), TomlError> { - let table_name = TableName::new() - .with_prefix(self.table_prefix()) - .with_feature_name(Some(feature_name)) - .with_platform(platform.as_ref()) - .with_table(Some(spec_type.name())); - - self.manifest() - .get_or_insert_nested_table(table_name.to_string().as_str()) - .map(|t| t.remove(dep.as_source()))?; - Ok(()) - } - - /// Adds a conda dependency to the TOML manifest - /// - /// If a dependency with the same name already exists, it will be replaced.
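Before `add_dependency` below: the `retain` call in `remove_pypi_dependency` above filters a PEP 621 dependency array in place. A compact sketch of that step against a made-up `pyproject.toml` snippet, matching by name prefix rather than full PEP 508 parsing:

```rust
use toml_edit::DocumentMut;

fn main() {
    let mut doc: DocumentMut =
        "[project]\ndependencies = [\"numpy>=1.21\", \"requests\"]\n"
            .parse()
            .unwrap();

    if let Some(array) = doc["project"]["dependencies"].as_array_mut() {
        // Drop every requirement whose name starts with `numpy`.
        array.retain(|v| !v.as_str().unwrap_or("").starts_with("numpy"));
    }

    let rendered = doc.to_string();
    assert!(rendered.contains("requests") && !rendered.contains("numpy"));
}
```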
- pub fn add_dependency( - &mut self, - name: &PackageName, - spec: &PixiSpec, - spec_type: SpecType, - platform: Option<Platform>, - feature_name: &FeatureName, - ) -> Result<(), TomlError> { - // let dependency_table = - // self.get_or_insert_toml_table(platform, feature_name, spec_type.name())?; - - let dependency_table = TableName::new() - .with_prefix(self.table_prefix()) - .with_platform(platform.as_ref()) - .with_feature_name(Some(feature_name)) - .with_table(Some(spec_type.name())); - - self.manifest() - .get_or_insert_nested_table(dependency_table.to_string().as_str()) - .map(|t| t.insert(name.as_normalized(), Item::Value(spec.to_toml_value())))?; - - // dependency_table.insert(name.as_normalized(), Item::Value(spec.to_toml_value())); - Ok(()) - } - - /// Adds a pypi dependency to the TOML manifest - /// - /// If a pypi dependency with the same name already exists, it will be - /// replaced. - pub fn add_pypi_dependency( - &mut self, - requirement: &pep508_rs::Requirement, - platform: Option<Platform>, - feature_name: &FeatureName, - editable: Option<bool>, - location: &Option<PypiDependencyLocation>, - ) -> Result<(), TomlError> { - // Pypi dependencies can be stored in different places in pyproject.toml manifests - // so we remove any potential dependency of the same name before adding it back - if matches!(self, ManifestSource::PyProjectToml(_)) { - self.remove_pypi_dependency( - &PyPiPackageName::from_normalized(requirement.name.clone()), - platform, - feature_name, - )?; - } - - // The '[pypi-dependencies]' or '[tool.pixi.pypi-dependencies]' table is selected - // - For 'pixi.toml' manifests where it is the only choice - // - When explicitly requested - // - When a specific platform is requested, as markers are not supported (https://github.com/prefix-dev/pixi/issues/2149) - // - When an editable install is requested - if matches!(self, ManifestSource::PixiToml(_)) - || matches!(location, Some(PypiDependencyLocation::Pixi)) - || platform.is_some() - || editable.is_some_and(|e| e) - { - let mut pypi_requirement = - PyPiRequirement::try_from(requirement.clone()).map_err(Box::new)?; - if let Some(editable) = editable { - pypi_requirement.set_editable(editable); - } - - let dependency_table = TableName::new() - .with_prefix(self.table_prefix()) - .with_platform(platform.as_ref()) - .with_feature_name(Some(feature_name)) - .with_table(Some(consts::PYPI_DEPENDENCIES)); - - self.manifest() - .get_or_insert_nested_table(dependency_table.to_string().as_str())? - .insert( - requirement.name.as_ref(), - Item::Value(pypi_requirement.into()), - ); - return Ok(()); - } - - // Otherwise: - // - the [project.dependencies] array is selected for the default feature - // - the [dependency-groups.feature_name] array is selected unless - // - optional-dependencies is explicitly requested as location - let add_requirement = - |source: &mut ManifestSource, table, array| -> Result<(), TomlError> { - source - .manifest() - .get_or_insert_toml_array(table, array)? - .push(requirement.to_string()); - Ok(()) - }; - if feature_name.is_default() { - add_requirement(self, "project", "dependencies")? - } else if matches!(location, Some(PypiDependencyLocation::OptionalDependencies)) { - add_requirement( - self, - "project.optional-dependencies", - &feature_name.to_string(), - )? - } else { - add_requirement(self, "dependency-groups", &feature_name.to_string())?
- } - Ok(()) - } - - /// Removes a task from the TOML manifest - pub fn remove_task( - &mut self, - name: &str, - platform: Option<Platform>, - feature_name: &FeatureName, - ) -> Result<(), TomlError> { - // Get the task table either from the target platform or the default tasks. - // If it does not exist in TOML, consider this ok as we want to remove it - // anyways - let task_table = TableName::new() - .with_prefix(self.table_prefix()) - .with_platform(platform.as_ref()) - .with_feature_name(Some(feature_name)) - .with_table(Some("tasks")); - - self.manifest() - .get_or_insert_nested_table(task_table.to_string().as_str())? - .remove(name); - - Ok(()) - } - - /// Adds a task to the TOML manifest - pub fn add_task( - &mut self, - name: &str, - task: Task, - platform: Option<Platform>, - feature_name: &FeatureName, - ) -> Result<(), TomlError> { - // Get the task table either from the target platform or the default tasks. - let task_table = TableName::new() - .with_prefix(self.table_prefix()) - .with_platform(platform.as_ref()) - .with_feature_name(Some(feature_name)) - .with_table(Some("tasks")); - - self.manifest() - .get_or_insert_nested_table(task_table.to_string().as_str())? - .insert(name, task.into()); - - Ok(()) - } - - /// Adds an environment to the manifest - pub fn add_environment( - &mut self, - name: impl Into<String>, - features: Option<Vec<String>>, - solve_group: Option<String>, - no_default_features: bool, - ) -> Result<(), TomlError> { - // Construct the TOML item - let item = if solve_group.is_some() || no_default_features { - let mut table = toml_edit::InlineTable::new(); - if let Some(features) = features { - table.insert("features", Array::from_iter(features).into()); - } - if let Some(solve_group) = solve_group { - table.insert("solve-group", solve_group.into()); - } - if no_default_features { - table.insert("no-default-feature", true.into()); - } - Item::Value(table.into()) - } else { - Item::Value(Value::Array(Array::from_iter( - features.into_iter().flatten(), - ))) - }; - - let env_table = TableName::new() - .with_prefix(self.table_prefix()) - .with_feature_name(Some(&FeatureName::Default)) - .with_table(Some("environments")); - - // Get the environment table - self.manifest() - .get_or_insert_nested_table(env_table.to_string().as_str())? - .insert(&name.into(), item); - - Ok(()) - } - - /// Removes an environment from the manifest. Returns `true` if the - /// environment was removed. - pub fn remove_environment(&mut self, name: &str) -> Result<bool, TomlError> { - let env_table = TableName::new() - .with_prefix(self.table_prefix()) - .with_feature_name(Some(&FeatureName::Default)) - .with_table(Some("environments")); - - Ok(self - .manifest() - .get_or_insert_nested_table(env_table.to_string().as_str())?
- .remove(name) - .is_some()) - } - - /// Sets the description of the project - pub fn set_description(&mut self, description: &str) { - self.as_table_mut()["project"]["description"] = value(description); - } - - /// Sets the version of the project - pub fn set_version(&mut self, version: &str) { - self.as_table_mut()["project"]["version"] = value(version); - } -} - #[cfg(test)] mod tests { use insta::assert_snapshot; + use pixi_spec::PixiSpec; use rattler_conda_types::{MatchSpec, ParseStrictness::Strict}; - use rstest::rstest; + use toml_edit::Item; use super::*; @@ -555,67 +169,4 @@ mod tests { .to_string() ); } - - #[rstest] - #[case::pixi_toml(ManifestSource::empty_pixi())] - #[case::pyproject_toml(ManifestSource::empty_pyproject())] - fn test_add_environment(#[case] mut source: ManifestSource) { - source - .add_environment("foo", Some(vec![]), None, false) - .unwrap(); - source - .add_environment("bar", Some(vec![String::from("default")]), None, false) - .unwrap(); - source - .add_environment( - "baz", - Some(vec![String::from("default")]), - Some(String::from("group1")), - false, - ) - .unwrap(); - source - .add_environment( - "foobar", - Some(vec![String::from("default")]), - Some(String::from("group1")), - true, - ) - .unwrap(); - source - .add_environment("barfoo", Some(vec![String::from("default")]), None, true) - .unwrap(); - - // Overwrite - source - .add_environment("bar", Some(vec![String::from("not-default")]), None, false) - .unwrap(); - - assert_snapshot!( - format!("test_add_environment_{}", source.file_name()), - source.to_string() - ); - } - - #[rstest] - #[case::pixi_toml(ManifestSource::empty_pixi())] - #[case::pyproject_toml(ManifestSource::empty_pyproject())] - fn test_remove_environment(#[case] mut source: ManifestSource) { - source - .add_environment("foo", Some(vec![String::from("default")]), None, false) - .unwrap(); - source - .add_environment("bar", Some(vec![String::from("default")]), None, false) - .unwrap(); - assert!(!source.remove_environment("default").unwrap()); - source - .add_environment("default", Some(vec![String::from("default")]), None, false) - .unwrap(); - assert!(source.remove_environment("default").unwrap()); - assert!(source.remove_environment("foo").unwrap()); - assert_snapshot!( - format!("test_remove_environment_{}", source.file_name()), - source.to_string() - ); - } } diff --git a/crates/pixi_manifest/src/manifests/snapshots/pixi_manifest__manifests__project__tests__test_add_environment_pixi.toml.snap b/crates/pixi_manifest/src/manifests/snapshots/pixi_manifest__manifests__source__test__test_add_environment_pixi.toml.snap similarity index 84% rename from crates/pixi_manifest/src/manifests/snapshots/pixi_manifest__manifests__project__tests__test_add_environment_pixi.toml.snap rename to crates/pixi_manifest/src/manifests/snapshots/pixi_manifest__manifests__source__test__test_add_environment_pixi.toml.snap index de74f69c1..9cc1524a7 100644 --- a/crates/pixi_manifest/src/manifests/snapshots/pixi_manifest__manifests__project__tests__test_add_environment_pixi.toml.snap +++ b/crates/pixi_manifest/src/manifests/snapshots/pixi_manifest__manifests__source__test__test_add_environment_pixi.toml.snap @@ -1,5 +1,5 @@ --- -source: crates/pixi_manifest/src/manifests/project.rs +source: crates/pixi_manifest/src/manifests/source.rs expression: source.to_string() --- [environments] diff --git a/crates/pixi_manifest/src/manifests/snapshots/pixi_manifest__manifests__project__tests__test_add_environment_pyproject.toml.snap 
b/crates/pixi_manifest/src/manifests/snapshots/pixi_manifest__manifests__source__test__test_add_environment_pyproject.toml.snap similarity index 84% rename from crates/pixi_manifest/src/manifests/snapshots/pixi_manifest__manifests__project__tests__test_add_environment_pyproject.toml.snap rename to crates/pixi_manifest/src/manifests/snapshots/pixi_manifest__manifests__source__test__test_add_environment_pyproject.toml.snap index 2f7e10267..155f0cf96 100644 --- a/crates/pixi_manifest/src/manifests/snapshots/pixi_manifest__manifests__project__tests__test_add_environment_pyproject.toml.snap +++ b/crates/pixi_manifest/src/manifests/snapshots/pixi_manifest__manifests__source__test__test_add_environment_pyproject.toml.snap @@ -1,5 +1,5 @@ --- -source: crates/pixi_manifest/src/manifests/project.rs +source: crates/pixi_manifest/src/manifests/source.rs expression: source.to_string() --- [tool.pixi.environments] diff --git a/crates/pixi_manifest/src/manifests/snapshots/pixi_manifest__manifests__project__tests__test_remove_environment_pixi.toml.snap b/crates/pixi_manifest/src/manifests/snapshots/pixi_manifest__manifests__source__test__test_remove_environment_pixi.toml.snap similarity index 57% rename from crates/pixi_manifest/src/manifests/snapshots/pixi_manifest__manifests__project__tests__test_remove_environment_pixi.toml.snap rename to crates/pixi_manifest/src/manifests/snapshots/pixi_manifest__manifests__source__test__test_remove_environment_pixi.toml.snap index 1aab14d74..3781fbfd5 100644 --- a/crates/pixi_manifest/src/manifests/snapshots/pixi_manifest__manifests__project__tests__test_remove_environment_pixi.toml.snap +++ b/crates/pixi_manifest/src/manifests/snapshots/pixi_manifest__manifests__source__test__test_remove_environment_pixi.toml.snap @@ -1,5 +1,5 @@ --- -source: crates/pixi_manifest/src/manifests/project.rs +source: crates/pixi_manifest/src/manifests/source.rs expression: source.to_string() --- [environments] diff --git a/crates/pixi_manifest/src/manifests/snapshots/pixi_manifest__manifests__project__tests__test_remove_environment_pyproject.toml.snap b/crates/pixi_manifest/src/manifests/snapshots/pixi_manifest__manifests__source__test__test_remove_environment_pyproject.toml.snap similarity index 60% rename from crates/pixi_manifest/src/manifests/snapshots/pixi_manifest__manifests__project__tests__test_remove_environment_pyproject.toml.snap rename to crates/pixi_manifest/src/manifests/snapshots/pixi_manifest__manifests__source__test__test_remove_environment_pyproject.toml.snap index 230896be9..6ed1d2394 100644 --- a/crates/pixi_manifest/src/manifests/snapshots/pixi_manifest__manifests__project__tests__test_remove_environment_pyproject.toml.snap +++ b/crates/pixi_manifest/src/manifests/snapshots/pixi_manifest__manifests__source__test__test_remove_environment_pyproject.toml.snap @@ -1,5 +1,5 @@ --- -source: crates/pixi_manifest/src/manifests/project.rs +source: crates/pixi_manifest/src/manifests/source.rs expression: source.to_string() --- [tool.pixi.environments] diff --git a/crates/pixi_manifest/src/manifests/snapshots/pixi_manifest__manifests__workspace__tests__build_invalid_matchspec.snap b/crates/pixi_manifest/src/manifests/snapshots/pixi_manifest__manifests__workspace__tests__build_invalid_matchspec.snap new file mode 100644 index 000000000..49b456e6e --- /dev/null +++ b/crates/pixi_manifest/src/manifests/snapshots/pixi_manifest__manifests__workspace__tests__build_invalid_matchspec.snap @@ -0,0 +1,5 @@ +--- +source: crates/pixi_manifest/src/manifests/workspace.rs +expression: 
err.unwrap().to_string() +--- +"TOML parse error at line 8, column 25\n |\n8 | dependencies = [\"python-build-backend > > 12\"]\n | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\nunable to parse version spec: > > 12\n" diff --git a/crates/pixi_manifest/src/manifests/snapshots/pixi_manifest__manifests__workspace__tests__build_section_deserialization.snap b/crates/pixi_manifest/src/manifests/snapshots/pixi_manifest__manifests__workspace__tests__build_section_deserialization.snap new file mode 100644 index 000000000..f55db85c1 --- /dev/null +++ b/crates/pixi_manifest/src/manifests/snapshots/pixi_manifest__manifests__workspace__tests__build_section_deserialization.snap @@ -0,0 +1,8 @@ +--- +source: crates/pixi_manifest/src/manifests/workspace.rs +expression: manifest.build.clone().unwrap() +--- +dependencies: + - python-build-backend >12 +build-backend: python-build-backend +channels: [] diff --git a/crates/pixi_manifest/src/manifests/snapshots/pixi_manifest__manifests__workspace__tests__invalid_key.snap b/crates/pixi_manifest/src/manifests/snapshots/pixi_manifest__manifests__workspace__tests__invalid_key.snap new file mode 100644 index 000000000..38f7bb4ff --- /dev/null +++ b/crates/pixi_manifest/src/manifests/snapshots/pixi_manifest__manifests__workspace__tests__invalid_key.snap @@ -0,0 +1,7 @@ +--- +source: crates/pixi_manifest/src/manifests/workspace.rs +expression: "examples.into_iter().map(|example|\nWorkspaceManifest::from_toml_str(&example).unwrap_err().to_string()).collect::>().join(\"\\n\")" +--- +unknown field `foobar`, expected one of `project`, `workspace`, `package`, `system-requirements`, `target`, `dependencies`, `host-dependencies`, `build-dependencies`, `pypi-dependencies`, `activation`, `tasks`, `feature`, `environments`, `pypi-options`, `build-system`, `$schema`, `tool` +unknown field `hostdependencies`, expected one of `dependencies`, `host-dependencies`, `build-dependencies`, `pypi-dependencies`, `activation`, `tasks` +Failed to parse environment name 'INVALID', please use only lowercase letters, numbers and dashes diff --git a/crates/pixi_manifest/src/manifests/snapshots/pixi_manifest__manifests__workspace__tests__invalid_key@environment.snap b/crates/pixi_manifest/src/manifests/snapshots/pixi_manifest__manifests__workspace__tests__invalid_key@environment.snap new file mode 100644 index 000000000..9c1d96c03 --- /dev/null +++ b/crates/pixi_manifest/src/manifests/snapshots/pixi_manifest__manifests__workspace__tests__invalid_key@environment.snap @@ -0,0 +1,10 @@ +--- +source: crates/pixi_manifest/src/manifests/workspace.rs +expression: "expect_parse_failure(&format!(\"{PROJECT_BOILERPLATE}\\n[environments.INVALID]\"))" +--- + × Failed to parse environment name 'INVALID', please use only lowercase letters, numbers and dashes + ╭─[pixi.toml:8:15] + 7 │ + 8 │ [environments.INVALID] + · ─────── + ╰──── diff --git a/crates/pixi_manifest/src/manifests/snapshots/pixi_manifest__manifests__workspace__tests__invalid_key@foobar.snap b/crates/pixi_manifest/src/manifests/snapshots/pixi_manifest__manifests__workspace__tests__invalid_key@foobar.snap new file mode 100644 index 000000000..aa67cbf37 --- /dev/null +++ b/crates/pixi_manifest/src/manifests/snapshots/pixi_manifest__manifests__workspace__tests__invalid_key@foobar.snap @@ -0,0 +1,11 @@ +--- +source: crates/pixi_manifest/src/manifests/workspace.rs +expression: "expect_parse_failure(&format!(\"{PROJECT_BOILERPLATE}\\n[foobar]\"))" +--- + × unknown field `foobar`, expected one of `project`, `workspace`, `package`, `system-requirements`, `target`, 
`dependencies`, `host-dependencies`, `build-dependencies`, `run-dependencies`, `pypi- + │ dependencies`, `activation`, `tasks`, `feature`, `environments`, `pypi-options`, `build-system`, `$schema`, `tool` + ╭─[pixi.toml:8:2] + 7 │ + 8 │ [foobar] + · ────── + ╰──── diff --git a/crates/pixi_manifest/src/manifests/snapshots/pixi_manifest__manifests__workspace__tests__invalid_key@hostdependencies.snap b/crates/pixi_manifest/src/manifests/snapshots/pixi_manifest__manifests__workspace__tests__invalid_key@hostdependencies.snap new file mode 100644 index 000000000..890014f33 --- /dev/null +++ b/crates/pixi_manifest/src/manifests/snapshots/pixi_manifest__manifests__workspace__tests__invalid_key@hostdependencies.snap @@ -0,0 +1,10 @@ +--- +source: crates/pixi_manifest/src/manifests/workspace.rs +expression: "expect_parse_failure(&format!(\"{PROJECT_BOILERPLATE}\\n[target.win-64.hostdependencies]\"))" +--- + × unknown field `hostdependencies`, expected one of `dependencies`, `host-dependencies`, `build-dependencies`, `run-dependencies`, `pypi-dependencies`, `activation`, `tasks` + ╭─[pixi.toml:8:16] + 7 │ + 8 │ [target.win-64.hostdependencies] + · ──────────────── + ╰──── diff --git a/crates/pixi_manifest/src/manifests/snapshots/pixi_manifest__manifests__workspace__tests__invalid_target_specific.snap b/crates/pixi_manifest/src/manifests/snapshots/pixi_manifest__manifests__workspace__tests__invalid_target_specific.snap new file mode 100644 index 000000000..ec6114206 --- /dev/null +++ b/crates/pixi_manifest/src/manifests/snapshots/pixi_manifest__manifests__workspace__tests__invalid_target_specific.snap @@ -0,0 +1,12 @@ +--- +source: crates/pixi_manifest/src/manifests/workspace.rs +expression: "expect_parse_failure(&format!(\"{PROJECT_BOILERPLATE}\\n{}\", examples[0]))" +--- + × 'foobar' is not a known platform. 
Valid platforms are 'noarch', 'unknown', 'linux-32', 'linux-64', 'linux-aarch64', 'linux-armv6l', 'linux-armv7l', 'linux-ppc64le', 'linux-ppc64', 'linux-s390x', + │ 'linux-riscv32', 'linux-riscv64', 'osx-64', 'osx-arm64', 'win-32', 'win-64', 'win-arm64', 'emscripten-wasm32', 'wasi-wasm32', 'zos-z' + ╭─[pixi.toml:8:9] + 7 │ + 8 │ [target.foobar.dependencies] + · ────── + 9 │ invalid_platform = "henk" + ╰──── diff --git a/crates/pixi_manifest/src/manifests/snapshots/pixi_manifest__manifests__workspace__tests__pypi_options_default_feature.snap b/crates/pixi_manifest/src/manifests/snapshots/pixi_manifest__manifests__workspace__tests__pypi_options_default_feature.snap new file mode 100644 index 000000000..92d731f8d --- /dev/null +++ b/crates/pixi_manifest/src/manifests/snapshots/pixi_manifest__manifests__workspace__tests__pypi_options_default_feature.snap @@ -0,0 +1,12 @@ +--- +source: crates/pixi_manifest/src/manifests/workspace.rs +expression: "toml_edit::de::from_str::(&contents).expect(\"parsing should succeed!\").workspace.pypi_options.clone().unwrap()" +--- +index-url: "https://pypi.org/simple" +extra-index-urls: + - "https://pypi.org/simple2" +find-links: + - path: "../foo" + - url: "https://example.com/bar" +no-build-isolation: ~ +index-strategy: ~ diff --git a/crates/pixi_manifest/src/snapshots/pixi_manifest__parsed_manifest__tests__pypy_options_project_and_default_feature.snap b/crates/pixi_manifest/src/manifests/snapshots/pixi_manifest__manifests__workspace__tests__pypy_options_project_and_default_feature.snap similarity index 51% rename from crates/pixi_manifest/src/snapshots/pixi_manifest__parsed_manifest__tests__pypy_options_project_and_default_feature.snap rename to crates/pixi_manifest/src/manifests/snapshots/pixi_manifest__manifests__workspace__tests__pypy_options_project_and_default_feature.snap index e58b050ec..3c897c775 100644 --- a/crates/pixi_manifest/src/snapshots/pixi_manifest__parsed_manifest__tests__pypy_options_project_and_default_feature.snap +++ b/crates/pixi_manifest/src/manifests/snapshots/pixi_manifest__manifests__workspace__tests__pypy_options_project_and_default_feature.snap @@ -1,6 +1,6 @@ --- -source: crates/pixi_manifest/src/parsed_manifest.rs -expression: manifest.project.pypi_options.clone().unwrap() +source: crates/pixi_manifest/src/manifests/workspace.rs +expression: manifest.workspace.pypi_options.clone().unwrap() --- index-url: ~ extra-index-urls: diff --git a/crates/pixi_manifest/src/manifests/snapshots/pixi_manifest__manifests__workspace__tests__python_dependencies.snap b/crates/pixi_manifest/src/manifests/snapshots/pixi_manifest__manifests__workspace__tests__python_dependencies.snap new file mode 100644 index 000000000..ca469b315 --- /dev/null +++ b/crates/pixi_manifest/src/manifests/snapshots/pixi_manifest__manifests__workspace__tests__python_dependencies.snap @@ -0,0 +1,6 @@ +--- +source: crates/pixi_manifest/src/manifests/workspace.rs +expression: "toml_edit::de::from_str::(&contents).expect(\"parsing should succeed!\").default_feature().targets.default().pypi_dependencies.clone().into_iter().flat_map(|d|\nd.into_iter()).map(|(name, spec)|\nformat!(\"{} = {}\", name.as_source(), toml_edit::Value::from(spec))).join(\"\\n\")" +--- +foo = ">=3.12" +bar = { version = ">=3.12", extras = ["baz"] } diff --git a/crates/pixi_manifest/src/manifests/snapshots/pixi_manifest__manifests__workspace__tests__target_specific_tasks.snap b/crates/pixi_manifest/src/manifests/snapshots/pixi_manifest__manifests__workspace__tests__target_specific_tasks.snap new file 
mode 100644 index 000000000..00f184498 --- /dev/null +++ b/crates/pixi_manifest/src/manifests/snapshots/pixi_manifest__manifests__workspace__tests__target_specific_tasks.snap @@ -0,0 +1,7 @@ +--- +source: crates/pixi_manifest/src/manifests/workspace.rs +expression: "manifest.default_feature().targets.iter().flat_map(|(target, selector)|\n{\n let selector_name =\n selector.map_or_else(|| String::from(\"default\"), ToString::to_string);\n target.tasks.iter().filter_map(move |(name, task)|\n {\n Some(format!(\"{}/{} = {}\", &selector_name, name.as_str(),\n task.as_single_command()?))\n })\n}).join(\"\\n\")" +--- +default/test = test multi +win-64/test = test win +linux-64/test = test linux diff --git a/crates/pixi_manifest/src/manifests/source.rs b/crates/pixi_manifest/src/manifests/source.rs new file mode 100644 index 000000000..123ffe12a --- /dev/null +++ b/crates/pixi_manifest/src/manifests/source.rs @@ -0,0 +1,467 @@ +use std::fmt; + +use pixi_consts::{consts, consts::PYPROJECT_PIXI_PREFIX}; +use pixi_spec::PixiSpec; +use rattler_conda_types::{PackageName, Platform}; +use toml_edit::{value, Array, Item, Table, Value}; + +use crate::toml::TomlDocument; +use crate::{ + manifests::project::TableName, pypi::PyPiPackageName, FeatureName, PyPiRequirement, + PypiDependencyLocation, SpecType, Task, TomlError, +}; + +/// Discriminates between a 'pixi.toml' and a 'pyproject.toml' manifest. +#[derive(Debug, Clone)] +pub enum ManifestSource { + PyProjectToml(TomlDocument), + PixiToml(TomlDocument), +} + +impl fmt::Display for ManifestSource { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + ManifestSource::PyProjectToml(document) => write!(f, "{}", document), + ManifestSource::PixiToml(document) => write!(f, "{}", document), + } + } +} + +impl ManifestSource { + /// Returns a new empty pixi manifest. + #[cfg(test)] + fn empty_pixi() -> Self { + ManifestSource::PixiToml(TomlDocument::default()) + } + + /// Returns a new empty pyproject manifest. + #[cfg(test)] + fn empty_pyproject() -> Self { + ManifestSource::PyProjectToml(TomlDocument::default()) + } + + /// Returns the file name of the manifest + #[cfg(test)] + fn file_name(&self) -> &'static str { + match self { + ManifestSource::PyProjectToml(_) => "pyproject.toml", + ManifestSource::PixiToml(_) => "pixi.toml", + } + } + + fn table_prefix(&self) -> Option<&'static str> { + match self { + ManifestSource::PyProjectToml(_) => Some(PYPROJECT_PIXI_PREFIX), + ManifestSource::PixiToml(_) => None, + } + } + + fn manifest(&mut self) -> &mut TomlDocument { + match self { + ManifestSource::PyProjectToml(document) => document, + ManifestSource::PixiToml(document) => document, + } + } + + /// Returns a mutable reference to the specified array either in project or + /// feature. 
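+    ///
+    /// A minimal usage sketch (hypothetical; assumes a `ManifestSource` value
+    /// `source` and that an array such as `channels` exists, or is created,
+    /// under the resolved table):
+    ///
+    /// ```ignore
+    /// let channels = source.get_array_mut("channels", &FeatureName::Default)?;
+    /// channels.push("conda-forge");
+    /// ```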
+    pub fn get_array_mut(
+        &mut self,
+        array_name: &str,
+        feature_name: &FeatureName,
+    ) -> Result<&mut Array, TomlError> {
+        let table = match feature_name {
+            FeatureName::Default => Some("project"),
+            FeatureName::Named(_) => None,
+        };
+
+        let table_name = TableName::new()
+            .with_prefix(self.table_prefix())
+            .with_feature_name(Some(feature_name))
+            .with_table(table);
+
+        self.manifest()
+            .get_or_insert_toml_array(table_name.to_string().as_str(), array_name)
+    }
+
+    fn as_table_mut(&mut self) -> &mut Table {
+        match self {
+            ManifestSource::PyProjectToml(document) => document.as_table_mut(),
+            ManifestSource::PixiToml(document) => document.as_table_mut(),
+        }
+    }
+
+    /// Removes a pypi dependency from the TOML manifest's native pyproject
+    /// arrays and/or pixi tables as required.
+    ///
+    /// It will be a no-op if the dependency is not found.
+    pub fn remove_pypi_dependency(
+        &mut self,
+        dep: &PyPiPackageName,
+        platform: Option<Platform>,
+        feature_name: &FeatureName,
+    ) -> Result<(), TomlError> {
+        // For 'pyproject.toml' manifests, try to remove the dependency from the
+        // native arrays
+        let remove_requirement =
+            |source: &mut ManifestSource, table, array_name| -> Result<(), TomlError> {
+                let array = source.manifest().get_toml_array(table, array_name)?;
+                if let Some(array) = array {
+                    array.retain(|x| {
+                        let req: pep508_rs::Requirement = x
+                            .as_str()
+                            .unwrap_or("")
+                            .parse()
+                            .expect("should be a valid pep508 dependency");
+                        let name = PyPiPackageName::from_normalized(req.name);
+                        name != *dep
+                    });
+                    if array.is_empty() {
+                        source
+                            .manifest()
+                            .get_or_insert_nested_table(table)?
+                            .remove(array_name);
+                    }
+                }
+                Ok(())
+            };
+
+        match self {
+            ManifestSource::PyProjectToml(_) if feature_name.is_default() => {
+                remove_requirement(self, "project", "dependencies")?;
+            }
+            ManifestSource::PyProjectToml(_) => {
+                let name = feature_name.to_string();
+                remove_requirement(self, "project.optional-dependencies", &name)?;
+                remove_requirement(self, "dependency-groups", &name)?;
+            }
+            _ => (),
+        };
+
+        // For both 'pyproject.toml' and 'pixi.toml' manifests,
+        // try to remove the dependency from the pixi native tables
+        let table_name = TableName::new()
+            .with_prefix(self.table_prefix())
+            .with_feature_name(Some(feature_name))
+            .with_platform(platform.as_ref())
+            .with_table(Some(consts::PYPI_DEPENDENCIES));
+
+        self.manifest()
+            .get_or_insert_nested_table(table_name.to_string().as_str())
+            .map(|t| t.remove(dep.as_source()))?;
+        Ok(())
+    }
+
+    /// Removes a conda or pypi dependency from the TOML manifest's pixi table
+    /// for either a 'pyproject.toml' or a 'pixi.toml' manifest.
+    ///
+    /// It will be a no-op if the dependency is not found.
+    pub fn remove_dependency(
+        &mut self,
+        dep: &PackageName,
+        spec_type: SpecType,
+        platform: Option<Platform>,
+        feature_name: &FeatureName,
+    ) -> Result<(), TomlError> {
+        let table_name = TableName::new()
+            .with_prefix(self.table_prefix())
+            .with_feature_name(Some(feature_name))
+            .with_platform(platform.as_ref())
+            .with_table(Some(spec_type.name()));
+
+        self.manifest()
+            .get_or_insert_nested_table(table_name.to_string().as_str())
+            .map(|t| t.remove(dep.as_source()))?;
+        Ok(())
+    }
+
+    /// Adds a conda dependency to the TOML manifest
+    ///
+    /// If a dependency with the same name already exists, it will be replaced.
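+    ///
+    /// A minimal usage sketch (hypothetical values; `source` is a
+    /// `ManifestSource` and `spec` a `PixiSpec` obtained elsewhere):
+    ///
+    /// ```ignore
+    /// let name = PackageName::from_str("python").unwrap();
+    /// source.add_dependency(&name, &spec, SpecType::Run, None, &FeatureName::Default)?;
+    /// ```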
+    pub fn add_dependency(
+        &mut self,
+        name: &PackageName,
+        spec: &PixiSpec,
+        spec_type: SpecType,
+        platform: Option<Platform>,
+        feature_name: &FeatureName,
+    ) -> Result<(), TomlError> {
+        let dependency_table = TableName::new()
+            .with_prefix(self.table_prefix())
+            .with_platform(platform.as_ref())
+            .with_feature_name(Some(feature_name))
+            .with_table(Some(spec_type.name()));
+
+        self.manifest()
+            .get_or_insert_nested_table(dependency_table.to_string().as_str())
+            .map(|t| t.insert(name.as_normalized(), Item::Value(spec.to_toml_value())))?;
+
+        Ok(())
+    }
+
+    /// Adds a pypi dependency to the TOML manifest
+    ///
+    /// If a pypi dependency with the same name already exists, it will be
+    /// replaced.
+    pub fn add_pypi_dependency(
+        &mut self,
+        requirement: &pep508_rs::Requirement,
+        platform: Option<Platform>,
+        feature_name: &FeatureName,
+        editable: Option<bool>,
+        location: &Option<PypiDependencyLocation>,
+    ) -> Result<(), TomlError> {
+        // Pypi dependencies can be stored in different places in pyproject.toml
+        // manifests, so we remove any potential dependency of the same name
+        // before adding it back.
+        if matches!(self, ManifestSource::PyProjectToml(_)) {
+            self.remove_pypi_dependency(
+                &PyPiPackageName::from_normalized(requirement.name.clone()),
+                platform,
+                feature_name,
+            )?;
+        }
+
+        // The '[pypi-dependencies]' or '[tool.pixi.pypi-dependencies]' table is
+        // selected
+        // - For 'pixi.toml' manifests, where it is the only choice
+        // - When explicitly requested
+        // - When a specific platform is requested, as markers are not supported (https://github.com/prefix-dev/pixi/issues/2149)
+        // - When an editable install is requested
+        if matches!(self, ManifestSource::PixiToml(_))
+            || matches!(location, Some(PypiDependencyLocation::Pixi))
+            || platform.is_some()
+            || editable.is_some_and(|e| e)
+        {
+            let mut pypi_requirement =
+                PyPiRequirement::try_from(requirement.clone()).map_err(Box::new)?;
+            if let Some(editable) = editable {
+                pypi_requirement.set_editable(editable);
+            }
+
+            let dependency_table = TableName::new()
+                .with_prefix(self.table_prefix())
+                .with_platform(platform.as_ref())
+                .with_feature_name(Some(feature_name))
+                .with_table(Some(consts::PYPI_DEPENDENCIES));
+
+            self.manifest()
+                .get_or_insert_nested_table(dependency_table.to_string().as_str())?
+                .insert(
+                    requirement.name.as_ref(),
+                    Item::Value(pypi_requirement.into()),
+                );
+            return Ok(());
+        }
+
+        // Otherwise:
+        // - the [project.dependencies] array is selected for the default feature
+        // - the [dependency-groups.feature_name] array is selected, unless
+        //   optional-dependencies is explicitly requested as the location
+        let add_requirement =
+            |source: &mut ManifestSource, table, array| -> Result<(), TomlError> {
+                source
+                    .manifest()
+                    .get_or_insert_toml_array(table, array)?
+                    .push(requirement.to_string());
+                Ok(())
+            };
+        if feature_name.is_default() {
+            add_requirement(self, "project", "dependencies")?
+        } else if matches!(location, Some(PypiDependencyLocation::OptionalDependencies)) {
+            add_requirement(
+                self,
+                "project.optional-dependencies",
+                &feature_name.to_string(),
+            )?
+        } else {
+            add_requirement(self, "dependency-groups", &feature_name.to_string())?
+        }
+        Ok(())
+    }
+
+    /// Removes a task from the TOML manifest
+    pub fn remove_task(
+        &mut self,
+        name: &str,
+        platform: Option<Platform>,
+        feature_name: &FeatureName,
+    ) -> Result<(), TomlError> {
+        // Get the task table either from the target platform or the default tasks.
+        // If it does not exist in TOML, consider this ok as we want to remove it
+        // anyway
+        let task_table = TableName::new()
+            .with_prefix(self.table_prefix())
+            .with_platform(platform.as_ref())
+            .with_feature_name(Some(feature_name))
+            .with_table(Some("tasks"));
+
+        self.manifest()
+            .get_or_insert_nested_table(task_table.to_string().as_str())?
+            .remove(name);
+
+        Ok(())
+    }
+
+    /// Adds a task to the TOML manifest
+    pub fn add_task(
+        &mut self,
+        name: &str,
+        task: Task,
+        platform: Option<Platform>,
+        feature_name: &FeatureName,
+    ) -> Result<(), TomlError> {
+        // Get the task table either from the target platform or the default tasks.
+        let task_table = TableName::new()
+            .with_prefix(self.table_prefix())
+            .with_platform(platform.as_ref())
+            .with_feature_name(Some(feature_name))
+            .with_table(Some("tasks"));
+
+        self.manifest()
+            .get_or_insert_nested_table(task_table.to_string().as_str())?
+            .insert(name, task.into());
+
+        Ok(())
+    }
+
+    /// Adds an environment to the manifest
+    pub fn add_environment(
+        &mut self,
+        name: impl Into<String>,
+        features: Option<Vec<String>>,
+        solve_group: Option<String>,
+        no_default_features: bool,
+    ) -> Result<(), TomlError> {
+        // Construct the TOML item
+        let item = if solve_group.is_some() || no_default_features {
+            let mut table = toml_edit::InlineTable::new();
+            if let Some(features) = features {
+                table.insert("features", Array::from_iter(features).into());
+            }
+            if let Some(solve_group) = solve_group {
+                table.insert("solve-group", solve_group.into());
+            }
+            if no_default_features {
+                table.insert("no-default-feature", true.into());
+            }
+            Item::Value(table.into())
+        } else {
+            Item::Value(Value::Array(Array::from_iter(
+                features.into_iter().flatten(),
+            )))
+        };
+
+        let env_table = TableName::new()
+            .with_prefix(self.table_prefix())
+            .with_feature_name(Some(&FeatureName::Default))
+            .with_table(Some("environments"));
+
+        // Get the environment table
+        self.manifest()
+            .get_or_insert_nested_table(env_table.to_string().as_str())?
+            .insert(&name.into(), item);
+
+        Ok(())
+    }
+
+    /// Removes an environment from the manifest. Returns `true` if the
+    /// environment was removed.
+    pub fn remove_environment(&mut self, name: &str) -> Result<bool, TomlError> {
+        let env_table = TableName::new()
+            .with_prefix(self.table_prefix())
+            .with_feature_name(Some(&FeatureName::Default))
+            .with_table(Some("environments"));
+
+        Ok(self
+            .manifest()
+            .get_or_insert_nested_table(env_table.to_string().as_str())?
+ .remove(name) + .is_some()) + } + + /// Sets the description of the project + pub fn set_description(&mut self, description: &str) { + self.as_table_mut()["project"]["description"] = value(description); + } + + /// Sets the version of the project + pub fn set_version(&mut self, version: &str) { + self.as_table_mut()["project"]["version"] = value(version); + } +} + +#[cfg(test)] +mod test { + use super::*; + use insta::assert_snapshot; + use rstest::rstest; + + #[rstest] + #[case::pixi_toml(ManifestSource::empty_pixi())] + #[case::pyproject_toml(ManifestSource::empty_pyproject())] + fn test_add_environment(#[case] mut source: ManifestSource) { + source + .add_environment("foo", Some(vec![]), None, false) + .unwrap(); + source + .add_environment("bar", Some(vec![String::from("default")]), None, false) + .unwrap(); + source + .add_environment( + "baz", + Some(vec![String::from("default")]), + Some(String::from("group1")), + false, + ) + .unwrap(); + source + .add_environment( + "foobar", + Some(vec![String::from("default")]), + Some(String::from("group1")), + true, + ) + .unwrap(); + source + .add_environment("barfoo", Some(vec![String::from("default")]), None, true) + .unwrap(); + + // Overwrite + source + .add_environment("bar", Some(vec![String::from("not-default")]), None, false) + .unwrap(); + + assert_snapshot!( + format!("test_add_environment_{}", source.file_name()), + source.to_string() + ); + } + + #[rstest] + #[case::pixi_toml(ManifestSource::empty_pixi())] + #[case::pyproject_toml(ManifestSource::empty_pyproject())] + fn test_remove_environment(#[case] mut source: ManifestSource) { + source + .add_environment("foo", Some(vec![String::from("default")]), None, false) + .unwrap(); + source + .add_environment("bar", Some(vec![String::from("default")]), None, false) + .unwrap(); + assert!(!source.remove_environment("default").unwrap()); + source + .add_environment("default", Some(vec![String::from("default")]), None, false) + .unwrap(); + assert!(source.remove_environment("default").unwrap()); + assert!(source.remove_environment("foo").unwrap()); + assert_snapshot!( + format!("test_remove_environment_{}", source.file_name()), + source.to_string() + ); + } +} diff --git a/crates/pixi_manifest/src/parsed_manifest.rs b/crates/pixi_manifest/src/manifests/workspace.rs similarity index 50% rename from crates/pixi_manifest/src/parsed_manifest.rs rename to crates/pixi_manifest/src/manifests/workspace.rs index 70b01765b..e4faef076 100644 --- a/crates/pixi_manifest/src/parsed_manifest.rs +++ b/crates/pixi_manifest/src/manifests/workspace.rs @@ -1,37 +1,24 @@ -use std::{collections::HashMap, fmt, hash::Hash, iter::FromIterator, marker::PhantomData}; +use std::hash::Hash; use indexmap::{map::IndexMap, Equivalent}; -use pixi_spec::PixiSpec; -use rattler_conda_types::PackageName; -use serde::de::{Deserialize, DeserializeSeed, Deserializer, MapAccess, Visitor}; -use serde_with::{serde_as, serde_derive::Deserialize}; -use toml_edit::DocumentMut; use crate::{ - activation::Activation, consts, - environment::{Environment, EnvironmentIdx, EnvironmentName, TomlEnvironmentMapOrSeq}, + environment::{Environment, EnvironmentName}, environments::Environments, - error::TomlError, feature::{Feature, FeatureName}, - metadata::ProjectMetadata, - pypi::{ - pypi_options::PypiOptions, pypi_requirement::PyPiRequirement, - pypi_requirement_types::PyPiPackageName, - }, solve_group::SolveGroups, - spec_type::SpecType, - system_requirements::SystemRequirements, - target::{Target, TargetSelector, Targets}, - 
task::{Task, TaskName}, - utils::PixiSpanned, + toml::ExternalWorkspaceProperties, + workspace::Workspace, + TomlError, }; -/// Describes the contents of a parsed project manifest. +/// Holds the parsed content of the workspace part of a pixi manifest. This +/// describes the part related to the workspace only. #[derive(Debug, Clone)] -pub struct ParsedManifest { +pub struct WorkspaceManifest { /// Information about the project - pub project: ProjectMetadata, + pub workspace: Workspace, /// All the features defined in the project. pub features: IndexMap, @@ -43,18 +30,13 @@ pub struct ParsedManifest { pub solve_groups: SolveGroups, } -impl ParsedManifest { - /// Parses a toml string into a project manifest. - pub fn from_toml_str(source: &str) -> Result { - let manifest: ParsedManifest = toml_edit::de::from_str(source).map_err(TomlError::from)?; - - // Make sure project.name is defined - if manifest.project.name.is_none() { - let span = source.parse::().map_err(TomlError::from)?["project"].span(); - return Err(TomlError::NoProjectName(span)); - } - - Ok(manifest) +impl WorkspaceManifest { + /// Parses a TOML string into a `WorkspaceManifest`. + pub fn from_toml_str(toml_str: &str) -> Result { + let manifest = crate::toml::TomlManifest::from_toml_str(toml_str)?; + Ok(manifest + .into_manifests(ExternalWorkspaceProperties::default())? + .0) } /// Returns the default feature. @@ -98,246 +80,13 @@ impl ParsedManifest { } } -impl<'de> Deserialize<'de> for ParsedManifest { - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - #[serde_as] - #[derive(Deserialize)] - #[serde(deny_unknown_fields, rename_all = "kebab-case")] - pub struct TomlProjectManifest { - project: ProjectMetadata, - #[serde(default)] - system_requirements: SystemRequirements, - #[serde(default)] - target: IndexMap, Target>, - - // HACK: If we use `flatten`, unknown keys will point to the wrong location in the - // file. When https://github.com/toml-rs/toml/issues/589 is fixed we should use that - // - // Instead we currently copy the keys from the Target deserialize implementation which - // is really ugly. - // - // #[serde(flatten)] - // default_target: Target, - #[serde(default, deserialize_with = "deserialize_package_map")] - dependencies: IndexMap, - - #[serde(default, deserialize_with = "deserialize_opt_package_map")] - host_dependencies: Option>, - - #[serde(default, deserialize_with = "deserialize_opt_package_map")] - build_dependencies: Option>, - - #[serde(default)] - pypi_dependencies: Option>, - - /// Additional information to activate an environment. - #[serde(default)] - activation: Option, - - /// Target specific tasks to run in the environment - #[serde(default)] - tasks: HashMap, - - /// The features defined in the project. - #[serde(default)] - feature: IndexMap, - - /// The environments the project can create. 
-            #[serde(default)]
-            environments: IndexMap<EnvironmentName, TomlEnvironmentMapOrSeq>,
-
-            /// pypi-options
-            #[serde(default)]
-            pypi_options: Option<PypiOptions>,
-
-            /// The tool configuration which is unused by pixi
-            #[serde(default, skip_serializing, rename = "tool")]
-            _tool: serde::de::IgnoredAny,
-
-            /// The URI for the manifest schema which is unused by pixi
-            #[allow(dead_code)]
-            #[serde(rename = "$schema")]
-            schema: Option<String>,
-        }
-
-        let toml_manifest = TomlProjectManifest::deserialize(deserializer)?;
-        let mut dependencies = HashMap::from_iter([(SpecType::Run, toml_manifest.dependencies)]);
-        if let Some(host_deps) = toml_manifest.host_dependencies {
-            dependencies.insert(SpecType::Host, host_deps);
-        }
-        if let Some(build_deps) = toml_manifest.build_dependencies {
-            dependencies.insert(SpecType::Build, build_deps);
-        }
-
-        let default_target = Target {
-            dependencies,
-            pypi_dependencies: toml_manifest.pypi_dependencies,
-            activation: toml_manifest.activation,
-            tasks: toml_manifest.tasks,
-        };
-
-        // Construct a default feature
-        let default_feature = Feature {
-            name: FeatureName::Default,
-
-            // The default feature does not overwrite the platforms or channels from the project
-            // metadata.
-            platforms: None,
-            channels: None,
-
-            channel_priority: toml_manifest.project.channel_priority,
-
-            system_requirements: toml_manifest.system_requirements,
-
-            // Use the pypi-options from the manifest for
-            // the default feature
-            pypi_options: toml_manifest.pypi_options,
-
-            // Combine the default target with all user specified targets
-            targets: Targets::from_default_and_user_defined(default_target, toml_manifest.target),
-        };
-
-        // Construct the features including the default feature
-        let features: IndexMap<FeatureName, Feature> =
-            IndexMap::from_iter([(FeatureName::Default, default_feature)]);
-        let named_features = toml_manifest
-            .feature
-            .into_iter()
-            .map(|(name, mut feature)| {
-                feature.name = name.clone();
-                (name, feature)
-            })
-            .collect::<IndexMap<FeatureName, Feature>>();
-        let features = features.into_iter().chain(named_features).collect();
-
-        // Construct the environments including the default environment
-        let mut environments = Environments::default();
-        let mut solve_groups = SolveGroups::default();
-
-        // Add the default environment first if it was not redefined.
- if !toml_manifest - .environments - .contains_key(&EnvironmentName::Default) - { - environments.environments.push(Some(Environment::default())); - environments - .by_name - .insert(EnvironmentName::Default, EnvironmentIdx(0)); - } - - // Add all named environments - for (name, env) in toml_manifest.environments { - // Decompose the TOML - let (features, features_source_loc, solve_group, no_default_feature) = match env { - TomlEnvironmentMapOrSeq::Map(env) => ( - env.features.value, - env.features.span, - env.solve_group, - env.no_default_feature, - ), - TomlEnvironmentMapOrSeq::Seq(features) => (features, None, None, false), - }; - - let environment_idx = EnvironmentIdx(environments.environments.len()); - environments.by_name.insert(name.clone(), environment_idx); - environments.environments.push(Some(Environment { - name, - features, - features_source_loc, - solve_group: solve_group.map(|sg| solve_groups.add(sg, environment_idx)), - no_default_feature, - })); - } - - Ok(Self { - project: toml_manifest.project, - features, - environments, - solve_groups, - }) - } -} - -struct PackageMap<'a>(&'a IndexMap); - -impl<'de, 'a> DeserializeSeed<'de> for PackageMap<'a> { - type Value = PackageName; - - fn deserialize(self, deserializer: D) -> Result - where - D: Deserializer<'de>, - { - let package_name = PackageName::deserialize(deserializer)?; - match self.0.get_key_value(&package_name) { - Some((package_name, _)) => { - Err(serde::de::Error::custom( - format!( - "duplicate dependency: {} (please avoid using capitalized names for the dependencies)", package_name.as_source()) - )) - } - None => Ok(package_name), - } - } -} - -pub fn deserialize_package_map<'de, D>( - deserializer: D, -) -> Result, D::Error> -where - D: Deserializer<'de>, -{ - struct PackageMapVisitor(PhantomData<()>); - - impl<'de> Visitor<'de> for PackageMapVisitor { - type Value = IndexMap; - - fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - write!(formatter, "a map") - } - - fn visit_map(self, mut map: A) -> Result - where - A: MapAccess<'de>, - { - let mut result = IndexMap::new(); - while let Some((package_name, spec)) = - map.next_entry_seed::(PackageMap(&result), PhantomData::)? 
- { - if spec.is_source() { - return Err(serde::de::Error::custom( - "source dependencies are not allowed yet", - )); - } - - result.insert(package_name, spec); - } - - Ok(result) - } - } - let visitor = PackageMapVisitor(PhantomData); - deserializer.deserialize_seq(visitor) -} - -pub fn deserialize_opt_package_map<'de, D>( - deserializer: D, -) -> Result>, D::Error> -where - D: Deserializer<'de>, -{ - Ok(Some(deserialize_package_map(deserializer)?)) -} - #[cfg(test)] mod tests { use insta::{assert_snapshot, assert_yaml_snapshot}; use itertools::Itertools; use rattler_conda_types::{NamedChannelOrUrl, Platform}; - use crate::{parsed_manifest::ParsedManifest, TargetSelector}; + use crate::{utils::test_utils::expect_parse_failure, TargetSelector, WorkspaceManifest}; const PROJECT_BOILERPLATE: &str = r#" [project] @@ -361,7 +110,7 @@ mod tests { "# ); - let manifest = ParsedManifest::from_toml_str(&contents).unwrap(); + let manifest = WorkspaceManifest::from_toml_str(&contents).unwrap(); let targets = &manifest.default_feature().targets; assert_eq!( targets.user_defined_selectors().cloned().collect_vec(), @@ -416,7 +165,7 @@ mod tests { "# ); - let manifest = ParsedManifest::from_toml_str(&contents).unwrap(); + let manifest = WorkspaceManifest::from_toml_str(&contents).unwrap(); let deps = manifest .default_feature() .targets @@ -504,7 +253,7 @@ mod tests { "# ); - let manifest = ParsedManifest::from_toml_str(&contents).unwrap(); + let manifest = WorkspaceManifest::from_toml_str(&contents).unwrap(); let default_target = manifest.default_feature().targets.default(); let run_dependencies = default_target.run_dependencies().unwrap(); let build_dependencies = default_target.build_dependencies().unwrap(); @@ -544,31 +293,25 @@ mod tests { let examples = [r#"[target.foobar.dependencies] invalid_platform = "henk""#]; - assert_snapshot!(examples - .into_iter() - .map(|example| ParsedManifest::from_toml_str(&format!( - "{PROJECT_BOILERPLATE}\n{example}" - )) - .unwrap_err() - .to_string()) - .collect::>() - .join("\n")) + assert_snapshot!(expect_parse_failure(&format!( + "{PROJECT_BOILERPLATE}\n{}", + examples[0] + ))); } #[test] fn test_invalid_key() { - let examples = [ - format!("{PROJECT_BOILERPLATE}\n[foobar]"), - format!("{PROJECT_BOILERPLATE}\n[target.win-64.hostdependencies]"), - format!("{PROJECT_BOILERPLATE}\n[environments.INVALID]"), - ]; - assert_snapshot!(examples - .into_iter() - .map(|example| ParsedManifest::from_toml_str(&example) - .unwrap_err() - .to_string()) - .collect::>() - .join("\n")) + insta::with_settings!({snapshot_suffix => "foobar"}, { + assert_snapshot!(expect_parse_failure(&format!("{PROJECT_BOILERPLATE}\n[foobar]"))) + }); + + insta::with_settings!({snapshot_suffix => "hostdependencies"}, { + assert_snapshot!(expect_parse_failure(&format!("{PROJECT_BOILERPLATE}\n[target.win-64.hostdependencies]"))) + }); + + insta::with_settings!({snapshot_suffix => "environment"}, { + assert_snapshot!(expect_parse_failure(&format!("{PROJECT_BOILERPLATE}\n[environments.INVALID]"))) + }); } #[test] @@ -587,7 +330,7 @@ mod tests { "# ); - let manifest = ParsedManifest::from_toml_str(&contents).unwrap(); + let manifest = WorkspaceManifest::from_toml_str(&contents).unwrap(); assert_snapshot!(manifest .default_feature() @@ -619,7 +362,7 @@ mod tests { "# ); - assert_snapshot!(toml_edit::de::from_str::(&contents) + assert_snapshot!(WorkspaceManifest::from_toml_str(&contents) .expect("parsing should succeed!") .default_feature() .targets @@ -647,9 +390,9 @@ mod tests { "# ); - 
assert_yaml_snapshot!(toml_edit::de::from_str::(&contents) + assert_yaml_snapshot!(WorkspaceManifest::from_toml_str(&contents) .expect("parsing should succeed!") - .project + .workspace .pypi_options .clone() .unwrap()); @@ -669,8 +412,8 @@ mod tests { ); let manifest = - toml_edit::de::from_str::(&contents).expect("parsing should succeed!"); - assert_yaml_snapshot!(manifest.project.pypi_options.clone().unwrap()); + WorkspaceManifest::from_toml_str(&contents).expect("parsing should succeed!"); + assert_yaml_snapshot!(manifest.workspace.pypi_options.clone().unwrap()); } #[test] @@ -684,7 +427,7 @@ mod tests { flask = "2.*" "# ); - let manifest = ParsedManifest::from_toml_str(&contents); + let manifest = WorkspaceManifest::from_toml_str(&contents); assert!(manifest.is_err()); assert!(manifest @@ -704,7 +447,7 @@ mod tests { libc = "2.12" "# ); - let manifest = ParsedManifest::from_toml_str(&contents); + let manifest = WorkspaceManifest::from_toml_str(&contents); assert!(manifest.is_err()); assert!(manifest @@ -731,6 +474,6 @@ mod tests { [tool.poetry] test = "test" "#; - let _manifest = ParsedManifest::from_toml_str(contents).unwrap(); + let _manifest = WorkspaceManifest::from_toml_str(contents).unwrap(); } } diff --git a/crates/pixi_manifest/src/package.rs b/crates/pixi_manifest/src/package.rs new file mode 100644 index 000000000..106fba42a --- /dev/null +++ b/crates/pixi_manifest/src/package.rs @@ -0,0 +1,38 @@ +use std::path::PathBuf; + +use rattler_conda_types::Version; +use url::Url; + +/// Defines the contents of the `[package]` section of the project manifest. +#[derive(Debug, Clone)] +pub struct Package { + /// The name of the project + pub name: String, + + /// The version of the project + pub version: Version, + + /// An optional project description + pub description: Option, + + /// Optional authors + pub authors: Option>, + + /// The license as a valid SPDX string (e.g. MIT AND Apache-2.0) + pub license: Option, + + /// The license file (relative to the project root) + pub license_file: Option, + + /// Path to the README file of the project (relative to the project root) + pub readme: Option, + + /// URL of the project homepage + pub homepage: Option, + + /// URL of the project source repository + pub repository: Option, + + /// URL of the project documentation + pub documentation: Option, +} diff --git a/crates/pixi_manifest/src/preview.rs b/crates/pixi_manifest/src/preview.rs index c7bb190d1..e403f2d74 100644 --- a/crates/pixi_manifest/src/preview.rs +++ b/crates/pixi_manifest/src/preview.rs @@ -12,7 +12,9 @@ //! We do this for backwards compatibility with the old features that may have been used in the past. //! The [`KnownFeature`] enum contains all the known features. Extend this if you want to add support //! for new features. 
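+//!
+//! A hypothetical manifest snippet opting into a preview feature (using the
+//! `pixi-build` feature introduced below; the exact table name is an
+//! assumption):
+//!
+//! ```toml
+//! [project]
+//! preview = ["pixi-build"]
+//! ```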
+ use serde::{Deserialize, Deserializer, Serialize}; +use std::fmt::{Display, Formatter}; #[derive(Debug, Serialize, Clone, PartialEq)] #[serde(untagged)] @@ -24,6 +26,12 @@ pub enum Preview { Features(Vec), // For `preview = ["feature"]` } +impl Default for Preview { + fn default() -> Self { + Self::Features(Vec::new()) + } +} + impl Preview { /// Returns true if all preview features are enabled pub fn all_enabled(&self) -> bool { @@ -92,7 +100,14 @@ impl PartialEq for PreviewFeature { #[serde(rename_all = "kebab-case")] /// Currently supported preview features are listed here pub enum KnownPreviewFeature { - // Add known features here + /// Build feature, to enable conda source builds + PixiBuild, +} + +impl Display for KnownPreviewFeature { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.as_str()) + } } impl<'de> Deserialize<'de> for PreviewFeature { @@ -113,6 +128,15 @@ impl<'de> Deserialize<'de> for PreviewFeature { } } +impl KnownPreviewFeature { + /// Returns the string representation of the feature + pub fn as_str(&self) -> &'static str { + match self { + KnownPreviewFeature::PixiBuild => "pixi-build", + } + } +} + #[cfg(test)] mod tests { use super::*; diff --git a/crates/pixi_manifest/src/pyproject.rs b/crates/pixi_manifest/src/pyproject.rs index 28e93e3b5..d2c588eac 100644 --- a/crates/pixi_manifest/src/pyproject.rs +++ b/crates/pixi_manifest/src/pyproject.rs @@ -2,35 +2,39 @@ use std::{collections::HashMap, fs, path::PathBuf, str::FromStr}; use indexmap::IndexMap; use miette::{Diagnostic, IntoDiagnostic, Report, WrapErr}; -use pep440_rs::VersionSpecifiers; +use pep440_rs::{Version, VersionSpecifiers}; use pep508_rs::Requirement; use pixi_spec::PixiSpec; -use pyproject_toml::{self, pep735_resolve::Pep735Error, Contact, Project}; +use pyproject_toml::{self, pep735_resolve::Pep735Error, Contact, DependencyGroups, Project}; use rattler_conda_types::{PackageName, ParseStrictness::Lenient, VersionSpec}; use serde::Deserialize; use thiserror::Error; -use toml_edit::DocumentMut; use super::{ error::{RequirementConversionError, TomlError}, - DependencyOverwriteBehavior, Feature, ParsedManifest, SpecType, + DependencyOverwriteBehavior, Feature, SpecType, WorkspaceManifest, +}; +use crate::{ + error::DependencyError, + manifests::PackageManifest, + toml::{ExternalWorkspaceProperties, TomlManifest}, + FeatureName, }; -use crate::{error::DependencyError, FeatureName}; -#[derive(Deserialize, Debug, Clone)] +#[derive(Deserialize, Debug)] pub struct PyProjectManifest { #[serde(flatten)] inner: pyproject_toml::PyProjectToml, - pub tool: Option, + tool: Option, } -#[derive(Deserialize, Debug, Clone)] +#[derive(Deserialize, Debug)] pub struct Tool { - pub pixi: Option, + pub pixi: Option, pub poetry: Option, } -#[derive(Deserialize, Debug, Clone)] +#[derive(Default, Deserialize, Debug)] pub struct ToolPoetry { pub name: Option, pub description: Option, @@ -62,127 +66,70 @@ impl PyProjectManifest { /// Ensures the `pyproject.toml` contains a `[tool.pixi]` table /// and project name is defined - pub fn ensure_pixi(self, source: &str) -> Result { + pub fn ensure_pixi(self) -> Result { // Make sure the `[tool.pixi]` table exist - if !self.is_pixi() { + if !self.has_pixi_table() { return Err(TomlError::NoPixiTable); } // Make sure a 'name' is defined if self.name().is_none() { - let document = source.parse::().map_err(TomlError::from)?; - let span = document["tool"]["pixi"]["project"].span(); - return Err(TomlError::NoProjectName(span)); + let span = self + 
.pixi_manifest() + .and_then(|manifest| manifest.workspace.span()); + return Err(TomlError::MissingField("name".into(), span)); } Ok(self) } - fn tool(&self) -> Option<&Tool> { - self.tool.as_ref() - } - - pub fn project(&self) -> Option<&Project> { - self.project.as_ref() - } - - pub fn poetry(&self) -> Option<&ToolPoetry> { - self.tool().and_then(|t| t.poetry.as_ref()) - } - - fn pixi(&self) -> Option<&ParsedManifest> { - self.tool().and_then(|t| t.pixi.as_ref()) - } - - /// Checks whether a `pyproject.toml` is valid for use with pixi by - /// checking it contains a `[tool.pixi]` table. - pub fn is_pixi(&self) -> bool { - self.pixi().is_some() - } - /// Returns the project name from, in order of priority /// - the `[tool.pixi.project]` table /// - the `[project]` table /// - the `[tool.poetry]` table - pub fn name(&self) -> Option { - if let Some(pixi_name) = self.pixi().and_then(|p| p.project.name.as_ref()) { - return Some(pixi_name.clone()); + pub fn name(&self) -> Option<&str> { + if let Some(pixi_name) = self + .pixi_manifest() + .and_then(|p| p.workspace.value.name.as_deref()) + { + return Some(pixi_name); } if let Some(pyproject) = &self.project { - return Some(pyproject.name.clone()); + return Some(pyproject.name.as_str()); } if let Some(poetry_name) = self.poetry().and_then(|p| p.name.as_ref()) { - return Some(poetry_name.clone()); + return Some(poetry_name.as_str()); } None } - /// Returns the project description from, in order of priority - /// - the `[tool.pixi.project]` table - /// - the `[project]` table - /// - the `[tool.poetry]` table - fn description(&self) -> Option { - if let Some(pixi_description) = self.pixi().and_then(|p| p.project.description.as_ref()) { - return Some(pixi_description.to_string()); - } - if let Some(pyproject_description) = - self.project.as_ref().and_then(|p| p.description.as_ref()) - { - return Some(pyproject_description.to_string()); - } - if let Some(poetry_description) = self.poetry().and_then(|p| p.description.as_ref()) { - return Some(poetry_description.clone()); - } - None + /// Returns the project name as PEP508 name + fn package_name(&self) -> Option { + pep508_rs::PackageName::new(self.name()?.to_string()).ok() } - /// Returns the project version from, in order of priority - /// - the `[tool.pixi.project]` table - /// - the `[project]` table - /// - the `[tool.poetry]` table - fn version(&self) -> Option { - if let Some(pixi_version) = self.pixi().and_then(|p| p.project.version.as_ref()) { - return Some(pixi_version.to_string()); - } - if let Some(pyproject_version) = self.project.as_ref().and_then(|p| p.version.as_ref()) { - return Some(pyproject_version.to_string()); - } - if let Some(poetry_version) = self.poetry().and_then(|p| p.version.as_ref()) { - return Some(poetry_version.clone()); - } - None + fn tool(&self) -> Option<&Tool> { + self.tool.as_ref() } - /// Returns the project authors from, in order of priority - /// - the `[tool.pixi.project]` table - /// - the `[project]` table - /// - the `[tool.poetry]` table - fn authors(&self) -> Option> { - if let Some(pixi_authors) = self.pixi().and_then(|p| p.project.authors.as_ref()) { - return Some(pixi_authors.clone()); - } - if let Some(pyproject_authors) = self.project.as_ref().and_then(|p| p.authors.as_ref()) { - return Some( - pyproject_authors - .iter() - .map(|contact| match contact { - Contact::NameEmail { name, email } => format!("{} <{}>", name, email), - Contact::Name { name } => name.clone(), - Contact::Email { email } => email.clone(), - }) - .collect(), - ); - } - if let 
Some(poetry_authors) = self.poetry().and_then(|p| p.authors.as_ref()) { - return Some(poetry_authors.clone()); - } - None + pub fn project(&self) -> Option<&Project> { + self.project.as_ref() } - /// Returns the project name as PEP508 name - fn package_name(&self) -> Option { - self.name() - .and_then(|n| pep508_rs::PackageName::new(n).ok()) + /// Returns a reference to the poetry section if it exists. + pub fn poetry(&self) -> Option<&ToolPoetry> { + self.tool().and_then(|t| t.poetry.as_ref()) + } + + /// Returns a reference to the pixi section if it exists. + fn pixi_manifest(&self) -> Option<&TomlManifest> { + self.tool().and_then(|t| t.pixi.as_ref()) + } + + /// Checks whether a `pyproject.toml` is valid for use with pixi by + /// checking it contains a `[tool.pixi]` table. + pub fn has_pixi_table(&self) -> bool { + self.pixi_manifest().is_some() } /// Returns optional dependencies from the `[project.optional-dependencies]` @@ -201,8 +148,8 @@ impl PyProjectManifest { /// - one environment is created per group with the same name /// - each environment includes the feature of the same name /// - it will also include other features inferred from any self references - /// to other groups of optional dependencies (but won't for dependency groups, - /// as recursion between groups is resolved upstream) + /// to other groups of optional dependencies (but won't for dependency + /// groups, as recursion between groups is resolved upstream) pub fn environments_from_extras(&self) -> Result>, Pep735Error> { let mut environments = HashMap::new(); if let Some(extras) = self.optional_dependencies() { @@ -225,7 +172,8 @@ impl PyProjectManifest { if let Some(groups) = self.dependency_groups().transpose()? { for group in groups.into_keys() { let normalised = group.replace('_', "-"); - // Nothing to do if a group of optional dependencies has the same name as the dependency group + // Nothing to do if a group of optional dependencies has the same name as the + // dependency group if !environments.contains_key(&normalised) { environments.insert(normalised.clone(), vec![normalised]); } @@ -238,51 +186,107 @@ impl PyProjectManifest { #[derive(Debug, Error, Diagnostic)] pub enum PyProjectToManifestError { + #[error("The [tool.pixi] table is missing")] + MissingPixiTable, #[error("Unsupported pep508 requirement: '{0}'")] DependencyError(Requirement, #[source] DependencyError), #[error(transparent)] DependencyGroupError(#[from] Pep735Error), + #[error(transparent)] + TomlError(#[from] TomlError), } -impl TryFrom for ParsedManifest { - type Error = PyProjectToManifestError; +#[derive(Default)] +pub struct PyProjectFields { + pub name: Option, + pub description: Option, + pub version: Option, + pub authors: Option>, + pub requires_python: Option, + pub dependencies: Option>, + pub optional_dependencies: Option>>, +} - fn try_from(item: PyProjectManifest) -> Result { - // Load the data nested under '[tool.pixi]' as pixi manifest - let mut manifest = item - .pixi() - .expect("The [tool.pixi] table should exist") - .clone(); - - // Set pixi project name, version, description and authors (if they are not set) - // with the ones from the `[project]` or `[tool.poetry]` tables of the - // `pyproject.toml`. 
- manifest.project.name = item.name(); - manifest.project.description = item.description(); - manifest.project.version = item.version().and_then(|v| v.parse().ok()); - manifest.project.authors = item.authors(); +impl From for PyProjectFields { + fn from(project: pyproject_toml::Project) -> Self { + Self { + name: Some(project.name), + description: project.description, + version: project.version, + authors: project.authors, + requires_python: project.requires_python, + dependencies: project.dependencies, + optional_dependencies: project.optional_dependencies, + } + } +} +impl PyProjectManifest { + #[allow(clippy::result_large_err)] + pub fn into_manifests( + self, + ) -> Result<(WorkspaceManifest, Option), PyProjectToManifestError> { + // Load the data nested under '[tool.pixi]' as pixi manifest + let Some(Tool { + pixi: Some(pixi), + poetry, + }) = self.tool + else { + return Err(PyProjectToManifestError::MissingPixiTable); + }; + + // Extract the values we are interested in from the pyproject.toml + let pyproject_toml::PyProjectToml { + project, + dependency_groups, + .. + } = self.inner; + let project = project.map(PyProjectFields::from).unwrap_or_default(); + + // Extract some of the values we are interested in from the poetry table. + let poetry = poetry.unwrap_or_default(); + + // Convert the TOML document into a pixi manifest. // TODO: would be nice to add license, license-file, readme, homepage, // repository, documentation, regarding the above, the types are a bit // different than we expect, so the conversion is not straightforward we // could change these types or we can convert. Let's decide when we make it. // etc. + let (mut workspace_manifest, package_manifest) = + pixi.into_manifests(ExternalWorkspaceProperties { + name: project.name, + version: project + .version + .and_then(|v| v.to_string().parse().ok()) + .or(poetry.version.and_then(|v| v.parse().ok())), + description: project.description.or(poetry.description), + authors: project.authors.map(contacts_to_authors).or(poetry.authors), + license: None, + license_file: None, + readme: None, + homepage: None, + repository: None, + documentation: None, + })?; // Add python as dependency based on the `project.requires_python` property - let python_spec = item.project().and_then(|p| p.requires_python.clone()); + let python_spec = project.requires_python; - let target = manifest.default_feature_mut().targets.default_mut(); + let target = workspace_manifest + .default_feature_mut() + .targets + .default_mut(); let python = PackageName::from_str("python").unwrap(); // If the target doesn't have any python dependency, we add it from the // `requires-python` - if !target.has_dependency(&python, Some(SpecType::Run), None) { + if !target.has_dependency(&python, SpecType::Run, None) { target.add_dependency( &python, &version_or_url_to_spec(&python_spec).unwrap(), SpecType::Run, ); } else if let Some(_spec) = python_spec { - if target.has_dependency(&python, Some(SpecType::Run), None) { + if target.has_dependency(&python, SpecType::Run, None) { // TODO: implement some comparison or spec merging logic here tracing::info!( "Overriding the requires-python with the one defined in pixi dependencies" @@ -291,7 +295,7 @@ impl TryFrom for ParsedManifest { } // Add pyproject dependencies as pypi dependencies - if let Some(deps) = item.project().and_then(|p| p.dependencies.clone()) { + if let Some(deps) = project.dependencies { for requirement in deps.iter() { target .try_add_pep508_dependency( @@ -306,19 +310,26 @@ impl TryFrom for ParsedManifest 
{ } // Define an iterator over both optional dependencies and dependency groups - let groups = item - .optional_dependencies() + let groups = project + .optional_dependencies .into_iter() - .chain(item.dependency_groups().transpose()?) + .chain( + dependency_groups + .as_ref() + .map(DependencyGroups::resolve) + .transpose()?, + ) .flat_map(|map| map.into_iter()); // For each group of optional dependency or dependency group, // create a feature of the same name if it does not exist, - // and add pypi dependencies, filtering out self-references in optional dependencies - let project_name = item.package_name(); + // and add pypi dependencies, filtering out self-references in optional + // dependencies + let project_name = + pep508_rs::PackageName::new(workspace_manifest.workspace.name.clone()).ok(); for (group, reqs) in groups { let feature_name = FeatureName::Named(group.to_string()); - let target = manifest + let target = workspace_manifest .features .entry(feature_name.clone()) .or_insert_with(move || Feature::new(feature_name)) @@ -340,7 +351,7 @@ impl TryFrom for ParsedManifest { } } - Ok(manifest) + Ok((workspace_manifest, package_manifest)) } } @@ -362,6 +373,19 @@ fn version_or_url_to_spec( } } +/// Converts [`Contact`] from pyproject.toml to a representation that is used in +/// pixi. +fn contacts_to_authors(contacts: Vec) -> Vec { + contacts + .into_iter() + .map(|contact| match contact { + Contact::NameEmail { name, email } => format!("{} <{}>", name, email), + Contact::Name { name } => name.clone(), + Contact::Email { email } => email.clone(), + }) + .collect() +} + #[cfg(test)] mod tests { use std::{path::Path, str::FromStr}; @@ -371,8 +395,7 @@ mod tests { use rattler_conda_types::{ParseStrictness, VersionSpec}; use crate::{ - manifests::manifest::Manifest, pypi::PyPiPackageName, DependencyOverwriteBehavior, - FeatureName, + manifests::Manifest, pypi::PyPiPackageName, DependencyOverwriteBehavior, FeatureName, }; const PYPROJECT_FULL: &str = r#" diff --git a/crates/pixi_manifest/src/snapshots/pixi_manifest__parsed_manifest__tests__invalid_key.snap b/crates/pixi_manifest/src/snapshots/pixi_manifest__parsed_manifest__tests__invalid_key.snap deleted file mode 100644 index 4e2464b39..000000000 --- a/crates/pixi_manifest/src/snapshots/pixi_manifest__parsed_manifest__tests__invalid_key.snap +++ /dev/null @@ -1,21 +0,0 @@ ---- -source: crates/pixi_manifest/src/parsed_manifest.rs -expression: "examples.into_iter().map(|example|\n ParsedManifest::from_toml_str(&example).unwrap_err().to_string()).collect::>().join(\"\\n\")" ---- -TOML parse error at line 8, column 2 - | -8 | [foobar] - | ^^^^^^ -unknown field `foobar`, expected one of `project`, `system-requirements`, `target`, `dependencies`, `host-dependencies`, `build-dependencies`, `pypi-dependencies`, `activation`, `tasks`, `feature`, `environments`, `pypi-options`, `tool`, `$schema` - -TOML parse error at line 8, column 16 - | -8 | [target.win-64.hostdependencies] - | ^^^^^^^^^^^^^^^^ -unknown field `hostdependencies`, expected one of `dependencies`, `host-dependencies`, `build-dependencies`, `pypi-dependencies`, `activation`, `tasks` - -TOML parse error at line 8, column 15 - | -8 | [environments.INVALID] - | ^^^^^^^ -Failed to parse environment name 'INVALID', please use only lowercase letters, numbers and dashes diff --git a/crates/pixi_manifest/src/snapshots/pixi_manifest__parsed_manifest__tests__invalid_target_specific.snap b/crates/pixi_manifest/src/snapshots/pixi_manifest__parsed_manifest__tests__invalid_target_specific.snap 
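The `contacts_to_authors` helper above turns pyproject `Contact` entries into pixi author strings. A self-contained sketch of the same mapping, with `Contact` re-declared as a stand-in for the `pyproject_toml` type:

```rust
// Self-contained sketch of the `contacts_to_authors` mapping above;
// `Contact` is a stand-in with the same variants as the pyproject_toml type.
enum Contact {
    NameEmail { name: String, email: String },
    Name { name: String },
    Email { email: String },
}

fn contacts_to_authors(contacts: Vec<Contact>) -> Vec<String> {
    contacts
        .into_iter()
        .map(|contact| match contact {
            Contact::NameEmail { name, email } => format!("{} <{}>", name, email),
            Contact::Name { name } => name,
            Contact::Email { email } => email,
        })
        .collect()
}

fn main() {
    let authors = contacts_to_authors(vec![Contact::NameEmail {
        name: "Ada Lovelace".to_string(),
        email: "ada@example.invalid".to_string(),
    }]);
    assert_eq!(authors, vec!["Ada Lovelace <ada@example.invalid>"]);
}
```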
deleted file mode 100644 index 1b9e4b33f..000000000 --- a/crates/pixi_manifest/src/snapshots/pixi_manifest__parsed_manifest__tests__invalid_target_specific.snap +++ /dev/null @@ -1,10 +0,0 @@ ---- -source: crates/pixi_manifest/src/parsed_manifest.rs -assertion_line: 498 -expression: "examples.into_iter().map(|example|\n ParsedManifest::from_toml_str(&format!(\"{PROJECT_BOILERPLATE}\\n{example}\")).unwrap_err().to_string()).collect::>().join(\"\\n\")" ---- -TOML parse error at line 8, column 9 - | -8 | [target.foobar.dependencies] - | ^^^^^^ -'foobar' is not a known platform. Valid platforms are 'noarch', 'unknown', 'linux-32', 'linux-64', 'linux-aarch64', 'linux-armv6l', 'linux-armv7l', 'linux-ppc64le', 'linux-ppc64', 'linux-s390x', 'linux-riscv32', 'linux-riscv64', 'osx-64', 'osx-arm64', 'win-32', 'win-64', 'win-arm64', 'emscripten-wasm32', 'wasi-wasm32', 'zos-z' diff --git a/crates/pixi_manifest/src/snapshots/pixi_manifest__parsed_manifest__tests__pypi_options_default_feature.snap b/crates/pixi_manifest/src/snapshots/pixi_manifest__parsed_manifest__tests__pypi_options_default_feature.snap deleted file mode 100644 index 48660dc4c..000000000 --- a/crates/pixi_manifest/src/snapshots/pixi_manifest__parsed_manifest__tests__pypi_options_default_feature.snap +++ /dev/null @@ -1,12 +0,0 @@ ---- -source: crates/pixi_manifest/src/parsed_manifest.rs -expression: "toml_edit::de::from_str::(&contents).expect(\"parsing should succeed!\").project.pypi_options.clone().unwrap()" ---- -index-url: "https://pypi.org/simple" -extra-index-urls: - - "https://pypi.org/simple2" -find-links: - - path: "../foo" - - url: "https://example.com/bar" -no-build-isolation: ~ -index-strategy: ~ diff --git a/crates/pixi_manifest/src/snapshots/pixi_manifest__parsed_manifest__tests__python_dependencies.snap b/crates/pixi_manifest/src/snapshots/pixi_manifest__parsed_manifest__tests__python_dependencies.snap deleted file mode 100644 index 4dbba6d35..000000000 --- a/crates/pixi_manifest/src/snapshots/pixi_manifest__parsed_manifest__tests__python_dependencies.snap +++ /dev/null @@ -1,7 +0,0 @@ ---- -source: crates/pixi_manifest/src/parsed_manifest.rs -assertion_line: 572 -expression: "toml_edit::de::from_str::(&contents).expect(\"parsing should succeed!\").default_feature().targets.default().pypi_dependencies.clone().into_iter().flat_map(|d|\n d.into_iter()).map(|(name, spec)|\n format!(\"{} = {}\", name.as_source(),\n toml_edit::Value::from(spec))).join(\"\\n\")" ---- -foo = ">=3.12" -bar = { version = ">=3.12", extras = ["baz"] } diff --git a/crates/pixi_manifest/src/snapshots/pixi_manifest__parsed_manifest__tests__target_specific_tasks.snap b/crates/pixi_manifest/src/snapshots/pixi_manifest__parsed_manifest__tests__target_specific_tasks.snap deleted file mode 100644 index 40bb166dc..000000000 --- a/crates/pixi_manifest/src/snapshots/pixi_manifest__parsed_manifest__tests__target_specific_tasks.snap +++ /dev/null @@ -1,8 +0,0 @@ ---- -source: crates/pixi_manifest/src/parsed_manifest.rs -assertion_line: 542 -expression: "manifest.default_feature().targets.iter().flat_map(|(target, selector)|\n {\n let selector_name =\n selector.map_or_else(|| String::from(\"default\"),\n ToString::to_string);\n target.tasks.iter().filter_map(move |(name, task)|\n {\n Some(format!(\"{}/{} = {}\", &selector_name, name.as_str(),\n task.as_single_command()?))\n })\n }).join(\"\\n\")" ---- -default/test = test multi -win-64/test = test win -linux-64/test = test linux diff --git a/crates/pixi_manifest/src/target.rs b/crates/pixi_manifest/src/target.rs 
index affc225c4..139523901 100644 --- a/crates/pixi_manifest/src/target.rs +++ b/crates/pixi_manifest/src/target.rs @@ -5,7 +5,6 @@ use itertools::Either; use pixi_spec::PixiSpec; use rattler_conda_types::{PackageName, Platform}; use serde::{Deserialize, Deserializer}; -use serde_with::serde_as; use super::error::DependencyError; use crate::{ @@ -16,12 +15,16 @@ use crate::{ DependencyOverwriteBehavior, PyPiRequirement, SpecType, }; -/// A target describes the dependencies, activations and task available to a -/// specific feature, in a specific environment, and optionally for a specific -/// platform. +/// A workspace target describes the dependencies, activations and tasks +/// available to a specific feature, in a specific environment, and optionally +/// for a specific platform. #[derive(Default, Debug, Clone)] -pub struct Target { +pub struct WorkspaceTarget { /// Dependencies for this target. + /// + /// TODO: While the pixi-build feature is not stabilized yet, a workspace + /// can have host- and build dependencies. When pixi-build is stabilized, we + /// can simplify this part of the code. pub dependencies: HashMap<SpecType, IndexMap<PackageName, PixiSpec>>, /// Specific python dependencies @@ -34,7 +37,14 @@ pub struct Target { pub tasks: HashMap<TaskName, Task>, } -impl Target { +/// A package target describes the dependencies for a specific platform. +#[derive(Default, Debug, Clone)] +pub struct PackageTarget { + /// Dependencies for this target. + pub dependencies: HashMap<SpecType, IndexMap<PackageName, PixiSpec>>, +} + +impl WorkspaceTarget { /// Returns the run dependencies of the target pub fn run_dependencies(&self) -> Option<&IndexMap<PackageName, PixiSpec>> { self.dependencies.get(&SpecType::Run) @@ -50,26 +60,9 @@ impl Target { self.dependencies.get(&SpecType::Build) } - /// Returns the dependencies to use for the given `spec_type`. If `None` is - /// specified, the combined dependencies are returned. - /// - /// The `build` dependencies overwrite the `host` dependencies which - /// overwrite the `run` dependencies. - /// - /// This function returns `None` if no dependencies are specified for the - /// given `spec_type`. - /// - /// This function returns a `Cow` to avoid cloning the dependencies if they - /// can be returned directly from the underlying map. - pub fn dependencies( - &self, - spec_type: Option<SpecType>, - ) -> Option<Cow<'_, IndexMap<PackageName, PixiSpec>>> { - if let Some(spec_type) = spec_type { - self.dependencies.get(&spec_type).map(Cow::Borrowed) - } else { - self.combined_dependencies() - } + /// Returns the dependencies of a certain type. + pub fn dependencies(&self, spec_type: SpecType) -> Option<&IndexMap<PackageName, PixiSpec>> { + self.dependencies.get(&spec_type) } /// Determines the combined set of dependencies. @@ -82,7 +75,7 @@ impl Target { /// /// This function returns a `Cow` to avoid cloning the dependencies if they /// can be returned directly from the underlying map.
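The now-public `combined_dependencies` merges the run, host, and build maps, with later spec types overwriting earlier entries of the same name. A minimal sketch of that merge order, using plain standard-library maps as stand-ins for pixi's `IndexMap<PackageName, PixiSpec>`:

```rust
// Minimal sketch of the merge performed by `combined_dependencies`: later
// spec types overwrite earlier entries of the same name (Run, then Host,
// then Build). `SpecType` and the string specs are simplified stand-ins.
use std::collections::{BTreeMap, HashMap};

#[derive(Clone, Copy, PartialEq, Eq, Hash)]
enum SpecType {
    Run,
    Host,
    Build,
}

fn combined(deps: &HashMap<SpecType, BTreeMap<String, String>>) -> BTreeMap<String, String> {
    let mut all = BTreeMap::new();
    for spec_type in [SpecType::Run, SpecType::Host, SpecType::Build] {
        if let Some(specs) = deps.get(&spec_type) {
            // `extend` overwrites entries inserted by an earlier spec type,
            // giving build > host > run precedence.
            all.extend(specs.iter().map(|(k, v)| (k.clone(), v.clone())));
        }
    }
    all
}

fn main() {
    let mut deps: HashMap<SpecType, BTreeMap<String, String>> = HashMap::new();
    deps.insert(
        SpecType::Run,
        BTreeMap::from([("python".to_string(), ">=3.8".to_string())]),
    );
    deps.insert(
        SpecType::Host,
        BTreeMap::from([("python".to_string(), ">=3.11".to_string())]),
    );
    // The host spec wins over the run spec for `python`.
    assert_eq!(combined(&deps)["python"], ">=3.11");
}
```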
- fn combined_dependencies(&self) -> Option<Cow<'_, IndexMap<PackageName, PixiSpec>>> { + pub fn combined_dependencies(&self) -> Option<Cow<'_, IndexMap<PackageName, PixiSpec>>> { let mut all_deps = None; for spec_type in [SpecType::Run, SpecType::Host, SpecType::Build] { let Some(specs) = self.dependencies.get(&spec_type) else { @@ -113,7 +106,7 @@ impl Target { pub fn has_dependency( &self, dep_name: &PackageName, - spec_type: Option<SpecType>, + spec_type: SpecType, exact: Option<&PixiSpec>, ) -> bool { let current_dependency = self @@ -164,7 +157,7 @@ impl Target { spec_type: SpecType, dependency_overwrite_behavior: DependencyOverwriteBehavior, ) -> Result<bool, DependencyError> { - if self.has_dependency(dep_name, Some(spec_type), None) { + if self.has_dependency(dep_name, spec_type, None) { match dependency_overwrite_behavior { DependencyOverwriteBehavior::OverwriteIfExplicit if !spec.has_version_spec() => { return Ok(false) @@ -269,6 +262,99 @@ impl Target { } } +impl PackageTarget { + /// Returns the dependencies of a certain type. + pub fn dependencies(&self, spec_type: SpecType) -> Option<&IndexMap<PackageName, PixiSpec>> { + self.dependencies.get(&spec_type) + } + + /// Returns the run dependencies of the target + pub fn run_dependencies(&self) -> Option<&IndexMap<PackageName, PixiSpec>> { + self.dependencies.get(&SpecType::Run) + } + + /// Returns the host dependencies of the target + pub fn host_dependencies(&self) -> Option<&IndexMap<PackageName, PixiSpec>> { + self.dependencies.get(&SpecType::Host) + } + + /// Returns the build dependencies of the target + pub fn build_dependencies(&self) -> Option<&IndexMap<PackageName, PixiSpec>> { + self.dependencies.get(&SpecType::Build) + } + + /// Checks if this target contains a dependency + pub fn has_dependency( + &self, + dep_name: &PackageName, + spec_type: SpecType, + exact: Option<&PixiSpec>, + ) -> bool { + let current_dependency = self + .dependencies(spec_type) + .and_then(|deps| deps.get(dep_name).cloned()); + + match (current_dependency, exact) { + (Some(current_spec), Some(spec)) => current_spec == *spec, + (Some(_), None) => true, + (None, _) => false, + } + } + + /// Removes a dependency from this target. + /// + /// Returns an `Err` if the dependency is not found. + pub fn remove_dependency( + &mut self, + dep_name: &PackageName, + spec_type: SpecType, + ) -> Result<(PackageName, PixiSpec), DependencyError> { + let Some(dependencies) = self.dependencies.get_mut(&spec_type) else { + return Err(DependencyError::NoSpecType(spec_type.name().into())); + }; + dependencies + .shift_remove_entry(dep_name) + .ok_or_else(|| DependencyError::NoDependency(dep_name.as_normalized().into())) + } + + /// Adds a dependency to a target + /// + /// This will overwrite any existing dependency of the same name + pub fn add_dependency(&mut self, dep_name: &PackageName, spec: &PixiSpec, spec_type: SpecType) { + self.dependencies + .entry(spec_type) + .or_default() + .insert(dep_name.clone(), spec.clone()); + } + + /// Adds a dependency to a target. + /// + /// This will return an error if the exact same dependency already exists; + /// otherwise it will overwrite any existing dependency of the same name. + pub fn try_add_dependency( + &mut self, + dep_name: &PackageName, + spec: &PixiSpec, + spec_type: SpecType, + dependency_overwrite_behavior: DependencyOverwriteBehavior, + ) -> Result<bool, DependencyError> { + if self.has_dependency(dep_name, spec_type, None) { + match dependency_overwrite_behavior { + DependencyOverwriteBehavior::OverwriteIfExplicit if !spec.has_version_spec() => { + return Ok(false) + } + DependencyOverwriteBehavior::IgnoreDuplicate => return Ok(false), + DependencyOverwriteBehavior::Error => { + return
Err(DependencyError::Duplicate(dep_name.as_normalized().into())); } + _ => {} + } + } + self.add_dependency(dep_name, spec, spec_type); + Ok(true) + } +} + /// Represents a target selector. Currently we only support explicit platform /// selection. #[derive(Debug, Clone, Eq, PartialEq, Hash)] @@ -331,75 +417,25 @@ impl<'de> Deserialize<'de> for TargetSelector { } } -impl<'de> Deserialize<'de> for Target { - fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> - where - D: Deserializer<'de>, - { - #[serde_as] - #[derive(Debug, Clone, Default, Deserialize)] - #[serde(rename_all = "kebab-case")] - #[serde(deny_unknown_fields)] - pub struct TomlTarget { - #[serde(default)] - dependencies: IndexMap<PackageName, PixiSpec>, - - #[serde(default)] - host_dependencies: Option<IndexMap<PackageName, PixiSpec>>, - - #[serde(default)] - build_dependencies: Option<IndexMap<PackageName, PixiSpec>>, - - #[serde(default)] - pypi_dependencies: Option<IndexMap<PyPiPackageName, PyPiRequirement>>, - - /// Additional information to activate an environment. - #[serde(default)] - activation: Option<Activation>, - - /// Target specific tasks to run in the environment - #[serde(default)] - tasks: HashMap<TaskName, Task>, - } - - let target = TomlTarget::deserialize(deserializer)?; - - let mut dependencies = HashMap::from_iter([(SpecType::Run, target.dependencies)]); - if let Some(host_deps) = target.host_dependencies { - dependencies.insert(SpecType::Host, host_deps); - } - if let Some(build_deps) = target.build_dependencies { - dependencies.insert(SpecType::Build, build_deps); - } - - Ok(Self { - dependencies, - pypi_dependencies: target.pypi_dependencies, - activation: target.activation, - tasks: target.tasks, - }) - } -} - /// A collection of targets including a default target. #[derive(Debug, Clone, Default)] -pub struct Targets { - default_target: Target, +pub struct Targets<T> { + default_target: T, /// We use an [`IndexMap`] to preserve the order in which the items were /// defined in the manifest. - targets: IndexMap<TargetSelector, Target>, + targets: IndexMap<TargetSelector, T>, /// The source location of the target selector in the manifest. source_locs: HashMap>, } -impl Targets { +impl<T> Targets<T> { /// Constructs a new [`Targets`] from a default target and additional user /// defined targets. pub fn from_default_and_user_defined( - default_target: Target, - user_defined_targets: IndexMap<PixiSpanned<TargetSelector>, Target>, + default_target: T, + user_defined_targets: IndexMap<PixiSpanned<TargetSelector>, T>, ) -> Self { let mut targets = IndexMap::with_capacity(user_defined_targets.len()); let mut source_locs = HashMap::with_capacity(user_defined_targets.len()); @@ -418,12 +454,12 @@ impl Targets { } /// Returns the default target. - pub fn default(&self) -> &Target { + pub fn default(&self) -> &T { &self.default_target } /// Returns the default target - pub fn default_mut(&mut self) -> &mut Target { + pub fn default_mut(&mut self) -> &mut T { &mut self.default_target } @@ -438,7 +474,7 @@ impl Targets { pub fn resolve( &self, platform: Option<Platform>, - ) -> impl DoubleEndedIterator<Item = &Target> + '_ { + ) -> impl DoubleEndedIterator<Item = &T> + '_ { if let Some(platform) = platform { Either::Left(self.resolve_for_platform(platform)) } else { @@ -458,7 +494,7 @@ impl Targets { fn resolve_for_platform( &self, platform: Platform, - ) -> impl DoubleEndedIterator<Item = &Target> + '_ { + ) -> impl DoubleEndedIterator<Item = &T> + '_ { std::iter::once(&self.default_target) .chain(self.targets.iter().filter_map(move |(selector, target)| { if selector.matches(platform) { @@ -472,13 +508,13 @@ impl Targets { } /// Returns the target for the given target selector.
- pub fn for_target(&self, target: &TargetSelector) -> Option<&Target> { + pub fn for_target(&self, target: &TargetSelector) -> Option<&T> { self.targets.get(target) } /// Returns the target for the given target selector or the default target /// if the selector is `None`. - pub fn for_opt_target(&self, target: Option<&TargetSelector>) -> Option<&Target> { + pub fn for_opt_target(&self, target: Option<&TargetSelector>) -> Option<&T> { if let Some(sel) = target { self.targets.get(sel) } else { @@ -488,7 +524,7 @@ impl Targets { /// Returns the target for the given target selector or the default target /// if no target is specified. - pub fn for_opt_target_mut(&mut self, target: Option<&TargetSelector>) -> Option<&mut Target> { + pub fn for_opt_target_mut(&mut self, target: Option<&TargetSelector>) -> Option<&mut T> { if let Some(sel) = target { self.targets.get_mut(sel) } else { @@ -501,7 +537,7 @@ impl Targets { /// /// If a target is specified and it does not exist the default target is /// returned instead. - pub fn for_opt_target_or_default(&self, target: Option<&TargetSelector>) -> &Target { + pub fn for_opt_target_or_default(&self, target: Option<&TargetSelector>) -> &T { if let Some(sel) = target { self.targets.get(sel).unwrap_or(&self.default_target) } else { @@ -513,10 +549,10 @@ impl Targets { /// or the default target if no target is specified. /// /// If a target is specified and it does not exist, it will be created. - pub fn for_opt_target_or_default_mut( - &mut self, - target: Option<&TargetSelector>, - ) -> &mut Target { + pub fn for_opt_target_or_default_mut(&mut self, target: Option<&TargetSelector>) -> &mut T + where + T: Default, + { if let Some(sel) = target { self.targets.entry(sel.clone()).or_default() } else { @@ -525,18 +561,18 @@ impl Targets { } /// Returns the target for the given target selector. - pub fn target_entry(&mut self, selector: TargetSelector) -> Entry<'_, TargetSelector, Target> { + pub fn target_entry(&mut self, selector: TargetSelector) -> Entry<'_, TargetSelector, T> { self.targets.entry(selector) } /// Returns an iterator over all targets and selectors. - pub fn iter(&self) -> impl Iterator)> + '_ { + pub fn iter(&self) -> impl Iterator)> + '_ { std::iter::once((&self.default_target, None)) .chain(self.targets.iter().map(|(sel, target)| (target, Some(sel)))) } /// Returns an iterator over all targets. - pub fn targets(&self) -> impl Iterator + '_ { + pub fn targets(&self) -> impl Iterator + '_ { std::iter::once(&self.default_target).chain(self.targets.iter().map(|(_, target)| target)) } @@ -558,7 +594,7 @@ mod tests { use insta::assert_snapshot; use itertools::Itertools; - use crate::manifests::manifest::Manifest; + use crate::Manifest; #[test] fn test_targets_overwrite_order() { @@ -589,7 +625,7 @@ mod tests { .default_feature() .targets .default() - .dependencies(None) + .combined_dependencies() .unwrap_or_default() .iter() .map(|(name, spec)| format!("{} = {}", name.as_source(), spec.as_version_spec().unwrap())) diff --git a/crates/pixi_manifest/src/toml/channel.rs b/crates/pixi_manifest/src/toml/channel.rs new file mode 100644 index 000000000..9c77d472a --- /dev/null +++ b/crates/pixi_manifest/src/toml/channel.rs @@ -0,0 +1,99 @@ +use std::str::FromStr; + +use rattler_conda_types::NamedChannelOrUrl; +use serde::{de::Error, Deserialize, Deserializer, Serialize, Serializer}; + +use crate::PrioritizedChannel; + +/// Layout of a prioritized channel in a toml file. 
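Before the doc comment continues below with the accepted TOML shapes, here is a runnable sketch of how the two forms deserialize. It uses `#[serde(untagged)]` as a stand-in; the real `TomlPrioritizedChannel` uses a hand-written `serde_untagged` visitor to produce better error messages. Assumes the `serde` feature of `toml_edit`:

```rust
// Sketch of the two accepted channel forms (string or map), using
// `#[serde(untagged)]` as a simplified stand-in for the visitor below.
use serde::Deserialize;

#[derive(Debug, Deserialize)]
#[serde(untagged)]
enum Channel {
    Map { channel: String, priority: Option<i32> },
    Str(String),
}

#[derive(Debug, Deserialize)]
struct Doc {
    channels: Vec<Channel>,
}

fn main() {
    let doc: Doc = toml_edit::de::from_str(
        r#"channels = ["conda-forge", { channel = "bioconda", priority = 10 }]"#,
    )
    .unwrap();
    assert!(matches!(doc.channels[0], Channel::Str(_)));
    assert!(matches!(doc.channels[1], Channel::Map { .. }));
}
```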
+/// +/// Supports the following formats: +/// +/// ```toml +/// channel = "some-channel" +/// channel = "https://prefix.dev/some-channel" +/// channel = { channel = "some-channel", priority = 10 } +/// ``` +#[derive(Debug)] +pub enum TomlPrioritizedChannel { + Map(PrioritizedChannel), + Str(NamedChannelOrUrl), +} + +impl From<TomlPrioritizedChannel> for PrioritizedChannel { + fn from(channel: TomlPrioritizedChannel) -> Self { + match channel { + TomlPrioritizedChannel::Map(prioritized_channel) => prioritized_channel, + TomlPrioritizedChannel::Str(channel) => PrioritizedChannel { + channel, + priority: None, + }, + } + } +} + +impl From<PrioritizedChannel> for TomlPrioritizedChannel { + fn from(channel: PrioritizedChannel) -> Self { + if let Some(priority) = channel.priority { + TomlPrioritizedChannel::Map(PrioritizedChannel { + channel: channel.channel, + priority: Some(priority), + }) + } else { + TomlPrioritizedChannel::Str(channel.channel) + } + } +} + +impl<'de> Deserialize<'de> for TomlPrioritizedChannel { + fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> + where + D: Deserializer<'de>, + { + serde_untagged::UntaggedEnumVisitor::new() + .map(|map| map.deserialize().map(TomlPrioritizedChannel::Map)) + .string(|str| { + NamedChannelOrUrl::from_str(str) + .map_err(serde_untagged::de::Error::custom) + .map(TomlPrioritizedChannel::Str) + }) + .expecting("either a map or a string") + .deserialize(deserializer) + } +} + +impl Serialize for TomlPrioritizedChannel { + fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> + where + S: Serializer, + { + match self { + TomlPrioritizedChannel::Map(map) => map.serialize(serializer), + TomlPrioritizedChannel::Str(str) => str.serialize(serializer), + } + } +} + +/// Helper so that we can deserialize [`crate::channel::PrioritizedChannel`] +/// from a string or a map. +impl<'de> serde_with::DeserializeAs<'de, PrioritizedChannel> for TomlPrioritizedChannel { + fn deserialize_as<D>(deserializer: D) -> Result<PrioritizedChannel, D::Error> + where + D: Deserializer<'de>, + { + let prioritized_channel = TomlPrioritizedChannel::deserialize(deserializer)?; + Ok(prioritized_channel.into()) + } +} + +/// Helper so that we can serialize [`crate::channel::PrioritizedChannel`] to a +/// string or a map. +impl serde_with::SerializeAs<PrioritizedChannel> for TomlPrioritizedChannel { + fn serialize_as<S>(source: &PrioritizedChannel, serializer: S) -> Result<S::Ok, S::Error> + where + S: Serializer, + { + let toml_prioritized_channel: TomlPrioritizedChannel = source.clone().into(); + toml_prioritized_channel.serialize(serializer) + } +} diff --git a/crates/pixi_manifest/src/toml/document.rs b/crates/pixi_manifest/src/toml/document.rs new file mode 100644 index 000000000..84128969a --- /dev/null +++ b/crates/pixi_manifest/src/toml/document.rs @@ -0,0 +1,214 @@ +use std::{ + fmt, + fmt::{Display, Formatter}, +}; + +use toml_edit::{Array, Item, Table, TableLike, Value}; + +use crate::TomlError; + +/// Represents a wrapper around a TOML document. +/// +/// This struct is exposed to other crates to allow for easy manipulation of the +/// TOML document. +#[derive(Debug, Clone, Default)] +pub struct TomlDocument(toml_edit::DocumentMut); + +impl Display for TomlDocument { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.0) + } +} + +impl TomlDocument { + /// Create a new `TomlDocument` from a `toml_edit::DocumentMut` document.
+ pub fn new(document: toml_edit::DocumentMut) -> Self { + Self(document) + } + + /// Returns the manifest as a mutable table + pub fn as_table_mut(&mut self) -> &mut Table { + self.0.as_table_mut() + } + + /// Get or insert a top-level item + pub fn get_or_insert<'a>(&'a mut self, key: &str, item: Item) -> &'a Item { + self.0.entry(key).or_insert(item) + } + + /// Retrieve a mutable reference to a target table `table_name` + /// in dotted form (e.g. `table1.table2`) from the root of the document. + /// If the table is not found, it is inserted into the document. + pub fn get_or_insert_nested_table<'a>( + &'a mut self, + table_name: &str, + ) -> Result<&'a mut dyn TableLike, TomlError> { + let parts: Vec<&str> = table_name.split('.').collect(); + + let mut current_table = self.0.as_table_mut() as &mut dyn TableLike; + + for part in parts { + let entry = current_table.entry(part); + let item = entry.or_insert(Item::Table(Table::new())); + if let Some(table) = item.as_table_mut() { + // Avoid creating empty tables + table.set_implicit(true); + } + current_table = item + .as_table_like_mut() + .ok_or_else(|| TomlError::table_error(part, table_name))?; + } + Ok(current_table) + } + + /// Inserts a value into a certain table. + /// If the innermost table doesn't exist, an inline table will be created. + /// If it already exists, the formatting of the table will be preserved. + pub fn insert_into_inline_table<'a>( + &'a mut self, + table_name: &str, + key: &str, + value: Value, + ) -> Result<&'a mut dyn TableLike, TomlError> { + let mut parts: Vec<&str> = table_name.split('.').collect(); + + let last = parts.pop(); + + let mut current_table = self.0.as_table_mut() as &mut dyn TableLike; + + for part in parts { + let entry = current_table.entry(part); + let item = entry.or_insert(Item::Table(Table::new())); + if let Some(table) = item.as_table_mut() { + // Avoid creating empty tables + table.set_implicit(true); + } + current_table = item + .as_table_like_mut() + .ok_or_else(|| TomlError::table_error(part, table_name))?; + } + + // Add dependency as inline table if it doesn't exist + if let Some(last) = last { + if let Some(dependency) = current_table.get_mut(last) { + dependency + .as_table_like_mut() + .map(|table| table.insert(key, Item::Value(value))); + } else { + let mut dependency = toml_edit::InlineTable::new(); + dependency.insert(key, value); + current_table.insert(last, toml_edit::value(dependency)); + } + } + + Ok(current_table) + } + + /// Retrieves a mutable reference to a target array `array_name` + /// in table `table_name` in dotted form (e.g. `table1.table2.array`). + /// + /// If the array is not found, it is inserted into the document. + pub fn get_or_insert_toml_array<'a>( + &'a mut self, + table_name: &str, + array_name: &str, + ) -> Result<&'a mut Array, TomlError> { + self.get_or_insert_nested_table(table_name)? + .entry(array_name) + .or_insert(Item::Value(Value::Array(Array::new()))) + .as_array_mut() + .ok_or_else(|| TomlError::array_error(array_name, table_name.to_string().as_str())) + } + + /// Retrieves a mutable reference to a target array `array_name` + /// in table `table_name` in dotted form (e.g. `table1.table2.array`). + /// + /// If the array is not found, returns `None`. + pub fn get_toml_array<'a>( + &'a mut self, + table_name: &str, + array_name: &str, + ) -> Result<Option<&'a mut Array>, TomlError> { + let array = self + .get_or_insert_nested_table(table_name)?
+ .get_mut(array_name) + .and_then(|a| a.as_array_mut()); + Ok(array) + } +} + +#[cfg(test)] +mod tests { + use std::str::FromStr; + + use toml_edit::{DocumentMut, Item}; + + use crate::toml::document::TomlDocument; + + #[test] + fn test_get_or_insert_nested_table() { + let toml = r#" +[envs.python] +channels = ["dummy-channel"] +[envs.python.dependencies] +dummy = "3.11.*" +"#; + let dep_name = "test"; + let mut manifest = TomlDocument::new(DocumentMut::from_str(toml).unwrap()); + manifest + .get_or_insert_nested_table("envs.python.dependencies") + .unwrap() + .insert(dep_name, Item::Value(toml_edit::Value::from("6.6"))); + + let dep = manifest + .get_or_insert_nested_table("envs.python.dependencies") + .unwrap() + .get(dep_name); + + assert!(dep.is_some()); + } + + #[test] + fn test_get_or_insert_inline_table() { + let toml = r#" +[envs.python] +channels = ["dummy-channel"] +dependencies = { dummy = "3.11.*" } +"#; + let dep_name = "test"; + let mut manifest = TomlDocument::new(DocumentMut::from_str(toml).unwrap()); + manifest + .get_or_insert_nested_table("envs.python.dependencies") + .unwrap() + .insert(dep_name, Item::Value(toml_edit::Value::from("6.6"))); + + let dep = manifest + .get_or_insert_nested_table("envs.python.dependencies") + .unwrap() + .get(dep_name); + + assert!(dep.is_some()); + + // Existing entries are also still there + let dummy = manifest + .get_or_insert_nested_table("envs.python.dependencies") + .unwrap() + .get("dummy"); + + assert!(dummy.is_some()) + } + + #[test] + fn test_get_or_insert_nested_table_no_empty_tables() { + let toml = r#" +[envs.python] +channels = ["dummy-channel"] +"#; + let table_name = "test"; + let mut manifest = TomlDocument::new(DocumentMut::from_str(toml).unwrap()); + manifest.get_or_insert_nested_table(table_name).unwrap(); + + // No empty table is being created + assert!(!manifest.0.to_string().contains("[test]")); + } +} diff --git a/crates/pixi_manifest/src/toml/environment.rs b/crates/pixi_manifest/src/toml/environment.rs new file mode 100644 index 000000000..cebe7fe61 --- /dev/null +++ b/crates/pixi_manifest/src/toml/environment.rs @@ -0,0 +1,33 @@ +use crate::utils::PixiSpanned; +use serde::{Deserialize, Deserializer}; + +/// Helper struct to deserialize the environment from TOML. +/// The environment description can only hold these values. 
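`TomlEnvironmentList`, defined next, accepts either a bare feature list or a table with extra keys. A runnable sketch of the same two shapes with simplified stand-in types (the real `TomlEnvironment` wraps `features` in `PixiSpanned` and the list type uses a hand-written visitor):

```rust
// Runnable sketch of the two environment shapes accepted below. `Env` is a
// simplified stand-in for `TomlEnvironment`.
use std::collections::BTreeMap;

use serde::Deserialize;

#[derive(Debug, Deserialize)]
#[serde(deny_unknown_fields, rename_all = "kebab-case")]
struct Env {
    #[serde(default)]
    features: Vec<String>,
    solve_group: Option<String>,
    #[serde(default)]
    no_default_feature: bool,
}

#[derive(Debug, Deserialize)]
#[serde(untagged)]
enum EnvOrSeq {
    Map(Env),
    Seq(Vec<String>),
}

fn main() {
    // Both a bare feature list and a full table are accepted.
    let envs: BTreeMap<String, EnvOrSeq> = toml_edit::de::from_str(
        r#"
test = ["test"]
prod = { features = ["prod"], solve-group = "main", no-default-feature = true }
"#,
    )
    .unwrap();
    assert!(matches!(envs["test"], EnvOrSeq::Seq(_)));
    assert!(matches!(envs["prod"], EnvOrSeq::Map(_)));
}
```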
+#[derive(Debug, Deserialize)] +#[serde(deny_unknown_fields, rename_all = "kebab-case")] +pub struct TomlEnvironment { + #[serde(default)] + pub features: PixiSpanned>, + pub solve_group: Option, + #[serde(default)] + pub no_default_feature: bool, +} + +#[derive(Debug)] +pub enum TomlEnvironmentList { + Map(TomlEnvironment), + Seq(Vec), +} + +impl<'de> Deserialize<'de> for TomlEnvironmentList { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + serde_untagged::UntaggedEnumVisitor::new() + .map(|map| map.deserialize().map(TomlEnvironmentList::Map)) + .seq(|seq| seq.deserialize().map(TomlEnvironmentList::Seq)) + .expecting("either a map or a sequence") + .deserialize(deserializer) + } +} diff --git a/crates/pixi_manifest/src/toml/feature.rs b/crates/pixi_manifest/src/toml/feature.rs new file mode 100644 index 000000000..b695479bc --- /dev/null +++ b/crates/pixi_manifest/src/toml/feature.rs @@ -0,0 +1,84 @@ +use std::collections::HashMap; + +use indexmap::{IndexMap, IndexSet}; +use rattler_conda_types::Platform; +use rattler_solve::ChannelPriority; +use serde::Deserialize; +use serde_with::serde_as; + +use crate::{ + pypi::{pypi_options::PypiOptions, PyPiPackageName}, + toml::{TomlPrioritizedChannel, TomlTarget}, + utils::{package_map::UniquePackageMap, PixiSpanned}, + Activation, Feature, FeatureName, Preview, PyPiRequirement, SystemRequirements, TargetSelector, + Targets, Task, TaskName, TomlError, +}; + +#[serde_as] +#[derive(Debug, Deserialize)] +#[serde(deny_unknown_fields, rename_all = "kebab-case")] +pub struct TomlFeature { + #[serde(default)] + pub platforms: Option>>, + #[serde(default)] + pub channels: Option>, + #[serde(default)] + pub channel_priority: Option, + #[serde(default)] + pub system_requirements: SystemRequirements, + #[serde(default)] + pub target: IndexMap, TomlTarget>, + #[serde(default)] + pub dependencies: Option>, + #[serde(default)] + pub host_dependencies: Option>, + #[serde(default)] + pub build_dependencies: Option>, + #[serde(default)] + pub pypi_dependencies: Option>, + + /// Additional information to activate an environment. + #[serde(default)] + pub activation: Option, + + /// Target specific tasks to run in the environment + #[serde(default)] + pub tasks: HashMap, + + /// Additional options for PyPi dependencies. 
+ #[serde(default)] + pub pypi_options: Option, +} + +impl TomlFeature { + pub fn into_feature(self, name: FeatureName, preview: &Preview) -> Result { + let default_target = TomlTarget { + dependencies: self.dependencies, + host_dependencies: self.host_dependencies, + build_dependencies: self.build_dependencies, + run_dependencies: None, + pypi_dependencies: self.pypi_dependencies, + activation: self.activation, + tasks: self.tasks, + } + .into_feature_target(preview)?; + + let mut targets = IndexMap::new(); + for (selector, target) in self.target { + let target = target.into_feature_target(preview)?; + targets.insert(selector, target); + } + + Ok(Feature { + name, + platforms: self.platforms, + channels: self + .channels + .map(|channels| channels.into_iter().map(|channel| channel.into()).collect()), + channel_priority: self.channel_priority, + system_requirements: self.system_requirements, + pypi_options: self.pypi_options, + targets: Targets::from_default_and_user_defined(default_target, targets), + }) + } +} diff --git a/crates/pixi_manifest/src/toml/manifest.rs b/crates/pixi_manifest/src/toml/manifest.rs new file mode 100644 index 000000000..0e4057bed --- /dev/null +++ b/crates/pixi_manifest/src/toml/manifest.rs @@ -0,0 +1,590 @@ +use std::collections::HashMap; + +use indexmap::IndexMap; +use itertools::chain; +use serde::Deserialize; +use serde_with::serde_as; + +use crate::{ + environment::EnvironmentIdx, + error::{FeatureNotEnabled, InvalidNonPackageDependencies}, + manifests::PackageManifest, + pypi::{pypi_options::PypiOptions, PyPiPackageName}, + toml::{ + environment::TomlEnvironmentList, ExternalPackageProperties, ExternalWorkspaceProperties, + PackageError, TomlFeature, TomlPackage, TomlTarget, TomlWorkspace, WorkspaceError, + }, + utils::{package_map::UniquePackageMap, PixiSpanned}, + Activation, BuildSystem, Environment, EnvironmentName, Environments, Feature, FeatureName, + KnownPreviewFeature, PyPiRequirement, SolveGroups, SystemRequirements, TargetSelector, Targets, + Task, TaskName, TomlError, WorkspaceManifest, +}; + +/// Raw representation of a pixi manifest. This is the deserialized form of the +/// manifest without any validation logic applied. +#[serde_as] +#[derive(Debug, Deserialize)] +#[serde(deny_unknown_fields, rename_all = "kebab-case")] +pub struct TomlManifest { + #[serde(alias = "project")] + pub workspace: PixiSpanned, + + pub package: Option>, + + #[serde(default)] + pub system_requirements: SystemRequirements, + + #[serde(default)] + pub target: IndexMap, TomlTarget>, + + // HACK: If we use `flatten`, unknown keys will point to the wrong location in the + // file. When https://github.com/toml-rs/toml/issues/589 is fixed we should use that + // + // Instead we currently copy the keys from the Target deserialize implementation which + // is really ugly. + // + // #[serde(flatten)] + // default_target: Target, + #[serde(default)] + pub dependencies: Option>, + + #[serde(default)] + pub host_dependencies: Option>, + + #[serde(default)] + pub build_dependencies: Option>, + + #[serde(default)] + pub run_dependencies: Option>, + + #[serde(default)] + pub pypi_dependencies: Option>, + + /// Additional information to activate an environment. + #[serde(default)] + pub activation: Option, + + /// Target specific tasks to run in the environment + #[serde(default)] + pub tasks: HashMap, + + /// The features defined in the project. + #[serde(default)] + pub feature: IndexMap, + + /// The environments the project can create. 
+ #[serde(default)] + pub environments: IndexMap<EnvironmentName, TomlEnvironmentList>, + + /// pypi-options + #[serde(default)] + pub pypi_options: Option<PypiOptions>, + + /// The build section + #[serde(default)] + pub build_system: Option<PixiSpanned<BuildSystem>>, + + /// The URI for the manifest schema which is unused by pixi + #[serde(rename = "$schema")] + pub _schema: Option<String>, + + /// The tool configuration which is unused by pixi + #[serde(default, skip_serializing, rename = "tool")] + pub _tool: serde::de::IgnoredAny, +} + +impl TomlManifest { + /// Parses a toml string into a project manifest. + pub fn from_toml_str(source: &str) -> Result<Self, TomlError> { + toml_edit::de::from_str(source).map_err(TomlError::from) + } + + pub fn is_pixi_build_enabled(&self) -> bool { + self.workspace + .value + .preview + .is_enabled(KnownPreviewFeature::PixiBuild) + } + + /// Checks whether dependency sections are used that will have no effect. + fn check_dependency_usage(&self) -> Result<(), TomlError> { + // If `pixi-build` is not enabled then we can ignore the checks. + if !self.is_pixi_build_enabled() { + return Ok(()); + } + + // If the `[package]` section is present then we can ignore the checks. + if self.package.is_some() { + return Ok(()); + } + + // Find all the dependency sections which are not allowed without the + // `[package]` section. + let top_level_dependencies = vec![ + self.run_dependencies.as_ref().and_then(PixiSpanned::span), + self.host_dependencies.as_ref().and_then(PixiSpanned::span), + self.build_dependencies.as_ref().and_then(PixiSpanned::span), + ]; + let target_dependencies = self.target.values().flat_map(|t| { + [ + t.run_dependencies.as_ref().and_then(PixiSpanned::span), + t.host_dependencies.as_ref().and_then(PixiSpanned::span), + t.build_dependencies.as_ref().and_then(PixiSpanned::span), + ] + }); + let feature_dependencies = self.feature.values().flat_map(|f| { + let top_level_dependencies = [ + f.host_dependencies.as_ref().and_then(PixiSpanned::span), + f.build_dependencies.as_ref().and_then(PixiSpanned::span), + ]; + let target_dependencies = f.target.values().flat_map(|t| { + [ + t.host_dependencies.as_ref().and_then(PixiSpanned::span), + t.build_dependencies.as_ref().and_then(PixiSpanned::span), + ] + }); + chain!(top_level_dependencies, target_dependencies) + }); + let invalid_dependency_sections = chain!( + top_level_dependencies, + target_dependencies, + feature_dependencies + ) + .flatten() + .collect::<Vec<_>>(); + + if invalid_dependency_sections.is_empty() { + Ok(()) + } else { + Err(InvalidNonPackageDependencies { + invalid_dependency_sections, + } + .into()) + } + } + + /// Converts the raw manifest into a workspace manifest and, when a + /// `[package]` section is present, a package manifest. + /// + /// The `name` is used to set the workspace name in the manifest if it is + /// not set there. A missing name in the manifest is not allowed.
+ pub fn into_manifests( + self, + external: ExternalWorkspaceProperties, + ) -> Result<(WorkspaceManifest, Option), TomlError> { + self.check_dependency_usage()?; + + let preview = &self.workspace.value.preview; + let pixi_build_enabled = self.is_pixi_build_enabled(); + + let default_top_level_target = TomlTarget { + dependencies: self.dependencies, + host_dependencies: self.host_dependencies, + build_dependencies: self.build_dependencies, + run_dependencies: self.run_dependencies, + pypi_dependencies: self.pypi_dependencies, + activation: self.activation, + tasks: self.tasks, + }; + + let (default_workspace_target, default_package_target) = + default_top_level_target.into_top_level_targets(preview)?; + + let mut workspace_targets = IndexMap::new(); + let mut package_targets = IndexMap::new(); + for (selector, target) in self.target { + let (workspace_target, package_target) = target.into_top_level_targets(preview)?; + if let Some(package_target) = package_target { + package_targets.insert(selector.clone(), package_target); + } + workspace_targets.insert(selector, workspace_target); + } + + // Construct a default feature + let default_feature = Feature { + name: FeatureName::Default, + + // The default feature does not overwrite the platforms or channels from the project + // metadata. + platforms: None, + channels: None, + + channel_priority: self.workspace.value.channel_priority, + + system_requirements: self.system_requirements, + + // Use the pypi-options from the manifest for + // the default feature + pypi_options: self.pypi_options, + + // Combine the default target with all user specified targets + targets: Targets::from_default_and_user_defined( + default_workspace_target, + workspace_targets, + ), + }; + + // Construct the features including the default feature + let features: IndexMap = + IndexMap::from_iter([(FeatureName::Default, default_feature)]); + let named_features = self + .feature + .into_iter() + .map(|(name, feature)| { + let feature = feature.into_feature(name.clone(), preview)?; + Ok((name, feature)) + }) + .collect::, TomlError>>()?; + let features = features.into_iter().chain(named_features).collect(); + + // Construct the environments including the default environment + let mut environments = Environments::default(); + let mut solve_groups = SolveGroups::default(); + + // Add the default environment first if it was not redefined. + if !self.environments.contains_key(&EnvironmentName::Default) { + environments.environments.push(Some(Environment::default())); + environments + .by_name + .insert(EnvironmentName::Default, EnvironmentIdx(0)); + } + + // Add all named environments + for (name, env) in self.environments { + // Decompose the TOML + let (features, features_source_loc, solve_group, no_default_feature) = match env { + TomlEnvironmentList::Map(env) => ( + env.features.value, + env.features.span, + env.solve_group, + env.no_default_feature, + ), + TomlEnvironmentList::Seq(features) => (features, None, None, false), + }; + + let environment_idx = EnvironmentIdx(environments.environments.len()); + environments.by_name.insert(name.clone(), environment_idx); + environments.environments.push(Some(Environment { + name, + features, + features_source_loc, + solve_group: solve_group.map(|sg| solve_groups.add(sg, environment_idx)), + no_default_feature, + })); + } + + // Get the name from the [package] section if it's missing from the workspace. 
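The name resolution that follows condenses to a chain of `Option::or` fallbacks. A sketch under the assumption that `into_workspace` prefers a name set in `[workspace]` itself, then the `[package]` name gathered below, then the externally injected one (e.g. from a pyproject.toml):

```rust
// Stand-in for the name fallback used here; no name at all is a hard error.
fn resolve_workspace_name(
    workspace: Option<String>,
    package: Option<String>,
    external: Option<String>,
) -> Result<String, &'static str> {
    workspace
        .or(package)
        .or(external)
        .ok_or("Missing field `name`")
}

fn main() {
    // Mirrors `test_workspace_name_from_workspace` below: a missing
    // workspace name is filled in from the package name.
    let name = resolve_workspace_name(None, Some("foo".to_string()), None).unwrap();
    assert_eq!(name, "foo");
    assert!(resolve_workspace_name(None, None, None).is_err());
}
```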
+ let project_name = self + .package + .as_ref() + .and_then(|p| p.value.name.as_ref()) + .cloned(); + + let PixiSpanned { + span: workspace_span, + value: workspace, + } = self.workspace; + let workspace = workspace + .into_workspace(ExternalWorkspaceProperties { + name: project_name.or(external.name), + ..external + }) + .map_err(|e| match e { + WorkspaceError::MissingName => { + TomlError::MissingField("name".into(), workspace_span) + } + })?; + + let package_manifest = if let Some(PixiSpanned { + value: package, + span: package_span, + }) = self.package + { + if !pixi_build_enabled { + return Err(FeatureNotEnabled::new( + format!( + "[package] section is only allowed when the `{}` feature is enabled", + KnownPreviewFeature::PixiBuild + ), + KnownPreviewFeature::PixiBuild, + ) + .with_opt_span(package_span) + .into()); + } + + let PixiSpanned { + value: build_system, + span: _build_system_span, + } = self + .build_system + .ok_or_else(|| TomlError::MissingField("[build-system]".into(), None))?; + + let package = package + .into_package(ExternalPackageProperties { + name: Some(workspace.name.clone()), + version: workspace.version.clone(), + description: workspace.description.clone(), + authors: workspace.authors.clone(), + license: workspace.license.clone(), + license_file: workspace.license_file.clone(), + readme: workspace.readme.clone(), + homepage: workspace.homepage.clone(), + repository: workspace.repository.clone(), + documentation: workspace.documentation.clone(), + }) + .map_err(|e| match e { + PackageError::MissingName => { + TomlError::MissingField("name".into(), package_span) + } + PackageError::MissingVersion => { + TomlError::MissingField("version".into(), package_span) + } + })?; + + Some(PackageManifest { + package, + build_system, + targets: Targets::from_default_and_user_defined( + default_package_target.unwrap_or_default(), + package_targets, + ), + }) + } else { + // If we do have a build-system section we have to error out. 
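The branches below enforce that `[package]` and `[build-system]` appear together, and only under the `pixi-build` preview. Condensed into a single truth table (stand-in error strings; the real errors carry spans and help texts):

```rust
// The gating enforced by the surrounding code, as one exhaustive match.
fn validate(package: bool, build_system: bool, pixi_build: bool) -> Result<(), &'static str> {
    match (package, build_system, pixi_build) {
        (true, _, false) => Err("[package] needs the `pixi-build` preview feature"),
        (true, false, true) => Err("missing [build-system]"),
        (false, true, false) => Err("[build-system] needs the `pixi-build` preview feature"),
        (false, true, true) => Err("cannot use [build-system] without [package]"),
        _ => Ok(()),
    }
}

fn main() {
    assert!(validate(true, true, true).is_ok());
    assert!(validate(false, false, false).is_ok());
    assert!(validate(true, true, false).is_err());
    assert!(validate(false, true, true).is_err());
}
```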
+ if let Some(PixiSpanned { + value: _, + span: build_system_span, + }) = self.build_system + { + return if !pixi_build_enabled { + Err(FeatureNotEnabled::new( + format!( + "[build-system] section is only allowed when the `{}` feature is enabled", + KnownPreviewFeature::PixiBuild + ), + KnownPreviewFeature::PixiBuild, + ) + .with_opt_span(build_system_span) + .into()) + } else { + Err(TomlError::Generic( + "Cannot use [build-system] without [package]".into(), + build_system_span, + )) + }; + } + + None + }; + + let workspace_manifest = WorkspaceManifest { + workspace, + features, + environments, + solve_groups, + }; + + Ok((workspace_manifest, package_manifest)) + } +} + +#[cfg(test)] +mod test { + use insta::assert_snapshot; + + use super::*; + use crate::utils::test_utils::expect_parse_failure; + + #[test] + fn test_build_section_without_preview() { + assert_snapshot!(expect_parse_failure( + r#" + [workspace] + name = "foo" + channels = [] + platforms = [] + + [build-system] + dependencies = ["python-build-backend > 12"] + build-backend = "python-build-backend" + channels = [] + "#, + )); + } + + #[test] + fn test_build_section_without_package() { + assert_snapshot!(expect_parse_failure( + r#" + [workspace] + name = "foo" + channels = [] + platforms = [] + preview = ["pixi-build"] + + [build-system] + dependencies = ["python-build-backend > 12"] + build-backend = "python-build-backend" + channels = [] + "#, + )); + } + + #[test] + fn test_package_without_build_section() { + assert_snapshot!(expect_parse_failure( + r#" + [workspace] + name = "foo" + channels = [] + platforms = [] + + [package] + "#, + )); + } + + #[test] + fn test_missing_version() { + assert_snapshot!(expect_parse_failure( + r#" + [workspace] + name = "foo" + channels = [] + platforms = [] + preview = ["pixi-build"] + + [package] + + [build-system] + dependencies = ["python-build-backend > 12"] + build-backend = "python-build-backend" + channels = [] + "#, + )); + } + + #[test] + fn test_workspace_name_required() { + assert_snapshot!(expect_parse_failure( + r#" + [workspace] + channels = [] + platforms = [] + preview = ["pixi-build"] + "#, + )); + } + + #[test] + fn test_workspace_name_from_workspace() { + let workspace_manifest = WorkspaceManifest::from_toml_str( + r#" + [workspace] + channels = [] + platforms = [] + preview = ["pixi-build"] + + [package] + name = "foo" + version = "0.1.0" + + [build-system] + dependencies = ["python-build-backend > 12"] + build-backend = "python-build-backend" + channels = [] + "#, + ) + .unwrap(); + + assert_eq!(workspace_manifest.workspace.name, "foo"); + } + + #[test] + fn test_run_dependencies_without_pixi_build() { + assert_snapshot!(expect_parse_failure( + r#" + [workspace] + channels = [] + platforms = [] + + [run-dependencies] + "#, + )); + } + + #[test] + fn test_run_dependencies_in_target_without_pixi_build() { + assert_snapshot!(expect_parse_failure( + r#" + [workspace] + channels = [] + platforms = [] + + [target.win.run-dependencies] + "#, + )); + } + + #[test] + fn test_run_dependencies_in_feature() { + assert_snapshot!(expect_parse_failure( + r#" + [workspace] + channels = [] + platforms = [] + + [feature.foobar.run-dependencies] + "#, + )); + } + + #[test] + fn test_host_dependencies_in_feature_with_pixi_build() { + assert_snapshot!(expect_parse_failure( + r#" + [workspace] + channels = [] + platforms = [] + preview = ["pixi-build"] + + [package] + + [feature.foobar.host-dependencies] + "#, + )); + } + + #[test] + fn test_build_dependencies_in_feature_with_pixi_build() 
{ + assert_snapshot!(expect_parse_failure( r#" [workspace] channels = [] platforms = [] preview = ["pixi-build"] [package] [feature.foobar.build-dependencies] "#, )); } + #[test] + fn test_invalid_non_package_sections() { + assert_snapshot!(expect_parse_failure( r#" [workspace] channels = [] platforms = [] preview = ["pixi-build"] [build-dependencies] [host-dependencies] [target.win.host-dependencies] "#, )); } +} diff --git a/crates/pixi_manifest/src/toml/mod.rs b/crates/pixi_manifest/src/toml/mod.rs new file mode 100644 index 000000000..beb0b05a1 --- /dev/null +++ b/crates/pixi_manifest/src/toml/mod.rs @@ -0,0 +1,17 @@ +mod channel; +mod document; +mod environment; +mod feature; +mod manifest; +mod package; +mod target; +mod workspace; + +pub use channel::TomlPrioritizedChannel; +pub use document::TomlDocument; +pub use environment::{TomlEnvironment, TomlEnvironmentList}; +pub use feature::TomlFeature; +pub use manifest::TomlManifest; +pub use package::{ExternalPackageProperties, PackageError, TomlPackage}; +pub use target::TomlTarget; +pub use workspace::{ExternalWorkspaceProperties, TomlWorkspace, WorkspaceError}; diff --git a/crates/pixi_manifest/src/toml/package.rs b/crates/pixi_manifest/src/toml/package.rs new file mode 100644 index 000000000..b05b43ece --- /dev/null +++ b/crates/pixi_manifest/src/toml/package.rs @@ -0,0 +1,108 @@ +use std::path::PathBuf; + +use rattler_conda_types::Version; +use serde::Deserialize; +use serde_with::{serde_as, DisplayFromStr}; +use thiserror::Error; +use url::Url; + +use crate::package::Package; +use crate::toml::workspace::ExternalWorkspaceProperties; + +/// The TOML representation of the `[package]` section in a pixi manifest. +/// +/// In TOML some of the fields can be empty even though they are required in the +/// data model (e.g. `name`, `version`). This is allowed because some of the +/// fields might be derived from other sections of the TOML. +#[serde_as] +#[derive(Debug, Clone, Deserialize)] +#[serde(deny_unknown_fields, rename_all = "kebab-case")] +pub struct TomlPackage { + // In TOML the package name can be empty. It is a required field, but this is enforced + // when converting the TOML model to the actual manifest. When using a PyProject we want to use + // the name from the PyProject file. + pub name: Option<String>, + #[serde_as(as = "Option<DisplayFromStr>")] + pub version: Option<Version>, + pub description: Option<String>, + pub authors: Option<Vec<String>>, + pub license: Option<String>, + pub license_file: Option<PathBuf>, + pub readme: Option<PathBuf>, + pub homepage: Option<Url>, + pub repository: Option<Url>, + pub documentation: Option<Url>, +} + +/// Defines some of the properties that might be defined in other parts of the +/// manifest but that we require to be set in the `[package]` section. +/// +/// This can be used to inject these properties.
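`into_package` below fills every missing `[package]` field from the workspace via `Option::or`. A tiny stand-in showing that field-wise fallback (hypothetical `Props` type, not pixi's):

```rust
// Explicit `[package]` values win; otherwise the workspace value is inherited.
#[derive(Debug, Default)]
struct Props {
    description: Option<String>,
    homepage: Option<String>,
}

fn merge(pkg: Props, external: Props) -> Props {
    Props {
        description: pkg.description.or(external.description),
        homepage: pkg.homepage.or(external.homepage),
    }
}

fn main() {
    let merged = merge(
        Props {
            description: Some("a package".to_string()),
            ..Default::default()
        },
        Props {
            homepage: Some("https://example.invalid".to_string()),
            ..Default::default()
        },
    );
    assert_eq!(merged.description.as_deref(), Some("a package"));
    assert_eq!(merged.homepage.as_deref(), Some("https://example.invalid"));
}
```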
+#[derive(Debug, Clone)] +pub struct ExternalPackageProperties { + pub name: Option, + pub version: Option, + pub description: Option, + pub authors: Option>, + pub license: Option, + pub license_file: Option, + pub readme: Option, + pub homepage: Option, + pub repository: Option, + pub documentation: Option, +} + +impl From for ExternalPackageProperties { + fn from(value: ExternalWorkspaceProperties) -> Self { + Self { + name: value.name, + version: value.version, + description: value.description, + authors: value.authors, + license: value.license, + license_file: value.license_file, + readme: value.readme, + homepage: value.homepage, + repository: value.repository, + documentation: value.documentation, + } + } +} + +#[derive(Debug, Error)] +pub enum PackageError { + #[error("missing `name` in `[package]` section")] + MissingName, + + #[error("missing `version` in `[package]` section")] + MissingVersion, +} + +impl TomlPackage { + pub fn into_package( + self, + external: ExternalPackageProperties, + ) -> Result { + let name = self + .name + .or(external.name) + .ok_or(PackageError::MissingName)?; + let version = self + .version + .or(external.version) + .ok_or(PackageError::MissingVersion)?; + + Ok(Package { + name, + version, + description: self.description.or(external.description), + authors: self.authors.or(external.authors), + license: self.license.or(external.license), + license_file: self.license_file.or(external.license_file), + readme: self.readme.or(external.readme), + homepage: self.homepage.or(external.homepage), + repository: self.repository.or(external.repository), + documentation: self.documentation.or(external.documentation), + }) + } +} diff --git a/crates/pixi_manifest/src/toml/snapshots/pixi_manifest__toml__manifest__test__build_dependencies_in_feature_with_pixi_build.snap b/crates/pixi_manifest/src/toml/snapshots/pixi_manifest__toml__manifest__test__build_dependencies_in_feature_with_pixi_build.snap new file mode 100644 index 000000000..26ea1bf26 --- /dev/null +++ b/crates/pixi_manifest/src/toml/snapshots/pixi_manifest__toml__manifest__test__build_dependencies_in_feature_with_pixi_build.snap @@ -0,0 +1,11 @@ +--- +source: crates/pixi_manifest/src/toml/manifest.rs +expression: "expect_parse_failure(r#\"\n [workspace]\n channels = []\n platforms = []\n preview = [\"pixi-build\"]\n\n [package]\n\n [feature.foobar.build-dependencies]\n \"#,)" +--- + × [build-dependencies] in features are not supported when `pixi-build` is enabled. 
+ ╭─[pixi.toml:9:9] + 8 │ + 9 │ [feature.foobar.build-dependencies] + · ─────────────────────────────────── + 10 │ + ╰──── diff --git a/crates/pixi_manifest/src/toml/snapshots/pixi_manifest__toml__manifest__test__build_section_without_package.snap b/crates/pixi_manifest/src/toml/snapshots/pixi_manifest__toml__manifest__test__build_section_without_package.snap new file mode 100644 index 000000000..e6c6b84ab --- /dev/null +++ b/crates/pixi_manifest/src/toml/snapshots/pixi_manifest__toml__manifest__test__build_section_without_package.snap @@ -0,0 +1,13 @@ +--- +source: crates/pixi_manifest/src/toml/manifest.rs +expression: "expect_parse_failure(r#\"\n [workspace]\n name = \"foo\"\n channels = []\n platforms = []\n preview = [\"pixi-build\"]\n\n [build-system]\n dependencies = [\"python-build-backend > 12\"]\n build-backend = \"python-build-backend\"\n channels = []\n \"#,)" +--- + × Cannot use [build-system] without [package] + ╭─[pixi.toml:8:9] + 7 │ + 8 │ ╭─▶ [build-system] + 9 │ │ dependencies = ["python-build-backend > 12"] + 10 │ │ build-backend = "python-build-backend" + 11 │ ╰─▶ channels = [] + 12 │ + ╰──── diff --git a/crates/pixi_manifest/src/toml/snapshots/pixi_manifest__toml__manifest__test__build_section_without_preview.snap b/crates/pixi_manifest/src/toml/snapshots/pixi_manifest__toml__manifest__test__build_section_without_preview.snap new file mode 100644 index 000000000..0618e57e6 --- /dev/null +++ b/crates/pixi_manifest/src/toml/snapshots/pixi_manifest__toml__manifest__test__build_section_without_preview.snap @@ -0,0 +1,14 @@ +--- +source: crates/pixi_manifest/src/toml/manifest.rs +expression: "expect_parse_failure(r#\"\n [workspace]\n name = \"foo\"\n channels = []\n platforms = []\n\n [build-system]\n dependencies = [\"python-build-backend > 12\"]\n build-backend = \"python-build-backend\"\n channels = []\n \"#,)" +--- + × [build-system] section is only allowed when the `pixi-build` feature is enabled + ╭─[pixi.toml:7:9] + 6 │ + 7 │ ╭─▶ [build-system] + 8 │ │ dependencies = ["python-build-backend > 12"] + 9 │ │ build-backend = "python-build-backend" + 10 │ ╰─▶ channels = [] + 11 │ + ╰──── + help: Add `preview = ["pixi-build"]` under [workspace] to enable the preview feature diff --git a/crates/pixi_manifest/src/toml/snapshots/pixi_manifest__toml__manifest__test__host_dependencies_in_feature_with_pixi_build.snap b/crates/pixi_manifest/src/toml/snapshots/pixi_manifest__toml__manifest__test__host_dependencies_in_feature_with_pixi_build.snap new file mode 100644 index 000000000..10b023621 --- /dev/null +++ b/crates/pixi_manifest/src/toml/snapshots/pixi_manifest__toml__manifest__test__host_dependencies_in_feature_with_pixi_build.snap @@ -0,0 +1,11 @@ +--- +source: crates/pixi_manifest/src/toml/manifest.rs +expression: "expect_parse_failure(r#\"\n [workspace]\n channels = []\n platforms = []\n preview = [\"pixi-build\"]\n\n [package]\n\n [feature.foobar.host-dependencies]\n \"#,)" +--- + × [host-dependencies] in features are not supported when `pixi-build` is enabled. 
+ ╭─[pixi.toml:9:9] + 8 │ + 9 │ [feature.foobar.host-dependencies] + · ────────────────────────────────── + 10 │ + ╰──── diff --git a/crates/pixi_manifest/src/toml/snapshots/pixi_manifest__toml__manifest__test__invalid_non_package_sections.snap b/crates/pixi_manifest/src/toml/snapshots/pixi_manifest__toml__manifest__test__invalid_non_package_sections.snap new file mode 100644 index 000000000..279af7eca --- /dev/null +++ b/crates/pixi_manifest/src/toml/snapshots/pixi_manifest__toml__manifest__test__invalid_non_package_sections.snap @@ -0,0 +1,19 @@ +--- +source: crates/pixi_manifest/src/toml/manifest.rs +expression: "expect_parse_failure(r#\"\n [workspace]\n channels = []\n platforms = []\n preview = [\"pixi-build\"]\n\n [build-dependencies]\n\n [host-dependencies]\n\n [target.win.host-dependencies]\n \"#,)" +--- + × build-, host- and run-dependency sections are only valid for packages. + ╭─[pixi.toml:7:9] + 6 │ + 7 │ [build-dependencies] + · ──────────────────── + 8 │ + 9 │ [host-dependencies] + · ─────────────────── + 10 │ + 11 │ [target.win.host-dependencies] + · ────────────────────────────── + 12 │ + ╰──── + help: These sections are only valid when the manifest describes a package instead of a workspace. + Add a `[package]` section to the manifest to fix this error or remove the offending sections. diff --git a/crates/pixi_manifest/src/toml/snapshots/pixi_manifest__toml__manifest__test__missing_version.snap b/crates/pixi_manifest/src/toml/snapshots/pixi_manifest__toml__manifest__test__missing_version.snap new file mode 100644 index 000000000..1a8ad2439 --- /dev/null +++ b/crates/pixi_manifest/src/toml/snapshots/pixi_manifest__toml__manifest__test__missing_version.snap @@ -0,0 +1,11 @@ +--- +source: crates/pixi_manifest/src/toml/manifest.rs +expression: "expect_parse_failure(r#\"\n [workspace]\n name = \"foo\"\n channels = []\n platforms = []\n preview = [\"pixi-build\"]\n\n [package]\n\n [build-system]\n dependencies = [\"python-build-backend > 12\"]\n build-backend = \"python-build-backend\"\n channels = []\n \"#,)" +--- + × Missing field `version` + ╭─[pixi.toml:8:9] + 7 │ + 8 │ [package] + · ───────── + 9 │ + ╰──── diff --git a/crates/pixi_manifest/src/toml/snapshots/pixi_manifest__toml__manifest__test__package_without_build_section.snap b/crates/pixi_manifest/src/toml/snapshots/pixi_manifest__toml__manifest__test__package_without_build_section.snap new file mode 100644 index 000000000..6c58c6796 --- /dev/null +++ b/crates/pixi_manifest/src/toml/snapshots/pixi_manifest__toml__manifest__test__package_without_build_section.snap @@ -0,0 +1,12 @@ +--- +source: crates/pixi_manifest/src/toml/manifest.rs +expression: "expect_parse_failure(r#\"\n [workspace]\n name = \"foo\"\n channels = []\n platforms = []\n\n [package]\n \"#,)" +--- + × [package] section is only allowed when the `pixi-build` feature is enabled + ╭─[pixi.toml:7:9] + 6 │ + 7 │ [package] + · ───────── + 8 │ + ╰──── + help: Add `preview = ["pixi-build"]` under [workspace] to enable the preview feature diff --git a/crates/pixi_manifest/src/toml/snapshots/pixi_manifest__toml__manifest__test__run_dependencies_feature.snap b/crates/pixi_manifest/src/toml/snapshots/pixi_manifest__toml__manifest__test__run_dependencies_feature.snap new file mode 100644 index 000000000..703307914 --- /dev/null +++ b/crates/pixi_manifest/src/toml/snapshots/pixi_manifest__toml__manifest__test__run_dependencies_feature.snap @@ -0,0 +1,12 @@ +--- +source: crates/pixi_manifest/src/toml/manifest.rs +expression: "expect_parse_failure(r#\"\n [workspace]\n 
channels = []\n platforms = []\n\n [feature.foobar.run-dependencies]\n \"#,)" +--- + × unknown field `run-dependencies`, expected one of `platforms`, `channels`, `channel-priority`, `system-requirements`, `target`, `dependencies`, `host-dependencies`, `build-dependencies`, `pypi- + │ dependencies`, `activation`, `tasks`, `pypi-options` + ╭─[pixi.toml:6:25] + 5 │ + 6 │ [feature.foobar.run-dependencies] + · ──────────────── + 7 │ + ╰──── diff --git a/crates/pixi_manifest/src/toml/snapshots/pixi_manifest__toml__manifest__test__run_dependencies_in_feature.snap b/crates/pixi_manifest/src/toml/snapshots/pixi_manifest__toml__manifest__test__run_dependencies_in_feature.snap new file mode 100644 index 000000000..703307914 --- /dev/null +++ b/crates/pixi_manifest/src/toml/snapshots/pixi_manifest__toml__manifest__test__run_dependencies_in_feature.snap @@ -0,0 +1,12 @@ +--- +source: crates/pixi_manifest/src/toml/manifest.rs +expression: "expect_parse_failure(r#\"\n [workspace]\n channels = []\n platforms = []\n\n [feature.foobar.run-dependencies]\n \"#,)" +--- + × unknown field `run-dependencies`, expected one of `platforms`, `channels`, `channel-priority`, `system-requirements`, `target`, `dependencies`, `host-dependencies`, `build-dependencies`, `pypi- + │ dependencies`, `activation`, `tasks`, `pypi-options` + ╭─[pixi.toml:6:25] + 5 │ + 6 │ [feature.foobar.run-dependencies] + · ──────────────── + 7 │ + ╰──── diff --git a/crates/pixi_manifest/src/toml/snapshots/pixi_manifest__toml__manifest__test__run_dependencies_in_target_without_pixi_build.snap b/crates/pixi_manifest/src/toml/snapshots/pixi_manifest__toml__manifest__test__run_dependencies_in_target_without_pixi_build.snap new file mode 100644 index 000000000..8dd37163c --- /dev/null +++ b/crates/pixi_manifest/src/toml/snapshots/pixi_manifest__toml__manifest__test__run_dependencies_in_target_without_pixi_build.snap @@ -0,0 +1,12 @@ +--- +source: crates/pixi_manifest/src/toml/manifest.rs +expression: "expect_parse_failure(r#\"\n [workspace]\n channels = []\n platforms = []\n\n [target.win.run-dependencies]\n \"#,)" +--- + × [run-dependencies] are only available when using the `pixi-build` feature. + ╭─[pixi.toml:6:9] + 5 │ + 6 │ [target.win.run-dependencies] + · ───────────────────────────── + 7 │ + ╰──── + help: Add `preview = ["pixi-build"]` under [workspace] to enable the preview feature diff --git a/crates/pixi_manifest/src/toml/snapshots/pixi_manifest__toml__manifest__test__run_dependencies_without_pixi_build.snap b/crates/pixi_manifest/src/toml/snapshots/pixi_manifest__toml__manifest__test__run_dependencies_without_pixi_build.snap new file mode 100644 index 000000000..21d5a74ca --- /dev/null +++ b/crates/pixi_manifest/src/toml/snapshots/pixi_manifest__toml__manifest__test__run_dependencies_without_pixi_build.snap @@ -0,0 +1,12 @@ +--- +source: crates/pixi_manifest/src/toml/manifest.rs +expression: "expect_parse_failure(r#\"\n [workspace]\n channels = []\n platforms = []\n\n [run-dependencies]\n \"#,)" +--- + × [run-dependencies] are only available when using the `pixi-build` feature. 
+ ╭─[pixi.toml:6:9] + 5 │ + 6 │ [run-dependencies] + · ────────────────── + 7 │ + ╰──── + help: Add `preview = ["pixi-build"]` under [workspace] to enable the preview feature diff --git a/crates/pixi_manifest/src/toml/snapshots/pixi_manifest__toml__manifest__test__workspace_name_required.snap b/crates/pixi_manifest/src/toml/snapshots/pixi_manifest__toml__manifest__test__workspace_name_required.snap new file mode 100644 index 000000000..febb40266 --- /dev/null +++ b/crates/pixi_manifest/src/toml/snapshots/pixi_manifest__toml__manifest__test__workspace_name_required.snap @@ -0,0 +1,13 @@ +--- +source: crates/pixi_manifest/src/toml/manifest.rs +expression: "expect_parse_failure(r#\"\n [workspace]\n channels = []\n platforms = []\n preview = [\"pixi-build\"]\n \"#,)" +--- + × Missing field `name` + ╭─[pixi.toml:2:9] + 1 │ + 2 │ ╭─▶ [workspace] + 3 │ │ channels = [] + 4 │ │ platforms = [] + 5 │ ╰─▶ preview = ["pixi-build"] + 6 │ + ╰──── diff --git a/crates/pixi_manifest/src/toml/target.rs b/crates/pixi_manifest/src/toml/target.rs new file mode 100644 index 000000000..a3cbc2710 --- /dev/null +++ b/crates/pixi_manifest/src/toml/target.rs @@ -0,0 +1,165 @@ +use std::collections::HashMap; + +use indexmap::IndexMap; +use pixi_spec::PixiSpec; +use serde::Deserialize; +use serde_with::serde_as; + +use crate::{ + error::FeatureNotEnabled, + pypi::PyPiPackageName, + target::PackageTarget, + utils::{package_map::UniquePackageMap, PixiSpanned}, + Activation, KnownPreviewFeature, Preview, PyPiRequirement, SpecType, Task, TaskName, TomlError, + WorkspaceTarget, +}; + +#[serde_as] +#[derive(Debug, Default, Deserialize)] +#[serde(rename_all = "kebab-case")] +#[serde(deny_unknown_fields)] +pub struct TomlTarget { + #[serde(default)] + pub dependencies: Option<PixiSpanned<UniquePackageMap>>, + + #[serde(default)] + pub host_dependencies: Option<PixiSpanned<UniquePackageMap>>, + + #[serde(default)] + pub build_dependencies: Option<PixiSpanned<UniquePackageMap>>, + + #[serde(default)] + pub run_dependencies: Option<PixiSpanned<UniquePackageMap>>, + + #[serde(default)] + pub pypi_dependencies: Option<IndexMap<PyPiPackageName, PyPiRequirement>>, + + /// Additional information to activate an environment. + #[serde(default)] + pub activation: Option<Activation>, + + /// Target specific tasks to run in the environment + #[serde(default)] + pub tasks: HashMap<TaskName, Task>, +} + +impl TomlTarget { + /// Called to convert this instance into a workspace and optional package + /// target. Based on whether `pixi-build` is enabled a different path is + /// used. + pub fn into_top_level_targets( + self, + preview: &Preview, + ) -> Result<(WorkspaceTarget, Option<PackageTarget>), TomlError> { + let pixi_build_enabled = preview.is_enabled(KnownPreviewFeature::PixiBuild); + + if pixi_build_enabled { + self.into_workspace_and_package_targets() + } else { + Ok((self.into_workspace_target()?, None)) + } + } + + /// Called to convert this instance into a workspace target of a feature. + pub fn into_feature_target(self, preview: &Preview) -> Result<WorkspaceTarget, TomlError> { + let pixi_build_enabled = preview.is_enabled(KnownPreviewFeature::PixiBuild); + + if pixi_build_enabled { + if let Some(run_dependencies) = self.run_dependencies { + return Err(TomlError::Generic( + "[run-dependencies] in features are not supported.".into(), + run_dependencies.span, + )); + } + + if let Some(host_dependencies) = self.host_dependencies { + return Err(TomlError::Generic( + "[host-dependencies] in features are not supported when `pixi-build` is enabled." + .into(), + host_dependencies.span, + )); + } + + if let Some(build_dependencies) = self.build_dependencies { + return Err(TomlError::Generic( + "[build-dependencies] in features are not supported when `pixi-build` is enabled." + .into(), + build_dependencies.span, + )); + } + } + + Ok(WorkspaceTarget { + dependencies: combine_target_dependencies([ + (SpecType::Run, self.dependencies), + (SpecType::Host, self.host_dependencies), + (SpecType::Build, self.build_dependencies), + ]), + pypi_dependencies: self.pypi_dependencies, + activation: self.activation, + tasks: self.tasks, + }) + } + + /// Called to convert this instance into a workspace and optional package + /// target. + fn into_workspace_and_package_targets( + self, + ) -> Result<(WorkspaceTarget, Option<PackageTarget>), TomlError> { + let workspace_target = WorkspaceTarget { + dependencies: combine_target_dependencies([(SpecType::Run, self.dependencies)]), + pypi_dependencies: self.pypi_dependencies, + activation: self.activation, + tasks: self.tasks, + }; + + let package_dependencies = combine_target_dependencies([ + (SpecType::Run, self.run_dependencies), + (SpecType::Host, self.host_dependencies), + (SpecType::Build, self.build_dependencies), + ]); + + let package_target = if package_dependencies.is_empty() { + None + } else { + Some(PackageTarget { + dependencies: package_dependencies, + }) + }; + + Ok((workspace_target, package_target)) + } + + /// Called when parsing the manifest as a pre-pixi-build manifest. + fn into_workspace_target(self) -> Result<WorkspaceTarget, TomlError> { + if let Some(run_dependencies) = self.run_dependencies { + return Err(TomlError::FeatureNotEnabled( + FeatureNotEnabled::new( + "[run-dependencies] are only available when using the `pixi-build` feature.", + KnownPreviewFeature::PixiBuild, + ) + .with_opt_span(run_dependencies.span), + )); + } + + Ok(WorkspaceTarget { + dependencies: combine_target_dependencies([ + (SpecType::Run, self.dependencies), + (SpecType::Host, self.host_dependencies), + (SpecType::Build, self.build_dependencies), + ]), + pypi_dependencies: self.pypi_dependencies, + activation: self.activation, + tasks: self.tasks, + }) + } +} + +/// Combines different target dependencies into a single map. +pub(super) fn combine_target_dependencies( + iter: impl IntoIterator<Item = (SpecType, Option<PixiSpanned<UniquePackageMap>>)>, +) -> HashMap<SpecType, IndexMap<rattler_conda_types::PackageName, PixiSpec>> { + iter.into_iter() + .filter_map(|(ty, deps)| deps.map(|deps| (ty, deps.value.into()))) + .collect() +}
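Annotation (not part of the patch): with `pixi-build` enabled, `into_top_level_targets` routes plain `[dependencies]` to the workspace side while `run-/host-/build-dependencies` materialize a package target. A minimal sketch of that split, assuming the `toml` crate for deserialization and a `Preview` value with `pixi-build` enabled:

    // Hypothetical check, not part of the patch.
    fn split_example(preview: &Preview) -> Result<(), Box<dyn std::error::Error>> {
        let target: TomlTarget = toml::from_str(
            "[dependencies]\npython = \"*\"\n\n[run-dependencies]\nboltons = \"*\"\n",
        )?;
        let (workspace, package) = target.into_top_level_targets(preview)?;
        // [dependencies] stays on the workspace target (keyed as SpecType::Run)...
        assert!(workspace.dependencies.contains_key(&SpecType::Run));
        // ...while [run-dependencies] forces a package target into existence.
        assert!(package.is_some());
        Ok(())
    }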
diff --git a/crates/pixi_manifest/src/toml/workspace.rs b/crates/pixi_manifest/src/toml/workspace.rs new file mode 100644 index 000000000..9c77ad14c --- /dev/null +++ b/crates/pixi_manifest/src/toml/workspace.rs @@ -0,0 +1,101 @@ +use std::{collections::HashMap, path::PathBuf}; + +use indexmap::IndexSet; +use rattler_conda_types::{NamedChannelOrUrl, Platform, Version}; +use rattler_solve::ChannelPriority; +use serde::Deserialize; +use serde_with::{serde_as, DisplayFromStr}; +use thiserror::Error; +use url::Url; + +use crate::{ + preview::Preview, pypi::pypi_options::PypiOptions, utils::PixiSpanned, PrioritizedChannel, + Workspace, +}; + +/// The TOML representation of the `[workspace]` section in a pixi manifest. +#[serde_as] +#[derive(Debug, Clone, Deserialize)] +#[serde(deny_unknown_fields, rename_all = "kebab-case")] +pub struct TomlWorkspace { + // In TOML the workspace name can be empty. It is a required field, but this is enforced + // when converting the TOML model to the actual manifest. When using a PyProject we want to use + // the name from the PyProject file. + pub name: Option<String>, + + #[serde_as(as = "Option<DisplayFromStr>")] + pub version: Option<Version>, + pub description: Option<String>, + pub authors: Option<Vec<String>>, + #[serde_as(as = "IndexSet")] + pub channels: IndexSet<PrioritizedChannel>, + #[serde(default)] + pub channel_priority: Option<ChannelPriority>, + // TODO: This is actually slightly different from the rattler_conda_types::Platform because it + // should not include noarch. + pub platforms: PixiSpanned<IndexSet<Platform>>, + pub license: Option<String>, + pub license_file: Option<PathBuf>, + pub readme: Option<PathBuf>, + pub homepage: Option<Url>, + pub repository: Option<Url>, + pub documentation: Option<Url>, + pub conda_pypi_map: Option<HashMap<NamedChannelOrUrl, String>>, + pub pypi_options: Option<PypiOptions>, + + #[serde(default)] + pub preview: Preview, +} + +/// Defines some of the properties that might be defined in other parts of the +/// manifest but we do require to be set in the workspace section. +/// +/// This can be used to inject these properties. +#[derive(Debug, Clone, Default)] +pub struct ExternalWorkspaceProperties { + pub name: Option<String>, + pub version: Option<Version>, + pub description: Option<String>, + pub authors: Option<Vec<String>>, + pub license: Option<String>, + pub license_file: Option<PathBuf>, + pub readme: Option<PathBuf>, + pub homepage: Option<Url>, + pub repository: Option<Url>, + pub documentation: Option<Url>, +} + +#[derive(Debug, Error)] +pub enum WorkspaceError { + #[error("missing `name` in `[workspace]` section")] + MissingName, +} + +impl TomlWorkspace { + pub fn into_workspace( + self, + external: ExternalWorkspaceProperties, + ) -> Result<Workspace, WorkspaceError> { + Ok(Workspace { + name: self + .name + .or(external.name) + .ok_or(WorkspaceError::MissingName)?, + version: self.version.or(external.version), + description: self.description.or(external.description), + authors: self.authors.or(external.authors), + license: self.license.or(external.license), + license_file: self.license_file.or(external.license_file), + readme: self.readme.or(external.readme), + homepage: self.homepage.or(external.homepage), + repository: self.repository.or(external.repository), + documentation: self.documentation.or(external.documentation), + channels: self.channels, + channel_priority: self.channel_priority, + platforms: self.platforms, + conda_pypi_map: self.conda_pypi_map, + pypi_options: self.pypi_options, + preview: self.preview, + }) + } +}
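Annotation (not part of the patch): `ExternalWorkspaceProperties` is the hook that lets a `pyproject.toml` based manifest supply fields the `[workspace]` table leaves out; `MissingName` is only raised when neither side provides a name. Sketch of a hypothetical caller:

    // Inject the project name taken from pyproject.toml's [project] table.
    let workspace = toml_workspace.into_workspace(ExternalWorkspaceProperties {
        name: Some("my-project".to_string()),
        ..ExternalWorkspaceProperties::default()
    })?;
    assert_eq!(workspace.name, "my-project");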
diff --git a/crates/pixi_manifest/src/utils/mod.rs b/crates/pixi_manifest/src/utils/mod.rs index 32888c077..8f481517e 100644 --- a/crates/pixi_manifest/src/utils/mod.rs +++ b/crates/pixi_manifest/src/utils/mod.rs @@ -1,5 +1,9 @@ +pub mod package_map; mod spanned; +#[cfg(test)] +pub(crate) mod test_utils; + pub use spanned::PixiSpanned; use url::Url; diff --git a/crates/pixi_manifest/src/utils/package_map.rs b/crates/pixi_manifest/src/utils/package_map.rs new file mode 100644 index 000000000..019de95b2 --- /dev/null +++ b/crates/pixi_manifest/src/utils/package_map.rs @@ -0,0 +1,89 @@ +use indexmap::IndexMap; +use pixi_spec::PixiSpec; +use serde::{ + de::{DeserializeSeed, MapAccess, Visitor}, + Deserialize, Deserializer, Serialize, +}; +use std::ops::DerefMut; +use std::{fmt, marker::PhantomData, ops::Deref}; + +#[derive(Clone, Default, Debug, Serialize)] +#[serde(transparent)] +pub struct UniquePackageMap(IndexMap<rattler_conda_types::PackageName, PixiSpec>); + +impl From<UniquePackageMap> for IndexMap<rattler_conda_types::PackageName, PixiSpec> { + fn from(value: UniquePackageMap) -> Self { + value.0 + } +} + +impl Deref for UniquePackageMap { + type Target = IndexMap<rattler_conda_types::PackageName, PixiSpec>; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl DerefMut for UniquePackageMap { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} + +impl<'de> Deserialize<'de> for UniquePackageMap { + fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> + where + D: Deserializer<'de>, + { + struct PackageMapVisitor(PhantomData<()>); + + impl<'de> Visitor<'de> for PackageMapVisitor { + type Value = IndexMap<rattler_conda_types::PackageName, PixiSpec>; + + fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + write!(formatter, "a map") + } + + fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error> + where + A: MapAccess<'de>, + { + let mut result = IndexMap::new(); + while let Some((package_name, spec)) = map.next_entry_seed::<PackageMap, PhantomData<PixiSpec>>( + PackageMap(&result), + PhantomData::<PixiSpec>, + )? { + result.insert(package_name, spec); + } + + Ok(result) + } + } + let visitor = PackageMapVisitor(PhantomData); + let packages = deserializer.deserialize_map(visitor)?; + Ok(UniquePackageMap(packages)) + } +} + +struct PackageMap<'a>(&'a IndexMap<rattler_conda_types::PackageName, PixiSpec>); + +impl<'de, 'a> DeserializeSeed<'de> for PackageMap<'a> { + type Value = rattler_conda_types::PackageName; + + fn deserialize<D>(self, deserializer: D) -> Result<Self::Value, D::Error> + where + D: Deserializer<'de>, + { + let package_name = rattler_conda_types::PackageName::deserialize(deserializer)?; + match self.0.get_key_value(&package_name) { + Some((package_name, _)) => { + Err(serde::de::Error::custom( + format!( + "duplicate dependency: {} (please avoid using capitalized names for the dependencies)", package_name.as_source()) + )) + } + None => Ok(package_name), + } + } +}
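Annotation (not part of the patch): `UniquePackageMap` turns the silent last-one-wins behavior of a plain map into a hard error, because conda package names are compared in normalized (lowercased) form. A hypothetical failure case, assuming the `toml` crate:

    // `python` and `Python` normalize to the same package name, so the seeded
    // deserializer rejects the second key instead of overwriting the first.
    let err = toml::from_str::<UniquePackageMap>("python = \"*\"\nPython = \"*\"")
        .unwrap_err();
    assert!(err.to_string().contains("duplicate dependency"));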
diff --git a/crates/pixi_manifest/src/utils/test_utils.rs b/crates/pixi_manifest/src/utils/test_utils.rs new file mode 100644 index 000000000..28c6b73c6 --- /dev/null +++ b/crates/pixi_manifest/src/utils/test_utils.rs @@ -0,0 +1,28 @@ +use crate::toml::{ExternalWorkspaceProperties, TomlManifest}; +use miette::{GraphicalReportHandler, GraphicalTheme, NamedSource, Report}; + +/// A helper function that generates a snapshot of the error message when +/// parsing a manifest TOML. The rendered error is returned. +#[must_use] +pub(crate) fn expect_parse_failure(pixi_toml: &str) -> String { + let parse_error = TomlManifest::from_toml_str(pixi_toml) + .and_then(|manifest| manifest.into_manifests(ExternalWorkspaceProperties::default())) + .expect_err("parsing should fail"); + + // Disable colors in tests + let mut s = String::new(); + let report_handler = GraphicalReportHandler::new() + .with_cause_chain() + .with_break_words(false) + .with_theme(GraphicalTheme::unicode_nocolor()); + report_handler + .render_report( + &mut s, + Report::from(parse_error) + .with_source_code(NamedSource::new("pixi.toml", pixi_toml.to_string())) + .as_ref(), + ) + .unwrap(); + + s +}
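Annotation (not part of the patch): the snapshot files added earlier in this diff are produced by pairing this helper with `insta`; the `expression` header of each snapshot records the call. A typical test then reduces to:

    #[test]
    fn build_section_without_package() {
        insta::assert_snapshot!(expect_parse_failure(
            r#"
        [workspace]
        name = "foo"
        channels = []
        platforms = []
        preview = ["pixi-build"]

        [build-system]
        dependencies = ["python-build-backend > 12"]
        build-backend = "python-build-backend"
        channels = []
        "#,
        ));
    }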
diff --git a/crates/pixi_manifest/src/validation.rs b/crates/pixi_manifest/src/validation.rs index c34a38cc1..90f17c1a4 100644 --- a/crates/pixi_manifest/src/validation.rs +++ b/crates/pixi_manifest/src/validation.rs @@ -9,10 +9,11 @@ use std::{ use super::pypi::pypi_options::PypiOptions; use crate::{ - Environment, Feature, FeatureName, ParsedManifest, SystemRequirements, TargetSelector, + Environment, Feature, FeatureName, KnownPreviewFeature, SystemRequirements, TargetSelector, + WorkspaceManifest, }; -impl ParsedManifest { +impl WorkspaceManifest { /// Validate the project manifest. pub fn validate(&self, source: NamedSource<String>, root_folder: &Path) -> miette::Result<()> { // Check if the targets are defined for existing platforms @@ -20,7 +21,7 @@ impl ParsedManifest { let platforms = feature .platforms .as_ref() - .unwrap_or(&self.project.platforms); + .unwrap_or(&self.workspace.platforms); for target_sel in feature.targets.user_defined_selectors() { match target_sel { TargetSelector::Platform(p) => { @@ -98,7 +99,7 @@ impl ParsedManifest { } // parse the SPDX license expression to make sure that it is a valid expression. - if let Some(spdx_expr) = &self.project.license { + if let Some(spdx_expr) = &self.workspace.license { spdx::Expression::parse(spdx_expr) .into_diagnostic() .with_context(|| { @@ -122,8 +123,8 @@ impl ParsedManifest { Ok(()) }; - check_file_existence(&self.project.license_file)?; - check_file_existence(&self.project.readme)?; + check_file_existence(&self.workspace.license_file)?; + check_file_existence(&self.workspace.readme)?; // Validate the environments defined in the project for env in self.environments.iter() { @@ -133,18 +134,37 @@ impl ParsedManifest { } // Warn on any unknown preview features - if let Some(preview) = self.project.preview.as_ref() { - let preview = preview.unknown_preview_features(); - if !preview.is_empty() { - let are = if preview.len() > 1 { "are" } else { "is" }; - let s = if preview.len() > 1 { "s" } else { "" }; - let preview_array = if preview.len() == 1 { - format!("{:?}", preview) - } else { - format!("[{:?}]", preview.iter().format(", ")) - }; - tracing::warn!( - "The preview feature{s}: {preview_array} {are} defined in the manifest but un-used pixi"); + let preview = self.workspace.preview.unknown_preview_features(); + if !preview.is_empty() { + let are = if preview.len() > 1 { "are" } else { "is" }; + let s = if preview.len() > 1 { "s" } else { "" }; + let preview_array = if preview.len() == 1 { + format!("{:?}", preview) + } else { + format!("[{:?}]", preview.iter().format(", ")) + }; + tracing::warn!( + "The preview feature{s}: {preview_array} {are} defined in the manifest but un-used pixi"); + } + + // Check if the pixi build feature is enabled + let build_enabled = self + .workspace + .preview + .is_enabled(KnownPreviewFeature::PixiBuild); + + // Error if conda source dependencies are used while the feature is not enabled + if !build_enabled { + let supported_platforms = self.workspace.platforms.as_ref(); + // Check all features for source dependencies + for feature in self.features.values() { + if is_using_source_deps(feature, supported_platforms.iter()) { + return Err(miette::miette!( + help = "enable the `pixi-build` preview feature to use source dependencies", + "source dependencies are used in the feature '{}', but the `pixi-build` preview feature is not enabled", + feature.name )); + } } } @@ -222,7 +242,7 @@ impl ParsedManifest { .filter_map(|feature| { if feature.pypi_options().is_none() { // Use the project default features - self.project.pypi_options.as_ref() + self.workspace.pypi_options.as_ref() } else { feature.pypi_options() } @@ -234,6 +254,32 @@ } } +/// Check if any feature is making use of conda source dependencies +fn is_using_source_deps<'a>( + feature: &Feature, + supported_platforms: impl IntoIterator<Item = &'a Platform>, +) -> bool { + // List all spec types + let spec_types = [ + crate::SpecType::Build, + crate::SpecType::Run, + crate::SpecType::Host, + ]; + // Check if any of the spec types have source dependencies + for platform in supported_platforms { + for spec in spec_types { + let deps = feature.dependencies(spec, Some(*platform)); + if let Some(deps) = deps { + if deps.iter().any(|(_, spec)| spec.is_source()) { + return true; + } + } + } + } + + false +} + // Create an error report for using a platform that is not supported by the // project. fn create_unsupported_platform_report(
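Annotation (not part of the patch): the new `is_using_source_deps` walk means a manifest that uses a path (i.e. source) dependency without opting into the preview now fails validation. A hypothetical manifest that trips the check:

    // validate() is expected to reject this with:
    //   "source dependencies are used in the feature '…', but the
    //    `pixi-build` preview feature is not enabled"
    let manifest = r#"
        [workspace]
        name = "foo"
        channels = ["conda-forge"]
        platforms = ["linux-64"]

        [dependencies]
        my-lib = { path = "./my-lib" }
    "#;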
diff --git a/crates/pixi_manifest/src/metadata.rs b/crates/pixi_manifest/src/workspace.rs similarity index 70% rename from crates/pixi_manifest/src/metadata.rs rename to crates/pixi_manifest/src/workspace.rs index ddf1bd7c1..82f47373b 100644 --- a/crates/pixi_manifest/src/metadata.rs +++ b/crates/pixi_manifest/src/workspace.rs @@ -3,24 +3,18 @@ use std::{collections::HashMap, path::PathBuf}; use indexmap::IndexSet; use rattler_conda_types::{NamedChannelOrUrl, Platform, Version}; use rattler_solve::ChannelPriority; -use serde::Deserialize; -use serde_with::{serde_as, DisplayFromStr}; use url::Url; use super::pypi::pypi_options::PypiOptions; -use crate::preview::Preview; -use crate::utils::PixiSpanned; - -/// Describes the contents of the `[package]` section of the project manifest. -#[serde_as] -#[derive(Debug, Clone, Deserialize)] -#[serde(deny_unknown_fields, rename_all = "kebab-case")] -pub struct ProjectMetadata { +use crate::{preview::Preview, utils::PixiSpanned, PrioritizedChannel}; + +/// Describes the contents of the `[workspace]` section of the project manifest. +#[derive(Debug, Clone)] +pub struct Workspace { /// The name of the project - pub name: Option<String>, // set as optional to handle conversion from pyproject.toml + pub name: String, /// The version of the project - #[serde_as(as = "Option<DisplayFromStr>")] pub version: Option<Version>, /// An optional project description @@ -30,11 +24,9 @@ pub struct ProjectMetadata { pub authors: Option<Vec<String>>, /// The channels used by the project - #[serde_as(as = "IndexSet")] - pub channels: IndexSet<PrioritizedChannel>, + pub channels: IndexSet<PrioritizedChannel>, /// Channel priority for the whole project - #[serde(default)] pub channel_priority: Option<ChannelPriority>, /// The platforms this project supports @@ -67,5 +59,5 @@ pub struct ProjectMetadata { pub pypi_options: Option<PypiOptions>, /// Preview features - pub preview: Option<Preview>, + pub preview: Preview, } diff --git a/crates/pixi_record/Cargo.toml b/crates/pixi_record/Cargo.toml new file mode 100644 index 000000000..28d8782a2 --- /dev/null +++ b/crates/pixi_record/Cargo.toml @@ -0,0 +1,21 @@ +[package] +authors.workspace = true +description = "Defines a superset of conda repodata records which can also represent dependencies from source" +edition.workspace = true +homepage.workspace = true +license.workspace = true +name = "pixi_record" +readme.workspace = true +repository.workspace = true +version = "0.1.0" + +[dependencies] +file_url = { workspace = true } +pixi_spec = { workspace = true } +rattler_conda_types = { workspace = true } +rattler_digest = { workspace = true } +rattler_lock = { workspace = true } +serde = { workspace = true, features = ["derive"] } +thiserror = { workspace = true } +typed-path = { workspace = true } +url = { workspace = true }
diff --git a/crates/pixi_record/src/lib.rs b/crates/pixi_record/src/lib.rs new file mode 100644 index 000000000..ad1d0d72a --- /dev/null +++ b/crates/pixi_record/src/lib.rs @@ -0,0 +1,160 @@ +mod pinned_source; +mod source_record; + +pub use pinned_source::{ + MutablePinnedSourceSpec, ParseError, PinnedGitSpec, PinnedPathSpec, PinnedSourceSpec, + PinnedUrlSpec, SourceMismatchError, +}; +use rattler_conda_types::{MatchSpec, Matches, NamelessMatchSpec, PackageRecord, RepoDataRecord}; +use rattler_lock::{CondaPackageData, ConversionError, UrlOrPath}; +pub use source_record::{InputHash, SourceRecord}; +use thiserror::Error; + +/// A record of a conda package that is either something installable from a +/// binary file or something that still requires building. +/// +/// This is basically a superset of a regular [`RepoDataRecord`]. +#[derive(Debug, Clone)] +pub enum PixiRecord { + Binary(RepoDataRecord), + Source(SourceRecord), +} +impl PixiRecord { + /// Metadata information of the package. + pub fn package_record(&self) -> &PackageRecord { + match self { + PixiRecord::Binary(record) => &record.package_record, + PixiRecord::Source(record) => &record.package_record, + } + } + + /// Returns a reference to the binary record if it is a binary record. + pub fn as_binary(&self) -> Option<&RepoDataRecord> { + match self { + PixiRecord::Binary(record) => Some(record), + PixiRecord::Source(_) => None, + } + } + + /// Converts this instance into a binary record if it is a binary record. + pub fn into_binary(self) -> Option<RepoDataRecord> { + match self { + PixiRecord::Binary(record) => Some(record), + PixiRecord::Source(_) => None, + } + } + + /// Returns a mutable reference to the binary record if it is a binary + /// record. + pub fn as_binary_mut(&mut self) -> Option<&mut RepoDataRecord> { + match self { + PixiRecord::Binary(record) => Some(record), + PixiRecord::Source(_) => None, + } + } + + /// Returns the source record if it is a source record. + pub fn as_source(&self) -> Option<&SourceRecord> { + match self { + PixiRecord::Binary(_) => None, + PixiRecord::Source(record) => Some(record), + } + } + + /// Converts this instance into a source record if it is a source record. + pub fn into_source(self) -> Option<SourceRecord> { + match self { + PixiRecord::Binary(_) => None, + PixiRecord::Source(record) => Some(record), + } + } + + /// Returns a mutable reference to the source record if it is a source + /// record. + pub fn as_source_mut(&mut self) -> Option<&mut SourceRecord> { + match self { + PixiRecord::Binary(_) => None, + PixiRecord::Source(record) => Some(record), + } + } +} + +impl From<SourceRecord> for PixiRecord { + fn from(value: SourceRecord) -> Self { + PixiRecord::Source(value) + } +} + +impl From<RepoDataRecord> for PixiRecord { + fn from(value: RepoDataRecord) -> Self { + PixiRecord::Binary(value) + } +} + +#[derive(Debug, Error)] +pub enum ParseLockFileError { + #[error("missing field/fields '{1}' for package {0}")] + Missing(UrlOrPath, String), + + #[error("invalid url for package {0}")] + InvalidRecordUrl(UrlOrPath, #[source] file_url::FileURLParseError), + + #[error(transparent)] + PinnedSourceSpecError(#[from] pinned_source::ParseError), +} + +impl TryFrom<CondaPackageData> for PixiRecord { + type Error = ParseLockFileError; + + fn try_from(value: CondaPackageData) -> Result<Self, Self::Error> { + let record = match value { + CondaPackageData::Binary(value) => { + let location = value.location.clone(); + PixiRecord::Binary(value.try_into().map_err(|err| match err { + ConversionError::Missing(field) => ParseLockFileError::Missing(location, field), + ConversionError::LocationToUrlConversionError(err) => { + ParseLockFileError::InvalidRecordUrl(location, err) + } + })?) + } + CondaPackageData::Source(value) => PixiRecord::Source(value.try_into()?), + }; + Ok(record) + } +} + +impl From<PixiRecord> for CondaPackageData { + fn from(value: PixiRecord) -> Self { + match value { + PixiRecord::Binary(record) => record.into(), + PixiRecord::Source(record) => record.into(), + } + } +} + +impl Matches<PixiRecord> for NamelessMatchSpec { + fn matches(&self, record: &PixiRecord) -> bool { + match record { + PixiRecord::Binary(record) => self.matches(record), + PixiRecord::Source(record) => self.matches(record), + } + } +} + +impl Matches<PixiRecord> for MatchSpec { + fn matches(&self, record: &PixiRecord) -> bool { + match record { + PixiRecord::Binary(record) => self.matches(record), + PixiRecord::Source(record) => self.matches(record), + } + } +} + +impl AsRef<PackageRecord> for PixiRecord { + fn as_ref(&self) -> &PackageRecord { + match self { + PixiRecord::Binary(record) => record.as_ref(), + PixiRecord::Source(record) => record.as_ref(), + } + } +}
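Annotation (not part of the patch): because `PixiRecord` is a superset of `RepoDataRecord`, solver output can be split back into directly installable binaries and still-to-be-built sources. A sketch assuming `itertools`:

    use itertools::{Either, Itertools};

    fn split_records(records: Vec<PixiRecord>) -> (Vec<RepoDataRecord>, Vec<SourceRecord>) {
        records.into_iter().partition_map(|record| match record {
            // Binary records can be linked as-is...
            PixiRecord::Binary(repodata) => Either::Left(repodata),
            // ...source records still need a build step first.
            PixiRecord::Source(source) => Either::Right(source),
        })
    }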
diff --git a/crates/pixi_record/src/pinned_source.rs b/crates/pixi_record/src/pinned_source.rs new file mode 100644 index 000000000..e355365f6 --- /dev/null +++ b/crates/pixi_record/src/pinned_source.rs @@ -0,0 +1,357 @@ +use std::{ + fmt::{Display, Formatter}, + path::{Path, PathBuf}, +}; + +use pixi_spec::{GitReference, GitSpec, PathSourceSpec, SourceSpec, UrlSourceSpec}; +use rattler_digest::{Md5Hash, Sha256Hash}; +use rattler_lock::UrlOrPath; +use thiserror::Error; +use typed_path::Utf8TypedPathBuf; +use url::Url; + +/// Describes an exact revision of a source checkout. This is used to pin a +/// particular source definition to a revision. A git source spec does not +/// describe an exact commit. This struct describes an exact commit. +#[derive(Debug, Clone)] +pub enum PinnedSourceSpec { + Url(PinnedUrlSpec), + Git(PinnedGitSpec), + Path(PinnedPathSpec), +} + +/// Describes a mutable source spec. This is similar to a [`PinnedSourceSpec`] +/// but the contents can change over time. +#[derive(Debug, Clone)] +pub enum MutablePinnedSourceSpec { + Path(PinnedPathSpec), +} + +impl PinnedSourceSpec { + pub fn as_path(&self) -> Option<&PinnedPathSpec> { + match self { + PinnedSourceSpec::Path(spec) => Some(spec), + _ => None, + } + } + + pub fn as_url(&self) -> Option<&PinnedUrlSpec> { + match self { + PinnedSourceSpec::Url(spec) => Some(spec), + _ => None, + } + } + + pub fn as_git(&self) -> Option<&PinnedGitSpec> { + match self { + PinnedSourceSpec::Git(spec) => Some(spec), + _ => None, + } + } + + pub fn into_path(self) -> Option<PinnedPathSpec> { + match self { + PinnedSourceSpec::Path(spec) => Some(spec), + _ => None, + } + } + + pub fn into_url(self) -> Option<PinnedUrlSpec> { + match self { + PinnedSourceSpec::Url(spec) => Some(spec), + _ => None, + } + } + + pub fn into_git(self) -> Option<PinnedGitSpec> { + match self { + PinnedSourceSpec::Git(spec) => Some(spec), + _ => None, + } + } + + /// Converts this instance into a [`MutablePinnedSourceSpec`], or if this + /// instance does not refer to mutable source the original + /// [`PinnedSourceSpec`]. + /// + /// A mutable source is a source that can change over time. For example, a + /// local path. + #[allow(clippy::result_large_err)] + pub fn into_mutable(self) -> Result<MutablePinnedSourceSpec, PinnedSourceSpec> { + match self { + PinnedSourceSpec::Path(spec) => Ok(MutablePinnedSourceSpec::Path(spec)), + _ => Err(self), + } + } + + /// Returns true if the pinned source will never change. This can be useful + /// for caching purposes.
+ pub fn is_immutable(&self) -> bool { + !matches!(self, PinnedSourceSpec::Path(_)) + } +} + +impl MutablePinnedSourceSpec { + /// Returns the path spec if this instance is a path spec. + pub fn as_path(&self) -> Option<&PinnedPathSpec> { + match self { + MutablePinnedSourceSpec::Path(spec) => Some(spec), + } + } + + /// Returns the path spec if this instance is a path spec. + pub fn into_path(self) -> Option<PinnedPathSpec> { + match self { + MutablePinnedSourceSpec::Path(spec) => Some(spec), + } + } +} + +impl From<MutablePinnedSourceSpec> for PinnedSourceSpec { + fn from(value: MutablePinnedSourceSpec) -> Self { + match value { + MutablePinnedSourceSpec::Path(spec) => PinnedSourceSpec::Path(spec), + } + } +} + +/// A pinned url archive. +#[derive(Debug, Clone)] +pub struct PinnedUrlSpec { + pub url: Url, + pub sha256: Sha256Hash, + pub md5: Option<Md5Hash>, +} + +impl From<PinnedUrlSpec> for PinnedSourceSpec { + fn from(value: PinnedUrlSpec) -> Self { + PinnedSourceSpec::Url(value) + } +} + +/// A pinned version of a git checkout. +#[derive(Debug, Clone)] +pub struct PinnedGitSpec { + pub git: Url, + pub commit: String, + pub rev: Option<GitReference>, +} + +impl From<PinnedGitSpec> for PinnedSourceSpec { + fn from(value: PinnedGitSpec) -> Self { + PinnedSourceSpec::Git(value) + } +} + +/// A pinned version of a path based source dependency. +#[derive(Debug, Clone)] +pub struct PinnedPathSpec { + pub path: Utf8TypedPathBuf, +} + +impl PinnedPathSpec { + /// Resolves the path to an absolute path. + pub fn resolve(&self, project_root: &Path) -> PathBuf { + let native_path = Path::new(self.path.as_str()); + if native_path.is_absolute() { + native_path.to_path_buf() + } else { + project_root.join(native_path) + } + } +} + +impl From<PinnedPathSpec> for PinnedSourceSpec { + fn from(value: PinnedPathSpec) -> Self { + PinnedSourceSpec::Path(value) + } +} + +impl From<PinnedSourceSpec> for UrlOrPath { + fn from(value: PinnedSourceSpec) -> Self { + match value { + PinnedSourceSpec::Url(spec) => spec.into(), + PinnedSourceSpec::Git(spec) => spec.into(), + PinnedSourceSpec::Path(spec) => spec.into(), + } + } +} + +impl From<PinnedPathSpec> for UrlOrPath { + fn from(value: PinnedPathSpec) -> Self { + UrlOrPath::Path(value.path) + } +} + +impl From<PinnedGitSpec> for UrlOrPath { + fn from(_value: PinnedGitSpec) -> Self { + unimplemented!() + } +} + +impl From<PinnedUrlSpec> for UrlOrPath { + fn from(_value: PinnedUrlSpec) -> Self { + unimplemented!() + } +} + +#[derive(Debug, Error)] +pub enum ParseError {} + +impl TryFrom<UrlOrPath> for PinnedSourceSpec { + type Error = ParseError; + + fn try_from(value: UrlOrPath) -> Result<Self, Self::Error> { + match value { + UrlOrPath::Url(_) => unimplemented!(), + UrlOrPath::Path(path) => Ok(PinnedPathSpec { path }.into()), + } + } +} + +#[derive(Debug, Error)] +pub enum SourceMismatchError { + #[error("the locked path '{locked}' does not match the requested path '{requested}'")] + PathMismatch { + locked: Utf8TypedPathBuf, + requested: Utf8TypedPathBuf, + }, + + #[error("the locked url '{locked}' does not match the requested url '{requested}'")] + UrlMismatch { locked: Url, requested: Url }, + + #[error("the locked {hash} of url '{url}' ({locked}) does not match the requested {hash} ({requested})")] + UrlHashMismatch { + hash: &'static str, + url: Url, + locked: String, + requested: String, + }, + + #[error("the locked git rev '{locked}' for '{git}' does not match the requested git rev '{requested}'")] + GitRevMismatch { + git: Url, + locked: String, + requested: String, + }, + + #[error("the locked source type does not match the requested type")] + SourceTypeMismatch, +} + +impl PinnedPathSpec { + #[allow(clippy::result_large_err)] + pub fn satisfies(&self,
spec: &PathSourceSpec) -> Result<(), SourceMismatchError> { + if spec.path != self.path { + return Err(SourceMismatchError::PathMismatch { + locked: self.path.clone(), + requested: spec.path.clone(), + }); + } + Ok(()) + } +} + +impl PinnedUrlSpec { + #[allow(clippy::result_large_err)] + pub fn satisfies(&self, spec: &UrlSourceSpec) -> Result<(), SourceMismatchError> { + if spec.url != self.url { + return Err(SourceMismatchError::UrlMismatch { + locked: self.url.clone(), + requested: spec.url.clone(), + }); + } + if let Some(sha256) = &spec.sha256 { + if *sha256 != self.sha256 { + return Err(SourceMismatchError::UrlHashMismatch { + hash: "sha256", + url: self.url.clone(), + locked: format!("{:x}", self.sha256), + requested: format!("{:x}", sha256), + }); + } + } + if let Some(md5) = &spec.md5 { + if Some(md5) != self.md5.as_ref() { + return Err(SourceMismatchError::UrlHashMismatch { + hash: "md5", + url: self.url.clone(), + locked: self + .md5 + .map_or("None".to_string(), |md5| format!("{:x}", md5)), + requested: format!("{:x}", md5), + }); + } + } + Ok(()) + } +} + +impl PinnedGitSpec { + #[allow(clippy::result_large_err)] + pub fn satisfies(&self, spec: &GitSpec) -> Result<(), SourceMismatchError> { + // TODO: Normalize the git urls before comparing. + if spec.git != self.git { + return Err(SourceMismatchError::UrlMismatch { + locked: self.git.clone(), + requested: spec.git.clone(), + }); + } + + let locked_git_ref = self + .rev + .clone() + .unwrap_or_else(|| GitReference::Rev(self.commit.clone())); + + if let Some(requested_ref) = &spec.rev { + if requested_ref != &locked_git_ref { + return Err(SourceMismatchError::GitRevMismatch { + git: self.git.clone(), + locked: locked_git_ref.to_string(), + requested: requested_ref.to_string(), + }); + } + } + Ok(()) + } +} + +impl PinnedSourceSpec { + #[allow(clippy::result_large_err)] + pub fn satisfies(&self, spec: &SourceSpec) -> Result<(), SourceMismatchError> { + match (self, spec) { + (PinnedSourceSpec::Path(locked), SourceSpec::Path(spec)) => locked.satisfies(spec), + (PinnedSourceSpec::Url(locked), SourceSpec::Url(spec)) => locked.satisfies(spec), + (PinnedSourceSpec::Git(locked), SourceSpec::Git(spec)) => locked.satisfies(spec), + (_, _) => Err(SourceMismatchError::SourceTypeMismatch), + } + } +} + +impl Display for PinnedSourceSpec { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + match self { + PinnedSourceSpec::Path(spec) => write!(f, "{}", spec.path), + PinnedSourceSpec::Url(spec) => write!(f, "{}", spec.url), + PinnedSourceSpec::Git(spec) => write!(f, "{}", spec.git), + } + } +} + +impl Display for PinnedUrlSpec { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.url) + } +} + +impl Display for PinnedPathSpec { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.path) + } +} + +impl Display for PinnedGitSpec { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + write!(f, "{}@{}", self.git, self.commit) + } +} diff --git a/crates/pixi_record/src/source_record.rs b/crates/pixi_record/src/source_record.rs new file mode 100644 index 000000000..d6538ec76 --- /dev/null +++ b/crates/pixi_record/src/source_record.rs @@ -0,0 +1,103 @@ +use rattler_conda_types::{MatchSpec, Matches, NamelessMatchSpec, PackageRecord}; +use rattler_digest::{Sha256, Sha256Hash}; +use rattler_lock::{CondaPackageData, CondaSourceData}; +use serde::{Deserialize, Serialize}; + +use crate::{ParseLockFileError, PinnedSourceSpec}; + +/// A record of a conda package that still 
requires building. +#[derive(Debug, Clone)] +pub struct SourceRecord { + /// Information about the conda package. This is metadata of the package + /// after it has been built. + pub package_record: PackageRecord, + + /// Exact definition of the source of the package. + pub source: PinnedSourceSpec, + + /// The hash of the input that was used to build the metadata of the + /// package. This can be used to verify that the metadata is still valid. + /// + /// If this is `None`, the input hash was not computed or is not relevant + /// for this record. The record can always be considered up to date. + pub input_hash: Option<InputHash>, +} + +/// Defines the hash of the input files that were used to build the metadata of +/// the record. If reevaluating and hashing the globs results in a different +/// hash, the metadata is considered invalid. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct InputHash { + #[serde( + serialize_with = "rattler_digest::serde::serialize::<_, Sha256>", + deserialize_with = "rattler_digest::serde::deserialize::<_, Sha256>" + )] + pub hash: Sha256Hash, + pub globs: Vec<String>, +} + +impl From<SourceRecord> for CondaPackageData { + fn from(value: SourceRecord) -> Self { + CondaPackageData::Source(CondaSourceData { + package_record: value.package_record, + location: value.source.into(), + input: value.input_hash.map(|i| rattler_lock::InputHash { + hash: i.hash, + globs: i.globs, + }), + }) + } +} + +impl TryFrom<CondaSourceData> for SourceRecord { + type Error = ParseLockFileError; + + fn try_from(value: CondaSourceData) -> Result<Self, Self::Error> { + Ok(Self { + package_record: value.package_record, + source: value.location.try_into()?, + input_hash: value.input.map(|hash| InputHash { + hash: hash.hash, + globs: hash.globs, + }), + }) + } +} + +impl Matches<SourceRecord> for NamelessMatchSpec { + fn matches(&self, pkg: &SourceRecord) -> bool { + if !self.matches(&pkg.package_record) { + return false; + } + + if self.channel.is_some() { + // We don't have a channel in a source record. So if a matchspec requires that + // information it can't match. + return false; + } + + true + } +} + +impl Matches<SourceRecord> for MatchSpec { + fn matches(&self, pkg: &SourceRecord) -> bool { + if !self.matches(&pkg.package_record) { + return false; + } + + if self.channel.is_some() { + // We don't have a channel in a source record. So if a matchspec requires that + // information it can't match. + return false; + } + + true + } +} + +impl AsRef<PackageRecord> for SourceRecord { + fn as_ref(&self) -> &PackageRecord { + &self.package_record + } +}
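Annotation (not part of the patch): note the deliberate asymmetry in the `Matches` impls above: a spec that pins a channel can never match a source record, since a package built from source has no channel. A sketch, where `source_record` is a hypothetical `SourceRecord` whose `package_record` otherwise matches:

    // A plain name/version spec can match a source record...
    let spec = MatchSpec::from_str("foo >=1.0", ParseStrictness::Strict)?;
    // ...but the same spec scoped to a channel never will:
    let channel_spec = MatchSpec::from_str("conda-forge::foo >=1.0", ParseStrictness::Strict)?;
    assert!(!channel_spec.matches(&source_record));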
diff --git a/crates/pixi_spec/Cargo.toml b/crates/pixi_spec/Cargo.toml index 9df286523..2917dbc5a 100644 --- a/crates/pixi_spec/Cargo.toml +++ b/crates/pixi_spec/Cargo.toml @@ -12,6 +12,7 @@ version = "0.1.0" [dependencies] dirs = { workspace = true } file_url = { workspace = true } +itertools = { workspace = true } rattler_conda_types = { workspace = true } rattler_digest = { workspace = true, features = ["serde"] } serde = { workspace = true } diff --git a/crates/pixi_spec/src/git.rs b/crates/pixi_spec/src/git.rs index c6e4ca812..b6aee8972 100644 --- a/crates/pixi_spec/src/git.rs +++ b/crates/pixi_spec/src/git.rs @@ -1,3 +1,5 @@ +use std::fmt::Display; + use url::Url; /// A specification of a package from a git repository. @@ -25,3 +27,13 @@ pub enum GitReference { /// A specific commit. Rev(String), } + +impl Display for GitReference { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + GitReference::Branch(branch) => write!(f, "{}", branch), + GitReference::Tag(tag) => write!(f, "{}", tag), + GitReference::Rev(rev) => write!(f, "{}", rev), + } + } +} diff --git a/crates/pixi_spec/src/lib.rs b/crates/pixi_spec/src/lib.rs index df8a6e2f4..cb5259054 100644 --- a/crates/pixi_spec/src/lib.rs +++ b/crates/pixi_spec/src/lib.rs @@ -18,9 +18,11 @@ use std::{path::PathBuf, str::FromStr}; pub use detailed::DetailedSpec; pub use git::{GitReference, GitSpec}; +use itertools::Either; pub use path::{PathSourceSpec, PathSpec}; use rattler_conda_types::{ - ChannelConfig, NamedChannelOrUrl, NamelessMatchSpec, ParseChannelError, VersionSpec, + ChannelConfig, MatchSpec, NamedChannelOrUrl, NamelessMatchSpec, PackageName, ParseChannelError, + VersionSpec, }; use thiserror::Error; pub use url::{UrlSourceSpec, UrlSpec}; @@ -253,6 +255,41 @@ impl PixiSpec { Ok(spec) } + /// Converts this instance into a source or binary spec. + pub fn into_source_or_binary( + self, + channel_config: &ChannelConfig, + ) -> Result<Either<SourceSpec, NamelessMatchSpec>, SpecConversionError> { + match self { + PixiSpec::Version(version) => Ok(Either::Right(NamelessMatchSpec { + version: Some(version), + ..NamelessMatchSpec::default() + })), + PixiSpec::DetailedVersion(detailed) => Ok(Either::Right( + detailed.try_into_nameless_match_spec(channel_config)?, + )), + PixiSpec::Url(url) => Ok(url.into_source_or_binary().map_left(SourceSpec::Url)), + PixiSpec::Git(git) => Ok(Either::Left(SourceSpec::Git(git))), + PixiSpec::Path(path) => Ok(path + .into_source_or_binary(&channel_config.root_dir)? + .map_left(SourceSpec::Path)), + } + } + + /// Converts this instance into a named source or binary spec. + pub fn into_named_source_or_binary( + self, + package_name: PackageName, + channel_config: &ChannelConfig, + ) -> Result<Either<(PackageName, SourceSpec), MatchSpec>, SpecConversionError> { + Ok(match self.into_source_or_binary(channel_config)? { + Either::Left(source) => Either::Left((package_name, source)), + Either::Right(spec) => { + Either::Right(MatchSpec::from_nameless(spec, Some(package_name))) + } + }) + } + /// Converts this instance into a source spec if this instance represents a /// source package. #[allow(clippy::result_large_err)]
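Annotation (not part of the patch): `into_source_or_binary` is the pivot where a manifest spec gets classified. A sketch of a hypothetical caller, with made-up `build_from_source`/`solve_binary` helpers standing in for the real plumbing:

    match pixi_spec.into_source_or_binary(&channel_config)? {
        // Source specs have to go through the build backend first...
        Either::Left(source_spec) => build_from_source(source_spec)?,
        // ...binary specs can be handed straight to the conda solver.
        Either::Right(nameless_spec) => solve_binary(nameless_spec)?,
    }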
diff --git a/crates/pixi_spec/src/path.rs b/crates/pixi_spec/src/path.rs index 93abf6707..3262b84da 100644 --- a/crates/pixi_spec/src/path.rs +++ b/crates/pixi_spec/src/path.rs @@ -1,5 +1,6 @@ -use std::path::Path; +use std::path::{Path, PathBuf}; +use itertools::Either; use rattler_conda_types::{package::ArchiveIdentifier, NamelessMatchSpec}; use typed_path::{Utf8NativePathBuf, Utf8TypedPathBuf}; @@ -60,6 +61,15 @@ impl PathSpec { })) } + /// Resolves the path relative to `root_dir`. If the path is absolute, + /// it is returned verbatim. + /// + /// May return an error if the path is prefixed with `~` and the home + /// directory is undefined. + pub fn resolve(&self, root_dir: impl AsRef<Path>) -> Result<PathBuf, SpecConversionError> { + resolve_path(Path::new(self.path.as_str()), root_dir) + } + /// Converts this instance into a [`PathSourceSpec`] if the path points to a /// source package. Otherwise, returns this instance unmodified. #[allow(clippy::result_large_err)] @@ -78,6 +88,23 @@ impl PathSpec { .and_then(ArchiveIdentifier::try_from_path) .is_some() } + + /// Converts this instance into a [`PathSourceSpec`] if the path points to a + /// source package. Or to a [`NamelessMatchSpec`] otherwise. + pub fn into_source_or_binary( + self, + root_dir: &Path, + ) -> Result<Either<PathSourceSpec, NamelessMatchSpec>, SpecConversionError> { + match self.try_into_source_path() { + Ok(spec) => Ok(Either::Left(spec)), + Err(spec) => { + let nameless_match_spec = spec + .try_into_nameless_match_spec(root_dir)? + .expect("if the path is not a source package, it should be a binary package"); + Ok(Either::Right(nameless_match_spec)) + } + } + } } /// Path to a source package. Different from [`PathSpec`] in that this type only @@ -93,3 +120,31 @@ impl From<PathSourceSpec> for PathSpec { Self { path: value.path } } } + +impl PathSourceSpec { + /// Resolves the path relative to `root_dir`. If the path is absolute, + /// it is returned verbatim. + /// + /// May return an error if the path is prefixed with `~` and the home + /// directory is undefined. + pub fn resolve(&self, root_dir: impl AsRef<Path>) -> Result<PathBuf, SpecConversionError> { + resolve_path(Path::new(self.path.as_str()), root_dir) + } +} + +/// Resolves the path relative to `root_dir`. If the path is absolute, +/// it is returned verbatim. +/// +/// May return an error if the path is prefixed with `~` and the home +/// directory is undefined. +fn resolve_path(path: &Path, root_dir: impl AsRef<Path>) -> Result<PathBuf, SpecConversionError> { + if path.is_absolute() { + Ok(PathBuf::from(path)) + } else if let Ok(user_path) = path.strip_prefix("~/") { + let home_dir = dirs::home_dir() + .ok_or_else(|| SpecConversionError::InvalidPath(path.display().to_string()))?; + Ok(home_dir.join(user_path)) + } else { + Ok(root_dir.as_ref().join(path)) + } +} diff --git a/crates/pixi_spec/src/snapshots/pixi_spec__test__into_nameless_match_spec.snap b/crates/pixi_spec/src/snapshots/pixi_spec__test__into_nameless_match_spec.snap index d688c740d..87fd9407d 100644 --- a/crates/pixi_spec/src/snapshots/pixi_spec__test__into_nameless_match_spec.snap +++ b/crates/pixi_spec/src/snapshots/pixi_spec__test__into_nameless_match_spec.snap @@ -25,7 +25,7 @@ expression: snapshot subdir: linux-64 result: channel: - base_url: "https://conda.anaconda.org/conda-forge/" + base_url: "https://conda.anaconda.org/conda-forge" name: conda-forge subdir: linux-64 - input: @@ -33,7 +33,7 @@ expression: snapshot subdir: linux-64 result: channel: - base_url: "https://conda.anaconda.org/conda-forge/" + base_url: "https://conda.anaconda.org/conda-forge" name: conda-forge subdir: linux-64 - input: diff --git a/crates/pixi_spec/src/url.rs b/crates/pixi_spec/src/url.rs index cd4162ab8..229e77382 100644 --- a/crates/pixi_spec/src/url.rs +++ b/crates/pixi_spec/src/url.rs @@ -1,3 +1,4 @@ +use itertools::Either; use rattler_conda_types::{package::ArchiveIdentifier, NamelessMatchSpec}; use rattler_digest::{Md5Hash, Sha256Hash}; use serde_with::serde_as; @@ -54,6 +55,25 @@ impl UrlSpec { } } + /// Converts this instance into a [`UrlSourceSpec`] if the URL points to a + /// source package. Or to a [`NamelessMatchSpec`] otherwise. + pub fn into_source_or_binary(self) -> Either<UrlSourceSpec, NamelessMatchSpec> { + if self.is_binary() { + Either::Right(NamelessMatchSpec { + url: Some(self.url), + md5: self.md5, + sha256: self.sha256, + ..NamelessMatchSpec::default() + }) + } else { + Either::Left(UrlSourceSpec { + url: self.url, + md5: self.md5, + sha256: self.sha256, + }) + } + } + /// Returns true if the URL points to a binary package. pub fn is_binary(&self) -> bool { ArchiveIdentifier::try_from_url(&self.url).is_some()
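Annotation (not part of the patch): `resolve_path` gives every path spec the same three-way behavior. An illustrative sketch with hypothetical values (inside the module, since the helper is private):

    // Relative paths are joined onto the manifest root...
    assert_eq!(
        resolve_path(Path::new("my-lib"), "/work/project")?,
        PathBuf::from("/work/project/my-lib")
    );
    // ...absolute paths pass through verbatim, and `~/...` expands to the
    // user's home directory (or errors when it cannot be determined).
    assert_eq!(
        resolve_path(Path::new("/opt/pkg"), "/work/project")?,
        PathBuf::from("/opt/pkg")
    );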
diff --git a/crates/pixi_utils/src/cache.rs b/crates/pixi_utils/src/cache.rs new file mode 100644 index 000000000..46310eb5a --- /dev/null +++ b/crates/pixi_utils/src/cache.rs @@ -0,0 +1,30 @@ +use std::hash::{DefaultHasher, Hash, Hasher}; + +use rattler_conda_types::MatchSpec; + +/// A hash that uniquely identifies an environment. +#[derive(Hash)] +pub struct EnvironmentHash { + pub command: String, + pub specs: Vec<MatchSpec>, + pub channels: Vec<String>, +} + +impl EnvironmentHash { + /// Creates a new environment hash. + pub fn new(command: String, specs: Vec<MatchSpec>, channels: Vec<String>) -> Self { + Self { + command, + specs, + channels, + } + } + + /// Returns the name of the environment. + pub fn name(&self) -> String { + let mut hasher = DefaultHasher::new(); + self.hash(&mut hasher); + let hash = hasher.finish(); + format!("{}-{:x}", &self.command, hash) + } +} diff --git a/crates/pixi_utils/src/lib.rs b/crates/pixi_utils/src/lib.rs index dbc4c9cc4..9b5816528 100644 --- a/crates/pixi_utils/src/lib.rs +++ b/crates/pixi_utils/src/lib.rs @@ -1,3 +1,4 @@ +pub mod cache; pub mod conda_environment_file; pub mod indicatif; mod prefix_guard; @@ -6,4 +7,5 @@ pub mod reqwest; mod executable_utils; pub use executable_utils::{executable_from_path, is_binary_folder, strip_executable_extension}; +pub use cache::EnvironmentHash; pub use prefix_guard::{PrefixGuard, WriteGuard};
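Annotation (not part of the patch): `EnvironmentHash` keys cached environments by what was requested (command, specs, channels). A sketch with hypothetical values, assuming `rattler_conda_types::ParseStrictness`:

    let name = EnvironmentHash::new(
        "bat".to_string(),
        vec![MatchSpec::from_str("bat >=0.24", ParseStrictness::Strict)?],
        vec!["https://conda.anaconda.org/conda-forge".to_string()],
    )
    .name();
    // e.g. "bat-59f2bc1a6ecfe622": identical inputs map to the same cached
    // environment name (the exact digits depend on DefaultHasher).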
diff --git a/crates/pypi_mapping/src/custom_pypi_mapping.rs b/crates/pypi_mapping/src/custom_pypi_mapping.rs index 84f4cd032..522ce87ea 100644 --- a/crates/pypi_mapping/src/custom_pypi_mapping.rs +++ b/crates/pypi_mapping/src/custom_pypi_mapping.rs @@ -64,14 +64,22 @@ pub fn fetch_mapping_from_path(path: &Path) -> miette::Result pub async fn amend_pypi_purls( client: &ClientWithMiddleware, mapping_url: &CustomMapping, - conda_packages: &mut [RepoDataRecord], + conda_packages: impl IntoIterator<Item = &mut RepoDataRecord>, reporter: Option<Arc<dyn Reporter>>, ) -> miette::Result<()> { - trim_conda_packages_channel_url_suffix(conda_packages); - let packages_for_prefix_mapping: Vec<RepoDataRecord> = conda_packages + let mut conda_packages = conda_packages.into_iter().collect::<Vec<_>>(); + + for package in conda_packages.iter_mut() { + package.channel = package + .channel + .as_ref() + .map(|c| c.trim_end_matches('/').to_string()); + } + let packages_for_prefix_mapping: Vec<_> = conda_packages .iter() - .filter(|package| !mapping_url.mapping.contains_key(&package.channel)) - .cloned() + .filter_map(|record| record.channel.as_ref().map(|channel| (channel, record))) + .filter(|(channel, _)| !mapping_url.mapping.contains_key(*channel)) + .map(|(_, p)| (**p).clone()) + .collect(); let custom_mapping = mapping_url.fetch_custom_mapping(client).await?; @@ -80,7 +88,9 @@ pub async fn amend_pypi_purls( // to request from the prefix_mapping. This will avoid fetching unwanted // URLs, e.g. behind corporate firewalls if packages_for_prefix_mapping.is_empty() { - _amend_only_custom_pypi_purls(conda_packages, &custom_mapping)?; + for record in conda_packages { + amend_pypi_purls_for_record(record, &custom_mapping)?; + } } else { let prefix_mapping = prefix_pypi_name_mapping::conda_pypi_name_mapping( client, @@ -91,8 +101,11 @@ let compressed_mapping = prefix_pypi_name_mapping::conda_pypi_name_compressed_mapping(client).await?; - for record in conda_packages.iter_mut() { - if !mapping_url.mapping.contains_key(&record.channel) { + for record in conda_packages { + let Some(channel) = record.channel.as_ref() else { + continue; + }; + if !mapping_url.mapping.contains_key(channel) { prefix_pypi_name_mapping::amend_pypi_purls_for_record( record, &prefix_mapping, @@ -130,7 +143,11 @@ fn amend_pypi_purls_for_record( let mut purls = Vec::new(); // we verify if we have package channel and name in user provided mapping - if let Some(mapped_channel) = custom_mapping.get(&record.channel) { + if let Some(mapped_channel) = record + .channel + .as_ref() + .and_then(|channel| custom_mapping.get(channel)) + { if let Some(mapped_name) = mapped_channel.get(record.package_record.name.as_normalized()) { // we have a pypi name for it so we record a purl if let Some(name) = mapped_name { @@ -173,9 +190,3 @@ pub fn _amend_only_custom_pypi_purls( } Ok(()) } - -fn trim_conda_packages_channel_url_suffix(conda_packages: &mut [RepoDataRecord]) { - for package in conda_packages { - package.channel = package.channel.trim_end_matches('/').to_string(); - } -} diff --git a/crates/pypi_mapping/src/lib.rs b/crates/pypi_mapping/src/lib.rs index 7d27e6f64..3a5507891 100644 --- a/crates/pypi_mapping/src/lib.rs +++ b/crates/pypi_mapping/src/lib.rs @@ -138,7 +138,7 @@ impl PurlSource { pub async fn amend_pypi_purls( client: ClientWithMiddleware, mapping_source: &MappingSource, - conda_packages: &mut [RepoDataRecord], + conda_packages: impl IntoIterator<Item = &mut RepoDataRecord>, reporter: Option<Arc<dyn Reporter>>, ) -> miette::Result<()> { // Construct a client with a retry policy and local caching @@ -168,7 +168,7 @@ pub async fn amend_pypi_purls( prefix_pypi_name_mapping::amend_pypi_purls(&client, conda_packages, reporter).await?; } MappingSource::Disabled => { - for record in conda_packages.iter_mut() { + for record in conda_packages { if let Some(purl) = prefix_pypi_name_mapping::assume_conda_is_pypi(None, record) { record .package_record @@ -185,7 +185,11 @@ /// Returns `true` if the specified record refers to a conda-forge package. pub fn is_conda_forge_record(record: &RepoDataRecord) -> bool { - Url::from_str(&record.channel).map_or(false, |u| is_conda_forge_url(&u)) + record .channel .as_ref() .and_then(|channel| Url::from_str(channel).ok()) .map_or(false, |u| is_conda_forge_url(&u)) } /// Returns `true` if the specified url refers to a conda-forge channel.
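Annotation (not part of the patch): switching `conda_packages` from `&mut [RepoDataRecord]` to an `IntoIterator` of mutable references lets callers amend a filtered subset without copying. A sketch of a hypothetical caller:

    // Only amend records that still lack purls, without building a new Vec:
    amend_pypi_purls(
        client,
        &mapping_source,
        records
            .iter_mut()
            .filter(|record| record.package_record.purls.is_none()),
        None,
    )
    .await?;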
diff --git a/crates/pypi_mapping/src/prefix_pypi_name_mapping.rs b/crates/pypi_mapping/src/prefix_pypi_name_mapping.rs index b36ecc350..7a31c4414 100644 --- a/crates/pypi_mapping/src/prefix_pypi_name_mapping.rs +++ b/crates/pypi_mapping/src/prefix_pypi_name_mapping.rs @@ -63,13 +63,13 @@ async fn try_fetch_single_mapping( } /// Downloads and caches the conda-forge conda-to-pypi name mapping. -pub async fn conda_pypi_name_mapping( +pub async fn conda_pypi_name_mapping<'r>( client: &ClientWithMiddleware, - conda_packages: &[RepoDataRecord], + conda_packages: impl IntoIterator<Item = &'r RepoDataRecord>, reporter: Option<Arc<dyn Reporter>>, ) -> miette::Result<HashMap<Sha256Hash, Package>> { let filtered_packages = conda_packages - .iter() + .into_iter() // because we later skip adding purls for packages // that have purls // here we only filter packages that don't have them @@ -157,13 +157,15 @@ pub async fn conda_pypi_name_compressed_mapping( /// Amend the records with pypi purls if they are not present yet. pub async fn amend_pypi_purls( client: &ClientWithMiddleware, - conda_packages: &mut [RepoDataRecord], + conda_packages: impl IntoIterator<Item = &mut RepoDataRecord>, reporter: Option<Arc<dyn Reporter>>, ) -> miette::Result<()> { - let conda_mapping = conda_pypi_name_mapping(client, conda_packages, reporter).await?; + let conda_packages = conda_packages.into_iter().collect_vec(); + let conda_mapping = + conda_pypi_name_mapping(client, conda_packages.iter().map(|p| *p as &_), reporter).await?; let compressed_mapping = conda_pypi_name_compressed_mapping(client).await?; - for record in conda_packages.iter_mut() { + for record in conda_packages { amend_pypi_purls_for_record(record, &conda_mapping, &compressed_mapping)?; } @@ -248,7 +250,8 @@ pub fn amend_pypi_purls_for_record( Ok(()) } -/// Try to assume that the conda-forge package is a PyPI package and return a purl. +/// Try to assume that the conda-forge package is a PyPI package and return a +/// purl. pub fn assume_conda_is_pypi( purls: Option<&Vec<PackageUrl>>, record: &RepoDataRecord, diff --git a/examples/boltons/.gitattributes b/examples/boltons/.gitattributes new file mode 100644 index 000000000..8f61a8e77 --- /dev/null +++ b/examples/boltons/.gitattributes @@ -0,0 +1,2 @@ +# SCM syntax highlighting +pixi.lock linguist-language=YAML linguist-generated=true diff --git a/examples/boltons/.gitignore b/examples/boltons/.gitignore new file mode 100644 index 000000000..740bb7d1a --- /dev/null +++ b/examples/boltons/.gitignore @@ -0,0 +1,4 @@ + +# pixi environments +.pixi +*.egg-info diff --git a/examples/boltons/README.md b/examples/boltons/README.md new file mode 100644 index 000000000..b56bd4969 --- /dev/null +++ b/examples/boltons/README.md @@ -0,0 +1 @@ +# Some boltons docs diff --git a/crates/pixi_manifest/src/consts.rs b/examples/boltons/boltons/__init__.py similarity index 100% rename from crates/pixi_manifest/src/consts.rs rename to examples/boltons/boltons/__init__.py diff --git a/examples/boltons/boltons/main.py b/examples/boltons/boltons/main.py new file mode 100644 index 000000000..e69de29bb diff --git a/examples/boltons/pixi.lock b/examples/boltons/pixi.lock new file mode 100644 index 000000000..c48082ad6 --- /dev/null +++ b/examples/boltons/pixi.lock @@ -0,0 +1,274 @@ +version: 6 +environments: + default: + channels: + - url: https://conda.anaconda.org/conda-forge/ + packages: + osx-arm64: + - conda: https://conda.anaconda.org/conda-forge/noarch/boltons-24.0.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/bzip2-1.0.8-h99b78c6_7.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/ca-certificates-2024.8.30-hf0a4a13_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/editables-0.5-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/hatchling-1.26.3-pypyhff2d567_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.5.0-pyha770c72_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libexpat-2.6.4-h286801f_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libffi-3.4.2-h3422bc3_5.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libmpdec-4.0.0-h99b78c6_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libsqlite-3.47.0-hbaaea75_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libzlib-1.3.1-h8359307_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/ncurses-6.5-h7bae524_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/openssl-3.4.0-h39f12f2_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/packaging-24.2-pyhff2d567_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pathspec-0.12.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pluggy-1.5.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/python-3.13.0-h75c3a9f_100_cp313.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/python_abi-3.13-5_cp313.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/readline-8.2-h92ec313_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/tk-8.6.13-h5083fa2_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tomli-2.1.0-pyhff2d567_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/trove-classifiers-2024.10.21.16-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2024b-hc8b5060_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/xz-5.2.6-h57fd34a_0.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/noarch/zipp-3.21.0-pyhd8ed1ab_0.conda +packages: +- conda: https://conda.anaconda.org/conda-forge/noarch/boltons-24.0.0-pyhd8ed1ab_0.conda + sha256: e44d07932306392372411ab1261670a552f96077f925af00c1559a18a73a1bdc + md5: 61de176bd62041f9cd5bd4fcd09eb0ff + depends: + - python ==2.7.*|>=3.7 + license: BSD-3-Clause + license_family: BSD + size: 297896 + timestamp: 1711936529147 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/bzip2-1.0.8-h99b78c6_7.conda + sha256: adfa71f158cbd872a36394c56c3568e6034aa55c623634b37a4836bd036e6b91 + md5: fc6948412dbbbe9a4c9ddbbcfe0a79ab + depends: + - __osx >=11.0 + license: bzip2-1.0.6 + license_family: BSD + size: 122909 + timestamp: 1720974522888 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/ca-certificates-2024.8.30-hf0a4a13_0.conda + sha256: 2db1733f4b644575dbbdd7994a8f338e6ef937f5ebdb74acd557e9dda0211709 + md5: 40dec13fd8348dbe303e57be74bd3d35 + license: ISC + size: 158482 + timestamp: 1725019034582 +- conda: https://conda.anaconda.org/conda-forge/noarch/editables-0.5-pyhd8ed1ab_0.conda + sha256: de160a7494e7bc72360eea6a29cbddf194d0a79f45ff417a4de20e6858cf79a9 + md5: 9873878e2a069bc358b69e9a29c1ecd5 + depends: + - python >=3.7 + license: MIT + license_family: MIT + size: 10988 + timestamp: 1705857085102 +- conda: https://conda.anaconda.org/conda-forge/noarch/hatchling-1.26.3-pypyhff2d567_0.conda + sha256: bcd1e3b68ed11c11c974c890341ec03784354c68f6e2fcc518eb3ce8e90d452a + md5: 31c57e2a780803fd44aba9b726398058 + depends: + - editables >=0.3 + - importlib-metadata + - packaging >=21.3 + - pathspec >=0.10.1 + - pluggy >=1.0.0 + - python >=3.7 + - python >=3.8 + - tomli >=1.2.2 + - trove-classifiers + license: MIT + license_family: MIT + size: 56816 + timestamp: 1731469419003 +- conda: https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.5.0-pyha770c72_0.conda + sha256: 7194700ce1a5ad2621fd68e894dd8c1ceaff9a38723e6e0e5298fdef13017b1c + md5:
conda: https://conda.anaconda.org/conda-forge/osx-arm64/libffi-3.4.2-h3422bc3_5.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libmpdec-4.0.0-h99b78c6_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libsqlite-3.47.0-hbaaea75_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libzlib-1.3.1-h8359307_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/ncurses-6.5-h7bae524_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/openssl-3.4.0-h39f12f2_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/packaging-24.2-pyhff2d567_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pathspec-0.12.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pluggy-1.5.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/python-3.13.0-h75c3a9f_100_cp313.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/python_abi-3.13-5_cp313.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/readline-8.2-h92ec313_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/tk-8.6.13-h5083fa2_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tomli-2.1.0-pyhff2d567_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/trove-classifiers-2024.10.21.16-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2024b-hc8b5060_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/xz-5.2.6-h57fd34a_0.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/noarch/zipp-3.21.0-pyhd8ed1ab_0.conda +packages: +- conda: https://conda.anaconda.org/conda-forge/noarch/boltons-24.0.0-pyhd8ed1ab_0.conda + sha256: e44d07932306392372411ab1261670a552f96077f925af00c1559a18a73a1bdc + md5: 61de176bd62041f9cd5bd4fcd09eb0ff + depends: + - python ==2.7.*|>=3.7 + license: BSD-3-Clause + license_family: BSD + size: 297896 + timestamp: 1711936529147 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/bzip2-1.0.8-h99b78c6_7.conda + sha256: adfa71f158cbd872a36394c56c3568e6034aa55c623634b37a4836bd036e6b91 + md5: fc6948412dbbbe9a4c9ddbbcfe0a79ab + depends: + - __osx >=11.0 + license: bzip2-1.0.6 + license_family: BSD + size: 122909 + timestamp: 1720974522888 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/ca-certificates-2024.8.30-hf0a4a13_0.conda + sha256: 2db1733f4b644575dbbdd7994a8f338e6ef937f5ebdb74acd557e9dda0211709 + md5: 40dec13fd8348dbe303e57be74bd3d35 + license: ISC + size: 158482 + timestamp: 1725019034582 +- conda: https://conda.anaconda.org/conda-forge/noarch/editables-0.5-pyhd8ed1ab_0.conda + sha256: de160a7494e7bc72360eea6a29cbddf194d0a79f45ff417a4de20e6858cf79a9 + md5: 9873878e2a069bc358b69e9a29c1ecd5 + depends: + - python >=3.7 + license: MIT + license_family: MIT + size: 10988 + timestamp: 1705857085102 +- conda: https://conda.anaconda.org/conda-forge/noarch/hatchling-1.26.3-pypyhff2d567_0.conda + sha256: bcd1e3b68ed11c11c974c890341ec03784354c68f6e2fcc518eb3ce8e90d452a + md5: 31c57e2a780803fd44aba9b726398058 + depends: + - editables >=0.3 + - importlib-metadata + - packaging >=21.3 + - pathspec >=0.10.1 + - pluggy >=1.0.0 + - python >=3.7 + - python >=3.8 + - tomli >=1.2.2 + - trove-classifiers + license: MIT + license_family: MIT + size: 56816 + timestamp: 1731469419003 +- conda: https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.5.0-pyha770c72_0.conda + sha256: 7194700ce1a5ad2621fd68e894dd8c1ceaff9a38723e6e0e5298fdef13017b1c + md5: 
54198435fce4d64d8a89af22573012a8 + depends: + - python >=3.8 + - zipp >=0.5 + license: Apache-2.0 + license_family: APACHE + size: 28646 + timestamp: 1726082927916 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libexpat-2.6.4-h286801f_0.conda + sha256: e42ab5ace927ee7c84e3f0f7d813671e1cf3529f5f06ee5899606630498c2745 + md5: 38d2656dd914feb0cab8c629370768bf + depends: + - __osx >=11.0 + constrains: + - expat 2.6.4.* + license: MIT + license_family: MIT + size: 64693 + timestamp: 1730967175868 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libffi-3.4.2-h3422bc3_5.tar.bz2 + sha256: 41b3d13efb775e340e4dba549ab5c029611ea6918703096b2eaa9c015c0750ca + md5: 086914b672be056eb70fd4285b6783b6 + license: MIT + license_family: MIT + size: 39020 + timestamp: 1636488587153 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libmpdec-4.0.0-h99b78c6_0.conda + sha256: f7917de9117d3a5fe12a39e185c7ce424f8d5010a6f97b4333e8a1dcb2889d16 + md5: 7476305c35dd9acef48da8f754eedb40 + depends: + - __osx >=11.0 + license: BSD-2-Clause + license_family: BSD + size: 69263 + timestamp: 1723817629767 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libsqlite-3.47.0-hbaaea75_1.conda + sha256: 5a96caa566c11e5a5ebdcdb86a0759a7fb27d3c5f42e6a0fd0d6023c1e935d9e + md5: 07a14fbe439eef078cc479deca321161 + depends: + - __osx >=11.0 + - libzlib >=1.3.1,<2.0a0 + license: Unlicense + size: 837683 + timestamp: 1730208293578 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libzlib-1.3.1-h8359307_2.conda + sha256: ce34669eadaba351cd54910743e6a2261b67009624dbc7daeeafdef93616711b + md5: 369964e85dc26bfe78f41399b366c435 + depends: + - __osx >=11.0 + constrains: + - zlib 1.3.1 *_2 + license: Zlib + license_family: Other + size: 46438 + timestamp: 1727963202283 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/ncurses-6.5-h7bae524_1.conda + sha256: 27d0b9ff78ad46e1f3a6c96c479ab44beda5f96def88e2fe626e0a49429d8afc + md5: cb2b0ea909b97b3d70cd3921d1445e1a + depends: + - __osx >=11.0 + license: X11 AND BSD-3-Clause + size: 802321 + timestamp: 1724658775723 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/openssl-3.4.0-h39f12f2_0.conda + sha256: bd1d58ced46e75efa3b842c61642fd12272c69e9fe4d7261078bc082153a1d53 + md5: df307bbc703324722df0293c9ca2e418 + depends: + - __osx >=11.0 + - ca-certificates + license: Apache-2.0 + license_family: Apache + size: 2935176 + timestamp: 1731377561525 +- conda: https://conda.anaconda.org/conda-forge/noarch/packaging-24.2-pyhff2d567_1.conda + sha256: 74843f871e5cd8a1baf5ed8c406c571139c287141efe532f8ffbdafa3664d244 + md5: 8508b703977f4c4ada34d657d051972c + depends: + - python >=3.8 + license: Apache-2.0 + license_family: APACHE + size: 60380 + timestamp: 1731802602808 +- conda: https://conda.anaconda.org/conda-forge/noarch/pathspec-0.12.1-pyhd8ed1ab_0.conda + sha256: 4e534e66bfe8b1e035d2169d0e5b185450546b17e36764272863e22e0370be4d + md5: 17064acba08d3686f1135b5ec1b32b12 + depends: + - python >=3.7 + license: MPL-2.0 + license_family: MOZILLA + size: 41173 + timestamp: 1702250135032 +- conda: https://conda.anaconda.org/conda-forge/noarch/pluggy-1.5.0-pyhd8ed1ab_0.conda + sha256: 33eaa3359948a260ebccf9cdc2fd862cea5a6029783289e13602d8e634cd9a26 + md5: d3483c8fc2dc2cc3f5cf43e26d60cabf + depends: + - python >=3.8 + license: MIT + license_family: MIT + size: 23815 + timestamp: 1713667175451 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/python-3.13.0-h75c3a9f_100_cp313.conda + build_number: 100 + sha256: 
be9464399b76ae1fef77853eed70267ef657a98a5f69f7df012b7c6a34792151 + md5: 94ae22ea862d056ad1bc095443d02d73 + depends: + - __osx >=11.0 + - bzip2 >=1.0.8,<2.0a0 + - libexpat >=2.6.3,<3.0a0 + - libffi >=3.4,<4.0a0 + - libmpdec >=4.0.0,<5.0a0 + - libsqlite >=3.46.1,<4.0a0 + - libzlib >=1.3.1,<2.0a0 + - ncurses >=6.5,<7.0a0 + - openssl >=3.3.2,<4.0a0 + - python_abi 3.13.* *_cp313 + - readline >=8.2,<9.0a0 + - tk >=8.6.13,<8.7.0a0 + - tzdata + - xz >=5.2.6,<6.0a0 + license: Python-2.0 + size: 12804842 + timestamp: 1729168680448 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/python_abi-3.13-5_cp313.conda + build_number: 5 + sha256: 4437198eae80310f40b23ae2f8a9e0a7e5c2b9ae411a8621eb03d87273666199 + md5: b8e82d0a5c1664638f87f63cc5d241fb + constrains: + - python 3.13.* *_cp313 + license: BSD-3-Clause + license_family: BSD + size: 6322 + timestamp: 1723823058879 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/readline-8.2-h92ec313_1.conda + sha256: a1dfa679ac3f6007362386576a704ad2d0d7a02e98f5d0b115f207a2da63e884 + md5: 8cbb776a2f641b943d413b3e19df71f4 + depends: + - ncurses >=6.3,<7.0a0 + license: GPL-3.0-only + license_family: GPL + size: 250351 + timestamp: 1679532511311 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/tk-8.6.13-h5083fa2_1.conda + sha256: 72457ad031b4c048e5891f3f6cb27a53cb479db68a52d965f796910e71a403a8 + md5: b50a57ba89c32b62428b71a875291c9b + depends: + - libzlib >=1.2.13,<2.0.0a0 + license: TCL + license_family: BSD + size: 3145523 + timestamp: 1699202432999 +- conda: https://conda.anaconda.org/conda-forge/noarch/tomli-2.1.0-pyhff2d567_0.conda + sha256: 354b8a64d4f3311179d85aefc529ca201a36afc1af090d0010c46be7b79f9a47 + md5: 3fa1089b4722df3a900135925f4519d9 + depends: + - python >=3.9 + license: MIT + license_family: MIT + size: 18741 + timestamp: 1731426862834 +- conda: https://conda.anaconda.org/conda-forge/noarch/trove-classifiers-2024.10.21.16-pyhd8ed1ab_0.conda + sha256: 591e4ffdc95660b9e596c15b65cad35a70b36235f02dbd089ccc198dd5af0e71 + md5: 501f6d3288160a31d99a2f1321e77393 + depends: + - python >=3.7 + license: Apache-2.0 + license_family: Apache + size: 18429 + timestamp: 1729552033760 +- conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2024b-hc8b5060_0.conda + sha256: 4fde5c3008bf5d2db82f2b50204464314cc3c91c1d953652f7bd01d9e52aefdf + md5: 8ac3367aafb1cc0a068483c580af8015 + license: LicenseRef-Public-Domain + size: 122354 + timestamp: 1728047496079 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/xz-5.2.6-h57fd34a_0.tar.bz2 + sha256: 59d78af0c3e071021cfe82dc40134c19dab8cdf804324b62940f5c8cd71803ec + md5: 39c6b54e94014701dd157f4f576ed211 + license: LGPL-2.1 and GPL-2.0 + size: 235693 + timestamp: 1660346961024 +- conda: https://conda.anaconda.org/conda-forge/noarch/zipp-3.21.0-pyhd8ed1ab_0.conda + sha256: 232a30e4b0045c9de5e168dda0328dc0e28df9439cdecdfb97dd79c1c82c4cec + md5: fee389bf8a4843bd7a2248ce11b7f188 + depends: + - python >=3.8 + license: MIT + license_family: MIT + size: 21702 + timestamp: 1731262194278 diff --git a/examples/boltons/pixi.toml b/examples/boltons/pixi.toml new file mode 100644 index 000000000..b1ddc2fd0 --- /dev/null +++ b/examples/boltons/pixi.toml @@ -0,0 +1,31 @@ +[workspace] +authors = ["nichmor "] +channels = ["conda-forge"] +description = "Add a short description here" +name = "boltons" +platforms = ["osx-arm64"] +preview = ['pixi-build'] +version = "0.1.0" + +[tasks] + +[dependencies] +# boltons = { path = "." 
} +boltons = "*" +hatchling = "==1.26.3" + + +[package] +authors = ["nichmor "] +description = "Add a short description here" +name = "boltons" +version = "0.1.0" + + +[build-system] +build-backend = "pixi-build-rattler-build" +channels = [ + "https://repo.prefix.dev/pixi-build-backends", + "https://fast.prefix.dev/conda-forge", +] +dependencies = ["pixi-build-rattler-build"] diff --git a/examples/boltons/pyproject.toml b/examples/boltons/pyproject.toml new file mode 100644 index 000000000..4543a4d3a --- /dev/null +++ b/examples/boltons/pyproject.toml @@ -0,0 +1,11 @@ +[project] +dependencies = ["rich"] +description = "Example of how to use pixi to build boltons." +name = "boltons" +readme = "README.md" +requires-python = ">=3.11" +version = "0.1.0" + +[build-system] +build-backend = "hatchling.build" +requires = ["hatchling"] diff --git a/examples/boltons/recipe.yaml b/examples/boltons/recipe.yaml new file mode 100644 index 000000000..357ce46cb --- /dev/null +++ b/examples/boltons/recipe.yaml @@ -0,0 +1,35 @@ +# yaml-language-server: $schema=https://raw.githubusercontent.com/prefix-dev/recipe-format/main/schema.json + +context: + version: "23.0.0" + + +package: + name: boltons-with-extra + version: ${{ version }} + +source: + url: https://github.com/mahmoud/boltons/archive/refs/tags/${{ version }}.tar.gz + sha256: 9b2998cd9525ed472079c7dd90fbd216a887202e8729d5969d4f33878f0ff668 + +build: + noarch: python + script: + - python -m pip install . --no-deps -vv + +requirements: + host: + # - if: linux + # then: + - python + - pip + - setuptools + # - numpy + # - ${{ stdlib('c') }} + run: + - pip + # - ${{ pin_compatible('numpy', min_pin='x.x', max_pin='x') }} + +about: + license: BSD-3-Clause + license_file: LICENSE diff --git a/examples/cpp-sdl/CMakeLists.txt b/examples/cpp-sdl/CMakeLists.txt index c5f4f956c..d6eb0ba8f 100644 --- a/examples/cpp-sdl/CMakeLists.txt +++ b/examples/cpp-sdl/CMakeLists.txt @@ -16,3 +16,12 @@ target_link_libraries( SDL2::SDL2 SDL2::SDL2main ) + +include(GNUInstallDirs) +install( + TARGETS ${PROJECT_NAME} + EXPORT ${PROJECT_NAME}Targets + LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR} + ARCHIVE DESTINATION ${CMAKE_INSTALL_LIBDIR} + RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR} +) diff --git a/examples/cpp-sdl/pixi.lock b/examples/cpp-sdl/pixi.lock index 6dcbbf230..c8e70b59c 100644 --- a/examples/cpp-sdl/pixi.lock +++ b/examples/cpp-sdl/pixi.lock @@ -1,284 +1,41 @@ -version: 5 +version: 6 environments: - build: - channels: - - url: https://conda.anaconda.org/conda-forge/ - packages: - linux-64: - - conda: https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/_sysroot_linux-64_curr_repodata_hack-3-h69a702a_16.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/attr-2.5.1-h166bdaf_1.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/binutils-2.40-h4852527_7.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/binutils_impl_linux-64-2.40-ha1999f0_7.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/binutils_linux-64-2.40-hb3c18ed_4.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h4bc722e_7.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.33.1-heb4867d_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/c-compiler-1.5.2-h0b41bf4_0.conda - - conda: 
https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2024.7.4-hbcca054_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/cmake-3.26.4-hcfe8598_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/cxx-compiler-1.5.2-hf52228f_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/dbus-1.13.6-h5008d03_3.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/expat-2.6.2-h59595ed_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/gcc-11.4.0-h602e360_13.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/gcc_impl_linux-64-11.4.0-h00c12a0_13.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/gcc_linux-64-11.4.0-ha077dfb_4.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/gettext-0.22.5-he02047a_3.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/gettext-tools-0.22.5-he02047a_3.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/gxx-11.4.0-h602e360_13.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/gxx_impl_linux-64-11.4.0-h634f3ee_13.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/gxx_linux-64-11.4.0-h35bfe5d_4.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/kernel-headers_linux-64-3.10.0-h4a8ded7_16.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/keyutils-1.6.1-h166bdaf_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.3-h659f571_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/lame-3.100-h166bdaf_1003.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.40-hf3520f5_7.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libasprintf-0.22.5-he8f35ee_3.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libasprintf-devel-0.22.5-he8f35ee_3.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libcap-2.69-h0f662aa_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.9.1-hdb1bdb2_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20191231-he28a2e2_2.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-hd590300_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.6.2-h59595ed_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/libflac-1.4.3-h59595ed_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgcc-14.1.0-h77fa898_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/libgcc-devel_linux-64-11.4.0-h8f596e0_113.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-14.1.0-h69a702a_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgcrypt-1.11.0-h4ab18f5_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgettextpo-0.22.5-he02047a_3.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgettextpo-devel-0.22.5-he02047a_3.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libglib-2.80.3-h315aac3_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgomp-14.1.0-h77fa898_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgpg-error-1.50-h4f305b6_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.17-hd590300_2.conda - - conda: 
https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.58.0-h47da74e_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libogg-1.3.5-h4ab18f5_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libopus-1.3.1-h7f98852_1.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/libsanitizer-11.4.0-h5763a12_13.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libsndfile-1.2.2-hc60ed4a_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.0-h0841786_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-14.1.0-hc0a3c3a_1.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/libstdcxx-devel_linux-64-11.4.0-h8f596e0_113.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-14.1.0-h4852527_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libsystemd0-256.5-hb6d7363_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libuv-1.48.0-hd590300_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libvorbis-1.3.7-h9c3ff4c_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.16-hb9d3cd8_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.3.1-h4ab18f5_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.9.4-hcb278e6_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/make-4.4.1-hb9d3cd8_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/mpg123-1.32.6-h59595ed_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.5-he02047a_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/ninja-1.11.1-h924138e_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/openssl-3.3.1-hb9d3cd8_3.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.44-hba22ea6_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-h36c2ea0_1001.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/pulseaudio-client-16.1-hb77b528_5.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/rhash-1.4.3-hd590300_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/sdl2-2.26.5-h949db6a_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/sysroot_linux-64-2.17-h4a8ded7_16.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2024a-h8827d51_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-kbproto-1.0.7-h7f98852_1002.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.9-hb711507_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxau-1.0.11-hd590300_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdmcp-1.1.3-h7f98852_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.4-h0b41bf4_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-xextproto-7.3.0-h0b41bf4_1003.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-xproto-7.0.31-h7f98852_1007.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.6-h166bdaf_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/zlib-1.3.1-h4ab18f5_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.6-ha6fb4c9_0.conda - osx-64: - - conda: https://conda.anaconda.org/conda-forge/osx-64/bzip2-1.0.8-hfdf4475_7.conda - - conda: 
https://conda.anaconda.org/conda-forge/osx-64/c-ares-1.33.1-h44e7173_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/c-compiler-1.5.2-hbf74d83_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/ca-certificates-2024.7.4-h8857fd0_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/cctools-973.0.1-h76f1dac_13.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/cctools_osx-64-973.0.1-hcc6d90d_13.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/clang-14-14.0.6-default_hdb78580_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/clang-14.0.6-h694c41f_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/clang_osx-64-14.0.6-h3113cd8_6.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/clangxx-14.0.6-default_hdb78580_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/clangxx_osx-64-14.0.6-h6f97653_6.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/cmake-3.26.4-hf40c264_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/compiler-rt-14.0.6-h613da45_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/compiler-rt_osx-64-14.0.6-hab78ec2_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/osx-64/cxx-compiler-1.5.2-hb8565cd_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/expat-2.6.2-h73e2aa4_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/krb5-1.21.3-h37d8d59_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/ld64-609-hc6ad406_13.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/ld64_osx-64-609-hfd63004_13.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libclang-cpp14-14.0.6-default_hdb78580_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libcurl-8.9.1-hfcf2730_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libcxx-18.1.8-hd876a4e_6.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libedit-3.1.20191231-h0678c8f_2.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/osx-64/libev-4.33-h10d778d_2.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libexpat-2.6.2-h73e2aa4_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libllvm14-14.0.6-hc8e404f_4.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libnghttp2-1.58.0-h64cf6d3_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libssh2-1.11.0-hd019ec5_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libuv-1.48.0-h67532ce_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/libzlib-1.3.1-h87427d6_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/llvm-openmp-18.1.8-h15ab845_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/llvm-tools-14.0.6-hc8e404f_4.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/make-4.4.1-h00291cd_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/ncurses-6.5-hf036a51_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/ninja-1.11.1-hb8565cd_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/openssl-3.3.1-hd23fc13_3.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/rhash-1.4.3-h0dc2134_2.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/sdl2-2.26.5-hf0c8a7f_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/sigtool-0.1.3-h88f4db0_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/osx-64/tapi-1100.0.11-h9ce4665_0.tar.bz2 - - conda: 
https://conda.anaconda.org/conda-forge/osx-64/xz-5.2.6-h775f41a_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/osx-64/zlib-1.3.1-h87427d6_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/zstd-1.5.6-h915ae27_0.conda - osx-arm64: - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/bzip2-1.0.8-h99b78c6_7.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/c-ares-1.33.1-hd74edd7_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/c-compiler-1.5.2-h5008568_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/ca-certificates-2024.7.4-hf0a4a13_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/cctools-973.0.1-hcbb26d4_13.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/cctools_osx-arm64-973.0.1-hef52d2f_13.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/clang-14-14.0.6-default_h5dc8d65_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/clang-14.0.6-hce30654_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/clang_osx-arm64-14.0.6-h15773ab_6.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/clangxx-14.0.6-default_h610c423_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/clangxx_osx-arm64-14.0.6-he29aa18_6.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/cmake-3.26.4-hc0af03a_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/compiler-rt-14.0.6-h30b49de_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/noarch/compiler-rt_osx-arm64-14.0.6-h48302dc_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/cxx-compiler-1.5.2-hffc8910_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/expat-2.6.2-hebf3989_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/krb5-1.21.3-h237132a_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/ld64-609-h619f069_13.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/ld64_osx-arm64-609-h7167370_13.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libclang-cpp14-14.0.6-default_h5dc8d65_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libcurl-8.9.1-hfd8ffcc_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libcxx-18.1.8-h3ed4263_6.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libedit-3.1.20191231-hc8eb9b7_2.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libev-4.33-h93a5062_2.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libexpat-2.6.2-hebf3989_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libllvm14-14.0.6-hd1a9a77_4.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libnghttp2-1.58.0-ha4dd798_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libssh2-1.11.0-h7a5bd25_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libuv-1.48.0-h93a5062_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libzlib-1.3.1-hfb2fe0b_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/llvm-openmp-18.1.8-hde57baf_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/llvm-tools-14.0.6-hd1a9a77_4.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/make-4.4.1-hc9fafa5_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/ncurses-6.5-h7bae524_1.conda - - conda: 
https://conda.anaconda.org/conda-forge/osx-arm64/ninja-1.11.1-hffc8910_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/openssl-3.3.1-h8359307_3.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/rhash-1.4.3-hb547adb_2.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/sdl2-2.26.5-hb7217d7_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/sigtool-0.1.3-h44b9a77_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/tapi-1100.0.11-he4954df_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/xz-5.2.6-h57fd34a_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/zlib-1.3.1-hfb2fe0b_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/zstd-1.5.6-hb46c0d2_0.conda - win-64: - - conda: https://conda.anaconda.org/conda-forge/win-64/cmake-3.26.4-h1537add_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/cxx-compiler-1.5.2-h91493d7_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/m2w64-gcc-libgfortran-5.3.0-6.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/win-64/m2w64-gcc-libs-5.3.0-7.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/win-64/m2w64-gcc-libs-core-5.3.0-7.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/win-64/m2w64-gmp-6.1.0-2.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/win-64/m2w64-libwinpthread-git-5.0.0.4634.697f757-2.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/win-64/make-4.4.1-h013a479_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/msys2-conda-epoch-20160418-1.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/win-64/ninja-1.11.1-h91493d7_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/sdl2-2.26.5-h63175ca_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/ucrt-10.0.22621.0-h57928b3_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/win-64/vc-14.3-h8a93ad2_20.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/vc14_runtime-14.40.33810-hcc2c482_20.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/vs2015_runtime-14.40.33810-h3bf8584_20.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/vs2019_win-64-19.29.30139-he1865b1_20.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/vswhere-3.1.7-h57928b3_0.conda default: channels: - - url: https://conda.anaconda.org/conda-forge/ + - url: https://prefix.dev/conda-forge/ packages: linux-64: - conda: https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/attr-2.5.1-h166bdaf_1.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h4bc722e_7.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/dbus-1.13.6-h5008d03_3.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/expat-2.6.2-h59595ed_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/gettext-0.22.5-he02047a_3.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/gettext-tools-0.22.5-he02047a_3.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/lame-3.100-h166bdaf_1003.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/libasprintf-0.22.5-he8f35ee_3.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libasprintf-devel-0.22.5-he8f35ee_3.conda - - conda: 
https://conda.anaconda.org/conda-forge/linux-64/libcap-2.69-h0f662aa_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.6.2-h59595ed_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/libflac-1.4.3-h59595ed_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgcc-14.1.0-h77fa898_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-14.1.0-h69a702a_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgcrypt-1.11.0-h4ab18f5_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgettextpo-0.22.5-he02047a_3.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgettextpo-devel-0.22.5-he02047a_3.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libglib-2.80.3-h315aac3_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgomp-14.1.0-h77fa898_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgpg-error-1.50-h4f305b6_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.17-hd590300_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libogg-1.3.5-h4ab18f5_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libopus-1.3.1-h7f98852_1.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/libsndfile-1.2.2-hc60ed4a_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-14.1.0-hc0a3c3a_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-14.1.0-h4852527_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libsystemd0-256.5-hb6d7363_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libvorbis-1.3.7-h9c3ff4c_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.16-hb9d3cd8_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.3.1-h4ab18f5_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.9.4-hcb278e6_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/mpg123-1.32.6-h59595ed_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.44-hba22ea6_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-h36c2ea0_1001.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/pulseaudio-client-16.1-hb77b528_5.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/sdl2-2.26.5-h949db6a_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-kbproto-1.0.7-h7f98852_1002.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.9-hb711507_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxau-1.0.11-hd590300_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdmcp-1.1.3-h7f98852_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.4-h0b41bf4_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-xextproto-7.3.0-h0b41bf4_1003.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-xproto-7.0.31-h7f98852_1007.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.6-h166bdaf_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.6-ha6fb4c9_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgcc-14.2.0-h77fa898_1.conda + - conda: 
https://conda.anaconda.org/conda-forge/linux-64/libgomp-14.2.0-h77fa898_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-14.2.0-hc0a3c3a_1.conda + - conda: . + subdir: linux-64 osx-64: - - conda: https://conda.anaconda.org/conda-forge/osx-64/libcxx-18.1.8-hd876a4e_6.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/sdl2-2.26.5-hf0c8a7f_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libcxx-19.1.3-hf95d169_0.conda + - conda: . + subdir: osx-64 osx-arm64: - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libcxx-18.1.8-h3ed4263_6.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/sdl2-2.26.5-hb7217d7_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libcxx-19.1.3-ha82da77_0.conda + - conda: . + subdir: osx-arm64 win-64: - - conda: https://conda.anaconda.org/conda-forge/win-64/sdl2-2.26.5-h63175ca_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/ucrt-10.0.22621.0-h57928b3_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/win-64/vc-14.3-h8a93ad2_20.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/vc14_runtime-14.40.33810-hcc2c482_20.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/vs2015_runtime-14.40.33810-h3bf8584_20.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/ucrt-10.0.22621.0-h57928b3_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/vc-14.3-ha32ba9b_23.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/vc14_runtime-14.42.34433-he29a5d6_23.conda + - conda: . + subdir: win-64 packages: -- kind: conda - name: _libgcc_mutex - version: '0.1' - build: conda_forge - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2 +- conda: https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2 sha256: fe51de6107f9edc7aa4f786a70f4a883943bc9d39b3bb7307c04c41410990726 md5: d7c89558ba9fa0495403155b64376d81 + channel: https://prefix.dev/conda-forge license: None size: 2562 timestamp: 1578324546067 -- kind: conda - name: _openmp_mutex - version: '4.5' - build: 2_gnu +- conda: https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2 build_number: 16 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2 sha256: fbe2c5e56a653bebb982eda4876a9178aedfc2b545f25d0ce9c4c0b508253d22 md5: 73aaf86a425cc6e73fcf236a5a46396d depends: @@ -286,3008 +43,141 @@ packages: - libgomp >=7.5.0 constrains: - openmp_impl 9999 + channel: https://prefix.dev/conda-forge license: BSD-3-Clause license_family: BSD size: 23621 timestamp: 1650670423406 -- kind: conda - name: _sysroot_linux-64_curr_repodata_hack - version: '3' - build: h69a702a_16 - build_number: 16 - subdir: noarch - noarch: generic - url: https://conda.anaconda.org/conda-forge/noarch/_sysroot_linux-64_curr_repodata_hack-3-h69a702a_16.conda - sha256: 6ac30acdbfd3136ee7a1de28af4355165291627e905715611726e674499b0786 - md5: 1c005af0c6ff22814b7c52ee448d4bea - license: LGPL-2.0-or-later AND LGPL-2.0-or-later WITH exceptions AND GPL-2.0-or-later AND MPL-2.0 - license_family: GPL - size: 20798 - timestamp: 1720621358501 -- kind: conda - name: attr - version: 2.5.1 - build: h166bdaf_1 - build_number: 1 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/attr-2.5.1-h166bdaf_1.tar.bz2 - sha256: 82c13b1772c21fc4a17441734de471d3aabf82b61db9b11f4a1bd04a9c4ac324 - md5: d9c69a24ad678ffce24c6543a0176b00 - depends: - - 
libgcc-ng >=12 - license: GPL-2.0-or-later - license_family: GPL - size: 71042 - timestamp: 1660065501192 -- kind: conda - name: binutils - version: '2.40' - build: h4852527_7 - build_number: 7 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/binutils-2.40-h4852527_7.conda - sha256: 75d7f5cda999fe1efe9f1de1be2d3e4ce32b20cbf97d1ef7b770e2e90c062858 - md5: df53aa8418f8c289ae9b9665986034f8 - depends: - - binutils_impl_linux-64 >=2.40,<2.41.0a0 - license: GPL-3.0-only - license_family: GPL - size: 31696 - timestamp: 1718625692046 -- kind: conda - name: binutils_impl_linux-64 - version: '2.40' - build: ha1999f0_7 - build_number: 7 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/binutils_impl_linux-64-2.40-ha1999f0_7.conda - sha256: 230f3136d17fdcf0e6da3a3ae59118570bc18106d79dd29bf2f341338d2a42c4 - md5: 3f840c7ed70a96b5ebde8044b2f36f32 - depends: - - ld_impl_linux-64 2.40 hf3520f5_7 - - sysroot_linux-64 - license: GPL-3.0-only - license_family: GPL - size: 6250821 - timestamp: 1718625666382 -- kind: conda - name: binutils_linux-64 - version: '2.40' - build: hb3c18ed_4 - build_number: 4 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/binutils_linux-64-2.40-hb3c18ed_4.conda - sha256: 4ddb8c7c0d42f4efe673cce579f66e8b2644a257d3aa935f9ab7b69c525d4485 - md5: 19286994c03c5207a70c7cfabe294570 - depends: - - binutils_impl_linux-64 2.40.* - - sysroot_linux-64 - license: BSD-3-Clause - license_family: BSD - size: 29029 - timestamp: 1717999138247 -- kind: conda - name: bzip2 - version: 1.0.8 - build: h4bc722e_7 - build_number: 7 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h4bc722e_7.conda - sha256: 5ced96500d945fb286c9c838e54fa759aa04a7129c59800f0846b4335cee770d - md5: 62ee74e96c5ebb0af99386de58cf9553 - depends: - - __glibc >=2.17,<3.0.a0 - - libgcc-ng >=12 - license: bzip2-1.0.6 - license_family: BSD - size: 252783 - timestamp: 1720974456583 -- kind: conda - name: bzip2 - version: 1.0.8 - build: h99b78c6_7 - build_number: 7 - subdir: osx-arm64 - url: https://conda.anaconda.org/conda-forge/osx-arm64/bzip2-1.0.8-h99b78c6_7.conda - sha256: adfa71f158cbd872a36394c56c3568e6034aa55c623634b37a4836bd036e6b91 - md5: fc6948412dbbbe9a4c9ddbbcfe0a79ab - depends: - - __osx >=11.0 - license: bzip2-1.0.6 - license_family: BSD - size: 122909 - timestamp: 1720974522888 -- kind: conda - name: bzip2 - version: 1.0.8 - build: hfdf4475_7 - build_number: 7 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/bzip2-1.0.8-hfdf4475_7.conda - sha256: cad153608b81fb24fc8c509357daa9ae4e49dfc535b2cb49b91e23dbd68fc3c5 - md5: 7ed4301d437b59045be7e051a0308211 - depends: - - __osx >=10.13 - license: bzip2-1.0.6 - license_family: BSD - size: 134188 - timestamp: 1720974491916 -- kind: conda - name: c-ares - version: 1.33.1 - build: h44e7173_0 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/c-ares-1.33.1-h44e7173_0.conda - sha256: 98b0ac09472e6737fc4685147d1755028cc650d428369cbe3cb74ab38b327095 - md5: b31a2de5edfddb308dda802eab2956dc +- conda: https://conda.anaconda.org/conda-forge/osx-64/libcxx-19.1.3-hf95d169_0.conda + sha256: 466f259bb13a8058fef28843977c090d21ad337b71a842ccc0407bccf8d27011 + md5: 86801fc56d4641e3ef7a63f5d996b960 depends: - __osx >=10.13 - license: MIT - license_family: MIT - size: 163203 - timestamp: 1724438157472 -- kind: conda - name: c-ares - version: 1.33.1 - build: hd74edd7_0 - subdir: osx-arm64 - url: 
https://conda.anaconda.org/conda-forge/osx-arm64/c-ares-1.33.1-hd74edd7_0.conda - sha256: ad29a9cffa0504cb4bf7605963816feff3c7833f36b050e1e71912d09c38e3f6 - md5: 5b69c16ee900aeffcf0103268d708518 - depends: - - __osx >=11.0 - license: MIT - license_family: MIT - size: 159389 - timestamp: 1724438175204 -- kind: conda - name: c-ares - version: 1.33.1 - build: heb4867d_0 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.33.1-heb4867d_0.conda - sha256: 2cb24f613eaf2850b1a08f28f967b10d8bd44ef623efa0154dc45eb718776be6 - md5: 0d3c60291342c0c025db231353376dfb - depends: - - __glibc >=2.28,<3.0.a0 - - libgcc-ng >=13 - license: MIT - license_family: MIT - size: 182796 - timestamp: 1724438109690 -- kind: conda - name: c-compiler - version: 1.5.2 - build: h0b41bf4_0 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/c-compiler-1.5.2-h0b41bf4_0.conda - sha256: fe4c0080648c3448939919ddc49339cd8e250124b69a518e66ef6989794fa58a - md5: 69afb4e35be6366c2c1f9ed7f49bc3e6 - depends: - - binutils - - gcc - - gcc_linux-64 11.* - license: BSD - size: 5812 - timestamp: 1670951514202 -- kind: conda - name: c-compiler - version: 1.5.2 - build: h5008568_0 - subdir: osx-arm64 - url: https://conda.anaconda.org/conda-forge/osx-arm64/c-compiler-1.5.2-h5008568_0.conda - sha256: 54fabbef178e857a639a9c7a302cdab072ca5c2b94052ac939a7ebcf9dad32e4 - md5: 56a88306583601d05b6eeded173d73d9 - depends: - - cctools >=949.0.1 - - clang_osx-arm64 14.* - - ld64 >=530 - - llvm-openmp - license: BSD - size: 5667 - timestamp: 1670951842052 -- kind: conda - name: c-compiler - version: 1.5.2 - build: hbf74d83_0 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/c-compiler-1.5.2-hbf74d83_0.conda - sha256: 0f97b6cc2215f0789ffa2781eb8a6304efaf5c4592c4c619d6e0a63c23f2b877 - md5: c1413ef5a20d658923e12dd3b566d8f3 - depends: - - cctools >=949.0.1 - - clang_osx-64 14.* - - ld64 >=530 - - llvm-openmp - license: BSD - size: 5660 - timestamp: 1670951603925 -- kind: conda - name: ca-certificates - version: 2024.7.4 - build: h8857fd0_0 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/ca-certificates-2024.7.4-h8857fd0_0.conda - sha256: d16f46c489cb3192305c7d25b795333c5fc17bb0986de20598ed519f8c9cc9e4 - md5: 7df874a4b05b2d2b82826190170eaa0f - license: ISC - size: 154473 - timestamp: 1720077510541 -- kind: conda - name: ca-certificates - version: 2024.7.4 - build: hbcca054_0 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2024.7.4-hbcca054_0.conda - sha256: c1548a3235376f464f9931850b64b02492f379b2f2bb98bc786055329b080446 - md5: 23ab7665c5f63cfb9f1f6195256daac6 - license: ISC - size: 154853 - timestamp: 1720077432978 -- kind: conda - name: ca-certificates - version: 2024.7.4 - build: hf0a4a13_0 - subdir: osx-arm64 - url: https://conda.anaconda.org/conda-forge/osx-arm64/ca-certificates-2024.7.4-hf0a4a13_0.conda - sha256: 33a61116dae7f369b6ce92a7f2a1ff361ae737c675a493b11feb5570b89e0e3b - md5: 21f9a33e5fe996189e470c19c5354dbe - license: ISC - size: 154517 - timestamp: 1720077468981 -- kind: conda - name: cctools - version: 973.0.1 - build: h76f1dac_13 - build_number: 13 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/cctools-973.0.1-h76f1dac_13.conda - sha256: f36d2b5eccd494f0ade668bd1a63e09b78427d0abb8066c6041e8776d9582114 - md5: 802cae917abdc5a7cdfa699ff02da42d - depends: - - cctools_osx-64 973.0.1 hcc6d90d_13 - - ld64 609 hc6ad406_13 - - libllvm14 >=14.0.6,<14.1.0a0 - license: APSL-2.0 - license_family: 
Other - size: 21657 - timestamp: 1679505516215 -- kind: conda - name: cctools - version: 973.0.1 - build: hcbb26d4_13 - build_number: 13 - subdir: osx-arm64 - url: https://conda.anaconda.org/conda-forge/osx-arm64/cctools-973.0.1-hcbb26d4_13.conda - sha256: a7c66e9d8a353e5996dc63dc1f179a7dda40a4133d056dcbca90855b9157e199 - md5: 0f89fe3819b672b09d7e6cadc8932cea - depends: - - cctools_osx-arm64 973.0.1 hef52d2f_13 - - ld64 609 h619f069_13 - - libllvm14 >=14.0.6,<14.1.0a0 - license: APSL-2.0 - license_family: Other - size: 21680 - timestamp: 1679508607015 -- kind: conda - name: cctools_osx-64 - version: 973.0.1 - build: hcc6d90d_13 - build_number: 13 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/cctools_osx-64-973.0.1-hcc6d90d_13.conda - sha256: 468bc6d052fd57928049fa05efc42e2a197026cf817942de833f9f526fdc39c0 - md5: 76e5fa849e2042cd657d9eec96095680 - depends: - - ld64_osx-64 >=609,<610.0a0 - - libcxx - - libllvm14 >=14.0.6,<14.1.0a0 - - libzlib >=1.2.13,<2.0.0a0 - - sigtool - constrains: - - cctools 973.0.1.* - - clang 14.0.* - - ld64 609.* - license: APSL-2.0 - license_family: Other - size: 1114891 - timestamp: 1679505481476 -- kind: conda - name: cctools_osx-arm64 - version: 973.0.1 - build: hef52d2f_13 - build_number: 13 - subdir: osx-arm64 - url: https://conda.anaconda.org/conda-forge/osx-arm64/cctools_osx-arm64-973.0.1-hef52d2f_13.conda - sha256: 1fbb33b31de5a1a31f7658cdb540f76aaf43f52c81e53f83761486820f653610 - md5: ec8f13409a2be66371f6d1022ce4fe9a - depends: - - ld64_osx-arm64 >=609,<610.0a0 - - libcxx - - libllvm14 >=14.0.6,<14.1.0a0 - - libzlib >=1.2.13,<2.0.0a0 - - sigtool - constrains: - - cctools 973.0.1.* - - ld64 609.* - - clang 14.0.* - license: APSL-2.0 - license_family: Other - size: 1125905 - timestamp: 1679508483899 -- kind: conda - name: clang - version: 14.0.6 - build: h694c41f_1 - build_number: 1 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/clang-14.0.6-h694c41f_1.conda - sha256: f757328f9924d93b09aa16423160b6ebbe28dac7508cce76d50795db4d1d39c9 - md5: 1305da4c85c7eaa2e90fa14efc35f591 - depends: - - clang-14 14.0.6 default_hdb78580_1 - constrains: - - clang-tools 14.0.6.* - - llvm 14.0.6.* - - llvm-tools 14.0.6.* - - llvmdev 14.0.6.* + channel: https://prefix.dev/conda-forge license: Apache-2.0 WITH LLVM-exception license_family: Apache - size: 133085 - timestamp: 1684412402322 -- kind: conda - name: clang - version: 14.0.6 - build: hce30654_1 - build_number: 1 - subdir: osx-arm64 - url: https://conda.anaconda.org/conda-forge/osx-arm64/clang-14.0.6-hce30654_1.conda - sha256: da28397f5ab383d9cdcc0d3ed9b3e51a6b2a73edbb1867e13120e2f137eeb69e - md5: 74394af220230cb88970984c086d2cba - depends: - - clang-14 14.0.6 default_h5dc8d65_1 - constrains: - - clang-tools 14.0.6.* - - llvm 14.0.6.* - - llvm-tools 14.0.6.* - - llvmdev 14.0.6.* - license: Apache-2.0 WITH LLVM-exception - license_family: Apache - size: 133297 - timestamp: 1684413800824 -- kind: conda - name: clang-14 - version: 14.0.6 - build: default_h5dc8d65_1 - build_number: 1 - subdir: osx-arm64 - url: https://conda.anaconda.org/conda-forge/osx-arm64/clang-14-14.0.6-default_h5dc8d65_1.conda - sha256: b674c9ff31b9bbf225b6c62e0b8a2ca45fe70ef78ad5ca22a79f6c8e41f37306 - md5: aafcdf5d9f0b5a00540469242d7ab67b - depends: - - libclang-cpp14 14.0.6 default_h5dc8d65_1 - - libcxx >=15.0.7 - - libllvm14 >=14.0.6,<14.1.0a0 - constrains: - - clangxx 14.0.6 - - llvm-tools 14.0.6 - - clang-tools 14.0.6 - - clangdev 14.0.6 - license: Apache-2.0 WITH LLVM-exception - license_family: Apache - 
size: 1028007 - timestamp: 1684413668339 -- kind: conda - name: clang-14 - version: 14.0.6 - build: default_hdb78580_1 - build_number: 1 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/clang-14-14.0.6-default_hdb78580_1.conda - sha256: a8ef6982c0da903e31215425219693a45e39b47189018cf479b03290764793cd - md5: ce19ccaee311132f299ffd0eec9c4581 - depends: - - libclang-cpp14 14.0.6 default_hdb78580_1 - - libcxx >=15.0.7 - - libllvm14 >=14.0.6,<14.1.0a0 - constrains: - - clangxx 14.0.6 - - llvm-tools 14.0.6 - - clangdev 14.0.6 - - clang-tools 14.0.6 - license: Apache-2.0 WITH LLVM-exception - license_family: Apache - size: 1029529 - timestamp: 1684412293094 -- kind: conda - name: clang_osx-64 - version: 14.0.6 - build: h3113cd8_6 - build_number: 6 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/clang_osx-64-14.0.6-h3113cd8_6.conda - sha256: 26092a2c8f9d87c1113e3a20cf0700df4df388c58eba04fabe33fafc0e62190d - md5: 1b191288877fac1564184b28ce07de84 + size: 528991 + timestamp: 1730314340106 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libcxx-19.1.3-ha82da77_0.conda + sha256: 6d062760c6439e75b9a44d800d89aff60fe3441998d87506c62dc94c50412ef4 + md5: bf691071fba4734984231617783225bc depends: - - cctools_osx-64 - - clang 14.0.6.* - - compiler-rt 14.0.6.* - - ld64_osx-64 - - llvm-tools 14.0.6.* - license: BSD-3-Clause - license_family: BSD - size: 20488 - timestamp: 1679865229912 -- kind: conda - name: clang_osx-arm64 - version: 14.0.6 - build: h15773ab_6 - build_number: 6 - subdir: osx-arm64 - url: https://conda.anaconda.org/conda-forge/osx-arm64/clang_osx-arm64-14.0.6-h15773ab_6.conda - sha256: 4b9c93f2a13c8329db93c6acd3cdb6a4bace7848357943e3f2f8097153e71262 - md5: d8f3900d91a67bc95856b297b1fbd4ae - depends: - - cctools_osx-arm64 - - clang 14.0.6.* - - compiler-rt 14.0.6.* - - ld64_osx-arm64 - - llvm-tools 14.0.6.* - license: BSD-3-Clause - license_family: BSD - size: 20547 - timestamp: 1679865317135 -- kind: conda - name: clangxx - version: 14.0.6 - build: default_h610c423_1 - build_number: 1 - subdir: osx-arm64 - url: https://conda.anaconda.org/conda-forge/osx-arm64/clangxx-14.0.6-default_h610c423_1.conda - sha256: abf47478baa7c0d183b43150ca600630dd5ce6762890e0bca925e05bd3247c6b - md5: a9b37a157fdc4a3584aa7f5fb69d3a1a - depends: - - clang 14.0.6 hce30654_1 - license: Apache-2.0 WITH LLVM-exception - license_family: Apache - size: 133461 - timestamp: 1684413825082 -- kind: conda - name: clangxx - version: 14.0.6 - build: default_hdb78580_1 - build_number: 1 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/clangxx-14.0.6-default_hdb78580_1.conda - sha256: 6c5ed5942dc9627926741e001a042f2a8dc31e221c0a7e4bcbbe35cd0e6681b8 - md5: cc2ac1c5c838cb0edd65258da7c38294 - depends: - - clang 14.0.6 h694c41f_1 + - __osx >=11.0 + channel: https://prefix.dev/conda-forge license: Apache-2.0 WITH LLVM-exception license_family: Apache - size: 133262 - timestamp: 1684412419561 -- kind: conda - name: clangxx_osx-64 - version: 14.0.6 - build: h6f97653_6 - build_number: 6 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/clangxx_osx-64-14.0.6-h6f97653_6.conda - sha256: 185f9ccec322cfee2303ba0129ad37c0803492e535978f9e5432e23973004d50 - md5: 3989d08f74e7d987e94d9003cea30080 - depends: - - clang_osx-64 14.0.6 h3113cd8_6 - - clangxx 14.0.6.* - - libcxx >=14.0.6 - - libllvm14 >=14.0.6,<14.1.0a0 - license: BSD-3-Clause - license_family: BSD - size: 19289 - timestamp: 1679865245043 -- kind: conda - name: clangxx_osx-arm64 - version: 14.0.6 
-  build: he29aa18_6
-  build_number: 6
-  subdir: osx-arm64
-  url: https://conda.anaconda.org/conda-forge/osx-arm64/clangxx_osx-arm64-14.0.6-he29aa18_6.conda
-  sha256: 175291d3e349829dc1f03d598815f70b11e978b9e286828c88b9d08a74eb1927
-  md5: 7a6be3e4c92a1cedf333f44d77fd732f
-  depends:
-  - clang_osx-arm64 14.0.6 h15773ab_6
-  - clangxx 14.0.6.*
-  - libcxx >=14.0.6
-  - libllvm14 >=14.0.6,<14.1.0a0
-  license: BSD-3-Clause
-  license_family: BSD
-  size: 19398
-  timestamp: 1679865337838
-- kind: conda
-  name: cmake
-  version: 3.26.4
-  build: h1537add_0
-  subdir: win-64
-  url: https://conda.anaconda.org/conda-forge/win-64/cmake-3.26.4-h1537add_0.conda
-  sha256: c114c7fb3de04329620b715c0677d6dc943954be3877ee5a232ef0dc09f202d9
-  md5: d208c156437ff251e83a1061fa082064
-  depends:
-  - ucrt >=10.0.20348.0
-  - vc14_runtime >=14.29.30139
-  - vs2015_runtime
-  license: BSD-3-Clause
-  license_family: BSD
-  size: 15107993
-  timestamp: 1684462053404
-- kind: conda
-  name: cmake
-  version: 3.26.4
-  build: hc0af03a_0
-  subdir: osx-arm64
-  url: https://conda.anaconda.org/conda-forge/osx-arm64/cmake-3.26.4-hc0af03a_0.conda
-  sha256: cba596b6bba58e347bd8be51fce82583f16c5707f6c98b22a7e7ba1ecdb09783
-  md5: 84c170713e88c74f45fd87ce2a065cd3
-  depends:
-  - bzip2 >=1.0.8,<2.0a0
-  - expat
-  - libcurl >=8.1.0,<9.0a0
-  - libcxx >=15.0.7
-  - libexpat >=2.5.0,<3.0a0
-  - libuv
-  - libzlib >=1.2.13,<2.0.0a0
-  - ncurses >=6.3,<7.0a0
-  - rhash <=1.4.3
-  - xz >=5.2.6,<6.0a0
-  - zlib
-  - zstd >=1.5.2,<1.6.0a0
-  license: BSD-3-Clause
-  license_family: BSD
-  size: 13748747
-  timestamp: 1684461655168
-- kind: conda
-  name: cmake
-  version: 3.26.4
-  build: hcfe8598_0
-  subdir: linux-64
-  url: https://conda.anaconda.org/conda-forge/linux-64/cmake-3.26.4-hcfe8598_0.conda
-  sha256: 37533b572a676017704c989c392998c344e889010786d6555dccdfc524a8e238
-  md5: 1714cf0f0facaeb609a0846e4270aff2
-  depends:
-  - bzip2 >=1.0.8,<2.0a0
-  - expat
-  - libcurl >=8.1.0,<9.0a0
-  - libexpat >=2.5.0,<3.0a0
-  - libgcc-ng >=12
-  - libstdcxx-ng >=12
-  - libuv
-  - libzlib >=1.2.13,<2.0.0a0
-  - ncurses >=6.3,<7.0a0
-  - rhash <=1.4.3
-  - xz >=5.2.6,<6.0a0
-  - zlib
-  - zstd >=1.5.2,<1.6.0a0
-  license: BSD-3-Clause
-  license_family: BSD
-  size: 16343060
-  timestamp: 1684460894541
-- kind: conda
-  name: cmake
-  version: 3.26.4
-  build: hf40c264_0
-  subdir: osx-64
-  url: https://conda.anaconda.org/conda-forge/osx-64/cmake-3.26.4-hf40c264_0.conda
-  sha256: a13eb9e0a7f6b46c1035af0830324ba458315ed973022a21e0473f5d5090bfc9
-  md5: 24d42ac02d1968f731740c14e30d72b9
-  depends:
-  - bzip2 >=1.0.8,<2.0a0
-  - expat
-  - libcurl >=8.1.0,<9.0a0
-  - libcxx >=15.0.7
-  - libexpat >=2.5.0,<3.0a0
-  - libuv
-  - libzlib >=1.2.13,<2.0.0a0
-  - ncurses >=6.3,<7.0a0
-  - rhash <=1.4.3
-  - xz >=5.2.6,<6.0a0
-  - zlib
-  - zstd >=1.5.2,<1.6.0a0
-  license: BSD-3-Clause
-  license_family: BSD
-  size: 14420122
-  timestamp: 1684462885897
-- kind: conda
-  name: compiler-rt
-  version: 14.0.6
-  build: h30b49de_0
-  subdir: osx-arm64
-  url: https://conda.anaconda.org/conda-forge/osx-arm64/compiler-rt-14.0.6-h30b49de_0.tar.bz2
-  sha256: 266578ae49450e6b4a778b454f8e7fd988676dd9146bb186093066ab1589ba06
-  md5: b88a5457fa7def557e5902046ab56b6e
-  depends:
-  - clang 14.0.6.*
-  - clangxx 14.0.6.*
-  - compiler-rt_osx-arm64 14.0.6.*
-  license: Apache-2.0 WITH LLVM-exception
-  license_family: APACHE
-  size: 85467
-  timestamp: 1667980555472
-- kind: conda
-  name: compiler-rt
-  version: 14.0.6
-  build: h613da45_0
-  subdir: osx-64
-  url: https://conda.anaconda.org/conda-forge/osx-64/compiler-rt-14.0.6-h613da45_0.tar.bz2
-  sha256: 2dea3b5efea587329320c70a335fa5666c3a814e70e76464734b90a40b70e8a8
-  md5: b44e0625319f9933e584dc3b96f5baf7
-  depends:
-  - clang 14.0.6.*
-  - clangxx 14.0.6.*
-  - compiler-rt_osx-64 14.0.6.*
-  license: Apache-2.0 WITH LLVM-exception
-  license_family: APACHE
-  size: 85454
-  timestamp: 1667980424247
-- kind: conda
-  name: compiler-rt_osx-64
-  version: 14.0.6
-  build: hab78ec2_0
-  subdir: noarch
-  noarch: generic
-  url: https://conda.anaconda.org/conda-forge/noarch/compiler-rt_osx-64-14.0.6-hab78ec2_0.tar.bz2
-  sha256: a8351d6a47a8a2cd8267862d36ad5a06f16955c68111140b8b147ee126433712
-  md5: 4fdde3f4ed31722a1c811723f5db82f0
+  size: 520771
+  timestamp: 1730314603920
+- conda: https://conda.anaconda.org/conda-forge/linux-64/libgcc-14.2.0-h77fa898_1.conda
+  sha256: 53eb8a79365e58849e7b1a068d31f4f9e718dc938d6f2c03e960345739a03569
+  md5: 3cb76c3f10d3bc7f1105b2fc9db984df
   depends:
-  - clang 14.0.6.*
-  - clangxx 14.0.6.*
-  constrains:
-  - compiler-rt 14.0.6
-  license: Apache-2.0 WITH LLVM-exception
-  license_family: APACHE
-  size: 15896681
-  timestamp: 1667980403834
-- kind: conda
-  name: compiler-rt_osx-arm64
-  version: 14.0.6
-  build: h48302dc_0
-  subdir: noarch
-  noarch: generic
-  url: https://conda.anaconda.org/conda-forge/noarch/compiler-rt_osx-arm64-14.0.6-h48302dc_0.tar.bz2
-  sha256: f9f63e8779ff31368cc92ee668308c8e7e974f68457f62148c5663aa0136a42d
-  md5: ebcb473032038866101b70f9f270a9a2
-  depends:
-  - clang 14.0.6.*
-  - clangxx 14.0.6.*
+  - _libgcc_mutex 0.1 conda_forge
+  - _openmp_mutex >=4.5
   constrains:
-  - compiler-rt 14.0.6
-  license: Apache-2.0 WITH LLVM-exception
-  license_family: APACHE
-  size: 15828315
-  timestamp: 1667980533329
-- kind: conda
-  name: cxx-compiler
-  version: 1.5.2
-  build: h91493d7_0
-  subdir: win-64
-  url: https://conda.anaconda.org/conda-forge/win-64/cxx-compiler-1.5.2-h91493d7_0.conda
-  sha256: 2f8178bc97fff699ae54dd90b96ffa980ec148136e18fe3351a13741a55cc755
-  md5: 96a8cc05d246f92e122e0affa390cba6
-  depends:
-  - vs2019_win-64
-  license: BSD
-  size: 6078
-  timestamp: 1670951778972
-- kind: conda
-  name: cxx-compiler
-  version: 1.5.2
-  build: hb8565cd_0
-  subdir: osx-64
-  url: https://conda.anaconda.org/conda-forge/osx-64/cxx-compiler-1.5.2-hb8565cd_0.conda
-  sha256: 91193c9029594d102217457ce8b4fe1cfd4a1e13e652451e94f851e91b45a147
-  md5: 349ae14723b98f76ea0fcb8e532b2ead
-  depends:
-  - c-compiler 1.5.2 hbf74d83_0
-  - clangxx_osx-64 14.*
-  license: BSD
-  size: 5674
-  timestamp: 1670951614642
-- kind: conda
-  name: cxx-compiler
-  version: 1.5.2
-  build: hf52228f_0
-  subdir: linux-64
-  url: https://conda.anaconda.org/conda-forge/linux-64/cxx-compiler-1.5.2-hf52228f_0.conda
-  sha256: c6916082ea28b905dd59d4b6b5b07be413a3a5a814193df43c28101e4d29a7fc
-  md5: 6b3b19e359824b97df7145c8c878c8be
-  depends:
-  - c-compiler 1.5.2 h0b41bf4_0
-  - gxx
-  - gxx_linux-64 11.*
-  license: BSD
-  size: 5782
-  timestamp: 1670951518183
-- kind: conda
-  name: cxx-compiler
-  version: 1.5.2
-  build: hffc8910_0
-  subdir: osx-arm64
-  url: https://conda.anaconda.org/conda-forge/osx-arm64/cxx-compiler-1.5.2-hffc8910_0.conda
-  sha256: 84f23671f8b18aeabcfd4b5315383442c3bdff3c9194b85c30ec5690d14e721a
-  md5: 3dd2dd956573a59e32711e2e08bb5d8b
-  depends:
-  - c-compiler 1.5.2 h5008568_0
-  - clangxx_osx-arm64 14.*
-  license: BSD
-  size: 5689
-  timestamp: 1670951872433
-- kind: conda
-  name: dbus
-  version: 1.13.6
-  build: h5008d03_3
-  build_number: 3
-  subdir: linux-64
-  url: https://conda.anaconda.org/conda-forge/linux-64/dbus-1.13.6-h5008d03_3.tar.bz2
-  sha256: 8f5f995699a2d9dbdd62c61385bfeeb57c82a681a7c8c5313c395aa0ccab68a5
-  md5: ecfff944ba3960ecb334b9a2663d708d
-  depends:
-  - expat >=2.4.2,<3.0a0
-  - libgcc-ng >=9.4.0
-  - libglib >=2.70.2,<3.0a0
-  license: GPL-2.0-or-later
-  license_family: GPL
-  size: 618596
-  timestamp: 1640112124844
-- kind: conda
-  name: expat
-  version: 2.6.2
-  build: h59595ed_0
-  subdir: linux-64
-  url: https://conda.anaconda.org/conda-forge/linux-64/expat-2.6.2-h59595ed_0.conda
-  sha256: 89916c536ae5b85bb8bf0cfa27d751e274ea0911f04e4a928744735c14ef5155
-  md5: 53fb86322bdb89496d7579fe3f02fd61
-  depends:
-  - libexpat 2.6.2 h59595ed_0
-  - libgcc-ng >=12
-  license: MIT
-  license_family: MIT
-  size: 137627
-  timestamp: 1710362144873
-- kind: conda
-  name: expat
-  version: 2.6.2
-  build: h73e2aa4_0
-  subdir: osx-64
-  url: https://conda.anaconda.org/conda-forge/osx-64/expat-2.6.2-h73e2aa4_0.conda
-  sha256: 0fd1befb18d9d937358a90d5b8f97ac2402761e9d4295779cbad9d7adfb47976
-  md5: dc0882915da2ec74696ad87aa2350f27
-  depends:
-  - libexpat 2.6.2 h73e2aa4_0
-  license: MIT
-  license_family: MIT
-  size: 126612
-  timestamp: 1710362607162
-- kind: conda
-  name: expat
-  version: 2.6.2
-  build: hebf3989_0
-  subdir: osx-arm64
-  url: https://conda.anaconda.org/conda-forge/osx-arm64/expat-2.6.2-hebf3989_0.conda
-  sha256: 9ac22553a4d595d7e4c9ca9aa09a0b38da65314529a7a7008edc73d3f9e7904a
-  md5: de0cff0ec74f273c4b6aa281479906c3
-  depends:
-  - libexpat 2.6.2 hebf3989_0
-  license: MIT
-  license_family: MIT
-  size: 124594
-  timestamp: 1710362455984
-- kind: conda
-  name: gcc
-  version: 11.4.0
-  build: h602e360_13
-  build_number: 13
-  subdir: linux-64
-  url: https://conda.anaconda.org/conda-forge/linux-64/gcc-11.4.0-h602e360_13.conda
-  sha256: 34fc2b2a32d4dc96de68012df13a39d1afd2e7fa2ad6eee71e71c0f45e616912
-  md5: 4ec3797a424877eb56910c12f92480d7
-  depends:
-  - gcc_impl_linux-64 11.4.0.*
-  license: BSD-3-Clause
-  license_family: BSD
-  size: 50247
-  timestamp: 1719178517505
-- kind: conda
-  name: gcc_impl_linux-64
-  version: 11.4.0
-  build: h00c12a0_13
-  build_number: 13
-  subdir: linux-64
-  url: https://conda.anaconda.org/conda-forge/linux-64/gcc_impl_linux-64-11.4.0-h00c12a0_13.conda
-  sha256: 92d19dcacde45abe3011d559d34db72f5d038ad2e78b5d6ea54ff3819a26f244
-  md5: 84a8282b1e54ec4db2855069a917680d
-  depends:
-  - binutils_impl_linux-64 >=2.40
-  - libgcc-devel_linux-64 11.4.0 h8f596e0_113
-  - libgcc-ng >=11.4.0
-  - libgomp >=11.4.0
-  - libsanitizer 11.4.0 h5763a12_13
-  - libstdcxx-ng >=11.4.0
-  - sysroot_linux-64
+  - libgomp 14.2.0 h77fa898_1
+  - libgcc-ng ==14.2.0=*_1
+  channel: https://prefix.dev/conda-forge
   license: GPL-3.0-only WITH GCC-exception-3.1
   license_family: GPL
-  size: 55558644
-  timestamp: 1719178423187
-- kind: conda
-  name: gcc_linux-64
-  version: 11.4.0
-  build: ha077dfb_4
-  build_number: 4
-  subdir: linux-64
-  url: https://conda.anaconda.org/conda-forge/linux-64/gcc_linux-64-11.4.0-ha077dfb_4.conda
-  sha256: 31bbd2d6d89ba6b371a341ff0abe3ad86b40652c86420d74bc8a44feb47323f7
-  md5: 4486720bf24fecb4a21ab3840433411f
-  depends:
-  - binutils_linux-64 2.40 hb3c18ed_4
-  - gcc_impl_linux-64 11.4.0.*
-  - sysroot_linux-64
-  license: BSD-3-Clause
-  license_family: BSD
-  size: 31262
-  timestamp: 1717999560626
-- kind: conda
-  name: gettext
-  version: 0.22.5
-  build: he02047a_3
-  build_number: 3
-  subdir: linux-64
-  url: https://conda.anaconda.org/conda-forge/linux-64/gettext-0.22.5-he02047a_3.conda
-  sha256: c3d9a453f523acbf2b3e1c82a42edfc7c7111b4686a2180ab48cb9b51a274218
-  md5: c7f243bbaea97cd6ea1edd693270100e
-  depends:
-  - __glibc >=2.17,<3.0.a0
-  - gettext-tools 0.22.5 he02047a_3
-  - libasprintf 0.22.5 he8f35ee_3
-  - libasprintf-devel 0.22.5 he8f35ee_3
-  - libgcc-ng >=12
-  - libgettextpo 0.22.5 he02047a_3
-  - libgettextpo-devel 0.22.5 he02047a_3
-  - libstdcxx-ng >=12
-  license: LGPL-2.1-or-later AND GPL-3.0-or-later
-  size: 479452
-  timestamp: 1723626088190
-- kind: conda
-  name: gettext-tools
-  version: 0.22.5
-  build: he02047a_3
-  build_number: 3
-  subdir: linux-64
-  url: https://conda.anaconda.org/conda-forge/linux-64/gettext-tools-0.22.5-he02047a_3.conda
-  sha256: 0fd003953ce1ce9f4569458aab9ffaa397e3be2bc069250e2f05fd93b0ad2976
-  md5: fcd2016d1d299f654f81021e27496818
-  depends:
-  - __glibc >=2.17,<3.0.a0
-  - libgcc-ng >=12
-  license: GPL-3.0-or-later
-  license_family: GPL
-  size: 2750908
-  timestamp: 1723626056740
-- kind: conda
-  name: gxx
-  version: 11.4.0
-  build: h602e360_13
-  build_number: 13
-  subdir: linux-64
-  url: https://conda.anaconda.org/conda-forge/linux-64/gxx-11.4.0-h602e360_13.conda
-  sha256: ff539ef20255cfb57c9a36d01aac2ebad783f69399a2908de1222c0ba542ce36
-  md5: 88c169cb8305ee14fd9a4d76a4824572
-  depends:
-  - gcc 11.4.0.*
-  - gxx_impl_linux-64 11.4.0.*
-  license: BSD-3-Clause
-  license_family: BSD
-  size: 49758
-  timestamp: 1719178623215
-- kind: conda
-  name: gxx_impl_linux-64
-  version: 11.4.0
-  build: h634f3ee_13
-  build_number: 13
-  subdir: linux-64
-  url: https://conda.anaconda.org/conda-forge/linux-64/gxx_impl_linux-64-11.4.0-h634f3ee_13.conda
-  sha256: ba6cd472e28268a6de1d4af74bafd9034789c132b15c38e727900fe6b85ebdb7
-  md5: aa66d62e50aba20923e0c0121145fcbf
+  size: 848745
+  timestamp: 1729027721139
+- conda: https://conda.anaconda.org/conda-forge/linux-64/libgomp-14.2.0-h77fa898_1.conda
+  sha256: 1911c29975ec99b6b906904040c855772ccb265a1c79d5d75c8ceec4ed89cd63
+  md5: cc3573974587f12dda90d96e3e55a702
   depends:
-  - gcc_impl_linux-64 11.4.0 h00c12a0_13
-  - libstdcxx-devel_linux-64 11.4.0 h8f596e0_113
-  - sysroot_linux-64
+  - _libgcc_mutex 0.1 conda_forge
+  channel: https://prefix.dev/conda-forge
   license: GPL-3.0-only WITH GCC-exception-3.1
   license_family: GPL
-  size: 11702935
-  timestamp: 1719178592572
-- kind: conda
-  name: gxx_linux-64
-  version: 11.4.0
-  build: h35bfe5d_4
-  build_number: 4
-  subdir: linux-64
-  url: https://conda.anaconda.org/conda-forge/linux-64/gxx_linux-64-11.4.0-h35bfe5d_4.conda
-  sha256: bc55c16382edde80880d1f4802ceaf460f259d20cd44156749e6e7f91ef19b7a
-  md5: a3b2fe00ce0def24b147d08db9ac0bde
-  depends:
-  - binutils_linux-64 2.40 hb3c18ed_4
-  - gcc_linux-64 11.4.0 ha077dfb_4
-  - gxx_impl_linux-64 11.4.0.*
-  - sysroot_linux-64
-  license: BSD-3-Clause
-  license_family: BSD
-  size: 29582
-  timestamp: 1717999588172
-- kind: conda
-  name: kernel-headers_linux-64
-  version: 3.10.0
-  build: h4a8ded7_16
-  build_number: 16
-  subdir: noarch
-  noarch: generic
-  url: https://conda.anaconda.org/conda-forge/noarch/kernel-headers_linux-64-3.10.0-h4a8ded7_16.conda
-  sha256: a55044e0f61058a5f6bab5e1dd7f15a1fa7a08ec41501dbfca5ab0fc50b9c0c1
-  md5: ff7f38675b226cfb855aebfc32a13e31
+  size: 460992
+  timestamp: 1729027639220
+- conda: https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-14.2.0-hc0a3c3a_1.conda
+  sha256: 4661af0eb9bdcbb5fb33e5d0023b001ad4be828fccdcc56500059d56f9869462
+  md5: 234a5554c53625688d51062645337328
   depends:
-  - _sysroot_linux-64_curr_repodata_hack 3.*
-  constrains:
-  - sysroot_linux-64 ==2.17
-  license: LGPL-2.0-or-later AND LGPL-2.0-or-later WITH exceptions AND GPL-2.0-or-later AND MPL-2.0
+  - libgcc 14.2.0 h77fa898_1
+  channel: https://prefix.dev/conda-forge
+  license: GPL-3.0-only WITH GCC-exception-3.1
   license_family: GPL
-  size: 944344
-  timestamp: 1720621422017
-- kind: conda
-  name: keyutils
-  version: 1.6.1
-  build: h166bdaf_0
-  subdir: linux-64
-  url: https://conda.anaconda.org/conda-forge/linux-64/keyutils-1.6.1-h166bdaf_0.tar.bz2
-  sha256: 150c05a6e538610ca7c43beb3a40d65c90537497a4f6a5f4d15ec0451b6f5ebb
-  md5: 30186d27e2c9fa62b45fb1476b7200e3
-  depends:
-  - libgcc-ng >=10.3.0
-  license: LGPL-2.1-or-later
-  size: 117831
-  timestamp: 1646151697040
-- kind: conda
-  name: krb5
-  version: 1.21.3
-  build: h237132a_0
-  subdir: osx-arm64
-  url: https://conda.anaconda.org/conda-forge/osx-arm64/krb5-1.21.3-h237132a_0.conda
-  sha256: 4442f957c3c77d69d9da3521268cad5d54c9033f1a73f99cde0a3658937b159b
-  md5: c6dc8a0fdec13a0565936655c33069a1
-  depends:
-  - __osx >=11.0
-  - libcxx >=16
-  - libedit >=3.1.20191231,<3.2.0a0
-  - libedit >=3.1.20191231,<4.0a0
-  - openssl >=3.3.1,<4.0a0
-  license: MIT
-  license_family: MIT
-  size: 1155530
-  timestamp: 1719463474401
-- kind: conda
-  name: krb5
-  version: 1.21.3
-  build: h37d8d59_0
-  subdir: osx-64
-  url: https://conda.anaconda.org/conda-forge/osx-64/krb5-1.21.3-h37d8d59_0.conda
-  sha256: 83b52685a4ce542772f0892a0f05764ac69d57187975579a0835ff255ae3ef9c
-  md5: d4765c524b1d91567886bde656fb514b
-  depends:
-  - __osx >=10.13
-  - libcxx >=16
-  - libedit >=3.1.20191231,<3.2.0a0
-  - libedit >=3.1.20191231,<4.0a0
-  - openssl >=3.3.1,<4.0a0
-  license: MIT
-  license_family: MIT
-  size: 1185323
-  timestamp: 1719463492984
-- kind: conda
-  name: krb5
-  version: 1.21.3
-  build: h659f571_0
-  subdir: linux-64
-  url: https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.3-h659f571_0.conda
-  sha256: 99df692f7a8a5c27cd14b5fb1374ee55e756631b9c3d659ed3ee60830249b238
-  md5: 3f43953b7d3fb3aaa1d0d0723d91e368
-  depends:
-  - keyutils >=1.6.1,<2.0a0
-  - libedit >=3.1.20191231,<3.2.0a0
-  - libedit >=3.1.20191231,<4.0a0
-  - libgcc-ng >=12
-  - libstdcxx-ng >=12
-  - openssl >=3.3.1,<4.0a0
-  license: MIT
-  license_family: MIT
-  size: 1370023
-  timestamp: 1719463201255
-- kind: conda
-  name: lame
-  version: '3.100'
-  build: h166bdaf_1003
-  build_number: 1003
-  subdir: linux-64
-  url: https://conda.anaconda.org/conda-forge/linux-64/lame-3.100-h166bdaf_1003.tar.bz2
-  sha256: aad2a703b9d7b038c0f745b853c6bb5f122988fe1a7a096e0e606d9cbec4eaab
-  md5: a8832b479f93521a9e7b5b743803be51
-  depends:
-  - libgcc-ng >=12
-  license: LGPL-2.0-only
-  license_family: LGPL
-  size: 508258
-  timestamp: 1664996250081
-- kind: conda
-  name: ld64
-  version: '609'
-  build: h619f069_13
-  build_number: 13
-  subdir: osx-arm64
-  url: https://conda.anaconda.org/conda-forge/osx-arm64/ld64-609-h619f069_13.conda
-  sha256: f39fdd5b8bfacc811d812370c26b5bd7a95551c130775664ed7eb259c509c9da
-  md5: c771ebc91a8088d5078332c83758e774
-  depends:
-  - ld64_osx-arm64 609 h7167370_13
-  - libllvm14 >=14.0.6,<14.1.0a0
-  constrains:
-  - cctools 973.0.1.*
-  - cctools_osx-arm64 973.0.1.*
-  license: APSL-2.0
-  license_family: Other
-  size: 18839
-  timestamp: 1679508545094
-- kind: conda
-  name: ld64
-  version: '609'
-  build: hc6ad406_13
-  build_number: 13
-  subdir: osx-64
-  url: https://conda.anaconda.org/conda-forge/osx-64/ld64-609-hc6ad406_13.conda
-  sha256: cc7d0de073179de57763c5cf9ee7c2bc9855d362c13245427cccbd517fe794fe
-  md5: 5d7676eee44dfa3e48bf21700e044aa9
-  depends:
-  - ld64_osx-64 609 hfd63004_13
-  - libllvm14 >=14.0.6,<14.1.0a0
+  size: 3893695
+  timestamp: 1729027746910
+- conda: .
+  name: sdl_example
+  version: 0.1.0
+  build: hbf21a9e_0
+  subdir: linux-64
+  depends:
+  - libstdcxx >=14
+  - libgcc >=14
+  input:
+    hash: e15d918ba08c91b2a76a222f189538832b722b151ce7de1f26f6f3804107e401
+    globs:
+    - pixi.toml
+- conda: .
+  name: sdl_example
+  version: 0.1.0
+  build: hbf21a9e_0
+  subdir: osx-64
+  depends:
+  - libcxx >=19
+  input:
+    hash: e15d918ba08c91b2a76a222f189538832b722b151ce7de1f26f6f3804107e401
+    globs:
+    - pixi.toml
+- conda: .
+  name: sdl_example
+  version: 0.1.0
+  build: hbf21a9e_0
+  subdir: osx-arm64
+  depends:
+  - libcxx >=19
+  input:
+    hash: e15d918ba08c91b2a76a222f189538832b722b151ce7de1f26f6f3804107e401
+    globs:
+    - pixi.toml
+- conda: .
+  name: sdl_example
+  version: 0.1.0
+  build: hbf21a9e_0
+  subdir: win-64
+  depends:
+  - vc >=14.1,<15
+  - vc14_runtime >=14.16.27033
+  input:
+    hash: e15d918ba08c91b2a76a222f189538832b722b151ce7de1f26f6f3804107e401
+    globs:
+    - pixi.toml
+- conda: https://conda.anaconda.org/conda-forge/win-64/ucrt-10.0.22621.0-h57928b3_1.conda
+  sha256: db8dead3dd30fb1a032737554ce91e2819b43496a0db09927edf01c32b577450
+  md5: 6797b005cd0f439c4c5c9ac565783700
   constrains:
-  - cctools_osx-64 973.0.1.*
-  - cctools 973.0.1.*
-  license: APSL-2.0
-  license_family: Other
-  size: 18850
-  timestamp: 1679505500085
-- kind: conda
-  name: ld64_osx-64
-  version: '609'
-  build: hfd63004_13
-  build_number: 13
-  subdir: osx-64
-  url: https://conda.anaconda.org/conda-forge/osx-64/ld64_osx-64-609-hfd63004_13.conda
-  sha256: 97c752c2b4518c394adf6db1538d8dff233a4a3072964fb4d834f3993f988e43
-  md5: 58fcda6a84fb42f51c6c2d6d175b435d
-  depends:
-  - libcxx
-  - libllvm14 >=14.0.6,<14.1.0a0
-  - sigtool
-  - tapi >=1100.0.11,<1101.0a0
-  constrains:
-  - cctools_osx-64 973.0.1.*
-  - cctools 973.0.1.*
-  - ld 609.*
-  - clang >=14.0.6,<15.0a0
-  license: APSL-2.0
-  license_family: Other
-  size: 1061393
-  timestamp: 1679505296948
-- kind: conda
-  name: ld64_osx-arm64
-  version: '609'
-  build: h7167370_13
-  build_number: 13
-  subdir: osx-arm64
-  url: https://conda.anaconda.org/conda-forge/osx-arm64/ld64_osx-arm64-609-h7167370_13.conda
-  sha256: baed60a74f3fa1e3158240ca0562fecb9398e7568d4b6126e494771323995b5c
-  md5: 95b0f14e103c5a2c27f5d53609e60643
-  depends:
-  - libcxx
-  - libllvm14 >=14.0.6,<14.1.0a0
-  - sigtool
-  - tapi >=1100.0.11,<1101.0a0
-  constrains:
-  - cctools 973.0.1.*
-  - clang >=14.0.6,<15.0a0
-  - ld 609.*
-  - cctools_osx-arm64 973.0.1.*
-  license: APSL-2.0
-  license_family: Other
-  size: 1041975
-  timestamp: 1679507658652
-- kind: conda
-  name: ld_impl_linux-64
-  version: '2.40'
-  build: hf3520f5_7
-  build_number: 7
-  subdir: linux-64
-  url: https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.40-hf3520f5_7.conda
-  sha256: 764b6950aceaaad0c67ef925417594dd14cd2e22fff864aeef455ac259263d15
-  md5: b80f2f396ca2c28b8c14c437a4ed1e74
-  constrains:
-  - binutils_impl_linux-64 2.40
-  license: GPL-3.0-only
-  license_family: GPL
-  size: 707602
-  timestamp: 1718625640445
-- kind: conda
-  name: libasprintf
-  version: 0.22.5
-  build: he8f35ee_3
-  build_number: 3
-  subdir: linux-64
-  url: https://conda.anaconda.org/conda-forge/linux-64/libasprintf-0.22.5-he8f35ee_3.conda
-  sha256: 2da5c735811cbf38c7f7844ab457ff8b25046bbf5fe5ebd5dc1c2fafdf4fbe1c
-  md5: 4fab9799da9571266d05ca5503330655
-  depends:
-  - __glibc >=2.17,<3.0.a0
-  - libgcc-ng >=12
-  - libstdcxx-ng >=12
-  license: LGPL-2.1-or-later
-  size: 42817
-  timestamp: 1723626012203
-- kind: conda
-  name: libasprintf-devel
-  version: 0.22.5
-  build: he8f35ee_3
-  build_number: 3
-  subdir: linux-64
-  url: https://conda.anaconda.org/conda-forge/linux-64/libasprintf-devel-0.22.5-he8f35ee_3.conda
-  sha256: ccc7967e298ddf3124c8ad9741c7180dc6f778ae4135ec87978214f7b3c64dc2
-  md5: 1091193789bb830127ed067a9e01ac57
-  depends:
-  - __glibc >=2.17,<3.0.a0
-  - libasprintf 0.22.5 he8f35ee_3
-  - libgcc-ng >=12
-  license: LGPL-2.1-or-later
-  size: 34172
-  timestamp: 1723626026096
-- kind: conda
-  name: libcap
-  version: '2.69'
-  build: h0f662aa_0
-  subdir: linux-64
-  url: https://conda.anaconda.org/conda-forge/linux-64/libcap-2.69-h0f662aa_0.conda
-  sha256: 942f9564b4228609f017b6617425d29a74c43b8a030e12239fa4458e5cb6323c
-  md5: 25cb5999faa414e5ccb2c1388f62d3d5
-  depends:
-  - attr >=2.5.1,<2.6.0a0
-  - libgcc-ng >=12
+  - vs2015_runtime >=14.29.30037
+  channel: https://prefix.dev/conda-forge
+  license: LicenseRef-MicrosoftWindowsSDK10
+  size: 559710
+  timestamp: 1728377334097
+- conda: https://conda.anaconda.org/conda-forge/win-64/vc-14.3-ha32ba9b_23.conda
+  sha256: 986ddaf8feec2904eac9535a7ddb7acda1a1dfb9482088fdb8129f1595181663
+  md5: 7c10ec3158d1eb4ddff7007c9101adb0
+  depends:
+  - vc14_runtime >=14.38.33135
+  channel: https://prefix.dev/conda-forge
+  track_features:
+  - vc14
   license: BSD-3-Clause
   license_family: BSD
-  size: 100582
-  timestamp: 1684162447012
-- kind: conda
-  name: libclang-cpp14
-  version: 14.0.6
-  build: default_h5dc8d65_1
-  build_number: 1
-  subdir: osx-arm64
-  url: https://conda.anaconda.org/conda-forge/osx-arm64/libclang-cpp14-14.0.6-default_h5dc8d65_1.conda
-  sha256: 9cb083dae695cda715e280c2cadd08b625ecc5842f99fb575901990b0e60bf7c
-  md5: 7caa562f3551c16cd82d0147640091f8
-  depends:
-  - libcxx >=15.0.7
-  - libllvm14 >=14.0.6,<14.1.0a0
-  license: Apache-2.0 WITH LLVM-exception
-  license_family: Apache
-  size: 10930694
-  timestamp: 1684413524265
-- kind: conda
-  name: libclang-cpp14
-  version: 14.0.6
-  build: default_hdb78580_1
-  build_number: 1
-  subdir: osx-64
-  url: https://conda.anaconda.org/conda-forge/osx-64/libclang-cpp14-14.0.6-default_hdb78580_1.conda
-  sha256: 5720d4662bd032a1dc07d0ae1368fa5e454c938545bdd93f78dacd25b9f597d3
-  md5: 9a235664bf087994aa3acc1a60614964
-  depends:
-  - libcxx >=15.0.7
-  - libllvm14 >=14.0.6,<14.1.0a0
-  license: Apache-2.0 WITH LLVM-exception
-  license_family: Apache
-  size: 11897758
-  timestamp: 1684412176370
-- kind: conda
-  name: libcurl
-  version: 8.9.1
-  build: hdb1bdb2_0
-  subdir: linux-64
-  url: https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.9.1-hdb1bdb2_0.conda
-  sha256: 0ba60f83709068e9ec1ab543af998cb5a201c8379c871205447684a34b5abfd8
-  md5: 7da1d242ca3591e174a3c7d82230d3c0
-  depends:
-  - krb5 >=1.21.3,<1.22.0a0
-  - libgcc-ng >=12
-  - libnghttp2 >=1.58.0,<2.0a0
-  - libssh2 >=1.11.0,<2.0a0
-  - libzlib >=1.3.1,<2.0a0
-  - openssl >=3.3.1,<4.0a0
-  - zstd >=1.5.6,<1.6.0a0
-  license: curl
-  license_family: MIT
-  size: 416057
-  timestamp: 1722439924963
-- kind: conda
-  name: libcurl
-  version: 8.9.1
-  build: hfcf2730_0
-  subdir: osx-64
-  url: https://conda.anaconda.org/conda-forge/osx-64/libcurl-8.9.1-hfcf2730_0.conda
-  sha256: a7ce066fbb2d34f7948d8e5da30d72ff01f0a5bcde05ea46fa2d647eeedad3a7
-  md5: 6ea09f173c46d135ee6d6845fe50a9c0
-  depends:
-  - krb5 >=1.21.3,<1.22.0a0
-  - libnghttp2 >=1.58.0,<2.0a0
-  - libssh2 >=1.11.0,<2.0a0
-  - libzlib >=1.3.1,<2.0a0
-  - openssl >=3.3.1,<4.0a0
-  - zstd >=1.5.6,<1.6.0a0
-  license: curl
-  license_family: MIT
-  size: 397060
-  timestamp: 1722440158491
-- kind: conda
-  name: libcurl
-  version: 8.9.1
-  build: hfd8ffcc_0
-  subdir: osx-arm64
-  url: https://conda.anaconda.org/conda-forge/osx-arm64/libcurl-8.9.1-hfd8ffcc_0.conda
-  sha256: 4d6006c866844a39fb835436a48407f54f2310111a6f1d3e89efb16cf5c4d81b
-  md5: be0f46c6362775504d8894bd788a45b2
-  depends:
-  - krb5 >=1.21.3,<1.22.0a0
-  - libnghttp2 >=1.58.0,<2.0a0
-  - libssh2 >=1.11.0,<2.0a0
-  - libzlib >=1.3.1,<2.0a0
-  - openssl >=3.3.1,<4.0a0
-  - zstd >=1.5.6,<1.6.0a0
-  license: curl
-  license_family: MIT
-  size: 374937
-  timestamp: 1722440523552
-- kind: conda
-  name: libcxx
-  version: 18.1.8
-  build: h3ed4263_6
-  build_number: 6
-  subdir: osx-arm64
-  url: https://conda.anaconda.org/conda-forge/osx-arm64/libcxx-18.1.8-h3ed4263_6.conda
-  sha256: 6e267698e575bb02c8ed86184fad6d6d3504643dcfa10dad0306d3d25a3d22e3
-  md5: 9fefa1597c93b710cc9bce87bffb0428
-  depends:
-  - __osx >=11.0
-  license: Apache-2.0 WITH LLVM-exception
-  license_family: Apache
-  size: 1216771
-  timestamp: 1724726498879
-- kind: conda
-  name: libcxx
-  version: 18.1.8
-  build: hd876a4e_6
-  build_number: 6
-  subdir: osx-64
-  url: https://conda.anaconda.org/conda-forge/osx-64/libcxx-18.1.8-hd876a4e_6.conda
-  sha256: 17f9d82da076bee9db33272f43e04be98afbcb27eba7cd83dda3212a7ee1c218
-  md5: 93efb2350f312a3c871e87d9fdc09813
-  depends:
-  - __osx >=10.13
-  license: Apache-2.0 WITH LLVM-exception
-  license_family: Apache
-  size: 1223212
-  timestamp: 1724726420315
-- kind: conda
-  name: libedit
-  version: 3.1.20191231
-  build: h0678c8f_2
-  build_number: 2
-  subdir: osx-64
-  url: https://conda.anaconda.org/conda-forge/osx-64/libedit-3.1.20191231-h0678c8f_2.tar.bz2
-  sha256: dbd3c3f2eca1d21c52e4c03b21930bbce414c4592f8ce805801575b9e9256095
-  md5: 6016a8a1d0e63cac3de2c352cd40208b
-  depends:
-  - ncurses >=6.2,<7.0.0a0
-  license: BSD-2-Clause
-  license_family: BSD
-  size: 105382
-  timestamp: 1597616576726
-- kind: conda
-  name: libedit
-  version: 3.1.20191231
-  build: hc8eb9b7_2
-  build_number: 2
-  subdir: osx-arm64
-  url: https://conda.anaconda.org/conda-forge/osx-arm64/libedit-3.1.20191231-hc8eb9b7_2.tar.bz2
-  sha256: 3912636197933ecfe4692634119e8644904b41a58f30cad9d1fc02f6ba4d9fca
-  md5: 30e4362988a2623e9eb34337b83e01f9
-  depends:
-  - ncurses >=6.2,<7.0.0a0
-  license: BSD-2-Clause
-  license_family: BSD
-  size: 96607
-  timestamp: 1597616630749
-- kind: conda
-  name: libedit
-  version: 3.1.20191231
-  build: he28a2e2_2
-  build_number: 2
-  subdir: linux-64
-  url: https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20191231-he28a2e2_2.tar.bz2
-  sha256: a57d37c236d8f7c886e01656f4949d9dcca131d2a0728609c6f7fa338b65f1cf
-  md5: 4d331e44109e3f0e19b4cb8f9b82f3e1
-  depends:
-  - libgcc-ng >=7.5.0
-  - ncurses >=6.2,<7.0.0a0
-  license: BSD-2-Clause
-  license_family: BSD
-  size: 123878
-  timestamp: 1597616541093
-- kind: conda
-  name: libev
-  version: '4.33'
-  build: h10d778d_2
-  build_number: 2
-  subdir: osx-64
-  url: https://conda.anaconda.org/conda-forge/osx-64/libev-4.33-h10d778d_2.conda
-  sha256: 0d238488564a7992942aa165ff994eca540f687753b4f0998b29b4e4d030ff43
-  md5: 899db79329439820b7e8f8de41bca902
-  license: BSD-2-Clause
-  license_family: BSD
-  size: 106663
-  timestamp: 1702146352558
-- kind: conda
-  name: libev
-  version: '4.33'
-  build: h93a5062_2
-  build_number: 2
-  subdir: osx-arm64
-  url: https://conda.anaconda.org/conda-forge/osx-arm64/libev-4.33-h93a5062_2.conda
-  sha256: 95cecb3902fbe0399c3a7e67a5bed1db813e5ab0e22f4023a5e0f722f2cc214f
-  md5: 36d33e440c31857372a72137f78bacf5
-  license: BSD-2-Clause
-  license_family: BSD
-  size: 107458
-  timestamp: 1702146414478
-- kind: conda
-  name: libev
-  version: '4.33'
-  build: hd590300_2
-  build_number: 2
-  subdir: linux-64
-  url: https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-hd590300_2.conda
-  sha256: 1cd6048169fa0395af74ed5d8f1716e22c19a81a8a36f934c110ca3ad4dd27b4
-  md5: 172bf1cd1ff8629f2b1179945ed45055
+  size: 17479
+  timestamp: 1731710827215
+- conda: https://conda.anaconda.org/conda-forge/win-64/vc14_runtime-14.42.34433-he29a5d6_23.conda
+  sha256: c483b090c4251a260aba6ff3e83a307bcfb5fb24ad7ced872ab5d02971bd3a49
+  md5: 32b37d0cfa80da34548501cdc913a832
   depends:
-  - libgcc-ng >=12
-  license: BSD-2-Clause
-  license_family: BSD
-  size: 112766
-  timestamp: 1702146165126
-- kind: conda
-  name: libexpat
-  version: 2.6.2
-  build: h59595ed_0
-  subdir: linux-64
-  url: https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.6.2-h59595ed_0.conda
-  sha256: 331bb7c7c05025343ebd79f86ae612b9e1e74d2687b8f3179faec234f986ce19
-  md5: e7ba12deb7020dd080c6c70e7b6f6a3d
-  depends:
-  - libgcc-ng >=12
-  constrains:
-  - expat 2.6.2.*
-  license: MIT
-  license_family: MIT
-  size: 73730
-  timestamp: 1710362120304
-- kind: conda
-  name: libexpat
-  version: 2.6.2
-  build: h73e2aa4_0
-  subdir: osx-64
-  url: https://conda.anaconda.org/conda-forge/osx-64/libexpat-2.6.2-h73e2aa4_0.conda
-  sha256: a188a77b275d61159a32ab547f7d17892226e7dac4518d2c6ac3ac8fc8dfde92
-  md5: 3d1d51c8f716d97c864d12f7af329526
-  constrains:
-  - expat 2.6.2.*
-  license: MIT
-  license_family: MIT
-  size: 69246
-  timestamp: 1710362566073
-- kind: conda
-  name: libexpat
-  version: 2.6.2
-  build: hebf3989_0
-  subdir: osx-arm64
-  url: https://conda.anaconda.org/conda-forge/osx-arm64/libexpat-2.6.2-hebf3989_0.conda
-  sha256: ba7173ac30064ea901a4c9fb5a51846dcc25512ceb565759be7d18cbf3e5415e
-  md5: e3cde7cfa87f82f7cb13d482d5e0ad09
+  - ucrt >=10.0.20348.0
   constrains:
-  - expat 2.6.2.*
-  license: MIT
-  license_family: MIT
-  size: 63655
-  timestamp: 1710362424980
-- kind: conda
-  name: libffi
-  version: 3.4.2
-  build: h7f98852_5
-  build_number: 5
-  subdir: linux-64
-  url: https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2
-  sha256: ab6e9856c21709b7b517e940ae7028ae0737546122f83c2aa5d692860c3b149e
-  md5: d645c6d2ac96843a2bfaccd2d62b3ac3
-  depends:
-  - libgcc-ng >=9.4.0
-  license: MIT
-  license_family: MIT
-  size: 58292
-  timestamp: 1636488182923
-- kind: conda
-  name: libflac
-  version: 1.4.3
-  build: h59595ed_0
-  subdir: linux-64
-  url: https://conda.anaconda.org/conda-forge/linux-64/libflac-1.4.3-h59595ed_0.conda
-  sha256: 65908b75fa7003167b8a8f0001e11e58ed5b1ef5e98b96ab2ba66d7c1b822c7d
-  md5: ee48bf17cc83a00f59ca1494d5646869
-  depends:
-  - gettext >=0.21.1,<1.0a0
-  - libgcc-ng >=12
-  - libogg 1.3.*
-  - libogg >=1.3.4,<1.4.0a0
-  - libstdcxx-ng >=12
-  license: BSD-3-Clause
-  license_family: BSD
-  size: 394383
-  timestamp: 1687765514062
-- kind: conda
-  name: libgcc
-  version: 14.1.0
-  build: h77fa898_1
-  build_number: 1
-  subdir: linux-64
-  url: https://conda.anaconda.org/conda-forge/linux-64/libgcc-14.1.0-h77fa898_1.conda
-  sha256: 10fa74b69266a2be7b96db881e18fa62cfa03082b65231e8d652e897c4b335a3
-  md5: 002ef4463dd1e2b44a94a4ace468f5d2
-  depends:
-  - _libgcc_mutex 0.1 conda_forge
-  - _openmp_mutex >=4.5
-  constrains:
-  - libgomp 14.1.0 h77fa898_1
-  - libgcc-ng ==14.1.0=*_1
-  license: GPL-3.0-only WITH GCC-exception-3.1
-  license_family: GPL
-  size: 846380
-  timestamp: 1724801836552
-- kind: conda
-  name: libgcc-devel_linux-64
-  version: 11.4.0
-  build: h8f596e0_113
-  build_number: 113
-  subdir: noarch
-  noarch: generic
-  url: https://conda.anaconda.org/conda-forge/noarch/libgcc-devel_linux-64-11.4.0-h8f596e0_113.conda
-  sha256: 45fe07880c918baf144377abfdaf0d409617ceb1758f28e022e83615b9f67863
-  md5: 8e1973bcc2b9405e80ed0cad1b9bd607
-  depends:
-  - __unix
-  license: GPL-3.0-only WITH GCC-exception-3.1
-  license_family: GPL
-  size: 2451854
-  timestamp: 1719178273409
-- kind: conda
-  name: libgcc-ng
-  version: 14.1.0
-  build: h69a702a_1
-  build_number: 1
-  subdir: linux-64
-  url: https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-14.1.0-h69a702a_1.conda
-  sha256: b91f7021e14c3d5c840fbf0dc75370d6e1f7c7ff4482220940eaafb9c64613b7
-  md5: 1efc0ad219877a73ef977af7dbb51f17
-  depends:
-  - libgcc 14.1.0 h77fa898_1
-  license: GPL-3.0-only WITH GCC-exception-3.1
-  license_family: GPL
-  size: 52170
-  timestamp: 1724801842101
-- kind: conda
-  name: libgcrypt
-  version: 1.11.0
-  build: h4ab18f5_1
-  build_number: 1
-  subdir: linux-64
-  url: https://conda.anaconda.org/conda-forge/linux-64/libgcrypt-1.11.0-h4ab18f5_1.conda
-  sha256: 9e97e4a753d2ee238cfc7375f0882830f0d8c1667431bc9d070a0f6718355570
-  md5: 14858a47d4cc995892e79f2b340682d7
-  depends:
-  - libgcc-ng >=12
-  - libgpg-error >=1.50,<2.0a0
-  license: LGPL-2.1-or-later AND GPL-2.0-or-later
-  license_family: GPL
-  size: 684307
-  timestamp: 1721392291497
-- kind: conda
-  name: libgettextpo
-  version: 0.22.5
-  build: he02047a_3
-  build_number: 3
-  subdir: linux-64
-  url: https://conda.anaconda.org/conda-forge/linux-64/libgettextpo-0.22.5-he02047a_3.conda
-  sha256: 7f2d1f4d69973e2c3c3d2b6420d5eb989982baba97d63ab2d7a2b25a92d886b4
-  md5: efab66b82ec976930b96d62a976de8e7
-  depends:
-  - __glibc >=2.17,<3.0.a0
-  - libgcc-ng >=12
-  license: GPL-3.0-or-later
-  license_family: GPL
-  size: 170646
-  timestamp: 1723626019265
-- kind: conda
-  name: libgettextpo-devel
-  version: 0.22.5
-  build: he02047a_3
-  build_number: 3
-  subdir: linux-64
-  url: https://conda.anaconda.org/conda-forge/linux-64/libgettextpo-devel-0.22.5-he02047a_3.conda
-  sha256: 0a66cdd46d1cd5201061252535cd91905b3222328a9294c1a5bcd32e85531545
-  md5: 9aba7960731e6b4547b3a52f812ed801
-  depends:
-  - __glibc >=2.17,<3.0.a0
-  - libgcc-ng >=12
-  - libgettextpo 0.22.5 he02047a_3
-  license: GPL-3.0-or-later
-  license_family: GPL
-  size: 36790
-  timestamp: 1723626032786
-- kind: conda
-  name: libglib
-  version: 2.80.3
-  build: h315aac3_2
-  build_number: 2
-  subdir: linux-64
-  url: https://conda.anaconda.org/conda-forge/linux-64/libglib-2.80.3-h315aac3_2.conda
-  sha256: 7470e664b780b91708bed356cc634874dfc3d6f17cbf884a1d6f5d6d59c09f91
-  md5: b0143a3e98136a680b728fdf9b42a258
-  depends:
-  - __glibc >=2.17,<3.0.a0
-  - libffi >=3.4,<4.0a0
-  - libgcc-ng >=12
-  - libiconv >=1.17,<2.0a0
-  - libzlib >=1.3.1,<2.0a0
-  - pcre2 >=10.44,<10.45.0a0
-  constrains:
-  - glib 2.80.3 *_2
-  license: LGPL-2.1-or-later
-  size: 3922900
-  timestamp: 1723208802469
-- kind: conda
-  name: libgomp
-  version: 14.1.0
-  build: h77fa898_1
-  build_number: 1
-  subdir: linux-64
-  url: https://conda.anaconda.org/conda-forge/linux-64/libgomp-14.1.0-h77fa898_1.conda
-  sha256: c96724c8ae4ee61af7674c5d9e5a3fbcf6cd887a40ad5a52c99aa36f1d4f9680
-  md5: 23c255b008c4f2ae008f81edcabaca89
-  depends:
-  - _libgcc_mutex 0.1 conda_forge
-  license: GPL-3.0-only WITH GCC-exception-3.1
-  license_family: GPL
-  size: 460218
-  timestamp: 1724801743478
-- kind: conda
-  name: libgpg-error
-  version: '1.50'
-  build: h4f305b6_0
-  subdir: linux-64
-  url: https://conda.anaconda.org/conda-forge/linux-64/libgpg-error-1.50-h4f305b6_0.conda
-  sha256: c60969d5c315f33fee90a1f2dd5d169e2834ace5a55f5a6f822aa7485a3a84cc
-  md5: 0d7ff1a8e69565ca3add6925e18e708f
-  depends:
-  - gettext
-  - libasprintf >=0.22.5,<1.0a0
-  - libgcc-ng >=12
-  - libgettextpo >=0.22.5,<1.0a0
-  - libstdcxx-ng >=12
-  license: GPL-2.0-only
-  license_family: GPL
-  size: 273774
-  timestamp: 1719390736440
-- kind: conda
-  name: libiconv
-  version: '1.17'
-  build: hd590300_2
-  build_number: 2
-  subdir: linux-64
-  url: https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.17-hd590300_2.conda
-  sha256: 8ac2f6a9f186e76539439e50505d98581472fedb347a20e7d1f36429849f05c9
-  md5: d66573916ffcf376178462f1b61c941e
-  depends:
-  - libgcc-ng >=12
-  license: LGPL-2.1-only
-  size: 705775
-  timestamp: 1702682170569
-- kind: conda
-  name: libllvm14
-  version: 14.0.6
-  build: hc8e404f_4
-  build_number: 4
-  subdir: osx-64
-  url: https://conda.anaconda.org/conda-forge/osx-64/libllvm14-14.0.6-hc8e404f_4.conda
-  sha256: 0df3902a300cfe092425f86144d5e00ef67be3cd1cc89fd63084d45262a772ad
-  md5: ed06753e2ba7c66ed0ca7f19578fcb68
-  depends:
-  - libcxx >=15
-  - libzlib >=1.2.13,<2.0.0a0
-  license: Apache-2.0 WITH LLVM-exception
-  license_family: Apache
-  size: 22467131
-  timestamp: 1690563140552
-- kind: conda
-  name: libllvm14
-  version: 14.0.6
-  build: hd1a9a77_4
-  build_number: 4
-  subdir: osx-arm64
-  url: https://conda.anaconda.org/conda-forge/osx-arm64/libllvm14-14.0.6-hd1a9a77_4.conda
-  sha256: 6f603914fe8633a615f0d2f1383978eb279eeb552079a78449c9fbb43f22a349
-  md5: 9f3dce5d26ea56a9000cd74c034582bd
-  depends:
-  - libcxx >=15
-  - libzlib >=1.2.13,<2.0.0a0
-  license: Apache-2.0 WITH LLVM-exception
-  license_family: Apache
-  size: 20571387
-  timestamp: 1690559110016
-- kind: conda
-  name: libnghttp2
-  version: 1.58.0
-  build: h47da74e_1
-  build_number: 1
-  subdir: linux-64
-  url: https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.58.0-h47da74e_1.conda
-  sha256: 1910c5306c6aa5bcbd623c3c930c440e9c77a5a019008e1487810e3c1d3716cb
-  md5: 700ac6ea6d53d5510591c4344d5c989a
-  depends:
-  - c-ares >=1.23.0,<2.0a0
-  - libev >=4.33,<4.34.0a0
-  - libev >=4.33,<5.0a0
-  - libgcc-ng >=12
-  - libstdcxx-ng >=12
-  - libzlib >=1.2.13,<2.0.0a0
-  - openssl >=3.2.0,<4.0a0
-  license: MIT
-  license_family: MIT
-  size: 631936
-  timestamp: 1702130036271
-- kind: conda
-  name: libnghttp2
-  version: 1.58.0
-  build: h64cf6d3_1
-  build_number: 1
-  subdir: osx-64
-  url: https://conda.anaconda.org/conda-forge/osx-64/libnghttp2-1.58.0-h64cf6d3_1.conda
-  sha256: 412fd768e787e586602f8e9ea52bf089f3460fc630f6987f0cbd89b70e9a4380
-  md5: faecc55c2a8155d9ff1c0ff9a0fef64f
-  depends:
-  - __osx >=10.9
-  - c-ares >=1.23.0,<2.0a0
-  - libcxx >=16.0.6
-  - libev >=4.33,<4.34.0a0
-  - libev >=4.33,<5.0a0
-  - libzlib >=1.2.13,<2.0.0a0
-  - openssl >=3.2.0,<4.0a0
-  license: MIT
-  license_family: MIT
-  size: 599736
-  timestamp: 1702130398536
-- kind: conda
-  name: libnghttp2
-  version: 1.58.0
-  build: ha4dd798_1
-  build_number: 1
-  subdir: osx-arm64
-  url: https://conda.anaconda.org/conda-forge/osx-arm64/libnghttp2-1.58.0-ha4dd798_1.conda
-  sha256: fc97aaaf0c6d0f508be313d86c2705b490998d382560df24be918b8e977802cd
-  md5: 1813e066bfcef82de579a0be8a766df4
-  depends:
-  - __osx >=10.9
-  - c-ares >=1.23.0,<2.0a0
-  - libcxx >=16.0.6
-  - libev >=4.33,<4.34.0a0
-  - libev >=4.33,<5.0a0
-  - libzlib >=1.2.13,<2.0.0a0
-  - openssl >=3.2.0,<4.0a0
-  license: MIT
-  license_family: MIT
-  size: 565451
-  timestamp: 1702130473930
-- kind: conda
-  name: libogg
-  version: 1.3.5
-  build: h4ab18f5_0
-  subdir: linux-64
-  url: https://conda.anaconda.org/conda-forge/linux-64/libogg-1.3.5-h4ab18f5_0.conda
-  sha256: 5eda3fe92b99b25dd4737226a9485078ab405672d9f621be75edcb68f1e9026d
-  md5: 601bfb4b3c6f0b844443bb81a56651e0
-  depends:
-  - libgcc-ng >=12
-  license: BSD-3-Clause
-  license_family: BSD
-  size: 205914
-  timestamp: 1719301575771
-- kind: conda
-  name: libopus
-  version: 1.3.1
-  build: h7f98852_1
-  build_number: 1
-  subdir: linux-64
-  url: https://conda.anaconda.org/conda-forge/linux-64/libopus-1.3.1-h7f98852_1.tar.bz2
-  sha256: 0e1c2740ebd1c93226dc5387461bbcf8142c518f2092f3ea7551f77755decc8f
-  md5: 15345e56d527b330e1cacbdf58676e8f
-  depends:
-  - libgcc-ng >=9.3.0
-  license: BSD-3-Clause
-  license_family: BSD
-  size: 260658
-  timestamp: 1606823578035
-- kind: conda
-  name: libsanitizer
-  version: 11.4.0
-  build: h5763a12_13
-  build_number: 13
-  subdir: linux-64
-  url: https://conda.anaconda.org/conda-forge/linux-64/libsanitizer-11.4.0-h5763a12_13.conda
-  sha256: c565078a650e3bab45b63d43c34cea10b1169560547d032a04297560849c362e
-  md5: 2c5415ef0640355a2154ebdd8494dc05
-  depends:
-  - libgcc-ng >=11.4.0
-  - libstdcxx-ng >=11.4.0
-  license: GPL-3.0-only WITH GCC-exception-3.1
-  license_family: GPL
-  size: 3694079
-  timestamp: 1719178384870
-- kind: conda
-  name: libsndfile
-  version: 1.2.2
-  build: hc60ed4a_1
-  build_number: 1
-  subdir: linux-64
-  url: https://conda.anaconda.org/conda-forge/linux-64/libsndfile-1.2.2-hc60ed4a_1.conda
-  sha256: f709cbede3d4f3aee4e2f8d60bd9e256057f410bd60b8964cb8cf82ec1457573
-  md5: ef1910918dd895516a769ed36b5b3a4e
-  depends:
-  - lame >=3.100,<3.101.0a0
-  - libflac >=1.4.3,<1.5.0a0
-  - libgcc-ng >=12
-  - libogg >=1.3.4,<1.4.0a0
-  - libopus >=1.3.1,<2.0a0
-  - libstdcxx-ng >=12
-  - libvorbis >=1.3.7,<1.4.0a0
-  - mpg123 >=1.32.1,<1.33.0a0
-  license: LGPL-2.1-or-later
-  license_family: LGPL
-  size: 354372
-  timestamp: 1695747735668
-- kind: conda
-  name: libssh2
-  version: 1.11.0
-  build: h0841786_0
-  subdir: linux-64
-  url: https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.0-h0841786_0.conda
-  sha256: 50e47fd9c4f7bf841a11647ae7486f65220cfc988ec422a4475fe8d5a823824d
-  md5: 1f5a58e686b13bcfde88b93f547d23fe
-  depends:
-  - libgcc-ng >=12
-  - libzlib >=1.2.13,<2.0.0a0
-  - openssl >=3.1.1,<4.0a0
-  license: BSD-3-Clause
-  license_family: BSD
-  size: 271133
-  timestamp: 1685837707056
-- kind: conda
-  name: libssh2
-  version: 1.11.0
-  build: h7a5bd25_0
-  subdir: osx-arm64
-  url: https://conda.anaconda.org/conda-forge/osx-arm64/libssh2-1.11.0-h7a5bd25_0.conda
-  sha256: bb57d0c53289721fff1eeb3103a1c6a988178e88d8a8f4345b0b91a35f0e0015
-  md5: 029f7dc931a3b626b94823bc77830b01
-  depends:
-  - libzlib >=1.2.13,<2.0.0a0
-  - openssl >=3.1.1,<4.0a0
-  license: BSD-3-Clause
-  license_family: BSD
-  size: 255610
-  timestamp: 1685837894256
-- kind: conda
-  name: libssh2
-  version: 1.11.0
-  build: hd019ec5_0
-  subdir: osx-64
-  url: https://conda.anaconda.org/conda-forge/osx-64/libssh2-1.11.0-hd019ec5_0.conda
-  sha256: f3886763b88f4b24265db6036535ef77b7b77ce91b1cbe588c0fbdd861eec515
-  md5: ca3a72efba692c59a90d4b9fc0dfe774
-  depends:
-  - libzlib >=1.2.13,<2.0.0a0
-  - openssl >=3.1.1,<4.0a0
-  license: BSD-3-Clause
-  license_family: BSD
-  size: 259556
-  timestamp: 1685837820566
-- kind: conda
-  name: libstdcxx
-  version: 14.1.0
-  build: hc0a3c3a_1
-  build_number: 1
-  subdir: linux-64
-  url: https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-14.1.0-hc0a3c3a_1.conda
-  sha256: 44decb3d23abacf1c6dd59f3c152a7101b7ca565b4ef8872804ceaedcc53a9cd
-  md5: 9dbb9699ea467983ba8a4ba89b08b066
-  depends:
-  - libgcc 14.1.0 h77fa898_1
-  license: GPL-3.0-only WITH GCC-exception-3.1
-  license_family: GPL
-  size: 3892781
-  timestamp: 1724801863728
-- kind: conda
-  name: libstdcxx-devel_linux-64
-  version: 11.4.0
-  build: h8f596e0_113
-  build_number: 113
-  subdir: noarch
-  noarch: generic
-  url: https://conda.anaconda.org/conda-forge/noarch/libstdcxx-devel_linux-64-11.4.0-h8f596e0_113.conda
-  sha256: 3bb19ec8f159db05b94e15905eeb7e9be6400567a210213480cd33cb59afb69a
-  md5: 511cb3d827a35d3030d42679d286a7cd
-  depends:
-  - __unix
-  license: GPL-3.0-only WITH GCC-exception-3.1
-  license_family: GPL
-  size: 11655670
-  timestamp: 1719178312465
-- kind: conda
-  name: libstdcxx-ng
-  version: 14.1.0
-  build: h4852527_1
-  build_number: 1
-  subdir: linux-64
-  url: https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-14.1.0-h4852527_1.conda
-  sha256: a2dc44f97290740cc187bfe94ce543e6eb3c2ea8964d99f189a1d8c97b419b8c
-  md5: bd2598399a70bb86d8218e95548d735e
-  depends:
-  - libstdcxx 14.1.0 hc0a3c3a_1
-  license: GPL-3.0-only WITH GCC-exception-3.1
-  license_family: GPL
-  size: 52219
-  timestamp: 1724801897766
-- kind: conda
-  name: libsystemd0
-  version: '256.5'
-  build: hb6d7363_0
-  subdir: linux-64
-  url: https://conda.anaconda.org/conda-forge/linux-64/libsystemd0-256.5-hb6d7363_0.conda
-  sha256: 4f4f8c1d8351d0f0936b86ebb6b4965e3b7f0e8f49c82919edf96222a65801f7
-  md5: 3b3912077a5515b2a39bda92008bc2c3
-  depends:
-  - __glibc >=2.17,<3.0.a0
-  - libcap >=2.69,<2.70.0a0
-  - libgcc
-  - libgcc-ng >=12
-  - libgcrypt >=1.11.0,<2.0a0
-  - lz4-c >=1.9.3,<1.10.0a0
-  - xz >=5.2.6,<6.0a0
-  - zstd >=1.5.6,<1.6.0a0
-  license: LGPL-2.1-or-later
-  size: 411284
-  timestamp: 1724880354389
-- kind: conda
-  name: libuv
-  version: 1.48.0
-  build: h67532ce_0
-  subdir: osx-64
-  url: https://conda.anaconda.org/conda-forge/osx-64/libuv-1.48.0-h67532ce_0.conda
-  sha256: fb87f7bfd464a3a841d23f418c86a206818da0c4346984392071d9342c9ea367
-  md5: c8e7344c74f0d86584f7ecdc9f25c198
-  license: MIT
-  license_family: MIT
-  size: 407040
-  timestamp: 1709913680478
-- kind: conda
-  name: libuv
-  version: 1.48.0
-  build: h93a5062_0
-  subdir: osx-arm64
-  url: https://conda.anaconda.org/conda-forge/osx-arm64/libuv-1.48.0-h93a5062_0.conda
-  sha256: 60bed2a7a85096387ab0381cbc32ea2da7f8dd99bd90e440983019c0cdd96ad1
-  md5: abfd49e80f13453b62a56be226120ea8
-  license: MIT
-  license_family: MIT
-  size: 405988
-  timestamp: 1709913494015
-- kind: conda
-  name: libuv
-  version: 1.48.0
-  build: hd590300_0
-  subdir: linux-64
-  url: https://conda.anaconda.org/conda-forge/linux-64/libuv-1.48.0-hd590300_0.conda
-  sha256: b7c0e8a0c93c2621be7645b37123d4e8d27e8a974da26a3fba47a9c37711aa7f
-  md5: 7e8b914b1062dd4386e3de4d82a3ead6
-  depends:
-  - libgcc-ng >=12
-  license: MIT
-  license_family: MIT
-  size: 899979
-  timestamp: 1709913354710
-- kind: conda
-  name: libvorbis
-  version: 1.3.7
-  build: h9c3ff4c_0
-  subdir: linux-64
-  url: https://conda.anaconda.org/conda-forge/linux-64/libvorbis-1.3.7-h9c3ff4c_0.tar.bz2
-  sha256: 53080d72388a57b3c31ad5805c93a7328e46ff22fab7c44ad2a86d712740af33
-  md5: 309dec04b70a3cc0f1e84a4013683bc0
-  depends:
-  - libgcc-ng >=9.3.0
-  - libogg >=1.3.4,<1.4.0a0
-  - libstdcxx-ng >=9.3.0
-  license: BSD-3-Clause
-  license_family: BSD
-  size: 286280
-  timestamp: 1610609811627
-- kind: conda
-  name: libxcb
-  version: '1.16'
-  build: hb9d3cd8_1
-  build_number: 1
-  subdir: linux-64
-  url: https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.16-hb9d3cd8_1.conda
-  sha256: 33aa5fc997468b07ab3020b142eacc5479e4e2c2169f467b20ab220f33dd08de
-  md5: 3601598f0db0470af28985e3e7ad0158
-  depends:
-  - __glibc >=2.17,<3.0.a0
-  - libgcc-ng >=13
-  - pthread-stubs
-  - xorg-libxau >=1.0.11,<2.0a0
-  - xorg-libxdmcp
-  license: MIT
-  license_family: MIT
-  size: 395570
-  timestamp: 1724419104778
-- kind: conda
-  name: libzlib
-  version: 1.3.1
-  build: h4ab18f5_1
-  build_number: 1
-  subdir: linux-64
-  url: https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.3.1-h4ab18f5_1.conda
-  sha256: adf6096f98b537a11ae3729eaa642b0811478f0ea0402ca67b5108fe2cb0010d
-  md5: 57d7dc60e9325e3de37ff8dffd18e814
-  depends:
-  - libgcc-ng >=12
-  constrains:
-  - zlib 1.3.1 *_1
-  license: Zlib
-  license_family: Other
-  size: 61574
-  timestamp: 1716874187109
-- kind: conda
-  name: libzlib
-  version: 1.3.1
-  build: h87427d6_1
-  build_number: 1
-  subdir: osx-64
-  url: https://conda.anaconda.org/conda-forge/osx-64/libzlib-1.3.1-h87427d6_1.conda
-  sha256: 80a62db652b1da0ccc100812a1d86e94f75028968991bfb17f9536f3aa72d91d
-  md5: b7575b5aa92108dcc9aaab0f05f2dbce
-  depends:
-  - __osx >=10.13
-  constrains:
-  - zlib 1.3.1 *_1
-  license: Zlib
-  license_family: Other
-  size: 57372
-  timestamp: 1716874211519
-- kind: conda
-  name: libzlib
-  version: 1.3.1
-  build: hfb2fe0b_1
-  build_number: 1
-  subdir: osx-arm64
-  url: https://conda.anaconda.org/conda-forge/osx-arm64/libzlib-1.3.1-hfb2fe0b_1.conda
-  sha256: c34365dd37b0eab27b9693af32a1f7f284955517c2cc91f1b88a7ef4738ff03e
-  md5: 636077128927cf79fd933276dc3aed47
-  depends:
-  - __osx >=11.0
-  constrains:
-  - zlib 1.3.1 *_1
-  license: Zlib
-  license_family: Other
-  size: 46921
-  timestamp: 1716874262512
-- kind: conda
-  name: llvm-openmp
-  version: 18.1.8
-  build: h15ab845_1
-  build_number: 1
-  subdir: osx-64
-  url: https://conda.anaconda.org/conda-forge/osx-64/llvm-openmp-18.1.8-h15ab845_1.conda
-  sha256: 06a245abb6e6d8d6662a35ad162eacb39f431349edf7cea9b1ff73b2da213c58
-  md5: ad0afa524866cc1c08b436865d0ae484
-  depends:
-  - __osx >=10.13
-  constrains:
-  - openmp 18.1.8|18.1.8.*
-  license: Apache-2.0 WITH LLVM-exception
-  license_family: APACHE
-  size: 300358
-  timestamp: 1723605369115
-- kind: conda
-  name: llvm-openmp
-  version: 18.1.8
-  build: hde57baf_1
-  build_number: 1
-  subdir: osx-arm64
-  url: https://conda.anaconda.org/conda-forge/osx-arm64/llvm-openmp-18.1.8-hde57baf_1.conda
-  sha256: 7a76e2932ac77e6314bfa1c4ff83f617c8260313bfed1b8401b508ed3e9d70ba
-  md5: fe89757e3cd14bb1c6ebd68dac591363
-  depends:
-  - __osx >=11.0
-  constrains:
-  - openmp 18.1.8|18.1.8.*
-  license: Apache-2.0 WITH LLVM-exception
-  license_family: APACHE
-  size: 276263
-  timestamp: 1723605341828
-- kind: conda
-  name: llvm-tools
-  version: 14.0.6
-  build: hc8e404f_4
-  build_number: 4
-  subdir: osx-64
-  url: https://conda.anaconda.org/conda-forge/osx-64/llvm-tools-14.0.6-hc8e404f_4.conda
-  sha256: bbd455bb43d23a2daaa20d6805b4764b97c7a79ca8dfa80dc95c3a0b2155cbbc
-  md5: a55c7b960756f104bcbf5bfcd5c8890a
-  depends:
-  - libllvm14 14.0.6 hc8e404f_4
-  - libzlib >=1.2.13,<2.0.0a0
-  constrains:
-  - llvmdev 14.0.6
-  - clang 14.0.6.*
-  - clang-tools 14.0.6.*
-  - llvm 14.0.6.*
-  license: Apache-2.0 WITH LLVM-exception
-  license_family: Apache
-  size: 11207189
-  timestamp: 1690563328190
-- kind: conda
-  name: llvm-tools
-  version: 14.0.6
-  build: hd1a9a77_4
-  build_number: 4
-  subdir: osx-arm64
-  url: https://conda.anaconda.org/conda-forge/osx-arm64/llvm-tools-14.0.6-hd1a9a77_4.conda
-  sha256: a15fd9113f3ef412c4eb99363b517d1347ae771271571565ac573e3959151749
-  md5: fd0a3b0a22ba6f5ace86ce5b5e26c4c5
-  depends:
-  - libllvm14 14.0.6 hd1a9a77_4
-  - libzlib >=1.2.13,<2.0.0a0
-  constrains:
-  - llvmdev 14.0.6
-  - clang 14.0.6.*
-  - clang-tools 14.0.6.*
-  - llvm 14.0.6.*
-  license: Apache-2.0 WITH LLVM-exception
-  license_family: Apache
-  size: 9776772
-  timestamp: 1690559269462
-- kind: conda
-  name: lz4-c
-  version: 1.9.4
-  build: hcb278e6_0
-  subdir: linux-64
-  url: https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.9.4-hcb278e6_0.conda
-  sha256: 1b4c105a887f9b2041219d57036f72c4739ab9e9fe5a1486f094e58c76b31f5f
-  md5: 318b08df404f9c9be5712aaa5a6f0bb0
-  depends:
-  - libgcc-ng >=12
-  - libstdcxx-ng >=12
-  license: BSD-2-Clause
-  license_family: BSD
-  size: 143402
-  timestamp: 1674727076728
-- kind: conda
-  name: m2w64-gcc-libgfortran
-  version: 5.3.0
-  build: '6'
-  build_number: 6
-  subdir: win-64
-  url: https://conda.anaconda.org/conda-forge/win-64/m2w64-gcc-libgfortran-5.3.0-6.tar.bz2
-  sha256: 9de95a7996d5366ae0808eef2acbc63f9b11b874aa42375f55379e6715845dc6
-  md5: 066552ac6b907ec6d72c0ddab29050dc
-  depends:
-  - m2w64-gcc-libs-core
-  - msys2-conda-epoch ==20160418
-  license: GPL, LGPL, FDL, custom
-  size: 350687
-  timestamp: 1608163451316
-- kind: conda
-  name: m2w64-gcc-libs
-  version: 5.3.0
-  build: '7'
-  build_number: 7
-  subdir: win-64
-  url: https://conda.anaconda.org/conda-forge/win-64/m2w64-gcc-libs-5.3.0-7.tar.bz2
-  sha256: 3bd1ab02b7c89a5b153a17be03b36d833f1517ff2a6a77ead7c4a808b88196aa
-  md5: fe759119b8b3bfa720b8762c6fdc35de
-  depends:
-  - m2w64-gcc-libgfortran
-  - m2w64-gcc-libs-core
-  - m2w64-gmp
-  - m2w64-libwinpthread-git
-  - msys2-conda-epoch ==20160418
-  license: GPL3+, partial:GCCRLE, partial:LGPL2+
-  size: 532390
-  timestamp: 1608163512830
-- kind: conda
-  name: m2w64-gcc-libs-core
-  version: 5.3.0
-  build: '7'
-  build_number: 7
-  subdir: win-64
-  url: https://conda.anaconda.org/conda-forge/win-64/m2w64-gcc-libs-core-5.3.0-7.tar.bz2
-  sha256: 58afdfe859ed2e9a9b1cc06bc408720cb2c3a6a132e59d4805b090d7574f4ee0
-  md5: 4289d80fb4d272f1f3b56cfe87ac90bd
-  depends:
-  - m2w64-gmp
-  - m2w64-libwinpthread-git
-  - msys2-conda-epoch ==20160418
-  license: GPL3+, partial:GCCRLE, partial:LGPL2+
-  size: 219240
-  timestamp: 1608163481341
-- kind: conda
-  name: m2w64-gmp
-  version: 6.1.0
-  build: '2'
-  build_number: 2
-  subdir: win-64
-  url: https://conda.anaconda.org/conda-forge/win-64/m2w64-gmp-6.1.0-2.tar.bz2
-  sha256: 7e3cd95f554660de45f8323fca359e904e8d203efaf07a4d311e46d611481ed1
-  md5: 53a1c73e1e3d185516d7e3af177596d9
-  depends:
-  - msys2-conda-epoch ==20160418
-  license: LGPL3
-  size: 743501
-  timestamp: 1608163782057
-- kind: conda
-  name: m2w64-libwinpthread-git
-  version: 5.0.0.4634.697f757
-  build: '2'
-  build_number: 2
-  subdir: win-64
-  url: https://conda.anaconda.org/conda-forge/win-64/m2w64-libwinpthread-git-5.0.0.4634.697f757-2.tar.bz2
-  sha256: f63a09b2cae7defae0480f1740015d6235f1861afa6fe2e2d3e10bd0d1314ee0
-  md5: 774130a326dee16f1ceb05cc687ee4f0
-  depends:
-  - msys2-conda-epoch ==20160418
-  license: MIT, BSD
-  size: 31928
-  timestamp: 1608166099896
-- kind: conda
-  name: make
-  version: 4.4.1
-  build: h00291cd_0
-  subdir: osx-64
-  url: https://conda.anaconda.org/conda-forge/osx-64/make-4.4.1-h00291cd_0.conda
-  sha256: 32df3c8ae3a0e7d46fb7ece7b8024ec8bb1d1d78eee589802ef0ea85ffac57cd
-  md5: 3d166e7626d15b97713cf576af6662a3
-  depends:
-  - __osx >=10.13
-  license: GPL-3.0-or-later
-  license_family: GPL
-  size: 278710
-  timestamp: 1724373842343
-- kind: conda
-  name: make
-  version: 4.4.1
-  build: h013a479_0
-  subdir: win-64
-  url: https://conda.anaconda.org/conda-forge/win-64/make-4.4.1-h013a479_0.conda
-  sha256: cbab8a92a78d5ece45ba9e7c6d6711d416ab0c880a2e91cd678b9e8c2fb98239
-  md5: 6a02708c5d31013b846cffa5facaf0b5
-  depends:
-  - m2w64-gcc-libs
-  - m2w64-gcc-libs-core
-  license: GPL-3.0-or-later
-  license_family: GPL
-  size: 1275152
-  timestamp: 1724373968396
-- kind: conda
-  name: make
-  version: 4.4.1
-  build: hb9d3cd8_0
-  subdir: linux-64
-  url: https://conda.anaconda.org/conda-forge/linux-64/make-4.4.1-hb9d3cd8_0.conda
-  sha256: 1c6bac75dff518720d86d4535d8eb10647037925a2662d6cf8009e7252966cb2
-  md5: 0e5a55445a0a4d12a50945eb11da90d3
-  depends:
-  - __glibc >=2.17,<3.0.a0
-  - libgcc-ng >=13
-  license: GPL-3.0-or-later
-  license_family: GPL
-  size: 516062
-  timestamp: 1724373778473
-- kind: conda
-  name: make
-  version: 4.4.1
-  build: hc9fafa5_0
-  subdir: osx-arm64
-  url: https://conda.anaconda.org/conda-forge/osx-arm64/make-4.4.1-hc9fafa5_0.conda
-  sha256: cfb20b4832af19e1816ed49f692e3f84e0dea42d995cc29a61ce4f5091dd24d8
-  md5: e944c0125471fab3b75253aa13551fbe
-  depends:
-  - __osx >=11.0
-  license: GPL-3.0-or-later
-  license_family: GPL
-  size: 273728
-  timestamp: 1724373828543
-- kind: conda
-  name: mpg123
-  version: 1.32.6
-  build: h59595ed_0
-  subdir: linux-64
-  url: https://conda.anaconda.org/conda-forge/linux-64/mpg123-1.32.6-h59595ed_0.conda
-  sha256: 8895a5ce5122a3b8f59afcba4b032f198e8a690a0efc95ef61f2135357ef0d72
-  md5: 9160cdeb523a1b20cf8d2a0bf821f45d
-  depends:
-  - libgcc-ng >=12
-  - libstdcxx-ng >=12
-  license: LGPL-2.1-only
-  license_family: LGPL
-  size: 491811
-  timestamp: 1712327176955
-- kind: conda
-  name: msys2-conda-epoch
-  version: '20160418'
-  build: '1'
-  build_number: 1
-  subdir: win-64
-  url: https://conda.anaconda.org/conda-forge/win-64/msys2-conda-epoch-20160418-1.tar.bz2
-  sha256: 99358d58d778abee4dca82ad29fb58058571f19b0f86138363c260049d4ac7f1
-  md5: b0309b72560df66f71a9d5e34a5efdfa
-  size: 3227
-  timestamp: 1608166968312
-- kind: conda
-  name: ncurses
-  version: '6.5'
-  build: h7bae524_1
-  build_number: 1
-  subdir: osx-arm64
-  url: https://conda.anaconda.org/conda-forge/osx-arm64/ncurses-6.5-h7bae524_1.conda
-  sha256: 27d0b9ff78ad46e1f3a6c96c479ab44beda5f96def88e2fe626e0a49429d8afc
-  md5: cb2b0ea909b97b3d70cd3921d1445e1a
-  depends:
-  - __osx >=11.0
-  license: X11 AND BSD-3-Clause
-  size: 802321
-  timestamp: 1724658775723
-- kind: conda
-  name: ncurses
-  version: '6.5'
-  build: he02047a_1
-  build_number: 1
-  subdir: linux-64
-  url: https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.5-he02047a_1.conda
-  sha256: 6a1d5d8634c1a07913f1c525db6455918cbc589d745fac46d9d6e30340c8731a
-  md5: 70caf8bb6cf39a0b6b7efc885f51c0fe
-  depends:
-  - __glibc >=2.17,<3.0.a0
-  - libgcc-ng >=12
-  license: X11 AND BSD-3-Clause
-  size: 889086
-  timestamp: 1724658547447
-- kind: conda
-  name: ncurses
-  version: '6.5'
-  build: hf036a51_1
-  build_number: 1
-  subdir: osx-64
-  url: https://conda.anaconda.org/conda-forge/osx-64/ncurses-6.5-hf036a51_1.conda
-  sha256: b0b3180039ef19502525a2abd5833c00f9624af830fd391f851934d57bffb9af
-  md5: e102bbf8a6ceeaf429deab8032fc8977
-  depends:
-  - __osx >=10.13
-  license: X11 AND BSD-3-Clause
-  size: 822066
-  timestamp: 1724658603042
-- kind: conda
-  name: ninja
-  version: 1.11.1
-  build: h91493d7_0
-  subdir: win-64
-  url: https://conda.anaconda.org/conda-forge/win-64/ninja-1.11.1-h91493d7_0.conda
-  sha256: 0ffb1912768af8354a930f482368ef170bf3d8217db328dfea1c8b09772c8c71
-  md5: 44a99ef26178ea98626ff8e027702795
-  depends:
-  - ucrt >=10.0.20348.0
-  - vc >=14.2,<15
-  - vs2015_runtime >=14.29.30139
-  license: Apache-2.0
-  license_family: Apache
-  size: 279200
-  timestamp: 1676838681615
-- kind: conda
-  name: ninja
-  version: 1.11.1
-  build: h924138e_0
-  subdir: linux-64
-  url: https://conda.anaconda.org/conda-forge/linux-64/ninja-1.11.1-h924138e_0.conda
-  sha256: b555247ac8859b4ff311e3d708a0640f1bfe9fae7125c485b444072474a84c41
-  md5: 73a4953a2d9c115bdc10ff30a52f675f
-  depends:
-  - libgcc-ng >=12
-  - libstdcxx-ng >=12
-  license: Apache-2.0
-  license_family: Apache
-  size: 2251263
-  timestamp: 1676837602636
-- kind: conda
-  name: ninja
-  version: 1.11.1
-  build: hb8565cd_0
-  subdir: osx-64
-  url: https://conda.anaconda.org/conda-forge/osx-64/ninja-1.11.1-hb8565cd_0.conda
-  sha256: 6f738d9a26fa275317b95b2b96832daab9059ef64af9a338f904a3cb684ae426
-  md5: 49ad513efe39447aa51affd47e3aa68f
-  depends:
-  - libcxx >=14.0.6
-  license: Apache-2.0
-  license_family: Apache
-  size: 121284
-  timestamp: 1676837793132
-- kind: conda
-  name: ninja
-  version: 1.11.1
-  build: hffc8910_0
-  subdir: osx-arm64
-  url: https://conda.anaconda.org/conda-forge/osx-arm64/ninja-1.11.1-hffc8910_0.conda
-  sha256: a594e90b0ed8202c280fff4a008f6a355d0db54a62b17067dc4a950370ddffc0
-  md5: fdecec4002f41cf6ea1eea5b52947ee0
-  depends:
-  - libcxx >=14.0.6
-  license: Apache-2.0
-  license_family: Apache
-  size: 107047
-  timestamp: 1676837935565
-- kind: conda
-  name: openssl
-  version: 3.3.1
-  build: h8359307_3
-  build_number: 3
-  subdir: osx-arm64
-  url: https://conda.anaconda.org/conda-forge/osx-arm64/openssl-3.3.1-h8359307_3.conda
-  sha256: 9dd1ee7a8c21ff4fcbb98e9d0be0e83e5daf8a555c73589ad9e3046966b72e5e
-  md5: 644904d696d83c0ac78d594e0cf09f66
-  depends:
-  - __osx >=11.0
-  - ca-certificates
-  license: Apache-2.0
-  license_family: Apache
-  size: 2888820
-  timestamp: 1724402552318
-- kind: conda
-  name: openssl
-  version: 3.3.1
-  build: hb9d3cd8_3
-  build_number: 3
-  subdir: linux-64
-  url: https://conda.anaconda.org/conda-forge/linux-64/openssl-3.3.1-hb9d3cd8_3.conda
-  sha256: 9e27441b273a7cf9071f6e88ba9ad565d926d8083b154c64a74b99fba167b137
-  md5: 6c566a46baae794daf34775d41eb180a
-  depends:
-  - __glibc >=2.17,<3.0.a0
-  - ca-certificates
-  - libgcc-ng >=13
-  license: Apache-2.0
-  license_family: Apache
-  size: 2892042
-  timestamp: 1724402701933
-- kind: conda
-  name: openssl
-  version: 3.3.1
-  build: hd23fc13_3
-  build_number: 3
-  subdir: osx-64
-  url: https://conda.anaconda.org/conda-forge/osx-64/openssl-3.3.1-hd23fc13_3.conda
-  sha256: 63921822fbb66337e0fd50b2a07412583fbe7783bc92c663bdf93c9a09026fdc
-  md5: ad8c8c9556a701817bd1aca75a302e96
-  depends:
-  - __osx >=10.13
-  - ca-certificates
-  license: Apache-2.0
-  license_family: Apache
-  size: 2549881
-  timestamp: 1724403015051
-- kind: conda
-  name: pcre2
-  version: '10.44'
-  build: hba22ea6_2
-  build_number: 2
-  subdir: linux-64
-  url: https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.44-hba22ea6_2.conda
-  sha256: 1087716b399dab91cc9511d6499036ccdc53eb29a288bebcb19cf465c51d7c0d
-  md5: df359c09c41cd186fffb93a2d87aa6f5
-  depends:
-  - __glibc >=2.17,<3.0.a0
-  - bzip2 >=1.0.8,<2.0a0
-  - libgcc-ng >=12
-  - libzlib >=1.3.1,<2.0a0
-  license: BSD-3-Clause
-  license_family: BSD
-  size: 952308
-  timestamp: 1723488734144
-- kind: conda
-  name: pthread-stubs
-  version: '0.4'
-  build: h36c2ea0_1001
-  build_number: 1001
-  subdir: linux-64
-  url: https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-h36c2ea0_1001.tar.bz2
-  sha256: 67c84822f87b641d89df09758da498b2d4558d47b920fd1d3fe6d3a871e000ff
-  md5: 22dad4df6e8630e8dff2428f6f6a7036
-  depends:
-  - libgcc-ng >=7.5.0
-  license: MIT
-  license_family: MIT
-  size: 5625
-  timestamp: 1606147468727
-- kind: conda
-  name: pulseaudio-client
-  version: '16.1'
-  build: hb77b528_5
-  build_number: 5
-  subdir: linux-64
-  url: https://conda.anaconda.org/conda-forge/linux-64/pulseaudio-client-16.1-hb77b528_5.conda
-  sha256: 9981c70893d95c8cac02e7edd1a9af87f2c8745b772d529f08b7f9dafbe98606
-  md5: ac902ff3c1c6d750dd0dfc93a974ab74
-  depends:
-  - dbus >=1.13.6,<2.0a0
-  - libgcc-ng >=12
-  - libglib >=2.76.4,<3.0a0
-  - libsndfile >=1.2.2,<1.3.0a0
-  - libsystemd0 >=254
-  constrains:
-  - pulseaudio 16.1 *_5
-  license: LGPL-2.1-or-later
-  license_family: LGPL
-  size: 754844
-  timestamp: 1693928953742
-- kind: conda
-  name: rhash
-  version: 1.4.3
-  build: h0dc2134_2
-  build_number: 2
-  subdir: osx-64
-  url: https://conda.anaconda.org/conda-forge/osx-64/rhash-1.4.3-h0dc2134_2.conda
-  sha256: 33af1f1ca0fcbda09a52604ff195195722cf9e26ffff4ed37ba761a890264b5c
-  md5: 2769cf2da9a1502417cb839b693e3006
-  license: MIT
-  license_family: MIT
-  size: 176493
-  timestamp: 1693427666172
-- kind: conda
-  name: rhash
-  version: 1.4.3
-  build: hb547adb_2
-  build_number: 2
-  subdir: osx-arm64
-  url: https://conda.anaconda.org/conda-forge/osx-arm64/rhash-1.4.3-hb547adb_2.conda
-  sha256: f20c9765768d0c61bbbb462dc83b55372328498a746f60e1e93c985e3436ef73
-  md5: 24308c7e0949c572688900b8b2f2a3ba
-  license: MIT
-  license_family: MIT
-  size: 176444
-  timestamp: 1693427792263
-- kind: conda
-  name: rhash
-  version: 1.4.3
-  build: hd590300_2
-  build_number: 2
-  subdir: linux-64
-  url: https://conda.anaconda.org/conda-forge/linux-64/rhash-1.4.3-hd590300_2.conda
-  sha256: 475f68cac8981ff2b10c56e53c2f376fc3c805fbc7ec30d22f870cd88f1479ba
-  md5: 4cabe3858a856bff08d9a0992e413084
-  depends:
-  - libgcc-ng >=12
-  license: MIT
-  license_family: MIT
-  size: 184509
-  timestamp: 1693427593121
-- kind: conda
-  name: sdl2
-  version: 2.26.5
-  build: h63175ca_0
-  subdir: win-64
-  url: https://conda.anaconda.org/conda-forge/win-64/sdl2-2.26.5-h63175ca_0.conda
-  sha256: 4ebc9b29b04b2087dfff77ecbe6d7fc7e95c7223ed0447966f3a0aa1f007e8af
-  md5: 620fcad9da41516d395411099908bb3b
-  depends:
-  - ucrt >=10.0.20348.0
-  - vc >=14.2,<15
-  - vs2015_runtime >=14.29.30139
-  license: Zlib
-  size: 2211251
-  timestamp: 1680736427464
-- kind: conda
-  name: sdl2
-  version: 2.26.5
-  build: h949db6a_0
-  subdir: linux-64
-  url: https://conda.anaconda.org/conda-forge/linux-64/sdl2-2.26.5-h949db6a_0.conda
-  sha256: d61329f63735a871b620316b7115358a693bc856709f34cf8f9b241b37261ca4
-  md5: 062142393e7fe4bc8e20a4eea9b639e6
-  depends:
-  - libgcc-ng >=12
-  - libstdcxx-ng >=12
-  - pulseaudio-client >=16.1,<16.2.0a0
-  - xorg-libx11 >=1.8.4,<2.0a0
-  - xorg-libxext >=1.3.4,<2.0a0
-  license: Zlib
-  size: 1334948
-  timestamp: 1680735969345
-- kind: conda
-  name: sdl2
-  version: 2.26.5
-  build: hb7217d7_0
-  subdir: osx-arm64
-  url: https://conda.anaconda.org/conda-forge/osx-arm64/sdl2-2.26.5-hb7217d7_0.conda
-  sha256: f71c5bcc3f462e2bf8df9f7e9aa4ee58a82f631a230eb33d47618f19153cb8d4
-  md5: e2c2df7276617a8afff97b81f3ac847f
-  depends:
-  - libcxx >=14.0.6
-  license: Zlib
-  size: 1219085
-  timestamp: 1680736651870
-- kind: conda
-  name: sdl2
-  version: 2.26.5
-  build: hf0c8a7f_0
-  subdir: osx-64
-  url: https://conda.anaconda.org/conda-forge/osx-64/sdl2-2.26.5-hf0c8a7f_0.conda
-  sha256: 63a616aa3997c58dacbb0f6b721f629bb32c454042b6e0f786b17cd4ff80966f
-  md5: cd1f00dcd5e7922e9d57cfbaf1115bd0
-  depends:
-  - libcxx >=14.0.6
-  license: Zlib
-  size: 1180736
-  timestamp: 1680736409923
-- kind: conda
-  name: sigtool
-  version: 0.1.3
-  build: h44b9a77_0
-  subdir: osx-arm64
-  url: https://conda.anaconda.org/conda-forge/osx-arm64/sigtool-0.1.3-h44b9a77_0.tar.bz2
-  sha256: 70791ae00a3756830cb50451db55f63e2a42a2fa2a8f1bab1ebd36bbb7d55bff
-  md5: 4a2cac04f86a4540b8c9b8d8f597848f
-  depends:
-  - openssl >=3.0.0,<4.0a0
-  license: MIT
-  license_family: MIT
-  size: 210264
-  timestamp: 1643442231687
-- kind: conda
-  name: sigtool
-  version: 0.1.3
-  build: h88f4db0_0
-  subdir: osx-64
-  url: https://conda.anaconda.org/conda-forge/osx-64/sigtool-0.1.3-h88f4db0_0.tar.bz2
-  sha256: 46fdeadf8f8d725819c4306838cdfd1099cd8fe3e17bd78862a5dfdcd6de61cf
-  md5: fbfb84b9de9a6939cb165c02c69b1865
-  depends:
-  - openssl >=3.0.0,<4.0a0
-  license: MIT
-  license_family: MIT
-  size: 213817
-  timestamp: 1643442169866
-- kind: conda
-  name: sysroot_linux-64
-  version: '2.17'
-  build: h4a8ded7_16
-  build_number: 16
-  subdir: noarch
-  noarch: generic
-  url: https://conda.anaconda.org/conda-forge/noarch/sysroot_linux-64-2.17-h4a8ded7_16.conda
-  sha256: b892b0b9c6dc8efe8b9b5442597d1ab8d65c0dc7e4e5a80f822cbdf0a639bd77
-  md5: 223fe8a3ff6d5e78484a9d58eb34d055
-  depends:
-  - _sysroot_linux-64_curr_repodata_hack 3.*
-  - kernel-headers_linux-64 3.10.0 h4a8ded7_16
-  - tzdata
-  license: LGPL-2.0-or-later AND LGPL-2.0-or-later WITH exceptions AND GPL-2.0-or-later AND MPL-2.0
-  license_family: GPL
-  size: 15513240
-  timestamp: 1720621429816
-- kind: conda
-  name: tapi
-  version: 1100.0.11
-  build: h9ce4665_0
-  subdir: osx-64
-  url: https://conda.anaconda.org/conda-forge/osx-64/tapi-1100.0.11-h9ce4665_0.tar.bz2
-  sha256: 34b18ce8d1518b67e333ca1d3af733c3976ecbdf3a36b727f9b4dedddcc588fa
-  md5: f9ff42ccf809a21ba6f8607f8de36108
-  depends:
-  - libcxx >=10.0.0.a0
-  license: NCSA
-  license_family: MIT
-  size: 201044
-  timestamp: 1602664232074
-- kind: conda
-  name: tapi
-  version: 1100.0.11
-  build: he4954df_0
-  subdir: osx-arm64
-  url: https://conda.anaconda.org/conda-forge/osx-arm64/tapi-1100.0.11-he4954df_0.tar.bz2
-  sha256: 1709265fbee693a9e8b4126b0a3e68a6c4718b05821c659279c1af051f2d40f3
-  md5: d83362e7d0513f35f454bc50b0ca591d
-  depends:
-  - libcxx >=11.0.0.a0
-  license: NCSA
-  license_family: MIT
-  size: 191416
-  timestamp: 1602687595316
-- kind: conda
-  name: tzdata
-  version: 2024a
-  build: h8827d51_1
-  build_number: 1
-  subdir: noarch
-  noarch: generic
-  url: https://conda.anaconda.org/conda-forge/noarch/tzdata-2024a-h8827d51_1.conda
-  sha256: 7d21c95f61319dba9209ca17d1935e6128af4235a67ee4e57a00908a1450081e
-  md5: 8bfdead4e0fff0383ae4c9c50d0531bd
-  license: LicenseRef-Public-Domain
-  size: 124164
-  timestamp: 1724736371498
-- kind: conda
-  name: ucrt
-  version: 10.0.22621.0
-  build: h57928b3_0
-  subdir: win-64
-  url: https://conda.anaconda.org/conda-forge/win-64/ucrt-10.0.22621.0-h57928b3_0.tar.bz2
-  sha256: f29cdaf8712008f6b419b8b1a403923b00ab2504bfe0fb2ba8eb60e72d4f14c6
-  md5: 72608f6cd3e5898229c3ea16deb1ac43
-  constrains:
-  - vs2015_runtime >=14.29.30037
-  license: LicenseRef-Proprietary
-  license_family: PROPRIETARY
-  size: 1283972
-  timestamp: 1666630199266
-- kind: conda
-  name: vc
-  version: '14.3'
-  build: h8a93ad2_20
-  build_number: 20
-  subdir: win-64
-  url: https://conda.anaconda.org/conda-forge/win-64/vc-14.3-h8a93ad2_20.conda
-  sha256: 23ac5feb15a9adf3ab2b8c4dcd63650f8b7ae860c5ceb073e49cf71d203eddef
-  md5: 8558f367e1d7700554f7cdb823c46faf
-  depends:
-  - vc14_runtime >=14.40.33810
-  track_features:
-  - vc14
-  license: BSD-3-Clause
-  license_family: BSD
-  size: 17391
-  timestamp: 1717709040616
-- kind: conda
-  name: vc14_runtime
-  version: 14.40.33810
-  build: hcc2c482_20
-  build_number: 20
-  subdir: win-64
-  url: https://conda.anaconda.org/conda-forge/win-64/vc14_runtime-14.40.33810-hcc2c482_20.conda
-  sha256: bba8daa6f78b26b48fb7e1377eb52160e25495710bf53146c5f405bd50565982
-  md5: ad33c7cd933d69b9dee0f48317cdf137
-  depends:
-  - ucrt >=10.0.20348.0
-  constrains:
-  - vs2015_runtime 14.40.33810.* *_20
-  license: LicenseRef-ProprietaryMicrosoft
+  - vs2015_runtime 14.42.34433.* *_23
+  channel: https://prefix.dev/conda-forge
+  license: LicenseRef-MicrosoftVisualCpp2015-2022Runtime
   license_family: Proprietary
-  size: 751028
-  timestamp: 1724712684919
-- kind: conda
-  name: vs2015_runtime
-  version: 14.40.33810
-  build: h3bf8584_20
-  build_number: 20
-  subdir: win-64
-  url: https://conda.anaconda.org/conda-forge/win-64/vs2015_runtime-14.40.33810-h3bf8584_20.conda
-  sha256: 0c2803f7a788c51f28235a7228dc2ab3f107b4b16ab0845a3e595c8c51e50a7a
-  md5: c21f1b4a3a30bbc3ef35a50957578e0e
-  depends:
-  - vc14_runtime >=14.40.33810
-  license: BSD-3-Clause
-  license_family: BSD
-  size: 17395
-  timestamp: 1717709043353
-- kind: conda
-  name: vs2019_win-64
-  version: 19.29.30139
-  build: he1865b1_20
-  build_number: 20
-  subdir: win-64
-  url: https://conda.anaconda.org/conda-forge/win-64/vs2019_win-64-19.29.30139-he1865b1_20.conda
-  sha256: b9b3faf4fa20301ad1886cfde20d339ea6c2e95de8f4710e0b49af1ca1d3a657
-  md5: bc2f92e632f5c6b0d94e365546c7fc6e
-  depends:
-  - vswhere
-  constrains:
-  - vs_win-64 2019.11
-  track_features:
-  - vc14
-  license: BSD-3-Clause
-  license_family: BSD
-  size: 19744
-  timestamp: 1716231200159
-- kind: conda
-  name: vswhere
-  version: 3.1.7
-  build: h57928b3_0
-  subdir: win-64
-  url: https://conda.anaconda.org/conda-forge/win-64/vswhere-3.1.7-h57928b3_0.conda
-  sha256: 8caeda9c0898cb8ee2cf4f45640dbbbdf772ddc01345cfb0f7b352c58b4d8025
-  md5: ba83df93b48acfc528f5464c9a882baa
-  license: MIT
-  license_family: MIT
-  size: 219013
-  timestamp: 1719460515960
-- kind: conda
-  name: xorg-kbproto
-  version: 1.0.7
-  build: h7f98852_1002
-  build_number: 1002
-  subdir: linux-64
-  url: https://conda.anaconda.org/conda-forge/linux-64/xorg-kbproto-1.0.7-h7f98852_1002.tar.bz2
-  sha256: e90b0a6a5d41776f11add74aa030f789faf4efd3875c31964d6f9cfa63a10dd1
-  md5: 4b230e8381279d76131116660f5a241a
-  depends:
-  - libgcc-ng >=9.3.0
-  license: MIT
-  license_family: MIT
-  size: 27338
-  timestamp: 1610027759842
-- kind: conda
-  name: xorg-libx11
-  version: 1.8.9
-  build: hb711507_1
-  build_number: 1
-  subdir: linux-64
-  url: https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.9-hb711507_1.conda
-  sha256: 66eabe62b66c1597c4a755dcd3f4ce2c78adaf7b32e25dfee45504d67d7735c1
-  md5: 4a6d410296d7e39f00bacdee7df046e9
-  depends:
-  - libgcc-ng >=12
-  - libxcb >=1.16,<1.17.0a0
-  - xorg-kbproto
-  - xorg-xextproto >=7.3.0,<8.0a0
-  - xorg-xproto
-  license: MIT
-  license_family: MIT
-  size: 832198
-  timestamp: 1718846846409
-- kind: conda
-  name: xorg-libxau
-  version: 1.0.11
-  build: hd590300_0
-  subdir: linux-64
-  url: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxau-1.0.11-hd590300_0.conda
-  sha256: 309751371d525ce50af7c87811b435c176915239fc9e132b99a25d5e1703f2d4
-  md5: 2c80dc38fface310c9bd81b17037fee5
-  depends:
-  - libgcc-ng >=12
-  license: MIT
-  license_family: MIT
-  size: 14468
-  timestamp: 1684637984591
-- kind: conda
-  name: xorg-libxdmcp
-  version: 1.1.3
-  build: h7f98852_0
-  subdir: linux-64
-  url: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdmcp-1.1.3-h7f98852_0.tar.bz2
-  sha256: 4df7c5ee11b8686d3453e7f3f4aa20ceef441262b49860733066c52cfd0e4a77
-  md5: be93aabceefa2fac576e971aef407908
-  depends:
- - libgcc-ng >=9.3.0 - license: MIT - license_family: MIT - size: 19126 - timestamp: 1610071769228 -- kind: conda - name: xorg-libxext - version: 1.3.4 - build: h0b41bf4_2 - build_number: 2 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.4-h0b41bf4_2.conda - sha256: 73e5cfbdff41ef8a844441f884412aa5a585a0f0632ec901da035a03e1fe1249 - md5: 82b6df12252e6f32402b96dacc656fec - depends: - - libgcc-ng >=12 - - xorg-libx11 >=1.7.2,<2.0a0 - - xorg-xextproto - license: MIT - license_family: MIT - size: 50143 - timestamp: 1677036907815 -- kind: conda - name: xorg-xextproto - version: 7.3.0 - build: h0b41bf4_1003 - build_number: 1003 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/xorg-xextproto-7.3.0-h0b41bf4_1003.conda - sha256: b8dda3b560e8a7830fe23be1c58cc41f407b2e20ae2f3b6901eb5842ba62b743 - md5: bce9f945da8ad2ae9b1d7165a64d0f87 - depends: - - libgcc-ng >=12 - license: MIT - license_family: MIT - size: 30270 - timestamp: 1677036833037 -- kind: conda - name: xorg-xproto - version: 7.0.31 - build: h7f98852_1007 - build_number: 1007 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/xorg-xproto-7.0.31-h7f98852_1007.tar.bz2 - sha256: f197bb742a17c78234c24605ad1fe2d88b1d25f332b75d73e5ba8cf8fbc2a10d - md5: b4a4381d54784606820704f7b5f05a15 - depends: - - libgcc-ng >=9.3.0 - license: MIT - license_family: MIT - size: 74922 - timestamp: 1607291557628 -- kind: conda - name: xz - version: 5.2.6 - build: h166bdaf_0 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.6-h166bdaf_0.tar.bz2 - sha256: 03a6d28ded42af8a347345f82f3eebdd6807a08526d47899a42d62d319609162 - md5: 2161070d867d1b1204ea749c8eec4ef0 - depends: - - libgcc-ng >=12 - license: LGPL-2.1 and GPL-2.0 - size: 418368 - timestamp: 1660346797927 -- kind: conda - name: xz - version: 5.2.6 - build: h57fd34a_0 - subdir: osx-arm64 - url: https://conda.anaconda.org/conda-forge/osx-arm64/xz-5.2.6-h57fd34a_0.tar.bz2 - sha256: 59d78af0c3e071021cfe82dc40134c19dab8cdf804324b62940f5c8cd71803ec - md5: 39c6b54e94014701dd157f4f576ed211 - license: LGPL-2.1 and GPL-2.0 - size: 235693 - timestamp: 1660346961024 -- kind: conda - name: xz - version: 5.2.6 - build: h775f41a_0 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/xz-5.2.6-h775f41a_0.tar.bz2 - sha256: eb09823f34cc2dd663c0ec4ab13f246f45dcd52e5b8c47b9864361de5204a1c8 - md5: a72f9d4ea13d55d745ff1ed594747f10 - license: LGPL-2.1 and GPL-2.0 - size: 238119 - timestamp: 1660346964847 -- kind: conda - name: zlib - version: 1.3.1 - build: h4ab18f5_1 - build_number: 1 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/zlib-1.3.1-h4ab18f5_1.conda - sha256: cee16ab07a11303de721915f0a269e8c7a54a5c834aa52f74b1cc3a59000ade8 - md5: 9653f1bf3766164d0e65fa723cabbc54 - depends: - - libgcc-ng >=12 - - libzlib 1.3.1 h4ab18f5_1 - license: Zlib - license_family: Other - size: 93004 - timestamp: 1716874213487 -- kind: conda - name: zlib - version: 1.3.1 - build: h87427d6_1 - build_number: 1 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/zlib-1.3.1-h87427d6_1.conda - sha256: 41bd5fef28b2755d637e3a8ea5c84010628392fbcf80c7e3d7370aaced7ee4fe - md5: 3ac9ef8975965f9698dbedd2a4cc5894 - depends: - - __osx >=10.13 - - libzlib 1.3.1 h87427d6_1 - license: Zlib - license_family: Other - size: 88782 - timestamp: 1716874245467 -- kind: conda - name: zlib - version: 1.3.1 - build: hfb2fe0b_1 - build_number: 1 - subdir: osx-arm64 - url: 
https://conda.anaconda.org/conda-forge/osx-arm64/zlib-1.3.1-hfb2fe0b_1.conda - sha256: 87360c2dc662916aac37cf01e53324b4f4f78db6f399220818076752b093ede5 - md5: f27e021db7862b6ddbc1d3578f10d883 - depends: - - __osx >=11.0 - - libzlib 1.3.1 hfb2fe0b_1 - license: Zlib - license_family: Other - size: 78260 - timestamp: 1716874280334 -- kind: conda - name: zstd - version: 1.5.6 - build: h915ae27_0 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/zstd-1.5.6-h915ae27_0.conda - sha256: efa04a98cb149643fa54c4dad5a0179e36a5fbc88427ea0eec88ceed87fd0f96 - md5: 4cb2cd56f039b129bb0e491c1164167e - depends: - - __osx >=10.9 - - libzlib >=1.2.13,<2.0.0a0 - license: BSD-3-Clause - license_family: BSD - size: 498900 - timestamp: 1714723303098 -- kind: conda - name: zstd - version: 1.5.6 - build: ha6fb4c9_0 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.6-ha6fb4c9_0.conda - sha256: c558b9cc01d9c1444031bd1ce4b9cff86f9085765f17627a6cd85fc623c8a02b - md5: 4d056880988120e29d75bfff282e0f45 - depends: - - libgcc-ng >=12 - - libstdcxx-ng >=12 - - libzlib >=1.2.13,<2.0.0a0 - license: BSD-3-Clause - license_family: BSD - size: 554846 - timestamp: 1714722996770 -- kind: conda - name: zstd - version: 1.5.6 - build: hb46c0d2_0 - subdir: osx-arm64 - url: https://conda.anaconda.org/conda-forge/osx-arm64/zstd-1.5.6-hb46c0d2_0.conda - sha256: 2d4fd1ff7ee79cd954ca8e81abf11d9d49954dd1fef80f27289e2402ae9c2e09 - md5: d96942c06c3e84bfcc5efb038724a7fd - depends: - - __osx >=11.0 - - libzlib >=1.2.13,<2.0.0a0 - license: BSD-3-Clause - license_family: BSD - size: 405089 - timestamp: 1714723101397 + size: 754247 + timestamp: 1731710681163
diff --git a/examples/cpp-sdl/pixi.toml b/examples/cpp-sdl/pixi.toml
index 68a5a057e..408f4e8d3 100644
--- a/examples/cpp-sdl/pixi.toml
+++ b/examples/cpp-sdl/pixi.toml
@@ -1,44 +1,30 @@
-[project]
+[workspace]
+channels = ["https://prefix.dev/conda-forge"]
+platforms = ["win-64", "linux-64", "osx-arm64", "osx-64"]
+preview = ["pixi-build"]
+
+[package]
 authors = ["Bas Zalmstra "]
-channels = ["conda-forge"]
 description = "Showcases how to create a simple C++ executable with Pixi"
 name = "sdl_example"
-platforms = ["win-64", "linux-64", "osx-64", "osx-arm64"]
-
-[tasks.start]
-# Start the built executable
-cmd = ".build/bin/sdl_example"
-depends-on = ["build"]
-
-[dependencies]
-sdl2 = "2.26.5.*"
+version = "0.1.0"
 
-[feature.build.dependencies]
-cmake = "3.26.4.*"
-cxx-compiler = "1.5.2.*"
-make = ">=4.3,<5"
-ninja = "1.11.1.*"
-
-[feature.build.tasks.configure]
-# Configures CMake
-cmd = [
-  "cmake",
-  # Use the cross-platform Ninja generator
-  "-GNinja",
-  # The source is in the root directory
-  "-S.",
-  # We wanna build in the .build directory
-  "-B.build",
+[build-system]
+build-backend = "pixi-build-cmake"
+channels = [
+  "https://prefix.dev/pixi-build-backends",
+  "https://prefix.dev/conda-forge",
 ]
-inputs = ["CMakeLists.txt"]
-outputs = [".build/CMakeFiles/"]
+dependencies = ["pixi-build-cmake"]
+
+[tasks.start]
+cmd = "sdl_example"
 
-# Build the executable but make sure CMake is configured first.
-[feature.build.tasks.build]
-cmd = ["cmake", "--build", ".build"]
-depends-on = ["configure"]
-inputs = ["CMakeLists.txt", "src/*"]
-outputs = [".build/bin/sdl_example"]
+[host-dependencies]
+# This ensures that SDL2 is available at build time.
+sdl2 = ">=2.26.5,<3.0"
 
-[environments]
-build = ["build"]
+[dependencies]
+# Define a dependency on ourselves. This will invoke the build backend to build
+# the C++ code and install the executable in an environment ready to be used.
+sdl_example = { path = "." }
diff --git a/examples/editable-with-extras/pixi.lock b/examples/editable-with-extras/pixi.lock index 128a65c6b..debe7850b 100644 --- a/examples/editable-with-extras/pixi.lock +++ b/examples/editable-with-extras/pixi.lock @@ -753,7 +753,7 @@ packages: name: package-with-extras version: 0.0.2 path: ./package_with_extras - sha256: c2160511f4d81f291503314dccd8ef5c739ec3f57e0e4e570b2e08da77ba36fc + sha256: 41880f3aa3e04bee7863021efb45246f65be05fcfac9462763edc48423bdac37 requires_dist: - boltons - rich ; extra == 'color' diff --git a/examples/flask-hello-world-pyproject/pixi.lock b/examples/flask-hello-world-pyproject/pixi.lock index e96fa001f..c28aa083a 100644 --- a/examples/flask-hello-world-pyproject/pixi.lock +++ b/examples/flask-hello-world-pyproject/pixi.lock @@ -1,4 +1,4 @@ -version: 5 +version: 6 environments: default: channels: @@ -9,8 +9,14 @@ environments: linux-64: - conda: https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/noarch/blinker-1.9.0-pyhff2d567_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h4bc722e_7.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2024.7.4-hbcca054_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/click-8.1.7-unix_pyh707e725_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/flask-2.3.3-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.5.0-pyha770c72_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/itsdangerous-2.2.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.4-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.40-hf3520f5_7.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.6.2-h59595ed_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2 @@ -22,105 +28,95 @@ environments: - conda: https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.38.1-h0b41bf4_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libxcrypt-4.4.36-hd590300_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.3.1-h4ab18f5_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/markupsafe-3.0.2-py312h178313f_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.5-he02047a_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/openssl-3.3.1-hb9d3cd8_3.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/python-3.12.5-h2ad013b_0_cpython.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.12-5_cp312.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_h4845f30_101.conda - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2024a-h8827d51_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/werkzeug-3.1.3-pyhff2d567_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.6-h166bdaf_0.tar.bz2 - - pypi:
https://files.pythonhosted.org/packages/bb/2a/10164ed1f31196a2f7f3799368a821765c62851ead0e630ab52b8e14b4d0/blinker-1.8.2-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/00/2e/d53fa4befbf2cfa713304affc7ca780ce4fc1fd8710527771b58311a3229/click-8.1.7-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/fd/56/26f0be8adc2b4257df20c1c4260ddd0aa396cf8e75d90ab2f7ff99bc34f9/flask-2.3.3-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/04/96/92447566d16df59b2a776c0fb82dbc4d9e07cd95062562af01e408583fc4/itsdangerous-2.2.0-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/31/80/3a54838c3fb461f6fec263ebf3a3a41771bd05190238de3486aae8540c36/jinja2-3.1.4-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/0a/0d/2454f072fae3b5a137c119abf15465d1771319dfe9e4acbb31722a0fff91/MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/88/ef/eb23f262cca3c0c4eb7ab1933c3b1f03d021f2c48f54763065b6f0e321be/packaging-24.2-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/88/5f/e351af9a41f866ac3f1fac4ca0613908d9a41741cfcf2228f4ad853b697d/pluggy-1.5.0-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/6b/77/7440a06a8ead44c7757a64362dd22df5760f9b12dc5f11b6188cd2fc27a0/pytest-8.3.3-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/4b/84/997bbf7c2bf2dc3f09565c6d0b4959fefe5355c18c4096cfd26d83e0785b/werkzeug-3.0.4-py3-none-any.whl + - conda: https://conda.anaconda.org/conda-forge/noarch/zipp-3.21.0-pyhd8ed1ab_0.conda - pypi: . osx-64: + - conda: https://conda.anaconda.org/conda-forge/noarch/blinker-1.9.0-pyhff2d567_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/bzip2-1.0.8-hfdf4475_7.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/ca-certificates-2024.7.4-h8857fd0_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/click-8.1.7-unix_pyh707e725_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/flask-2.3.3-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.5.0-pyha770c72_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/itsdangerous-2.2.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.4-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libexpat-2.6.2-h73e2aa4_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libffi-3.4.2-h0d85af4_5.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-64/libsqlite-3.46.0-h1b8f9f3_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libzlib-1.3.1-h87427d6_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/markupsafe-3.0.2-py312hbe3f5e4_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/ncurses-6.5-hf036a51_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/openssl-3.3.1-hd23fc13_3.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/python-3.12.5-h37a9e06_0_cpython.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/python_abi-3.12-5_cp312.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/readline-8.2-h9e318b2_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/tk-8.6.13-h1abcd95_1.conda - conda: 
https://conda.anaconda.org/conda-forge/noarch/tzdata-2024a-h8827d51_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/werkzeug-3.1.3-pyhff2d567_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/xz-5.2.6-h775f41a_0.tar.bz2 - - pypi: https://files.pythonhosted.org/packages/bb/2a/10164ed1f31196a2f7f3799368a821765c62851ead0e630ab52b8e14b4d0/blinker-1.8.2-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/00/2e/d53fa4befbf2cfa713304affc7ca780ce4fc1fd8710527771b58311a3229/click-8.1.7-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/fd/56/26f0be8adc2b4257df20c1c4260ddd0aa396cf8e75d90ab2f7ff99bc34f9/flask-2.3.3-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/04/96/92447566d16df59b2a776c0fb82dbc4d9e07cd95062562af01e408583fc4/itsdangerous-2.2.0-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/31/80/3a54838c3fb461f6fec263ebf3a3a41771bd05190238de3486aae8540c36/jinja2-3.1.4-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/48/d6/e7cd795fc710292c3af3a06d80868ce4b02bfbbf370b7cee11d282815a2a/MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/88/ef/eb23f262cca3c0c4eb7ab1933c3b1f03d021f2c48f54763065b6f0e321be/packaging-24.2-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/88/5f/e351af9a41f866ac3f1fac4ca0613908d9a41741cfcf2228f4ad853b697d/pluggy-1.5.0-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/6b/77/7440a06a8ead44c7757a64362dd22df5760f9b12dc5f11b6188cd2fc27a0/pytest-8.3.3-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/4b/84/997bbf7c2bf2dc3f09565c6d0b4959fefe5355c18c4096cfd26d83e0785b/werkzeug-3.0.4-py3-none-any.whl + - conda: https://conda.anaconda.org/conda-forge/noarch/zipp-3.21.0-pyhd8ed1ab_0.conda - pypi: . 
osx-arm64: + - conda: https://conda.anaconda.org/conda-forge/noarch/blinker-1.9.0-pyhff2d567_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/bzip2-1.0.8-h99b78c6_7.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/ca-certificates-2024.7.4-hf0a4a13_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/click-8.1.7-unix_pyh707e725_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/flask-2.3.3-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.5.0-pyha770c72_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/itsdangerous-2.2.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.4-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libexpat-2.6.2-hebf3989_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libffi-3.4.2-h3422bc3_5.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libsqlite-3.46.0-hfb93653_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libzlib-1.3.1-hfb2fe0b_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/markupsafe-3.0.2-py312ha0ccf2a_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/ncurses-6.5-h7bae524_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/openssl-3.3.1-h8359307_3.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/python-3.12.5-h30c5eda_0_cpython.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/python_abi-3.12-5_cp312.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/readline-8.2-h92ec313_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/tk-8.6.13-h5083fa2_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2024a-h8827d51_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/werkzeug-3.1.3-pyhff2d567_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/xz-5.2.6-h57fd34a_0.tar.bz2 - - pypi: https://files.pythonhosted.org/packages/bb/2a/10164ed1f31196a2f7f3799368a821765c62851ead0e630ab52b8e14b4d0/blinker-1.8.2-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/00/2e/d53fa4befbf2cfa713304affc7ca780ce4fc1fd8710527771b58311a3229/click-8.1.7-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/fd/56/26f0be8adc2b4257df20c1c4260ddd0aa396cf8e75d90ab2f7ff99bc34f9/flask-2.3.3-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/04/96/92447566d16df59b2a776c0fb82dbc4d9e07cd95062562af01e408583fc4/itsdangerous-2.2.0-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/31/80/3a54838c3fb461f6fec263ebf3a3a41771bd05190238de3486aae8540c36/jinja2-3.1.4-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/53/bd/583bf3e4c8d6a321938c13f49d44024dbe5ed63e0a7ba127e454a66da974/MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl - - pypi: https://files.pythonhosted.org/packages/88/ef/eb23f262cca3c0c4eb7ab1933c3b1f03d021f2c48f54763065b6f0e321be/packaging-24.2-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/88/5f/e351af9a41f866ac3f1fac4ca0613908d9a41741cfcf2228f4ad853b697d/pluggy-1.5.0-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/6b/77/7440a06a8ead44c7757a64362dd22df5760f9b12dc5f11b6188cd2fc27a0/pytest-8.3.3-py3-none-any.whl - - pypi: 
https://files.pythonhosted.org/packages/4b/84/997bbf7c2bf2dc3f09565c6d0b4959fefe5355c18c4096cfd26d83e0785b/werkzeug-3.0.4-py3-none-any.whl + - conda: https://conda.anaconda.org/conda-forge/noarch/zipp-3.21.0-pyhd8ed1ab_0.conda - pypi: . win-64: + - conda: https://conda.anaconda.org/conda-forge/noarch/blinker-1.9.0-pyhff2d567_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/bzip2-1.0.8-h2466b09_7.conda - conda: https://conda.anaconda.org/conda-forge/win-64/ca-certificates-2024.7.4-h56e8100_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/click-8.1.7-win_pyh7428d3b_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_0.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/noarch/flask-2.3.3-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.5.0-pyha770c72_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/itsdangerous-2.2.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.4-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libexpat-2.6.2-h63175ca_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libffi-3.4.2-h8ffe710_5.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/win-64/libsqlite-3.46.0-h2466b09_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libzlib-1.3.1-h2466b09_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/markupsafe-3.0.2-py312h31fea79_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/openssl-3.3.1-h2466b09_3.conda - conda: https://conda.anaconda.org/conda-forge/win-64/python-3.12.5-h889d299_0_cpython.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/python_abi-3.12-5_cp312.conda - conda: https://conda.anaconda.org/conda-forge/win-64/tk-8.6.13-h5226925_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2024a-h8827d51_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/ucrt-10.0.22621.0-h57928b3_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/win-64/vc-14.3-h8a93ad2_20.conda - conda: https://conda.anaconda.org/conda-forge/win-64/vc14_runtime-14.40.33810-hcc2c482_20.conda - conda: https://conda.anaconda.org/conda-forge/win-64/vs2015_runtime-14.40.33810-h3bf8584_20.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/werkzeug-3.1.3-pyhff2d567_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/xz-5.2.6-h8d14728_0.tar.bz2 - - pypi: https://files.pythonhosted.org/packages/bb/2a/10164ed1f31196a2f7f3799368a821765c62851ead0e630ab52b8e14b4d0/blinker-1.8.2-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/00/2e/d53fa4befbf2cfa713304affc7ca780ce4fc1fd8710527771b58311a3229/click-8.1.7-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/fd/56/26f0be8adc2b4257df20c1c4260ddd0aa396cf8e75d90ab2f7ff99bc34f9/flask-2.3.3-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/04/96/92447566d16df59b2a776c0fb82dbc4d9e07cd95062562af01e408583fc4/itsdangerous-2.2.0-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/31/80/3a54838c3fb461f6fec263ebf3a3a41771bd05190238de3486aae8540c36/jinja2-3.1.4-py3-none-any.whl - - pypi: 
https://files.pythonhosted.org/packages/3f/14/c3554d512d5f9100a95e737502f4a2323a1959f6d0d01e0d0997b35f7b10/MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl - - pypi: https://files.pythonhosted.org/packages/88/ef/eb23f262cca3c0c4eb7ab1933c3b1f03d021f2c48f54763065b6f0e321be/packaging-24.2-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/88/5f/e351af9a41f866ac3f1fac4ca0613908d9a41741cfcf2228f4ad853b697d/pluggy-1.5.0-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/6b/77/7440a06a8ead44c7757a64362dd22df5760f9b12dc5f11b6188cd2fc27a0/pytest-8.3.3-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/4b/84/997bbf7c2bf2dc3f09565c6d0b4959fefe5355c18c4096cfd26d83e0785b/werkzeug-3.0.4-py3-none-any.whl + - conda: https://conda.anaconda.org/conda-forge/noarch/zipp-3.21.0-pyhd8ed1ab_0.conda - pypi: . test: channels: @@ -131,8 +127,14 @@ environments: linux-64: - conda: https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/noarch/blinker-1.9.0-pyhff2d567_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h4bc722e_7.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2024.7.4-hbcca054_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/click-8.1.7-unix_pyh707e725_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/flask-2.3.3-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.5.0-pyha770c72_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/itsdangerous-2.2.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.4-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.40-hf3520f5_7.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.6.2-h59595ed_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2 @@ -144,126 +146,122 @@ environments: - conda: https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.38.1-h0b41bf4_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libxcrypt-4.4.36-hd590300_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.3.1-h4ab18f5_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/markupsafe-3.0.2-py312h178313f_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.5-he02047a_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/openssl-3.3.1-hb9d3cd8_3.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/python-3.12.5-h2ad013b_0_cpython.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.12-5_cp312.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_h4845f30_101.conda - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2024a-h8827d51_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/werkzeug-3.1.3-pyhff2d567_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.6-h166bdaf_0.tar.bz2 - - pypi: https://files.pythonhosted.org/packages/bb/2a/10164ed1f31196a2f7f3799368a821765c62851ead0e630ab52b8e14b4d0/blinker-1.8.2-py3-none-any.whl - - pypi: 
https://files.pythonhosted.org/packages/00/2e/d53fa4befbf2cfa713304affc7ca780ce4fc1fd8710527771b58311a3229/click-8.1.7-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/fd/56/26f0be8adc2b4257df20c1c4260ddd0aa396cf8e75d90ab2f7ff99bc34f9/flask-2.3.3-py3-none-any.whl + - conda: https://conda.anaconda.org/conda-forge/noarch/zipp-3.21.0-pyhd8ed1ab_0.conda - pypi: https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/04/96/92447566d16df59b2a776c0fb82dbc4d9e07cd95062562af01e408583fc4/itsdangerous-2.2.0-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/31/80/3a54838c3fb461f6fec263ebf3a3a41771bd05190238de3486aae8540c36/jinja2-3.1.4-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/0a/0d/2454f072fae3b5a137c119abf15465d1771319dfe9e4acbb31722a0fff91/MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl - pypi: https://files.pythonhosted.org/packages/88/ef/eb23f262cca3c0c4eb7ab1933c3b1f03d021f2c48f54763065b6f0e321be/packaging-24.2-py3-none-any.whl - pypi: https://files.pythonhosted.org/packages/88/5f/e351af9a41f866ac3f1fac4ca0613908d9a41741cfcf2228f4ad853b697d/pluggy-1.5.0-py3-none-any.whl - pypi: https://files.pythonhosted.org/packages/6b/77/7440a06a8ead44c7757a64362dd22df5760f9b12dc5f11b6188cd2fc27a0/pytest-8.3.3-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/4b/84/997bbf7c2bf2dc3f09565c6d0b4959fefe5355c18c4096cfd26d83e0785b/werkzeug-3.0.4-py3-none-any.whl - pypi: . osx-64: + - conda: https://conda.anaconda.org/conda-forge/noarch/blinker-1.9.0-pyhff2d567_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/bzip2-1.0.8-hfdf4475_7.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/ca-certificates-2024.7.4-h8857fd0_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/click-8.1.7-unix_pyh707e725_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/flask-2.3.3-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.5.0-pyha770c72_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/itsdangerous-2.2.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.4-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libexpat-2.6.2-h73e2aa4_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libffi-3.4.2-h0d85af4_5.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-64/libsqlite-3.46.0-h1b8f9f3_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libzlib-1.3.1-h87427d6_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/markupsafe-3.0.2-py312hbe3f5e4_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/ncurses-6.5-hf036a51_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/openssl-3.3.1-hd23fc13_3.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/python-3.12.5-h37a9e06_0_cpython.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/python_abi-3.12-5_cp312.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/readline-8.2-h9e318b2_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/tk-8.6.13-h1abcd95_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2024a-h8827d51_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/werkzeug-3.1.3-pyhff2d567_0.conda - conda: 
https://conda.anaconda.org/conda-forge/osx-64/xz-5.2.6-h775f41a_0.tar.bz2 - - pypi: https://files.pythonhosted.org/packages/bb/2a/10164ed1f31196a2f7f3799368a821765c62851ead0e630ab52b8e14b4d0/blinker-1.8.2-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/00/2e/d53fa4befbf2cfa713304affc7ca780ce4fc1fd8710527771b58311a3229/click-8.1.7-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/fd/56/26f0be8adc2b4257df20c1c4260ddd0aa396cf8e75d90ab2f7ff99bc34f9/flask-2.3.3-py3-none-any.whl + - conda: https://conda.anaconda.org/conda-forge/noarch/zipp-3.21.0-pyhd8ed1ab_0.conda - pypi: https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/04/96/92447566d16df59b2a776c0fb82dbc4d9e07cd95062562af01e408583fc4/itsdangerous-2.2.0-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/31/80/3a54838c3fb461f6fec263ebf3a3a41771bd05190238de3486aae8540c36/jinja2-3.1.4-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/48/d6/e7cd795fc710292c3af3a06d80868ce4b02bfbbf370b7cee11d282815a2a/MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl - pypi: https://files.pythonhosted.org/packages/88/ef/eb23f262cca3c0c4eb7ab1933c3b1f03d021f2c48f54763065b6f0e321be/packaging-24.2-py3-none-any.whl - pypi: https://files.pythonhosted.org/packages/88/5f/e351af9a41f866ac3f1fac4ca0613908d9a41741cfcf2228f4ad853b697d/pluggy-1.5.0-py3-none-any.whl - pypi: https://files.pythonhosted.org/packages/6b/77/7440a06a8ead44c7757a64362dd22df5760f9b12dc5f11b6188cd2fc27a0/pytest-8.3.3-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/4b/84/997bbf7c2bf2dc3f09565c6d0b4959fefe5355c18c4096cfd26d83e0785b/werkzeug-3.0.4-py3-none-any.whl - pypi: . 
osx-arm64: + - conda: https://conda.anaconda.org/conda-forge/noarch/blinker-1.9.0-pyhff2d567_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/bzip2-1.0.8-h99b78c6_7.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/ca-certificates-2024.7.4-hf0a4a13_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/click-8.1.7-unix_pyh707e725_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/flask-2.3.3-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.5.0-pyha770c72_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/itsdangerous-2.2.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.4-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libexpat-2.6.2-hebf3989_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libffi-3.4.2-h3422bc3_5.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libsqlite-3.46.0-hfb93653_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libzlib-1.3.1-hfb2fe0b_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/markupsafe-3.0.2-py312ha0ccf2a_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/ncurses-6.5-h7bae524_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/openssl-3.3.1-h8359307_3.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/python-3.12.5-h30c5eda_0_cpython.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/python_abi-3.12-5_cp312.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/readline-8.2-h92ec313_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/tk-8.6.13-h5083fa2_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2024a-h8827d51_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/werkzeug-3.1.3-pyhff2d567_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/xz-5.2.6-h57fd34a_0.tar.bz2 - - pypi: https://files.pythonhosted.org/packages/bb/2a/10164ed1f31196a2f7f3799368a821765c62851ead0e630ab52b8e14b4d0/blinker-1.8.2-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/00/2e/d53fa4befbf2cfa713304affc7ca780ce4fc1fd8710527771b58311a3229/click-8.1.7-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/fd/56/26f0be8adc2b4257df20c1c4260ddd0aa396cf8e75d90ab2f7ff99bc34f9/flask-2.3.3-py3-none-any.whl + - conda: https://conda.anaconda.org/conda-forge/noarch/zipp-3.21.0-pyhd8ed1ab_0.conda - pypi: https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/04/96/92447566d16df59b2a776c0fb82dbc4d9e07cd95062562af01e408583fc4/itsdangerous-2.2.0-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/31/80/3a54838c3fb461f6fec263ebf3a3a41771bd05190238de3486aae8540c36/jinja2-3.1.4-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/53/bd/583bf3e4c8d6a321938c13f49d44024dbe5ed63e0a7ba127e454a66da974/MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl - pypi: https://files.pythonhosted.org/packages/88/ef/eb23f262cca3c0c4eb7ab1933c3b1f03d021f2c48f54763065b6f0e321be/packaging-24.2-py3-none-any.whl - pypi: https://files.pythonhosted.org/packages/88/5f/e351af9a41f866ac3f1fac4ca0613908d9a41741cfcf2228f4ad853b697d/pluggy-1.5.0-py3-none-any.whl - pypi: 
https://files.pythonhosted.org/packages/6b/77/7440a06a8ead44c7757a64362dd22df5760f9b12dc5f11b6188cd2fc27a0/pytest-8.3.3-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/4b/84/997bbf7c2bf2dc3f09565c6d0b4959fefe5355c18c4096cfd26d83e0785b/werkzeug-3.0.4-py3-none-any.whl - pypi: . win-64: + - conda: https://conda.anaconda.org/conda-forge/noarch/blinker-1.9.0-pyhff2d567_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/bzip2-1.0.8-h2466b09_7.conda - conda: https://conda.anaconda.org/conda-forge/win-64/ca-certificates-2024.7.4-h56e8100_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/click-8.1.7-win_pyh7428d3b_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_0.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/noarch/flask-2.3.3-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.5.0-pyha770c72_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/itsdangerous-2.2.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.4-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libexpat-2.6.2-h63175ca_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libffi-3.4.2-h8ffe710_5.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/win-64/libsqlite-3.46.0-h2466b09_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libzlib-1.3.1-h2466b09_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/markupsafe-3.0.2-py312h31fea79_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/openssl-3.3.1-h2466b09_3.conda - conda: https://conda.anaconda.org/conda-forge/win-64/python-3.12.5-h889d299_0_cpython.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/python_abi-3.12-5_cp312.conda - conda: https://conda.anaconda.org/conda-forge/win-64/tk-8.6.13-h5226925_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2024a-h8827d51_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/ucrt-10.0.22621.0-h57928b3_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/win-64/vc-14.3-h8a93ad2_20.conda - conda: https://conda.anaconda.org/conda-forge/win-64/vc14_runtime-14.40.33810-hcc2c482_20.conda - conda: https://conda.anaconda.org/conda-forge/win-64/vs2015_runtime-14.40.33810-h3bf8584_20.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/werkzeug-3.1.3-pyhff2d567_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/xz-5.2.6-h8d14728_0.tar.bz2 - - pypi: https://files.pythonhosted.org/packages/bb/2a/10164ed1f31196a2f7f3799368a821765c62851ead0e630ab52b8e14b4d0/blinker-1.8.2-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/00/2e/d53fa4befbf2cfa713304affc7ca780ce4fc1fd8710527771b58311a3229/click-8.1.7-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/fd/56/26f0be8adc2b4257df20c1c4260ddd0aa396cf8e75d90ab2f7ff99bc34f9/flask-2.3.3-py3-none-any.whl + - conda: https://conda.anaconda.org/conda-forge/noarch/zipp-3.21.0-pyhd8ed1ab_0.conda - pypi: https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/04/96/92447566d16df59b2a776c0fb82dbc4d9e07cd95062562af01e408583fc4/itsdangerous-2.2.0-py3-none-any.whl - - pypi: 
https://files.pythonhosted.org/packages/31/80/3a54838c3fb461f6fec263ebf3a3a41771bd05190238de3486aae8540c36/jinja2-3.1.4-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/3f/14/c3554d512d5f9100a95e737502f4a2323a1959f6d0d01e0d0997b35f7b10/MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl - pypi: https://files.pythonhosted.org/packages/88/ef/eb23f262cca3c0c4eb7ab1933c3b1f03d021f2c48f54763065b6f0e321be/packaging-24.2-py3-none-any.whl - pypi: https://files.pythonhosted.org/packages/88/5f/e351af9a41f866ac3f1fac4ca0613908d9a41741cfcf2228f4ad853b697d/pluggy-1.5.0-py3-none-any.whl - pypi: https://files.pythonhosted.org/packages/6b/77/7440a06a8ead44c7757a64362dd22df5760f9b12dc5f11b6188cd2fc27a0/pytest-8.3.3-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/4b/84/997bbf7c2bf2dc3f09565c6d0b4959fefe5355c18c4096cfd26d83e0785b/werkzeug-3.0.4-py3-none-any.whl - pypi: . packages: -- kind: conda - name: _libgcc_mutex - version: '0.1' - build: conda_forge - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2 +- conda: https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2 sha256: fe51de6107f9edc7aa4f786a70f4a883943bc9d39b3bb7307c04c41410990726 md5: d7c89558ba9fa0495403155b64376d81 license: None purls: [] size: 2562 timestamp: 1578324546067 -- kind: conda - name: _openmp_mutex - version: '4.5' - build: 2_gnu +- conda: https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2 build_number: 16 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2 sha256: fbe2c5e56a653bebb982eda4876a9178aedfc2b545f25d0ce9c4c0b508253d22 md5: 73aaf86a425cc6e73fcf236a5a46396d depends: @@ -276,37 +274,18 @@ packages: purls: [] size: 23621 timestamp: 1650670423406 -- kind: pypi - name: blinker - version: 1.8.2 - url: https://files.pythonhosted.org/packages/bb/2a/10164ed1f31196a2f7f3799368a821765c62851ead0e630ab52b8e14b4d0/blinker-1.8.2-py3-none-any.whl - sha256: 1779309f71bf239144b9399d06ae925637cf6634cf6bd131104184531bf67c01 - requires_python: '>=3.8' -- kind: conda - name: bzip2 - version: 1.0.8 - build: h2466b09_7 - build_number: 7 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/bzip2-1.0.8-h2466b09_7.conda - sha256: 35a5dad92e88fdd7fc405e864ec239486f4f31eec229e31686e61a140a8e573b - md5: 276e7ffe9ffe39688abc665ef0f45596 +- conda: https://conda.anaconda.org/conda-forge/noarch/blinker-1.9.0-pyhff2d567_0.conda + sha256: f7efd22b5c15b400ed84a996d777b6327e5c402e79e3c534a7e086236f1eb2dc + md5: 42834439227a4551b939beeeb8a4b085 depends: - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: bzip2-1.0.6 - license_family: BSD - purls: [] - size: 54927 - timestamp: 1720974860185 -- kind: conda - name: bzip2 - version: 1.0.8 - build: h4bc722e_7 - build_number: 7 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h4bc722e_7.conda + - python >=3.9 + license: MIT + license_family: MIT + purls: + - pkg:pypi/blinker?source=hash-mapping + size: 13934 + timestamp: 1731096548765 +- conda: https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h4bc722e_7.conda sha256: 5ced96500d945fb286c9c838e54fa759aa04a7129c59800f0846b4335cee770d md5: 62ee74e96c5ebb0af99386de58cf9553 depends: @@ -317,13 +296,17 @@ packages: purls: [] size: 252783 timestamp: 1720974456583 -- kind: conda - name: bzip2 - version: 1.0.8 - build: h99b78c6_7 - build_number: 7 - subdir: osx-arm64 - url: 
https://conda.anaconda.org/conda-forge/osx-arm64/bzip2-1.0.8-h99b78c6_7.conda +- conda: https://conda.anaconda.org/conda-forge/osx-64/bzip2-1.0.8-hfdf4475_7.conda + sha256: cad153608b81fb24fc8c509357daa9ae4e49dfc535b2cb49b91e23dbd68fc3c5 + md5: 7ed4301d437b59045be7e051a0308211 + depends: + - __osx >=10.13 + license: bzip2-1.0.6 + license_family: BSD + purls: [] + size: 134188 + timestamp: 1720974491916 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/bzip2-1.0.8-h99b78c6_7.conda sha256: adfa71f158cbd872a36394c56c3568e6034aa55c623634b37a4836bd036e6b91 md5: fc6948412dbbbe9a4c9ddbbcfe0a79ab depends: @@ -333,137 +316,148 @@ packages: purls: [] size: 122909 timestamp: 1720974522888 -- kind: conda - name: bzip2 - version: 1.0.8 - build: hfdf4475_7 - build_number: 7 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/bzip2-1.0.8-hfdf4475_7.conda - sha256: cad153608b81fb24fc8c509357daa9ae4e49dfc535b2cb49b91e23dbd68fc3c5 - md5: 7ed4301d437b59045be7e051a0308211 +- conda: https://conda.anaconda.org/conda-forge/win-64/bzip2-1.0.8-h2466b09_7.conda + sha256: 35a5dad92e88fdd7fc405e864ec239486f4f31eec229e31686e61a140a8e573b + md5: 276e7ffe9ffe39688abc665ef0f45596 depends: - - __osx >=10.13 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 license: bzip2-1.0.6 license_family: BSD purls: [] - size: 134188 - timestamp: 1720974491916 -- kind: conda - name: ca-certificates - version: 2024.7.4 - build: h56e8100_0 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/ca-certificates-2024.7.4-h56e8100_0.conda - sha256: 7f37bb33c7954de1b4d19ad622859feb4f6c58f751c38b895524cad4e44af72e - md5: 9caa97c9504072cd060cf0a3142cc0ed + size: 54927 + timestamp: 1720974860185 +- conda: https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2024.7.4-hbcca054_0.conda + sha256: c1548a3235376f464f9931850b64b02492f379b2f2bb98bc786055329b080446 + md5: 23ab7665c5f63cfb9f1f6195256daac6 license: ISC purls: [] - size: 154943 - timestamp: 1720077592592 -- kind: conda - name: ca-certificates - version: 2024.7.4 - build: h8857fd0_0 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/ca-certificates-2024.7.4-h8857fd0_0.conda + size: 154853 + timestamp: 1720077432978 +- conda: https://conda.anaconda.org/conda-forge/osx-64/ca-certificates-2024.7.4-h8857fd0_0.conda sha256: d16f46c489cb3192305c7d25b795333c5fc17bb0986de20598ed519f8c9cc9e4 md5: 7df874a4b05b2d2b82826190170eaa0f license: ISC purls: [] size: 154473 timestamp: 1720077510541 -- kind: conda - name: ca-certificates - version: 2024.7.4 - build: hbcca054_0 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2024.7.4-hbcca054_0.conda - sha256: c1548a3235376f464f9931850b64b02492f379b2f2bb98bc786055329b080446 - md5: 23ab7665c5f63cfb9f1f6195256daac6 - license: ISC - purls: [] - size: 154853 - timestamp: 1720077432978 -- kind: conda - name: ca-certificates - version: 2024.7.4 - build: hf0a4a13_0 - subdir: osx-arm64 - url: https://conda.anaconda.org/conda-forge/osx-arm64/ca-certificates-2024.7.4-hf0a4a13_0.conda +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/ca-certificates-2024.7.4-hf0a4a13_0.conda sha256: 33a61116dae7f369b6ce92a7f2a1ff361ae737c675a493b11feb5570b89e0e3b md5: 21f9a33e5fe996189e470c19c5354dbe license: ISC purls: [] size: 154517 timestamp: 1720077468981 -- kind: pypi - name: click - version: 8.1.7 - url: https://files.pythonhosted.org/packages/00/2e/d53fa4befbf2cfa713304affc7ca780ce4fc1fd8710527771b58311a3229/click-8.1.7-py3-none-any.whl 
- sha256: ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28 - requires_dist: - - colorama ; platform_system == 'Windows' - - importlib-metadata ; python_full_version < '3.8' - requires_python: '>=3.7' -- kind: pypi - name: colorama - version: 0.4.6 - url: https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl - sha256: 4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6 - requires_python: '>=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*' -- kind: pypi - name: flask - version: 2.3.3 - url: https://files.pythonhosted.org/packages/fd/56/26f0be8adc2b4257df20c1c4260ddd0aa396cf8e75d90ab2f7ff99bc34f9/flask-2.3.3-py3-none-any.whl - sha256: f69fcd559dc907ed196ab9df0e48471709175e696d6e698dd4dbe940f96ce66b - requires_dist: - - werkzeug>=2.3.7 - - jinja2>=3.1.2 - - itsdangerous>=2.1.2 - - click>=8.1.3 - - blinker>=1.6.2 - - importlib-metadata>=3.6.0 ; python_full_version < '3.10' - - asgiref>=3.2 ; extra == 'async' - - python-dotenv ; extra == 'dotenv' - requires_python: '>=3.8' -- kind: pypi +- conda: https://conda.anaconda.org/conda-forge/win-64/ca-certificates-2024.7.4-h56e8100_0.conda + sha256: 7f37bb33c7954de1b4d19ad622859feb4f6c58f751c38b895524cad4e44af72e + md5: 9caa97c9504072cd060cf0a3142cc0ed + license: ISC + purls: [] + size: 154943 + timestamp: 1720077592592 +- conda: https://conda.anaconda.org/conda-forge/noarch/click-8.1.7-unix_pyh707e725_0.conda + sha256: f0016cbab6ac4138a429e28dbcb904a90305b34b3fe41a9b89d697c90401caec + md5: f3ad426304898027fc619827ff428eca + depends: + - __unix + - python >=3.8 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/click?source=hash-mapping + size: 84437 + timestamp: 1692311973840 +- conda: https://conda.anaconda.org/conda-forge/noarch/click-8.1.7-win_pyh7428d3b_0.conda + sha256: 90236b113b9a20041736e80b80ee965167f9aac0468315c55e2bad902d673fb0 + md5: 3549ecbceb6cd77b91a105511b7d0786 + depends: + - __win + - colorama + - python >=3.8 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/click?source=hash-mapping + size: 85051 + timestamp: 1692312207348 +- conda: https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_0.tar.bz2 + sha256: 2c1b2e9755ce3102bca8d69e8f26e4f087ece73f50418186aee7c74bef8e1698 + md5: 3faab06a954c2a04039983f2c4a50d99 + depends: + - python >=3.7 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/colorama?source=hash-mapping + size: 25170 + timestamp: 1666700778190 +- conda: https://conda.anaconda.org/conda-forge/noarch/flask-2.3.3-pyhd8ed1ab_0.conda + sha256: 4f84ffdc5471236e8225db86c7508426b46aa2c3802d58ca40b3c3e174533b39 + md5: 9b0d29067484a8dfacfae85b8fba81bc + depends: + - blinker >=1.6.2 + - click >=8.1.3 + - importlib-metadata >=3.6.0 + - itsdangerous >=2.1.2 + - jinja2 >=3.1.2 + - python >=3.8 + - werkzeug >=2.3.7 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/flask?source=hash-mapping + size: 79782 + timestamp: 1692686247131 +- pypi: . name: flask-hello-world-pyproject version: 0.1.0 - path: . 
- sha256: 6033690dfa1adac9b808662d775d1d69ba25b676af8e5cb14387fe47ad97a83f + sha256: 514e1ba6ab77ad2f19109ad1a798b92a9437a67eb6d89ff8f5e76e0a39959df2 requires_dist: - flask==2.* requires_python: '>=3.11' editable: true -- kind: pypi +- conda: https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.5.0-pyha770c72_0.conda + sha256: 7194700ce1a5ad2621fd68e894dd8c1ceaff9a38723e6e0e5298fdef13017b1c + md5: 54198435fce4d64d8a89af22573012a8 + depends: + - python >=3.8 + - zipp >=0.5 + license: Apache-2.0 + license_family: APACHE + purls: + - pkg:pypi/importlib-metadata?source=hash-mapping + size: 28646 + timestamp: 1726082927916 +- pypi: https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl name: iniconfig version: 2.0.0 - url: https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl sha256: b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374 requires_python: '>=3.7' -- kind: pypi - name: itsdangerous - version: 2.2.0 - url: https://files.pythonhosted.org/packages/04/96/92447566d16df59b2a776c0fb82dbc4d9e07cd95062562af01e408583fc4/itsdangerous-2.2.0-py3-none-any.whl - sha256: c6242fc49e35958c8b15141343aa660db5fc54d4f13a1db01a3f5891b98700ef - requires_python: '>=3.8' -- kind: pypi - name: jinja2 - version: 3.1.4 - url: https://files.pythonhosted.org/packages/31/80/3a54838c3fb461f6fec263ebf3a3a41771bd05190238de3486aae8540c36/jinja2-3.1.4-py3-none-any.whl - sha256: bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d - requires_dist: - - markupsafe>=2.0 - - babel>=2.7 ; extra == 'i18n' - requires_python: '>=3.7' -- kind: conda - name: ld_impl_linux-64 - version: '2.40' - build: hf3520f5_7 - build_number: 7 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.40-hf3520f5_7.conda +- conda: https://conda.anaconda.org/conda-forge/noarch/itsdangerous-2.2.0-pyhd8ed1ab_0.conda + sha256: 4e933e36e9b0401b62ea8fd63393827ebeb4250de77a56687afb387d504523c5 + md5: ff7ca04134ee8dde1d7cf491a78ef7c7 + depends: + - python >=3.8 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/itsdangerous?source=hash-mapping + size: 19333 + timestamp: 1713372766463 +- conda: https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.4-pyhd8ed1ab_0.conda + sha256: 27380d870d42d00350d2d52598cddaf02f9505fb24be09488da0c9b8d1428f2d + md5: 7b86ecb7d3557821c649b3c31e3eb9f2 + depends: + - markupsafe >=2.0 + - python >=3.7 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/jinja2?source=hash-mapping + size: 111565 + timestamp: 1715127275924 +- conda: https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.40-hf3520f5_7.conda sha256: 764b6950aceaaad0c67ef925417594dd14cd2e22fff864aeef455ac259263d15 md5: b80f2f396ca2c28b8c14c437a4ed1e74 constrains: @@ -473,12 +467,7 @@ packages: purls: [] size: 707602 timestamp: 1718625640445 -- kind: conda - name: libexpat - version: 2.6.2 - build: h59595ed_0 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.6.2-h59595ed_0.conda +- conda: https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.6.2-h59595ed_0.conda sha256: 331bb7c7c05025343ebd79f86ae612b9e1e74d2687b8f3179faec234f986ce19 md5: e7ba12deb7020dd080c6c70e7b6f6a3d depends: @@ -490,27 +479,7 @@ packages: purls: [] size: 73730 timestamp: 1710362120304 -- kind: conda - name: libexpat - version: 2.6.2 - build: h63175ca_0 - subdir: 
win-64 - url: https://conda.anaconda.org/conda-forge/win-64/libexpat-2.6.2-h63175ca_0.conda - sha256: 79f612f75108f3e16bbdc127d4885bb74729cf66a8702fca0373dad89d40c4b7 - md5: bc592d03f62779511d392c175dcece64 - constrains: - - expat 2.6.2.* - license: MIT - license_family: MIT - purls: [] - size: 139224 - timestamp: 1710362609641 -- kind: conda - name: libexpat - version: 2.6.2 - build: h73e2aa4_0 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libexpat-2.6.2-h73e2aa4_0.conda +- conda: https://conda.anaconda.org/conda-forge/osx-64/libexpat-2.6.2-h73e2aa4_0.conda sha256: a188a77b275d61159a32ab547f7d17892226e7dac4518d2c6ac3ac8fc8dfde92 md5: 3d1d51c8f716d97c864d12f7af329526 constrains: @@ -520,12 +489,7 @@ packages: purls: [] size: 69246 timestamp: 1710362566073 -- kind: conda - name: libexpat - version: 2.6.2 - build: hebf3989_0 - subdir: osx-arm64 - url: https://conda.anaconda.org/conda-forge/osx-arm64/libexpat-2.6.2-hebf3989_0.conda +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libexpat-2.6.2-hebf3989_0.conda sha256: ba7173ac30064ea901a4c9fb5a51846dcc25512ceb565759be7d18cbf3e5415e md5: e3cde7cfa87f82f7cb13d482d5e0ad09 constrains: @@ -535,13 +499,27 @@ packages: purls: [] size: 63655 timestamp: 1710362424980 -- kind: conda - name: libffi - version: 3.4.2 - build: h0d85af4_5 - build_number: 5 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libffi-3.4.2-h0d85af4_5.tar.bz2 +- conda: https://conda.anaconda.org/conda-forge/win-64/libexpat-2.6.2-h63175ca_0.conda + sha256: 79f612f75108f3e16bbdc127d4885bb74729cf66a8702fca0373dad89d40c4b7 + md5: bc592d03f62779511d392c175dcece64 + constrains: + - expat 2.6.2.* + license: MIT + license_family: MIT + purls: [] + size: 139224 + timestamp: 1710362609641 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2 + sha256: ab6e9856c21709b7b517e940ae7028ae0737546122f83c2aa5d692860c3b149e + md5: d645c6d2ac96843a2bfaccd2d62b3ac3 + depends: + - libgcc-ng >=9.4.0 + license: MIT + license_family: MIT + purls: [] + size: 58292 + timestamp: 1636488182923 +- conda: https://conda.anaconda.org/conda-forge/osx-64/libffi-3.4.2-h0d85af4_5.tar.bz2 sha256: 7a2d27a936ceee6942ea4d397f9c7d136f12549d86f7617e8b6bad51e01a941f md5: ccb34fb14960ad8b125962d3d79b31a9 license: MIT @@ -549,13 +527,7 @@ packages: purls: [] size: 51348 timestamp: 1636488394370 -- kind: conda - name: libffi - version: 3.4.2 - build: h3422bc3_5 - build_number: 5 - subdir: osx-arm64 - url: https://conda.anaconda.org/conda-forge/osx-arm64/libffi-3.4.2-h3422bc3_5.tar.bz2 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libffi-3.4.2-h3422bc3_5.tar.bz2 sha256: 41b3d13efb775e340e4dba549ab5c029611ea6918703096b2eaa9c015c0750ca md5: 086914b672be056eb70fd4285b6783b6 license: MIT @@ -563,29 +535,7 @@ packages: purls: [] size: 39020 timestamp: 1636488587153 -- kind: conda - name: libffi - version: 3.4.2 - build: h7f98852_5 - build_number: 5 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2 - sha256: ab6e9856c21709b7b517e940ae7028ae0737546122f83c2aa5d692860c3b149e - md5: d645c6d2ac96843a2bfaccd2d62b3ac3 - depends: - - libgcc-ng >=9.4.0 - license: MIT - license_family: MIT - purls: [] - size: 58292 - timestamp: 1636488182923 -- kind: conda - name: libffi - version: 3.4.2 - build: h8ffe710_5 - build_number: 5 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/libffi-3.4.2-h8ffe710_5.tar.bz2 +- conda: 
https://conda.anaconda.org/conda-forge/win-64/libffi-3.4.2-h8ffe710_5.tar.bz2 sha256: 1951ab740f80660e9bc07d2ed3aefb874d78c107264fd810f24a1a6211d4b1a5 md5: 2c96d1b6915b408893f9472569dee135 depends: @@ -596,13 +546,7 @@ packages: purls: [] size: 42063 timestamp: 1636489106777 -- kind: conda - name: libgcc - version: 14.1.0 - build: h77fa898_1 - build_number: 1 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libgcc-14.1.0-h77fa898_1.conda +- conda: https://conda.anaconda.org/conda-forge/linux-64/libgcc-14.1.0-h77fa898_1.conda sha256: 10fa74b69266a2be7b96db881e18fa62cfa03082b65231e8d652e897c4b335a3 md5: 002ef4463dd1e2b44a94a4ace468f5d2 depends: @@ -616,13 +560,7 @@ packages: purls: [] size: 846380 timestamp: 1724801836552 -- kind: conda - name: libgcc-ng - version: 14.1.0 - build: h69a702a_1 - build_number: 1 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-14.1.0-h69a702a_1.conda +- conda: https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-14.1.0-h69a702a_1.conda sha256: b91f7021e14c3d5c840fbf0dc75370d6e1f7c7ff4482220940eaafb9c64613b7 md5: 1efc0ad219877a73ef977af7dbb51f17 depends: @@ -632,13 +570,7 @@ packages: purls: [] size: 52170 timestamp: 1724801842101 -- kind: conda - name: libgomp - version: 14.1.0 - build: h77fa898_1 - build_number: 1 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libgomp-14.1.0-h77fa898_1.conda +- conda: https://conda.anaconda.org/conda-forge/linux-64/libgomp-14.1.0-h77fa898_1.conda sha256: c96724c8ae4ee61af7674c5d9e5a3fbcf6cd887a40ad5a52c99aa36f1d4f9680 md5: 23c255b008c4f2ae008f81edcabaca89 depends: @@ -648,12 +580,7 @@ packages: purls: [] size: 460218 timestamp: 1724801743478 -- kind: conda - name: libnsl - version: 2.0.1 - build: hd590300_0 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hd590300_0.conda +- conda: https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hd590300_0.conda sha256: 26d77a3bb4dceeedc2a41bd688564fe71bf2d149fdcf117049970bc02ff1add6 md5: 30fd6e37fe21f86f4bd26d6ee73eeec7 depends: @@ -663,12 +590,17 @@ packages: purls: [] size: 33408 timestamp: 1697359010159 -- kind: conda - name: libsqlite - version: 3.46.0 - build: h1b8f9f3_0 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libsqlite-3.46.0-h1b8f9f3_0.conda +- conda: https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.46.0-hde9e2c9_0.conda + sha256: daee3f68786231dad457d0dfde3f7f1f9a7f2018adabdbb864226775101341a8 + md5: 18aa975d2094c34aef978060ae7da7d8 + depends: + - libgcc-ng >=12 + - libzlib >=1.2.13,<2.0a0 + license: Unlicense + purls: [] + size: 865346 + timestamp: 1718050628718 +- conda: https://conda.anaconda.org/conda-forge/osx-64/libsqlite-3.46.0-h1b8f9f3_0.conda sha256: 63af1a9e3284c7e4952364bafe7267e41e2d9d8bcc0e85a4ea4b0ec02d3693f6 md5: 5dadfbc1a567fe6e475df4ce3148be09 depends: @@ -678,12 +610,17 @@ packages: purls: [] size: 908643 timestamp: 1718050720117 -- kind: conda - name: libsqlite - version: 3.46.0 - build: h2466b09_0 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/libsqlite-3.46.0-h2466b09_0.conda +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libsqlite-3.46.0-hfb93653_0.conda + sha256: 73048f9cb8647d3d3bfe6021c0b7d663e12cffbe9b4f31bd081e713b0a9ad8f9 + md5: 12300188028c9bc02da965128b91b517 + depends: + - __osx >=11.0 + - libzlib >=1.2.13,<2.0a0 + license: Unlicense + purls: [] + size: 830198 + timestamp: 1718050644825 +- conda: 
https://conda.anaconda.org/conda-forge/win-64/libsqlite-3.46.0-h2466b09_0.conda sha256: 662bd7e0d63c5b8c31cca19b91649e798319b93568a2ba8d1375efb91eeb251b md5: 951b0a3a463932e17414cd9f047fa03d depends: @@ -694,42 +631,7 @@ packages: purls: [] size: 876677 timestamp: 1718051113874 -- kind: conda - name: libsqlite - version: 3.46.0 - build: hde9e2c9_0 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.46.0-hde9e2c9_0.conda - sha256: daee3f68786231dad457d0dfde3f7f1f9a7f2018adabdbb864226775101341a8 - md5: 18aa975d2094c34aef978060ae7da7d8 - depends: - - libgcc-ng >=12 - - libzlib >=1.2.13,<2.0a0 - license: Unlicense - purls: [] - size: 865346 - timestamp: 1718050628718 -- kind: conda - name: libsqlite - version: 3.46.0 - build: hfb93653_0 - subdir: osx-arm64 - url: https://conda.anaconda.org/conda-forge/osx-arm64/libsqlite-3.46.0-hfb93653_0.conda - sha256: 73048f9cb8647d3d3bfe6021c0b7d663e12cffbe9b4f31bd081e713b0a9ad8f9 - md5: 12300188028c9bc02da965128b91b517 - depends: - - __osx >=11.0 - - libzlib >=1.2.13,<2.0a0 - license: Unlicense - purls: [] - size: 830198 - timestamp: 1718050644825 -- kind: conda - name: libuuid - version: 2.38.1 - build: h0b41bf4_0 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.38.1-h0b41bf4_0.conda +- conda: https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.38.1-h0b41bf4_0.conda sha256: 787eb542f055a2b3de553614b25f09eefb0a0931b0c87dbcce6efdfd92f04f18 md5: 40b61aab5c7ba9ff276c41cfffe6b80b depends: @@ -739,13 +641,7 @@ packages: purls: [] size: 33601 timestamp: 1680112270483 -- kind: conda - name: libxcrypt - version: 4.4.36 - build: hd590300_1 - build_number: 1 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libxcrypt-4.4.36-hd590300_1.conda +- conda: https://conda.anaconda.org/conda-forge/linux-64/libxcrypt-4.4.36-hd590300_1.conda sha256: 6ae68e0b86423ef188196fff6207ed0c8195dd84273cb5623b85aa08033a410c md5: 5aa797f8787fe7a17d1b0821485b5adc depends: @@ -754,33 +650,7 @@ packages: purls: [] size: 100393 timestamp: 1702724383534 -- kind: conda - name: libzlib - version: 1.3.1 - build: h2466b09_1 - build_number: 1 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/libzlib-1.3.1-h2466b09_1.conda - sha256: b13846a54a15243e15f96fec06b526d8155adc6a1ac2b6ed47a88f6a71a94b68 - md5: d4483ca8afc57ddf1f6dded53b36c17f - depends: - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - constrains: - - zlib 1.3.1 *_1 - license: Zlib - license_family: Other - purls: [] - size: 56186 - timestamp: 1716874730539 -- kind: conda - name: libzlib - version: 1.3.1 - build: h4ab18f5_1 - build_number: 1 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.3.1-h4ab18f5_1.conda +- conda: https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.3.1-h4ab18f5_1.conda sha256: adf6096f98b537a11ae3729eaa642b0811478f0ea0402ca67b5108fe2cb0010d md5: 57d7dc60e9325e3de37ff8dffd18e814 depends: @@ -792,13 +662,7 @@ packages: purls: [] size: 61574 timestamp: 1716874187109 -- kind: conda - name: libzlib - version: 1.3.1 - build: h87427d6_1 - build_number: 1 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libzlib-1.3.1-h87427d6_1.conda +- conda: https://conda.anaconda.org/conda-forge/osx-64/libzlib-1.3.1-h87427d6_1.conda sha256: 80a62db652b1da0ccc100812a1d86e94f75028968991bfb17f9536f3aa72d91d md5: b7575b5aa92108dcc9aaab0f05f2dbce depends: @@ -810,13 +674,7 @@ packages: purls: [] size: 57372 timestamp: 1716874211519 -- kind: 
conda - name: libzlib - version: 1.3.1 - build: hfb2fe0b_1 - build_number: 1 - subdir: osx-arm64 - url: https://conda.anaconda.org/conda-forge/osx-arm64/libzlib-1.3.1-hfb2fe0b_1.conda +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libzlib-1.3.1-hfb2fe0b_1.conda sha256: c34365dd37b0eab27b9693af32a1f7f284955517c2cc91f1b88a7ef4738ff03e md5: 636077128927cf79fd933276dc3aed47 depends: @@ -828,52 +686,93 @@ packages: purls: [] size: 46921 timestamp: 1716874262512 -- kind: pypi - name: markupsafe - version: 2.1.5 - url: https://files.pythonhosted.org/packages/0a/0d/2454f072fae3b5a137c119abf15465d1771319dfe9e4acbb31722a0fff91/MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl - sha256: f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5 - requires_python: '>=3.7' -- kind: pypi - name: markupsafe - version: 2.1.5 - url: https://files.pythonhosted.org/packages/3f/14/c3554d512d5f9100a95e737502f4a2323a1959f6d0d01e0d0997b35f7b10/MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl - sha256: 823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb - requires_python: '>=3.7' -- kind: pypi - name: markupsafe - version: 2.1.5 - url: https://files.pythonhosted.org/packages/48/d6/e7cd795fc710292c3af3a06d80868ce4b02bfbbf370b7cee11d282815a2a/MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl - sha256: 3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4 - requires_python: '>=3.7' -- kind: pypi - name: markupsafe - version: 2.1.5 - url: https://files.pythonhosted.org/packages/53/bd/583bf3e4c8d6a321938c13f49d44024dbe5ed63e0a7ba127e454a66da974/MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl - sha256: 8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1 - requires_python: '>=3.7' -- kind: conda - name: ncurses - version: '6.5' - build: h7bae524_1 - build_number: 1 - subdir: osx-arm64 - url: https://conda.anaconda.org/conda-forge/osx-arm64/ncurses-6.5-h7bae524_1.conda - sha256: 27d0b9ff78ad46e1f3a6c96c479ab44beda5f96def88e2fe626e0a49429d8afc - md5: cb2b0ea909b97b3d70cd3921d1445e1a +- conda: https://conda.anaconda.org/conda-forge/win-64/libzlib-1.3.1-h2466b09_1.conda + sha256: b13846a54a15243e15f96fec06b526d8155adc6a1ac2b6ed47a88f6a71a94b68 + md5: d4483ca8afc57ddf1f6dded53b36c17f depends: - - __osx >=11.0 - license: X11 AND BSD-3-Clause + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 + constrains: + - zlib 1.3.1 *_1 + license: Zlib + license_family: Other purls: [] - size: 802321 - timestamp: 1724658775723 -- kind: conda - name: ncurses - version: '6.5' - build: he02047a_1 - build_number: 1 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.5-he02047a_1.conda + size: 56186 + timestamp: 1716874730539 +- conda: https://conda.anaconda.org/conda-forge/linux-64/markupsafe-3.0.2-py312h178313f_0.conda + sha256: 15f14ab429c846aacd47fada0dc4f341d64491e097782830f0906d00cb7b48b6 + md5: a755704ea0e2503f8c227d84829a8e81 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 + - python >=3.12,<3.13.0a0 + - python_abi 3.12.* *_cp312 + constrains: + - jinja2 >=3.0.0 + arch: x86_64 + platform: linux + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/markupsafe?source=hash-mapping + size: 24878 + timestamp: 1729351558563 +- conda: https://conda.anaconda.org/conda-forge/osx-64/markupsafe-3.0.2-py312hbe3f5e4_0.conda + sha256: b2fb54718159055fdf89da7d9f0c6743ef84b31960617a56810920d17616d944 + md5: c6238833d7dc908ec295bc490b80d845 + depends: + - __osx >=10.13 + - python >=3.12,<3.13.0a0 
+ - python_abi 3.12.* *_cp312 + constrains: + - jinja2 >=3.0.0 + arch: x86_64 + platform: osx + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/markupsafe?source=hash-mapping + size: 23889 + timestamp: 1729351468966 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/markupsafe-3.0.2-py312ha0ccf2a_0.conda + sha256: 360e958055f35e5087942b9c499eaafae984a951b84cf354ef7481a2806f340d + md5: c6ff9f291d011c9d4f0b840f49435c64 + depends: + - __osx >=11.0 + - python >=3.12,<3.13.0a0 + - python >=3.12,<3.13.0a0 *_cpython + - python_abi 3.12.* *_cp312 + constrains: + - jinja2 >=3.0.0 + arch: arm64 + platform: osx + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/markupsafe?source=hash-mapping + size: 24495 + timestamp: 1729351534830 +- conda: https://conda.anaconda.org/conda-forge/win-64/markupsafe-3.0.2-py312h31fea79_0.conda + sha256: eb0f3768890291f2d5fb666ab31b32b37a821e4a30968c6b3cd332472957abe7 + md5: e2ff001440760f2cbac24765d8a3d84a + depends: + - python >=3.12,<3.13.0a0 + - python_abi 3.12.* *_cp312 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 + constrains: + - jinja2 >=3.0.0 + arch: x86_64 + platform: win + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/markupsafe?source=hash-mapping + size: 27358 + timestamp: 1729351504449 +- conda: https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.5-he02047a_1.conda sha256: 6a1d5d8634c1a07913f1c525db6455918cbc589d745fac46d9d6e30340c8731a md5: 70caf8bb6cf39a0b6b7efc885f51c0fe depends: @@ -883,13 +782,7 @@ packages: purls: [] size: 889086 timestamp: 1724658547447 -- kind: conda - name: ncurses - version: '6.5' - build: hf036a51_1 - build_number: 1 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/ncurses-6.5-hf036a51_1.conda +- conda: https://conda.anaconda.org/conda-forge/osx-64/ncurses-6.5-hf036a51_1.conda sha256: b0b3180039ef19502525a2abd5833c00f9624af830fd391f851934d57bffb9af md5: e102bbf8a6ceeaf429deab8032fc8977 depends: @@ -898,49 +791,16 @@ packages: purls: [] size: 822066 timestamp: 1724658603042 -- kind: conda - name: openssl - version: 3.3.1 - build: h2466b09_3 - build_number: 3 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/openssl-3.3.1-h2466b09_3.conda - sha256: 76a10564ca450f56495cff06bf60bdf0fe42e6ef7a20469276894d4ac7c0140a - md5: c6ebd3a1a2b393e040ca71c9f9ef8d97 - depends: - - ca-certificates - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 - license: Apache-2.0 - license_family: Apache - purls: [] - size: 8362062 - timestamp: 1724404916759 -- kind: conda - name: openssl - version: 3.3.1 - build: h8359307_3 - build_number: 3 - subdir: osx-arm64 - url: https://conda.anaconda.org/conda-forge/osx-arm64/openssl-3.3.1-h8359307_3.conda - sha256: 9dd1ee7a8c21ff4fcbb98e9d0be0e83e5daf8a555c73589ad9e3046966b72e5e - md5: 644904d696d83c0ac78d594e0cf09f66 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/ncurses-6.5-h7bae524_1.conda + sha256: 27d0b9ff78ad46e1f3a6c96c479ab44beda5f96def88e2fe626e0a49429d8afc + md5: cb2b0ea909b97b3d70cd3921d1445e1a depends: - __osx >=11.0 - - ca-certificates - license: Apache-2.0 - license_family: Apache + license: X11 AND BSD-3-Clause purls: [] - size: 2888820 - timestamp: 1724402552318 -- kind: conda - name: openssl - version: 3.3.1 - build: hb9d3cd8_3 - build_number: 3 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/openssl-3.3.1-hb9d3cd8_3.conda + size: 802321 + timestamp: 1724658775723 +- conda: 
https://conda.anaconda.org/conda-forge/linux-64/openssl-3.3.1-hb9d3cd8_3.conda sha256: 9e27441b273a7cf9071f6e88ba9ad565d926d8083b154c64a74b99fba167b137 md5: 6c566a46baae794daf34775d41eb180a depends: @@ -952,13 +812,7 @@ packages: purls: [] size: 2892042 timestamp: 1724402701933 -- kind: conda - name: openssl - version: 3.3.1 - build: hd23fc13_3 - build_number: 3 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/openssl-3.3.1-hd23fc13_3.conda +- conda: https://conda.anaconda.org/conda-forge/osx-64/openssl-3.3.1-hd23fc13_3.conda sha256: 63921822fbb66337e0fd50b2a07412583fbe7783bc92c663bdf93c9a09026fdc md5: ad8c8c9556a701817bd1aca75a302e96 depends: @@ -969,16 +823,38 @@ packages: purls: [] size: 2549881 timestamp: 1724403015051 -- kind: pypi +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/openssl-3.3.1-h8359307_3.conda + sha256: 9dd1ee7a8c21ff4fcbb98e9d0be0e83e5daf8a555c73589ad9e3046966b72e5e + md5: 644904d696d83c0ac78d594e0cf09f66 + depends: + - __osx >=11.0 + - ca-certificates + license: Apache-2.0 + license_family: Apache + purls: [] + size: 2888820 + timestamp: 1724402552318 +- conda: https://conda.anaconda.org/conda-forge/win-64/openssl-3.3.1-h2466b09_3.conda + sha256: 76a10564ca450f56495cff06bf60bdf0fe42e6ef7a20469276894d4ac7c0140a + md5: c6ebd3a1a2b393e040ca71c9f9ef8d97 + depends: + - ca-certificates + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 + license: Apache-2.0 + license_family: Apache + purls: [] + size: 8362062 + timestamp: 1724404916759 +- pypi: https://files.pythonhosted.org/packages/88/ef/eb23f262cca3c0c4eb7ab1933c3b1f03d021f2c48f54763065b6f0e321be/packaging-24.2-py3-none-any.whl name: packaging version: '24.2' - url: https://files.pythonhosted.org/packages/88/ef/eb23f262cca3c0c4eb7ab1933c3b1f03d021f2c48f54763065b6f0e321be/packaging-24.2-py3-none-any.whl sha256: 09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759 requires_python: '>=3.8' -- kind: pypi +- pypi: https://files.pythonhosted.org/packages/88/5f/e351af9a41f866ac3f1fac4ca0613908d9a41741cfcf2228f4ad853b697d/pluggy-1.5.0-py3-none-any.whl name: pluggy version: 1.5.0 - url: https://files.pythonhosted.org/packages/88/5f/e351af9a41f866ac3f1fac4ca0613908d9a41741cfcf2228f4ad853b697d/pluggy-1.5.0-py3-none-any.whl sha256: 44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669 requires_dist: - pre-commit ; extra == 'dev' @@ -986,10 +862,9 @@ packages: - pytest ; extra == 'testing' - pytest-benchmark ; extra == 'testing' requires_python: '>=3.8' -- kind: pypi +- pypi: https://files.pythonhosted.org/packages/6b/77/7440a06a8ead44c7757a64362dd22df5760f9b12dc5f11b6188cd2fc27a0/pytest-8.3.3-py3-none-any.whl name: pytest version: 8.3.3 - url: https://files.pythonhosted.org/packages/6b/77/7440a06a8ead44c7757a64362dd22df5760f9b12dc5f11b6188cd2fc27a0/pytest-8.3.3-py3-none-any.whl sha256: a6853c7375b2663155079443d2e45de913a911a11d669df02a50814944db57b2 requires_dist: - iniconfig @@ -1007,12 +882,7 @@ packages: - setuptools ; extra == 'dev' - xmlschema ; extra == 'dev' requires_python: '>=3.8' -- kind: conda - name: python - version: 3.12.5 - build: h2ad013b_0_cpython - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/python-3.12.5-h2ad013b_0_cpython.conda +- conda: https://conda.anaconda.org/conda-forge/linux-64/python-3.12.5-h2ad013b_0_cpython.conda sha256: e2aad83838988725d4ffba4e9717b9328054fd18a668cff3377e0c50f109e8bd md5: 9c56c4df45f6571b13111d8df2448692 depends: @@ -1039,16 +909,11 @@ packages: purls: [] size: 31663253 timestamp: 
1723143721353 -- kind: conda - name: python - version: 3.12.5 - build: h30c5eda_0_cpython - subdir: osx-arm64 - url: https://conda.anaconda.org/conda-forge/osx-arm64/python-3.12.5-h30c5eda_0_cpython.conda - sha256: 1319e918fb54c9491832a9731cad00235a76f61c6f9b23fc0f70cdfb74c950ea - md5: 5e315581e2948dfe3bcac306540e9803 +- conda: https://conda.anaconda.org/conda-forge/osx-64/python-3.12.5-h37a9e06_0_cpython.conda + sha256: c0f39e625b2fd65f70a9cc086fe4b25cc72228453dbbcd92cd5d140d080e38c5 + md5: 517cb4e16466f8d96ba2a72897d14c48 depends: - - __osx >=11.0 + - __osx >=10.13 - bzip2 >=1.0.8,<2.0a0 - libexpat >=2.6.2,<3.0a0 - libffi >=3.4,<4.0a0 @@ -1064,18 +929,13 @@ packages: - python_abi 3.12.* *_cp312 license: Python-2.0 purls: [] - size: 12926356 - timestamp: 1723142203193 -- kind: conda - name: python - version: 3.12.5 - build: h37a9e06_0_cpython - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/python-3.12.5-h37a9e06_0_cpython.conda - sha256: c0f39e625b2fd65f70a9cc086fe4b25cc72228453dbbcd92cd5d140d080e38c5 - md5: 517cb4e16466f8d96ba2a72897d14c48 + size: 12173272 + timestamp: 1723142761765 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/python-3.12.5-h30c5eda_0_cpython.conda + sha256: 1319e918fb54c9491832a9731cad00235a76f61c6f9b23fc0f70cdfb74c950ea + md5: 5e315581e2948dfe3bcac306540e9803 depends: - - __osx >=10.13 + - __osx >=11.0 - bzip2 >=1.0.8,<2.0a0 - libexpat >=2.6.2,<3.0a0 - libffi >=3.4,<4.0a0 @@ -1091,14 +951,9 @@ packages: - python_abi 3.12.* *_cp312 license: Python-2.0 purls: [] - size: 12173272 - timestamp: 1723142761765 -- kind: conda - name: python - version: 3.12.5 - build: h889d299_0_cpython - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/python-3.12.5-h889d299_0_cpython.conda + size: 12926356 + timestamp: 1723142203193 +- conda: https://conda.anaconda.org/conda-forge/win-64/python-3.12.5-h889d299_0_cpython.conda sha256: 4cef304eb8877fd3094c14b57097ccc1b817b4afbf2223dd45d2b61e44064740 md5: db056d8b140ab2edd56a2f9bdb203dcd depends: @@ -1120,13 +975,59 @@ packages: purls: [] size: 15897752 timestamp: 1723141830317 -- kind: conda - name: readline - version: '8.2' - build: h8228510_1 - build_number: 1 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda +- conda: https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.12-5_cp312.conda + build_number: 5 + sha256: d10e93d759931ffb6372b45d65ff34d95c6000c61a07e298d162a3bc2accebb0 + md5: 0424ae29b104430108f5218a66db7260 + constrains: + - python 3.12.* *_cpython + arch: x86_64 + platform: linux + license: BSD-3-Clause + license_family: BSD + purls: [] + size: 6238 + timestamp: 1723823388266 +- conda: https://conda.anaconda.org/conda-forge/osx-64/python_abi-3.12-5_cp312.conda + build_number: 5 + sha256: 4da26c7508d5bc5d8621e84dc510284402239df56aab3587a7d217de9d3c806d + md5: c34dd4920e0addf7cfcc725809f25d8e + constrains: + - python 3.12.* *_cpython + arch: x86_64 + platform: osx + license: BSD-3-Clause + license_family: BSD + purls: [] + size: 6312 + timestamp: 1723823137004 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/python_abi-3.12-5_cp312.conda + build_number: 5 + sha256: 49d624e4b809c799d2bf257b22c23cf3fc4460f5570d9a58e7ad86350aeaa1f4 + md5: b76f9b1c862128e56ac7aa8cd2333de9 + constrains: + - python 3.12.* *_cpython + arch: arm64 + platform: osx + license: BSD-3-Clause + license_family: BSD + purls: [] + size: 6278 + timestamp: 1723823099686 +- conda: 
https://conda.anaconda.org/conda-forge/win-64/python_abi-3.12-5_cp312.conda + build_number: 5 + sha256: 9486662af81a219e96d343449eff242f38d7c5128ced5ce5acf85857265058d6 + md5: e8681f534453af7afab4cd2bc1423eec + constrains: + - python 3.12.* *_cpython + arch: x86_64 + platform: win + license: BSD-3-Clause + license_family: BSD + purls: [] + size: 6730 + timestamp: 1723823139725 +- conda: https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda sha256: 5435cf39d039387fbdc977b0a762357ea909a7694d9528ab40f005e9208744d7 md5: 47d31b792659ce70f470b5c82fdfb7a4 depends: @@ -1137,13 +1038,17 @@ packages: purls: [] size: 281456 timestamp: 1679532220005 -- kind: conda - name: readline - version: '8.2' - build: h92ec313_1 - build_number: 1 - subdir: osx-arm64 - url: https://conda.anaconda.org/conda-forge/osx-arm64/readline-8.2-h92ec313_1.conda +- conda: https://conda.anaconda.org/conda-forge/osx-64/readline-8.2-h9e318b2_1.conda + sha256: 41e7d30a097d9b060037f0c6a2b1d4c4ae7e942c06c943d23f9d481548478568 + md5: f17f77f2acf4d344734bda76829ce14e + depends: + - ncurses >=6.3,<7.0a0 + license: GPL-3.0-only + license_family: GPL + purls: [] + size: 255870 + timestamp: 1679532707590 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/readline-8.2-h92ec313_1.conda sha256: a1dfa679ac3f6007362386576a704ad2d0d7a02e98f5d0b115f207a2da63e884 md5: 8cbb776a2f641b943d413b3e19df71f4 depends: @@ -1153,29 +1058,18 @@ packages: purls: [] size: 250351 timestamp: 1679532511311 -- kind: conda - name: readline - version: '8.2' - build: h9e318b2_1 - build_number: 1 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/readline-8.2-h9e318b2_1.conda - sha256: 41e7d30a097d9b060037f0c6a2b1d4c4ae7e942c06c943d23f9d481548478568 - md5: f17f77f2acf4d344734bda76829ce14e +- conda: https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_h4845f30_101.conda + sha256: e0569c9caa68bf476bead1bed3d79650bb080b532c64a4af7d8ca286c08dea4e + md5: d453b98d9c83e71da0741bb0ff4d76bc depends: - - ncurses >=6.3,<7.0a0 - license: GPL-3.0-only - license_family: GPL + - libgcc-ng >=12 + - libzlib >=1.2.13,<2.0.0a0 + license: TCL + license_family: BSD purls: [] - size: 255870 - timestamp: 1679532707590 -- kind: conda - name: tk - version: 8.6.13 - build: h1abcd95_1 - build_number: 1 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/tk-8.6.13-h1abcd95_1.conda + size: 3318875 + timestamp: 1699202167581 +- conda: https://conda.anaconda.org/conda-forge/osx-64/tk-8.6.13-h1abcd95_1.conda sha256: 30412b2e9de4ff82d8c2a7e5d06a15f4f4fef1809a72138b6ccb53a33b26faf5 md5: bf830ba5afc507c6232d4ef0fb1a882d depends: @@ -1185,13 +1079,7 @@ packages: purls: [] size: 3270220 timestamp: 1699202389792 -- kind: conda - name: tk - version: 8.6.13 - build: h5083fa2_1 - build_number: 1 - subdir: osx-arm64 - url: https://conda.anaconda.org/conda-forge/osx-arm64/tk-8.6.13-h5083fa2_1.conda +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/tk-8.6.13-h5083fa2_1.conda sha256: 72457ad031b4c048e5891f3f6cb27a53cb479db68a52d965f796910e71a403a8 md5: b50a57ba89c32b62428b71a875291c9b depends: @@ -1201,13 +1089,7 @@ packages: purls: [] size: 3145523 timestamp: 1699202432999 -- kind: conda - name: tk - version: 8.6.13 - build: h5226925_1 - build_number: 1 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/tk-8.6.13-h5226925_1.conda +- conda: https://conda.anaconda.org/conda-forge/win-64/tk-8.6.13-h5226925_1.conda sha256: 2c4e914f521ccb2718946645108c9bd3fc3216ba69aea20c2c3cedbd8db32bb1 md5: 
fc048363eb8f03cd1737600a5d08aafe depends: @@ -1219,43 +1101,14 @@ packages: purls: [] size: 3503410 timestamp: 1699202577803 -- kind: conda - name: tk - version: 8.6.13 - build: noxft_h4845f30_101 - build_number: 101 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_h4845f30_101.conda - sha256: e0569c9caa68bf476bead1bed3d79650bb080b532c64a4af7d8ca286c08dea4e - md5: d453b98d9c83e71da0741bb0ff4d76bc - depends: - - libgcc-ng >=12 - - libzlib >=1.2.13,<2.0.0a0 - license: TCL - license_family: BSD - purls: [] - size: 3318875 - timestamp: 1699202167581 -- kind: conda - name: tzdata - version: 2024a - build: h8827d51_1 - build_number: 1 - subdir: noarch - noarch: generic - url: https://conda.anaconda.org/conda-forge/noarch/tzdata-2024a-h8827d51_1.conda +- conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2024a-h8827d51_1.conda sha256: 7d21c95f61319dba9209ca17d1935e6128af4235a67ee4e57a00908a1450081e md5: 8bfdead4e0fff0383ae4c9c50d0531bd license: LicenseRef-Public-Domain purls: [] size: 124164 timestamp: 1724736371498 -- kind: conda - name: ucrt - version: 10.0.22621.0 - build: h57928b3_0 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/ucrt-10.0.22621.0-h57928b3_0.tar.bz2 +- conda: https://conda.anaconda.org/conda-forge/win-64/ucrt-10.0.22621.0-h57928b3_0.tar.bz2 sha256: f29cdaf8712008f6b419b8b1a403923b00ab2504bfe0fb2ba8eb60e72d4f14c6 md5: 72608f6cd3e5898229c3ea16deb1ac43 constrains: @@ -1265,13 +1118,7 @@ packages: purls: [] size: 1283972 timestamp: 1666630199266 -- kind: conda - name: vc - version: '14.3' - build: h8a93ad2_20 - build_number: 20 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/vc-14.3-h8a93ad2_20.conda +- conda: https://conda.anaconda.org/conda-forge/win-64/vc-14.3-h8a93ad2_20.conda sha256: 23ac5feb15a9adf3ab2b8c4dcd63650f8b7ae860c5ceb073e49cf71d203eddef md5: 8558f367e1d7700554f7cdb823c46faf depends: @@ -1283,13 +1130,7 @@ packages: purls: [] size: 17391 timestamp: 1717709040616 -- kind: conda - name: vc14_runtime - version: 14.40.33810 - build: hcc2c482_20 - build_number: 20 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/vc14_runtime-14.40.33810-hcc2c482_20.conda +- conda: https://conda.anaconda.org/conda-forge/win-64/vc14_runtime-14.40.33810-hcc2c482_20.conda sha256: bba8daa6f78b26b48fb7e1377eb52160e25495710bf53146c5f405bd50565982 md5: ad33c7cd933d69b9dee0f48317cdf137 depends: @@ -1301,13 +1142,7 @@ packages: purls: [] size: 751028 timestamp: 1724712684919 -- kind: conda - name: vs2015_runtime - version: 14.40.33810 - build: h3bf8584_20 - build_number: 20 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/vs2015_runtime-14.40.33810-h3bf8584_20.conda +- conda: https://conda.anaconda.org/conda-forge/win-64/vs2015_runtime-14.40.33810-h3bf8584_20.conda sha256: 0c2803f7a788c51f28235a7228dc2ab3f107b4b16ab0845a3e595c8c51e50a7a md5: c21f1b4a3a30bbc3ef35a50957578e0e depends: @@ -1317,21 +1152,19 @@ packages: purls: [] size: 17395 timestamp: 1717709043353 -- kind: pypi - name: werkzeug - version: 3.0.4 - url: https://files.pythonhosted.org/packages/4b/84/997bbf7c2bf2dc3f09565c6d0b4959fefe5355c18c4096cfd26d83e0785b/werkzeug-3.0.4-py3-none-any.whl - sha256: 02c9eb92b7d6c06f31a782811505d2157837cea66aaede3e217c7c27c039476c - requires_dist: - - markupsafe>=2.1.1 - - watchdog>=2.3 ; extra == 'watchdog' - requires_python: '>=3.8' -- kind: conda - name: xz - version: 5.2.6 - build: h166bdaf_0 - subdir: linux-64 - url: 
https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.6-h166bdaf_0.tar.bz2 +- conda: https://conda.anaconda.org/conda-forge/noarch/werkzeug-3.1.3-pyhff2d567_0.conda + sha256: 588cb0a2275ae10fc8d43e1e43fa597ed2c63ea72eab3531f9e66e2352871240 + md5: f06be40e91eb82775b486f485c90995e + depends: + - markupsafe >=2.1.1 + - python >=3.9 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/werkzeug?source=hash-mapping + size: 243159 + timestamp: 1731097560953 +- conda: https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.6-h166bdaf_0.tar.bz2 sha256: 03a6d28ded42af8a347345f82f3eebdd6807a08526d47899a42d62d319609162 md5: 2161070d867d1b1204ea749c8eec4ef0 depends: @@ -1340,36 +1173,21 @@ packages: purls: [] size: 418368 timestamp: 1660346797927 -- kind: conda - name: xz - version: 5.2.6 - build: h57fd34a_0 - subdir: osx-arm64 - url: https://conda.anaconda.org/conda-forge/osx-arm64/xz-5.2.6-h57fd34a_0.tar.bz2 - sha256: 59d78af0c3e071021cfe82dc40134c19dab8cdf804324b62940f5c8cd71803ec - md5: 39c6b54e94014701dd157f4f576ed211 - license: LGPL-2.1 and GPL-2.0 - purls: [] - size: 235693 - timestamp: 1660346961024 -- kind: conda - name: xz - version: 5.2.6 - build: h775f41a_0 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/xz-5.2.6-h775f41a_0.tar.bz2 +- conda: https://conda.anaconda.org/conda-forge/osx-64/xz-5.2.6-h775f41a_0.tar.bz2 sha256: eb09823f34cc2dd663c0ec4ab13f246f45dcd52e5b8c47b9864361de5204a1c8 md5: a72f9d4ea13d55d745ff1ed594747f10 license: LGPL-2.1 and GPL-2.0 purls: [] size: 238119 timestamp: 1660346964847 -- kind: conda - name: xz - version: 5.2.6 - build: h8d14728_0 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/xz-5.2.6-h8d14728_0.tar.bz2 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/xz-5.2.6-h57fd34a_0.tar.bz2 + sha256: 59d78af0c3e071021cfe82dc40134c19dab8cdf804324b62940f5c8cd71803ec + md5: 39c6b54e94014701dd157f4f576ed211 + license: LGPL-2.1 and GPL-2.0 + purls: [] + size: 235693 + timestamp: 1660346961024 +- conda: https://conda.anaconda.org/conda-forge/win-64/xz-5.2.6-h8d14728_0.tar.bz2 sha256: 54d9778f75a02723784dc63aff4126ff6e6749ba21d11a6d03c1f4775f269fe0 md5: 515d77642eaa3639413c6b1bc3f94219 depends: @@ -1379,3 +1197,14 @@ packages: purls: [] size: 217804 timestamp: 1660346976440 +- conda: https://conda.anaconda.org/conda-forge/noarch/zipp-3.21.0-pyhd8ed1ab_0.conda + sha256: 232a30e4b0045c9de5e168dda0328dc0e28df9439cdecdfb97dd79c1c82c4cec + md5: fee389bf8a4843bd7a2248ce11b7f188 + depends: + - python >=3.8 + license: MIT + license_family: MIT + purls: + - pkg:pypi/zipp?source=hash-mapping + size: 21702 + timestamp: 1731262194278 diff --git a/examples/flask-hello-world-pyproject/pyproject.toml b/examples/flask-hello-world-pyproject/pyproject.toml index ba6e58f6f..40de916a1 100644 --- a/examples/flask-hello-world-pyproject/pyproject.toml +++ b/examples/flask-hello-world-pyproject/pyproject.toml @@ -13,10 +13,15 @@ requires = ["hatchling"] [tool.pixi.project] channels = ["conda-forge"] platforms = ["linux-64", "osx-arm64", "osx-64", "win-64"] +preview = ["pixi-build"] [tool.pixi.pypi-dependencies] flask-hello-world-pyproject = { path = ".", editable = true } +[tool.pixi.dependencies] +flask = "2.*" + + [tool.pixi.environments] default = { solve-group = "default" } test = { features = ["test"], solve-group = "default" } @@ -29,3 +34,16 @@ test = "pytest -v tests/*" [dependency-groups] test = ["pytest>=8.3.3,<9"] + +[tool.pixi.host-dependencies] +hatchling = "==1.26.3" + +[tool.pixi.package] + +[tool.pixi.build-system] 
+build-backend = "pixi-build-python" +channels = [ + "https://prefix.dev/pixi-build-backends", + "https://prefix.dev/conda-forge", +] +dependencies = ["pixi-build-python"] diff --git a/examples/flask-hello-world/pixi.toml b/examples/flask-hello-world/pixi.toml index c12166c8d..d5e573f68 100644 --- a/examples/flask-hello-world/pixi.toml +++ b/examples/flask-hello-world/pixi.toml @@ -2,12 +2,23 @@ authors = ["Wolf Vollprecht "] channels = ["conda-forge"] description = "Example how to get started with flask in a pixi environment." -name = "flask-hello-world" platforms = ["linux-64", "win-64", "osx-64", "osx-arm64"] +preview = ["pixi-build"] [tasks] start = "python -m flask run --port=5050" [dependencies] flask = "2.*" -python = "3.11.*" + +[package] +name = "flask-hello-world" +version = "0.1.0" + +[build-system] +build-backend = "pixi-build-python" +channels = [ + "https://fast.prefix.dev/pixi-build-backends", + "https://fast.prefix.dev/conda-forge", +] +dependencies = ["pixi-build-python"] diff --git a/schema/examples/valid/full.toml b/schema/examples/valid/full.toml index 6bdf2ac50..b9b19821f 100644 --- a/schema/examples/valid/full.toml +++ b/schema/examples/valid/full.toml @@ -12,11 +12,21 @@ license = "MIT" license-file = "LICENSE" name = "project" platforms = ["linux-64", "win-64", "osx-64", "osx-arm64"] -preview = ["new-resolve"] +preview = ["pixi-build"] readme = "README.md" repository = "https://github.com/author/project" version = "0.1.0" +[package] + +[build-system] +build-backend = "pixi-build-python" +channels = [ + "https://prefix.dev/pixi-build-backends", + "https://prefix.dev/conda-forge", +] +dependencies = ["pixi-build-python"] + [dependencies] detailed = { version = ">=1.2.3" } detailed-full = { version = ">=1.2.3", build = "py34_0", channel = "pytorch", subdir = "linux-64", md5 = "6f5902ac237024bdd0c176cb93063dc4", sha256 = "a948904f2f0f479b8f8197694b30184b0d2ed1c1cd2a1ec0fb85d299a192a447" } diff --git a/schema/model.py b/schema/model.py index 928f621f9..c13cfaadb 100644 --- a/schema/model.py +++ b/schema/model.py @@ -35,6 +35,10 @@ ] +def hyphenize(field: str): + return field.replace("_", "-") + + class Platform(str, Enum): """A supported operating system and processor architecture pair.""" @@ -63,6 +67,7 @@ class Platform(str, Enum): class StrictBaseModel(BaseModel): class Config: extra = "forbid" + alias_generator = hyphenize ################### @@ -88,15 +93,22 @@ class ChannelPriority(str, Enum): strict = "strict" -class KnownPreviewFeature(str, Enum): - """The preview features of the project.""" +PixiBuildFeature = Annotated[ + Literal["pixi-build"], Field(description="Enables building of source records") +] +KnownPreviewFeature = PixiBuildFeature + + +# class KnownPreviewFeature(Enum): +# """The preview features of the project.""" +# PixiBuild: Annotated[str, Field(description="Enables building of source records")] = "pixi-build" -class Project(StrictBaseModel): +class Workspace(StrictBaseModel): """The project's metadata information.""" - name: NonEmptyStr = Field( - description="The name of the project; we advise use of the name of the repository" + name: NonEmptyStr | None = Field( + None, description="The name of the project; we advise use of the name of the repository" ) version: NonEmptyStr | None = Field( None, @@ -113,7 +125,6 @@ class Project(StrictBaseModel): ) channel_priority: ChannelPriority | None = Field( None, - alias="channel-priority", examples=["strict", "disabled"], description="The type of channel priority that is used in the solve." 
"- 'strict': only take the package from the channel it exist in first." @@ -125,7 +136,7 @@ class Project(StrictBaseModel): description="The license of the project; we advise using an [SPDX](https://spdx.org/licenses/) identifier.", ) license_file: PathNoBackslash | None = Field( - None, alias="license-file", description="The path to the license file of the project" + None, description="The path to the license file of the project" ) readme: PathNoBackslash | None = Field( None, description="The path to the readme file of the project" @@ -138,13 +149,45 @@ class Project(StrictBaseModel): None, description="The URL of the documentation of the project" ) conda_pypi_map: dict[ChannelName, AnyHttpUrl | NonEmptyStr] | None = Field( - None, alias="conda-pypi-map", description="The `conda` to PyPI mapping configuration" + None, description="The `conda` to PyPI mapping configuration" ) pypi_options: PyPIOptions | None = Field( - None, alias="pypi-options", description="Options related to PyPI indexes for this project" + None, description="Options related to PyPI indexes for this project" ) preview: list[KnownPreviewFeature | str] | bool | None = Field( - None, alias="preview", description="Defines the enabling of preview features of the project" + None, description="Defines the enabling of preview features of the project" + ) + + +class Package(StrictBaseModel): + """The package's metadata information.""" + + name: NonEmptyStr | None = Field(None, description="The name of the package") + version: NonEmptyStr | None = Field( + None, + description="The version of the project; we advise use of [SemVer](https://semver.org)", + examples=["1.2.3"], + ) + description: NonEmptyStr | None = Field(None, description="A short description of the project") + authors: list[NonEmptyStr] | None = Field( + None, description="The authors of the project", examples=["John Doe "] + ) + license: NonEmptyStr | None = Field( + None, + description="The license of the project; we advise using an [SPDX](https://spdx.org/licenses/) identifier.", + ) + license_file: PathNoBackslash | None = Field( + None, description="The path to the license file of the project" + ) + readme: PathNoBackslash | None = Field( + None, description="The path to the readme file of the project" + ) + homepage: AnyHttpUrl | None = Field(None, description="The URL of the homepage of the project") + repository: AnyHttpUrl | None = Field( + None, description="The URL of the repository of the project" + ) + documentation: AnyHttpUrl | None = Field( + None, description="The URL of the documentation of the project" ) @@ -163,12 +206,9 @@ class MatchspecTable(StrictBaseModel): build: NonEmptyStr | None = Field(None, description="The build string of the package") build_number: NonEmptyStr | None = Field( None, - alias="build-number", description="The build number of the package, can be a spec like `>=1` or `<=10` or `1`", ) - file_name: NonEmptyStr | None = Field( - None, alias="file-name", description="The file name of the package" - ) + file_name: NonEmptyStr | None = Field(None, description="The file name of the package") channel: NonEmptyStr | None = Field( None, description="The channel the packages needs to be fetched from", @@ -194,6 +234,23 @@ class MatchspecTable(StrictBaseModel): CondaPackageName = NonEmptyStr +##################### +# The Build section # +##################### +class BuildSystem(StrictBaseModel): + channels: list[Channel] = Field( + None, + description="The `conda` channels that will be used to get build dependencies", + ) + 
dependencies: list[MatchSpec] = Field( + None, description="The dependencies for the build backend" + ) + build_backend: NonEmptyStr = Field(None, description="The build executable to call") + channels: list[Channel] = Field( + None, description="The `conda` channels that are used to fetch the build backend from" + ) + + class _PyPIRequirement(StrictBaseModel): extras: list[NonEmptyStr] | None = Field( None, @@ -271,13 +328,11 @@ class PyPIVersion(_PyPIRequirement): ) HostDependenciesField = Field( None, - alias="host-dependencies", description="The host `conda` dependencies, used in the build process", examples=[{"python": ">=3.8"}], ) BuildDependenciesField = Field( None, - alias="build-dependencies", description="The build `conda` dependencies, used in the build process", ) Dependencies = dict[CondaPackageName, MatchSpec] | None @@ -304,7 +359,6 @@ class TaskInlineTable(StrictBaseModel): ) depends_on: list[TaskName] | TaskName | None = Field( None, - alias="depends-on", description="The tasks that this task depends on. Environment variables will **not** be expanded.", ) inputs: list[Glob] | None = Field( @@ -327,7 +381,6 @@ class TaskInlineTable(StrictBaseModel): ) clean_env: bool | None = Field( None, - alias="clean-env", description="Whether to run in a clean environment, removing all environment variables except those defined in `env` and by pixi itself.", ) @@ -377,12 +430,10 @@ class Environment(StrictBaseModel): ) solve_group: SolveGroupName | None = Field( None, - alias="solve-group", description="The group name for environments that should be solved together", ) no_default_feature: Optional[bool] = Field( False, - alias="no-default-feature", description="Whether to add the default feature to this environment", ) @@ -418,7 +469,7 @@ class Target(StrictBaseModel): host_dependencies: Dependencies = HostDependenciesField build_dependencies: Dependencies = BuildDependenciesField pypi_dependencies: dict[PyPIPackageName, PyPIRequirement] | None = Field( - None, alias="pypi-dependencies", description="The PyPI dependencies for this target" + None, description="The PyPI dependencies for this target" ) tasks: dict[TaskName, TaskInlineTable | NonEmptyStr] | None = Field( None, description="The tasks of the target" @@ -440,7 +491,6 @@ class Feature(StrictBaseModel): ) channel_priority: ChannelPriority | None = Field( None, - alias="channel-priority", examples=["strict", "disabled"], description="The type of channel priority that is used in the solve." "- 'strict': only take the package from the channel it exist in first." 
@@ -454,7 +504,7 @@ class Feature(StrictBaseModel): host_dependencies: Dependencies = HostDependenciesField build_dependencies: Dependencies = BuildDependenciesField pypi_dependencies: dict[PyPIPackageName, PyPIRequirement] | None = Field( - None, alias="pypi-dependencies", description="The PyPI dependencies of this feature" + None, description="The PyPI dependencies of this feature" ) tasks: dict[TaskName, TaskInlineTable | NonEmptyStr] | None = Field( None, description="The tasks provided by this feature" @@ -463,7 +513,7 @@ class Feature(StrictBaseModel): None, description="The scripts used on the activation of environments using this feature" ) system_requirements: SystemRequirements | None = Field( - None, alias="system-requirements", description="The system requirements of this feature" + None, description="The system requirements of this feature" ) target: dict[TargetName, Target] | None = Field( None, @@ -471,7 +521,7 @@ class Feature(StrictBaseModel): examples=[{"linux": {"dependencies": {"python": "3.8"}}}], ) pypi_options: PyPIOptions | None = Field( - None, alias="pypi-options", description="Options related to PyPI indexes for this feature" + None, description="Options related to PyPI indexes for this feature" ) @@ -503,25 +553,21 @@ class PyPIOptions(StrictBaseModel): index_url: NonEmptyStr | None = Field( None, - alias="index-url", description="PyPI registry that should be used as the primary index", examples=["https://pypi.org/simple"], ) extra_index_urls: list[NonEmptyStr] | None = Field( None, - alias="extra-index-urls", description="Additional PyPI registries that should be used as extra indexes", examples=[["https://pypi.org/simple"]], ) find_links: list[FindLinksPath | FindLinksURL] = Field( None, - alias="find-links", description="Paths to directory containing", examples=[["https://pypi.org/simple"]], ) no_build_isolation: list[PyPIPackageName] = Field( None, - alias="no-build-isolation", description="Packages that should NOT be isolated during the build process", examples=[["numpy"]], ) @@ -529,7 +575,6 @@ class PyPIOptions(StrictBaseModel): Literal["first-index"] | Literal["unsafe-first-match"] | Literal["unsafe-best-match"] | None ) = Field( None, - alias="index-strategy", description="The strategy to use when resolving packages from multiple indexes", examples=["first-index", "unsafe-first-match", "unsafe-best-match"], ) @@ -548,6 +593,7 @@ class Config: "$id": SCHEMA_URI, "$schema": SCHEMA_DRAFT, "title": "`pixi.toml` manifest file", + "oneOf": [{"required": ["project"]}, {"required": ["workspace"]}], } schema_: str | None = Field( @@ -558,21 +604,21 @@ class Config: format="uri-reference", ) - project: Project = Field(..., description="The project's metadata information") + workspace: Workspace | None = Field(None, description="The workspace's metadata information") + project: Workspace | None = Field(None, description="The project's metadata information") + package: Package | None = Field(None, description="The package's metadata information") dependencies: Dependencies = DependenciesField host_dependencies: Dependencies = HostDependenciesField build_dependencies: Dependencies = BuildDependenciesField pypi_dependencies: dict[PyPIPackageName, PyPIRequirement] | None = Field( - None, alias="pypi-dependencies", description="The PyPI dependencies" - ) - pypi_options: PyPIOptions | None = Field( - None, alias="pypi-options", description="Options related to PyPI indexes" + None, description="The PyPI dependencies" ) + pypi_options: PyPIOptions | None = Field(None, 
description="Options related to PyPI indexes") tasks: dict[TaskName, TaskInlineTable | NonEmptyStr] | None = Field( None, description="The tasks of the project" ) system_requirements: SystemRequirements | None = Field( - None, alias="system-requirements", description="The system requirements of the project" + None, description="The system requirements of the project" ) environments: dict[EnvironmentName, Environment | list[FeatureName]] | None = Field( None, @@ -594,9 +640,11 @@ class Config: ) pypi_options: PyPIOptions | None = Field( None, - alias="pypi-options", description="Options related to PyPI indexes, on the default feature", ) + build_system: BuildSystem | None = Field( + None, description="The build-system used to build the package." + ) ######################### diff --git a/schema/schema.json b/schema/schema.json index 647479b05..3d356dd1c 100644 --- a/schema/schema.json +++ b/schema/schema.json @@ -4,10 +4,19 @@ "title": "`pixi.toml` manifest file", "description": "The configuration for a [`pixi`](https://pixi.sh) project.", "type": "object", - "required": [ - "project" - ], "additionalProperties": false, + "oneOf": [ + { + "required": [ + "project" + ] + }, + { + "required": [ + "workspace" + ] + } + ], "properties": { "$schema": { "title": "Schema", @@ -36,6 +45,10 @@ ] } }, + "build-system": { + "$ref": "#/$defs/BuildSystem", + "description": "The build-system used to build the package." + }, "dependencies": { "title": "Dependencies", "description": "The `conda` dependencies, consisting of a package name and a requirement in [MatchSpec](https://github.com/conda/conda/blob/078e7ee79381060217e1ec7f9b0e9cf80ecc8f3f/conda/models/match_spec.py) format", @@ -102,8 +115,12 @@ } ] }, + "package": { + "$ref": "#/$defs/Package", + "description": "The package's metadata information" + }, "project": { - "$ref": "#/$defs/Project", + "$ref": "#/$defs/Workspace", "description": "The project's metadata information" }, "pypi-dependencies": { @@ -184,6 +201,10 @@ "title": "Tool", "description": "Third-party tool configurations, ignored by pixi", "type": "object" + }, + "workspace": { + "$ref": "#/$defs/Workspace", + "description": "The workspace's metadata information" } }, "$defs": { @@ -225,6 +246,56 @@ } } }, + "BuildSystem": { + "title": "BuildSystem", + "type": "object", + "additionalProperties": false, + "properties": { + "build-backend": { + "title": "Build-Backend", + "description": "The build executable to call", + "type": "string", + "minLength": 1 + }, + "channels": { + "title": "Channels", + "description": "The `conda` channels that are used to fetch the build backend from", + "type": "array", + "items": { + "anyOf": [ + { + "type": "string", + "minLength": 1 + }, + { + "type": "string", + "format": "uri", + "minLength": 1 + }, + { + "$ref": "#/$defs/ChannelInlineTable" + } + ] + } + }, + "dependencies": { + "title": "Dependencies", + "description": "The dependencies for the build backend", + "type": "array", + "items": { + "anyOf": [ + { + "type": "string", + "minLength": 1 + }, + { + "$ref": "#/$defs/MatchspecTable" + } + ] + } + } + } + }, "ChannelInlineTable": { "title": "ChannelInlineTable", "description": "A precise description of a `conda` channel, with an optional priority.", @@ -631,41 +702,10 @@ } } }, - "Platform": { - "title": "Platform", - "description": "A supported operating system and processor architecture pair.", - "type": "string", - "enum": [ - "emscripten-wasm32", - "linux-32", - "linux-64", - "linux-aarch64", - "linux-armv6l", - "linux-armv7l", - 
"linux-ppc64", - "linux-ppc64le", - "linux-riscv32", - "linux-riscv64", - "linux-s390x", - "noarch", - "osx-64", - "osx-arm64", - "unknown", - "wasi-wasm32", - "win-32", - "win-64", - "win-arm64", - "zos-z" - ] - }, - "Project": { - "title": "Project", - "description": "The project's metadata information.", + "Package": { + "title": "Package", + "description": "The package's metadata information.", "type": "object", - "required": [ - "name", - "platforms" - ], "additionalProperties": false, "properties": { "authors": { @@ -680,53 +720,6 @@ "John Doe " ] }, - "channel-priority": { - "$ref": "#/$defs/ChannelPriority", - "description": "The type of channel priority that is used in the solve.- 'strict': only take the package from the channel it exist in first.- 'disabled': group all dependencies together as if there is no channel difference.", - "examples": [ - "strict", - "disabled" - ] - }, - "channels": { - "title": "Channels", - "description": "The `conda` channels that can be used in the project. Unless overridden by `priority`, the first channel listed will be preferred.", - "type": "array", - "items": { - "anyOf": [ - { - "type": "string", - "minLength": 1 - }, - { - "type": "string", - "format": "uri", - "minLength": 1 - }, - { - "$ref": "#/$defs/ChannelInlineTable" - } - ] - } - }, - "conda-pypi-map": { - "title": "Conda-Pypi-Map", - "description": "The `conda` to PyPI mapping configuration", - "type": "object", - "additionalProperties": { - "anyOf": [ - { - "type": "string", - "format": "uri", - "minLength": 1 - }, - { - "type": "string", - "minLength": 1 - } - ] - } - }, "description": { "title": "Description", "description": "A short description of the project", @@ -761,46 +754,10 @@ }, "name": { "title": "Name", - "description": "The name of the project; we advise use of the name of the repository", + "description": "The name of the package", "type": "string", "minLength": 1 }, - "platforms": { - "title": "Platforms", - "description": "The platforms that the project supports", - "type": "array", - "items": { - "$ref": "#/$defs/Platform" - } - }, - "preview": { - "title": "Preview", - "description": "Defines the enabling of preview features of the project", - "anyOf": [ - { - "type": "array", - "items": { - "anyOf": [ - { - "title": "KnownPreviewFeature", - "description": "The preview features of the project.", - "enum": [] - }, - { - "type": "string" - } - ] - } - }, - { - "type": "boolean" - } - ] - }, - "pypi-options": { - "$ref": "#/$defs/PyPIOptions", - "description": "Options related to PyPI indexes for this project" - }, "readme": { "title": "Readme", "description": "The path to the readme file of the project", @@ -825,6 +782,33 @@ } } }, + "Platform": { + "title": "Platform", + "description": "A supported operating system and processor architecture pair.", + "type": "string", + "enum": [ + "emscripten-wasm32", + "linux-32", + "linux-64", + "linux-aarch64", + "linux-armv6l", + "linux-armv7l", + "linux-ppc64", + "linux-ppc64le", + "linux-riscv32", + "linux-riscv64", + "linux-s390x", + "noarch", + "osx-64", + "osx-arm64", + "unknown", + "wasi-wasm32", + "win-32", + "win-64", + "win-arm64", + "zos-z" + ] + }, "PyPIGitBranchRequirement": { "title": "PyPIGitBranchRequirement", "type": "object", @@ -1413,6 +1397,171 @@ } } } + }, + "Workspace": { + "title": "Workspace", + "description": "The project's metadata information.", + "type": "object", + "required": [ + "platforms" + ], + "additionalProperties": false, + "properties": { + "authors": { + "title": "Authors", + 
"description": "The authors of the project", + "type": "array", + "items": { + "type": "string", + "minLength": 1 + }, + "examples": [ + "John Doe " + ] + }, + "channel-priority": { + "$ref": "#/$defs/ChannelPriority", + "description": "The type of channel priority that is used in the solve.- 'strict': only take the package from the channel it exist in first.- 'disabled': group all dependencies together as if there is no channel difference.", + "examples": [ + "strict", + "disabled" + ] + }, + "channels": { + "title": "Channels", + "description": "The `conda` channels that can be used in the project. Unless overridden by `priority`, the first channel listed will be preferred.", + "type": "array", + "items": { + "anyOf": [ + { + "type": "string", + "minLength": 1 + }, + { + "type": "string", + "format": "uri", + "minLength": 1 + }, + { + "$ref": "#/$defs/ChannelInlineTable" + } + ] + } + }, + "conda-pypi-map": { + "title": "Conda-Pypi-Map", + "description": "The `conda` to PyPI mapping configuration", + "type": "object", + "additionalProperties": { + "anyOf": [ + { + "type": "string", + "format": "uri", + "minLength": 1 + }, + { + "type": "string", + "minLength": 1 + } + ] + } + }, + "description": { + "title": "Description", + "description": "A short description of the project", + "type": "string", + "minLength": 1 + }, + "documentation": { + "title": "Documentation", + "description": "The URL of the documentation of the project", + "type": "string", + "format": "uri", + "minLength": 1 + }, + "homepage": { + "title": "Homepage", + "description": "The URL of the homepage of the project", + "type": "string", + "format": "uri", + "minLength": 1 + }, + "license": { + "title": "License", + "description": "The license of the project; we advise using an [SPDX](https://spdx.org/licenses/) identifier.", + "type": "string", + "minLength": 1 + }, + "license-file": { + "title": "License-File", + "description": "The path to the license file of the project", + "type": "string", + "pattern": "^[^\\\\]+$" + }, + "name": { + "title": "Name", + "description": "The name of the project; we advise use of the name of the repository", + "type": "string", + "minLength": 1 + }, + "platforms": { + "title": "Platforms", + "description": "The platforms that the project supports", + "type": "array", + "items": { + "$ref": "#/$defs/Platform" + } + }, + "preview": { + "title": "Preview", + "description": "Defines the enabling of preview features of the project", + "anyOf": [ + { + "type": "array", + "items": { + "anyOf": [ + { + "description": "Enables building of source records", + "const": "pixi-build" + }, + { + "type": "string" + } + ] + } + }, + { + "type": "boolean" + } + ] + }, + "pypi-options": { + "$ref": "#/$defs/PyPIOptions", + "description": "Options related to PyPI indexes for this project" + }, + "readme": { + "title": "Readme", + "description": "The path to the readme file of the project", + "type": "string", + "pattern": "^[^\\\\]+$" + }, + "repository": { + "title": "Repository", + "description": "The URL of the repository of the project", + "type": "string", + "format": "uri", + "minLength": 1 + }, + "version": { + "title": "Version", + "description": "The version of the project; we advise use of [SemVer](https://semver.org)", + "type": "string", + "minLength": 1, + "examples": [ + "1.2.3" + ] + } + } } } } diff --git a/src/build/cache/build_cache.rs b/src/build/cache/build_cache.rs new file mode 100644 index 000000000..d579af435 --- /dev/null +++ b/src/build/cache/build_cache.rs @@ -0,0 +1,255 @@ +use 
std::{ + hash::{Hash, Hasher}, + io::SeekFrom, + path::PathBuf, +}; + +use crate::{ + build::{cache::source_checkout_cache_key, SourceCheckout}, + utils::{move_file, MoveError}, +}; +use async_fd_lock::{LockWrite, RwLockWriteGuard}; +use base64::{engine::general_purpose::URL_SAFE_NO_PAD, Engine}; +use rattler_conda_types::{GenericVirtualPackage, Platform, RepoDataRecord}; +use serde::{Deserialize, Serialize}; +use thiserror::Error; +use tokio::io::{AsyncReadExt, AsyncSeekExt, AsyncWriteExt}; +use url::Url; +use xxhash_rust::xxh3::Xxh3; + +/// A cache for caching build artifacts of a source checkout. +#[derive(Clone)] +pub struct BuildCache { + root: PathBuf, +} + +#[derive(Debug, Error)] +pub enum BuildCacheError { + /// An I/O error occurred while reading or writing the cache. + #[error("an IO error occurred while {0} {1}")] + IoError(String, PathBuf, #[source] std::io::Error), + + /// Failed to move the build artifact + #[error("failed to move build artifact from '{}' to cache '{}'", .0.display(), .1.display())] + MoveError(PathBuf, PathBuf, #[source] MoveError), +} + +/// Defines additional input besides the source files that are used to compute +/// the metadata of a source checkout. +pub struct BuildInput { + /// TODO: I think this should also include the build backend used! Maybe? + + /// The URL channels used in the build. + pub channel_urls: Vec<Url>, + + /// The platform for which the metadata was computed. + pub target_platform: Platform, + + /// The name of the package + pub name: String, + + /// The version of the package to build + pub version: String, + + /// The build string of the package to build + pub build: String, + + /// The host platform + pub host_platform: Platform, + + /// The virtual packages of the target host + pub host_virtual_packages: Vec<GenericVirtualPackage>, + + /// The virtual packages used to build the package + pub build_virtual_packages: Vec<GenericVirtualPackage>, +} + +impl BuildInput { + /// Computes a unique semi-human-readable hash for this key. Some parts of + /// the input are hashed and others are included directly in the name; this + /// is to make it easier to identify the cache files. + pub fn hash_key(&self) -> String { + let BuildInput { + channel_urls, + target_platform, + name, + version, + build, + host_platform, + host_virtual_packages, + build_virtual_packages, + } = self; + + // Hash some of the keys + let mut hasher = Xxh3::new(); + build.hash(&mut hasher); + channel_urls.hash(&mut hasher); + host_platform.hash(&mut hasher); + host_virtual_packages.hash(&mut hasher); + build_virtual_packages.hash(&mut hasher); + let hash = URL_SAFE_NO_PAD.encode(hasher.finish().to_ne_bytes()); + + format!("{name}-{version}-{target_platform}-{hash}") + } +} + +impl BuildCache { + /// Constructs a new instance. + /// + /// An additional directory is created by this cache inside the passed root + /// which includes a version number. This is to ensure that the cache is + /// never corrupted if the format changes in the future. + pub fn new(root: PathBuf) -> Self { + Self { + root: root.join("source-builds-v0"), + } + } + + /// Returns a cache entry for the given source checkout and input from the + /// cache. If the cache doesn't contain an entry for this source and input, + /// it returns `None`. + /// + /// This function also returns a [`CacheEntry`] which can be used to update + /// the cache. The [`CacheEntry`] also holds an exclusive lock on the cache + /// which prevents other processes from accessing the cache entry. Drop + /// the entry as soon as possible to release the lock.
+    pub async fn entry(
+        &self,
+        source: &SourceCheckout,
+        input: &BuildInput,
+    ) -> Result<(Option<CachedBuild>, CacheEntry), BuildCacheError> {
+        let input_key = input.hash_key();
+
+        // Ensure the cache directory exists
+        let cache_dir = self
+            .root
+            .join(source_checkout_cache_key(source))
+            .join(input_key);
+        tokio::fs::create_dir_all(&cache_dir).await.map_err(|e| {
+            BuildCacheError::IoError("creating cache directory".to_string(), cache_dir.clone(), e)
+        })?;
+
+        // Try to acquire a lock on the cache file.
+        let cache_file_path = cache_dir.join(".lock");
+        let cache_file = tokio::fs::OpenOptions::new()
+            .write(true)
+            .read(true)
+            .truncate(false)
+            .create(true)
+            .open(&cache_file_path)
+            .await
+            .map_err(|e| {
+                BuildCacheError::IoError(
+                    "opening cache file".to_string(),
+                    cache_file_path.clone(),
+                    e,
+                )
+            })?;
+
+        let mut locked_cache_file = cache_file.lock_write().await.map_err(|e| {
+            BuildCacheError::IoError(
+                "locking cache file".to_string(),
+                cache_file_path.clone(),
+                e.error,
+            )
+        })?;
+
+        // Try to parse the contents of the file
+        let mut cache_file_contents = String::new();
+        locked_cache_file
+            .read_to_string(&mut cache_file_contents)
+            .await
+            .map_err(|e| {
+                BuildCacheError::IoError(
+                    "reading cache file".to_string(),
+                    cache_file_path.clone(),
+                    e,
+                )
+            })?;
+
+        let metadata = serde_json::from_str(&cache_file_contents).ok();
+        Ok((
+            metadata,
+            CacheEntry {
+                file: locked_cache_file,
+                cache_dir,
+                cache_file_path,
+            },
+        ))
+    }
+}
+
+/// A cached build of a source checkout. This is returned by
+/// [`BuildCache::entry`].
+#[derive(Debug, Serialize, Deserialize)]
+pub struct CachedBuild {
+    #[serde(default, skip_serializing_if = "Option::is_none")]
+    pub source: Option<SourceInfo>,
+    pub record: RepoDataRecord,
+}
+
+#[derive(Debug, Serialize, Deserialize)]
+pub struct SourceInfo {
+    pub globs: Vec<String>,
+}
+
+/// A cache entry returned by [`BuildCache::entry`] which enables
+/// updating the cache.
+///
+/// As long as this entry is held, no other process can access this cache entry.
+pub struct CacheEntry {
+    file: RwLockWriteGuard<tokio::fs::File>,
+    cache_dir: PathBuf,
+    cache_file_path: PathBuf,
+}
+
+impl CacheEntry {
+    /// Consumes this instance and writes the given metadata to the cache.
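+    ///
+    /// A hedged sketch of the expected call (names are illustrative). The
+    /// record's `url` is assumed to be a `file://` URL so that the artifact
+    /// can be moved into the cache directory:
+    ///
+    /// ```ignore
+    /// let record = entry
+    ///     .insert(CachedBuild { source: Some(SourceInfo { globs }), record })
+    ///     .await?;
+    /// // `record.url` now points at the copy inside the cache directory.
+    /// ```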
+    pub async fn insert(
+        mut self,
+        mut metadata: CachedBuild,
+    ) -> Result<RepoDataRecord, BuildCacheError> {
+        // Move the file into the cache
+        if let Ok(file_path) = metadata.record.url.to_file_path() {
+            let file_name = file_path
+                .file_name()
+                .expect("the path cannot be empty because that wouldn't be a valid url");
+            let destination = self.cache_dir.join(file_name);
+            if let Err(err) = move_file(&file_path, &destination) {
+                return Err(BuildCacheError::MoveError(file_path, destination, err));
+            }
+
+            metadata.record.url = Url::from_file_path(&destination)
+                .expect("the cache directory path should be a valid url");
+        }
+
+        self.file.seek(SeekFrom::Start(0)).await.map_err(|e| {
+            BuildCacheError::IoError(
+                "seeking to start of cache file".to_string(),
+                self.cache_file_path.clone(),
+                e,
+            )
+        })?;
+        let bytes = serde_json::to_vec(&metadata).expect("serialization to JSON should not fail");
+        self.file.write_all(&bytes).await.map_err(|e| {
+            BuildCacheError::IoError(
+                "writing metadata to cache file".to_string(),
+                self.cache_file_path.clone(),
+                e,
+            )
+        })?;
+        self.file
+            .inner_mut()
+            .set_len(bytes.len() as u64)
+            .await
+            .map_err(|e| {
+                BuildCacheError::IoError(
+                    "setting length of cache file".to_string(),
+                    self.cache_file_path.clone(),
+                    e,
+                )
+            })?;
+
+        Ok(metadata.record)
+    }
+}
diff --git a/src/build/cache/mod.rs b/src/build/cache/mod.rs
new file mode 100644
index 000000000..ecdd06d57
--- /dev/null
+++ b/src/build/cache/mod.rs
@@ -0,0 +1,27 @@
+mod build_cache;
+mod source_metadata_cache;
+
+use std::{
+    ffi::OsStr,
+    hash::{Hash, Hasher},
+};
+
+use base64::{engine::general_purpose::URL_SAFE_NO_PAD, Engine};
+pub use build_cache::{BuildCache, BuildCacheError, BuildInput, CachedBuild, SourceInfo};
+pub use source_metadata_cache::{
+    CachedCondaMetadata, SourceMetadataCache, SourceMetadataError, SourceMetadataInput,
+};
+use xxhash_rust::xxh3::Xxh3;
+
+use crate::build::SourceCheckout;
+
+/// Constructs a name for a cache directory for the given source checkout.
+fn source_checkout_cache_key(source: &SourceCheckout) -> String {
+    let mut hasher = Xxh3::new();
+    source.pinned.to_string().hash(&mut hasher);
+    let unique_key = URL_SAFE_NO_PAD.encode(hasher.finish().to_ne_bytes());
+    match source.path.file_name().and_then(OsStr::to_str) {
+        Some(name) => format!("{}-{}", name, unique_key),
+        None => unique_key,
+    }
+}
diff --git a/src/build/cache/source_metadata_cache.rs b/src/build/cache/source_metadata_cache.rs
new file mode 100644
index 000000000..0cef3ad46
--- /dev/null
+++ b/src/build/cache/source_metadata_cache.rs
@@ -0,0 +1,206 @@
+use std::{
+    hash::{DefaultHasher, Hash, Hasher},
+    io::SeekFrom,
+    path::PathBuf,
+};
+
+use async_fd_lock::{LockWrite, RwLockWriteGuard};
+use base64::{engine::general_purpose::URL_SAFE_NO_PAD, Engine};
+use pixi_build_types::CondaPackageMetadata;
+use pixi_record::InputHash;
+use rattler_conda_types::{GenericVirtualPackage, Platform};
+use serde::Deserialize;
+use serde_with::serde_derive::Serialize;
+use thiserror::Error;
+use tokio::io::{AsyncReadExt, AsyncSeekExt, AsyncWriteExt};
+use url::Url;
+
+use crate::build::{cache::source_checkout_cache_key, SourceCheckout};
+
+/// A cache for caching the metadata of a source checkout.
+///
+/// To request metadata for a source checkout we need to invoke the build
+/// backend associated with the given source checkout. This operation can be
+/// time-consuming so we want to avoid having to query the build backend.
+///
+/// This cache stores the raw response for a given source checkout together with
+/// some additional properties to determine if the cache is still valid.
+#[derive(Clone)]
+pub struct SourceMetadataCache {
+    root: PathBuf,
+}
+
+#[derive(Debug, Error)]
+pub enum SourceMetadataError {
+    /// An I/O error occurred while reading or writing the cache.
+    #[error("an IO error occurred while {0} {1}")]
+    IoError(String, PathBuf, #[source] std::io::Error),
+}
+
+/// Defines additional input besides the source files that are used to compute
+/// the metadata of a source checkout.
+pub struct SourceMetadataInput {
+    /// TODO: I think this should also include the build backend used! Maybe?
+
+    /// The URLs of the channels used to compute the metadata.
+    pub channel_urls: Vec<Url>,
+
+    /// The platform on which the package will be built
+    pub build_platform: Platform,
+    pub build_virtual_packages: Vec<GenericVirtualPackage>,
+
+    /// The platform on which the package will run
+    pub host_platform: Platform,
+    pub host_virtual_packages: Vec<GenericVirtualPackage>,
+}
+
+impl SourceMetadataInput {
+    /// Computes a unique semi-human-readable hash for this key.
+    pub fn hash_key(&self) -> String {
+        let mut hasher = DefaultHasher::new();
+        self.channel_urls.hash(&mut hasher);
+        self.build_platform.hash(&mut hasher);
+        self.build_virtual_packages.hash(&mut hasher);
+        self.host_virtual_packages.hash(&mut hasher);
+        format!(
+            "{}-{}",
+            self.host_platform,
+            URL_SAFE_NO_PAD.encode(hasher.finish().to_ne_bytes())
+        )
+    }
+}
+
+impl SourceMetadataCache {
+    /// Constructs a new instance.
+    ///
+    /// An additional directory is created by this cache inside the passed root
+    /// which includes a version number. This is to ensure that the cache is
+    /// never corrupted if the format changes in the future.
+    pub fn new(root: PathBuf) -> Self {
+        Self {
+            root: root.join("source-meta-v0"),
+        }
+    }
+
+    /// Returns the cache entry for the given source checkout and input.
+    ///
+    /// Returns the cached metadata if it exists and is still valid and a
+    /// [`CacheEntry`] that can be used to update the cache. As long as the
+    /// [`CacheEntry`] is held, another process cannot update the cache.
+    pub async fn entry(
+        &self,
+        source: &SourceCheckout,
+        input: &SourceMetadataInput,
+    ) -> Result<(Option<CachedCondaMetadata>, CacheEntry), SourceMetadataError> {
+        // Locate the cache file and lock it.
+        let cache_dir = self.root.join(source_checkout_cache_key(source));
+        tokio::fs::create_dir_all(&cache_dir).await.map_err(|e| {
+            SourceMetadataError::IoError(
+                "creating cache directory".to_string(),
+                cache_dir.clone(),
+                e,
+            )
+        })?;
+
+        // Try to acquire a lock on the cache file.
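+        // The write lock on this file doubles as a cross-process mutex for the
+        // entry: `lock_write` blocks until no other process holds the lock, and
+        // the guard stored in the returned `CacheEntry` releases it on drop.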
+        let cache_file_path = cache_dir.join(input.hash_key()).with_extension("json");
+        let cache_file = tokio::fs::OpenOptions::new()
+            .write(true)
+            .read(true)
+            .truncate(false)
+            .create(true)
+            .open(&cache_file_path)
+            .await
+            .map_err(|e| {
+                SourceMetadataError::IoError(
+                    "opening cache file".to_string(),
+                    cache_file_path.clone(),
+                    e,
+                )
+            })?;
+
+        let mut locked_cache_file = cache_file.lock_write().await.map_err(|e| {
+            SourceMetadataError::IoError(
+                "locking cache file".to_string(),
+                cache_file_path.clone(),
+                e.error,
+            )
+        })?;
+
+        // Try to parse the contents of the file
+        let mut cache_file_contents = String::new();
+        locked_cache_file
+            .read_to_string(&mut cache_file_contents)
+            .await
+            .map_err(|e| {
+                SourceMetadataError::IoError(
+                    "reading cache file".to_string(),
+                    cache_file_path.clone(),
+                    e,
+                )
+            })?;
+
+        let metadata = serde_json::from_str(&cache_file_contents).ok();
+        Ok((
+            metadata,
+            CacheEntry {
+                file: locked_cache_file,
+                path: cache_file_path,
+            },
+        ))
+    }
+}
+
+/// A cache entry returned by [`SourceMetadataCache::entry`] which enables
+/// updating the cache.
+///
+/// As long as this entry is held, no other process can access this cache entry.
+pub struct CacheEntry {
+    file: RwLockWriteGuard<tokio::fs::File>,
+    path: PathBuf,
+}
+
+impl CacheEntry {
+    /// Consumes this instance and writes the given metadata to the cache.
+    pub async fn insert(
+        mut self,
+        metadata: CachedCondaMetadata,
+    ) -> Result<(), SourceMetadataError> {
+        self.file.seek(SeekFrom::Start(0)).await.map_err(|e| {
+            SourceMetadataError::IoError(
+                "seeking to start of cache file".to_string(),
+                self.path.clone(),
+                e,
+            )
+        })?;
+        let bytes = serde_json::to_vec(&metadata).expect("serialization to JSON should not fail");
+        self.file.write_all(&bytes).await.map_err(|e| {
+            SourceMetadataError::IoError(
+                "writing metadata to cache file".to_string(),
+                self.path.clone(),
+                e,
+            )
+        })?;
+        self.file
+            .inner_mut()
+            .set_len(bytes.len() as u64)
+            .await
+            .map_err(|e| {
+                SourceMetadataError::IoError(
+                    "setting length of cache file".to_string(),
+                    self.path.clone(),
+                    e,
+                )
+            })?;
+        Ok(())
+    }
+}
+
+/// Cached result of calling `conda/getMetadata` on a build backend. This is
+/// returned by [`SourceMetadataCache::entry`].
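+///
+/// On disk an entry is stored as a single JSON document; an illustrative
+/// (hypothetical) shape:
+///
+/// ```json
+/// { "input_hash": { "hash": "...", "globs": ["**/*.rs"] }, "packages": [] }
+/// ```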
+#[derive(Debug, Serialize, Deserialize)]
+pub struct CachedCondaMetadata {
+    #[serde(default, skip_serializing_if = "Option::is_none")]
+    pub input_hash: Option<InputHash>,
+    pub packages: Vec<CondaPackageMetadata>,
+}
diff --git a/src/build/mod.rs b/src/build/mod.rs
new file mode 100644
index 000000000..e2f0042a4
--- /dev/null
+++ b/src/build/mod.rs
@@ -0,0 +1,709 @@
+mod cache;
+mod reporters;
+
+use std::{
+    ffi::OsStr,
+    hash::{Hash, Hasher},
+    ops::Not,
+    path::{Component, Path, PathBuf},
+    str::FromStr,
+    sync::Arc,
+};
+
+use base64::{engine::general_purpose::URL_SAFE_NO_PAD, Engine};
+use chrono::Utc;
+use itertools::Itertools;
+use miette::Diagnostic;
+use pixi_build_frontend::{SetupRequest, ToolContext};
+use pixi_build_types::{
+    procedures::{
+        conda_build::{CondaBuildParams, CondaOutputIdentifier},
+        conda_metadata::CondaMetadataParams,
+    },
+    ChannelConfiguration, CondaPackageMetadata, PlatformAndVirtualPackages,
+};
+pub use pixi_glob::{GlobHashCache, GlobHashError};
+use pixi_glob::{GlobHashKey, GlobModificationTime, GlobModificationTimeError};
+use pixi_record::{InputHash, PinnedPathSpec, PinnedSourceSpec, SourceRecord};
+use pixi_spec::SourceSpec;
+use rattler_conda_types::{
+    Channel, ChannelConfig, ChannelUrl, GenericVirtualPackage, PackageRecord, Platform,
+    RepoDataRecord,
+};
+use rattler_digest::Sha256;
+use rattler_repodata_gateway::Gateway;
+pub use reporters::{BuildMetadataReporter, BuildReporter};
+use reqwest_middleware::ClientWithMiddleware;
+use thiserror::Error;
+use tracing::instrument;
+use typed_path::{Utf8TypedPath, Utf8TypedPathBuf};
+use url::Url;
+use xxhash_rust::xxh3::Xxh3;
+
+use crate::build::cache::{
+    BuildCache, BuildInput, CachedBuild, CachedCondaMetadata, SourceInfo, SourceMetadataCache,
+    SourceMetadataInput,
+};
+
+/// The [`BuildContext`] is used to build packages from source.
+#[derive(Clone)]
+pub struct BuildContext {
+    channel_config: ChannelConfig,
+    glob_hash_cache: GlobHashCache,
+    source_metadata_cache: SourceMetadataCache,
+    build_cache: BuildCache,
+    cache_dir: PathBuf,
+    work_dir: PathBuf,
+}
+
+#[derive(Debug, Error, Diagnostic)]
+pub enum BuildError {
+    #[error("failed to resolve source path {}", &.0)]
+    ResolveSourcePath(Utf8TypedPathBuf, #[source] std::io::Error),
+
+    #[error("error calculating sha for {}", &.0.display())]
+    CalculateSha(PathBuf, #[source] std::io::Error),
+
+    #[error(transparent)]
+    BuildFrontendSetup(pixi_build_frontend::BuildFrontendError),
+
+    #[error(transparent)]
+    BackendError(Box<dyn Diagnostic + Send + Sync + 'static>),
+
+    #[error(transparent)]
+    FrontendError(Box<dyn Diagnostic + Send + Sync + 'static>),
+
+    #[error(transparent)]
+    InputHash(#[from] GlobHashError),
+
+    #[error(transparent)]
+    GlobModificationError(#[from] GlobModificationTimeError),
+
+    #[error(transparent)]
+    SourceMetadataError(#[from] cache::SourceMetadataError),
+
+    #[error(transparent)]
+    BuildCacheError(#[from] cache::BuildCacheError),
+}
+
+/// Location of the source code for a package. This will be used as the input
+/// for the build process. Archives are unpacked, git clones are checked out,
+/// etc.
+#[derive(Debug, Clone)]
+pub struct SourceCheckout {
+    /// The path to where the source is located locally on disk.
+    pub path: PathBuf,
+
+    /// The exact source specification
+    pub pinned: PinnedSourceSpec,
+}
+
+/// The metadata of a source checkout.
+#[derive(Debug)]
+pub struct SourceMetadata {
+    /// The source checkout that the manifest was extracted from.
+    pub source: SourceCheckout,
+
+    /// All the records that can be extracted from the source.
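+    /// Typically one record per package output that the build backend
+    /// reports for this source.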
+    pub records: Vec<SourceRecord>,
+}
+
+impl BuildContext {
+    pub fn new(cache_dir: PathBuf, dot_pixi_dir: PathBuf, channel_config: ChannelConfig) -> Self {
+        Self {
+            channel_config,
+            glob_hash_cache: GlobHashCache::default(),
+            source_metadata_cache: SourceMetadataCache::new(cache_dir.clone()),
+            build_cache: BuildCache::new(cache_dir.clone()),
+            cache_dir,
+            work_dir: dot_pixi_dir.join("build-v0"),
+        }
+    }
+
+    /// Sets the input hash cache to use for caching input hashes.
+    pub fn with_glob_hash_cache(self, glob_hash_cache: GlobHashCache) -> Self {
+        Self {
+            glob_hash_cache,
+            ..self
+        }
+    }
+
+    /// Extracts the metadata for a package from the given source specification.
+    #[allow(clippy::too_many_arguments)]
+    pub async fn extract_source_metadata(
+        &self,
+        source_spec: &SourceSpec,
+        channels: &[ChannelUrl],
+        build_channels: Vec<Channel>,
+        host_platform: Platform,
+        host_virtual_packages: Vec<GenericVirtualPackage>,
+        build_platform: Platform,
+        build_virtual_packages: Vec<GenericVirtualPackage>,
+        metadata_reporter: Arc<dyn BuildMetadataReporter>,
+        build_id: usize,
+        gateway: Gateway,
+        client: ClientWithMiddleware,
+    ) -> Result<SourceMetadata, BuildError> {
+        let source = self.fetch_source(source_spec).await?;
+        let records = self
+            .extract_records(
+                &source,
+                channels,
+                build_channels,
+                host_platform,
+                host_virtual_packages,
+                build_platform,
+                build_virtual_packages,
+                metadata_reporter.clone(),
+                build_id,
+                gateway,
+                client,
+            )
+            .await?;
+
+        Ok(SourceMetadata { source, records })
+    }
+
+    /// Build a package from the given source specification.
+    #[instrument(skip_all, fields(source = %source_spec.source))]
+    #[allow(clippy::too_many_arguments)]
+    pub async fn build_source_record(
+        &self,
+        source_spec: &SourceRecord,
+        build_channels: Vec<Channel>,
+        channels: &[ChannelUrl],
+        host_platform: Platform,
+        host_virtual_packages: Vec<GenericVirtualPackage>,
+        build_virtual_packages: Vec<GenericVirtualPackage>,
+        build_reporter: Arc<dyn BuildReporter>,
+        build_id: usize,
+        authenticated_client: ClientWithMiddleware,
+        gateway: Gateway,
+    ) -> Result<RepoDataRecord, BuildError> {
+        let source_checkout = SourceCheckout {
+            path: self.fetch_pinned_source(&source_spec.source).await?,
+            pinned: source_spec.source.clone(),
+        };
+
+        let channels_urls: Vec<Url> = channels.iter().cloned().map(Into::into).collect();
+
+        let (cached_build, entry) = self
+            .build_cache
+            .entry(
+                &source_checkout,
+                &BuildInput {
+                    channel_urls: channels.iter().cloned().map(Into::into).collect(),
+                    target_platform: Platform::from_str(&source_spec.package_record.subdir)
+                        .ok()
+                        .unwrap_or(host_platform),
+                    name: source_spec.package_record.name.as_normalized().to_string(),
+                    version: source_spec.package_record.version.to_string(),
+                    build: source_spec.package_record.build.clone(),
+                    host_platform,
+                    host_virtual_packages: host_virtual_packages.clone(),
+                    build_virtual_packages: build_virtual_packages.clone(),
+                },
+            )
+            .await?;
+
+        if let Some(build) = cached_build {
+            // Check to see if the cached build is up-to-date.
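+            // A cached build counts as fresh when the cached package's
+            // timestamp is at least as new as the newest source file matching
+            // the stored globs; otherwise the package is rebuilt. Entries
+            // without source info stem from immutable checkouts and are
+            // trusted as-is.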
+            if let Some(source_input) = build.source {
+                let glob_time = GlobModificationTime::from_patterns(
+                    &source_checkout.path,
+                    source_input.globs.iter().map(String::as_str),
+                )
+                .map_err(BuildError::GlobModificationError)?;
+                match glob_time {
+                    GlobModificationTime::MatchesFound {
+                        modified_at,
+                        designated_file,
+                    } => {
+                        if build
+                            .record
+                            .package_record
+                            .timestamp
+                            .map(|t| t >= chrono::DateTime::<Utc>::from(modified_at))
+                            .unwrap_or(false)
+                        {
+                            build_reporter.on_build_cached(build_id);
+                            tracing::debug!("found an up-to-date cached build.");
+                            return Ok(build.record);
+                        } else {
+                            tracing::debug!(
+                                "found a stale cached build, {} is newer than {}",
+                                designated_file.display(),
+                                build.record.package_record.timestamp.unwrap_or_default()
+                            );
+                        }
+                    }
+                    GlobModificationTime::NoMatches => {
+                        // No matches, so we should rebuild.
+                        tracing::debug!(
+                            "found a stale cached build, no files match the source glob"
+                        );
+                    }
+                }
+            } else {
+                tracing::debug!("found a cached build");
+                build_reporter.on_build_cached(build_id);
+
+                // If there is no source info in the cache we assume it's still valid.
+                return Ok(build.record);
+            }
+        }
+
+        let tool_config = ToolContext::builder(build_channels.to_vec())
+            .with_gateway(gateway.clone())
+            .with_client(authenticated_client.clone())
+            .build();
+
+        // Instantiate a protocol for the source directory.
+        let protocol = pixi_build_frontend::BuildFrontend::default()
+            .with_channel_config(self.channel_config.clone())
+            .with_cache_dir(self.cache_dir.clone())
+            .with_tool_context(tool_config)
+            .setup_protocol(SetupRequest {
+                source_dir: source_checkout.path.clone(),
+                build_tool_override: Default::default(),
+                build_id,
+            })
+            .await
+            .map_err(BuildError::BuildFrontendSetup)?;
+
+        // Build the package through the build backend.
+        let build_result = protocol
+            .conda_build(
+                &CondaBuildParams {
+                    host_platform: Some(PlatformAndVirtualPackages {
+                        platform: host_platform,
+                        virtual_packages: Some(host_virtual_packages.clone()),
+                    }),
+                    build_platform_virtual_packages: Some(build_virtual_packages.clone()),
+                    channel_base_urls: Some(channels_urls),
+                    channel_configuration: ChannelConfiguration {
+                        base_url: self.channel_config.channel_alias.clone(),
+                    },
+                    outputs: Some(vec![CondaOutputIdentifier {
+                        name: Some(source_spec.package_record.name.as_normalized().to_string()),
+                        version: Some(source_spec.package_record.version.version().to_string()),
+                        build: Some(source_spec.package_record.build.clone()),
+                        subdir: Some(source_spec.package_record.subdir.clone()),
+                    }]),
+                    work_directory: self.work_dir.join(
+                        WorkDirKey {
+                            source: source_checkout.clone(),
+                            host_platform,
+                            build_backend: protocol.identifier().to_string(),
+                        }
+                        .key(),
+                    ),
+                },
+                build_reporter.as_conda_build_reporter(),
+            )
+            .await
+            .map_err(|e| BuildError::BackendError(e.into()))?;
+
+        let build_result = build_result
+            .packages
+            .into_iter()
+            .exactly_one()
+            .map_err(|e| {
+                BuildError::FrontendError(
+                    miette::miette!(
+                        "expected the build backend to return a single built package but it returned {}",
+                        e.len()
+                    )
+                    .into(),
+                )
+            })?;
+
+        // Add the sha256 to the package record.
+        let sha = rattler_digest::compute_file_digest::<Sha256>(&build_result.output_file)
+            .map_err(|e| BuildError::CalculateSha(build_result.output_file.clone(), e))?;
+
+        // Update the package_record sha256 field and timestamp.
+        let mut package_record = source_spec.package_record.clone();
+        package_record.sha256 = Some(sha);
+        package_record.timestamp.get_or_insert_with(Utc::now);
+
+        // Construct a repodata record that represents the package
+        let record = RepoDataRecord {
+            package_record,
+            url: Url::from_file_path(&build_result.output_file).map_err(|_| {
+                BuildError::FrontendError(
+                    miette::miette!(
+                        "failed to convert returned path to URL: {}",
+                        build_result.output_file.display()
+                    )
+                    .into(),
+                )
+            })?,
+            channel: None,
+            file_name: build_result
+                .output_file
+                .file_name()
+                .and_then(OsStr::to_str)
+                .map(ToString::to_string)
+                .unwrap_or_default(),
+        };
+
+        // Store the build in the cache
+        let updated_record = entry
+            .insert(CachedBuild {
+                source: source_checkout
+                    .pinned
+                    .is_immutable()
+                    .not()
+                    .then_some(SourceInfo {
+                        globs: build_result.input_globs,
+                    }),
+                record: record.clone(),
+            })
+            .await?;
+
+        Ok(updated_record)
+    }
+
+    /// Acquires the source from the given source specification. A source
+    /// specification does not necessarily point to a specific pinned source,
+    /// e.g. a git spec that points to a branch or a tag. This function will
+    /// fetch the source and return a [`SourceCheckout`] that points to the
+    /// actual source. This also pins the source spec to a specific checkout
+    /// (e.g. git commit hash).
+    ///
+    /// TODO(baszalmstra): Ideally we would cache the result of this on disk
+    /// somewhere.
+    pub async fn fetch_source(
+        &self,
+        source_spec: &SourceSpec,
+    ) -> Result<SourceCheckout, BuildError> {
+        match source_spec {
+            SourceSpec::Url(_) => unimplemented!("fetching URL sources is not yet implemented"),
+            SourceSpec::Git(_) => unimplemented!("fetching Git sources is not yet implemented"),
+            SourceSpec::Path(path) => {
+                let source_path = self
+                    .resolve_path(path.path.to_path())
+                    .map_err(|err| BuildError::ResolveSourcePath(path.path.clone(), err))?;
+                Ok(SourceCheckout {
+                    path: source_path,
+                    pinned: PinnedPathSpec {
+                        path: path.path.clone(),
+                    }
+                    .into(),
+                })
+            }
+        }
+    }
+
+    /// Acquires the source from the given source specification.
+    ///
+    /// TODO(baszalmstra): Ideally we would cache the result of this on disk
+    /// somewhere.
+    pub async fn fetch_pinned_source(
+        &self,
+        source_spec: &PinnedSourceSpec,
+    ) -> Result<PathBuf, BuildError> {
+        match source_spec {
+            PinnedSourceSpec::Url(_) => {
+                unimplemented!("fetching URL sources is not yet implemented")
+            }
+            PinnedSourceSpec::Git(_) => {
+                unimplemented!("fetching Git sources is not yet implemented")
+            }
+            PinnedSourceSpec::Path(path) => self
+                .resolve_path(path.path.to_path())
+                .map_err(|err| BuildError::ResolveSourcePath(path.path.clone(), err)),
+        }
+    }
+
+    /// Resolves the source path to a full path.
+    ///
+    /// This function does not check if the path exists and also does not
+    /// follow symlinks.
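+    ///
+    /// Illustrative behaviour (hypothetical paths; the exact results depend
+    /// on the home directory and the channel config's root dir):
+    ///
+    /// ```ignore
+    /// ctx.resolve_path(Utf8TypedPath::derive("/opt/src/pkg"))?; // taken verbatim
+    /// ctx.resolve_path(Utf8TypedPath::derive("~/src/pkg"))?;    // expanded to $HOME/src/pkg
+    /// ctx.resolve_path(Utf8TypedPath::derive("../pkg"))?;       // joined to root_dir, normalized
+    /// ```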
+    fn resolve_path(&self, path_spec: Utf8TypedPath) -> Result<PathBuf, std::io::Error> {
+        if path_spec.is_absolute() {
+            Ok(Path::new(path_spec.as_str()).to_path_buf())
+        } else if let Ok(user_path) = path_spec.strip_prefix("~/") {
+            let home_dir = dirs::home_dir().ok_or_else(|| {
+                std::io::Error::new(
+                    std::io::ErrorKind::InvalidData,
+                    "could not determine home directory",
+                )
+            })?;
+            debug_assert!(home_dir.is_absolute());
+            normalize_absolute_path(&home_dir.join(Path::new(user_path.as_str())))
+        } else {
+            let root_dir = self.channel_config.root_dir.as_path();
+            let native_path = Path::new(path_spec.as_str());
+            debug_assert!(root_dir.is_absolute());
+            normalize_absolute_path(&root_dir.join(native_path))
+        }
+    }
+
+    /// Extracts the metadata from a package whose source is located at the
+    /// given path.
+    #[instrument(skip_all, fields(source = %source.pinned, platform = %host_platform))]
+    #[allow(clippy::too_many_arguments)]
+    async fn extract_records(
+        &self,
+        source: &SourceCheckout,
+        channels: &[ChannelUrl],
+        build_channels: Vec<Channel>,
+        host_platform: Platform,
+        host_virtual_packages: Vec<GenericVirtualPackage>,
+        build_platform: Platform,
+        build_virtual_packages: Vec<GenericVirtualPackage>,
+        metadata_reporter: Arc<dyn BuildMetadataReporter>,
+        build_id: usize,
+        gateway: Gateway,
+        client: ClientWithMiddleware,
+    ) -> Result<Vec<SourceRecord>, BuildError> {
+        let channel_urls = channels.iter().cloned().map(Into::into).collect::<Vec<Url>>();
+
+        let (cached_metadata, cache_entry) = self
+            .source_metadata_cache
+            .entry(
+                source,
+                &SourceMetadataInput {
+                    channel_urls: channel_urls.clone(),
+                    build_platform,
+                    build_virtual_packages: build_virtual_packages.clone(),
+                    host_platform,
+                    host_virtual_packages: host_virtual_packages.clone(),
+                },
+            )
+            .await?;
+        if let Some(metadata) = cached_metadata {
+            // Check if the input hash is still valid.
+            if let Some(input_globs) = &metadata.input_hash {
+                let new_hash = self
+                    .glob_hash_cache
+                    .compute_hash(GlobHashKey {
+                        root: source.path.clone(),
+                        globs: input_globs.globs.clone(),
+                    })
+                    .await?;
+                if new_hash.hash == input_globs.hash {
+                    tracing::debug!("found up-to-date cached metadata.");
+                    return Ok(source_metadata_to_records(
+                        source,
+                        metadata.packages,
+                        metadata.input_hash,
+                    ));
+                } else {
+                    tracing::debug!("found stale cached metadata.");
+                }
+            } else {
+                tracing::debug!("found cached metadata.");
+                metadata_reporter.on_metadata_cached(build_id);
+                // No input hash so just assume it is still valid.
+                return Ok(source_metadata_to_records(
+                    source,
+                    metadata.packages,
+                    metadata.input_hash,
+                ));
+            }
+        }
+
+        // Construct the tool context used to instantiate the build backend.
+        let tool_config = ToolContext::builder(build_channels)
+            .with_gateway(gateway)
+            .with_client(client)
+            .build();
+
+        // Instantiate a protocol for the source directory.
+        let protocol = pixi_build_frontend::BuildFrontend::default()
+            .with_channel_config(self.channel_config.clone())
+            .with_tool_context(tool_config)
+            .setup_protocol(SetupRequest {
+                source_dir: source.path.clone(),
+                build_tool_override: Default::default(),
+                build_id,
+            })
+            .await
+            .map_err(BuildError::BuildFrontendSetup)?;
+
+        // Extract the conda metadata for the package.
+        let metadata = protocol
+            .get_conda_metadata(
+                &CondaMetadataParams {
+                    build_platform: Some(PlatformAndVirtualPackages {
+                        platform: build_platform,
+                        virtual_packages: Some(build_virtual_packages),
+                    }),
+                    host_platform: Some(PlatformAndVirtualPackages {
+                        platform: host_platform,
+                        virtual_packages: Some(host_virtual_packages),
+                    }),
+                    channel_base_urls: Some(channel_urls),
+                    channel_configuration: ChannelConfiguration {
+                        base_url: self.channel_config.channel_alias.clone(),
+                    },
+                    work_directory: self.work_dir.join(
+                        WorkDirKey {
+                            source: source.clone(),
+                            host_platform,
+                            build_backend: protocol.identifier().to_string(),
+                        }
+                        .key(),
+                    ),
+                },
+                metadata_reporter.as_conda_metadata_reporter().clone(),
+            )
+            .await
+            .map_err(|e| BuildError::BackendError(e.into()))?;
+
+        // Compute the input globs for the mutable source checkouts.
+        let input_hash = if source.pinned.is_immutable() {
+            None
+        } else {
+            let input_globs = metadata.input_globs.clone().unwrap_or(protocol.manifests());
+            let input_hash = self
+                .glob_hash_cache
+                .compute_hash(GlobHashKey {
+                    root: source.path.clone(),
+                    globs: input_globs.clone(),
+                })
+                .await?;
+            Some(InputHash {
+                hash: input_hash.hash,
+                globs: input_globs,
+            })
+        };
+
+        // Store in the cache
+        cache_entry
+            .insert(CachedCondaMetadata {
+                packages: metadata.packages.clone(),
+                input_hash: input_hash.clone(),
+            })
+            .await?;
+
+        Ok(source_metadata_to_records(
+            source,
+            metadata.packages,
+            input_hash,
+        ))
+    }
+}
+
+fn source_metadata_to_records(
+    source: &SourceCheckout,
+    packages: Vec<CondaPackageMetadata>,
+    input_hash: Option<InputHash>,
+) -> Vec<SourceRecord> {
+    // Convert the metadata to repodata
+    let packages = packages
+        .into_iter()
+        .map(|p| {
+            SourceRecord {
+                input_hash: input_hash.clone(),
+                source: source.pinned.clone(),
+                package_record: PackageRecord {
+                    // We cannot know these values from the metadata because no actual package
+                    // was built yet.
+                    size: None,
+                    sha256: None,
+                    md5: None,
+
+                    // TODO(baszalmstra): Decide if it makes sense to include the current
+                    // timestamp here.
+                    timestamp: None,
+
+                    // These values are derived from the build backend values.
+                    platform: p.subdir.only_platform().map(ToString::to_string),
+                    arch: p.subdir.arch().as_ref().map(ToString::to_string),
+
+                    // These values are passed by the build backend
+                    name: p.name,
+                    build: p.build,
+                    version: p.version,
+                    build_number: p.build_number,
+                    license: p.license,
+                    subdir: p.subdir.to_string(),
+                    license_family: p.license_family,
+                    noarch: p.noarch,
+                    constrains: p.constraints.into_iter().map(|c| c.to_string()).collect(),
+                    depends: p.depends.into_iter().map(|c| c.to_string()).collect(),
+
+                    // These are deprecated and no longer used.
+                    features: None,
+                    track_features: vec![],
+                    legacy_bz2_md5: None,
+                    legacy_bz2_size: None,
+                    python_site_packages_path: None,
+
+                    // TODO(baszalmstra): Add support for these.
+                    purls: None,
+
+                    // These are not important at this point.
+                    run_exports: None,
+                },
+            }
+        })
+        .collect();
+    packages
+}
+
+/// Normalize a path, removing things like `.` and `..`.
+///
+/// Source:
+fn normalize_absolute_path(path: &Path) -> Result<PathBuf, std::io::Error> {
+    let mut components = path.components().peekable();
+    let mut ret = if let Some(c @ Component::Prefix(..)) = components.peek().copied() {
+        components.next();
+        PathBuf::from(c.as_os_str())
+    } else {
+        PathBuf::new()
+    };
+
+    for component in components {
+        match component {
+            Component::Prefix(..) => unreachable!(),
+            Component::RootDir => {
+                ret.push(component.as_os_str());
+            }
+            Component::CurDir => {}
+            Component::ParentDir => {
+                if !ret.pop() {
+                    return Err(std::io::Error::new(
+                        std::io::ErrorKind::InvalidInput,
+                        format!(
+                            "cannot normalize a relative path beyond the base directory: {}",
+                            path.display()
+                        ),
+                    ));
+                }
+            }
+            Component::Normal(c) => {
+                ret.push(c);
+            }
+        }
+    }
+    Ok(ret)
+}
+
+/// A key to uniquely identify a work directory. If there is a source build with
+/// the same key they will share the same working directory.
+struct WorkDirKey {
+    /// The location of the source
+    source: SourceCheckout,
+
+    /// The platform the dependency will run on
+    host_platform: Platform,
+
+    /// The build backend name
+    /// TODO: Maybe we should also include the version.
+    build_backend: String,
+}
+
+impl WorkDirKey {
+    pub fn key(&self) -> String {
+        let mut hasher = Xxh3::new();
+        self.source.pinned.to_string().hash(&mut hasher);
+        self.host_platform.hash(&mut hasher);
+        self.build_backend.hash(&mut hasher);
+        let unique_key = URL_SAFE_NO_PAD.encode(hasher.finish().to_ne_bytes());
+        match self.source.path.file_name().and_then(OsStr::to_str) {
+            Some(name) => format!("{}-{}", name, unique_key),
+            None => unique_key,
+        }
+    }
+}
diff --git a/src/build/reporters.rs b/src/build/reporters.rs
new file mode 100644
index 000000000..4def2bfbf
--- /dev/null
+++ b/src/build/reporters.rs
@@ -0,0 +1,55 @@
+use std::sync::Arc;
+
+use pixi_build_frontend::{CondaBuildReporter, CondaMetadataReporter};
+
+pub trait BuildMetadataReporter: CondaMetadataReporter {
+    /// Reports that the metadata has been cached.
+    fn on_metadata_cached(&self, build_id: usize);
+
+    /// Cast upwards
+    fn as_conda_metadata_reporter(self: Arc<Self>) -> Arc<dyn CondaMetadataReporter>;
+}
+
+/// Noop implementation of the BuildMetadataReporter trait.
+struct NoopBuildMetadataReporter;
+impl CondaMetadataReporter for NoopBuildMetadataReporter {
+    fn on_metadata_start(&self, _build_id: usize) -> usize {
+        0
+    }
+
+    fn on_metadata_end(&self, _operation: usize) {}
+}
+impl BuildMetadataReporter for NoopBuildMetadataReporter {
+    fn on_metadata_cached(&self, _build_id: usize) {}
+
+    fn as_conda_metadata_reporter(self: Arc<Self>) -> Arc<dyn CondaMetadataReporter> {
+        self
+    }
+}
+
+pub trait BuildReporter: CondaBuildReporter {
+    /// Reports that the build has been cached.
+    fn on_build_cached(&self, build_id: usize);
+
+    /// Cast upwards
+    fn as_conda_build_reporter(self: Arc<Self>) -> Arc<dyn CondaBuildReporter>;
+}
+
+/// Noop implementation of the BuildReporter trait.
+struct NoopBuildReporter;
+impl CondaBuildReporter for NoopBuildReporter {
+    fn on_build_start(&self, _build_id: usize) -> usize {
+        0
+    }
+
+    fn on_build_end(&self, _operation: usize) {}
+
+    fn on_build_output(&self, _operation: usize, _line: String) {}
+}
+impl BuildReporter for NoopBuildReporter {
+    fn on_build_cached(&self, _build_id: usize) {}
+
+    fn as_conda_build_reporter(self: Arc<Self>) -> Arc<dyn CondaBuildReporter> {
+        self
+    }
+}
diff --git a/src/cli/add.rs b/src/cli/add.rs
index 88ec3def5..5315700d4 100644
--- a/src/cli/add.rs
+++ b/src/cli/add.rs
@@ -12,9 +12,9 @@ use crate::{
 /// Adds dependencies to the project
 ///
 /// The dependencies should be defined as MatchSpec for conda package, or a PyPI
-/// requirement for the `--pypi` dependencies. If no specific version is provided,
-/// the latest version compatible with your project will be chosen automatically
-/// or a * will be used.
+/// requirement for the `--pypi` dependencies.
If no specific version is +/// provided, the latest version compatible with your project will be chosen +/// automatically or a * will be used. /// /// Example usage: /// diff --git a/src/cli/build.rs b/src/cli/build.rs new file mode 100644 index 000000000..dc13c0a28 --- /dev/null +++ b/src/cli/build.rs @@ -0,0 +1,259 @@ +use std::{path::PathBuf, sync::Arc, time::Duration}; + +use clap::{ArgAction, Parser}; +use indicatif::ProgressBar; +use miette::{Context, IntoDiagnostic}; +use pixi_build_frontend::{BackendOverride, CondaBuildReporter, EnabledProtocols, SetupRequest}; +use pixi_build_types::{ + procedures::conda_build::CondaBuildParams, ChannelConfiguration, PlatformAndVirtualPackages, +}; +use pixi_config::ConfigCli; +use pixi_manifest::FeaturesExt; +use rattler_conda_types::{GenericVirtualPackage, Platform}; + +use crate::{ + cli::cli_config::ProjectConfig, + repodata::Repodata, + utils::{move_file, MoveError}, + Project, +}; + +#[derive(Parser, Debug)] +#[clap(verbatim_doc_comment)] +pub struct Args { + #[clap(flatten)] + pub project_config: ProjectConfig, + + #[clap(flatten)] + pub config_cli: ConfigCli, + + /// The target platform to build for (defaults to the current platform) + #[clap(long, short, default_value_t = Platform::current())] + pub target_platform: Platform, + + /// The output directory to place the build artifacts + #[clap(long, short, default_value = ".")] + pub output_dir: PathBuf, + + /// Use system backend installed tool + #[arg(long, action = ArgAction::SetTrue)] + pub with_system: bool, + + /// If a recipe.yaml is present in the source directory, ignore it + /// and build the package using manifest only + #[arg(long, action = ArgAction::SetTrue)] + pub ignore_recipe: bool, +} + +struct ProgressReporter { + progress_bar: indicatif::ProgressBar, +} + +impl ProgressReporter { + fn new(source: &str) -> Self { + let style = indicatif::ProgressStyle::default_bar() + .template("{spinner:.dim} {elapsed} {prefix} {wide_msg:.dim}") + .unwrap(); + let pb = ProgressBar::new(0); + pb.set_style(style); + let progress = pixi_progress::global_multi_progress().add(pb); + progress.set_prefix(format!("building package: {}", source)); + progress.enable_steady_tick(Duration::from_millis(100)); + + Self { + progress_bar: progress, + } + } +} + +impl CondaBuildReporter for ProgressReporter { + /// Starts a progress bar that should currently be + /// [spinner] message + fn on_build_start(&self, _build_id: usize) -> usize { + // Create a new progress bar. + // Building the package + 0 + } + + fn on_build_end(&self, _operation: usize) { + // Finish the progress bar. + self.progress_bar.finish_with_message("build completed"); + } + + fn on_build_output(&self, _operation: usize, line: String) { + self.progress_bar.suspend(|| eprintln!("{}", line)) + } +} + +pub async fn execute(args: Args) -> miette::Result<()> { + let project = Project::load_or_else_discover(args.project_config.manifest_path.as_deref())? + .with_cli_config(args.config_cli); + + // TODO: Implement logic to take the source code from a VCS instead of from a + // local channel so that that information is also encoded in the manifest. + + // Instantiate a protocol for the source directory. + let channel_config = project.channel_config(); + let channels = project + .manifest() + .build_section() + .ok_or_else(|| miette::miette!("no build section found in the manifest"))? 
+        .channels(&channel_config)
+        .into_diagnostic()?;
+
+    let tool_config = pixi_build_frontend::ToolContext::builder(channels)
+        .with_gateway(project.repodata_gateway().clone())
+        .with_client(project.authenticated_client().clone())
+        .build();
+
+    let build_section = project
+        .manifest()
+        .build_section()
+        .ok_or_else(|| miette::miette!("no build section found in the manifest"))?;
+
+    let backend_override = if args.with_system {
+        Some(BackendOverride::System(build_section.build_backend.clone()))
+    } else {
+        None
+    };
+
+    let enabled_protocols = EnabledProtocols {
+        enable_rattler_build: !args.ignore_recipe,
+        ..Default::default()
+    };
+
+    let protocol = pixi_build_frontend::BuildFrontend::default()
+        .with_channel_config(channel_config.clone())
+        .with_tool_context(tool_config)
+        .with_enabled_protocols(enabled_protocols)
+        .setup_protocol(SetupRequest {
+            source_dir: project.root().to_path_buf(),
+            build_tool_override: backend_override,
+            build_id: 0,
+        })
+        .await
+        .into_diagnostic()
+        .wrap_err("unable to setup the build-backend to build the project")?;
+
+    // Construct a temporary directory to build the package in. This path is also
+    // automatically removed after the build finishes.
+    let pixi_dir = &project.pixi_dir();
+    tokio::fs::create_dir_all(pixi_dir)
+        .await
+        .into_diagnostic()
+        .with_context(|| {
+            format!(
+                "failed to create the .pixi directory at '{}'",
+                pixi_dir.display()
+            )
+        })?;
+    let work_dir = tempfile::Builder::new()
+        .prefix("pixi-build-")
+        .tempdir_in(project.pixi_dir())
+        .into_diagnostic()
+        .context("failed to create temporary working directory in the .pixi directory")?;
+
+    let progress = Arc::new(ProgressReporter::new(project.name()));
+    // Build platform virtual packages
+    let build_platform_virtual_packages: Vec<GenericVirtualPackage> = project
+        .default_environment()
+        .virtual_packages(Platform::current())
+        .into_iter()
+        .map(GenericVirtualPackage::from)
+        .collect();
+
+    // Host platform virtual packages
+    let host_platform_virtual_packages: Vec<GenericVirtualPackage> = project
+        .default_environment()
+        .virtual_packages(args.target_platform)
+        .into_iter()
+        .map(GenericVirtualPackage::from)
+        .collect();
+
+    // Build the individual packages.
+    let result = protocol
+        .conda_build(
+            &CondaBuildParams {
+                build_platform_virtual_packages: Some(build_platform_virtual_packages),
+                host_platform: Some(PlatformAndVirtualPackages {
+                    platform: args.target_platform,
+                    virtual_packages: Some(host_platform_virtual_packages),
+                }),
+                channel_base_urls: Some(
+                    project
+                        .default_environment()
+                        .channel_urls(&channel_config)
+                        .into_diagnostic()?
+                        .into_iter()
+                        .map(Into::into)
+                        .collect(),
+                ),
+                channel_configuration: ChannelConfiguration {
+                    base_url: channel_config.channel_alias,
+                },
+                outputs: None,
+                work_directory: work_dir.path().to_path_buf(),
+            },
+            progress.clone(),
+        )
+        .await
+        .wrap_err("during the building of the project the following error occurred")?;
+
+    // Move the built packages to the output directory.
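+    // `move_file` may degrade to a copy followed by a delete when the artifact
+    // lives on a different filesystem; the `MoveError` variants handled below
+    // distinguish these failure modes so that only the non-fatal one (failing
+    // to remove the source after a successful copy) is downgraded to a warning.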
+ let output_dir = args.output_dir; + for package in result.packages { + std::fs::create_dir_all(&output_dir) + .into_diagnostic() + .with_context(|| { + format!( + "failed to create output directory '{0}'", + output_dir.display() + ) + })?; + + let file_name = package.output_file.file_name().ok_or_else(|| { + miette::miette!( + "output file '{0}' does not have a file name", + package.output_file.display() + ) + })?; + let dest = output_dir.join(file_name); + if let Err(err) = move_file(&package.output_file, &dest) { + match err { + MoveError::CopyFailed(err) => { + return Err(err).into_diagnostic().with_context(|| { + format!( + "failed to copy {} to {}", + package.output_file.display(), + dest.display() + ) + }); + } + MoveError::FailedToRemove(e) => { + tracing::warn!( + "failed to remove {} after copying it to the output directory: {}", + package.output_file.display(), + e + ); + } + MoveError::MoveFailed(e) => { + return Err(e).into_diagnostic().with_context(|| { + format!( + "failed to move {} to {}", + package.output_file.display(), + dest.display() + ) + }) + } + } + } + + println!( + "{}Successfully built '{}'", + console::style(console::Emoji("✔ ", "")).green(), + dest.display() + ); + } + + Ok(()) +} diff --git a/src/cli/exec.rs b/src/cli/exec.rs index 586e107ba..297035d55 100644 --- a/src/cli/exec.rs +++ b/src/cli/exec.rs @@ -1,11 +1,10 @@ -use std::{ - hash::{DefaultHasher, Hash, Hasher}, - path::Path, - str::FromStr, -}; +use std::{path::Path, str::FromStr}; use clap::{Parser, ValueHint}; use miette::{Context, IntoDiagnostic}; +use pixi_config::{self, Config, ConfigCli}; +use pixi_progress::{await_in_progress, global_multi_progress, wrap_in_progress}; +use pixi_utils::{reqwest::build_reqwest_clients, EnvironmentHash, PrefixGuard}; use rattler::{ install::{IndicatifReporter, Installer}, package_cache::PackageCache, @@ -15,12 +14,8 @@ use rattler_solve::{resolvo::Solver, SolverImpl, SolverTask}; use rattler_virtual_packages::{VirtualPackage, VirtualPackageOverrides}; use reqwest_middleware::ClientWithMiddleware; -use crate::prefix::Prefix; -use pixi_config::{self, Config, ConfigCli}; -use pixi_progress::{await_in_progress, global_multi_progress, wrap_in_progress}; -use pixi_utils::{reqwest::build_reqwest_clients, PrefixGuard}; - use super::cli_config::ChannelsConfig; +use crate::prefix::Prefix; /// Run a command in a temporary environment. #[derive(Parser, Debug, Default)] @@ -47,40 +42,6 @@ pub struct Args { pub config: ConfigCli, } -#[derive(Hash)] -pub struct EnvironmentHash { - pub command: String, - pub specs: Vec, - pub channels: Vec, -} - -impl EnvironmentHash { - pub(crate) fn from_args(args: &Args, config: &Config) -> miette::Result { - Ok(Self { - command: args - .command - .first() - .cloned() - .expect("missing required command"), - specs: args.specs.clone(), - channels: args - .channels - .resolve_from_config(config)? - .iter() - .map(|c| c.base_url.to_string()) - .collect(), - }) - } - - /// Returns the name of the environment. 
- pub(crate) fn name(&self) -> String { - let mut hasher = DefaultHasher::new(); - self.hash(&mut hasher); - let hash = hasher.finish(); - format!("{}-{:x}", &self.command, hash) - } -} - /// CLI entry point for `pixi runx` pub async fn execute(args: Args) -> miette::Result<()> { let config = Config::with_cli_config(&args.config); @@ -118,11 +79,21 @@ pub async fn create_exec_prefix( config: &Config, client: &ClientWithMiddleware, ) -> miette::Result { - let environment_name = EnvironmentHash::from_args(args, config)?.name(); + let command = args.command.first().expect("missing required command"); + let specs = args.specs.clone(); + let channels = args + .channels + .resolve_from_config(config)? + .iter() + .map(|c| c.base_url.to_string()) + .collect(); + + let environment_hash = EnvironmentHash::new(command.clone(), specs, channels); + let prefix = Prefix::new( cache_dir .join(pixi_consts::consts::CACHED_ENVS_DIR) - .join(environment_name), + .join(environment_hash.name()), ); let mut guard = PrefixGuard::new(prefix.root()) @@ -181,9 +152,9 @@ pub async fn create_exec_prefix( .recursive(true) .execute() .await + .into_diagnostic() }) .await - .into_diagnostic() .context("failed to get repodata")?; // Determine virtual packages of the current platform diff --git a/src/cli/info.rs b/src/cli/info.rs index 540d2b077..a39c7b350 100644 --- a/src/cli/info.rs +++ b/src/cli/info.rs @@ -391,7 +391,7 @@ pub async fn execute(args: Args) -> miette::Result<()> { .map(|solve_group| solve_group.name().to_string()), environment_size: None, dependencies: env - .dependencies(None, Some(env.best_platform())) + .combined_dependencies(Some(env.best_platform())) .names() .map(|p| p.as_source().to_string()) .collect(), diff --git a/src/cli/init.rs b/src/cli/init.rs index 499b27099..d7a948968 100644 --- a/src/cli/init.rs +++ b/src/cli/init.rs @@ -1,4 +1,11 @@ -use crate::Project; +use std::{ + cmp::PartialEq, + fs, + io::{Error, ErrorKind, Write}, + path::{Path, PathBuf}, + str::FromStr, +}; + use clap::{Parser, ValueEnum}; use miette::{Context, IntoDiagnostic}; use minijinja::{context, Environment}; @@ -9,17 +16,12 @@ use pixi_manifest::{ }; use pixi_utils::conda_environment_file::CondaEnvFile; use rattler_conda_types::{NamedChannelOrUrl, Platform}; -use std::str::FromStr; -use std::{ - cmp::PartialEq, - fs, - io::{Error, ErrorKind, Write}, - path::{Path, PathBuf}, -}; use tokio::fs::OpenOptions; use url::Url; use uv_normalize::PackageName; +use crate::Project; + #[derive(Parser, Debug, Clone, PartialEq, ValueEnum)] pub enum ManifestFormat { Pixi, @@ -265,7 +267,8 @@ pub async fn execute(args: Args) -> miette::Result<()> { eprintln!( "{}Created {}", console::style(console::Emoji("✔ ", "")).green(), - // Canonicalize the path to make it more readable, but if it fails just use the path as is. + // Canonicalize the path to make it more readable, but if it fails just use the path as + // is. 
project.manifest_path().display() ); } else { @@ -301,7 +304,7 @@ pub async fn execute(args: Args) -> miette::Result<()> { let pyproject = PyProjectManifest::from_path(&pyproject_manifest_path)?; // Early exit if 'pyproject.toml' already contains a '[tool.pixi.project]' table - if pyproject.is_pixi() { + if pyproject.has_pixi_table() { eprintln!( "{}Nothing to do here: 'pyproject.toml' already contains a '[tool.pixi.project]' section.", console::style(console::Emoji("🤔 ", "")).blue(), @@ -311,7 +314,7 @@ pub async fn execute(args: Args) -> miette::Result<()> { let (name, pixi_name) = match pyproject.name() { Some(name) => (name, false), - None => (default_name.clone(), true), + None => (default_name.as_str(), true), }; let environments = pyproject.environments_from_extras().into_diagnostic()?; let rv = env diff --git a/src/cli/list.rs b/src/cli/list.rs index 0d1d987fc..485fea59f 100644 --- a/src/cli/list.rs +++ b/src/cli/list.rs @@ -18,7 +18,7 @@ use pixi_uv_conversions::{ }; use pypi_modifiers::pypi_tags::{get_pypi_tags, is_python_record}; use rattler_conda_types::Platform; -use rattler_lock::{CondaPackage, Package, PypiPackage, UrlOrPath}; +use rattler_lock::{CondaPackageData, LockedPackageRef, PypiPackageData, UrlOrPath}; use serde::Serialize; use uv_distribution::RegistryWheelIndex; @@ -111,13 +111,14 @@ where } /// Associate with a uv_normalize::PackageName +#[allow(clippy::large_enum_variant)] enum PackageExt { - PyPI(PypiPackage, uv_normalize::PackageName), - Conda(CondaPackage), + PyPI(PypiPackageData, uv_normalize::PackageName), + Conda(CondaPackageData), } impl PackageExt { - fn as_conda(&self) -> Option<&CondaPackage> { + fn as_conda(&self) -> Option<&CondaPackageData> { match self { PackageExt::Conda(c) => Some(c), _ => None, @@ -127,16 +128,16 @@ impl PackageExt { /// Returns the name of the package. 
pub fn name(&self) -> Cow<'_, str> { match self { - Self::Conda(value) => value.package_record().name.as_normalized().into(), - Self::PyPI(value, _) => value.data().package.name.as_dist_info_name(), + Self::Conda(value) => value.record().name.as_normalized().into(), + Self::PyPI(value, _) => value.name.as_dist_info_name(), } } /// Returns the version string of the package pub fn version(&self) -> Cow<'_, str> { match self { - Self::Conda(value) => value.package_record().version.as_str(), - Self::PyPI(value, _) => value.data().package.version.to_string().into(), + Self::Conda(value) => value.record().version.as_str(), + Self::PyPI(value, _) => value.version.to_string().into(), } } } @@ -166,11 +167,11 @@ pub async fn execute(args: Args) -> miette::Result<()> { let locked_deps_ext = locked_deps .into_iter() .map(|p| match p { - Package::Pypi(p) => { - let name = to_uv_normalize(&p.data().package.name)?; - Ok(PackageExt::PyPI(p, name)) + LockedPackageRef::Pypi(pypi_data, _) => { + let name = to_uv_normalize(&pypi_data.name)?; + Ok(PackageExt::PyPI(pypi_data.clone(), name)) } - Package::Conda(c) => Ok(PackageExt::Conda(c)), + LockedPackageRef::Conda(c) => Ok(PackageExt::Conda(c.clone())), }) .collect::, ConversionError>>() .into_diagnostic()?; @@ -192,7 +193,7 @@ pub async fn execute(args: Args) -> miette::Result<()> { tags = get_pypi_tags( platform, &environment.system_requirements(), - python_record.package_record(), + python_record.record(), )?; Some(RegistryWheelIndex::new( &uv_context.cache, @@ -209,7 +210,7 @@ pub async fn execute(args: Args) -> miette::Result<()> { // Get the explicit project dependencies let mut project_dependency_names = environment - .dependencies(None, Some(platform)) + .combined_dependencies(Some(platform)) .names() .map(|p| p.as_source().to_string()) .collect_vec(); @@ -357,30 +358,29 @@ fn create_package_to_output<'a, 'b>( PackageExt::PyPI(_, _) => "pypi".to_string(), }; let build = match package { - PackageExt::Conda(pkg) => Some(pkg.package_record().build.clone()), + PackageExt::Conda(pkg) => Some(pkg.record().build.clone()), PackageExt::PyPI(_, _) => None, }; let (size_bytes, source) = match package { PackageExt::Conda(pkg) => ( - pkg.package_record().size, - pkg.file_name().map(ToOwned::to_owned), + pkg.record().size, + Some(pkg.record().name.as_source().to_owned()), ), PackageExt::PyPI(p, name) => { if let Some(registry_index) = registry_index { let entry = registry_index.get(name).find(|i| { - i.dist.filename.version - == to_uv_version(&p.data().package.version).expect("invalid version") + i.dist.filename.version == to_uv_version(&p.version).expect("invalid version") }); let size = entry.and_then(|e| get_dir_size(e.dist.path.clone()).ok()); let name = entry.map(|e| e.dist.filename.to_string()); (size, name) } else { - match &p.data().package.url_or_path { + match &p.location { UrlOrPath::Url(url) => (None, Some(url.to_string())), UrlOrPath::Path(path) => ( - get_dir_size(path.clone()).ok(), - Some(path.to_string_lossy().to_string()), + get_dir_size(std::path::Path::new(path.as_str())).ok(), + Some(path.to_string()), ), } } @@ -390,7 +390,7 @@ fn create_package_to_output<'a, 'b>( let is_explicit = project_dependency_names.contains(&name); let is_editable = match package { PackageExt::Conda(_) => false, - PackageExt::PyPI(p, _) => p.data().package.editable, + PackageExt::PyPI(p, _) => p.editable, }; Ok(PackageToOutput { diff --git a/src/cli/mod.rs b/src/cli/mod.rs index b2e9bc4bc..bdd00f7c7 100644 --- a/src/cli/mod.rs +++ b/src/cli/mod.rs @@ -3,17 +3,16 @@ 
use clap_verbosity_flag::Verbosity; use indicatif::ProgressDrawTarget; use miette::IntoDiagnostic; use pixi_consts::consts; +use pixi_progress::global_multi_progress; +use pixi_utils::indicatif::IndicatifWriter; use std::{env, io::IsTerminal}; use tracing_subscriber::{ filter::LevelFilter, prelude::__tracing_subscriber_SubscriberExt, util::SubscriberInitExt, EnvFilter, }; -use pixi_progress; -use pixi_progress::global_multi_progress; -use pixi_utils::indicatif::IndicatifWriter; - pub mod add; +mod build; pub mod clean; pub mod cli_config; pub mod completion; @@ -139,14 +138,17 @@ pub enum Command { SelfUpdate(self_update::Args), Clean(clean::Args), Completion(completion::Args), + + // Build + Build(build::Args), } #[derive(Parser, Debug, Default, Copy, Clone)] #[group(multiple = false)] /// Lock file usage from the CLI pub struct LockFileUsageArgs { - /// Install the environment as defined in the lockfile, doesn't update lockfile if it isn't - /// up-to-date with the manifest file. + /// Install the environment as defined in the lockfile, doesn't update + /// lockfile if it isn't up-to-date with the manifest file. #[clap(long, conflicts_with = "locked", env = "PIXI_FROZEN")] pub frozen: bool, /// Check if lockfile is up-to-date before installing the environment, @@ -294,6 +296,7 @@ pub async fn execute_command(command: Command) -> miette::Result<()> { Command::Update(cmd) => update::execute(cmd).await, Command::Upgrade(cmd) => upgrade::execute(cmd).await, Command::Exec(args) => exec::execute(args).await, + Command::Build(args) => build::execute(args).await, } } diff --git a/src/cli/project/description/set.rs b/src/cli/project/description/set.rs index 872a063f1..505c24b42 100644 --- a/src/cli/project/description/set.rs +++ b/src/cli/project/description/set.rs @@ -21,8 +21,8 @@ pub async fn execute(mut project: Project, args: Args) -> miette::Result<()> { console::style(console::Emoji("✔ ", "")).green(), project .manifest - .parsed - .project + .workspace + .workspace .description .as_ref() .unwrap() diff --git a/src/cli/project/export/conda_environment.rs b/src/cli/project/export/conda_environment.rs index 442a57e38..df73a4310 100644 --- a/src/cli/project/export/conda_environment.rs +++ b/src/cli/project/export/conda_environment.rs @@ -1,14 +1,12 @@ use std::path::PathBuf; -use crate::project::Environment; -use crate::Project; use clap::Parser; use itertools::Itertools; use miette::{Context, IntoDiagnostic}; use pep508_rs::ExtraName; use pixi_manifest::{ pypi::{PyPiPackageName, VersionOrStar}, - FeaturesExt, HasFeaturesIter, PyPiRequirement, + FeaturesExt, PyPiRequirement, }; use rattler_conda_types::{ ChannelConfig, EnvironmentYaml, MatchSpec, MatchSpecOrSubSection, NamedChannelOrUrl, @@ -16,6 +14,8 @@ use rattler_conda_types::{ }; use rattler_lock::FindLinksUrlOrPath; +use crate::{project::Environment, Project}; + #[derive(Debug, Parser)] pub struct Args { /// Explicit path to export the environment to @@ -142,30 +142,30 @@ fn build_env_yaml( let mut pip_dependencies: Vec = Vec::new(); - for feature in environment.features() { - if let Some(dependencies) = feature.dependencies(None, Some(*platform)) { - for (name, pixi_spec) in dependencies.iter() { - if let Some(nameless_spec) = pixi_spec - .clone() - .try_into_nameless_match_spec(config) - .into_diagnostic()? 
- { - let spec = MatchSpec::from_nameless(nameless_spec, Some(name.clone())); - env_yaml - .dependencies - .push(MatchSpecOrSubSection::MatchSpec(spec)); - } else { - tracing::warn!("Failed to convert dependency to conda environment spec: {:?}. Skipping dependency", name); - } - } + for (name, pixi_spec) in environment + .combined_dependencies(Some(*platform)) + .into_specs() + { + if let Some(nameless_spec) = pixi_spec + .clone() + .try_into_nameless_match_spec(config) + .into_diagnostic()? + { + let spec = MatchSpec::from_nameless(nameless_spec, Some(name.clone())); + env_yaml + .dependencies + .push(MatchSpecOrSubSection::MatchSpec(spec)); + } else { + tracing::warn!( + "Failed to convert dependency to conda environment spec: {:?}. Skipping dependency", + name + ); } + } - if feature.has_pypi_dependencies() { - if let Some(pypi_dependencies) = feature.pypi_dependencies(Some(*platform)) { - for (name, requirement) in pypi_dependencies.iter() { - pip_dependencies.push(format_pip_dependency(name, requirement)); - } - } + if environment.has_pypi_dependencies() { + for (name, requirement) in environment.pypi_dependencies(Some(*platform)).into_specs() { + pip_dependencies.push(format_pip_dependency(&name, &requirement)); } } @@ -208,7 +208,8 @@ fn build_env_yaml( Ok(env_yaml) } -/// Add `nodefaults` channel if the environment doesn't have `main`, `r`, or `msys2` +/// Add `nodefaults` channel if the environment doesn't have `main`, `r`, or +/// `msys2` fn channels_with_nodefaults(channels: Vec) -> Vec { let mut channels = channels; if !channels.iter().any(|channel| { @@ -241,10 +242,10 @@ pub async fn execute(project: Project, args: Args) -> miette::Result<()> { #[cfg(test)] mod tests { - use super::*; - use std::path::Path; + use super::*; + #[test] fn test_export_conda_env_yaml() { let path = Path::new(env!("CARGO_MANIFEST_DIR")) diff --git a/src/cli/project/export/conda_explicit_spec.rs b/src/cli/project/export/conda_explicit_spec.rs index 9176a827a..367e6dd02 100644 --- a/src/cli/project/export/conda_explicit_spec.rs +++ b/src/cli/project/export/conda_explicit_spec.rs @@ -1,17 +1,17 @@ -use std::collections::HashSet; -use std::fs; -use std::path::{Path, PathBuf}; +use std::{ + collections::HashSet, + fs, + path::{Path, PathBuf}, +}; use clap::Parser; use miette::{Context, IntoDiagnostic}; - -use crate::cli::cli_config::PrefixUpdateConfig; -use crate::lock_file::UpdateLockFileOptions; -use crate::Project; use rattler_conda_types::{ ExplicitEnvironmentEntry, ExplicitEnvironmentSpec, PackageRecord, Platform, RepoDataRecord, }; -use rattler_lock::{CondaPackage, Environment, Package}; +use rattler_lock::{CondaPackageData, Environment, LockedPackageRef}; + +use crate::{cli::cli_config::PrefixUpdateConfig, lock_file::UpdateLockFileOptions, Project}; #[derive(Debug, Parser)] #[clap(arg_required_else_help = false)] @@ -19,7 +19,6 @@ pub struct Args { /// Output directory for rendered explicit environment spec files pub output_dir: PathBuf, - /// Environment to render. Can be repeated for multiple envs. Defaults to all environments #[arg(short, long)] pub environment: Option>, @@ -29,10 +28,13 @@ pub struct Args { pub platform: Option>, /// PyPI dependencies are not supported in the conda explicit spec file. - /// This flag allows creating the spec file even if PyPI dependencies are present. #[arg(long, default_value = "false")] pub ignore_pypi_errors: bool, + /// Source dependencies are not supported in the conda explicit spec file. 
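+    /// This flag allows creating the spec file even if source dependencies
+    /// are present.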
+ #[arg(long, default_value = "false")] + pub ignore_source_errors: bool, + #[clap(flatten)] pub prefix_update_config: PrefixUpdateConfig, } @@ -45,12 +47,12 @@ fn build_explicit_spec<'a>( for cp in conda_packages { let prec = &cp.package_record; - let mut url = cp.url.to_owned(); let hash = prec.md5.ok_or(miette::miette!( "Package {} does not contain an md5 hash", prec.name.as_normalized() ))?; + let mut url = cp.url.clone(); url.set_fragment(Some(&format!("{:x}", hash))); packages.push(ExplicitEnvironmentEntry { @@ -97,16 +99,25 @@ fn render_env_platform( env_name, ))?; - let mut conda_packages_from_lockfile: Vec = Vec::new(); + let mut conda_packages_from_lockfile: Vec<_> = Vec::new(); for package in packages { match package { - Package::Conda(p) => conda_packages_from_lockfile.push(p), - Package::Pypi(pyp) => { + LockedPackageRef::Conda(CondaPackageData::Binary(p)) => { + conda_packages_from_lockfile.push(p.clone()) + } + LockedPackageRef::Conda(CondaPackageData::Source(_)) => { + miette::bail!( + "Conda source packages are not supported in a conda explicit spec. \ + Specify `--ignore-source-errors` to ignore this error and create \ + a spec file containing only the binary conda dependencies from the lockfile." + ); + } + LockedPackageRef::Pypi(pypi, _) => { if ignore_pypi_errors { tracing::warn!( "ignoring PyPI package {} since PyPI packages are not supported", - pyp.data().package.name + pypi.name ); } else { miette::bail!( @@ -175,7 +186,7 @@ pub async fn execute(project: Project, args: Args) -> miette::Result<()> { if let Some(ref platforms) = args.platform { for plat in platforms { if available_platforms.contains(plat) { - env_platform.push((env_name.clone(), env.clone(), *plat)); + env_platform.push((env_name.clone(), env, *plat)); } else { tracing::warn!( "Platform {} not available for environment {}. Skipping...", @@ -186,7 +197,7 @@ pub async fn execute(project: Project, args: Args) -> miette::Result<()> { } } else { for plat in available_platforms { - env_platform.push((env_name.clone(), env.clone(), plat)); + env_platform.push((env_name.clone(), env, plat)); } } } @@ -210,10 +221,11 @@ pub async fn execute(project: Project, args: Args) -> miette::Result<()> { mod tests { use std::path::Path; - use super::*; use rattler_lock::LockFile; use tempfile::tempdir; + use super::*; + #[test] fn test_render_conda_explicit_spec() { let path = Path::new(env!("CARGO_MANIFEST_DIR")) diff --git a/src/cli/run.rs b/src/cli/run.rs index 5bfee8eb7..228a9e27e 100644 --- a/src/cli/run.rs +++ b/src/cli/run.rs @@ -1,5 +1,6 @@ use clap::Parser; use dialoguer::theme::ColorfulTheme; +use fancy_display::FancyDisplay; use itertools::Itertools; use miette::{Diagnostic, IntoDiagnostic}; use std::collections::hash_map::Entry; @@ -18,7 +19,6 @@ use crate::task::{ InvalidWorkingDirectory, SearchEnvironments, TaskAndEnvironment, TaskGraph, }; use crate::Project; -use fancy_display::FancyDisplay; use pixi_config::ConfigCliActivation; use pixi_manifest::TaskName; use thiserror::Error; @@ -28,7 +28,8 @@ use tracing::Level; #[derive(Parser, Debug, Default)] #[clap(trailing_var_arg = true)] pub struct Args { - /// The pixi task or a task shell command you want to run in the project's environment, which can be an executable in the environment's PATH. + /// The pixi task or a task shell command you want to run in the project's + /// environment, which can be an executable in the environment's PATH. 
pub task: Vec<String>, #[clap(flatten)] @@ -46,13 +47,15 @@ pub struct Args { /// Use a clean environment to run the task /// - /// Using this flag will ignore your current shell environment and use bare minimum environment to activate the pixi environment in. + /// Using this flag will ignore your current shell environment and use a bare + /// minimum environment to activate the pixi environment in. #[arg(long)] pub clean_env: bool, } /// CLI entry point for `pixi run` -/// When running the sigints are ignored and child can react to them. As it pleases. +/// When running, the SIGINTs are ignored and the child can react to them as +/// it pleases. pub async fn execute(args: Args) -> miette::Result<()> { let cli_config = args .activation_config @@ -78,12 +81,19 @@ pub async fn execute(args: Args) -> miette::Result<()> { return Ok(()); } + // Print all available tasks if no task is provided + if args.task.is_empty() { + command_not_found(&project, explicit_environment); + return Ok(()); + } + // Sanity check of prefix location verify_prefix_location_unchanged(project.default_environment().dir().as_path()).await?; let best_platform = environment.best_platform(); - // Verify that the current platform has the required virtual packages for the environment. + // Verify that the current platform has the required virtual packages for the + // environment. if let Some(ref explicit_environment) = explicit_environment { verify_current_platform_has_required_virtual_packages(explicit_environment) .into_diagnostic()?; @@ -109,14 +119,15 @@ pub async fn execute(args: Args) -> miette::Result<()> { tracing::info!("Task graph: {}", task_graph); - // Traverse the task graph in topological order and execute each individual task. + // Traverse the task graph in topological order and execute each individual + // task. let mut task_idx = 0; let mut task_envs = HashMap::new(); for task_id in task_graph.topological_order() { let executable_task = ExecutableTask::from_task_graph(&task_graph, task_id); - // If the task is not executable (e.g. an alias), we skip it. This ensures we don't - // instantiate a prefix for an alias. + // If the task is not executable (e.g. an alias), we skip it. This ensures we + // don't instantiate a prefix for an alias. if !executable_task.task().is_executable() { continue; } @@ -170,8 +181,9 @@ pub async fn execute(args: Args) -> miette::Result<()> { } }; - // If we don't have a command environment yet, we need to compute it. We lazily compute the - // task environment because we only need the environment if a task is actually executed. + // If we don't have a command environment yet, we need to compute it. We lazily + // compute the task environment because we only need the environment if + // a task is actually executed. let task_env: &_ = match task_envs.entry(executable_task.run_environment.clone()) { Entry::Occupied(env) => env.into_mut(), Entry::Vacant(entry) => { @@ -195,8 +207,9 @@ pub async fn execute(args: Args) -> miette::Result<()> { } }; - // Execute the task itself within the command environment. If one of the tasks failed with - // a non-zero exit code, we exit this parent process with the same code. + // Execute the task itself within the command environment. If one of the tasks + // failed with a non-zero exit code, we exit this parent process with + // the same code.
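As an aside, the exit-code contract described in the comment above can be reduced to a self-contained sketch. The task list and `echo` commands here are hypothetical stand-ins, not pixi's actual `ExecutableTask` machinery:

```rust
use std::process::{exit, Command};

fn main() {
    // Hypothetical stand-in for a topologically ordered task list; the real
    // code walks a `TaskGraph` and runs each task through a shell.
    let tasks: &[&[&str]] = &[&["echo", "build"], &["echo", "test"]];

    for argv in tasks {
        let status = Command::new(argv[0])
            .args(&argv[1..])
            .status()
            .expect("failed to spawn task");

        // A failing task aborts the run, and the parent exits with the same
        // code so that callers can treat `pixi run` like the task itself.
        if !status.success() {
            exit(status.code().unwrap_or(1));
        }
    }
}
```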
match execute_task(&executable_task, task_env).await { Ok(_) => { task_idx += 1; } @@ -277,9 +290,10 @@ async fn execute_task<'p>( // Ignore CTRL+C // Specifically so that the child is responsible for its own signal handling - // NOTE: one CTRL+C is registered it will always stay registered for the rest of the runtime of the program - // which is fine when using run in isolation, however if we start to use run in conjunction with - // some other command we might want to revaluate this. + // NOTE: once CTRL+C is registered it will always stay registered for the rest of + // the runtime of the program, which is fine when using run in isolation; + // however, if we start to use run in conjunction with some other command we + // might want to re-evaluate this. let ctrl_c = tokio::spawn(async { while tokio::signal::ctrl_c().await.is_ok() {} }); let execute_future = diff --git a/src/cli/search.rs b/src/cli/search.rs index 33ae21060..9064ec43e 100644 --- a/src/cli/search.rs +++ b/src/cli/search.rs @@ -1,28 +1,26 @@ -use std::cmp::Ordering; -use std::collections::HashMap; -use std::future::{Future, IntoFuture}; -use std::io::{self, Write}; -use std::str::FromStr; +use std::{ + cmp::Ordering, + collections::HashMap, + future::{Future, IntoFuture}, + io::{self, Write}, + str::FromStr, +}; use clap::Parser; use indexmap::IndexMap; use itertools::Itertools; use miette::IntoDiagnostic; -use pixi_config::default_channel_config; +use pixi_config::{default_channel_config, Config}; use pixi_progress::await_in_progress; use pixi_utils::reqwest::build_reqwest_clients; -use rattler_conda_types::MatchSpec; -use rattler_conda_types::{PackageName, Platform, RepoDataRecord}; +use rattler_conda_types::{MatchSpec, PackageName, Platform, RepoDataRecord}; use rattler_repodata_gateway::{GatewayError, RepoData}; use regex::Regex; use strsim::jaro; use url::Url; -use crate::cli::cli_config::ProjectConfig; -use crate::Project; -use pixi_config::Config; - use super::cli_config::ChannelsConfig; +use crate::{cli::cli_config::ProjectConfig, Project}; /// Search a conda package /// @@ -49,7 +47,8 @@ pub struct Args { pub limit: Option<usize>, } -/// fetch packages from `repo_data` using `repodata_query_func` based on `filter_func` +/// fetch packages from `repo_data` using `repodata_query_func` based on +/// `filter_func` async fn search_package_by_filter( package: &PackageName, all_package_names: Vec<PackageName>, @@ -139,9 +138,8 @@ pub async fn execute_impl( .await .into_diagnostic()?; - // Compute the repodata query function that will be used to fetch the repodata for - // filtered package names - + // Compute the repodata query function that will be used to fetch the repodata + // for filtered package names let repodata_query_func = |some_specs: Vec<MatchSpec>| { gateway .query( @@ -529,10 +527,13 @@ fn print_matching_packages( // currently it relies on channel field being a url with trailing slash // https://github.com/mamba-org/rattler/issues/146 - let channel_name = Url::from_str(&package.channel) - .ok() + let channel_name = package + .channel + .as_ref() + .and_then(|channel| Url::from_str(channel).ok()) .and_then(|url| channel_config.strip_channel_alias(&url)) - .unwrap_or_else(|| package.channel.to_string()); + .or_else(|| package.channel.clone()) + .unwrap_or_else(|| "".to_string()); let channel_name = format!("{}/{}", channel_name, package.package_record.subdir); diff --git a/src/cli/tree.rs b/src/cli/tree.rs index 141252215..f768cfb93 100644 --- a/src/cli/tree.rs +++ b/src/cli/tree.rs @@ -1,5 +1,7 @@ -use std::collections::HashMap; -use
std::io::{StdoutLock, Write}; +use std::{ + collections::HashMap, + io::{StdoutLock, Write}, +}; use ahash::{HashSet, HashSetExt}; use clap::Parser; @@ -9,6 +11,7 @@ use itertools::Itertools; use miette::{IntoDiagnostic, WrapErr}; use pixi_manifest::FeaturesExt; use rattler_conda_types::Platform; +use rattler_lock::LockedPackageRef; use regex::Regex; use crate::{ @@ -421,7 +424,7 @@ fn direct_dependencies( dep_map: &HashMap, ) -> HashSet { let mut project_dependency_names = environment - .dependencies(None, Some(*platform)) + .combined_dependencies(Some(*platform)) .names() .filter(|p| { if let Some(value) = dep_map.get(p.as_source()) { @@ -472,18 +475,14 @@ struct PackageInfo { } /// Helper function to extract package information -fn extract_package_info(package: &rattler_lock::Package) -> Option { +fn extract_package_info(package: rattler_lock::LockedPackageRef<'_>) -> Option { if let Some(conda_package) = package.as_conda() { // Extract name - let name = conda_package - .package_record() - .name - .as_normalized() - .to_string(); + let name = conda_package.record().name.as_normalized().to_string(); // Extract dependencies let dependencies: Vec = conda_package - .package_record() + .record() .depends .iter() .map(|d| { @@ -497,19 +496,12 @@ fn extract_package_info(package: &rattler_lock::Package) -> Option dependencies, source: PackageSource::Conda, }) - } else if let Some(pypi_package) = package.as_pypi() { + } else if let Some((pypi_package_data, _pypi_env_data)) = package.as_pypi() { // Extract name - let name = pypi_package - .data() - .package - .name - .as_dist_info_name() - .into_owned(); + let name = pypi_package_data.name.as_dist_info_name().into_owned(); // Extract dependencies - let dependencies = pypi_package - .data() - .package + let dependencies = pypi_package_data .requires_dist .iter() .filter_map(|p| { @@ -538,18 +530,23 @@ fn extract_package_info(package: &rattler_lock::Package) -> Option } /// Generate a map of dependencies from a list of locked packages -fn generate_dependency_map(locked_deps: &Vec) -> HashMap { +fn generate_dependency_map( + locked_deps: &[rattler_lock::LockedPackageRef<'_>], +) -> HashMap { let mut package_dependencies_map = HashMap::new(); - for package in locked_deps { - let version = package.version().into_owned(); - + for &package in locked_deps { if let Some(package_info) = extract_package_info(package) { package_dependencies_map.insert( package_info.name.clone(), Package { name: package_info.name, - version: version.clone(), + version: match package { + LockedPackageRef::Conda(conda_data) => { + conda_data.record().version.to_string() + } + LockedPackageRef::Pypi(pypi_data, _) => pypi_data.version.to_string(), + }, dependencies: package_info.dependencies.into_iter().unique().collect(), needed_by: Vec::new(), source: package_info.source, diff --git a/src/cli/update.rs b/src/cli/update.rs index e10ecaf3a..fd30f09b6 100644 --- a/src/cli/update.rs +++ b/src/cli/update.rs @@ -18,7 +18,7 @@ use pixi_config::ConfigCli; use pixi_consts::consts; use pixi_manifest::EnvironmentName; use rattler_conda_types::Platform; -use rattler_lock::{LockFile, Package}; +use rattler_lock::{LockFile, LockedPackageRef}; /// Update dependencies as recorded in the local lock file #[derive(Parser, Debug, Default)] @@ -88,7 +88,7 @@ impl UpdateSpecs { &self, environment_name: &EnvironmentName, platform: &Platform, - package: &Package, + package: LockedPackageRef<'_>, ) -> bool { // Check if the platform is in the list of platforms to update. 
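For reference, the filtering contract of `should_relax` (continued below) can be sketched in isolation; the plain `String` keys are simplifications of pixi's `EnvironmentName`, `Platform`, and `LockedPackageRef` types:

```rust
use std::collections::HashSet;

// Simplified stand-in for `UpdateSpecs`: a locked package is only relaxed
// (allowed to change) when it passes every filter that was supplied.
struct UpdateSpecs {
    packages: Option<HashSet<String>>,
    platforms: Option<HashSet<String>>,
}

impl UpdateSpecs {
    fn should_relax(&self, platform: &str, package: &str) -> bool {
        // Check if the platform is in the list of platforms to update.
        if let Some(platforms) = &self.platforms {
            if !platforms.contains(platform) {
                return false;
            }
        }
        // Check if the package is in the list of packages to update.
        if let Some(packages) = &self.packages {
            if !packages.contains(package) {
                return false;
            }
        }
        true
    }
}

fn main() {
    let specs = UpdateSpecs {
        packages: Some(["numpy".to_string()].into()),
        platforms: None,
    };
    assert!(specs.should_relax("linux-64", "numpy"));
    assert!(!specs.should_relax("linux-64", "python"));
}
```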
if let Some(platforms) = &self.platforms { @@ -106,7 +106,7 @@ impl UpdateSpecs { // Check if the package is in the list of packages to update. if let Some(packages) = &self.packages { - if !packages.contains(&*package.name()) { + if !packages.contains(package.name()) { return false; } } @@ -159,7 +159,8 @@ pub async fn execute(args: Args) -> miette::Result<()> { let updated_lock_file = UpdateContext::builder(&project) .with_lock_file(relaxed_lock_file.clone()) .with_no_install(args.no_install) - .finish()? + .finish() + .await? .update() .await?; diff --git a/src/cli/upgrade.rs b/src/cli/upgrade.rs index 8fc031c13..d4c7f5333 100644 --- a/src/cli/upgrade.rs +++ b/src/cli/upgrade.rs @@ -68,7 +68,7 @@ pub async fn execute(args: Args) -> miette::Result<()> { // TODO: Also support build and host let spec_type = SpecType::Run; let match_spec_iter = feature - .dependencies(Some(spec_type), None) + .dependencies(spec_type, None) .into_iter() .flat_map(|deps| deps.into_owned()); diff --git a/src/diff.rs b/src/diff.rs index b14560d4f..f5399431e 100644 --- a/src/diff.rs +++ b/src/diff.rs @@ -1,27 +1,27 @@ use std::{ - borrow::Cow, collections::HashSet, io::{stderr, Write}, }; -use crate::Project; use ahash::HashMap; use indexmap::IndexMap; use itertools::{Either, Itertools}; use pixi_consts::consts; use pixi_manifest::FeaturesExt; use rattler_conda_types::Platform; -use rattler_lock::{LockFile, Package}; +use rattler_lock::{LockFile, LockedPackage, LockedPackageRef}; use serde::Serialize; use serde_json::Value; use tabwriter::TabWriter; +use crate::Project; + // Represents the differences between two sets of packages. #[derive(Default, Clone)] pub struct PackagesDiff { - pub added: Vec, - pub removed: Vec, - pub changed: Vec<(rattler_lock::Package, rattler_lock::Package)>, + pub added: Vec, + pub removed: Vec, + pub changed: Vec<(LockedPackage, LockedPackage)>, } impl PackagesDiff { @@ -59,11 +59,15 @@ impl LockFileDiff { .into_iter() .flatten() .partition_map(|p| match p { - rattler_lock::Package::Conda(p) => { - Either::Left((p.package_record().name.clone(), p)) - } - rattler_lock::Package::Pypi(p) => { - Either::Right((p.data().package.name.clone(), p)) + LockedPackageRef::Conda(conda_package_data) => Either::Left(( + conda_package_data.record().name.clone(), + conda_package_data, + )), + LockedPackageRef::Pypi(pypi_package_data, pypi_env_data) => { + Either::Right(( + pypi_package_data.name.clone(), + (pypi_package_data, pypi_env_data), + )) } }); @@ -72,28 +76,32 @@ impl LockFileDiff { // Find new and changed packages for package in packages { match package { - Package::Conda(p) => { - let name = &p.package_record().name; + LockedPackageRef::Conda(data) => { + let name = &data.record().name; match previous_conda_packages.remove(name) { - Some(previous) if previous.url() != p.url() => { + Some(previous) if previous.location() != data.location() => { diff.changed - .push((Package::Conda(previous), Package::Conda(p))); + .push((previous.clone().into(), data.clone().into())); } None => { - diff.added.push(Package::Conda(p)); + diff.added.push(data.clone().into()); } _ => {} } } - Package::Pypi(p) => { - let name = &p.data().package.name; + LockedPackageRef::Pypi(data, env) => { + let name = &data.name; match previous_pypi_packages.remove(name) { - Some(previous) if previous.url() != p.url() => { - diff.changed - .push((Package::Pypi(previous), Package::Pypi(p))); + Some((previous_data, previous_env)) + if previous_data.location != data.location => + { + diff.changed.push(( + (previous_data.clone(), 
previous_env.clone()).into(), + (data.clone(), env.clone()).into(), + )); } None => { - diff.added.push(Package::Pypi(p)); + diff.added.push((data.clone(), env.clone()).into()); } _ => {} } @@ -103,10 +111,10 @@ impl LockFileDiff { // Determine packages that were removed for (_, p) in previous_conda_packages { - diff.removed.push(Package::Conda(p)); + diff.removed.push(p.clone().into()); } - for (_, p) in previous_pypi_packages { - diff.removed.push(Package::Pypi(p)); + for (_, (data, env)) in previous_pypi_packages { + diff.removed.push((data.clone(), env.clone()).into()); } environment_diff.insert(platform, diff); @@ -123,14 +131,7 @@ impl LockFileDiff { { let mut diff = PackagesDiff::default(); for package in packages { - match package { - Package::Conda(p) => { - diff.removed.push(Package::Conda(p)); - } - Package::Pypi(p) => { - diff.removed.push(Package::Pypi(p)); - } - } + diff.removed.push(package.into()); } environment_diff.insert(platform, diff); } @@ -153,14 +154,7 @@ impl LockFileDiff { for (platform, packages) in environment.packages_by_platform() { let mut diff = PackagesDiff::default(); for package in packages { - match package { - Package::Conda(p) => { - diff.removed.push(Package::Conda(p)); - } - Package::Pypi(p) => { - diff.removed.push(Package::Pypi(p)); - } - } + diff.removed.push(package.into()); } environment_diff.insert(platform, diff); } @@ -253,21 +247,19 @@ impl LockFileDiff { Ok(()) } - fn format_changes(packages: &PackagesDiff) -> Vec<(Cow<'_, str>, String)> { + fn format_changes(packages: &PackagesDiff) -> Vec<(&str, String)> { enum Change<'i> { - Added(&'i Package), - Removed(&'i Package), - Changed(&'i Package, &'i Package), + Added(&'i LockedPackage), + Removed(&'i LockedPackage), + Changed(&'i LockedPackage, &'i LockedPackage), } - fn format_package_identifier(package: &Package) -> String { + fn format_package_identifier(package: &LockedPackage) -> String { match package { - Package::Conda(p) => format!( - "{} {}", - &p.package_record().version.as_str(), - &p.package_record().build - ), - Package::Pypi(p) => p.data().package.version.to_string(), + LockedPackage::Conda(p) => { + format!("{} {}", &p.record().version.as_str(), &p.record().build) + } + LockedPackage::Pypi(p, _) => p.version.to_string(), } } @@ -288,8 +280,8 @@ impl LockFileDiff { "{} {} {}\t{}\t\t", console::style("+").green(), match p { - Package::Conda(_) => consts::CondaEmoji.to_string(), - Package::Pypi(_) => consts::PypiEmoji.to_string(), + LockedPackage::Conda(_) => consts::CondaEmoji.to_string(), + LockedPackage::Pypi(..) => consts::PypiEmoji.to_string(), }, p.name(), format_package_identifier(p) @@ -301,8 +293,8 @@ impl LockFileDiff { "{} {} {}\t{}\t\t", console::style("-").red(), match p { - Package::Conda(_) => consts::CondaEmoji.to_string(), - Package::Pypi(_) => consts::PypiEmoji.to_string(), + LockedPackage::Conda(_) => consts::CondaEmoji.to_string(), + LockedPackage::Pypi(..) 
=> consts::PypiEmoji.to_string(), }, p.name(), format_package_identifier(p) @@ -319,9 +311,9 @@ impl LockFileDiff { let name = previous.name(); let line = match (previous, current) { - (Package::Conda(previous), Package::Conda(current)) => { - let previous = previous.package_record(); - let current = current.package_record(); + (LockedPackage::Conda(previous), LockedPackage::Conda(current)) => { + let previous = previous.record(); + let current = current.record(); format!( "{} {} {}\t{} {}\t->\t{} {}", @@ -334,10 +326,7 @@ impl LockFileDiff { choose_style(current.build.as_str(), previous.build.as_str()), ) } - (Package::Pypi(previous), Package::Pypi(current)) => { - let previous = previous.data().package; - let current = current.data().package; - + (LockedPackage::Pypi(previous, _), LockedPackage::Pypi(current, _)) => { format!( "{} {} {}\t{}\t->\t{}", console::style("~").yellow(), @@ -397,7 +386,7 @@ impl LockFileJsonDiff { for (platform, packages_diff) in environment_diff { let conda_dependencies = project .environment(environment_name.as_str()) - .map(|env| env.dependencies(None, Some(platform))) + .map(|env| env.dependencies(pixi_manifest::SpecType::Run, Some(platform))) .unwrap_or_default(); let pypi_dependencies = project @@ -406,64 +395,64 @@ impl LockFileJsonDiff { .unwrap_or_default(); let add_diffs = packages_diff.added.into_iter().map(|new| match new { - Package::Conda(pkg) => JsonPackageDiff { - name: pkg.package_record().name.as_normalized().to_string(), + LockedPackage::Conda(pkg) => JsonPackageDiff { + name: pkg.record().name.as_normalized().to_string(), before: None, after: Some(serde_json::to_value(&pkg).unwrap()), ty: JsonPackageType::Conda, - explicit: conda_dependencies.contains_key(&pkg.package_record().name), + explicit: conda_dependencies.contains_key(&pkg.record().name), }, - Package::Pypi(pkg) => JsonPackageDiff { - name: pkg.data().package.name.as_dist_info_name().into_owned(), + LockedPackage::Pypi(pkg, _) => JsonPackageDiff { + name: pkg.name.as_dist_info_name().into_owned(), before: None, after: Some(serde_json::to_value(&pkg).unwrap()), ty: JsonPackageType::Pypi, - explicit: pypi_dependencies.contains_key(&pkg.data().package.name), + explicit: pypi_dependencies.contains_key(&pkg.name), }, }); let removed_diffs = packages_diff.removed.into_iter().map(|old| match old { - Package::Conda(pkg) => JsonPackageDiff { - name: pkg.package_record().name.as_normalized().to_string(), + LockedPackage::Conda(pkg) => JsonPackageDiff { + name: pkg.record().name.as_normalized().to_string(), before: Some(serde_json::to_value(&pkg).unwrap()), after: None, ty: JsonPackageType::Conda, - explicit: conda_dependencies.contains_key(&pkg.package_record().name), + explicit: conda_dependencies.contains_key(&pkg.record().name), }, - Package::Pypi(pkg) => JsonPackageDiff { - name: pkg.data().package.name.as_dist_info_name().into_owned(), + LockedPackage::Pypi(pkg, _) => JsonPackageDiff { + name: pkg.name.as_dist_info_name().into_owned(), before: Some(serde_json::to_value(&pkg).unwrap()), after: None, ty: JsonPackageType::Pypi, - explicit: pypi_dependencies.contains_key(&pkg.data().package.name), + explicit: pypi_dependencies.contains_key(&pkg.name), }, }); let changed_diffs = packages_diff.changed.into_iter().map(|(old, new)| match (old, new) { - (Package::Conda(old), Package::Conda(new)) => + (LockedPackage::Conda(old), LockedPackage::Conda(new)) => { let before = serde_json::to_value(&old).unwrap(); let after = serde_json::to_value(&new).unwrap(); let (before, after) = 
compute_json_diff(before, after); JsonPackageDiff { - name: old.package_record().name.as_normalized().to_string(), + name: old.record().name.as_normalized().to_string(), before: Some(before), after: Some(after), ty: JsonPackageType::Conda, - explicit: conda_dependencies.contains_key(&old.package_record().name), + explicit: conda_dependencies.contains_key(&old.record().name), } } - (Package::Pypi(old), Package::Pypi(new)) => { + (LockedPackage::Pypi(old, _), LockedPackage::Pypi(new, _)) => { let before = serde_json::to_value(&old).unwrap(); let after = serde_json::to_value(&new).unwrap(); let (before, after) = compute_json_diff(before, after); JsonPackageDiff { - name: old.data().package.name.as_dist_info_name().into_owned(), + name: old.name.as_dist_info_name().into_owned(), before: Some(before), after: Some(after), ty: JsonPackageType::Pypi, - explicit: pypi_dependencies.contains_key(&old.data().package.name), + explicit: pypi_dependencies.contains_key(&old.name), } } _ => unreachable!("packages cannot change type, they are represented as removals and inserts instead"), diff --git a/src/environment.rs b/src/environment.rs index 2fa369a28..be12609a9 100644 --- a/src/environment.rs +++ b/src/environment.rs @@ -1,4 +1,5 @@ use crate::{ + build::{BuildError, BuildReporter}, install_pypi, lock_file::{UpdateLockFileOptions, UpdateMode, UvResolutionContext}, prefix::Prefix, @@ -9,29 +10,39 @@ use crate::{ use dialoguer::theme::ColorfulTheme; use fancy_display::FancyDisplay; use fs_err as fs; +use futures::{stream, StreamExt, TryStreamExt}; +use indicatif::ProgressBar; +use itertools::{Either, Itertools}; use miette::{IntoDiagnostic, WrapErr}; +use parking_lot::Mutex; +use pixi_build_frontend::CondaBuildReporter; use pixi_consts::consts; use pixi_manifest::{EnvironmentName, FeaturesExt, SystemRequirements}; use pixi_progress::{await_in_progress, global_multi_progress}; +use pixi_record::PixiRecord; use rattler::{ install::{DefaultProgressFormatter, IndicatifReporter, Installer, PythonInfo, Transaction}, package_cache::PackageCache, }; -use rattler_conda_types::{Platform, PrefixRecord, RepoDataRecord}; -use rattler_lock::Package::{Conda, Pypi}; +use rattler_conda_types::{ + Channel, ChannelUrl, GenericVirtualPackage, Platform, PrefixRecord, RepoDataRecord, +}; +use rattler_lock::LockedPackageRef; use rattler_lock::{PypiIndexes, PypiPackageData, PypiPackageEnvironmentData}; +use rattler_repodata_gateway::Gateway; use reqwest_middleware::ClientWithMiddleware; use serde::{Deserialize, Serialize}; -use std::hash::{Hash, Hasher}; use std::{ collections::HashMap, - convert::identity, - io, - io::ErrorKind, + hash::{Hash, Hasher}, + io::{self, ErrorKind}, path::{Path, PathBuf}, sync::Arc, + time::Duration, }; use tokio::sync::Semaphore; + +use crate::build::BuildContext; use uv_distribution_types::{InstalledDist, Name}; use crate::lock_file::LockFileDerivedData; @@ -89,7 +100,7 @@ async fn prefix_location_changed( .report(false) .default(true) .interact_opt() - .map_or(None, identity); + .map_or(None, std::convert::identity); if user_value == Some(true) { await_in_progress("removing old environment", |_| { tokio::fs::remove_dir_all(environment_dir) @@ -176,24 +187,20 @@ impl LockedEnvironmentHash { if let Some(packages) = environment.packages(platform) { for package in packages { // Always has the url or path - package - .url_or_path() - .into_owned() - .to_string() - .hash(&mut hasher); + package.location().to_owned().to_string().hash(&mut hasher); match package { // A select set of fields are used to 
hash the package - Conda(pack) => { - if let Some(sha) = pack.package_record().sha256 { + LockedPackageRef::Conda(pack) => { + if let Some(sha) = pack.record().sha256 { sha.hash(&mut hasher); - } else if let Some(md5) = pack.package_record().md5 { + } else if let Some(md5) = pack.record().md5 { md5.hash(&mut hasher); } } - Pypi(pack) => { - pack.is_editable().hash(&mut hasher); - pack.extras().hash(&mut hasher); + LockedPackageRef::Pypi(pack, env) => { + pack.editable.hash(&mut hasher); + env.extras.hash(&mut hasher); } } } @@ -432,7 +439,7 @@ pub async fn update_prefix_pypi( environment_name: &EnvironmentName, prefix: &Prefix, _platform: Platform, - conda_records: &[RepoDataRecord], + pixi_records: &[PixiRecord], pypi_records: &[(PypiPackageData, PypiPackageEnvironmentData)], status: &PythonStatus, system_requirements: &SystemRequirements, @@ -498,7 +505,7 @@ pub async fn update_prefix_pypi( install_pypi::update_python_distributions( lock_file_dir, prefix, - conda_records, + pixi_records, pypi_records, &python_info.path, system_requirements, @@ -621,6 +628,113 @@ impl PythonStatus { } } +struct CondaBuildProgress { + main_progress: ProgressBar, + build_progress: Mutex>, +} + +impl CondaBuildProgress { + fn new(num_packages: u64) -> Self { + // Create a new progress bar. + let pb = ProgressBar::hidden(); + pb.set_length(num_packages); + let pb = pixi_progress::global_multi_progress().add(pb); + pb.set_style(pixi_progress::default_progress_style()); + // Building the package + pb.set_prefix("building packages"); + pb.enable_steady_tick(Duration::from_millis(100)); + + Self { + main_progress: pb, + build_progress: Mutex::new(Vec::default()), + } + } +} + +impl CondaBuildProgress { + /// Associate a progress bar with a build identifier, and get a build id back + pub fn associate(&self, identifier: &str) -> usize { + let mut locked = self.build_progress.lock(); + let after = if locked.is_empty() { + &self.main_progress + } else { + &locked.last().unwrap().1 + }; + + let pb = pixi_progress::global_multi_progress().insert_after(after, ProgressBar::hidden()); + + locked.push((identifier.to_owned(), pb)); + locked.len() - 1 + } + + pub fn end_progress_for(&self, build_id: usize, alternative_message: Option) { + self.main_progress.inc(1); + if self.main_progress.position() + == self + .main_progress + .length() + .expect("expected length to be set for progress") + { + self.main_progress.finish_and_clear(); + // Clear all the build progress bars + for (_, pb) in self.build_progress.lock().iter() { + pb.finish_and_clear(); + } + return; + } + let locked = self.build_progress.lock(); + + // Finish the build progress bar + let (identifier, pb) = locked.get(build_id).unwrap(); + // If there is an alternative message, use that + let msg = if let Some(msg) = alternative_message { + pb.set_style(indicatif::ProgressStyle::with_template(" {msg}").unwrap()); + msg + } else { + // Otherwise show the default message + pb.set_style( + indicatif::ProgressStyle::with_template(" {msg} in {elapsed}").unwrap(), + ); + "built".to_string() + }; + pb.finish_with_message(format!("✔ {msg}: {identifier}")); + } +} + +impl CondaBuildReporter for CondaBuildProgress { + fn on_build_start(&self, build_id: usize) -> usize { + // Actually show the progress + let locked = self.build_progress.lock(); + let (identifier, pb) = locked.get(build_id).unwrap(); + let template = + indicatif::ProgressStyle::with_template(" {spinner:.green} {msg} {elapsed}") + .unwrap(); + pb.set_style(template); + pb.set_message(format!("building 
{identifier}")); + pb.enable_steady_tick(Duration::from_millis(100)); + // We keep operation and build id the same + build_id + } + + fn on_build_end(&self, operation: usize) { + self.end_progress_for(operation, None); + } + + fn on_build_output(&self, _operation: usize, line: String) { + self.main_progress.suspend(|| eprintln!("{}", line)); + } +} + +impl BuildReporter for CondaBuildProgress { + fn on_build_cached(&self, build_id: usize) { + self.end_progress_for(build_id, Some("cached".to_string())); + } + + fn as_conda_build_reporter(self: Arc) -> Arc { + self.clone() + } +} + /// Updates the environment to contain the packages from the specified lock-file #[allow(clippy::too_many_arguments)] pub async fn update_prefix_conda( @@ -628,15 +742,77 @@ pub async fn update_prefix_conda( package_cache: PackageCache, authenticated_client: ClientWithMiddleware, installed_packages: Vec, - repodata_records: Vec, + pixi_records: Vec, + virtual_packages: Vec, + channels: Vec, + build_channels: Option>, platform: Platform, progress_bar_message: &str, progress_bar_prefix: &str, io_concurrency_limit: Arc, + build_context: BuildContext, + gateway: Gateway, ) -> miette::Result { // Try to increase the rlimit to a sensible value for installation. try_increase_rlimit_to_sensible(); + let (mut repodata_records, source_records): (Vec<_>, Vec<_>) = pixi_records + .into_iter() + .partition_map(|record| match record { + PixiRecord::Binary(record) => Either::Left(record), + PixiRecord::Source(record) => Either::Right(record), + }); + + let mut progress_reporter = None; + let source_records_length = source_records.len(); + // Build conda packages out of the source records + let mut processed_source_packages = stream::iter(source_records) + .map(Ok) + .and_then(|record| { + // If we don't have a progress reporter, create one + // This is done so that the progress bars are not displayed if there are no source packages + let progress_reporter = progress_reporter + .get_or_insert_with(|| { + Arc::new(CondaBuildProgress::new(source_records_length as u64)) + }) + .clone(); + let build_id = progress_reporter.associate(record.package_record.name.as_source()); + let build_context = &build_context; + let channels = &channels; + let build_channels = &build_channels; + let virtual_packages = &virtual_packages; + let client = authenticated_client.clone(); + let gateway = gateway.clone(); + async move { + let build_channels = build_channels.clone().ok_or_else(|| { + BuildError::BackendError( + miette::miette!("`channels` are not defined in the `[build-system]`") + .into(), + ) + })?; + + build_context + .build_source_record( + &record, + build_channels, + channels, + platform, + virtual_packages.clone(), + virtual_packages.clone(), + progress_reporter.clone(), + build_id, + client, + gateway, + ) + .await + } + }) + .try_collect::>() + .await?; + + // Extend the repodata records with the built packages + repodata_records.append(&mut processed_source_packages); + // Execute the operations that are returned by the solver. 
let result = await_in_progress( format!("{progress_bar_prefix}{progress_bar_message}",), diff --git a/src/global/install.rs b/src/global/install.rs index 799d36d73..3be391e44 100644 --- a/src/global/install.rs +++ b/src/global/install.rs @@ -320,7 +320,7 @@ mod tests { .unwrap() .default_environment() .unwrap() - .conda_repodata_records_for_platform(Platform::Linux64) + .conda_repodata_records(Platform::Linux64) .unwrap() .unwrap() } @@ -339,7 +339,7 @@ mod tests { .unwrap() .default_environment() .unwrap() - .conda_repodata_records_for_platform(Platform::Linux64) + .conda_repodata_records(Platform::Linux64) .unwrap() .unwrap() } diff --git a/src/global/project/manifest.rs b/src/global/project/manifest.rs index b41d8f5c4..53499d686 100644 --- a/src/global/project/manifest.rs +++ b/src/global/project/manifest.rs @@ -8,18 +8,18 @@ use fs_err::tokio as tokio_fs; use indexmap::IndexSet; use miette::IntoDiagnostic; +use super::parsed_manifest::{ManifestParsingError, ManifestVersion, ParsedManifest}; +use super::{EnvironmentName, ExposedName, MANIFEST_DEFAULT_NAME}; use crate::global::project::ParsedEnvironment; use pixi_config::Config; -use pixi_manifest::{PrioritizedChannel, TomlManifest}; +use pixi_manifest::toml::TomlDocument; +use pixi_manifest::PrioritizedChannel; use pixi_spec::PixiSpec; use pixi_utils::{executable_from_path, strip_executable_extension}; use rattler_conda_types::{ChannelConfig, MatchSpec, NamedChannelOrUrl, PackageName, Platform}; use serde::{Deserialize, Serialize}; use toml_edit::{DocumentMut, Item}; -use super::parsed_manifest::{ManifestParsingError, ManifestVersion, ParsedManifest}; -use super::{EnvironmentName, ExposedName, MANIFEST_DEFAULT_NAME}; - /// Handles the global project's manifest file. /// This struct is responsible for reading, parsing, editing, and saving the /// manifest. 
It encapsulates all logic related to the manifest's TOML format @@ -31,7 +31,7 @@ pub struct Manifest { pub path: PathBuf, /// Editable toml document - pub document: TomlManifest, + pub document: TomlDocument, /// The parsed manifest pub parsed: ParsedManifest, @@ -63,7 +63,7 @@ impl Manifest { let manifest = Self { path: manifest_path.to_path_buf(), - document: TomlManifest::new(document), + document: TomlDocument::new(document), parsed: manifest, }; diff --git a/src/global/project/mod.rs b/src/global/project/mod.rs index ff2a82ce3..b4d84d0c0 100644 --- a/src/global/project/mod.rs +++ b/src/global/project/mod.rs @@ -25,32 +25,31 @@ use indexmap::{IndexMap, IndexSet}; use itertools::Itertools; pub(crate) use manifest::{ExposedType, Manifest, Mapping}; use miette::{miette, Context, IntoDiagnostic}; -pub(crate) use parsed_manifest::ExposedName; -pub(crate) use parsed_manifest::ParsedEnvironment; use parsed_manifest::ParsedManifest; +pub(crate) use parsed_manifest::{ExposedName, ParsedEnvironment}; use pixi_config::{default_channel_config, home_path, Config}; use pixi_consts::consts; use pixi_manifest::PrioritizedChannel; use pixi_progress::{await_in_progress, global_multi_progress, wrap_in_progress}; -use pixi_utils::executable_from_path; -use pixi_utils::reqwest::build_reqwest_clients; -use rattler::install::{DefaultProgressFormatter, IndicatifReporter, Installer}; -use rattler::package_cache::PackageCache; +use pixi_utils::{executable_from_path, reqwest::build_reqwest_clients}; +use rattler::{ + install::{DefaultProgressFormatter, IndicatifReporter, Installer}, + package_cache::PackageCache, +}; use rattler_conda_types::{ ChannelConfig, GenericVirtualPackage, MatchSpec, PackageName, Platform, PrefixRecord, }; use rattler_lock::Matches; use rattler_repodata_gateway::Gateway; -use rattler_solve::resolvo::Solver; -use rattler_solve::{SolverImpl, SolverTask}; +use rattler_solve::{resolvo::Solver, SolverImpl, SolverTask}; use rattler_virtual_packages::{VirtualPackage, VirtualPackageOverrides}; use reqwest_middleware::ClientWithMiddleware; -use std::sync::OnceLock; use std::{ ffi::OsStr, fmt::{Debug, Formatter}, path::{Path, PathBuf}, str::FromStr, + sync::OnceLock, }; use toml_edit::DocumentMut; @@ -106,11 +105,12 @@ struct ExposedData { } impl ExposedData { - /// Constructs an `ExposedData` instance from a exposed `script` or `trampoline` path. + /// Constructs an `ExposedData` instance from an exposed `script` or + /// `trampoline` path. /// - /// This function extracts metadata from the exposed script path, including the - /// environment name, platform, channel, and package information, by reading - /// the associated `conda-meta` directory. + /// This function extracts metadata from the exposed script path, including + /// the environment name, platform, channel, and package information, by + /// reading the associated `conda-meta` directory. /// or it looks into the trampoline manifest to extract the metadata.
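A rough sketch of the path-derived part of that metadata, assuming the layout `<env_root>/<env_name>/bin/<exe>` (the real extraction additionally reads `conda-meta` or the trampoline manifest, as the doc comment above says):

```rust
use std::path::Path;

// Recover the environment name from an exposed executable's target path.
fn env_name_from_path(executable: &Path, env_root: &Path) -> Option<String> {
    executable
        .strip_prefix(env_root) // leaves `<env_name>/bin/<exe>`
        .ok()?
        .components()
        .next()
        .map(|c| c.as_os_str().to_string_lossy().into_owned())
}

fn main() {
    let env_root = Path::new("/home/user/.pixi/envs");
    let exe = Path::new("/home/user/.pixi/envs/python/bin/python");
    assert_eq!(env_name_from_path(exe, env_root).as_deref(), Some("python"));
}
```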
pub async fn from_exposed_path( bin: &GlobalBin, @@ -149,7 +149,7 @@ impl ExposedData { .iter() .map(|prefix_record| prefix_record.repodata_record.channel.clone()) .collect::>(); - for channel in all_channels { + for channel in all_channels.into_iter().flatten() { tracing::debug!("Channel: {} found in environment: {}", channel, env_name); channels.push(channel_url_to_prioritized_channel( &channel, @@ -185,7 +185,8 @@ fn determine_env_path(executable_path: &Path, env_root: &Path) -> miette::Result ) } -/// Converts a `PrefixRecord` into package metadata, including platform, channel, and package name. +/// Converts a `PrefixRecord` into package metadata, including platform, +/// channel, and package name. fn convert_record_to_metadata( prefix_record: &PrefixRecord, channel_config: &ChannelConfig, @@ -199,17 +200,24 @@ fn convert_record_to_metadata( let package_name = prefix_record.repodata_record.package_record.name.clone(); - let channel = - channel_url_to_prioritized_channel(&prefix_record.repodata_record.channel, channel_config)?; + let Some(channel_str) = prefix_record.repodata_record.channel.as_deref() else { + miette::bail!( + "missing channel in prefix record for {}", + package_name.as_source() + ) + }; + + let channel = channel_url_to_prioritized_channel(channel_str, channel_config)?; Ok((platform, channel, package_name)) } -/// Extracts package metadata from the `conda-meta` directory for a given executable. +/// Extracts package metadata from the `conda-meta` directory for a given +/// executable. /// /// This function reads the `conda-meta` directory to find the package metadata -/// associated with the specified executable. It returns the platform, channel, and -/// package name of the executable. +/// associated with the specified executable. It returns the platform, channel, +/// and package name of the executable. async fn package_from_conda_meta( conda_meta: &Path, executable: &str, @@ -450,8 +458,7 @@ impl Project { let (match_specs, dependencies_names) = environment .dependencies - .clone() - .into_iter() + .iter() .map(|(name, spec)| { if let Some(nameless_spec) = spec .clone() @@ -460,7 +467,7 @@ impl Project { { Ok(( MatchSpec::from_nameless(nameless_spec, Some(name.clone())), - name, + name.clone(), )) } else { Err(miette!("Couldn't convert {spec:?} to nameless match spec.")) @@ -555,7 +562,8 @@ impl Project { let env_dir = EnvDir::from_env_root(self.env_root.clone(), env_name).await?; let mut state_changes = StateChanges::new_with_env(env_name.clone()); - // Remove the environment from the manifest, if it exists, otherwise ignore error. + // Remove the environment from the manifest, if it exists, otherwise ignore + // error. self.manifest.remove_environment(env_name)?; // Remove the environment @@ -581,7 +589,8 @@ impl Project { Ok(state_changes) } - /// Find all binaries related to the environment and remove those that are not listed as exposed. + /// Find all binaries related to the environment and remove those that are + /// not listed as exposed. pub async fn prune_exposed(&self, env_name: &EnvironmentName) -> miette::Result { let mut state_changes = StateChanges::default(); let environment = self @@ -639,14 +648,14 @@ impl Project { Ok(executables_for_package) } - /// Sync the `exposed` field in manifest based on the executables in the environment and the expose type. - /// Expose type can be either: - /// * If the user initially chooses to auto-exposed everything, - /// we will add new binaries that are not exposed in the `exposed` field. 
+ /// Sync the `exposed` field in manifest based on the executables in the + /// environment and the expose type. Expose type can be either: + /// * If the user initially chooses to auto-expose everything, we will add + /// new binaries that are not exposed in the `exposed` field. /// - /// * If the use chose to expose only a subset of binaries, - /// we will remove the binaries that are not anymore present in the environment - /// and will not expose the new ones + /// * If the user chose to expose only a subset of binaries, we will remove + /// the binaries that are no longer present in the environment and will + /// not expose the new ones pub async fn sync_exposed_names( &mut self, env_name: &EnvironmentName, @@ -699,7 +708,8 @@ impl Project { } } ExposedType::Filter(filter) => { - // Add new binaries that are not yet exposed and that don't come from one of the packages we filter on + // Add new binaries that are not yet exposed and that don't come from one of the + // packages we filter on let executable_names = env_executables .into_iter() .filter_map(|(package_name, executable)| { @@ -743,8 +753,7 @@ impl Project { let specs = environment .dependencies - .clone() - .into_iter() + .iter() .map(|(name, spec)| { let match_spec = MatchSpec::from_nameless( spec.clone() @@ -800,8 +809,9 @@ impl Project { } /// Expose executables from the environment to the global bin directory. /// - /// This function will first remove all binaries that are not listed as exposed. - /// It will then create an activation script for the shell and create the scripts. + /// This function will first remove all binaries that are not listed as + /// exposed. It will then create an activation script for the shell and + /// create the scripts. pub async fn expose_executables_from_environment( &self, env_name: &EnvironmentName, @@ -1026,9 +1036,6 @@ impl Repodata for Project { mod tests { use std::{collections::HashMap, io::Write}; - use crate::global::trampoline::{Configuration, Trampoline}; - - use super::*; use fake::{faker::filesystem::zh_tw::FilePath, Fake}; use itertools::Itertools; use rattler_conda_types::{ @@ -1037,6 +1044,9 @@ mod tests { use tempfile::tempdir; use url::Url; + use super::*; + use crate::global::trampoline::{Configuration, Trampoline}; + const SIMPLE_MANIFEST: &str = r#" [envs.python] channels = ["dummy-channel"] @@ -1238,7 +1248,8 @@ mod tests { BinDir::new(env_root.path().parent().unwrap().to_path_buf()).unwrap(), ); - // Call the prune method with a list of environments to keep (env1 and env3) but not env4 + // Call the prune method with a list of environments to keep (env1 and env3) but + // not env4 let state_changes = project.prune_old_environments().await.unwrap(); assert_eq!( state_changes.changes(), @@ -1277,7 +1288,11 @@ mod tests { package_record: package_record.clone(), file_name: "doesnt_matter.conda".to_string(), url: Url::from_str("https://also_doesnt_matter").unwrap(), - channel: format!("{}{}", channel_config.channel_alias.clone(), "test-channel"), + channel: Some(format!( + "{}{}", + channel_config.channel_alias.clone(), + "test-channel" + )), }; let prefix_record = PrefixRecord::from_repodata_record( repodata_record, @@ -1303,7 +1318,7 @@ mod tests { package_record: package_record.clone(), file_name: "doesnt_matter.conda".to_string(), url: Url::from_str("https://also_doesnt_matter").unwrap(), - channel: "https://test-channel.com/idk".to_string(), + channel: Some("https://test-channel.com/idk".to_string()), }; let prefix_record = PrefixRecord::from_repodata_record(
repodata_record, diff --git a/src/global/project/parsed_manifest.rs b/src/global/project/parsed_manifest.rs index 23b26ebf3..08202762e 100644 --- a/src/global/project/parsed_manifest.rs +++ b/src/global/project/parsed_manifest.rs @@ -5,13 +5,17 @@ use std::path::Path; use std::str::FromStr; use super::environment::EnvironmentName; +use super::ExposedData; +use crate::global::Mapping; use console::StyledObject; use fancy_display::FancyDisplay; use indexmap::{IndexMap, IndexSet}; use itertools::Itertools; use miette::{Context, Diagnostic, IntoDiagnostic, LabeledSpan, NamedSource, Report}; use pixi_consts::consts; +use pixi_manifest::utils::package_map::UniquePackageMap; use pixi_manifest::PrioritizedChannel; +use pixi_spec::PixiSpec; use rattler_conda_types::{NamedChannelOrUrl, PackageName, Platform}; use serde::de::{Deserialize, Deserializer, Visitor}; use serde::ser::SerializeMap; @@ -20,10 +24,6 @@ use serde_with::{serde_as, serde_derive::Deserialize}; use thiserror::Error; use toml_edit::TomlError; -use super::ExposedData; -use crate::global::Mapping; -use pixi_spec::PixiSpec; - pub const GLOBAL_MANIFEST_VERSION: u32 = 1; #[derive(Debug, Clone, Serialize, Deserialize)] @@ -252,12 +252,12 @@ where #[derive(Deserialize, Serialize, Debug, Clone, Default)] #[serde(deny_unknown_fields, rename_all = "kebab-case")] pub(crate) struct ParsedEnvironment { - #[serde_as(as = "IndexSet")] + #[serde_as(as = "IndexSet")] pub channels: IndexSet, // Platform used by the environment. pub platform: Option, - #[serde(default, deserialize_with = "pixi_manifest::deserialize_package_map")] - pub(crate) dependencies: IndexMap, + #[serde(default)] + pub(crate) dependencies: UniquePackageMap, #[serde( default, deserialize_with = "deserialize_expose_mappings", diff --git a/src/global/snapshots/pixi__global__expose__tests__expose_add_when_binary_exist.snap b/src/global/snapshots/pixi__global__expose__tests__expose_add_when_binary_exist.snap deleted file mode 100644 index c345f0cd9..000000000 --- a/src/global/snapshots/pixi__global__expose__tests__expose_add_when_binary_exist.snap +++ /dev/null @@ -1,12 +0,0 @@ ---- -source: src/global/expose.rs -expression: project.manifest.document.to_string() ---- -[envs.python-3-10] -channels = ["conda-forge"] -[envs.python-3-10.dependencies] -python = "3.10" -[envs.python-3-10.exposed] -python = "python" -python3 = "python" -atuin = "atuin" diff --git a/src/install_pypi.rs b/src/install_pypi.rs index ff1c06402..26a331923 100644 --- a/src/install_pypi.rs +++ b/src/install_pypi.rs @@ -13,12 +13,13 @@ use miette::{IntoDiagnostic, WrapErr}; use pep440_rs::{Version, VersionSpecifiers}; use pixi_consts::consts; use pixi_manifest::{pyproject::PyProjectManifest, SystemRequirements}; +use pixi_record::PixiRecord; use pixi_uv_conversions::{ - isolated_names_to_packages, locked_indexes_to_index_locations, to_uv_normalize, to_uv_version, - to_uv_version_specifiers, ConversionError, + isolated_names_to_packages, locked_indexes_to_index_locations, names_to_build_isolation, + to_uv_normalize, to_uv_version, to_uv_version_specifiers, ConversionError, }; use pypi_modifiers::pypi_tags::{get_pypi_tags, is_python_record}; -use rattler_conda_types::{Platform, RepoDataRecord}; +use rattler_conda_types::Platform; use rattler_lock::{ PackageHashes, PypiIndexes, PypiPackageData, PypiPackageEnvironmentData, UrlOrPath, }; @@ -45,8 +46,6 @@ use uv_python::{Interpreter, PythonEnvironment}; use uv_resolver::{FlatIndex, InMemoryIndex}; use uv_types::HashStrategy; -use 
pixi_uv_conversions::names_to_build_isolation; - use crate::{ conda_pypi_clobber::PypiCondaClobberRegistry, lock_file::UvResolutionContext, @@ -188,7 +187,7 @@ fn convert_to_dist( lock_file_dir: &Path, ) -> Result { // Figure out if it is a url from the registry or a direct url - let dist = match &pkg.url_or_path { + let dist = match &pkg.location { UrlOrPath::Url(url) if is_direct_url(url.scheme()) => { let url_without_direct = strip_direct_scheme(url); let pkg_name = to_uv_normalize(&pkg.name)?; @@ -260,10 +259,11 @@ fn convert_to_dist( } } UrlOrPath::Path(path) => { + let native_path = Path::new(path.as_str()); let abs_path = if path.is_absolute() { - path.clone() + native_path.to_path_buf() } else { - lock_file_dir.join(path) + lock_file_dir.join(native_path) }; let absolute_url = VerbatimUrl::from_absolute_path(&abs_path)?; @@ -364,7 +364,7 @@ fn need_reinstall( match result { Ok(url) => { // Check if the urls are different - if Some(&url) == locked.url_or_path.as_url() { + if Some(&url) == locked.location.as_url() { // Check cache freshness if !check_url_freshness(&url, installed)? { return Ok(ValidateInstall::Reinstall); @@ -388,7 +388,7 @@ fn need_reinstall( // Subdirectory is either in the url or not supported subdirectory: _, } => { - let locked_url = match &locked.url_or_path { + let locked_url = match &locked.location { // Remove `direct+` scheme if it is there so we can compare the required to // the installed url UrlOrPath::Url(url) => strip_direct_scheme(url), @@ -422,7 +422,7 @@ fn need_reinstall( subdirectory: _, } => { let url = Url::parse(&url).into_diagnostic()?; - let git_url = match &locked.url_or_path { + let git_url = match &locked.location { UrlOrPath::Url(url) => ParsedGitUrl::try_from(url.clone()), UrlOrPath::Path(_path) => { // Previously @@ -616,7 +616,7 @@ fn whats_the_plan<'a>( pub async fn update_python_distributions( lock_file_dir: &Path, prefix: &Prefix, - conda_package: &[RepoDataRecord], + pixi_records: &[PixiRecord], python_packages: &[CombinedPypiPackageData], python_interpreter_path: &Path, system_requirements: &SystemRequirements, @@ -627,13 +627,17 @@ pub async fn update_python_distributions( non_isolated_packages: Option>, ) -> miette::Result<()> { let start = std::time::Instant::now(); - use pixi_consts::consts::PROJECT_MANIFEST; + // Determine the current environment markers. 
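For reference, the relative-path resolution in `convert_to_dist` above can be sketched with plain `std::path` types, as simplified stand-ins for rattler's `UrlOrPath`:

```rust
use std::path::{Path, PathBuf};

// A relative location from the lock file is resolved against the directory
// that contains the lock file; absolute paths are used as-is.
fn resolve_location(lock_file_dir: &Path, location: &str) -> PathBuf {
    let native_path = Path::new(location);
    if native_path.is_absolute() {
        native_path.to_path_buf()
    } else {
        lock_file_dir.join(native_path)
    }
}

fn main() {
    let dir = Path::new("/work/project");
    let resolved = resolve_location(dir, "dist/pkg-1.0-py3-none-any.whl");
    assert_eq!(resolved, Path::new("/work/project/dist/pkg-1.0-py3-none-any.whl"));
}
```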
-    let python_record = conda_package
+    let python_record = pixi_records
         .iter()
         .find(|r| is_python_record(r))
-        .ok_or_else(|| miette::miette!("could not resolve pypi dependencies because no python interpreter is added to the dependencies of the project.\nMake sure to add a python interpreter to the [dependencies] section of the {PROJECT_MANIFEST}, or run:\n\n\tpixi add python"))?;
-    let tags = get_pypi_tags(platform, system_requirements, &python_record.package_record)?;
+        .ok_or_else(|| miette::miette!("could not resolve pypi dependencies because no python interpreter is added to the dependencies of the project.\nMake sure to add a python interpreter to the [dependencies] section of the {manifest}, or run:\n\n\tpixi add python", manifest=consts::PROJECT_MANIFEST))?;
+    let tags = get_pypi_tags(
+        platform,
+        system_requirements,
+        python_record.package_record(),
+    )?;

     let index_locations = pypi_indexes
         .map(|indexes| locked_indexes_to_index_locations(indexes, lock_file_dir))
@@ -1030,7 +1034,7 @@ mod tests {
         let locked = PypiPackageData {
             name: "torch".parse().unwrap(),
             version: Version::from_str("2.3.0+cu121").unwrap(),
-            url_or_path: UrlOrPath::Url(url),
+            location: UrlOrPath::Url(url),
             hash: None,
             requires_dist: vec![],
             requires_python: None,
diff --git a/src/lib.rs b/src/lib.rs
index ce8438777..4011f9dc3 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -15,8 +15,9 @@ pub mod task;
 mod uv_reporter;

+mod build;
 mod rlimit;
+mod utils;

-pub use lock_file::load_lock_file;
-pub use lock_file::UpdateLockFileOptions;
+pub use lock_file::{load_lock_file, UpdateLockFileOptions};
 pub use project::{DependencyType, Project};
diff --git a/src/lock_file/mod.rs b/src/lock_file/mod.rs
index c70aca2e0..d38ce8b1e 100644
--- a/src/lock_file/mod.rs
+++ b/src/lock_file/mod.rs
@@ -1,17 +1,18 @@
 mod outdated;
 mod package_identifier;
 mod records_by_name;
+mod reporter;
 mod resolve;
 mod satisfiability;
 mod update;
 mod utils;

+use crate::Project;
 use miette::{IntoDiagnostic, WrapErr};
-pub(crate) use outdated::OutdatedEnvironments;
 pub(crate) use package_identifier::PypiPackageIdentifier;
-use rattler_conda_types::RepoDataRecord;
+use pixi_record::PixiRecord;
 use rattler_lock::{LockFile, PypiPackageData, PypiPackageEnvironmentData};
-pub(crate) use records_by_name::{PypiRecordsByName, RepoDataRecordsByName};
+pub(crate) use records_by_name::{PixiRecordsByName, PypiRecordsByName};
 pub(crate) use resolve::{
     conda::resolve_conda, pypi::resolve_pypi, uv_resolution_context::UvResolutionContext,
 };
@@ -23,10 +24,8 @@ pub(crate) use update::{LockFileDerivedData, UpdateContext};
 pub use update::{UpdateLockFileOptions, UpdateMode};
 pub(crate) use utils::filter_lock_file;

-use crate::Project;

 /// A list of conda packages that are locked for a specific platform.
-pub type LockedCondaPackages = Vec<RepoDataRecord>;
+pub type LockedCondaPackages = Vec<PixiRecord>;

 /// A list of Pypi packages that are locked for a specific platform.
 pub type LockedPypiPackages = Vec<PypiRecord>;
diff --git a/src/lock_file/outdated.rs b/src/lock_file/outdated.rs
index 41d91e9c1..a88bfcc97 100644
--- a/src/lock_file/outdated.rs
+++ b/src/lock_file/outdated.rs
@@ -1,28 +1,38 @@
-use super::{verify_environment_satisfiability, verify_platform_satisfiability};
-use crate::lock_file::satisfiability::EnvironmentUnsat;
-use crate::{project::Environment, project::SolveGroup, Project};
+use std::collections::{HashMap, HashSet};
+
 use fancy_display::FancyDisplay;
 use itertools::Itertools;
 use pixi_consts::consts;
 use pixi_manifest::FeaturesExt;
 use rattler_conda_types::Platform;
-use rattler_lock::{LockFile, Package};
-use std::collections::{HashMap, HashSet};
+use rattler_lock::{LockFile, LockedPackageRef};
+
+use super::{verify_environment_satisfiability, verify_platform_satisfiability};
+use crate::{
+    build::GlobHashCache,
+    lock_file::satisfiability::EnvironmentUnsat,
+    project::{Environment, SolveGroup},
+    Project,
+};

 /// A struct that contains information about specific outdated environments.
 ///
-/// Use the [`OutdatedEnvironments::from_project_and_lock_file`] to create an instance of this
-/// struct by examining the project and lock-file and finding any mismatches.
+/// Use the [`OutdatedEnvironments::from_project_and_lock_file`] to create an
+/// instance of this struct by examining the project and lock-file and finding
+/// any mismatches.
 #[derive(Debug)]
 pub struct OutdatedEnvironments<'p> {
-    /// The conda environments that are considered out of date with the lock-file.
+    /// The conda environments that are considered out of date with the
+    /// lock-file.
     pub conda: HashMap<Environment<'p>, HashSet<Platform>>,

-    /// The pypi environments that are considered out of date with the lock-file.
+    /// The pypi environments that are considered out of date with the
+    /// lock-file.
     pub pypi: HashMap<Environment<'p>, HashSet<Platform>>,

-    /// Records the environments for which the lock-file content should also be discarded. This is
-    /// the case for instance when the order of the channels changed.
+    /// Records the environments for which the lock-file content should also be
+    /// discarded. This is the case for instance when the order of the
+    /// channels changed.
     pub disregard_locked_content: DisregardLockedContent<'p>,
 }
@@ -49,9 +59,13 @@ impl<'p> DisregardLockedContent<'p> {
 }

 impl<'p> OutdatedEnvironments<'p> {
-    /// Constructs a new instance of this struct by examining the project and lock-file and finding
-    /// any mismatches.
-    pub(crate) fn from_project_and_lock_file(project: &'p Project, lock_file: &LockFile) -> Self {
+    /// Constructs a new instance of this struct by examining the project and
+    /// lock-file and finding any mismatches.
+    pub(crate) async fn from_project_and_lock_file(
+        project: &'p Project,
+        lock_file: &LockFile,
+        glob_hash_cache: GlobHashCache,
+    ) -> Self {
         let mut outdated_conda: HashMap<_, HashSet<_>> = HashMap::new();
         let mut outdated_pypi: HashMap<_, HashSet<_>> = HashMap::new();
         let mut disregard_locked_content = DisregardLockedContent::default();
@@ -63,13 +77,16 @@ impl<'p> OutdatedEnvironments<'p> {
             &mut outdated_conda,
             &mut outdated_pypi,
             &mut disregard_locked_content,
-        );
+            glob_hash_cache,
+        )
+        .await;

         // Extend the outdated targets to include the solve groups
         let (mut conda_solve_groups_out_of_date, mut pypi_solve_groups_out_of_date) =
             map_outdated_targets_to_solve_groups(&outdated_conda, &outdated_pypi);

-        // Find all the solve groups that have inconsistent dependencies between environments.
+        // Find all the solve groups that have inconsistent dependencies between
+        // environments.
         find_inconsistent_solve_groups(
             project,
             lock_file,
@@ -97,7 +114,8 @@ impl<'p> OutdatedEnvironments<'p> {
             }
         }

-        // For all targets where conda is out of date, the pypi packages are also out of date.
+        // For all targets where conda is out of date, the pypi packages are also out of
+        // date.
         for (environment, platforms) in outdated_conda.iter() {
             outdated_pypi
                 .entry(environment.clone())
@@ -112,21 +130,22 @@ impl<'p> OutdatedEnvironments<'p> {
         }
     }

-    /// Returns true if the lock-file is up-to-date with the project (e.g. there are no
-    /// outdated targets).
+    /// Returns true if the lock-file is up-to-date with the project (e.g. there
+    /// are no outdated targets).
     pub(crate) fn is_empty(&self) -> bool {
         self.conda.is_empty() && self.pypi.is_empty()
     }
 }

-/// Find all targets (combination of environment and platform) who's requirements in the `project`
-/// are not satisfied by the `lock_file`.
-fn find_unsatisfiable_targets<'p>(
+/// Find all targets (combination of environment and platform) whose
+/// requirements in the `project` are not satisfied by the `lock_file`.
+async fn find_unsatisfiable_targets<'p>(
     project: &'p Project,
     lock_file: &LockFile,
     outdated_conda: &mut HashMap<Environment<'p>, HashSet<Platform>>,
     outdated_pypi: &mut HashMap<Environment<'p>, HashSet<Platform>>,
     disregard_locked_content: &mut DisregardLockedContent<'p>,
+    glob_hash_cache: GlobHashCache,
 ) {
     for environment in project.environments() {
         let platforms = environment.platforms();
@@ -147,7 +166,7 @@ fn find_unsatisfiable_targets<'p>(
         };

         // The locked environment exists, but does it match our project environment?
-        if let Err(unsat) = verify_environment_satisfiability(&environment, &locked_environment) {
+        if let Err(unsat) = verify_environment_satisfiability(&environment, locked_environment) {
             tracing::info!(
                 "environment '{0}' is out of date because {unsat}",
                 environment.name().fancy_display()
@@ -177,10 +196,13 @@ fn find_unsatisfiable_targets<'p>(
         for platform in platforms {
             match verify_platform_satisfiability(
                 &environment,
-                &locked_environment,
+                locked_environment,
                 platform,
                 project.root(),
-            ) {
+                glob_hash_cache.clone(),
+            )
+            .await
+            {
                 Ok(_) => {}
                 Err(unsat) if unsat.is_pypi_only() => {
                     tracing::info!(
@@ -209,11 +231,12 @@ fn find_unsatisfiable_targets<'p>(
     }
 }

-/// Given a mapping of outdated targets, construct a new mapping of all the groups that are out of
-/// date.
+/// Given a mapping of outdated targets, construct a new mapping of all the
+/// groups that are out of date.
 ///
-/// If one of the environments in a solve-group is no longer satisfied by the lock-file all the
-/// environments in the same solve-group have to be recomputed.
+/// If one of the environments in a solve-group is no longer satisfied by the
+/// lock-file all the environments in the same solve-group have to be
+/// recomputed.
 fn map_outdated_targets_to_solve_groups<'p>(
     outdated_conda: &HashMap<Environment<'p>, HashSet<Platform>>,
     outdated_pypi: &HashMap<Environment<'p>, HashSet<Platform>>,
@@ -252,12 +275,13 @@ fn map_outdated_targets_to_solve_groups<'p>(
     )
 }

-/// Given a `project` and `lock_file`, finds all the solve-groups that have inconsistent
-/// dependencies between environments.
+/// Given a `project` and `lock_file`, finds all the solve-groups that have
+/// inconsistent dependencies between environments.
 ///
-/// All environments in a solve-group must share the same dependencies. This function iterates over
-/// solve-groups and checks if the dependencies of all its environments are the same. For each
-/// package name, only one candidate is allowed.
+/// All environments in a solve-group must share the same dependencies. This
+/// function iterates over solve-groups and checks if the dependencies of all
+/// its environments are the same. For each package name, only one candidate is
+/// allowed.
 fn find_inconsistent_solve_groups<'p>(
     project: &'p Project,
     lock_file: &LockFile,
@@ -279,7 +303,8 @@ fn find_inconsistent_solve_groups<'p>(
         let mut conda_package_mismatch = false;
         let mut pypi_package_mismatch = false;

-        // Keep track of the packages by name to check for mismatches between environments.
+        // Keep track of the packages by name to check for mismatches between
+        // environments.
         let mut conda_packages_by_name = HashMap::new();
         let mut pypi_packages_by_name = HashMap::new();
@@ -302,30 +327,27 @@ fn find_inconsistent_solve_groups<'p>(
             for package in locked_env.packages(platform).into_iter().flatten() {
                 match package {
-                    Package::Conda(pkg) => {
-                        match conda_packages_by_name.get(&pkg.package_record().name) {
+                    LockedPackageRef::Conda(pkg) => {
+                        match conda_packages_by_name.get(&pkg.record().name) {
                             None => {
                                 conda_packages_by_name
-                                    .insert(pkg.package_record().name.clone(), pkg.url().clone());
+                                    .insert(pkg.record().name.clone(), pkg.location().clone());
                             }
-                            Some(url) if pkg.url() != url => {
+                            Some(url) if pkg.location() != url => {
                                 conda_package_mismatch = true;
                             }
                             _ => {}
                         }
                     }
-                    Package::Pypi(pkg) => {
-                        match pypi_packages_by_name.get(&pkg.data().package.name) {
-                            None => {
-                                pypi_packages_by_name
-                                    .insert(pkg.data().package.name.clone(), pkg.url().clone());
-                            }
-                            Some(url) if pkg.url() != url => {
-                                pypi_package_mismatch = true;
-                            }
-                            _ => {}
+                    LockedPackageRef::Pypi(pkg, _) => match pypi_packages_by_name.get(&pkg.name) {
+                        None => {
+                            pypi_packages_by_name.insert(pkg.name.clone(), pkg.location.clone());
                         }
-                    }
+                        Some(url) if &pkg.location != url => {
+                            pypi_package_mismatch = true;
+                        }
+                        _ => {}
+                    },
                 }

                 // If there is a conda package mismatch there is also a pypi mismatch and we
diff --git a/src/lock_file/package_identifier.rs b/src/lock_file/package_identifier.rs
index 0317230c0..d22c5a7b4 100644
--- a/src/lock_file/package_identifier.rs
+++ b/src/lock_file/package_identifier.rs
@@ -1,15 +1,15 @@
 use pixi_uv_conversions::{
     to_normalize, to_uv_normalize, to_uv_version, ConversionError as PixiConversionError,
 };
-use rattler_conda_types::{PackageUrl, RepoDataRecord};
+use rattler_conda_types::{PackageRecord, PackageUrl, RepoDataRecord};
 use std::{collections::HashSet, str::FromStr};
 use thiserror::Error;

 use pixi_manifest::pypi::PyPiPackageName;
 use uv_normalize::{ExtraName, InvalidNameError};

-/// Defines information about a Pypi package extracted from either a python package or from a
-/// conda package. That can be used for comparison in both
+/// Defines information about a Pypi package extracted from either a python
+/// package or from a conda package. That can be used for comparison in both ecosystems.
 #[derive(Debug)]
 pub struct PypiPackageIdentifier {
     pub name: PyPiPackageName,
@@ -18,32 +18,40 @@ pub struct PypiPackageIdentifier {
 }

 impl PypiPackageIdentifier {
-    /// Extracts the python packages that will be installed when the specified conda package is
-    /// installed.
-    pub(crate) fn from_record(record: &RepoDataRecord) -> Result<Vec<Self>, ConversionError> {
+    /// Extracts the python packages that will be installed when the specified
+    /// conda package is installed.
+    pub(crate) fn from_repodata_record(
+        record: &RepoDataRecord,
+    ) -> Result<Vec<Self>, ConversionError> {
         let mut result = Vec::new();
         Self::from_record_into(record, &mut result)?;
         Ok(result)
     }

-    /// Helper function to write the result of extract the python packages that will be installed
-    /// into a pre-allocated vector.
+    pub fn from_package_record(record: &PackageRecord) -> Result<Vec<Self>, ConversionError> {
+        let mut result = Vec::new();
+        if let Some(purls) = &record.purls {
+            for purl in purls.iter() {
+                if let Some(entry) = Self::convert_from_purl(purl, &record.version.as_str())? {
+                    result.push(entry);
+                }
+            }
+        }
+        Ok(result)
+    }
+
+    /// Helper function to write the result of extracting the python packages
+    /// that will be installed into a pre-allocated vector.
     fn from_record_into(
         record: &RepoDataRecord,
        result: &mut Vec<PypiPackageIdentifier>,
     ) -> Result<(), ConversionError> {
         let mut has_pypi_purl = false;

-        // Check the PURLs for a python package.
-        if let Some(purls) = &record.package_record.purls {
-            for purl in purls.iter() {
-                if let Some(entry) =
-                    Self::convert_from_purl(purl, &record.package_record.version.as_str())?
-                {
-                    result.push(entry);
-                    has_pypi_purl = true;
-                }
-            }
+        let identifiers = Self::from_package_record(&record.package_record)?;
+        if !identifiers.is_empty() {
+            has_pypi_purl = true;
+            result.extend(identifiers);
         }

         // Backwards compatibility:
@@ -56,7 +64,7 @@ impl PypiPackageIdentifier {
             && !has_pypi_purl
             && pypi_mapping::is_conda_forge_record(record)
         {
-            tracing::debug!(
+            tracing::trace!(
                 "Using backwards compatibility purl logic for conda package: {}",
                 record.package_record.name.as_source()
             );
@@ -72,7 +80,8 @@ impl PypiPackageIdentifier {
             result.push(PypiPackageIdentifier {
                 name: PyPiPackageName::from_normalized(pep_name),
                 version,
-                // TODO: We can't really tell which python extras are enabled in a conda package.
+                // TODO: We can't really tell which python extras are enabled in a conda
+                // package.
                 extras: Default::default(),
             })
         }
diff --git a/src/lock_file/records_by_name.rs b/src/lock_file/records_by_name.rs
index 201669ad7..2aae0fe85 100644
--- a/src/lock_file/records_by_name.rs
+++ b/src/lock_file/records_by_name.rs
@@ -1,15 +1,15 @@
 use super::package_identifier::ConversionError;
 use crate::lock_file::{PypiPackageIdentifier, PypiRecord};
-use pep508_rs::PackageName;
+use pixi_record::PixiRecord;
 use pixi_uv_conversions::to_uv_normalize;
 use pypi_modifiers::pypi_tags::is_python_record;
-use rattler_conda_types::{RepoDataRecord, VersionWithSource};
+use rattler_conda_types::{PackageName, RepoDataRecord, VersionWithSource};
 use std::collections::hash_map::Entry;
 use std::collections::HashMap;
 use std::hash::Hash;

-pub(crate) type RepoDataRecordsByName = DependencyRecordsByName<RepoDataRecord>;
 pub(crate) type PypiRecordsByName = DependencyRecordsByName<PypiRecord>;
+pub(crate) type PixiRecordsByName = DependencyRecordsByName<PixiRecord>;

 /// A trait required from the dependencies stored in DependencyRecordsByName
 pub(crate) trait HasNameVersion {
@@ -25,10 +25,10 @@ pub(crate) trait HasNameVersion {
 }

 impl HasNameVersion for PypiRecord {
-    type N = PackageName;
+    type N = pep508_rs::PackageName;
     type V = pep440_rs::Version;

-    fn name(&self) -> &PackageName {
+    fn name(&self) -> &pep508_rs::PackageName {
         &self.0.name
     }
     fn version(&self) -> &Self::V {
@@ -47,7 +47,21 @@ impl HasNameVersion for RepoDataRecord {
     }
 }

-/// A struct that holds both a ``Vec` of `DependencyRecord` and a mapping from name to index.
+impl HasNameVersion for PixiRecord {
+    type N = PackageName;
+    type V = VersionWithSource;
+
+    fn name(&self) -> &Self::N {
+        &self.package_record().name
+    }
+
+    fn version(&self) -> &Self::V {
+        &self.package_record().version
+    }
+}
+
+/// A struct that holds both a `Vec` of `DependencyRecord` and a mapping from
+/// name to index.
 #[derive(Clone, Debug)]
 pub(crate) struct DependencyRecordsByName<D: HasNameVersion> {
     pub(crate) records: Vec<D>,
@@ -75,12 +89,14 @@ impl<D: HasNameVersion> From<Vec<D>> for DependencyRecordsByName<D> {
 }

 impl<D: HasNameVersion> DependencyRecordsByName<D> {
-    /// Returns the record with the given name or `None` if no such record exists.
+    /// Returns the record with the given name or `None` if no such record
+    /// exists.
     pub(crate) fn by_name(&self, key: &D::N) -> Option<&D> {
         self.by_name.get(key).map(|idx| &self.records[*idx])
     }

-    /// Returns the index of the record with the given name or `None` if no such record exists.
+    /// Returns the index of the record with the given name or `None` if no such
+    /// record exists.
     pub(crate) fn index_by_name(&self, key: &D::N) -> Option<usize> {
         self.by_name.get(key).copied()
     }
@@ -99,14 +115,15 @@
         self.records
     }

-    /// Returns an iterator over the names of the records stored in this instance.
+    /// Returns an iterator over the names of the records stored in this
+    /// instance.
     pub(crate) fn names(&self) -> impl Iterator<Item = &D::N> {
         // Iterate over the records to retain the index of the original record.
         self.records.iter().map(|r| r.name())
     }

-    /// Constructs a new instance from an iterator of pypi records. If multiple records exist
-    /// for the same package name an error is returned.
+    /// Constructs a new instance from an iterator of pypi records. If multiple
+    /// records exist for the same package name an error is returned.
     pub(crate) fn from_unique_iter<I: IntoIterator<Item = D>>(iter: I) -> Result<Self, Box<D>> {
         let iter = iter.into_iter();
         let min_size = iter.size_hint().0;
@@ -127,8 +144,9 @@
         Ok(Self { records, by_name })
     }

-    /// Constructs a new instance from an iterator of repodata records. The records are
-    /// deduplicated where the record with the highest version wins.
+    /// Constructs a new instance from an iterator of repodata records. The
+    /// records are deduplicated where the record with the highest version
+    /// wins.
     pub(crate) fn from_iter<I: IntoIterator<Item = D>>(iter: I) -> Self {
         let iter = iter.into_iter();
         let min_size = iter.size_hint().0;
@@ -155,27 +173,38 @@
     }
 }

-impl RepoDataRecordsByName {
-    /// Returns the record that represents the python interpreter or `None` if no such record exists.
+impl PixiRecordsByName {
+    /// Returns the record that represents the python interpreter or `None` if
+    /// no such record exists.
     pub(crate) fn python_interpreter_record(&self) -> Option<&RepoDataRecord> {
-        self.records.iter().find(|record| is_python_record(*record))
+        self.records.iter().find_map(|record| match record {
+            PixiRecord::Binary(record) if is_python_record(record) => Some(record),
+            _ => None,
+        })
     }

-    /// Convert the records into a map of pypi package identifiers mapped to the records they were
-    /// extracted from.
+    /// Convert the records into a map of pypi package identifiers mapped to the
+    /// records they were extracted from.
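[Editor's note] The `from_iter` constructor above deduplicates records by name, keeping the record with the highest version. A minimal, self-contained sketch of that bookkeeping, using a hypothetical `SimpleRecord` in place of pixi's generic `HasNameVersion` records:

use std::collections::HashMap;

// Hypothetical stand-in for a record type with a name and an ordered version.
#[derive(Clone, Debug)]
struct SimpleRecord {
    name: String,
    version: u32,
}

/// Keeps a Vec of records plus a name -> index map; on duplicate names the
/// record with the higher version replaces the earlier entry.
#[derive(Default)]
struct RecordsByName {
    records: Vec<SimpleRecord>,
    by_name: HashMap<String, usize>,
}

impl RecordsByName {
    fn insert(&mut self, record: SimpleRecord) {
        match self.by_name.get(&record.name) {
            // An equal-or-newer record is already stored: keep it.
            Some(&idx) if self.records[idx].version >= record.version => {}
            // A lower version is stored: the new record wins.
            Some(&idx) => self.records[idx] = record,
            None => {
                self.by_name.insert(record.name.clone(), self.records.len());
                self.records.push(record);
            }
        }
    }

    fn by_name(&self, name: &str) -> Option<&SimpleRecord> {
        self.by_name.get(name).map(|&idx| &self.records[idx])
    }
}

fn main() {
    let mut records = RecordsByName::default();
    records.insert(SimpleRecord { name: "python".into(), version: 311 });
    records.insert(SimpleRecord { name: "python".into(), version: 312 });
    // The duplicate with the higher version wins.
    assert_eq!(records.by_name("python").map(|r| r.version), Some(312));
}

Storing indices into `records` keeps lookups O(1) while preserving the insertion order of first occurrences, which is the same design the generic struct above uses.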
pub(crate) fn by_pypi_name( &self, ) -> Result< - HashMap, + HashMap, ConversionError, > { self.records .iter() .enumerate() - .filter_map(|(idx, record)| { - PypiPackageIdentifier::from_record(record) - .ok() - .map(move |identifiers| (idx, record, identifiers)) + .filter_map(|(idx, record)| match record { + PixiRecord::Binary(repodata_record) => { + PypiPackageIdentifier::from_repodata_record(repodata_record) + .ok() + .map(move |identifiers| (idx, record, identifiers)) + } + PixiRecord::Source(source_record) => { + PypiPackageIdentifier::from_package_record(&source_record.package_record) + .ok() + .map(move |identifiers| (idx, record, identifiers)) + } }) .flat_map(|(idx, record, identifiers)| { identifiers.into_iter().map(move |identifier| { diff --git a/src/lock_file/reporter/gateway_reporter.rs b/src/lock_file/reporter/gateway_reporter.rs new file mode 100644 index 000000000..3b4be8c34 --- /dev/null +++ b/src/lock_file/reporter/gateway_reporter.rs @@ -0,0 +1,147 @@ +use std::{collections::VecDeque, sync::Arc, time::Instant}; + +use parking_lot::Mutex; +use url::Url; + +use super::SolveProgressBar; + +pub struct GatewayProgressReporter { + inner: Mutex, +} + +impl GatewayProgressReporter { + pub(crate) fn new(pb: Arc) -> Self { + Self { + inner: Mutex::new(InnerProgressState { + pb, + downloads: VecDeque::new(), + + bytes_downloaded: 0, + total_bytes: 0, + total_pending_downloads: 0, + + jlap: VecDeque::default(), + total_pending_jlap: 0, + }), + } + } +} + +struct InnerProgressState { + pb: Arc, + + downloads: VecDeque, + + bytes_downloaded: usize, + total_bytes: usize, + total_pending_downloads: usize, + + jlap: VecDeque, + total_pending_jlap: usize, +} + +impl InnerProgressState { + fn update_progress(&self) { + if self.total_pending_downloads > 0 { + self.pb.set_bytes_update_style(self.total_bytes); + self.pb.set_position(self.bytes_downloaded as u64); + self.pb.set_message("downloading repodata"); + } else if self.total_pending_jlap > 0 { + self.pb.reset_style(); + self.pb.set_message("applying JLAP patches"); + } else { + self.pb.reset_style(); + self.pb.set_message("parsing repodata"); + } + } +} + +struct DownloadState { + _started_at: Instant, + bytes_downloaded: usize, + total_size: usize, + _finished_at: Option, +} + +struct JLAPState { + _started_at: Instant, + _finished_at: Option, +} + +impl rattler_repodata_gateway::Reporter for GatewayProgressReporter { + fn on_download_start(&self, _url: &Url) -> usize { + let mut inner = self.inner.lock(); + let download_idx = inner.downloads.len(); + inner.downloads.push_back(DownloadState { + _started_at: Instant::now(), + bytes_downloaded: 0, + total_size: 0, + _finished_at: None, + }); + inner.total_pending_downloads += 1; + inner.update_progress(); + download_idx + } + + fn on_download_progress( + &self, + _url: &Url, + index: usize, + bytes_downloaded: usize, + total_bytes: Option, + ) { + let mut inner = self.inner.lock(); + + let download = inner + .downloads + .get_mut(index) + .expect("download index should exist"); + + let prev_bytes_downloaded = download.bytes_downloaded; + let prev_total_size = download.total_size; + download.bytes_downloaded = bytes_downloaded; + download.total_size = total_bytes.unwrap_or(0); + + inner.bytes_downloaded = inner.bytes_downloaded + bytes_downloaded - prev_bytes_downloaded; + inner.total_bytes = inner.total_bytes + total_bytes.unwrap_or(0) - prev_total_size; + + inner.update_progress(); + } + + fn on_download_complete(&self, _url: &Url, _index: usize) { + let mut inner = self.inner.lock(); 
+ let download = inner + .downloads + .get_mut(_index) + .expect("download index should exist"); + download._finished_at = Some(Instant::now()); + + inner.total_pending_downloads -= 1; + + inner.update_progress(); + } + + fn on_jlap_start(&self) -> usize { + let mut inner = self.inner.lock(); + + let index = inner.jlap.len(); + inner.jlap.push_back(JLAPState { + _started_at: Instant::now(), + _finished_at: None, + }); + inner.total_pending_jlap += 1; + + inner.update_progress(); + + index + } + + fn on_jlap_completed(&self, index: usize) { + let mut inner = self.inner.lock(); + let jlap = inner.jlap.get_mut(index).expect("jlap index should exist"); + jlap._finished_at = Some(Instant::now()); + inner.total_pending_jlap -= 1; + + inner.update_progress(); + } +} diff --git a/src/lock_file/reporter/mod.rs b/src/lock_file/reporter/mod.rs new file mode 100644 index 000000000..b8996d410 --- /dev/null +++ b/src/lock_file/reporter/mod.rs @@ -0,0 +1,8 @@ +mod gateway_reporter; +mod progress_bar; +mod purl_amend; + +pub(crate) use gateway_reporter::GatewayProgressReporter; +pub(crate) use progress_bar::CondaMetadataProgress; +pub(crate) use progress_bar::SolveProgressBar; +pub(crate) use purl_amend::PurlAmendReporter; diff --git a/src/lock_file/reporter/progress_bar.rs b/src/lock_file/reporter/progress_bar.rs new file mode 100644 index 000000000..c0fd833d5 --- /dev/null +++ b/src/lock_file/reporter/progress_bar.rs @@ -0,0 +1,174 @@ +use std::{borrow::Cow, fmt::Write, sync::Arc, time::Duration}; + +use indicatif::{HumanBytes, ProgressBar, ProgressState}; +use pixi_build_frontend::CondaMetadataReporter; +use pixi_consts::consts; +use pypi_mapping::Reporter; +use rattler_conda_types::Platform; + +use super::PurlAmendReporter; +use crate::{build::BuildMetadataReporter, project::grouped_environment::GroupedEnvironmentName}; + +/// A helper struct that manages a progress-bar for solving an environment. 
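[Editor's note] The gateway reporter above folds per-download progress into aggregate totals by applying deltas against the previously reported values, so the sums stay correct however the callbacks interleave. A standalone sketch of that accounting pattern (not the code from this patch; the types are invented for illustration):

/// Aggregate totals across all downloads.
struct Totals {
    bytes_downloaded: usize,
    total_bytes: usize,
}

/// Last-reported state for a single download.
struct Download {
    bytes_downloaded: usize,
    total_size: usize,
}

impl Totals {
    fn on_progress(&mut self, download: &mut Download, bytes: usize, total: Option<usize>) {
        let prev_bytes = download.bytes_downloaded;
        let prev_total = download.total_size;
        download.bytes_downloaded = bytes;
        download.total_size = total.unwrap_or(0);

        // Apply only the delta, mirroring `on_download_progress` above.
        self.bytes_downloaded = self.bytes_downloaded + bytes - prev_bytes;
        self.total_bytes = self.total_bytes + total.unwrap_or(0) - prev_total;
    }
}

fn main() {
    let mut totals = Totals { bytes_downloaded: 0, total_bytes: 0 };
    let mut dl = Download { bytes_downloaded: 0, total_size: 0 };
    totals.on_progress(&mut dl, 512, Some(2048));
    totals.on_progress(&mut dl, 1024, Some(2048));
    assert_eq!((totals.bytes_downloaded, totals.total_bytes), (1024, 2048));
}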
+#[derive(Clone)] +pub(crate) struct SolveProgressBar { + pub pb: ProgressBar, +} + +impl SolveProgressBar { + pub(crate) fn new( + pb: ProgressBar, + platform: Platform, + environment_name: GroupedEnvironmentName, + ) -> Self { + let name_and_platform = format!( + "{}:{}", + environment_name.fancy_display(), + consts::PLATFORM_STYLE.apply_to(platform) + ); + + pb.set_style(indicatif::ProgressStyle::with_template(" {prefix:20!} ..").unwrap()); + pb.enable_steady_tick(Duration::from_millis(100)); + pb.set_prefix(name_and_platform); + Self { pb } + } + + pub(crate) fn start(&self) { + self.pb.reset_elapsed(); + self.reset_style() + } + + pub(crate) fn set_message(&self, msg: impl Into>) { + self.pb.set_message(msg); + } + + pub(crate) fn inc(&self, n: u64) { + self.pb.inc(n); + } + + pub(crate) fn set_position(&self, n: u64) { + self.pb.set_position(n) + } + + pub(crate) fn set_update_style(&self, total: usize) { + self.pb.set_length(total as u64); + self.pb.set_position(0); + self.pb.set_style( + indicatif::ProgressStyle::with_template( + " {spinner:.dim} {prefix:20!} [{elapsed_precise}] [{bar:20!.bright.yellow/dim.white}] {pos:>4}/{len:4} {msg:.dim}") + .unwrap() + .progress_chars("━━╾─"), + ); + } + + pub(crate) fn set_bytes_update_style(&self, total: usize) { + self.pb.set_length(total as u64); + self.pb.set_position(0); + self.pb.set_style( + indicatif::ProgressStyle::with_template( + " {spinner:.dim} {prefix:20!} [{elapsed_precise}] [{bar:20!.bright.yellow/dim.white}] {bytes:>8} @ {smoothed_bytes_per_sec:8} {msg:.dim}") + .unwrap() + .progress_chars("━━╾─") + .with_key( + "smoothed_bytes_per_sec", + |s: &ProgressState, w: &mut dyn Write| match (s.pos(), s.elapsed().as_millis()) { + (pos, elapsed_ms) if elapsed_ms > 0 => { + write!(w, "{}/s", HumanBytes((pos as f64 * 1000_f64 / elapsed_ms as f64) as u64)).unwrap() + } + _ => write!(w, "-").unwrap(), + }, + ) + ); + } + + pub(crate) fn reset_style(&self) { + self.pb.set_style( + indicatif::ProgressStyle::with_template( + " {spinner:.dim} {prefix:20!} [{elapsed_precise}] {msg:.dim}", + ) + .unwrap(), + ); + } + + pub(crate) fn finish(&self) { + self.pb.set_style( + indicatif::ProgressStyle::with_template(&format!( + " {} {{prefix:20!}} [{{elapsed_precise}}]", + console::style(console::Emoji("✔", "↳")).green(), + )) + .unwrap(), + ); + self.pb.finish_and_clear(); + } + + pub(crate) fn purl_amend_reporter(self: &Arc) -> Arc { + Arc::new(PurlAmendReporter::new(self.clone())) + } +} + +/// Struct that manages the progress for getting source metadata. +pub(crate) struct CondaMetadataProgress { + progress_bar: ProgressBar, +} + +impl CondaMetadataProgress { + /// Creates a new progress bar for the metadata, and activates it + pub(crate) fn new(original_progress: &ProgressBar, num_packages: u64) -> Self { + // Create a new progress bar. 
+ let pb = pixi_progress::global_multi_progress() + .insert_after(original_progress, ProgressBar::hidden()); + pb.set_length(num_packages); + pb.set_style(pixi_progress::default_progress_style()); + // Building the package + pb.set_prefix("retrieving metadata"); + pb.enable_steady_tick(Duration::from_millis(100)); + Self { progress_bar: pb } + } +} + +impl CondaMetadataProgress { + /// Use this method to increment the progress bar + /// It will also check if the progress bar is finished + pub fn increment(&self) { + self.progress_bar.inc(1); + self.check_finish(); + } + + /// Check if the progress bar is finished + /// and clears it + fn check_finish(&self) { + if self.progress_bar.position() + == self + .progress_bar + .length() + .expect("expected length to be set for progress") + { + self.progress_bar.set_message(""); + self.progress_bar.finish_and_clear(); + } + } +} + +impl CondaMetadataReporter for CondaMetadataProgress { + fn on_metadata_start(&self, _build_id: usize) -> usize { + // Started metadata extraction + self.progress_bar.set_message("extracting"); + 0 + } + + fn on_metadata_end(&self, _operation: usize) { + // Finished metadata extraction + self.increment(); + } +} + +// This is the same but for the cached variants +impl BuildMetadataReporter for CondaMetadataProgress { + fn on_metadata_cached(&self, _build_id: usize) { + self.increment(); + } + + fn as_conda_metadata_reporter(self: Arc) -> Arc { + self.clone() + } +} diff --git a/src/lock_file/reporter/purl_amend.rs b/src/lock_file/reporter/purl_amend.rs new file mode 100644 index 000000000..35cf73a39 --- /dev/null +++ b/src/lock_file/reporter/purl_amend.rs @@ -0,0 +1,38 @@ +use std::sync::{ + atomic::{AtomicBool, Ordering}, + Arc, +}; + +use rattler_conda_types::RepoDataRecord; + +use crate::lock_file::reporter::SolveProgressBar; + +pub(crate) struct PurlAmendReporter { + pb: Arc, + style_set: AtomicBool, +} + +impl PurlAmendReporter { + pub(super) fn new(pb: Arc) -> Self { + Self { + pb, + style_set: AtomicBool::new(false), + } + } +} + +impl pypi_mapping::Reporter for PurlAmendReporter { + fn download_started(&self, _package: &RepoDataRecord, total: usize) { + if !self.style_set.swap(true, Ordering::Relaxed) { + self.pb.set_update_style(total); + } + } + + fn download_finished(&self, _package: &RepoDataRecord, _total: usize) { + self.pb.inc(1); + } + + fn download_failed(&self, package: &RepoDataRecord, total: usize) { + self.download_finished(package, total); + } +} diff --git a/src/lock_file/resolve/conda.rs b/src/lock_file/resolve/conda.rs index 6137d205c..00608aba9 100644 --- a/src/lock_file/resolve/conda.rs +++ b/src/lock_file/resolve/conda.rs @@ -1,9 +1,16 @@ +use ahash::HashMap; +use itertools::Itertools; use miette::IntoDiagnostic; +use pixi_record::{PixiRecord, SourceRecord}; use rattler_conda_types::{GenericVirtualPackage, MatchSpec, RepoDataRecord}; use rattler_repodata_gateway::RepoData; use rattler_solve::{resolvo, ChannelPriority, SolverImpl}; +use url::Url; -use crate::lock_file::LockedCondaPackages; +use crate::{ + build::{SourceCheckout, SourceMetadata}, + lock_file::LockedCondaPackages, +}; /// Solves the conda package environment for the given input. This function is /// async because it spawns a background task for the solver. 
Since solving is a
@@ -12,21 +19,63 @@
 pub async fn resolve_conda(
     specs: Vec<MatchSpec>,
     virtual_packages: Vec<GenericVirtualPackage>,
     locked_packages: Vec<RepoDataRecord>,
-    available_packages: Vec<RepoData>,
+    available_repodata: Vec<RepoData>,
+    available_source_packages: Vec<SourceMetadata>,
     channel_priority: ChannelPriority,
 ) -> miette::Result<LockedCondaPackages> {
     tokio::task::spawn_blocking(move || {
+        // Combine the repodata from the source packages and from registry channels.
+        let mut url_to_source_package = HashMap::default();
+        for source_metadata in available_source_packages {
+            for record in source_metadata.records {
+                let url = unique_url(&source_metadata.source, &record);
+                let repodata_record = RepoDataRecord {
+                    package_record: record.package_record.clone(),
+                    url: url.clone(),
+                    file_name: format!(
+                        "{}-{}-{}.source",
+                        record.package_record.name.as_normalized(),
+                        &record.package_record.version,
+                        &record.package_record.build
+                    ),
+                    channel: None,
+                };
+                url_to_source_package.insert(url, (record, repodata_record));
+            }
+        }
+
+        let mut solvable_records = Vec::with_capacity(available_repodata.len() + 1);
+        solvable_records.push(
+            url_to_source_package
+                .values()
+                .map(|(_, record)| record)
+                .collect_vec(),
+        );
+        for repo_data in &available_repodata {
+            solvable_records.push(repo_data.iter().collect_vec());
+        }
+
         // Construct a solver task that we can start solving.
         let task = rattler_solve::SolverTask {
             specs,
             locked_packages,
             virtual_packages,
             channel_priority,
-            ..rattler_solve::SolverTask::from_iter(&available_packages)
+            ..rattler_solve::SolverTask::from_iter(solvable_records)
         };

         // Solve the task
-        resolvo::Solver.solve(task).into_diagnostic()
+        let solved = resolvo::Solver.solve(task).into_diagnostic()?;
+
+        Ok(solved
+            .into_iter()
+            .map(|record| {
+                url_to_source_package.remove(&record.url).map_or_else(
+                    || PixiRecord::Binary(record),
+                    |(source_record, _repodata_record)| PixiRecord::Source(source_record),
+                )
+            })
+            .collect_vec())
     })
     .await
     .unwrap_or_else(|e| match e.try_into_panic() {
@@ -34,3 +83,17 @@
         Err(_err) => Err(miette::miette!("cancelled")),
     })
 }
+
+fn unique_url(checkout: &SourceCheckout, source: &SourceRecord) -> Url {
+    let mut url = Url::from_directory_path(&checkout.path)
+        .expect("expected source checkout to be a valid url");
+
+    // Add unique identifiers to the URL.
+ url.query_pairs_mut() + .append_pair("name", source.package_record.name.as_source()) + .append_pair("version", &source.package_record.version.as_str()) + .append_pair("build", &source.package_record.build) + .append_pair("subdir", &source.package_record.subdir); + + url +} diff --git a/src/lock_file/resolve/pypi.rs b/src/lock_file/resolve/pypi.rs index f1e69911f..3fb15d52f 100644 --- a/src/lock_file/resolve/pypi.rs +++ b/src/lock_file/resolve/pypi.rs @@ -14,6 +14,7 @@ use indicatif::ProgressBar; use itertools::{Either, Itertools}; use miette::{Context, IntoDiagnostic}; use pixi_manifest::{pypi::pypi_options::PypiOptions, PyPiRequirement, SystemRequirements}; +use pixi_record::PixiRecord; use pixi_uv_conversions::{ as_uv_req, convert_uv_requirements_to_pep508, isolated_names_to_packages, names_to_build_isolation, pypi_options_to_index_locations, to_index_strategy, to_normalize, @@ -23,11 +24,11 @@ use pypi_modifiers::{ pypi_marker_env::determine_marker_environment, pypi_tags::{get_pypi_tags, is_python_record}, }; -use rattler_conda_types::RepoDataRecord; use rattler_digest::{parse_digest_from_hex, Md5, Sha256}; use rattler_lock::{ PackageHashes, PypiPackageData, PypiPackageEnvironmentData, PypiSourceTreeHashable, UrlOrPath, }; +use typed_path::Utf8TypedPathBuf; use url::Url; use uv_client::{Connectivity, FlatIndexClient, RegistryClient, RegistryClientBuilder}; use uv_configuration::{ConfigSettings, Constraints, IndexStrategy, LowerBound, Overrides}; @@ -50,9 +51,8 @@ use uv_types::EmptyInstalledPackages; use crate::{ lock_file::{ - package_identifier, records_by_name::HasNameVersion, - resolve::resolver_provider::CondaResolverProvider, LockedPypiPackages, - PypiPackageIdentifier, PypiRecord, UvResolutionContext, + records_by_name::HasNameVersion, resolve::resolver_provider::CondaResolverProvider, + LockedPypiPackages, PypiPackageIdentifier, PypiRecord, UvResolutionContext, }, uv_reporter::{UvReporter, UvReporterOptions}, }; @@ -137,8 +137,7 @@ fn process_uv_path_url(path_url: &uv_pep508::VerbatimUrl) -> Result; +type CondaPythonPackages = HashMap; /// Prints the number of overridden uv PyPI package requests fn print_overridden_requests(package_requests: &HashMap) { @@ -161,7 +160,7 @@ pub async fn resolve_pypi( pypi_options: &PypiOptions, dependencies: IndexMap>, system_requirements: SystemRequirements, - locked_conda_records: &[RepoDataRecord], + locked_pixi_records: &[PixiRecord], locked_pypi_packages: &[PypiRecord], platform: rattler_conda_types::Platform, pb: &ProgressBar, @@ -173,10 +172,19 @@ pub async fn resolve_pypi( pb.set_message("resolving pypi dependencies"); // Determine which pypi packages are already installed as conda package. - let conda_python_packages = locked_conda_records + let conda_python_packages = locked_pixi_records .iter() .flat_map(|record| { - package_identifier::PypiPackageIdentifier::from_record(record).map_or_else( + let result = match record { + PixiRecord::Binary(repodata_record) => { + PypiPackageIdentifier::from_repodata_record(repodata_record) + } + PixiRecord::Source(source_record) => { + PypiPackageIdentifier::from_package_record(&source_record.package_record) + } + }; + + result.map_or_else( |err| Either::Right(once(Err(err))), |identifiers| { Either::Left(identifiers.into_iter().map(|i| Ok((record.clone(), i)))) @@ -222,9 +230,12 @@ pub async fn resolve_pypi( use pixi_consts::consts::PROJECT_MANIFEST; // Determine the python interpreter that is installed as part of the conda // packages. 
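[Editor's note] The `unique_url` helper added in resolve/conda.rs above distinguishes source records that share a checkout directory by appending name, version, build, and subdir as query parameters, so each synthetic repodata URL is unique for the solver. A runnable sketch of the same idea, assuming only the `url` crate; the concrete values are made up:

use url::Url;

fn unique_source_url(checkout_url: &str, name: &str, version: &str, build: &str, subdir: &str) -> Url {
    let mut url = Url::parse(checkout_url).expect("expected a valid file url");
    // Append the identifying fields as query pairs, as the patch does.
    url.query_pairs_mut()
        .append_pair("name", name)
        .append_pair("version", version)
        .append_pair("build", build)
        .append_pair("subdir", subdir);
    url
}

fn main() {
    let a = unique_source_url("file:///work/pkg/", "foo", "0.1.0", "h000_0", "linux-64");
    let b = unique_source_url("file:///work/pkg/", "foo", "0.1.0", "h000_0", "osx-arm64");
    // Same checkout, different subdir: the URLs no longer collide.
    assert_ne!(a, b);
}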
- let python_record = locked_conda_records + let python_record = locked_pixi_records .iter() - .find(|r| is_python_record(r)) + .find(|r| match r { + PixiRecord::Binary(r) => is_python_record(r), + _ => false, + }) .ok_or_else(|| miette::miette!("could not resolve pypi dependencies because no python interpreter is added to the dependencies of the project.\nMake sure to add a python interpreter to the [dependencies] section of the {PROJECT_MANIFEST}, or run:\n\n\tpixi add python"))?; // Construct the marker environment for the target platform @@ -303,12 +314,13 @@ pub async fn resolve_pypi( }; // Create a shared in-memory index. - // We need two in-memory indexes, one for the build dispatch and one for the resolver. - // because we manually override requests for the resolver, + // We need two in-memory indexes, one for the build dispatch and one for the + // resolver. because we manually override requests for the resolver, // but we don't want to override requests for the build dispatch. // - // The BuildDispatch might resolve or install when building wheels which will be mostly - // with build isolation. In that case we want to use fresh non-tampered requests. + // The BuildDispatch might resolve or install when building wheels which will be + // mostly with build isolation. In that case we want to use fresh + // non-tampered requests. let build_dispatch_in_memory_index = InMemoryIndex::default(); let config_settings = ConfigSettings::default(); @@ -457,8 +469,8 @@ pub async fn resolve_pypi( package_requests: package_requests.clone(), }; - // We need a new in-memory index for the resolver so that it does not conflict with the build dispatch - // one. As we have noted in the comment above. + // We need a new in-memory index for the resolver so that it does not conflict + // with the build dispatch one. As we have noted in the comment above. let resolver_in_memory_index = InMemoryIndex::default(); let resolution = Resolver::new_custom_io( manifest, @@ -579,7 +591,7 @@ fn get_url_or_path( // so we just return the absolute path Err(_) => absolute, }; - UrlOrPath::Path(path) + UrlOrPath::Path(Utf8TypedPathBuf::from(path.to_string_lossy().to_string())) } // This happens when it is relative to the non-standard index // location on disk. @@ -606,7 +618,7 @@ fn get_url_or_path( .join(relative), Err(_) => absolute, }; - UrlOrPath::Path(path) + UrlOrPath::Path(Utf8TypedPathBuf::from(path.to_string_lossy().to_string())) } }; Ok(url) @@ -639,7 +651,7 @@ async fn lock_pypi_packages<'a>( } ResolvedDist::Installable(Dist::Built(dist)) => { - let (url_or_path, hash) = match &dist { + let (location, hash) = match &dist { BuiltDist::Registry(dist) => { let best_wheel = dist.best_wheel(); let hash = parse_hashes_from_hash_vec(&dist.best_wheel().file.hashes) @@ -663,7 +675,12 @@ async fn lock_pypi_packages<'a>( (UrlOrPath::Url(direct_url), None) } BuiltDist::Path(dist) => ( - UrlOrPath::Path(process_uv_path_url(&dist.url).into_diagnostic()?), + UrlOrPath::Path(Utf8TypedPathBuf::from( + process_uv_path_url(&dist.url) + .into_diagnostic()? 
+ .to_string_lossy() + .to_string(), + )), None, ), }; @@ -688,7 +705,7 @@ async fn lock_pypi_packages<'a>( requires_dist: convert_uv_requirements_to_pep508(metadata.requires_dist.iter()) .into_diagnostic()?, editable: false, - url_or_path, + location, hash, } } @@ -712,7 +729,7 @@ async fn lock_pypi_packages<'a>( // Use the precise url if we got it back // otherwise try to construct it from the source - let (url_or_path, hash, editable) = match source { + let (location, hash, editable) = match source { SourceDist::Registry(reg) => { let url_or_path = get_url_or_path(®.index, ®.file.url, abs_project_root) @@ -747,7 +764,9 @@ async fn lock_pypi_packages<'a>( // Create the url for the lock file. This is based on the passed in URL // instead of from the source path to copy the path that was passed in from // the requirement. - let url_or_path = UrlOrPath::Path(given_path); + let url_or_path = UrlOrPath::Path(Utf8TypedPathBuf::from( + given_path.to_string_lossy().to_string(), + )); (url_or_path, hash, false) } SourceDist::Directory(dir) => { @@ -769,7 +788,9 @@ async fn lock_pypi_packages<'a>( // Create the url for the lock file. This is based on the passed in URL // instead of from the source path to copy the path that was passed in from // the requirement. - let url_or_path = UrlOrPath::Path(given_path); + let url_or_path = UrlOrPath::Path(Utf8TypedPathBuf::from( + given_path.to_string_lossy().to_string(), + )); (url_or_path, hash, dir.editable) } }; @@ -783,9 +804,9 @@ async fn lock_pypi_packages<'a>( .map(|r| to_version_specifiers(&r)) .transpose() .into_diagnostic()?, + location, requires_dist: to_requirements(metadata.requires_dist.iter()) .into_diagnostic()?, - url_or_path, hash, editable, } diff --git a/src/lock_file/resolve/resolver_provider.rs b/src/lock_file/resolve/resolver_provider.rs index 3b14acc17..074662fbf 100644 --- a/src/lock_file/resolve/resolver_provider.rs +++ b/src/lock_file/resolve/resolver_provider.rs @@ -8,7 +8,7 @@ use std::{ use futures::{Future, FutureExt}; use pixi_consts::consts; -use rattler_conda_types::RepoDataRecord; +use pixi_record::PixiRecord; use uv_distribution::{ArchiveMetadata, Metadata}; use uv_distribution_filename::SourceDistExtension; use uv_distribution_types::{ @@ -21,12 +21,12 @@ use uv_resolver::{ }; use uv_types::BuildContext; -use crate::lock_file::{records_by_name::HasNameVersion, PypiPackageIdentifier}; +use crate::lock_file::PypiPackageIdentifier; pub(super) struct CondaResolverProvider<'a, Context: BuildContext> { pub(super) fallback: DefaultResolverProvider<'a, Context>, pub(super) conda_python_identifiers: - &'a HashMap, + &'a HashMap, /// Saves the number of requests by the uv solver per package pub(super) package_requests: Rc>>, @@ -40,16 +40,16 @@ impl<'a, Context: BuildContext> ResolverProvider for CondaResolverProvider<'a, C ) -> impl Future + 'io { if let Some((repodata_record, identifier)) = self.conda_python_identifiers.get(package_name) { - let version = repodata_record.version().to_string(); + let version = repodata_record.package_record().version.to_string(); tracing::debug!( "overriding PyPI package version request {}=={}", package_name, version ); - // If we encounter a package that was installed by conda we simply return a single - // available version in the form of a source distribution with the URL of the - // conda package. + // If we encounter a package that was installed by conda we simply return a + // single available version in the form of a source distribution + // with the URL of the conda package. 
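[Editor's note] The override in `CondaResolverProvider` described below means that for a PyPI package already provided by a conda record, the resolver is offered exactly one candidate version, so it can never choose anything else; every other name falls through to the real index. A simplified, hypothetical sketch of that fallback shape (uv's actual `ResolverProvider` API is considerably more involved):

use std::collections::HashMap;

fn available_versions(
    package: &str,
    conda_pinned: &HashMap<String, String>,
    query_index: impl Fn(&str) -> Vec<String>,
) -> Vec<String> {
    if let Some(version) = conda_pinned.get(package) {
        // Single candidate: the solver has no choice but the conda version.
        return vec![version.clone()];
    }
    // Not pinned by conda: fall back to the regular index lookup.
    query_index(package)
}

fn main() {
    let mut pinned = HashMap::new();
    pinned.insert("numpy".to_string(), "1.26.4".to_string());
    let from_index = |_: &str| vec!["2.0.0".to_string(), "1.26.4".to_string()];

    assert_eq!(available_versions("numpy", &pinned, from_index), vec!["1.26.4"]);
    assert_eq!(available_versions("requests", &pinned, from_index).len(), 2);
}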
             //
             // Obviously this is not a valid source distribution but it eases debugging.
@@ -62,7 +62,15 @@
             requires_python: None,
             size: None,
             upload_time_utc_ms: None,
-            url: FileLocation::AbsoluteUrl(UrlString::from(repodata_record.url.clone())),
+            url: match repodata_record {
+                PixiRecord::Binary(repodata_record) => {
+                    FileLocation::AbsoluteUrl(UrlString::from(repodata_record.url.clone()))
+                }
+                PixiRecord::Source(_source) => {
+                    // TODO(baszalmstra): Does this matter??
+                    FileLocation::RelativeUrl("foo".to_string(), "bar".to_string())
+                }
+            },
             yanked: None,
         };
@@ -84,7 +92,8 @@
             SourceDistCompatibility::Compatible(HashComparison::Matched),
         );

-        // Record that we got a request for this package so we can track the number of requests
+        // Record that we got a request for this package so we can track the number of
+        // requests
         self.package_requests
             .borrow_mut()
             .entry(package_name.clone())
diff --git a/src/lock_file/satisfiability.rs b/src/lock_file/satisfiability.rs
index 50208905b..da81b22b4 100644
--- a/src/lock_file/satisfiability.rs
+++ b/src/lock_file/satisfiability.rs
@@ -1,28 +1,30 @@
 use std::{
     borrow::Cow,
     collections::{HashMap, HashSet},
-    fmt::Display,
+    fmt::{Display, Formatter},
     ops::Sub,
     path::{Path, PathBuf},
     str::FromStr,
 };

-use itertools::Itertools;
+use itertools::{Either, Itertools};
 use miette::Diagnostic;
 use pep440_rs::VersionSpecifiers;
+use pixi_glob::{GlobHashCache, GlobHashError, GlobHashKey};
 use pixi_manifest::FeaturesExt;
-use pixi_spec::{PixiSpec, SpecConversionError};
+use pixi_record::{ParseLockFileError, PixiRecord, SourceMismatchError};
+use pixi_spec::{PixiSpec, SourceSpec, SpecConversionError};
 use pixi_uv_conversions::{
     as_uv_req, to_normalize, to_uv_marker_tree, to_uv_version_specifiers, AsPep508Error,
 };
 use pypi_modifiers::pypi_marker_env::determine_marker_environment;
 use rattler_conda_types::{
     ChannelUrl, GenericVirtualPackage, MatchSpec, Matches, NamedChannelOrUrl, ParseChannelError,
-    ParseMatchSpecError, ParseStrictness::Lenient, Platform, RepoDataRecord,
+    ParseMatchSpecError, ParseStrictness::Lenient, Platform,
 };
 use rattler_lock::{
-    ConversionError as RattlerLockConversionError, Package, PypiIndexes, PypiPackageData,
-    PypiSourceTreeHashable, UrlOrPath,
+    LockedPackageRef, PackageHashes, PypiIndexes, PypiPackageData, PypiSourceTreeHashable,
+    UrlOrPath,
 };
 use thiserror::Error;
 use url::Url;
@@ -33,7 +35,7 @@ use uv_pypi_types::{
 };

 use super::{
-    package_identifier::ConversionError, PypiRecord, PypiRecordsByName, RepoDataRecordsByName,
+    package_identifier::ConversionError, PixiRecordsByName, PypiRecord, PypiRecordsByName,
 };
 use crate::project::{grouped_environment::GroupedEnvironment, Environment, HasProjectRef};
@@ -80,11 +82,67 @@ pub struct EditablePackagesMismatch {
     pub unexpected_editable: Vec<pep508_rs::PackageName>,
 }

+#[derive(Debug, Error)]
+pub struct SourceTreeHashMismatch {
+    pub computed: PackageHashes,
+    pub locked: Option<PackageHashes>,
+}
+
+impl Display for SourceTreeHashMismatch {
+    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
+        let computed_hash = self
+            .computed
+            .sha256()
+            .map(|hash| format!("{:x}", hash))
+            .or(self.computed.md5().map(|hash| format!("{:x}", hash)));
+        let locked_hash = self.locked.as_ref().and_then(|hash| {
+            hash.sha256()
+                .map(|hash| format!("{:x}", hash))
+                .or(hash.md5().map(|hash| format!("{:x}", hash)))
+        });
+
+        match (computed_hash, locked_hash) {
+            (None, None) => write!(f, "could not compute a source tree hash"),
+            (Some(computed), None) => {
+                write!(f,
+                    "the computed source tree hash is '{}', but the lock-file does not contain a hash",
+                    computed
+                )
+            }
+            (Some(computed), Some(locked)) => write!(
+                f,
+                "the computed source tree hash is '{}', but the lock-file contains '{}'",
+                computed, locked
+            ),
+            (None, Some(locked)) => write!(
+                f,
+                "could not compute a source tree hash, but the lock-file contains '{}'",
+                locked
+            ),
+        }
+    }
+}
+
 #[derive(Debug, Error, Diagnostic)]
 pub enum PlatformUnsat {
     #[error("the requirement '{0}' could not be satisfied (required by '{1}')")]
     UnsatisfiableMatchSpec(MatchSpec, String),

+    #[error("no package named '{0}' exists (required by '{1}')")]
+    SourcePackageMissing(String, String),
+
+    #[error("required source package '{0}' is locked as binary (required by '{1}')")]
+    RequiredSourceIsBinary(String, String),
+
+    #[error("package '{0}' is locked as source, but is only required as binary")]
+    RequiredBinaryIsSource(String),
+
+    #[error("the locked source package '{0}' does not match the requested source package, {1}")]
+    SourcePackageMismatch(String, SourceMismatchError),
+
+    #[error("failed to convert the requirement for '{0}'")]
+    FailedToConvertRequirement(pep508_rs::PackageName, #[source] Box<ParsedUrlError>),
+
     #[error("the requirement '{0}' could not be satisfied (required by '{1}')")]
     UnsatisfiableRequirement(Box<Requirement>, String),
@@ -104,7 +162,7 @@ pub enum PlatformUnsat {
     MissingPurls,

     #[error("corrupted lock-file entry for '{0}'")]
-    CorruptedEntry(String, RattlerLockConversionError),
+    CorruptedEntry(String, ParseLockFileError),

     #[error("there are more pypi packages in the lock-file than are used by the environment: {}", .0.iter().format(", "))]
     TooManyPypiPackages(Vec<pep508_rs::PackageName>),
@@ -152,11 +210,17 @@ pub enum PlatformUnsat {
     FailedToDetermineSourceTreeHash(pep508_rs::PackageName, std::io::Error),

     #[error("source tree hash for {0} does not match the hash in the lock-file")]
-    SourceTreeHashMismatch(pep508_rs::PackageName),
+    SourceTreeHashMismatch(pep508_rs::PackageName, #[source] SourceTreeHashMismatch),

     #[error("the path '{0}' cannot be canonicalized")]
     FailedToCanonicalizePath(PathBuf, #[source] std::io::Error),

+    #[error(transparent)]
+    FailedToComputeInputHash(#[from] GlobHashError),
+
+    #[error("the input hash for '{0}' ({1}) does not match the hash in the lock-file ({2})")]
+    InputHashMismatch(String, String, String),
+
     #[error("expected pypi package name '{expected}' but found '{found}'")]
     LockedPyPINamesMismatch { expected: String, found: String },
@@ -225,7 +289,7 @@ impl PlatformUnsat {
             | PlatformUnsat::FailedToDetermineSourceTreeHash(_, _)
             | PlatformUnsat::PythonVersionMismatch(_, _, _)
             | PlatformUnsat::EditablePackageMismatch(_)
-            | PlatformUnsat::SourceTreeHashMismatch(_),
+            | PlatformUnsat::SourceTreeHashMismatch(..),
         )
     }
 }
@@ -252,11 +316,16 @@ impl IntoUvRequirement for pep508_rs::Requirement {
         // it is actually a path
         let url = match url_or_path {
             UrlOrPath::Path(path) => {
-                let ext = DistExtension::from_path(path.clone())
-                    .map_err(|e| ParsedUrlError::MissingExtensionPath(path.clone(), e))
+                let ext = DistExtension::from_path(Path::new(path.as_str()))
+                    .map_err(|e| {
+                        ParsedUrlError::MissingExtensionPath(
+                            PathBuf::from_str(path.as_str()).expect("not a path"),
+                            e,
+                        )
+                    })
                     .expect("cannot get extension");
                 let parsed_url = ParsedUrl::Path(ParsedPathUrl::from_source(
-                    path.clone(),
+                    path.as_str().into(),
                     ext,
                     verbatim_url.to_url(),
                 ));
@@ -313,7 +382,7 @@ impl IntoUvRequirement for
pep508_rs::Requirement { /// for the user and developer to figure out what went wrong. pub fn verify_environment_satisfiability( environment: &Environment<'_>, - locked_environment: &rattler_lock::Environment, + locked_environment: rattler_lock::Environment<'_>, ) -> Result<(), EnvironmentUnsat> { let grouped_env = GroupedEnvironment::from(environment.clone()); @@ -347,12 +416,12 @@ pub fn verify_environment_satisfiability( None => { // Mismatch when there should be an index but there is not if locked_environment + .lock_file() .version() .should_pypi_indexes_be_present() && locked_environment - .pypi_packages() - .iter() - .any(|(_platform, packages)| !packages.is_empty()) + .pypi_packages_by_platform() + .any(|(_platform, mut packages)| packages.next().is_some()) { return Err(IndexesMismatch { current: indexes, @@ -387,28 +456,29 @@ pub fn verify_environment_satisfiability( /// This function returns a [`PlatformUnsat`] error if a verification issue /// occurred. The [`PlatformUnsat`] error should contain enough information for /// the user and developer to figure out what went wrong. -#[allow(clippy::result_large_err)] -pub fn verify_platform_satisfiability( +pub async fn verify_platform_satisfiability( environment: &Environment<'_>, - locked_environment: &rattler_lock::Environment, + locked_environment: rattler_lock::Environment<'_>, platform: Platform, project_root: &Path, -) -> Result<(), PlatformUnsat> { + glob_hash_cache: GlobHashCache, +) -> Result<(), Box> { // Convert the lock file into a list of conda and pypi packages - let mut conda_packages: Vec = Vec::new(); + let mut pixi_records: Vec = Vec::new(); let mut pypi_packages: Vec = Vec::new(); for package in locked_environment.packages(platform).into_iter().flatten() { match package { - Package::Conda(conda) => { - let url = conda.url().clone(); - conda_packages.push(conda.try_into().map_err( - |e: RattlerLockConversionError| { - PlatformUnsat::CorruptedEntry(url.to_string(), e) - }, - )?); + LockedPackageRef::Conda(conda) => { + let url = conda.location().clone(); + pixi_records.push( + conda + .clone() + .try_into() + .map_err(|e| PlatformUnsat::CorruptedEntry(url.to_string(), e))?, + ); } - Package::Pypi(pypi) => { - pypi_packages.push((pypi.data().package.clone(), pypi.data().environment.clone())); + LockedPackageRef::Pypi(pypi, env) => { + pypi_packages.push((pypi.clone(), env.clone())); } } } @@ -418,40 +488,47 @@ pub fn verify_platform_satisfiability( // if all conda packages have empty purls if environment.has_pypi_dependencies() && pypi_packages.is_empty() - && !conda_packages + && pixi_records .iter() - .any(|record| record.package_record.purls.is_some()) + .filter_map(PixiRecord::as_binary) + .all(|record| record.package_record.purls.is_none()) { { - return Err(PlatformUnsat::MissingPurls); + return Err(Box::new(PlatformUnsat::MissingPurls)); } } // Create a lookup table from package name to package record. 
Returns an error
    // if we find a duplicate entry for a record
-    let repodata_records_by_name = match RepoDataRecordsByName::from_unique_iter(conda_packages) {
-        Ok(conda_packages) => conda_packages,
+    let pixi_records_by_name = match PixiRecordsByName::from_unique_iter(pixi_records) {
+        Ok(pixi_records) => pixi_records,
         Err(duplicate) => {
-            return Err(PlatformUnsat::DuplicateEntry(
-                duplicate.package_record.name.as_source().to_string(),
-            ))
+            return Err(Box::new(PlatformUnsat::DuplicateEntry(
+                duplicate.package_record().name.as_source().to_string(),
+            )))
         }
     };

     // Create a lookup table from package name to package record. Returns an error
     // if we find a duplicate entry for a record
     let pypi_records_by_name = match PypiRecordsByName::from_unique_iter(pypi_packages) {
-        Ok(conda_packages) => conda_packages,
-        Err(duplicate) => return Err(PlatformUnsat::DuplicateEntry(duplicate.0.name.to_string())),
+        Ok(pypi_packages) => pypi_packages,
+        Err(duplicate) => {
+            return Err(Box::new(PlatformUnsat::DuplicateEntry(
+                duplicate.0.name.to_string(),
+            )))
+        }
     };

     verify_package_platform_satisfiability(
         environment,
-        &repodata_records_by_name,
+        &pixi_records_by_name,
         &pypi_records_by_name,
         platform,
         project_root,
+        glob_hash_cache,
     )
+    .await
 }

 #[allow(clippy::large_enum_variant)]
@@ -472,7 +549,7 @@ pub(crate) fn pypi_satifisfies_editable(
     spec: &uv_pypi_types::Requirement,
     locked_data: &PypiPackageData,
     project_root: &Path,
-) -> Result<(), PlatformUnsat> {
+) -> Result<(), Box<PlatformUnsat>> {
     // We don't match on spec.is_editable() != locked_data.editable
     // as it will happen later in verify_package_platform_satisfiability
     // TODO: could be a potential refactoring opportunity
@@ -486,27 +563,34 @@ pub(crate) fn pypi_satifisfies_editable(
                 "editable requirement cannot be from registry, url, git or path (non-directory)"
             )
         }
-        RequirementSource::Directory { install_path, .. } => match &locked_data.url_or_path {
+        RequirementSource::Directory { install_path, .. } => match &locked_data.location {
             // If we have an url requirement locked, but the editable is requested, this does not
            // satisfy
-            UrlOrPath::Url(url) => Err(PlatformUnsat::EditablePackageIsUrl(
+            UrlOrPath::Url(url) => Err(Box::new(PlatformUnsat::EditablePackageIsUrl(
                 spec.name.clone(),
                 url.to_string(),
-            )),
+            ))),
             UrlOrPath::Path(path) => {
-                // sometimes the path is relative, so we need to join it with the project root
-                let absolute_path = project_root.join(path);
-                // absolute path can also have symlinks in it, so we need to canonicalize them
-                let real_absolute_path = dunce::canonicalize(&absolute_path).map_err(|e| {
-                    PlatformUnsat::FailedToCanonicalizePath(absolute_path.clone(), e)
+                // Most of the time the path will be relative to the project root
+                let absolute_path = if path.is_absolute() {
+                    Cow::Borrowed(Path::new(path.as_str()))
+                } else {
+                    Cow::Owned(project_root.join(Path::new(path.as_str())))
+                };
+                // Absolute paths can have symbolic links, so we canonicalize
+                let canonicalized_path = dunce::canonicalize(&absolute_path).map_err(|e| {
+                    Box::new(PlatformUnsat::FailedToCanonicalizePath(
+                        absolute_path.to_path_buf(),
+                        e,
+                    ))
                 })?;
-                if &real_absolute_path != install_path {
-                    return Err(PlatformUnsat::EditablePackagePathMismatch(
+                if &canonicalized_path != install_path {
+                    return Err(Box::new(PlatformUnsat::EditablePackagePathMismatch(
                         spec.name.clone(),
-                        absolute_path.clone(),
+                        absolute_path.into_owned(),
                         install_path.clone(),
-                    ));
+                    )));
                 }
                 Ok(())
             }
@@ -526,12 +610,13 @@ pub(crate) fn pypi_satifisfies_requirement(
     spec: &uv_pypi_types::Requirement,
     locked_data: &PypiPackageData,
     project_root: &Path,
-) -> Result<(), PlatformUnsat> {
+) -> Result<(), Box<PlatformUnsat>> {
     if spec.name.to_string() != locked_data.name.to_string() {
         return Err(PlatformUnsat::LockedPyPINamesMismatch {
             expected: spec.name.to_string(),
             found: locked_data.name.to_string(),
-        });
+        }
+        .into());
     }

     match &spec.source {
@@ -548,16 +633,17 @@ pub(crate) fn pypi_satifisfies_requirement(
                     name: spec.name.clone().to_string(),
                     specifiers: specifier.clone().to_string(),
                     version: version_string,
-                })
+                }
+                .into())
             }
         }
         RequirementSource::Url { url: spec_url, .. } => {
-            if let UrlOrPath::Url(locked_url) = &locked_data.url_or_path {
+            if let UrlOrPath::Url(locked_url) = &locked_data.location {
                 // Url may not start with git, and must start with direct+
                 if locked_url.as_str().starts_with("git+")
                     || !locked_url.as_str().starts_with("direct+")
                 {
-                    return Err(PlatformUnsat::LockedPyPIMalformedUrl(locked_url.clone()));
+                    return Err(PlatformUnsat::LockedPyPIMalformedUrl(locked_url.clone()).into());
                 }
                 let locked_url = locked_url
                     .as_ref()
@@ -572,12 +658,11 @@ pub(crate) fn pypi_satifisfies_requirement(
                         name: spec.name.clone().to_string(),
                         spec_url: spec_url.raw().to_string(),
                         lock_url: locked_url.to_string(),
-                    });
+                    }
+                    .into());
                 }
             }
-            Err(PlatformUnsat::LockedPyPIRequiresDirectUrl(
-                spec.name.to_string(),
-            ))
+            Err(PlatformUnsat::LockedPyPIRequiresDirectUrl(spec.name.to_string()).into())
         }
         RequirementSource::Git {
             repository,
@@ -585,7 +670,7 @@ pub(crate) fn pypi_satifisfies_requirement(
             precise: _precise,
             ..
} => { - match &locked_data.url_or_path { + match &locked_data.location { UrlOrPath::Url(url) => { if let Ok(locked_git_url) = ParsedGitUrl::try_from(url.clone()) { let repo_is_same = locked_git_url.url.repository() == repository; @@ -594,7 +679,8 @@ pub(crate) fn pypi_satifisfies_requirement( name: spec.name.clone().to_string(), spec_url: repository.to_string(), lock_url: locked_git_url.url.repository().to_string(), - }); + } + .into()); } // If the spec does not specify a revision than any will do // E.g `git.com/user/repo` is the same as `git.com/user/repo@adbdd` @@ -618,7 +704,8 @@ pub(crate) fn pypi_satifisfies_requirement( name: spec.name.clone().to_string(), expected_ref: branch_or_tag.to_string(), found_ref: sha.to_string(), - }); + } + .into()); } } } @@ -632,59 +719,62 @@ pub(crate) fn pypi_satifisfies_requirement( name: spec.name.clone().to_string(), expected_ref: reference.to_string(), found_ref: locked_git_url.url.reference().to_string(), - }); + } + .into()); } } Err(PlatformUnsat::LockedPyPIRequiresGitUrl( spec.name.to_string(), url.to_string(), - )) - } - UrlOrPath::Path(path) => { - return Err(PlatformUnsat::LockedPyPIRequiresGitUrl( - spec.name.to_string(), - path.to_string_lossy().to_string(), - )) + ) + .into()) } + UrlOrPath::Path(path) => Err(PlatformUnsat::LockedPyPIRequiresGitUrl( + spec.name.to_string(), + path.to_string(), + ) + .into()), } } RequirementSource::Path { install_path, .. } | RequirementSource::Directory { install_path, .. } => { - if let UrlOrPath::Path(locked_path) = &locked_data.url_or_path { + if let UrlOrPath::Path(locked_path) = &locked_data.location { + let locked_path = Path::new(locked_path.as_str()); // sometimes the path is relative, so we need to join it with the project root if &project_root.join(locked_path) != install_path { return Err(PlatformUnsat::LockedPyPIPathMismatch { name: spec.name.clone().to_string(), expected_path: install_path.clone(), found_path: project_root.join(locked_path), - }); + } + .into()); } return Ok(()); } - Err(PlatformUnsat::LockedPyPIRequiresPath(spec.name.to_string())) + Err(PlatformUnsat::LockedPyPIRequiresPath(spec.name.to_string()).into()) } } } -#[allow(clippy::result_large_err)] -pub(crate) fn verify_package_platform_satisfiability( +pub(crate) async fn verify_package_platform_satisfiability( environment: &Environment<'_>, - locked_conda_packages: &RepoDataRecordsByName, + locked_pixi_records: &PixiRecordsByName, locked_pypi_environment: &PypiRecordsByName, platform: Platform, project_root: &Path, -) -> Result<(), PlatformUnsat> { + input_hash_cache: GlobHashCache, +) -> Result<(), Box> { let channel_config = environment.project().channel_config(); // Determine the dependencies requested by the environment - let conda_specs = environment - .dependencies(None, Some(platform)) + let environment_dependencies = environment + .combined_dependencies(Some(platform)) .into_specs() .map(|(package_name, spec)| Dependency::Input(package_name, spec, "".into())) .collect_vec(); - if conda_specs.is_empty() && !locked_conda_packages.is_empty() { - return Err(PlatformUnsat::TooManyCondaPackages); + if environment_dependencies.is_empty() && !locked_pixi_records.is_empty() { + return Err(Box::new(PlatformUnsat::TooManyCondaPackages)); } // Transform from PyPiPackage name into UV Requirement type @@ -693,9 +783,12 @@ pub(crate) fn verify_package_platform_satisfiability( .iter() .flat_map(|(name, reqs)| { reqs.iter().map(move |req| { - Ok::(Dependency::PyPi( + Ok::>(Dependency::PyPi( as_uv_req(req, name.as_source(), 
project_root).map_err(|e| {
-                    PlatformUnsat::AsPep508Error(name.as_normalized().clone(), e)
+                    Box::new(PlatformUnsat::AsPep508Error(
+                        name.as_normalized().clone(),
+                        e,
+                    ))
                 })?,
                 "".into(),
             ))
@@ -704,9 +797,9 @@
         .collect::<Result<Vec<_>, _>>()?;

     if pypi_requirements.is_empty() && !locked_pypi_environment.is_empty() {
-        return Err(PlatformUnsat::TooManyPypiPackages(
+        return Err(Box::new(PlatformUnsat::TooManyPypiPackages(
             locked_pypi_environment.names().cloned().collect(),
-        ));
+        )));
     }

     // Create a list of virtual packages by name
@@ -721,13 +814,17 @@
     // refers to the locked python interpreter, it might not match the specs
     // from the environment. That is ok because we will find that out when we
     // check all the records.
-    let python_interpreter_record = locked_conda_packages.python_interpreter_record();
+    let python_interpreter_record = locked_pixi_records.python_interpreter_record();

     // Determine the marker environment from the python interpreter package.
     let marker_environment = python_interpreter_record
         .map(|interpreter| determine_marker_environment(platform, &interpreter.package_record))
         .transpose()
-        .map_err(|err| PlatformUnsat::FailedToDetermineMarkerEnvironment(err.into()));
+        .map_err(|err| {
+            Box::new(PlatformUnsat::FailedToDetermineMarkerEnvironment(
+                err.into(),
+            ))
+        });

     // We cannot determine the marker environment, for example if installing
     // `wasm32` dependencies. However, it also doesn't really matter if we don't
@@ -744,7 +841,10 @@
     };

     // Determine the pypi packages provided by the locked conda packages.
-    let locked_conda_pypi_packages = locked_conda_packages.by_pypi_name()?;
+    let locked_conda_pypi_packages = locked_pixi_records
+        .by_pypi_name()
+        .map_err(From::from)
+        .map_err(Box::new)?;

     // Keep a list of all conda packages that we have already visited
     let mut conda_packages_visited = HashSet::new();
@@ -760,16 +860,35 @@
     // Iterate over all packages. First iterate over all conda matchspecs and then
     // over all pypi requirements. We want to ensure we always check the conda
     // packages first.
-    let mut conda_queue = conda_specs;
+    let mut conda_queue = environment_dependencies;
     let mut pypi_queue = pypi_requirements;
     let mut expected_editable_pypi_packages = HashSet::new();
+    let mut expected_conda_source_dependencies = HashSet::new();
     while let Some(package) = conda_queue.pop().or_else(|| pypi_queue.pop()) {
         // Determine the package that matches the requirement of matchspec.
         let found_package = match package {
             Dependency::Input(name, spec, source) => {
-                let spec = match spec.try_into_nameless_match_spec(&channel_config) {
-                    Ok(Some(spec)) => MatchSpec::from_nameless(spec, Some(name)),
-                    Ok(None) => unimplemented!("source dependencies are not yet implemented"),
+                match spec.into_source_or_binary(&channel_config) {
+                    Ok(Either::Left(source_spec)) => {
+                        expected_conda_source_dependencies.insert(name.clone());
+                        find_matching_source_package(
+                            locked_pixi_records,
+                            name,
+                            source_spec,
+                            source,
+                        )?
+                    }
+                    Ok(Either::Right(spec)) => {
+                        match find_matching_package(
+                            locked_pixi_records,
+                            &virtual_packages,
+                            MatchSpec::from_nameless(spec, Some(name)),
+                            source,
+                        )?
{ + Some(pkg) => pkg, + None => continue, + } + } Err(e) => { let parse_channel_err: ParseMatchSpecError = match e { SpecConversionError::NonAbsoluteRootDir(p) => { @@ -781,23 +900,17 @@ pub(crate) fn verify_package_platform_satisfiability( SpecConversionError::InvalidPath(p) => { ParseChannelError::InvalidPath(p).into() } - SpecConversionError::InvalidChannel(c) => c.into(), + SpecConversionError::InvalidChannel(p) => p.into(), }; - return Err(PlatformUnsat::FailedToParseMatchSpec( + return Err(Box::new(PlatformUnsat::FailedToParseMatchSpec( name.as_source().to_string(), parse_channel_err, - )); + ))); } - }; - match find_matching_package(locked_conda_packages, &virtual_packages, spec, source)? - { - Some(pkg) => pkg, - None => continue, } } Dependency::Conda(spec, source) => { - match find_matching_package(locked_conda_packages, &virtual_packages, spec, source)? - { + match find_matching_package(locked_pixi_records, &virtual_packages, spec, source)? { Some(pkg) => pkg, None => continue, } @@ -808,34 +921,47 @@ pub(crate) fn verify_package_platform_satisfiability( locked_conda_pypi_packages.get(&requirement.name) { if requirement.is_editable() { - return Err(PlatformUnsat::EditableDependencyOnCondaInstalledPackage( - requirement.name.clone(), - Box::new(requirement.source), + return Err(Box::new( + PlatformUnsat::EditableDependencyOnCondaInstalledPackage( + requirement.name.clone(), + Box::new(requirement.source), + ), )); } if matches!(requirement.source, RequirementSource::Url { .. }) { - return Err(PlatformUnsat::DirectUrlDependencyOnCondaInstalledPackage( - requirement.name.clone(), + return Err(Box::new( + PlatformUnsat::DirectUrlDependencyOnCondaInstalledPackage( + requirement.name.clone(), + ), )); } if matches!(requirement.source, RequirementSource::Git { .. }) { - return Err(PlatformUnsat::GitDependencyOnCondaInstalledPackage( - requirement.name.clone(), + return Err(Box::new( + PlatformUnsat::GitDependencyOnCondaInstalledPackage( + requirement.name.clone(), + ), )); } - if !identifier.satisfies(&requirement)? { + if !identifier + .satisfies(&requirement) + .map_err(From::from) + .map_err(Box::new)? + { // The record does not match the spec, the lock-file is inconsistent. - return Err(PlatformUnsat::CondaUnsatisfiableRequirement( + return Err(Box::new(PlatformUnsat::CondaUnsatisfiableRequirement( Box::new(requirement.clone()), source.into_owned(), - )); + ))); } FoundPackage::Conda(*repodata_idx) } else if let Some(idx) = locked_pypi_environment.index_by_name( - &to_normalize(&requirement.name).map_err(ConversionError::NameConversion)?, + &to_normalize(&requirement.name) + .map_err(ConversionError::NameConversion) + .map_err(From::from) + .map_err(Box::new)?, ) { let record = &locked_pypi_environment.records[idx]; if requirement.is_editable() { @@ -853,10 +979,10 @@ pub(crate) fn verify_package_platform_satisfiability( } } else { // The record does not match the spec, the lock-file is inconsistent. 
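For context on the pervasive `Box::new(...)` wrapping in these hunks: returning `Result<(), Box<PlatformUnsat>>` keeps the `Ok` path at a single machine word instead of the full width of the error enum, which is also why the old `#[allow(clippy::result_large_err)]` can be dropped. A minimal sketch of the effect; `BigError` is a hypothetical stand-in for a wide enum like `PlatformUnsat`:

```rust
use std::mem::size_of;

// Hypothetical stand-in for a wide error enum such as `PlatformUnsat`.
struct BigError([u8; 256]);

fn main() {
    // With the error stored inline, every `Result` is as large as the error...
    println!("inline: {} bytes", size_of::<Result<(), BigError>>());
    // ...while a boxed error is a single non-null pointer, which the niche
    // optimization folds into one machine word.
    println!("boxed:  {} bytes", size_of::<Result<(), Box<BigError>>>());
}
```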
- return Err(PlatformUnsat::UnsatisfiableRequirement( + return Err(Box::new(PlatformUnsat::UnsatisfiableRequirement( Box::new(requirement), source.into_owned(), - )); + ))); } } }; @@ -870,13 +996,20 @@ pub(crate) fn verify_package_platform_satisfiability( continue; } - let record = &locked_conda_packages.records[idx]; - for depends in &record.package_record.depends { + let record = &locked_pixi_records.records[idx]; + for depends in &record.package_record().depends { let spec = MatchSpec::from_str(depends.as_str(), Lenient) .map_err(|e| PlatformUnsat::FailedToParseMatchSpec(depends.clone(), e))?; conda_queue.push(Dependency::Conda( spec, - Cow::Owned(record.file_name.clone()), + match record { + PixiRecord::Binary(record) => Cow::Owned(record.file_name.to_string()), + PixiRecord::Source(record) => Cow::Owned(format!( + "{} @ {}", + record.package_record.name.as_source(), + &record.source + )), + }, )); } } @@ -885,19 +1018,21 @@ pub(crate) fn verify_package_platform_satisfiability( // If there is no marker environment there is no python version let Some(marker_environment) = marker_environment.as_ref() else { - return Err(PlatformUnsat::MissingPythonInterpreter); + return Err(Box::new(PlatformUnsat::MissingPythonInterpreter)); }; if pypi_packages_visited.insert(idx) { // If this is a path-based package we need to check if the source tree hash still // matches, and if it is a directory - if let UrlOrPath::Path(path) = &record.0.url_or_path { - if path.is_dir() { - let path = - dunce::canonicalize(project_root.join(path)).map_err(|e| { - PlatformUnsat::FailedToCanonicalizePath(path.clone(), e) - })?; - let hashable = PypiSourceTreeHashable::from_directory(path) + if let UrlOrPath::Path(path) = &record.0.location { + let absolute_path = if path.is_absolute() { + Cow::Borrowed(Path::new(path.as_str())) + } else { + Cow::Owned(project_root.join(Path::new(path.as_str()))) + }; + + if absolute_path.is_dir() { + let hashable = PypiSourceTreeHashable::from_directory(&absolute_path) .map_err(|e| { PlatformUnsat::FailedToDetermineSourceTreeHash( record.0.name.clone(), @@ -905,10 +1040,14 @@ pub(crate) fn verify_package_platform_satisfiability( ) })? .hash(); - if Some(hashable) != record.0.hash { - return Err(PlatformUnsat::SourceTreeHashMismatch( + if Some(&hashable) != record.0.hash.as_ref() { + return Err(Box::new(PlatformUnsat::SourceTreeHashMismatch( record.0.name.clone(), - )); + SourceTreeHashMismatch { + computed: hashable, + locked: record.0.hash.clone(), + }, + ))); } } } @@ -920,18 +1059,22 @@ pub(crate) fn verify_package_platform_satisfiability( ) .expect("cannot parse version"); if !python_version.contains(&marker_version) { - return Err(PlatformUnsat::PythonVersionMismatch( + return Err(Box::new(PlatformUnsat::PythonVersionMismatch( record.0.name.clone(), python_version.clone(), marker_version.into(), - )); + ))); } } } // Add all the requirements of the package to the queue. for requirement in &record.0.requires_dist { - let requirement = requirement.clone().into_uv_requirement()?; + let requirement = requirement + .clone() + .into_uv_requirement() + .map_err(From::from) + .map_err(Box::new)?; // Skip this requirement if it does not apply.
if !requirement.evaluate_markers(Some(marker_environment), &extras) { continue; @@ -952,12 +1095,12 @@ pub(crate) fn verify_package_platform_satisfiability( } // Check if all locked packages have also been visited - if conda_packages_visited.len() != locked_conda_packages.len() { - return Err(PlatformUnsat::TooManyCondaPackages); + if conda_packages_visited.len() != locked_pixi_records.len() { + return Err(Box::new(PlatformUnsat::TooManyCondaPackages)); } if pypi_packages_visited.len() != locked_pypi_environment.len() { - return Err(PlatformUnsat::TooManyPypiPackages( + return Err(Box::new(PlatformUnsat::TooManyPypiPackages( locked_pypi_environment .names() .enumerate() @@ -969,7 +1112,7 @@ pub(crate) fn verify_package_platform_satisfiability( } }) .collect(), - )); + ))); } // Check if all packages that should be editable are actually editable and vice @@ -985,12 +1128,67 @@ pub(crate) fn verify_package_platform_satisfiability( let expected_editable = expected_editable_pypi_packages.sub(&locked_editable_packages); let unexpected_editable = locked_editable_packages.sub(&expected_editable_pypi_packages); if !expected_editable.is_empty() || !unexpected_editable.is_empty() { - return Err(PlatformUnsat::EditablePackageMismatch( + return Err(Box::new(PlatformUnsat::EditablePackageMismatch( EditablePackagesMismatch { expected_editable: expected_editable.into_iter().sorted().collect(), unexpected_editable: unexpected_editable.into_iter().sorted().collect(), }, - )); + ))); + } + + // Check if all records that are source records should actually be source + // records. If there are no source specs in the environment for a particular + // package then the package must be a binary package. + for record in locked_pixi_records + .records + .iter() + .filter_map(PixiRecord::as_source) + { + if !expected_conda_source_dependencies.contains(&record.package_record.name) { + return Err(Box::new(PlatformUnsat::RequiredBinaryIsSource( + record.package_record.name.as_source().to_string(), + ))); + } + } + + // Check if all source packages are still up-to-date. + for source_record in locked_pixi_records + .records + .iter() + .filter_map(PixiRecord::as_source) + { + let Some(path_record) = source_record.source.as_path() else { + continue; + }; + + let Some(locked_input_hash) = &source_record.input_hash else { + continue; + }; + + let source_dir = path_record.resolve(project_root); + let source_dir = source_dir.canonicalize().map_err(|e| { + Box::new(PlatformUnsat::FailedToCanonicalizePath( + path_record.path.as_str().into(), + e, + )) + })?; + + let input_hash = input_hash_cache + .compute_hash(GlobHashKey { + root: source_dir, + globs: locked_input_hash.globs.clone(), + }) + .await + .map_err(PlatformUnsat::FailedToComputeInputHash) + .map_err(Box::new)?; + + if input_hash.hash != locked_input_hash.hash { + return Err(Box::new(PlatformUnsat::InputHashMismatch( + path_record.path.to_string(), + format!("{:x}", input_hash.hash), + format!("{:x}", locked_input_hash.hash), + ))); + } } Ok(()) @@ -1001,44 +1199,43 @@ enum FoundPackage { PyPi(usize, Vec<ExtraName>), } -#[allow(clippy::result_large_err)] fn find_matching_package( - locked_conda_packages: &RepoDataRecordsByName, + locked_pixi_records: &PixiRecordsByName, virtual_packages: &HashMap<rattler_conda_types::PackageName, GenericVirtualPackage>, spec: MatchSpec, source: Cow<str>, -) -> Result<Option<FoundPackage>, PlatformUnsat> { +) -> Result<Option<FoundPackage>, Box<PlatformUnsat>> { let found_package = match &spec.name { None => { // No name means we have to find any package that matches the spec.
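The input-hash verification just above is what keeps locked source packages honest: it recomputes a digest over the files selected by `locked_input_hash.globs` and compares it with the hash stored in the lock-file. A rough sketch of the idea using only the standard library, with an explicit file list standing in for glob expansion (names here are illustrative, not pixi's API):

```rust
use std::collections::{hash_map::DefaultHasher, BTreeMap};
use std::hash::{Hash, Hasher};
use std::{fs, io, path::Path};

// Hash file paths and contents in sorted order so the digest is
// deterministic across runs; any content change flips the hash and
// marks the locked source package as out of date.
fn input_hash(root: &Path, files: &[&str]) -> io::Result<u64> {
    let mut entries = BTreeMap::new(); // BTreeMap iterates in key order
    for rel in files {
        entries.insert(rel.to_string(), fs::read(root.join(rel))?);
    }
    let mut hasher = DefaultHasher::new();
    for (path, bytes) in &entries {
        path.hash(&mut hasher);
        bytes.hash(&mut hasher);
    }
    Ok(hasher.finish())
}
```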
- match locked_conda_packages + match locked_pixi_records .records .iter() - .position(|record| record.matches(&spec)) + .position(|record| spec.matches(record)) { None => { // No records match the spec. - return Err(PlatformUnsat::UnsatisfiableMatchSpec( + return Err(Box::new(PlatformUnsat::UnsatisfiableMatchSpec( spec, source.into_owned(), - )); + ))); } Some(idx) => FoundPackage::Conda(idx), } } Some(name) => { - match locked_conda_packages + match locked_pixi_records .index_by_name(name) - .map(|idx| (idx, &locked_conda_packages.records[idx])) + .map(|idx| (idx, &locked_pixi_records.records[idx])) { - Some((idx, record)) if record.matches(&spec) => FoundPackage::Conda(idx), + Some((idx, record)) if spec.matches(record) => FoundPackage::Conda(idx), Some(_) => { // The record does not match the spec, the lock-file is // inconsistent. - return Err(PlatformUnsat::UnsatisfiableMatchSpec( + return Err(Box::new(PlatformUnsat::UnsatisfiableMatchSpec( spec, source.into_owned(), - )); + ))); } None => { // Check if there is a virtual package by that name @@ -1050,18 +1247,18 @@ fn find_matching_package( } else { // The record does not match the spec, the lock-file is // inconsistent. - return Err(PlatformUnsat::UnsatisfiableMatchSpec( + return Err(Box::new(PlatformUnsat::UnsatisfiableMatchSpec( spec, source.into_owned(), - )); + ))); } } else { // The record does not match the spec, the lock-file is // inconsistent. - return Err(PlatformUnsat::UnsatisfiableMatchSpec( + return Err(Box::new(PlatformUnsat::UnsatisfiableMatchSpec( spec, source.into_owned(), - )); + ))); } } } @@ -1071,26 +1268,42 @@ fn find_matching_package( Ok(Some(found_package)) } -trait MatchesMatchspec { - fn matches(&self, spec: &MatchSpec) -> bool; -} +fn find_matching_source_package( + locked_pixi_records: &PixiRecordsByName, + name: rattler_conda_types::PackageName, + source_spec: SourceSpec, + source: Cow, +) -> Result> { + // Find the package that matches the source spec. + let Some((idx, package)) = locked_pixi_records + .index_by_name(&name) + .map(|idx| (idx, &locked_pixi_records.records[idx])) + else { + // The record does not match the spec, the lock-file is + // inconsistent. 
+ return Err(Box::new(PlatformUnsat::SourcePackageMissing( + name.as_source().to_string(), + source.into_owned(), + ))); + }; -impl MatchesMatchspec for RepoDataRecord { - fn matches(&self, spec: &MatchSpec) -> bool { - if !spec.matches(&self.package_record) { - return false; - } + let PixiRecord::Source(source_package) = package else { + return Err(Box::new(PlatformUnsat::RequiredSourceIsBinary( + name.as_source().to_string(), + source.into_owned(), + ))); + }; - // TODO: We should really move this into rattler - // Check the channel - if let Some(channel) = &spec.channel { - if !self.url.as_str().starts_with(channel.base_url.as_str()) { - return false; - } - } + source_package + .source + .satisfies(&source_spec) + .map_err(|e| PlatformUnsat::SourcePackageMismatch(name.as_source().to_string(), e))?; - true - } + Ok(FoundPackage::Conda(idx)) +} + +trait MatchesMatchspec { + fn matches(&self, spec: &MatchSpec) -> bool; } impl MatchesMatchspec for GenericVirtualPackage { @@ -1193,17 +1406,18 @@ impl Display for EditablePackagesMismatch { #[cfg(test)] mod tests { + use insta::Settings; + use miette::{IntoDiagnostic, NarratableReportHandler}; + use pep440_rs::Version; + use rattler_lock::LockFile; + use rstest::rstest; + use std::{ ffi::OsStr, path::{Component, PathBuf}, str::FromStr, }; - use miette::{IntoDiagnostic, NarratableReportHandler}; - use pep440_rs::Version; - use rattler_lock::LockFile; - use rstest::rstest; - use super::*; use crate::Project; @@ -1221,7 +1435,7 @@ mod tests { PlatformUnsat(String, Platform, #[source] PlatformUnsat), } - fn verify_lockfile_satisfiability( + async fn verify_lockfile_satisfiability( project: &Project, lock_file: &LockFile, ) -> Result<(), LockfileUnsat> { @@ -1229,21 +1443,27 @@ mod tests { let locked_env = lock_file .environment(env.name().as_str()) .ok_or_else(|| LockfileUnsat::EnvironmentMissing(env.name().to_string()))?; - verify_environment_satisfiability(&env, &locked_env) + verify_environment_satisfiability(&env, locked_env) .map_err(|e| LockfileUnsat::Environment(env.name().to_string(), e))?; for platform in env.platforms() { - verify_platform_satisfiability(&env, &locked_env, platform, project.root()) - .map_err(|e| { - LockfileUnsat::PlatformUnsat(env.name().to_string(), platform, e) - })?; + verify_platform_satisfiability( + &env, + locked_env, + platform, + project.root(), + Default::default(), + ) + .await + .map_err(|e| LockfileUnsat::PlatformUnsat(env.name().to_string(), platform, *e))?; } } Ok(()) } - #[rstest] - fn test_good_satisfiability( + #[rstest::rstest] + #[tokio::test] + async fn test_good_satisfiability( #[files("tests/data/satisfiability/*/pixi.toml")] manifest_path: PathBuf, ) { // TODO: skip this test on windows @@ -1259,41 +1479,59 @@ mod tests { let project = Project::from_path(&manifest_path).unwrap(); let lock_file = LockFile::from_path(&project.lock_file_path()).unwrap(); - match verify_lockfile_satisfiability(&project, &lock_file).into_diagnostic() { + match verify_lockfile_satisfiability(&project, &lock_file) + .await + .into_diagnostic() + { Ok(()) => {} Err(e) => panic!("{e:?}"), } } #[rstest] + #[tokio::test] #[cfg_attr(not(feature = "slow_integration_tests"), ignore)] - fn test_example_satisfiability(#[files("examples/*/pixi.toml")] manifest_path: PathBuf) { + async fn test_example_satisfiability(#[files("examples/*/pixi.toml")] manifest_path: PathBuf) { let project = Project::from_path(&manifest_path).unwrap(); let lock_file = LockFile::from_path(&project.lock_file_path()).unwrap(); - match 
verify_lockfile_satisfiability(&project, &lock_file).into_diagnostic() { + match verify_lockfile_satisfiability(&project, &lock_file) + .await + .into_diagnostic() + { Ok(()) => {} Err(e) => panic!("{e:?}"), } } - #[test] - fn test_failing_satisiability() { + #[rstest] + #[tokio::test] + async fn test_failing_satisiability( + #[files("tests/data/non-satisfiability/*/pixi.toml")] manifest_path: PathBuf, + ) { let report_handler = NarratableReportHandler::new().with_cause_chain(); - insta::glob!( - "../../tests/data/non-satisfiability", - "*/pixi.toml", - |path| { - let project = Project::from_path(path).unwrap(); - let lock_file = LockFile::from_path(&project.lock_file_path()).unwrap(); - let err = verify_lockfile_satisfiability(&project, &lock_file) - .expect_err("expected failing satisfiability"); - - let mut s = String::new(); - report_handler.render_report(&mut s, &err).unwrap(); - insta::assert_snapshot!(s); - } - ); + let project = Project::from_path(&manifest_path).unwrap(); + let lock_file = LockFile::from_path(&project.lock_file_path()).unwrap(); + let err = verify_lockfile_satisfiability(&project, &lock_file) + .await + .expect_err("expected failing satisfiability"); + + let name = manifest_path + .parent() + .unwrap() + .file_name() + .and_then(OsStr::to_str) + .unwrap(); + + let mut s = String::new(); + report_handler.render_report(&mut s, &err).unwrap(); + + let mut settings = Settings::clone_current(); + settings.set_snapshot_suffix(name); + settings.bind(|| { + // run snapshot test here + insta::assert_snapshot!(s); + }); } #[test] @@ -1302,7 +1540,7 @@ mod tests { let locked_data = PypiPackageData { name: "mypkg".parse().unwrap(), version: Version::from_str("0.1.0").unwrap(), - url_or_path: "git+https://github.com/mypkg@29932f3915935d773dc8d52c292cadd81c81071d" + location: "git+https://github.com/mypkg@29932f3915935d773dc8d52c292cadd81c81071d" .parse() .expect("failed to parse url"), hash: None, @@ -1340,9 +1578,7 @@ mod tests { let locked_data = PypiPackageData { name: "mypkg".parse().unwrap(), version: Version::from_str("0.1.0").unwrap(), - url_or_path: UrlOrPath::Path( - PathBuf::from_str("C:\\Users\\username\\mypkg.tar.gz").unwrap(), - ), + location: UrlOrPath::Path("C:\\Users\\username\\mypkg.tar.gz".into()), hash: None, requires_dist: vec![], requires_python: None, diff --git a/src/lock_file/snapshots/pixi__lock_file__satisfiability__tests__failing_satisiability@binary-spec-source-record.snap b/src/lock_file/snapshots/pixi__lock_file__satisfiability__tests__failing_satisiability@binary-spec-source-record.snap new file mode 100644 index 000000000..987d9da48 --- /dev/null +++ b/src/lock_file/snapshots/pixi__lock_file__satisfiability__tests__failing_satisiability@binary-spec-source-record.snap @@ -0,0 +1,7 @@ +--- +source: src/lock_file/satisfiability.rs +expression: s +--- +environment 'default' does not satisfy the requirements of the project for platform 'win-64 + Diagnostic severity: error + Caused by: package 'source' is locked as source, but is only required as binary diff --git a/src/lock_file/snapshots/pixi__lock_file__satisfiability__tests__failing_satisiability@expected-editable-multiple__pixi.toml.snap b/src/lock_file/snapshots/pixi__lock_file__satisfiability__tests__failing_satisiability@expected-editable-multiple.snap similarity index 78% rename from src/lock_file/snapshots/pixi__lock_file__satisfiability__tests__failing_satisiability@expected-editable-multiple__pixi.toml.snap rename to 
src/lock_file/snapshots/pixi__lock_file__satisfiability__tests__failing_satisiability@expected-editable-multiple.snap index 06239dc7a..a94f65293 100644 --- a/src/lock_file/snapshots/pixi__lock_file__satisfiability__tests__failing_satisiability@expected-editable-multiple__pixi.toml.snap +++ b/src/lock_file/snapshots/pixi__lock_file__satisfiability__tests__failing_satisiability@expected-editable-multiple.snap @@ -1,8 +1,6 @@ --- source: src/lock_file/satisfiability.rs -assertion_line: 874 expression: s -input_file: tests/non-satisfiability/expected-editable-multiple/pixi.toml --- environment 'default' does not satisfy the requirements of the project for platform 'win-64 Diagnostic severity: error diff --git a/src/lock_file/snapshots/pixi__lock_file__satisfiability__tests__failing_satisiability@expected-editable__pixi.toml.snap b/src/lock_file/snapshots/pixi__lock_file__satisfiability__tests__failing_satisiability@expected-editable.snap similarity index 75% rename from src/lock_file/snapshots/pixi__lock_file__satisfiability__tests__failing_satisiability@expected-editable__pixi.toml.snap rename to src/lock_file/snapshots/pixi__lock_file__satisfiability__tests__failing_satisiability@expected-editable.snap index bcef3642f..cb725226e 100644 --- a/src/lock_file/snapshots/pixi__lock_file__satisfiability__tests__failing_satisiability@expected-editable__pixi.toml.snap +++ b/src/lock_file/snapshots/pixi__lock_file__satisfiability__tests__failing_satisiability@expected-editable.snap @@ -1,8 +1,6 @@ --- source: src/lock_file/satisfiability.rs -assertion_line: 874 expression: s -input_file: tests/non-satisfiability/expected-editable/pixi.toml --- environment 'default' does not satisfy the requirements of the project for platform 'win-64 Diagnostic severity: error diff --git a/src/lock_file/snapshots/pixi__lock_file__satisfiability__tests__failing_satisiability@mismatched-spec__pixi.toml.snap b/src/lock_file/snapshots/pixi__lock_file__satisfiability__tests__failing_satisiability@mismatched-spec.snap similarity index 69% rename from src/lock_file/snapshots/pixi__lock_file__satisfiability__tests__failing_satisiability@mismatched-spec__pixi.toml.snap rename to src/lock_file/snapshots/pixi__lock_file__satisfiability__tests__failing_satisiability@mismatched-spec.snap index 4557c2556..4f73e0e02 100644 --- a/src/lock_file/snapshots/pixi__lock_file__satisfiability__tests__failing_satisiability@mismatched-spec__pixi.toml.snap +++ b/src/lock_file/snapshots/pixi__lock_file__satisfiability__tests__failing_satisiability@mismatched-spec.snap @@ -1,8 +1,6 @@ --- source: src/lock_file/satisfiability.rs -assertion_line: 532 -expression: "format!(\"{err:?}\")" -input_file: tests/non-satisfiability/mismatched-spec/pixi.toml +expression: s --- environment 'default' does not satisfy the requirements of the project for platform 'win-64 Diagnostic severity: error diff --git a/src/lock_file/snapshots/pixi__lock_file__satisfiability__tests__failing_satisiability@missing-dependency__pixi.toml.snap b/src/lock_file/snapshots/pixi__lock_file__satisfiability__tests__failing_satisiability@missing-dependency.snap similarity index 71% rename from src/lock_file/snapshots/pixi__lock_file__satisfiability__tests__failing_satisiability@missing-dependency__pixi.toml.snap rename to src/lock_file/snapshots/pixi__lock_file__satisfiability__tests__failing_satisiability@missing-dependency.snap index 29fe3c670..984877ae6 100644 --- 
a/src/lock_file/snapshots/pixi__lock_file__satisfiability__tests__failing_satisiability@missing-dependency__pixi.toml.snap +++ b/src/lock_file/snapshots/pixi__lock_file__satisfiability__tests__failing_satisiability@missing-dependency.snap @@ -1,8 +1,6 @@ --- source: src/lock_file/satisfiability.rs -assertion_line: 532 -expression: "format!(\"{err:?}\")" -input_file: tests/non-satisfiability/missing-dependency/pixi.toml +expression: s --- environment 'default' does not satisfy the requirements of the project for platform 'win-64 Diagnostic severity: error diff --git a/src/lock_file/snapshots/pixi__lock_file__satisfiability__tests__failing_satisiability@missing-pypi-extra__pixi.toml.snap b/src/lock_file/snapshots/pixi__lock_file__satisfiability__tests__failing_satisiability@missing-pypi-extra.snap similarity index 81% rename from src/lock_file/snapshots/pixi__lock_file__satisfiability__tests__failing_satisiability@missing-pypi-extra__pixi.toml.snap rename to src/lock_file/snapshots/pixi__lock_file__satisfiability__tests__failing_satisiability@missing-pypi-extra.snap index 8b541f4ea..730045013 100644 --- a/src/lock_file/snapshots/pixi__lock_file__satisfiability__tests__failing_satisiability@missing-pypi-extra__pixi.toml.snap +++ b/src/lock_file/snapshots/pixi__lock_file__satisfiability__tests__failing_satisiability@missing-pypi-extra.snap @@ -1,7 +1,6 @@ --- source: src/lock_file/satisfiability.rs expression: s -input_file: tests/non-satisfiability/missing-pypi-extra/pixi.toml --- environment 'default' does not satisfy the requirements of the project for platform 'win-64 Diagnostic severity: error diff --git a/src/lock_file/snapshots/pixi__lock_file__satisfiability__tests__failing_satisiability@solve-groups-pypi__pixi.toml.snap b/src/lock_file/snapshots/pixi__lock_file__satisfiability__tests__failing_satisiability@solve-groups-pypi.snap similarity index 81% rename from src/lock_file/snapshots/pixi__lock_file__satisfiability__tests__failing_satisiability@solve-groups-pypi__pixi.toml.snap rename to src/lock_file/snapshots/pixi__lock_file__satisfiability__tests__failing_satisiability@solve-groups-pypi.snap index dfe2fe58a..e83341e3f 100644 --- a/src/lock_file/snapshots/pixi__lock_file__satisfiability__tests__failing_satisiability@solve-groups-pypi__pixi.toml.snap +++ b/src/lock_file/snapshots/pixi__lock_file__satisfiability__tests__failing_satisiability@solve-groups-pypi.snap @@ -1,7 +1,6 @@ --- source: src/lock_file/satisfiability.rs expression: s -input_file: tests/non-satisfiability/solve-groups-pypi/pixi.toml --- environment 'default' does not satisfy the requirements of the project for platform 'win-64 Diagnostic severity: error diff --git a/src/lock_file/snapshots/pixi__lock_file__satisfiability__tests__failing_satisiability@source-dependency.snap b/src/lock_file/snapshots/pixi__lock_file__satisfiability__tests__failing_satisiability@source-dependency.snap new file mode 100644 index 000000000..2fce187aa --- /dev/null +++ b/src/lock_file/snapshots/pixi__lock_file__satisfiability__tests__failing_satisiability@source-dependency.snap @@ -0,0 +1,7 @@ +--- +source: src/lock_file/satisfiability.rs +expression: s +--- +environment 'default' does not satisfy the requirements of the project for platform 'win-64 + Diagnostic severity: error + Caused by: the input hash for 'child-package' (f72524d6ca020fe2a30ceeb88e1d4931d8b7d8a07ba02c86e9450d12726070ce) does not match the hash in the lock-file (b67010bf5bc5608db89c0399e726852b07a7ef4fb26b3aa18171f1d0f6a19c89) diff --git a/src/lock_file/update.rs 
b/src/lock_file/update.rs index dc3e28efa..7ae75238f 100644 --- a/src/lock_file/update.rs +++ b/src/lock_file/update.rs @@ -1,63 +1,72 @@ +use std::{ + cmp::PartialEq, + collections::{HashMap, HashSet}, + future::{ready, Future}, + iter, + path::PathBuf, + sync::Arc, + time::{Duration, Instant}, +}; + use barrier_cell::BarrierCell; use fancy_display::FancyDisplay; -use futures::{future::Either, stream::FuturesUnordered, FutureExt, StreamExt, TryFutureExt}; +use futures::{ + future::Either, stream::FuturesUnordered, FutureExt, StreamExt, TryFutureExt, TryStreamExt, +}; use indexmap::{IndexMap, IndexSet}; -use indicatif::{HumanBytes, ProgressBar, ProgressState}; +use indicatif::ProgressBar; use itertools::Itertools; +use miette::{miette, Report}; use miette::{Diagnostic, IntoDiagnostic, LabeledSpan, MietteDiagnostic, WrapErr}; -use parking_lot::Mutex; + +use pixi_config::get_cache_dir; use pixi_consts::consts; use pixi_manifest::{EnvironmentName, FeaturesExt, HasFeaturesIter}; use pixi_progress::global_multi_progress; +use pixi_record::{ParseLockFileError, PixiRecord}; use pixi_uv_conversions::{ to_extra_name, to_marker_environment, to_normalize, to_uv_extra_name, to_uv_normalize, ConversionError, }; -use pypi_mapping::{self, Reporter}; -use pypi_modifiers::{pypi_marker_env::determine_marker_environment, pypi_tags::is_python_record}; +use pypi_mapping::{self}; +use pypi_modifiers::pypi_marker_env::determine_marker_environment; use rattler::package_cache::PackageCache; -use rattler_conda_types::{Arch, Channel, MatchSpec, Platform, RepoDataRecord}; +use rattler_conda_types::{Arch, GenericVirtualPackage, MatchSpec, ParseStrictness, Platform}; use rattler_lock::{LockFile, PypiIndexes, PypiPackageData, PypiPackageEnvironmentData}; use rattler_repodata_gateway::{Gateway, RepoData}; use rattler_solve::ChannelPriority; -use std::cmp::PartialEq; -use std::{ - borrow::Cow, - collections::{HashMap, HashSet, VecDeque}, - fmt::Write, - future::{ready, Future}, - iter, - path::PathBuf, - sync::{ - atomic::{AtomicBool, Ordering}, - Arc, - }, - time::{Duration, Instant}, -}; +use reqwest_middleware::ClientWithMiddleware; + use thiserror::Error; use tokio::sync::Semaphore; use tracing::Instrument; -use url::Url; use uv_normalize::ExtraName; -use crate::environment::{read_environment_file, LockedEnvironmentHash}; -use crate::repodata::Repodata; +use super::{ + outdated::OutdatedEnvironments, utils::IoConcurrencyLimit, PixiRecordsByName, + PypiRecordsByName, UvResolutionContext, +}; use crate::{ activation::CurrentEnvVarBehavior, + build::{BuildContext, GlobHashCache}, environment::{ - self, write_environment_file, EnvironmentFile, LockFileUsage, PerEnvironmentAndPlatform, - PerGroup, PerGroupAndPlatform, PythonStatus, + self, read_environment_file, write_environment_file, EnvironmentFile, LockFileUsage, + LockedEnvironmentHash, PerEnvironmentAndPlatform, PerGroup, PerGroupAndPlatform, + PythonStatus, }, load_lock_file, lock_file::{ - self, update, utils::IoConcurrencyLimit, OutdatedEnvironments, PypiRecord, - PypiRecordsByName, RepoDataRecordsByName, UvResolutionContext, + self, + records_by_name::HasNameVersion, + reporter::{CondaMetadataProgress, GatewayProgressReporter, SolveProgressBar}, + PypiRecord, }, prefix::Prefix, project::{ grouped_environment::{GroupedEnvironment, GroupedEnvironmentName}, Environment, HasProjectRef, }, + repodata::Repodata, Project, }; @@ -70,7 +79,7 @@ impl Project { &self, options: UpdateLockFileOptions, ) -> miette::Result> { - update::update_lock_file(self, options).await + 
self::update_lock_file(self, options).await } /// Get lockfile without checking @@ -83,8 +92,8 @@ enum UpdateError { #[error("the lockfile is not up-to-date with the requested environment: '{}'", .0.fancy_display())] LockFileMissingEnv(EnvironmentName), - #[error("the lockfile is not up-to-date with the requested platform: '{}'", .0)] - LockFileMissingPlatform(Platform), + #[error("some information from the lockfile could not be parsed")] + ParseLockFileError(#[from] ParseLockFileError), } /// Options to pass to [`Project::update_lock_file`]. @@ -125,6 +134,12 @@ pub struct LockFileDerivedData<'p> { /// The IO concurrency semaphore to use when updating environments pub io_concurrency_limit: IoConcurrencyLimit, + + /// The build context that was used to create the lock-file + pub build_context: BuildContext, + + /// An object that caches input hashes + pub glob_hash_cache: GlobHashCache, } /// The mode to use when updating a prefix. @@ -132,8 +147,8 @@ pub enum UpdateMode { /// Validate if the prefix is up-to-date. /// Using a fast and simple validation method. - /// Used for skipping the update if the prefix is already up-to-date, in activating commands. - /// Like `pixi shell` or `pixi run`. + /// Used for skipping the update if the prefix is already up-to-date, in + /// activating commands. Like `pixi shell` or `pixi run`. QuickValidate, /// Force a prefix install without running the short validation. /// Used for updating the prefix when the lock-file is likely out of date. Force, @@ -171,7 +186,8 @@ impl<'p> LockFileDerivedData<'p> { environment: &Environment<'p>, update_mode: UpdateMode, ) -> miette::Result { - // Check if the prefix is already up-to-date by validating the hash with the environment file + // Check if the prefix is already up-to-date by validating the hash with the + // environment file let hash = self.locked_environment_hash(environment)?; if update_mode == UpdateMode::QuickValidate { if let Ok(Some(environment_file)) = read_environment_file(&environment.dir()) { @@ -219,8 +235,8 @@ impl<'p> LockFileDerivedData<'p> { // Get the prefix with the conda packages installed. let platform = environment.best_platform(); let (prefix, python_status) = self.conda_prefix(environment).await?; - let repodata_records = self - .repodata_records(environment, platform) + let pixi_records = self + .pixi_records(environment, platform) .into_diagnostic()?
.unwrap_or_default(); let pypi_records = self @@ -262,7 +278,7 @@ impl<'p> LockFileDerivedData<'p> { environment.name(), &prefix, platform, - &repodata_records, + &pixi_records, &pypi_records, &python_status, &environment.system_requirements(), @@ -298,7 +314,11 @@ impl<'p> LockFileDerivedData<'p> { .environment(environment.name().as_str()) .ok_or_else(|| UpdateError::LockFileMissingEnv(environment.name().clone()))?; - Ok(locked_env.pypi_packages_for_platform(platform)) + let packages = locked_env.pypi_packages(platform); + Ok(packages.map(|iter| { + iter.map(|(data, env_data)| (data.clone(), env_data.clone())) + .collect() + })) } fn pypi_indexes( @@ -312,18 +332,24 @@ impl<'p> LockFileDerivedData<'p> { Ok(locked_env.pypi_indexes().cloned()) } - fn repodata_records( + fn pixi_records( &self, environment: &Environment<'p>, platform: Platform, - ) -> Result>, UpdateError> { + ) -> Result>, UpdateError> { let locked_env = self .lock_file .environment(environment.name().as_str()) .ok_or_else(|| UpdateError::LockFileMissingEnv(environment.name().clone()))?; - locked_env - .conda_repodata_records_for_platform(platform) - .map_err(|_| UpdateError::LockFileMissingPlatform(platform)) + + Ok(locked_env + .conda_packages(platform) + .map(|iter| { + iter.cloned() + .map(PixiRecord::try_from) + .collect::, _>>() + }) + .transpose()?) } async fn conda_prefix( @@ -351,19 +377,37 @@ impl<'p> LockFileDerivedData<'p> { // Get the locked environment from the lock-file. let records = self - .repodata_records(environment, platform) + .pixi_records(environment, platform) .into_diagnostic()? .unwrap_or_default(); + let channel_urls = environment + .channel_urls(&self.project.channel_config()) + .into_diagnostic()?; + let build_dep_channel_urls = environment + .project() + .manifest() + .build_section() + .map(|section| section.channels(&self.project.channel_config())) + .transpose() + .into_diagnostic()?; // Update the prefix with conda packages. let has_existing_packages = !installed_packages.is_empty(); let env_name = GroupedEnvironmentName::Environment(environment.name().clone()); + let gateway = environment.project().repodata_gateway().clone(); let python_status = environment::update_prefix_conda( &prefix, self.package_cache.clone(), environment.project().authenticated_client().clone(), installed_packages, records, + environment + .virtual_packages(platform) + .into_iter() + .map(GenericVirtualPackage::from) + .collect(), + channel_urls, + build_dep_channel_urls, platform, &format!( "{} environment '{}'", @@ -376,6 +420,8 @@ impl<'p> LockFileDerivedData<'p> { ), "", self.io_concurrency_limit.clone().into(), + self.build_context.clone(), + gateway, ) .await?; @@ -395,10 +441,10 @@ pub struct UpdateContext<'p> { /// Repodata records from the lock-file. This contains the records that /// actually exist in the lock-file. If the lock-file is missing or /// partially missing then the data also won't exist in this field. - locked_repodata_records: PerEnvironmentAndPlatform<'p, Arc>, + locked_repodata_records: PerEnvironmentAndPlatform<'p, Arc>, /// Repodata records from the lock-file grouped by solve-group. - locked_grouped_repodata_records: PerGroupAndPlatform<'p, Arc>, + locked_grouped_repodata_records: PerGroupAndPlatform<'p, Arc>, /// Pypi records from the lock-file grouped by solve-group. locked_grouped_pypi_records: PerGroupAndPlatform<'p, Arc>, @@ -417,11 +463,11 @@ pub struct UpdateContext<'p> { /// solved records computed by another task. 
This allows tasks to wait /// for the records to be solved before proceeding. solved_repodata_records: - PerEnvironmentAndPlatform<'p, Arc>>>, + PerEnvironmentAndPlatform<'p, Arc>>>, /// Keeps track of all pending grouped conda targets that are being solved. grouped_solved_repodata_records: - PerGroupAndPlatform<'p, Arc>>>, + PerGroupAndPlatform<'p, Arc>>>, /// Keeps track of all pending prefix updates. This only tracks the conda /// updates to a prefix, not whether the pypi packages have also been @@ -452,6 +498,12 @@ pub struct UpdateContext<'p> { /// operations. io_concurrency_limit: IoConcurrencyLimit, + /// The build context to use for building source packages + build_context: BuildContext, + + /// The input hash cache + glob_hash_cache: GlobHashCache, + /// Whether it is allowed to instantiate any prefix. no_install: bool, } @@ -464,7 +516,7 @@ impl<'p> UpdateContext<'p> { &self, group: &GroupedEnvironment<'p>, platform: Platform, - ) -> Option>> { + ) -> Option>> { // Check if there is a pending operation for this group and platform if let Some(pending_records) = self .grouped_solved_repodata_records @@ -515,7 +567,7 @@ impl<'p> UpdateContext<'p> { &mut self, environment: &Environment<'p>, platform: Platform, - ) -> Option { + ) -> Option { self.solved_repodata_records .get_mut(environment) .and_then(|records| records.remove(&platform)) @@ -633,6 +685,7 @@ pub async fn update_lock_file( let lock_file = load_lock_file(project).await?; let package_cache = PackageCache::new(pixi_config::get_cache_dir()?.join(consts::CONDA_PACKAGE_CACHE_DIR)); + let glob_hash_cache = GlobHashCache::default(); // should we check the lock-file in the first place? if !options.lock_file_usage.should_check_if_out_of_date() { @@ -646,11 +699,22 @@ pub async fn update_lock_file( updated_pypi_prefixes: Default::default(), uv_context: None, io_concurrency_limit: IoConcurrencyLimit::default(), + build_context: BuildContext::new( + get_cache_dir()?, + project.pixi_dir(), + project.channel_config(), + ), + glob_hash_cache, }); } // Check which environments are out of date. - let outdated = OutdatedEnvironments::from_project_and_lock_file(project, &lock_file); + let outdated = OutdatedEnvironments::from_project_and_lock_file( + project, + &lock_file, + glob_hash_cache.clone(), + ) + .await; if outdated.is_empty() { tracing::info!("the lock-file is up-to-date"); @@ -663,6 +727,12 @@ pub async fn update_lock_file( updated_pypi_prefixes: Default::default(), uv_context: None, io_concurrency_limit: IoConcurrencyLimit::default(), + build_context: BuildContext::new( + get_cache_dir()?, + project.pixi_dir(), + project.channel_config(), + ), + glob_hash_cache, }); } @@ -683,7 +753,9 @@ pub async fn update_lock_file( .with_no_install(options.no_install) .with_outdated_environments(outdated) .with_lock_file(lock_file) - .finish()? + .with_glob_hash_cache(glob_hash_cache) + .finish() + .await? .update() .await?; @@ -720,9 +792,19 @@ pub struct UpdateContextBuilder<'p> { /// The io concurrency semaphore to use when updating environments io_concurrency_limit: Option, + + /// A cache for computing input hashes + glob_hash_cache: Option, } impl<'p> UpdateContextBuilder<'p> { + pub(crate) fn with_glob_hash_cache(self, glob_hash_cache: GlobHashCache) -> Self { + Self { + glob_hash_cache: Some(glob_hash_cache), + ..self + } + } + /// The package cache to use during the update process. Prefixes might need /// to be instantiated to be able to solve pypi dependencies. 
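The `with_glob_hash_cache` method above stores the cache as an `Option` that `finish()` later resolves via `unwrap_or_default()` (see the next hunk), the builder convention used throughout this file: optional inputs stay `None` until the builder is consumed. A stripped-down sketch of the pattern (types are placeholders, not pixi's):

```rust
#[derive(Clone, Default)]
struct Cache; // placeholder for something like `GlobHashCache`

#[derive(Default)]
struct Builder {
    cache: Option<Cache>, // `None` means "use the default at finish time"
}

impl Builder {
    fn with_cache(self, cache: Cache) -> Self {
        Self { cache: Some(cache) }
    }

    fn finish(self) -> Cache {
        // Fall back to a default only if the caller never supplied one.
        self.cache.unwrap_or_default()
    }
}

fn main() {
    let _defaulted = Builder::default().finish();
    let _explicit = Builder::default().with_cache(Cache).finish();
}
```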
pub(crate) fn with_package_cache(self, package_cache: PackageCache) -> Self { @@ -775,7 +857,7 @@ impl<'p> UpdateContextBuilder<'p> { } /// Construct the context. - pub(crate) fn finish(self) -> miette::Result> { + pub(crate) async fn finish(self) -> miette::Result> { let project = self.project; let package_cache = match self.package_cache { Some(package_cache) => package_cache, @@ -784,9 +866,18 @@ impl<'p> UpdateContextBuilder<'p> { ), }; let lock_file = self.lock_file; - let outdated = self.outdated_environments.unwrap_or_else(|| { - OutdatedEnvironments::from_project_and_lock_file(project, &lock_file) - }); + let glob_hash_cache = self.glob_hash_cache.unwrap_or_default(); + let outdated = match self.outdated_environments { + Some(outdated) => outdated, + None => { + OutdatedEnvironments::from_project_and_lock_file( + project, + &lock_file, + glob_hash_cache.clone(), + ) + .await + } + }; // Extract the current conda records from the lock-file // TODO: Should we parallelize this? Measure please. @@ -798,20 +889,19 @@ impl<'p> UpdateContextBuilder<'p> { .environment(env.name().as_str()) .into_iter() .map(move |locked_env| { - locked_env.conda_repodata_records().map(|records| { - ( - env.clone(), + locked_env + .conda_packages_by_platform() + .map(|(platform, records)| { records - .into_iter() - .map(|(platform, records)| { - ( - platform, - Arc::new(RepoDataRecordsByName::from_iter(records)), - ) + .cloned() + .map(PixiRecord::try_from) + .collect::, _>>() + .map(|records| { + (platform, Arc::new(PixiRecordsByName::from_iter(records))) }) - .collect(), - ) - }) + }) + .collect::, _>>() + .map(|records| (env.clone(), records)) }) }) .collect::>, _>>() @@ -828,10 +918,14 @@ impl<'p> UpdateContextBuilder<'p> { ( env.clone(), locked_env - .pypi_packages() - .into_iter() + .pypi_packages_by_platform() .map(|(platform, records)| { - (platform, Arc::new(PypiRecordsByName::from_iter(records))) + ( + platform, + Arc::new(PypiRecordsByName::from_iter(records.map( + |(data, env_data)| (data.clone(), env_data.clone()), + ))), + ) }) .collect(), ) @@ -886,10 +980,7 @@ impl<'p> UpdateContextBuilder<'p> { by_platform .into_iter() .map(|(platform, records)| { - ( - platform, - Arc::new(RepoDataRecordsByName::from_iter(records)), - ) + (platform, Arc::new(PixiRecordsByName::from_iter(records))) }) .collect() } @@ -943,6 +1034,12 @@ impl<'p> UpdateContextBuilder<'p> { .max_concurrent_solves .unwrap_or_else(default_max_concurrent_solves); + let build_context = BuildContext::new( + pixi_config::get_cache_dir()?, + project.pixi_dir(), + project.channel_config(), + ); + Ok(UpdateContext { project, @@ -962,6 +1059,8 @@ impl<'p> UpdateContextBuilder<'p> { conda_solve_semaphore: Arc::new(Semaphore::new(max_concurrent_solves)), pypi_solve_semaphore: Arc::new(Semaphore::new(determine_pypi_solve_permits(project))), io_concurrency_limit: self.io_concurrency_limit.unwrap_or_default(), + build_context, + glob_hash_cache, no_install: self.no_install, }) @@ -979,6 +1078,7 @@ impl<'p> UpdateContext<'p> { package_cache: None, max_concurrent_solves: None, io_concurrency_limit: None, + glob_hash_cache: None, } } @@ -1055,8 +1155,9 @@ impl<'p> UpdateContext<'p> { project.repodata_gateway().clone(), platform, self.conda_solve_semaphore.clone(), - project.client().clone(), + project.authenticated_client().clone(), channel_priority, + self.build_context.clone(), ) .boxed_local(); @@ -1123,6 +1224,7 @@ impl<'p> UpdateContext<'p> { self.package_cache.clone(), records_future, self.io_concurrency_limit.clone(), + 
self.build_context.clone(), ) .map_err(move |e| { e.context(format!( @@ -1484,6 +1586,8 @@ impl<'p> UpdateContext<'p> { updated_pypi_prefixes: HashMap::default(), uv_context, io_concurrency_limit: self.io_concurrency_limit, + build_context: self.build_context, + glob_hash_cache: self.glob_hash_cache, }) } } @@ -1507,7 +1611,7 @@ fn make_unsupported_pypi_platform_error( let mut labels = Vec::new(); // Add a reference to the set of platforms that are supported by the project. - let project_platforms = &environment.project().manifest.parsed.project.platforms; + let project_platforms = &environment.project().manifest.workspace.workspace.platforms; if let Some(span) = project_platforms.span.clone() { labels.push(LabeledSpan::at( span, @@ -1542,7 +1646,14 @@ fn make_unsupported_pypi_platform_error( diag.labels = Some(labels); diag.help = Some("Try converting your [pypi-dependencies] to conda [dependencies]".to_string()); - miette::Report::new(diag).with_source_code(environment.project().manifest.contents.clone()) + let reporter = miette::Report::new(diag); + + // Add the source code if we have it available. + if let Some(content) = environment.project().manifest.contents.as_ref() { + reporter.with_source_code(content.clone()) + } else { + reporter + } } /// Represents data that is sent back from a task. This is used to communicate @@ -1553,7 +1664,7 @@ enum TaskResult { CondaGroupSolved( GroupedEnvironmentName, Platform, - RepoDataRecordsByName, + PixiRecordsByName, Duration, ), @@ -1573,23 +1684,25 @@ enum TaskResult { ExtractedRecordsSubset( EnvironmentName, Platform, - Arc, + Arc, Arc, ), } /// A task that solves the conda dependencies for a given environment. +#[allow(clippy::too_many_arguments)] async fn spawn_solve_conda_environment_task( group: GroupedEnvironment<'_>, - existing_repodata_records: Arc, + existing_repodata_records: Arc, repodata_gateway: Gateway, platform: Platform, concurrency_semaphore: Arc, - client: reqwest::Client, + client: ClientWithMiddleware, channel_priority: ChannelPriority, + build_context: BuildContext, ) -> miette::Result { // Get the dependencies for this platform - let dependencies = group.dependencies(None, Some(platform)); + let dependencies = group.combined_dependencies(Some(platform)); // Get the virtual packages for this platform let virtual_packages = group.virtual_packages(platform); @@ -1609,8 +1722,19 @@ async fn spawn_solve_conda_environment_task( // Get the channel configuration let channel_config = group.project().channel_config(); + let gateway = group.project().repodata_gateway().clone(); + + let build_channels = group + .project() + .manifest() + .build_section() + .map(|section| section.channels(&channel_config)) + .transpose() + .into_diagnostic()?; + tokio::spawn( async move { + // Acquire a permit before we are allowed to solve the environment. 
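The `// Acquire a permit` step that opens the spawned task is what bounds solver concurrency: every solve task waits on the shared `concurrency_semaphore` before doing real work. A self-contained sketch of that shape (the limit of 2 and the task body are arbitrary):

```rust
use std::sync::Arc;
use tokio::sync::Semaphore;

#[tokio::main]
async fn main() {
    // At most two tasks hold a permit at once; the rest queue up.
    let semaphore = Arc::new(Semaphore::new(2));

    let handles: Vec<_> = (0..5)
        .map(|id| {
            let semaphore = Arc::clone(&semaphore);
            tokio::spawn(async move {
                // The permit is released when `_permit` drops at the end
                // of the task body.
                let _permit = semaphore.acquire().await.expect("semaphore closed");
                println!("solving environment {id}");
            })
        })
        .collect();

    for handle in handles {
        handle.await.expect("task panicked");
    }
}
```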
let _permit = concurrency_semaphore .acquire() .await @@ -1622,33 +1746,101 @@ async fn spawn_solve_conda_environment_task( group_name.clone(), )); pb.start(); + pb.set_message("loading repodata"); let start = Instant::now(); - // Convert the dependencies into match specs - let match_specs = dependencies - .iter_specs() - .map(|(name, constraint)| { - let nameless = constraint - .clone() - .try_into_nameless_match_spec(&channel_config) - .unwrap() - .expect("only binaries are supported at the moment"); - MatchSpec::from_nameless(nameless, Some(name.clone())) + // Convert the dependencies into match specs and source dependencies + let (source_specs, match_specs): (Vec<_>, Vec<_>) = dependencies + .into_specs() + .partition_map(|(name, constraint)| { + constraint + .into_named_source_or_binary(name, &channel_config) + .expect("failed to convert dependency into match spec") + }); + + // Collect metadata from all source packages + let channel_urls = channels + .iter() + .map(|c| c.clone().into_base_url(&channel_config)) + .collect::, _>>() + .into_diagnostic()?; + + let build_channels = &build_channels; + let gateway = &gateway; + + let mut metadata_progress = None; + let mut source_match_specs = Vec::new(); + let source_futures = FuturesUnordered::new(); + for (build_id, (name, source_spec)) in source_specs.iter().enumerate() { + let build_channels = build_channels + .clone() + .ok_or_else(|| miette!("`channels` are not defined in the `[build-system]`"))?; + + // Create a metadata reporter if it doesn't exist yet. + let metadata_reporter = metadata_progress.get_or_insert_with(|| { + Arc::new(CondaMetadataProgress::new( + &pb.pb, + source_specs.len() as u64, + )) + }); + source_futures.push( + build_context + .extract_source_metadata( + source_spec, + &channel_urls, + build_channels.clone(), + platform, + virtual_packages.clone(), + platform, + virtual_packages.clone(), + metadata_reporter.clone(), + build_id, + gateway.clone(), + client.clone(), + ) + .map_err(|e| { + Report::new(e).wrap_err(format!( + "failed to extract metadata for '{}'", + name.as_source() + )) + }), + ); + + // Add a dependency to the source package itself. + // TODO: We also need to make sure that only the source package is used when + // passing these packages to the gateway. + source_match_specs.push(MatchSpec { + name: Some(name.clone()), + ..MatchSpec::default() }) - .collect_vec(); + } + let source_repodata: Vec<_> = source_futures.try_collect().await?; + + // Extract transitive requirements from the requirements of the source packages + let mut query_match_specs = match_specs.clone(); + for source_repodata in source_repodata + .iter() + .flat_map(|r| r.records.iter()) + .flat_map(|r| &r.package_record.depends) + { + if let Ok(spec) = MatchSpec::from_str(source_repodata, ParseStrictness::Lenient) { + query_match_specs.push(spec); + } + } // Extract the repo data records needed to solve the environment. 
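The `partition_map` call earlier in this task does the heavy lifting of the source/binary split: each dependency is routed to exactly one of two output collections via `Either`. A small sketch of the combinator, assuming the `itertools` crate this file already imports (the odd/even split is just an example):

```rust
use itertools::{Either, Itertools};

fn main() {
    // Each item lands in exactly one of the two collections, the way
    // dependencies above are routed to source specs vs. binary match specs.
    let (odds, evens): (Vec<i32>, Vec<i32>) = (1..=6).partition_map(|n| {
        if n % 2 == 1 {
            Either::Left(n)
        } else {
            Either::Right(n)
        }
    });

    assert_eq!(odds, vec![1, 3, 5]);
    assert_eq!(evens, vec![2, 4, 6]);
}
```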
- pb.set_message("loading repodata"); let fetch_repodata_start = Instant::now(); - let channels: Vec = channels - .into_iter() - .map(|c| c.into_channel(&channel_config)) - .try_collect() - .into_diagnostic()?; - let available_packages = repodata_gateway - .query(channels, [platform, Platform::NoArch], match_specs.clone()) + .query( + channels + .into_iter() + .map(|c| c.into_channel(&channel_config)) + .collect::, _>>() + .into_diagnostic()?, + [platform, Platform::NoArch], + query_match_specs, + ) .recursive(true) .with_reporter(GatewayProgressReporter::new(pb.clone())) .await @@ -1662,11 +1854,36 @@ async fn spawn_solve_conda_environment_task( // Solve conda packages pb.reset_style(); pb.set_message("resolving conda"); + + let mut all_specs = match_specs; + all_specs.extend(source_match_specs); + + // Update the locked records by filtering out any source records. These will be + // locked again every time. + let source_package_records: HashSet = source_repodata + .iter() + .flat_map(|record| record.records.iter()) + .map(|record| record.package_record.name.clone()) + .collect(); + let locked_records = existing_repodata_records + .records + .iter() + .filter_map(|record| { + let record = record.as_binary()?; + if source_package_records.contains(record.name()) { + None + } else { + Some(record.clone()) + } + }) + .collect(); + let mut records = lock_file::resolve_conda( - match_specs, + all_specs, virtual_packages, - existing_repodata_records.records.clone(), + locked_records, available_packages, + source_repodata, channel_priority, ) .await @@ -1683,16 +1900,16 @@ async fn spawn_solve_conda_environment_task( if has_pypi_dependencies { pb.set_message("extracting pypi packages"); pypi_mapping::amend_pypi_purls( - client.into(), + client, &pypi_name_mapping_location, - &mut records, + records.iter_mut().filter_map(PixiRecord::as_binary_mut), Some(pb.purl_amend_reporter()), ) .await?; } // Turn the records into a map by name - let records_by_name = RepoDataRecordsByName::from(records); + let records_by_name = PixiRecordsByName::from(records); let end = Instant::now(); @@ -1724,7 +1941,7 @@ async fn spawn_solve_conda_environment_task( async fn spawn_extract_environment_task( environment: Environment<'_>, platform: Platform, - grouped_repodata_records: impl Future>, + grouped_repodata_records: impl Future>, grouped_pypi_records: impl Future>, ) -> miette::Result { let group = GroupedEnvironment::from(environment.clone()); @@ -1754,13 +1971,13 @@ async fn spawn_extract_environment_task( } enum PackageRecord<'a> { - Conda(&'a RepoDataRecord), + Conda(&'a PixiRecord), Pypi((&'a PypiRecord, Option)), } // Determine the conda packages we need. let conda_package_names = environment - .dependencies(None, Some(platform)) + .combined_dependencies(Some(platform)) .names() .cloned() .map(PackageName::Conda) @@ -1788,9 +2005,7 @@ async fn spawn_extract_environment_task( // dependencies. 
let marker_environment = if has_pypi_dependencies { grouped_repodata_records - .records - .iter() - .find(|r| is_python_record(r)) + .python_interpreter_record() .and_then(|record| determine_marker_environment(platform, &record.package_record).ok()) } else { None @@ -1800,7 +2015,7 @@ async fn spawn_extract_environment_task( let mut queue = itertools::chain(conda_package_names, pypi_package_names).collect::>(); let mut queued_names = queue.iter().cloned().collect::>(); - let mut conda_records = Vec::new(); + let mut pixi_records = Vec::new(); let mut pypi_records = HashMap::new(); while let Some(package) = queue.pop() { let record = match package { @@ -1828,7 +2043,7 @@ async fn spawn_extract_environment_task( match record { PackageRecord::Conda(record) => { // Find all dependencies in the record and add them to the queue. - for dependency in record.package_record.depends.iter() { + for dependency in record.package_record().depends.iter() { let dependency_name = PackageName::Conda(rattler_conda_types::PackageName::new_unchecked( dependency.split_once(' ').unwrap_or((dependency, "")).0, @@ -1839,7 +2054,7 @@ async fn spawn_extract_environment_task( } // Store the record itself as part of the subset - conda_records.push(record); + pixi_records.push(record); } PackageRecord::Pypi((record, extra)) => { // Evaluate all dependencies @@ -1888,8 +2103,8 @@ async fn spawn_extract_environment_task( Ok(TaskResult::ExtractedRecordsSubset( environment.name().clone(), platform, - Arc::new(RepoDataRecordsByName::from_iter( - conda_records.into_iter().cloned(), + Arc::new(PixiRecordsByName::from_iter( + pixi_records.into_iter().cloned(), )), Arc::new(PypiRecordsByName::from_iter( pypi_records.into_values().cloned(), @@ -1903,7 +2118,7 @@ async fn spawn_solve_pypi_task( resolution_context: UvResolutionContext, environment: GroupedEnvironment<'_>, platform: Platform, - repodata_records: impl Future>, + repodata_records: impl Future>, prefix: impl Future, env_variables: &HashMap, semaphore: Arc, @@ -1932,13 +2147,15 @@ async fn spawn_solve_pypi_task( let pypi_name_mapping_location = environment.project().pypi_name_mapping_source()?; - let mut conda_records = repodata_records.records.clone(); + let mut pixi_records = repodata_records.records.clone(); let locked_pypi_records = locked_pypi_packages.records.clone(); pypi_mapping::amend_pypi_purls( environment.project().client().clone().into(), pypi_name_mapping_location, - &mut conda_records, + pixi_records + .iter_mut() + .filter_map(PixiRecord::as_binary_mut), None, ) .await?; @@ -1978,7 +2195,7 @@ async fn spawn_solve_pypi_task( &pypi_options, index_map, system_requirements, - &conda_records, + &pixi_records, &locked_pypi_records, platform, &pb.pb, @@ -2022,12 +2239,24 @@ async fn spawn_solve_pypi_task( async fn spawn_create_prefix_task( group: GroupedEnvironment<'_>, package_cache: PackageCache, - conda_records: impl Future>, + pixi_records: impl Future>, io_concurrency_limit: IoConcurrencyLimit, + build_context: BuildContext, ) -> miette::Result { let group_name = group.name().clone(); let prefix = group.prefix(); let client = group.project().authenticated_client().clone(); + let channels = group + .channel_urls(&group.project().channel_config()) + .into_diagnostic()?; + + let build_channels = group + .project() + .manifest() + .build_section() + .map(|section| section.channels(&group.project().channel_config())) + .transpose() + .into_diagnostic()?; // Spawn a task to determine the currently installed packages. 
let installed_packages_future = tokio::spawn({ @@ -2041,12 +2270,17 @@ async fn spawn_create_prefix_task( // Wait until the conda records are available and until the installed packages // for this prefix are available. - let (conda_records, installed_packages) = - tokio::try_join!(conda_records.map(Ok), installed_packages_future)?; + let (pixi_records, installed_packages) = + tokio::try_join!(pixi_records.map(Ok), installed_packages_future)?; + + let build_virtual_packages = group.virtual_packages(Platform::current()); + + let gateway = group.project().repodata_gateway(); // Spawn a background task to update the prefix let (python_status, duration) = tokio::spawn({ let prefix = prefix.clone(); + let gateway = gateway.clone(); let group_name = group_name.clone(); async move { let start = Instant::now(); @@ -2056,7 +2290,10 @@ async fn spawn_create_prefix_task( package_cache, client, installed_packages, - conda_records.records.clone(), + pixi_records.records.clone(), + build_virtual_packages, + channels, + build_channels, Platform::current(), &format!( "{} python environment to solve pypi packages for '{}'", @@ -2069,6 +2306,8 @@ async fn spawn_create_prefix_task( ), " ", io_concurrency_limit.into(), + build_context, + gateway.clone(), ) .await?; let end = Instant::now(); @@ -2088,265 +2327,3 @@ async fn spawn_create_prefix_task( duration, )) } - -/// A helper struct that manages a progress-bar for solving an environment. -#[derive(Clone)] -pub(crate) struct SolveProgressBar { - pub pb: ProgressBar, -} - -impl SolveProgressBar { - pub(crate) fn new( - pb: ProgressBar, - platform: Platform, - environment_name: GroupedEnvironmentName, - ) -> Self { - let name_and_platform = format!( - "{}:{}", - environment_name.fancy_display(), - consts::PLATFORM_STYLE.apply_to(platform) - ); - - pb.set_style(indicatif::ProgressStyle::with_template(" {prefix:20!} ..").unwrap()); - pb.enable_steady_tick(Duration::from_millis(100)); - pb.set_prefix(name_and_platform); - Self { pb } - } - - pub(crate) fn start(&self) { - self.pb.reset_elapsed(); - self.reset_style() - } - - pub(crate) fn set_message(&self, msg: impl Into>) { - self.pb.set_message(msg); - } - - pub(crate) fn inc(&self, n: u64) { - self.pb.inc(n); - } - - pub(crate) fn set_position(&self, n: u64) { - self.pb.set_position(n) - } - - pub(crate) fn set_update_style(&self, total: usize) { - self.pb.set_length(total as u64); - self.pb.set_position(0); - self.pb.set_style( - indicatif::ProgressStyle::with_template( - " {spinner:.dim} {prefix:20!} [{elapsed_precise}] [{bar:20!.bright.yellow/dim.white}] {pos:>4}/{len:4} {msg:.dim}") - .unwrap() - .progress_chars("━━╾─"), - ); - } - - pub(crate) fn set_bytes_update_style(&self, total: usize) { - self.pb.set_length(total as u64); - self.pb.set_position(0); - self.pb.set_style( - indicatif::ProgressStyle::with_template( - " {spinner:.dim} {prefix:20!} [{elapsed_precise}] [{bar:20!.bright.yellow/dim.white}] {bytes:>8} @ {smoothed_bytes_per_sec:8} {msg:.dim}") - .unwrap() - .progress_chars("━━╾─") - .with_key( - "smoothed_bytes_per_sec", - |s: &ProgressState, w: &mut dyn Write| match (s.pos(), s.elapsed().as_millis()) { - (pos, elapsed_ms) if elapsed_ms > 0 => { - write!(w, "{}/s", HumanBytes((pos as f64 * 1000_f64 / elapsed_ms as f64) as u64)).unwrap() - } - _ => write!(w, "-").unwrap(), - }, - ) - ); - } - - pub(crate) fn reset_style(&self) { - self.pb.set_style( - indicatif::ProgressStyle::with_template( - " {spinner:.dim} {prefix:20!} [{elapsed_precise}] {msg:.dim}", - ) - .unwrap(), - ); - } - - 
pub(crate) fn finish(&self) { - self.pb.set_style( - indicatif::ProgressStyle::with_template(&format!( - " {} {{prefix:20!}} [{{elapsed_precise}}]", - console::style(console::Emoji("✔", "↳")).green(), - )) - .unwrap(), - ); - self.pb.finish_and_clear(); - } - - fn purl_amend_reporter(self: &Arc<Self>) -> Arc<dyn pypi_mapping::Reporter> { - Arc::new(PurlAmendReporter { - pb: self.clone(), - style_set: AtomicBool::new(false), - }) - } -} - -struct PurlAmendReporter { - pb: Arc<SolveProgressBar>, - style_set: AtomicBool, -} - -impl pypi_mapping::Reporter for PurlAmendReporter { - fn download_started(&self, _package: &RepoDataRecord, total: usize) { - if !self.style_set.swap(true, Ordering::Relaxed) { - self.pb.set_update_style(total); - } - } - - fn download_finished(&self, _package: &RepoDataRecord, _total: usize) { - self.pb.inc(1); - } - - fn download_failed(&self, package: &RepoDataRecord, total: usize) { - self.download_finished(package, total); - } -} - -struct GatewayProgressReporter { - inner: Mutex<InnerProgressState>, -} - -impl GatewayProgressReporter { - pub(crate) fn new(pb: Arc<SolveProgressBar>) -> Self { - Self { - inner: Mutex::new(InnerProgressState { - pb, - downloads: VecDeque::new(), - - bytes_downloaded: 0, - total_bytes: 0, - total_pending_downloads: 0, - - jlap: VecDeque::default(), - total_pending_jlap: 0, - }), - } - } -} - -struct InnerProgressState { - pb: Arc<SolveProgressBar>, - - downloads: VecDeque<DownloadState>, - - bytes_downloaded: usize, - total_bytes: usize, - total_pending_downloads: usize, - - jlap: VecDeque<JLAPState>, - total_pending_jlap: usize, -} - -impl InnerProgressState { - fn update_progress(&self) { - if self.total_pending_downloads > 0 { - self.pb.set_bytes_update_style(self.total_bytes); - self.pb.set_position(self.bytes_downloaded as u64); - self.pb.set_message("downloading repodata"); - } else if self.total_pending_jlap > 0 { - self.pb.reset_style(); - self.pb.set_message("applying JLAP patches"); - } else { - self.pb.reset_style(); - self.pb.set_message("parsing repodata"); - } - } -} - -struct DownloadState { - _started_at: Instant, - bytes_downloaded: usize, - total_size: usize, - _finished_at: Option<Instant>, -} - -struct JLAPState { - _started_at: Instant, - _finished_at: Option<Instant>, -} - -impl rattler_repodata_gateway::Reporter for GatewayProgressReporter { - fn on_download_start(&self, _url: &Url) -> usize { - let mut inner = self.inner.lock(); - let download_idx = inner.downloads.len(); - inner.downloads.push_back(DownloadState { - _started_at: Instant::now(), - bytes_downloaded: 0, - total_size: 0, - _finished_at: None, - }); - inner.total_pending_downloads += 1; - inner.update_progress(); - download_idx - } - - fn on_download_progress( - &self, - _url: &Url, - index: usize, - bytes_downloaded: usize, - total_bytes: Option<usize>, - ) { - let mut inner = self.inner.lock(); - - let download = inner - .downloads - .get_mut(index) - .expect("download index should exist"); - - let prev_bytes_downloaded = download.bytes_downloaded; - let prev_total_size = download.total_size; - download.bytes_downloaded = bytes_downloaded; - download.total_size = total_bytes.unwrap_or(0); - - inner.bytes_downloaded = inner.bytes_downloaded + bytes_downloaded - prev_bytes_downloaded; - inner.total_bytes = inner.total_bytes + total_bytes.unwrap_or(0) - prev_total_size; - - inner.update_progress(); - } - - fn on_download_complete(&self, _url: &Url, _index: usize) { - let mut inner = self.inner.lock(); - let download = inner - .downloads - .get_mut(_index) - .expect("download index should exist"); - download._finished_at = Some(Instant::now()); - - inner.total_pending_downloads -= 1; - - 
inner.update_progress(); - } - - fn on_jlap_start(&self) -> usize { - let mut inner = self.inner.lock(); - - let index = inner.jlap.len(); - inner.jlap.push_back(JLAPState { - _started_at: Instant::now(), - _finished_at: None, - }); - inner.total_pending_jlap += 1; - - inner.update_progress(); - - index - } - - fn on_jlap_completed(&self, index: usize) { - let mut inner = self.inner.lock(); - let jlap = inner.jlap.get_mut(index).expect("jlap index should exist"); - jlap._finished_at = Some(Instant::now()); - inner.total_pending_jlap -= 1; - - inner.update_progress(); - } -} diff --git a/src/lock_file/utils.rs b/src/lock_file/utils.rs index 32b6e3911..131311b90 100644 --- a/src/lock_file/utils.rs +++ b/src/lock_file/utils.rs @@ -2,7 +2,7 @@ use std::sync::Arc; use pixi_manifest::FeaturesExt; use rattler_conda_types::Platform; -use rattler_lock::{LockFile, LockFileBuilder, Package}; +use rattler_lock::{LockFile, LockFileBuilder, LockedPackageRef}; use tokio::sync::Semaphore; use crate::{ @@ -28,9 +28,13 @@ impl From<IoConcurrencyLimit> for Arc<Semaphore> { } /// Constructs a new lock-file where some of the packages have been removed -pub fn filter_lock_file<'p, F: FnMut(&Environment<'p>, Platform, &Package) -> bool>( +pub fn filter_lock_file< + 'p, + 'lock, + F: FnMut(&Environment<'p>, Platform, LockedPackageRef<'lock>) -> bool, +>( project: &'p Project, - lock_file: &LockFile, + lock_file: &'lock LockFile, mut filter: F, ) -> LockFile { let mut builder = LockFileBuilder::new(); @@ -55,8 +59,8 @@ pub fn filter_lock_file<'p, F: FnMut(&Environment<'p>, Platform, &Package) -> bo // Copy all packages that don't need to be relaxed for (platform, packages) in environment.packages_by_platform() { for package in packages { - if filter(&project_env, platform, &package) { - builder.add_package(environment_name, platform, package); + if filter(&project_env, platform, package) { + builder.add_package(environment_name, platform, package.into()); } } } diff --git a/src/project/environment.rs b/src/project/environment.rs index 1243206b9..2ff1e2425 100644 --- a/src/project/environment.rs +++ b/src/project/environment.rs @@ -1,4 +1,3 @@ -use indexmap::IndexMap; use std::{ collections::{HashMap, HashSet}, fmt::Debug, @@ -7,6 +6,7 @@ use std::{ sync::Once, }; +use indexmap::IndexMap; use itertools::Either; use pixi_consts::consts; use pixi_manifest::{ @@ -87,7 +87,7 @@ impl<'p> Environment<'p> { .solve_group .map(|solve_group_idx| SolveGroup { project: self.project, - solve_group: &self.project.manifest.parsed.solve_groups[solve_group_idx], + solve_group: &self.project.manifest.workspace.solve_groups[solve_group_idx], }) } @@ -318,7 +318,7 @@ impl<'p> HasFeaturesIter<'p> for Environment<'p> { let manifest = self.manifest(); let environment_features = self.environment.features.iter().map(|feature_name| { manifest - .parsed + .workspace .features .get(&FeatureName::Named(feature_name.clone())) .expect("feature usage should have been validated upfront") @@ -498,7 +498,7 @@ mod tests { let deps = manifest .environment("foobar") .unwrap() - .dependencies(None, None); + .combined_dependencies(None); assert_snapshot!(format_dependencies(deps)); } diff --git a/src/project/mod.rs b/src/project/mod.rs index d129104d2..dabbbb970 100644 --- a/src/project/mod.rs +++ b/src/project/mod.rs @@ -32,13 +32,13 @@ use pixi_config::{Config, PinningStrategy}; use pixi_consts::consts; use pixi_manifest::{ pypi::PyPiPackageName, DependencyOverwriteBehavior, EnvironmentName, Environments, FeatureName, - FeaturesExt, HasFeaturesIter, HasManifestRef,
KnownPreviewFeature, Manifest, ParsedManifest, - PypiDependencyLocation, SpecType, + FeaturesExt, HasFeaturesIter, HasManifestRef, KnownPreviewFeature, Manifest, + PypiDependencyLocation, SpecType, WorkspaceManifest, }; use pixi_utils::reqwest::build_reqwest_clients; use pypi_mapping::{ChannelName, CustomMapping, MappingLocation, MappingSource}; use rattler_conda_types::{Channel, ChannelConfig, MatchSpec, PackageName, Platform, Version}; -use rattler_lock::{LockFile, Package}; +use rattler_lock::{LockFile, LockedPackageRef}; use rattler_repodata_gateway::Gateway; use reqwest_middleware::ClientWithMiddleware; pub use solve_group::SolveGroup; @@ -148,8 +148,8 @@ impl Debug for Project { } } -impl Borrow<ParsedManifest> for Project { - fn borrow(&self) -> &ParsedManifest { +impl Borrow<WorkspaceManifest> for Project { + fn borrow(&self) -> &WorkspaceManifest { self.manifest.borrow() } } @@ -157,7 +157,7 @@ impl Borrow<ParsedManifest> for Project { impl Project { /// Constructs a new instance from an internal manifest representation pub(crate) fn from_manifest(manifest: Manifest) -> Self { - let env_vars = Project::init_env_vars(&manifest.parsed.environments); + let env_vars = Project::init_env_vars(&manifest.workspace.environments); let root = manifest .path @@ -272,22 +272,17 @@ impl Project { /// Returns the name of the project pub fn name(&self) -> &str { - self.manifest - .parsed - .project - .name - .as_ref() - .expect("name should always be defined.") + &self.manifest.workspace.workspace.name } /// Returns the version of the project pub fn version(&self) -> &Option<Version> { - &self.manifest.parsed.project.version + &self.manifest.workspace.workspace.version } /// Returns the description of the project pub(crate) fn description(&self) -> &Option<String> { - &self.manifest.parsed.project.description + &self.manifest.workspace.workspace.description } /// Returns the root directory of the project @@ -412,7 +407,7 @@ impl Project { /// Returns the environments in this project. pub(crate) fn environments(&self) -> Vec<Environment> { self.manifest - .parsed + .workspace .environments .iter() .map(|env| Environment::new(self, env)) @@ -494,7 +489,7 @@ impl Project { /// Returns all the solve groups in the project. pub(crate) fn solve_groups(&self) -> Vec<SolveGroup> { self.manifest - .parsed + .workspace .solve_groups .iter() .map(|group| SolveGroup { @@ -508,7 +503,7 @@ impl Project { /// exists. pub(crate) fn solve_group(&self, name: &str) -> Option<SolveGroup> { self.manifest - .parsed + .workspace .solve_groups .find(name) .map(|group| SolveGroup { @@ -562,7 +557,7 @@ impl Project { manifest: &Manifest, channel_config: &ChannelConfig, ) -> miette::Result<MappingSource> { - match manifest.parsed.project.conda_pypi_map.clone() { + match manifest.workspace.workspace.conda_pypi_map.clone() { Some(map) => { let channel_to_location_map = map .into_iter() @@ -578,8 +573,8 @@ impl Project { } let project_channels: HashSet<_> = manifest - .parsed - .project + .workspace + .workspace .channels .iter() .map(|pc| pc.channel.clone().into_channel(channel_config)) @@ -587,7 +582,7 @@ impl Project { .into_diagnostic()?; let feature_channels: HashSet<_> = manifest - .parsed + .workspace .features .values() .flat_map(|feature| feature.channels.iter()) @@ -654,11 +649,14 @@ impl Project { &self.manifest } - /// Update the manifest with the given package specs, and upgrade the packages if possible + /// Update the manifest with the given package specs, and upgrade the + /// packages if possible /// - /// 1. Modify the manifest with the given package specs, if no version is given, use `no-pin` strategy + /// 1.
Modify the manifest with the given package specs, if no version is + /// given, use `no-pin` strategy /// 2. Update the lock file - /// 3. Given packages without version restrictions will get a semver restriction + /// 3. Given packages without version restrictions will get a semver + /// restriction #[allow(clippy::too_many_arguments)] pub async fn update_dependencies( &mut self, @@ -764,11 +762,14 @@ impl Project { uv_context, updated_conda_prefixes, updated_pypi_prefixes, + build_context, + glob_hash_cache, io_concurrency_limit, } = UpdateContext::builder(self) .with_lock_file(unlocked_lock_file) .with_no_install(prefix_update_config.no_install() || dry_run) - .finish()? + .finish() + .await? .update() .await?; @@ -810,6 +811,8 @@ impl Project { updated_pypi_prefixes, uv_context, io_concurrency_limit, + build_context, + glob_hash_cache, }; if !prefix_update_config.no_lockfile_update && !dry_run { updated_lock_file.write_to_disk()?; @@ -833,7 +836,8 @@ impl Project { })) } - /// Constructs a new lock-file where some of the constraints have been removed. + /// Constructs a new lock-file where some of the constraints have been + /// removed. fn unlock_packages( &self, lock_file: &LockFile, @@ -844,10 +848,10 @@ impl Project { filter_lock_file(self, lock_file, |env, platform, package| { if affected_environments.contains(&(env.name().as_str(), platform)) { match package { - Package::Conda(package) => { - !conda_packages.contains(&package.package_record().name) + LockedPackageRef::Conda(package) => { - !conda_packages.contains(&package.record().name) } - Package::Pypi(package) => !pypi_packages.contains(&package.data().package.name), + LockedPackageRef::Pypi(package, _env) => !pypi_packages.contains(&package.name), } } else { true @@ -855,8 +859,8 @@ impl Project { }) } - /// Update the conda specs of newly added packages based on the contents of the - /// updated lock-file. + /// Update the conda specs of newly added packages based on the contents of + /// the updated lock-file. fn update_conda_specs_from_lock_file( &mut self, updated_lock_file: &LockFile, @@ -874,9 +878,7 @@ impl Project { // platforms .filter_map(|(env, platform)| { let locked_env = updated_lock_file.environment(&env)?; - locked_env - .conda_repodata_records_for_platform(platform) - .ok()? + locked_env.conda_repodata_records(platform).ok()? }) .flatten() .collect_vec(); @@ -884,8 +886,9 @@ impl Project { let mut pinning_strategy = self.config().pinning_strategy; let channel_config = self.channel_config(); for (name, (spec_type, spec)) in conda_specs_to_add_constraints_for { - // Edge case: some packages are a special case where we want to pin the minor version by default. - // This is done to avoid early user confusion when the minor version changes and environments magically start breaking. + // Edge case: some packages are a special case where we want to pin the minor + // version by default. This is done to avoid early user confusion + // when the minor version changes and environments magically start breaking. // This moves a `>=3.13, <4` to a `>=3.13, <3.14` constraint. if NON_SEMVER_PACKAGES.contains(&name.as_normalized()) && pinning_strategy.is_none() { tracing::info!( @@ -925,8 +928,8 @@ impl Project { Ok(implicit_constraints) } - /// Update the pypi specs of newly added packages based on the contents of the - /// updated lock-file. + /// Update the pypi specs of newly added packages based on the contents of + /// the updated lock-file.
#[allow(clippy::too_many_arguments)] fn update_pypi_specs_from_lock_file( &mut self, @@ -940,14 +943,18 @@ impl Project { ) -> miette::Result> { let mut implicit_constraints = HashMap::new(); + let affect_environment_and_platforms = affect_environment_and_platforms + .iter() + .filter_map(|(env, platform)| { + updated_lock_file.environment(env).map(|e| (e, *platform)) + }) + .collect_vec(); + let pypi_records = affect_environment_and_platforms - .into_iter() // Get all the conda and pypi records for the combination of environments and // platforms - .filter_map(|(env, platform)| { - let locked_env = updated_lock_file.environment(&env)?; - locked_env.pypi_packages_for_platform(platform) - }) + .iter() + .filter_map(|(env, platform)| env.pypi_packages(*platform)) .flatten() .collect_vec(); @@ -993,18 +1000,12 @@ impl Project { /// Returns true if all preview features are enabled pub fn all_preview_features_enabled(&self) -> bool { - self.manifest - .preview() - .map(|preview| preview.all_enabled()) - .unwrap_or(false) + self.manifest.preview().all_enabled() } /// Returns true if the given preview feature is enabled pub fn is_preview_feature_enabled(&self, feature: KnownPreviewFeature) -> bool { - self.manifest - .preview() - .map(|preview| preview.is_enabled(feature)) - .unwrap_or(false) + self.manifest.preview().is_enabled(feature) } } @@ -1125,18 +1126,17 @@ fn write_warning_file(default_envs_dir: &PathBuf, envs_dir_name: &Path) { #[cfg(test)] mod tests { - use std::fs::File; - use std::io::Write; - use std::str::FromStr; + use std::{fs::File, io::Write, str::FromStr}; - use super::*; use insta::{assert_debug_snapshot, assert_snapshot}; use itertools::Itertools; - use pixi_manifest::{FeatureName, FeaturesExt}; + use pixi_manifest::FeatureName; use rattler_conda_types::Platform; use rattler_virtual_packages::{LibC, VirtualPackage}; use tempfile::tempdir; + use super::*; + const PROJECT_BOILERPLATE: &str = r#" [project] name = "foo" @@ -1215,7 +1215,44 @@ mod tests { assert_snapshot!(format_dependencies( project .default_environment() - .dependencies(None, Some(Platform::Linux64)) + .combined_dependencies(Some(Platform::Linux64)) + )); + } + + #[test] + #[ignore] + fn test_dependency_set_with_build_section() { + let file_contents = r#" + [project] + name = "foo" + version = "0.1.0" + channels = [] + platforms = ["linux-64", "win-64"] + preview = ["pixi-build"] + [dependencies] + foo = "1.0" + + [package] + + [build-system] + channels = [] + dependencies = [] + build-backend = "foobar" + + [host-dependencies] + libc = "2.12" + + [build-dependencies] + bar = "1.0" + "#; + + let manifest = Manifest::from_str(Path::new("pixi.toml"), file_contents).unwrap(); + let project = Project::from_manifest(manifest); + + assert_snapshot!(format_dependencies( + project + .default_environment() + .combined_dependencies(Some(Platform::Linux64)) )); } @@ -1250,7 +1287,7 @@ mod tests { assert_snapshot!(format_dependencies( project .default_environment() - .dependencies(None, Some(Platform::Linux64)) + .combined_dependencies(Some(Platform::Linux64)) )); } diff --git a/src/project/snapshots/pixi__project__tests__dependency_set_with_build_section.snap b/src/project/snapshots/pixi__project__tests__dependency_set_with_build_section.snap new file mode 100644 index 000000000..21821c8df --- /dev/null +++ b/src/project/snapshots/pixi__project__tests__dependency_set_with_build_section.snap @@ -0,0 +1,5 @@ +--- +source: src/project/mod.rs +expression: 
"format_dependencies(project.default_environment().environment_dependencies(Some(Platform::Linux64)))" +--- +foo = "==1.0" diff --git a/src/project/solve_group.rs b/src/project/solve_group.rs index b3217d61a..40955ddf3 100644 --- a/src/project/solve_group.rs +++ b/src/project/solve_group.rs @@ -1,11 +1,11 @@ use std::{hash::Hash, path::PathBuf}; use itertools::Itertools; - -use super::{Environment, HasProjectRef, Project}; use pixi_manifest as manifest; use pixi_manifest::{FeaturesExt, HasFeaturesIter, HasManifestRef, Manifest, SystemRequirements}; +use super::{Environment, HasProjectRef, Project}; + /// A grouping of environments that are solved together. #[derive(Debug, Clone)] pub struct SolveGroup<'p> { @@ -53,7 +53,7 @@ impl<'p> SolveGroup<'p> { self.solve_group.environments.iter().map(|env_idx| { Environment::new( self.project, - &self.project.manifest.parsed.environments[*env_idx], + &self.project.manifest.workspace.environments[*env_idx], ) }) } @@ -96,10 +96,10 @@ mod tests { use std::{collections::HashSet, path::Path}; use itertools::Itertools; + use pixi_manifest::FeaturesExt; use rattler_conda_types::PackageName; use crate::Project; - use pixi_manifest::FeaturesExt; #[test] fn test_solve_group() { @@ -172,7 +172,7 @@ mod tests { // Check that the solve group 'group1' contains all the dependencies of its // environments let package_names: HashSet<_> = solve_group - .dependencies(None, None) + .combined_dependencies(None) .names() .cloned() .collect(); @@ -189,7 +189,7 @@ mod tests { // default environment let solve_group = solve_groups[1].clone(); let package_names: HashSet<_> = solve_group - .dependencies(None, None) + .combined_dependencies(None) .names() .cloned() .collect(); diff --git a/src/task/task_hash.rs b/src/task/task_hash.rs index e5a455dab..0b3ab405c 100644 --- a/src/task/task_hash.rs +++ b/src/task/task_hash.rs @@ -77,7 +77,7 @@ impl EnvironmentHash { if let Some(env) = lock_file.environment(run_environment.name().as_str()) { if let Some(packages) = env.packages(run_environment.best_platform()) { for package in packages { - urls.push(package.url_or_path().into_owned().to_string()) + urls.push(package.location().to_string()) } } } diff --git a/src/utils.rs b/src/utils.rs new file mode 100644 index 000000000..0c076e29e --- /dev/null +++ b/src/utils.rs @@ -0,0 +1,37 @@ +use std::path::Path; + +use thiserror::Error; + +#[derive(Debug, Error)] +pub enum MoveError { + #[error(transparent)] + CopyFailed(std::io::Error), + + #[error(transparent)] + FailedToRemove(std::io::Error), + + #[error(transparent)] + MoveFailed(std::io::Error), +} + +#[cfg(unix)] +const EXDEV: i32 = 18; + +#[cfg(windows)] +const EXDEV: i32 = 17; + +/// A utility function to move a file from one location to another by renaming +/// the file if possible and otherwise copying the file and removing the +/// original. +pub(crate) fn move_file(from: &Path, to: &Path) -> Result<(), MoveError> { + if let Err(e) = std::fs::rename(from, to) { + if e.raw_os_error() == Some(EXDEV) { + std::fs::copy(from, to).map_err(MoveError::CopyFailed)?; + std::fs::remove_file(from).map_err(MoveError::FailedToRemove)? 
+ } else { + return Err(MoveError::MoveFailed(e)); + } + } + + Ok(()) +} diff --git a/tests/data/non-satisfiability/binary-spec-source-record/pixi.lock b/tests/data/non-satisfiability/binary-spec-source-record/pixi.lock new file mode 100644 index 000000000..1bb64999a --- /dev/null +++ b/tests/data/non-satisfiability/binary-spec-source-record/pixi.lock @@ -0,0 +1,60 @@ +version: 6 +environments: + default: + channels: + - url: https://conda.anaconda.org/conda-forge/ + packages: + win-64: + - conda: https://conda.anaconda.org/conda-forge/win-64/ucrt-10.0.22621.0-h57928b3_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/vc-14.3-ha32ba9b_22.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/vc14_runtime-14.40.33810-hcc2c482_22.conda + - conda: source +packages: +- conda: source + name: source + version: 0.1.0 + build: hbf21a9e_0 + subdir: win-64 + depends: + - vc >=14.1,<15 + - vc14_runtime >=14.16.27033 + input: + hash: a3c04eab345d02c20c544f694526f0ed92e07d7b0ebfe0f55ad5e6b752259223 + globs: + - pixi.toml +- conda: https://conda.anaconda.org/conda-forge/win-64/ucrt-10.0.22621.0-h57928b3_1.conda + sha256: db8dead3dd30fb1a032737554ce91e2819b43496a0db09927edf01c32b577450 + md5: 6797b005cd0f439c4c5c9ac565783700 + constrains: + - vs2015_runtime >=14.29.30037 + arch: x86_64 + platform: win + license: LicenseRef-MicrosoftWindowsSDK10 + size: 559710 + timestamp: 1728377334097 +- conda: https://conda.anaconda.org/conda-forge/win-64/vc-14.3-ha32ba9b_22.conda + sha256: 2a47c5bd8bec045959afada7063feacd074ad66b170c1ea92dd139b389fcf8fd + md5: 311c9ba1dfdd2895a8cb08346ff26259 + depends: + - vc14_runtime >=14.38.33135 + arch: x86_64 + platform: win + track_features: + - vc14 + license: BSD-3-Clause + license_family: BSD + size: 17447 + timestamp: 1728400826998 +- conda: https://conda.anaconda.org/conda-forge/win-64/vc14_runtime-14.40.33810-hcc2c482_22.conda + sha256: 4c669c65007f88a7cdd560192f7e6d5679d191ac71610db724e18b2410964d64 + md5: ce23a4b980ee0556a118ed96550ff3f3 + depends: + - ucrt >=10.0.20348.0 + constrains: + - vs2015_runtime 14.40.33810.* *_22 + arch: x86_64 + platform: win + license: LicenseRef-MicrosoftVisualCpp2015-2022Runtime + license_family: Proprietary + size: 750719 + timestamp: 1728401055788 diff --git a/tests/data/non-satisfiability/binary-spec-source-record/pixi.toml b/tests/data/non-satisfiability/binary-spec-source-record/pixi.toml new file mode 100644 index 000000000..619e04bf9 --- /dev/null +++ b/tests/data/non-satisfiability/binary-spec-source-record/pixi.toml @@ -0,0 +1,8 @@ +[project] +channels = ["conda-forge"] +name = "binary-spec-source-record" +platforms = ["win-64"] +preview = ["pixi-build"] + +[dependencies] +source = "*" diff --git a/tests/data/non-satisfiability/expected-editable-multiple/pixi.lock b/tests/data/non-satisfiability/expected-editable-multiple/pixi.lock index 8dc416498..be0174414 100644 --- a/tests/data/non-satisfiability/expected-editable-multiple/pixi.lock +++ b/tests/data/non-satisfiability/expected-editable-multiple/pixi.lock @@ -28,12 +28,12 @@ packages: name: bar version: 0.1.0 path: ./bar - sha256: 8f77543104fe37f9893e1669ff08c452562c0cfa05e925287d9f1c0f21737a50 + sha256: 0398aa324f09d68612f7545e7a35ef8b6f660fc695cadcf31929825e987d01da - kind: pypi name: baz version: 0.1.0 path: ./baz - sha256: 152f61c74691f968a30dbe3d6835356afea1e16d6eaeb71a2d1ba284e9b92d3a + sha256: bc8b1ca288df1e002daeb40a76f40829545ae7f40ed9186ec334dfa5a88414bf editable: true - kind: conda name: bzip2 @@ -67,7 +67,7 @@ packages: name: foo version: 
0.1.0 path: ./foo - sha256: 82242d45e33bf66048a9f4c19ee5e217c27ea0245cc7a1df22fa15ba75737beb + sha256: 52abd6609b8936ca84feba7f23519d94cd578acbda172c903df6927f012d310f - kind: conda name: libexpat version: 2.6.2 diff --git a/tests/data/non-satisfiability/expected-editable/pixi.lock b/tests/data/non-satisfiability/expected-editable/pixi.lock index 04420c907..58e55e6e7 100644 --- a/tests/data/non-satisfiability/expected-editable/pixi.lock +++ b/tests/data/non-satisfiability/expected-editable/pixi.lock @@ -54,7 +54,7 @@ packages: name: foo version: 0.1.0 path: ./foo - sha256: 82242d45e33bf66048a9f4c19ee5e217c27ea0245cc7a1df22fa15ba75737beb + sha256: 52abd6609b8936ca84feba7f23519d94cd578acbda172c903df6927f012d310f - kind: conda name: libexpat version: 2.6.2 diff --git a/tests/data/non-satisfiability/source-dependency/child-package/pixi.toml b/tests/data/non-satisfiability/source-dependency/child-package/pixi.toml new file mode 100644 index 000000000..b434ee635 --- /dev/null +++ b/tests/data/non-satisfiability/source-dependency/child-package/pixi.toml @@ -0,0 +1,8 @@ +[project] +channels = ["conda-forge"] +name = "child-package" +platforms = ["win-64"] +version = "0.1.0" + +[dependencies] +extra-dependency = "*" diff --git a/tests/data/non-satisfiability/source-dependency/pixi.lock b/tests/data/non-satisfiability/source-dependency/pixi.lock new file mode 100644 index 000000000..37fce18fc --- /dev/null +++ b/tests/data/non-satisfiability/source-dependency/pixi.lock @@ -0,0 +1,30 @@ +version: 6 +environments: + default: + channels: + - url: https://conda.anaconda.org/conda-forge/ + packages: + win-64: + - conda: https://conda.anaconda.org/conda-forge/win-64/python-3.12.0-h2628c8c_0_cpython.conda + - conda: child-package +packages: +- conda: child-package + name: child-package + version: 0.1.0 + build: pyhbf21a9e_0 + subdir: noarch + depends: + - python + input: + hash: b67010bf5bc5608db89c0399e726852b07a7ef4fb26b3aa18171f1d0f6a19c89 + globs: + - pixi.toml +- conda: https://conda.anaconda.org/conda-forge/win-64/python-3.12.0-h2628c8c_0_cpython.conda + sha256: 90553586879bf328f2f9efb8d8faa958ecba822faf379f0a20c3461467b9b955 + md5: defd5d375853a2caff36a19d2d81a28e + arch: x86_64 + platform: win + channel: https://conda.anaconda.org/conda-forge/ + license: Python-2.0 + size: 16140836 + timestamp: 1696321871976 diff --git a/tests/data/non-satisfiability/source-dependency/pixi.toml b/tests/data/non-satisfiability/source-dependency/pixi.toml new file mode 100644 index 000000000..05314474e --- /dev/null +++ b/tests/data/non-satisfiability/source-dependency/pixi.toml @@ -0,0 +1,9 @@ +[project] +channels = ["conda-forge"] +name = "source-dependency" +platforms = ["win-64"] +preview = ["pixi-build"] +version = "0.1.0" + +[dependencies] +child-package = { path = "child-package" } diff --git a/tests/data/satisfiability/absolute-paths/pixi.lock b/tests/data/satisfiability/absolute-paths/pixi.lock index 29002bd4d..9488e0960 100644 --- a/tests/data/satisfiability/absolute-paths/pixi.lock +++ b/tests/data/satisfiability/absolute-paths/pixi.lock @@ -65,7 +65,7 @@ packages: name: foo version: 0.1.0 path: ./projects/foo - sha256: 4c7329c4617e93d0bb49490a455a924edbb83b61807cf74183d67c13cb80b2b8 + sha256: c0313922ab478848f74a8d51c223f0bd2749d43fd2001f318f65938381b847c5 requires_dist: - requests @ file:///Users/tdejager/development/prefix/pixi/tests/satisfiability/absolute-paths/projects/requests-2.31.0-py3-none-any.whl requires_python: '>=3.11' diff --git a/tests/data/satisfiability/editable-non-editable/pixi.lock 
b/tests/data/satisfiability/editable-non-editable/pixi.lock index ea4850122..b9aad3b80 100644 --- a/tests/data/satisfiability/editable-non-editable/pixi.lock +++ b/tests/data/satisfiability/editable-non-editable/pixi.lock @@ -27,7 +27,7 @@ packages: name: bar version: 0.1.0 path: ./bar - sha256: 42b3134f7b848892b70d479433af4fe4c08926f343959b8e4ec5f2e011b98225 + sha256: a15c6f6e3f1d3b572750d1e07aa1f6d4f78bd6cedad23d8686b8070b5842d287 editable: true - kind: conda name: bzip2 @@ -61,7 +61,7 @@ packages: name: foo version: 0.1.0 path: ./foo - sha256: cc07a07a094b9279961e72ee32807416b9e7dd7ead4049ed923e3d65a5084aed + sha256: 4c02a56d3fefcc603952a33463c51455bfed58f0ebaca70694b9d6ee17f5ebb2 requires_dist: - bar editable: true diff --git a/tests/data/satisfiability/pypi-extras/pixi.lock b/tests/data/satisfiability/pypi-extras/pixi.lock index 6dea8e2f8..d839ea989 100644 --- a/tests/data/satisfiability/pypi-extras/pixi.lock +++ b/tests/data/satisfiability/pypi-extras/pixi.lock @@ -64,7 +64,7 @@ packages: name: has-extras version: 0.1.0 path: has-extras - sha256: 87db81cf86f2a54190b0965b12ead65a4b7310265c2f451449b4e5b9c5f51ec9 + sha256: a3ced8eac37c7eae6ed05e9e836e5fe188f211e11df2c3db62bc146a1761e300 requires_dist: - pinject ; extra == 'pinject' - kind: conda @@ -238,7 +238,7 @@ packages: name: use-with-extras version: 0.1.0 path: use-with-extras - sha256: 534a6eeb01c299cf7c3a940c01dda3d1df9f69c4bba81899c82892b79289f2dc + sha256: 9a29a7aeb7cde4c15e4f500c2c2f5a1476ec3361420e6e333b2f3e172a55e818 requires_dist: - has-extras[pinject] - kind: conda diff --git a/tests/data/satisfiability/source-dependency/child-package/pixi.toml b/tests/data/satisfiability/source-dependency/child-package/pixi.toml new file mode 100644 index 000000000..7d3129f81 --- /dev/null +++ b/tests/data/satisfiability/source-dependency/child-package/pixi.toml @@ -0,0 +1,7 @@ +[project] +channels = ["conda-forge"] +name = "child-package" +platforms = ["win-64"] +version = "0.1.0" + +[dependencies] diff --git a/tests/data/satisfiability/source-dependency/pixi.lock b/tests/data/satisfiability/source-dependency/pixi.lock new file mode 100644 index 000000000..37fce18fc --- /dev/null +++ b/tests/data/satisfiability/source-dependency/pixi.lock @@ -0,0 +1,30 @@ +version: 6 +environments: + default: + channels: + - url: https://conda.anaconda.org/conda-forge/ + packages: + win-64: + - conda: https://conda.anaconda.org/conda-forge/win-64/python-3.12.0-h2628c8c_0_cpython.conda + - conda: child-package +packages: +- conda: child-package + name: child-package + version: 0.1.0 + build: pyhbf21a9e_0 + subdir: noarch + depends: + - python + input: + hash: b67010bf5bc5608db89c0399e726852b07a7ef4fb26b3aa18171f1d0f6a19c89 + globs: + - pixi.toml +- conda: https://conda.anaconda.org/conda-forge/win-64/python-3.12.0-h2628c8c_0_cpython.conda + sha256: 90553586879bf328f2f9efb8d8faa958ecba822faf379f0a20c3461467b9b955 + md5: defd5d375853a2caff36a19d2d81a28e + arch: x86_64 + platform: win + channel: https://conda.anaconda.org/conda-forge/ + license: Python-2.0 + size: 16140836 + timestamp: 1696321871976 diff --git a/tests/data/satisfiability/source-dependency/pixi.toml b/tests/data/satisfiability/source-dependency/pixi.toml new file mode 100644 index 000000000..05314474e --- /dev/null +++ b/tests/data/satisfiability/source-dependency/pixi.toml @@ -0,0 +1,9 @@ +[project] +channels = ["conda-forge"] +name = "source-dependency" +platforms = ["win-64"] +preview = ["pixi-build"] +version = "0.1.0" + +[dependencies] +child-package = { path = "child-package" } diff 
--git a/tests/integration_python/build/__init__.py b/tests/integration_python/build/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/integration_python/build/conftest.py b/tests/integration_python/build/conftest.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/integration_python/build/recipes/boltons_recipe.yaml b/tests/integration_python/build/recipes/boltons_recipe.yaml new file mode 100644 index 000000000..357ce46cb --- /dev/null +++ b/tests/integration_python/build/recipes/boltons_recipe.yaml @@ -0,0 +1,35 @@ +# yaml-language-server: $schema=https://raw.githubusercontent.com/prefix-dev/recipe-format/main/schema.json + +context: + version: "23.0.0" + + +package: + name: boltons-with-extra + version: ${{ version }} + +source: + url: https://github.com/mahmoud/boltons/archive/refs/tags/${{ version }}.tar.gz + sha256: 9b2998cd9525ed472079c7dd90fbd216a887202e8729d5969d4f33878f0ff668 + +build: + noarch: python + script: + - python -m pip install . --no-deps -vv + +requirements: + host: + # - if: linux + # then: + - python + - pip + - setuptools + # - numpy + # - ${{ stdlib('c') }} + run: + - pip + # - ${{ pin_compatible('numpy', min_pin='x.x', max_pin='x') }} + +about: + license: BSD-3-Clause + license_file: LICENSE diff --git a/tests/integration_python/build/test_build.py b/tests/integration_python/build/test_build.py new file mode 100644 index 000000000..3c22968c1 --- /dev/null +++ b/tests/integration_python/build/test_build.py @@ -0,0 +1,137 @@ +from pathlib import Path +import shutil +import tomllib + +import tomli_w + +from ..common import verify_cli_command + + +def test_build_conda_package(pixi: Path, tmp_path: Path) -> None: + manifest_path = tmp_path / "pyproject.toml" + + # Create a new project + verify_cli_command([pixi, "init", tmp_path, "--format", "pyproject"]) + + # Add a boltons package to it + verify_cli_command( + [ + pixi, + "add", + "boltons", + "--manifest-path", + manifest_path, + ], + ) + + parsed_manifest = tomllib.loads(manifest_path.read_text()) + parsed_manifest["tool"]["pixi"]["project"]["preview"] = ["pixi-build"] + parsed_manifest["tool"]["pixi"]["host-dependencies"] = {"hatchling": "*"} + parsed_manifest["tool"]["pixi"]["build-system"] = { + "build-backend": "pixi-build-python", + "channels": [ + "https://repo.prefix.dev/pixi-build-backends", + "https://repo.prefix.dev/conda-forge", + ], + "dependencies": ["pixi-build-python"], + } + + manifest_path.write_text(tomli_w.dumps(parsed_manifest)) + # Build it + verify_cli_command( + [pixi, "build", "--manifest-path", manifest_path, "--output-dir", manifest_path.parent] + ) + + # Really make sure that a conda package was built + package_to_be_built = next(manifest_path.parent.glob("*.conda")) + + assert package_to_be_built.exists() + + +def test_build_using_rattler_build_backend(pixi: Path, tmp_path: Path) -> None: + manifest_path = tmp_path / "pixi.toml" + + # Create a new project + verify_cli_command([pixi, "init", tmp_path]) + + parsed_manifest = tomllib.loads(manifest_path.read_text()) + parsed_manifest["project"]["preview"] = ["pixi-build"] + parsed_manifest["host-dependencies"] = {"hatchling": "*"} + parsed_manifest["build-system"] = { + "build-backend": "pixi-build-rattler-build", + "channels": [ + "https://repo.prefix.dev/pixi-build-backends", + "https://repo.prefix.dev/conda-forge", + ], + "dependencies": ["pixi-build-rattler-build"], + } + manifest_path.write_text(tomli_w.dumps(parsed_manifest)) + + # Now copy recipe.yaml to the project
shutil.copy(Path(__file__).parent / "recipes" / "boltons_recipe.yaml", tmp_path / "recipe.yaml") + + # Running pixi build should build the recipe.yaml + verify_cli_command( + [pixi, "build", "--manifest-path", manifest_path, "--output-dir", manifest_path.parent], + ) + + # Really make sure that a conda package was built + package_to_be_built = next(manifest_path.parent.glob("*.conda")) + + assert "boltons-with-extra" in package_to_be_built.name + assert package_to_be_built.exists() + + +def test_build_conda_package_ignoring_recipe(pixi: Path, tmp_path: Path) -> None: + manifest_path = tmp_path / "pyproject.toml" + + # Create a new project + verify_cli_command([pixi, "init", tmp_path, "--format", "pyproject"]) + + # Add a boltons package to it + verify_cli_command( + [ + pixi, + "add", + "boltons", + "--manifest-path", + manifest_path, + ], + ) + + parsed_manifest = tomllib.loads(manifest_path.read_text()) + parsed_manifest["tool"]["pixi"]["project"]["preview"] = ["pixi-build"] + parsed_manifest["tool"]["pixi"]["host-dependencies"] = {"hatchling": "*"} + parsed_manifest["tool"]["pixi"]["build-system"] = { + "build-backend": "pixi-build-python", + "channels": [ + "https://repo.prefix.dev/pixi-build-backends", + "https://repo.prefix.dev/conda-forge", + ], + "dependencies": ["pixi-build-python"], + } + + # Now copy recipe.yaml to the project + shutil.copy(Path(__file__).parent / "recipes" / "boltons_recipe.yaml", tmp_path / "recipe.yaml") + + manifest_path.write_text(tomli_w.dumps(parsed_manifest)) + # Build it + verify_cli_command( + [ + pixi, + "build", + "--manifest-path", + manifest_path, + "--output-dir", + manifest_path.parent, + "--ignore-recipe", + ] + ) + + # Really make sure that a conda package was built + package_to_be_built = next(manifest_path.parent.glob("*.conda")) + # Our recipe has the boltons-with-extra name, so we need to be sure that we are building the `pixi.toml` + # and not the recipe + assert "test_build_conda_package" in package_to_be_built.name + + assert package_to_be_built.exists() diff --git a/tests/integration_rust/add_tests.rs b/tests/integration_rust/add_tests.rs index 2be8ee37f..ffca9726b 100644 --- a/tests/integration_rust/add_tests.rs +++ b/tests/integration_rust/add_tests.rs @@ -95,7 +95,7 @@ async fn add_with_channel() { let project = Project::from_path(pixi.manifest_path().as_path()).unwrap(); let mut specs = project .default_environment() - .dependencies(Some(SpecType::Run), Some(Platform::current())) + .combined_dependencies(Some(Platform::current())) .into_specs(); let (name, spec) = specs.next().unwrap(); @@ -159,17 +159,17 @@ async fn add_functionality_union() { // Should contain all added dependencies let dependencies = project .default_environment() - .dependencies(Some(SpecType::Run), Some(Platform::current())); + .dependencies(SpecType::Run, Some(Platform::current())); let (name, _) = dependencies.into_specs().next().unwrap(); assert_eq!(name, PackageName::try_from("rattler").unwrap()); let host_deps = project .default_environment() - .dependencies(Some(SpecType::Host), Some(Platform::current())); + .dependencies(SpecType::Host, Some(Platform::current())); let (name, _) = host_deps.into_specs().next().unwrap(); assert_eq!(name, PackageName::try_from("libcomputer").unwrap()); let build_deps = project .default_environment() - .dependencies(Some(SpecType::Build), Some(Platform::current())); + .dependencies(SpecType::Build, Some(Platform::current())); let (name, _) = build_deps.into_specs().next().unwrap(); assert_eq!(name,
PackageName::try_from("libidk").unwrap()); @@ -493,7 +493,7 @@ async fn add_unconstrainted_dependency() { let foo_spec = project .manifest() .default_feature() - .dependencies(None, None) + .combined_dependencies(None) .unwrap_or_default() .get("foobar") .cloned() @@ -506,7 +506,7 @@ async fn add_unconstrainted_dependency() { .manifest() .feature("unreferenced") .expect("feature 'unreferenced' is missing") - .dependencies(None, None) + .combined_dependencies(None) .unwrap_or_default() .get("bar") .cloned() @@ -543,7 +543,7 @@ async fn pinning_dependency() { let python_spec = project .manifest() .default_feature() - .dependencies(None, None) + .dependencies(SpecType::Run, None) .unwrap_or_default() .get("python") .cloned() @@ -558,7 +558,7 @@ async fn pinning_dependency() { let foobar_spec = project .manifest() .default_feature() - .dependencies(None, None) + .dependencies(SpecType::Run, None) .unwrap_or_default() .get("foobar") .cloned() @@ -573,7 +573,7 @@ async fn pinning_dependency() { let python_spec = project .manifest() .default_feature() - .dependencies(None, None) + .dependencies(SpecType::Run, None) .unwrap_or_default() .get("python") .cloned() diff --git a/tests/integration_rust/common/mod.rs b/tests/integration_rust/common/mod.rs index e8300a478..1f777797a 100644 --- a/tests/integration_rust/common/mod.rs +++ b/tests/integration_rust/common/mod.rs @@ -16,28 +16,25 @@ use miette::{Context, Diagnostic, IntoDiagnostic}; use pixi::{ cli::{ add, - cli_config::{PrefixUpdateConfig, ProjectConfig}, + cli_config::{ChannelsConfig, PrefixUpdateConfig, ProjectConfig}, init::{self, GitAttributes}, install::Args, - project, remove, run, + project, remove, run, search, task::{self, AddArgs, AliasArgs}, update, LockFileUsageArgs, }, + lock_file::UpdateMode, task::{ get_task_env, ExecutableTask, RunOutput, SearchEnvironments, TaskExecutionError, TaskGraph, TaskGraphError, TaskName, }, Project, UpdateLockFileOptions, }; -use pixi::{ - cli::{cli_config::ChannelsConfig, search}, - lock_file::UpdateMode, -}; use pixi_consts::consts; use pixi_manifest::{EnvironmentName, FeatureName}; use pixi_progress::global_multi_progress; use rattler_conda_types::{MatchSpec, ParseStrictness::Lenient, Platform}; -use rattler_lock::{LockFile, Package, UrlOrPath}; +use rattler_lock::{LockFile, LockedPackageRef, UrlOrPath}; use tempfile::TempDir; use thiserror::Error; @@ -124,8 +121,8 @@ impl LockFileExt for LockFile { .packages(platform) .into_iter() .flatten() - .filter_map(Package::into_conda) - .any(|package| package.package_record().name.as_normalized() == name); + .filter_map(LockedPackageRef::as_conda) + .any(|package| package.record().name.as_normalized() == name); package_found } fn contains_pypi_package(&self, environment: &str, platform: Platform, name: &str) -> bool { @@ -136,8 +133,8 @@ impl LockFileExt for LockFile { .packages(platform) .into_iter() .flatten() - .filter_map(Package::into_pypi) - .any(|pkg| pkg.data().package.name.as_ref() == name); + .filter_map(LockedPackageRef::as_pypi) + .any(|(data, _)| data.name.as_ref() == name); package_found } @@ -155,7 +152,7 @@ impl LockFileExt for LockFile { .packages(platform) .into_iter() .flatten() - .filter_map(Package::into_conda) + .filter_map(LockedPackageRef::as_conda) .any(move |p| p.satisfies(&match_spec)); package_found } @@ -173,8 +170,8 @@ impl LockFileExt for LockFile { .packages(platform) .into_iter() .flatten() - .filter_map(Package::into_pypi) - .any(move |p| p.satisfies(&requirement)); + .filter_map(LockedPackageRef::as_pypi) + .any(move 
|(data, _)| data.satisfies(&requirement)); package_found } @@ -186,10 +183,11 @@ impl LockFileExt for LockFile { ) -> Option { self.environment(environment) .and_then(|env| { - env.packages(platform) - .and_then(|mut packages| packages.find(|p| p.name() == package)) + env.pypi_packages(platform).and_then(|mut packages| { + packages.find(|(data, _)| data.name.as_ref() == package) + }) }) - .map(|p| p.version().to_string()) + .map(|(data, _)| data.version.to_string()) } fn get_pypi_package_url( @@ -203,7 +201,7 @@ impl LockFileExt for LockFile { env.packages(platform) .and_then(|mut packages| packages.find(|p| p.name() == package)) }) - .map(|p| p.url_or_path().into_owned()) + .map(|p| p.location().clone()) } } diff --git a/tests/integration_rust/install_tests.rs b/tests/integration_rust/install_tests.rs index 76b369a08..9422df95a 100644 --- a/tests/integration_rust/install_tests.rs +++ b/tests/integration_rust/install_tests.rs @@ -18,6 +18,7 @@ use std::{ path::{Path, PathBuf}, str::FromStr, }; + use tempfile::TempDir; use uv_python::PythonEnvironment; @@ -561,7 +562,8 @@ async fn test_installer_name() { #[tokio::test(flavor = "multi_thread", worker_threads = 1)] #[cfg_attr(not(feature = "slow_integration_tests"), ignore)] /// Test full prefix install for an old lock file to see if it still works. -/// Makes sure the lockfile isn't touched and the environment is still installed. +/// Makes sure the lockfile isn't touched and the environment is still +/// installed. async fn test_old_lock_install() { let lock_str = std::fs::read_to_string("tests/data/satisfiability/old_lock_file/pixi.lock").unwrap(); diff --git a/tests/integration_rust/project_tests.rs b/tests/integration_rust/project_tests.rs index abbd073ae..afe1b1057 100644 --- a/tests/integration_rust/project_tests.rs +++ b/tests/integration_rust/project_tests.rs @@ -86,7 +86,7 @@ async fn parse_project() { fn dependency_names(project: &Project, platform: Platform) -> Vec { project .default_environment() - .dependencies(None, Some(platform)) + .combined_dependencies(Some(platform)) .iter() .map(|dep| dep.0.as_normalized().to_string()) .collect() diff --git a/tests/integration_rust/pypi_tests.rs b/tests/integration_rust/pypi_tests.rs index e4f923c6a..6373bc76f 100644 --- a/tests/integration_rust/pypi_tests.rs +++ b/tests/integration_rust/pypi_tests.rs @@ -1,9 +1,11 @@ use std::path::Path; -use crate::common::{LockFileExt, PixiControl}; use rattler_conda_types::Platform; +use typed_path::Utf8TypedPath; use url::Url; +use crate::common::{LockFileExt, PixiControl}; + #[tokio::test] #[cfg_attr(not(feature = "slow_integration_tests"), ignore)] async fn test_flat_links_based_index_returns_path() { @@ -36,7 +38,7 @@ async fn test_flat_links_based_index_returns_path() { .unwrap() .as_path() .unwrap(), - pypi_indexes + Utf8TypedPath::from(&*pypi_indexes.as_os_str().to_string_lossy()) .join("multiple-indexes-a") .join("flat") .join("foo-1.0.0-py2.py3-none-any.whl") @@ -78,7 +80,7 @@ async fn test_file_based_index_returns_path() { .unwrap() .as_path() .unwrap(), - pypi_indexes + Utf8TypedPath::from(&*pypi_indexes.as_os_str().to_string_lossy()) .join("multiple-indexes-a/index/foo") .join("foo-1.0.0-py2.py3-none-any.whl") ); @@ -197,7 +199,7 @@ async fn test_pinning_index() { .unwrap() .as_path() .unwrap(), - pypi_indexes + Utf8TypedPath::from(&*pypi_indexes.as_os_str().to_string_lossy()) .join("multiple-indexes-a/index/foo") .join("foo-1.0.0-py2.py3-none-any.whl") ); diff --git a/tests/integration_rust/solve_group_tests.rs 
b/tests/integration_rust/solve_group_tests.rs index 737c08c01..b72efa909 100644 --- a/tests/integration_rust/solve_group_tests.rs +++ b/tests/integration_rust/solve_group_tests.rs @@ -111,10 +111,8 @@ async fn test_purl_are_added_for_pypi() { .packages(Platform::current()) .unwrap() .for_each(|dep| { - if dep.as_conda().unwrap().package_record().name - == PackageName::from_str("boltons").unwrap() - { - assert!(dep.as_conda().unwrap().package_record().purls.is_none()); + if dep.as_conda().unwrap().record().name == PackageName::from_str("boltons").unwrap() { + assert!(dep.as_conda().unwrap().record().purls.is_none()); } }); @@ -134,13 +132,11 @@ async fn test_purl_are_added_for_pypi() { .packages(Platform::current()) .unwrap() .for_each(|dep| { - if dep.as_conda().unwrap().package_record().name - == PackageName::from_str("boltons").unwrap() - { - assert!( + if dep.as_conda().unwrap().record().name == PackageName::from_str("boltons").unwrap() { + assert_eq!( dep.as_conda() .unwrap() - .package_record() + .record() .purls .as_ref() .unwrap() @@ -148,8 +144,8 @@ async fn test_purl_are_added_for_pypi() { .unwrap() .qualifiers() .get("source") - .unwrap() - == PurlSource::HashMapping.as_str() + .unwrap(), + PurlSource::HashMapping.as_str() ); } }); @@ -180,7 +176,7 @@ async fn test_purl_are_missing_for_non_conda_forge() { package_record: foo_bar_package.package_record, file_name: "foo-bar-car".to_owned(), url: Url::parse("https://pypi.org/simple/boltons/").unwrap(), - channel: "dummy-channel".to_owned(), + channel: Some("dummy-channel".to_owned()), }; let packages = vec![repo_data_record.clone()]; @@ -223,7 +219,7 @@ async fn test_purl_are_generated_using_custom_mapping() { package_record: foo_bar_package.package_record, file_name: "foo-bar-car".to_owned(), url: Url::parse("https://pypi.org/simple/boltons/").unwrap(), - channel: "https://conda.anaconda.org/conda-forge/".to_owned(), + channel: Some("https://conda.anaconda.org/conda-forge/".to_owned()), }; let packages = vec![repo_data_record.clone()]; @@ -267,7 +263,7 @@ async fn test_compressed_mapping_catch_not_pandoc_not_a_python_package() { package_record: foo_bar_package.package_record, file_name: "pandoc".to_owned(), url: Url::parse("https://haskell.org/pandoc/").unwrap(), - channel: "https://conda.anaconda.org/conda-forge/".to_owned(), + channel: Some("https://conda.anaconda.org/conda-forge/".to_owned()), }; let packages = vec![repo_data_record.clone()]; @@ -316,14 +312,14 @@ async fn test_dont_record_not_present_package_as_purl() { package_record: foo_bar_package.package_record, file_name: "pixi-something-new-for-test".to_owned(), url: Url::parse("https://pypi.org/simple/something-new/").unwrap(), - channel: "https://conda.anaconda.org/conda-forge/osx-arm64/brotli-python-1.1.0-py311ha891d26_1.conda".to_owned(), + channel: Some("https://conda.anaconda.org/conda-forge/osx-arm64/brotli-python-1.1.0-py311ha891d26_1.conda".to_owned()), }; let mut boltons_repo_data_record = RepoDataRecord { package_record: boltons_package.package_record, file_name: "boltons".to_owned(), url: Url::parse("https://pypi.org/simple/boltons/").unwrap(), - channel: "https://conda.anaconda.org/conda-forge/".to_owned(), + channel: Some("https://conda.anaconda.org/conda-forge/".to_owned()), }; let packages = vec![repo_data_record.clone(), boltons_repo_data_record.clone()]; @@ -411,14 +407,14 @@ async fn test_we_record_not_present_package_as_purl_for_custom_mapping() { package_record: foo_bar_package.package_record, file_name: "pixi-something-new".to_owned(), url: 
Url::parse("https://pypi.org/simple/pixi-something-new-new/").unwrap(), - channel: "https://conda.anaconda.org/conda-forge/".to_owned(), + channel: Some("https://conda.anaconda.org/conda-forge/".to_owned()), }; let boltons_repo_data_record = RepoDataRecord { package_record: boltons_package.package_record, file_name: "boltons".to_owned(), url: Url::parse("https://pypi.org/simple/boltons/").unwrap(), - channel: "https://conda.anaconda.org/conda-forge/".to_owned(), + channel: Some("https://conda.anaconda.org/conda-forge/".to_owned()), }; let mut packages = vec![repo_data_record, boltons_repo_data_record]; @@ -488,7 +484,7 @@ async fn test_custom_mapping_channel_with_suffix() { package_record: foo_bar_package.package_record, file_name: "pixi-something-new".to_owned(), url: Url::parse("https://pypi.org/simple/pixi-something-new-new/").unwrap(), - channel: "https://conda.anaconda.org/conda-forge".to_owned(), + channel: Some("https://conda.anaconda.org/conda-forge".to_owned()), }; let mut packages = vec![repo_data_record]; @@ -539,7 +535,7 @@ async fn test_repo_data_record_channel_with_suffix() { package_record: foo_bar_package.package_record, file_name: "pixi-something-new".to_owned(), url: Url::parse("https://pypi.org/simple/pixi-something-new-new/").unwrap(), - channel: "https://conda.anaconda.org/conda-forge/".to_owned(), + channel: Some("https://conda.anaconda.org/conda-forge/".to_owned()), }; let mut packages = vec![repo_data_record]; @@ -590,7 +586,7 @@ async fn test_path_channel() { package_record: foo_bar_package.package_record, file_name: "pixi-something-new".to_owned(), url: Url::parse("https://pypi.org/simple/pixi-something-new-new/").unwrap(), - channel: "file:///home/user/staged-recipes/build_artifacts".to_owned(), + channel: Some("file:///home/user/staged-recipes/build_artifacts".to_owned()), }; let mut packages = vec![repo_data_record]; @@ -634,8 +630,8 @@ async fn test_file_url_as_mapping_location() { ); let mapping_file_path_as_url = Url::from_file_path( - mapping_file, // .canonicalize() - // .expect("should be canonicalized"), + mapping_file, /* .canonicalize() + * .expect("should be canonicalized"), */ ) .unwrap(); @@ -664,7 +660,7 @@ async fn test_file_url_as_mapping_location() { package_record: foo_bar_package.package_record, file_name: "pixi-something-new".to_owned(), url: Url::parse("https://pypi.org/simple/pixi-something-new-new/").unwrap(), - channel: "https://conda.anaconda.org/conda-forge/".to_owned(), + channel: Some("https://conda.anaconda.org/conda-forge/".to_owned()), }; let mut packages = vec![repo_data_record]; @@ -721,7 +717,7 @@ async fn test_disabled_mapping() { package_record: boltons_package.package_record, file_name: "boltons".to_owned(), url: Url::parse("https://pypi.org/simple/boltons/").unwrap(), - channel: "https://conda.anaconda.org/conda-forge/".to_owned(), + channel: Some("https://conda.anaconda.org/conda-forge/".to_owned()), }; let mut packages = vec![boltons_repo_data_record]; diff --git a/tests/scripts/check-openssl.py b/tests/scripts/check-openssl.py index e89fe3855..010c8bd33 100644 --- a/tests/scripts/check-openssl.py +++ b/tests/scripts/check-openssl.py @@ -21,14 +21,16 @@ def check_openssl_dependency() -> None: text=True, ) - if result.returncode == 0: + if ( + "package ID specification `openssl` did not match any packages" in result.stderr + or "nothing to print" in result.stderr + ): + colored_print("Success: openssl is not part of the dependencies tree.", Colors.GREEN) + sys.exit(0) + elif result.returncode == 0 and "nothing to print" not 
in result.stderr: colored_print("Error: openssl is part of the dependencies tree", Colors.RED) print(result.stdout) sys.exit(1) - - # Check if the error message matches the expected message - if "package ID specification `openssl` did not match any packages" in result.stderr: - colored_print("Success: openssl is not part of the dependencies tree.", Colors.GREEN) else: colored_print("Error: Unexpected error message.", Colors.RED) print(result.stderr)