diff --git a/.github/actionlint.yml b/.github/actionlint.yml
index 4aea2a42bb..43b0b27690 100644
--- a/.github/actionlint.yml
+++ b/.github/actionlint.yml
@@ -1,5 +1,6 @@
 self-hosted-runner:
   labels:
+    - "codspeed-macro"
    - 8core_ubuntu_latest_runner
    - 16core_windows_latest_runner
    - windows_arm64_2025_large
diff --git a/.github/workflows/benchmark.yml b/.github/workflows/benchmark.yml
new file mode 100644
index 0000000000..07451690da
--- /dev/null
+++ b/.github/workflows/benchmark.yml
@@ -0,0 +1,43 @@
+name: CodSpeed
+
+on:
+  push:
+    branches:
+      - "feature/codespeed" # or "main"
+  pull_request: # required to have reports on PRs
+  # `workflow_dispatch` allows CodSpeed to trigger backtest
+  # performance analysis in order to generate initial data.
+  workflow_dispatch:
+
+jobs:
+  benchmarks:
+    name: Run benchmarks
+    runs-on: codspeed-macro
+    env:
+      RUST_BACKTRACE: full
+    steps:
+      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
+
+      - name: Install system dependencies
+        run: |
+          sudo apt-get update
+          sudo apt-get install -y tar bzip2
+
+      - name: Setup rust toolchain, cache and cargo-codspeed binary
+        uses: moonrepo/setup-rust@e013866c4215f77c925f42f60257dec7dd18836e
+        with:
+          channel: stable
+          cache-target: release
+          bins: cargo-codspeed
+
+      - name: Build the benchmark target(s)
+        run: cargo codspeed build -p pixi_bench
+
+      - name: Run the benchmarks
+        uses: CodSpeedHQ/action@cc824aeb2c86848c39cf722ab4c2b6c5bf290530
+        with:
+          run: |
+            export PATH="$HOME/.cargo/bin:$PATH"
+            cargo codspeed run -p pixi_bench
+          mode: walltime
+          token: ${{ secrets.CODSPEED_TOKEN }}
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 334c44bddb..da1cb679b0 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -107,7 +107,7 @@ jobs:
         with:
           save-if: ${{ github.ref == 'refs/heads/main' }}
       - run: |
-          for package in $(cargo metadata --no-deps --format-version=1 | jq -r '.packages[] | .name'); do
+          for package in $(cargo metadata --no-deps --format-version=1 | jq -r '.packages[] | select(.name != "pixi_bench") | .name'); do
            cargo rustdoc -p "$package" --all-features -- -D warnings -W unreachable-pub
          done
diff --git a/Cargo.lock b/Cargo.lock
index cc8a6ba3b7..1bd3a27979 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -65,6 +65,12 @@ dependencies = [
  "libc",
 ]
 
+[[package]]
+name = "anes"
+version = "0.1.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4b46cbb362ab8752921c97e041f5e366ee6297bd428a31275b9fcf1e380f7299"
+
 [[package]]
 name = "anstream"
 version = "0.6.20"
@@ -121,6 +127,15 @@ version = "1.0.99"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "b0674a1ddeecb70197781e945de4b3b8ffb61fa939a5597bcf48503737663100"
 
+[[package]]
+name = "approx"
+version = "0.5.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cab112f0a86d568ea0e627cc1d6be74a1e9cd55214684db5561995f6dad897c6"
+dependencies = [
+ "num-traits",
+]
+
 [[package]]
 name = "arbitrary"
 version = "1.4.2"
@@ -1223,6 +1238,12 @@ dependencies = [
  "windows-sys 0.59.0",
 ]
 
+[[package]]
+name = "cast"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5"
+
 [[package]]
 name = "cbc"
 version = "0.1.2"
@@ -1286,6 +1307,33 @@ dependencies = [
  "windows-link 0.2.0",
 ]
 
+[[package]]
+name = "ciborium"
+version = "0.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = 
"42e69ffd6f0917f5c029256a24d0161db17cea3997d185db0d35926308770f0e" +dependencies = [ + "ciborium-io", + "ciborium-ll", + "serde", +] + +[[package]] +name = "ciborium-io" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05afea1e0a06c9be33d539b876f1ce3692f4afea2cb41f740e7743225ed1c757" + +[[package]] +name = "ciborium-ll" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57663b653d948a338bfb3eeba9bb2fd5fcfaecb9e199e87e1eda4d9e8b240fd9" +dependencies = [ + "ciborium-io", + "half", +] + [[package]] name = "cipher" version = "0.4.4" @@ -1366,12 +1414,82 @@ dependencies = [ "tokio", ] +[[package]] +name = "codspeed" +version = "3.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "35584c5fcba8059780748866387fb97c5a203bcfc563fc3d0790af406727a117" +dependencies = [ + "anyhow", + "bincode", + "colored", + "glob", + "libc", + "nix 0.29.0", + "serde", + "serde_json", + "statrs", + "uuid", +] + +[[package]] +name = "codspeed-criterion-compat" +version = "3.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78f6c1c6bed5fd84d319e8b0889da051daa361c79b7709c9394dfe1a882bba67" +dependencies = [ + "codspeed", + "codspeed-criterion-compat-walltime", + "colored", + "futures", + "tokio", +] + +[[package]] +name = "codspeed-criterion-compat-walltime" +version = "3.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c989289ce6b1cbde72ed560496cb8fbf5aa14d5ef5666f168e7f87751038352e" +dependencies = [ + "anes", + "cast", + "ciborium", + "clap", + "codspeed", + "criterion-plot", + "futures", + "is-terminal", + "itertools 0.10.5", + "num-traits", + "once_cell", + "oorandom", + "plotters", + "rayon", + "regex", + "serde", + "serde_derive", + "serde_json", + "tinytemplate", + "tokio", + "walkdir", +] + [[package]] name = "colorchoice" version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b05b61dc5112cbb17e4b6cd61790d9845d13888356391624cbe7e41efeac1e75" +[[package]] +name = "colored" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "117725a109d387c937a1533ce01b450cbde6b88abceea8473c4d7a85853cda3c" +dependencies = [ + "lazy_static", + "windows-sys 0.59.0", +] + [[package]] name = "combine" version = "4.6.7" @@ -1468,26 +1586,6 @@ version = "0.9.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8" -[[package]] -name = "const_format" -version = "0.2.34" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "126f97965c8ad46d6d9163268ff28432e8f6a1196a55578867832e3049df63dd" -dependencies = [ - "const_format_proc_macros", -] - -[[package]] -name = "const_format_proc_macros" -version = "0.2.34" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d57c2eccfb16dbac1f4e61e206105db5820c9d26c3c472bc17c774259ef7744" -dependencies = [ - "proc-macro2", - "quote", - "unicode-xid", -] - [[package]] name = "core-foundation" version = "0.9.4" @@ -1560,6 +1658,16 @@ dependencies = [ "cfg-if", ] +[[package]] +name = "criterion-plot" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6b50826342786a51a89e2da3a28f1c32b06e387201bc2d19791f622c673706b1" +dependencies = [ + "cast", + "itertools 0.10.5", +] + [[package]] name = "crossbeam-channel" version = "0.5.15" @@ -1594,6 
+1702,12 @@ version = "0.8.21" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" +[[package]] +name = "crunchy" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "460fbee9c2c2f33933d720630a6a0bac33ba7053db5344fac858d4b8952d77d5" + [[package]] name = "crypto-bigint" version = "0.4.9" @@ -2277,6 +2391,15 @@ dependencies = [ "percent-encoding", ] +[[package]] +name = "fs-err" +version = "2.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "88a41f105fe1d5b6b34b2055e3dc59bb79b46b48b2040b9e6c7b4b5de097aa41" +dependencies = [ + "autocfg", +] + [[package]] name = "fs-err" version = "3.1.1" @@ -2303,7 +2426,7 @@ version = "0.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8640e34b88f7652208ce9e88b1a37a2ae95227d84abec377ccd3c5cfeb141ed4" dependencies = [ - "fs-err", + "fs-err 3.1.1", "rustix 1.1.2", "tokio", "windows-sys 0.59.0", @@ -2705,6 +2828,16 @@ dependencies = [ "tracing", ] +[[package]] +name = "half" +version = "2.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "459196ed295495a68f7d7fe1d84f6c4b7ff0e21fe3017b2f283c6fac3ad803c9" +dependencies = [ + "cfg-if", + "crunchy", +] + [[package]] name = "halfbrown" version = "0.3.0" @@ -3108,7 +3241,7 @@ dependencies = [ "js-sys", "log", "wasm-bindgen", - "windows-core 0.59.0", + "windows-core 0.61.2", ] [[package]] @@ -3344,6 +3477,17 @@ dependencies = [ "serde", ] +[[package]] +name = "is-terminal" +version = "0.4.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e04d7f318608d35d4b61ddd75cbdaee86b023ebe2bd5a66ee0915f0bf93095a9" +dependencies = [ + "hermit-abi", + "libc", + "windows-sys 0.59.0", +] + [[package]] name = "is_ci" version = "1.2.0" @@ -3367,9 +3511,9 @@ checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf" [[package]] name = "itertools" -version = "0.11.0" +version = "0.10.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1c173a5686ce8bfa551b3563d0c2170bf24ca44da99c7ca4bfdab5418c3fe57" +checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" dependencies = [ "either", ] @@ -3730,7 +3874,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "07033963ba89ebaf1584d767badaa2e8fcec21aedea6b8c0346d487d49c28667" dependencies = [ "cfg-if", - "windows-targets 0.48.5", + "windows-targets 0.53.3", ] [[package]] @@ -4288,6 +4432,12 @@ version = "1.70.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a4895175b425cb1f87721b59f0f286c2092bd4af812243672510e1ac53e2e0ad" +[[package]] +name = "oorandom" +version = "11.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d6790f58c7ff633d8771f42965289203411a5e5c68388703c06e14f24770b41e" + [[package]] name = "openssl" version = "0.10.73" @@ -4471,7 +4621,7 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "493dd224adc6163e40a1e58cefbeec27dabb69696017677406417b3516c800fa" dependencies = [ - "fs-err", + "fs-err 3.1.1", "fxhash", "indexmap 2.11.1", "itertools 0.14.0", @@ -4634,7 +4784,7 @@ dependencies = [ "chrono", "dunce", "fake", - "fs-err", + "fs-err 3.1.1", "fs_extra", "futures", "http 1.3.1", @@ -4695,9 +4845,10 @@ dependencies = [ name = "pixi_api" version = "0.1.0" dependencies = [ + "console 0.15.11", "dunce", 
"fancy_display", - "fs-err", + "fs-err 3.1.1", "itertools 0.14.0", "miette 7.6.0", "minijinja", @@ -4716,6 +4867,30 @@ dependencies = [ "uv-normalize", ] +[[package]] +name = "pixi_bench" +version = "0.1.0" +dependencies = [ + "clap", + "codspeed-criterion-compat", + "fs-err 2.11.0", + "miette 7.6.0", + "once_cell", + "pixi_cli", + "pixi_config", + "pixi_core", + "pixi_global", + "pixi_manifest", + "pixi_spec", + "rattler_conda_types", + "reqwest", + "serde", + "serde_json", + "tempfile", + "tokio", + "uuid", +] + [[package]] name = "pixi_build_discovery" version = "0.1.0" @@ -4743,7 +4918,7 @@ dependencies = [ name = "pixi_build_frontend" version = "0.1.0" dependencies = [ - "fs-err", + "fs-err 3.1.1", "futures", "jsonrpsee", "miette 7.6.0", @@ -4806,7 +4981,7 @@ dependencies = [ "dunce", "fancy_display", "flate2", - "fs-err", + "fs-err 3.1.1", "futures", "human_bytes", "indexmap 2.11.1", @@ -4891,7 +5066,7 @@ dependencies = [ "derive_more", "dirs", "dunce", - "fs-err", + "fs-err 3.1.1", "futures", "indexmap 2.11.1", "insta", @@ -4947,7 +5122,7 @@ dependencies = [ "clap", "console 0.15.11", "dirs", - "fs-err", + "fs-err 3.1.1", "insta", "itertools 0.14.0", "miette 7.6.0", @@ -4992,7 +5167,7 @@ dependencies = [ "dunce", "fake", "fancy_display", - "fs-err", + "fs-err 3.1.1", "fs_extra", "futures", "http 1.3.1", @@ -5091,7 +5266,7 @@ name = "pixi_docs" version = "0.1.0" dependencies = [ "clap", - "fs-err", + "fs-err 3.1.1", "itertools 0.14.0", "pixi_cli", "rattler_conda_types", @@ -5103,7 +5278,7 @@ version = "0.0.1" dependencies = [ "dashmap", "dunce", - "fs-err", + "fs-err 3.1.1", "pixi_utils", "reqwest", "reqwest-middleware", @@ -5121,16 +5296,19 @@ name = "pixi_glob" version = "0.1.0" dependencies = [ "dashmap", - "fs-err", + "fs-err 3.1.1", + "ignore", "insta", "itertools 0.14.0", "memchr", + "parking_lot", "rattler_digest", "rstest", + "serde", "tempfile", "thiserror 2.0.16", "tokio", - "wax", + "tracing", ] [[package]] @@ -5143,7 +5321,7 @@ dependencies = [ "dunce", "fake", "fancy_display", - "fs-err", + "fs-err 3.1.1", "futures", "indexmap 2.11.1", "indicatif", @@ -5200,7 +5378,7 @@ dependencies = [ "console 0.15.11", "csv", "fancy_display", - "fs-err", + "fs-err 3.1.1", "insta", "itertools 0.14.0", "miette 7.6.0", @@ -5261,7 +5439,7 @@ dependencies = [ "console 0.15.11", "dunce", "fancy_display", - "fs-err", + "fs-err 3.1.1", "glob", "indexmap 2.11.1", "insta", @@ -5452,7 +5630,7 @@ dependencies = [ "crossbeam-channel", "deno_task_shell", "fancy_display", - "fs-err", + "fs-err 3.1.1", "itertools 0.14.0", "miette 7.6.0", "pixi_consts", @@ -5502,7 +5680,7 @@ name = "pixi_utils" version = "0.1.0" dependencies = [ "async-fd-lock", - "fs-err", + "fs-err 3.1.1", "indicatif", "insta", "is_executable", @@ -5535,7 +5713,7 @@ dependencies = [ name = "pixi_uv_context" version = "0.1.0" dependencies = [ - "fs-err", + "fs-err 3.1.1", "miette 7.6.0", "pixi_config", "pixi_consts", @@ -5622,6 +5800,34 @@ dependencies = [ "time", ] +[[package]] +name = "plotters" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5aeb6f403d7a4911efb1e33402027fc44f29b5bf6def3effcc22d7bb75f2b747" +dependencies = [ + "num-traits", + "plotters-backend", + "plotters-svg", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "plotters-backend" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df42e13c12958a16b3f7f4386b9ab1f3e7933914ecea48da7139435263a4172a" + +[[package]] +name = "plotters-svg" +version = "0.3.7" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "51bae2ac328883f7acdfea3d66a7c35751187f870bc81f94563733a154d7a670" +dependencies = [ + "plotters-backend", +] + [[package]] name = "polling" version = "3.10.0" @@ -5636,15 +5842,6 @@ dependencies = [ "windows-sys 0.60.2", ] -[[package]] -name = "pori" -version = "0.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4a63d338dec139f56dacc692ca63ad35a6be6a797442479b55acd611d79e906" -dependencies = [ - "nom 7.1.3", -] - [[package]] name = "portable-atomic" version = "1.11.1" @@ -5783,7 +5980,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8a56d757972c98b346a9b766e3f02746cde6dd1cd1d1d563472929fdd74bec4d" dependencies = [ "anyhow", - "itertools 0.11.0", + "itertools 0.14.0", "proc-macro2", "quote", "syn", @@ -5849,7 +6046,7 @@ version = "0.1.0" dependencies = [ "async-once-cell", "dashmap", - "fs-err", + "fs-err 3.1.1", "futures", "http-cache-reqwest", "itertools 0.14.0", @@ -6095,7 +6292,7 @@ dependencies = [ "console 0.16.1", "digest", "dirs", - "fs-err", + "fs-err 3.1.1", "futures", "humantime", "indexmap 2.11.1", @@ -6139,7 +6336,7 @@ dependencies = [ "dashmap", "digest", "dirs", - "fs-err", + "fs-err 3.1.1", "fs4", "futures", "fxhash", @@ -6172,7 +6369,7 @@ dependencies = [ "core-foundation 0.10.1", "dirs", "file_url", - "fs-err", + "fs-err 3.1.1", "fxhash", "glob", "hex", @@ -6266,7 +6463,7 @@ dependencies = [ "chrono", "configparser", "dirs", - "fs-err", + "fs-err 3.1.1", "known-folders", "once_cell", "plist", @@ -6300,7 +6497,7 @@ dependencies = [ "aws-sdk-s3", "base64 0.22.1", "dirs", - "fs-err", + "fs-err 3.1.1", "getrandom 0.3.3", "google-cloud-auth", "http 1.3.1", @@ -6326,7 +6523,7 @@ checksum = "93a1867b2b10cc5a204e479dee7a02f29794db31a05e2cdff40bfd9652cbd54c" dependencies = [ "bzip2 0.6.0", "chrono", - "fs-err", + "fs-err 3.1.1", "futures-util", "num_cpus", "rattler_conda_types", @@ -6389,7 +6586,7 @@ dependencies = [ "dashmap", "dirs", "file_url", - "fs-err", + "fs-err 3.1.1", "futures", "hex", "http 1.3.1", @@ -6438,7 +6635,7 @@ checksum = "f3719c5f7eae3f4abd56ec6be280eecd50bdfd81670fde2fb3685911882e5e1f" dependencies = [ "anyhow", "enum_dispatch", - "fs-err", + "fs-err 3.1.1", "indexmap 2.11.1", "itertools 0.14.0", "rattler_conda_types", @@ -7680,6 +7877,16 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" +[[package]] +name = "statrs" +version = "0.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a3fe7c28c6512e766b0874335db33c94ad7b8f9054228ae1c2abd47ce7d335e" +dependencies = [ + "approx", + "num-traits", +] + [[package]] name = "strsim" version = "0.11.1" @@ -7829,7 +8036,7 @@ dependencies = [ "ntapi", "objc2-core-foundation", "objc2-io-kit", - "windows 0.59.0", + "windows 0.61.3", ] [[package]] @@ -8044,6 +8251,16 @@ dependencies = [ "zerovec", ] +[[package]] +name = "tinytemplate" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be4d6b5f19ff7664e8c98d03e2139cb510db9b0a60b55f8e8709b689d939b6bc" +dependencies = [ + "serde", + "serde_json", +] + [[package]] name = "tinyvec" version = "1.10.0" @@ -8075,6 +8292,7 @@ dependencies = [ "io-uring", "libc", "mio", + "parking_lot", "pin-project-lite", "signal-hook-registry", "slab", @@ -8500,12 +8718,6 @@ version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"4a1a07cc7db3810833284e8d372ccdc6da29741639ecc70c9ec107df0fa6154c" -[[package]] -name = "unicode-xid" -version = "0.2.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853" - [[package]] name = "unit-prefix" version = "0.5.1" @@ -8612,7 +8824,7 @@ source = "git+https://github.com/astral-sh/uv?tag=0.8.5#ce37286814dbb802c422f092 dependencies = [ "csv", "flate2", - "fs-err", + "fs-err 3.1.1", "globset", "itertools 0.14.0", "rustc-hash", @@ -8647,7 +8859,7 @@ version = "0.0.1" source = "git+https://github.com/astral-sh/uv?tag=0.8.5#ce37286814dbb802c422f0926487cfab7aefd2b7" dependencies = [ "anstream", - "fs-err", + "fs-err 3.1.1", "indoc", "itertools 0.14.0", "owo-colors", @@ -8681,7 +8893,7 @@ name = "uv-cache" version = "0.0.1" source = "git+https://github.com/astral-sh/uv?tag=0.8.5#ce37286814dbb802c422f0926487cfab7aefd2b7" dependencies = [ - "fs-err", + "fs-err 3.1.1", "nanoid", "rmp-serde", "rustc-hash", @@ -8706,7 +8918,7 @@ name = "uv-cache-info" version = "0.0.1" source = "git+https://github.com/astral-sh/uv?tag=0.8.5#ce37286814dbb802c422f0926487cfab7aefd2b7" dependencies = [ - "fs-err", + "fs-err 3.1.1", "globwalk", "serde", "thiserror 2.0.16", @@ -8738,7 +8950,7 @@ dependencies = [ "async_http_range_reader", "async_zip", "bytecheck", - "fs-err", + "fs-err 3.1.1", "futures", "html-escape", "http 1.3.1", @@ -8788,7 +9000,7 @@ source = "git+https://github.com/astral-sh/uv?tag=0.8.5#ce37286814dbb802c422f092 dependencies = [ "bitflags", "either", - "fs-err", + "fs-err 3.1.1", "rayon", "rustc-hash", "same-file", @@ -8826,7 +9038,7 @@ version = "0.0.1" source = "git+https://github.com/astral-sh/uv?tag=0.8.5#ce37286814dbb802c422f0926487cfab7aefd2b7" dependencies = [ "etcetera", - "fs-err", + "fs-err 3.1.1", "tracing", "uv-static", ] @@ -8870,7 +9082,7 @@ source = "git+https://github.com/astral-sh/uv?tag=0.8.5#ce37286814dbb802c422f092 dependencies = [ "anyhow", "either", - "fs-err", + "fs-err 3.1.1", "futures", "nanoid", "owo-colors", @@ -8934,7 +9146,7 @@ source = "git+https://github.com/astral-sh/uv?tag=0.8.5#ce37286814dbb802c422f092 dependencies = [ "arcstr", "bitflags", - "fs-err", + "fs-err 3.1.1", "http 1.3.1", "itertools 0.14.0", "jiff", @@ -8974,7 +9186,7 @@ dependencies = [ "async-compression", "async_zip", "blake2", - "fs-err", + "fs-err 3.1.1", "futures", "md-5", "rayon", @@ -9001,7 +9213,7 @@ dependencies = [ "dunce", "either", "encoding_rs_io", - "fs-err", + "fs-err 3.1.1", "fs2", "junction", "path-slash", @@ -9025,7 +9237,7 @@ dependencies = [ "anyhow", "cargo-util", "dashmap", - "fs-err", + "fs-err 3.1.1", "reqwest", "reqwest-middleware", "thiserror 2.0.16", @@ -9076,7 +9288,7 @@ dependencies = [ "configparser", "csv", "data-encoding", - "fs-err", + "fs-err 3.1.1", "mailparse", "pathdiff", "reflink-copy", @@ -9109,7 +9321,7 @@ source = "git+https://github.com/astral-sh/uv?tag=0.8.5#ce37286814dbb802c422f092 dependencies = [ "anyhow", "async-channel", - "fs-err", + "fs-err 3.1.1", "futures", "owo-colors", "rayon", @@ -9160,7 +9372,7 @@ version = "0.1.0" source = "git+https://github.com/astral-sh/uv?tag=0.8.5#ce37286814dbb802c422f0926487cfab7aefd2b7" dependencies = [ "async_zip", - "fs-err", + "fs-err 3.1.1", "futures", "thiserror 2.0.16", "tokio", @@ -9245,7 +9457,7 @@ name = "uv-platform" version = "0.0.1" source = "git+https://github.com/astral-sh/uv?tag=0.8.5#ce37286814dbb802c422f0926487cfab7aefd2b7" dependencies = [ - "fs-err", + "fs-err 3.1.1", "goblin", "procfs", "regex", @@ -9309,7 
+9521,7 @@ dependencies = [ "anyhow", "configparser", "dunce", - "fs-err", + "fs-err 3.1.1", "futures", "indexmap 2.11.1", "itertools 0.14.0", @@ -9377,7 +9589,7 @@ dependencies = [ "anyhow", "configparser", "console 0.16.1", - "fs-err", + "fs-err 3.1.1", "futures", "rustc-hash", "serde", @@ -9410,7 +9622,7 @@ name = "uv-requirements-txt" version = "0.0.1" source = "git+https://github.com/astral-sh/uv?tag=0.8.5#ce37286814dbb802c422f0926487cfab7aefd2b7" dependencies = [ - "fs-err", + "fs-err 3.1.1", "memchr", "reqwest", "reqwest-middleware", @@ -9518,7 +9730,7 @@ name = "uv-state" version = "0.0.1" source = "git+https://github.com/astral-sh/uv?tag=0.8.5#ce37286814dbb802c422f0926487cfab7aefd2b7" dependencies = [ - "fs-err", + "fs-err 3.1.1", "tempfile", "uv-dirs", ] @@ -9537,7 +9749,7 @@ version = "0.1.0" source = "git+https://github.com/astral-sh/uv?tag=0.8.5#ce37286814dbb802c422f0926487cfab7aefd2b7" dependencies = [ "either", - "fs-err", + "fs-err 3.1.1", "serde", "thiserror 2.0.16", "tracing", @@ -9554,7 +9766,7 @@ name = "uv-trampoline-builder" version = "0.0.1" source = "git+https://github.com/astral-sh/uv?tag=0.8.5#ce37286814dbb802c422f0926487cfab7aefd2b7" dependencies = [ - "fs-err", + "fs-err 3.1.1", "thiserror 2.0.16", "uv-fs", "zip 2.4.2", @@ -9595,7 +9807,7 @@ version = "0.0.4" source = "git+https://github.com/astral-sh/uv?tag=0.8.5#ce37286814dbb802c422f0926487cfab7aefd2b7" dependencies = [ "console 0.16.1", - "fs-err", + "fs-err 3.1.1", "itertools 0.14.0", "owo-colors", "pathdiff", @@ -9627,7 +9839,7 @@ name = "uv-workspace" version = "0.0.1" source = "git+https://github.com/astral-sh/uv?tag=0.8.5#ce37286814dbb802c422f0926487cfab7aefd2b7" dependencies = [ - "fs-err", + "fs-err 3.1.1", "glob", "itertools 0.14.0", "owo-colors", @@ -9850,21 +10062,6 @@ dependencies = [ "wasm-bindgen", ] -[[package]] -name = "wax" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d12a78aa0bab22d2f26ed1a96df7ab58e8a93506a3e20adb47c51a93b4e1357" -dependencies = [ - "const_format", - "itertools 0.11.0", - "nom 7.1.3", - "pori", - "regex", - "thiserror 1.0.69", - "walkdir", -] - [[package]] name = "web-sys" version = "0.3.78" @@ -9946,7 +10143,7 @@ version = "0.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c2a7b1c03c876122aa43f3020e6c3c3ee5c05081c9a00739faf7503aeba10d22" dependencies = [ - "windows-sys 0.48.0", + "windows-sys 0.61.0", ] [[package]] diff --git a/Cargo.toml b/Cargo.toml index 26df7135cb..bc420b9c01 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -123,7 +123,6 @@ uv-platform-tags = { git = "https://github.com/astral-sh/uv", tag = "0.8.5" } uv-pypi-types = { git = "https://github.com/astral-sh/uv", tag = "0.8.5" } uv-requirements-txt = { git = "https://github.com/astral-sh/uv", tag = "0.8.5" } -wax = "0.6.0" which = "8.0.0" # Rattler crates diff --git a/crates/pixi/tests/integration_rust/upgrade_tests.rs b/crates/pixi/tests/integration_rust/upgrade_tests.rs index a8d45bd5a0..bb12f23c79 100644 --- a/crates/pixi/tests/integration_rust/upgrade_tests.rs +++ b/crates/pixi/tests/integration_rust/upgrade_tests.rs @@ -38,8 +38,7 @@ async fn pypi_dependency_index_preserved_on_upgrade() { let workspace = Workspace::from_path(&pixi.manifest_path()).unwrap(); let workspace_value = workspace.workspace.value.clone(); - let feature = workspace_value.feature(&args.specs.feature).unwrap(); - + let feature = workspace_value.default_feature(); let mut workspace = workspace.modify().unwrap(); let (match_specs, pypi_deps) = @@ 
-52,7 +51,7 @@ async fn pypi_dependency_index_preserved_on_upgrade() {
             IndexMap::default(),
             args.no_install_config.no_install,
             &args.lock_file_update_config.lock_file_usage().unwrap(),
-            &args.specs.feature,
+            &feature.name,
             &[],
             true,
             args.dry_run,
diff --git a/crates/pixi_api/Cargo.toml b/crates/pixi_api/Cargo.toml
index 2d687551fc..ec62ff2e55 100644
--- a/crates/pixi_api/Cargo.toml
+++ b/crates/pixi_api/Cargo.toml
@@ -9,6 +9,7 @@ repository.workspace = true
 version = "0.1.0"
 
 [dependencies]
+console = { workspace = true }
 dunce = { workspace = true }
 fancy_display = { workspace = true }
 fs-err = { workspace = true }
diff --git a/crates/pixi_api/src/context.rs b/crates/pixi_api/src/context.rs
index b85a778b0f..07b98cc5b7 100644
--- a/crates/pixi_api/src/context.rs
+++ b/crates/pixi_api/src/context.rs
@@ -21,11 +21,11 @@ impl WorkspaceContext {
     }
 
     pub async fn name(&self) -> String {
-        crate::workspace::config::name::get(self.workspace.clone()).await
+        crate::workspace::workspace::name::get(self.workspace.clone()).await
     }
 
     pub async fn set_name(&self, name: &str) -> miette::Result<()> {
-        crate::workspace::config::name::set(&self.interface, self.workspace.clone(), name).await
+        crate::workspace::workspace::name::set(&self.interface, self.workspace.clone(), name).await
     }
 
     pub async fn reinstall(
diff --git a/crates/pixi_api/src/interface.rs b/crates/pixi_api/src/interface.rs
index 672c0f3539..38f960fe46 100644
--- a/crates/pixi_api/src/interface.rs
+++ b/crates/pixi_api/src/interface.rs
@@ -1,10 +1,7 @@
 use miette::Result;
 use std::future::Future;
 
-use crate::styled_text::StyledText;
-
 pub trait Interface {
-    fn styled(&self, text: StyledText) -> String;
     fn is_cli(&self) -> impl Future<Output = bool> + Send;
     fn confirm(&self, msg: &str) -> impl Future<Output = Result<bool>> + Send;
     fn info(&self, msg: &str) -> impl Future<Output = ()> + Send;
diff --git a/crates/pixi_api/src/lib.rs b/crates/pixi_api/src/lib.rs
index 31c79f6fd5..e91126ff39 100644
--- a/crates/pixi_api/src/lib.rs
+++ b/crates/pixi_api/src/lib.rs
@@ -2,7 +2,6 @@ pub mod workspace;
 
 pub mod context;
 pub mod interface;
-pub mod styled_text;
 
 // Reexport for pixi_api consumers
 pub use pixi_core as core;
diff --git a/crates/pixi_api/src/styled_text.rs b/crates/pixi_api/src/styled_text.rs
deleted file mode 100644
index 3f44961011..0000000000
--- a/crates/pixi_api/src/styled_text.rs
+++ /dev/null
@@ -1,39 +0,0 @@
-pub struct StyledText {
-    pub text: String,
-    pub bold: bool,
-    pub green: bool,
-}
-
-impl StyledText {
-    pub fn new(text: impl Into<String>) -> Self {
-        Self {
-            text: text.into(),
-            bold: false,
-            green: false,
-        }
-    }
-
-    pub fn bold(mut self) -> Self {
-        self.bold = true;
-        self
-    }
-
-    pub fn green(mut self) -> Self {
-        self.green = true;
-        self
-    }
-
-    pub fn text(&self) -> &str {
-        &self.text
-    }
-}
-
-pub trait StyleExt {
-    fn style(self) -> StyledText;
-}
-
-impl<T: Into<String>> StyleExt for T {
-    fn style(self) -> StyledText {
-        StyledText::new(self)
-    }
-}
diff --git a/crates/pixi_api/src/workspace/init/mod.rs b/crates/pixi_api/src/workspace/init/mod.rs
index e6620cc69c..ebf6d53b9e 100644
--- a/crates/pixi_api/src/workspace/init/mod.rs
+++ b/crates/pixi_api/src/workspace/init/mod.rs
@@ -19,7 +19,7 @@ use tokio::fs::OpenOptions;
 use url::Url;
 use uv_normalize::PackageName;
 
-use crate::{interface::Interface, styled_text::StyleExt};
+use crate::interface::Interface;
 
 mod options;
 mod template;
@@ -46,7 +46,7 @@ pub(crate) async fn init(
         format!(
             "Please follow the getting started guide at https://pixi.sh/v{}/init_getting_started/ or run the following command to create a new workspace in a 
subdirectory:\n\n {}\n", consts::PIXI_VERSION, - interface.styled("pixi init my_workspace".style().bold()), + console::style("pixi init my_workspace").bold(), ) } else { "You have to select a subdirectory to create a new workspace".to_string() @@ -135,8 +135,8 @@ pub(crate) async fn init( { interface.confirm(&format!( "A '{}' file already exists. Do you want to extend it with the '{}' configuration?", - interface.styled(consts::PYPROJECT_MANIFEST.style().bold()), - interface.styled("[tool.pixi]".style().bold().green()), + console::style(consts::PYPROJECT_MANIFEST).bold(), + console::style("[tool.pixi]").bold().green() )).await? } else { options.format == Some(ManifestFormat::Pyproject) diff --git a/crates/pixi_api/src/workspace/mod.rs b/crates/pixi_api/src/workspace/mod.rs index e84b32d3e2..369de734c0 100644 --- a/crates/pixi_api/src/workspace/mod.rs +++ b/crates/pixi_api/src/workspace/mod.rs @@ -1,3 +1,4 @@ -pub mod config; pub mod init; pub mod reinstall; +#[allow(clippy::module_inception)] +pub mod workspace; diff --git a/crates/pixi_api/src/workspace/reinstall/mod.rs b/crates/pixi_api/src/workspace/reinstall/mod.rs index aec4b60509..3d1f34611c 100644 --- a/crates/pixi_api/src/workspace/reinstall/mod.rs +++ b/crates/pixi_api/src/workspace/reinstall/mod.rs @@ -6,7 +6,7 @@ use pixi_core::{ lock_file::{ReinstallEnvironment, UpdateMode}, }; -use crate::{interface::Interface, styled_text::StyledText}; +use crate::interface::Interface; mod options; @@ -59,10 +59,7 @@ pub(crate) async fn reinstall( // Message what's installed let detached_envs_message = if let Ok(Some(path)) = workspace.config().detached_environments().path() { - format!( - " in '{}'", - interface.styled(StyledText::new(path.display().to_string()).bold()) - ) + format!(" in '{}'", console::style(path.display()).bold()) } else { "".to_string() }; diff --git a/crates/pixi_api/src/workspace/config/mod.rs b/crates/pixi_api/src/workspace/workspace/mod.rs similarity index 100% rename from crates/pixi_api/src/workspace/config/mod.rs rename to crates/pixi_api/src/workspace/workspace/mod.rs diff --git a/crates/pixi_api/src/workspace/config/name.rs b/crates/pixi_api/src/workspace/workspace/name.rs similarity index 100% rename from crates/pixi_api/src/workspace/config/name.rs rename to crates/pixi_api/src/workspace/workspace/name.rs diff --git a/crates/pixi_bench/Cargo.toml b/crates/pixi_bench/Cargo.toml new file mode 100644 index 0000000000..ce0d708548 --- /dev/null +++ b/crates/pixi_bench/Cargo.toml @@ -0,0 +1,59 @@ +[package] +edition = "2021" +license = "MIT OR Apache-2.0" +name = "pixi_bench" +publish = false +version = "0.1.0" + +[dev-dependencies] +clap = { workspace = true } +criterion = { version = "3.0.5", package = "codspeed-criterion-compat", features = [ + "async", + "async_futures", + "async_tokio", +] } +fs-err = "2.11.0" +miette = { workspace = true, features = ["fancy-no-backtrace"] } +once_cell = "1.19" +reqwest = { workspace = true } +serde = { version = "1.0", features = ["derive"] } +serde_json = "1.0" +tempfile = "3.0" +tokio = { version = "1.0", features = ["full"] } +uuid = { version = "1.0", features = ["v4"] } + +# Pixi crates for direct API usage +pixi_cli = { path = "../pixi_cli" } +pixi_config = { path = "../pixi_config" } +pixi_core = { path = "../pixi_core" } +pixi_global = { path = "../pixi_global" } +pixi_manifest = { path = "../pixi_manifest" } +pixi_spec = { path = "../pixi_spec" } + +# Rattler crates +rattler_conda_types = { workspace = true } + +[[bench]] +harness = false +name = "cold_warm_install" + 
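+# Every [[bench]] target below disables the default libtest harness so that
+# codspeed-criterion-compat can supply the benchmark `main` entry point.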
+[[bench]]
+harness = false
+name = "lock_install"
+
+[[bench]]
+harness = false
+name = "task_run"
+
+[[bench]]
+harness = false
+name = "global_install"
+
+[[bench]]
+harness = false
+name = "clean"
+
+[profile.release]
+debug = true
diff --git a/crates/pixi_bench/benches/clean.rs b/crates/pixi_bench/benches/clean.rs
new file mode 100644
index 0000000000..6364695f98
--- /dev/null
+++ b/crates/pixi_bench/benches/clean.rs
@@ -0,0 +1,708 @@
+use clap::Parser;
+use criterion::{black_box, criterion_group, criterion_main, Criterion};
+use fs_err as fs;
+use once_cell::sync::Lazy;
+use std::collections::HashMap;
+use std::path::PathBuf;
+use std::time::{Duration, Instant};
+use tempfile::TempDir;
+
+// Pixi crate imports for direct API usage
+use pixi_cli::{clean, install};
+use pixi_config::ConfigCli;
+
+// Single global runtime for all benchmarks
+static RUNTIME: Lazy<tokio::runtime::Runtime> =
+    Lazy::new(|| tokio::runtime::Runtime::new().expect("Failed to create Tokio runtime"));
+
+/// Create an isolated pixi workspace environment for clean testing
+struct IsolatedPixiWorkspace {
+    _temp_dir: TempDir,
+    workspace_dir: PathBuf,
+    cache_dir: PathBuf,
+}
+
+impl IsolatedPixiWorkspace {
+    /// Create with shared cache directory for warm testing
+    fn new_with_shared_cache(
+        shared_cache_dir: &std::path::Path,
+    ) -> Result<Self, Box<dyn std::error::Error>> {
+        let temp_dir = TempDir::new()?;
+        let workspace_dir = temp_dir.path().join("workspace");
+
+        fs::create_dir_all(&workspace_dir)?;
+
+        Ok(Self {
+            _temp_dir: temp_dir,
+            workspace_dir,
+            cache_dir: shared_cache_dir.to_path_buf(),
+        })
+    }
+
+    fn get_env_vars(&self) -> HashMap<String, String> {
+        let mut env_vars = HashMap::new();
+        env_vars.insert(
+            "PIXI_CACHE_DIR".to_string(),
+            self.cache_dir.to_string_lossy().to_string(),
+        );
+        env_vars.insert(
+            "XDG_CACHE_HOME".to_string(),
+            self.cache_dir.to_string_lossy().to_string(),
+        );
+        env_vars
+    }
+
+    /// Ensure local channel exists, create it dynamically if missing (for CI robustness)
+    fn ensure_local_channel_exists(
+        &self,
+        local_channel_dir: &std::path::Path,
+        packages: &[&str],
+    ) -> Result<(), Box<dyn std::error::Error>> {
+        let noarch_dir = local_channel_dir.join("noarch");
+
+        // If the channel already exists, we're good
+        if noarch_dir.exists() && noarch_dir.join("repodata.json").exists() {
+            return Ok(());
+        }
+
+        println!("🔧 Creating local conda channel for CI environment...");
+
+        // Create the directory structure
+        fs::create_dir_all(&noarch_dir)?;
+
+        // Create repodata.json
+        self.create_repodata_json(&noarch_dir, packages)?;
+
+        // Create minimal conda packages
+        self.create_conda_packages(&noarch_dir, packages)?;
+
+        println!("✅ Local conda channel created successfully");
+        Ok(())
+    }
+
+    /// Create repodata.json for the local channel
+    fn create_repodata_json(
+        &self,
+        noarch_dir: &std::path::Path,
+        packages: &[&str],
+    ) -> Result<(), Box<dyn std::error::Error>> {
+        use std::fs::File;
+        use std::io::Write;
+
+        let mut repodata = serde_json::json!({
+            "info": {
+                "subdir": "noarch"
+            },
+            "packages": {},
+            "packages.conda": {},
+            "removed": [],
+            "repodata_version": 1
+        });
+
+        // Add each package to the repodata
+        for package in packages {
+            let package_filename = format!("{}-1.0.0-py_0.tar.bz2", package);
+            repodata["packages"][&package_filename] = serde_json::json!({
+                "build": "py_0",
+                "build_number": 0,
+                "depends": [],
+                "license": "MIT",
+                "name": package,
+                "platform": null,
+                "subdir": "noarch",
+                "timestamp": 1640995200000i64,
+                "version": "1.0.0"
+            });
+        }
+
+        let mut file = File::create(noarch_dir.join("repodata.json"))?;
+        file.write_all(serde_json::to_string_pretty(&repodata)?.as_bytes())?;
+        Ok(())
+    }
+
+    /// Create minimal conda packages
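+    /// Each stub archive carries only `info/index.json`, an empty `info/files`
+    /// and an empty `info/paths.json`: the minimum metadata these benchmarks
+    /// need for the packages to resolve and install.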
+    fn create_conda_packages(
+        &self,
+        noarch_dir: &std::path::Path,
+        packages: &[&str],
+    ) -> Result<(), Box<dyn std::error::Error>> {
+        use std::fs::File;
+        use std::io::Write;
+        use std::process::Command as StdCommand;
+
+        for package in packages {
+            let package_filename = format!("{}-1.0.0-py_0.tar.bz2", package);
+            let package_path = noarch_dir.join(&package_filename);
+
+            // Create a temporary directory for package contents
+            let temp_dir = tempfile::TempDir::new()?;
+            let info_dir = temp_dir.path().join("info");
+            fs::create_dir_all(&info_dir)?;
+
+            // Create index.json
+            let index_data = serde_json::json!({
+                "name": package,
+                "version": "1.0.0",
+                "build": "py_0",
+                "build_number": 0,
+                "depends": [],
+                "license": "MIT",
+                "platform": null,
+                "subdir": "noarch",
+                "timestamp": 1640995200000i64
+            });
+
+            let mut index_file = File::create(info_dir.join("index.json"))?;
+            index_file.write_all(serde_json::to_string_pretty(&index_data)?.as_bytes())?;
+
+            // Create empty files list
+            File::create(info_dir.join("files"))?.write_all(b"")?;
+
+            // Create paths.json
+            let paths_data = serde_json::json!({
+                "paths": [],
+                "paths_version": 1
+            });
+            let mut paths_file = File::create(info_dir.join("paths.json"))?;
+            paths_file.write_all(serde_json::to_string_pretty(&paths_data)?.as_bytes())?;
+
+            // Create the tar.bz2 package using system tar command
+            let output = StdCommand::new("tar")
+                .args([
+                    "-cjf",
+                    package_path.to_str().unwrap(),
+                    "-C",
+                    temp_dir.path().to_str().unwrap(),
+                    "info",
+                ])
+                .output()?;
+
+            if !output.status.success() {
+                return Err(format!(
+                    "Failed to create tar.bz2 package for {}: {}",
+                    package,
+                    String::from_utf8_lossy(&output.stderr)
+                )
+                .into());
+            }
+        }
+
+        Ok(())
+    }
+
+    /// Create a basic pixi.toml file with specified dependencies using local channel
+    fn create_pixi_toml(&self, dependencies: &[&str]) -> Result<(), Box<dyn std::error::Error>> {
+        let current_dir = std::env::current_dir()?;
+        let local_channel_dir = if current_dir.ends_with("pixi_bench") {
+            current_dir.join("my-local-channel")
+        } else {
+            current_dir.join("crates/pixi_bench/my-local-channel")
+        };
+
+        // Ensure the local channel exists, create it if it doesn't
+        self.ensure_local_channel_exists(&local_channel_dir, dependencies)?;
+
+        let local_channel_url = format!("file://{}", local_channel_dir.to_string_lossy());
+
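+        // The local file:// channel is listed before conda-forge so its stub
+        // packages win over any same-named conda-forge packages during solves.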
+        let pixi_toml_content = format!(
+            r#"[project]
+name = "test-project"
+version = "0.1.0"
+description = "Test project for pixi clean benchmarks"
+channels = ["{}", "conda-forge"]
+
+[dependencies]
+{}
+
+[tasks]
+test = "echo 'test task'"
+"#,
+            local_channel_url,
+            dependencies
+                .iter()
+                .map(|dep| format!("{} = \"*\"", dep))
+                .collect::<Vec<_>>()
+                .join("\n")
+        );
+
+        let pixi_toml_path = self.workspace_dir.join("pixi.toml");
+        fs::write(pixi_toml_path, pixi_toml_content)?;
+        Ok(())
+    }
+
+    /// Create a pixi.toml with multiple environments using local channel
+    fn create_multi_env_pixi_toml(&self) -> Result<(), Box<dyn std::error::Error>> {
+        let current_dir = std::env::current_dir()?;
+        let local_channel_dir = if current_dir.ends_with("pixi_bench") {
+            current_dir.join("my-local-channel")
+        } else {
+            current_dir.join("crates/pixi_bench/my-local-channel")
+        };
+
+        // All packages used in multi-environment setup
+        let all_packages = [
+            "python",
+            "pytest",
+            "pytest-cov",
+            "black",
+            "flake8",
+            "mypy",
+            "requests",
+            "flask",
+        ];
+
+        // Ensure the local channel exists, create it if it doesn't
+        self.ensure_local_channel_exists(&local_channel_dir, &all_packages)?;
+
+        let local_channel_url = format!("file://{}", local_channel_dir.to_string_lossy());
+
+        let pixi_toml_content = format!(
+            r#"[project]
+name = "multi-env-project"
+version = "0.1.0"
+description = "Multi-environment test project for pixi clean benchmarks"
+channels = ["{}", "conda-forge"]
+
+[dependencies]
+python = "*"
+
+[environments]
+default = {{ solve-group = "default" }}
+test = {{ features = ["test"], solve-group = "test" }}
+dev = {{ features = ["dev"], solve-group = "dev" }}
+prod = {{ features = ["prod"], solve-group = "prod" }}
+
+[feature.test.dependencies]
+pytest = "*"
+pytest-cov = "*"
+
+[feature.dev.dependencies]
+black = "*"
+flake8 = "*"
+mypy = "*"
+
+[feature.prod.dependencies]
+requests = "*"
+flask = "*"
+
+[tasks]
+test = "pytest"
+lint = "flake8 ."
+format = "black ."
+"#,
+            local_channel_url
+        );
+
+        let pixi_toml_path = self.workspace_dir.join("pixi.toml");
+        fs::write(pixi_toml_path, pixi_toml_content)?;
+        Ok(())
+    }
+
+    /// Install dependencies to create environments using pixi API directly
+    async fn install_dependencies(&self) -> Result<(), Box<dyn std::error::Error>> {
+        // Set environment variables for pixi
+        for (key, value) in self.get_env_vars() {
+            std::env::set_var(key, value);
+        }
+
+        // Change to workspace directory
+        let original_dir = std::env::current_dir()?;
+        std::env::set_current_dir(&self.workspace_dir)?;
+
+        // Create install arguments
+        let install_args = install::Args {
+            project_config: pixi_cli::cli_config::WorkspaceConfig::default(),
+            lock_file_usage: pixi_cli::LockFileUsageConfig::default(),
+            environment: None,
+            config: ConfigCli::default(),
+            all: false,
+            skip: None,
+            skip_with_deps: None,
+            only: None,
+        };
+
+        // Execute pixi install directly
+        let result = install::execute(install_args).await;
+
+        // Restore original directory
+        std::env::set_current_dir(original_dir)?;
+
+        match result {
+            Ok(_) => Ok(()),
+            Err(e) => Err(format!("pixi install failed: {}", e).into()),
+        }
+    }
+
+    /// Run pixi clean and measure execution time using pixi API directly
+    async fn pixi_clean(
+        &self,
+        environment: Option<&str>,
+    ) -> Result<Duration, Box<dyn std::error::Error>> {
+        let env_desc = environment.map_or("all environments".to_string(), |e| {
+            format!("environment '{}'", e)
+        });
+        println!("⏱️ Timing: pixi clean {}", env_desc);
+
+        // Set environment variables for pixi
+        for (key, value) in self.get_env_vars() {
+            std::env::set_var(key, value);
+        }
+
+        // Force non-interactive mode for benchmarks
+        std::env::set_var("NO_COLOR", "1");
+        std::env::set_var("PIXI_NO_PROGRESS", "1");
+        std::env::set_var("CI", "1");
+
+        // Change to workspace directory
+        let original_dir = std::env::current_dir()?;
+        std::env::set_current_dir(&self.workspace_dir)?;
+
+        let start = Instant::now();
+
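+        // Note: the arguments below always run `pixi clean cache -y`, so the
+        // package cache is what gets cleaned and timed; the `environment`
+        // parameter currently only affects the log message above.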
+        // Create clean arguments
+        let clean_args = clean::Args::parse_from(["clean", "cache", "-y"]);
+
+        // Execute pixi clean directly
+        let result = clean::execute(clean_args).await;
+
+        // Restore original directory
+        std::env::set_current_dir(original_dir)?;
+
+        let duration = start.elapsed();
+
+        match result {
+            Ok(_) => {
+                println!("✅ Clean completed in {:.2}s", duration.as_secs_f64());
+                Ok(duration)
+            }
+            Err(e) => {
+                println!("❌ pixi clean failed: {}", e);
+                Err(format!("pixi clean failed: {}", e).into())
+            }
+        }
+    }
+
+    /// Check if environments exist
+    fn environments_exist(&self) -> bool {
+        self.workspace_dir.join(".pixi").join("envs").exists()
+    }
+
+    /// Get size of .pixi/envs directory
+    fn get_envs_size(&self) -> Result<u64, Box<dyn std::error::Error>> {
+        let envs_dir = self.workspace_dir.join(".pixi").join("envs");
+        if !envs_dir.exists() {
+            return Ok(0);
+        }
+
+        let mut total_size = 0;
+        for entry in fs::read_dir(&envs_dir)? {
+            let entry = entry?;
+            let metadata = entry.metadata()?;
+            if metadata.is_file() {
+                total_size += metadata.len();
+            } else if metadata.is_dir() {
+                total_size += self.get_dir_size(&entry.path())?;
+            }
+        }
+        Ok(total_size)
+    }
+
+    #[allow(clippy::only_used_in_recursion)]
+    fn get_dir_size(&self, dir: &std::path::Path) -> Result<u64, Box<dyn std::error::Error>> {
+        let mut total_size = 0;
+        for entry in fs::read_dir(dir)? {
+            let entry = entry?;
+            let metadata = entry.metadata()?;
+            if metadata.is_file() {
+                total_size += metadata.len();
+            } else if metadata.is_dir() {
+                total_size += self.get_dir_size(&entry.path())?;
+            }
+        }
+        Ok(total_size)
+    }
+
+    /// Clean small environment (few small packages)
+    async fn clean_small_environment(&self) -> Result<Duration, Box<dyn std::error::Error>> {
+        self.create_pixi_toml(&["python"])?;
+        self.install_dependencies().await?;
+        self.pixi_clean(None).await
+    }
+
+    /// Clean medium environment (several packages)
+    async fn clean_medium_environment(&self) -> Result<Duration, Box<dyn std::error::Error>> {
+        self.create_pixi_toml(&["python", "numpy", "pandas", "requests"])?;
+        self.install_dependencies().await?;
+        self.pixi_clean(None).await
+    }
+
+    /// Clean large environment (many packages)
+    async fn clean_large_environment(&self) -> Result<Duration, Box<dyn std::error::Error>> {
+        self.create_pixi_toml(&[
+            "python",
+            "numpy",
+            "pandas",
+            "scipy",
+            "matplotlib",
+            "jupyter",
+            "scikit-learn",
+            "requests",
+            "flask",
+            "django",
+        ])?;
+        self.install_dependencies().await?;
+        self.pixi_clean(None).await
+    }
+
+    /// Clean specific environment from multi-environment setup
+    async fn clean_specific_environment(&self) -> Result<Duration, Box<dyn std::error::Error>> {
+        self.create_multi_env_pixi_toml()?;
+        // Install all environments first (pixi install installs all environments by default)
+        self.install_dependencies().await?;
+
+        // Clean only the test environment
+        self.pixi_clean(Some("test")).await
+    }
+
+    /// Clean all environments from multi-environment setup
+    async fn clean_multi_environments(&self) -> Result<Duration, Box<dyn std::error::Error>> {
+        self.create_multi_env_pixi_toml()?;
+        // Install all environments first (pixi install installs all environments by default)
+        self.install_dependencies().await?;
+
+        // Clean all environments
+        self.pixi_clean(None).await
+    }
+
+    /// Clean empty workspace (no environments to clean)
+    async fn clean_empty_workspace(&self) -> Result<Duration, Box<dyn std::error::Error>> {
+        self.create_pixi_toml(&["python"])?;
+        // Don't install dependencies, so no environments exist
+        self.pixi_clean(None).await
+    }
+}
+
+/// Shared cache for warm testing
+struct SharedCache {
+    cache_dir: PathBuf,
+    _temp_dir: TempDir,
+}
+
+impl SharedCache {
+    fn new() -> Result<Self, Box<dyn std::error::Error>> {
+        let temp_dir = TempDir::new()?;
+        let cache_dir = temp_dir.path().join("shared_pixi_cache");
+        fs::create_dir_all(&cache_dir)?;
+
+        Ok(Self {
+            cache_dir,
+            _temp_dir: temp_dir,
+        })
+    }
+}
+
+fn bench_environment_sizes(c: &mut Criterion) {
+    let shared_cache = SharedCache::new().expect("Failed to create shared cache");
+    let mut group = c.benchmark_group("environment_sizes_clean");
+    group.measurement_time(Duration::from_secs(90)); // 1.5 minutes
+    group.sample_size(10); // Criterion's minimum; smaller sample sizes panic
+    group.warm_up_time(Duration::from_secs(10));
+
+    // Small environment clean
+    group.bench_function("clean_small_environment", |b| {
+        b.iter(|| {
+            let workspace = IsolatedPixiWorkspace::new_with_shared_cache(&shared_cache.cache_dir)
+                .expect("Failed to create workspace with shared cache");
+            let duration = RUNTIME
+                .block_on(workspace.clean_small_environment())
+                .expect("Failed to time pixi clean");
+            black_box(duration)
+        })
+    });
+
+    // Medium environment clean
+    group.bench_function("clean_medium_environment", |b| {
+        b.iter(|| {
+            let workspace = IsolatedPixiWorkspace::new_with_shared_cache(&shared_cache.cache_dir)
+                .expect("Failed to create workspace with shared cache");
+            let duration = RUNTIME
+                .block_on(workspace.clean_medium_environment())
+                .expect("Failed to time pixi clean");
+            black_box(duration)
+        })
+    });
+
+    // Large environment clean
+    group.bench_function("clean_large_environment", |b| {
+        b.iter(|| {
+            let workspace = IsolatedPixiWorkspace::new_with_shared_cache(&shared_cache.cache_dir)
+                .expect("Failed to create workspace with shared cache");
+            let duration = RUNTIME
+                .block_on(workspace.clean_large_environment())
+                .expect("Failed to time pixi clean");
+            black_box(duration)
+        })
+    });
+}
+
+fn bench_multi_environment_scenarios(c: &mut Criterion) {
+    let shared_cache = SharedCache::new().expect("Failed to create shared cache");
+    let mut group = c.benchmark_group("multi_environment_clean");
+    group.measurement_time(Duration::from_secs(120)); // 2 minutes
+    group.sample_size(10); // Minimum required sample size
+    group.warm_up_time(Duration::from_secs(15));
+
+    // Clean specific environment
+    group.bench_function("clean_specific_environment", |b| {
+        b.iter(|| {
+            let workspace = IsolatedPixiWorkspace::new_with_shared_cache(&shared_cache.cache_dir)
+                .expect("Failed to create workspace with shared cache");
+            let duration = RUNTIME
+                .block_on(workspace.clean_specific_environment())
+                .expect("Failed to time pixi clean specific environment");
+            black_box(duration)
+        })
+    });
+
+    // Clean all environments in multi-environment setup
+    group.bench_function("clean_all_multi_environments", |b| {
+        b.iter(|| {
+            let workspace = IsolatedPixiWorkspace::new_with_shared_cache(&shared_cache.cache_dir)
+                .expect("Failed to create workspace with shared cache");
+            let duration = RUNTIME
+                .block_on(workspace.clean_multi_environments())
+                .expect("Failed to time pixi clean all environments");
+            black_box(duration)
+        })
+    });
+}
+
+fn bench_edge_cases(c: &mut Criterion) {
+    let shared_cache = SharedCache::new().expect("Failed to create shared cache");
+    let mut group = c.benchmark_group("edge_cases_clean");
+    group.measurement_time(Duration::from_secs(60)); // 1 minute
+    group.sample_size(10); // More samples for quick operations
+    group.warm_up_time(Duration::from_secs(5));
+
+    // Clean empty workspace (no environments exist)
+    group.bench_function("clean_empty_workspace", |b| {
+        b.iter(|| {
+            let workspace = IsolatedPixiWorkspace::new_with_shared_cache(&shared_cache.cache_dir)
+                .expect("Failed to create workspace with shared cache");
+            let duration = RUNTIME
+                .block_on(workspace.clean_empty_workspace())
+                .expect("Failed to time pixi clean empty workspace");
+            black_box(duration)
+        })
+    });
+}
+
+fn bench_repeated_clean_operations(c: &mut Criterion) {
+    let shared_cache = SharedCache::new().expect("Failed to create shared cache");
+    let mut group = c.benchmark_group("repeated_clean_operations");
+    group.measurement_time(Duration::from_secs(90)); // 1.5 minutes
+    group.sample_size(10); // Criterion's minimum; smaller sample sizes panic
+    group.warm_up_time(Duration::from_secs(10));
+
+    // Clean, reinstall, clean again cycle
+    group.bench_function("clean_reinstall_clean_cycle", |b| {
+        b.iter(|| {
+            let workspace = IsolatedPixiWorkspace::new_with_shared_cache(&shared_cache.cache_dir)
+                .expect("Failed to create workspace with shared cache");
workspace with shared cache"); + + RUNTIME.block_on(async { + // Setup environment + workspace + .create_pixi_toml(&["python", "numpy"]) + .expect("Failed to create pixi.toml"); + workspace + .install_dependencies() + .await + .expect("Failed to install dependencies"); + + // First clean + let duration1 = workspace + .pixi_clean(None) + .await + .expect("Failed to clean first time"); + + // Reinstall + workspace + .install_dependencies() + .await + .expect("Failed to reinstall dependencies"); + + // Second clean + let duration2 = workspace + .pixi_clean(None) + .await + .expect("Failed to clean second time"); + + black_box((duration1, duration2)) + }) + }) + }); +} + +fn bench_clean_performance_by_size(c: &mut Criterion) { + let shared_cache = SharedCache::new().expect("Failed to create shared cache"); + let mut group = c.benchmark_group("clean_performance_by_size"); + group.measurement_time(Duration::from_secs(120)); // 2 minutes + group.sample_size(10); // Minimum required sample size + group.warm_up_time(Duration::from_secs(15)); + + // Measure clean performance vs environment size + group.bench_function("clean_with_size_measurement", |b| { + b.iter(|| { + let workspace = IsolatedPixiWorkspace::new_with_shared_cache(&shared_cache.cache_dir) + .expect("Failed to create workspace with shared cache"); + + RUNTIME.block_on(async { + // Create large environment + workspace + .create_pixi_toml(&[ + "python", + "numpy", + "pandas", + "scipy", + "matplotlib", + "jupyter", + "scikit-learn", + "requests", + "flask", + ]) + .expect("Failed to create pixi.toml"); + workspace + .install_dependencies() + .await + .expect("Failed to install dependencies"); + + // Measure size before clean + let size_before = workspace + .get_envs_size() + .expect("Failed to get environment size"); + + // Clean and measure time + let clean_duration = workspace + .pixi_clean(None) + .await + .expect("Failed to clean environment"); + + // Verify environments are gone + let environments_exist_after = workspace.environments_exist(); + + black_box((clean_duration, size_before, environments_exist_after)) + }) + }) + }); +} + +criterion_group!( + benches, + bench_environment_sizes, + bench_multi_environment_scenarios, + bench_edge_cases, + bench_repeated_clean_operations, + bench_clean_performance_by_size +); +criterion_main!(benches); diff --git a/crates/pixi_bench/benches/cold_warm_install.rs b/crates/pixi_bench/benches/cold_warm_install.rs new file mode 100644 index 0000000000..27d9ad24d0 --- /dev/null +++ b/crates/pixi_bench/benches/cold_warm_install.rs @@ -0,0 +1,481 @@ +use criterion::{black_box, criterion_group, criterion_main, Criterion}; +use fs_err as fs; +use once_cell::sync::Lazy; +use std::collections::HashMap; +use std::path::{Path, PathBuf}; +use std::time::{Duration, Instant}; +use tempfile::TempDir; + +// Pixi crate imports for direct API usage +use pixi_cli::install; +use pixi_config::ConfigCli; + +// Single global runtime for all benchmarks +static RUNTIME: Lazy = + Lazy::new(|| tokio::runtime::Runtime::new().expect("Failed to create Tokio runtime")); + +/// Create an isolated pixi environment with shared cache for warm testing +struct IsolatedPixiEnv { + _temp_dir: TempDir, + cache_dir: PathBuf, + home_dir: PathBuf, + project_dir: PathBuf, + project_created: bool, +} + +impl IsolatedPixiEnv { + fn new() -> Result> { + let temp_dir = TempDir::new()?; + let base_path = temp_dir.path(); + + let cache_dir = base_path.join("pixi_cache"); + let home_dir = base_path.join("pixi_home"); + let project_dir = 
base_path.join("project"); + + fs::create_dir_all(&cache_dir)?; + fs::create_dir_all(&home_dir)?; + fs::create_dir_all(&project_dir)?; + + Ok(Self { + _temp_dir: temp_dir, + cache_dir, + home_dir, + project_dir, + project_created: false, + }) + } + + /// Create with shared cache directory for warm testing + fn new_with_shared_cache(shared_cache_dir: &Path) -> Result> { + let temp_dir = TempDir::new()?; + let base_path = temp_dir.path(); + + let home_dir = base_path.join("pixi_home"); + let project_dir = base_path.join("project"); + + fs::create_dir_all(&home_dir)?; + fs::create_dir_all(&project_dir)?; + + Ok(Self { + _temp_dir: temp_dir, + cache_dir: shared_cache_dir.to_path_buf(), + home_dir, + project_dir, + project_created: false, + }) + } + + fn get_env_vars(&self) -> HashMap { + let mut env_vars = HashMap::new(); + env_vars.insert( + "PIXI_CACHE_DIR".to_string(), + self.cache_dir.to_string_lossy().to_string(), + ); + env_vars.insert( + "PIXI_HOME".to_string(), + self.home_dir.to_string_lossy().to_string(), + ); + env_vars.insert( + "XDG_CACHE_HOME".to_string(), + self.cache_dir.to_string_lossy().to_string(), + ); + env_vars + } + + /// Ensure local channel exists, create it dynamically if missing (for CI robustness) + fn ensure_local_channel_exists( + &self, + local_channel_dir: &Path, + packages: &[&str], + ) -> Result<(), Box> { + let noarch_dir = local_channel_dir.join("noarch"); + + // If the channel already exists, we're good + if noarch_dir.exists() && noarch_dir.join("repodata.json").exists() { + return Ok(()); + } + + println!("🔧 Creating local conda channel for CI environment..."); + + // Create the directory structure + fs::create_dir_all(&noarch_dir)?; + + // Create repodata.json + self.create_repodata_json(&noarch_dir, packages)?; + + // Create minimal conda packages + self.create_conda_packages(&noarch_dir, packages)?; + + println!("✅ Local conda channel created successfully"); + Ok(()) + } + + /// Create repodata.json for the local channel + fn create_repodata_json( + &self, + noarch_dir: &Path, + packages: &[&str], + ) -> Result<(), Box> { + use std::fs::File; + use std::io::Write; + + let mut repodata = serde_json::json!({ + "info": { + "subdir": "noarch" + }, + "packages": {}, + "packages.conda": {}, + "removed": [], + "repodata_version": 1 + }); + + // Add each package to the repodata + for package in packages { + let package_filename = format!("{}-1.0.0-py_0.tar.bz2", package); + repodata["packages"][&package_filename] = serde_json::json!({ + "build": "py_0", + "build_number": 0, + "depends": [], + "license": "MIT", + "name": package, + "platform": null, + "subdir": "noarch", + "timestamp": 1640995200000i64, + "version": "1.0.0" + }); + } + + let mut file = File::create(noarch_dir.join("repodata.json"))?; + file.write_all(serde_json::to_string_pretty(&repodata)?.as_bytes())?; + Ok(()) + } + + /// Create minimal conda packages + fn create_conda_packages( + &self, + noarch_dir: &Path, + packages: &[&str], + ) -> Result<(), Box> { + use std::fs::File; + use std::io::Write; + use std::process::Command as StdCommand; + + for package in packages { + let package_filename = format!("{}-1.0.0-py_0.tar.bz2", package); + let package_path = noarch_dir.join(&package_filename); + + // Create a temporary directory for package contents + let temp_dir = tempfile::TempDir::new()?; + let info_dir = temp_dir.path().join("info"); + fs::create_dir_all(&info_dir)?; + + // Create index.json + let index_data = serde_json::json!({ + "name": package, + "version": "1.0.0", + "build": "py_0", + 
"build_number": 0, + "depends": [], + "license": "MIT", + "platform": null, + "subdir": "noarch", + "timestamp": 1640995200000i64 + }); + + let mut index_file = File::create(info_dir.join("index.json"))?; + index_file.write_all(serde_json::to_string_pretty(&index_data)?.as_bytes())?; + + // Create empty files list + File::create(info_dir.join("files"))?.write_all(b"")?; + + // Create paths.json + let paths_data = serde_json::json!({ + "paths": [], + "paths_version": 1 + }); + let mut paths_file = File::create(info_dir.join("paths.json"))?; + paths_file.write_all(serde_json::to_string_pretty(&paths_data)?.as_bytes())?; + + // Create the tar.bz2 package using system tar command + let output = StdCommand::new("tar") + .args([ + "-cjf", + package_path.to_str().unwrap(), + "-C", + temp_dir.path().to_str().unwrap(), + "info", + ]) + .output()?; + + if !output.status.success() { + return Err(format!( + "Failed to create tar.bz2 package for {}: {}", + package, + String::from_utf8_lossy(&output.stderr) + ) + .into()); + } + } + + Ok(()) + } + + /// Create pixi project only once + fn ensure_pixi_project_created( + &mut self, + packages: &[&str], + ) -> Result<(), Box> { + if self.project_created { + return Ok(()); + } + + use std::fs::File; + use std::io::Write; + + let current_dir = std::env::current_dir()?; + let local_channel_dir = if current_dir.ends_with("pixi_bench") { + current_dir.join("my-local-channel") + } else { + current_dir.join("crates/pixi_bench/my-local-channel") + }; + + // Ensure the local channel exists, create it if it doesn't + self.ensure_local_channel_exists(&local_channel_dir, packages)?; + + let local_channel_url = format!("file://{}", local_channel_dir.to_string_lossy()); + + let mut pixi_toml = format!( + r#"[project] +name = "benchmark-project" +version = "0.1.0" +description = "Benchmark project for pixi local channel benchmark" +channels = ["{}"] +platforms = ["linux-64", "osx-64", "osx-arm64", "win-64"] + +[dependencies] +"#, + local_channel_url + ); + + // Add all packages to dependencies + for package in packages { + pixi_toml.push_str(&format!("{} = \"==1.0.0\"\n", package)); + } + + let mut file = File::create(self.project_dir.join("pixi.toml"))?; + file.write_all(pixi_toml.as_bytes())?; + + self.project_created = true; + Ok(()) + } + + /// For cold cache: create new project and install + async fn pixi_install_cold( + &mut self, + packages: &[&str], + ) -> Result> { + // Always create fresh project for cold test + self.project_created = false; + self.ensure_pixi_project_created(packages)?; + + self.run_pixi_install(packages).await + } + + /// For warm cache: reuse existing project and install + async fn pixi_install_warm( + &mut self, + packages: &[&str], + ) -> Result> { + // Ensure project exists (but don't recreate if already exists) + self.ensure_pixi_project_created(packages)?; + + // For warm test, we measure re-installation or verification time + // This simulates "pixi install" when packages are already resolved/cached + self.run_pixi_install(packages).await + } + + /// Run the actual pixi install command using direct API + async fn run_pixi_install( + &self, + packages: &[&str], + ) -> Result> { + println!("⏱️ Timing: pixi install {} packages", packages.len()); + + // Set environment variables for pixi + for (key, value) in self.get_env_vars() { + std::env::set_var(key, value); + } + + // Change to project directory + let original_dir = std::env::current_dir()?; + std::env::set_current_dir(&self.project_dir)?; + + let start = Instant::now(); + + // Create 
install arguments + let install_args = install::Args { + project_config: pixi_cli::cli_config::WorkspaceConfig::default(), + lock_file_usage: pixi_cli::LockFileUsageConfig::default(), + environment: None, + config: ConfigCli::default(), + all: false, + skip: None, + skip_with_deps: None, + only: None, + }; + + // Execute pixi install directly + let result = install::execute(install_args).await; + + // Restore original directory + std::env::set_current_dir(original_dir)?; + + match result { + Ok(_) => { + let duration = start.elapsed(); + println!("✅ Completed in {:.2}s", duration.as_secs_f64()); + Ok(duration) + } + Err(e) => { + println!("❌ pixi install failed: {}", e); + Err(format!("pixi install failed: {}", e).into()) + } + } + } +} + +/// Shared cache for warm testing +struct SharedCache { + cache_dir: PathBuf, + _temp_dir: TempDir, +} + +impl SharedCache { + fn new() -> Result> { + let temp_dir = TempDir::new()?; + let cache_dir = temp_dir.path().join("shared_pixi_cache"); + fs::create_dir_all(&cache_dir)?; + + Ok(Self { + cache_dir, + _temp_dir: temp_dir, + }) + } +} + +fn bench_small(c: &mut Criterion) { + let packages = ["numpy"]; + + // Create shared cache for warm testing + let shared_cache = SharedCache::new().expect("Failed to create shared cache"); + let mut group = c.benchmark_group("small_package_installs"); + group.measurement_time(Duration::from_secs(60)); // Allow 1 minute for measurements + group.sample_size(10); // Reduce sample size for long operations + group.warm_up_time(Duration::from_secs(5)); // Warm up time + + // Cold cache benchmark - always creates new isolated environment + group.bench_function("cold_cache_small", |b| { + b.to_async(&*RUNTIME).iter(|| async { + let mut env = IsolatedPixiEnv::new().expect("Failed to create isolated environment"); + let duration = env + .pixi_install_cold(&packages) + .await + .expect("Failed to time pixi install"); + black_box(duration) + }) + }); + + // Warm cache benchmark - reuses shared cache and may reuse project + group.bench_function("warm_cache_small", |b| { + b.to_async(&*RUNTIME).iter(|| async { + let mut env = IsolatedPixiEnv::new_with_shared_cache(&shared_cache.cache_dir) + .expect("Failed to create environment with shared cache"); + let duration = env + .pixi_install_warm(&packages) + .await + .expect("Failed to time pixi install"); + black_box(duration) + }) + }); +} + +fn bench_medium(c: &mut Criterion) { + let packages = ["numpy", "pandas", "requests", "click", "pyyaml"]; + + let shared_cache = SharedCache::new().expect("Failed to create shared cache"); + + let mut group = c.benchmark_group("medium_package_installs"); + group.measurement_time(Duration::from_secs(90)); // 1.5 minutes + group.sample_size(5); // Even fewer samples for medium complexity + group.warm_up_time(Duration::from_secs(10)); + + group.bench_function("cold_cache_medium", |b| { + b.to_async(&*RUNTIME).iter(|| async { + let mut env = IsolatedPixiEnv::new().expect("Failed to create isolated environment"); + let duration = env + .pixi_install_cold(&packages) + .await + .expect("Failed to time pixi install"); + black_box(duration) + }) + }); + + group.bench_function("warm_cache_medium", |b| { + b.to_async(&*RUNTIME).iter(|| async { + let mut env = IsolatedPixiEnv::new_with_shared_cache(&shared_cache.cache_dir) + .expect("Failed to create environment with shared cache"); + let duration = env + .pixi_install_warm(&packages) + .await + .expect("Failed to time pixi install"); + black_box(duration) + }) + }); +} + +fn bench_large(c: &mut Criterion) { 
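+    // "Large" is nominal here: every entry resolves to a stub package from the
+    // local channel, so this scenario mainly stresses solving and linking many
+    // specs rather than real download volume.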
+ let packages = [ + "pytorch", + "scipy", + "scikit-learn", + "matplotlib", + "jupyter", + "bokeh", + "dask", + "xarray", + "opencv", + "pandas", + ]; + + let shared_cache = SharedCache::new().expect("Failed to create shared cache"); + let mut group = c.benchmark_group("large_package_installs"); + group.measurement_time(Duration::from_secs(180)); // 3 minutes + group.sample_size(3); // Very few samples for large operations + group.warm_up_time(Duration::from_secs(15)); + + group.bench_function("cold_cache_large", |b| { + b.to_async(&*RUNTIME).iter(|| async { + let mut env = IsolatedPixiEnv::new().expect("Failed to create isolated environment"); + let duration = env + .pixi_install_cold(&packages) + .await + .expect("Failed to time pixi install"); + black_box(duration) + }) + }); + + group.bench_function("warm_cache_large", |b| { + b.to_async(&*RUNTIME).iter(|| async { + let mut env = IsolatedPixiEnv::new_with_shared_cache(&shared_cache.cache_dir) + .expect("Failed to create environment with shared cache"); + let duration = env + .pixi_install_warm(&packages) + .await + .expect("Failed to time pixi install"); + black_box(duration) + }) + }); +} + +criterion_group!(benches, bench_small, bench_medium, bench_large); +criterion_main!(benches); diff --git a/crates/pixi_bench/benches/global_install.rs b/crates/pixi_bench/benches/global_install.rs new file mode 100644 index 0000000000..f81d1706e9 --- /dev/null +++ b/crates/pixi_bench/benches/global_install.rs @@ -0,0 +1,653 @@ +use criterion::{black_box, criterion_group, criterion_main, Criterion}; +use fs_err as fs; +use once_cell::sync::Lazy; +use std::collections::HashMap; +use std::path::PathBuf; +use std::str::FromStr; +use std::time::{Duration, Instant}; +use tempfile::TempDir; + +// Pixi crate imports for direct API usage +use pixi_global::project::GlobalSpec; +use pixi_global::{EnvironmentName, Project}; +use rattler_conda_types::{NamedChannelOrUrl, Platform}; + +// Single global runtime for all benchmarks +static RUNTIME: Lazy = + Lazy::new(|| tokio::runtime::Runtime::new().expect("Failed to create Tokio runtime")); + +/// Create an isolated pixi environment for global install testing +struct IsolatedPixiGlobalEnv { + _temp_dir: TempDir, + cache_dir: PathBuf, + home_dir: PathBuf, + global_dir: PathBuf, +} + +impl IsolatedPixiGlobalEnv { + fn new() -> Result> { + let temp_dir = TempDir::new()?; + let base_path = temp_dir.path(); + + let cache_dir = base_path.join("pixi_cache"); + let home_dir = base_path.join("pixi_home"); + let global_dir = base_path.join("pixi_global"); + + fs::create_dir_all(&cache_dir)?; + fs::create_dir_all(&home_dir)?; + fs::create_dir_all(&global_dir)?; + + Ok(Self { + _temp_dir: temp_dir, + cache_dir, + home_dir, + global_dir, + }) + } + + /// Create with shared cache directory for warm testing + fn new_with_shared_cache( + shared_cache_dir: &std::path::Path, + ) -> Result> { + let temp_dir = TempDir::new()?; + let base_path = temp_dir.path(); + + let home_dir = base_path.join("pixi_home"); + let global_dir = base_path.join("pixi_global"); + + fs::create_dir_all(&home_dir)?; + fs::create_dir_all(&global_dir)?; + + Ok(Self { + _temp_dir: temp_dir, + cache_dir: shared_cache_dir.to_path_buf(), + home_dir, + global_dir, + }) + } + + fn get_env_vars(&self) -> HashMap { + let mut env_vars = HashMap::new(); + env_vars.insert( + "PIXI_CACHE_DIR".to_string(), + self.cache_dir.to_string_lossy().to_string(), + ); + env_vars.insert( + "PIXI_HOME".to_string(), + self.home_dir.to_string_lossy().to_string(), + ); + 
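+        // Also point global state at the sandbox, so benchmark runs can never
+        // touch the user's real global environments.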
env_vars.insert( + "PIXI_GLOBAL_DIR".to_string(), + self.global_dir.to_string_lossy().to_string(), + ); + env_vars.insert( + "XDG_CACHE_HOME".to_string(), + self.cache_dir.to_string_lossy().to_string(), + ); + env_vars + } + + /// Run pixi global install and measure execution time using the pixi_global crate directly + async fn pixi_global_install( + &self, + packages: &[&str], + channels: Option<Vec<NamedChannelOrUrl>>, + platform: Option<Platform>, + _force_reinstall: bool, + ) -> Result<Duration, Box<dyn std::error::Error>> { + println!("⏱️ Timing: pixi global install {} packages", packages.len()); + + let start = Instant::now(); + + // Set environment variables for pixi_global + for (key, value) in self.get_env_vars() { + std::env::set_var(key, value); + } + + // Create or discover the global project + let mut project = Project::discover_or_create().await?; + + // Create the environment name from the first package + let env_name = EnvironmentName::from_str(&format!("bench_{}", packages[0]))?; + + // Use the local channel if no channels are specified + let channels = channels.unwrap_or_else(|| { + let current_dir = std::env::current_dir().unwrap_or_default(); + let local_channel_dir = if current_dir.ends_with("pixi_bench") { + current_dir.join("my-local-channel") + } else { + current_dir.join("crates/pixi_bench/my-local-channel") + }; + let local_channel_url = format!("file://{}", local_channel_dir.to_string_lossy()); + vec![NamedChannelOrUrl::Url(local_channel_url.parse().unwrap())] + }); + + // Add the environment to the manifest with its channels + project + .manifest + .add_environment(&env_name, Some(channels))?; + + // Set the platform if specified + if let Some(platform) = platform { + project.manifest.set_platform(&env_name, platform)?; + } + + // Add each package as a dependency with a version constraint that matches the local channel + for package in packages { + let package_spec = format!("{}==1.0.0", package); + let global_spec = + GlobalSpec::try_from_str(&package_spec, project.global_channel_config())?; + project.manifest.add_dependency(&env_name, &global_spec)?; + } + + // Install the environment + let _environment_update = project.install_environment(&env_name).await?; + + let duration = start.elapsed(); + println!( + "✅ Global install completed in {:.2}s", + duration.as_secs_f64() + ); + + Ok(duration) + } + + /// Install a single small package + async fn install_single_small(&self) -> Result<Duration, Box<dyn std::error::Error>> { + self.pixi_global_install(&["numpy"], None, None, false) + .await + } + + /// Install multiple small packages + async fn install_multiple_small(&self) -> Result<Duration, Box<dyn std::error::Error>> { + self.pixi_global_install(&["numpy", "pandas", "requests"], None, None, false) + .await + } + + /// Install a medium-sized package + async fn install_medium(&self) -> Result<Duration, Box<dyn std::error::Error>> { + self.pixi_global_install(&["matplotlib"], None, None, false) + .await + } + + /// Install a large package + async fn install_large(&self) -> Result<Duration, Box<dyn std::error::Error>> { + self.pixi_global_install(&["jupyter"], None, None, false) + .await + } + + /// Install with force reinstall + async fn install_with_force_reinstall(&self) -> Result<Duration, Box<dyn std::error::Error>> { + // First install + let _ = self + .pixi_global_install(&["numpy"], None, None, false) + .await?; + // Then force reinstall + self.pixi_global_install(&["numpy"], None, None, true).await + } + + /// Install with a specific platform + async fn install_with_platform(&self) -> Result<Duration, Box<dyn std::error::Error>> { + let platform = Platform::current(); + self.pixi_global_install(&["click"], None, Some(platform), false) + .await + } + + /// Install with a custom channel + async fn install_with_custom_channel(&self) -> Result<Duration, Box<dyn std::error::Error>> { + // Use the local channel for this test too, but
with different packages + let current_dir = std::env::current_dir().unwrap_or_default(); + let local_channel_dir = if current_dir.ends_with("pixi_bench") { + current_dir.join("my-local-channel") + } else { + current_dir.join("crates/pixi_bench/my-local-channel") + }; + let local_channel_url = format!("file://{}", local_channel_dir.to_string_lossy()); + let channels = vec![NamedChannelOrUrl::Url(local_channel_url.parse().unwrap())]; + self.pixi_global_install(&["scipy"], Some(channels), None, false) + .await + } + + /// Install and uninstall a single small package (only uninstall is timed) + async fn install_and_uninstall_single_small( + &self, + ) -> Result> { + // Set environment variables once for both operations + for (key, value) in self.get_env_vars() { + std::env::set_var(key, value); + } + + // Create a single project instance for both operations + let mut project = Project::discover_or_create().await?; + let env_name = EnvironmentName::from_str("bench_numpy")?; + + // Use local channel + let current_dir = std::env::current_dir().unwrap_or_default(); + let local_channel_dir = if current_dir.ends_with("pixi_bench") { + current_dir.join("my-local-channel") + } else { + current_dir.join("crates/pixi_bench/my-local-channel") + }; + let local_channel_url = format!("file://{}", local_channel_dir.to_string_lossy()); + let channels = vec![NamedChannelOrUrl::Url(local_channel_url.parse().unwrap())]; + + // Setup: Install the package (not timed) + project + .manifest + .add_environment(&env_name, Some(channels))?; + let package_spec = "numpy==1.0.0"; + let global_spec = GlobalSpec::try_from_str(package_spec, project.global_channel_config())?; + project.manifest.add_dependency(&env_name, &global_spec)?; + let _ = project.install_environment(&env_name).await?; + + // Measure: Only the uninstall operation + println!("⏱️ Timing: pixi global uninstall 1 packages"); + let start = Instant::now(); + let _ = project.remove_environment(&env_name).await?; + let duration = start.elapsed(); + println!( + "✅ Global uninstall completed in {:.2}s", + duration.as_secs_f64() + ); + + Ok(duration) + } + + /// Install and uninstall multiple small packages (only uninstall is timed) + async fn install_and_uninstall_multiple_small( + &self, + ) -> Result> { + // Set environment variables once for all operations + for (key, value) in self.get_env_vars() { + std::env::set_var(key, value); + } + + // Create a single project instance for all operations + let mut project = Project::discover_or_create().await?; + + // Use local channel + let current_dir = std::env::current_dir().unwrap_or_default(); + let local_channel_dir = if current_dir.ends_with("pixi_bench") { + current_dir.join("my-local-channel") + } else { + current_dir.join("crates/pixi_bench/my-local-channel") + }; + let local_channel_url = format!("file://{}", local_channel_dir.to_string_lossy()); + let channels = vec![NamedChannelOrUrl::Url(local_channel_url.parse().unwrap())]; + + // Setup: Install the packages (not timed) + let packages = ["numpy", "pandas", "requests"]; + for package in &packages { + let env_name = EnvironmentName::from_str(&format!("bench_{}", package))?; + project + .manifest + .add_environment(&env_name, Some(channels.clone()))?; + let package_spec = format!("{}==1.0.0", package); + let global_spec = + GlobalSpec::try_from_str(&package_spec, project.global_channel_config())?; + project.manifest.add_dependency(&env_name, &global_spec)?; + let _ = project.install_environment(&env_name).await?; + } + + // Measure: Only the uninstall 
operations + println!( + "⏱️ Timing: pixi global uninstall {} packages", + packages.len() + ); + let start = Instant::now(); + for package in &packages { + let env_name = EnvironmentName::from_str(&format!("bench_{}", package))?; + let _ = project.remove_environment(&env_name).await?; + } + let duration = start.elapsed(); + println!( + "✅ Multiple uninstall completed in {:.2}s", + duration.as_secs_f64() + ); + Ok(duration) + } + + /// Install and uninstall a medium-sized package (only uninstall is timed) + async fn install_and_uninstall_medium(&self) -> Result> { + // Set environment variables once for both operations + for (key, value) in self.get_env_vars() { + std::env::set_var(key, value); + } + + // Create a single project instance for both operations + let mut project = Project::discover_or_create().await?; + let env_name = EnvironmentName::from_str("bench_matplotlib")?; + + // Use local channel + let current_dir = std::env::current_dir().unwrap_or_default(); + let local_channel_dir = if current_dir.ends_with("pixi_bench") { + current_dir.join("my-local-channel") + } else { + current_dir.join("crates/pixi_bench/my-local-channel") + }; + let local_channel_url = format!("file://{}", local_channel_dir.to_string_lossy()); + let channels = vec![NamedChannelOrUrl::Url(local_channel_url.parse().unwrap())]; + + // Setup: Install the package (not timed) + project + .manifest + .add_environment(&env_name, Some(channels))?; + let package_spec = "matplotlib==1.0.0"; + let global_spec = GlobalSpec::try_from_str(package_spec, project.global_channel_config())?; + project.manifest.add_dependency(&env_name, &global_spec)?; + let _ = project.install_environment(&env_name).await?; + + // Measure: Only the uninstall operation + println!("⏱️ Timing: pixi global uninstall 1 packages"); + let start = Instant::now(); + let _ = project.remove_environment(&env_name).await?; + let duration = start.elapsed(); + println!( + "✅ Global uninstall completed in {:.2}s", + duration.as_secs_f64() + ); + + Ok(duration) + } + + /// Install and uninstall a large package (only uninstall is timed) + async fn install_and_uninstall_large(&self) -> Result> { + // Set environment variables once for both operations + for (key, value) in self.get_env_vars() { + std::env::set_var(key, value); + } + + // Create a single project instance for both operations + let mut project = Project::discover_or_create().await?; + let env_name = EnvironmentName::from_str("bench_jupyter")?; + + // Use local channel + let current_dir = std::env::current_dir().unwrap_or_default(); + let local_channel_dir = if current_dir.ends_with("pixi_bench") { + current_dir.join("my-local-channel") + } else { + current_dir.join("crates/pixi_bench/my-local-channel") + }; + let local_channel_url = format!("file://{}", local_channel_dir.to_string_lossy()); + let channels = vec![NamedChannelOrUrl::Url(local_channel_url.parse().unwrap())]; + + // Setup: Install the package (not timed) + project + .manifest + .add_environment(&env_name, Some(channels))?; + let package_spec = "jupyter==1.0.0"; + let global_spec = GlobalSpec::try_from_str(package_spec, project.global_channel_config())?; + project.manifest.add_dependency(&env_name, &global_spec)?; + let _ = project.install_environment(&env_name).await?; + + // Measure: Only the uninstall operation + println!("⏱️ Timing: pixi global uninstall 1 packages"); + let start = Instant::now(); + let _ = project.remove_environment(&env_name).await?; + let duration = start.elapsed(); + println!( + "✅ Global uninstall completed in 
{:.2}s", + duration.as_secs_f64() + ); + + Ok(duration) + } +} + +/// Shared cache for warm testing +struct SharedCache { + cache_dir: PathBuf, + _temp_dir: TempDir, +} + +impl SharedCache { + fn new() -> Result> { + let temp_dir = TempDir::new()?; + let cache_dir = temp_dir.path().join("shared_pixi_cache"); + fs::create_dir_all(&cache_dir)?; + + Ok(Self { + cache_dir, + _temp_dir: temp_dir, + }) + } +} + +fn bench_single_package(c: &mut Criterion) { + let shared_cache = SharedCache::new().expect("Failed to create shared cache"); + let mut group = c.benchmark_group("single_package_global_install"); + group.measurement_time(Duration::from_secs(60)); // Allow 1 minute for measurements + group.sample_size(10); // Reduce sample size for long operations + group.warm_up_time(Duration::from_secs(5)); // Warm up time + + // Cold cache benchmark - always creates new isolated environment + group.bench_function("cold_cache_single", |b| { + b.to_async(&*RUNTIME).iter(|| async { + let env = IsolatedPixiGlobalEnv::new().expect("Failed to create isolated environment"); + let duration = env + .install_single_small() + .await + .expect("Failed to time pixi global install"); + black_box(duration) + }) + }); + + // Warm cache benchmark - reuses shared cache + group.bench_function("warm_cache_single", |b| { + b.to_async(&*RUNTIME).iter(|| async { + let env = IsolatedPixiGlobalEnv::new_with_shared_cache(&shared_cache.cache_dir) + .expect("Failed to create environment with shared cache"); + let duration = env + .install_single_small() + .await + .expect("Failed to time pixi global install"); + black_box(duration) + }) + }); +} + +fn bench_multiple_packages(c: &mut Criterion) { + let shared_cache = SharedCache::new().expect("Failed to create shared cache"); + let mut group = c.benchmark_group("multiple_packages_global_install"); + group.measurement_time(Duration::from_secs(90)); // 1.5 minutes + group.sample_size(10); // Minimum required samples + group.warm_up_time(Duration::from_secs(10)); + + // Cold cache benchmark + group.bench_function("cold_cache_multiple", |b| { + b.to_async(&*RUNTIME).iter(|| async { + let env = IsolatedPixiGlobalEnv::new().expect("Failed to create isolated environment"); + let duration = env + .install_multiple_small() + .await + .expect("Failed to time pixi global install"); + black_box(duration) + }) + }); + + // Warm cache benchmark + group.bench_function("warm_cache_multiple", |b| { + b.to_async(&*RUNTIME).iter(|| async { + let env = IsolatedPixiGlobalEnv::new_with_shared_cache(&shared_cache.cache_dir) + .expect("Failed to create environment with shared cache"); + let duration = env + .install_multiple_small() + .await + .expect("Failed to time pixi global install"); + black_box(duration) + }) + }); +} + +fn bench_package_sizes(c: &mut Criterion) { + let shared_cache = SharedCache::new().expect("Failed to create shared cache"); + let mut group = c.benchmark_group("package_sizes_global_install"); + group.measurement_time(Duration::from_secs(120)); // 2 minutes + group.sample_size(10); // Minimum required samples + group.warm_up_time(Duration::from_secs(15)); + + // Medium package benchmark + group.bench_function("medium_package", |b| { + b.to_async(&*RUNTIME).iter(|| async { + let env = IsolatedPixiGlobalEnv::new_with_shared_cache(&shared_cache.cache_dir) + .expect("Failed to create environment with shared cache"); + let duration = env + .install_medium() + .await + .expect("Failed to time pixi global install"); + black_box(duration) + }) + }); + + // Large package benchmark + 
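+    // Runs against the shared cache: with the package already fetched, this
+    // mostly measures solving and linking rather than download time.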
group.bench_function("large_package", |b| { + b.to_async(&*RUNTIME).iter(|| async { + let env = IsolatedPixiGlobalEnv::new_with_shared_cache(&shared_cache.cache_dir) + .expect("Failed to create environment with shared cache"); + let duration = env + .install_large() + .await + .expect("Failed to time pixi global install"); + black_box(duration) + }) + }); +} + +fn bench_special_scenarios(c: &mut Criterion) { + let shared_cache = SharedCache::new().expect("Failed to create shared cache"); + let mut group = c.benchmark_group("special_scenarios_global_install"); + group.measurement_time(Duration::from_secs(90)); // 1.5 minutes + group.sample_size(10); // Minimum required samples + group.warm_up_time(Duration::from_secs(10)); + + // Force reinstall benchmark + group.bench_function("force_reinstall", |b| { + b.to_async(&*RUNTIME).iter(|| async { + let env = IsolatedPixiGlobalEnv::new_with_shared_cache(&shared_cache.cache_dir) + .expect("Failed to create environment with shared cache"); + let duration = env + .install_with_force_reinstall() + .await + .expect("Failed to time pixi global install with force reinstall"); + black_box(duration) + }) + }); + + // Platform-specific install benchmark + group.bench_function("platform_specific", |b| { + b.to_async(&*RUNTIME).iter(|| async { + let env = IsolatedPixiGlobalEnv::new_with_shared_cache(&shared_cache.cache_dir) + .expect("Failed to create environment with shared cache"); + let duration = env + .install_with_platform() + .await + .expect("Failed to time pixi global install with platform"); + black_box(duration) + }) + }); + + // Custom channel benchmark + group.bench_function("custom_channel", |b| { + b.to_async(&*RUNTIME).iter(|| async { + let env = IsolatedPixiGlobalEnv::new_with_shared_cache(&shared_cache.cache_dir) + .expect("Failed to create environment with shared cache"); + let duration = env + .install_with_custom_channel() + .await + .expect("Failed to time pixi global install with custom channel"); + black_box(duration) + }) + }); +} + +fn bench_single_package_uninstall(c: &mut Criterion) { + let shared_cache = SharedCache::new().expect("Failed to create shared cache"); + let mut group = c.benchmark_group("single_package_global_uninstall"); + group.measurement_time(Duration::from_secs(60)); // Allow 1 minute for measurements + group.sample_size(10); // Reduce sample size for long operations + group.warm_up_time(Duration::from_secs(5)); // Warm up time + + // Uninstall single package benchmark + group.bench_function("uninstall_single", |b| { + b.to_async(&*RUNTIME).iter(|| async { + let env = IsolatedPixiGlobalEnv::new_with_shared_cache(&shared_cache.cache_dir) + .expect("Failed to create environment with shared cache"); + // Install and uninstall (only uninstall is timed) + let duration = env + .install_and_uninstall_single_small() + .await + .expect("Failed to time pixi global uninstall"); + black_box(duration) + }) + }); +} + +fn bench_multiple_packages_uninstall(c: &mut Criterion) { + let shared_cache = SharedCache::new().expect("Failed to create shared cache"); + let mut group = c.benchmark_group("multiple_packages_global_uninstall"); + group.measurement_time(Duration::from_secs(90)); // 1.5 minutes + group.sample_size(10); // Minimum required samples + group.warm_up_time(Duration::from_secs(10)); + + // Uninstall multiple packages benchmark + group.bench_function("uninstall_multiple", |b| { + b.to_async(&*RUNTIME).iter(|| async { + let env = IsolatedPixiGlobalEnv::new_with_shared_cache(&shared_cache.cache_dir) + .expect("Failed to 
create environment with shared cache"); + // Install and uninstall (only uninstall is timed) + let duration = env + .install_and_uninstall_multiple_small() + .await + .expect("Failed to time pixi global uninstall"); + black_box(duration) + }) + }); +} + +fn bench_package_sizes_uninstall(c: &mut Criterion) { + let shared_cache = SharedCache::new().expect("Failed to create shared cache"); + let mut group = c.benchmark_group("package_sizes_global_uninstall"); + group.measurement_time(Duration::from_secs(120)); // 2 minutes + group.sample_size(10); // Minimum required samples + group.warm_up_time(Duration::from_secs(15)); + + // Medium package uninstall benchmark + group.bench_function("uninstall_medium_package", |b| { + b.to_async(&*RUNTIME).iter(|| async { + let env = IsolatedPixiGlobalEnv::new_with_shared_cache(&shared_cache.cache_dir) + .expect("Failed to create environment with shared cache"); + // Install and uninstall (only uninstall is timed) + let duration = env + .install_and_uninstall_medium() + .await + .expect("Failed to time pixi global uninstall"); + black_box(duration) + }) + }); + + // Large package uninstall benchmark + group.bench_function("uninstall_large_package", |b| { + b.to_async(&*RUNTIME).iter(|| async { + let env = IsolatedPixiGlobalEnv::new_with_shared_cache(&shared_cache.cache_dir) + .expect("Failed to create environment with shared cache"); + // Install and uninstall (only uninstall is timed) + let duration = env + .install_and_uninstall_large() + .await + .expect("Failed to time pixi global uninstall"); + black_box(duration) + }) + }); +} + +criterion_group!( + benches, + bench_single_package, + bench_multiple_packages, + bench_package_sizes, + bench_special_scenarios, + bench_single_package_uninstall, + bench_multiple_packages_uninstall, + bench_package_sizes_uninstall +); +criterion_main!(benches); diff --git a/crates/pixi_bench/benches/lock_install.rs b/crates/pixi_bench/benches/lock_install.rs new file mode 100644 index 0000000000..2d47e7e8e3 --- /dev/null +++ b/crates/pixi_bench/benches/lock_install.rs @@ -0,0 +1,545 @@ +use criterion::{black_box, criterion_group, criterion_main, Criterion}; +use fs_err as fs; +use once_cell::sync::Lazy; +use std::collections::HashMap; +use std::path::{Path, PathBuf}; +use std::time::{Duration, Instant}; +use tempfile::TempDir; + +// Pixi crate imports for direct API usage +use pixi_cli::install; +use pixi_config::ConfigCli; + +// Single global runtime for all benchmarks +static RUNTIME: Lazy = + Lazy::new(|| tokio::runtime::Runtime::new().expect("Failed to create Tokio runtime")); + +/// Create an isolated pixi environment for lockfile testing +struct IsolatedPixiEnv { + _temp_dir: TempDir, + cache_dir: PathBuf, + home_dir: PathBuf, + project_dir: PathBuf, + project_created: bool, +} + +impl IsolatedPixiEnv { + fn new() -> Result> { + let temp_dir = TempDir::new()?; + let base_path = temp_dir.path(); + + let cache_dir = base_path.join("pixi_cache"); + let home_dir = base_path.join("pixi_home"); + let project_dir = base_path.join("project"); + + fs::create_dir_all(&cache_dir)?; + fs::create_dir_all(&home_dir)?; + fs::create_dir_all(&project_dir)?; + + Ok(Self { + _temp_dir: temp_dir, + cache_dir, + home_dir, + project_dir, + project_created: false, + }) + } + + fn get_env_vars(&self) -> HashMap { + let mut env_vars = HashMap::new(); + env_vars.insert( + "PIXI_CACHE_DIR".to_string(), + self.cache_dir.to_string_lossy().to_string(), + ); + env_vars.insert( + "PIXI_HOME".to_string(), + 
self.home_dir.to_string_lossy().to_string(), + ); + env_vars.insert( + "XDG_CACHE_HOME".to_string(), + self.cache_dir.to_string_lossy().to_string(), + ); + env_vars + } + + /// Ensure local channel exists, create it dynamically if missing (for CI robustness) + fn ensure_local_channel_exists( + &self, + local_channel_dir: &Path, + packages: &[&str], + ) -> Result<(), Box> { + let noarch_dir = local_channel_dir.join("noarch"); + + // If the channel already exists, we're good + if noarch_dir.exists() && noarch_dir.join("repodata.json").exists() { + return Ok(()); + } + + println!("🔧 Creating local conda channel for CI environment..."); + + // Create the directory structure + fs::create_dir_all(&noarch_dir)?; + + // Create repodata.json + self.create_repodata_json(&noarch_dir, packages)?; + + // Create minimal conda packages + self.create_conda_packages(&noarch_dir, packages)?; + + println!("✅ Local conda channel created successfully"); + Ok(()) + } + + /// Create repodata.json for the local channel + fn create_repodata_json( + &self, + noarch_dir: &Path, + packages: &[&str], + ) -> Result<(), Box> { + use std::fs::File; + use std::io::Write; + + let mut repodata = serde_json::json!({ + "info": { + "subdir": "noarch" + }, + "packages": {}, + "packages.conda": {}, + "removed": [], + "repodata_version": 1 + }); + + // Add each package to the repodata + for package in packages { + let package_filename = format!("{}-1.0.0-py_0.tar.bz2", package); + repodata["packages"][&package_filename] = serde_json::json!({ + "build": "py_0", + "build_number": 0, + "depends": [], + "license": "MIT", + "name": package, + "platform": null, + "subdir": "noarch", + "timestamp": 1640995200000i64, + "version": "1.0.0" + }); + } + + let mut file = File::create(noarch_dir.join("repodata.json"))?; + file.write_all(serde_json::to_string_pretty(&repodata)?.as_bytes())?; + Ok(()) + } + + /// Create minimal conda packages + fn create_conda_packages( + &self, + noarch_dir: &Path, + packages: &[&str], + ) -> Result<(), Box> { + use std::fs::File; + use std::io::Write; + use std::process::Command as StdCommand; + + for package in packages { + let package_filename = format!("{}-1.0.0-py_0.tar.bz2", package); + let package_path = noarch_dir.join(&package_filename); + + // Create a temporary directory for package contents + let temp_dir = tempfile::TempDir::new()?; + let info_dir = temp_dir.path().join("info"); + fs::create_dir_all(&info_dir)?; + + // Create index.json + let index_data = serde_json::json!({ + "name": package, + "version": "1.0.0", + "build": "py_0", + "build_number": 0, + "depends": [], + "license": "MIT", + "platform": null, + "subdir": "noarch", + "timestamp": 1640995200000i64 + }); + + let mut index_file = File::create(info_dir.join("index.json"))?; + index_file.write_all(serde_json::to_string_pretty(&index_data)?.as_bytes())?; + + // Create empty files list + File::create(info_dir.join("files"))?.write_all(b"")?; + + // Create paths.json + let paths_data = serde_json::json!({ + "paths": [], + "paths_version": 1 + }); + let mut paths_file = File::create(info_dir.join("paths.json"))?; + paths_file.write_all(serde_json::to_string_pretty(&paths_data)?.as_bytes())?; + + // Create the tar.bz2 package using system tar command + let output = StdCommand::new("tar") + .args([ + "-cjf", + package_path.to_str().unwrap(), + "-C", + temp_dir.path().to_str().unwrap(), + "info", + ]) + .output()?; + + if !output.status.success() { + return Err(format!( + "Failed to create tar.bz2 package for {}: {}", + package, + 
String::from_utf8_lossy(&output.stderr) + ) + .into()); + } + } + + Ok(()) + } + + /// Create pixi project and generate lockfile + async fn create_pixi_project_with_lockfile( + &mut self, + packages: &[&str], + ) -> Result<(), Box> { + use std::fs::File; + use std::io::Write; + + let current_dir = std::env::current_dir()?; + let local_channel_dir = if current_dir.ends_with("pixi_bench") { + current_dir.join("my-local-channel") + } else { + current_dir.join("crates/pixi_bench/my-local-channel") + }; + + // Ensure the local channel exists, create it if it doesn't + self.ensure_local_channel_exists(&local_channel_dir, packages)?; + + let local_channel_url = format!("file://{}", local_channel_dir.to_string_lossy()); + + let mut pixi_toml = format!( + r#"[project] +name = "lockfile-benchmark-project" +version = "0.1.0" +description = "Benchmark project for pixi lockfile testing" +channels = ["{}"] +platforms = ["linux-64", "osx-64", "osx-arm64", "win-64"] + +[dependencies] +"#, + local_channel_url + ); + + // Add all packages to dependencies + for package in packages { + pixi_toml.push_str(&format!("{} = \"==1.0.0\"\n", package)); + } + + let mut file = File::create(self.project_dir.join("pixi.toml"))?; + file.write_all(pixi_toml.as_bytes())?; + + // Generate lockfile by running install once + self.run_pixi_install_internal(packages).await?; + + self.project_created = true; + Ok(()) + } + + /// Create pixi project without lockfile + fn create_pixi_project_without_lockfile( + &mut self, + packages: &[&str], + ) -> Result<(), Box> { + use std::fs::File; + use std::io::Write; + + let current_dir = std::env::current_dir()?; + let local_channel_dir = if current_dir.ends_with("pixi_bench") { + current_dir.join("my-local-channel") + } else { + current_dir.join("crates/pixi_bench/my-local-channel") + }; + + // Ensure the local channel exists, create it if it doesn't + self.ensure_local_channel_exists(&local_channel_dir, packages)?; + + let local_channel_url = format!("file://{}", local_channel_dir.to_string_lossy()); + + let mut pixi_toml = format!( + r#"[project] +name = "no-lockfile-benchmark-project" +version = "0.1.0" +description = "Benchmark project for pixi no-lockfile testing" +channels = ["{}"] +platforms = ["linux-64", "osx-64", "osx-arm64", "win-64"] + +[dependencies] +"#, + local_channel_url + ); + + // Add all packages to dependencies + for package in packages { + pixi_toml.push_str(&format!("{} = \"==1.0.0\"\n", package)); + } + + let mut file = File::create(self.project_dir.join("pixi.toml"))?; + file.write_all(pixi_toml.as_bytes())?; + + // Ensure no lockfile exists + let lockfile_path = self.project_dir.join("pixi.lock"); + if lockfile_path.exists() { + fs::remove_file(lockfile_path)?; + } + + self.project_created = true; + Ok(()) + } + + /// Install with existing lockfile - should be faster as dependency resolution is skipped + async fn pixi_install_with_lockfile( + &mut self, + packages: &[&str], + ) -> Result> { + // Create project with lockfile if not already created + if !self.project_created { + self.create_pixi_project_with_lockfile(packages).await?; + } + + // Ensure lockfile exists + let lockfile_path = self.project_dir.join("pixi.lock"); + if !lockfile_path.exists() { + return Err("Lockfile does not exist for with-lockfile benchmark".into()); + } + + println!( + "⏱️ Timing: pixi install with lockfile ({} packages)", + packages.len() + ); + self.run_pixi_install_timed(packages).await + } + + /// Install without lockfile - requires full dependency resolution + async fn 
pixi_install_without_lockfile( + &mut self, + packages: &[&str], + ) -> Result> { + // Always create fresh project without lockfile + self.project_created = false; + self.create_pixi_project_without_lockfile(packages)?; + + // Ensure no lockfile exists + let lockfile_path = self.project_dir.join("pixi.lock"); + if lockfile_path.exists() { + fs::remove_file(lockfile_path)?; + } + + println!( + "⏱️ Timing: pixi install without lockfile ({} packages)", + packages.len() + ); + self.run_pixi_install_timed(packages).await + } + + /// Internal install method for setup (not timed) + async fn run_pixi_install_internal( + &self, + _packages: &[&str], + ) -> Result<(), Box> { + // Set environment variables for pixi + for (key, value) in self.get_env_vars() { + std::env::set_var(key, value); + } + + // Change to project directory + let original_dir = std::env::current_dir()?; + std::env::set_current_dir(&self.project_dir)?; + + // Create install arguments + let install_args = install::Args { + project_config: pixi_cli::cli_config::WorkspaceConfig::default(), + lock_file_usage: pixi_cli::LockFileUsageConfig::default(), + environment: None, + config: ConfigCli::default(), + all: false, + skip: None, + skip_with_deps: None, + only: None, + }; + + // Execute pixi install directly + let result = install::execute(install_args).await; + + // Restore original directory + std::env::set_current_dir(original_dir)?; + + match result { + Ok(_) => Ok(()), + Err(e) => Err(format!("pixi install failed: {}", e).into()), + } + } + + /// Run the actual pixi install command using direct API (timed) + async fn run_pixi_install_timed( + &self, + _packages: &[&str], + ) -> Result> { + // Set environment variables for pixi + for (key, value) in self.get_env_vars() { + std::env::set_var(key, value); + } + + // Change to project directory + let original_dir = std::env::current_dir()?; + std::env::set_current_dir(&self.project_dir)?; + + let start = Instant::now(); + + // Create install arguments + let install_args = install::Args { + project_config: pixi_cli::cli_config::WorkspaceConfig::default(), + lock_file_usage: pixi_cli::LockFileUsageConfig::default(), + environment: None, + config: ConfigCli::default(), + all: false, + skip: None, + skip_with_deps: None, + only: None, + }; + + // Execute pixi install directly + let result = install::execute(install_args).await; + + // Restore original directory + std::env::set_current_dir(original_dir)?; + + match result { + Ok(_) => { + let duration = start.elapsed(); + println!("✅ Completed in {:.2}s", duration.as_secs_f64()); + Ok(duration) + } + Err(e) => { + println!("❌ pixi install failed: {}", e); + Err(format!("pixi install failed: {}", e).into()) + } + } + } +} + +fn bench_lockfile_small(c: &mut Criterion) { + let packages = ["numpy"]; + + let mut group = c.benchmark_group("small_lockfile_installs"); + group.measurement_time(Duration::from_secs(30)); // Allow 30 seconds for measurements + group.sample_size(20); // Increase sample size to meet criterion requirements + group.warm_up_time(Duration::from_secs(5)); // Warm up time + + // Install with lockfile - should be faster + group.bench_function("with_lockfile_small", |b| { + b.to_async(&*RUNTIME).iter(|| async { + let mut env = IsolatedPixiEnv::new().expect("Failed to create isolated environment"); + let duration = env + .pixi_install_with_lockfile(&packages) + .await + .expect("Failed to time pixi install with lockfile"); + black_box(duration) + }) + }); + + // Install without lockfile - requires dependency resolution + 
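+    // Comparing this against the with-lockfile case above isolates the cost of
+    // the solve step, since both variants link the same stub packages afterwards.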
group.bench_function("without_lockfile_small", |b| { + b.to_async(&*RUNTIME).iter(|| async { + let mut env = IsolatedPixiEnv::new().expect("Failed to create isolated environment"); + let duration = env + .pixi_install_without_lockfile(&packages) + .await + .expect("Failed to time pixi install without lockfile"); + black_box(duration) + }) + }); +} + +fn bench_lockfile_medium(c: &mut Criterion) { + let packages = ["numpy", "pandas", "requests", "click", "pyyaml"]; + + let mut group = c.benchmark_group("medium_lockfile_installs"); + group.measurement_time(Duration::from_secs(60)); // 1 minute + group.sample_size(15); // Increase sample size to meet criterion requirements + group.warm_up_time(Duration::from_secs(10)); + + group.bench_function("with_lockfile_medium", |b| { + b.to_async(&*RUNTIME).iter(|| async { + let mut env = IsolatedPixiEnv::new().expect("Failed to create isolated environment"); + let duration = env + .pixi_install_with_lockfile(&packages) + .await + .expect("Failed to time pixi install with lockfile"); + black_box(duration) + }) + }); + + group.bench_function("without_lockfile_medium", |b| { + b.to_async(&*RUNTIME).iter(|| async { + let mut env = IsolatedPixiEnv::new().expect("Failed to create isolated environment"); + let duration = env + .pixi_install_without_lockfile(&packages) + .await + .expect("Failed to time pixi install without lockfile"); + black_box(duration) + }) + }); +} + +fn bench_lockfile_large(c: &mut Criterion) { + let packages = [ + "pytorch", + "scipy", + "scikit-learn", + "matplotlib", + "jupyter", + "bokeh", + "dask", + "xarray", + "opencv", + "pandas", + ]; + + let mut group = c.benchmark_group("large_lockfile_installs"); + group.measurement_time(Duration::from_secs(120)); // 2 minutes + group.sample_size(10); // Minimum sample size to meet criterion requirements + group.warm_up_time(Duration::from_secs(15)); + + group.bench_function("with_lockfile_large", |b| { + b.to_async(&*RUNTIME).iter(|| async { + let mut env = IsolatedPixiEnv::new().expect("Failed to create isolated environment"); + let duration = env + .pixi_install_with_lockfile(&packages) + .await + .expect("Failed to time pixi install with lockfile"); + black_box(duration) + }) + }); + + group.bench_function("without_lockfile_large", |b| { + b.to_async(&*RUNTIME).iter(|| async { + let mut env = IsolatedPixiEnv::new().expect("Failed to create isolated environment"); + let duration = env + .pixi_install_without_lockfile(&packages) + .await + .expect("Failed to time pixi install without lockfile"); + black_box(duration) + }) + }); +} + +criterion_group!( + benches, + bench_lockfile_small, + bench_lockfile_medium, + bench_lockfile_large +); +criterion_main!(benches); diff --git a/crates/pixi_bench/benches/task_run.rs b/crates/pixi_bench/benches/task_run.rs new file mode 100644 index 0000000000..3be85f8f62 --- /dev/null +++ b/crates/pixi_bench/benches/task_run.rs @@ -0,0 +1,359 @@ +use criterion::{black_box, criterion_group, criterion_main, Criterion}; +use fs_err as fs; +use once_cell::sync::Lazy; +use std::collections::HashMap; +use std::path::PathBuf; +use std::time::{Duration, Instant}; +use tempfile::TempDir; + +// Pixi crate imports for direct API usage +use pixi_cli::run; +use pixi_config::ConfigCli; + +// Single global runtime for all benchmarks +static RUNTIME: Lazy = + Lazy::new(|| tokio::runtime::Runtime::new().expect("Failed to create Tokio runtime")); + +/// Create an isolated pixi environment for task runner testing +struct IsolatedPixiEnv { + _temp_dir: TempDir, + cache_dir: 
PathBuf, + home_dir: PathBuf, + project_dir: PathBuf, + project_created: bool, +} + +impl IsolatedPixiEnv { + fn new() -> Result> { + let temp_dir = TempDir::new()?; + let base_path = temp_dir.path(); + + let cache_dir = base_path.join("pixi_cache"); + let home_dir = base_path.join("pixi_home"); + let project_dir = base_path.join("project"); + + fs::create_dir_all(&cache_dir)?; + fs::create_dir_all(&home_dir)?; + fs::create_dir_all(&project_dir)?; + + Ok(Self { + _temp_dir: temp_dir, + cache_dir, + home_dir, + project_dir, + project_created: false, + }) + } + + fn get_env_vars(&self) -> HashMap { + let mut env_vars = HashMap::new(); + env_vars.insert( + "PIXI_CACHE_DIR".to_string(), + self.cache_dir.to_string_lossy().to_string(), + ); + env_vars.insert( + "PIXI_HOME".to_string(), + self.home_dir.to_string_lossy().to_string(), + ); + env_vars.insert( + "XDG_CACHE_HOME".to_string(), + self.cache_dir.to_string_lossy().to_string(), + ); + env_vars + } + + /// Create pixi project with tasks + fn create_pixi_project_with_tasks( + &mut self, + _packages: &[&str], + task_type: TaskType, + ) -> Result<(), Box> { + use std::fs::File; + use std::io::Write; + + // Create a minimal pixi.toml without external dependencies to avoid platform issues + let mut pixi_toml = r#"[project] +name = "task-benchmark-project" +version = "0.1.0" +description = "Benchmark project for pixi task runner testing" +channels = ["conda-forge"] +platforms = ["osx-arm64", "linux-64", "win-64"] + +[dependencies] +# No external dependencies to avoid platform resolution issues + +"# + .to_string(); + + // Add tasks based on the task type + pixi_toml.push_str("\n[tasks]\n"); + match task_type { + TaskType::Simple => { + pixi_toml.push_str( + r#"simple = "echo 'Hello from simple task'" +simple-with-args = "echo 'Task with args:' $@" +"#, + ); + } + TaskType::Complex => { + pixi_toml.push_str(r#"complex = "echo 'Starting complex task' && sleep 0.1 && echo 'Complex task completed'" +multi-step = "echo 'Step 1: Preparation' && echo 'Step 2: Processing' && echo 'Step 3: Cleanup'" +"#); + } + TaskType::WithDependencies => { + pixi_toml.push_str( + r#"prepare = "echo 'Preparing...'" +build = { cmd = "echo 'Building...'", depends-on = ["prepare"] } +test = { cmd = "echo 'Testing...'", depends-on = ["build"] } +deploy = { cmd = "echo 'Deploying...'", depends-on = ["test"] } +"#, + ); + } + TaskType::Python => { + pixi_toml.push_str( + r#"shell-simple = "echo 'Hello from shell task'" +shell-version = "echo 'Shell version check'" +shell-script = "echo 'Running shell script' && date" +"#, + ); + } + } + + let mut file = File::create(self.project_dir.join("pixi.toml"))?; + file.write_all(pixi_toml.as_bytes())?; + + self.project_created = true; + Ok(()) + } + + /// Run a pixi task and measure execution time + async fn run_pixi_task( + &mut self, + packages: &[&str], + task_type: TaskType, + task_name: &str, + task_args: Vec, + ) -> Result> { + // Create project if not already created + if !self.project_created { + self.create_pixi_project_with_tasks(packages, task_type)?; + } + + // Set environment variables for pixi + for (key, value) in self.get_env_vars() { + std::env::set_var(key, value); + } + + // Change to project directory + let original_dir = std::env::current_dir()?; + std::env::set_current_dir(&self.project_dir)?; + + let start = Instant::now(); + + // Create run arguments + let mut task_cmd = vec![task_name.to_string()]; + task_cmd.extend(task_args); + + let run_args = run::Args { + task: task_cmd, + workspace_config: 
pixi_cli::cli_config::WorkspaceConfig::default(), + lock_and_install_config: pixi_cli::cli_config::LockAndInstallConfig::default(), + config: ConfigCli::default(), + activation_config: pixi_config::ConfigCliActivation::default(), + environment: None, + clean_env: false, + skip_deps: false, + dry_run: false, + help: None, + h: None, + }; + + // Execute pixi run directly + let result = run::execute(run_args).await; + + // Restore original directory + std::env::set_current_dir(original_dir)?; + + match result { + Ok(_) => { + let duration = start.elapsed(); + println!( + "✅ Task '{}' completed in {:.2}s", + task_name, + duration.as_secs_f64() + ); + Ok(duration) + } + Err(e) => { + println!("❌ Task '{}' failed: {}", task_name, e); + Err(format!("Task '{}' failed: {}", task_name, e).into()) + } + } + } +} + +#[derive(Debug, Clone, Copy)] +enum TaskType { + Simple, + Complex, + WithDependencies, + Python, +} + +fn bench_simple_tasks(c: &mut Criterion) { + let packages = []; + + let mut group = c.benchmark_group("simple_task_execution"); + group.measurement_time(Duration::from_secs(30)); + group.sample_size(15); + group.warm_up_time(Duration::from_secs(5)); + + // Simple echo task + group.bench_function("simple_echo", |b| { + b.to_async(&*RUNTIME).iter(|| async { + let mut env = IsolatedPixiEnv::new().expect("Failed to create isolated environment"); + let duration = env + .run_pixi_task(&packages, TaskType::Simple, "simple", vec![]) + .await + .expect("Failed to run simple task"); + black_box(duration) + }) + }); + + // Simple task with arguments + group.bench_function("simple_with_args", |b| { + b.to_async(&*RUNTIME).iter(|| async { + let mut env = IsolatedPixiEnv::new().expect("Failed to create isolated environment"); + let duration = env + .run_pixi_task( + &packages, + TaskType::Simple, + "simple-with-args", + vec!["arg1".to_string(), "arg2".to_string()], + ) + .await + .expect("Failed to run simple task with args"); + black_box(duration) + }) + }); +} + +fn bench_complex_tasks(c: &mut Criterion) { + let packages = []; + + let mut group = c.benchmark_group("complex_task_execution"); + group.measurement_time(Duration::from_secs(45)); + group.sample_size(12); + group.warm_up_time(Duration::from_secs(5)); + + // Complex task with multiple commands + group.bench_function("complex_multi_command", |b| { + b.to_async(&*RUNTIME).iter(|| async { + let mut env = IsolatedPixiEnv::new().expect("Failed to create isolated environment"); + let duration = env + .run_pixi_task(&packages, TaskType::Complex, "complex", vec![]) + .await + .expect("Failed to run complex task"); + black_box(duration) + }) + }); + + // Multi-step task + group.bench_function("multi_step_task", |b| { + b.to_async(&*RUNTIME).iter(|| async { + let mut env = IsolatedPixiEnv::new().expect("Failed to create isolated environment"); + let duration = env + .run_pixi_task(&packages, TaskType::Complex, "multi-step", vec![]) + .await + .expect("Failed to run multi-step task"); + black_box(duration) + }) + }); +} + +fn bench_dependency_tasks(c: &mut Criterion) { + let packages = []; + + let mut group = c.benchmark_group("dependency_task_execution"); + group.measurement_time(Duration::from_secs(60)); + group.sample_size(10); + group.warm_up_time(Duration::from_secs(5)); + + // Task with dependencies (should run prepare -> build -> test -> deploy) + group.bench_function("task_with_dependencies", |b| { + b.to_async(&*RUNTIME).iter(|| async { + let mut env = IsolatedPixiEnv::new().expect("Failed to create isolated environment"); + let duration = 
env + .run_pixi_task(&packages, TaskType::WithDependencies, "deploy", vec![]) + .await + .expect("Failed to run task with dependencies"); + black_box(duration) + }) + }); + + // Single dependency task + group.bench_function("single_dependency", |b| { + b.to_async(&*RUNTIME).iter(|| async { + let mut env = IsolatedPixiEnv::new().expect("Failed to create isolated environment"); + let duration = env + .run_pixi_task(&packages, TaskType::WithDependencies, "build", vec![]) + .await + .expect("Failed to run task with single dependency"); + black_box(duration) + }) + }); +} + +fn bench_python_tasks(c: &mut Criterion) { + let packages = []; + + let mut group = c.benchmark_group("shell_task_execution"); + group.measurement_time(Duration::from_secs(30)); + group.sample_size(15); + group.warm_up_time(Duration::from_secs(5)); + + // Simple shell task + group.bench_function("shell_simple", |b| { + b.to_async(&*RUNTIME).iter(|| async { + let mut env = IsolatedPixiEnv::new().expect("Failed to create isolated environment"); + let duration = env + .run_pixi_task(&packages, TaskType::Python, "shell-simple", vec![]) + .await + .expect("Failed to run shell simple task"); + black_box(duration) + }) + }); + + // Shell version check + group.bench_function("shell_version", |b| { + b.to_async(&*RUNTIME).iter(|| async { + let mut env = IsolatedPixiEnv::new().expect("Failed to create isolated environment"); + let duration = env + .run_pixi_task(&packages, TaskType::Python, "shell-version", vec![]) + .await + .expect("Failed to run shell version task"); + black_box(duration) + }) + }); + + // Shell script execution + group.bench_function("shell_script", |b| { + b.to_async(&*RUNTIME).iter(|| async { + let mut env = IsolatedPixiEnv::new().expect("Failed to create isolated environment"); + let duration = env + .run_pixi_task(&packages, TaskType::Python, "shell-script", vec![]) + .await + .expect("Failed to run shell script task"); + black_box(duration) + }) + }); +} + +criterion_group!( + benches, + bench_simple_tasks, + bench_complex_tasks, + bench_dependency_tasks, + bench_python_tasks +); +criterion_main!(benches); diff --git a/crates/pixi_bench/my-local-channel/noarch/bokeh-1.0.0-py_0.tar.bz2 b/crates/pixi_bench/my-local-channel/noarch/bokeh-1.0.0-py_0.tar.bz2 new file mode 100644 index 0000000000..0aff836e55 Binary files /dev/null and b/crates/pixi_bench/my-local-channel/noarch/bokeh-1.0.0-py_0.tar.bz2 differ diff --git a/crates/pixi_bench/my-local-channel/noarch/click-1.0.0-py_0.tar.bz2 b/crates/pixi_bench/my-local-channel/noarch/click-1.0.0-py_0.tar.bz2 new file mode 100644 index 0000000000..6cada3c788 Binary files /dev/null and b/crates/pixi_bench/my-local-channel/noarch/click-1.0.0-py_0.tar.bz2 differ diff --git a/crates/pixi_bench/my-local-channel/noarch/dask-1.0.0-py_0.tar.bz2 b/crates/pixi_bench/my-local-channel/noarch/dask-1.0.0-py_0.tar.bz2 new file mode 100644 index 0000000000..ddd2b1d4f6 Binary files /dev/null and b/crates/pixi_bench/my-local-channel/noarch/dask-1.0.0-py_0.tar.bz2 differ diff --git a/crates/pixi_bench/my-local-channel/noarch/jupyter-1.0.0-py_0.tar.bz2 b/crates/pixi_bench/my-local-channel/noarch/jupyter-1.0.0-py_0.tar.bz2 new file mode 100644 index 0000000000..40d111105f Binary files /dev/null and b/crates/pixi_bench/my-local-channel/noarch/jupyter-1.0.0-py_0.tar.bz2 differ diff --git a/crates/pixi_bench/my-local-channel/noarch/matplotlib-1.0.0-py_0.tar.bz2 b/crates/pixi_bench/my-local-channel/noarch/matplotlib-1.0.0-py_0.tar.bz2 new file mode 100644 index 0000000000..e4a250e8d4 Binary 
files /dev/null and b/crates/pixi_bench/my-local-channel/noarch/matplotlib-1.0.0-py_0.tar.bz2 differ diff --git a/crates/pixi_bench/my-local-channel/noarch/numpy-1.0.0-py_0.tar.bz2 b/crates/pixi_bench/my-local-channel/noarch/numpy-1.0.0-py_0.tar.bz2 new file mode 100644 index 0000000000..1aee12b45c Binary files /dev/null and b/crates/pixi_bench/my-local-channel/noarch/numpy-1.0.0-py_0.tar.bz2 differ diff --git a/crates/pixi_bench/my-local-channel/noarch/opencv-1.0.0-py_0.tar.bz2 b/crates/pixi_bench/my-local-channel/noarch/opencv-1.0.0-py_0.tar.bz2 new file mode 100644 index 0000000000..09a1b6b6b1 Binary files /dev/null and b/crates/pixi_bench/my-local-channel/noarch/opencv-1.0.0-py_0.tar.bz2 differ diff --git a/crates/pixi_bench/my-local-channel/noarch/pandas-1.0.0-py_0.tar.bz2 b/crates/pixi_bench/my-local-channel/noarch/pandas-1.0.0-py_0.tar.bz2 new file mode 100644 index 0000000000..8e0c5ff500 Binary files /dev/null and b/crates/pixi_bench/my-local-channel/noarch/pandas-1.0.0-py_0.tar.bz2 differ diff --git a/crates/pixi_bench/my-local-channel/noarch/pytorch-1.0.0-py_0.tar.bz2 b/crates/pixi_bench/my-local-channel/noarch/pytorch-1.0.0-py_0.tar.bz2 new file mode 100644 index 0000000000..cce603991c Binary files /dev/null and b/crates/pixi_bench/my-local-channel/noarch/pytorch-1.0.0-py_0.tar.bz2 differ diff --git a/crates/pixi_bench/my-local-channel/noarch/pyyaml-1.0.0-py_0.tar.bz2 b/crates/pixi_bench/my-local-channel/noarch/pyyaml-1.0.0-py_0.tar.bz2 new file mode 100644 index 0000000000..8fb7bbe209 Binary files /dev/null and b/crates/pixi_bench/my-local-channel/noarch/pyyaml-1.0.0-py_0.tar.bz2 differ diff --git a/crates/pixi_bench/my-local-channel/noarch/repodata.json b/crates/pixi_bench/my-local-channel/noarch/repodata.json new file mode 100644 index 0000000000..1e16e12699 --- /dev/null +++ b/crates/pixi_bench/my-local-channel/noarch/repodata.json @@ -0,0 +1,164 @@ +{ + "info": { + "subdir": "noarch" + }, + "packages": { + "numpy-1.0.0-py_0.tar.bz2": { + "build": "py_0", + "build_number": 0, + "depends": [], + "license": "MIT", + "name": "numpy", + "platform": null, + "subdir": "noarch", + "timestamp": 1640995200000, + "version": "1.0.0" + }, + "pandas-1.0.0-py_0.tar.bz2": { + "build": "py_0", + "build_number": 0, + "depends": [], + "license": "MIT", + "name": "pandas", + "platform": null, + "subdir": "noarch", + "timestamp": 1640995200000, + "version": "1.0.0" + }, + "requests-1.0.0-py_0.tar.bz2": { + "build": "py_0", + "build_number": 0, + "depends": [], + "license": "MIT", + "name": "requests", + "platform": null, + "subdir": "noarch", + "timestamp": 1640995200000, + "version": "1.0.0" + }, + "click-1.0.0-py_0.tar.bz2": { + "build": "py_0", + "build_number": 0, + "depends": [], + "license": "MIT", + "name": "click", + "platform": null, + "subdir": "noarch", + "timestamp": 1640995200000, + "version": "1.0.0" + }, + "pyyaml-1.0.0-py_0.tar.bz2": { + "build": "py_0", + "build_number": 0, + "depends": [], + "license": "MIT", + "name": "pyyaml", + "platform": null, + "subdir": "noarch", + "timestamp": 1640995200000, + "version": "1.0.0" + }, + "pytorch-1.0.0-py_0.tar.bz2": { + "build": "py_0", + "build_number": 0, + "depends": [], + "license": "MIT", + "name": "pytorch", + "platform": null, + "subdir": "noarch", + "timestamp": 1640995200000, + "version": "1.0.0" + }, + "scipy-1.0.0-py_0.tar.bz2": { + "build": "py_0", + "build_number": 0, + "depends": [], + "license": "MIT", + "name": "scipy", + "platform": null, + "subdir": "noarch", + "timestamp": 1640995200000, + "version": "1.0.0" + }, + 
"scikit-learn-1.0.0-py_0.tar.bz2": { + "build": "py_0", + "build_number": 0, + "depends": [], + "license": "MIT", + "name": "scikit-learn", + "platform": null, + "subdir": "noarch", + "timestamp": 1640995200000, + "version": "1.0.0" + }, + "matplotlib-1.0.0-py_0.tar.bz2": { + "build": "py_0", + "build_number": 0, + "depends": [], + "license": "MIT", + "name": "matplotlib", + "platform": null, + "subdir": "noarch", + "timestamp": 1640995200000, + "version": "1.0.0" + }, + "jupyter-1.0.0-py_0.tar.bz2": { + "build": "py_0", + "build_number": 0, + "depends": [], + "license": "MIT", + "name": "jupyter", + "platform": null, + "subdir": "noarch", + "timestamp": 1640995200000, + "version": "1.0.0" + }, + "bokeh-1.0.0-py_0.tar.bz2": { + "build": "py_0", + "build_number": 0, + "depends": [], + "license": "MIT", + "name": "bokeh", + "platform": null, + "subdir": "noarch", + "timestamp": 1640995200000, + "version": "1.0.0" + }, + "dask-1.0.0-py_0.tar.bz2": { + "build": "py_0", + "build_number": 0, + "depends": [], + "license": "MIT", + "name": "dask", + "platform": null, + "subdir": "noarch", + "timestamp": 1640995200000, + "version": "1.0.0" + }, + "xarray-1.0.0-py_0.tar.bz2": { + "build": "py_0", + "build_number": 0, + "depends": [], + "license": "MIT", + "name": "xarray", + "platform": null, + "subdir": "noarch", + "timestamp": 1640995200000, + "version": "1.0.0" + }, + "opencv-1.0.0-py_0.tar.bz2": { + "build": "py_0", + "build_number": 0, + "depends": [], + "license": "MIT", + "name": "opencv", + "platform": null, + "subdir": "noarch", + "timestamp": 1640995200000, + "version": "1.0.0" + } + }, + "packages.conda": {}, + "removed": [], + "repodata_version": 1 +} diff --git a/crates/pixi_bench/my-local-channel/noarch/requests-1.0.0-py_0.tar.bz2 b/crates/pixi_bench/my-local-channel/noarch/requests-1.0.0-py_0.tar.bz2 new file mode 100644 index 0000000000..e44e9f63ea Binary files /dev/null and b/crates/pixi_bench/my-local-channel/noarch/requests-1.0.0-py_0.tar.bz2 differ diff --git a/crates/pixi_bench/my-local-channel/noarch/scikit-learn-1.0.0-py_0.tar.bz2 b/crates/pixi_bench/my-local-channel/noarch/scikit-learn-1.0.0-py_0.tar.bz2 new file mode 100644 index 0000000000..05dd530b0b Binary files /dev/null and b/crates/pixi_bench/my-local-channel/noarch/scikit-learn-1.0.0-py_0.tar.bz2 differ diff --git a/crates/pixi_bench/my-local-channel/noarch/scipy-1.0.0-py_0.tar.bz2 b/crates/pixi_bench/my-local-channel/noarch/scipy-1.0.0-py_0.tar.bz2 new file mode 100644 index 0000000000..44a50a3491 Binary files /dev/null and b/crates/pixi_bench/my-local-channel/noarch/scipy-1.0.0-py_0.tar.bz2 differ diff --git a/crates/pixi_bench/my-local-channel/noarch/xarray-1.0.0-py_0.tar.bz2 b/crates/pixi_bench/my-local-channel/noarch/xarray-1.0.0-py_0.tar.bz2 new file mode 100644 index 0000000000..7cebecb4a5 Binary files /dev/null and b/crates/pixi_bench/my-local-channel/noarch/xarray-1.0.0-py_0.tar.bz2 differ diff --git a/crates/pixi_cli/src/cli_interface.rs b/crates/pixi_cli/src/cli_interface.rs index d11482fbd6..c8e0aacc75 100644 --- a/crates/pixi_cli/src/cli_interface.rs +++ b/crates/pixi_cli/src/cli_interface.rs @@ -1,5 +1,5 @@ use miette::IntoDiagnostic; -use pixi_api::{interface::Interface, styled_text::StyledText}; +use pixi_api::interface::Interface; #[derive(Default)] pub struct CliInterface {} @@ -39,15 +39,4 @@ impl Interface for CliInterface { console::style(console::Emoji("❌ ", "")).yellow(), ); } - - fn styled(&self, text: StyledText) -> String { - let mut styled = console::style(text.text()); - if text.bold { - 
styled = styled.bold(); - } - if text.green { - styled = styled.green(); - } - styled.to_string() - } } diff --git a/crates/pixi_cli/src/global/global_specs.rs b/crates/pixi_cli/src/global/global_specs.rs index e3ce4e69ec..53829dcf27 100644 --- a/crates/pixi_cli/src/global/global_specs.rs +++ b/crates/pixi_cli/src/global/global_specs.rs @@ -30,7 +30,7 @@ pub struct GlobalSpecs { #[clap(long, requires = "git", help_heading = consts::CLAP_GIT_OPTIONS)] pub subdir: Option, - /// The path to the local directory + /// The path to the local package #[clap(long, conflicts_with = "git")] pub path: Option, } @@ -59,6 +59,8 @@ pub enum GlobalSpecsConversionError { #[error("failed to infer package name")] #[diagnostic(transparent)] PackageNameInference(#[from] pixi_global::project::InferPackageNameError), + #[error("Input {0} looks like a path: please pass `--path`.")] + MissingPathArg(String), } impl GlobalSpecs { @@ -92,6 +94,14 @@ impl GlobalSpecs { .to_typed_path_buf(), })) } else { + fn pathlike(s: &str) -> bool { + s.contains(".conda") || s.contains('/') || s.contains('\\') + } + if let Some(pathlike_input) = self.specs.iter().find(|s| pathlike(s)) { + return Err(GlobalSpecsConversionError::MissingPathArg( + pathlike_input.clone(), + )); + } None }; if let Some(pixi_spec) = git_or_path_spec { diff --git a/crates/pixi_cli/src/global/install.rs b/crates/pixi_cli/src/global/install.rs index f2af47f6ce..eadb591b5a 100644 --- a/crates/pixi_cli/src/global/install.rs +++ b/crates/pixi_cli/src/global/install.rs @@ -5,7 +5,7 @@ use indexmap::IndexMap; use clap::Parser; use fancy_display::FancyDisplay; use itertools::Itertools; -use miette::{Context, IntoDiagnostic, Report}; +use miette::Report; use rattler_conda_types::{MatchSpec, NamedChannelOrUrl, Platform}; use crate::global::{global_specs::GlobalSpecs, revert_environment_after_error}; @@ -78,18 +78,13 @@ pub struct Args { pub async fn execute(args: Args) -> miette::Result<()> { let config = Config::with_cli_config(&args.config); + + // Load the global config and ensure + // that the root_dir is relative to the manifest directory let project_original = pixi_global::Project::discover_or_create() .await? 
.with_cli_config(config.clone()); - - // Capture the current working directory for proper relative path resolution - let current_dir = std::env::current_dir() - .into_diagnostic() - .wrap_err("Could not retrieve the current directory")?; - let channel_config = rattler_conda_types::ChannelConfig { - root_dir: current_dir, - ..project_original.global_channel_config().clone() - }; + let channel_config = project_original.global_channel_config().clone(); let specs = args .packages diff --git a/crates/pixi_cli/src/run.rs index 5ef98437f4..94a211a8b9 100644 --- a/crates/pixi_cli/src/run.rs +++ b/crates/pixi_cli/src/run.rs @@ -12,6 +12,7 @@ use clap::Parser; use deno_task_shell::KillSignal; use dialoguer::theme::ColorfulTheme; use fancy_display::FancyDisplay; +use indicatif::ProgressDrawTarget; use itertools::Itertools; use miette::{Diagnostic, IntoDiagnostic}; use pixi_config::{ConfigCli, ConfigCliActivation}; @@ -22,6 +23,7 @@ use pixi_core::{ workspace::{Environment, errors::UnsupportedPlatformError}, }; use pixi_manifest::{FeaturesExt, TaskName}; +use pixi_progress::global_multi_progress; use pixi_task::{ AmbiguousTask, CanSkip, ExecutableTask, FailedToParseShellScript, InvalidWorkingDirectory, SearchEnvironments, TaskAndEnvironment, TaskGraph, get_task_env, @@ -91,6 +93,11 @@ pub struct Args { /// When running the sigints are ignored and child can react to them. As it /// pleases. pub async fn execute(args: Args) -> miette::Result<()> { + // The following statements don't spawn any progress bars, so set the + // progress draw target to hidden. Otherwise the output may be + // incorrect. + global_multi_progress().set_draw_target(ProgressDrawTarget::hidden()); + let cli_config = args .activation_config .merge_config(args.config.clone().into()); @@ -117,6 +124,10 @@ pub async fn execute(args: Args) -> miette::Result<()> { return Ok(()); } + // We expect the progress bar to be used afterwards, so set the draw + // target back to the original one. + global_multi_progress().set_draw_target(ProgressDrawTarget::stderr_with_hz(20)); + // Sanity check of prefix location sanity_check_workspace(&workspace).await?; diff --git a/crates/pixi_cli/src/upgrade.rs index 6d2fb9c36f..8b45538256 100644 --- a/crates/pixi_cli/src/upgrade.rs +++ b/crates/pixi_cli/src/upgrade.rs @@ -23,6 +23,7 @@ use crate::cli_config::{LockFileUpdateConfig, NoInstallConfig, WorkspaceConfig}; /// Checks if there are newer versions of the dependencies and upgrades them in the lockfile and manifest file. /// /// `pixi upgrade` loosens the requirements for the given packages, updates the lock file and the adapts the manifest accordingly. +/// By default, all features are upgraded.
#[derive(Parser, Debug, Default)] pub struct Args { #[clap(flatten)] @@ -55,8 +56,8 @@ pub struct UpgradeSpecsArgs { pub packages: Option>, /// The feature to update - #[clap(long = "feature", short = 'f', default_value_t)] - pub feature: FeatureName, + #[clap(long = "feature", short = 'f')] + pub feature: Option, /// The packages which should be excluded #[clap(long, conflicts_with = "packages")] @@ -71,19 +72,27 @@ pub async fn execute(args: Args) -> miette::Result<()> { let mut workspace = workspace.modify()?; - // Ensure that the given feature exists - let Some(feature) = workspace - .workspace() - .workspace - .value - .feature(&args.specs.feature) - else { - miette::bail!( - "could not find a feature named {}", - args.specs.feature.fancy_display() - ) + let features = { + if let Some(feature_arg) = &args.specs.feature { + // Ensure that the given feature exists + let Some(feature) = workspace.workspace().workspace.value.feature(feature_arg) else { + miette::bail!( + "could not find a feature named {}", + feature_arg.fancy_display() + ) + }; + Vec::from([feature.clone()]) + } else { + workspace + .workspace() + .workspace + .value + .features + .clone() + .into_values() + .collect() + } }; - let feature = feature.clone(); if !args.no_install_config.allow_installs() && (args.lock_file_update_config.lock_file_usage.frozen @@ -105,18 +114,29 @@ pub async fn execute(args: Args) -> miette::Result<()> { .collect(); if let Some(package_names) = &args.specs.packages { - let available_set = collect_available_packages(&feature, &all_platforms); - let available_packages: Vec = available_set.into_iter().collect(); + let available_packages: Vec = features + .clone() + .into_iter() + .map(|f| collect_available_packages(&f, &all_platforms)) + .fold(IndexSet::new(), |mut acc, set| { + acc.extend(set); + acc + }) + .into_iter() + .collect(); + for package in package_names { ensure_package_exists(package, &available_packages)?; } } - let SpecsByTarget { - default_match_specs, - default_pypi_deps, - per_platform, - } = collect_specs_by_target(&feature, &args, &workspace, &all_platforms)?; + let specs_by_feature = features + .into_iter() + .map(|f| { + let specs = collect_specs_by_target(&f, &args, &workspace, &all_platforms)?; + Ok((f.name.clone(), specs)) + }) + .collect::>()?; let lock_file_usage = args.lock_file_update_config.lock_file_usage()?; @@ -124,60 +144,69 @@ pub async fn execute(args: Args) -> miette::Result<()> { let original_lock_file = workspace.workspace().load_lock_file().await?; let mut printed_any = false; - if !default_match_specs.is_empty() || !default_pypi_deps.is_empty() { - if let Some(update) = workspace - .update_dependencies( - default_match_specs, - default_pypi_deps, - IndexMap::default(), - args.no_install_config.no_install, - &lock_file_usage, - &args.specs.feature, - &[], - false, - args.dry_run, - ) - .await? - { - let diff = update.lock_file_diff; - if !args.json { - diff.print() - .into_diagnostic() - .context("failed to print lock-file diff")?; + + for (feature_name, specs) in specs_by_feature { + let SpecsByTarget { + default_match_specs, + default_pypi_deps, + per_platform, + } = specs; + + if !default_match_specs.is_empty() || !default_pypi_deps.is_empty() { + if let Some(update) = workspace + .update_dependencies( + default_match_specs, + default_pypi_deps, + IndexMap::default(), + args.no_install_config.no_install, + &lock_file_usage, + &feature_name, + &[], + false, + args.dry_run, + ) + .await? 
+ { + let diff = update.lock_file_diff; + if !args.json { + diff.print() + .into_diagnostic() + .context("failed to print lock-file diff")?; + } + printed_any = true; } - printed_any = true; } - } - for (platform, (platform_match_specs, platform_pypi_deps)) in per_platform { - if platform_match_specs.is_empty() && platform_pypi_deps.is_empty() { - continue; - } + for (platform, (platform_match_specs, platform_pypi_deps)) in per_platform { + if platform_match_specs.is_empty() && platform_pypi_deps.is_empty() { + continue; + } - if let Some(update) = workspace - .update_dependencies( - platform_match_specs, - platform_pypi_deps, - IndexMap::default(), - args.no_install_config.no_install, - &lock_file_usage, - &args.specs.feature, - &[platform], - false, - args.dry_run, - ) - .await? - { - let diff = update.lock_file_diff; - if !args.json { - if printed_any { - println!(); + if let Some(update) = workspace + .update_dependencies( + platform_match_specs, + platform_pypi_deps, + IndexMap::default(), + args.no_install_config.no_install, + &lock_file_usage, + &feature_name, + &[platform], + false, + args.dry_run, + ) + .await? + { + let diff = update.lock_file_diff; + if !args.json { + if printed_any { + println!(); + } + diff.print() + .into_diagnostic() + .context("failed to print lock-file diff")?; } - diff.print() - .into_diagnostic() - .context("failed to print lock-file diff")?; + printed_any = true; } - printed_any = true; } } @@ -236,6 +265,8 @@ struct SpecsByTarget { per_platform: IndexMap, } +type SpecsByFeature = IndexMap; + /// Collects specs for the default target and for each platform, partitioning /// out default-owned names so platform targets only get platform-owned entries. fn collect_specs_by_target( @@ -461,7 +492,7 @@ pub fn parse_specs_for_platform( let location = workspace .document() - .pypi_dependency_location(&name, platform, &args.specs.feature); + .pypi_dependency_location(&name, platform, &feature.name); (name, (req, Some(pixi_req), location)) }) .collect(); diff --git a/crates/pixi_command_dispatcher/src/source_build_cache_status/mod.rs b/crates/pixi_command_dispatcher/src/source_build_cache_status/mod.rs index e94399e8ab..5338183d54 100644 --- a/crates/pixi_command_dispatcher/src/source_build_cache_status/mod.rs +++ b/crates/pixi_command_dispatcher/src/source_build_cache_status/mod.rs @@ -18,11 +18,6 @@ use crate::{ }, }; -/// A list of globs that should be ignored when calculating any input hash. -/// These are typically used for build artifacts that should not be included in -/// the input hash. -pub const DEFAULT_BUILD_IGNORE_GLOBS: &[&str] = &["!.pixi/**"]; - /// A query to retrieve information from the source build cache. This is /// memoized to allow querying information from the cache while it is also /// overwritten at the same time by a build. @@ -322,11 +317,7 @@ impl SourceBuildCacheStatusSpec { // Compute the modification time of the files that match the source input globs. 
let glob_time = match GlobModificationTime::from_patterns( &source_checkout.path, - source_info - .globs - .iter() - .map(String::as_str) - .chain(DEFAULT_BUILD_IGNORE_GLOBS.iter().copied()), + source_info.globs.iter().map(String::as_str), ) { Ok(glob_time) => glob_time, Err(e) => { diff --git a/crates/pixi_command_dispatcher/tests/integration/main.rs b/crates/pixi_command_dispatcher/tests/integration/main.rs index b24992b92d..5b8a1273b7 100644 --- a/crates/pixi_command_dispatcher/tests/integration/main.rs +++ b/crates/pixi_command_dispatcher/tests/integration/main.rs @@ -71,7 +71,8 @@ fn default_build_environment() -> BuildEnvironment { } #[tokio::test] -#[cfg_attr(not(feature = "slow_integration_tests"), ignore)] +#[ignore = "multi-output recipes don't work with pixi-build-rattler-build 0.3.3: https://github.com/prefix-dev/pixi-build-backends/issues/379"] +//#[cfg_attr(not(feature = "slow_integration_tests"), ignore)] pub async fn simple_test() { let (reporter, events) = EventReporter::new(); let (tool_platform, tool_virtual_packages) = tool_platform(); diff --git a/crates/pixi_config/src/lib.rs b/crates/pixi_config/src/lib.rs index f91928147d..6bb972d792 100644 --- a/crates/pixi_config/src/lib.rs +++ b/crates/pixi_config/src/lib.rs @@ -654,7 +654,7 @@ pub struct Config { pub loaded_from: Vec, #[serde(skip, default = "default_channel_config")] - channel_config: ChannelConfig, + pub channel_config: ChannelConfig, /// Configuration for repodata fetching. #[serde(alias = "repodata_config")] // BREAK: remove to stop supporting snake_case alias @@ -1346,8 +1346,11 @@ impl Config { // Extended self.mirrors with other.mirrors mirrors: self.mirrors, loaded_from: other.loaded_from, - // currently this is always the default so just use the other value - channel_config: other.channel_config, + channel_config: if other.channel_config == default_channel_config() { + self.channel_config + } else { + other.channel_config + }, repodata_config: self.repodata_config.merge(other.repodata_config), pypi_config: self.pypi_config.merge(other.pypi_config), s3_options: { diff --git a/crates/pixi_glob/Cargo.toml b/crates/pixi_glob/Cargo.toml index ecc8766e27..83c60fade0 100644 --- a/crates/pixi_glob/Cargo.toml +++ b/crates/pixi_glob/Cargo.toml @@ -12,14 +12,17 @@ version = "0.1.0" [dependencies] dashmap = { workspace = true } fs-err = { workspace = true } +ignore = "0.4" itertools = { workspace = true } memchr = { workspace = true } +parking_lot = { workspace = true } rattler_digest = { workspace = true } thiserror = { workspace = true } tokio = { workspace = true, features = ["sync", "rt"] } -wax = { workspace = true } +tracing = { workspace = true } [dev-dependencies] -insta = { workspace = true } +insta = { workspace = true, features = ["yaml", "redactions"] } rstest = { workspace = true } +serde = { workspace = true, features = ["derive"] } tempfile = { workspace = true } diff --git a/crates/pixi_glob/src/glob_hash.rs b/crates/pixi_glob/src/glob_hash.rs index b64d8d3e0f..be3c566704 100644 --- a/crates/pixi_glob/src/glob_hash.rs +++ b/crates/pixi_glob/src/glob_hash.rs @@ -7,11 +7,10 @@ use std::{ path::{Path, PathBuf}, }; -use itertools::Itertools; use rattler_digest::{Sha256, Sha256Hash, digest::Digest}; use thiserror::Error; -use crate::glob_set::{self, GlobSet}; +use crate::{GlobSet, GlobSetError}; /// Contains a hash of the files that match the given glob patterns. 
#[derive(Debug, Clone, Default)] @@ -25,14 +24,14 @@ pub struct GlobHash { #[derive(Error, Debug)] #[allow(missing_docs)] pub enum GlobHashError { - #[error(transparent)] - FilterGlobError(#[from] glob_set::GlobSetError), - #[error("during line normalization, failed to access {}", .0.display())] NormalizeLineEnds(PathBuf, #[source] io::Error), #[error("the operation was cancelled")] Cancelled, + + #[error(transparent)] + GlobSetIgnore(#[from] GlobSetError), } impl GlobHash { @@ -47,14 +46,12 @@ impl GlobHash { return Ok(Self::default()); } - let glob_set = GlobSet::create(globs)?; - let mut entries = glob_set - .filter_directory(root_dir) - .collect::, _>>()? - .into_iter() - .map(|entry| entry.path().to_path_buf()) - .collect_vec(); - entries.sort(); + let glob_set = GlobSet::create(globs); + // Collect matching entries and convert to concrete DirEntry list, propagating errors. + let mut entries = glob_set.collect_matching(root_dir)?; + + // Sort deterministically by path + entries.sort_by_key(|e| e.path().to_path_buf()); #[cfg(test)] let mut matching_files = Vec::new(); @@ -63,7 +60,7 @@ impl GlobHash { for entry in entries { // Construct a normalized file path to ensure consistent hashing across // platforms. And add it to the hash. - let relative_path = entry.strip_prefix(root_dir).unwrap_or(&entry); + let relative_path = entry.path().strip_prefix(root_dir).unwrap_or(entry.path()); let normalized_file_path = relative_path.to_string_lossy().replace("\\", "/"); rattler_digest::digest::Update::update(&mut hasher, normalized_file_path.as_bytes()); @@ -71,9 +68,11 @@ impl GlobHash { matching_files.push(normalized_file_path); // Concatenate the contents of the file to the hash. - File::open(&entry) + File::open(entry.path()) .and_then(|mut file| normalize_line_endings(&mut file, &mut hasher)) - .map_err(move |e| GlobHashError::NormalizeLineEnds(entry, e))?; + .map_err(move |e| { + GlobHashError::NormalizeLineEnds(entry.path().to_path_buf(), e) + })?; } if let Some(additional_hash) = additional_hash { diff --git a/crates/pixi_glob/src/glob_mtime.rs b/crates/pixi_glob/src/glob_mtime.rs index 7319258f97..d7df483e74 100644 --- a/crates/pixi_glob/src/glob_mtime.rs +++ b/crates/pixi_glob/src/glob_mtime.rs @@ -5,7 +5,7 @@ use std::{ use thiserror::Error; -use crate::glob_set::{self, GlobSet}; +use crate::{GlobSet, GlobSetError}; /// Contains the newest modification time for the files that match the given glob patterns. #[derive(Debug, Clone)] @@ -27,7 +27,7 @@ pub enum GlobModificationTimeError { #[error("error calculating modification time for {}", .0.display())] CalculateMTime(PathBuf, #[source] std::io::Error), #[error(transparent)] - GlobSet(#[from] glob_set::GlobSetError), + GlobSetIgnore(#[from] GlobSetError), } impl GlobModificationTime { @@ -36,38 +36,49 @@ impl GlobModificationTime { root_dir: &Path, globs: impl IntoIterator, ) -> Result { - // If the root is not a directory or does not exist, return NoMatches. + // Delegate to the ignore-based implementation for performance. + Self::from_patterns_ignore(root_dir, globs) + } + + /// Same as `from_patterns` but uses the `ignore` crate for walking/matching. + pub fn from_patterns_ignore<'a>( + root_dir: &Path, + globs: impl IntoIterator, + ) -> Result { + // Normalize root to a directory if a file was passed. 
let mut root = root_dir.to_owned(); if !root.is_dir() { root.pop(); } - let glob_set = GlobSet::create(globs)?; - let entries: Vec<_> = glob_set - .filter_directory(root_dir) - .collect::, _>>()?; + let glob_set = GlobSet::create(globs); + let entries = glob_set.collect_matching(root_dir)?; let mut latest = None; let mut designated_file = PathBuf::new(); - // Find the newest modification time and the designated file for entry in entries { - let matched_path = entry.path().to_owned(); - let metadata = entry.metadata().map_err(|e| { - GlobModificationTimeError::CalculateMTime(matched_path.clone(), e.into()) - })?; - let modified_entry = metadata + let matched_path = entry.path().to_path_buf(); + let md = match entry.metadata() { + Ok(md) => md, + Err(e) => { + return Err(GlobModificationTimeError::CalculateMTime( + matched_path, + std::io::Error::new(std::io::ErrorKind::Other, e.to_string()), + )); + } + }; + let modified = md .modified() .map_err(|e| GlobModificationTimeError::CalculateMTime(matched_path.clone(), e))?; - if let Some(ref current_latest) = latest { - if *current_latest >= modified_entry { + if let Some(cur) = latest { + if cur >= modified { continue; } } - - latest = Some(modified_entry); - designated_file = matched_path.clone(); + latest = Some(modified); + designated_file = matched_path; } match latest { diff --git a/crates/pixi_glob/src/glob_set.rs b/crates/pixi_glob/src/glob_set.rs deleted file mode 100644 index a06a2f993a..0000000000 --- a/crates/pixi_glob/src/glob_set.rs +++ /dev/null @@ -1,253 +0,0 @@ -use std::{ - io, - path::{Path, PathBuf}, -}; - -use itertools::{Either, Itertools}; -use thiserror::Error; -use wax::{Glob, WalkEntry}; - -/// A set of globs to include and exclude from a directory. -pub struct GlobSet<'t> { - /// The globs to include in the filter. - pub include: Vec>, - /// The globs to exclude from the filter. - pub exclude: Vec>, -} - -#[derive(Error, Debug)] -#[allow(missing_docs)] -pub enum GlobSetError { - #[error("failed to access {}", .0.display())] - Io(PathBuf, #[source] io::Error), - - #[error(transparent)] - DirWalk(#[from] io::Error), - - #[error("failed to read metadata for {0}")] - Metadata(PathBuf, #[source] wax::WalkError), - - #[error(transparent)] - Build(#[from] wax::BuildError), - - #[error(transparent)] - StripPrefix(#[from] std::path::StripPrefixError), -} - -impl<'t> GlobSet<'t> { - /// Create a new `GlobSet` from a list of globs. - /// - /// The globs are split into inclusion and exclusion globs based on whether they - /// start with `!`. - pub fn create(globs: impl IntoIterator) -> Result, GlobSetError> { - // Split the globs into inclusion and exclusion globs based on whether they - // start with `!`. - let (inclusion_globs, exclusion_globs): (Vec<_>, Vec<_>) = - globs.into_iter().partition_map(|g| { - g.strip_prefix("!") - .map(Either::Right) - .unwrap_or(Either::Left(g)) - }); - - // Parse all globs - let inclusion_globs = inclusion_globs - .into_iter() - .map(Glob::new) - .collect::, _>>()?; - let exclusion_globs = exclusion_globs - .into_iter() - .map(Glob::new) - .collect::, _>>()?; - - Ok(Self { - include: inclusion_globs, - exclude: exclusion_globs, - }) - } - - /// Create a function that filters out files that match the globs. 
- pub fn filter_directory( - &'t self, - root_dir: &Path, - ) -> impl Iterator, GlobSetError>> + 't { - let root_dir = root_dir.to_path_buf(); - let entries = self - .include - .iter() - .flat_map(move |glob| { - let (effective_walk_root, glob) = if glob.has_semantic_literals() { - // if the glob has semantic literals, we need to - // join the root directory with the glob prefix - // and use that as the effective walk root. - // Example: - // if `root_dir` is "/path/to/src" and `glob` is "../**/*.cpp", - // `effective_walk_root` becomes "/path/to". - let (prefix, glob) = glob.clone().partition(); - (root_dir.join(&prefix), glob) - } else { - (root_dir.clone(), glob.clone()) - }; - - let walkable = glob - .walk(&effective_walk_root) - .not(self.exclude.clone()) - .expect("since the globs are already parsed this should not error") - .collect_vec(); - - walkable - .into_iter() - .map(|w| { - w.map_err(|e| GlobSetError::Metadata(effective_walk_root.to_path_buf(), e)) - }) - .collect_vec() - .into_iter() - }) - .filter_map(|entry| { - match entry { - Ok(entry) if entry.file_type().is_dir() => None, - Ok(entry) => Some(Ok(entry)), - Err(e) => { - match e { - GlobSetError::Metadata(_, we) => { - let path = we.path().map(Path::to_path_buf); - let io_err = std::io::Error::from(we); - match io_err.kind() { - // Ignore DONE and permission errors - io::ErrorKind::NotFound | io::ErrorKind::PermissionDenied => { - None - } - _ => Some(Err(if let Some(path) = path { - GlobSetError::Io(path, io_err) - } else { - GlobSetError::DirWalk(io_err) - })), - } - } - _ => Some(Err(e)), - } - } - } - }); - entries - } -} - -#[cfg(test)] -mod tests { - use super::GlobSet; - use fs_err::File; - use std::path::PathBuf; - use tempfile::tempdir; - - #[test] - fn test_filter_globs_inclusion_exclusion() { - let temp_dir = tempdir().unwrap(); - let root_path = temp_dir.path(); - - // Create files and directories - File::create(root_path.join("include1.txt")).unwrap(); - File::create(root_path.join("include2.log")).unwrap(); - File::create(root_path.join("exclude.txt")).unwrap(); - fs_err::create_dir(root_path.join("subdir")).unwrap(); - File::create(root_path.join("subdir/include_subdir.txt")).unwrap(); - - // Test globs: include all .txt but exclude exclude.txt - let filter_globs = GlobSet::create(vec!["**/*.txt", "!exclude.txt"]).unwrap(); - - // Filter directory and get results as strings - let mut filtered_files: Vec<_> = filter_globs - .filter_directory(root_path) - .collect::, _>>() - .unwrap() - .into_iter() - .map(|p| p.path().strip_prefix(root_path).unwrap().to_path_buf()) - .collect(); - - // Assert the expected files are present - filtered_files.sort(); - - let mut expected = vec![ - "include1.txt".parse::().unwrap(), - "subdir/include_subdir.txt".parse().unwrap(), - ]; - expected.sort(); - assert_eq!(filtered_files, expected); - } - - #[test] - fn test_filters_with_relatives_globs() { - // In this test we want to make sure that when globbing over - // patterns that contains semantic relative path, like - // ../pixi.toml or ../sources/*.toml, we are able to - // distinguish between glob and just a semantic path. 
- let temp_dir = tempdir().unwrap(); - let root_path = temp_dir.path(); - - let temp_path_as_root = temp_dir.path().join("somewhere_inside"); - fs_err::create_dir(&temp_path_as_root).unwrap(); - - // Create files and directories - fs_err::create_dir(root_path.join("subdir")).unwrap(); - File::create(root_path.join("subdir/some_inner_source.cpp")).unwrap(); - - // Test globs: we want to get the file inside the subdir using a relative glob. - let filter_globs = GlobSet::create(vec!["../**/*.cpp", "!exclude.txt"]).unwrap(); - - // Filter directory and get results as strings - let mut filtered_files: Vec<_> = filter_globs - // pretend that we are in the workspace folder - // and our recipe yaml is living inside some folder - // that will point outside - .filter_directory(&temp_path_as_root) - .collect::, _>>() - .unwrap() - .into_iter() - .map(|p| { - p.path() - .strip_prefix(&temp_path_as_root) - .unwrap() - .to_path_buf() - }) - .collect(); - - // Assert the expected files are present - filtered_files.sort(); - - let expected = vec![ - "../subdir/some_inner_source.cpp" - .parse::() - .unwrap(), - ]; - assert_eq!(filtered_files, expected); - } - - #[test] - fn test_filters_with_just_a_file_glob() { - let temp_dir = tempdir().unwrap(); - let root_path = temp_dir.path(); - - // Create files and directories - File::create(root_path.join("pixi.toml")).unwrap(); - - // Test globs: include pixi.toml - let filter_globs = GlobSet::create(vec!["pixi.toml"]).unwrap(); - - // Filter directory and get results as strings - let mut filtered_files: Vec<_> = filter_globs - // pretend that we are in the workspace folder - // and our recipe yaml is living inside some folder - // that will point outside - .filter_directory(root_path) - .collect::, _>>() - .unwrap() - .into_iter() - .map(|p| p.path().strip_prefix(root_path).unwrap().to_path_buf()) - .collect(); - - // Assert the expected files are present - filtered_files.sort(); - - let expected = vec!["pixi.toml".parse::().unwrap()]; - assert_eq!(filtered_files, expected); - } -} diff --git a/crates/pixi_glob/src/glob_set/mod.rs b/crates/pixi_glob/src/glob_set/mod.rs new file mode 100644 index 0000000000..5d39aee0f5 --- /dev/null +++ b/crates/pixi_glob/src/glob_set/mod.rs @@ -0,0 +1,221 @@ +//! Convenience wrapper around `ignore` that emulates the glob semantics pixi expects. +//! +//! Notable behavioural tweaks compared to vanilla gitignore parsing, so that it behaves more like unix globbing with special rules: +//! - Globs are rebased to a shared search root so patterns like `../src/*.rs` keep working even +//! when the caller starts from a nested directory. +//! - Negated patterns that start with `**/` are treated as global exclusions. We skip rebasing +//! those so `!**/build.rs` still hides every `build.rs`, regardless of the effective root. +//! - Plain file names without meta characters (e.g. `pixi.toml`) are anchored to the search root +//! instead of matching anywhere below it. This mirrors the behaviour we had with the previous +//! wax-based implementation. +//! - Negated literals (e.g. `!pixi.toml`) are anchored the same way, which lets recipes ignore a +//! single file at the root without accidentally hiding copies deeper in the tree. + +mod walk; +mod walk_root; + +use std::path::{Path, PathBuf}; + +use thiserror::Error; + +use walk_root::{WalkRoot, WalkRootsError}; + +/// A glob set implemented using the `ignore` crate (globset + fast walker). +pub struct GlobSet { + /// Include patterns (gitignore-style), without leading '!'. 
+ pub walk_roots: WalkRoot, +} + +#[derive(Error, Debug)] +#[allow(missing_docs)] +pub enum GlobSetError { + #[error("failed to build globs")] + BuildOverrides(#[source] ignore::Error), + + #[error("walk error at {0}")] + Walk(PathBuf, #[source] ignore::Error), + + #[error(transparent)] + WalkRoots(#[from] WalkRootsError), +} + +impl GlobSet { + /// Create a new [`GlobSet`] from a list of patterns. Leading '!' indicates exclusion. + pub fn create<'t>(globs: impl IntoIterator) -> GlobSet { + GlobSet { + walk_roots: WalkRoot::build(globs).expect("should not fail"), + } + } + + /// Walks files matching all include/exclude patterns using a single parallel walker. + /// Returns a flat Vec of results to keep lifetimes simple and predictable. + pub fn collect_matching(&self, root_dir: &Path) -> Result, GlobSetError> { + if self.walk_roots.is_empty() { + return Ok(vec![]); + } + + let rebased = self.walk_roots.rebase(root_dir)?; + walk::walk_globs(&rebased.root, &rebased.globs) + } +} + +#[cfg(test)] +mod tests { + use super::GlobSet; + use fs_err::{self as fs, File}; + use insta::assert_yaml_snapshot; + use std::path::{Path, PathBuf}; + use tempfile::tempdir; + + fn relative_path(path: &Path, root: &Path) -> PathBuf { + if let Ok(rel) = path.strip_prefix(root) { + return rel.to_path_buf(); + } + if let Some(parent) = root.parent() { + if let Ok(rel) = path.strip_prefix(parent) { + return std::path::Path::new("..").join(rel); + } + } + path.to_path_buf() + } + + fn sorted_paths(entries: Vec, root: &std::path::Path) -> Vec { + let mut paths: Vec<_> = entries + .into_iter() + .map(|entry| { + relative_path(entry.path(), root) + .display() + .to_string() + .replace('\\', "/") + }) + .collect(); + paths.sort(); + paths + } + + // Test out a normal non-reseated globbing approach + #[test] + fn collect_matching_inclusion_exclusion() { + let temp_dir = tempdir().unwrap(); + let root_path = temp_dir.path(); + + File::create(root_path.join("include1.txt")).unwrap(); + File::create(root_path.join("include2.log")).unwrap(); + File::create(root_path.join("exclude.txt")).unwrap(); + fs::create_dir(root_path.join("subdir")).unwrap(); + File::create(root_path.join("subdir/include_subdir.txt")).unwrap(); + + let glob_set = GlobSet::create(vec!["**/*.txt", "!exclude.txt"]); + let entries = glob_set.collect_matching(root_path).unwrap(); + + let paths = sorted_paths(entries, root_path); + assert_yaml_snapshot!(paths, @r###" + - include1.txt + - subdir/include_subdir.txt + "###); + } + + // Check some general globbing support and make sure the correct things do not match + #[test] + fn collect_matching_relative_globs() { + let temp_dir = tempdir().unwrap(); + let root_path = temp_dir.path(); + let search_root = root_path.join("workspace"); + fs::create_dir(&search_root).unwrap(); + + fs::create_dir(root_path.join("subdir")).unwrap(); + File::create(root_path.join("subdir/some_inner_source.cpp")).unwrap(); + File::create(root_path.join("subdir/dont-match.txt")).unwrap(); + File::create(search_root.join("match.txt")).unwrap(); + + let glob_set = GlobSet::create(vec!["../**/*.cpp", "*.txt"]); + let entries = glob_set.collect_matching(&search_root).unwrap(); + + let paths = sorted_paths(entries, &search_root); + assert_yaml_snapshot!(paths, @r###" + - "../subdir/some_inner_source.cpp" + - match.txt + "###); + } + + // Check that single matching file glob works with rebasing + #[test] + fn collect_matching_file_glob() { + let temp_dir = tempdir().unwrap(); + let root_path = temp_dir.path().join("workspace"); + 
fs::create_dir(&root_path).unwrap(); + + File::create(root_path.join("pixi.toml")).unwrap(); + + let glob_set = GlobSet::create(vec!["pixi.toml", "../*.cpp"]); + let entries = glob_set.collect_matching(&root_path).unwrap(); + + let paths = sorted_paths(entries, &root_path); + assert_yaml_snapshot!(paths, @"- pixi.toml"); + } + + // Check that global ignores !**/ patterns ignore everything even if the root has been + // rebased to a parent folder, this is just a convenience assumed to be preferable + // from a user standpoint + #[test] + fn check_global_ignore_ignores() { + let temp_dir = tempdir().unwrap(); + let root_path = temp_dir.path().join("workspace"); + fs::create_dir(&root_path).unwrap(); + + File::create(root_path.join("pixi.toml")).unwrap(); + File::create(root_path.join("foo.txt")).unwrap(); + // This would be picked up otherwise + File::create(temp_dir.path().join("foo.txt")).unwrap(); + + let glob_set = GlobSet::create(vec!["pixi.toml", "!**/foo.txt"]); + let entries = glob_set.collect_matching(&root_path).unwrap(); + + let paths = sorted_paths(entries, &root_path); + assert_yaml_snapshot!(paths, @"- pixi.toml"); + } + + // Check that we can ignore a subset of file when using the rebasing + // So we want to match all `.txt` and `*.toml` files except in the root location + // where want to exclude `foo.txt` + #[test] + fn check_subset_ignore() { + let temp_dir = tempdir().unwrap(); + let root_path = temp_dir.path().join("workspace"); + fs::create_dir(&root_path).unwrap(); + + File::create(root_path.join("pixi.toml")).unwrap(); + // This should not be picked up + File::create(root_path.join("foo.txt")).unwrap(); + // But because of the non-global ignore this should be + File::create(temp_dir.path().join("foo.txt")).unwrap(); + + let glob_set = GlobSet::create(vec!["../*.{toml,txt}", "!foo.txt"]); + let entries = glob_set.collect_matching(&root_path).unwrap(); + + let paths = sorted_paths(entries, &root_path); + assert_yaml_snapshot!(paths, @r###" + - "../foo.txt" + - pixi.toml + "###); + } + + /// Because we are using ignore which uses gitignore style parsing of globs we need to do some extra processing + /// to make this more like unix globs in this case we check this explicitly here + #[test] + fn single_file_match() { + let temp_dir = tempdir().unwrap(); + let workspace = temp_dir.path().join("workspace"); + fs::create_dir(&workspace).unwrap(); + let subdir = workspace.join("subdir"); + fs::create_dir(&subdir).unwrap(); + + File::create(subdir.join("pixi.toml")).unwrap(); + + let glob_set = GlobSet::create(vec!["pixi.toml"]); + let entries = glob_set.collect_matching(&workspace).unwrap(); + + let paths = sorted_paths(entries, &workspace); + assert_yaml_snapshot!(paths, @"[]"); + } +} diff --git a/crates/pixi_glob/src/glob_set/walk.rs b/crates/pixi_glob/src/glob_set/walk.rs new file mode 100644 index 0000000000..5a10cf8527 --- /dev/null +++ b/crates/pixi_glob/src/glob_set/walk.rs @@ -0,0 +1,208 @@ +//! Contains the directory walking implementation +use itertools::Itertools; +use parking_lot::Mutex; +use std::path::{Path, PathBuf}; +use std::sync::Arc; + +use crate::glob_set::walk_root::SimpleGlob; + +use super::GlobSetError; + +type SharedResults = Arc>>>>; + +struct CollectBuilder { + // Shared aggregation storage wrapped in an Option so we can `take` at the end. + sink: SharedResults, + // The root we are walking, used for error reporting + err_root: PathBuf, +} + +struct CollectVisitor { + // Local per-thread buffer to append results without holding the lock. 
+ local: Vec>, + // Reference to the shared sink. + sink: SharedResults, + // The root we are walking, used for error reporting + err_root: PathBuf, +} + +impl Drop for CollectVisitor { + // This merges the outputs on the drop + fn drop(&mut self) { + let mut sink = self.sink.lock(); + sink.get_or_insert_with(Vec::new).append(&mut self.local); + } +} + +impl<'s> ignore::ParallelVisitorBuilder<'s> for CollectBuilder { + fn build(&mut self) -> Box { + // Build a visitor that maintains an internal list + Box::new(CollectVisitor { + local: Vec::new(), + sink: Arc::clone(&self.sink), + err_root: self.err_root.clone(), + }) + } +} + +impl ignore::ParallelVisitor for CollectVisitor { + /// This function loops over all matches, ignores directories, and ignores PermissionDenied and + /// NotFound errors + fn visit(&mut self, dir_entry: Result) -> ignore::WalkState { + match dir_entry { + Ok(dir_entry) => { + if dir_entry.file_type().map(|ft| ft.is_dir()).unwrap_or(false) { + return ignore::WalkState::Continue; + } + self.local.push(Ok(dir_entry)); + } + Err(e) => { + if let Some(ioe) = e.io_error() { + match ioe.kind() { + std::io::ErrorKind::NotFound | std::io::ErrorKind::PermissionDenied => {} + _ => self + .local + .push(Err(GlobSetError::Walk(self.err_root.clone(), e))), + } + } else { + self.local + .push(Err(GlobSetError::Walk(self.err_root.clone(), e))); + } + } + } + ignore::WalkState::Continue + } +} + +/// Walk over the globs in the specific root +pub fn walk_globs( + effective_walk_root: &Path, + globs: &[SimpleGlob], +) -> Result, GlobSetError> { + let mut ob = ignore::overrides::OverrideBuilder::new(effective_walk_root); + for glob in globs { + let pattern = anchor_literal_pattern(glob.to_pattern()); + ob.add(&pattern).map_err(GlobSetError::BuildOverrides)?; + } + + let overrides = ob.build().map_err(GlobSetError::BuildOverrides)?; + + let walker = ignore::WalkBuilder::new(effective_walk_root) + .git_ignore(true) + .git_exclude(true) + .hidden(true) + .git_global(false) + .ignore(false) + .overrides(overrides) + .build_parallel(); + + let collected: SharedResults = Arc::new(Mutex::new(Some(Vec::new()))); + let start = std::time::Instant::now(); + + let mut builder = CollectBuilder { + sink: Arc::clone(&collected), + err_root: effective_walk_root.to_path_buf(), + }; + walker.visit(&mut builder); + + let results = collected.lock().take().unwrap_or_default(); + + // Log some statistics as long as we are unsure with regards to performance + let matched = results.len(); + let elapsed = start.elapsed(); + let (include, excludes): (Vec<_>, Vec<_>) = globs.iter().partition(|g| !g.is_negated()); + let include_patterns = include.iter().map(|g| g.to_pattern()).join(", "); + let exclude_patterns = excludes.iter().map(|g| g.to_pattern()).join(", "); + + tracing::debug!( + include = include_patterns, + excludes = exclude_patterns, + matched, + elapsed_ms = elapsed.as_millis(), + "glob pass completed" + ); + + results.into_iter().try_collect() +} + +/// Ensures plain file names behave as "current directory" matches for the ignore crate. +/// +/// Gitignore syntax treats bare literals (e.g. `pixi.toml`) as "match anywhere below the root". +/// To keep parity with the previous wax-based globbing, which treated them like Unix globs anchored +/// to the working directory, we prepend a `/` so the override only applies at the search root. 
+/// Literals are anchored whether they are positive or negated—`foo` matches only the root file and +/// `!foo` excludes only that file—while anything containing meta characters or directory separators +/// is left untouched and keeps gitignore semantics. +fn anchor_literal_pattern(pattern: String) -> String { + fn needs_anchor(body: &str) -> bool { + if body.is_empty() { + return false; + } + // These will not occur when used in conjunction with GlobWalkRoot, but lets keep + // them for if this is not used in conjunction with these + if body.starts_with("./") || body.starts_with('/') || body.starts_with("../") { + return false; + } + if body.contains('/') { + return false; + } + if body.chars().any(|c| matches!(c, '*' | '?' | '[' | '{')) { + return false; + } + true + } + + let (negated, body) = if let Some(rest) = pattern.strip_prefix('!') { + (true, rest) + } else { + (false, pattern.as_str()) + }; + + if needs_anchor(body) { + let mut anchored = String::with_capacity(pattern.len() + 2); + if negated { + anchored.push('!'); + } + anchored.push('/'); + anchored.push_str(body); + anchored + } else { + pattern + } +} + +#[cfg(test)] +mod tests { + use super::anchor_literal_pattern; + + #[test] + fn anchors_literal_file_patterns() { + assert_eq!( + anchor_literal_pattern("pixi.toml".to_string()), + "/pixi.toml" + ); + // Patterns that already specify a subdirectory should stay untouched. + assert_eq!( + anchor_literal_pattern("foo/bar/baz.txt".to_string()), + "foo/bar/baz.txt" + ); + } + + #[test] + fn leaves_non_literal_patterns_untouched() { + assert_eq!( + anchor_literal_pattern("!pixi.toml".to_string()), + "!/pixi.toml" + ); + assert_eq!(anchor_literal_pattern("*.toml".to_string()), "*.toml"); + assert_eq!(anchor_literal_pattern("!*.toml".to_string()), "!*.toml"); + assert_eq!( + anchor_literal_pattern("src/lib.rs".to_string()), + "src/lib.rs" + ); + assert_eq!( + anchor_literal_pattern("../pixi.toml".to_string()), + "../pixi.toml" + ); + } +} diff --git a/crates/pixi_glob/src/glob_set/walk_root.rs b/crates/pixi_glob/src/glob_set/walk_root.rs new file mode 100644 index 0000000000..6b864e515f --- /dev/null +++ b/crates/pixi_glob/src/glob_set/walk_root.rs @@ -0,0 +1,446 @@ +//! Plan the effective glob walk root for a set of patterns that may contain relative components. +//! +//! The builder determines how many `..` segments we need to traverse so every pattern can be +//! evaluated from a single ancestor directory. When `rebase` is invoked we pop that ancestor off +//! the provided search root, splice the remaining literal components back into each pattern and +//! return the rewritten globs. Negated patterns that start with `**/` are treated as global +//! exclusions and are emitted unchanged so users can keep wildcard directory bans in scope even if +//! the effective root moves. + +use std::path::{Component, Path, PathBuf}; + +/// Simple handler to work with our globs +/// basically splits up negation +#[derive(Clone, Debug)] +pub struct SimpleGlob { + glob: String, + negated: bool, +} + +impl SimpleGlob { + pub fn new(glob: String, negated: bool) -> Self { + Self { glob, negated } + } + + /// Returns the pattern without leading ! 
+ pub fn normalized_pattern(&self) -> &str { + &self.glob + } + + pub fn is_negated(&self) -> bool { + self.negated + } + + /// Returns a proper glob pattern + pub fn to_pattern(&self) -> String { + if self.negated { + format!("!{}", self.glob) + } else { + self.glob.clone() + } + } +} + +#[derive(thiserror::Error, Debug)] +pub enum WalkRootsError { + #[error("after processing glob '{glob}', split into '{prefix}' and empty glob")] + EmptyGlob { prefix: String, glob: String }, + + #[error("glob prefix '{prefix}' must be relative")] + AbsolutePrefix { prefix: String }, + + #[error("cannot ascend {required} level(s) from '{root}'")] + CannotAscend { required: usize, root: PathBuf }, +} + +struct GlobSpec { + // Is this a ! glob + negated: bool, + // How many `..` path components does this contain + parent_dirs: usize, + // The `foo/bar/` concrete components + concrete_components: Vec, + // Original glob pattern + pattern: String, + // Determines if we want to rebase the glob + skip_rebase: bool, +} + +/// Contains the globs and the joinable path +pub struct WalkRoot { + // The parsed glob specifications + specs: Vec, + // The maximum number of parent dirs we need to ascend + max_parent_dirs: usize, +} + +/// Globs rebased to a common root +pub struct RebasedGlobs { + // The new root directory to search from + pub root: PathBuf, + // The globs with the rebased patterns + pub globs: Vec, +} + +impl WalkRoot { + /// Build a list of globs into a structure that we can use to rebase or reparent + /// the globs when given + pub fn build<'t>(globs: impl IntoIterator) -> Result { + let mut specs = Vec::new(); + let mut max_parent_dirs = 0usize; + + for glob in globs { + let negated = glob.starts_with('!'); + let glob = if negated { &glob[1..] } else { glob }; + + // First split of any relative part information + let (prefix, pattern) = split_path_and_glob(glob); + + // Having an empty glob is an error + if pattern.is_empty() { + return Err(WalkRootsError::EmptyGlob { + prefix: prefix.to_string(), + glob: glob.to_string(), + }); + } + + let normalized_prefix = normalize_relative(Path::new(prefix)); + // This will determine how we need to rebase the globs + let mut parent_dirs = 0usize; + let mut concrete_components = Vec::new(); + + // Loop over components and split into concrete and relative parts + for comp in normalized_prefix.components() { + match comp { + Component::ParentDir => parent_dirs += 1, + Component::CurDir => {} + Component::Normal(s) => { + concrete_components.push(s.to_string_lossy().into_owned()); + } + Component::RootDir | Component::Prefix(_) => { + return Err(WalkRootsError::AbsolutePrefix { + prefix: prefix.to_string(), + }); + } + } + } + + // We skip !**/ patterns for rebasing, as we would probably always want to apply those + let skip_rebase = + negated && normalized_prefix.as_os_str().is_empty() && pattern.starts_with("**/"); + + max_parent_dirs = max_parent_dirs.max(parent_dirs); + specs.push(GlobSpec { + negated, + parent_dirs, + concrete_components, + pattern: pattern.to_string(), + skip_rebase, + }); + } + + Ok(Self { + specs, + max_parent_dirs, + }) + } + + pub fn is_empty(&self) -> bool { + self.specs.is_empty() + } + + /// Rebase the globs into the designated roots + /// How this rebasing works is determined by the input globs. 
+ /// This only actually does something when we have some "relative" globs + /// Like `../../*.rs` or something of the sort + pub fn rebase(&self, root: &Path) -> Result { + if self.specs.is_empty() { + return Ok(RebasedGlobs { + root: root.to_path_buf(), + globs: Vec::new(), + }); + } + + // Count all available components in the path + let available = root + .components() + .filter(|c| matches!(c, Component::Normal(_) | Component::Prefix(_))) + .count(); + + if available < self.max_parent_dirs { + // This happens when we have a glob somewhere like + // `../../../foo` but we try to search in `/tmp` + // in that case we cannot ascend up high enough + return Err(WalkRootsError::CannotAscend { + required: self.max_parent_dirs, + root: root.to_path_buf(), + }); + } + + // We are going to modify till we get to the root + let mut effective_root = root.to_path_buf(); + let mut popped = Vec::with_capacity(self.max_parent_dirs); + for _ in 0..self.max_parent_dirs { + let name = effective_root + .file_name() + .map(|n| n.to_string_lossy().into_owned()) + .expect("bug: checked available components beforehand"); + effective_root.pop(); + popped.push(name); + } + popped.reverse(); + + let mut rebased = Vec::with_capacity(self.specs.len()); + for spec in &self.specs { + // Skip rebasing entirely + if spec.skip_rebase { + rebased.push(SimpleGlob::new(spec.pattern.clone(), spec.negated)); + continue; + } + + let keep_from_prefix = self.max_parent_dirs.saturating_sub(spec.parent_dirs); + + // Create the glob prefix + let mut components = Vec::new(); + components.extend(popped.iter().take(keep_from_prefix).cloned()); + components.extend(spec.concrete_components.iter().cloned()); + + let rebased_pattern = if components.is_empty() { + // No rebasing needs to be performed + spec.pattern.clone() + } else { + // Rebase the glob with the calculated parent + format!("{}/{}", components.join("/"), spec.pattern) + }; + + rebased.push(SimpleGlob::new(rebased_pattern, spec.negated)); + } + + Ok(RebasedGlobs { + root: effective_root, + globs: rebased, + }) + } +} + +/// Split a pattern into (path_prefix, glob_part). +/// - `path_prefix` ends at the last separator before the first glob metachar (`* ? [ {`) +/// and includes that separator (e.g. "src/"). +/// - `glob_part` is the rest starting from the component that contains the first meta. +/// If no glob is present, returns ("", input). +/// +/// Examples: +/// "../.././../*.{rs,cc}" -> ("../.././../", "*.{rs,cc}") +/// "src/*/test?.rs" -> ("src/", "*/test?.rs") +/// "*.rs" -> ("", "*.rs") +/// "plain/path" -> ("", "plain/path") +pub fn split_path_and_glob(input: &str) -> (&str, &str) { + fn is_meta(c: char) -> bool { + matches!(c, '*' | '?' 
| '[' | '{') + } + + fn is_sep(c: char) -> bool { + c == '/' + } + for (i, ch) in input.char_indices() { + if is_meta(ch) { + if let Some(sep_idx) = input[..i].rfind(|c: char| is_sep(c)) { + return (&input[..=sep_idx], &input[sep_idx + 1..]); + } else { + return ("", input); + } + } + } + + // In this case we have not found any meta patterns and we can assume the glob can be in the form of a file match like + // foo/bar.txt, because we will need to add a current directory `./` separator as we are using ignore and gitignore style + // glob rules + ("", input) +} + +/// Normalize paths like `../.././` into paths like `../../` +pub fn normalize_relative(path: &Path) -> PathBuf { + let mut out = PathBuf::new(); + for comp in path.components() { + match comp { + Component::CurDir => {} + _ => out.push(comp.as_os_str()), + } + } + out +} + +#[cfg(test)] +mod tests { + use std::path::Path; + + use super::{WalkRoot, normalize_relative, split_path_and_glob}; + use insta::assert_yaml_snapshot; + use serde::Serialize; + + #[derive(Serialize)] + struct SnapshotWalk { + root: String, + globs: Vec, + } + + #[derive(Serialize)] + struct SnapshotGlob { + pattern: String, + negated: bool, + } + + fn snapshot_walk_roots(plan: &WalkRoot, root: &Path) -> SnapshotWalk { + let rebased = plan.rebase(root).expect("rebase should succeed"); + let root_str = rebased.root.display().to_string().replace('\\', "/"); + let globs = rebased + .globs + .iter() + .map(|g| SnapshotGlob { + pattern: g.normalized_pattern().to_string(), + negated: g.is_negated(), + }) + .collect(); + SnapshotWalk { + root: root_str, + globs, + } + } + + #[test] + fn test_split_path_and_glob() { + assert_eq!( + split_path_and_glob("../.././../*.{rs,cc}"), + ("../.././../", "*.{rs,cc}") + ); + assert_eq!( + split_path_and_glob("src/*/test?.rs"), + ("src/", "*/test?.rs") + ); + assert_eq!(split_path_and_glob("*.rs"), ("", "*.rs")); + assert_eq!(split_path_and_glob("plain/path"), ("", "plain/path")); + assert_eq!(split_path_and_glob("foo[ab]/bar"), ("", "foo[ab]/bar")); + assert_eq!(split_path_and_glob("pixi.toml"), ("", "pixi.toml")); + } + + #[test] + fn test_normalize() { + assert_eq!( + normalize_relative(Path::new("./.././.././")), + Path::new("../../") + ); + } + + // Couple of test cases to check that rebasing works as expected + #[test] + fn determine_groups_globs_by_normalized_prefix() { + let globs = [ + "./src/**/*.rs", + "!./src/**/*.tmp", + "../include/*.c", + "!.pixi/**", + "!**/.pixi/**", + "**/*.cpp", + ]; + + let walk_roots = WalkRoot::build(globs).expect("determine should succeed"); + + assert_yaml_snapshot!( + snapshot_walk_roots(&walk_roots, Path::new("workspace/baz")), + @r###" + root: workspace + globs: + - pattern: baz/src/**/*.rs + negated: false + - pattern: baz/src/**/*.tmp + negated: true + - pattern: include/*.c + negated: false + - pattern: baz/.pixi/** + negated: true + - pattern: "**/.pixi/**" + negated: true + - pattern: baz/**/*.cpp + negated: false + "### + ); + } + + // Check that nothing happens when rebasing + #[test] + fn determine_handles_globs_without_prefix() { + let globs = ["*.rs", "!*.tmp"]; + + let walk_roots = WalkRoot::build(globs).expect("determine should succeed"); + + assert_yaml_snapshot!( + snapshot_walk_roots(&walk_roots, Path::new("workspace/baz")), + @r###" + root: workspace/baz + globs: + - pattern: "*.rs" + negated: false + - pattern: "*.tmp" + negated: true + "### + ); + } + + #[test] + fn iterates_over_roots_and_globs() { + let globs = ["src/**/*.rs", "!src/**/generated.rs", "docs/**/*.md"]; 
+ + let walk_roots = WalkRoot::build(globs).expect("determine should succeed"); + assert_yaml_snapshot!( + snapshot_walk_roots(&walk_roots, Path::new("workspace")), + @r###" + root: workspace + globs: + - pattern: src/**/*.rs + negated: false + - pattern: src/**/generated.rs + negated: true + - pattern: docs/**/*.md + negated: false + "### + ); + } + + #[test] + fn determine_negated_directory_glob_sticks_to_root() { + let globs = ["!.pixi/**", "../*.{cc,cpp}"]; + + let walk_roots = WalkRoot::build(globs).expect("determine should succeed"); + + assert_yaml_snapshot!( + snapshot_walk_roots(&walk_roots, Path::new("workspace/baz")), + @r###" + root: workspace + globs: + - pattern: baz/.pixi/** + negated: true + - pattern: "*.{cc,cpp}" + negated: false + "### + ); + } + + #[test] + fn single_file_match() { + let globs = ["pixi.toml", "../*.{cc,cpp}"]; + + let walk_roots = WalkRoot::build(globs).expect("determine should succeed"); + + assert_yaml_snapshot!( + snapshot_walk_roots(&walk_roots, Path::new("workspace/baz")), + @r###" + root: workspace + globs: + - pattern: baz/pixi.toml + negated: false + - pattern: "*.{cc,cpp}" + negated: false + "### + ); + } +} diff --git a/crates/pixi_glob/src/snapshots/pixi_glob__glob_hash__test__glob_hash_case_1_satisfiability.snap b/crates/pixi_glob/src/snapshots/pixi_glob__glob_hash__test__glob_hash_case_1_satisfiability.snap index 30d9dc7ece..6c00ee8b49 100644 --- a/crates/pixi_glob/src/snapshots/pixi_glob__glob_hash__test__glob_hash_case_1_satisfiability.snap +++ b/crates/pixi_glob/src/snapshots/pixi_glob__glob_hash__test__glob_hash_case_1_satisfiability.snap @@ -1,7 +1,6 @@ --- source: crates/pixi_glob/src/glob_hash.rs expression: snapshot -snapshot_kind: text --- Globs: - tests/data/satisfiability/source-dependency/**/* diff --git a/crates/pixi_global/src/project/mod.rs b/crates/pixi_global/src/project/mod.rs index e721d49c2f..a7a0543052 100644 --- a/crates/pixi_global/src/project/mod.rs +++ b/crates/pixi_global/src/project/mod.rs @@ -31,6 +31,7 @@ use pixi_core::repodata::Repodata; use pixi_manifest::PrioritizedChannel; use pixi_progress::global_multi_progress; use pixi_reporters::TopLevelProgress; +use pixi_spec::{BinarySpec, PathBinarySpec}; use pixi_spec_containers::DependencyMap; use pixi_utils::{ executable_from_path, @@ -40,7 +41,7 @@ use pixi_utils::{ }; use rattler_conda_types::{ ChannelConfig, GenericVirtualPackage, MatchSpec, PackageName, Platform, PrefixRecord, - menuinst::MenuMode, + menuinst::MenuMode, package::ArchiveIdentifier, }; use rattler_repodata_gateway::Gateway; // Removed unused rattler_solve imports @@ -120,7 +121,7 @@ pub struct Project { /// The manifest for the project pub manifest: Manifest, /// The global configuration as loaded from the config file(s) - config: Config, + pub config: Config, /// Root directory of the global environments pub(crate) env_root: EnvRoot, /// Binary directory @@ -303,7 +304,10 @@ impl Project { .expect("manifest path should always have a parent") .to_owned(); - let config = Config::load(&root); + // Load the global config and ensure + // that the root_dir is relative to the manifest directory + let mut config = Config::load_global(); + config.channel_config.root_dir = root.clone(); let client = OnceCell::new(); let repodata_gateway = OnceCell::new(); @@ -1361,26 +1365,18 @@ impl Project { }) } - /// Infer the package name from a PixiSpec (path or git) by examining build - /// outputs - pub async fn infer_package_name_from_spec( + /// Infer the package name from a SourceSpec by examining build 
outputs + async fn infer_package_name_from_source_spec( &self, - pixi_spec: &pixi_spec::PixiSpec, + source_spec: pixi_spec::SourceSpec, ) -> Result { - let pinned_source_spec = match pixi_spec.clone().into_source_or_binary() { - Either::Left(source_spec) => { - let command_dispatcher = self.command_dispatcher()?; - let checkout = command_dispatcher - .pin_and_checkout(source_spec) - .await - .map_err(|e| InferPackageNameError::BuildBackendMetadata(Box::new(e)))?; + let command_dispatcher = self.command_dispatcher()?; + let checkout = command_dispatcher + .pin_and_checkout(source_spec) + .await + .map_err(|e| InferPackageNameError::BuildBackendMetadata(Box::new(e)))?; - checkout.pinned - } - Either::Right(_) => { - return Err(InferPackageNameError::UnsupportedSpecType); - } - }; + let pinned_source_spec = checkout.pinned; // Create the metadata spec let metadata_spec = BuildBackendMetadataSpec { @@ -1422,6 +1418,27 @@ impl Project { } } } + + /// Infer the package name from a PixiSpec (path or git) by examining build + /// outputs + pub async fn infer_package_name_from_spec( + &self, + pixi_spec: &pixi_spec::PixiSpec, + ) -> Result { + match pixi_spec.clone().into_source_or_binary() { + Either::Left(source_spec) => { + self.infer_package_name_from_source_spec(source_spec).await + } + Either::Right(binary_spec) => match binary_spec { + BinarySpec::Path(PathBinarySpec { path }) => path + .file_name() + .and_then(ArchiveIdentifier::try_from_filename) + .and_then(|iden| PackageName::from_str(&iden.name).ok()) + .ok_or(InferPackageNameError::UnsupportedSpecType), + _ => Err(InferPackageNameError::UnsupportedSpecType), + }, + } + } } impl Repodata for Project { diff --git a/crates/pixi_task/src/file_hashes.rs b/crates/pixi_task/src/file_hashes.rs index ca3778f396..19e2a508a5 100644 --- a/crates/pixi_task/src/file_hashes.rs +++ b/crates/pixi_task/src/file_hashes.rs @@ -91,7 +91,7 @@ impl FileHashes { ignore_builder.push(pat); } - let glob = GlobSet::create(ignore_builder.iter().map(|s| s.as_str()))?; + let glob = GlobSet::create(ignore_builder.iter().map(|s| s.as_str())); // Spawn a thread that will collect the results from a channel. let (tx, rx) = crossbeam_channel::bounded(100); @@ -102,7 +102,7 @@ impl FileHashes { let collect_root = Arc::new(root.to_owned()); // Collect all entries first to avoid holding lock during iteration - let entries: Vec<_> = glob.filter_directory(root).collect::, _>>()?; + let entries = glob.collect_matching(root)?; // Force the initialization of the rayon thread pool to avoid implicit creation // by the Installer. 
@@ -113,19 +113,20 @@ impl FileHashes {
                 let tx = tx.clone();
                 let collect_root = Arc::clone(&collect_root);

-                let result: Result<(PathBuf, String), FileHashesError> = if entry.file_type().is_dir() {
-                    // Skip directories
-                    return;
-                } else {
-                    compute_file_hash(entry.path()).map(|hash| {
-                        let path = entry
-                            .path()
-                            .strip_prefix(&*collect_root)
-                            .expect("path is not prefixed by the root");
-                        tracing::info!("Added hash for file: {:?}", path);
-                        (path.to_owned(), hash)
-                    })
-                };
+                let result: Result<(PathBuf, String), FileHashesError> =
+                    if entry.file_type().map(|ft| ft.is_dir()).unwrap_or(false) {
+                        // Skip directories
+                        return;
+                    } else {
+                        compute_file_hash(entry.path()).map(|hash| {
+                            let path = entry
+                                .path()
+                                .strip_prefix(&*collect_root)
+                                .expect("path is not prefixed by the root");
+                            tracing::info!("Added hash for file: {:?}", path);
+                            (path.to_owned(), hash)
+                        })
+                    };

                 // Send result to channel - if it fails, we just continue with the next item
                 let _ = tx.send(result);
@@ -188,14 +189,14 @@ mod test {
             .await
             .unwrap();

-        assert!(
-            !hashes.files.contains_key(Path::new("build.rs")),
-            "build.rs should not be included"
-        );
         assert!(
             !hashes.files.contains_key(Path::new("src/lib.rs")),
             "lib.rs should not be included"
         );
+        assert!(
+            !hashes.files.contains_key(Path::new("build.rs")),
+            "build.rs should not be included"
+        );
         assert_matches!(
             hashes
                 .files
diff --git a/docs/first_workspace.md b/docs/first_workspace.md
index 03fda28d69..8088a7e9d5 100644
--- a/docs/first_workspace.md
+++ b/docs/first_workspace.md
@@ -1,6 +1,7 @@
 # Making a Pixi workspace

 Pixi's biggest strength is its ability to create reproducible, powerful, and flexible workspaces.
+A workspace lives in a directory on your system, and is a collection of Pixi environments that can be used to develop one or many projects in that directory.
 Let's go over the common steps to create a simple Pixi workspace.

 ## Creating a Pixi workspace
diff --git a/docs/reference/cli/pixi/global/add.md b/docs/reference/cli/pixi/global/add.md
index e336e52d80..64890f9ee7 100644
--- a/docs/reference/cli/pixi/global/add.md
+++ b/docs/reference/cli/pixi/global/add.md
@@ -18,7 +18,7 @@ pixi global add [OPTIONS] --environment <ENVIRONMENT> [PACKAGE]...

 ## Options
 - `--path <PATH>`
-: The path to the local directory
+: The path to the local package
 - `--environment (-e) <ENVIRONMENT>`
 : Specifies the environment that the dependencies need to be added to
**required**: `true`
diff --git a/docs/reference/cli/pixi/global/install.md b/docs/reference/cli/pixi/global/install.md
index d7793b34a3..aa04a972eb 100644
--- a/docs/reference/cli/pixi/global/install.md
+++ b/docs/reference/cli/pixi/global/install.md
@@ -18,7 +18,7 @@ pixi global install [OPTIONS] [PACKAGE]...

 ## Options
 - `--path <PATH>`
-: The path to the local directory
+: The path to the local package
 - `--channel (-c) <CHANNEL>`
 : The channels to consider as a name or a url. Multiple channels can be specified by using this field multiple times
May be provided more than once.
diff --git a/docs/reference/cli/pixi/upgrade.md b/docs/reference/cli/pixi/upgrade.md
index a0674414c3..69aedade3f 100644
--- a/docs/reference/cli/pixi/upgrade.md
+++ b/docs/reference/cli/pixi/upgrade.md
@@ -19,7 +19,6 @@ pixi upgrade [OPTIONS] [PACKAGES]...

 ## Options
 - `--feature (-f) <FEATURE>`
 : The feature to update
-**default**: `default`
 - `--exclude <EXCLUDE>`
 : The packages which should be excluded
May be provided more than once.
@@ -65,7 +64,7 @@ pixi upgrade [OPTIONS] [PACKAGES]...

 ## Description

 Checks if there are newer versions of the dependencies and upgrades them in the lockfile and manifest file.
-`pixi upgrade` loosens the requirements for the given packages, updates the lock file and the adapts the manifest accordingly.
+`pixi upgrade` loosens the requirements for the given packages, updates the lock file, and adapts the manifest accordingly. By default, all features are upgraded.

 --8<-- "docs/reference/cli/pixi/upgrade_extender:example"
diff --git a/docs/reference/cli/pixi/upgrade_extender b/docs/reference/cli/pixi/upgrade_extender
index 448798fbad..5833f1ac9c 100644
--- a/docs/reference/cli/pixi/upgrade_extender
+++ b/docs/reference/cli/pixi/upgrade_extender
@@ -9,6 +9,10 @@
 - `file_name`
 - `url`
 - `subdir`.
+
+!!! note
+    In v0.55.0 and earlier releases, by default only the `default` feature was upgraded.
+    Pass `--feature=default` if you want to emulate this behaviour on newer releases.
 --8<-- [end:description]

 --8<-- [start:example]
diff --git a/docs/reference/pixi_manifest.md b/docs/reference/pixi_manifest.md
index bf30d17702..e153d8dcb8 100644
--- a/docs/reference/pixi_manifest.md
+++ b/docs/reference/pixi_manifest.md
@@ -1,5 +1,6 @@
 The `pixi.toml` is the workspace manifest, also known as the Pixi workspace configuration file.
+It specifies environments for a workspace, and the package dependency requirements for those environments. It can also specify tasks which can run in those environments, as well as many other configuration options.

 A `toml` file is structured in different tables.
 This document will explain the usage of the different tables.
diff --git a/docs/switching_from/conda.md b/docs/switching_from/conda.md
index 1b8dc917cf..47c6cc37c3 100644
--- a/docs/switching_from/conda.md
+++ b/docs/switching_from/conda.md
@@ -67,11 +67,7 @@ bat pixi.toml

 ## Automated switching

-With `pixi` you can import `environment.yml` files into a Pixi workspace. (See [import](../reference/cli/pixi/init.md))
-```shell
-pixi init --import environment.yml
-```
-This will create a new workspace with the dependencies from the `environment.yml` file.
+You can import `environment.yml` files into a Pixi workspace — see our [import tutorial](../tutorials/import.md).

 ??? tip "Exporting your environment"
     If you are working with Conda users or systems, you can [export your environment to a `environment.yml`](../reference/cli/pixi/workspace/export.md) file to share them.
diff --git a/docs/tutorials/import.md b/docs/tutorials/import.md
new file mode 100644
index 0000000000..c7b047a888
--- /dev/null
+++ b/docs/tutorials/import.md
@@ -0,0 +1,145 @@
+In this tutorial we will show you how to import existing environments into a Pixi workspace.
+In case some words used in the tutorial don't make sense to you, you may get value from first
+reading some of our other tutorials, like [our first workspace walkthrough](../first_workspace.md) and [our guide to multi-environment workspaces](./multi_environment.md).
+
+## `pixi import`
+Within any Pixi workspace, you can use [`pixi import`](https://pixi.sh/latest/reference/cli/pixi/import/) to import an environment from a given file. At the time of writing, we support two import file formats: `conda-env` and `pypi-txt`. Running `pixi import` without providing a `--format` will try each format in turn until one succeeds, or return an error if all formats fail.
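+
+For example, with an `environment.yml` in the current directory, the following two commands behave the same (a quick sketch; `--format` is demonstrated throughout the examples below):
+
+```shell
+# Let pixi try each supported import format in turn
+pixi import environment.yml
+
+# Or name the format explicitly to skip detection
+pixi import --format=conda-env environment.yml
+```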
+ +If you don't already have a Pixi workspace, you can create one with [`pixi init`](https://pixi.sh/latest/reference/cli/pixi/init/). + +### `conda-env` format +The `conda-env` format is for files in the conda ecosystem (typically called `environment.yml`) following [the syntax specified in the conda docs](https://docs.conda.io/projects/conda/en/latest/user-guide/tasks/manage-environments.html#create-env-file-manually). Suppose our environment to import is specified in this file: + +```yaml title="environment.yml" +name: simple-env +channels: ["conda-forge"] +dependencies: +- python +- pip: + - httpx +``` + +We can then run `pixi import --format=conda-env environment.yml` to import the environment into our workspace. By default, since our `environment.yml` has a `name` field, this creates a `feature` of the same name (or uses the feature of that name if it already exists), and creates an `environment` containing that feature (with [`no-default-feature`](https://pixi.sh/latest/reference/pixi_manifest/#the-environments-table) set): + +```toml title="pixi.toml" +[feature.simple-env] +channels = ["conda-forge"] + +[feature.simple-env.dependencies] +python = "*" + +[feature.simple-env.pypi-dependencies] +httpx = "*" + +[environments] +simple-env = { features = ["simple-env"], no-default-feature = true } +``` + +It is then possible to define tasks for that environment, run commands in that environment, and launch a [`pixi shell`](https://pixi.sh/latest/reference/cli/pixi/shell) in that environment — see the [getting started guide](../getting_started.md) for links to start learning about these topics! + +For files without a `name` field, or to override the default behaviour, you can specify custom `--feature` and `--environment` names. This also allows importing into existing features and environments (including the `default` feature and environment). For example, given this other environment file to import: + +```yaml title="env2.yml" +channels: ["conda-forge"] +dependencies: +- numpy +``` + +Running `pixi import --format=conda-env --feature=numpy --environment=simple-env env2.yml` will import the environment into a new feature called "numpy", and include that feature in the existing `simple-env` environment (effectively merging the environments from our two input files): + +```toml title="pixi.toml" +[feature.simple-env] +channels = ["conda-forge"] + +[feature.simple-env.dependencies] +python = "*" + +[feature.simple-env.pypi-dependencies] +httpx = "*" + +[feature.numpy] +channels = ["conda-forge"] + +[feature.numpy.dependencies] +numpy = "*" + +[environments] +simple-env = { features = ["simple-env", "numpy"], no-default-feature = true } +``` + +It is also possible to specify platforms for the feature via the `--platform` argument. For example, `pixi import --format=conda-env --feature=unix --platform=linux-64 --platform=osx-arm64 environment.yml` adds the following to our workspace manifest: + +```toml title="pixi.toml" +[feature.unix] +platforms = ["linux-64", "osx-arm64"] +channels = ["conda-forge"] + +[feature.unix.target.linux-64.dependencies] +python = "*" + +[feature.unix.target.osx-arm64.dependencies] +python = "*" + +[environments] +unix = { features = ["unix"], no-default-feature = true } +``` + +### `pypi-txt` format +The `pypi-txt` format is for files in the PyPI ecosystem following [the requirements file format specification in the `pip` docs](https://pip.pypa.io/en/stable/reference/requirements-file-format/). 
+
+Suppose our environment to import is specified in this file:
+
+```txt title="requirements.txt"
+cowpy
+array-api-extra>=0.8
+```
+
+We can then run `pixi import --format=pypi-txt --feature=my-feature1 requirements.txt` to import the environment into our workspace. It is necessary to specify a `feature` or `environment` name (or both) via the arguments of the same names. If only one of these names is provided, a matching name is used for the other field. Hence, the following lines are added to our workspace manifest:
+
+```toml title="pixi.toml"
+[feature.my-feature1.pypi-dependencies]
+cowpy = "*"
+array-api-extra = ">=0.8"
+
+[environments]
+my-feature1 = { features = ["my-feature1"], no-default-feature = true }
+```
+
+Any dependencies listed in the file are added as [`pypi-dependencies`](https://pixi.sh/latest/reference/pixi_manifest/#pypi-dependencies). An environment will be created with [`no-default-feature`](https://pixi.sh/latest/reference/pixi_manifest/#the-environments-table) set if the given environment name does not already exist.
+
+It is then possible to define tasks for that environment, run commands in that environment, and launch a [`pixi shell`](https://pixi.sh/latest/reference/cli/pixi/shell) in that environment — see the [getting started guide](../getting_started.md) for links to start learning about these topics!
+
+Just like the `conda-env` format, it is possible to import into existing features/environments (including the `default` feature/environment), and set specific platforms for the feature. See the previous section for details.
+
+## `pixi init --import`
+It is also possible to combine the steps of `pixi init` and `pixi import` into one, via [`pixi init --import`](https://pixi.sh/latest/reference/cli/pixi/init/#arg---import). For example, `pixi init --import environment.yml` (using the same file from our example above) produces a manifest which looks like this:
+
+```toml title="pixi.toml"
+[workspace]
+authors = ["Lucas Colley "]
+channels = ["conda-forge"]
+name = "simple-env"
+platforms = ["osx-arm64"]
+version = "0.1.0"
+
+[tasks]
+
+[dependencies]
+python = "*"
+
+[pypi-dependencies]
+httpx = "*"
+```
+
+Unlike `pixi import`, this by default uses the `default` feature and environment. Thus, it achieves a very similar workspace to that obtained by running `pixi init ` and `pixi import --feature=default environment.yml`.
+
+One difference is that `pixi init --import` will by default inherit its name from the given import file (if the file specifies the `name` field), rather than from its working directory.
+
+!!! note "Supported formats"
+    At the time of writing, only the `conda-env` format is supported by `pixi init --import`.
+
+## Conclusion
+For further details, please see the CLI reference documentation for [`pixi import`](https://pixi.sh/latest/reference/cli/pixi/import/) and [`pixi init --import`](https://pixi.sh/latest/reference/cli/pixi/init/#arg---import).
+If there are any questions, or you know how to improve this tutorial, feel free to reach out to us on [GitHub](https://github.com/prefix-dev/pixi).
+
+At the time of writing, there are plans for many potential extensions to our import capabilities — you can follow along with that work at the `import` [roadmap issue on GitHub](https://github.com/prefix-dev/pixi/issues/4192).
diff --git a/mkdocs.yml b/mkdocs.yml index db131397bf..fe60d1f8e0 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -122,6 +122,7 @@ nav: - Conda/Mamba: switching_from/conda.md - Multi Environment: tutorials/multi_environment.md - Global Tools: global_tools/introduction.md + - Import Environments: tutorials/import.md - Concepts: - Environments: workspace/environment.md - Tasks: workspace/advanced_tasks.md diff --git a/pixi.toml b/pixi.toml index 9379e337dd..1a2462498d 100644 --- a/pixi.toml +++ b/pixi.toml @@ -40,7 +40,7 @@ test-all-extra-slow = { depends-on = [ test-all-fast = { depends-on = ["test-fast", "test-integration-fast"] } test-all-slow = { depends-on = ["test-slow", "test-integration-slow"] } test-fast = """RUST_LOG="debug,resolvo=info" cargo nextest run --workspace --all-targets""" -test-slow = """RUST_LOG="debug,resolvo=info" cargo nextest run --workspace --all-targets --features slow_integration_tests,online_tests +test-slow = """RUST_LOG="debug,resolvo=info" cargo nextest run --workspace --all-targets --features slow_integration_tests,online_tests --exclude pixi_bench --status-level skip --failure-output immediate-final --no-fail-fast --final-status-level slow""" [feature.pytest.dependencies] @@ -82,6 +82,11 @@ test-specific-test = { cmd = "pytest -k '{{ test_substring }}'", args = [ ], depends-on = [ "build-release", ] } +test-specific-test-debug = { cmd = "pytest --pixi-build=debug -k '{{ test_substring }}'", args = [ + "test_substring", +], depends-on = [ + "build-debug", +] } # Update one test channel by passing on value of `mappings.toml` # e.g. "multiple_versions_channel_1" update-test-channel = { cmd = "python update-channels.py {{ channel }}", args = [ diff --git a/tests/integration_python/pixi_global/test_global.py b/tests/integration_python/pixi_global/test_global.py index 81fb02936d..b16535a85a 100644 --- a/tests/integration_python/pixi_global/test_global.py +++ b/tests/integration_python/pixi_global/test_global.py @@ -1,3 +1,4 @@ +import os import platform import shutil import tomllib @@ -2211,3 +2212,156 @@ def test_tree_invert(pixi: Path, tmp_path: Path, dummy_channel_1: str) -> None: env=env, stdout_contains=["dummy-c", "dummy-a 0.1.0"], ) + + +class TestCondaFile: + @pytest.mark.parametrize("path_arg", [True, False]) + def test_install_conda_file( + self, pixi: Path, tmp_path: Path, shortcuts_channel_1: str, path_arg: bool + ) -> None: + """Test directly installing a `.conda` file with `pixi global`""" + env = {"PIXI_HOME": str(tmp_path), "PIXI_CACHE_DIR": str(tmp_path / "foo")} + cwd = tmp_path + + conda_file = tmp_path / "pixi-editor-1.0.0-h4616a5c_0.conda" + shutil.copyfile( + Path.from_uri(shortcuts_channel_1) / "noarch" / "pixi-editor-1.0.0-h4616a5c_0.conda", + conda_file, + ) + + def check_install(conda_file_path: Path, cwd: Path): + if path_arg: + verify_cli_command( + [pixi, "global", "install", "--path", conda_file_path], env=env, cwd=cwd + ) + else: + verify_cli_command( + [pixi, "global", "install", conda_file_path], + env=env, + expected_exit_code=ExitCode.FAILURE, + stderr_contains="please pass `--path`", + cwd=cwd, + ) + + # check absolute path + check_install(conda_file, cwd) + + # check relative path in same dir + cwd = conda_file.parent + relative_conda_file = conda_file.relative_to(cwd, walk_up=True) + check_install(relative_conda_file, cwd) + + # check relative path in subdir + cwd = conda_file.parent.parent + relative_conda_file = conda_file.relative_to(cwd, walk_up=True) + check_install(relative_conda_file, cwd) + + # check relative path in a 
'cousin' relative directory + cwd = tmp_path + relative_conda_file = conda_file.relative_to(cwd, walk_up=True) + check_install(relative_conda_file, cwd) + + def test_update_sync_conda_file( + self, pixi: Path, tmp_path: Path, shortcuts_channel_1: str + ) -> None: + """Test that `pixi global {update, sync}` work and use the existing file.""" + env = {"PIXI_HOME": str(tmp_path), "PIXI_CACHE_DIR": str(tmp_path / "foo")} + cwd = tmp_path + + package_name = "pixi-editor" + conda_file = tmp_path / "pixi-editor-1.0.0-h4616a5c_0.conda" + shutil.copyfile( + Path.from_uri(shortcuts_channel_1) / "noarch" / "pixi-editor-1.0.0-h4616a5c_0.conda", + conda_file, + ) + + verify_cli_command( + [ + pixi, + "global", + "install", + "--path", + conda_file, + ], + env=env, + cwd=cwd, + ) + + # update with file still there + verify_cli_command( + [ + pixi, + "global", + "update", + "pixi-editor", + ], + env=env, + cwd=cwd, + stderr_contains="Environment pixi-editor was already up-to-date.", + ) + + # sync with file still there + verify_cli_command( + [ + pixi, + "global", + "sync", + ], + env=env, + cwd=cwd, + stderr_contains="Nothing to do", + ) + + os.remove(conda_file) + + # update with file gone + verify_cli_command( + [ + pixi, + "global", + "update", + "pixi-editor", + ], + env=env, + cwd=cwd, + stderr_contains="Environment pixi-editor was already up-to-date.", + ) + + # sync with file gone + verify_cli_command( + [ + pixi, + "global", + "sync", + ], + env=env, + cwd=cwd, + stderr_contains="Nothing to do", + ) + + # remove the environment + # XXX: should this fail instead? + shutil.rmtree(tmp_path / "envs" / package_name) + + # update with environment removed + verify_cli_command( + [ + pixi, + "global", + "update", + "pixi-editor", + ], + env=env, + cwd=cwd, + ) + + # sync with environment removed + verify_cli_command( + [ + pixi, + "global", + "sync", + ], + env=env, + cwd=cwd, + ) diff --git a/tests/integration_python/test_main_cli.py b/tests/integration_python/test_main_cli.py index 81ae36176f..a4da43ec1c 100644 --- a/tests/integration_python/test_main_cli.py +++ b/tests/integration_python/test_main_cli.py @@ -1,5 +1,4 @@ import json -import os import platform import shutil import sys @@ -343,376 +342,6 @@ def test_simple_project_setup(pixi: Path, tmp_pixi_workspace: Path) -> None: ) -def test_upgrade_package_does_not_exist( - pixi: Path, tmp_pixi_workspace: Path, multiple_versions_channel_1: str -) -> None: - manifest_path = tmp_pixi_workspace / "pixi.toml" - - # Create a new project - verify_cli_command([pixi, "init", "--channel", multiple_versions_channel_1, tmp_pixi_workspace]) - - # Add package - verify_cli_command([pixi, "add", "--manifest-path", manifest_path, "package"]) - - # Similar package names that don't exist should get suggestions - verify_cli_command( - [pixi, "upgrade", "--manifest-path", manifest_path, "package_similar_name"], - ExitCode.FAILURE, - stderr_contains=[ - "could not find a package named 'package_similar_name'", - "did you mean 'package'", - ], - ) - - verify_cli_command( - [pixi, "upgrade", "--manifest-path", manifest_path, "different_name"], - ExitCode.FAILURE, - stderr_contains="could not find a package named 'different_name'", - stderr_excludes="did you mean 'package'", - ) - - -def test_upgrade_conda_package( - pixi: Path, tmp_pixi_workspace: Path, multiple_versions_channel_1: str -) -> None: - manifest_path = tmp_pixi_workspace / "pixi.toml" - - # Create a new project - verify_cli_command([pixi, "init", "--channel", multiple_versions_channel_1, tmp_pixi_workspace]) - 
- # Add package pinned to version 0.1.0 - verify_cli_command( - [ - pixi, - "add", - "--manifest-path", - manifest_path, - f"package==0.1.0[channel={multiple_versions_channel_1},build_number=0]", - ] - ) - parsed_manifest = tomllib.loads(manifest_path.read_text()) - package = parsed_manifest["dependencies"]["package"] - assert package["version"] == "==0.1.0" - assert package["channel"] == multiple_versions_channel_1 - assert package["build-number"] == "==0" - - # Upgrade package, it should now be at 0.2.0, with semver ranges - # The channel should still be specified - verify_cli_command( - [pixi, "upgrade", "--manifest-path", manifest_path, "package"], - stderr_contains=["package", "0.1.0", "0.2.0"], - ) - parsed_manifest = tomllib.loads(manifest_path.read_text()) - package = parsed_manifest["dependencies"]["package"] - assert package["version"] == ">=0.2.0,<0.3" - assert package["channel"] == multiple_versions_channel_1 - assert "build-number" not in package - - -def test_upgrade_exclude( - pixi: Path, tmp_pixi_workspace: Path, multiple_versions_channel_1: str -) -> None: - manifest_path = tmp_pixi_workspace / "pixi.toml" - - # Create a new project - verify_cli_command([pixi, "init", "--channel", multiple_versions_channel_1, tmp_pixi_workspace]) - - # Add package pinned to version 0.1.0 - verify_cli_command( - [pixi, "add", "--manifest-path", manifest_path, "package==0.1.0", "package2==0.1.0"] - ) - parsed_manifest = tomllib.loads(manifest_path.read_text()) - assert parsed_manifest["dependencies"]["package"] == "==0.1.0" - assert parsed_manifest["dependencies"]["package2"] == "==0.1.0" - - # Upgrade package, it should now be at 0.2.0, with semver ranges - # package2, should still be at 0.1.0, since we excluded it - verify_cli_command( - [pixi, "upgrade", "--manifest-path", manifest_path, "--exclude", "package2"], - stderr_contains=["package", "0.1.0", "0.2.0"], - stderr_excludes="package2", - ) - parsed_manifest = tomllib.loads(manifest_path.read_text()) - assert parsed_manifest["dependencies"]["package"] == ">=0.2.0,<0.3" - assert parsed_manifest["dependencies"]["package2"] == "==0.1.0" - - -def test_upgrade_json_output( - pixi: Path, tmp_pixi_workspace: Path, multiple_versions_channel_1: str -) -> None: - manifest_path = tmp_pixi_workspace / "pixi.toml" - - # Create a new project - verify_cli_command([pixi, "init", "--channel", multiple_versions_channel_1, tmp_pixi_workspace]) - - # Add package pinned to version 0.1.0 - verify_cli_command( - [pixi, "add", "--manifest-path", manifest_path, "package==0.1.0", "package2==0.1.0"] - ) - parsed_manifest = tomllib.loads(manifest_path.read_text()) - assert parsed_manifest["dependencies"]["package"] == "==0.1.0" - assert parsed_manifest["dependencies"]["package2"] == "==0.1.0" - - # Check if json output is correct and readable - result = verify_cli_command( - [pixi, "upgrade", "--manifest-path", manifest_path, "--json"], - stdout_contains=["package", "package2", "0.1.0", "0.2.0", 'version": ', "before", "after"], - ) - - data = json.loads(result.stdout) - assert data["environment"]["default"] - - -def test_upgrade_dryrun( - pixi: Path, tmp_pixi_workspace: Path, multiple_versions_channel_1: str -) -> None: - manifest_path = tmp_pixi_workspace / "pixi.toml" - lock_file_path = tmp_pixi_workspace / "pixi.lock" - # Create a new project - verify_cli_command([pixi, "init", "--channel", multiple_versions_channel_1, tmp_pixi_workspace]) - - # Add package pinned to version 0.1.0 - verify_cli_command( - [pixi, "add", "--manifest-path", manifest_path, 
"package==0.1.0", "package2==0.1.0"] - ) - - manifest_content = manifest_path.read_text() - lock_file_content = lock_file_path.read_text() - # Rename .pixi folder, no remove to avoid remove logic. - os.renames(tmp_pixi_workspace / ".pixi", tmp_pixi_workspace / ".pixi_backup") - - parsed_manifest = tomllib.loads(manifest_path.read_text()) - assert parsed_manifest["dependencies"]["package"] == "==0.1.0" - assert parsed_manifest["dependencies"]["package2"] == "==0.1.0" - - verify_cli_command( - [pixi, "upgrade", "--manifest-path", manifest_path, "--dry-run"], - stderr_contains=["package", "0.1.0", "0.2.0"], - ) - - # Verify the manifest, lock file and .pixi folder are not modified - assert manifest_path.read_text() == manifest_content - assert lock_file_path.read_text() == lock_file_content - assert not os.path.exists(tmp_pixi_workspace / ".pixi") - - -@pytest.mark.slow -def test_upgrade_pypi_package(pixi: Path, tmp_pixi_workspace: Path) -> None: - manifest_path = tmp_pixi_workspace / "pixi.toml" - - # Create a new project - verify_cli_command([pixi, "init", tmp_pixi_workspace]) - - # Add python - verify_cli_command([pixi, "add", "--manifest-path", manifest_path, "python=3.13"]) - - # Add httpx pinned to version 0.26.0 - verify_cli_command( - [ - pixi, - "add", - "--manifest-path", - manifest_path, - "--pypi", - "httpx[cli]==0.26.0", - ] - ) - parsed_manifest = tomllib.loads(manifest_path.read_text()) - assert parsed_manifest["pypi-dependencies"]["httpx"]["version"] == "==0.26.0" - assert parsed_manifest["pypi-dependencies"]["httpx"]["extras"] == ["cli"] - - # Upgrade httpx, it should now be upgraded - # Extras should be preserved - verify_cli_command( - [pixi, "upgrade", "--manifest-path", manifest_path, "httpx"], - stderr_contains=["httpx", "0.26.0"], - ) - parsed_manifest = tomllib.loads(manifest_path.read_text()) - assert parsed_manifest["pypi-dependencies"]["httpx"]["version"] != "==0.26.0" - assert parsed_manifest["pypi-dependencies"]["httpx"]["extras"] == ["cli"] - - -@pytest.mark.slow -def test_upgrade_pypi_and_conda_package(pixi: Path, tmp_pixi_workspace: Path) -> None: - manifest_path = tmp_pixi_workspace / "pyproject.toml" - - # Create a new project - verify_cli_command( - [ - pixi, - "init", - "--format", - "pyproject", - tmp_pixi_workspace, - "--channel", - "https://prefix.dev/conda-forge", - ] - ) - - # Add pinned numpy as conda and pypi dependency - verify_cli_command([pixi, "add", "--manifest-path", manifest_path, "numpy==1.*"]) - verify_cli_command([pixi, "add", "--manifest-path", manifest_path, "--pypi", "numpy==1.*"]) - - parsed_manifest = tomllib.loads(manifest_path.read_text()) - numpy_pypi = parsed_manifest["project"]["dependencies"][0] - assert numpy_pypi == "numpy==1.*" - numpy_conda = parsed_manifest["tool"]["pixi"]["dependencies"]["numpy"] - assert numpy_conda == "1.*" - - # Upgrade numpy, both conda and pypi should be upgraded - verify_cli_command( - [pixi, "upgrade", "--manifest-path", manifest_path, "numpy"], - stderr_contains=["numpy", "1."], - ) - parsed_manifest = tomllib.loads(manifest_path.read_text()) - numpy_pypi = parsed_manifest["project"]["dependencies"][0] - assert "1.*" not in numpy_pypi - numpy_conda = parsed_manifest["tool"]["pixi"]["dependencies"]["numpy"] - assert numpy_conda != "1.*" - - -@pytest.mark.slow -def test_upgrade_dependency_location_pixi(pixi: Path, tmp_path: Path) -> None: - # Test based on https://github.com/prefix-dev/pixi/issues/2470 - # Making sure pixi places the upgraded package in the correct location - manifest_path = tmp_path 
/ "pyproject.toml" - pyproject = f""" -[project] -name = "test-upgrade" -dependencies = ["numpy==1.*"] -requires-python = "==3.13" - -[project.optional-dependencies] -cli = ["rich==12"] - -[dependency-groups] -test = ["pytest==6"] - -[tool.pixi.project] -channels = ["https://prefix.dev/conda-forge"] -platforms = ["{CURRENT_PLATFORM}"] - -[tool.pixi.pypi-dependencies] -polars = "==0.*" - -[tool.pixi.environments] -test = ["test"] - """ - - manifest_path.write_text(pyproject) - - # Upgrade numpy, both conda and pypi should be upgraded - verify_cli_command( - [pixi, "upgrade", "--manifest-path", manifest_path], - stderr_contains=["polars"], - ) - parsed_manifest = tomllib.loads(manifest_path.read_text()) - - # Check that `requrires-python` is the same - assert parsed_manifest["project"]["requires-python"] == "==3.13" - - # Check that `tool.pixi.dependencies.python` isn't added - assert "python" not in parsed_manifest.get("tool", {}).get("pixi", {}).get("dependencies", {}) - - # Check that project.dependencies are upgraded - project_dependencies = parsed_manifest["project"]["dependencies"] - numpy_pypi = project_dependencies[0] - assert "numpy" in numpy_pypi - assert "==1.*" not in numpy_pypi - assert "polars" not in project_dependencies - - # Check that the pypi-dependencies are upgraded - pypi_dependencies = parsed_manifest["tool"]["pixi"]["pypi-dependencies"] - polars_pypi = pypi_dependencies["polars"] - assert polars_pypi != "==0.*" - assert "numpy" not in pypi_dependencies - - -def test_upgrade_keep_info( - pixi: Path, tmp_pixi_workspace: Path, multiple_versions_channel_1: str -) -> None: - manifest_path = tmp_pixi_workspace / "pixi.toml" - - # Create a new project - verify_cli_command([pixi, "init", "--channel", multiple_versions_channel_1, tmp_pixi_workspace]) - - # Add package pinned to version 0.1.0 - verify_cli_command( - [ - pixi, - "add", - "--manifest-path", - manifest_path, - f"{multiple_versions_channel_1}::package3==0.1.0=ab*", - ] - ) - parsed_manifest = tomllib.loads(manifest_path.read_text()) - assert "==0.1.0" in parsed_manifest["dependencies"]["package3"]["version"] - assert "ab*" in parsed_manifest["dependencies"]["package3"]["build"] - assert multiple_versions_channel_1 in parsed_manifest["dependencies"]["package3"]["channel"] - - # Upgrade all, it should now be at 0.2.0, with the build intact - verify_cli_command( - [pixi, "upgrade", "--manifest-path", manifest_path], - stderr_contains=["package3", "0.1.0", "0.2.0"], - ) - parsed_manifest = tomllib.loads(manifest_path.read_text()) - # Update version - assert parsed_manifest["dependencies"]["package3"]["version"] == ">=0.2.0,<0.3" - # Keep build - assert "ab*" in parsed_manifest["dependencies"]["package3"]["build"] - # Keep channel - assert multiple_versions_channel_1 in parsed_manifest["dependencies"]["package3"]["channel"] - - # Upgrade package3, it should now be at 0.2.0, with the build intact because it has a wildcard - verify_cli_command( - [pixi, "upgrade", "--manifest-path", manifest_path, "package3"], - ) - parsed_manifest = tomllib.loads(manifest_path.read_text()) - # Update version - assert parsed_manifest["dependencies"]["package3"]["version"] == ">=0.2.0,<0.3" - # Keep build - assert "ab*" in parsed_manifest["dependencies"]["package3"]["build"] - # Keep channel - assert multiple_versions_channel_1 in parsed_manifest["dependencies"]["package3"]["channel"] - - -def test_upgrade_remove_info( - pixi: Path, tmp_pixi_workspace: Path, multiple_versions_channel_1: str -) -> None: - manifest_path = tmp_pixi_workspace 
/ "pixi.toml" - - # Create a new project - verify_cli_command([pixi, "init", "--channel", multiple_versions_channel_1, tmp_pixi_workspace]) - - # Add package pinned to version 0.1.0 - verify_cli_command( - [ - pixi, - "add", - "--manifest-path", - manifest_path, - f"{multiple_versions_channel_1}::package3==0.1.0=abc", - ] - ) - parsed_manifest = tomllib.loads(manifest_path.read_text()) - assert "==0.1.0" in parsed_manifest["dependencies"]["package3"]["version"] - assert "abc" in parsed_manifest["dependencies"]["package3"]["build"] - assert multiple_versions_channel_1 in parsed_manifest["dependencies"]["package3"]["channel"] - - # Upgrade package3, it should now be at 0.2.0, without the build but with the channel - verify_cli_command( - [pixi, "upgrade", "--manifest-path", manifest_path, "package3"], - ) - parsed_manifest = tomllib.loads(manifest_path.read_text()) - # Update version - assert parsed_manifest["dependencies"]["package3"]["version"] == ">=0.2.0,<0.3" - # Keep channel - assert multiple_versions_channel_1 in parsed_manifest["dependencies"]["package3"]["channel"] - # Remove build - assert "build" not in parsed_manifest["dependencies"]["package3"] - - def test_concurrency_flags( pixi: Path, tmp_pixi_workspace: Path, multiple_versions_channel_1: str ) -> None: diff --git a/tests/integration_python/test_upgrade.py b/tests/integration_python/test_upgrade.py new file mode 100644 index 0000000000..22ff56d8c3 --- /dev/null +++ b/tests/integration_python/test_upgrade.py @@ -0,0 +1,500 @@ +import json +import os +import tomllib +from pathlib import Path + +import pytest + +from .common import ( + CURRENT_PLATFORM, + ExitCode, + verify_cli_command, +) + + +def test_upgrade_package_does_not_exist( + pixi: Path, tmp_pixi_workspace: Path, multiple_versions_channel_1: str +) -> None: + manifest_path = tmp_pixi_workspace / "pixi.toml" + + # Create a new project + verify_cli_command([pixi, "init", "--channel", multiple_versions_channel_1, tmp_pixi_workspace]) + + # Add package + verify_cli_command([pixi, "add", "--manifest-path", manifest_path, "package"]) + + # Similar package names that don't exist should get suggestions + verify_cli_command( + [pixi, "upgrade", "--manifest-path", manifest_path, "package_similar_name"], + ExitCode.FAILURE, + stderr_contains=[ + "could not find a package named 'package_similar_name'", + "did you mean 'package'", + ], + ) + + verify_cli_command( + [pixi, "upgrade", "--manifest-path", manifest_path, "different_name"], + ExitCode.FAILURE, + stderr_contains="could not find a package named 'different_name'", + stderr_excludes="did you mean 'package'", + ) + + +def test_upgrade_conda_package( + pixi: Path, tmp_pixi_workspace: Path, multiple_versions_channel_1: str +) -> None: + manifest_path = tmp_pixi_workspace / "pixi.toml" + + # Create a new project + verify_cli_command([pixi, "init", "--channel", multiple_versions_channel_1, tmp_pixi_workspace]) + + # Add package pinned to version 0.1.0 + verify_cli_command( + [ + pixi, + "add", + "--manifest-path", + manifest_path, + f"package==0.1.0[channel={multiple_versions_channel_1},build_number=0]", + ] + ) + parsed_manifest = tomllib.loads(manifest_path.read_text()) + package = parsed_manifest["dependencies"]["package"] + assert package["version"] == "==0.1.0" + assert package["channel"] == multiple_versions_channel_1 + assert package["build-number"] == "==0" + + # Upgrade package, it should now be at 0.2.0, with semver ranges + # The channel should still be specified + verify_cli_command( + [pixi, "upgrade", 
"--manifest-path", manifest_path, "package"], + stderr_contains=["package", "0.1.0", "0.2.0"], + ) + parsed_manifest = tomllib.loads(manifest_path.read_text()) + package = parsed_manifest["dependencies"]["package"] + assert package["version"] == ">=0.2.0,<0.3" + assert package["channel"] == multiple_versions_channel_1 + assert "build-number" not in package + + +def test_upgrade_exclude( + pixi: Path, tmp_pixi_workspace: Path, multiple_versions_channel_1: str +) -> None: + manifest_path = tmp_pixi_workspace / "pixi.toml" + + # Create a new project + verify_cli_command([pixi, "init", "--channel", multiple_versions_channel_1, tmp_pixi_workspace]) + + # Add package pinned to version 0.1.0 + verify_cli_command( + [pixi, "add", "--manifest-path", manifest_path, "package==0.1.0", "package2==0.1.0"] + ) + parsed_manifest = tomllib.loads(manifest_path.read_text()) + assert parsed_manifest["dependencies"]["package"] == "==0.1.0" + assert parsed_manifest["dependencies"]["package2"] == "==0.1.0" + + # Upgrade package, it should now be at 0.2.0, with semver ranges + # package2, should still be at 0.1.0, since we excluded it + verify_cli_command( + [pixi, "upgrade", "--manifest-path", manifest_path, "--exclude", "package2"], + stderr_contains=["package", "0.1.0", "0.2.0"], + stderr_excludes="package2", + ) + parsed_manifest = tomllib.loads(manifest_path.read_text()) + assert parsed_manifest["dependencies"]["package"] == ">=0.2.0,<0.3" + assert parsed_manifest["dependencies"]["package2"] == "==0.1.0" + + +def test_upgrade_json_output( + pixi: Path, tmp_pixi_workspace: Path, multiple_versions_channel_1: str +) -> None: + manifest_path = tmp_pixi_workspace / "pixi.toml" + + # Create a new project + verify_cli_command([pixi, "init", "--channel", multiple_versions_channel_1, tmp_pixi_workspace]) + + # Add package pinned to version 0.1.0 + verify_cli_command( + [pixi, "add", "--manifest-path", manifest_path, "package==0.1.0", "package2==0.1.0"] + ) + parsed_manifest = tomllib.loads(manifest_path.read_text()) + assert parsed_manifest["dependencies"]["package"] == "==0.1.0" + assert parsed_manifest["dependencies"]["package2"] == "==0.1.0" + + # Check if json output is correct and readable + result = verify_cli_command( + [pixi, "upgrade", "--manifest-path", manifest_path, "--json"], + stdout_contains=["package", "package2", "0.1.0", "0.2.0", 'version": ', "before", "after"], + ) + + data = json.loads(result.stdout) + assert data["environment"]["default"] + + +def test_upgrade_dryrun( + pixi: Path, tmp_pixi_workspace: Path, multiple_versions_channel_1: str +) -> None: + manifest_path = tmp_pixi_workspace / "pixi.toml" + lock_file_path = tmp_pixi_workspace / "pixi.lock" + # Create a new project + verify_cli_command([pixi, "init", "--channel", multiple_versions_channel_1, tmp_pixi_workspace]) + + # Add package pinned to version 0.1.0 + verify_cli_command( + [pixi, "add", "--manifest-path", manifest_path, "package==0.1.0", "package2==0.1.0"] + ) + + manifest_content = manifest_path.read_text() + lock_file_content = lock_file_path.read_text() + # Rename .pixi folder, no remove to avoid remove logic. 
+ os.renames(tmp_pixi_workspace / ".pixi", tmp_pixi_workspace / ".pixi_backup") + + parsed_manifest = tomllib.loads(manifest_path.read_text()) + assert parsed_manifest["dependencies"]["package"] == "==0.1.0" + assert parsed_manifest["dependencies"]["package2"] == "==0.1.0" + + verify_cli_command( + [pixi, "upgrade", "--manifest-path", manifest_path, "--dry-run"], + stderr_contains=["package", "0.1.0", "0.2.0"], + ) + + # Verify the manifest, lock file and .pixi folder are not modified + assert manifest_path.read_text() == manifest_content + assert lock_file_path.read_text() == lock_file_content + assert not os.path.exists(tmp_pixi_workspace / ".pixi") + + +@pytest.mark.slow +def test_upgrade_pypi_package(pixi: Path, tmp_pixi_workspace: Path) -> None: + manifest_path = tmp_pixi_workspace / "pixi.toml" + + # Create a new project + verify_cli_command([pixi, "init", tmp_pixi_workspace]) + + # Add python + verify_cli_command([pixi, "add", "--manifest-path", manifest_path, "python=3.13"]) + + # Add httpx pinned to version 0.26.0 + verify_cli_command( + [ + pixi, + "add", + "--manifest-path", + manifest_path, + "--pypi", + "httpx[cli]==0.26.0", + ] + ) + parsed_manifest = tomllib.loads(manifest_path.read_text()) + assert parsed_manifest["pypi-dependencies"]["httpx"]["version"] == "==0.26.0" + assert parsed_manifest["pypi-dependencies"]["httpx"]["extras"] == ["cli"] + + # Upgrade httpx, it should now be upgraded + # Extras should be preserved + verify_cli_command( + [pixi, "upgrade", "--manifest-path", manifest_path, "httpx"], + stderr_contains=["httpx", "0.26.0"], + ) + parsed_manifest = tomllib.loads(manifest_path.read_text()) + assert parsed_manifest["pypi-dependencies"]["httpx"]["version"] != "==0.26.0" + assert parsed_manifest["pypi-dependencies"]["httpx"]["extras"] == ["cli"] + + +@pytest.mark.slow +def test_upgrade_pypi_and_conda_package(pixi: Path, tmp_pixi_workspace: Path) -> None: + manifest_path = tmp_pixi_workspace / "pyproject.toml" + + # Create a new project + verify_cli_command( + [ + pixi, + "init", + "--format", + "pyproject", + tmp_pixi_workspace, + "--channel", + "https://prefix.dev/conda-forge", + ] + ) + + # Add pinned numpy as conda and pypi dependency + verify_cli_command([pixi, "add", "--manifest-path", manifest_path, "numpy==1.*"]) + verify_cli_command([pixi, "add", "--manifest-path", manifest_path, "--pypi", "numpy==1.*"]) + + parsed_manifest = tomllib.loads(manifest_path.read_text()) + numpy_pypi = parsed_manifest["project"]["dependencies"][0] + assert numpy_pypi == "numpy==1.*" + numpy_conda = parsed_manifest["tool"]["pixi"]["dependencies"]["numpy"] + assert numpy_conda == "1.*" + + # Upgrade numpy, both conda and pypi should be upgraded + verify_cli_command( + [pixi, "upgrade", "--manifest-path", manifest_path, "numpy"], + stderr_contains=["numpy", "1."], + ) + parsed_manifest = tomllib.loads(manifest_path.read_text()) + numpy_pypi = parsed_manifest["project"]["dependencies"][0] + assert "1.*" not in numpy_pypi + numpy_conda = parsed_manifest["tool"]["pixi"]["dependencies"]["numpy"] + assert numpy_conda != "1.*" + + +@pytest.mark.slow +def test_upgrade_dependency_location_pixi(pixi: Path, tmp_path: Path) -> None: + # Test based on https://github.com/prefix-dev/pixi/issues/2470 + # Making sure pixi places the upgraded package in the correct location + manifest_path = tmp_path / "pyproject.toml" + pyproject = f""" +[project] +name = "test-upgrade" +dependencies = ["numpy==1.*"] +requires-python = "==3.13" + +[project.optional-dependencies] +cli = ["rich==12"] + 
+[dependency-groups]
+test = ["pytest==6"]
+
+[tool.pixi.project]
+channels = ["https://prefix.dev/conda-forge"]
+platforms = ["{CURRENT_PLATFORM}"]
+
+[tool.pixi.pypi-dependencies]
+polars = "==0.*"
+
+[tool.pixi.environments]
+test = ["test"]
+    """
+
+    manifest_path.write_text(pyproject)
+
+    # Upgrade numpy, both conda and pypi should be upgraded
+    verify_cli_command(
+        [pixi, "upgrade", "--manifest-path", manifest_path],
+        stderr_contains=["polars"],
+    )
+    parsed_manifest = tomllib.loads(manifest_path.read_text())
+
+    # Check that `requires-python` is the same
+    assert parsed_manifest["project"]["requires-python"] == "==3.13"
+
+    # Check that `tool.pixi.dependencies.python` isn't added
+    assert "python" not in parsed_manifest.get("tool", {}).get("pixi", {}).get("dependencies", {})
+
+    # Check that project.dependencies are upgraded
+    project_dependencies = parsed_manifest["project"]["dependencies"]
+    numpy_pypi = project_dependencies[0]
+    assert "numpy" in numpy_pypi
+    assert "==1.*" not in numpy_pypi
+    assert "polars" not in project_dependencies
+
+    # Check that the pypi-dependencies are upgraded
+    pypi_dependencies = parsed_manifest["tool"]["pixi"]["pypi-dependencies"]
+    polars_pypi = pypi_dependencies["polars"]
+    assert polars_pypi != "==0.*"
+    assert "numpy" not in pypi_dependencies
+
+
+def test_upgrade_keep_info(
+    pixi: Path, tmp_pixi_workspace: Path, multiple_versions_channel_1: str
+) -> None:
+    manifest_path = tmp_pixi_workspace / "pixi.toml"
+
+    # Create a new project
+    verify_cli_command([pixi, "init", "--channel", multiple_versions_channel_1, tmp_pixi_workspace])
+
+    # Add package pinned to version 0.1.0
+    verify_cli_command(
+        [
+            pixi,
+            "add",
+            "--manifest-path",
+            manifest_path,
+            f"{multiple_versions_channel_1}::package3==0.1.0=ab*",
+        ]
+    )
+    parsed_manifest = tomllib.loads(manifest_path.read_text())
+    assert "==0.1.0" in parsed_manifest["dependencies"]["package3"]["version"]
+    assert "ab*" in parsed_manifest["dependencies"]["package3"]["build"]
+    assert multiple_versions_channel_1 in parsed_manifest["dependencies"]["package3"]["channel"]
+
+    # Upgrade all, it should now be at 0.2.0, with the build intact
+    verify_cli_command(
+        [pixi, "upgrade", "--manifest-path", manifest_path],
+        stderr_contains=["package3", "0.1.0", "0.2.0"],
+    )
+    parsed_manifest = tomllib.loads(manifest_path.read_text())
+    # Update version
+    assert parsed_manifest["dependencies"]["package3"]["version"] == ">=0.2.0,<0.3"
+    # Keep build
+    assert "ab*" in parsed_manifest["dependencies"]["package3"]["build"]
+    # Keep channel
+    assert multiple_versions_channel_1 in parsed_manifest["dependencies"]["package3"]["channel"]
+
+    # Upgrade package3, it should now be at 0.2.0, with the build intact because it has a wildcard
+    verify_cli_command(
+        [pixi, "upgrade", "--manifest-path", manifest_path, "package3"],
+    )
+    parsed_manifest = tomllib.loads(manifest_path.read_text())
+    # Update version
+    assert parsed_manifest["dependencies"]["package3"]["version"] == ">=0.2.0,<0.3"
+    # Keep build
+    assert "ab*" in parsed_manifest["dependencies"]["package3"]["build"]
+    # Keep channel
+    assert multiple_versions_channel_1 in parsed_manifest["dependencies"]["package3"]["channel"]
+
+
+def test_upgrade_remove_info(
+    pixi: Path, tmp_pixi_workspace: Path, multiple_versions_channel_1: str
+) -> None:
+    manifest_path = tmp_pixi_workspace / "pixi.toml"
+
+    # Create a new project
+    verify_cli_command([pixi, "init", "--channel", multiple_versions_channel_1, tmp_pixi_workspace])
+
+    # Add package pinned to version 0.1.0
+    
verify_cli_command( + [ + pixi, + "add", + "--manifest-path", + manifest_path, + f"{multiple_versions_channel_1}::package3==0.1.0=abc", + ] + ) + parsed_manifest = tomllib.loads(manifest_path.read_text()) + assert "==0.1.0" in parsed_manifest["dependencies"]["package3"]["version"] + assert "abc" in parsed_manifest["dependencies"]["package3"]["build"] + assert multiple_versions_channel_1 in parsed_manifest["dependencies"]["package3"]["channel"] + + # Upgrade package3, it should now be at 0.2.0, without the build but with the channel + verify_cli_command( + [pixi, "upgrade", "--manifest-path", manifest_path, "package3"], + ) + parsed_manifest = tomllib.loads(manifest_path.read_text()) + # Update version + assert parsed_manifest["dependencies"]["package3"]["version"] == ">=0.2.0,<0.3" + # Keep channel + assert multiple_versions_channel_1 in parsed_manifest["dependencies"]["package3"]["channel"] + # Remove build + assert "build" not in parsed_manifest["dependencies"]["package3"] + + +def test_upgrade_features( + pixi: Path, tmp_pixi_workspace: Path, multiple_versions_channel_1: str +) -> None: + manifest_path = tmp_pixi_workspace / "pixi.toml" + + # Create a new project + verify_cli_command([pixi, "init", "--channel", multiple_versions_channel_1, tmp_pixi_workspace]) + + # Add package3 pinned to version 0.1.0 to feature "foo" + verify_cli_command( + [ + pixi, + "add", + "--manifest-path", + manifest_path, + "--feature", + "foo", + f"package3==0.1.0[channel={multiple_versions_channel_1}]", + ] + ) + parsed_manifest = tomllib.loads(manifest_path.read_text()) + package3 = parsed_manifest["feature"]["foo"]["dependencies"]["package3"] + assert package3["version"] == "==0.1.0" + assert package3["channel"] == multiple_versions_channel_1 + + # Add package2 pinned to version 0.1.0 to feature "bar" + verify_cli_command( + [ + pixi, + "add", + "--manifest-path", + manifest_path, + "--feature", + "bar", + f"package2==0.1.0[channel={multiple_versions_channel_1}]", + ] + ) + parsed_manifest = tomllib.loads(manifest_path.read_text()) + package2 = parsed_manifest["feature"]["bar"]["dependencies"]["package2"] + assert package2["version"] == "==0.1.0" + assert package2["channel"] == multiple_versions_channel_1 + + # Add package pinned to version 0.1.0 to default feature + verify_cli_command( + [ + pixi, + "add", + "--manifest-path", + manifest_path, + f"package==0.1.0[channel={multiple_versions_channel_1}]", + ] + ) + parsed_manifest = tomllib.loads(manifest_path.read_text()) + package = parsed_manifest["dependencies"]["package"] + assert package["version"] == "==0.1.0" + assert package["channel"] == multiple_versions_channel_1 + + # make features used + verify_cli_command( + [ + pixi, + "workspace", + "environment", + "add", + "--manifest-path", + manifest_path, + "--force", + "default", + "--feature=foo", + "--feature=bar", + ] + ) + + # lock before upgrades + verify_cli_command( + [ + pixi, + "lock", + "--manifest-path", + manifest_path, + ] + ) + + # Upgrading with `--feature=default` should only upgrade the package in the default feature + verify_cli_command( + [pixi, "upgrade", "--manifest-path", manifest_path, "--feature=default"], + stderr_excludes=["package3", "package2"], + stderr_contains=["package", "0.1.0", "0.2.0"], + ) + parsed_manifest = tomllib.loads(manifest_path.read_text()) + package3 = parsed_manifest["feature"]["foo"]["dependencies"]["package3"] + package2 = parsed_manifest["feature"]["bar"]["dependencies"]["package2"] + package = parsed_manifest["dependencies"]["package"] + assert 
package3["version"] == package2["version"] == "==0.1.0" + assert package["version"] == ">=0.2.0,<0.3" + + # Upgrading with `--feature=foo` should not upgrade the package in feature "bar" + verify_cli_command( + [pixi, "upgrade", "--manifest-path", manifest_path, "--feature=foo"], + stderr_excludes=["package2"], + stderr_contains=["package3", "0.1.0", "0.2.0"], + ) + parsed_manifest = tomllib.loads(manifest_path.read_text()) + package3 = parsed_manifest["feature"]["foo"]["dependencies"]["package3"] + package2 = parsed_manifest["feature"]["bar"]["dependencies"]["package2"] + assert package2["version"] == "==0.1.0" + assert package3["version"] == ">=0.2.0,<0.3" + + # Upgrading with no specified feature should upgrade all features (hence "package2" in feature "bar") + verify_cli_command( + [pixi, "upgrade", "--manifest-path", manifest_path], + stderr_contains=["package2", "0.1.0", "0.2.0"], + ) + parsed_manifest = tomllib.loads(manifest_path.read_text()) + package2 = parsed_manifest["feature"]["bar"]["dependencies"]["package2"] + assert package2["version"] == ">=0.2.0,<0.3"
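
As a quick reference, the feature-scoping behaviour pinned down by `test_upgrade_features` above corresponds to these invocations (a sketch based on the test's fixture packages; real package names will differ):

```shell
pixi upgrade --feature=default   # only the default feature's dependency ("package") is upgraded
pixi upgrade --feature=foo       # only feature "foo" ("package3") is upgraded
pixi upgrade                     # all features are upgraded, including "bar" ("package2")
```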