diff --git a/.github/actionlint.yml b/.github/actionlint.yml index 4aea2a42bb..43b0b27690 100644 --- a/.github/actionlint.yml +++ b/.github/actionlint.yml @@ -1,5 +1,6 @@ self-hosted-runner: labels: + - "codspeed-macro" - 8core_ubuntu_latest_runner - 16core_windows_latest_runner - windows_arm64_2025_large diff --git a/.github/workflows/benchmark.yml b/.github/workflows/benchmark.yml new file mode 100644 index 0000000000..b02c018f6f --- /dev/null +++ b/.github/workflows/benchmark.yml @@ -0,0 +1,44 @@ +name: CodSpeed + +on: + push: + branches: + - "feature/codspeed" + pull_request: + types: [opened, synchronize, closed] + branches: + - main + - "feature/codspeed" + +jobs: + benchmarks: + name: Run benchmarks + runs-on: codspeed-macro + env: + RUST_BACKTRACE: full + steps: + - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 + + - name: Install system dependencies + run: | + sudo apt-get update + sudo apt-get install -y tar bzip2 + + - name: Setup rust toolchain, cache and cargo-codspeed binary + uses: moonrepo/setup-rust@e013866c4215f77c925f42f60257dec7dd18836e + with: + channel: stable + cache-target: release + bins: cargo-codspeed + + - name: Build the benchmark target(s) + run: cargo codspeed build -p pixi_bench + + - name: Run the benchmarks + uses: CodSpeedHQ/action@cc824aeb2c86848c39cf722ab4c2b6c5bf290530 + with: + run: | + export PATH="$HOME/.cargo/bin:$PATH" + cargo codspeed run -p pixi_bench + mode: walltime + token: ${{ secrets.CODSPEED_TOKEN }} diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 334c44bddb..da1cb679b0 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -107,7 +107,7 @@ jobs: with: save-if: ${{ github.ref == 'refs/heads/main' }} - run: | - for package in $(cargo metadata --no-deps --format-version=1 | jq -r '.packages[] | .name'); do + for package in $(cargo metadata --no-deps --format-version=1 | jq -r '.packages[] | select(.name != "pixi_bench") | .name'); do cargo rustdoc -p "$package" 
--all-features -- -D warnings -W unreachable-pub done diff --git a/Cargo.lock b/Cargo.lock index abc03416fc..aea3aeae6c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -65,6 +65,12 @@ dependencies = [ "libc", ] +[[package]] +name = "anes" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4b46cbb362ab8752921c97e041f5e366ee6297bd428a31275b9fcf1e380f7299" + [[package]] name = "anstream" version = "0.6.20" @@ -121,6 +127,15 @@ version = "1.0.99" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b0674a1ddeecb70197781e945de4b3b8ffb61fa939a5597bcf48503737663100" +[[package]] +name = "approx" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cab112f0a86d568ea0e627cc1d6be74a1e9cd55214684db5561995f6dad897c6" +dependencies = [ + "num-traits", +] + [[package]] name = "arbitrary" version = "1.4.2" @@ -1223,6 +1238,12 @@ dependencies = [ "windows-sys 0.59.0", ] +[[package]] +name = "cast" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5" + [[package]] name = "cbc" version = "0.1.2" @@ -1286,6 +1307,33 @@ dependencies = [ "windows-link 0.2.0", ] +[[package]] +name = "ciborium" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42e69ffd6f0917f5c029256a24d0161db17cea3997d185db0d35926308770f0e" +dependencies = [ + "ciborium-io", + "ciborium-ll", + "serde", +] + +[[package]] +name = "ciborium-io" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05afea1e0a06c9be33d539b876f1ce3692f4afea2cb41f740e7743225ed1c757" + +[[package]] +name = "ciborium-ll" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57663b653d948a338bfb3eeba9bb2fd5fcfaecb9e199e87e1eda4d9e8b240fd9" +dependencies = [ + "ciborium-io", + "half", +] + 
[[package]] name = "cipher" version = "0.4.4" @@ -1366,12 +1414,82 @@ dependencies = [ "tokio", ] +[[package]] +name = "codspeed" +version = "3.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "35584c5fcba8059780748866387fb97c5a203bcfc563fc3d0790af406727a117" +dependencies = [ + "anyhow", + "bincode", + "colored", + "glob", + "libc", + "nix 0.29.0", + "serde", + "serde_json", + "statrs", + "uuid", +] + +[[package]] +name = "codspeed-criterion-compat" +version = "3.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78f6c1c6bed5fd84d319e8b0889da051daa361c79b7709c9394dfe1a882bba67" +dependencies = [ + "codspeed", + "codspeed-criterion-compat-walltime", + "colored", + "futures", + "tokio", +] + +[[package]] +name = "codspeed-criterion-compat-walltime" +version = "3.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c989289ce6b1cbde72ed560496cb8fbf5aa14d5ef5666f168e7f87751038352e" +dependencies = [ + "anes", + "cast", + "ciborium", + "clap", + "codspeed", + "criterion-plot", + "futures", + "is-terminal", + "itertools 0.10.5", + "num-traits", + "once_cell", + "oorandom", + "plotters", + "rayon", + "regex", + "serde", + "serde_derive", + "serde_json", + "tinytemplate", + "tokio", + "walkdir", +] + [[package]] name = "colorchoice" version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b05b61dc5112cbb17e4b6cd61790d9845d13888356391624cbe7e41efeac1e75" +[[package]] +name = "colored" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "117725a109d387c937a1533ce01b450cbde6b88abceea8473c4d7a85853cda3c" +dependencies = [ + "lazy_static", + "windows-sys 0.59.0", +] + [[package]] name = "combine" version = "4.6.7" @@ -1540,6 +1658,16 @@ dependencies = [ "cfg-if", ] +[[package]] +name = "criterion-plot" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"6b50826342786a51a89e2da3a28f1c32b06e387201bc2d19791f622c673706b1" +dependencies = [ + "cast", + "itertools 0.10.5", +] + [[package]] name = "crossbeam-channel" version = "0.5.15" @@ -1574,6 +1702,12 @@ version = "0.8.21" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" +[[package]] +name = "crunchy" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "460fbee9c2c2f33933d720630a6a0bac33ba7053db5344fac858d4b8952d77d5" + [[package]] name = "crypto-bigint" version = "0.4.9" @@ -2257,6 +2391,15 @@ dependencies = [ "percent-encoding", ] +[[package]] +name = "fs-err" +version = "2.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "88a41f105fe1d5b6b34b2055e3dc59bb79b46b48b2040b9e6c7b4b5de097aa41" +dependencies = [ + "autocfg", +] + [[package]] name = "fs-err" version = "3.1.1" @@ -2283,7 +2426,7 @@ version = "0.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8640e34b88f7652208ce9e88b1a37a2ae95227d84abec377ccd3c5cfeb141ed4" dependencies = [ - "fs-err", + "fs-err 3.1.1", "rustix 1.1.2", "tokio", "windows-sys 0.59.0", @@ -2685,6 +2828,16 @@ dependencies = [ "tracing", ] +[[package]] +name = "half" +version = "2.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "459196ed295495a68f7d7fe1d84f6c4b7ff0e21fe3017b2f283c6fac3ad803c9" +dependencies = [ + "cfg-if", + "crunchy", +] + [[package]] name = "halfbrown" version = "0.3.0" @@ -3324,6 +3477,17 @@ dependencies = [ "serde", ] +[[package]] +name = "is-terminal" +version = "0.4.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e04d7f318608d35d4b61ddd75cbdaee86b023ebe2bd5a66ee0915f0bf93095a9" +dependencies = [ + "hermit-abi", + "libc", + "windows-sys 0.59.0", +] + [[package]] name = "is_ci" version = "1.2.0" @@ -3345,6 +3509,15 @@ version = "1.70.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf" +[[package]] +name = "itertools" +version = "0.10.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" +dependencies = [ + "either", +] + [[package]] name = "itertools" version = "0.12.1" @@ -4259,6 +4432,12 @@ version = "1.70.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a4895175b425cb1f87721b59f0f286c2092bd4af812243672510e1ac53e2e0ad" +[[package]] +name = "oorandom" +version = "11.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d6790f58c7ff633d8771f42965289203411a5e5c68388703c06e14f24770b41e" + [[package]] name = "openssl" version = "0.10.73" @@ -4442,7 +4621,7 @@ version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "67957e099f5b76f4cad98e0d41dd53746ab3a2debc1da12ee5b1cccb1d7b8dc8" dependencies = [ - "fs-err", + "fs-err 3.1.1", "fxhash", "indexmap 2.11.1", "itertools 0.14.0", @@ -4605,7 +4784,7 @@ dependencies = [ "chrono", "dunce", "fake", - "fs-err", + "fs-err 3.1.1", "fs_extra", "futures", "http 1.3.1", @@ -4669,7 +4848,7 @@ dependencies = [ "console 0.15.11", "dunce", "fancy_display", - "fs-err", + "fs-err 3.1.1", "itertools 0.14.0", "miette 7.6.0", "minijinja", @@ -4688,6 +4867,30 @@ dependencies = [ "uv-normalize", ] +[[package]] +name = "pixi_bench" +version = "0.1.0" +dependencies = [ + "clap", + "codspeed-criterion-compat", + "fs-err 2.11.0", + "miette 7.6.0", + "once_cell", + "pixi_cli", + "pixi_config", + "pixi_core", + "pixi_global", + "pixi_manifest", + "pixi_spec", + "rattler_conda_types", + "reqwest", + "serde", + "serde_json", + "tempfile", + "tokio", + "uuid", +] + [[package]] name = "pixi_build_discovery" version = "0.1.0" @@ -4715,7 +4918,7 @@ dependencies = [ name = "pixi_build_frontend" version = "0.1.0" 
dependencies = [ - "fs-err", + "fs-err 3.1.1", "futures", "jsonrpsee", "miette 7.6.0", @@ -4778,7 +4981,7 @@ dependencies = [ "dunce", "fancy_display", "flate2", - "fs-err", + "fs-err 3.1.1", "futures", "human_bytes", "indexmap 2.11.1", @@ -4863,7 +5066,7 @@ dependencies = [ "derive_more", "dirs", "dunce", - "fs-err", + "fs-err 3.1.1", "futures", "indexmap 2.11.1", "insta", @@ -4919,7 +5122,7 @@ dependencies = [ "clap", "console 0.15.11", "dirs", - "fs-err", + "fs-err 3.1.1", "insta", "itertools 0.14.0", "miette 7.6.0", @@ -4964,7 +5167,7 @@ dependencies = [ "dunce", "fake", "fancy_display", - "fs-err", + "fs-err 3.1.1", "fs_extra", "futures", "http 1.3.1", @@ -5062,7 +5265,7 @@ name = "pixi_docs" version = "0.1.0" dependencies = [ "clap", - "fs-err", + "fs-err 3.1.1", "itertools 0.14.0", "pixi_cli", "rattler_conda_types", @@ -5074,7 +5277,7 @@ version = "0.0.1" dependencies = [ "dashmap", "dunce", - "fs-err", + "fs-err 3.1.1", "pixi_utils", "rattler_networking", "reqwest", @@ -5093,7 +5296,7 @@ name = "pixi_glob" version = "0.1.0" dependencies = [ "dashmap", - "fs-err", + "fs-err 3.1.1", "ignore", "insta", "itertools 0.14.0", @@ -5118,7 +5321,7 @@ dependencies = [ "dunce", "fake", "fancy_display", - "fs-err", + "fs-err 3.1.1", "futures", "indexmap 2.11.1", "indicatif", @@ -5174,7 +5377,7 @@ dependencies = [ "console 0.15.11", "csv", "fancy_display", - "fs-err", + "fs-err 3.1.1", "insta", "itertools 0.14.0", "miette 7.6.0", @@ -5235,7 +5438,7 @@ dependencies = [ "console 0.15.11", "dunce", "fancy_display", - "fs-err", + "fs-err 3.1.1", "glob", "indexmap 2.11.1", "insta", @@ -5425,7 +5628,7 @@ dependencies = [ "crossbeam-channel", "deno_task_shell", "fancy_display", - "fs-err", + "fs-err 3.1.1", "itertools 0.14.0", "miette 7.6.0", "pixi_consts", @@ -5475,7 +5678,7 @@ name = "pixi_utils" version = "0.1.0" dependencies = [ "async-fd-lock", - "fs-err", + "fs-err 3.1.1", "indicatif", "insta", "is_executable", @@ -5508,7 +5711,7 @@ dependencies = [ name = 
"pixi_uv_context" version = "0.1.0" dependencies = [ - "fs-err", + "fs-err 3.1.1", "miette 7.6.0", "pixi_config", "pixi_consts", @@ -5595,6 +5798,34 @@ dependencies = [ "time", ] +[[package]] +name = "plotters" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5aeb6f403d7a4911efb1e33402027fc44f29b5bf6def3effcc22d7bb75f2b747" +dependencies = [ + "num-traits", + "plotters-backend", + "plotters-svg", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "plotters-backend" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df42e13c12958a16b3f7f4386b9ab1f3e7933914ecea48da7139435263a4172a" + +[[package]] +name = "plotters-svg" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "51bae2ac328883f7acdfea3d66a7c35751187f870bc81f94563733a154d7a670" +dependencies = [ + "plotters-backend", +] + [[package]] name = "polling" version = "3.10.0" @@ -5813,7 +6044,7 @@ version = "0.1.0" dependencies = [ "async-once-cell", "dashmap", - "fs-err", + "fs-err 3.1.1", "futures", "http-cache-reqwest", "itertools 0.14.0", @@ -6060,7 +6291,7 @@ dependencies = [ "console 0.16.1", "digest", "dirs", - "fs-err", + "fs-err 3.1.1", "futures", "humantime", "indexmap 2.11.1", @@ -6104,7 +6335,7 @@ dependencies = [ "dashmap", "digest", "dirs", - "fs-err", + "fs-err 3.1.1", "fs4", "futures", "fxhash", @@ -6137,7 +6368,7 @@ dependencies = [ "core-foundation 0.10.1", "dirs", "file_url", - "fs-err", + "fs-err 3.1.1", "fxhash", "glob", "hex", @@ -6232,7 +6463,7 @@ dependencies = [ "chrono", "configparser", "dirs", - "fs-err", + "fs-err 3.1.1", "known-folders", "once_cell", "plist", @@ -6266,7 +6497,7 @@ dependencies = [ "aws-sdk-s3", "base64 0.22.1", "dirs", - "fs-err", + "fs-err 3.1.1", "getrandom 0.3.3", "google-cloud-auth", "http 1.3.1", @@ -6292,7 +6523,7 @@ checksum = "ba85366951a5ec70f7d355d6f7d7fe9c2391134824031ebf11d75a05a344ca22" dependencies = [ "bzip2 
0.6.0", "chrono", - "fs-err", + "fs-err 3.1.1", "futures-util", "num_cpus", "rattler_conda_types", @@ -6355,7 +6586,7 @@ dependencies = [ "dashmap", "dirs", "file_url", - "fs-err", + "fs-err 3.1.1", "futures", "hex", "http 1.3.1", @@ -6404,7 +6635,7 @@ checksum = "e823fb6cec3bd53cf22d1a02fd9bb2bdf1c6ef7e55bb9354f9ada704d3dea056" dependencies = [ "anyhow", "enum_dispatch", - "fs-err", + "fs-err 3.1.1", "indexmap 2.11.1", "itertools 0.14.0", "rattler_conda_types", @@ -7646,6 +7877,16 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" +[[package]] +name = "statrs" +version = "0.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a3fe7c28c6512e766b0874335db33c94ad7b8f9054228ae1c2abd47ce7d335e" +dependencies = [ + "approx", + "num-traits", +] + [[package]] name = "strsim" version = "0.11.1" @@ -8010,6 +8251,16 @@ dependencies = [ "zerovec", ] +[[package]] +name = "tinytemplate" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be4d6b5f19ff7664e8c98d03e2139cb510db9b0a60b55f8e8709b689d939b6bc" +dependencies = [ + "serde", + "serde_json", +] + [[package]] name = "tinyvec" version = "1.10.0" @@ -8041,6 +8292,7 @@ dependencies = [ "io-uring", "libc", "mio", + "parking_lot", "pin-project-lite", "signal-hook-registry", "slab", @@ -8560,7 +8812,7 @@ source = "git+https://github.com/astral-sh/uv?tag=0.8.5#ce37286814dbb802c422f092 dependencies = [ "csv", "flate2", - "fs-err", + "fs-err 3.1.1", "globset", "itertools 0.14.0", "rustc-hash", @@ -8595,7 +8847,7 @@ version = "0.0.1" source = "git+https://github.com/astral-sh/uv?tag=0.8.5#ce37286814dbb802c422f0926487cfab7aefd2b7" dependencies = [ "anstream", - "fs-err", + "fs-err 3.1.1", "indoc", "itertools 0.14.0", "owo-colors", @@ -8629,7 +8881,7 @@ name = "uv-cache" version = "0.0.1" source = 
"git+https://github.com/astral-sh/uv?tag=0.8.5#ce37286814dbb802c422f0926487cfab7aefd2b7" dependencies = [ - "fs-err", + "fs-err 3.1.1", "nanoid", "rmp-serde", "rustc-hash", @@ -8654,7 +8906,7 @@ name = "uv-cache-info" version = "0.0.1" source = "git+https://github.com/astral-sh/uv?tag=0.8.5#ce37286814dbb802c422f0926487cfab7aefd2b7" dependencies = [ - "fs-err", + "fs-err 3.1.1", "globwalk", "serde", "thiserror 2.0.16", @@ -8686,7 +8938,7 @@ dependencies = [ "async_http_range_reader", "async_zip", "bytecheck", - "fs-err", + "fs-err 3.1.1", "futures", "html-escape", "http 1.3.1", @@ -8736,7 +8988,7 @@ source = "git+https://github.com/astral-sh/uv?tag=0.8.5#ce37286814dbb802c422f092 dependencies = [ "bitflags", "either", - "fs-err", + "fs-err 3.1.1", "rayon", "rustc-hash", "same-file", @@ -8774,7 +9026,7 @@ version = "0.0.1" source = "git+https://github.com/astral-sh/uv?tag=0.8.5#ce37286814dbb802c422f0926487cfab7aefd2b7" dependencies = [ "etcetera", - "fs-err", + "fs-err 3.1.1", "tracing", "uv-static", ] @@ -8818,7 +9070,7 @@ source = "git+https://github.com/astral-sh/uv?tag=0.8.5#ce37286814dbb802c422f092 dependencies = [ "anyhow", "either", - "fs-err", + "fs-err 3.1.1", "futures", "nanoid", "owo-colors", @@ -8882,7 +9134,7 @@ source = "git+https://github.com/astral-sh/uv?tag=0.8.5#ce37286814dbb802c422f092 dependencies = [ "arcstr", "bitflags", - "fs-err", + "fs-err 3.1.1", "http 1.3.1", "itertools 0.14.0", "jiff", @@ -8922,7 +9174,7 @@ dependencies = [ "async-compression", "async_zip", "blake2", - "fs-err", + "fs-err 3.1.1", "futures", "md-5", "rayon", @@ -8949,7 +9201,7 @@ dependencies = [ "dunce", "either", "encoding_rs_io", - "fs-err", + "fs-err 3.1.1", "fs2", "junction", "path-slash", @@ -8973,7 +9225,7 @@ dependencies = [ "anyhow", "cargo-util", "dashmap", - "fs-err", + "fs-err 3.1.1", "reqwest", "reqwest-middleware", "thiserror 2.0.16", @@ -9024,7 +9276,7 @@ dependencies = [ "configparser", "csv", "data-encoding", - "fs-err", + "fs-err 3.1.1", "mailparse", 
"pathdiff", "reflink-copy", @@ -9057,7 +9309,7 @@ source = "git+https://github.com/astral-sh/uv?tag=0.8.5#ce37286814dbb802c422f092 dependencies = [ "anyhow", "async-channel", - "fs-err", + "fs-err 3.1.1", "futures", "owo-colors", "rayon", @@ -9108,7 +9360,7 @@ version = "0.1.0" source = "git+https://github.com/astral-sh/uv?tag=0.8.5#ce37286814dbb802c422f0926487cfab7aefd2b7" dependencies = [ "async_zip", - "fs-err", + "fs-err 3.1.1", "futures", "thiserror 2.0.16", "tokio", @@ -9193,7 +9445,7 @@ name = "uv-platform" version = "0.0.1" source = "git+https://github.com/astral-sh/uv?tag=0.8.5#ce37286814dbb802c422f0926487cfab7aefd2b7" dependencies = [ - "fs-err", + "fs-err 3.1.1", "goblin", "procfs", "regex", @@ -9257,7 +9509,7 @@ dependencies = [ "anyhow", "configparser", "dunce", - "fs-err", + "fs-err 3.1.1", "futures", "indexmap 2.11.1", "itertools 0.14.0", @@ -9325,7 +9577,7 @@ dependencies = [ "anyhow", "configparser", "console 0.16.1", - "fs-err", + "fs-err 3.1.1", "futures", "rustc-hash", "serde", @@ -9358,7 +9610,7 @@ name = "uv-requirements-txt" version = "0.0.1" source = "git+https://github.com/astral-sh/uv?tag=0.8.5#ce37286814dbb802c422f0926487cfab7aefd2b7" dependencies = [ - "fs-err", + "fs-err 3.1.1", "memchr", "reqwest", "reqwest-middleware", @@ -9466,7 +9718,7 @@ name = "uv-state" version = "0.0.1" source = "git+https://github.com/astral-sh/uv?tag=0.8.5#ce37286814dbb802c422f0926487cfab7aefd2b7" dependencies = [ - "fs-err", + "fs-err 3.1.1", "tempfile", "uv-dirs", ] @@ -9485,7 +9737,7 @@ version = "0.1.0" source = "git+https://github.com/astral-sh/uv?tag=0.8.5#ce37286814dbb802c422f0926487cfab7aefd2b7" dependencies = [ "either", - "fs-err", + "fs-err 3.1.1", "serde", "thiserror 2.0.16", "tracing", @@ -9502,7 +9754,7 @@ name = "uv-trampoline-builder" version = "0.0.1" source = "git+https://github.com/astral-sh/uv?tag=0.8.5#ce37286814dbb802c422f0926487cfab7aefd2b7" dependencies = [ - "fs-err", + "fs-err 3.1.1", "thiserror 2.0.16", "uv-fs", "zip 2.4.2", @@ 
-9543,7 +9795,7 @@ version = "0.0.4" source = "git+https://github.com/astral-sh/uv?tag=0.8.5#ce37286814dbb802c422f0926487cfab7aefd2b7" dependencies = [ "console 0.16.1", - "fs-err", + "fs-err 3.1.1", "itertools 0.14.0", "owo-colors", "pathdiff", @@ -9575,7 +9827,7 @@ name = "uv-workspace" version = "0.0.1" source = "git+https://github.com/astral-sh/uv?tag=0.8.5#ce37286814dbb802c422f0926487cfab7aefd2b7" dependencies = [ - "fs-err", + "fs-err 3.1.1", "glob", "itertools 0.14.0", "owo-colors", diff --git a/crates/pixi_bench/Cargo.toml b/crates/pixi_bench/Cargo.toml new file mode 100644 index 0000000000..ce0d708548 --- /dev/null +++ b/crates/pixi_bench/Cargo.toml @@ -0,0 +1,59 @@ +[package] +edition = "2021" +license = "MIT OR Apache-2.0" +name = "pixi_bench" +publish = false +version = "0.1.0" + +[dev-dependencies] +clap = { workspace = true } +criterion = { version = "3.0.5", package = "codspeed-criterion-compat", features = [ + "async", + "async_futures", + "async_tokio", +] } +fs-err = "2.11.0" +miette = { workspace = true, features = ["fancy-no-backtrace"] } +once_cell = "1.19" +reqwest = { workspace = true } +serde = { version = "1.0", features = ["derive"] } +serde_json = "1.0" +tempfile = "3.0" +tokio = { version = "1.0", features = ["full"] } +uuid = { version = "1.0", features = ["v4"] } + +# Pixi crates for direct API usage +pixi_cli = { path = "../pixi_cli" } +pixi_config = { path = "../pixi_config" } +pixi_core = { path = "../pixi_core" } +pixi_global = { path = "../pixi_global" } +pixi_manifest = { path = "../pixi_manifest" } +pixi_spec = { path = "../pixi_spec" } + +# Rattler crates +rattler_conda_types = { workspace = true } + +[[bench]] +harness = false +name = "cold_warm_install" + +[[bench]] +harness = false +name = "lock_install" + + +[[bench]] +harness = false +name = "task_run" + + +[[bench]] +harness = false +name = "global_install" + +[[bench]] +harness = false +name = "clean" + +[profile.release] +debug = true diff --git 
a/crates/pixi_bench/benches/clean.rs b/crates/pixi_bench/benches/clean.rs new file mode 100644 index 0000000000..6364695f98 --- /dev/null +++ b/crates/pixi_bench/benches/clean.rs @@ -0,0 +1,708 @@ +use clap::Parser; +use criterion::{black_box, criterion_group, criterion_main, Criterion}; +use fs_err as fs; +use once_cell::sync::Lazy; +use std::collections::HashMap; +use std::path::PathBuf; +use std::time::{Duration, Instant}; +use tempfile::TempDir; + +// Pixi crate imports for direct API usage +use pixi_cli::{clean, install}; +use pixi_config::ConfigCli; + +// Single global runtime for all benchmarks +static RUNTIME: Lazy<tokio::runtime::Runtime> = + Lazy::new(|| tokio::runtime::Runtime::new().expect("Failed to create Tokio runtime")); + +/// Create an isolated pixi workspace environment for clean testing +struct IsolatedPixiWorkspace { + _temp_dir: TempDir, + workspace_dir: PathBuf, + cache_dir: PathBuf, +} + +impl IsolatedPixiWorkspace { + /// Create with shared cache directory for warm testing + fn new_with_shared_cache( + shared_cache_dir: &std::path::Path, + ) -> Result<Self, Box<dyn std::error::Error>> { + let temp_dir = TempDir::new()?; + let workspace_dir = temp_dir.path().join("workspace"); + + fs::create_dir_all(&workspace_dir)?; + + Ok(Self { + _temp_dir: temp_dir, + workspace_dir, + cache_dir: shared_cache_dir.to_path_buf(), + }) + } + + fn get_env_vars(&self) -> HashMap<String, String> { + let mut env_vars = HashMap::new(); + env_vars.insert( + "PIXI_CACHE_DIR".to_string(), + self.cache_dir.to_string_lossy().to_string(), + ); + env_vars.insert( + "XDG_CACHE_HOME".to_string(), + self.cache_dir.to_string_lossy().to_string(), + ); + env_vars + } + + /// Ensure local channel exists, create it dynamically if missing (for CI robustness) + fn ensure_local_channel_exists( + &self, + local_channel_dir: &std::path::Path, + packages: &[&str], + ) -> Result<(), Box<dyn std::error::Error>> { + let noarch_dir = local_channel_dir.join("noarch"); + + // If the channel already exists, we're good + if noarch_dir.exists() && 
noarch_dir.join("repodata.json").exists() { + return Ok(()); + } + + println!("🔧 Creating local conda channel for CI environment..."); + + // Create the directory structure + fs::create_dir_all(&noarch_dir)?; + + // Create repodata.json + self.create_repodata_json(&noarch_dir, packages)?; + + // Create minimal conda packages + self.create_conda_packages(&noarch_dir, packages)?; + + println!("✅ Local conda channel created successfully"); + Ok(()) + } + + /// Create repodata.json for the local channel + fn create_repodata_json( + &self, + noarch_dir: &std::path::Path, + packages: &[&str], + ) -> Result<(), Box<dyn std::error::Error>> { + use std::fs::File; + use std::io::Write; + + let mut repodata = serde_json::json!({ + "info": { + "subdir": "noarch" + }, + "packages": {}, + "packages.conda": {}, + "removed": [], + "repodata_version": 1 + }); + + // Add each package to the repodata + for package in packages { + let package_filename = format!("{}-1.0.0-py_0.tar.bz2", package); + repodata["packages"][&package_filename] = serde_json::json!({ + "build": "py_0", + "build_number": 0, + "depends": [], + "license": "MIT", + "name": package, + "platform": null, + "subdir": "noarch", + "timestamp": 1640995200000i64, + "version": "1.0.0" + }); + } + + let mut file = File::create(noarch_dir.join("repodata.json"))?; + file.write_all(serde_json::to_string_pretty(&repodata)?.as_bytes())?; + Ok(()) + } + + /// Create minimal conda packages + fn create_conda_packages( + &self, + noarch_dir: &std::path::Path, + packages: &[&str], + ) -> Result<(), Box<dyn std::error::Error>> { + use std::fs::File; + use std::io::Write; + use std::process::Command as StdCommand; + + for package in packages { + let package_filename = format!("{}-1.0.0-py_0.tar.bz2", package); + let package_path = noarch_dir.join(&package_filename); + + // Create a temporary directory for package contents + let temp_dir = tempfile::TempDir::new()?; + let info_dir = temp_dir.path().join("info"); + fs::create_dir_all(&info_dir)?; + + // Create index.json + let 
index_data = serde_json::json!({ + "name": package, + "version": "1.0.0", + "build": "py_0", + "build_number": 0, + "depends": [], + "license": "MIT", + "platform": null, + "subdir": "noarch", + "timestamp": 1640995200000i64 + }); + + let mut index_file = File::create(info_dir.join("index.json"))?; + index_file.write_all(serde_json::to_string_pretty(&index_data)?.as_bytes())?; + + // Create empty files list + File::create(info_dir.join("files"))?.write_all(b"")?; + + // Create paths.json + let paths_data = serde_json::json!({ + "paths": [], + "paths_version": 1 + }); + let mut paths_file = File::create(info_dir.join("paths.json"))?; + paths_file.write_all(serde_json::to_string_pretty(&paths_data)?.as_bytes())?; + + // Create the tar.bz2 package using system tar command + let output = StdCommand::new("tar") + .args([ + "-cjf", + package_path.to_str().unwrap(), + "-C", + temp_dir.path().to_str().unwrap(), + "info", + ]) + .output()?; + + if !output.status.success() { + return Err(format!( + "Failed to create tar.bz2 package for {}: {}", + package, + String::from_utf8_lossy(&output.stderr) + ) + .into()); + } + } + + Ok(()) + } + + /// Create a basic pixi.toml file with specified dependencies using local channel + fn create_pixi_toml(&self, dependencies: &[&str]) -> Result<(), Box<dyn std::error::Error>> { + let current_dir = std::env::current_dir()?; + let local_channel_dir = if current_dir.ends_with("pixi_bench") { + current_dir.join("my-local-channel") + } else { + current_dir.join("crates/pixi_bench/my-local-channel") + }; + + // Ensure the local channel exists, create it if it doesn't + self.ensure_local_channel_exists(&local_channel_dir, dependencies)?; + + let local_channel_url = format!("file://{}", local_channel_dir.to_string_lossy()); + + let pixi_toml_content = format!( + r#"[project] +name = "test-project" +version = "0.1.0" +description = "Test project for pixi clean benchmarks" +channels = ["{}", "conda-forge"] + +[dependencies] +{} + +[tasks] +test = "echo 'test task'" +"#, 
local_channel_url, + dependencies + .iter() + .map(|dep| format!("{} = \"*\"", dep)) + .collect::<Vec<_>>() + .join("\n") + ); + + let pixi_toml_path = self.workspace_dir.join("pixi.toml"); + fs::write(pixi_toml_path, pixi_toml_content)?; + Ok(()) + } + + /// Create a pixi.toml with multiple environments using local channel + fn create_multi_env_pixi_toml(&self) -> Result<(), Box<dyn std::error::Error>> { + let current_dir = std::env::current_dir()?; + let local_channel_dir = if current_dir.ends_with("pixi_bench") { + current_dir.join("my-local-channel") + } else { + current_dir.join("crates/pixi_bench/my-local-channel") + }; + + // All packages used in multi-environment setup + let all_packages = [ + "python", + "pytest", + "pytest-cov", + "black", + "flake8", + "mypy", + "requests", + "flask", + ]; + + // Ensure the local channel exists, create it if it doesn't + self.ensure_local_channel_exists(&local_channel_dir, &all_packages)?; + + let local_channel_url = format!("file://{}", local_channel_dir.to_string_lossy()); + + let pixi_toml_content = format!( + r#"[project] +name = "multi-env-project" +version = "0.1.0" +description = "Multi-environment test project for pixi clean benchmarks" +channels = ["{}", "conda-forge"] + +[dependencies] +python = "*" + +[environments] +default = {{ solve-group = "default" }} +test = {{ features = ["test"], solve-group = "test" }} +dev = {{ features = ["dev"], solve-group = "dev" }} +prod = {{ features = ["prod"], solve-group = "prod" }} + +[feature.test.dependencies] +pytest = "*" +pytest-cov = "*" + +[feature.dev.dependencies] +black = "*" +flake8 = "*" +mypy = "*" + +[feature.prod.dependencies] +requests = "*" +flask = "*" + +[tasks] +test = "pytest" +lint = "flake8 ." +format = "black ." 
+"#, + local_channel_url + ); + + let pixi_toml_path = self.workspace_dir.join("pixi.toml"); + fs::write(pixi_toml_path, pixi_toml_content)?; + Ok(()) + } + + /// Install dependencies to create environments using pixi API directly + async fn install_dependencies(&self) -> Result<(), Box<dyn std::error::Error>> { + // Set environment variables for pixi + for (key, value) in self.get_env_vars() { + std::env::set_var(key, value); + } + + // Change to workspace directory + let original_dir = std::env::current_dir()?; + std::env::set_current_dir(&self.workspace_dir)?; + + // Create install arguments + let install_args = install::Args { + project_config: pixi_cli::cli_config::WorkspaceConfig::default(), + lock_file_usage: pixi_cli::LockFileUsageConfig::default(), + environment: None, + config: ConfigCli::default(), + all: false, + skip: None, + skip_with_deps: None, + only: None, + }; + + // Execute pixi install directly + let result = install::execute(install_args).await; + + // Restore original directory + std::env::set_current_dir(original_dir)?; + + match result { + Ok(_) => Ok(()), + Err(e) => Err(format!("pixi install failed: {}", e).into()), + } + } + + /// Run pixi clean and measure execution time using pixi API directly + async fn pixi_clean( + &self, + environment: Option<&str>, + ) -> Result<Duration, Box<dyn std::error::Error>> { + let env_desc = environment.map_or("all environments".to_string(), |e| { + format!("environment '{}'", e) + }); + println!("⏱️ Timing: pixi clean {}", env_desc); + + // Set environment variables for pixi + for (key, value) in self.get_env_vars() { + std::env::set_var(key, value); + } + + // Force non-interactive mode for benchmarks + std::env::set_var("NO_COLOR", "1"); + std::env::set_var("PIXI_NO_PROGRESS", "1"); + std::env::set_var("CI", "1"); + + // Change to workspace directory + let original_dir = std::env::current_dir()?; + std::env::set_current_dir(&self.workspace_dir)?; + + let start = Instant::now(); + + // Create clean arguments + let clean_args = clean::Args::parse_from(["clean", 
"cache", "-y"]); + + // Execute pixi clean directly + let result = clean::execute(clean_args).await; + + // Restore original directory + std::env::set_current_dir(original_dir)?; + + let duration = start.elapsed(); + + match result { + Ok(_) => { + println!("✅ Clean completed in {:.2}s", duration.as_secs_f64()); + Ok(duration) + } + Err(e) => { + println!("❌ pixi clean failed: {}", e); + Err(format!("pixi clean failed: {}", e).into()) + } + } + } + + /// Check if environments exist + fn environments_exist(&self) -> bool { + self.workspace_dir.join(".pixi").join("envs").exists() + } + + /// Get size of .pixi/envs directory + fn get_envs_size(&self) -> Result<u64, Box<dyn std::error::Error>> { + let envs_dir = self.workspace_dir.join(".pixi").join("envs"); + if !envs_dir.exists() { + return Ok(0); + } + + let mut total_size = 0; + for entry in fs::read_dir(&envs_dir)? { + let entry = entry?; + let metadata = entry.metadata()?; + if metadata.is_file() { + total_size += metadata.len(); + } else if metadata.is_dir() { + total_size += self.get_dir_size(&entry.path())?; + } + } + Ok(total_size) + } + + #[allow(clippy::only_used_in_recursion)] + fn get_dir_size(&self, dir: &std::path::Path) -> Result<u64, Box<dyn std::error::Error>> { + let mut total_size = 0; + for entry in fs::read_dir(dir)? 
{ + let entry = entry?; + let metadata = entry.metadata()?; + if metadata.is_file() { + total_size += metadata.len(); + } else if metadata.is_dir() { + total_size += self.get_dir_size(&entry.path())?; + } + } + Ok(total_size) + } + + /// Clean small environment (few small packages) + async fn clean_small_environment(&self) -> Result> { + self.create_pixi_toml(&["python"])?; + self.install_dependencies().await?; + self.pixi_clean(None).await + } + + /// Clean medium environment (several packages) + async fn clean_medium_environment(&self) -> Result> { + self.create_pixi_toml(&["python", "numpy", "pandas", "requests"])?; + self.install_dependencies().await?; + self.pixi_clean(None).await + } + + /// Clean large environment (many packages) + async fn clean_large_environment(&self) -> Result> { + self.create_pixi_toml(&[ + "python", + "numpy", + "pandas", + "scipy", + "matplotlib", + "jupyter", + "scikit-learn", + "requests", + "flask", + "django", + ])?; + self.install_dependencies().await?; + self.pixi_clean(None).await + } + + /// Clean specific environment from multi-environment setup + async fn clean_specific_environment(&self) -> Result> { + self.create_multi_env_pixi_toml()?; + // Install all environments first (pixi install installs all environments by default) + self.install_dependencies().await?; + + // Clean only the test environment + self.pixi_clean(Some("test")).await + } + + /// Clean all environments from multi-environment setup + async fn clean_multi_environments(&self) -> Result> { + self.create_multi_env_pixi_toml()?; + // Install all environments first (pixi install installs all environments by default) + self.install_dependencies().await?; + + // Clean all environments + self.pixi_clean(None).await + } + + /// Clean empty workspace (no environments to clean) + async fn clean_empty_workspace(&self) -> Result> { + self.create_pixi_toml(&["python"])?; + // Don't install dependencies, so no environments exist + self.pixi_clean(None).await + } +} + +/// 
Shared cache for warm testing
+struct SharedCache {
+    cache_dir: PathBuf,
+    _temp_dir: TempDir,
+}
+
+impl SharedCache {
+    /// Creates a cache directory inside a fresh temp dir; the `TempDir`
+    /// handle is kept alive so the directory outlives the benchmark group.
+    fn new() -> Result<Self, Box<dyn std::error::Error>> {
+        let temp_dir = TempDir::new()?;
+        let cache_dir = temp_dir.path().join("shared_pixi_cache");
+        fs::create_dir_all(&cache_dir)?;
+
+        Ok(Self {
+            cache_dir,
+            _temp_dir: temp_dir,
+        })
+    }
+}
+
+/// Benchmarks `pixi clean` against small/medium/large environments.
+/// Each iteration builds a fresh workspace (sharing the package cache so
+/// install time is dominated by linking, not downloads) and times the clean.
+fn bench_environment_sizes(c: &mut Criterion) {
+    let shared_cache = SharedCache::new().expect("Failed to create shared cache");
+    let mut group = c.benchmark_group("environment_sizes_clean");
+    group.measurement_time(Duration::from_secs(90)); // 1.5 minutes
+    // BUG FIX: was sample_size(8). Criterion panics when the sample size is
+    // below 10 ("Sample size must be at least 10"), so the group would abort
+    // at runtime; 10 is the minimum (matching the other groups in this file).
+    group.sample_size(10);
+    group.warm_up_time(Duration::from_secs(10));
+
+    // Small environment clean
+    group.bench_function("clean_small_environment", |b| {
+        b.iter(|| {
+            let workspace = IsolatedPixiWorkspace::new_with_shared_cache(&shared_cache.cache_dir)
+                .expect("Failed to create workspace with shared cache");
+            let duration = RUNTIME
+                .block_on(workspace.clean_small_environment())
+                .expect("Failed to time pixi clean");
+            black_box(duration)
+        })
+    });
+
+    // Medium environment clean
+    group.bench_function("clean_medium_environment", |b| {
+        b.iter(|| {
+            let workspace = IsolatedPixiWorkspace::new_with_shared_cache(&shared_cache.cache_dir)
+                .expect("Failed to create workspace with shared cache");
+            let duration = RUNTIME
+                .block_on(workspace.clean_medium_environment())
+                .expect("Failed to time pixi clean");
+            black_box(duration)
+        })
+    });
+
+    // Large environment clean
+    group.bench_function("clean_large_environment", |b| {
+        b.iter(|| {
+            let workspace = IsolatedPixiWorkspace::new_with_shared_cache(&shared_cache.cache_dir)
+                .expect("Failed to create workspace with shared cache");
+            let duration = RUNTIME
+                .block_on(workspace.clean_large_environment())
+                .expect("Failed to time pixi clean");
+            black_box(duration)
+        })
+    });
+}
+
+fn bench_multi_environment_scenarios(c: &mut Criterion) {
+    let shared_cache = 
SharedCache::new().expect("Failed to create shared cache"); + let mut group = c.benchmark_group("multi_environment_clean"); + group.measurement_time(Duration::from_secs(120)); // 2 minutes + group.sample_size(10); // Minimum required sample size + group.warm_up_time(Duration::from_secs(15)); + + // Clean specific environment + group.bench_function("clean_specific_environment", |b| { + b.iter(|| { + let workspace = IsolatedPixiWorkspace::new_with_shared_cache(&shared_cache.cache_dir) + .expect("Failed to create workspace with shared cache"); + let duration = RUNTIME + .block_on(workspace.clean_specific_environment()) + .expect("Failed to time pixi clean specific environment"); + black_box(duration) + }) + }); + + // Clean all environments in multi-environment setup + group.bench_function("clean_all_multi_environments", |b| { + b.iter(|| { + let workspace = IsolatedPixiWorkspace::new_with_shared_cache(&shared_cache.cache_dir) + .expect("Failed to create workspace with shared cache"); + let duration = RUNTIME + .block_on(workspace.clean_multi_environments()) + .expect("Failed to time pixi clean all environments"); + black_box(duration) + }) + }); +} + +fn bench_edge_cases(c: &mut Criterion) { + let shared_cache = SharedCache::new().expect("Failed to create shared cache"); + let mut group = c.benchmark_group("edge_cases_clean"); + group.measurement_time(Duration::from_secs(60)); // 1 minute + group.sample_size(10); // More samples for quick operations + group.warm_up_time(Duration::from_secs(5)); + + // Clean empty workspace (no environments exist) + group.bench_function("clean_empty_workspace", |b| { + b.iter(|| { + let workspace = IsolatedPixiWorkspace::new_with_shared_cache(&shared_cache.cache_dir) + .expect("Failed to create workspace with shared cache"); + let duration = RUNTIME + .block_on(workspace.clean_empty_workspace()) + .expect("Failed to time pixi clean empty workspace"); + black_box(duration) + }) + }); +} + +fn bench_repeated_clean_operations(c: &mut 
Criterion) { + let shared_cache = SharedCache::new().expect("Failed to create shared cache"); + let mut group = c.benchmark_group("repeated_clean_operations"); + group.measurement_time(Duration::from_secs(90)); // 1.5 minutes + group.sample_size(8); // Moderate sample size + group.warm_up_time(Duration::from_secs(10)); + + // Clean, reinstall, clean again cycle + group.bench_function("clean_reinstall_clean_cycle", |b| { + b.iter(|| { + let workspace = IsolatedPixiWorkspace::new_with_shared_cache(&shared_cache.cache_dir) + .expect("Failed to create workspace with shared cache"); + + RUNTIME.block_on(async { + // Setup environment + workspace + .create_pixi_toml(&["python", "numpy"]) + .expect("Failed to create pixi.toml"); + workspace + .install_dependencies() + .await + .expect("Failed to install dependencies"); + + // First clean + let duration1 = workspace + .pixi_clean(None) + .await + .expect("Failed to clean first time"); + + // Reinstall + workspace + .install_dependencies() + .await + .expect("Failed to reinstall dependencies"); + + // Second clean + let duration2 = workspace + .pixi_clean(None) + .await + .expect("Failed to clean second time"); + + black_box((duration1, duration2)) + }) + }) + }); +} + +fn bench_clean_performance_by_size(c: &mut Criterion) { + let shared_cache = SharedCache::new().expect("Failed to create shared cache"); + let mut group = c.benchmark_group("clean_performance_by_size"); + group.measurement_time(Duration::from_secs(120)); // 2 minutes + group.sample_size(10); // Minimum required sample size + group.warm_up_time(Duration::from_secs(15)); + + // Measure clean performance vs environment size + group.bench_function("clean_with_size_measurement", |b| { + b.iter(|| { + let workspace = IsolatedPixiWorkspace::new_with_shared_cache(&shared_cache.cache_dir) + .expect("Failed to create workspace with shared cache"); + + RUNTIME.block_on(async { + // Create large environment + workspace + .create_pixi_toml(&[ + "python", + "numpy", + 
"pandas", + "scipy", + "matplotlib", + "jupyter", + "scikit-learn", + "requests", + "flask", + ]) + .expect("Failed to create pixi.toml"); + workspace + .install_dependencies() + .await + .expect("Failed to install dependencies"); + + // Measure size before clean + let size_before = workspace + .get_envs_size() + .expect("Failed to get environment size"); + + // Clean and measure time + let clean_duration = workspace + .pixi_clean(None) + .await + .expect("Failed to clean environment"); + + // Verify environments are gone + let environments_exist_after = workspace.environments_exist(); + + black_box((clean_duration, size_before, environments_exist_after)) + }) + }) + }); +} + +criterion_group!( + benches, + bench_environment_sizes, + bench_multi_environment_scenarios, + bench_edge_cases, + bench_repeated_clean_operations, + bench_clean_performance_by_size +); +criterion_main!(benches); diff --git a/crates/pixi_bench/benches/cold_warm_install.rs b/crates/pixi_bench/benches/cold_warm_install.rs new file mode 100644 index 0000000000..27d9ad24d0 --- /dev/null +++ b/crates/pixi_bench/benches/cold_warm_install.rs @@ -0,0 +1,481 @@ +use criterion::{black_box, criterion_group, criterion_main, Criterion}; +use fs_err as fs; +use once_cell::sync::Lazy; +use std::collections::HashMap; +use std::path::{Path, PathBuf}; +use std::time::{Duration, Instant}; +use tempfile::TempDir; + +// Pixi crate imports for direct API usage +use pixi_cli::install; +use pixi_config::ConfigCli; + +// Single global runtime for all benchmarks +static RUNTIME: Lazy = + Lazy::new(|| tokio::runtime::Runtime::new().expect("Failed to create Tokio runtime")); + +/// Create an isolated pixi environment with shared cache for warm testing +struct IsolatedPixiEnv { + _temp_dir: TempDir, + cache_dir: PathBuf, + home_dir: PathBuf, + project_dir: PathBuf, + project_created: bool, +} + +impl IsolatedPixiEnv { + fn new() -> Result> { + let temp_dir = TempDir::new()?; + let base_path = temp_dir.path(); + + let 
cache_dir = base_path.join("pixi_cache"); + let home_dir = base_path.join("pixi_home"); + let project_dir = base_path.join("project"); + + fs::create_dir_all(&cache_dir)?; + fs::create_dir_all(&home_dir)?; + fs::create_dir_all(&project_dir)?; + + Ok(Self { + _temp_dir: temp_dir, + cache_dir, + home_dir, + project_dir, + project_created: false, + }) + } + + /// Create with shared cache directory for warm testing + fn new_with_shared_cache(shared_cache_dir: &Path) -> Result> { + let temp_dir = TempDir::new()?; + let base_path = temp_dir.path(); + + let home_dir = base_path.join("pixi_home"); + let project_dir = base_path.join("project"); + + fs::create_dir_all(&home_dir)?; + fs::create_dir_all(&project_dir)?; + + Ok(Self { + _temp_dir: temp_dir, + cache_dir: shared_cache_dir.to_path_buf(), + home_dir, + project_dir, + project_created: false, + }) + } + + fn get_env_vars(&self) -> HashMap { + let mut env_vars = HashMap::new(); + env_vars.insert( + "PIXI_CACHE_DIR".to_string(), + self.cache_dir.to_string_lossy().to_string(), + ); + env_vars.insert( + "PIXI_HOME".to_string(), + self.home_dir.to_string_lossy().to_string(), + ); + env_vars.insert( + "XDG_CACHE_HOME".to_string(), + self.cache_dir.to_string_lossy().to_string(), + ); + env_vars + } + + /// Ensure local channel exists, create it dynamically if missing (for CI robustness) + fn ensure_local_channel_exists( + &self, + local_channel_dir: &Path, + packages: &[&str], + ) -> Result<(), Box> { + let noarch_dir = local_channel_dir.join("noarch"); + + // If the channel already exists, we're good + if noarch_dir.exists() && noarch_dir.join("repodata.json").exists() { + return Ok(()); + } + + println!("🔧 Creating local conda channel for CI environment..."); + + // Create the directory structure + fs::create_dir_all(&noarch_dir)?; + + // Create repodata.json + self.create_repodata_json(&noarch_dir, packages)?; + + // Create minimal conda packages + self.create_conda_packages(&noarch_dir, packages)?; + + println!("✅ Local 
conda channel created successfully"); + Ok(()) + } + + /// Create repodata.json for the local channel + fn create_repodata_json( + &self, + noarch_dir: &Path, + packages: &[&str], + ) -> Result<(), Box> { + use std::fs::File; + use std::io::Write; + + let mut repodata = serde_json::json!({ + "info": { + "subdir": "noarch" + }, + "packages": {}, + "packages.conda": {}, + "removed": [], + "repodata_version": 1 + }); + + // Add each package to the repodata + for package in packages { + let package_filename = format!("{}-1.0.0-py_0.tar.bz2", package); + repodata["packages"][&package_filename] = serde_json::json!({ + "build": "py_0", + "build_number": 0, + "depends": [], + "license": "MIT", + "name": package, + "platform": null, + "subdir": "noarch", + "timestamp": 1640995200000i64, + "version": "1.0.0" + }); + } + + let mut file = File::create(noarch_dir.join("repodata.json"))?; + file.write_all(serde_json::to_string_pretty(&repodata)?.as_bytes())?; + Ok(()) + } + + /// Create minimal conda packages + fn create_conda_packages( + &self, + noarch_dir: &Path, + packages: &[&str], + ) -> Result<(), Box> { + use std::fs::File; + use std::io::Write; + use std::process::Command as StdCommand; + + for package in packages { + let package_filename = format!("{}-1.0.0-py_0.tar.bz2", package); + let package_path = noarch_dir.join(&package_filename); + + // Create a temporary directory for package contents + let temp_dir = tempfile::TempDir::new()?; + let info_dir = temp_dir.path().join("info"); + fs::create_dir_all(&info_dir)?; + + // Create index.json + let index_data = serde_json::json!({ + "name": package, + "version": "1.0.0", + "build": "py_0", + "build_number": 0, + "depends": [], + "license": "MIT", + "platform": null, + "subdir": "noarch", + "timestamp": 1640995200000i64 + }); + + let mut index_file = File::create(info_dir.join("index.json"))?; + index_file.write_all(serde_json::to_string_pretty(&index_data)?.as_bytes())?; + + // Create empty files list + 
File::create(info_dir.join("files"))?.write_all(b"")?; + + // Create paths.json + let paths_data = serde_json::json!({ + "paths": [], + "paths_version": 1 + }); + let mut paths_file = File::create(info_dir.join("paths.json"))?; + paths_file.write_all(serde_json::to_string_pretty(&paths_data)?.as_bytes())?; + + // Create the tar.bz2 package using system tar command + let output = StdCommand::new("tar") + .args([ + "-cjf", + package_path.to_str().unwrap(), + "-C", + temp_dir.path().to_str().unwrap(), + "info", + ]) + .output()?; + + if !output.status.success() { + return Err(format!( + "Failed to create tar.bz2 package for {}: {}", + package, + String::from_utf8_lossy(&output.stderr) + ) + .into()); + } + } + + Ok(()) + } + + /// Create pixi project only once + fn ensure_pixi_project_created( + &mut self, + packages: &[&str], + ) -> Result<(), Box> { + if self.project_created { + return Ok(()); + } + + use std::fs::File; + use std::io::Write; + + let current_dir = std::env::current_dir()?; + let local_channel_dir = if current_dir.ends_with("pixi_bench") { + current_dir.join("my-local-channel") + } else { + current_dir.join("crates/pixi_bench/my-local-channel") + }; + + // Ensure the local channel exists, create it if it doesn't + self.ensure_local_channel_exists(&local_channel_dir, packages)?; + + let local_channel_url = format!("file://{}", local_channel_dir.to_string_lossy()); + + let mut pixi_toml = format!( + r#"[project] +name = "benchmark-project" +version = "0.1.0" +description = "Benchmark project for pixi local channel benchmark" +channels = ["{}"] +platforms = ["linux-64", "osx-64", "osx-arm64", "win-64"] + +[dependencies] +"#, + local_channel_url + ); + + // Add all packages to dependencies + for package in packages { + pixi_toml.push_str(&format!("{} = \"==1.0.0\"\n", package)); + } + + let mut file = File::create(self.project_dir.join("pixi.toml"))?; + file.write_all(pixi_toml.as_bytes())?; + + self.project_created = true; + Ok(()) + } + + /// For cold 
cache: create new project and install + async fn pixi_install_cold( + &mut self, + packages: &[&str], + ) -> Result> { + // Always create fresh project for cold test + self.project_created = false; + self.ensure_pixi_project_created(packages)?; + + self.run_pixi_install(packages).await + } + + /// For warm cache: reuse existing project and install + async fn pixi_install_warm( + &mut self, + packages: &[&str], + ) -> Result> { + // Ensure project exists (but don't recreate if already exists) + self.ensure_pixi_project_created(packages)?; + + // For warm test, we measure re-installation or verification time + // This simulates "pixi install" when packages are already resolved/cached + self.run_pixi_install(packages).await + } + + /// Run the actual pixi install command using direct API + async fn run_pixi_install( + &self, + packages: &[&str], + ) -> Result> { + println!("⏱️ Timing: pixi install {} packages", packages.len()); + + // Set environment variables for pixi + for (key, value) in self.get_env_vars() { + std::env::set_var(key, value); + } + + // Change to project directory + let original_dir = std::env::current_dir()?; + std::env::set_current_dir(&self.project_dir)?; + + let start = Instant::now(); + + // Create install arguments + let install_args = install::Args { + project_config: pixi_cli::cli_config::WorkspaceConfig::default(), + lock_file_usage: pixi_cli::LockFileUsageConfig::default(), + environment: None, + config: ConfigCli::default(), + all: false, + skip: None, + skip_with_deps: None, + only: None, + }; + + // Execute pixi install directly + let result = install::execute(install_args).await; + + // Restore original directory + std::env::set_current_dir(original_dir)?; + + match result { + Ok(_) => { + let duration = start.elapsed(); + println!("✅ Completed in {:.2}s", duration.as_secs_f64()); + Ok(duration) + } + Err(e) => { + println!("❌ pixi install failed: {}", e); + Err(format!("pixi install failed: {}", e).into()) + } + } + } +} + +/// Shared 
cache for warm testing
+struct SharedCache {
+    cache_dir: PathBuf,
+    _temp_dir: TempDir,
+}
+
+impl SharedCache {
+    /// Creates a cache directory inside a fresh temp dir; the `TempDir`
+    /// handle is kept alive so the directory outlives the benchmark group.
+    fn new() -> Result<Self, Box<dyn std::error::Error>> {
+        let temp_dir = TempDir::new()?;
+        let cache_dir = temp_dir.path().join("shared_pixi_cache");
+        fs::create_dir_all(&cache_dir)?;
+
+        Ok(Self {
+            cache_dir,
+            _temp_dir: temp_dir,
+        })
+    }
+}
+
+/// Cold- vs warm-cache install of a single small package.
+fn bench_small(c: &mut Criterion) {
+    let packages = ["numpy"];
+
+    // Create shared cache for warm testing
+    let shared_cache = SharedCache::new().expect("Failed to create shared cache");
+    let mut group = c.benchmark_group("small_package_installs");
+    group.measurement_time(Duration::from_secs(60)); // Allow 1 minute for measurements
+    group.sample_size(10); // Criterion's minimum allowed sample size
+    group.warm_up_time(Duration::from_secs(5)); // Warm up time
+
+    // Cold cache benchmark - always creates new isolated environment
+    group.bench_function("cold_cache_small", |b| {
+        b.to_async(&*RUNTIME).iter(|| async {
+            let mut env = IsolatedPixiEnv::new().expect("Failed to create isolated environment");
+            let duration = env
+                .pixi_install_cold(&packages)
+                .await
+                .expect("Failed to time pixi install");
+            black_box(duration)
+        })
+    });
+
+    // Warm cache benchmark - reuses shared cache and may reuse project
+    group.bench_function("warm_cache_small", |b| {
+        b.to_async(&*RUNTIME).iter(|| async {
+            let mut env = IsolatedPixiEnv::new_with_shared_cache(&shared_cache.cache_dir)
+                .expect("Failed to create environment with shared cache");
+            let duration = env
+                .pixi_install_warm(&packages)
+                .await
+                .expect("Failed to time pixi install");
+            black_box(duration)
+        })
+    });
+}
+
+/// Cold- vs warm-cache install of a handful of packages.
+fn bench_medium(c: &mut Criterion) {
+    let packages = ["numpy", "pandas", "requests", "click", "pyyaml"];
+
+    let shared_cache = SharedCache::new().expect("Failed to create shared cache");
+
+    let mut group = c.benchmark_group("medium_package_installs");
+    group.measurement_time(Duration::from_secs(90)); // 1.5 minutes
+    // BUG FIX: was sample_size(5). Criterion panics for sample sizes below 10
+    // ("Sample size must be at least 10"), so this group would abort at
+    // runtime; 10 is the minimum.
+    group.sample_size(10);
+    group.warm_up_time(Duration::from_secs(10));
+
+    group.bench_function("cold_cache_medium", |b| {
+        b.to_async(&*RUNTIME).iter(|| async {
+            let mut env = IsolatedPixiEnv::new().expect("Failed to create isolated environment");
+            let duration = env
+                .pixi_install_cold(&packages)
+                .await
+                .expect("Failed to time pixi install");
+            black_box(duration)
+        })
+    });
+
+    group.bench_function("warm_cache_medium", |b| {
+        b.to_async(&*RUNTIME).iter(|| async {
+            let mut env = IsolatedPixiEnv::new_with_shared_cache(&shared_cache.cache_dir)
+                .expect("Failed to create environment with shared cache");
+            let duration = env
+                .pixi_install_warm(&packages)
+                .await
+                .expect("Failed to time pixi install");
+            black_box(duration)
+        })
+    });
+}
+
+/// Cold- vs warm-cache install of a large package set.
+fn bench_large(c: &mut Criterion) {
+    let packages = [
+        "pytorch",
+        "scipy",
+        "scikit-learn",
+        "matplotlib",
+        "jupyter",
+        "bokeh",
+        "dask",
+        "xarray",
+        "opencv",
+        "pandas",
+    ];
+
+    let shared_cache = SharedCache::new().expect("Failed to create shared cache");
+    let mut group = c.benchmark_group("large_package_installs");
+    group.measurement_time(Duration::from_secs(180)); // 3 minutes
+    // BUG FIX: was sample_size(3) — below Criterion's hard minimum of 10,
+    // which panics at runtime. Use the minimum instead.
+    group.sample_size(10);
+    group.warm_up_time(Duration::from_secs(15));
+
+    group.bench_function("cold_cache_large", |b| {
+        b.to_async(&*RUNTIME).iter(|| async {
+            let mut env = IsolatedPixiEnv::new().expect("Failed to create isolated environment");
+            let duration = env
+                .pixi_install_cold(&packages)
+                .await
+                .expect("Failed to time pixi install");
+            black_box(duration)
+        })
+    });
+
+    group.bench_function("warm_cache_large", |b| {
+        b.to_async(&*RUNTIME).iter(|| async {
+            let mut env = IsolatedPixiEnv::new_with_shared_cache(&shared_cache.cache_dir)
+                .expect("Failed to create environment with shared cache");
+            let duration = env
+                .pixi_install_warm(&packages)
+                .await
+                .expect("Failed to time pixi install");
+            black_box(duration)
+        })
+    });
+}
+
+criterion_group!(benches, bench_small, 
bench_medium, bench_large); +criterion_main!(benches); diff --git a/crates/pixi_bench/benches/global_install.rs b/crates/pixi_bench/benches/global_install.rs new file mode 100644 index 0000000000..f81d1706e9 --- /dev/null +++ b/crates/pixi_bench/benches/global_install.rs @@ -0,0 +1,653 @@ +use criterion::{black_box, criterion_group, criterion_main, Criterion}; +use fs_err as fs; +use once_cell::sync::Lazy; +use std::collections::HashMap; +use std::path::PathBuf; +use std::str::FromStr; +use std::time::{Duration, Instant}; +use tempfile::TempDir; + +// Pixi crate imports for direct API usage +use pixi_global::project::GlobalSpec; +use pixi_global::{EnvironmentName, Project}; +use rattler_conda_types::{NamedChannelOrUrl, Platform}; + +// Single global runtime for all benchmarks +static RUNTIME: Lazy = + Lazy::new(|| tokio::runtime::Runtime::new().expect("Failed to create Tokio runtime")); + +/// Create an isolated pixi environment for global install testing +struct IsolatedPixiGlobalEnv { + _temp_dir: TempDir, + cache_dir: PathBuf, + home_dir: PathBuf, + global_dir: PathBuf, +} + +impl IsolatedPixiGlobalEnv { + fn new() -> Result> { + let temp_dir = TempDir::new()?; + let base_path = temp_dir.path(); + + let cache_dir = base_path.join("pixi_cache"); + let home_dir = base_path.join("pixi_home"); + let global_dir = base_path.join("pixi_global"); + + fs::create_dir_all(&cache_dir)?; + fs::create_dir_all(&home_dir)?; + fs::create_dir_all(&global_dir)?; + + Ok(Self { + _temp_dir: temp_dir, + cache_dir, + home_dir, + global_dir, + }) + } + + /// Create with shared cache directory for warm testing + fn new_with_shared_cache( + shared_cache_dir: &std::path::Path, + ) -> Result> { + let temp_dir = TempDir::new()?; + let base_path = temp_dir.path(); + + let home_dir = base_path.join("pixi_home"); + let global_dir = base_path.join("pixi_global"); + + fs::create_dir_all(&home_dir)?; + fs::create_dir_all(&global_dir)?; + + Ok(Self { + _temp_dir: temp_dir, + cache_dir: 
shared_cache_dir.to_path_buf(),
+            home_dir,
+            global_dir,
+        })
+    }
+
+    /// Environment variables that point pixi at this instance's isolated
+    /// cache, home and global directories (plus XDG_CACHE_HOME, which mirrors
+    /// the cache dir).
+    fn get_env_vars(&self) -> HashMap {
+        let mut env_vars = HashMap::new();
+        env_vars.insert(
+            "PIXI_CACHE_DIR".to_string(),
+            self.cache_dir.to_string_lossy().to_string(),
+        );
+        env_vars.insert(
+            "PIXI_HOME".to_string(),
+            self.home_dir.to_string_lossy().to_string(),
+        );
+        env_vars.insert(
+            "PIXI_GLOBAL_DIR".to_string(),
+            self.global_dir.to_string_lossy().to_string(),
+        );
+        env_vars.insert(
+            "XDG_CACHE_HOME".to_string(),
+            self.cache_dir.to_string_lossy().to_string(),
+        );
+        env_vars
+    }
+
+    /// Run pixi global install and measure execution time using pixi_global crate directly
+    ///
+    /// Adds a `bench_<first-package>` environment to the global manifest,
+    /// adds each package pinned to `==1.0.0` (matching the local channel),
+    /// and installs the environment. Returns the elapsed wall time.
+    async fn pixi_global_install(
+        &self,
+        packages: &[&str],
+        channels: Option>,
+        platform: Option,
+        // NOTE(review): accepted but never used (leading underscore) — the
+        // "force reinstall" benchmark therefore measures a plain repeat
+        // install, not a forced one. TODO: honor or drop this flag.
+        _force_reinstall: bool,
+    ) -> Result> {
+        println!("⏱️ Timing: pixi global install {} packages", packages.len());
+
+        // NOTE(review): the clock starts before env-var setup and project
+        // discovery, so those are included in the measurement — unlike
+        // `run_pixi_install` in cold_warm_install.rs, which starts timing
+        // after setup. Confirm which is intended for comparability.
+        let start = Instant::now();
+
+        // Set environment variables for pixi_global
+        // NOTE(review): `std::env::set_var` mutates process-global state and
+        // is not thread-safe; this is racy if benchmarks ever run
+        // concurrently on the tokio runtime — confirm serial execution.
+        for (key, value) in self.get_env_vars() {
+            std::env::set_var(key, value);
+        }
+
+        // Create or discover the global project
+        let mut project = Project::discover_or_create().await?;
+
+        // Create environment name from first package.
+        // Panics if `packages` is empty; all callers in this file pass at
+        // least one package.
+        let env_name = EnvironmentName::from_str(&format!("bench_{}", packages[0]))?;
+
+        // Use local channel if no channels specified
+        let channels = channels.unwrap_or_else(|| {
+            // Path heuristic: works when invoked from the repo root or from
+            // crates/pixi_bench (cargo sets cwd to the bench crate).
+            let current_dir = std::env::current_dir().unwrap_or_default();
+            let local_channel_dir = if current_dir.ends_with("pixi_bench") {
+                current_dir.join("my-local-channel")
+            } else {
+                current_dir.join("crates/pixi_bench/my-local-channel")
+            };
+            // NOTE(review): `format!("file://{}")` does not produce a valid
+            // file URL for Windows paths (drive letters/backslashes) — TODO
+            // confirm these benches only run on Unix CI runners.
+            let local_channel_url = format!("file://{}", local_channel_dir.to_string_lossy());
+            vec![NamedChannelOrUrl::Url(local_channel_url.parse().unwrap())]
+        });
+
+        // Add environment to manifest with channels
+        project
+            .manifest
+            .add_environment(&env_name, Some(channels))?;
+
+        // Set platform if specified
+        if let Some(platform) = platform {
+            project.manifest.set_platform(&env_name, platform)?;
+        }
+
+        // Add each package as a dependency with version constraint to match local channel
+        for package in packages {
+            let package_spec = format!("{}==1.0.0", package);
+            let global_spec =
+                GlobalSpec::try_from_str(&package_spec, project.global_channel_config())?;
+            project.manifest.add_dependency(&env_name, &global_spec)?;
+        }
+
+        // Install the environment
+        let _environment_update = project.install_environment(&env_name).await?;
+
+        let duration = start.elapsed();
+        println!(
+            "✅ Global install completed in {:.2}s",
+            duration.as_secs_f64()
+        );
+
+        Ok(duration)
+    }
+
+    /// Install a single small package
+    async fn install_single_small(&self) -> Result> {
+        self.pixi_global_install(&["numpy"], None, None, false)
+            .await
+    }
+
+    /// Install multiple small packages
+    async fn install_multiple_small(&self) -> Result> {
+        self.pixi_global_install(&["numpy", "pandas", "requests"], None, None, false)
+            .await
+    }
+
+    /// Install a medium-sized package
+    async fn install_medium(&self) -> Result> {
+        self.pixi_global_install(&["matplotlib"], None, None, false)
+            .await
+    }
+
+    /// Install a large package
+    async fn install_large(&self) -> Result> {
+        self.pixi_global_install(&["jupyter"], None, None, false)
+            .await
+    }
+
+    /// Install with force reinstall
+    /// NOTE(review): the `true` flag below is ignored by `pixi_global_install`
+    /// (parameter is `_force_reinstall`), so this measures two consecutive
+    /// plain installs, not a forced reinstall.
+    async fn install_with_force_reinstall(&self) -> Result> {
+        // First install
+        let _ = self
+            .pixi_global_install(&["numpy"], None, None, false)
+            .await?;
+        // Then force reinstall
+        self.pixi_global_install(&["numpy"], None, None, true).await
+    }
+
+    /// Install with specific platform
+    async fn install_with_platform(&self) -> Result> {
+        let platform = Platform::current();
+        self.pixi_global_install(&["click"], None, Some(platform), false)
+            .await
+    }
+
+    /// Install with custom channel
+    async fn install_with_custom_channel(&self) -> Result> {
+        // Use local channel for this test too, but with different packages
+        let current_dir = 
std::env::current_dir().unwrap_or_default(); + let local_channel_dir = if current_dir.ends_with("pixi_bench") { + current_dir.join("my-local-channel") + } else { + current_dir.join("crates/pixi_bench/my-local-channel") + }; + let local_channel_url = format!("file://{}", local_channel_dir.to_string_lossy()); + let channels = vec![NamedChannelOrUrl::Url(local_channel_url.parse().unwrap())]; + self.pixi_global_install(&["scipy"], Some(channels), None, false) + .await + } + + /// Install and uninstall a single small package (only uninstall is timed) + async fn install_and_uninstall_single_small( + &self, + ) -> Result> { + // Set environment variables once for both operations + for (key, value) in self.get_env_vars() { + std::env::set_var(key, value); + } + + // Create a single project instance for both operations + let mut project = Project::discover_or_create().await?; + let env_name = EnvironmentName::from_str("bench_numpy")?; + + // Use local channel + let current_dir = std::env::current_dir().unwrap_or_default(); + let local_channel_dir = if current_dir.ends_with("pixi_bench") { + current_dir.join("my-local-channel") + } else { + current_dir.join("crates/pixi_bench/my-local-channel") + }; + let local_channel_url = format!("file://{}", local_channel_dir.to_string_lossy()); + let channels = vec![NamedChannelOrUrl::Url(local_channel_url.parse().unwrap())]; + + // Setup: Install the package (not timed) + project + .manifest + .add_environment(&env_name, Some(channels))?; + let package_spec = "numpy==1.0.0"; + let global_spec = GlobalSpec::try_from_str(package_spec, project.global_channel_config())?; + project.manifest.add_dependency(&env_name, &global_spec)?; + let _ = project.install_environment(&env_name).await?; + + // Measure: Only the uninstall operation + println!("⏱️ Timing: pixi global uninstall 1 packages"); + let start = Instant::now(); + let _ = project.remove_environment(&env_name).await?; + let duration = start.elapsed(); + println!( + "✅ Global uninstall 
completed in {:.2}s", + duration.as_secs_f64() + ); + + Ok(duration) + } + + /// Install and uninstall multiple small packages (only uninstall is timed) + async fn install_and_uninstall_multiple_small( + &self, + ) -> Result> { + // Set environment variables once for all operations + for (key, value) in self.get_env_vars() { + std::env::set_var(key, value); + } + + // Create a single project instance for all operations + let mut project = Project::discover_or_create().await?; + + // Use local channel + let current_dir = std::env::current_dir().unwrap_or_default(); + let local_channel_dir = if current_dir.ends_with("pixi_bench") { + current_dir.join("my-local-channel") + } else { + current_dir.join("crates/pixi_bench/my-local-channel") + }; + let local_channel_url = format!("file://{}", local_channel_dir.to_string_lossy()); + let channels = vec![NamedChannelOrUrl::Url(local_channel_url.parse().unwrap())]; + + // Setup: Install the packages (not timed) + let packages = ["numpy", "pandas", "requests"]; + for package in &packages { + let env_name = EnvironmentName::from_str(&format!("bench_{}", package))?; + project + .manifest + .add_environment(&env_name, Some(channels.clone()))?; + let package_spec = format!("{}==1.0.0", package); + let global_spec = + GlobalSpec::try_from_str(&package_spec, project.global_channel_config())?; + project.manifest.add_dependency(&env_name, &global_spec)?; + let _ = project.install_environment(&env_name).await?; + } + + // Measure: Only the uninstall operations + println!( + "⏱️ Timing: pixi global uninstall {} packages", + packages.len() + ); + let start = Instant::now(); + for package in &packages { + let env_name = EnvironmentName::from_str(&format!("bench_{}", package))?; + let _ = project.remove_environment(&env_name).await?; + } + let duration = start.elapsed(); + println!( + "✅ Multiple uninstall completed in {:.2}s", + duration.as_secs_f64() + ); + Ok(duration) + } + + /// Install and uninstall a medium-sized package (only 
uninstall is timed) + async fn install_and_uninstall_medium(&self) -> Result> { + // Set environment variables once for both operations + for (key, value) in self.get_env_vars() { + std::env::set_var(key, value); + } + + // Create a single project instance for both operations + let mut project = Project::discover_or_create().await?; + let env_name = EnvironmentName::from_str("bench_matplotlib")?; + + // Use local channel + let current_dir = std::env::current_dir().unwrap_or_default(); + let local_channel_dir = if current_dir.ends_with("pixi_bench") { + current_dir.join("my-local-channel") + } else { + current_dir.join("crates/pixi_bench/my-local-channel") + }; + let local_channel_url = format!("file://{}", local_channel_dir.to_string_lossy()); + let channels = vec![NamedChannelOrUrl::Url(local_channel_url.parse().unwrap())]; + + // Setup: Install the package (not timed) + project + .manifest + .add_environment(&env_name, Some(channels))?; + let package_spec = "matplotlib==1.0.0"; + let global_spec = GlobalSpec::try_from_str(package_spec, project.global_channel_config())?; + project.manifest.add_dependency(&env_name, &global_spec)?; + let _ = project.install_environment(&env_name).await?; + + // Measure: Only the uninstall operation + println!("⏱️ Timing: pixi global uninstall 1 packages"); + let start = Instant::now(); + let _ = project.remove_environment(&env_name).await?; + let duration = start.elapsed(); + println!( + "✅ Global uninstall completed in {:.2}s", + duration.as_secs_f64() + ); + + Ok(duration) + } + + /// Install and uninstall a large package (only uninstall is timed) + async fn install_and_uninstall_large(&self) -> Result> { + // Set environment variables once for both operations + for (key, value) in self.get_env_vars() { + std::env::set_var(key, value); + } + + // Create a single project instance for both operations + let mut project = Project::discover_or_create().await?; + let env_name = EnvironmentName::from_str("bench_jupyter")?; + + // Use 
local channel + let current_dir = std::env::current_dir().unwrap_or_default(); + let local_channel_dir = if current_dir.ends_with("pixi_bench") { + current_dir.join("my-local-channel") + } else { + current_dir.join("crates/pixi_bench/my-local-channel") + }; + let local_channel_url = format!("file://{}", local_channel_dir.to_string_lossy()); + let channels = vec![NamedChannelOrUrl::Url(local_channel_url.parse().unwrap())]; + + // Setup: Install the package (not timed) + project + .manifest + .add_environment(&env_name, Some(channels))?; + let package_spec = "jupyter==1.0.0"; + let global_spec = GlobalSpec::try_from_str(package_spec, project.global_channel_config())?; + project.manifest.add_dependency(&env_name, &global_spec)?; + let _ = project.install_environment(&env_name).await?; + + // Measure: Only the uninstall operation + println!("⏱️ Timing: pixi global uninstall 1 packages"); + let start = Instant::now(); + let _ = project.remove_environment(&env_name).await?; + let duration = start.elapsed(); + println!( + "✅ Global uninstall completed in {:.2}s", + duration.as_secs_f64() + ); + + Ok(duration) + } +} + +/// Shared cache for warm testing +struct SharedCache { + cache_dir: PathBuf, + _temp_dir: TempDir, +} + +impl SharedCache { + fn new() -> Result> { + let temp_dir = TempDir::new()?; + let cache_dir = temp_dir.path().join("shared_pixi_cache"); + fs::create_dir_all(&cache_dir)?; + + Ok(Self { + cache_dir, + _temp_dir: temp_dir, + }) + } +} + +fn bench_single_package(c: &mut Criterion) { + let shared_cache = SharedCache::new().expect("Failed to create shared cache"); + let mut group = c.benchmark_group("single_package_global_install"); + group.measurement_time(Duration::from_secs(60)); // Allow 1 minute for measurements + group.sample_size(10); // Reduce sample size for long operations + group.warm_up_time(Duration::from_secs(5)); // Warm up time + + // Cold cache benchmark - always creates new isolated environment + group.bench_function("cold_cache_single", 
|b| { + b.to_async(&*RUNTIME).iter(|| async { + let env = IsolatedPixiGlobalEnv::new().expect("Failed to create isolated environment"); + let duration = env + .install_single_small() + .await + .expect("Failed to time pixi global install"); + black_box(duration) + }) + }); + + // Warm cache benchmark - reuses shared cache + group.bench_function("warm_cache_single", |b| { + b.to_async(&*RUNTIME).iter(|| async { + let env = IsolatedPixiGlobalEnv::new_with_shared_cache(&shared_cache.cache_dir) + .expect("Failed to create environment with shared cache"); + let duration = env + .install_single_small() + .await + .expect("Failed to time pixi global install"); + black_box(duration) + }) + }); +} + +fn bench_multiple_packages(c: &mut Criterion) { + let shared_cache = SharedCache::new().expect("Failed to create shared cache"); + let mut group = c.benchmark_group("multiple_packages_global_install"); + group.measurement_time(Duration::from_secs(90)); // 1.5 minutes + group.sample_size(10); // Minimum required samples + group.warm_up_time(Duration::from_secs(10)); + + // Cold cache benchmark + group.bench_function("cold_cache_multiple", |b| { + b.to_async(&*RUNTIME).iter(|| async { + let env = IsolatedPixiGlobalEnv::new().expect("Failed to create isolated environment"); + let duration = env + .install_multiple_small() + .await + .expect("Failed to time pixi global install"); + black_box(duration) + }) + }); + + // Warm cache benchmark + group.bench_function("warm_cache_multiple", |b| { + b.to_async(&*RUNTIME).iter(|| async { + let env = IsolatedPixiGlobalEnv::new_with_shared_cache(&shared_cache.cache_dir) + .expect("Failed to create environment with shared cache"); + let duration = env + .install_multiple_small() + .await + .expect("Failed to time pixi global install"); + black_box(duration) + }) + }); +} + +fn bench_package_sizes(c: &mut Criterion) { + let shared_cache = SharedCache::new().expect("Failed to create shared cache"); + let mut group = 
c.benchmark_group("package_sizes_global_install"); + group.measurement_time(Duration::from_secs(120)); // 2 minutes + group.sample_size(10); // Minimum required samples + group.warm_up_time(Duration::from_secs(15)); + + // Medium package benchmark + group.bench_function("medium_package", |b| { + b.to_async(&*RUNTIME).iter(|| async { + let env = IsolatedPixiGlobalEnv::new_with_shared_cache(&shared_cache.cache_dir) + .expect("Failed to create environment with shared cache"); + let duration = env + .install_medium() + .await + .expect("Failed to time pixi global install"); + black_box(duration) + }) + }); + + // Large package benchmark + group.bench_function("large_package", |b| { + b.to_async(&*RUNTIME).iter(|| async { + let env = IsolatedPixiGlobalEnv::new_with_shared_cache(&shared_cache.cache_dir) + .expect("Failed to create environment with shared cache"); + let duration = env + .install_large() + .await + .expect("Failed to time pixi global install"); + black_box(duration) + }) + }); +} + +fn bench_special_scenarios(c: &mut Criterion) { + let shared_cache = SharedCache::new().expect("Failed to create shared cache"); + let mut group = c.benchmark_group("special_scenarios_global_install"); + group.measurement_time(Duration::from_secs(90)); // 1.5 minutes + group.sample_size(10); // Minimum required samples + group.warm_up_time(Duration::from_secs(10)); + + // Force reinstall benchmark + group.bench_function("force_reinstall", |b| { + b.to_async(&*RUNTIME).iter(|| async { + let env = IsolatedPixiGlobalEnv::new_with_shared_cache(&shared_cache.cache_dir) + .expect("Failed to create environment with shared cache"); + let duration = env + .install_with_force_reinstall() + .await + .expect("Failed to time pixi global install with force reinstall"); + black_box(duration) + }) + }); + + // Platform-specific install benchmark + group.bench_function("platform_specific", |b| { + b.to_async(&*RUNTIME).iter(|| async { + let env = 
IsolatedPixiGlobalEnv::new_with_shared_cache(&shared_cache.cache_dir) + .expect("Failed to create environment with shared cache"); + let duration = env + .install_with_platform() + .await + .expect("Failed to time pixi global install with platform"); + black_box(duration) + }) + }); + + // Custom channel benchmark + group.bench_function("custom_channel", |b| { + b.to_async(&*RUNTIME).iter(|| async { + let env = IsolatedPixiGlobalEnv::new_with_shared_cache(&shared_cache.cache_dir) + .expect("Failed to create environment with shared cache"); + let duration = env + .install_with_custom_channel() + .await + .expect("Failed to time pixi global install with custom channel"); + black_box(duration) + }) + }); +} + +fn bench_single_package_uninstall(c: &mut Criterion) { + let shared_cache = SharedCache::new().expect("Failed to create shared cache"); + let mut group = c.benchmark_group("single_package_global_uninstall"); + group.measurement_time(Duration::from_secs(60)); // Allow 1 minute for measurements + group.sample_size(10); // Reduce sample size for long operations + group.warm_up_time(Duration::from_secs(5)); // Warm up time + + // Uninstall single package benchmark + group.bench_function("uninstall_single", |b| { + b.to_async(&*RUNTIME).iter(|| async { + let env = IsolatedPixiGlobalEnv::new_with_shared_cache(&shared_cache.cache_dir) + .expect("Failed to create environment with shared cache"); + // Install and uninstall (only uninstall is timed) + let duration = env + .install_and_uninstall_single_small() + .await + .expect("Failed to time pixi global uninstall"); + black_box(duration) + }) + }); +} + +fn bench_multiple_packages_uninstall(c: &mut Criterion) { + let shared_cache = SharedCache::new().expect("Failed to create shared cache"); + let mut group = c.benchmark_group("multiple_packages_global_uninstall"); + group.measurement_time(Duration::from_secs(90)); // 1.5 minutes + group.sample_size(10); // Minimum required samples + 
group.warm_up_time(Duration::from_secs(10)); + + // Uninstall multiple packages benchmark + group.bench_function("uninstall_multiple", |b| { + b.to_async(&*RUNTIME).iter(|| async { + let env = IsolatedPixiGlobalEnv::new_with_shared_cache(&shared_cache.cache_dir) + .expect("Failed to create environment with shared cache"); + // Install and uninstall (only uninstall is timed) + let duration = env + .install_and_uninstall_multiple_small() + .await + .expect("Failed to time pixi global uninstall"); + black_box(duration) + }) + }); +} + +fn bench_package_sizes_uninstall(c: &mut Criterion) { + let shared_cache = SharedCache::new().expect("Failed to create shared cache"); + let mut group = c.benchmark_group("package_sizes_global_uninstall"); + group.measurement_time(Duration::from_secs(120)); // 2 minutes + group.sample_size(10); // Minimum required samples + group.warm_up_time(Duration::from_secs(15)); + + // Medium package uninstall benchmark + group.bench_function("uninstall_medium_package", |b| { + b.to_async(&*RUNTIME).iter(|| async { + let env = IsolatedPixiGlobalEnv::new_with_shared_cache(&shared_cache.cache_dir) + .expect("Failed to create environment with shared cache"); + // Install and uninstall (only uninstall is timed) + let duration = env + .install_and_uninstall_medium() + .await + .expect("Failed to time pixi global uninstall"); + black_box(duration) + }) + }); + + // Large package uninstall benchmark + group.bench_function("uninstall_large_package", |b| { + b.to_async(&*RUNTIME).iter(|| async { + let env = IsolatedPixiGlobalEnv::new_with_shared_cache(&shared_cache.cache_dir) + .expect("Failed to create environment with shared cache"); + // Install and uninstall (only uninstall is timed) + let duration = env + .install_and_uninstall_large() + .await + .expect("Failed to time pixi global uninstall"); + black_box(duration) + }) + }); +} + +criterion_group!( + benches, + bench_single_package, + bench_multiple_packages, + bench_package_sizes, + 
bench_special_scenarios, + bench_single_package_uninstall, + bench_multiple_packages_uninstall, + bench_package_sizes_uninstall +); +criterion_main!(benches); diff --git a/crates/pixi_bench/benches/lock_install.rs b/crates/pixi_bench/benches/lock_install.rs new file mode 100644 index 0000000000..2d47e7e8e3 --- /dev/null +++ b/crates/pixi_bench/benches/lock_install.rs @@ -0,0 +1,545 @@ +use criterion::{black_box, criterion_group, criterion_main, Criterion}; +use fs_err as fs; +use once_cell::sync::Lazy; +use std::collections::HashMap; +use std::path::{Path, PathBuf}; +use std::time::{Duration, Instant}; +use tempfile::TempDir; + +// Pixi crate imports for direct API usage +use pixi_cli::install; +use pixi_config::ConfigCli; + +// Single global runtime for all benchmarks +static RUNTIME: Lazy = + Lazy::new(|| tokio::runtime::Runtime::new().expect("Failed to create Tokio runtime")); + +/// Create an isolated pixi environment for lockfile testing +struct IsolatedPixiEnv { + _temp_dir: TempDir, + cache_dir: PathBuf, + home_dir: PathBuf, + project_dir: PathBuf, + project_created: bool, +} + +impl IsolatedPixiEnv { + fn new() -> Result> { + let temp_dir = TempDir::new()?; + let base_path = temp_dir.path(); + + let cache_dir = base_path.join("pixi_cache"); + let home_dir = base_path.join("pixi_home"); + let project_dir = base_path.join("project"); + + fs::create_dir_all(&cache_dir)?; + fs::create_dir_all(&home_dir)?; + fs::create_dir_all(&project_dir)?; + + Ok(Self { + _temp_dir: temp_dir, + cache_dir, + home_dir, + project_dir, + project_created: false, + }) + } + + fn get_env_vars(&self) -> HashMap { + let mut env_vars = HashMap::new(); + env_vars.insert( + "PIXI_CACHE_DIR".to_string(), + self.cache_dir.to_string_lossy().to_string(), + ); + env_vars.insert( + "PIXI_HOME".to_string(), + self.home_dir.to_string_lossy().to_string(), + ); + env_vars.insert( + "XDG_CACHE_HOME".to_string(), + self.cache_dir.to_string_lossy().to_string(), + ); + env_vars + } + + /// Ensure 
local channel exists, create it dynamically if missing (for CI robustness) + fn ensure_local_channel_exists( + &self, + local_channel_dir: &Path, + packages: &[&str], + ) -> Result<(), Box> { + let noarch_dir = local_channel_dir.join("noarch"); + + // If the channel already exists, we're good + if noarch_dir.exists() && noarch_dir.join("repodata.json").exists() { + return Ok(()); + } + + println!("🔧 Creating local conda channel for CI environment..."); + + // Create the directory structure + fs::create_dir_all(&noarch_dir)?; + + // Create repodata.json + self.create_repodata_json(&noarch_dir, packages)?; + + // Create minimal conda packages + self.create_conda_packages(&noarch_dir, packages)?; + + println!("✅ Local conda channel created successfully"); + Ok(()) + } + + /// Create repodata.json for the local channel + fn create_repodata_json( + &self, + noarch_dir: &Path, + packages: &[&str], + ) -> Result<(), Box> { + use std::fs::File; + use std::io::Write; + + let mut repodata = serde_json::json!({ + "info": { + "subdir": "noarch" + }, + "packages": {}, + "packages.conda": {}, + "removed": [], + "repodata_version": 1 + }); + + // Add each package to the repodata + for package in packages { + let package_filename = format!("{}-1.0.0-py_0.tar.bz2", package); + repodata["packages"][&package_filename] = serde_json::json!({ + "build": "py_0", + "build_number": 0, + "depends": [], + "license": "MIT", + "name": package, + "platform": null, + "subdir": "noarch", + "timestamp": 1640995200000i64, + "version": "1.0.0" + }); + } + + let mut file = File::create(noarch_dir.join("repodata.json"))?; + file.write_all(serde_json::to_string_pretty(&repodata)?.as_bytes())?; + Ok(()) + } + + /// Create minimal conda packages + fn create_conda_packages( + &self, + noarch_dir: &Path, + packages: &[&str], + ) -> Result<(), Box> { + use std::fs::File; + use std::io::Write; + use std::process::Command as StdCommand; + + for package in packages { + let package_filename = 
format!("{}-1.0.0-py_0.tar.bz2", package); + let package_path = noarch_dir.join(&package_filename); + + // Create a temporary directory for package contents + let temp_dir = tempfile::TempDir::new()?; + let info_dir = temp_dir.path().join("info"); + fs::create_dir_all(&info_dir)?; + + // Create index.json + let index_data = serde_json::json!({ + "name": package, + "version": "1.0.0", + "build": "py_0", + "build_number": 0, + "depends": [], + "license": "MIT", + "platform": null, + "subdir": "noarch", + "timestamp": 1640995200000i64 + }); + + let mut index_file = File::create(info_dir.join("index.json"))?; + index_file.write_all(serde_json::to_string_pretty(&index_data)?.as_bytes())?; + + // Create empty files list + File::create(info_dir.join("files"))?.write_all(b"")?; + + // Create paths.json + let paths_data = serde_json::json!({ + "paths": [], + "paths_version": 1 + }); + let mut paths_file = File::create(info_dir.join("paths.json"))?; + paths_file.write_all(serde_json::to_string_pretty(&paths_data)?.as_bytes())?; + + // Create the tar.bz2 package using system tar command + let output = StdCommand::new("tar") + .args([ + "-cjf", + package_path.to_str().unwrap(), + "-C", + temp_dir.path().to_str().unwrap(), + "info", + ]) + .output()?; + + if !output.status.success() { + return Err(format!( + "Failed to create tar.bz2 package for {}: {}", + package, + String::from_utf8_lossy(&output.stderr) + ) + .into()); + } + } + + Ok(()) + } + + /// Create pixi project and generate lockfile + async fn create_pixi_project_with_lockfile( + &mut self, + packages: &[&str], + ) -> Result<(), Box> { + use std::fs::File; + use std::io::Write; + + let current_dir = std::env::current_dir()?; + let local_channel_dir = if current_dir.ends_with("pixi_bench") { + current_dir.join("my-local-channel") + } else { + current_dir.join("crates/pixi_bench/my-local-channel") + }; + + // Ensure the local channel exists, create it if it doesn't + self.ensure_local_channel_exists(&local_channel_dir, 
packages)?; + + let local_channel_url = format!("file://{}", local_channel_dir.to_string_lossy()); + + let mut pixi_toml = format!( + r#"[project] +name = "lockfile-benchmark-project" +version = "0.1.0" +description = "Benchmark project for pixi lockfile testing" +channels = ["{}"] +platforms = ["linux-64", "osx-64", "osx-arm64", "win-64"] + +[dependencies] +"#, + local_channel_url + ); + + // Add all packages to dependencies + for package in packages { + pixi_toml.push_str(&format!("{} = \"==1.0.0\"\n", package)); + } + + let mut file = File::create(self.project_dir.join("pixi.toml"))?; + file.write_all(pixi_toml.as_bytes())?; + + // Generate lockfile by running install once + self.run_pixi_install_internal(packages).await?; + + self.project_created = true; + Ok(()) + } + + /// Create pixi project without lockfile + fn create_pixi_project_without_lockfile( + &mut self, + packages: &[&str], + ) -> Result<(), Box> { + use std::fs::File; + use std::io::Write; + + let current_dir = std::env::current_dir()?; + let local_channel_dir = if current_dir.ends_with("pixi_bench") { + current_dir.join("my-local-channel") + } else { + current_dir.join("crates/pixi_bench/my-local-channel") + }; + + // Ensure the local channel exists, create it if it doesn't + self.ensure_local_channel_exists(&local_channel_dir, packages)?; + + let local_channel_url = format!("file://{}", local_channel_dir.to_string_lossy()); + + let mut pixi_toml = format!( + r#"[project] +name = "no-lockfile-benchmark-project" +version = "0.1.0" +description = "Benchmark project for pixi no-lockfile testing" +channels = ["{}"] +platforms = ["linux-64", "osx-64", "osx-arm64", "win-64"] + +[dependencies] +"#, + local_channel_url + ); + + // Add all packages to dependencies + for package in packages { + pixi_toml.push_str(&format!("{} = \"==1.0.0\"\n", package)); + } + + let mut file = File::create(self.project_dir.join("pixi.toml"))?; + file.write_all(pixi_toml.as_bytes())?; + + // Ensure no lockfile exists + let 
lockfile_path = self.project_dir.join("pixi.lock"); + if lockfile_path.exists() { + fs::remove_file(lockfile_path)?; + } + + self.project_created = true; + Ok(()) + } + + /// Install with existing lockfile - should be faster as dependency resolution is skipped + async fn pixi_install_with_lockfile( + &mut self, + packages: &[&str], + ) -> Result> { + // Create project with lockfile if not already created + if !self.project_created { + self.create_pixi_project_with_lockfile(packages).await?; + } + + // Ensure lockfile exists + let lockfile_path = self.project_dir.join("pixi.lock"); + if !lockfile_path.exists() { + return Err("Lockfile does not exist for with-lockfile benchmark".into()); + } + + println!( + "⏱️ Timing: pixi install with lockfile ({} packages)", + packages.len() + ); + self.run_pixi_install_timed(packages).await + } + + /// Install without lockfile - requires full dependency resolution + async fn pixi_install_without_lockfile( + &mut self, + packages: &[&str], + ) -> Result> { + // Always create fresh project without lockfile + self.project_created = false; + self.create_pixi_project_without_lockfile(packages)?; + + // Ensure no lockfile exists + let lockfile_path = self.project_dir.join("pixi.lock"); + if lockfile_path.exists() { + fs::remove_file(lockfile_path)?; + } + + println!( + "⏱️ Timing: pixi install without lockfile ({} packages)", + packages.len() + ); + self.run_pixi_install_timed(packages).await + } + + /// Internal install method for setup (not timed) + async fn run_pixi_install_internal( + &self, + _packages: &[&str], + ) -> Result<(), Box> { + // Set environment variables for pixi + for (key, value) in self.get_env_vars() { + std::env::set_var(key, value); + } + + // Change to project directory + let original_dir = std::env::current_dir()?; + std::env::set_current_dir(&self.project_dir)?; + + // Create install arguments + let install_args = install::Args { + project_config: pixi_cli::cli_config::WorkspaceConfig::default(), + 
lock_file_usage: pixi_cli::LockFileUsageConfig::default(), + environment: None, + config: ConfigCli::default(), + all: false, + skip: None, + skip_with_deps: None, + only: None, + }; + + // Execute pixi install directly + let result = install::execute(install_args).await; + + // Restore original directory + std::env::set_current_dir(original_dir)?; + + match result { + Ok(_) => Ok(()), + Err(e) => Err(format!("pixi install failed: {}", e).into()), + } + } + + /// Run the actual pixi install command using direct API (timed) + async fn run_pixi_install_timed( + &self, + _packages: &[&str], + ) -> Result> { + // Set environment variables for pixi + for (key, value) in self.get_env_vars() { + std::env::set_var(key, value); + } + + // Change to project directory + let original_dir = std::env::current_dir()?; + std::env::set_current_dir(&self.project_dir)?; + + let start = Instant::now(); + + // Create install arguments + let install_args = install::Args { + project_config: pixi_cli::cli_config::WorkspaceConfig::default(), + lock_file_usage: pixi_cli::LockFileUsageConfig::default(), + environment: None, + config: ConfigCli::default(), + all: false, + skip: None, + skip_with_deps: None, + only: None, + }; + + // Execute pixi install directly + let result = install::execute(install_args).await; + + // Restore original directory + std::env::set_current_dir(original_dir)?; + + match result { + Ok(_) => { + let duration = start.elapsed(); + println!("✅ Completed in {:.2}s", duration.as_secs_f64()); + Ok(duration) + } + Err(e) => { + println!("❌ pixi install failed: {}", e); + Err(format!("pixi install failed: {}", e).into()) + } + } + } +} + +fn bench_lockfile_small(c: &mut Criterion) { + let packages = ["numpy"]; + + let mut group = c.benchmark_group("small_lockfile_installs"); + group.measurement_time(Duration::from_secs(30)); // Allow 30 seconds for measurements + group.sample_size(20); // Increase sample size to meet criterion requirements + 
group.warm_up_time(Duration::from_secs(5)); // Warm up time + + // Install with lockfile - should be faster + group.bench_function("with_lockfile_small", |b| { + b.to_async(&*RUNTIME).iter(|| async { + let mut env = IsolatedPixiEnv::new().expect("Failed to create isolated environment"); + let duration = env + .pixi_install_with_lockfile(&packages) + .await + .expect("Failed to time pixi install with lockfile"); + black_box(duration) + }) + }); + + // Install without lockfile - requires dependency resolution + group.bench_function("without_lockfile_small", |b| { + b.to_async(&*RUNTIME).iter(|| async { + let mut env = IsolatedPixiEnv::new().expect("Failed to create isolated environment"); + let duration = env + .pixi_install_without_lockfile(&packages) + .await + .expect("Failed to time pixi install without lockfile"); + black_box(duration) + }) + }); +} + +fn bench_lockfile_medium(c: &mut Criterion) { + let packages = ["numpy", "pandas", "requests", "click", "pyyaml"]; + + let mut group = c.benchmark_group("medium_lockfile_installs"); + group.measurement_time(Duration::from_secs(60)); // 1 minute + group.sample_size(15); // Increase sample size to meet criterion requirements + group.warm_up_time(Duration::from_secs(10)); + + group.bench_function("with_lockfile_medium", |b| { + b.to_async(&*RUNTIME).iter(|| async { + let mut env = IsolatedPixiEnv::new().expect("Failed to create isolated environment"); + let duration = env + .pixi_install_with_lockfile(&packages) + .await + .expect("Failed to time pixi install with lockfile"); + black_box(duration) + }) + }); + + group.bench_function("without_lockfile_medium", |b| { + b.to_async(&*RUNTIME).iter(|| async { + let mut env = IsolatedPixiEnv::new().expect("Failed to create isolated environment"); + let duration = env + .pixi_install_without_lockfile(&packages) + .await + .expect("Failed to time pixi install without lockfile"); + black_box(duration) + }) + }); +} + +fn bench_lockfile_large(c: &mut Criterion) { + let 
packages = [ + "pytorch", + "scipy", + "scikit-learn", + "matplotlib", + "jupyter", + "bokeh", + "dask", + "xarray", + "opencv", + "pandas", + ]; + + let mut group = c.benchmark_group("large_lockfile_installs"); + group.measurement_time(Duration::from_secs(120)); // 2 minutes + group.sample_size(10); // Minimum sample size to meet criterion requirements + group.warm_up_time(Duration::from_secs(15)); + + group.bench_function("with_lockfile_large", |b| { + b.to_async(&*RUNTIME).iter(|| async { + let mut env = IsolatedPixiEnv::new().expect("Failed to create isolated environment"); + let duration = env + .pixi_install_with_lockfile(&packages) + .await + .expect("Failed to time pixi install with lockfile"); + black_box(duration) + }) + }); + + group.bench_function("without_lockfile_large", |b| { + b.to_async(&*RUNTIME).iter(|| async { + let mut env = IsolatedPixiEnv::new().expect("Failed to create isolated environment"); + let duration = env + .pixi_install_without_lockfile(&packages) + .await + .expect("Failed to time pixi install without lockfile"); + black_box(duration) + }) + }); +} + +criterion_group!( + benches, + bench_lockfile_small, + bench_lockfile_medium, + bench_lockfile_large +); +criterion_main!(benches); diff --git a/crates/pixi_bench/benches/task_run.rs b/crates/pixi_bench/benches/task_run.rs new file mode 100644 index 0000000000..3be85f8f62 --- /dev/null +++ b/crates/pixi_bench/benches/task_run.rs @@ -0,0 +1,359 @@ +use criterion::{black_box, criterion_group, criterion_main, Criterion}; +use fs_err as fs; +use once_cell::sync::Lazy; +use std::collections::HashMap; +use std::path::PathBuf; +use std::time::{Duration, Instant}; +use tempfile::TempDir; + +// Pixi crate imports for direct API usage +use pixi_cli::run; +use pixi_config::ConfigCli; + +// Single global runtime for all benchmarks +static RUNTIME: Lazy = + Lazy::new(|| tokio::runtime::Runtime::new().expect("Failed to create Tokio runtime")); + +/// Create an isolated pixi environment for task 
runner testing +struct IsolatedPixiEnv { + _temp_dir: TempDir, + cache_dir: PathBuf, + home_dir: PathBuf, + project_dir: PathBuf, + project_created: bool, +} + +impl IsolatedPixiEnv { + fn new() -> Result> { + let temp_dir = TempDir::new()?; + let base_path = temp_dir.path(); + + let cache_dir = base_path.join("pixi_cache"); + let home_dir = base_path.join("pixi_home"); + let project_dir = base_path.join("project"); + + fs::create_dir_all(&cache_dir)?; + fs::create_dir_all(&home_dir)?; + fs::create_dir_all(&project_dir)?; + + Ok(Self { + _temp_dir: temp_dir, + cache_dir, + home_dir, + project_dir, + project_created: false, + }) + } + + fn get_env_vars(&self) -> HashMap { + let mut env_vars = HashMap::new(); + env_vars.insert( + "PIXI_CACHE_DIR".to_string(), + self.cache_dir.to_string_lossy().to_string(), + ); + env_vars.insert( + "PIXI_HOME".to_string(), + self.home_dir.to_string_lossy().to_string(), + ); + env_vars.insert( + "XDG_CACHE_HOME".to_string(), + self.cache_dir.to_string_lossy().to_string(), + ); + env_vars + } + + /// Create pixi project with tasks + fn create_pixi_project_with_tasks( + &mut self, + _packages: &[&str], + task_type: TaskType, + ) -> Result<(), Box> { + use std::fs::File; + use std::io::Write; + + // Create a minimal pixi.toml without external dependencies to avoid platform issues + let mut pixi_toml = r#"[project] +name = "task-benchmark-project" +version = "0.1.0" +description = "Benchmark project for pixi task runner testing" +channels = ["conda-forge"] +platforms = ["osx-arm64", "linux-64", "win-64"] + +[dependencies] +# No external dependencies to avoid platform resolution issues + +"# + .to_string(); + + // Add tasks based on the task type + pixi_toml.push_str("\n[tasks]\n"); + match task_type { + TaskType::Simple => { + pixi_toml.push_str( + r#"simple = "echo 'Hello from simple task'" +simple-with-args = "echo 'Task with args:' $@" +"#, + ); + } + TaskType::Complex => { + pixi_toml.push_str(r#"complex = "echo 'Starting complex 
task' && sleep 0.1 && echo 'Complex task completed'" +multi-step = "echo 'Step 1: Preparation' && echo 'Step 2: Processing' && echo 'Step 3: Cleanup'" +"#); + } + TaskType::WithDependencies => { + pixi_toml.push_str( + r#"prepare = "echo 'Preparing...'" +build = { cmd = "echo 'Building...'", depends-on = ["prepare"] } +test = { cmd = "echo 'Testing...'", depends-on = ["build"] } +deploy = { cmd = "echo 'Deploying...'", depends-on = ["test"] } +"#, + ); + } + TaskType::Python => { + pixi_toml.push_str( + r#"shell-simple = "echo 'Hello from shell task'" +shell-version = "echo 'Shell version check'" +shell-script = "echo 'Running shell script' && date" +"#, + ); + } + } + + let mut file = File::create(self.project_dir.join("pixi.toml"))?; + file.write_all(pixi_toml.as_bytes())?; + + self.project_created = true; + Ok(()) + } + + /// Run a pixi task and measure execution time + async fn run_pixi_task( + &mut self, + packages: &[&str], + task_type: TaskType, + task_name: &str, + task_args: Vec, + ) -> Result> { + // Create project if not already created + if !self.project_created { + self.create_pixi_project_with_tasks(packages, task_type)?; + } + + // Set environment variables for pixi + for (key, value) in self.get_env_vars() { + std::env::set_var(key, value); + } + + // Change to project directory + let original_dir = std::env::current_dir()?; + std::env::set_current_dir(&self.project_dir)?; + + let start = Instant::now(); + + // Create run arguments + let mut task_cmd = vec![task_name.to_string()]; + task_cmd.extend(task_args); + + let run_args = run::Args { + task: task_cmd, + workspace_config: pixi_cli::cli_config::WorkspaceConfig::default(), + lock_and_install_config: pixi_cli::cli_config::LockAndInstallConfig::default(), + config: ConfigCli::default(), + activation_config: pixi_config::ConfigCliActivation::default(), + environment: None, + clean_env: false, + skip_deps: false, + dry_run: false, + help: None, + h: None, + }; + + // Execute pixi run directly + let 
result = run::execute(run_args).await; + + // Restore original directory + std::env::set_current_dir(original_dir)?; + + match result { + Ok(_) => { + let duration = start.elapsed(); + println!( + "✅ Task '{}' completed in {:.2}s", + task_name, + duration.as_secs_f64() + ); + Ok(duration) + } + Err(e) => { + println!("❌ Task '{}' failed: {}", task_name, e); + Err(format!("Task '{}' failed: {}", task_name, e).into()) + } + } + } +} + +#[derive(Debug, Clone, Copy)] +enum TaskType { + Simple, + Complex, + WithDependencies, + Python, +} + +fn bench_simple_tasks(c: &mut Criterion) { + let packages = []; + + let mut group = c.benchmark_group("simple_task_execution"); + group.measurement_time(Duration::from_secs(30)); + group.sample_size(15); + group.warm_up_time(Duration::from_secs(5)); + + // Simple echo task + group.bench_function("simple_echo", |b| { + b.to_async(&*RUNTIME).iter(|| async { + let mut env = IsolatedPixiEnv::new().expect("Failed to create isolated environment"); + let duration = env + .run_pixi_task(&packages, TaskType::Simple, "simple", vec![]) + .await + .expect("Failed to run simple task"); + black_box(duration) + }) + }); + + // Simple task with arguments + group.bench_function("simple_with_args", |b| { + b.to_async(&*RUNTIME).iter(|| async { + let mut env = IsolatedPixiEnv::new().expect("Failed to create isolated environment"); + let duration = env + .run_pixi_task( + &packages, + TaskType::Simple, + "simple-with-args", + vec!["arg1".to_string(), "arg2".to_string()], + ) + .await + .expect("Failed to run simple task with args"); + black_box(duration) + }) + }); +} + +fn bench_complex_tasks(c: &mut Criterion) { + let packages = []; + + let mut group = c.benchmark_group("complex_task_execution"); + group.measurement_time(Duration::from_secs(45)); + group.sample_size(12); + group.warm_up_time(Duration::from_secs(5)); + + // Complex task with multiple commands + group.bench_function("complex_multi_command", |b| { + b.to_async(&*RUNTIME).iter(|| async { 
+ let mut env = IsolatedPixiEnv::new().expect("Failed to create isolated environment"); + let duration = env + .run_pixi_task(&packages, TaskType::Complex, "complex", vec![]) + .await + .expect("Failed to run complex task"); + black_box(duration) + }) + }); + + // Multi-step task + group.bench_function("multi_step_task", |b| { + b.to_async(&*RUNTIME).iter(|| async { + let mut env = IsolatedPixiEnv::new().expect("Failed to create isolated environment"); + let duration = env + .run_pixi_task(&packages, TaskType::Complex, "multi-step", vec![]) + .await + .expect("Failed to run multi-step task"); + black_box(duration) + }) + }); +} + +fn bench_dependency_tasks(c: &mut Criterion) { + let packages = []; + + let mut group = c.benchmark_group("dependency_task_execution"); + group.measurement_time(Duration::from_secs(60)); + group.sample_size(10); + group.warm_up_time(Duration::from_secs(5)); + + // Task with dependencies (should run prepare -> build -> test -> deploy) + group.bench_function("task_with_dependencies", |b| { + b.to_async(&*RUNTIME).iter(|| async { + let mut env = IsolatedPixiEnv::new().expect("Failed to create isolated environment"); + let duration = env + .run_pixi_task(&packages, TaskType::WithDependencies, "deploy", vec![]) + .await + .expect("Failed to run task with dependencies"); + black_box(duration) + }) + }); + + // Single dependency task + group.bench_function("single_dependency", |b| { + b.to_async(&*RUNTIME).iter(|| async { + let mut env = IsolatedPixiEnv::new().expect("Failed to create isolated environment"); + let duration = env + .run_pixi_task(&packages, TaskType::WithDependencies, "build", vec![]) + .await + .expect("Failed to run task with single dependency"); + black_box(duration) + }) + }); +} + +fn bench_python_tasks(c: &mut Criterion) { + let packages = []; + + let mut group = c.benchmark_group("shell_task_execution"); + group.measurement_time(Duration::from_secs(30)); + group.sample_size(15); + 
group.warm_up_time(Duration::from_secs(5)); + + // Simple shell task + group.bench_function("shell_simple", |b| { + b.to_async(&*RUNTIME).iter(|| async { + let mut env = IsolatedPixiEnv::new().expect("Failed to create isolated environment"); + let duration = env + .run_pixi_task(&packages, TaskType::Python, "shell-simple", vec![]) + .await + .expect("Failed to run shell simple task"); + black_box(duration) + }) + }); + + // Shell version check + group.bench_function("shell_version", |b| { + b.to_async(&*RUNTIME).iter(|| async { + let mut env = IsolatedPixiEnv::new().expect("Failed to create isolated environment"); + let duration = env + .run_pixi_task(&packages, TaskType::Python, "shell-version", vec![]) + .await + .expect("Failed to run shell version task"); + black_box(duration) + }) + }); + + // Shell script execution + group.bench_function("shell_script", |b| { + b.to_async(&*RUNTIME).iter(|| async { + let mut env = IsolatedPixiEnv::new().expect("Failed to create isolated environment"); + let duration = env + .run_pixi_task(&packages, TaskType::Python, "shell-script", vec![]) + .await + .expect("Failed to run shell script task"); + black_box(duration) + }) + }); +} + +criterion_group!( + benches, + bench_simple_tasks, + bench_complex_tasks, + bench_dependency_tasks, + bench_python_tasks +); +criterion_main!(benches); diff --git a/crates/pixi_bench/my-local-channel/noarch/bokeh-1.0.0-py_0.tar.bz2 b/crates/pixi_bench/my-local-channel/noarch/bokeh-1.0.0-py_0.tar.bz2 new file mode 100644 index 0000000000..0aff836e55 Binary files /dev/null and b/crates/pixi_bench/my-local-channel/noarch/bokeh-1.0.0-py_0.tar.bz2 differ diff --git a/crates/pixi_bench/my-local-channel/noarch/click-1.0.0-py_0.tar.bz2 b/crates/pixi_bench/my-local-channel/noarch/click-1.0.0-py_0.tar.bz2 new file mode 100644 index 0000000000..6cada3c788 Binary files /dev/null and b/crates/pixi_bench/my-local-channel/noarch/click-1.0.0-py_0.tar.bz2 differ diff --git 
a/crates/pixi_bench/my-local-channel/noarch/dask-1.0.0-py_0.tar.bz2 b/crates/pixi_bench/my-local-channel/noarch/dask-1.0.0-py_0.tar.bz2 new file mode 100644 index 0000000000..ddd2b1d4f6 Binary files /dev/null and b/crates/pixi_bench/my-local-channel/noarch/dask-1.0.0-py_0.tar.bz2 differ diff --git a/crates/pixi_bench/my-local-channel/noarch/jupyter-1.0.0-py_0.tar.bz2 b/crates/pixi_bench/my-local-channel/noarch/jupyter-1.0.0-py_0.tar.bz2 new file mode 100644 index 0000000000..40d111105f Binary files /dev/null and b/crates/pixi_bench/my-local-channel/noarch/jupyter-1.0.0-py_0.tar.bz2 differ diff --git a/crates/pixi_bench/my-local-channel/noarch/matplotlib-1.0.0-py_0.tar.bz2 b/crates/pixi_bench/my-local-channel/noarch/matplotlib-1.0.0-py_0.tar.bz2 new file mode 100644 index 0000000000..e4a250e8d4 Binary files /dev/null and b/crates/pixi_bench/my-local-channel/noarch/matplotlib-1.0.0-py_0.tar.bz2 differ diff --git a/crates/pixi_bench/my-local-channel/noarch/numpy-1.0.0-py_0.tar.bz2 b/crates/pixi_bench/my-local-channel/noarch/numpy-1.0.0-py_0.tar.bz2 new file mode 100644 index 0000000000..1aee12b45c Binary files /dev/null and b/crates/pixi_bench/my-local-channel/noarch/numpy-1.0.0-py_0.tar.bz2 differ diff --git a/crates/pixi_bench/my-local-channel/noarch/opencv-1.0.0-py_0.tar.bz2 b/crates/pixi_bench/my-local-channel/noarch/opencv-1.0.0-py_0.tar.bz2 new file mode 100644 index 0000000000..09a1b6b6b1 Binary files /dev/null and b/crates/pixi_bench/my-local-channel/noarch/opencv-1.0.0-py_0.tar.bz2 differ diff --git a/crates/pixi_bench/my-local-channel/noarch/pandas-1.0.0-py_0.tar.bz2 b/crates/pixi_bench/my-local-channel/noarch/pandas-1.0.0-py_0.tar.bz2 new file mode 100644 index 0000000000..8e0c5ff500 Binary files /dev/null and b/crates/pixi_bench/my-local-channel/noarch/pandas-1.0.0-py_0.tar.bz2 differ diff --git a/crates/pixi_bench/my-local-channel/noarch/pytorch-1.0.0-py_0.tar.bz2 b/crates/pixi_bench/my-local-channel/noarch/pytorch-1.0.0-py_0.tar.bz2 new file mode 100644 
index 0000000000..cce603991c Binary files /dev/null and b/crates/pixi_bench/my-local-channel/noarch/pytorch-1.0.0-py_0.tar.bz2 differ diff --git a/crates/pixi_bench/my-local-channel/noarch/pyyaml-1.0.0-py_0.tar.bz2 b/crates/pixi_bench/my-local-channel/noarch/pyyaml-1.0.0-py_0.tar.bz2 new file mode 100644 index 0000000000..8fb7bbe209 Binary files /dev/null and b/crates/pixi_bench/my-local-channel/noarch/pyyaml-1.0.0-py_0.tar.bz2 differ diff --git a/crates/pixi_bench/my-local-channel/noarch/repodata.json b/crates/pixi_bench/my-local-channel/noarch/repodata.json new file mode 100644 index 0000000000..1e16e12699 --- /dev/null +++ b/crates/pixi_bench/my-local-channel/noarch/repodata.json @@ -0,0 +1,164 @@ +{ + "info": { + "subdir": "noarch" + }, + "packages": { + "numpy-1.0.0-py_0.tar.bz2": { + "build": "py_0", + "build_number": 0, + "depends": [], + "license": "MIT", + "name": "numpy", + "platform": null, + "subdir": "noarch", + "timestamp": 1640995200000, + "version": "1.0.0" + }, + "pandas-1.0.0-py_0.tar.bz2": { + "build": "py_0", + "build_number": 0, + "depends": [], + "license": "MIT", + "name": "pandas", + "platform": null, + "subdir": "noarch", + "timestamp": 1640995200000, + "version": "1.0.0" + }, + "requests-1.0.0-py_0.tar.bz2": { + "build": "py_0", + "build_number": 0, + "depends": [], + "license": "MIT", + "name": "requests", + "platform": null, + "subdir": "noarch", + "timestamp": 1640995200000, + "version": "1.0.0" + }, + "click-1.0.0-py_0.tar.bz2": { + "build": "py_0", + "build_number": 0, + "depends": [], + "license": "MIT", + "name": "click", + "platform": null, + "subdir": "noarch", + "timestamp": 1640995200000, + "version": "1.0.0" + }, + "pyyaml-1.0.0-py_0.tar.bz2": { + "build": "py_0", + "build_number": 0, + "depends": [], + "license": "MIT", + "name": "pyyaml", + "platform": null, + "subdir": "noarch", + "timestamp": 1640995200000, + "version": "1.0.0" + }, + "pytorch-1.0.0-py_0.tar.bz2": { + "build": "py_0", + "build_number": 0, + "depends": [], + 
"license": "MIT", + "name": "pytorch", + "platform": null, + "subdir": "noarch", + "timestamp": 1640995200000, + "version": "1.0.0" + }, + "scipy-1.0.0-py_0.tar.bz2": { + "build": "py_0", + "build_number": 0, + "depends": [], + "license": "MIT", + "name": "scipy", + "platform": null, + "subdir": "noarch", + "timestamp": 1640995200000, + "version": "1.0.0" + }, + "scikit-learn-1.0.0-py_0.tar.bz2": { + "build": "py_0", + "build_number": 0, + "depends": [], + "license": "MIT", + "name": "scikit-learn", + "platform": null, + "subdir": "noarch", + "timestamp": 1640995200000, + "version": "1.0.0" + }, + "matplotlib-1.0.0-py_0.tar.bz2": { + "build": "py_0", + "build_number": 0, + "depends": [], + "license": "MIT", + "name": "matplotlib", + "platform": null, + "subdir": "noarch", + "timestamp": 1640995200000, + "version": "1.0.0" + }, + "jupyter-1.0.0-py_0.tar.bz2": { + "build": "py_0", + "build_number": 0, + "depends": [], + "license": "MIT", + "name": "jupyter", + "platform": null, + "subdir": "noarch", + "timestamp": 1640995200000, + "version": "1.0.0" + }, + "bokeh-1.0.0-py_0.tar.bz2": { + "build": "py_0", + "build_number": 0, + "depends": [], + "license": "MIT", + "name": "bokeh", + "platform": null, + "subdir": "noarch", + "timestamp": 1640995200000, + "version": "1.0.0" + }, + "dask-1.0.0-py_0.tar.bz2": { + "build": "py_0", + "build_number": 0, + "depends": [], + "license": "MIT", + "name": "dask", + "platform": null, + "subdir": "noarch", + "timestamp": 1640995200000, + "version": "1.0.0" + }, + "xarray-1.0.0-py_0.tar.bz2": { + "build": "py_0", + "build_number": 0, + "depends": [], + "license": "MIT", + "name": "xarray", + "platform": null, + "subdir": "noarch", + "timestamp": 1640995200000, + "version": "1.0.0" + }, + "opencv-1.0.0-py_0.tar.bz2": { + "build": "py_0", + "build_number": 0, + "depends": [], + "license": "MIT", + "name": "opencv", + "platform": null, + "subdir": "noarch", + "timestamp": 1640995200000, + "version": "1.0.0" + } + }, + "packages.conda": 
{}, + "removed": [], + "repodata_version": 1 +} diff --git a/crates/pixi_bench/my-local-channel/noarch/requests-1.0.0-py_0.tar.bz2 b/crates/pixi_bench/my-local-channel/noarch/requests-1.0.0-py_0.tar.bz2 new file mode 100644 index 0000000000..e44e9f63ea Binary files /dev/null and b/crates/pixi_bench/my-local-channel/noarch/requests-1.0.0-py_0.tar.bz2 differ diff --git a/crates/pixi_bench/my-local-channel/noarch/scikit-learn-1.0.0-py_0.tar.bz2 b/crates/pixi_bench/my-local-channel/noarch/scikit-learn-1.0.0-py_0.tar.bz2 new file mode 100644 index 0000000000..05dd530b0b Binary files /dev/null and b/crates/pixi_bench/my-local-channel/noarch/scikit-learn-1.0.0-py_0.tar.bz2 differ diff --git a/crates/pixi_bench/my-local-channel/noarch/scipy-1.0.0-py_0.tar.bz2 b/crates/pixi_bench/my-local-channel/noarch/scipy-1.0.0-py_0.tar.bz2 new file mode 100644 index 0000000000..44a50a3491 Binary files /dev/null and b/crates/pixi_bench/my-local-channel/noarch/scipy-1.0.0-py_0.tar.bz2 differ diff --git a/crates/pixi_bench/my-local-channel/noarch/xarray-1.0.0-py_0.tar.bz2 b/crates/pixi_bench/my-local-channel/noarch/xarray-1.0.0-py_0.tar.bz2 new file mode 100644 index 0000000000..7cebecb4a5 Binary files /dev/null and b/crates/pixi_bench/my-local-channel/noarch/xarray-1.0.0-py_0.tar.bz2 differ diff --git a/pixi.toml b/pixi.toml index acab25a246..1a2462498d 100644 --- a/pixi.toml +++ b/pixi.toml @@ -40,7 +40,7 @@ test-all-extra-slow = { depends-on = [ test-all-fast = { depends-on = ["test-fast", "test-integration-fast"] } test-all-slow = { depends-on = ["test-slow", "test-integration-slow"] } test-fast = """RUST_LOG="debug,resolvo=info" cargo nextest run --workspace --all-targets""" -test-slow = """RUST_LOG="debug,resolvo=info" cargo nextest run --workspace --all-targets --features slow_integration_tests,online_tests +test-slow = """RUST_LOG="debug,resolvo=info" cargo nextest run --workspace --all-targets --features slow_integration_tests,online_tests --exclude pixi_bench --status-level skip 
--failure-output immediate-final --no-fail-fast --final-status-level slow""" [feature.pytest.dependencies]