diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 000000000..678cb6151 --- /dev/null +++ b/.dockerignore @@ -0,0 +1,3 @@ +docs/ +tests/ +tests_integration/ diff --git a/.gitattributes b/.gitattributes index 00a7b00c9..27e9fb15d 100644 --- a/.gitattributes +++ b/.gitattributes @@ -1 +1,2 @@ .git_archival.txt export-subst +conda-lock.yml merge=binary linguist-generated=true diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS new file mode 100644 index 000000000..e96011d08 --- /dev/null +++ b/.github/CODEOWNERS @@ -0,0 +1,10 @@ +.github/workflows/test-integration.yml @ytausch +conda_forge_tick/contexts.py @ytausch +conda_forge_tick/git_utils.py @ytausch +conda_forge_tick/models/* @ytausch +tests/github_api/* @ytausch +tests/conda_forge_tick/* @ytausch +tests/model/* @ytausch +tests/test_contexts.py @ytausch +tests/test_git_utils.py @ytausch +tests_integration/* @ytausch diff --git a/.github/workflows/bot-cache.yml b/.github/workflows/bot-cache.yml index f3681ed6f..30eb45930 100644 --- a/.github/workflows/bot-cache.yml +++ b/.github/workflows/bot-cache.yml @@ -22,7 +22,7 @@ jobs: steps: - name: run cache if: success() && ! env.CI_SKIP - uses: actions/cache@d4323d4df104b026a6aa633fdb11d772146be0bf # v4.2.2 + uses: actions/cache@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3 with: path: cf-graph.tar.zstd key: cf-graph-tzstd-${{ github.run_id }}-${{ github.run_attempt }}-${{ matrix.version }} diff --git a/.github/workflows/keepalive.yml b/.github/workflows/keepalive.yml deleted file mode 100644 index c4d1bf34a..000000000 --- a/.github/workflows/keepalive.yml +++ /dev/null @@ -1,33 +0,0 @@ -name: keepalive -on: - schedule: - - cron: "0 0 * * *" - -permissions: - contents: write - -jobs: - cronjob-based-github-action: - name: Cronjob based github action - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - - - uses: mamba-org/setup-micromamba@0dea6379afdaffa5d528b3d1dabc45da37f443fc # v2.0.4 - with: - environment-file: conda-lock.yml - environment-name: cf-scripts - condarc-file: autotick-bot/condarc - - - name: stop me if needed - run: | - python autotick-bot/stop_me_if_needed.py - - - uses: gautamkrishnar/keepalive-workflow@75c59336a3a6589777ab715d83668bb3033c68b2 # v2.0.9 - if: ${{ ! env.CI_SKIP }} - with: - commit_message: "Ah ah ah, stayin' alive" - committer_username: conda-forge-bot - committer_email: "conda-forge-bot@users.noreply.github.com" - time_elapsed: 50 # days - use_api: false diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 096db8146..fc028cb4b 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -48,7 +48,7 @@ jobs: echo "NEXT=${NEXT}" >> "$GITHUB_OUTPUT" - name: log into ghcr.io - uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0 + uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0 if: ${{ ! env.CI_SKIP }} with: registry: ${{ env.REGISTRY }} @@ -68,7 +68,7 @@ jobs: type=raw,value=latest - name: build and push image - uses: docker/build-push-action@471d1dc4e07e5cdedd4c2171150001c434f0b7a4 # v6.15.0 + uses: docker/build-push-action@14487ce63c7a62a4a324b0bfb37086795e31c6c1 # v6.16.0 if: ${{ ! env.CI_SKIP }} with: context: . 
diff --git a/.github/workflows/test-integration.yml b/.github/workflows/test-integration.yml new file mode 100644 index 000000000..c039eb1dc --- /dev/null +++ b/.github/workflows/test-integration.yml @@ -0,0 +1,90 @@ +name: Integration Tests +on: + pull_request: + branches: + - main + merge_group: null + + +# Integration tests interact with GitHub resources in the integration test infrastructure and therefore +# cannot run concurrently with other integration tests. +concurrency: + group: cf-scripts-integration-tests + cancel-in-progress: false + +defaults: + run: + shell: bash -leo pipefail {0} + +jobs: + integration-tests: + name: Run Integration Tests + # if triggered by pull_request, only run on non-fork PRs (secrets access needed) + # Nevertheless, this check is always run in the merge queue. + if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == github.repository + runs-on: ubuntu-latest + steps: + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0 + + - name: Checkout + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + path: cf-scripts + submodules: 'true' + + - name: Build Docker Image + uses: docker/build-push-action@471d1dc4e07e5cdedd4c2171150001c434f0b7a4 # v6.15.0 + with: + context: cf-scripts + push: false + load: true + tags: conda-forge-tick:test + + - name: Setup micromamba + uses: mamba-org/setup-micromamba@0dea6379afdaffa5d528b3d1dabc45da37f443fc # v2.0.4 + with: + environment-file: cf-scripts/conda-lock.yml + environment-name: cf-scripts + condarc-file: cf-scripts/autotick-bot/condarc + + - name: Run pip install + working-directory: cf-scripts + run: | + pip install --no-deps --no-build-isolation -e . + + - name: Run mitmproxy certificate setup wizard + working-directory: cf-scripts + run: | + # place a script in the mitmproxy directory that will be run by the setup wizard + # to trust the mitmproxy certificate + cat <<EOF > ./tests_integration/.mitmproxy/${{ env.MITMPROXY_WIZARD_HEADLESS_TRUST_SCRIPT }} + #!/usr/bin/env bash + set -euo pipefail + sudo cp "\$1" /usr/local/share/ca-certificates/mitmproxy.crt + sudo update-ca-certificates + EOF + + ./tests_integration/mitmproxy_setup_wizard.sh + env: + MITMPROXY_WIZARD_HEADLESS: true + MITMPROXY_WIZARD_HEADLESS_TRUST_SCRIPT: mitmproxy_trust_script.sh + + - name: Set up git identity + run: | + git config --global user.name "regro-cf-autotick-bot-staging" + git config --global user.email "regro-cf-autotick-bot-staging@users.noreply.github.com" + + - name: Run Integration Tests with pytest + working-directory: cf-scripts + run: | + pytest -s -v \ + --dist=no \ + tests_integration + env: + BOT_TOKEN: ${{ secrets.GH_TOKEN_STAGING_BOT_USER }} + TEST_SETUP_TOKEN: ${{ secrets.GH_TOKEN_STAGING_BOT_USER }} + + - name: Print Proxy Logs + run: cat /tmp/mitmproxy.log + if: always() diff --git a/.github/workflows/tests-reusable.yml b/.github/workflows/tests-reusable.yml index 366f47002..e210861d3 100644 --- a/.github/workflows/tests-reusable.yml +++ b/.github/workflows/tests-reusable.yml @@ -55,7 +55,7 @@ jobs: - name: download lockfile if it is an artifact if: ${{ inputs.lockfile-is-artifact }} - uses: actions/download-artifact@cc203385981b70ca67e1cc392babf9cc229d5806 # v4.1.9 + uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0 with: name: ${{ inputs.lockfile }} path: input-lockfile @@ -110,7 +110,7 @@ jobs: if: ${{ !
env.CI_SKIP }} - name: build docker image - uses: docker/build-push-action@471d1dc4e07e5cdedd4c2171150001c434f0b7a4 # v6.15.0 + uses: docker/build-push-action@14487ce63c7a62a4a324b0bfb37086795e31c6c1 # v6.16.0 if: ${{ ! env.CI_SKIP }} with: context: . @@ -119,7 +119,7 @@ jobs: tags: ${{ env.IMAGE_NAME }}:test - name: restore test durations - uses: actions/cache/restore@d4323d4df104b026a6aa633fdb11d772146be0bf # v4.2.2 + uses: actions/cache/restore@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3 if: ${{ ! env.CI_SKIP }} with: path: .test_durations @@ -157,7 +157,7 @@ jobs: BOT_TOKEN: ${{ secrets.BOT_TOKEN }} - name: upload test durations - uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4.6.1 + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 if: ${{ ! env.CI_SKIP }} with: name: test-durations-${{ matrix.group }} @@ -165,7 +165,7 @@ jobs: include-hidden-files: true - name: upload coverage - uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4.6.1 + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 if: ${{ ! env.CI_SKIP }} with: name: coverage-${{ matrix.group }} @@ -201,7 +201,7 @@ jobs: - name: download lockfile if it is an artifact if: ${{ inputs.lockfile-is-artifact }} - uses: actions/download-artifact@cc203385981b70ca67e1cc392babf9cc229d5806 # v4.1.9 + uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0 with: name: ${{ inputs.lockfile }} path: input-lockfile @@ -265,7 +265,7 @@ jobs: MONGODB_CONNECTION_STRING: "mongodb://127.0.0.1:27017/?directConnection=true&serverSelectionTimeoutMS=2000" - name: restore test durations - uses: actions/cache/restore@d4323d4df104b026a6aa633fdb11d772146be0bf # v4.2.2 + uses: actions/cache/restore@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3 if: ${{ ! env.CI_SKIP }} with: path: .test_durations @@ -305,7 +305,7 @@ jobs: BOT_TOKEN: ${{ secrets.BOT_TOKEN }} - name: upload test durations - uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4.6.1 + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 if: ${{ ! env.CI_SKIP }} with: name: test-durations-mongodb @@ -313,7 +313,7 @@ jobs: include-hidden-files: true - name: upload coverage - uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4.6.1 + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 if: ${{ ! env.CI_SKIP }} with: name: coverage-mongodb @@ -342,7 +342,7 @@ jobs: - name: download lockfile if it is an artifact if: ${{ inputs.lockfile-is-artifact }} - uses: actions/download-artifact@cc203385981b70ca67e1cc392babf9cc229d5806 # v4.1.9 + uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0 with: name: ${{ inputs.lockfile }} path: input-lockfile @@ -363,7 +363,7 @@ jobs: python autotick-bot/stop_me_if_needed.py - name: download coverage artifacts - uses: actions/download-artifact@cc203385981b70ca67e1cc392babf9cc229d5806 # v4.1.9 + uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0 if: ${{ ! env.CI_SKIP }} with: pattern: coverage-* @@ -375,13 +375,13 @@ jobs: coverage xml - name: upload codecov - uses: codecov/codecov-action@0565863a31f2c772f9f0395002a31e3f06189574 # v5.1.8 + uses: codecov/codecov-action@ad3126e916f78f00edff4ed0317cf185271ccc2d # v5.1.8 if: ${{ ! 
env.CI_SKIP }} with: token: ${{ secrets.CODECOV_TOKEN }} - name: cache test durations - uses: actions/cache@d4323d4df104b026a6aa633fdb11d772146be0bf # v4.2.2 + uses: actions/cache@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3 if: ${{ ! env.CI_SKIP }} with: path: .test_durations @@ -391,7 +391,7 @@ jobs: test-durations- - name: download test duration artifacts - uses: actions/download-artifact@cc203385981b70ca67e1cc392babf9cc229d5806 # v4.1.9 + uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0 if: ${{ ! env.CI_SKIP }} with: pattern: test-durations-* @@ -403,9 +403,24 @@ jobs: jq 'reduce inputs as $i (.; . + $i)' test-durations-*/.test_durations.* > .test_durations - name: upload test durations - uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4.6.1 + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 if: ${{ ! env.CI_SKIP }} with: name: test-durations path: .test_durations include-hidden-files: true + + # this little bit follows the same pattern from conda-build + status: + needs: + - tests + - tests-mongodb + - test-coverage-and-durations + if: '!cancelled()' + + runs-on: ubuntu-latest + steps: + - name: determine success + uses: re-actors/alls-green@05ac9388f0aebcb5727afa17fcccfecd6f8ec5fe # v1.2.2 + with: + jobs: ${{ toJSON(needs) }} diff --git a/.gitignore b/.gitignore index 65785e3a4..1485c489b 100644 --- a/.gitignore +++ b/.gitignore @@ -21,6 +21,7 @@ docs/_build/ node_attrs/* graph.json +!tests_integration/resources/empty-graph/graph.json pr_json/* pr_status/* status/* @@ -59,3 +60,6 @@ pixi.lock pixi.toml .ruff_cache/ .repodata_cache/ +venv +oryx-build-commands.txt +.env diff --git a/.gitmodules b/.gitmodules new file mode 100644 index 000000000..2167a7d22 --- /dev/null +++ b/.gitmodules @@ -0,0 +1,9 @@ +[submodule "tests_integration/definitions/conda_forge_pinning/resources/feedstock"] + path = tests_integration/lib/_definitions/conda_forge_pinning/resources/feedstock + url = https://github.com/conda-forge/conda-forge-pinning-feedstock.git +[submodule "tests_integration/definitions/pydantic/resources/feedstock"] + path = tests_integration/lib/_definitions/pydantic/resources/feedstock + url = https://github.com/conda-forge/pydantic-feedstock.git +[submodule "tests_integration/definitions/pydantic/resources/feedstock_v1"] + path = tests_integration/lib/_definitions/pydantic/resources/feedstock_v1 + url = https://github.com/conda-forge/pydantic-feedstock.git diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index f9a52ab54..c3f719b9b 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -24,12 +24,17 @@ repos: - --py36-plus - repo: https://github.com/crate-ci/typos - rev: v1.30.0 + rev: v1 hooks: - id: typos + exclude: | + (?x)^( + ^tests_integration\/definitions\/.*\/resources\/.*| + ^docs\/assets\/.* + )$ - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.9.9 + rev: v0.11.8 hooks: - id: ruff args: [ --fix ] diff --git a/README.md b/README.md index 2f2963961..d6edaaad7 100644 --- a/README.md +++ b/README.md @@ -237,6 +237,8 @@ If your migrator needs special configuration, you should write a new factory fun - `CF_FEEDSTOCK_OPS_CONTAINER_NAME`: the name of the container to use in the bot, otherwise defaults to `ghcr.io/regro/conda-forge-tick` - `CF_FEEDSTOCK_OPS_CONTAINER_TAG`: set this to override the default container tag used in production runs, otherwise the value of `__version__` is used +Additional environment variables are described in [the settings 
module](conda_forge_tick/settings.py). + ### Getting a Working Environment The bot has an abstract set of requirements stored in the `environment.yml` file in this repo. @@ -299,6 +301,10 @@ Currently, the following commands are supported and tested: - `update-upstream-versions` +### Integration Tests + +See [tests_integration/README.md](tests_integration/README.md). + ### Structure of the Bot's Jobs #### History diff --git a/autotick-bot/install_bot_code.sh b/autotick-bot/install_bot_code.sh index a0963af30..e1459285a 100644 --- a/autotick-bot/install_bot_code.sh +++ b/autotick-bot/install_bot_code.sh @@ -1,4 +1,16 @@ #!/bin/bash + +# Environment Variables: +# - CF_FEEDSTOCK_OPS_CONTAINER_NAME: The name of the container image to use for the bot (optional, not used but left intact) +# - CF_FEEDSTOCK_OPS_CONTAINER_TAG: The tag of the container image to use for the bot (optional). +# - CF_TICK_GRAPH_GITHUB_BACKEND_REPO: The GitHub repository to clone cf-graph from. Default: regro/cf-graph-countyfair + +# Sets the following environment variables via GITHUB_ENV: +# - CF_FEEDSTOCK_OPS_CONTAINER_NAME (see above) +# - CF_FEEDSTOCK_OPS_CONTAINER_TAG (see above) + +set -euo pipefail + git config --global user.name regro-cf-autotick-bot git config --global user.email 36490558+regro-cf-autotick-bot@users.noreply.github.com git config --global pull.rebase false @@ -29,11 +41,15 @@ for arg in "$@"; do fi done if [[ "${clone_graph}" == "true" ]]; then - git clone --depth=5 https://github.com/regro/cf-graph-countyfair.git cf-graph + cf_graph_repo=${CF_TICK_GRAPH_GITHUB_BACKEND_REPO:-"regro/cf-graph-countyfair"} + cf_graph_remote="https://github.com/${cf_graph_repo}.git" + # please make sure the cloning depth is always identical to the one used in the integration tests (test_integration.py) + git clone --depth=5 "${cf_graph_remote}" cf-graph else echo "Skipping cloning of cf-graph" fi +docker_name=${CF_FEEDSTOCK_OPS_CONTAINER_NAME:-"ghcr.io/regro/conda-forge-tick"} bot_tag=$(python -c "import conda_forge_tick; print(conda_forge_tick.__version__)") docker_tag=${CF_FEEDSTOCK_OPS_CONTAINER_TAG:-${bot_tag}} @@ -44,11 +60,12 @@ for arg in "$@"; do fi done if [[ "${pull_cont}" == "true" ]]; then - docker pull ghcr.io/regro/conda-forge-tick:${docker_tag} + docker pull "${docker_name}:${docker_tag}" fi -export CF_FEEDSTOCK_OPS_CONTAINER_TAG=${docker_tag} -export CF_FEEDSTOCK_OPS_CONTAINER_NAME="ghcr.io/regro/conda-forge-tick" +# left intact if already set +export CF_FEEDSTOCK_OPS_CONTAINER_TAG="${docker_tag}" +export CF_FEEDSTOCK_OPS_CONTAINER_NAME="${docker_name}" echo "CF_FEEDSTOCK_OPS_CONTAINER_TAG=${CF_FEEDSTOCK_OPS_CONTAINER_TAG}" >> "$GITHUB_ENV" echo "CF_FEEDSTOCK_OPS_CONTAINER_NAME=${CF_FEEDSTOCK_OPS_CONTAINER_NAME}" >> "$GITHUB_ENV" diff --git a/conda-lock.yml b/conda-lock.yml index e2de4b091..5237e2f98 100644 --- a/conda-lock.yml +++ b/conda-lock.yml @@ -3,9 +3,9 @@ metadata: - url: conda-forge used_env_vars: [] content_hash: - linux-64: ea0be66fad42c0be775b6964370d3e0630c3edfb46574a41e2fc7c21377be6dc - osx-64: 72950d92f109f4e2d6564afa451fe7c844d672d1bc57572fa706d45f85958c5d - osx-arm64: 85ed712bf6cefe23ec9a3973cc2b4bca64aaa23adcf0a6b0e3df5edda95b6198 + linux-64: b6e1bb8b05c8e2e978adf6045bbfbf3dd667be28ad370051e7def217c59677cf + osx-64: 976ee67b87113aaa96914dd74febe852a4f446b5cfd13d7970d596ddd49f0054 + osx-arm64: 955bee3a78e887405cec719f9153364a9fb08a16cf094727fe339313c3e243ff platforms: - osx-arm64 - linux-64 @@ -43,28 +43,28 @@ package: cpython: '' python-gil: '' hash: - md5: 
72bdca5fa72b5b89fc8a86d2e98793f0 - sha256: aeee03ce021e13648c82414358616cc3edad15101ef354cae9a2d4ba3ba7a5e4 + md5: aaa2a381ccc56eac91d63b6c1240312f + sha256: a3967b937b9abf0f2a99f3173fa4630293979bd1644709d89580e7c62a544661 manager: conda name: _python_abi3_support optional: false platform: linux-64 url: - https://conda.anaconda.org/conda-forge/noarch/_python_abi3_support-1.0-hd8ed1ab_1.conda + https://conda.anaconda.org/conda-forge/noarch/_python_abi3_support-1.0-hd8ed1ab_2.conda version: '1.0' - category: main dependencies: cpython: '' python-gil: '' hash: - md5: 72bdca5fa72b5b89fc8a86d2e98793f0 - sha256: aeee03ce021e13648c82414358616cc3edad15101ef354cae9a2d4ba3ba7a5e4 + md5: aaa2a381ccc56eac91d63b6c1240312f + sha256: a3967b937b9abf0f2a99f3173fa4630293979bd1644709d89580e7c62a544661 manager: conda name: _python_abi3_support optional: false platform: osx-64 url: - https://conda.anaconda.org/conda-forge/noarch/_python_abi3_support-1.0-hd8ed1ab_1.conda + https://conda.anaconda.org/conda-forge/noarch/_python_abi3_support-1.0-hd8ed1ab_2.conda version: '1.0' - category: main dependencies: @@ -273,6 +273,139 @@ package: platform: osx-arm64 url: https://conda.anaconda.org/conda-forge/noarch/archspec-0.2.5-pyhd8ed1ab_0.conda version: 0.2.5 + - category: main + dependencies: + argon2-cffi-bindings: '' + python: '>=3.9' + typing-extensions: '' + hash: + md5: a7ee488b71c30ada51c48468337b85ba + sha256: 7af62339394986bc470a7a231c7f37ad0173ffb41f6bc0e8e31b0be9e3b9d20f + manager: conda + name: argon2-cffi + optional: false + platform: linux-64 + url: + https://conda.anaconda.org/conda-forge/noarch/argon2-cffi-23.1.0-pyhd8ed1ab_1.conda + version: 23.1.0 + - category: main + dependencies: + argon2-cffi-bindings: '' + python: '>=3.9' + typing-extensions: '' + hash: + md5: a7ee488b71c30ada51c48468337b85ba + sha256: 7af62339394986bc470a7a231c7f37ad0173ffb41f6bc0e8e31b0be9e3b9d20f + manager: conda + name: argon2-cffi + optional: false + platform: osx-64 + url: + https://conda.anaconda.org/conda-forge/noarch/argon2-cffi-23.1.0-pyhd8ed1ab_1.conda + version: 23.1.0 + - category: main + dependencies: + argon2-cffi-bindings: '' + python: '>=3.9' + typing-extensions: '' + hash: + md5: a7ee488b71c30ada51c48468337b85ba + sha256: 7af62339394986bc470a7a231c7f37ad0173ffb41f6bc0e8e31b0be9e3b9d20f + manager: conda + name: argon2-cffi + optional: false + platform: osx-arm64 + url: + https://conda.anaconda.org/conda-forge/noarch/argon2-cffi-23.1.0-pyhd8ed1ab_1.conda + version: 23.1.0 + - category: main + dependencies: + __glibc: '>=2.17,<3.0.a0' + cffi: '>=1.0.1' + libgcc: '>=13' + python: '>=3.11,<3.12.0a0' + python_abi: 3.11.* + hash: + md5: 18143eab7fcd6662c604b85850f0db1e + sha256: d1af1fbcb698c2e07b0d1d2b98384dd6021fa55c8bcb920e3652e0b0c393881b + manager: conda + name: argon2-cffi-bindings + optional: false + platform: linux-64 + url: + https://conda.anaconda.org/conda-forge/linux-64/argon2-cffi-bindings-21.2.0-py311h9ecbd09_5.conda + version: 21.2.0 + - category: main + dependencies: + __osx: '>=10.13' + cffi: '>=1.0.1' + python: '>=3.11,<3.12.0a0' + python_abi: 3.11.* + hash: + md5: 29b46bd410067f668c4cef7fdc78fe25 + sha256: fa5eb633b320e10fc2138f3d842d8a8ca72815f106acbab49a68ec9783e4d70d + manager: conda + name: argon2-cffi-bindings + optional: false + platform: osx-64 + url: + https://conda.anaconda.org/conda-forge/osx-64/argon2-cffi-bindings-21.2.0-py311h3336109_5.conda + version: 21.2.0 + - category: main + dependencies: + __osx: '>=11.0' + cffi: '>=1.0.1' + python: '>=3.11,<3.12.0a0' + python_abi: 3.11.* + hash: + 
md5: 1e8260965552c6ec86453b7d15a598de + sha256: 6eabd1bcefc235b7943688d865519577d7668a2f4dc3a24ee34d81eb4bfe77d1 + manager: conda + name: argon2-cffi-bindings + optional: false + platform: osx-arm64 + url: + https://conda.anaconda.org/conda-forge/osx-arm64/argon2-cffi-bindings-21.2.0-py311h460d6c5_5.conda + version: 21.2.0 + - category: main + dependencies: + python: '>=3.7' + typing_extensions: '>=4' + hash: + md5: 596932155bf88bb6837141550cb721b0 + sha256: 63f85717fd38912a69be5a03d35a648c404cb86843cd4a1302c380c0e7744e30 + manager: conda + name: asgiref + optional: false + platform: linux-64 + url: https://conda.anaconda.org/conda-forge/noarch/asgiref-3.7.2-pyhd8ed1ab_0.conda + version: 3.7.2 + - category: main + dependencies: + python: '>=3.7' + typing_extensions: '>=4' + hash: + md5: 596932155bf88bb6837141550cb721b0 + sha256: 63f85717fd38912a69be5a03d35a648c404cb86843cd4a1302c380c0e7744e30 + manager: conda + name: asgiref + optional: false + platform: osx-64 + url: https://conda.anaconda.org/conda-forge/noarch/asgiref-3.7.2-pyhd8ed1ab_0.conda + version: 3.7.2 + - category: main + dependencies: + python: '>=3.7' + typing_extensions: '>=4' + hash: + md5: 596932155bf88bb6837141550cb721b0 + sha256: 63f85717fd38912a69be5a03d35a648c404cb86843cd4a1302c380c0e7744e30 + manager: conda + name: asgiref + optional: false + platform: osx-arm64 + url: https://conda.anaconda.org/conda-forge/noarch/asgiref-3.7.2-pyhd8ed1ab_0.conda + version: 3.7.2 - category: main dependencies: at-spi2-core: '>=2.40.0,<2.41.0a0' @@ -541,51 +674,96 @@ package: url: https://conda.anaconda.org/conda-forge/noarch/backports.tarfile-1.2.0-pyhd8ed1ab_1.conda version: 1.2.0 + - category: main + dependencies: + __glibc: '>=2.17,<3.0.a0' + libgcc: '>=13' + python: '>=3.11,<3.12.0a0' + python_abi: 3.11.* + hash: + md5: f4bf34159bdc05e00ec3ff1335539671 + sha256: 3e3fccfe53b4973e379f1593f8cd2a6a61b0a0f30bd521bd24467f995777c315 + manager: conda + name: bcrypt + optional: false + platform: linux-64 + url: + https://conda.anaconda.org/conda-forge/linux-64/bcrypt-4.3.0-py311h9e33e62_0.conda + version: 4.3.0 + - category: main + dependencies: + __osx: '>=10.13' + python: '>=3.11,<3.12.0a0' + python_abi: 3.11.* + hash: + md5: 832437e99c434257bdc86f5ec66b7422 + sha256: 10b2cc01a0a56f9db38a5b16dc37c93b484057aa7d5d2bc071fe062e3a0a622e + manager: conda + name: bcrypt + optional: false + platform: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/bcrypt-4.3.0-py311h3b9c2be_0.conda + version: 4.3.0 + - category: main + dependencies: + __osx: '>=11.0' + python: '>=3.11,<3.12.0a0' + python_abi: 3.11.* + hash: + md5: e9e5de6d88db912afdb0f7414cf0cb8b + sha256: dd0e8a2ff62e77fe4cd33bd8d668558f7bcd564af00aac7706908981ea6862da + manager: conda + name: bcrypt + optional: false + platform: osx-arm64 + url: + https://conda.anaconda.org/conda-forge/osx-arm64/bcrypt-4.3.0-py311h3ff9189_0.conda + version: 4.3.0 - category: main dependencies: python: '>=3.9' soupsieve: '>=1.2' typing-extensions: '' hash: - md5: 373374a3ed20141090504031dc7b693e - sha256: 4ce42860292a57867cfc81a5d261fb9886fc709a34eca52164cc8bbf6d03de9f + md5: 9f07c4fc992adb2d6c30da7fab3959a7 + sha256: ddb0df12fd30b2d36272f5daf6b6251c7625d6a99414d7ea930005bbaecad06d manager: conda name: beautifulsoup4 optional: false platform: linux-64 url: - https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.13.3-pyha770c72_0.conda - version: 4.13.3 + https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.13.4-pyha770c72_0.conda + version: 4.13.4 - category: main dependencies: 
python: '>=3.9' soupsieve: '>=1.2' typing-extensions: '' hash: - md5: 373374a3ed20141090504031dc7b693e - sha256: 4ce42860292a57867cfc81a5d261fb9886fc709a34eca52164cc8bbf6d03de9f + md5: 9f07c4fc992adb2d6c30da7fab3959a7 + sha256: ddb0df12fd30b2d36272f5daf6b6251c7625d6a99414d7ea930005bbaecad06d manager: conda name: beautifulsoup4 optional: false platform: osx-64 url: - https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.13.3-pyha770c72_0.conda - version: 4.13.3 + https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.13.4-pyha770c72_0.conda + version: 4.13.4 - category: main dependencies: python: '>=3.9' soupsieve: '>=1.2' typing-extensions: '' hash: - md5: 373374a3ed20141090504031dc7b693e - sha256: 4ce42860292a57867cfc81a5d261fb9886fc709a34eca52164cc8bbf6d03de9f + md5: 9f07c4fc992adb2d6c30da7fab3959a7 + sha256: ddb0df12fd30b2d36272f5daf6b6251c7625d6a99414d7ea930005bbaecad06d manager: conda name: beautifulsoup4 optional: false platform: osx-arm64 url: - https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.13.3-pyha770c72_0.conda - version: 4.13.3 + https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.13.4-pyha770c72_0.conda + version: 4.13.4 - category: main dependencies: python: '>=3.9' @@ -662,38 +840,38 @@ package: dependencies: python: '>=3.9' hash: - md5: 27cb6fe5f5bf73b837a6854d5a2daadc - sha256: 5b032b2a7bf2de4ddba16df54c66cc4a0953cc243bec42509dd04ded2be92613 + md5: 26c3480f80364e9498a48bb5c3e35f85 + sha256: 6195e09f7d8a3a5e2fc0dddd6d1e87198e9c3d2a1982ff04624957a6c6466e54 manager: conda name: boolean.py optional: false platform: linux-64 - url: https://conda.anaconda.org/conda-forge/noarch/boolean.py-4.0-pyhd8ed1ab_1.conda - version: '4.0' + url: https://conda.anaconda.org/conda-forge/noarch/boolean.py-5.0-pyhd8ed1ab_0.conda + version: '5.0' - category: main dependencies: python: '>=3.9' hash: - md5: 27cb6fe5f5bf73b837a6854d5a2daadc - sha256: 5b032b2a7bf2de4ddba16df54c66cc4a0953cc243bec42509dd04ded2be92613 + md5: 26c3480f80364e9498a48bb5c3e35f85 + sha256: 6195e09f7d8a3a5e2fc0dddd6d1e87198e9c3d2a1982ff04624957a6c6466e54 manager: conda name: boolean.py optional: false platform: osx-64 - url: https://conda.anaconda.org/conda-forge/noarch/boolean.py-4.0-pyhd8ed1ab_1.conda - version: '4.0' + url: https://conda.anaconda.org/conda-forge/noarch/boolean.py-5.0-pyhd8ed1ab_0.conda + version: '5.0' - category: main dependencies: python: '>=3.9' hash: - md5: 27cb6fe5f5bf73b837a6854d5a2daadc - sha256: 5b032b2a7bf2de4ddba16df54c66cc4a0953cc243bec42509dd04ded2be92613 + md5: 26c3480f80364e9498a48bb5c3e35f85 + sha256: 6195e09f7d8a3a5e2fc0dddd6d1e87198e9c3d2a1982ff04624957a6c6466e54 manager: conda name: boolean.py optional: false platform: osx-arm64 - url: https://conda.anaconda.org/conda-forge/noarch/boolean.py-4.0-pyhd8ed1ab_1.conda - version: '4.0' + url: https://conda.anaconda.org/conda-forge/noarch/boolean.py-5.0-pyhd8ed1ab_0.conda + version: '5.0' - category: main dependencies: jmespath: '>=0.7.1,<2.0.0' @@ -701,15 +879,15 @@ package: python-dateutil: '>=2.1,<3.0.0' urllib3: '>=1.25.4,!=2.2.0,<3' hash: - md5: d7106dcf924bac189ac2aaaff0187e20 - sha256: d2fa02da9413028547e01e1d45e5f07231d294b48fef71cd603307de25b6fbf1 + md5: 9df14635230105f80d20c63f6ac1f36e + sha256: 53e33940055bd400cc309b7b1e35e035ea2e8c77bf5faeb7df7fcc497c12421b manager: conda name: botocore optional: false platform: linux-64 url: - https://conda.anaconda.org/conda-forge/noarch/botocore-1.37.21-pyge310_1234567_0.conda - version: 1.37.21 + 
https://conda.anaconda.org/conda-forge/noarch/botocore-1.38.21-pyge310_1234567_0.conda + version: 1.38.21 - category: main dependencies: jmespath: '>=0.7.1,<2.0.0' @@ -717,15 +895,15 @@ package: python-dateutil: '>=2.1,<3.0.0' urllib3: '>=1.25.4,!=2.2.0,<3' hash: - md5: d7106dcf924bac189ac2aaaff0187e20 - sha256: d2fa02da9413028547e01e1d45e5f07231d294b48fef71cd603307de25b6fbf1 + md5: 9df14635230105f80d20c63f6ac1f36e + sha256: 53e33940055bd400cc309b7b1e35e035ea2e8c77bf5faeb7df7fcc497c12421b manager: conda name: botocore optional: false platform: osx-64 url: - https://conda.anaconda.org/conda-forge/noarch/botocore-1.37.21-pyge310_1234567_0.conda - version: 1.37.21 + https://conda.anaconda.org/conda-forge/noarch/botocore-1.38.21-pyge310_1234567_0.conda + version: 1.38.21 - category: main dependencies: jmespath: '>=0.7.1,<2.0.0' @@ -733,64 +911,61 @@ package: python-dateutil: '>=2.1,<3.0.0' urllib3: '>=1.25.4,!=2.2.0,<3' hash: - md5: d7106dcf924bac189ac2aaaff0187e20 - sha256: d2fa02da9413028547e01e1d45e5f07231d294b48fef71cd603307de25b6fbf1 + md5: 9df14635230105f80d20c63f6ac1f36e + sha256: 53e33940055bd400cc309b7b1e35e035ea2e8c77bf5faeb7df7fcc497c12421b manager: conda name: botocore optional: false platform: osx-arm64 url: - https://conda.anaconda.org/conda-forge/noarch/botocore-1.37.21-pyge310_1234567_0.conda - version: 1.37.21 + https://conda.anaconda.org/conda-forge/noarch/botocore-1.38.21-pyge310_1234567_0.conda + version: 1.38.21 - category: main dependencies: - __glibc: '>=2.17,<3.0.a0' - libgcc: '>=13' - libstdcxx: '>=13' + libgcc-ng: '>=12' + libstdcxx-ng: '>=12' python: '>=3.11,<3.12.0a0' python_abi: 3.11.* hash: - md5: d21daab070d76490cb39a8f1d1729d79 - sha256: 949913bbd1f74d1af202d3e4bff2e0a4e792ec00271dc4dd08641d4221aa2e12 + md5: ced5340f5dc6cff43a80deac8d0e398f + sha256: e2c0a391839914a1b3611b661ebd1736dd747f43a4611254d9333d9db3163ec7 manager: conda name: brotli-python optional: false platform: linux-64 url: - https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.1.0-py311hfdbb021_2.conda - version: 1.1.0 + https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.0.9-py311ha362b79_9.conda + version: 1.0.9 - category: main dependencies: - __osx: '>=10.13' - libcxx: '>=17' + libcxx: '>=14.0.6' python: '>=3.11,<3.12.0a0' python_abi: 3.11.* hash: - md5: d75f06ee06001794aa83a05e885f1520 - sha256: 004cefbd18f581636a8dcb1964fb73478f15d496769226ec896c1d4a0161b7d8 + md5: 034ddcc806d421524fbc46778447e87c + sha256: 0d49fcc6eddfc5d87844419b9de4267a83e870331102bf01ca41e404e4374293 manager: conda name: brotli-python optional: false platform: osx-64 url: - https://conda.anaconda.org/conda-forge/osx-64/brotli-python-1.1.0-py311hd89902b_2.conda - version: 1.1.0 + https://conda.anaconda.org/conda-forge/osx-64/brotli-python-1.0.9-py311h814d153_9.conda + version: 1.0.9 - category: main dependencies: - __osx: '>=11.0' - libcxx: '>=17' + libcxx: '>=14.0.6' python: '>=3.11,<3.12.0a0' python_abi: 3.11.* hash: - md5: c8793a23206344faa25f4e0b5d0e7908 - sha256: f507d65e740777a629ceacb062c768829ab76fde01446b191699a734521ecaad + md5: 34c36b315dc70cde887ea8c3991b994d + sha256: a0f54181606c26b754567feac9d0595b7d5de5d199aa15129dcfa3eed10ef3b7 manager: conda name: brotli-python optional: false platform: osx-arm64 url: - https://conda.anaconda.org/conda-forge/osx-arm64/brotli-python-1.1.0-py311h3f08180_2.conda - version: 1.1.0 + https://conda.anaconda.org/conda-forge/osx-arm64/brotli-python-1.0.9-py311ha397e9f_9.conda + version: 1.0.9 - category: main dependencies: __glibc: '>=2.17,<3.0.a0' @@ 
-833,164 +1008,167 @@ package: __glibc: '>=2.17,<3.0.a0' libgcc: '>=13' hash: - md5: e2775acf57efd5af15b8e3d1d74d72d3 - sha256: d4f28d87b6339b94f74762c0076e29c8ef8ddfff51a564a92da2843573c18320 + md5: f7f0d6cc2dc986d42ac2689ec88192be + sha256: f8003bef369f57396593ccd03d08a8e21966157269426f71e943f96e4b579aeb manager: conda name: c-ares optional: false platform: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.34.4-hb9d3cd8_0.conda - version: 1.34.4 + url: https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.34.5-hb9d3cd8_0.conda + version: 1.34.5 - category: main dependencies: __osx: '>=10.13' hash: - md5: 133255af67aaf1e0c0468cc753fd800b - sha256: 8dcc1628d34fe7d759f3a7dee52e09c5162a3f9669dddd6100bff965450f4a0a + md5: eafe5d9f1a8c514afe41e6e833f66dfd + sha256: b37f5dacfe1c59e0a207c1d65489b760dff9ddb97b8df7126ceda01692ba6e97 manager: conda name: c-ares optional: false platform: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/c-ares-1.34.4-hf13058a_0.conda - version: 1.34.4 + url: https://conda.anaconda.org/conda-forge/osx-64/c-ares-1.34.5-hf13058a_0.conda + version: 1.34.5 - category: main dependencies: __osx: '>=11.0' hash: - md5: c1c999a38a4303b29d75c636eaa13cf9 - sha256: 09c0c8476e50b2955f474a4a1c17c4c047dd52993b5366b6ea8e968e583b921f + md5: f8cd1beb98240c7edb1a95883360ccfa + sha256: b4bb55d0806e41ffef94d0e3f3c97531f322b3cb0ca1f7cdf8e47f62538b7a2b manager: conda name: c-ares optional: false platform: osx-arm64 - url: https://conda.anaconda.org/conda-forge/osx-arm64/c-ares-1.34.4-h5505292_0.conda - version: 1.34.4 + url: https://conda.anaconda.org/conda-forge/osx-arm64/c-ares-1.34.5-h5505292_0.conda + version: 1.34.5 - category: main - dependencies: {} + dependencies: + __unix: '' hash: - md5: 19f3a56f68d2fd06c516076bff482c52 - sha256: bf832198976d559ab44d6cdb315642655547e26d826e34da67cbee6624cda189 + md5: 95db94f75ba080a22eb623590993167b + sha256: 2a70ed95ace8a3f8a29e6cd1476a943df294a7111dfb3e152e3478c4c889b7ac manager: conda name: ca-certificates optional: false platform: linux-64 url: - https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2025.1.31-hbcca054_0.conda - version: 2025.1.31 + https://conda.anaconda.org/conda-forge/noarch/ca-certificates-2025.4.26-hbd8a1cb_0.conda + version: 2025.4.26 - category: main - dependencies: {} + dependencies: + __unix: '' hash: - md5: 3418b6c8cac3e71c0bc089fc5ea53042 - sha256: 42e911ee2d8808eacedbec46d99b03200a6138b8e8a120bd8acabe1cac41c63b + md5: 95db94f75ba080a22eb623590993167b + sha256: 2a70ed95ace8a3f8a29e6cd1476a943df294a7111dfb3e152e3478c4c889b7ac manager: conda name: ca-certificates optional: false platform: osx-64 url: - https://conda.anaconda.org/conda-forge/osx-64/ca-certificates-2025.1.31-h8857fd0_0.conda - version: 2025.1.31 + https://conda.anaconda.org/conda-forge/noarch/ca-certificates-2025.4.26-hbd8a1cb_0.conda + version: 2025.4.26 - category: main - dependencies: {} + dependencies: + __unix: '' hash: - md5: 3569d6a9141adc64d2fe4797f3289e06 - sha256: 7e12816618173fe70f5c638b72adf4bfd4ddabf27794369bb17871c5bb75b9f9 + md5: 95db94f75ba080a22eb623590993167b + sha256: 2a70ed95ace8a3f8a29e6cd1476a943df294a7111dfb3e152e3478c4c889b7ac manager: conda name: ca-certificates optional: false platform: osx-arm64 url: - https://conda.anaconda.org/conda-forge/osx-arm64/ca-certificates-2025.1.31-hf0a4a13_0.conda - version: 2025.1.31 + https://conda.anaconda.org/conda-forge/noarch/ca-certificates-2025.4.26-hbd8a1cb_0.conda + version: 2025.4.26 - category: main dependencies: msgpack-python: 
'>=0.5.2,<2.0.0' python: '>=3.9' requests: '>=2.16.0' hash: - md5: df6a1180171318e6a58c206c38ff66fd - sha256: 5684d23509525b65dd019a70bbb73c987a5d64177c0ce3def3dfdb175687ea27 + md5: 241ef6e3db47a143ac34c21bfba510f1 + sha256: ec791bb6f1ef504411f87b28946a7ae63ed1f3681cefc462cf1dfdaf0790b6a9 manager: conda name: cachecontrol optional: false platform: linux-64 url: - https://conda.anaconda.org/conda-forge/noarch/cachecontrol-0.14.2-pyha770c72_0.conda - version: 0.14.2 + https://conda.anaconda.org/conda-forge/noarch/cachecontrol-0.14.3-pyha770c72_0.conda + version: 0.14.3 - category: main dependencies: msgpack-python: '>=0.5.2,<2.0.0' python: '>=3.9' requests: '>=2.16.0' hash: - md5: df6a1180171318e6a58c206c38ff66fd - sha256: 5684d23509525b65dd019a70bbb73c987a5d64177c0ce3def3dfdb175687ea27 + md5: 241ef6e3db47a143ac34c21bfba510f1 + sha256: ec791bb6f1ef504411f87b28946a7ae63ed1f3681cefc462cf1dfdaf0790b6a9 manager: conda name: cachecontrol optional: false platform: osx-64 url: - https://conda.anaconda.org/conda-forge/noarch/cachecontrol-0.14.2-pyha770c72_0.conda - version: 0.14.2 + https://conda.anaconda.org/conda-forge/noarch/cachecontrol-0.14.3-pyha770c72_0.conda + version: 0.14.3 - category: main dependencies: msgpack-python: '>=0.5.2,<2.0.0' python: '>=3.9' requests: '>=2.16.0' hash: - md5: df6a1180171318e6a58c206c38ff66fd - sha256: 5684d23509525b65dd019a70bbb73c987a5d64177c0ce3def3dfdb175687ea27 + md5: 241ef6e3db47a143ac34c21bfba510f1 + sha256: ec791bb6f1ef504411f87b28946a7ae63ed1f3681cefc462cf1dfdaf0790b6a9 manager: conda name: cachecontrol optional: false platform: osx-arm64 url: - https://conda.anaconda.org/conda-forge/noarch/cachecontrol-0.14.2-pyha770c72_0.conda - version: 0.14.2 + https://conda.anaconda.org/conda-forge/noarch/cachecontrol-0.14.3-pyha770c72_0.conda + version: 0.14.3 - category: main dependencies: - cachecontrol: 0.14.2 + cachecontrol: 0.14.3 filelock: '>=3.8.0' python: '>=3.9' hash: - md5: 193d7362ba6d1b551ffe7b1da103f47f - sha256: cee46674041043c046232c6334b25487caa5c3d57c8b78adec0265afade4bda3 + md5: b4af8c1b61929b1bcb001c2953882149 + sha256: 4ba4d08fba095556b7f1e06ec1dca068b367e68aadab0aca73115d02ddfcd518 manager: conda name: cachecontrol-with-filecache optional: false platform: linux-64 url: - https://conda.anaconda.org/conda-forge/noarch/cachecontrol-with-filecache-0.14.2-pyhd8ed1ab_0.conda - version: 0.14.2 + https://conda.anaconda.org/conda-forge/noarch/cachecontrol-with-filecache-0.14.3-pyhd8ed1ab_0.conda + version: 0.14.3 - category: main dependencies: - cachecontrol: 0.14.2 + cachecontrol: 0.14.3 filelock: '>=3.8.0' python: '>=3.9' hash: - md5: 193d7362ba6d1b551ffe7b1da103f47f - sha256: cee46674041043c046232c6334b25487caa5c3d57c8b78adec0265afade4bda3 + md5: b4af8c1b61929b1bcb001c2953882149 + sha256: 4ba4d08fba095556b7f1e06ec1dca068b367e68aadab0aca73115d02ddfcd518 manager: conda name: cachecontrol-with-filecache optional: false platform: osx-64 url: - https://conda.anaconda.org/conda-forge/noarch/cachecontrol-with-filecache-0.14.2-pyhd8ed1ab_0.conda - version: 0.14.2 + https://conda.anaconda.org/conda-forge/noarch/cachecontrol-with-filecache-0.14.3-pyhd8ed1ab_0.conda + version: 0.14.3 - category: main dependencies: - cachecontrol: 0.14.2 + cachecontrol: 0.14.3 filelock: '>=3.8.0' python: '>=3.9' hash: - md5: 193d7362ba6d1b551ffe7b1da103f47f - sha256: cee46674041043c046232c6334b25487caa5c3d57c8b78adec0265afade4bda3 + md5: b4af8c1b61929b1bcb001c2953882149 + sha256: 4ba4d08fba095556b7f1e06ec1dca068b367e68aadab0aca73115d02ddfcd518 manager: conda name: 
cachecontrol-with-filecache optional: false platform: osx-arm64 url: - https://conda.anaconda.org/conda-forge/noarch/cachecontrol-with-filecache-0.14.2-pyhd8ed1ab_0.conda - version: 0.14.2 + https://conda.anaconda.org/conda-forge/noarch/cachecontrol-with-filecache-0.14.3-pyhd8ed1ab_0.conda + version: 0.14.3 - category: main dependencies: python: '>=3.9' @@ -1030,42 +1208,6 @@ package: url: https://conda.anaconda.org/conda-forge/noarch/cachetools-5.5.2-pyhd8ed1ab_0.conda version: 5.5.2 - - category: main - dependencies: - python: '>=3.9' - hash: - md5: 1efe226b868cf59b8330356c37c8186e - sha256: 2560a98e3dc0ff4ff408a199d05922ae10fab2629417c4c4309e4226267cef8c - manager: conda - name: cachy - optional: false - platform: linux-64 - url: https://conda.anaconda.org/conda-forge/noarch/cachy-0.3.0-pyhd8ed1ab_2.conda - version: 0.3.0 - - category: main - dependencies: - python: '>=3.9' - hash: - md5: 1efe226b868cf59b8330356c37c8186e - sha256: 2560a98e3dc0ff4ff408a199d05922ae10fab2629417c4c4309e4226267cef8c - manager: conda - name: cachy - optional: false - platform: osx-64 - url: https://conda.anaconda.org/conda-forge/noarch/cachy-0.3.0-pyhd8ed1ab_2.conda - version: 0.3.0 - - category: main - dependencies: - python: '>=3.9' - hash: - md5: 1efe226b868cf59b8330356c37c8186e - sha256: 2560a98e3dc0ff4ff408a199d05922ae10fab2629417c4c4309e4226267cef8c - manager: conda - name: cachy - optional: false - platform: osx-arm64 - url: https://conda.anaconda.org/conda-forge/noarch/cachy-0.3.0-pyhd8ed1ab_2.conda - version: 0.3.0 - category: main dependencies: __glibc: '>=2.17,<3.0.a0' @@ -1143,15 +1285,15 @@ package: dependencies: cctools_osx-64: '1010.6' ld64: '951.9' - libllvm20: '>=20.1.1,<20.2.0a0' + libllvm20: '>=20.1.2,<20.2.0a0' hash: - md5: 9b509294f38c455675c3eaa284c1e161 - sha256: 694cf773863629829f6a465c3f387e533ad94b9fa85c870d6b59515a17c193e7 + md5: b771817db18969a81739c8d3e64ddc9b + sha256: ec1697505db0eef66020ebaabda058fdec12f84d322d7955712a4f7179c8b96b manager: conda name: cctools optional: false platform: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/cctools-1010.6-h8f84d09_4.conda + url: https://conda.anaconda.org/conda-forge/osx-64/cctools-1010.6-h8f84d09_6.conda version: '1010.6' - category: main dependencies: @@ -1159,32 +1301,32 @@ package: ld64: '951.9' libllvm18: '>=18.1.8,<18.2.0a0' hash: - md5: 2fecdd2278ff651073e9373f32151e41 - sha256: 02f7ab57ddf0bfe291dac3a3e59ab7c65a3ae0a3a086440a7e2666b0e862b922 + md5: e0ba8df6997102eb4d367e3e70f90778 + sha256: 393fc3bf21b0187384e652aa4fab184d633e57e3e63f2b10f16a3d5f7bb0717b manager: conda name: cctools optional: false platform: osx-arm64 - url: https://conda.anaconda.org/conda-forge/osx-arm64/cctools-1010.6-hb4fb6a3_4.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/cctools-1010.6-hb4fb6a3_6.conda version: '1010.6' - category: main dependencies: __osx: '>=10.13' ld64_osx-64: '>=951.9,<951.10.0a0' libcxx: '' - libllvm20: '>=20.1.1,<20.2.0a0' + libllvm20: '>=20.1.2,<20.2.0a0' libzlib: '>=1.3.1,<2.0a0' llvm-tools: 20.1.* sigtool: '' hash: - md5: adb560e242fd17e65e5088c0dc747d10 - sha256: 8b2d3eaad7c98afd1336f1185611c2a80a5d2ce52945a497a3074eb8e67ee1bc + md5: 9613d173e27079504b2e9be8955d2fbb + sha256: 566b631eb24b68d625b40ed31a204e9a2895ba747c4190a35c124bd65ebdb251 manager: conda name: cctools_osx-64 optional: false platform: osx-64 url: - https://conda.anaconda.org/conda-forge/osx-64/cctools_osx-64-1010.6-hfe8da7e_4.conda + https://conda.anaconda.org/conda-forge/osx-64/cctools_osx-64-1010.6-hfe8da7e_6.conda version: '1010.6' - 
category: main dependencies: @@ -1196,54 +1338,54 @@ package: llvm-tools: 18.1.* sigtool: '' hash: - md5: 57ce83eec79eff26016ae3e1af07e431 - sha256: e223912a174344cddfe7ea8a598d091b18e5defbc63c2037c3e42165654b09dc + md5: b876da50fbe92a19737933c7aa92fb02 + sha256: 6e9463499dddad0ee61c999031c84bd1b8233676bcd220aece1b754667c680d7 manager: conda name: cctools_osx-arm64 optional: false platform: osx-arm64 url: - https://conda.anaconda.org/conda-forge/osx-arm64/cctools_osx-arm64-1010.6-h3b4f5d3_4.conda + https://conda.anaconda.org/conda-forge/osx-arm64/cctools_osx-arm64-1010.6-h3b4f5d3_6.conda version: '1010.6' - category: main dependencies: python: '>=3.9' hash: - md5: c207fa5ac7ea99b149344385a9c0880d - sha256: 42a78446da06a2568cb13e69be3355169fbd0ea424b00fc80b7d840f5baaacf3 + md5: c33eeaaa33f45031be34cda513df39b6 + sha256: 52aa837642fd851b3f7ad3b1f66afc5366d133c1d452323f786b0378a391915c manager: conda name: certifi optional: false platform: linux-64 url: - https://conda.anaconda.org/conda-forge/noarch/certifi-2025.1.31-pyhd8ed1ab_0.conda - version: 2025.1.31 + https://conda.anaconda.org/conda-forge/noarch/certifi-2025.4.26-pyhd8ed1ab_0.conda + version: 2025.4.26 - category: main dependencies: python: '>=3.9' hash: - md5: c207fa5ac7ea99b149344385a9c0880d - sha256: 42a78446da06a2568cb13e69be3355169fbd0ea424b00fc80b7d840f5baaacf3 + md5: c33eeaaa33f45031be34cda513df39b6 + sha256: 52aa837642fd851b3f7ad3b1f66afc5366d133c1d452323f786b0378a391915c manager: conda name: certifi optional: false platform: osx-64 url: - https://conda.anaconda.org/conda-forge/noarch/certifi-2025.1.31-pyhd8ed1ab_0.conda - version: 2025.1.31 + https://conda.anaconda.org/conda-forge/noarch/certifi-2025.4.26-pyhd8ed1ab_0.conda + version: 2025.4.26 - category: main dependencies: python: '>=3.9' hash: - md5: c207fa5ac7ea99b149344385a9c0880d - sha256: 42a78446da06a2568cb13e69be3355169fbd0ea424b00fc80b7d840f5baaacf3 + md5: c33eeaaa33f45031be34cda513df39b6 + sha256: 52aa837642fd851b3f7ad3b1f66afc5366d133c1d452323f786b0378a391915c manager: conda name: certifi optional: false platform: osx-arm64 url: - https://conda.anaconda.org/conda-forge/noarch/certifi-2025.1.31-pyhd8ed1ab_0.conda - version: 2025.1.31 + https://conda.anaconda.org/conda-forge/noarch/certifi-2025.4.26-pyhd8ed1ab_0.conda + version: 2025.4.26 - category: main dependencies: __glibc: '>=2.17,<3.0.a0' @@ -1371,131 +1513,131 @@ package: dependencies: python: '>=3.9' hash: - md5: e83a31202d1c0a000fce3e9cf3825875 - sha256: 4e0ee91b97e5de3e74567bdacea27f0139709fceca4db8adffbe24deffccb09b + md5: 40fe4284b8b5835a9073a645139f35af + sha256: 535ae5dcda8022e31c6dc063eb344c80804c537a5a04afba43a845fa6fa130f5 manager: conda name: charset-normalizer optional: false platform: linux-64 url: - https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.4.1-pyhd8ed1ab_0.conda - version: 3.4.1 + https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.4.2-pyhd8ed1ab_0.conda + version: 3.4.2 - category: main dependencies: python: '>=3.9' hash: - md5: e83a31202d1c0a000fce3e9cf3825875 - sha256: 4e0ee91b97e5de3e74567bdacea27f0139709fceca4db8adffbe24deffccb09b + md5: 40fe4284b8b5835a9073a645139f35af + sha256: 535ae5dcda8022e31c6dc063eb344c80804c537a5a04afba43a845fa6fa130f5 manager: conda name: charset-normalizer optional: false platform: osx-64 url: - https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.4.1-pyhd8ed1ab_0.conda - version: 3.4.1 + https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.4.2-pyhd8ed1ab_0.conda + version: 3.4.2 - category: 
main dependencies: python: '>=3.9' hash: - md5: e83a31202d1c0a000fce3e9cf3825875 - sha256: 4e0ee91b97e5de3e74567bdacea27f0139709fceca4db8adffbe24deffccb09b + md5: 40fe4284b8b5835a9073a645139f35af + sha256: 535ae5dcda8022e31c6dc063eb344c80804c537a5a04afba43a845fa6fa130f5 manager: conda name: charset-normalizer optional: false platform: osx-arm64 url: - https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.4.1-pyhd8ed1ab_0.conda - version: 3.4.1 + https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.4.2-pyhd8ed1ab_0.conda + version: 3.4.2 - category: main dependencies: hatch: '' hatchling: '' - python: '>=3.8' + python: '>=3.9' requests: '' rich: '' typer: '' hash: - md5: 1e4a430e1d1f4c009d72334607d032cd - sha256: 81dff2b53ea88201db0d40be3b45fcc572cb44c7d9da5500ba3d1c89c8f1d5cd + md5: 6ac974d26885bd69d4e725e37d5e5d59 + sha256: abcb755e3d9be73fc5c512d257d3254502bf2264ab14d512a6b3373685244515 manager: conda name: cirun optional: false platform: linux-64 - url: https://conda.anaconda.org/conda-forge/noarch/cirun-0.30-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/cirun-0.30-pyhd8ed1ab_1.conda version: '0.30' - category: main dependencies: hatch: '' hatchling: '' - python: '>=3.8' + python: '>=3.9' requests: '' rich: '' typer: '' hash: - md5: 1e4a430e1d1f4c009d72334607d032cd - sha256: 81dff2b53ea88201db0d40be3b45fcc572cb44c7d9da5500ba3d1c89c8f1d5cd + md5: 6ac974d26885bd69d4e725e37d5e5d59 + sha256: abcb755e3d9be73fc5c512d257d3254502bf2264ab14d512a6b3373685244515 manager: conda name: cirun optional: false platform: osx-64 - url: https://conda.anaconda.org/conda-forge/noarch/cirun-0.30-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/cirun-0.30-pyhd8ed1ab_1.conda version: '0.30' - category: main dependencies: hatch: '' hatchling: '' - python: '>=3.8' + python: '>=3.9' requests: '' rich: '' typer: '' hash: - md5: 1e4a430e1d1f4c009d72334607d032cd - sha256: 81dff2b53ea88201db0d40be3b45fcc572cb44c7d9da5500ba3d1c89c8f1d5cd + md5: 6ac974d26885bd69d4e725e37d5e5d59 + sha256: abcb755e3d9be73fc5c512d257d3254502bf2264ab14d512a6b3373685244515 manager: conda name: cirun optional: false platform: osx-arm64 - url: https://conda.anaconda.org/conda-forge/noarch/cirun-0.30-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/cirun-0.30-pyhd8ed1ab_1.conda version: '0.30' - category: main dependencies: __unix: '' - python: '>=3.9' + python: '>=3.10' hash: - md5: f22f4d4970e09d68a10b922cbb0408d3 - sha256: c920d23cd1fcf565031c679adb62d848af60d6fbb0edc2d50ba475cea4f0d8ab + md5: 94b550b8d3a614dbd326af798c7dfb40 + sha256: 8aee789c82d8fdd997840c952a586db63c6890b00e88c4fb6e80a38edd5f51c0 manager: conda name: click optional: false platform: linux-64 - url: https://conda.anaconda.org/conda-forge/noarch/click-8.1.8-pyh707e725_0.conda - version: 8.1.8 + url: https://conda.anaconda.org/conda-forge/noarch/click-8.2.1-pyh707e725_0.conda + version: 8.2.1 - category: main dependencies: __unix: '' - python: '>=3.9' + python: '>=3.10' hash: - md5: f22f4d4970e09d68a10b922cbb0408d3 - sha256: c920d23cd1fcf565031c679adb62d848af60d6fbb0edc2d50ba475cea4f0d8ab + md5: 94b550b8d3a614dbd326af798c7dfb40 + sha256: 8aee789c82d8fdd997840c952a586db63c6890b00e88c4fb6e80a38edd5f51c0 manager: conda name: click optional: false platform: osx-64 - url: https://conda.anaconda.org/conda-forge/noarch/click-8.1.8-pyh707e725_0.conda - version: 8.1.8 + url: https://conda.anaconda.org/conda-forge/noarch/click-8.2.1-pyh707e725_0.conda + version: 8.2.1 - category: main 
dependencies: __unix: '' - python: '>=3.9' + python: '>=3.10' hash: - md5: f22f4d4970e09d68a10b922cbb0408d3 - sha256: c920d23cd1fcf565031c679adb62d848af60d6fbb0edc2d50ba475cea4f0d8ab + md5: 94b550b8d3a614dbd326af798c7dfb40 + sha256: 8aee789c82d8fdd997840c952a586db63c6890b00e88c4fb6e80a38edd5f51c0 manager: conda name: click optional: false platform: osx-arm64 - url: https://conda.anaconda.org/conda-forge/noarch/click-8.1.8-pyh707e725_0.conda - version: 8.1.8 + url: https://conda.anaconda.org/conda-forge/noarch/click-8.2.1-pyh707e725_0.conda + version: 8.2.1 - category: main dependencies: click: '' @@ -1538,48 +1680,6 @@ package: url: https://conda.anaconda.org/conda-forge/noarch/click-default-group-1.2.4-pyhd8ed1ab_1.conda version: 1.2.4 - - category: main - dependencies: - pastel: '>=0.2.0,<0.3.0' - pylev: '>=1.3,<2.0' - python: '>=3.9' - hash: - md5: 37e178bf9356122c35005a62d850e5d9 - sha256: da000653be96a15b9aad5c59f655dbd4a60cb66fc0137e1018db9de76671bb08 - manager: conda - name: clikit - optional: false - platform: linux-64 - url: https://conda.anaconda.org/conda-forge/noarch/clikit-0.6.2-pyhd8ed1ab_3.conda - version: 0.6.2 - - category: main - dependencies: - pastel: '>=0.2.0,<0.3.0' - pylev: '>=1.3,<2.0' - python: '>=3.9' - hash: - md5: 37e178bf9356122c35005a62d850e5d9 - sha256: da000653be96a15b9aad5c59f655dbd4a60cb66fc0137e1018db9de76671bb08 - manager: conda - name: clikit - optional: false - platform: osx-64 - url: https://conda.anaconda.org/conda-forge/noarch/clikit-0.6.2-pyhd8ed1ab_3.conda - version: 0.6.2 - - category: main - dependencies: - pastel: '>=0.2.0,<0.3.0' - pylev: '>=1.3,<2.0' - python: '>=3.9' - hash: - md5: 37e178bf9356122c35005a62d850e5d9 - sha256: da000653be96a15b9aad5c59f655dbd4a60cb66fc0137e1018db9de76671bb08 - manager: conda - name: clikit - optional: false - platform: osx-arm64 - url: https://conda.anaconda.org/conda-forge/noarch/clikit-0.6.2-pyhd8ed1ab_3.conda - version: 0.6.2 - category: main dependencies: python: '>=3.9' @@ -1702,7 +1802,7 @@ package: archspec: '>=0.2.3' boltons: '>=23.0.0' charset-normalizer: '' - conda-libmamba-solver: '>=23.11.0' + conda-libmamba-solver: '>=24.11.0' conda-package-handling: '>=2.2.0' distro: '>=1.5.0' frozendict: '>=2.4.2' @@ -1721,21 +1821,21 @@ package: truststore: '>=0.8.0' zstandard: '>=0.19.0' hash: - md5: 3928dbcf8641f9d62093ca98ed2ae22d - sha256: 8b7854d016c106055b28c34e0af53bb41e1d35c9eb215fcc98544701465a3137 + md5: c96f87e2e2eecb7cddfde0c888fddbfd + sha256: 7c50c6b299fe111b00681a2dc318e09270c658bd8a657aedc9f99e0724e0a536 manager: conda name: conda optional: false platform: linux-64 url: - https://conda.anaconda.org/conda-forge/linux-64/conda-25.3.0-py311h38be061_0.conda - version: 25.3.0 + https://conda.anaconda.org/conda-forge/linux-64/conda-25.3.1-py311h38be061_1.conda + version: 25.3.1 - category: main dependencies: archspec: '>=0.2.3' boltons: '>=23.0.0' charset-normalizer: '' - conda-libmamba-solver: '>=23.11.0' + conda-libmamba-solver: '>=24.11.0' conda-package-handling: '>=2.2.0' distro: '>=1.5.0' frozendict: '>=2.4.2' @@ -1754,20 +1854,20 @@ package: truststore: '>=0.8.0' zstandard: '>=0.19.0' hash: - md5: ba0c2614f3baa23e6083e42bfab1da81 - sha256: 60863313e39c11d97d4bf35808b2afbb3c70cfcb36ccc6b2e3e272070d8b8555 + md5: e98291eb0a712aa348fe64f329435f76 + sha256: 1107abae59cbd3c503997f8ba46bc9c9de155c263058c1e36dc95098c2b42a04 manager: conda name: conda optional: false platform: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/conda-25.3.0-py311h6eed73b_0.conda - version: 25.3.0 + url: 
https://conda.anaconda.org/conda-forge/osx-64/conda-25.3.1-py311h6eed73b_1.conda + version: 25.3.1 - category: main dependencies: archspec: '>=0.2.3' boltons: '>=23.0.0' charset-normalizer: '' - conda-libmamba-solver: '>=23.11.0' + conda-libmamba-solver: '>=24.11.0' conda-package-handling: '>=2.2.0' distro: '>=1.5.0' frozendict: '>=2.4.2' @@ -1786,15 +1886,15 @@ package: truststore: '>=0.8.0' zstandard: '>=0.19.0' hash: - md5: 7b8cb0bf5eb5c4e8d5eb20173e2c1b06 - sha256: df94327583278d755e764231767b97adc16f72d6e62d1ab70ac1a617dfbd0cb6 + md5: 929694c3769a8c5ada32f034d3c190ac + sha256: e7462c59c1ad8e78e378a6c93c757c3bc9cbc9faa7416b8b659376751ba36b8d manager: conda name: conda optional: false platform: osx-arm64 url: - https://conda.anaconda.org/conda-forge/osx-arm64/conda-25.3.0-py311h267d04e_0.conda - version: 25.3.0 + https://conda.anaconda.org/conda-forge/osx-arm64/conda-25.3.1-py311h267d04e_1.conda + version: 25.3.1 - category: main dependencies: beautifulsoup4: '' @@ -1802,6 +1902,7 @@ package: conda: '>=23.7.0' conda-index: '>=0.4.0' conda-package-handling: '>=2.2.0' + evalidate: '>=2,<3.0a' filelock: '' frozendict: '>=2.4.2' jinja2: '' @@ -1812,7 +1913,7 @@ package: patchelf: <0.18 pkginfo: '' psutil: '' - py-lief: <0.15.0a0 + py-lief: <0.17.0a0 python: '>=3.11,<3.12.0a0' python-libarchive-c: '' python_abi: 3.11.* @@ -1822,15 +1923,15 @@ package: ripgrep: '' tqdm: '' hash: - md5: 291b622f9f94444d558c56fcce72520b - sha256: e79fbd613125bb57bb39b37c96350e133694ef81b09d035e37d3e15ac0e69e7d + md5: 76d402f2e804ba52c0107f9dd912b09a + sha256: 14a91107e7400d12351cb99fd608feac2971841673afa04499d4c55467708e98 manager: conda name: conda-build optional: false platform: linux-64 url: - https://conda.anaconda.org/conda-forge/linux-64/conda-build-25.1.2-py311h38be061_3.conda - version: 25.1.2 + https://conda.anaconda.org/conda-forge/linux-64/conda-build-25.4.2-py311h38be061_0.conda + version: 25.4.2 - category: main dependencies: beautifulsoup4: '' @@ -1839,6 +1940,7 @@ package: conda: '>=23.7.0' conda-index: '>=0.4.0' conda-package-handling: '>=2.2.0' + evalidate: '>=2,<3.0a' filelock: '' frozendict: '>=2.4.2' jinja2: '' @@ -1848,7 +1950,7 @@ package: patch: '>=2.6' pkginfo: '' psutil: '' - py-lief: <0.15.0a0 + py-lief: <0.17.0a0 python: '>=3.11,<3.12.0a0' python-libarchive-c: '' python_abi: 3.11.* @@ -1858,15 +1960,15 @@ package: ripgrep: '' tqdm: '' hash: - md5: a595439e18431269e4464f7fac0b51a5 - sha256: bbf9e0fcb83e8e831c69cc8f8661ed2b1709c3a1ce1d869136a7eb917f7417f3 + md5: ef4d68ccec07cbd2698391240c207c73 + sha256: 669447b0a3bb946534ba81857cc72655e4789d1fbe1352f6932c4cfc4f38a952 manager: conda name: conda-build optional: false platform: osx-64 url: - https://conda.anaconda.org/conda-forge/osx-64/conda-build-25.1.2-py311h6eed73b_3.conda - version: 25.1.2 + https://conda.anaconda.org/conda-forge/osx-64/conda-build-25.4.2-py311h6eed73b_0.conda + version: 25.4.2 - category: main dependencies: beautifulsoup4: '' @@ -1875,6 +1977,7 @@ package: conda: '>=23.7.0' conda-index: '>=0.4.0' conda-package-handling: '>=2.2.0' + evalidate: '>=2,<3.0a' filelock: '' frozendict: '>=2.4.2' jinja2: '' @@ -1884,7 +1987,7 @@ package: patch: '>=2.6' pkginfo: '' psutil: '' - py-lief: <0.15.0a0 + py-lief: <0.17.0a0 python: '>=3.11,<3.12.0a0' python-libarchive-c: '' python_abi: 3.11.* @@ -1894,15 +1997,15 @@ package: ripgrep: '' tqdm: '' hash: - md5: 619a39adcb504aed554df233b9357f21 - sha256: c89b13f5b57086940a34ace98b1b5102a1b1197be9104e8e85f82149653df199 + md5: 9ac08f1c880a001fdeb2b94f633f86f3 + sha256: 
2ca0c1faede43b63afdca0e30dd471c3965bd8e26ba7180cceb86c16a1c49eb8 manager: conda name: conda-build optional: false platform: osx-arm64 url: - https://conda.anaconda.org/conda-forge/osx-arm64/conda-build-25.1.2-py311h267d04e_3.conda - version: 25.1.2 + https://conda.anaconda.org/conda-forge/osx-arm64/conda-build-25.4.2-py311h267d04e_0.conda + version: 25.4.2 - category: main dependencies: boltons: '>=23.0.0' @@ -1989,58 +2092,61 @@ package: click: '' conda-build: '>=3.27' conda-smithy: '>=3.40.1' + pydantic-settings: '' python: '>=3.11' python-rapidjson: '' pyyaml: '' rattler-build-conda-compat: '>=0.0.2,<2.0.0a0' hash: - md5: 5ad034cd3f0b4bff682cf48a527e2de2 - sha256: 38406e3a3fe4fefd7f1e400fdf7e09d2aaad1b5aa09175edb3cd1445eaf655e2 + md5: 7b442e74087b017a26654ec1c991e42c + sha256: 73d03669a92c2a33904200d0df8434aa82a9df9f526735816f2a3c22ac0185c2 manager: conda name: conda-forge-feedstock-ops optional: false platform: linux-64 url: - https://conda.anaconda.org/conda-forge/noarch/conda-forge-feedstock-ops-0.11.0-pyhd8ed1ab_0.conda - version: 0.11.0 + https://conda.anaconda.org/conda-forge/noarch/conda-forge-feedstock-ops-0.12.0-pyhd8ed1ab_0.conda + version: 0.12.0 - category: main dependencies: click: '' conda-build: '>=3.27' conda-smithy: '>=3.40.1' + pydantic-settings: '' python: '>=3.11' python-rapidjson: '' pyyaml: '' rattler-build-conda-compat: '>=0.0.2,<2.0.0a0' hash: - md5: 5ad034cd3f0b4bff682cf48a527e2de2 - sha256: 38406e3a3fe4fefd7f1e400fdf7e09d2aaad1b5aa09175edb3cd1445eaf655e2 + md5: 7b442e74087b017a26654ec1c991e42c + sha256: 73d03669a92c2a33904200d0df8434aa82a9df9f526735816f2a3c22ac0185c2 manager: conda name: conda-forge-feedstock-ops optional: false platform: osx-64 url: - https://conda.anaconda.org/conda-forge/noarch/conda-forge-feedstock-ops-0.11.0-pyhd8ed1ab_0.conda - version: 0.11.0 + https://conda.anaconda.org/conda-forge/noarch/conda-forge-feedstock-ops-0.12.0-pyhd8ed1ab_0.conda + version: 0.12.0 - category: main dependencies: click: '' conda-build: '>=3.27' conda-smithy: '>=3.40.1' + pydantic-settings: '' python: '>=3.11' python-rapidjson: '' pyyaml: '' rattler-build-conda-compat: '>=0.0.2,<2.0.0a0' hash: - md5: 5ad034cd3f0b4bff682cf48a527e2de2 - sha256: 38406e3a3fe4fefd7f1e400fdf7e09d2aaad1b5aa09175edb3cd1445eaf655e2 + md5: 7b442e74087b017a26654ec1c991e42c + sha256: 73d03669a92c2a33904200d0df8434aa82a9df9f526735816f2a3c22ac0185c2 manager: conda name: conda-forge-feedstock-ops optional: false platform: osx-arm64 url: - https://conda.anaconda.org/conda-forge/noarch/conda-forge-feedstock-ops-0.11.0-pyhd8ed1ab_0.conda - version: 0.11.0 + https://conda.anaconda.org/conda-forge/noarch/conda-forge-feedstock-ops-0.12.0-pyhd8ed1ab_0.conda + version: 0.12.0 - category: main dependencies: beautifulsoup4: '' @@ -2104,258 +2210,299 @@ package: - category: main dependencies: {} hash: - md5: 8413451a65856dee516a7c1e5ae54f6d - sha256: 5b89bc1ff4f71b51c8a03fcb229904c33538e7967677a62dc89298f42ec0a38f + md5: c90c2e4ee9cc524cb1a2ee25201903d4 + sha256: cac72ce207ddaee07922f54c87e87bc80309e61e9a1ca1c30cc1cdc47c98a8de manager: conda name: conda-forge-pinning optional: false platform: linux-64 url: - https://conda.anaconda.org/conda-forge/noarch/conda-forge-pinning-2025.03.27.11.47.42-hd8ed1ab_0.conda - version: 2025.03.27.11.47.42 + https://conda.anaconda.org/conda-forge/noarch/conda-forge-pinning-2025.05.23.05.43.47-hd8ed1ab_0.conda + version: 2025.05.23.05.43.47 - category: main dependencies: {} hash: - md5: 8413451a65856dee516a7c1e5ae54f6d - sha256: 
5b89bc1ff4f71b51c8a03fcb229904c33538e7967677a62dc89298f42ec0a38f + md5: c90c2e4ee9cc524cb1a2ee25201903d4 + sha256: cac72ce207ddaee07922f54c87e87bc80309e61e9a1ca1c30cc1cdc47c98a8de manager: conda name: conda-forge-pinning optional: false platform: osx-64 url: - https://conda.anaconda.org/conda-forge/noarch/conda-forge-pinning-2025.03.27.11.47.42-hd8ed1ab_0.conda - version: 2025.03.27.11.47.42 + https://conda.anaconda.org/conda-forge/noarch/conda-forge-pinning-2025.05.23.05.43.47-hd8ed1ab_0.conda + version: 2025.05.23.05.43.47 - category: main dependencies: {} hash: - md5: 8413451a65856dee516a7c1e5ae54f6d - sha256: 5b89bc1ff4f71b51c8a03fcb229904c33538e7967677a62dc89298f42ec0a38f + md5: c90c2e4ee9cc524cb1a2ee25201903d4 + sha256: cac72ce207ddaee07922f54c87e87bc80309e61e9a1ca1c30cc1cdc47c98a8de manager: conda name: conda-forge-pinning optional: false platform: osx-arm64 url: - https://conda.anaconda.org/conda-forge/noarch/conda-forge-pinning-2025.03.27.11.47.42-hd8ed1ab_0.conda - version: 2025.03.27.11.47.42 + https://conda.anaconda.org/conda-forge/noarch/conda-forge-pinning-2025.05.23.05.43.47-hd8ed1ab_0.conda + version: 2025.05.23.05.43.47 - category: main dependencies: click: '>=8' - conda: '>=4.12.0' + conda: '>=4.14.0' conda-package-streaming: '' filelock: '' jinja2: '' more-itertools: '' - python: '>=3.7' - pyyaml: '>=6' + msgpack-python: '>=1.0.2' + python: '>=3.9' + ruamel.yaml: '' hash: - md5: 6e312377c7c70eb15a3981fc82d6ecba - sha256: 05a9fe18fedf8ea151a9a4d3665f069b668cc149e77b923b4244c5ee48d3ce04 + md5: 1140378976bfd784b9485be9ff34a66b + sha256: a7b9e17dfa405c7c2625bda43074e87b0c356dacb994d06f5ab828dfd70a37f8 manager: conda name: conda-index optional: false platform: linux-64 url: - https://conda.anaconda.org/conda-forge/noarch/conda-index-0.5.0-pyhd8ed1ab_0.conda - version: 0.5.0 + https://conda.anaconda.org/conda-forge/noarch/conda-index-0.6.0-pyhd8ed1ab_0.conda + version: 0.6.0 - category: main dependencies: click: '>=8' - conda: '>=4.12.0' + conda: '>=4.14.0' conda-package-streaming: '' filelock: '' jinja2: '' more-itertools: '' - python: '>=3.7' - pyyaml: '>=6' + msgpack-python: '>=1.0.2' + python: '>=3.9' + ruamel.yaml: '' hash: - md5: 6e312377c7c70eb15a3981fc82d6ecba - sha256: 05a9fe18fedf8ea151a9a4d3665f069b668cc149e77b923b4244c5ee48d3ce04 + md5: 1140378976bfd784b9485be9ff34a66b + sha256: a7b9e17dfa405c7c2625bda43074e87b0c356dacb994d06f5ab828dfd70a37f8 manager: conda name: conda-index optional: false platform: osx-64 url: - https://conda.anaconda.org/conda-forge/noarch/conda-index-0.5.0-pyhd8ed1ab_0.conda - version: 0.5.0 + https://conda.anaconda.org/conda-forge/noarch/conda-index-0.6.0-pyhd8ed1ab_0.conda + version: 0.6.0 - category: main dependencies: click: '>=8' - conda: '>=4.12.0' + conda: '>=4.14.0' conda-package-streaming: '' filelock: '' jinja2: '' more-itertools: '' - python: '>=3.7' - pyyaml: '>=6' + msgpack-python: '>=1.0.2' + python: '>=3.9' + ruamel.yaml: '' hash: - md5: 6e312377c7c70eb15a3981fc82d6ecba - sha256: 05a9fe18fedf8ea151a9a4d3665f069b668cc149e77b923b4244c5ee48d3ce04 + md5: 1140378976bfd784b9485be9ff34a66b + sha256: a7b9e17dfa405c7c2625bda43074e87b0c356dacb994d06f5ab828dfd70a37f8 manager: conda name: conda-index optional: false platform: osx-arm64 url: - https://conda.anaconda.org/conda-forge/noarch/conda-index-0.5.0-pyhd8ed1ab_0.conda - version: 0.5.0 + https://conda.anaconda.org/conda-forge/noarch/conda-index-0.6.0-pyhd8ed1ab_0.conda + version: 0.6.0 - category: main dependencies: boltons: '>=23.0.0' - conda: '>=23.7.4' + conda: '>=24.11' libmambapy: 
'>=2.0.0' python: '>=3.9' hash: - md5: c4c938b8bc776bb79a374cd634949c7d - sha256: 3144fe96cf80186c7b679d742ae0168a220a42aaf25b684afdb6bc76128339d7 + md5: d62b8f745ff471d5594ad73605cb9b59 + sha256: 48999a7a6e300075e4ef1c85130614d75429379eea8fe78f18a38a8aab8da384 manager: conda name: conda-libmamba-solver optional: false platform: linux-64 url: - https://conda.anaconda.org/conda-forge/noarch/conda-libmamba-solver-25.3.0-pyhd8ed1ab_0.conda - version: 25.3.0 + https://conda.anaconda.org/conda-forge/noarch/conda-libmamba-solver-25.4.0-pyhd8ed1ab_0.conda + version: 25.4.0 - category: main dependencies: boltons: '>=23.0.0' - conda: '>=23.7.4' + conda: '>=24.11' libmambapy: '>=2.0.0' python: '>=3.9' hash: - md5: c4c938b8bc776bb79a374cd634949c7d - sha256: 3144fe96cf80186c7b679d742ae0168a220a42aaf25b684afdb6bc76128339d7 + md5: d62b8f745ff471d5594ad73605cb9b59 + sha256: 48999a7a6e300075e4ef1c85130614d75429379eea8fe78f18a38a8aab8da384 manager: conda name: conda-libmamba-solver optional: false platform: osx-64 url: - https://conda.anaconda.org/conda-forge/noarch/conda-libmamba-solver-25.3.0-pyhd8ed1ab_0.conda - version: 25.3.0 + https://conda.anaconda.org/conda-forge/noarch/conda-libmamba-solver-25.4.0-pyhd8ed1ab_0.conda + version: 25.4.0 - category: main dependencies: boltons: '>=23.0.0' - conda: '>=23.7.4' + conda: '>=24.11' libmambapy: '>=2.0.0' python: '>=3.9' hash: - md5: c4c938b8bc776bb79a374cd634949c7d - sha256: 3144fe96cf80186c7b679d742ae0168a220a42aaf25b684afdb6bc76128339d7 + md5: d62b8f745ff471d5594ad73605cb9b59 + sha256: 48999a7a6e300075e4ef1c85130614d75429379eea8fe78f18a38a8aab8da384 manager: conda name: conda-libmamba-solver optional: false platform: osx-arm64 url: - https://conda.anaconda.org/conda-forge/noarch/conda-libmamba-solver-25.3.0-pyhd8ed1ab_0.conda - version: 25.3.0 + https://conda.anaconda.org/conda-forge/noarch/conda-libmamba-solver-25.4.0-pyhd8ed1ab_0.conda + version: 25.4.0 - category: main dependencies: - cachecontrol-with-filecache: '>=0.12.9' - cachy: '>=0.3.0' + __linux: '' + boltons: '>=23.0.0' + cachecontrol: '>=0.14.0,<0.15.0' + cachecontrol-with-filecache: '' + charset-normalizer: '' click: '>=8.0' click-default-group: '' - clikit: '>=0.6.2' - crashtest: '>=0.3.0' - ensureconda: '>=1.3' + crashtest: '>=0.4.1,<0.5.0' + dulwich: '>=0.22.6,<0.23.0' + ensureconda: '>=1.4.7' + filelock: '' gitpython: '>=3.1.30' - html5lib: '>=1.0' + importlib-metadata: '>=4.4' jinja2: '' - keyring: '>=21.2.0' - packaging: '>=20.4' - pkginfo: '>=1.4' - pydantic: '>=1.10' - python: '>=3.9' + keyring: '>=25.1.0,<26.0.0' + packaging: '>=24.0' + pkginfo: '>=1.12,<2.0' + platformdirs: '>=3.10.0,<5.0.0' + pydantic: '>=2' + pyproject_hooks: '>=1.0.0,<2.0.0' + python: '' + python-build: '>=1.2.1,<2.0.0' + python-fastjsonschema: '>=2.18.0,<3.0.0' + python-installer: '>=0.7.0,<0.8.0' pyyaml: '>=5.1' - requests: '>=2.18' + requests: '>=2.26,<3.0' + requests-toolbelt: '>=1.0.0,<2.0.0' ruamel.yaml: '' + semver: '>=3,<4' setuptools: '' - tomli: '' - tomlkit: '>=0.7.0' - toolz: '>=0.12.0,<1.0.0' - typing_extensions: '' - urllib3: '>=1.26.5,<2.0' - virtualenv: '>=20.0.26' + shellingham: '>=1.5,<2.0' + tomli: '>=2.0.1,<3.0.0' + tomlkit: '>=0.11.4,<1.0.0' + trove-classifiers: '>=2022.5.19' + typing_extensions: '>=4.6.1' + virtualenv: '>=20.26.6,<21.0.0' + zstandard: '>=0.15' hash: - md5: 518d59879a7ba4f3972109e8666860b2 - sha256: 905618b595d7a067fe37a282e3b84a4ed46542c1b497c76cef7b0f33f9335cb7 + md5: 5765138ff6fd447c9886763da9513e2a + sha256: 89538f7671ceea0a1511498fd321ea72f5f9c5cbde0c02fb6f8276b0af6fcf5a 
manager: conda name: conda-lock optional: false platform: linux-64 url: - https://conda.anaconda.org/conda-forge/noarch/conda-lock-2.5.7-pyhd8ed1ab_1.conda - version: 2.5.7 + https://conda.anaconda.org/conda-forge/noarch/conda-lock-3.0.2-pyh367d9c9_1.conda + version: 3.0.2 - category: main dependencies: - cachecontrol-with-filecache: '>=0.12.9' - cachy: '>=0.3.0' + __osx: '' + boltons: '>=23.0.0' + cachecontrol: '>=0.14.0,<0.15.0' + cachecontrol-with-filecache: '' + charset-normalizer: '' click: '>=8.0' click-default-group: '' - clikit: '>=0.6.2' - crashtest: '>=0.3.0' - ensureconda: '>=1.3' + crashtest: '>=0.4.1,<0.5.0' + dulwich: '>=0.22.6,<0.23.0' + ensureconda: '>=1.4.7' + filelock: '' gitpython: '>=3.1.30' - html5lib: '>=1.0' + importlib-metadata: '>=4.4' jinja2: '' - keyring: '>=21.2.0' - packaging: '>=20.4' - pkginfo: '>=1.4' - pydantic: '>=1.10' - python: '>=3.9' + keyring: '>=25.1.0,<26.0.0' + packaging: '>=24.0' + pkginfo: '>=1.12,<2.0' + platformdirs: '>=3.10.0,<5.0.0' + pydantic: '>=2' + pyproject_hooks: '>=1.0.0,<2.0.0' + python: '>=3.9' + python-build: '>=1.2.1,<2.0.0' + python-fastjsonschema: '>=2.18.0,<3.0.0' + python-installer: '>=0.7.0,<0.8.0' pyyaml: '>=5.1' - requests: '>=2.18' + requests: '>=2.26,<3.0' + requests-toolbelt: '>=1.0.0,<2.0.0' ruamel.yaml: '' + semver: '>=3,<4' setuptools: '' - tomli: '' - tomlkit: '>=0.7.0' - toolz: '>=0.12.0,<1.0.0' - typing_extensions: '' - urllib3: '>=1.26.5,<2.0' - virtualenv: '>=20.0.26' + shellingham: '>=1.5,<2.0' + tomli: '>=2.0.1,<3.0.0' + tomlkit: '>=0.11.4,<1.0.0' + trove-classifiers: '>=2022.5.19' + typing_extensions: '>=4.6.1' + virtualenv: '>=20.26.6,<21.0.0' + xattr: '>=1.0.0,<2.0.0' + zstandard: '>=0.15' hash: - md5: 518d59879a7ba4f3972109e8666860b2 - sha256: 905618b595d7a067fe37a282e3b84a4ed46542c1b497c76cef7b0f33f9335cb7 + md5: f2aa585ca1d9e14b758ffcc7f62e818f + sha256: b82def1417eba859a418769d2f26dd0aba7ced48fc27fe755c7d80506af4bc34 manager: conda name: conda-lock optional: false platform: osx-64 url: - https://conda.anaconda.org/conda-forge/noarch/conda-lock-2.5.7-pyhd8ed1ab_1.conda - version: 2.5.7 + https://conda.anaconda.org/conda-forge/noarch/conda-lock-3.0.2-pyh3eb8d45_1.conda + version: 3.0.2 - category: main dependencies: - cachecontrol-with-filecache: '>=0.12.9' - cachy: '>=0.3.0' + __osx: '' + boltons: '>=23.0.0' + cachecontrol: '>=0.14.0,<0.15.0' + cachecontrol-with-filecache: '' + charset-normalizer: '' click: '>=8.0' click-default-group: '' - clikit: '>=0.6.2' - crashtest: '>=0.3.0' - ensureconda: '>=1.3' + crashtest: '>=0.4.1,<0.5.0' + dulwich: '>=0.22.6,<0.23.0' + ensureconda: '>=1.4.7' + filelock: '' gitpython: '>=3.1.30' - html5lib: '>=1.0' + importlib-metadata: '>=4.4' jinja2: '' - keyring: '>=21.2.0' - packaging: '>=20.4' - pkginfo: '>=1.4' - pydantic: '>=1.10' - python: '>=3.9' + keyring: '>=25.1.0,<26.0.0' + packaging: '>=24.0' + pkginfo: '>=1.12,<2.0' + platformdirs: '>=3.10.0,<5.0.0' + pydantic: '>=2' + pyproject_hooks: '>=1.0.0,<2.0.0' + python: '>=3.9' + python-build: '>=1.2.1,<2.0.0' + python-fastjsonschema: '>=2.18.0,<3.0.0' + python-installer: '>=0.7.0,<0.8.0' pyyaml: '>=5.1' - requests: '>=2.18' + requests: '>=2.26,<3.0' + requests-toolbelt: '>=1.0.0,<2.0.0' ruamel.yaml: '' + semver: '>=3,<4' setuptools: '' - tomli: '' - tomlkit: '>=0.7.0' - toolz: '>=0.12.0,<1.0.0' - typing_extensions: '' - urllib3: '>=1.26.5,<2.0' - virtualenv: '>=20.0.26' + shellingham: '>=1.5,<2.0' + tomli: '>=2.0.1,<3.0.0' + tomlkit: '>=0.11.4,<1.0.0' + trove-classifiers: '>=2022.5.19' + typing_extensions: '>=4.6.1' + 
virtualenv: '>=20.26.6,<21.0.0' + xattr: '>=1.0.0,<2.0.0' + zstandard: '>=0.15' hash: - md5: 518d59879a7ba4f3972109e8666860b2 - sha256: 905618b595d7a067fe37a282e3b84a4ed46542c1b497c76cef7b0f33f9335cb7 + md5: f2aa585ca1d9e14b758ffcc7f62e818f + sha256: b82def1417eba859a418769d2f26dd0aba7ced48fc27fe755c7d80506af4bc34 manager: conda name: conda-lock optional: false platform: osx-arm64 url: - https://conda.anaconda.org/conda-forge/noarch/conda-lock-2.5.7-pyhd8ed1ab_1.conda - version: 2.5.7 + https://conda.anaconda.org/conda-forge/noarch/conda-lock-3.0.2-pyh3eb8d45_1.conda + version: 3.0.2 - category: main dependencies: click: '' @@ -2532,15 +2679,15 @@ package: toolz: '' vsts-python-api: '' hash: - md5: 5f9eb31961ff155520094c1672febdc7 - sha256: fbd6dcffdc1f07925cc191e3578b6997f09df7be377892c8b2d8a39b9b42224a + md5: e8d4f0d7bfbe1ac3ddee5cb877e82187 + sha256: 58d71b52ce655eec981f58c2d37de3a8d01be06036c33bf03de77153b53585b4 manager: conda name: conda-smithy optional: false platform: linux-64 url: - https://conda.anaconda.org/conda-forge/noarch/conda-smithy-3.47.0-unix_pyhd81877a_0.conda - version: 3.47.0 + https://conda.anaconda.org/conda-forge/noarch/conda-smithy-3.48.1-unix_pyhd81877a_0.conda + version: 3.48.1 - category: main dependencies: __unix: '' @@ -2570,15 +2717,15 @@ package: toolz: '' vsts-python-api: '' hash: - md5: 5f9eb31961ff155520094c1672febdc7 - sha256: fbd6dcffdc1f07925cc191e3578b6997f09df7be377892c8b2d8a39b9b42224a + md5: e8d4f0d7bfbe1ac3ddee5cb877e82187 + sha256: 58d71b52ce655eec981f58c2d37de3a8d01be06036c33bf03de77153b53585b4 manager: conda name: conda-smithy optional: false platform: osx-64 url: - https://conda.anaconda.org/conda-forge/noarch/conda-smithy-3.47.0-unix_pyhd81877a_0.conda - version: 3.47.0 + https://conda.anaconda.org/conda-forge/noarch/conda-smithy-3.48.1-unix_pyhd81877a_0.conda + version: 3.48.1 - category: main dependencies: __unix: '' @@ -2608,15 +2755,15 @@ package: toolz: '' vsts-python-api: '' hash: - md5: 5f9eb31961ff155520094c1672febdc7 - sha256: fbd6dcffdc1f07925cc191e3578b6997f09df7be377892c8b2d8a39b9b42224a + md5: e8d4f0d7bfbe1ac3ddee5cb877e82187 + sha256: 58d71b52ce655eec981f58c2d37de3a8d01be06036c33bf03de77153b53585b4 manager: conda name: conda-smithy optional: false platform: osx-arm64 url: - https://conda.anaconda.org/conda-forge/noarch/conda-smithy-3.47.0-unix_pyhd81877a_0.conda - version: 3.47.0 + https://conda.anaconda.org/conda-forge/noarch/conda-smithy-3.48.1-unix_pyhd81877a_0.conda + version: 3.48.1 - category: main dependencies: python: '>=3.8' @@ -2670,15 +2817,15 @@ package: python_abi: 3.11.* tomli: '' hash: - md5: 5f57c67f3880dd62b83b3867ea03d9bc - sha256: 88eceeaed558d6b313564142a6c013646cbd5289d5f20a61253bcdfe198e6f32 + md5: e761745f85b5fc909aab137ff59bc9cb + sha256: dabff490f3a4a4beb70c16624766286aa47dfb7dec0275966498a0854951e754 manager: conda name: coverage optional: false platform: linux-64 url: - https://conda.anaconda.org/conda-forge/linux-64/coverage-7.7.1-py311h2dc5d0c_0.conda - version: 7.7.1 + https://conda.anaconda.org/conda-forge/linux-64/coverage-7.8.1-py311h2dc5d0c_0.conda + version: 7.8.1 - category: main dependencies: __osx: '>=10.13' @@ -2686,15 +2833,15 @@ package: python_abi: 3.11.* tomli: '' hash: - md5: 588d993196273d5a122d24643751c39b - sha256: 495ea8caa559fbe5cceabc181219ea22de06fc26a3878ef8ab876e9ff99fe54a + md5: a232a2c290d493aec5c321d42e8bb5f3 + sha256: ceeb9931a48ba76b587ec728e1c87c84591d2dc3fd84d8b33070d9331770f95b manager: conda name: coverage optional: false platform: osx-64 url: - 
https://conda.anaconda.org/conda-forge/osx-64/coverage-7.7.1-py311ha3cf9ac_0.conda - version: 7.7.1 + https://conda.anaconda.org/conda-forge/osx-64/coverage-7.8.1-py311ha3cf9ac_0.conda + version: 7.8.1 - category: main dependencies: __osx: '>=11.0' @@ -2702,83 +2849,86 @@ package: python_abi: 3.11.* tomli: '' hash: - md5: 1ba342dd65d19f88074b07c773cbb0e9 - sha256: 9af78df87955d068231e8a436041f8cc4ec4e559ab59697037eb183cba0c1435 + md5: 3311b8464e609c253df8ed73b030a91b + sha256: 33f71ac48479ad4d33517ef4384dddad1804d986cc55b99c78539b3185f5d3b9 manager: conda name: coverage optional: false platform: osx-arm64 url: - https://conda.anaconda.org/conda-forge/osx-arm64/coverage-7.7.1-py311h4921393_0.conda - version: 7.7.1 + https://conda.anaconda.org/conda-forge/osx-arm64/coverage-7.8.1-py311h4921393_0.conda + version: 7.8.1 - category: main dependencies: - libgcc-ng: '>=12' - libstdcxx-ng: '>=12' + __glibc: '>=2.17,<3.0.a0' + libgcc: '>=13' + libstdcxx: '>=13' hash: - md5: a7f1500bf47196443b67355d67afec6d - sha256: fc809e6894537a77c6cd1e65f593ae1bfbf60f494bce55295212d1a9bacd7fa7 + md5: 54e8e1a8144fd678c5d43905e3ba684d + sha256: 234e423531e0d5f31e8e8b2979c4dfa05bdb4c502cb3eb0a5db865bd831d333e manager: conda name: cpp-expected optional: false platform: linux-64 url: - https://conda.anaconda.org/conda-forge/linux-64/cpp-expected-1.1.0-hf52228f_0.conda + https://conda.anaconda.org/conda-forge/linux-64/cpp-expected-1.1.0-hff21bea_1.conda version: 1.1.0 - category: main dependencies: - libcxx: '>=14.0.6' + __osx: '>=10.13' + libcxx: '>=18' hash: - md5: 53c16c2f79183b459ef6acb6c93f3550 - sha256: 80c0551e5d297c59991c09f6611331f3d56517894b63c8f6a85d51e601b8ea69 + md5: 4187c6203b403154e42460fa106579d0 + sha256: 0c0c4589439ff342b73c3eeced3b202661b0882db9fbacce191c4badad422a1f manager: conda name: cpp-expected optional: false platform: osx-64 url: - https://conda.anaconda.org/conda-forge/osx-64/cpp-expected-1.1.0-hb8565cd_0.conda + https://conda.anaconda.org/conda-forge/osx-64/cpp-expected-1.1.0-hd6aca1a_1.conda version: 1.1.0 - category: main dependencies: - libcxx: '>=14.0.6' + __osx: '>=11.0' + libcxx: '>=18' hash: - md5: d58ea142acc3d93f6f0176e31e4493ad - sha256: 9af3323963a059681eb848218c11ba2208f12bc5416ee357b0d4f9f8bef5ebca + md5: 05692bdc7830e860bd32652fa7857705 + sha256: a41d97157e628947d13bf5920bf0d533f81b8a3ed68dbe4171149f522e99eae6 manager: conda name: cpp-expected optional: false platform: osx-arm64 url: - https://conda.anaconda.org/conda-forge/osx-arm64/cpp-expected-1.1.0-hffc8910_0.conda + https://conda.anaconda.org/conda-forge/osx-arm64/cpp-expected-1.1.0-h177bc72_1.conda version: 1.1.0 - category: main dependencies: - python: 3.11.11.* + python: '>=3.11,<3.12.0a0' python_abi: '*' hash: - md5: fb7bc3f1bccb39021a53309e83bce28d - sha256: 52e462716ff6b062bf6992f9e95fcb65a0b95a47db73f0478bd0ceab8a37036a + md5: 451718359f1658c6819d8665f82585ab + sha256: 91e8da449682e37e326a560aa3575ee0f32ab697119e4cf4a76fd68af61fc1a0 manager: conda name: cpython optional: false platform: linux-64 url: - https://conda.anaconda.org/conda-forge/noarch/cpython-3.11.11-py311hd8ed1ab_2.conda - version: 3.11.11 + https://conda.anaconda.org/conda-forge/noarch/cpython-3.11.12-py311hd8ed1ab_0.conda + version: 3.11.12 - category: main dependencies: - python: 3.11.11.* + python: '>=3.11,<3.12.0a0' python_abi: '*' hash: - md5: fb7bc3f1bccb39021a53309e83bce28d - sha256: 52e462716ff6b062bf6992f9e95fcb65a0b95a47db73f0478bd0ceab8a37036a + md5: 451718359f1658c6819d8665f82585ab + sha256: 
91e8da449682e37e326a560aa3575ee0f32ab697119e4cf4a76fd68af61fc1a0 manager: conda name: cpython optional: false platform: osx-64 url: - https://conda.anaconda.org/conda-forge/noarch/cpython-3.11.11-py311hd8ed1ab_2.conda - version: 3.11.11 + https://conda.anaconda.org/conda-forge/noarch/cpython-3.11.12-py311hd8ed1ab_0.conda + version: 3.11.12 - category: main dependencies: python: '>=3.9,<4.0' @@ -2817,111 +2967,108 @@ package: version: 0.4.1 - category: main dependencies: - __glibc: '>=2.17,<3.0.a0' cffi: '>=1.12' - libgcc: '>=13' - openssl: '>=3.4.1,<4.0a0' + libgcc-ng: '>=12' + openssl: '>=3.1.0,<4.0a0' python: '>=3.11,<3.12.0a0' python_abi: 3.11.* hash: - md5: 8b3117a632cf269ca87ab6a2559bc0ba - sha256: 92c094d392767ab721578fa50470b7741f3910d41bc72c1b8e369b9a9ba1886d + md5: 4df4df92db0b9168c11b72460baec870 + sha256: e0f62e90e664ce33054c7839ee10a975e0a80010c2691e99679319f60decca9f manager: conda name: cryptography optional: false platform: linux-64 url: - https://conda.anaconda.org/conda-forge/linux-64/cryptography-44.0.2-py311hafd3f86_0.conda - version: 44.0.2 + https://conda.anaconda.org/conda-forge/linux-64/cryptography-40.0.2-py311h9b4c7bb_0.conda + version: 40.0.2 - category: main dependencies: - __osx: '>=10.13' cffi: '>=1.12' - openssl: '>=3.4.1,<4.0a0' + openssl: '>=3.1.0,<4.0a0' python: '>=3.11,<3.12.0a0' python_abi: 3.11.* hash: - md5: 0c38e5dea3d1fb475434e37d9551a825 - sha256: 245faf5df4125816390c20ba6182b87930c03139d159df588237b772115610c4 + md5: 724b75f84bb1b5d932627d090a527168 + sha256: 2700217dfc3a48e8715d7f4e94870448a37d8e9bb25c4130e83c4807c2f1f3c3 manager: conda name: cryptography optional: false platform: osx-64 url: - https://conda.anaconda.org/conda-forge/osx-64/cryptography-44.0.2-py311h336e25c_0.conda - version: 44.0.2 + https://conda.anaconda.org/conda-forge/osx-64/cryptography-40.0.2-py311h61927ef_0.conda + version: 40.0.2 - category: main dependencies: - __osx: '>=11.0' cffi: '>=1.12' - openssl: '>=3.4.1,<4.0a0' + openssl: '>=3.1.0,<4.0a0' python: '>=3.11,<3.12.0a0' python_abi: 3.11.* hash: - md5: 6243c5d50fc0dc2ca3d48d811c9b0477 - sha256: 5ee66121e0f7495c65431960811ef6e4cdfdf3f0cbdfd79e90ab75fcc8607fbb + md5: 09ebc937e6441f174bf76ea8f3b789ce + sha256: 2c10a11166f3199795efb6ceceb4dd4557c38f40d568df8af2b829e4597dc360 manager: conda name: cryptography optional: false platform: osx-arm64 url: - https://conda.anaconda.org/conda-forge/osx-arm64/cryptography-44.0.2-py311h8be0713_0.conda - version: 44.0.2 + https://conda.anaconda.org/conda-forge/osx-arm64/cryptography-40.0.2-py311h507f6e9_0.conda + version: 40.0.2 - category: main dependencies: __glibc: '>=2.17,<3.0.a0' krb5: '>=1.21.3,<1.22.0a0' - libcurl: 8.12.1 + libcurl: 8.13.0 libgcc: '>=13' libssh2: '>=1.11.1,<2.0a0' libzlib: '>=1.3.1,<2.0a0' openssl: '>=3.4.1,<4.0a0' - zstd: '>=1.5.6,<1.6.0a0' + zstd: '>=1.5.7,<1.6.0a0' hash: - md5: 2c36813e99b680af6c47b6cc19feca7b - sha256: 9c11bca8e5400c13a56ea6f3bfab7208e8f31d70786904a4bc2175856db26f18 + md5: d50b765d509a4fe2e723b069266e17eb + sha256: e01eab0947009ac3bd9f45b565ad7d821d2c7621d9394694a49e296c63ef680d manager: conda name: curl optional: false platform: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/curl-8.12.1-h332b0f4_0.conda - version: 8.12.1 + url: https://conda.anaconda.org/conda-forge/linux-64/curl-8.13.0-h332b0f4_0.conda + version: 8.13.0 - category: main dependencies: __osx: '>=10.13' krb5: '>=1.21.3,<1.22.0a0' - libcurl: 8.12.1 + libcurl: 8.13.0 libssh2: '>=1.11.1,<2.0a0' libzlib: '>=1.3.1,<2.0a0' openssl: '>=3.4.1,<4.0a0' - zstd: 
'>=1.5.6,<1.6.0a0' + zstd: '>=1.5.7,<1.6.0a0' hash: - md5: 16a098af6e194b242ef441bcaa51c5a8 - sha256: b3d6d8ccd80b608adaae7e14ecf64a5a5bf39f1510a10b4e540019c8e5d5d78a + md5: c09f68ee05935b286fabc302d154fb2b + sha256: e86062152032b304bf69279f1e01b5260f0c717791807672d6f533891caef9f6 manager: conda name: curl optional: false platform: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/curl-8.12.1-h5dec5d8_0.conda - version: 8.12.1 + url: https://conda.anaconda.org/conda-forge/osx-64/curl-8.13.0-h5dec5d8_0.conda + version: 8.13.0 - category: main dependencies: __osx: '>=11.0' krb5: '>=1.21.3,<1.22.0a0' - libcurl: 8.12.1 + libcurl: 8.13.0 libssh2: '>=1.11.1,<2.0a0' libzlib: '>=1.3.1,<2.0a0' openssl: '>=3.4.1,<4.0a0' - zstd: '>=1.5.6,<1.6.0a0' + zstd: '>=1.5.7,<1.6.0a0' hash: - md5: 37ac6893d6e5c964a96ab239ebe09b12 - sha256: 47e98a701e83589fbf7e5b990930bf275f840ab49f43c1212346f8c165f52f7c + md5: ced1f266875e2b53624b5b55881462c1 + sha256: f3b74a382a7940d1bd2191a8321cb571e6b9cfdf02541ca03835c0b6dd3e844b manager: conda name: curl optional: false platform: osx-arm64 - url: https://conda.anaconda.org/conda-forge/osx-arm64/curl-8.12.1-h73640d1_0.conda - version: 8.12.1 + url: https://conda.anaconda.org/conda-forge/osx-arm64/curl-8.13.0-h73640d1_0.conda + version: 8.13.0 - category: main dependencies: __glibc: '>=2.17,<3.0.a0' @@ -2983,15 +3130,15 @@ package: pyyaml: '>=5.3.1' toolz: '>=0.10.0' hash: - md5: 36f6cc22457e3d6a6051c5370832f96c - sha256: 72badd945d856d2928fdbe051f136f903bcfee8136f1526c8362c6c465b793ec + md5: 8f0ef561cd615a17df3256742a3457c4 + sha256: 993fe9ff727441c57fab9969c61eb04eeca2ca82cce432804798f258177ab419 manager: conda name: dask-core optional: false platform: linux-64 url: - https://conda.anaconda.org/conda-forge/noarch/dask-core-2025.3.0-pyhd8ed1ab_0.conda - version: 2025.3.0 + https://conda.anaconda.org/conda-forge/noarch/dask-core-2025.5.1-pyhd8ed1ab_0.conda + version: 2025.5.1 - category: main dependencies: click: '>=8.1' @@ -3004,15 +3151,15 @@ package: pyyaml: '>=5.3.1' toolz: '>=0.10.0' hash: - md5: 36f6cc22457e3d6a6051c5370832f96c - sha256: 72badd945d856d2928fdbe051f136f903bcfee8136f1526c8362c6c465b793ec + md5: 8f0ef561cd615a17df3256742a3457c4 + sha256: 993fe9ff727441c57fab9969c61eb04eeca2ca82cce432804798f258177ab419 manager: conda name: dask-core optional: false platform: osx-64 url: - https://conda.anaconda.org/conda-forge/noarch/dask-core-2025.3.0-pyhd8ed1ab_0.conda - version: 2025.3.0 + https://conda.anaconda.org/conda-forge/noarch/dask-core-2025.5.1-pyhd8ed1ab_0.conda + version: 2025.5.1 - category: main dependencies: click: '>=8.1' @@ -3025,29 +3172,32 @@ package: pyyaml: '>=5.3.1' toolz: '>=0.10.0' hash: - md5: 36f6cc22457e3d6a6051c5370832f96c - sha256: 72badd945d856d2928fdbe051f136f903bcfee8136f1526c8362c6c465b793ec + md5: 8f0ef561cd615a17df3256742a3457c4 + sha256: 993fe9ff727441c57fab9969c61eb04eeca2ca82cce432804798f258177ab419 manager: conda name: dask-core optional: false platform: osx-arm64 url: - https://conda.anaconda.org/conda-forge/noarch/dask-core-2025.3.0-pyhd8ed1ab_0.conda - version: 2025.3.0 + https://conda.anaconda.org/conda-forge/noarch/dask-core-2025.5.1-pyhd8ed1ab_0.conda + version: 2025.5.1 - category: main dependencies: - expat: '>=2.4.2,<3.0a0' - libgcc-ng: '>=9.4.0' - libglib: '>=2.70.2,<3.0a0' + __glibc: '>=2.17,<3.0.a0' + libexpat: '>=2.7.0,<3.0a0' + libgcc: '>=13' + libglib: '>=2.84.2,<3.0a0' + libstdcxx: '>=13' + libzlib: '>=1.3.1,<2.0a0' hash: - md5: ecfff944ba3960ecb334b9a2663d708d - sha256: 
8f5f995699a2d9dbdd62c61385bfeeb57c82a681a7c8c5313c395aa0ccab68a5 + md5: 679616eb5ad4e521c83da4650860aba7 + sha256: 3b988146a50e165f0fa4e839545c679af88e4782ec284cc7b6d07dd226d6a068 manager: conda name: dbus optional: false platform: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/dbus-1.13.6-h5008d03_3.tar.bz2 - version: 1.13.6 + url: https://conda.anaconda.org/conda-forge/linux-64/dbus-1.16.2-h3c4dab8_0.conda + version: 1.16.2 - category: main dependencies: python: '>=3.11,<3.12.0a0' @@ -3179,7 +3329,7 @@ package: click: '>=8.0' cloudpickle: '>=3.0.0' cytoolz: '>=0.11.2' - dask-core: '>=2025.3.0,<2025.3.1.0a0' + dask-core: '>=2025.5.1,<2025.5.2.0a0' jinja2: '>=2.10.3' locket: '>=1.0.0' msgpack-python: '>=1.0.2' @@ -3194,21 +3344,21 @@ package: urllib3: '>=1.26.5' zict: '>=3.0.0' hash: - md5: 968a7a4ff98bcfb515b0f1c94d35553f - sha256: ea055aeda774d03ec96e0901ec119c6d3dc21ddd50af166bec664a76efd5f82a + md5: d2949f56a1479507e36e847681903376 + sha256: fc550701d648ba791f271068a792788047850bfd23ed082beb5317bb7d77a099 manager: conda name: distributed optional: false platform: linux-64 url: - https://conda.anaconda.org/conda-forge/noarch/distributed-2025.3.0-pyhd8ed1ab_0.conda - version: 2025.3.0 + https://conda.anaconda.org/conda-forge/noarch/distributed-2025.5.1-pyhd8ed1ab_0.conda + version: 2025.5.1 - category: main dependencies: click: '>=8.0' cloudpickle: '>=3.0.0' cytoolz: '>=0.11.2' - dask-core: '>=2025.3.0,<2025.3.1.0a0' + dask-core: '>=2025.5.1,<2025.5.2.0a0' jinja2: '>=2.10.3' locket: '>=1.0.0' msgpack-python: '>=1.0.2' @@ -3223,21 +3373,21 @@ package: urllib3: '>=1.26.5' zict: '>=3.0.0' hash: - md5: 968a7a4ff98bcfb515b0f1c94d35553f - sha256: ea055aeda774d03ec96e0901ec119c6d3dc21ddd50af166bec664a76efd5f82a + md5: d2949f56a1479507e36e847681903376 + sha256: fc550701d648ba791f271068a792788047850bfd23ed082beb5317bb7d77a099 manager: conda name: distributed optional: false platform: osx-64 url: - https://conda.anaconda.org/conda-forge/noarch/distributed-2025.3.0-pyhd8ed1ab_0.conda - version: 2025.3.0 + https://conda.anaconda.org/conda-forge/noarch/distributed-2025.5.1-pyhd8ed1ab_0.conda + version: 2025.5.1 - category: main dependencies: click: '>=8.0' cloudpickle: '>=3.0.0' cytoolz: '>=0.11.2' - dask-core: '>=2025.3.0,<2025.3.1.0a0' + dask-core: '>=2025.5.1,<2025.5.2.0a0' jinja2: '>=2.10.3' locket: '>=1.0.0' msgpack-python: '>=1.0.2' @@ -3252,15 +3402,15 @@ package: urllib3: '>=1.26.5' zict: '>=3.0.0' hash: - md5: 968a7a4ff98bcfb515b0f1c94d35553f - sha256: ea055aeda774d03ec96e0901ec119c6d3dc21ddd50af166bec664a76efd5f82a + md5: d2949f56a1479507e36e847681903376 + sha256: fc550701d648ba791f271068a792788047850bfd23ed082beb5317bb7d77a099 manager: conda name: distributed optional: false platform: osx-arm64 url: - https://conda.anaconda.org/conda-forge/noarch/distributed-2025.3.0-pyhd8ed1ab_0.conda - version: 2025.3.0 + https://conda.anaconda.org/conda-forge/noarch/distributed-2025.5.1-pyhd8ed1ab_0.conda + version: 2025.5.1 - category: main dependencies: python: '>=3.9' @@ -3299,43 +3449,101 @@ package: version: 1.9.0 - category: main dependencies: - python: '>=3.9,<4.0.0' + cryptography: '>=2.6,<42.0' + httpcore: '>=0.17.3' + idna: '>=2.1,<4.0' + python: '>=3.8.0,<4.0.0' sniffio: '' hash: - md5: 5fbd60d61d21b4bd2f9d7a48fe100418 - sha256: 3ec40ccf63f2450c5e6c7dd579e42fc2e97caf0d8cd4ba24aa434e6fc264eda0 + md5: a0059139087e108074f4b48b5e94730e + sha256: 11feaf50685db60b7b0b2c3253930fe8c38c6ff1b7a40aafbf37e5a3f4dc97fc manager: conda name: dnspython optional: false platform: linux-64 - 
url: https://conda.anaconda.org/conda-forge/noarch/dnspython-2.7.0-pyhff2d567_1.conda - version: 2.7.0 + url: https://conda.anaconda.org/conda-forge/noarch/dnspython-2.6.1-pyhd8ed1ab_0.conda + version: 2.6.1 - category: main dependencies: - python: '>=3.9,<4.0.0' + cryptography: '>=2.6,<42.0' + httpcore: '>=0.17.3' + idna: '>=2.1,<4.0' + python: '>=3.8.0,<4.0.0' sniffio: '' hash: - md5: 5fbd60d61d21b4bd2f9d7a48fe100418 - sha256: 3ec40ccf63f2450c5e6c7dd579e42fc2e97caf0d8cd4ba24aa434e6fc264eda0 + md5: a0059139087e108074f4b48b5e94730e + sha256: 11feaf50685db60b7b0b2c3253930fe8c38c6ff1b7a40aafbf37e5a3f4dc97fc manager: conda name: dnspython optional: false platform: osx-64 - url: https://conda.anaconda.org/conda-forge/noarch/dnspython-2.7.0-pyhff2d567_1.conda - version: 2.7.0 + url: https://conda.anaconda.org/conda-forge/noarch/dnspython-2.6.1-pyhd8ed1ab_0.conda + version: 2.6.1 - category: main dependencies: - python: '>=3.9,<4.0.0' + cryptography: '>=2.6,<42.0' + httpcore: '>=0.17.3' + idna: '>=2.1,<4.0' + python: '>=3.8.0,<4.0.0' sniffio: '' hash: - md5: 5fbd60d61d21b4bd2f9d7a48fe100418 - sha256: 3ec40ccf63f2450c5e6c7dd579e42fc2e97caf0d8cd4ba24aa434e6fc264eda0 + md5: a0059139087e108074f4b48b5e94730e + sha256: 11feaf50685db60b7b0b2c3253930fe8c38c6ff1b7a40aafbf37e5a3f4dc97fc manager: conda name: dnspython optional: false platform: osx-arm64 - url: https://conda.anaconda.org/conda-forge/noarch/dnspython-2.7.0-pyhff2d567_1.conda - version: 2.7.0 + url: https://conda.anaconda.org/conda-forge/noarch/dnspython-2.6.1-pyhd8ed1ab_0.conda + version: 2.6.1 + - category: main + dependencies: + __glibc: '>=2.17,<3.0.a0' + libgcc: '>=13' + python: '>=3.11,<3.12.0a0' + python_abi: 3.11.* + urllib3: '>=1.25' + hash: + md5: 2d2e806c71919732efcb879a192f37e7 + sha256: 64f2704c4b3907fefae27e744b414aa3f81df1df2b6c88a440723b88fe2d7477 + manager: conda + name: dulwich + optional: false + platform: linux-64 + url: + https://conda.anaconda.org/conda-forge/linux-64/dulwich-0.22.8-py311h9e33e62_0.conda + version: 0.22.8 + - category: main + dependencies: + __osx: '>=10.13' + python: '>=3.11,<3.12.0a0' + python_abi: 3.11.* + urllib3: '>=1.25' + hash: + md5: 1e59e251e1263df70b041012cbebbf33 + sha256: 53d5efaa84042a6d36c039516395ce9308917d1a50d6c315670880a133426f1c + manager: conda + name: dulwich + optional: false + platform: osx-64 + url: + https://conda.anaconda.org/conda-forge/osx-64/dulwich-0.22.8-py311h3b9c2be_0.conda + version: 0.22.8 + - category: main + dependencies: + __osx: '>=11.0' + python: '>=3.11,<3.12.0a0' + python_abi: 3.11.* + urllib3: '>=1.25' + hash: + md5: 1d7e4d1a3e814510311f893206565d22 + sha256: 21fd442c9ff7b8081b0997cc95cb3389d6709262dfd59ceaef6e41755a4080f2 + manager: conda + name: dulwich + optional: false + platform: osx-arm64 + url: + https://conda.anaconda.org/conda-forge/osx-arm64/dulwich-0.22.8-py311h3ff9189_0.conda + version: 0.22.8 - category: main dependencies: python: '>=3.9' @@ -3372,24 +3580,108 @@ package: platform: osx-arm64 url: https://conda.anaconda.org/conda-forge/noarch/editables-0.5-pyhd8ed1ab_1.conda version: '0.5' + - category: main + dependencies: + dnspython: '>=2.0.0' + idna: '>=2.0.0' + python: '>=3.9' + hash: + md5: da16dd3b0b71339060cd44cb7110ddf9 + sha256: b91a19eb78edfc2dbb36de9a67f74ee2416f1b5273dd7327abe53f2dbf864736 + manager: conda + name: email-validator + optional: false + platform: linux-64 + url: + https://conda.anaconda.org/conda-forge/noarch/email-validator-2.2.0-pyhd8ed1ab_1.conda + version: 2.2.0 + - category: main + dependencies: + dnspython: '>=2.0.0' + 
idna: '>=2.0.0' + python: '>=3.9' + hash: + md5: da16dd3b0b71339060cd44cb7110ddf9 + sha256: b91a19eb78edfc2dbb36de9a67f74ee2416f1b5273dd7327abe53f2dbf864736 + manager: conda + name: email-validator + optional: false + platform: osx-64 + url: + https://conda.anaconda.org/conda-forge/noarch/email-validator-2.2.0-pyhd8ed1ab_1.conda + version: 2.2.0 + - category: main + dependencies: + dnspython: '>=2.0.0' + idna: '>=2.0.0' + python: '>=3.9' + hash: + md5: da16dd3b0b71339060cd44cb7110ddf9 + sha256: b91a19eb78edfc2dbb36de9a67f74ee2416f1b5273dd7327abe53f2dbf864736 + manager: conda + name: email-validator + optional: false + platform: osx-arm64 + url: + https://conda.anaconda.org/conda-forge/noarch/email-validator-2.2.0-pyhd8ed1ab_1.conda + version: 2.2.0 + - category: main + dependencies: + email-validator: '>=2.2.0,<2.2.1.0a0' + hash: + md5: 0794f8807ff2c6f020422cacb1bd7bfa + sha256: e0d0fdf587aa0ed0ff08b2bce3ab355f46687b87b0775bfba01cc80a859ee6a2 + manager: conda + name: email_validator + optional: false + platform: linux-64 + url: + https://conda.anaconda.org/conda-forge/noarch/email_validator-2.2.0-hd8ed1ab_1.conda + version: 2.2.0 + - category: main + dependencies: + email-validator: '>=2.2.0,<2.2.1.0a0' + hash: + md5: 0794f8807ff2c6f020422cacb1bd7bfa + sha256: e0d0fdf587aa0ed0ff08b2bce3ab355f46687b87b0775bfba01cc80a859ee6a2 + manager: conda + name: email_validator + optional: false + platform: osx-64 + url: + https://conda.anaconda.org/conda-forge/noarch/email_validator-2.2.0-hd8ed1ab_1.conda + version: 2.2.0 + - category: main + dependencies: + email-validator: '>=2.2.0,<2.2.1.0a0' + hash: + md5: 0794f8807ff2c6f020422cacb1bd7bfa + sha256: e0d0fdf587aa0ed0ff08b2bce3ab355f46687b87b0775bfba01cc80a859ee6a2 + manager: conda + name: email_validator + optional: false + platform: osx-arm64 + url: + https://conda.anaconda.org/conda-forge/noarch/email_validator-2.2.0-hd8ed1ab_1.conda + version: 2.2.0 - category: main dependencies: appdirs: '' click: '>=5.1' filelock: '' packaging: '' - python: '>=3.9' + python: '' requests: '>=2' hash: - md5: a18423d4b24e6480165a38f102ca8b49 - sha256: 4efc864d9245a30f15bbc6eb12d06a5cf7a11d91d3e2c84630df1ce83f8b9878 + md5: 349b1d4311d7344bff92ad890fdbe6aa + sha256: a54317217ac986038b01c254a9f06b59f4401587f936ba2089e5fc025c7fc698 manager: conda name: ensureconda optional: false platform: linux-64 url: - https://conda.anaconda.org/conda-forge/noarch/ensureconda-1.4.4-pyhd8ed1ab_1.conda - version: 1.4.4 + https://conda.anaconda.org/conda-forge/noarch/ensureconda-1.4.7-pyh29332c3_0.conda + version: 1.4.7 - category: main dependencies: appdirs: '' @@ -3399,15 +3691,15 @@ package: python: '>=3.9' requests: '>=2' hash: - md5: a18423d4b24e6480165a38f102ca8b49 - sha256: 4efc864d9245a30f15bbc6eb12d06a5cf7a11d91d3e2c84630df1ce83f8b9878 + md5: 349b1d4311d7344bff92ad890fdbe6aa + sha256: a54317217ac986038b01c254a9f06b59f4401587f936ba2089e5fc025c7fc698 manager: conda name: ensureconda optional: false platform: osx-64 url: - https://conda.anaconda.org/conda-forge/noarch/ensureconda-1.4.4-pyhd8ed1ab_1.conda - version: 1.4.4 + https://conda.anaconda.org/conda-forge/noarch/ensureconda-1.4.7-pyh29332c3_0.conda + version: 1.4.7 - category: main dependencies: appdirs: '' @@ -3417,15 +3709,15 @@ package: python: '>=3.9' requests: '>=2' hash: - md5: a18423d4b24e6480165a38f102ca8b49 - sha256: 4efc864d9245a30f15bbc6eb12d06a5cf7a11d91d3e2c84630df1ce83f8b9878 + md5: 349b1d4311d7344bff92ad890fdbe6aa + sha256: a54317217ac986038b01c254a9f06b59f4401587f936ba2089e5fc025c7fc698 manager: conda name: 
ensureconda optional: false platform: osx-arm64 url: - https://conda.anaconda.org/conda-forge/noarch/ensureconda-1.4.4-pyhd8ed1ab_1.conda - version: 1.4.4 + https://conda.anaconda.org/conda-forge/noarch/ensureconda-1.4.7-pyh29332c3_0.conda + version: 1.4.7 - category: main dependencies: libgcc-ng: '>=10.3.0' @@ -3460,45 +3752,84 @@ package: platform: osx-arm64 url: https://conda.anaconda.org/conda-forge/osx-arm64/epoxy-1.5.10-h1c322ee_1.tar.bz2 version: 1.5.10 + - category: main + dependencies: + python: '' + hash: + md5: 202726f0a6ffa85897273591c4b32b0e + sha256: e98899cec440273341378bcff0e47bc10a5dbdffc0e77a7bc7010ca9189e3b4f + manager: conda + name: evalidate + optional: false + platform: linux-64 + url: https://conda.anaconda.org/conda-forge/noarch/evalidate-2.0.5-pyhe01879c_0.conda + version: 2.0.5 + - category: main + dependencies: + python: '>=3.9' + hash: + md5: 202726f0a6ffa85897273591c4b32b0e + sha256: e98899cec440273341378bcff0e47bc10a5dbdffc0e77a7bc7010ca9189e3b4f + manager: conda + name: evalidate + optional: false + platform: osx-64 + url: https://conda.anaconda.org/conda-forge/noarch/evalidate-2.0.5-pyhe01879c_0.conda + version: 2.0.5 + - category: main + dependencies: + python: '>=3.9' + hash: + md5: 202726f0a6ffa85897273591c4b32b0e + sha256: e98899cec440273341378bcff0e47bc10a5dbdffc0e77a7bc7010ca9189e3b4f + manager: conda + name: evalidate + optional: false + platform: osx-arm64 + url: https://conda.anaconda.org/conda-forge/noarch/evalidate-2.0.5-pyhe01879c_0.conda + version: 2.0.5 - category: main dependencies: python: '>=3.9' + typing_extensions: '>=4.6.0' hash: - md5: a16662747cdeb9abbac74d0057cc976e - sha256: cbde2c64ec317118fc06b223c5fd87c8a680255e7348dd60e7b292d2e103e701 + md5: 72e42d28960d875c7654614f8b50939a + sha256: ce61f4f99401a4bd455b89909153b40b9c823276aefcbb06f2044618696009ca manager: conda name: exceptiongroup optional: false platform: linux-64 url: - https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.2.2-pyhd8ed1ab_1.conda - version: 1.2.2 + https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.3.0-pyhd8ed1ab_0.conda + version: 1.3.0 - category: main dependencies: python: '>=3.9' + typing_extensions: '>=4.6.0' hash: - md5: a16662747cdeb9abbac74d0057cc976e - sha256: cbde2c64ec317118fc06b223c5fd87c8a680255e7348dd60e7b292d2e103e701 + md5: 72e42d28960d875c7654614f8b50939a + sha256: ce61f4f99401a4bd455b89909153b40b9c823276aefcbb06f2044618696009ca manager: conda name: exceptiongroup optional: false platform: osx-64 url: - https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.2.2-pyhd8ed1ab_1.conda - version: 1.2.2 + https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.3.0-pyhd8ed1ab_0.conda + version: 1.3.0 - category: main dependencies: python: '>=3.9' + typing_extensions: '>=4.6.0' hash: - md5: a16662747cdeb9abbac74d0057cc976e - sha256: cbde2c64ec317118fc06b223c5fd87c8a680255e7348dd60e7b292d2e103e701 + md5: 72e42d28960d875c7654614f8b50939a + sha256: ce61f4f99401a4bd455b89909153b40b9c823276aefcbb06f2044618696009ca manager: conda name: exceptiongroup optional: false platform: osx-arm64 url: - https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.2.2-pyhd8ed1ab_1.conda - version: 1.2.2 + https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.3.0-pyhd8ed1ab_0.conda + version: 1.3.0 - category: main dependencies: python: '>=3.9' @@ -3537,18 +3868,118 @@ package: version: 2.1.1 - category: main dependencies: - __glibc: '>=2.17,<3.0.a0' - libexpat: 2.6.4 - libgcc: '>=13' + email_validator: '>=2.0.0' + 
fastapi-cli: '>=0.0.5' + httpx: '>=0.23.0' + jinja2: '>=3.1.5' + pydantic: '>=1.7.4,!=1.8,!=1.8.1,!=2.0.0,!=2.0.1,!=2.1.0,<3.0.0' + python: '' + python-multipart: '>=0.0.18' + starlette: '>=0.40.0,<0.47.0' + typing_extensions: '>=4.8.0' + uvicorn-standard: '>=0.12.0' + hash: + md5: 4bc12ece07c8c717e19fd790bfec100d + sha256: d72da6ea523d80968f0cca4ba4fb6c31fc27450d07e419f039da9b99654a56e6 + manager: conda + name: fastapi + optional: false + platform: linux-64 + url: + https://conda.anaconda.org/conda-forge/noarch/fastapi-0.115.12-pyh29332c3_0.conda + version: 0.115.12 + - category: main + dependencies: + email_validator: '>=2.0.0' + fastapi-cli: '>=0.0.5' + httpx: '>=0.23.0' + jinja2: '>=3.1.5' + pydantic: '>=1.7.4,!=1.8,!=1.8.1,!=2.0.0,!=2.0.1,!=2.1.0,<3.0.0' + python: '>=3.9' + python-multipart: '>=0.0.18' + starlette: '>=0.40.0,<0.47.0' + typing_extensions: '>=4.8.0' + uvicorn-standard: '>=0.12.0' + hash: + md5: 4bc12ece07c8c717e19fd790bfec100d + sha256: d72da6ea523d80968f0cca4ba4fb6c31fc27450d07e419f039da9b99654a56e6 + manager: conda + name: fastapi + optional: false + platform: osx-64 + url: + https://conda.anaconda.org/conda-forge/noarch/fastapi-0.115.12-pyh29332c3_0.conda + version: 0.115.12 + - category: main + dependencies: + email_validator: '>=2.0.0' + fastapi-cli: '>=0.0.5' + httpx: '>=0.23.0' + jinja2: '>=3.1.5' + pydantic: '>=1.7.4,!=1.8,!=1.8.1,!=2.0.0,!=2.0.1,!=2.1.0,<3.0.0' + python: '>=3.9' + python-multipart: '>=0.0.18' + starlette: '>=0.40.0,<0.47.0' + typing_extensions: '>=4.8.0' + uvicorn-standard: '>=0.12.0' + hash: + md5: 4bc12ece07c8c717e19fd790bfec100d + sha256: d72da6ea523d80968f0cca4ba4fb6c31fc27450d07e419f039da9b99654a56e6 + manager: conda + name: fastapi + optional: false + platform: osx-arm64 + url: + https://conda.anaconda.org/conda-forge/noarch/fastapi-0.115.12-pyh29332c3_0.conda + version: 0.115.12 + - category: main + dependencies: + python: '>=3.9' + rich-toolkit: '>=0.11.1' + typer: '>=0.12.3' + uvicorn-standard: '>=0.15.0' hash: - md5: 1d6afef758879ef5ee78127eb4cd2c4a - sha256: 1848c7db9e264e3b8036ee133d570dd880422983cd20dd9585a505289606d276 + md5: d960e0ea9e1c561aa928f6c4439f04c7 + sha256: 300683731013b7221922339cd40430bb3c2ddeeb658fd7e37f5099ffe64e4db0 manager: conda - name: expat + name: fastapi-cli optional: false platform: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/expat-2.6.4-h5888daf_0.conda - version: 2.6.4 + url: + https://conda.anaconda.org/conda-forge/noarch/fastapi-cli-0.0.7-pyhd8ed1ab_0.conda + version: 0.0.7 + - category: main + dependencies: + python: '>=3.9' + rich-toolkit: '>=0.11.1' + typer: '>=0.12.3' + uvicorn-standard: '>=0.15.0' + hash: + md5: d960e0ea9e1c561aa928f6c4439f04c7 + sha256: 300683731013b7221922339cd40430bb3c2ddeeb658fd7e37f5099ffe64e4db0 + manager: conda + name: fastapi-cli + optional: false + platform: osx-64 + url: + https://conda.anaconda.org/conda-forge/noarch/fastapi-cli-0.0.7-pyhd8ed1ab_0.conda + version: 0.0.7 + - category: main + dependencies: + python: '>=3.9' + rich-toolkit: '>=0.11.1' + typer: '>=0.12.3' + uvicorn-standard: '>=0.15.0' + hash: + md5: d960e0ea9e1c561aa928f6c4439f04c7 + sha256: 300683731013b7221922339cd40430bb3c2ddeeb658fd7e37f5099ffe64e4db0 + manager: conda + name: fastapi-cli + optional: false + platform: osx-arm64 + url: + https://conda.anaconda.org/conda-forge/noarch/fastapi-cli-0.0.7-pyhd8ed1ab_0.conda + version: 0.0.7 - category: main dependencies: python: '>=3.9' @@ -3663,6 +4094,60 @@ package: platform: osx-arm64 url: 
https://conda.anaconda.org/conda-forge/noarch/flaky-3.8.1-pyhd8ed1ab_1.conda version: 3.8.1 + - category: main + dependencies: + blinker: '>=1.6.2' + click: '>=8.1.3' + importlib-metadata: '>=3.6.0' + itsdangerous: '>=2.1.2' + jinja2: '>=3.1.2' + python: '>=3.8' + werkzeug: '>=2.3.7' + hash: + md5: 9b0d29067484a8dfacfae85b8fba81bc + sha256: 4f84ffdc5471236e8225db86c7508426b46aa2c3802d58ca40b3c3e174533b39 + manager: conda + name: flask + optional: false + platform: linux-64 + url: https://conda.anaconda.org/conda-forge/noarch/flask-2.3.3-pyhd8ed1ab_0.conda + version: 2.3.3 + - category: main + dependencies: + blinker: '>=1.6.2' + click: '>=8.1.3' + importlib-metadata: '>=3.6.0' + itsdangerous: '>=2.1.2' + jinja2: '>=3.1.2' + python: '>=3.8' + werkzeug: '>=2.3.7' + hash: + md5: 9b0d29067484a8dfacfae85b8fba81bc + sha256: 4f84ffdc5471236e8225db86c7508426b46aa2c3802d58ca40b3c3e174533b39 + manager: conda + name: flask + optional: false + platform: osx-64 + url: https://conda.anaconda.org/conda-forge/noarch/flask-2.3.3-pyhd8ed1ab_0.conda + version: 2.3.3 + - category: main + dependencies: + blinker: '>=1.6.2' + click: '>=8.1.3' + importlib-metadata: '>=3.6.0' + itsdangerous: '>=2.1.2' + jinja2: '>=3.1.2' + python: '>=3.8' + werkzeug: '>=2.3.7' + hash: + md5: 9b0d29067484a8dfacfae85b8fba81bc + sha256: 4f84ffdc5471236e8225db86c7508426b46aa2c3802d58ca40b3c3e174533b39 + manager: conda + name: flask + optional: false + platform: osx-arm64 + url: https://conda.anaconda.org/conda-forge/noarch/flask-2.3.3-pyhd8ed1ab_0.conda + version: 2.3.3 - category: main dependencies: __glibc: '>=2.17,<3.0.a0' @@ -3979,47 +4464,43 @@ package: version: '1' - category: main dependencies: - __glibc: '>=2.17,<3.0.a0' - libgcc: '>=13' - libpng: '>=1.6.47,<1.7.0a0' - libzlib: '>=1.3.1,<2.0a0' + libfreetype: 2.13.3 + libfreetype6: 2.13.3 hash: - md5: 9ecfd6f2ca17077dd9c2d24770bb9ccd - sha256: 7385577509a9c4730130f54bb6841b9b416249d5f4e9f74bf313e6378e313c57 + md5: 9ccd736d31e0c6e41f54e704e5312811 + sha256: 7ef7d477c43c12a5b4cddcf048a83277414512d1116aba62ebadfa7056a7d84f manager: conda name: freetype optional: false platform: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/freetype-2.13.3-h48d6fc4_0.conda + url: https://conda.anaconda.org/conda-forge/linux-64/freetype-2.13.3-ha770c72_1.conda version: 2.13.3 - category: main dependencies: - __osx: '>=10.13' - libpng: '>=1.6.47,<1.7.0a0' - libzlib: '>=1.3.1,<2.0a0' + libfreetype: 2.13.3 + libfreetype6: 2.13.3 hash: - md5: e391f0c2d07df272cf7c6df235e97bb9 - sha256: 66cc36a313accf28f4ab9b40ad11e4a8ff757c11314cd499435d9b8df1fa0150 + md5: 126dba1baf5030cb6f34533718924577 + sha256: e2870e983889eec73fdc0d4ab27d3f6501de4750ffe32d7d0a3a287f00bc2f15 manager: conda name: freetype optional: false platform: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/freetype-2.13.3-h40dfd5c_0.conda + url: https://conda.anaconda.org/conda-forge/osx-64/freetype-2.13.3-h694c41f_1.conda version: 2.13.3 - category: main dependencies: - __osx: '>=11.0' - libpng: '>=1.6.47,<1.7.0a0' - libzlib: '>=1.3.1,<2.0a0' + libfreetype: 2.13.3 + libfreetype6: 2.13.3 hash: - md5: 630445a505ea6e59f55714853d8c9ed0 - sha256: 2c273de32431c431a118a8cd33afb6efc616ddbbab9e5ba0fe31e3b4d1ff57a3 + md5: e684de4644067f1956a580097502bf03 + sha256: 6b63c72ea51a41d41964841404564c0729fdddd3e952e2715839fd759b7cfdfc manager: conda name: freetype optional: false platform: osx-arm64 url: - https://conda.anaconda.org/conda-forge/osx-arm64/freetype-2.13.3-h1d14073_0.conda + 
https://conda.anaconda.org/conda-forge/osx-arm64/freetype-2.13.3-hce30654_1.conda version: 2.13.3 - category: main dependencies: @@ -4107,38 +4588,38 @@ package: dependencies: python: '>=3.9' hash: - md5: 5ecafd654e33d1f2ecac5ec97057593b - sha256: 9cbba3b36d1e91e4806ba15141936872d44d20a4d1e3bb74f4aea0ebeb01b205 + md5: 7ac28047cd73cf02a294a64f036b2b02 + sha256: 6c907128b6464b8f4d5cba3160c7ec1505d10a86c188b1356ecddfd662285fcd manager: conda name: fsspec optional: false platform: linux-64 - url: https://conda.anaconda.org/conda-forge/noarch/fsspec-2025.3.0-pyhd8ed1ab_0.conda - version: 2025.3.0 + url: https://conda.anaconda.org/conda-forge/noarch/fsspec-2025.5.0-pyhd8ed1ab_0.conda + version: 2025.5.0 - category: main dependencies: python: '>=3.9' hash: - md5: 5ecafd654e33d1f2ecac5ec97057593b - sha256: 9cbba3b36d1e91e4806ba15141936872d44d20a4d1e3bb74f4aea0ebeb01b205 + md5: 7ac28047cd73cf02a294a64f036b2b02 + sha256: 6c907128b6464b8f4d5cba3160c7ec1505d10a86c188b1356ecddfd662285fcd manager: conda name: fsspec optional: false platform: osx-64 - url: https://conda.anaconda.org/conda-forge/noarch/fsspec-2025.3.0-pyhd8ed1ab_0.conda - version: 2025.3.0 + url: https://conda.anaconda.org/conda-forge/noarch/fsspec-2025.5.0-pyhd8ed1ab_0.conda + version: 2025.5.0 - category: main dependencies: python: '>=3.9' hash: - md5: 5ecafd654e33d1f2ecac5ec97057593b - sha256: 9cbba3b36d1e91e4806ba15141936872d44d20a4d1e3bb74f4aea0ebeb01b205 + md5: 7ac28047cd73cf02a294a64f036b2b02 + sha256: 6c907128b6464b8f4d5cba3160c7ec1505d10a86c188b1356ecddfd662285fcd manager: conda name: fsspec optional: false platform: osx-arm64 - url: https://conda.anaconda.org/conda-forge/noarch/fsspec-2025.3.0-pyhd8ed1ab_0.conda - version: 2025.3.0 + url: https://conda.anaconda.org/conda-forge/noarch/fsspec-2025.5.0-pyhd8ed1ab_0.conda + version: 2025.5.0 - category: main dependencies: libgcc-ng: '>=12' @@ -4195,64 +4676,64 @@ package: - category: main dependencies: __glibc: '>=2.17,<3.0.a0' - libcurl: '>=8.12.1,<9.0a0' - libexpat: '>=2.6.4,<3.0a0' + libcurl: '>=8.13.0,<9.0a0' + libexpat: '>=2.7.0,<3.0a0' libgcc: '>=13' libiconv: '>=1.18,<2.0a0' libzlib: '>=1.3.1,<2.0a0' - openssl: '>=3.4.1,<4.0a0' - pcre2: '>=10.44,<10.45.0a0' + openssl: '>=3.5.0,<4.0a0' + pcre2: '>=10.45,<10.46.0a0' perl: 5.* hash: - md5: 757e04df008ac271bf9fcc3ee21d5ea8 - sha256: 9279eaa7c973f474a73607d65f9afc9c7d18e8374c45eaf5461c0969947a35be + md5: 245f8de3067054533c6e8e46ff23aa0a + sha256: 2b0ff36cf9bb1b6ca8c512aeb1908f886ef38a358a194345252c062c62148de4 manager: conda name: git optional: false platform: linux-64 url: - https://conda.anaconda.org/conda-forge/linux-64/git-2.49.0-pl5321h59d505e_0.conda + https://conda.anaconda.org/conda-forge/linux-64/git-2.49.0-pl5321hc2ff736_1.conda version: 2.49.0 - category: main dependencies: __osx: '>=10.10' - libcurl: '>=8.12.1,<9.0a0' - libexpat: '>=2.6.4,<3.0a0' + libcurl: '>=8.13.0,<9.0a0' + libexpat: '>=2.7.0,<3.0a0' libiconv: '>=1.18,<2.0a0' - libintl: '>=0.23.1,<1.0a0' + libintl: '>=0.24.1,<1.0a0' libzlib: '>=1.3.1,<2.0a0' - openssl: '>=3.4.1,<4.0a0' - pcre2: '>=10.44,<10.45.0a0' + openssl: '>=3.5.0,<4.0a0' + pcre2: '>=10.45,<10.46.0a0' perl: 5.* hash: - md5: 1ae715c093cf6feee33cbe1061685a52 - sha256: d8acf43d9d7fffdd54271682849515e5d0c9ac05209d21834293a49ffe0132df + md5: f868766975818340f5eaff1e6ef697b7 + sha256: 2763ed05b9426e873a0027d5851fee53dca02f05f0f36317913d75e6d7b094ce manager: conda name: git optional: false platform: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/git-2.49.0-pl5321h0e333bc_0.conda + url: 
https://conda.anaconda.org/conda-forge/osx-64/git-2.49.0-pl5321h3bb66fe_1.conda version: 2.49.0 - category: main dependencies: __osx: '>=11.0' - libcurl: '>=8.12.1,<9.0a0' - libexpat: '>=2.6.4,<3.0a0' + libcurl: '>=8.13.0,<9.0a0' + libexpat: '>=2.7.0,<3.0a0' libiconv: '>=1.18,<2.0a0' - libintl: '>=0.23.1,<1.0a0' + libintl: '>=0.24.1,<1.0a0' libzlib: '>=1.3.1,<2.0a0' - openssl: '>=3.4.1,<4.0a0' - pcre2: '>=10.44,<10.45.0a0' + openssl: '>=3.5.0,<4.0a0' + pcre2: '>=10.45,<10.46.0a0' perl: 5.* hash: - md5: 465d2f91648a3626ca62b02e551dec26 - sha256: 820ae89cee4e47f41915430e41e1298d653383b7120d92ee06619939594d39c9 + md5: 2f73d1205848db09444042609c8fb7e7 + sha256: cb771ea5af8f588b031f21ed52d8d79ae4531a30bbe50db5b863fa74765579c8 manager: conda name: git optional: false platform: osx-arm64 url: - https://conda.anaconda.org/conda-forge/osx-arm64/git-2.49.0-pl5321hd71a902_0.conda + https://conda.anaconda.org/conda-forge/osx-arm64/git-2.49.0-pl5321ha659579_1.conda version: 2.49.0 - category: main dependencies: @@ -4393,46 +4874,46 @@ package: dependencies: __glibc: '>=2.17,<3.0.a0' libgcc: '>=13' - libglib: 2.84.0 + libglib: 2.84.2 hash: - md5: 2d876130380b1593f25c20998df37880 - sha256: bb9124c26e382627f343ffb7da48d30eadb27b40d461b1d50622610e48c45595 + md5: f2ec1facec64147850b7674633978050 + sha256: eee7655422577df78386513322ea2aa691e7638947584faa715a20488ef6cc4e manager: conda name: glib-tools optional: false platform: linux-64 url: - https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.84.0-h4833e2c_0.conda - version: 2.84.0 + https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.84.2-h4833e2c_0.conda + version: 2.84.2 - category: main dependencies: __osx: '>=10.13' - libglib: 2.84.0 - libintl: '>=0.23.1,<1.0a0' + libglib: 2.84.2 + libintl: '>=0.24.1,<1.0a0' hash: - md5: 03d506bd28830a841105d3015744612e - sha256: 6ea60fa3aee44ba7223ee4a5955dc341a4dac1f2256a8511a821741545a6da27 + md5: eee63bf8e7ee9e2752fa78196dba373c + sha256: 8d20f2df840e69588ae406504edc802b901b70881f484919e2dcdd40343488e4 manager: conda name: glib-tools optional: false platform: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/glib-tools-2.84.0-hf8faeaf_0.conda - version: 2.84.0 + url: https://conda.anaconda.org/conda-forge/osx-64/glib-tools-2.84.2-hf8faeaf_0.conda + version: 2.84.2 - category: main dependencies: __osx: '>=11.0' - libglib: 2.84.0 - libintl: '>=0.23.1,<1.0a0' + libglib: 2.84.2 + libintl: '>=0.24.1,<1.0a0' hash: - md5: a4374a5bc561b673045db55e090cb6cb - sha256: 55d1f1dc1884f434936917dc6bec938d6e552e361c3936cc85f606404fe16c65 + md5: c6dd3b852d7287ee3bf1d392f107f1ac + sha256: 809cb62fe75ca0bcf0eecd223d100b4b4aa4555eee4c3e335ab7f453506bbb78 manager: conda name: glib-tools optional: false platform: osx-arm64 url: - https://conda.anaconda.org/conda-forge/osx-arm64/glib-tools-2.84.0-h1dc7a0c_0.conda - version: 2.84.0 + https://conda.anaconda.org/conda-forge/osx-arm64/glib-tools-2.84.2-h1dc7a0c_0.conda + version: 2.84.2 - category: main dependencies: libgcc-ng: '>=12' @@ -4611,14 +5092,14 @@ package: tomli: '' tomli-w: '' hash: - md5: aaf573edde352953947e2bbdc66a2c63 - sha256: eafd3d566a59112099d001caec009d46092660c85b3bc385779cacb31292e1f5 + md5: f6afeec0813083056ead2fa12d96c8bb + sha256: 63355d32d58030e059f43b3c871ffcafc4a7f30dba00a31f0a0973f7b20db442 manager: conda name: grayskull optional: false platform: linux-64 - url: https://conda.anaconda.org/conda-forge/noarch/grayskull-2.8.0-pyhd8ed1ab_0.conda - version: 2.8.0 + url: 
https://conda.anaconda.org/conda-forge/noarch/grayskull-2.9.1-pyhd8ed1ab_0.conda + version: 2.9.1 - category: main dependencies: beautifulsoup4: '' @@ -4638,14 +5119,14 @@ package: tomli: '' tomli-w: '' hash: - md5: aaf573edde352953947e2bbdc66a2c63 - sha256: eafd3d566a59112099d001caec009d46092660c85b3bc385779cacb31292e1f5 + md5: f6afeec0813083056ead2fa12d96c8bb + sha256: 63355d32d58030e059f43b3c871ffcafc4a7f30dba00a31f0a0973f7b20db442 manager: conda name: grayskull optional: false platform: osx-64 - url: https://conda.anaconda.org/conda-forge/noarch/grayskull-2.8.0-pyhd8ed1ab_0.conda - version: 2.8.0 + url: https://conda.anaconda.org/conda-forge/noarch/grayskull-2.9.1-pyhd8ed1ab_0.conda + version: 2.9.1 - category: main dependencies: beautifulsoup4: '' @@ -4665,14 +5146,14 @@ package: tomli: '' tomli-w: '' hash: - md5: aaf573edde352953947e2bbdc66a2c63 - sha256: eafd3d566a59112099d001caec009d46092660c85b3bc385779cacb31292e1f5 + md5: f6afeec0813083056ead2fa12d96c8bb + sha256: 63355d32d58030e059f43b3c871ffcafc4a7f30dba00a31f0a0973f7b20db442 manager: conda name: grayskull optional: false platform: osx-arm64 - url: https://conda.anaconda.org/conda-forge/noarch/grayskull-2.8.0-pyhd8ed1ab_0.conda - version: 2.8.0 + url: https://conda.anaconda.org/conda-forge/noarch/grayskull-2.9.1-pyhd8ed1ab_0.conda + version: 2.9.1 - category: main dependencies: __glibc: '>=2.17,<3.0.a0' @@ -4685,18 +5166,18 @@ package: fribidi: '>=1.0.10,<2.0a0' gdk-pixbuf: '>=2.42.12,<3.0a0' glib-tools: '' - harfbuzz: '>=10.4.0,<11.0a0' + harfbuzz: '>=11.0.0,<12.0a0' hicolor-icon-theme: '' libcups: '>=2.3.3,<3.0a0' libexpat: '>=2.6.4,<3.0a0' libgcc: '>=13' - libglib: '>=2.82.2,<3.0a0' + libglib: '>=2.84.0,<3.0a0' liblzma: '>=5.6.4,<6.0a0' - libxkbcommon: '>=1.8.0,<2.0a0' + libxkbcommon: '>=1.8.1,<2.0a0' libzlib: '>=1.3.1,<2.0a0' - pango: '>=1.56.1,<2.0a0' + pango: '>=1.56.3,<2.0a0' wayland: '>=1.23.1,<2.0a0' - xorg-libx11: '>=1.8.11,<2.0a0' + xorg-libx11: '>=1.8.12,<2.0a0' xorg-libxcomposite: '>=0.4.6,<1.0a0' xorg-libxcursor: '>=1.2.3,<2.0a0' xorg-libxdamage: '>=1.1.6,<2.0a0' @@ -4707,13 +5188,13 @@ package: xorg-libxrandr: '>=1.5.4,<2.0a0' xorg-libxrender: '>=0.9.12,<0.10.0a0' hash: - md5: a891e341072432fafb853b3762957cbf - sha256: fc8abccb4b0d454891847bdd8163332ff8607aa33ea9cf1e43b3828fc88c42ce + md5: 67d00e9cfe751cfe581726c5eff7c184 + sha256: d36263cbcbce34ec463ce92bd72efa198b55d987959eab6210cc256a0e79573b manager: conda name: gtk3 optional: false platform: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/gtk3-3.24.43-h021d004_4.conda + url: https://conda.anaconda.org/conda-forge/linux-64/gtk3-3.24.43-h0c6a113_5.conda version: 3.24.43 - category: main dependencies: @@ -4724,22 +5205,22 @@ package: fribidi: '>=1.0.10,<2.0a0' gdk-pixbuf: '>=2.42.12,<3.0a0' glib-tools: '' - harfbuzz: '>=10.4.0,<11.0a0' + harfbuzz: '>=11.0.0,<12.0a0' hicolor-icon-theme: '' libexpat: '>=2.6.4,<3.0a0' - libglib: '>=2.82.2,<3.0a0' + libglib: '>=2.84.0,<3.0a0' libintl: '>=0.23.1,<1.0a0' liblzma: '>=5.6.4,<6.0a0' libzlib: '>=1.3.1,<2.0a0' - pango: '>=1.56.1,<2.0a0' + pango: '>=1.56.3,<2.0a0' hash: - md5: 522364f052b5e18bfea181e33d1eed1b - sha256: fc74cae058d39dc25697572b39d97cf2a39b3b37d6d9a557a1b9f95b75d39b3a + md5: 38eeb48f9466e5763567d1be1b7ff444 + sha256: 4f1be786342408492578dc696165ed3515bb1c4887c30e0909e50d0f8245fb38 manager: conda name: gtk3 optional: false platform: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/gtk3-3.24.43-h82a860e_4.conda + url: 
https://conda.anaconda.org/conda-forge/osx-64/gtk3-3.24.43-h70b172e_5.conda version: 3.24.43 - category: main dependencies: @@ -4750,22 +5231,22 @@ package: fribidi: '>=1.0.10,<2.0a0' gdk-pixbuf: '>=2.42.12,<3.0a0' glib-tools: '' - harfbuzz: '>=10.4.0,<11.0a0' + harfbuzz: '>=11.0.0,<12.0a0' hicolor-icon-theme: '' libexpat: '>=2.6.4,<3.0a0' - libglib: '>=2.82.2,<3.0a0' + libglib: '>=2.84.0,<3.0a0' libintl: '>=0.23.1,<1.0a0' liblzma: '>=5.6.4,<6.0a0' libzlib: '>=1.3.1,<2.0a0' - pango: '>=1.56.1,<2.0a0' + pango: '>=1.56.3,<2.0a0' hash: - md5: a01d2dd60413e43f581445d1b2ed8d5d - sha256: 5adbee61709811186022ba0013cdda2029ae340be4de95c909a718045ec79d00 + md5: 8353369d4c2ecc5afd888405d3226fd9 + sha256: 9650ac1a02975ae0a3917443dc3c35ddc4d8e87a1cb04fda115af5f98e5d457c manager: conda name: gtk3 optional: false platform: osx-arm64 - url: https://conda.anaconda.org/conda-forge/osx-arm64/gtk3-3.24.43-he7bb075_4.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/gtk3-3.24.43-h07173f4_5.conda version: 3.24.43 - category: main dependencies: @@ -4891,149 +5372,155 @@ package: - category: main dependencies: __glibc: '>=2.17,<3.0.a0' - cairo: '>=1.18.2,<2.0a0' - freetype: '>=2.12.1,<3.0a0' + cairo: '>=1.18.4,<2.0a0' + freetype: '' graphite2: '' icu: '>=75.1,<76.0a0' - libexpat: '>=2.6.4,<3.0a0' + libexpat: '>=2.7.0,<3.0a0' + libfreetype: '>=2.13.3' + libfreetype6: '>=2.13.3' libgcc: '>=13' - libglib: '>=2.82.2,<3.0a0' + libglib: '>=2.84.1,<3.0a0' libstdcxx: '>=13' libzlib: '>=1.3.1,<2.0a0' hash: - md5: 81f137b4153cf111ff8e3188b6fb8e73 - sha256: 3b4ccabf170e1bf98c593f724cc4defe286d64cb19288751a50c63809ca32d5f + md5: 0e6e192d4b3d95708ad192d957cf3163 + sha256: 5bd0f3674808862838d6e2efc0b3075e561c34309c5c2f4c976f7f1f57c91112 manager: conda name: harfbuzz optional: false platform: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-10.4.0-h76408a6_0.conda - version: 10.4.0 + url: https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-11.2.1-h3beb420_0.conda + version: 11.2.1 - category: main dependencies: __osx: '>=10.13' - cairo: '>=1.18.2,<2.0a0' - freetype: '>=2.12.1,<3.0a0' + cairo: '>=1.18.4,<2.0a0' + freetype: '' graphite2: '' icu: '>=75.1,<76.0a0' libcxx: '>=18' - libexpat: '>=2.6.4,<3.0a0' - libglib: '>=2.82.2,<3.0a0' + libexpat: '>=2.7.0,<3.0a0' + libfreetype: '>=2.13.3' + libfreetype6: '>=2.13.3' + libglib: '>=2.84.1,<3.0a0' libzlib: '>=1.3.1,<2.0a0' hash: - md5: 05493515d0b4467f8229f1e154ec80c3 - sha256: 87e47de769f93f756e61e40555796382fb1dc3cb754e2e068958a949b3df33f7 + md5: ecd1e793e20518bf438a0d5070465ecb + sha256: ed21d2e7ebe6f77154b7b851dfd0c9e5d4b8c590badb54ca4094cee1cf9ad470 manager: conda name: harfbuzz optional: false platform: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/harfbuzz-10.4.0-h86b413f_0.conda - version: 10.4.0 + url: https://conda.anaconda.org/conda-forge/osx-64/harfbuzz-11.2.1-hdfbcdba_0.conda + version: 11.2.1 - category: main dependencies: __osx: '>=11.0' - cairo: '>=1.18.2,<2.0a0' - freetype: '>=2.12.1,<3.0a0' + cairo: '>=1.18.4,<2.0a0' + freetype: '' graphite2: '' icu: '>=75.1,<76.0a0' libcxx: '>=18' - libexpat: '>=2.6.4,<3.0a0' - libglib: '>=2.82.2,<3.0a0' + libexpat: '>=2.7.0,<3.0a0' + libfreetype: '>=2.13.3' + libfreetype6: '>=2.13.3' + libglib: '>=2.84.1,<3.0a0' libzlib: '>=1.3.1,<2.0a0' hash: - md5: c13f50a1000cc3adadb2d93c76dcedab - sha256: 5c0ba63cdc0ccda3309923deff839528cf870daf4ae0173ab07e275698236321 + md5: 12f4520f618ff6e398a2c8e0bed1e580 + sha256: 244e4071229aa3b824dd2a9814c0e8b4c2b40dfb28914ec2247bf27c5c681584 
manager: conda name: harfbuzz optional: false platform: osx-arm64 url: - https://conda.anaconda.org/conda-forge/osx-arm64/harfbuzz-10.4.0-hb72c1af_0.conda - version: 10.4.0 + https://conda.anaconda.org/conda-forge/osx-arm64/harfbuzz-11.2.1-hab40de2_0.conda + version: 11.2.1 - category: main dependencies: click: '>=8.0.6' - hatchling: '>=1.24.2' + hatchling: '>=1.26.3' httpx: '>=0.22.0' hyperlink: '>=21.0.0' keyring: '>=23.5.0' - packaging: '>=24.2' - pexpect: '>=4.8,<5' + packaging: '>=23.2' + pexpect: '>=4.8,<5.dev0' platformdirs: '>=2.5.0' python: '>=3.9' rich: '>=11.2.0' shellingham: '>=1.4.0' tomli-w: '>=1.0' tomlkit: '>=0.11.1' - userpath: '>=1.7,<2' - uv: '>=0.1.35' + userpath: '>=1.7,<2.dev0' + uv: '>=0.5.23' virtualenv: '>=20.26.6' - zstandard: <1.0 + zstandard: <1 hash: - md5: b34bdd91d7298c76d9891cea6c8ab27f - sha256: 908fc6e847da57da39011be839db0f56f16428d3d950f49a8c7ac1c9c1ed0505 + md5: 82f74ce5f4548c3627ed52dfac9da8ca + sha256: d245185287bdf5d3c808267aebda2cb7fbce779099493f92e368f813e6a157d4 manager: conda name: hatch optional: false platform: linux-64 - url: https://conda.anaconda.org/conda-forge/noarch/hatch-1.14.0-pyhd8ed1ab_1.conda - version: 1.14.0 + url: https://conda.anaconda.org/conda-forge/noarch/hatch-1.14.1-pyhd8ed1ab_0.conda + version: 1.14.1 - category: main dependencies: click: '>=8.0.6' - hatchling: '>=1.24.2' + hatchling: '>=1.26.3' httpx: '>=0.22.0' hyperlink: '>=21.0.0' keyring: '>=23.5.0' - packaging: '>=24.2' - pexpect: '>=4.8,<5' + packaging: '>=23.2' + pexpect: '>=4.8,<5.dev0' platformdirs: '>=2.5.0' python: '>=3.9' rich: '>=11.2.0' shellingham: '>=1.4.0' tomli-w: '>=1.0' tomlkit: '>=0.11.1' - userpath: '>=1.7,<2' - uv: '>=0.1.35' + userpath: '>=1.7,<2.dev0' + uv: '>=0.5.23' virtualenv: '>=20.26.6' - zstandard: <1.0 + zstandard: <1 hash: - md5: b34bdd91d7298c76d9891cea6c8ab27f - sha256: 908fc6e847da57da39011be839db0f56f16428d3d950f49a8c7ac1c9c1ed0505 + md5: 82f74ce5f4548c3627ed52dfac9da8ca + sha256: d245185287bdf5d3c808267aebda2cb7fbce779099493f92e368f813e6a157d4 manager: conda name: hatch optional: false platform: osx-64 - url: https://conda.anaconda.org/conda-forge/noarch/hatch-1.14.0-pyhd8ed1ab_1.conda - version: 1.14.0 + url: https://conda.anaconda.org/conda-forge/noarch/hatch-1.14.1-pyhd8ed1ab_0.conda + version: 1.14.1 - category: main dependencies: click: '>=8.0.6' - hatchling: '>=1.24.2' + hatchling: '>=1.26.3' httpx: '>=0.22.0' hyperlink: '>=21.0.0' keyring: '>=23.5.0' - packaging: '>=24.2' - pexpect: '>=4.8,<5' + packaging: '>=23.2' + pexpect: '>=4.8,<5.dev0' platformdirs: '>=2.5.0' python: '>=3.9' rich: '>=11.2.0' shellingham: '>=1.4.0' tomli-w: '>=1.0' tomlkit: '>=0.11.1' - userpath: '>=1.7,<2' - uv: '>=0.1.35' + userpath: '>=1.7,<2.dev0' + uv: '>=0.5.23' virtualenv: '>=20.26.6' - zstandard: <1.0 + zstandard: <1 hash: - md5: b34bdd91d7298c76d9891cea6c8ab27f - sha256: 908fc6e847da57da39011be839db0f56f16428d3d950f49a8c7ac1c9c1ed0505 + md5: 82f74ce5f4548c3627ed52dfac9da8ca + sha256: d245185287bdf5d3c808267aebda2cb7fbce779099493f92e368f813e6a157d4 manager: conda name: hatch optional: false platform: osx-arm64 - url: https://conda.anaconda.org/conda-forge/noarch/hatch-1.14.0-pyhd8ed1ab_1.conda - version: 1.14.0 + url: https://conda.anaconda.org/conda-forge/noarch/hatch-1.14.1-pyhd8ed1ab_0.conda + version: 1.14.1 - category: main dependencies: editables: '>=0.3' @@ -5166,48 +5653,6 @@ package: platform: osx-arm64 url: https://conda.anaconda.org/conda-forge/noarch/hpack-4.1.0-pyhd8ed1ab_0.conda version: 4.1.0 - - category: main - dependencies: - 
python: '>=3.9' - six: '>=1.9' - webencodings: '' - hash: - md5: cf25bfddbd3bc275f3d3f9936cee1dd3 - sha256: 8027e436ad59e2a7392f6036392ef9d6c223798d8a1f4f12d5926362def02367 - manager: conda - name: html5lib - optional: false - platform: linux-64 - url: https://conda.anaconda.org/conda-forge/noarch/html5lib-1.1-pyhd8ed1ab_2.conda - version: '1.1' - - category: main - dependencies: - python: '>=3.9' - six: '>=1.9' - webencodings: '' - hash: - md5: cf25bfddbd3bc275f3d3f9936cee1dd3 - sha256: 8027e436ad59e2a7392f6036392ef9d6c223798d8a1f4f12d5926362def02367 - manager: conda - name: html5lib - optional: false - platform: osx-64 - url: https://conda.anaconda.org/conda-forge/noarch/html5lib-1.1-pyhd8ed1ab_2.conda - version: '1.1' - - category: main - dependencies: - python: '>=3.9' - six: '>=1.9' - webencodings: '' - hash: - md5: cf25bfddbd3bc275f3d3f9936cee1dd3 - sha256: 8027e436ad59e2a7392f6036392ef9d6c223798d8a1f4f12d5926362def02367 - manager: conda - name: html5lib - optional: false - platform: osx-arm64 - url: https://conda.anaconda.org/conda-forge/noarch/html5lib-1.1-pyhd8ed1ab_2.conda - version: '1.1' - category: main dependencies: anyio: '>=3.0,<5.0' @@ -5259,6 +5704,52 @@ package: platform: osx-arm64 url: https://conda.anaconda.org/conda-forge/noarch/httpcore-1.0.7-pyh29332c3_1.conda version: 1.0.7 + - category: main + dependencies: + __glibc: '>=2.17,<3.0.a0' + libgcc: '>=13' + python: '>=3.11,<3.12.0a0' + python_abi: 3.11.* + hash: + md5: c16a94f3d0c6a2a495b3071cff3f598d + sha256: 1775083ed07111778559e9a0b47033c13cbe6f1c489eaceff204f6cf7a9e02da + manager: conda + name: httptools + optional: false + platform: linux-64 + url: + https://conda.anaconda.org/conda-forge/linux-64/httptools-0.6.4-py311h9ecbd09_0.conda + version: 0.6.4 + - category: main + dependencies: + __osx: '>=10.13' + python: '>=3.11,<3.12.0a0' + python_abi: 3.11.* + hash: + md5: eee2f4ab03514d6ca5cd33ac2c3a1846 + sha256: bb796dfdbf36aedf07471a3b0911803cac5b9cb2e1bdf8301a633ba3f8dd9d4e + manager: conda + name: httptools + optional: false + platform: osx-64 + url: + https://conda.anaconda.org/conda-forge/osx-64/httptools-0.6.4-py311h4d7f069_0.conda + version: 0.6.4 + - category: main + dependencies: + __osx: '>=11.0' + python: '>=3.11,<3.12.0a0' + python_abi: 3.11.* + hash: + md5: 4aca39fe9eb4224026c907e1aa8156fb + sha256: 47af7c9e41ea0327f12757527cea28c430ef84aade923d81cc397ebb2bf9eb28 + manager: conda + name: httptools + optional: false + platform: osx-arm64 + url: + https://conda.anaconda.org/conda-forge/osx-arm64/httptools-0.6.4-py311h917b07b_0.conda + version: 0.6.4 - category: main dependencies: anyio: '' @@ -5431,40 +5922,40 @@ package: python: '>=3.9' ukkonen: '' hash: - md5: 542f45bf054c6b9cf8d00a3b1976f945 - sha256: b74a2ffa7be9278d7b8770b6870c360747149c683865e63476b0e1db23038429 + md5: 8d5b9b702810fb3054d52ba146023bc3 + sha256: 02f47df6c6982b796aecb086b434627207e87c0a90a50226f11f2cc99c089770 manager: conda name: identify optional: false platform: linux-64 - url: https://conda.anaconda.org/conda-forge/noarch/identify-2.6.9-pyhd8ed1ab_0.conda - version: 2.6.9 + url: https://conda.anaconda.org/conda-forge/noarch/identify-2.6.10-pyhd8ed1ab_0.conda + version: 2.6.10 - category: main dependencies: python: '>=3.9' ukkonen: '' hash: - md5: 542f45bf054c6b9cf8d00a3b1976f945 - sha256: b74a2ffa7be9278d7b8770b6870c360747149c683865e63476b0e1db23038429 + md5: 8d5b9b702810fb3054d52ba146023bc3 + sha256: 02f47df6c6982b796aecb086b434627207e87c0a90a50226f11f2cc99c089770 manager: conda name: identify optional: false platform: osx-64 - 
url: https://conda.anaconda.org/conda-forge/noarch/identify-2.6.9-pyhd8ed1ab_0.conda - version: 2.6.9 + url: https://conda.anaconda.org/conda-forge/noarch/identify-2.6.10-pyhd8ed1ab_0.conda + version: 2.6.10 - category: main dependencies: python: '>=3.9' ukkonen: '' hash: - md5: 542f45bf054c6b9cf8d00a3b1976f945 - sha256: b74a2ffa7be9278d7b8770b6870c360747149c683865e63476b0e1db23038429 + md5: 8d5b9b702810fb3054d52ba146023bc3 + sha256: 02f47df6c6982b796aecb086b434627207e87c0a90a50226f11f2cc99c089770 manager: conda name: identify optional: false platform: osx-arm64 - url: https://conda.anaconda.org/conda-forge/noarch/identify-2.6.9-pyhd8ed1ab_0.conda - version: 2.6.9 + url: https://conda.anaconda.org/conda-forge/noarch/identify-2.6.10-pyhd8ed1ab_0.conda + version: 2.6.10 - category: main dependencies: python: '>=3.9' @@ -5503,46 +5994,46 @@ package: version: '3.10' - category: main dependencies: - python: '>=3.9' - zipp: '>=0.5' + python: '' + zipp: '>=3.20' hash: - md5: f4b39bf00c69f56ac01e020ebfac066c - sha256: 598951ebdb23e25e4cec4bbff0ae369cec65ead80b50bc08b441d8e54de5cf03 + md5: 63ccfdc3a3ce25b027b8767eb722fca8 + sha256: c18ab120a0613ada4391b15981d86ff777b5690ca461ea7e9e49531e8f374745 manager: conda name: importlib-metadata optional: false platform: linux-64 url: - https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.6.1-pyha770c72_0.conda - version: 8.6.1 + https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.7.0-pyhe01879c_1.conda + version: 8.7.0 - category: main dependencies: python: '>=3.9' - zipp: '>=0.5' + zipp: '>=3.20' hash: - md5: f4b39bf00c69f56ac01e020ebfac066c - sha256: 598951ebdb23e25e4cec4bbff0ae369cec65ead80b50bc08b441d8e54de5cf03 + md5: 63ccfdc3a3ce25b027b8767eb722fca8 + sha256: c18ab120a0613ada4391b15981d86ff777b5690ca461ea7e9e49531e8f374745 manager: conda name: importlib-metadata optional: false platform: osx-64 url: - https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.6.1-pyha770c72_0.conda - version: 8.6.1 + https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.7.0-pyhe01879c_1.conda + version: 8.7.0 - category: main dependencies: python: '>=3.9' - zipp: '>=0.5' + zipp: '>=3.20' hash: - md5: f4b39bf00c69f56ac01e020ebfac066c - sha256: 598951ebdb23e25e4cec4bbff0ae369cec65ead80b50bc08b441d8e54de5cf03 + md5: 63ccfdc3a3ce25b027b8767eb722fca8 + sha256: c18ab120a0613ada4391b15981d86ff777b5690ca461ea7e9e49531e8f374745 manager: conda name: importlib-metadata optional: false platform: osx-arm64 url: - https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.6.1-pyha770c72_0.conda - version: 8.6.1 + https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.7.0-pyhe01879c_1.conda + version: 8.7.0 - category: main dependencies: python: '>=3.9' @@ -5657,6 +6148,45 @@ package: platform: osx-arm64 url: https://conda.anaconda.org/conda-forge/noarch/isodate-0.7.2-pyhd8ed1ab_1.conda version: 0.7.2 + - category: main + dependencies: + python: '>=3.9' + hash: + md5: 7ac5f795c15f288984e32add616cdc59 + sha256: 1684b7b16eec08efef5302ce298c606b163c18272b69a62b666fbaa61516f170 + manager: conda + name: itsdangerous + optional: false + platform: linux-64 + url: + https://conda.anaconda.org/conda-forge/noarch/itsdangerous-2.2.0-pyhd8ed1ab_1.conda + version: 2.2.0 + - category: main + dependencies: + python: '>=3.9' + hash: + md5: 7ac5f795c15f288984e32add616cdc59 + sha256: 1684b7b16eec08efef5302ce298c606b163c18272b69a62b666fbaa61516f170 + manager: conda + name: itsdangerous + optional: false + platform: osx-64 + url: + 
https://conda.anaconda.org/conda-forge/noarch/itsdangerous-2.2.0-pyhd8ed1ab_1.conda + version: 2.2.0 + - category: main + dependencies: + python: '>=3.9' + hash: + md5: 7ac5f795c15f288984e32add616cdc59 + sha256: 1684b7b16eec08efef5302ce298c606b163c18272b69a62b666fbaa61516f170 + manager: conda + name: itsdangerous + optional: false + platform: osx-arm64 + url: + https://conda.anaconda.org/conda-forge/noarch/itsdangerous-2.2.0-pyhd8ed1ab_1.conda + version: 2.2.0 - category: main dependencies: more-itertools: '' @@ -6010,46 +6540,85 @@ package: version: 4.23.0 - category: main dependencies: - python: '>=3.9' + python: '' referencing: '>=0.31.0' hash: - md5: 3b519bc21bc80e60b456f1e62962a766 - sha256: 37127133837444cf0e6d1a95ff5a505f8214ed4e89e8e9343284840e674c6891 + md5: 41ff526b1083fde51fbdc93f29282e0e + sha256: 66fbad7480f163509deec8bd028cd3ea68e58022982c838683586829f63f3efa manager: conda name: jsonschema-specifications optional: false platform: linux-64 url: - https://conda.anaconda.org/conda-forge/noarch/jsonschema-specifications-2024.10.1-pyhd8ed1ab_1.conda - version: 2024.10.1 + https://conda.anaconda.org/conda-forge/noarch/jsonschema-specifications-2025.4.1-pyh29332c3_0.conda + version: 2025.4.1 - category: main dependencies: python: '>=3.9' referencing: '>=0.31.0' hash: - md5: 3b519bc21bc80e60b456f1e62962a766 - sha256: 37127133837444cf0e6d1a95ff5a505f8214ed4e89e8e9343284840e674c6891 + md5: 41ff526b1083fde51fbdc93f29282e0e + sha256: 66fbad7480f163509deec8bd028cd3ea68e58022982c838683586829f63f3efa manager: conda name: jsonschema-specifications optional: false platform: osx-64 url: - https://conda.anaconda.org/conda-forge/noarch/jsonschema-specifications-2024.10.1-pyhd8ed1ab_1.conda - version: 2024.10.1 + https://conda.anaconda.org/conda-forge/noarch/jsonschema-specifications-2025.4.1-pyh29332c3_0.conda + version: 2025.4.1 - category: main dependencies: python: '>=3.9' referencing: '>=0.31.0' hash: - md5: 3b519bc21bc80e60b456f1e62962a766 - sha256: 37127133837444cf0e6d1a95ff5a505f8214ed4e89e8e9343284840e674c6891 + md5: 41ff526b1083fde51fbdc93f29282e0e + sha256: 66fbad7480f163509deec8bd028cd3ea68e58022982c838683586829f63f3efa manager: conda name: jsonschema-specifications optional: false platform: osx-arm64 url: - https://conda.anaconda.org/conda-forge/noarch/jsonschema-specifications-2024.10.1-pyhd8ed1ab_1.conda - version: 2024.10.1 + https://conda.anaconda.org/conda-forge/noarch/jsonschema-specifications-2025.4.1-pyh29332c3_0.conda + version: 2025.4.1 + - category: main + dependencies: + python: '>=3.9' + hash: + md5: fa5ce9990186f1999a77fe7e1ff76e72 + sha256: 8986592c083706f7c791e6325c5cbd43c1c90df1ec2680f74beb23f0830454da + manager: conda + name: kaitaistruct + optional: false + platform: linux-64 + url: + https://conda.anaconda.org/conda-forge/noarch/kaitaistruct-0.10-pyhd8ed1ab_1.conda + version: '0.10' + - category: main + dependencies: + python: '>=3.9' + hash: + md5: fa5ce9990186f1999a77fe7e1ff76e72 + sha256: 8986592c083706f7c791e6325c5cbd43c1c90df1ec2680f74beb23f0830454da + manager: conda + name: kaitaistruct + optional: false + platform: osx-64 + url: + https://conda.anaconda.org/conda-forge/noarch/kaitaistruct-0.10-pyhd8ed1ab_1.conda + version: '0.10' + - category: main + dependencies: + python: '>=3.9' + hash: + md5: fa5ce9990186f1999a77fe7e1ff76e72 + sha256: 8986592c083706f7c791e6325c5cbd43c1c90df1ec2680f74beb23f0830454da + manager: conda + name: kaitaistruct + optional: false + platform: osx-arm64 + url: + 
https://conda.anaconda.org/conda-forge/noarch/kaitaistruct-0.10-pyhd8ed1ab_1.conda + version: '0.10' - category: main dependencies: __linux: '' @@ -6168,44 +6737,44 @@ package: - category: main dependencies: ld64_osx-64: '951.9' - libllvm20: '>=20.1.1,<20.2.0a0' + libllvm20: '>=20.1.2,<20.2.0a0' hash: - md5: 76199bfbb657cb3d69fc95e5dc668138 - sha256: 20266131a28e5e81573f8b4f176e0a8551c8182879a32c3eed626baa0bd2a8c5 + md5: 7a18ec4bf47a28ba7710ddefd6c7ff6e + sha256: 96e114a1b64f32157057b43fa1c9155cae894007632ce9f6246cbcf9a5e841f8 manager: conda name: ld64 optional: false platform: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/ld64-951.9-h2b71b23_4.conda + url: https://conda.anaconda.org/conda-forge/osx-64/ld64-951.9-h2b71b23_6.conda version: '951.9' - category: main dependencies: ld64_osx-arm64: '951.9' libllvm18: '>=18.1.8,<18.2.0a0' hash: - md5: de921c0941f051f3b019d46a0c83fdda - sha256: 4806f1356117fe4a6c0c9927587cd456ee9a891bb943e300b03aff9f17ad3a5c + md5: f756d0a0ffba157687a29077f3408016 + sha256: 2c796872c89dee18c8455bd5e4d7dcc6c4f8544c873856d12a64585ac60e315f manager: conda name: ld64 optional: false platform: osx-arm64 - url: https://conda.anaconda.org/conda-forge/osx-arm64/ld64-951.9-h4c6efb1_4.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/ld64-951.9-h4c6efb1_6.conda version: '951.9' - category: main dependencies: __osx: '>=10.13' libcxx: '' - libllvm20: '>=20.1.1,<20.2.0a0' + libllvm20: '>=20.1.2,<20.2.0a0' sigtool: '' tapi: '>=1300.6.5,<1301.0a0' hash: - md5: 88c50783c9f35b45c78f90b0956fea84 - sha256: 0e7ee3143413e5e9fc505845fb231e9885906a75bf4e55ae2aad83e904c840ee + md5: dc020c6d538c41f03ebb849c46749d09 + sha256: 4398ca61ad589d7f233195bb8b98629fc066e5daf08e68ff70cd2ee80f35e28c manager: conda name: ld64_osx-64 optional: false platform: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/ld64_osx-64-951.9-hc68d7ca_4.conda + url: https://conda.anaconda.org/conda-forge/osx-64/ld64_osx-64-951.9-hc68d7ca_6.conda version: '951.9' - category: main dependencies: @@ -6215,14 +6784,14 @@ package: sigtool: '' tapi: '>=1300.6.5,<1301.0a0' hash: - md5: d01a78a16542f235dd755ca66772795e - sha256: 0376873d88573688168b5b7618391dd68fa0b309ddce7fa77c5f9037ada7cf66 + md5: 61743b006633f5e1f9aa9e707f44fcb1 + sha256: 5ab2c15358d0ebfe26bafd2f768f524962f1a785c81d42518afb4f5d397e83f9 manager: conda name: ld64_osx-arm64 optional: false platform: osx-arm64 url: - https://conda.anaconda.org/conda-forge/osx-arm64/ld64_osx-arm64-951.9-hb6b49e2_4.conda + https://conda.anaconda.org/conda-forge/osx-arm64/ld64_osx-arm64-951.9-hb6b49e2_6.conda version: '951.9' - category: main dependencies: @@ -6239,105 +6808,190 @@ package: version: '2.43' - category: main dependencies: - libgcc-ng: '>=12' - libstdcxx-ng: '>=12' + pyasn1: '>=0.4.6' + python: '>=3.9' + hash: + md5: 7319a76eaab1e21f84ad7949ff2f66e7 + sha256: 0816b267189241330b85bbe9524423255cd4daf8dd4d97765213b49991d1c33d + manager: conda + name: ldap3 + optional: false + platform: linux-64 + url: https://conda.anaconda.org/conda-forge/noarch/ldap3-2.9.1-pyhd8ed1ab_1.conda + version: 2.9.1 + - category: main + dependencies: + pyasn1: '>=0.4.6' + python: '>=3.9' + hash: + md5: 7319a76eaab1e21f84ad7949ff2f66e7 + sha256: 0816b267189241330b85bbe9524423255cd4daf8dd4d97765213b49991d1c33d + manager: conda + name: ldap3 + optional: false + platform: osx-64 + url: https://conda.anaconda.org/conda-forge/noarch/ldap3-2.9.1-pyhd8ed1ab_1.conda + version: 2.9.1 + - category: main + dependencies: + pyasn1: '>=0.4.6' + python: '>=3.9' + hash: + 
md5: 7319a76eaab1e21f84ad7949ff2f66e7 + sha256: 0816b267189241330b85bbe9524423255cd4daf8dd4d97765213b49991d1c33d + manager: conda + name: ldap3 + optional: false + platform: osx-arm64 + url: https://conda.anaconda.org/conda-forge/noarch/ldap3-2.9.1-pyhd8ed1ab_1.conda + version: 2.9.1 + - category: main + dependencies: + __glibc: '>=2.17,<3.0.a0' + libgcc: '>=13' + libstdcxx: '>=13' hash: - md5: 76bbff344f0134279f225174e9064c8f - sha256: cb55f36dcd898203927133280ae1dc643368af041a48bcf7c026acb7c47b0c12 + md5: 9344155d33912347b37f0ae6c410a835 + sha256: 412381a43d5ff9bbed82cd52a0bbca5b90623f62e41007c9c42d3870c60945ff manager: conda name: lerc optional: false platform: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/lerc-4.0.0-h27087fc_0.tar.bz2 + url: https://conda.anaconda.org/conda-forge/linux-64/lerc-4.0.0-h0aef613_1.conda version: 4.0.0 - category: main dependencies: - libcxx: '>=13.0.1' + __osx: '>=10.13' + libcxx: '>=18' hash: - md5: f9d6a4c82889d5ecedec1d90eb673c55 - sha256: e41790fc0f4089726369b3c7f813117bbc14b533e0ed8b94cf75aba252e82497 + md5: 21f765ced1a0ef4070df53cb425e1967 + sha256: cc1f1d7c30aa29da4474ec84026ec1032a8df1d7ec93f4af3b98bb793d01184e manager: conda name: lerc optional: false platform: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/lerc-4.0.0-hb486fe8_0.tar.bz2 + url: https://conda.anaconda.org/conda-forge/osx-64/lerc-4.0.0-hcca01a6_1.conda version: 4.0.0 - category: main dependencies: - libcxx: '>=13.0.1' + __osx: '>=11.0' + libcxx: '>=18' hash: - md5: de462d5aacda3b30721b512c5da4e742 - sha256: 6f068bb53dfb6147d3147d981bb851bb5477e769407ad4e6a68edf482fdcb958 + md5: a74332d9b60b62905e3d30709df08bf1 + sha256: 12361697f8ffc9968907d1a7b5830e34c670e4a59b638117a2cdfed8f63a38f8 manager: conda name: lerc optional: false platform: osx-arm64 - url: https://conda.anaconda.org/conda-forge/osx-arm64/lerc-4.0.0-h9a09cb3_0.tar.bz2 + url: https://conda.anaconda.org/conda-forge/osx-arm64/lerc-4.0.0-hd64df32_1.conda version: 4.0.0 + - category: main + dependencies: + __glibc: '>=2.17,<3.0.a0' + libgcc-ng: '>=12' + libstdcxx-ng: '>=12' + hash: + md5: c48fc56ec03229f294176923c3265c05 + sha256: 945396726cadae174a661ce006e3f74d71dbd719219faf7cc74696b267f7b0b5 + manager: conda + name: libabseil + optional: false + platform: linux-64 + url: + https://conda.anaconda.org/conda-forge/linux-64/libabseil-20240116.2-cxx17_he02047a_1.conda + version: '20240116.2' + - category: main + dependencies: + __osx: '>=10.13' + libcxx: '>=16' + hash: + md5: d6c78ca84abed3fea5f308ac83b8f54e + sha256: 396d18f39d5207ecae06fddcbc6e5f20865718939bc4e0ea9729e13952833aac + manager: conda + name: libabseil + optional: false + platform: osx-64 + url: + https://conda.anaconda.org/conda-forge/osx-64/libabseil-20240116.2-cxx17_hf036a51_1.conda + version: '20240116.2' + - category: main + dependencies: + __osx: '>=11.0' + libcxx: '>=16' + hash: + md5: f16963d88aed907af8b90878b8d8a05c + sha256: a9517c8683924f4b3b9380cdaa50fdd2009cd8d5f3918c92f64394238189d3cb + manager: conda + name: libabseil + optional: false + platform: osx-arm64 + url: + https://conda.anaconda.org/conda-forge/osx-arm64/libabseil-20240116.2-cxx17_h00cdb27_1.conda + version: '20240116.2' - category: main dependencies: __glibc: '>=2.17,<3.0.a0' bzip2: '>=1.0.8,<2.0a0' libgcc: '>=13' - liblzma: '>=5.6.3,<6.0a0' - libxml2: '>=2.13.5,<3.0a0' + liblzma: '>=5.8.1,<6.0a0' + libxml2: '>=2.13.7,<2.14.0a0' libzlib: '>=1.3.1,<2.0a0' lz4-c: '>=1.10.0,<1.11.0a0' lzo: '>=2.10,<3.0a0' - openssl: '>=3.4.0,<4.0a0' - zstd: '>=1.5.6,<1.6.0a0' + 
openssl: '>=3.5.0,<4.0a0' + zstd: '>=1.5.7,<1.6.0a0' hash: - md5: a28808eae584c7f519943719b2a2b386 - sha256: 2466803e26ae9dbd2263de3a102b572b741c056549875c04b6ec10830bd5d338 + md5: b80309616f188ac77c4740acba40f796 + sha256: d49b2a3617b689763d1377a5d1fbfc3c914ee0afa26b3c1858e1c4329329c6df manager: conda name: libarchive optional: false platform: linux-64 url: - https://conda.anaconda.org/conda-forge/linux-64/libarchive-3.7.7-h4585015_3.conda + https://conda.anaconda.org/conda-forge/linux-64/libarchive-3.7.7-h75ea233_4.conda version: 3.7.7 - category: main dependencies: __osx: '>=10.13' bzip2: '>=1.0.8,<2.0a0' - libiconv: '>=1.17,<2.0a0' - liblzma: '>=5.6.3,<6.0a0' - libxml2: '>=2.13.5,<3.0a0' + libiconv: '>=1.18,<2.0a0' + liblzma: '>=5.8.1,<6.0a0' + libxml2: '>=2.13.7,<2.14.0a0' libzlib: '>=1.3.1,<2.0a0' lz4-c: '>=1.10.0,<1.11.0a0' lzo: '>=2.10,<3.0a0' - openssl: '>=3.4.0,<4.0a0' - zstd: '>=1.5.6,<1.6.0a0' + openssl: '>=3.5.0,<4.0a0' + zstd: '>=1.5.7,<1.6.0a0' hash: - md5: 5cc55f063de099a537a56c4db2e8d58d - sha256: fd1f0d23787057fce1c9b7e598e91bde3868cfed02a0c3c666f720bab71b136e + md5: 90b169a22e86d4b7d7b4e2e75b53a3bd + sha256: e574fbfa9255aa03072cc43734aae610fddba3e1c228eb2396652773c8cd7fa0 manager: conda name: libarchive optional: false platform: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libarchive-3.7.7-h1a33361_3.conda + url: https://conda.anaconda.org/conda-forge/osx-64/libarchive-3.7.7-h2c98640_4.conda version: 3.7.7 - category: main dependencies: __osx: '>=11.0' bzip2: '>=1.0.8,<2.0a0' - libiconv: '>=1.17,<2.0a0' - liblzma: '>=5.6.3,<6.0a0' - libxml2: '>=2.13.5,<3.0a0' + libiconv: '>=1.18,<2.0a0' + liblzma: '>=5.8.1,<6.0a0' + libxml2: '>=2.13.7,<2.14.0a0' libzlib: '>=1.3.1,<2.0a0' lz4-c: '>=1.10.0,<1.11.0a0' lzo: '>=2.10,<3.0a0' - openssl: '>=3.4.0,<4.0a0' - zstd: '>=1.5.6,<1.6.0a0' + openssl: '>=3.5.0,<4.0a0' + zstd: '>=1.5.7,<1.6.0a0' hash: - md5: 1c2eda2163510220b9f9d56a85c8da9d - sha256: cbce64423e72bcd3576b5cfe0e4edd255900100f72467d5b4ea1d77449ac1ce9 + md5: 4b12c69a3c3ca02ceac535ae6168f3af + sha256: b7f862cfa4522dd4774c61376a95b1b3aea80ff0d42dd5ebf6c9a07d32eb6f18 manager: conda name: libarchive optional: false platform: osx-arm64 url: - https://conda.anaconda.org/conda-forge/osx-arm64/libarchive-3.7.7-h3b16cec_3.conda + https://conda.anaconda.org/conda-forge/osx-arm64/libarchive-3.7.7-h3c2f2b0_4.conda version: 3.7.7 - category: main dependencies: @@ -6345,43 +6999,43 @@ package: libgcc: '>=13' libstdcxx: '>=13' hash: - md5: 988f4937281a66ca19d1adb3b5e3f859 - sha256: 13b863584fccbb9089de73a2442e540703ce4873e4719c9d98c98e4a8e12f9d1 + md5: 57566a81dd1e5aa3d98ac7582e8bfe03 + sha256: e30733a729eb6efd9cb316db0202897c882d46f6c20a0e647b4de8ec921b7218 manager: conda name: libasprintf optional: false platform: linux-64 url: - https://conda.anaconda.org/conda-forge/linux-64/libasprintf-0.23.1-h8e693c7_0.conda - version: 0.23.1 + https://conda.anaconda.org/conda-forge/linux-64/libasprintf-0.24.1-h8e693c7_0.conda + version: 0.24.1 - category: main dependencies: __osx: '>=10.13' libcxx: '>=18' hash: - md5: 43e1d9e1712208ac61941a513259248d - sha256: d6a4fbf497040ab4733c5dc65dd273ed6fa827ce6e67fd12abbe08c3cc3e192e + md5: 9d7c96ed1ebdf2f180b20d3e09a4c694 + sha256: 86febbb2cc53b0978cb22057da2e9dc8f07ffe96305148d011c241c3eae668d0 manager: conda name: libasprintf optional: false platform: osx-64 url: - https://conda.anaconda.org/conda-forge/osx-64/libasprintf-0.23.1-h27064b9_0.conda - version: 0.23.1 + 
https://conda.anaconda.org/conda-forge/osx-64/libasprintf-0.24.1-h27064b9_0.conda + version: 0.24.1 - category: main dependencies: __osx: '>=11.0' libcxx: '>=18' hash: - md5: baf9e4423f10a15ca7eab26480007639 - sha256: 2b27d2ede7867fd362f94644aac1d7fb9af7f7fc3f122cb014647b47ffd402a4 + md5: b5a77d2b7c2013b3b1ffce193764302f + sha256: 54293ab2ce43085ac424dc62804fd4d7ec62cce404a77f0c99a9a48857bca0a9 manager: conda name: libasprintf optional: false platform: osx-arm64 url: - https://conda.anaconda.org/conda-forge/osx-arm64/libasprintf-0.23.1-h493aca8_0.conda - version: 0.23.1 + https://conda.anaconda.org/conda-forge/osx-arm64/libasprintf-0.24.1-h493aca8_0.conda + version: 0.24.1 - category: main dependencies: libopenblas: '>=0.3.29,<1.0a0' @@ -6484,16 +7138,16 @@ package: libssh2: '>=1.11.1,<2.0a0' libzlib: '>=1.3.1,<2.0a0' openssl: '>=3.4.1,<4.0a0' - zstd: '>=1.5.6,<1.6.0a0' + zstd: '>=1.5.7,<1.6.0a0' hash: - md5: 45e9dc4e7b25e2841deb392be085500e - sha256: 2ebc3039af29269e4cdb858fca36265e5e400c1125a4bcd84ae73a596e0e76ca + md5: cbdc92ac0d93fe3c796e36ad65c7905c + sha256: 38e528acfaa0276b7052f4de44271ff9293fdb84579650601a8c49dac171482a manager: conda name: libcurl optional: false platform: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.12.1-h332b0f4_0.conda - version: 8.12.1 + url: https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.13.0-h332b0f4_0.conda + version: 8.13.0 - category: main dependencies: __osx: '>=10.13' @@ -6502,16 +7156,16 @@ package: libssh2: '>=1.11.1,<2.0a0' libzlib: '>=1.3.1,<2.0a0' openssl: '>=3.4.1,<4.0a0' - zstd: '>=1.5.6,<1.6.0a0' + zstd: '>=1.5.7,<1.6.0a0' hash: - md5: b39e6b74b4eb475eacdfd463fce82138 - sha256: 51168abcbee14814b94dea3358300de4053423c6ce8d4627475464fb8cf0e5d3 + md5: a35b1976d746d55cd7380c8842d9a1b5 + sha256: 137d92f1107141d9eb39598fb05837be4f9aad4ead957194d94364834f3cc590 manager: conda name: libcurl optional: false platform: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libcurl-8.12.1-h5dec5d8_0.conda - version: 8.12.1 + url: https://conda.anaconda.org/conda-forge/osx-64/libcurl-8.13.0-h5dec5d8_0.conda + version: 8.13.0 - category: main dependencies: __osx: '>=11.0' @@ -6520,78 +7174,78 @@ package: libssh2: '>=1.11.1,<2.0a0' libzlib: '>=1.3.1,<2.0a0' openssl: '>=3.4.1,<4.0a0' - zstd: '>=1.5.6,<1.6.0a0' + zstd: '>=1.5.7,<1.6.0a0' hash: - md5: 105f0cceef753644912f42e11c1ae9cf - sha256: 0bddd1791eb0602c8c6aa465802e9d4526d3ec1251d900b209e767753565d5df + md5: 4a5d33f75f9ead15089b04bed8d0eafe + sha256: 747f7e8aad390b9b39a300401579ff1b5731537a586869b724dc071a9b315f03 manager: conda name: libcurl optional: false platform: osx-arm64 - url: https://conda.anaconda.org/conda-forge/osx-arm64/libcurl-8.12.1-h73640d1_0.conda - version: 8.12.1 + url: https://conda.anaconda.org/conda-forge/osx-arm64/libcurl-8.13.0-h73640d1_0.conda + version: 8.13.0 - category: main dependencies: __osx: '>=10.13' hash: - md5: 85cff0ed95d940c4762d5a99a6fe34ae - sha256: b30ef239517cfffb71d8ece7b903afe2a1bac0425f5bd38976b35d3cbf77312b + md5: 9dde68cee0a231b19e189954ac29027b + sha256: 9003bd12988a54713602999999737590f3b023b0cadb2b316cd3ac256d6740d6 manager: conda name: libcxx optional: false platform: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libcxx-20.1.1-hf95d169_0.conda - version: 20.1.1 + url: https://conda.anaconda.org/conda-forge/osx-64/libcxx-20.1.5-hf95d169_0.conda + version: 20.1.5 - category: main dependencies: __osx: '>=11.0' hash: - md5: 833c4899914bf96caf64b52ef415e319 - sha256: 
80dd8ae3fbcf508ed72f074ada2c7784298e822e8d19c3b84c266bb31456d77c + md5: 4ef1bdb94d42055f511bb358f2048c58 + sha256: 2765b6e23da91807ce2ed44587fbaadd5ba933b0269810b3c22462f9582aedd3 manager: conda name: libcxx optional: false platform: osx-arm64 - url: https://conda.anaconda.org/conda-forge/osx-arm64/libcxx-20.1.1-ha82da77_0.conda - version: 20.1.1 + url: https://conda.anaconda.org/conda-forge/osx-arm64/libcxx-20.1.5-ha82da77_0.conda + version: 20.1.5 - category: main dependencies: __glibc: '>=2.17,<3.0.a0' libgcc: '>=13' hash: - md5: 8dfae1d2e74767e9ce36d5fa0d8605db - sha256: 511d801626d02f4247a04fff957cc6e9ec4cc7e8622bd9acd076bcdc5de5fe66 + md5: 64f0c503da58ec25ebd359e4d990afa8 + sha256: 8420748ea1cc5f18ecc5068b4f24c7a023cc9b20971c99c824ba10641fb95ddf manager: conda name: libdeflate optional: false platform: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.23-h4ddbbb0_0.conda - version: '1.23' + url: https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.24-h86f0d12_0.conda + version: '1.24' - category: main dependencies: __osx: '>=10.13' hash: - md5: 120f8f7ba6a8defb59f4253447db4bb4 - sha256: 20c1e685e7409bb82c819ba55b9f7d9a654e8e6d597081581493badb7464520e + md5: f0a46c359722a3e84deb05cd4072d153 + sha256: 2733a4adf53daca1aa4f41fe901f0f8ee9e4c509abd23ffcd7660013772d6f45 manager: conda name: libdeflate optional: false platform: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libdeflate-1.23-he65b83e_0.conda - version: '1.23' + url: https://conda.anaconda.org/conda-forge/osx-64/libdeflate-1.24-hcc1b750_0.conda + version: '1.24' - category: main dependencies: __osx: '>=11.0' hash: - md5: 1d8b9588be14e71df38c525767a1ac30 - sha256: 887c02deaed6d583459eba6367023e36d8761085b2f7126e389424f57155da53 + md5: 3baf58a5a87e7c2f4d243ce2f8f2fe5c + sha256: 417d52b19c679e1881cce3f01cad3a2d542098fa2d6df5485aac40f01aede4d1 manager: conda name: libdeflate optional: false platform: osx-arm64 url: - https://conda.anaconda.org/conda-forge/osx-arm64/libdeflate-1.23-hec38601_0.conda - version: '1.23' + https://conda.anaconda.org/conda-forge/osx-arm64/libdeflate-1.24-h5773f1b_0.conda + version: '1.24' - category: main dependencies: __glibc: '>=2.17,<3.0.a0' @@ -6674,100 +7328,186 @@ package: __glibc: '>=2.17,<3.0.a0' libgcc: '>=13' hash: - md5: db833e03127376d461e1e13e76f09b6c - sha256: 56541b98447b58e52d824bd59d6382d609e11de1f8adf20b23143e353d2b8d26 + md5: db0bfbe7dd197b68ad5f30333bae6ce0 + sha256: 33ab03438aee65d6aa667cf7d90c91e5e7d734c19a67aa4c7040742c0a13d505 manager: conda name: libexpat optional: false platform: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.6.4-h5888daf_0.conda - version: 2.6.4 + url: https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.7.0-h5888daf_0.conda + version: 2.7.0 - category: main dependencies: __osx: '>=10.13' hash: - md5: 20307f4049a735a78a29073be1be2626 - sha256: d10f43d0c5df6c8cf55259bce0fe14d2377eed625956cddce06f58827d288c59 + md5: 026d0a1056ba2a3dbbea6d4b08188676 + sha256: 976f2e23ad2bb2b8e92c99bfa2ead3ad557b17a129b170f7e2dfcf233193dd7e manager: conda name: libexpat optional: false platform: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libexpat-2.6.4-h240833e_0.conda - version: 2.6.4 + url: https://conda.anaconda.org/conda-forge/osx-64/libexpat-2.7.0-h240833e_0.conda + version: 2.7.0 - category: main dependencies: __osx: '>=11.0' hash: - md5: 38d2656dd914feb0cab8c629370768bf - sha256: e42ab5ace927ee7c84e3f0f7d813671e1cf3529f5f06ee5899606630498c2745 + md5: 
6934bbb74380e045741eb8637641a65b + sha256: ee550e44765a7bbcb2a0216c063dcd53ac914a7be5386dd0554bd06e6be61840 manager: conda name: libexpat optional: false platform: osx-arm64 - url: https://conda.anaconda.org/conda-forge/osx-arm64/libexpat-2.6.4-h286801f_0.conda - version: 2.6.4 + url: https://conda.anaconda.org/conda-forge/osx-arm64/libexpat-2.7.0-h286801f_0.conda + version: 2.7.0 - category: main dependencies: __glibc: '>=2.17,<3.0.a0' libgcc: '>=13' hash: - md5: e3eb7806380bc8bcecba6d749ad5f026 - sha256: 67a6c95e33ebc763c1adc3455b9a9ecde901850eb2fceb8e646cc05ef3a663da + md5: ede4673863426c0883c0063d853bbd85 + sha256: 764432d32db45466e87f10621db5b74363a9f847d2b8b1f9743746cd160f06ab manager: conda name: libffi optional: false platform: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.6-h2dba641_0.conda + url: https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.6-h2dba641_1.conda version: 3.4.6 - category: main dependencies: __osx: '>=10.13' hash: - md5: b8667b0d0400b8dcb6844d8e06b2027d - sha256: 7805fdc536a3da7fb63dc48e040105cd4260c69a1d2bf5804dadd31bde8bab51 + md5: 4ca9ea59839a9ca8df84170fab4ceb41 + sha256: 6394b1bc67c64a21a5cc73d1736d1d4193a64515152e861785c44d2cfc49edf3 manager: conda name: libffi optional: false platform: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libffi-3.4.6-h281671d_0.conda + url: https://conda.anaconda.org/conda-forge/osx-64/libffi-3.4.6-h281671d_1.conda version: 3.4.6 - category: main - dependencies: {} + dependencies: + __osx: '>=11.0' hash: - md5: 086914b672be056eb70fd4285b6783b6 - sha256: 41b3d13efb775e340e4dba549ab5c029611ea6918703096b2eaa9c015c0750ca + md5: c215a60c2935b517dcda8cad4705734d + sha256: c6a530924a9b14e193ea9adfe92843de2a806d1b7dbfd341546ece9653129e60 manager: conda name: libffi optional: false platform: osx-arm64 - url: https://conda.anaconda.org/conda-forge/osx-arm64/libffi-3.4.2-h3422bc3_5.tar.bz2 - version: 3.4.2 + url: https://conda.anaconda.org/conda-forge/osx-arm64/libffi-3.4.6-h1da3d7d_1.conda + version: 3.4.6 + - category: main + dependencies: + libfreetype6: '>=2.13.3' + hash: + md5: 51f5be229d83ecd401fb369ab96ae669 + sha256: 7be9b3dac469fe3c6146ff24398b685804dfc7a1de37607b84abd076f57cc115 + manager: conda + name: libfreetype + optional: false + platform: linux-64 + url: + https://conda.anaconda.org/conda-forge/linux-64/libfreetype-2.13.3-ha770c72_1.conda + version: 2.13.3 + - category: main + dependencies: + libfreetype6: '>=2.13.3' + hash: + md5: 07c8d3fbbe907f32014b121834b36dd5 + sha256: afe0e2396844c8cfdd6256ac84cabc9af823b1727f704c137b030b85839537a6 + manager: conda + name: libfreetype + optional: false + platform: osx-64 + url: + https://conda.anaconda.org/conda-forge/osx-64/libfreetype-2.13.3-h694c41f_1.conda + version: 2.13.3 + - category: main + dependencies: + libfreetype6: '>=2.13.3' + hash: + md5: d06282e08e55b752627a707d58779b8f + sha256: 1f8c16703fe333cdc2639f7cdaf677ac2120843453222944a7c6c85ec342903c + manager: conda + name: libfreetype + optional: false + platform: osx-arm64 + url: + https://conda.anaconda.org/conda-forge/osx-arm64/libfreetype-2.13.3-hce30654_1.conda + version: 2.13.3 + - category: main + dependencies: + __glibc: '>=2.17,<3.0.a0' + libgcc: '>=13' + libpng: '>=1.6.47,<1.7.0a0' + libzlib: '>=1.3.1,<2.0a0' + hash: + md5: 3c255be50a506c50765a93a6644f32fe + sha256: 7759bd5c31efe5fbc36a7a1f8ca5244c2eabdbeb8fc1bee4b99cf989f35c7d81 + manager: conda + name: libfreetype6 + optional: false + platform: linux-64 + url: + 
https://conda.anaconda.org/conda-forge/linux-64/libfreetype6-2.13.3-h48d6fc4_1.conda + version: 2.13.3 + - category: main + dependencies: + __osx: '>=10.13' + libpng: '>=1.6.47,<1.7.0a0' + libzlib: '>=1.3.1,<2.0a0' + hash: + md5: c76e6f421a0e95c282142f820835e186 + sha256: 058165962aa64fc5a6955593212c0e1ea42ca6d6dba60ee61dff612d4c3818d7 + manager: conda + name: libfreetype6 + optional: false + platform: osx-64 + url: + https://conda.anaconda.org/conda-forge/osx-64/libfreetype6-2.13.3-h40dfd5c_1.conda + version: 2.13.3 + - category: main + dependencies: + __osx: '>=11.0' + libpng: '>=1.6.47,<1.7.0a0' + libzlib: '>=1.3.1,<2.0a0' + hash: + md5: b163d446c55872ef60530231879908b9 + sha256: c278df049b1a071841aa0aca140a338d087ea594e07dcf8a871d2cfe0e330e75 + manager: conda + name: libfreetype6 + optional: false + platform: osx-arm64 + url: + https://conda.anaconda.org/conda-forge/osx-arm64/libfreetype6-2.13.3-h1d14073_1.conda + version: 2.13.3 - category: main dependencies: __glibc: '>=2.17,<3.0.a0' _openmp_mutex: '>=4.5' hash: - md5: ef504d1acbd74b7cc6849ef8af47dd03 - sha256: 3a572d031cb86deb541d15c1875aaa097baefc0c580b54dc61f5edab99215792 + md5: ea8ac52380885ed41c1baa8f1d6d2b93 + sha256: 0024f9ab34c09629621aefd8603ef77bf9d708129b0dd79029e502c39ffc2195 manager: conda name: libgcc optional: false platform: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libgcc-14.2.0-h767d61c_2.conda - version: 14.2.0 + url: https://conda.anaconda.org/conda-forge/linux-64/libgcc-15.1.0-h767d61c_2.conda + version: 15.1.0 - category: main dependencies: - libgcc: 14.2.0 + libgcc: 15.1.0 hash: - md5: a2222a6ada71fb478682efe483ce0f92 - sha256: fb7558c328b38b2f9d2e412c48da7890e7721ba018d733ebdfea57280df01904 + md5: ddca86c7040dd0e73b2b69bd7833d225 + sha256: 0ab5421a89f090f3aa33841036bb3af4ed85e1f91315b528a9d75fab9aad51ae manager: conda name: libgcc-ng optional: false platform: linux-64 url: - https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-14.2.0-h69a702a_2.conda - version: 14.2.0 + https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-15.1.0-h69a702a_2.conda + version: 15.1.0 - category: main dependencies: __glibc: '>=2.17,<3.0.a0' @@ -6842,123 +7582,123 @@ package: __glibc: '>=2.17,<3.0.a0' libgcc: '>=13' hash: - md5: a09ce5decdef385bcce78c32809fa794 - sha256: 190097140d9c16637aa516757d8087f17e8c22cc844c87288da64404b81ef43c + md5: 2ee6d71b72f75d50581f2f68e965efdb + sha256: 104f2341546e295d1136ab3010e81391bd3fd5be0f095db59266e8eba2082d37 manager: conda name: libgettextpo optional: false platform: linux-64 url: - https://conda.anaconda.org/conda-forge/linux-64/libgettextpo-0.23.1-h5888daf_0.conda - version: 0.23.1 + https://conda.anaconda.org/conda-forge/linux-64/libgettextpo-0.24.1-h5888daf_0.conda + version: 0.24.1 - category: main dependencies: __osx: '>=10.13' - libiconv: '>=1.17,<2.0a0' - libintl: 0.23.1 + libiconv: '>=1.18,<2.0a0' + libintl: 0.24.1 hash: - md5: 352ffb2b7788775a65a32c018d972a8a - sha256: 52c2423df75223df4ebee991eb33e3827b9d360957211022246145b99c672dc5 + md5: facba41133c6e10d9f67b1a12f66bd3a + sha256: e26e5bfe706c37cfbcbfe7598d3ebcdf4c39d89a9497e6c9bfe9069b0a18e3f3 manager: conda name: libgettextpo optional: false platform: osx-64 url: - https://conda.anaconda.org/conda-forge/osx-64/libgettextpo-0.23.1-h27064b9_0.conda - version: 0.23.1 + https://conda.anaconda.org/conda-forge/osx-64/libgettextpo-0.24.1-h27064b9_0.conda + version: 0.24.1 - category: main dependencies: __osx: '>=11.0' - libiconv: '>=1.17,<2.0a0' - libintl: 0.23.1 + libiconv: '>=1.18,<2.0a0' + libintl: 
0.24.1 hash: - md5: 18ad77def4cb7326692033eded9c815d - sha256: 4dbd3f698d027330033f06778567eda5b985e2348ca92900083654a114ddd051 + md5: 218a45f477876644cf75c7ed0b5158c7 + sha256: 0f380fee5d5dc870b6b9d3134cca344965d68bbf454f6ac741907fee4cc3e07a manager: conda name: libgettextpo optional: false platform: osx-arm64 url: - https://conda.anaconda.org/conda-forge/osx-arm64/libgettextpo-0.23.1-h493aca8_0.conda - version: 0.23.1 + https://conda.anaconda.org/conda-forge/osx-arm64/libgettextpo-0.24.1-h493aca8_0.conda + version: 0.24.1 - category: main dependencies: - libgfortran5: 14.2.0 + libgfortran5: 15.1.0 hash: - md5: fb54c4ea68b460c278d26eea89cfbcc3 - sha256: e05263e8960da03c341650f2a3ffa4ccae4e111cb198e8933a2908125459e5a6 + md5: f92e6e0a3c0c0c85561ef61aa59d555d + sha256: 914daa4f632b786827ea71b5e07cd00d25fc6e67789db2f830dc481eec660342 manager: conda name: libgfortran optional: false platform: linux-64 url: - https://conda.anaconda.org/conda-forge/linux-64/libgfortran-14.2.0-h69a702a_2.conda - version: 14.2.0 + https://conda.anaconda.org/conda-forge/linux-64/libgfortran-15.1.0-h69a702a_2.conda + version: 15.1.0 - category: main dependencies: libgfortran5: 14.2.0 hash: - md5: e8b6b4962db050d7923e2cee3efff446 - sha256: fcf482d36f4ea05b6183813ff59c893998e568d48cbc82a7ad5f4c3abd35ec6a + md5: 6b27baf030f5d6603713c7e72d3f6b9a + sha256: 984040aa98dedcfbe1cf59befd73740e30d368b96cbfa17c002297e67fa5af23 manager: conda name: libgfortran optional: false platform: osx-64 url: - https://conda.anaconda.org/conda-forge/osx-64/libgfortran-5.0.0-14_2_0_h51e75f0_1.conda - version: 5.0.0 + https://conda.anaconda.org/conda-forge/osx-64/libgfortran-14.2.0-hef36b68_105.conda + version: 14.2.0 - category: main dependencies: libgfortran5: 14.2.0 hash: - md5: d0e8a9e0efd41b9821833d5bbfd7e653 - sha256: 00adc502de159fef380cc16150ec328309910e241b4a465500f2084e6c9646dd + md5: ad35937216e65cfeecd828979ee5e9e6 + sha256: 6ca48762c330d1cdbdaa450f197ccc16ffb7181af50d112b4ccf390223d916a1 manager: conda name: libgfortran optional: false platform: osx-arm64 url: - https://conda.anaconda.org/conda-forge/osx-arm64/libgfortran-5.0.0-14_2_0_h6c33f7e_1.conda - version: 5.0.0 + https://conda.anaconda.org/conda-forge/osx-arm64/libgfortran-14.2.0-heb5dd2a_105.conda + version: 14.2.0 - category: main dependencies: __glibc: '>=2.17,<3.0.a0' - libgcc: '>=14.2.0' + libgcc: '>=15.1.0' hash: - md5: 556a4fdfac7287d349b8f09aba899693 - sha256: c17b7cf3073a1f4e1f34d50872934fa326346e104d3c445abc1e62481ad6085c + md5: 01de444988ed960031dbe84cf4f9b1fc + sha256: be23750f3ca1a5cb3ada858c4f633effe777487d1ea35fddca04c0965c073350 manager: conda name: libgfortran5 optional: false platform: linux-64 url: - https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-14.2.0-hf1ad2bd_2.conda - version: 14.2.0 + https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-15.1.0-hcea5267_2.conda + version: 15.1.0 - category: main dependencies: llvm-openmp: '>=8.0.0' hash: - md5: 9e089ae71e7caca1565af0b632027d4d - sha256: 4f0b4bed6a3dae0e91c74f84cfc4adde3dbd8bdcf5307ae53489cedfbf1509df + md5: 94560312ff3c78225bed62ab59854c31 + sha256: 02fc48106e1ca65cf7de15f58ec567f866f6e8e9dcced157d0cff89f0768bb59 manager: conda name: libgfortran5 optional: false platform: osx-64 url: - https://conda.anaconda.org/conda-forge/osx-64/libgfortran5-14.2.0-h51e75f0_1.conda + https://conda.anaconda.org/conda-forge/osx-64/libgfortran5-14.2.0-h58528f3_105.conda version: 14.2.0 - category: main dependencies: llvm-openmp: '>=8.0.0' hash: - md5: fa7750a7197063eed8fdf8e74e148d03 - sha256: 
a578ecffb79d81eb67bbdeac7bcddbfea5908393d51b0c4a9a461e73a3524274 + md5: 06f35a3b1479ec55036e1c9872f97f2c + sha256: de09987e1080f71e2285deec45ccb949c2620a672b375029534fbb878e471b22 manager: conda name: libgfortran5 optional: false platform: osx-arm64 url: - https://conda.anaconda.org/conda-forge/osx-arm64/libgfortran5-14.2.0-h6c33f7e_1.conda + https://conda.anaconda.org/conda-forge/osx-arm64/libgfortran5-14.2.0-h2c44a93_105.conda version: 14.2.0 - category: main dependencies: @@ -6967,116 +7707,116 @@ package: libssh2: '>=1.11.1,<2.0a0' libstdcxx: '>=13' libzlib: '>=1.3.1,<2.0a0' - openssl: '>=3.4.0,<4.0a0' - pcre2: '>=10.44,<10.45.0a0' + openssl: '>=3.5.0,<4.0a0' + pcre2: '>=10.45,<10.46.0a0' hash: - md5: fbb1b5f37dbfe61dce67db8addc4b41e - sha256: f41090eac5473c9c02e514a893610bf197e6c3e96bd96481c8e33ded961f6ec8 + md5: 45d962ba72f98ec3bd2e71d3e8a6c72a + sha256: f4b819345c61f423ab264aac901e13ef677d639d006e2573baa011c717405b61 manager: conda name: libgit2 optional: false platform: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libgit2-1.9.0-hd24f944_0.conda + url: https://conda.anaconda.org/conda-forge/linux-64/libgit2-1.9.0-ha56cf85_2.conda version: 1.9.0 - category: main dependencies: __osx: '>=10.13' libcxx: '>=18' - libiconv: '>=1.17,<2.0a0' + libiconv: '>=1.18,<2.0a0' libssh2: '>=1.11.1,<2.0a0' libzlib: '>=1.3.1,<2.0a0' - openssl: '>=3.4.0,<4.0a0' - pcre2: '>=10.44,<10.45.0a0' + openssl: '>=3.5.0,<4.0a0' + pcre2: '>=10.45,<10.46.0a0' hash: - md5: 6c955a6f0fdc890bf957b9e6f6fd412b - sha256: 6699a282b118901ba749309d70d12bbb75fcc9ed6a3fb7318133312464669430 + md5: 9344fbe2fd3c20fc77ff72f18556eda4 + sha256: 4fdbb8277926d12e25c4a53612fa6d5917749957957d91a0f5f9d8d8477ee715 manager: conda name: libgit2 optional: false platform: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libgit2-1.9.0-hf50decd_0.conda + url: https://conda.anaconda.org/conda-forge/osx-64/libgit2-1.9.0-h1ad2543_2.conda version: 1.9.0 - category: main dependencies: __osx: '>=11.0' libcxx: '>=18' - libiconv: '>=1.17,<2.0a0' + libiconv: '>=1.18,<2.0a0' libssh2: '>=1.11.1,<2.0a0' libzlib: '>=1.3.1,<2.0a0' - openssl: '>=3.4.0,<4.0a0' - pcre2: '>=10.44,<10.45.0a0' + openssl: '>=3.5.0,<4.0a0' + pcre2: '>=10.45,<10.46.0a0' hash: - md5: 8b48c3c783ce259513ddbf7576c1c52a - sha256: 059daa9a40b23351a494ae08e7a9d532f54d255e64826672c3a7472085344bcd + md5: 05cde10ce2cf31ddb9ced05dbc0642bd + sha256: 45c49e5aaebb8dae79fd9a0b2e49370c8061050a3893e4a9c82ab47ae270a986 manager: conda name: libgit2 optional: false platform: osx-arm64 - url: https://conda.anaconda.org/conda-forge/osx-arm64/libgit2-1.9.0-h211146d_0.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/libgit2-1.9.0-he19e06a_2.conda version: 1.9.0 - category: main dependencies: __glibc: '>=2.17,<3.0.a0' - libffi: '>=3.4,<4.0a0' + libffi: '>=3.4.6,<3.5.0a0' libgcc: '>=13' libiconv: '>=1.18,<2.0a0' libzlib: '>=1.3.1,<2.0a0' - pcre2: '>=10.44,<10.45.0a0' + pcre2: '>=10.45,<10.46.0a0' hash: - md5: 40cdeafb789a5513415f7bdbef053cf5 - sha256: 8e8737ca776d897d81a97e3de28c4bb33c45b5877bbe202b9b0ad2f61ca39397 + md5: 072ab14a02164b7c0c089055368ff776 + sha256: a6b5cf4d443044bc9a0293dd12ca2015f0ebe5edfdc9c4abdde0b9947f9eb7bd manager: conda name: libglib optional: false platform: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libglib-2.84.0-h2ff4ddf_0.conda - version: 2.84.0 + url: https://conda.anaconda.org/conda-forge/linux-64/libglib-2.84.2-h3618099_0.conda + version: 2.84.2 - category: main dependencies: __osx: '>=10.13' - libffi: 
'>=3.4,<4.0a0' + libffi: '>=3.4.6,<3.5.0a0' libiconv: '>=1.18,<2.0a0' - libintl: '>=0.23.1,<1.0a0' + libintl: '>=0.24.1,<1.0a0' libzlib: '>=1.3.1,<2.0a0' - pcre2: '>=10.44,<10.45.0a0' + pcre2: '>=10.45,<10.46.0a0' hash: - md5: 9d9e772b8e01ce350ddff9b277503514 - sha256: 6345cb63429ca1d216e47502a04dcce8b9f8a4fe08547cef42bbc040dc453b9e + md5: eeb11015e8b75f8af67014faea18f305 + sha256: 4445ab5b45bfeeb087ef3fd4f94c90f41261b5638916c58928600c1fc1f4f6ab manager: conda name: libglib optional: false platform: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libglib-2.84.0-h5c976ab_0.conda - version: 2.84.0 + url: https://conda.anaconda.org/conda-forge/osx-64/libglib-2.84.2-h3139dbc_0.conda + version: 2.84.2 - category: main dependencies: __osx: '>=11.0' - libffi: '>=3.4,<4.0a0' + libffi: '>=3.4.6,<3.5.0a0' libiconv: '>=1.18,<2.0a0' - libintl: '>=0.23.1,<1.0a0' + libintl: '>=0.24.1,<1.0a0' libzlib: '>=1.3.1,<2.0a0' - pcre2: '>=10.44,<10.45.0a0' + pcre2: '>=10.45,<10.46.0a0' hash: - md5: 86bdf23c648be3498294c4ab861e7090 - sha256: 70a414faef075e11e7a51861e9e9c953d8373b0089070f98136a7578d8cda67e + md5: 7bbb8961dca1b4b9f2b01b6e722111a7 + sha256: 5fcc5e948706cc64e45e2454267f664ed5a1e84f15345aae04a41d852a879c0e manager: conda name: libglib optional: false platform: osx-arm64 - url: https://conda.anaconda.org/conda-forge/osx-arm64/libglib-2.84.0-hdff4504_0.conda - version: 2.84.0 + url: https://conda.anaconda.org/conda-forge/osx-arm64/libglib-2.84.2-hbec27ea_0.conda + version: 2.84.2 - category: main dependencies: __glibc: '>=2.17,<3.0.a0' hash: - md5: 06d02030237f4d5b3d9a7e7d348fe3c6 - sha256: 1a3130e0b9267e781b89399580f3163632d59fe5b0142900d63052ab1a53490e + md5: fbe7d535ff9d3a168c148e07358cd5b1 + sha256: 05fff3dc7e80579bc28de13b511baec281c4343d703c406aefd54389959154fb manager: conda name: libgomp optional: false platform: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libgomp-14.2.0-h767d61c_2.conda - version: 14.2.0 + url: https://conda.anaconda.org/conda-forge/linux-64/libgomp-15.1.0-h767d61c_2.conda + version: 15.1.0 - category: main dependencies: __glibc: '>=2.17,<3.0.a0' @@ -7165,66 +7905,69 @@ package: - category: main dependencies: __osx: '>=10.13' - libiconv: '>=1.17,<2.0a0' + libiconv: '>=1.18,<2.0a0' hash: - md5: 4182fe11073548596723d9cd2c23b1ac - sha256: 1bce54e6c76064032129ba138898a5b188d9415c533eb585f89d48b04e00e576 + md5: b3f498d87404090f731cb6a474045150 + sha256: f0a759b35784d5a31aeaf519f8f24019415321e62e52579a3ec854a413a1509d manager: conda name: libintl optional: false platform: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libintl-0.23.1-h27064b9_0.conda - version: 0.23.1 + url: https://conda.anaconda.org/conda-forge/osx-64/libintl-0.24.1-h27064b9_0.conda + version: 0.24.1 - category: main dependencies: __osx: '>=11.0' - libiconv: '>=1.17,<2.0a0' + libiconv: '>=1.18,<2.0a0' hash: - md5: 7b8faf3b5fc52744bda99c4cd1d6438d - sha256: 30d2a8a37070615a61777ce9317968b54c2197d04e9c6c2eea6cdb46e47f94dc + md5: 0dca9914f2722b773c863508723dfe6e + sha256: fb6d211d9e75e6becfbf339d255ea01f7bd3a61fe6237b3dad740de1b74b3b81 manager: conda name: libintl optional: false platform: osx-arm64 - url: https://conda.anaconda.org/conda-forge/osx-arm64/libintl-0.23.1-h493aca8_0.conda - version: 0.23.1 + url: https://conda.anaconda.org/conda-forge/osx-arm64/libintl-0.24.1-h493aca8_0.conda + version: 0.24.1 - category: main dependencies: - libgcc-ng: '>=12' + __glibc: '>=2.17,<3.0.a0' + libgcc: '>=13' hash: - md5: ea25936bb4080d843790b586850f82b8 - sha256: 
b954e09b7e49c2f2433d6f3bb73868eda5e378278b0f8c1dd10a7ef090e14f2f + md5: 9fa334557db9f63da6c9285fd2a48638 + sha256: 98b399287e27768bf79d48faba8a99a2289748c65cd342ca21033fab1860d4a4 manager: conda name: libjpeg-turbo optional: false platform: linux-64 url: - https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-3.0.0-hd590300_1.conda - version: 3.0.0 + https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-3.1.0-hb9d3cd8_0.conda + version: 3.1.0 - category: main - dependencies: {} + dependencies: + __osx: '>=10.13' hash: - md5: 72507f8e3961bc968af17435060b6dd6 - sha256: d9572fd1024adc374aae7c247d0f29fdf4b122f1e3586fe62acc18067f40d02f + md5: 87537967e6de2f885a9fcebd42b7cb10 + sha256: 9c0009389c1439ec96a08e3bf7731ac6f0eab794e0a133096556a9ae10be9c27 manager: conda name: libjpeg-turbo optional: false platform: osx-64 url: - https://conda.anaconda.org/conda-forge/osx-64/libjpeg-turbo-3.0.0-h0dc2134_1.conda - version: 3.0.0 + https://conda.anaconda.org/conda-forge/osx-64/libjpeg-turbo-3.1.0-h6e16a3a_0.conda + version: 3.1.0 - category: main - dependencies: {} + dependencies: + __osx: '>=11.0' hash: - md5: 3ff1e053dc3a2b8e36b9bfa4256a58d1 - sha256: a42054eaa38e84fc1e5ab443facac4bbc9d1b6b6f23f54b7bf4f1eb687e1d993 + md5: 01caa4fbcaf0e6b08b3aef1151e91745 + sha256: 78df2574fa6aa5b6f5fc367c03192f8ddf8e27dc23641468d54e031ff560b9d4 manager: conda name: libjpeg-turbo optional: false platform: osx-arm64 url: - https://conda.anaconda.org/conda-forge/osx-arm64/libjpeg-turbo-3.0.0-hb547adb_1.conda - version: 3.0.0 + https://conda.anaconda.org/conda-forge/osx-arm64/libjpeg-turbo-3.1.0-h5505292_0.conda + version: 3.1.0 - category: main dependencies: libblas: 3.9.0 @@ -7269,46 +8012,49 @@ package: __glibc: '>=2.17,<3.0.a0' libgcc: '>=13' libstdcxx: '>=13' + mbedtls: '>=3.5.1,<3.6.0a0' hash: - md5: 956ddbc5d3b221e8fbd5cb170dd86356 - sha256: a5fba46e8e1439fdcbeb4431f15b22c1001b1882031367afc78601e4a5fe35af + md5: e15f14557342e646e3137257b857f63c + sha256: f33cda942261fbba52d69c8b150a7fe4554520f35e22f3826f87a2154a10430a manager: conda name: liblief optional: false platform: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/liblief-0.14.1-h5888daf_2.conda - version: 0.14.1 + url: https://conda.anaconda.org/conda-forge/linux-64/liblief-0.16.4-h5888daf_0.conda + version: 0.16.4 - category: main dependencies: __osx: '>=10.13' - libcxx: '>=17' + libcxx: '>=18' + mbedtls: '>=3.5.1,<3.6.0a0' hash: - md5: fea38f7bd278c21851f208358dd8e5b4 - sha256: a91a753d4a589e4c920c1a1281b33d668ab03cf964408866acc174a45c184288 + md5: 1f667326a9c55cd8af0802a3dc132381 + sha256: dc3cf7e285f867e1c9662b17f008420f68dad9bcd4ded03ba30b3d641633587f manager: conda name: liblief optional: false platform: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/liblief-0.14.1-hac325c4_2.conda - version: 0.14.1 + url: https://conda.anaconda.org/conda-forge/osx-64/liblief-0.16.4-h240833e_0.conda + version: 0.16.4 - category: main dependencies: __osx: '>=11.0' - libcxx: '>=17' + libcxx: '>=18' + mbedtls: '>=3.5.1,<3.6.0a0' hash: - md5: 9cd24e3468e4c510836f68f453a31df8 - sha256: 0da590030191ce2f52ce315165b88898bd2df5b51374bb33a57722a84521a7f5 + md5: f57b1c43b62c535318812597ec625d05 + sha256: 7f53e6338b5053bba55250eaea947142b63c7204121e0848274f2bfd027390bc manager: conda name: liblief optional: false platform: osx-arm64 - url: https://conda.anaconda.org/conda-forge/osx-arm64/liblief-0.14.1-hf9b8971_2.conda - version: 0.14.1 + url: https://conda.anaconda.org/conda-forge/osx-arm64/liblief-0.16.4-h286801f_0.conda + version: 
0.16.4 - category: main dependencies: __osx: '>=11.0' libcxx: '>=18' - libxml2: '>=2.13.5,<3.0a0' + libxml2: '>=2.13.5,<2.14.0a0' libzlib: '>=1.3.1,<2.0a0' zstd: '>=1.5.6,<1.6.0a0' hash: @@ -7325,201 +8071,198 @@ package: dependencies: __osx: '>=10.13' libcxx: '>=18' - libxml2: '>=2.13.6,<3.0a0' + libxml2: '>=2.13.8,<2.14.0a0' libzlib: '>=1.3.1,<2.0a0' zstd: '>=1.5.7,<1.6.0a0' hash: - md5: 70f34a501b95a2f69590fb5444b5c8ec - sha256: d2c18a2973e04686676d36b42854253cf7b03a0bac364df71ddc238afcaf048e + md5: 0719752afded8768ac26d6ff307e34ca + sha256: 514e2046c86f39eacf82cbd802c17372588d5ddcacc410ebfd589f467df784a9 manager: conda name: libllvm20 optional: false platform: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libllvm20-20.1.1-hc29ff6c_0.conda - version: 20.1.1 + url: https://conda.anaconda.org/conda-forge/osx-64/libllvm20-20.1.5-h29c3a6c_0.conda + version: 20.1.5 - category: main dependencies: __glibc: '>=2.17,<3.0.a0' libgcc: '>=13' hash: - md5: 42d5b6a0f30d3c10cd88cb8584fda1cb - sha256: cad52e10319ca4585bc37f0bc7cce99ec7c15dc9168e42ccb96b741b0a27db3f + md5: a76fd702c93cd2dfd89eff30a5fd45a8 + sha256: eeff241bddc8f1b87567dd6507c9f441f7f472c27f0860a07628260c000ef27c manager: conda name: liblzma optional: false platform: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/liblzma-5.6.4-hb9d3cd8_0.conda - version: 5.6.4 + url: https://conda.anaconda.org/conda-forge/linux-64/liblzma-5.8.1-hb9d3cd8_1.conda + version: 5.8.1 - category: main dependencies: __osx: '>=10.13' hash: - md5: db9d7b0152613f097cdb61ccf9f70ef5 - sha256: a895b5b16468a6ed436f022d72ee52a657f9b58214b91fabfab6230e3592a6dd + md5: f87e8821e0e38a4140a7ed4f52530053 + sha256: 20a4c5291f3e338548013623bb1dc8ee2fba5dbac8f77acaddd730ed2a7d29b6 manager: conda name: liblzma optional: false platform: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/liblzma-5.6.4-hd471939_0.conda - version: 5.6.4 + url: https://conda.anaconda.org/conda-forge/osx-64/liblzma-5.8.1-hd471939_1.conda + version: 5.8.1 - category: main dependencies: __osx: '>=11.0' hash: - md5: e3fd1f8320a100f2b210e690a57cd615 - sha256: 560c59d3834cc652a84fb45531bd335ad06e271b34ebc216e380a89798fe8e2c + md5: 4e8ef3d79c97c9021b34d682c24c2044 + sha256: 5ab62c179229640c34491a7de806ad4ab7bec47ea2b5fc2136e3b8cf5ef26a57 manager: conda name: liblzma optional: false platform: osx-arm64 - url: https://conda.anaconda.org/conda-forge/osx-arm64/liblzma-5.6.4-h39f12f2_0.conda - version: 5.6.4 + url: https://conda.anaconda.org/conda-forge/osx-arm64/liblzma-5.8.1-h39f12f2_1.conda + version: 5.8.1 - category: main dependencies: __glibc: '>=2.17,<3.0.a0' - cpp-expected: '>=1.1.0,<1.2.0a0' + cpp-expected: '>=1.1.0,<1.1.1.0a0' fmt: '>=11.1.4,<11.2.0a0' libarchive: '>=3.7.7,<3.8.0a0' libcurl: '>=8.12.1,<9.0a0' libgcc: '>=13' - libsolv: '>=0.7.30,<0.8.0a0' + libsolv: '>=0.7.32,<0.8.0a0' libstdcxx: '>=13' - nlohmann_json: '>=3.11.3,<3.12.0a0' - openssl: '>=3.4.1,<4.0a0' - reproc: '>=14.2.5.0inf.0,<14.3.0a0' - reproc-cpp: '>=14.2.5.0inf.0,<14.3.0a0' - simdjson: '>=3.12.2,<3.13.0a0' - spdlog: '>=1.15.1,<1.16.0a0' + nlohmann_json: '>=3.11.3,<3.11.4.0a0' + openssl: '>=3.5.0,<4.0a0' + reproc: '>=14.2,<15.0a0' + reproc-cpp: '>=14.2,<15.0a0' + simdjson: '>=3.12.3,<3.13.0a0' yaml-cpp: '>=0.8.0,<0.9.0a0' - zstd: '>=1.5.7,<1.6.0a0' + zstd: '>=1.5.6,<1.6.0a0' hash: - md5: 8cf07c46bcc17b0231a400d381ff0fef - sha256: 7f5ca0cec46335de37585be1043d78a9d56a9967997802338c79927fb1d661d9 + md5: d30e4c7af7ded036a2ebf7ea5178d0f6 + sha256: 
d330ddba91c2e0140e9e43e15855ee3592a84db223c481ff9e49d9217d15f1a7 manager: conda name: libmamba optional: false platform: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libmamba-2.0.8-hf3fef5c_1.conda - version: 2.0.8 + url: https://conda.anaconda.org/conda-forge/linux-64/libmamba-2.1.1-h430c389_0.conda + version: 2.1.1 - category: main dependencies: __osx: '>=10.13' - cpp-expected: '>=1.1.0,<1.2.0a0' + cpp-expected: '>=1.1.0,<1.1.1.0a0' fmt: '>=11.1.4,<11.2.0a0' libarchive: '>=3.7.7,<3.8.0a0' - libcurl: '>=8.12.1,<9.0a0' + libcurl: '>=8.13.0,<9.0a0' libcxx: '>=18' - libsolv: '>=0.7.30,<0.8.0a0' - nlohmann_json: '>=3.11.3,<3.12.0a0' - openssl: '>=3.4.1,<4.0a0' - reproc: '>=14.2.5.0inf.0,<14.3.0a0' - reproc-cpp: '>=14.2.5.0inf.0,<14.3.0a0' - simdjson: '>=3.12.2,<3.13.0a0' - spdlog: '>=1.15.1,<1.16.0a0' + libsolv: '>=0.7.32,<0.8.0a0' + nlohmann_json: '>=3.11.3,<3.11.4.0a0' + openssl: '>=3.5.0,<4.0a0' + reproc: '>=14.2,<15.0a0' + reproc-cpp: '>=14.2,<15.0a0' + simdjson: '>=3.12.3,<3.13.0a0' yaml-cpp: '>=0.8.0,<0.9.0a0' - zstd: '>=1.5.6,<1.6.0a0' + zstd: '>=1.5.7,<1.6.0a0' hash: - md5: 43b88d85961231450e11ab7bc3977e31 - sha256: 838ef8b8a9818b00f491c496701bc5fb0865f7917ffa9d052f895a531840924d + md5: 4720d433fa67a8835e99da9ddb1465d6 + sha256: 1c41f93f4bbcb5f9b790d34b4ebeedf10851cbded9cf441541c122e1285aa5c6 manager: conda name: libmamba optional: false platform: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libmamba-2.0.8-h10bde34_1.conda - version: 2.0.8 + url: https://conda.anaconda.org/conda-forge/osx-64/libmamba-2.1.1-h16a6c4c_0.conda + version: 2.1.1 - category: main dependencies: __osx: '>=11.0' - cpp-expected: '>=1.1.0,<1.2.0a0' + cpp-expected: '>=1.1.0,<1.1.1.0a0' fmt: '>=11.1.4,<11.2.0a0' libarchive: '>=3.7.7,<3.8.0a0' - libcurl: '>=8.12.1,<9.0a0' + libcurl: '>=8.13.0,<9.0a0' libcxx: '>=18' - libsolv: '>=0.7.30,<0.8.0a0' - nlohmann_json: '>=3.11.3,<3.12.0a0' - openssl: '>=3.4.1,<4.0a0' - reproc: '>=14.2.5.0inf.0,<14.3.0a0' - reproc-cpp: '>=14.2.5.0inf.0,<14.3.0a0' - simdjson: '>=3.12.2,<3.13.0a0' - spdlog: '>=1.15.1,<1.16.0a0' + libsolv: '>=0.7.32,<0.8.0a0' + nlohmann_json: '>=3.11.3,<3.11.4.0a0' + openssl: '>=3.5.0,<4.0a0' + reproc: '>=14.2,<15.0a0' + reproc-cpp: '>=14.2,<15.0a0' + simdjson: '>=3.12.3,<3.13.0a0' yaml-cpp: '>=0.8.0,<0.9.0a0' zstd: '>=1.5.7,<1.6.0a0' hash: - md5: 474f25277b8335200b5af5e156e832cb - sha256: 11f01b7d0fdf9ea8e6f948dda75b7c7272f6b5fe7f30d393ea72aeeca1086e7f + md5: b56ffedbf3c18b8c53278fee56ab441c + sha256: 35fdceb8a576425b94e551a941f294e675ecfda6f2fa47482e229011d20daa6f manager: conda name: libmamba optional: false platform: osx-arm64 - url: https://conda.anaconda.org/conda-forge/osx-arm64/libmamba-2.0.8-h41ecc7d_1.conda - version: 2.0.8 + url: https://conda.anaconda.org/conda-forge/osx-arm64/libmamba-2.1.1-h7c3736b_0.conda + version: 2.1.1 - category: main dependencies: __glibc: '>=2.17,<3.0.a0' fmt: '>=11.1.4,<11.2.0a0' libgcc: '>=13' - libmamba: 2.0.8 + libmamba: 2.1.1 libstdcxx: '>=13' - openssl: '>=3.4.1,<4.0a0' + openssl: '>=3.5.0,<4.0a0' pybind11-abi: '4' python: '>=3.11,<3.12.0a0' python_abi: 3.11.* yaml-cpp: '>=0.8.0,<0.9.0a0' - zstd: '>=1.5.7,<1.6.0a0' + zstd: '>=1.5.6,<1.6.0a0' hash: - md5: d70a4f63379519d6e748e76511a07378 - sha256: 36e6a894f1af9ecf0edd8afe135e6dfbe9681aa3fb9b536b77176038263c782b + md5: 75937d2fe48db9f9f509665d367fa328 + sha256: f6b8a1bebf1c0582a341fe6b3f86bfde320371cef1e5e5bd85f21c99c3a6a04a manager: conda name: libmambapy optional: false platform: linux-64 url: - 
https://conda.anaconda.org/conda-forge/linux-64/libmambapy-2.0.8-py311h91f7488_1.conda - version: 2.0.8 + https://conda.anaconda.org/conda-forge/linux-64/libmambapy-2.1.1-py311h952910f_0.conda + version: 2.1.1 - category: main dependencies: __osx: '>=10.13' fmt: '>=11.1.4,<11.2.0a0' libcxx: '>=18' - libmamba: 2.0.8 - openssl: '>=3.4.1,<4.0a0' + libmamba: 2.1.1 + openssl: '>=3.5.0,<4.0a0' pybind11-abi: '4' python: '>=3.11,<3.12.0a0' python_abi: 3.11.* yaml-cpp: '>=0.8.0,<0.9.0a0' - zstd: '>=1.5.6,<1.6.0a0' + zstd: '>=1.5.7,<1.6.0a0' hash: - md5: 71acdb6a564a01047c5df2ed8c33119b - sha256: dac42723075f09793ff1c1bbf81ca89876f3d627bd9f5e964c4a21be3fda5b88 + md5: 3e4abc1a7e91c48ed2bd2cff10b6dfd8 + sha256: 890da0805ea0e0d61bef0b4bcedac413275a4c1bce362e5eab55fc5e7c51c52f manager: conda name: libmambapy optional: false platform: osx-64 url: - https://conda.anaconda.org/conda-forge/osx-64/libmambapy-2.0.8-py311hc8eeebb_1.conda - version: 2.0.8 + https://conda.anaconda.org/conda-forge/osx-64/libmambapy-2.1.1-py311h554b8d6_0.conda + version: 2.1.1 - category: main dependencies: __osx: '>=11.0' fmt: '>=11.1.4,<11.2.0a0' libcxx: '>=18' - libmamba: 2.0.8 - openssl: '>=3.4.1,<4.0a0' + libmamba: 2.1.1 + openssl: '>=3.5.0,<4.0a0' pybind11-abi: '4' python: '>=3.11,<3.12.0a0' python_abi: 3.11.* yaml-cpp: '>=0.8.0,<0.9.0a0' zstd: '>=1.5.7,<1.6.0a0' hash: - md5: 6ea64938669b013ceff5577f56271a1d - sha256: 85ce0fec39978b1e913bff262915e3795bba764673669d928841de37292c0a12 + md5: 72bcb1f86c6e2daf03da10e703c80395 + sha256: 36e6f472bbf2a89ad00fe3612bda0e0244801d809024762635cac53d9ecdb12d manager: conda name: libmambapy optional: false platform: osx-arm64 url: - https://conda.anaconda.org/conda-forge/osx-arm64/libmambapy-2.0.8-py311h4848e19_1.conda - version: 2.0.8 + https://conda.anaconda.org/conda-forge/osx-arm64/libmambapy-2.1.1-py311h3e4fba9_0.conda + version: 2.1.1 - category: main dependencies: __glibc: '>=2.17,<3.0.a0' @@ -7605,7 +8348,7 @@ package: - category: main dependencies: __osx: '>=10.13' - libgfortran: 5.* + libgfortran: '>=5' libgfortran5: '>=13.2.0' llvm-openmp: '>=18.1.8' hash: @@ -7621,7 +8364,7 @@ package: - category: main dependencies: __osx: '>=11.0' - libgfortran: 5.* + libgfortran: '>=5' libgfortran5: '>=13.2.0' llvm-openmp: '>=18.1.8' hash: @@ -7677,57 +8420,106 @@ package: - category: main dependencies: __glibc: '>=2.17,<3.0.a0' - cairo: '>=1.18.2,<2.0a0' - freetype: '>=2.12.1,<3.0a0' + libabseil: '>=20240116.2,<20240117.0a0' + libgcc: '>=13' + libstdcxx: '>=13' + libzlib: '>=1.3.1,<2.0a0' + hash: + md5: 06def97690ef90781a91b786cb48a0a9 + sha256: 8b5e4e31ed93bf36fd14e9cf10cd3af78bb9184d0f1f87878b8d28c0374aa4dc + manager: conda + name: libprotobuf + optional: false + platform: linux-64 + url: + https://conda.anaconda.org/conda-forge/linux-64/libprotobuf-4.25.3-hd5b35b9_1.conda + version: 4.25.3 + - category: main + dependencies: + __osx: '>=10.13' + libabseil: '>=20240116.2,<20240117.0a0' + libcxx: '>=17' + libzlib: '>=1.3.1,<2.0a0' + hash: + md5: 64ad501f0fd74955056169ec9c42c5c0 + sha256: f509cb24a164b84553b28837ec1e8311ceb0212a1dbb8c7fd99ca383d461ea6c + manager: conda + name: libprotobuf + optional: false + platform: osx-64 + url: + https://conda.anaconda.org/conda-forge/osx-64/libprotobuf-4.25.3-hd4aba4c_1.conda + version: 4.25.3 + - category: main + dependencies: + __osx: '>=11.0' + libabseil: '>=20240116.2,<20240117.0a0' + libcxx: '>=17' + libzlib: '>=1.3.1,<2.0a0' + hash: + md5: fa77986d9170450c014586ab87e144f8 + sha256: f51bde2dfe73968ab3090c1098f520b65a8d8f11e945cb13bf74d19e30966b61 + 
manager: conda + name: libprotobuf + optional: false + platform: osx-arm64 + url: + https://conda.anaconda.org/conda-forge/osx-arm64/libprotobuf-4.25.3-hc39d83c_1.conda + version: 4.25.3 + - category: main + dependencies: + __glibc: '>=2.17,<3.0.a0' + cairo: '>=1.18.4,<2.0a0' + freetype: '>=2.13.3,<3.0a0' gdk-pixbuf: '>=2.42.12,<3.0a0' - harfbuzz: '>=10.1.0,<11.0a0' + harfbuzz: '>=11.0.0,<12.0a0' libgcc: '>=13' - libglib: '>=2.82.2,<3.0a0' - libpng: '>=1.6.44,<1.7.0a0' - libxml2: '>=2.13.5,<3.0a0' - pango: '>=1.54.0,<2.0a0' + libglib: '>=2.84.0,<3.0a0' + libpng: '>=1.6.47,<1.7.0a0' + libxml2: '>=2.13.7,<2.14.0a0' + pango: '>=1.56.3,<2.0a0' hash: - md5: b9846db0abffb09847e2cb0fec4b4db6 - sha256: 475013475a3209c24a82f9e80c545d56ccca2fa04df85952852f3d73caa38ff9 + md5: d27665b20bc4d074b86e628b3ba5ab8b + sha256: a45ef03e6e700cc6ac6c375e27904531cf8ade27eb3857e080537ff283fb0507 manager: conda name: librsvg optional: false platform: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.58.4-h49af25d_2.conda + url: https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.58.4-he92a37e_3.conda version: 2.58.4 - category: main dependencies: __osx: '>=10.13' - cairo: '>=1.18.2,<2.0a0' + cairo: '>=1.18.4,<2.0a0' gdk-pixbuf: '>=2.42.12,<3.0a0' - libglib: '>=2.82.2,<3.0a0' - libxml2: '>=2.13.5,<3.0a0' - pango: '>=1.54.0,<2.0a0' + libglib: '>=2.84.0,<3.0a0' + libxml2: '>=2.13.7,<2.14.0a0' + pango: '>=1.56.3,<2.0a0' hash: - md5: 0aa68f5a6ebfd2254daae40170439f03 - sha256: 482cde0a3828935edc31c529e15c2686425f64b07a7e52551b6ed672360f2a15 + md5: 213dcdb373bf108d1beb18d33075f51d + sha256: 87432fca28ddfaaf82b3cd12ce4e31fcd963428d1f2c5e2a3aef35dd30e56b71 manager: conda name: librsvg optional: false platform: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/librsvg-2.58.4-h21a6cfa_2.conda + url: https://conda.anaconda.org/conda-forge/osx-64/librsvg-2.58.4-h21a6cfa_3.conda version: 2.58.4 - category: main dependencies: __osx: '>=11.0' - cairo: '>=1.18.2,<2.0a0' + cairo: '>=1.18.4,<2.0a0' gdk-pixbuf: '>=2.42.12,<3.0a0' - libglib: '>=2.82.2,<3.0a0' - libxml2: '>=2.13.5,<3.0a0' - pango: '>=1.54.0,<2.0a0' + libglib: '>=2.84.0,<3.0a0' + libxml2: '>=2.13.7,<2.14.0a0' + pango: '>=1.56.3,<2.0a0' hash: - md5: 82c31ce77bac095b5700b1fdaad9a628 - sha256: c1ef2c5855166001967952d7525aa2f29707214495c74c2bbb60e691aee45ef0 + md5: 95d6ad8fb7a2542679c08ce52fafbb6c + sha256: 0ec066d7f22bcd9acb6ca48b2e6a15e9be4f94e67cb55b0a2c05a37ac13f9315 manager: conda name: librsvg optional: false platform: osx-arm64 - url: https://conda.anaconda.org/conda-forge/osx-arm64/librsvg-2.58.4-h266df6f_2.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/librsvg-2.58.4-h266df6f_3.conda version: 2.58.4 - category: main dependencies: @@ -7770,217 +8562,217 @@ package: - category: main dependencies: __glibc: '>=2.17,<3.0.a0' - libgcc-ng: '>=12' - libstdcxx-ng: '>=12' + libgcc: '>=13' + libstdcxx: '>=13' libzlib: '>=1.3.1,<2.0a0' hash: - md5: 02539b77d25aa4f65b20246549e256c3 - sha256: 1dddbde791efdfc34c8fefa74dc2f910eac9cf87bf37ee6c3c9132eb96a0e7d4 + md5: 5339dae097a6e1ab631adb3cb04b8f7e + sha256: d11a45b603dc40d080f46983d828f0afb9f8f37cf376b774da84c3ada72ea9e1 manager: conda name: libsolv optional: false platform: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libsolv-0.7.30-h3509ff9_0.conda - version: 0.7.30 + url: https://conda.anaconda.org/conda-forge/linux-64/libsolv-0.7.32-h7955e40_2.conda + version: 0.7.32 - category: main dependencies: __osx: '>=10.13' - libcxx: '>=16' + libcxx: '>=18' 
libzlib: '>=1.3.1,<2.0a0' hash: - md5: 8f8fd9f1740c8cb7dcfebf1a1ed7e678 - sha256: d0c8a8a448dc8b01aecc023b8e6a26f8cdd03f04263ca0a282a057d636b47b3c + md5: 3d2ff9a3e063bde2e2e44c44093beb91 + sha256: 7a6cd5550866d337f1ffdb604f8f131ef6a06699b4f98ed1b516db39e1bc828a manager: conda name: libsolv optional: false platform: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libsolv-0.7.30-h69d5d9b_0.conda - version: 0.7.30 + url: https://conda.anaconda.org/conda-forge/osx-64/libsolv-0.7.32-h04d1b7c_2.conda + version: 0.7.32 - category: main dependencies: __osx: '>=11.0' - libcxx: '>=16' + libcxx: '>=18' libzlib: '>=1.3.1,<2.0a0' hash: - md5: a5795a7ca73c9c99f112abce7864b500 - sha256: e5ffda8a71a334edff7af4f194aa6c72df2f0763321250270f9f68dfc8eaf439 + md5: ef92ebefa0615aadd988c8c35aebcd68 + sha256: 3bb59671c96d8122a371f73fc7fc95013ebcb0d7761cdf09056717f7801f3211 manager: conda name: libsolv optional: false platform: osx-arm64 - url: https://conda.anaconda.org/conda-forge/osx-arm64/libsolv-0.7.30-h6c9b7f8_0.conda - version: 0.7.30 + url: https://conda.anaconda.org/conda-forge/osx-arm64/libsolv-0.7.32-h13dfb9a_2.conda + version: 0.7.32 - category: main dependencies: __glibc: '>=2.17,<3.0.a0' libgcc: '>=13' libzlib: '>=1.3.1,<2.0a0' hash: - md5: 962d6ac93c30b1dfc54c9cccafd1003e - sha256: a086289bf75c33adc1daed3f1422024504ffb5c3c8b3285c49f025c29708ed16 + md5: 93048463501053a00739215ea3f36324 + sha256: 525d4a0e24843f90b3ff1ed733f0a2e408aa6dd18b9d4f15465595e078e104a2 manager: conda name: libsqlite optional: false platform: linux-64 url: - https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.49.1-hee588c1_2.conda - version: 3.49.1 + https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.49.2-hee588c1_0.conda + version: 3.49.2 - category: main dependencies: __osx: '>=10.13' libzlib: '>=1.3.1,<2.0a0' hash: - md5: 1819e770584a7e83a81541d8253cbabe - sha256: 82695c9b16a702de615c8303387384c6ec5cf8b98e16458e5b1935b950e4ec38 + md5: 9377ba1ade655ea3fc831b456f4a2351 + sha256: 8fd9562478b4d1dc90ab2bcad5289ee2b5a971ca8ad87e6b137ce0ca53bf801d manager: conda name: libsqlite optional: false platform: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libsqlite-3.49.1-hdb6dae5_2.conda - version: 3.49.1 + url: https://conda.anaconda.org/conda-forge/osx-64/libsqlite-3.49.2-hdb6dae5_0.conda + version: 3.49.2 - category: main dependencies: __osx: '>=11.0' libzlib: '>=1.3.1,<2.0a0' hash: - md5: 3b1e330d775170ac46dff9a94c253bd0 - sha256: 907a95f73623c343fc14785cbfefcb7a6b4f2bcf9294fcb295c121611c3a590d + md5: 85f443033cd5b3df82b5cabf79bddb09 + sha256: d89f979497cf56eccb099b6ab9558da7bba1f1ba264f50af554e0ea293d9dcf9 manager: conda name: libsqlite optional: false platform: osx-arm64 url: - https://conda.anaconda.org/conda-forge/osx-arm64/libsqlite-3.49.1-h3f77e49_2.conda - version: 3.49.1 + https://conda.anaconda.org/conda-forge/osx-arm64/libsqlite-3.49.2-h3f77e49_0.conda + version: 3.49.2 - category: main dependencies: __glibc: '>=2.17,<3.0.a0' libgcc: '>=13' libzlib: '>=1.3.1,<2.0a0' - openssl: '>=3.4.0,<4.0a0' + openssl: '>=3.5.0,<4.0a0' hash: - md5: be2de152d8073ef1c01b7728475f2fe7 - sha256: 0407ac9fda2bb67e11e357066eff144c845801d00b5f664efbc48813af1e7bb9 + md5: eecce068c7e4eddeb169591baac20ac4 + sha256: fa39bfd69228a13e553bd24601332b7cfeb30ca11a3ca50bb028108fe90a7661 manager: conda name: libssh2 optional: false platform: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.1-hf672d98_0.conda + url: 
https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.1-hcf80075_0.conda version: 1.11.1 - category: main dependencies: __osx: '>=10.13' libzlib: '>=1.3.1,<2.0a0' - openssl: '>=3.4.0,<4.0a0' + openssl: '>=3.5.0,<4.0a0' hash: - md5: b1caec4561059e43a5d056684c5a2de0 - sha256: ef2a81c9a15080b996a37f0e1712881da90a710b234e63d8539d69892353de90 + md5: a6cb15db1c2dc4d3a5f6cf3772e09e81 + sha256: 00654ba9e5f73aa1f75c1f69db34a19029e970a4aeb0fa8615934d8e9c369c3c manager: conda name: libssh2 optional: false platform: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libssh2-1.11.1-h3dc7d44_0.conda + url: https://conda.anaconda.org/conda-forge/osx-64/libssh2-1.11.1-hed3591d_0.conda version: 1.11.1 - category: main dependencies: libzlib: '>=1.3.1,<2.0a0' - openssl: '>=3.4.0,<4.0a0' + openssl: '>=3.5.0,<4.0a0' hash: - md5: ddc7194676c285513706e5fc64f214d7 - sha256: f7047c6ed44bcaeb04432e8c74da87591940d091b0a3940c0d884b7faa8062e9 + md5: b68e8f66b94b44aaa8de4583d3d4cc40 + sha256: 8bfe837221390ffc6f111ecca24fa12d4a6325da0c8d131333d63d6c37f27e0a manager: conda name: libssh2 optional: false platform: osx-arm64 - url: https://conda.anaconda.org/conda-forge/osx-arm64/libssh2-1.11.1-h9cc3647_0.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/libssh2-1.11.1-h1590b86_0.conda version: 1.11.1 - category: main dependencies: __glibc: '>=2.17,<3.0.a0' - libgcc: 14.2.0 + libgcc: 15.1.0 hash: - md5: a78c856b6dc6bf4ea8daeb9beaaa3fb0 - sha256: 8f5bd92e4a24e1d35ba015c5252e8f818898478cb3bc50bd8b12ab54707dc4da + md5: 1cb1c67961f6dd257eae9e9691b341aa + sha256: 6ae3d153e78f6069d503d9309f2cac6de5b93d067fc6433160a4c05226a5dad4 manager: conda name: libstdcxx optional: false platform: linux-64 url: - https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-14.2.0-h8f9b012_2.conda - version: 14.2.0 + https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-15.1.0-h8f9b012_2.conda + version: 15.1.0 - category: main dependencies: - libstdcxx: 14.2.0 + libstdcxx: 15.1.0 hash: - md5: c75da67f045c2627f59e6fcb5f4e3a9b - sha256: e86f38b007cf97cc2c67cd519f2de12a313c4ee3f5ef11652ad08932a5e34189 + md5: 9d2072af184b5caa29492bf2344597bb + sha256: 11bea86e11de7d6bce87589197a383344df3fa0a3552dab7e931785ff1159a5b manager: conda name: libstdcxx-ng optional: false platform: linux-64 url: - https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-14.2.0-h4852527_2.conda - version: 14.2.0 + https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-15.1.0-h4852527_2.conda + version: 15.1.0 - category: main dependencies: __glibc: '>=2.17,<3.0.a0' lerc: '>=4.0.0,<5.0a0' - libdeflate: '>=1.23,<1.24.0a0' + libdeflate: '>=1.24,<1.25.0a0' libgcc: '>=13' - libjpeg-turbo: '>=3.0.0,<4.0a0' - liblzma: '>=5.6.3,<6.0a0' + libjpeg-turbo: '>=3.1.0,<4.0a0' + liblzma: '>=5.8.1,<6.0a0' libstdcxx: '>=13' - libwebp-base: '>=1.4.0,<2.0a0' + libwebp-base: '>=1.5.0,<2.0a0' libzlib: '>=1.3.1,<2.0a0' - zstd: '>=1.5.6,<1.6.0a0' + zstd: '>=1.5.7,<1.6.0a0' hash: - md5: 0ea6510969e1296cc19966fad481f6de - sha256: b224e16b88d76ea95e4af56e2bc638c603bd26a770b98d117d04541d3aafa002 + md5: e79a094918988bb1807462cd42c83962 + sha256: 7fa6ddac72e0d803bb08e55090a8f2e71769f1eb7adbd5711bdd7789561601b1 manager: conda name: libtiff optional: false platform: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.7.0-hd9ff511_3.conda + url: https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.7.0-hf01ce69_5.conda version: 4.7.0 - category: main dependencies: __osx: '>=10.13' lerc: '>=4.0.0,<5.0a0' libcxx: '>=18' - libdeflate: 
'>=1.23,<1.24.0a0' - libjpeg-turbo: '>=3.0.0,<4.0a0' - liblzma: '>=5.6.3,<6.0a0' - libwebp-base: '>=1.4.0,<2.0a0' + libdeflate: '>=1.24,<1.25.0a0' + libjpeg-turbo: '>=3.1.0,<4.0a0' + liblzma: '>=5.8.1,<6.0a0' + libwebp-base: '>=1.5.0,<2.0a0' libzlib: '>=1.3.1,<2.0a0' - zstd: '>=1.5.6,<1.6.0a0' + zstd: '>=1.5.7,<1.6.0a0' hash: - md5: 6f2f9df7b093d6b33bc0c334acc7d2d9 - sha256: bb50df7cfc1acb11eae63c5f4fdc251d381cda96bf02c086c3202c83a5200032 + md5: fc84af14a09e779f1d37ab1d16d5c4e2 + sha256: 517a34be9fc697aaf930218f6727a2eff7c38ee57b3b41fd7d1cc0d72aaac562 manager: conda name: libtiff optional: false platform: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libtiff-4.7.0-hb77a491_3.conda + url: https://conda.anaconda.org/conda-forge/osx-64/libtiff-4.7.0-h1167cee_5.conda version: 4.7.0 - category: main dependencies: __osx: '>=11.0' lerc: '>=4.0.0,<5.0a0' libcxx: '>=18' - libdeflate: '>=1.23,<1.24.0a0' - libjpeg-turbo: '>=3.0.0,<4.0a0' - liblzma: '>=5.6.3,<6.0a0' - libwebp-base: '>=1.4.0,<2.0a0' + libdeflate: '>=1.24,<1.25.0a0' + libjpeg-turbo: '>=3.1.0,<4.0a0' + liblzma: '>=5.8.1,<6.0a0' + libwebp-base: '>=1.5.0,<2.0a0' libzlib: '>=1.3.1,<2.0a0' - zstd: '>=1.5.6,<1.6.0a0' + zstd: '>=1.5.7,<1.6.0a0' hash: - md5: a5d084a957563e614ec0c0196d890654 - sha256: 91417846157e04992801438a496b151df89604b2e7c6775d6f701fcd0cbed5ae + md5: 4eb183bbf7f734f69875702fdbe17ea0 + sha256: cc5ee1cffb8a8afb25a4bfd08fce97c5447f97aa7064a055cb4a617df45bc848 manager: conda name: libtiff optional: false platform: osx-arm64 - url: https://conda.anaconda.org/conda-forge/osx-arm64/libtiff-4.7.0-h551f018_3.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/libtiff-4.7.0-h2f21f7c_5.conda version: 4.7.0 - category: main dependencies: @@ -8031,6 +8823,43 @@ package: platform: linux-64 url: https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.38.1-h0b41bf4_0.conda version: 2.38.1 + - category: main + dependencies: + __glibc: '>=2.17,<3.0.a0' + libgcc: '>=13' + hash: + md5: 771ee65e13bc599b0b62af5359d80169 + sha256: b4a8890023902aef9f1f33e3e35603ad9c2f16c21fdb58e968fa6c1bd3e94c0b + manager: conda + name: libuv + optional: false + platform: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/libuv-1.50.0-hb9d3cd8_0.conda + version: 1.50.0 + - category: main + dependencies: + __osx: '>=11.0' + hash: + md5: c86c7473f79a3c06de468b923416aa23 + sha256: ec9da0a005c668c0964e0a6546c21416bab608569b5863edbdf135cee26e67d8 + manager: conda + name: libuv + optional: false + platform: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/libuv-1.50.0-h4cb831e_0.conda + version: 1.50.0 + - category: main + dependencies: + __osx: '>=11.0' + hash: + md5: 20717343fb30798ab7c23c2e92b748c1 + sha256: d13fb49d4c8262bf2c44ffb2c77bb2b5d0f85fc6de76bdb75208efeccb29fce6 + manager: conda + name: libuv + optional: false + platform: osx-arm64 + url: https://conda.anaconda.org/conda-forge/osx-arm64/libuv-1.50.0-h5505292_0.conda + version: 1.50.0 - category: main dependencies: __glibc: '>=2.17,<3.0.a0' @@ -8106,68 +8935,68 @@ package: libgcc: '>=13' libstdcxx: '>=13' libxcb: '>=1.17.0,<2.0a0' - libxml2: '>=2.13.6,<3.0a0' + libxml2: '>=2.13.8,<2.14.0a0' xkeyboard-config: '' xorg-libxau: '>=1.0.12,<2.0a0' hash: - md5: e7e5b0652227d646b44abdcbd989da7b - sha256: 61a282353fcc512b5643ee58898130f5c7f8757c329a21fe407a3ef397d449eb + md5: fedf6bfe5d21d21d2b1785ec00a8889a + sha256: a8043a46157511b3ceb6573a99952b5c0232313283f2d6a066cec7c8dcaed7d0 manager: conda name: libxkbcommon optional: false platform: linux-64 url: - 
https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.8.1-hc4a0caf_0.conda - version: 1.8.1 + https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.10.0-h65c71a3_0.conda + version: 1.10.0 - category: main dependencies: __glibc: '>=2.17,<3.0.a0' icu: '>=75.1,<76.0a0' libgcc: '>=13' libiconv: '>=1.18,<2.0a0' - liblzma: '>=5.6.4,<6.0a0' + liblzma: '>=5.8.1,<6.0a0' libzlib: '>=1.3.1,<2.0a0' hash: - md5: 328382c0e0ca648e5c189d5ec336c604 - sha256: db8af71ea9c0ae95b7cb4a0f59319522ed2243942437a1200ceb391493018d85 + md5: 14dbe05b929e329dbaa6f2d0aa19466d + sha256: b0b3a96791fa8bb4ec030295e8c8bf2d3278f33c0f9ad540e73b5e538e6268e7 manager: conda name: libxml2 optional: false platform: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.13.6-h8d12d68_0.conda - version: 2.13.6 + url: https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.13.8-h4bc477f_0.conda + version: 2.13.8 - category: main dependencies: __osx: '>=10.13' icu: '>=75.1,<76.0a0' libiconv: '>=1.18,<2.0a0' - liblzma: '>=5.6.4,<6.0a0' + liblzma: '>=5.8.1,<6.0a0' libzlib: '>=1.3.1,<2.0a0' hash: - md5: f27851d50ccddf3c3234dd0efc78fdbd - sha256: 3962cce8158ce6ebb9239fe58bbc1ce49b0ac4997827e932e70dd6e4ab335c40 + md5: e42a93a31cbc6826620144343d42f472 + sha256: 4b29663164d7beb9a9066ddcb8578fc67fe0e9b40f7553ea6255cd6619d24205 manager: conda name: libxml2 optional: false platform: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/libxml2-2.13.6-hebb159f_0.conda - version: 2.13.6 + url: https://conda.anaconda.org/conda-forge/osx-64/libxml2-2.13.8-h93c44a6_0.conda + version: 2.13.8 - category: main dependencies: __osx: '>=11.0' icu: '>=75.1,<76.0a0' libiconv: '>=1.18,<2.0a0' - liblzma: '>=5.6.4,<6.0a0' + liblzma: '>=5.8.1,<6.0a0' libzlib: '>=1.3.1,<2.0a0' hash: - md5: 277864577d514bea4b30f8a9335b8d26 - sha256: 1d2ebce1a16db1017e3892a67cb7ced4aa2858f549dba6852a60d02a4925c205 + md5: d7884c7af8af5a729353374c189aede8 + sha256: 13eb825eddce93761d965da3edaf3a42d868c61ece7d9cf21f7e2a13087c2abe manager: conda name: libxml2 optional: false platform: osx-arm64 - url: https://conda.anaconda.org/conda-forge/osx-arm64/libxml2-2.13.6-h178c5d8_0.conda - version: 2.13.6 + url: https://conda.anaconda.org/conda-forge/osx-arm64/libxml2-2.13.8-h52572c6_0.conda + version: 2.13.8 - category: main dependencies: __glibc: '>=2.17,<3.0.a0' @@ -8251,47 +9080,47 @@ package: dependencies: __osx: '>=10.13' hash: - md5: a1c6289fb8ae152b8cb53a535639c2c7 - sha256: 2aeb63d771120fc7a8129ca81417c07cea09e3a0f47e097f1967a9c24888f5cf + md5: 7b6a67507141ea93541943f0c011a872 + sha256: f858ef4cbc7f449da06e7e5cf62d6db0f8269e4e723144be35b0ef3531e28591 manager: conda name: llvm-openmp optional: false platform: osx-64 url: - https://conda.anaconda.org/conda-forge/osx-64/llvm-openmp-20.1.1-ha54dae1_1.conda - version: 20.1.1 + https://conda.anaconda.org/conda-forge/osx-64/llvm-openmp-20.1.5-ha54dae1_0.conda + version: 20.1.5 - category: main dependencies: __osx: '>=11.0' hash: - md5: 97236e94c3a82367c5fe3a90557e6207 - sha256: ae57041a588cd190cb55b602c1ed0ef3604ce28d3891515386a85693edd3c175 + md5: 66771cb733ad80bd46b66f856601001a + sha256: 3515d520338a334c987ce2737dfba1ebd66eb1e360582c7511738ad3dc8a9145 manager: conda name: llvm-openmp optional: false platform: osx-arm64 url: - https://conda.anaconda.org/conda-forge/osx-arm64/llvm-openmp-20.1.1-hdb05f8b_1.conda - version: 20.1.1 + https://conda.anaconda.org/conda-forge/osx-arm64/llvm-openmp-20.1.5-hdb05f8b_0.conda + version: 20.1.5 - category: main dependencies: __osx: '>=10.13' - libllvm20: 
20.1.1 - llvm-tools-20: 20.1.1 + libllvm20: 20.1.5 + llvm-tools-20: 20.1.5 hash: - md5: 301d0c707a81cc5c32f2070f055798f4 - sha256: fb701fc99fc9cc066ec9203fb97619dc5d99dc84b6c2ab29d514746ec11e8163 + md5: 6f15676844a983f5a6f677c3d93aadb1 + sha256: ef91dc5787771eaf5a5dfd9b535f796ba1263e277ce48337fce648b32e7f2ccd manager: conda name: llvm-tools optional: false platform: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/llvm-tools-20.1.1-h3fe3016_0.conda - version: 20.1.1 + url: https://conda.anaconda.org/conda-forge/osx-64/llvm-tools-20.1.5-h3fe3016_0.conda + version: 20.1.5 - category: main dependencies: __osx: '>=11.0' libllvm18: 18.1.8 - libxml2: '>=2.13.5,<3.0a0' + libxml2: '>=2.13.5,<2.14.0a0' libzlib: '>=1.3.1,<2.0a0' llvm-tools-18: 18.1.8 zstd: '>=1.5.6,<1.6.0a0' @@ -8309,7 +9138,7 @@ package: dependencies: __osx: '>=11.0' libllvm18: 18.1.8 - libxml2: '>=2.13.5,<3.0a0' + libxml2: '>=2.13.5,<2.14.0a0' libzlib: '>=1.3.1,<2.0a0' zstd: '>=1.5.6,<1.6.0a0' hash: @@ -8326,19 +9155,19 @@ package: dependencies: __osx: '>=10.13' libcxx: '>=18' - libllvm20: 20.1.1 + libllvm20: 20.1.5 libzlib: '>=1.3.1,<2.0a0' zstd: '>=1.5.7,<1.6.0a0' hash: - md5: fb646f9afa47817183dd174415c516b6 - sha256: b9dfbc6cc45593cb50bc4af9d0864f704d0587cc12986d9af5b575f80c4df7c1 + md5: b89d79094387e9bfabfd86014236762a + sha256: 9913de3c5356a71e68a5e82a72874ab523691344f51b967d0c2d012621d7cb09 manager: conda name: llvm-tools-20 optional: false platform: osx-64 url: - https://conda.anaconda.org/conda-forge/osx-64/llvm-tools-20-20.1.1-he90a8e3_0.conda - version: 20.1.1 + https://conda.anaconda.org/conda-forge/osx-64/llvm-tools-20-20.1.5-he90a8e3_0.conda + version: 20.1.5 - category: main dependencies: python: '>=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*' @@ -8488,53 +9317,53 @@ package: - category: main dependencies: __glibc: '>=2.17,<3.0.a0' - libmamba: 2.0.8 + libmamba: 2.1.1 reproc: '>=14.2,<15.0a0' reproc-cpp: '>=14.2,<15.0a0' - zstd: '>=1.5.7,<1.6.0a0' + zstd: '>=1.5.6,<1.6.0a0' hash: - md5: d04a132d6a34133d73f88c7c761e1249 - sha256: f2cc0d1b019a9c6ad5f244b9848cafa4147953e8a4884beb0e5b4a07d4bf2463 + md5: a42dba86ec002c01ccf9e7b6e005bfc1 + sha256: 2146a0dd88021492b07f754d7f7521c5e9434e2ff1397a3c247562181e7fcd54 manager: conda name: mamba optional: false platform: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/mamba-2.0.8-h3f3603c_1.conda - version: 2.0.8 + url: https://conda.anaconda.org/conda-forge/linux-64/mamba-2.1.1-had4a41a_0.conda + version: 2.1.1 - category: main dependencies: __osx: '>=10.13' libcxx: '>=18' - libmamba: 2.0.8 + libmamba: 2.1.1 reproc: '>=14.2,<15.0a0' reproc-cpp: '>=14.2,<15.0a0' - zstd: '>=1.5.6,<1.6.0a0' + zstd: '>=1.5.7,<1.6.0a0' hash: - md5: 095c11533b15930a48a69899e37d133f - sha256: 42baaf503cc6b3fe73a9f83362c5be78f84d4727b73e12f98563dc2f8184b9d7 + md5: 8348aa35d363e706c842d616e65e693a + sha256: 4eee26b161e81e0fbf9262ff5369ae7710083d4e4e28eb07c6bfb3c3d28b10f0 manager: conda name: mamba optional: false platform: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/mamba-2.0.8-h88cbde9_1.conda - version: 2.0.8 + url: https://conda.anaconda.org/conda-forge/osx-64/mamba-2.1.1-h19c8969_0.conda + version: 2.1.1 - category: main dependencies: __osx: '>=11.0' libcxx: '>=18' - libmamba: 2.0.8 + libmamba: 2.1.1 reproc: '>=14.2,<15.0a0' reproc-cpp: '>=14.2,<15.0a0' zstd: '>=1.5.7,<1.6.0a0' hash: - md5: ac5a0ae12b3f84f66018d218e5b3e0f7 - sha256: ed59bee890f1d26591bac1462d8f153c05673b463ddf6e45338431c1872e2316 + md5: ef829dc43050c9a61e65b8103d56ff10 + sha256: 
365ba94ffc4a9a22c821c14eb37af10868339f4db96ba6763081f22a5eb1c9a8 manager: conda name: mamba optional: false platform: osx-arm64 - url: https://conda.anaconda.org/conda-forge/osx-arm64/mamba-2.0.8-h46bbf7f_1.conda - version: 2.0.8 + url: https://conda.anaconda.org/conda-forge/osx-arm64/mamba-2.1.1-h105ca85_0.conda + version: 2.1.1 - category: main dependencies: mdurl: '>=0.1,<1' @@ -8623,6 +9452,46 @@ package: url: https://conda.anaconda.org/conda-forge/osx-arm64/markupsafe-3.0.2-py311h4921393_1.conda version: 3.0.2 + - category: main + dependencies: + __glibc: '>=2.17,<3.0.a0' + libgcc: '>=13' + libstdcxx: '>=13' + hash: + md5: 9fc429740cfb418a3b3ca107ba7b5e7d + sha256: ad17e2bf8fafed05a4c2f57577b30e611323bfe5046c5966bb0ea41edbed3808 + manager: conda + name: mbedtls + optional: false + platform: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/mbedtls-3.5.1-h5888daf_1.conda + version: 3.5.1 + - category: main + dependencies: + __osx: '>=10.13' + libcxx: '>=18' + hash: + md5: 1b6943cf07d3b150ba155e7004da9c81 + sha256: 18760c990017dd9091981cc9740c6161a72d544e9eecb94ea3c9d9592a9f2425 + manager: conda + name: mbedtls + optional: false + platform: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/mbedtls-3.5.1-h240833e_1.conda + version: 3.5.1 + - category: main + dependencies: + __osx: '>=11.0' + libcxx: '>=18' + hash: + md5: 1971b31c091c167f7d1bcb1b3beb9644 + sha256: 1ddc82a1f4c076f6521250bc7e9d8854157fb2f18a0a6ef2186aa4042ab593b9 + manager: conda + name: mbedtls + optional: false + platform: osx-arm64 + url: https://conda.anaconda.org/conda-forge/osx-arm64/mbedtls-3.5.1-h286801f_1.conda + version: 3.5.1 - category: main dependencies: python: '>=3.9' @@ -8701,45 +9570,157 @@ package: url: https://conda.anaconda.org/conda-forge/osx-arm64/menuinst-2.2.0-py311h267d04e_0.conda version: 2.2.0 + - category: main + dependencies: + asgiref: '>=3.2.10,<3.8' + brotli-python: '>=1.0,<1.1' + certifi: '>=2019.9.11' + cryptography: '>=38,<41.1' + flask: '>=1.1.1,<2.4' + h11: '>=0.11,<0.15' + h2: '>=4.1,<5' + hyperframe: '>=6.0,<7' + kaitaistruct: '>=0.10,<0.11' + ldap3: '>=2.8,<2.10' + libgcc-ng: '>=12' + msgpack-python: '>=1.0.0,<1.1.0' + passlib: '>=1.6.5,<1.8' + protobuf: '>=3.14,<5' + publicsuffix2: '>=2.20190812,<3' + pylsqpack: '>=0.3.3,<0.4.0' + pyopenssl: '>=22.1,<23.2' + pyparsing: '>=2.4.2,<3.2' + pyperclip: '>=1.6.0,<1.9' + python: '>=3.11,<3.12.0a0' + python_abi: 3.11.* + ruamel.yaml: '>=0.16,<0.18' + sortedcontainers: '>=2.3,<2.5' + tornado: '>=6.2,<7' + wsproto: '>=1.0,<1.3' + zstandard: '>=0.11,<0.22' + hash: + md5: 286dfce5e12ff1d688b43bb7e11fb6bb + sha256: f199c24c7e77788191d66a8ff09eb3f31896f6c6dc1185dfe2fac6f43d94b859 + manager: conda + name: mitmproxy + optional: false + platform: linux-64 + url: + https://conda.anaconda.org/conda-forge/linux-64/mitmproxy-10.1.0-py311h46250e7_1.conda + version: 10.1.0 + - category: main + dependencies: + asgiref: '>=3.2.10,<3.8' + brotli-python: '>=1.0,<1.1' + certifi: '>=2019.9.11' + cryptography: '>=38,<41.1' + flask: '>=1.1.1,<2.4' + h11: '>=0.11,<0.15' + h2: '>=4.1,<5' + hyperframe: '>=6.0,<7' + kaitaistruct: '>=0.10,<0.11' + ldap3: '>=2.8,<2.10' + msgpack-python: '>=1.0.0,<1.1.0' + passlib: '>=1.6.5,<1.8' + protobuf: '>=3.14,<5' + publicsuffix2: '>=2.20190812,<3' + pylsqpack: '>=0.3.3,<0.4.0' + pyopenssl: '>=22.1,<23.2' + pyparsing: '>=2.4.2,<3.2' + pyperclip: '>=1.6.0,<1.9' + python: '>=3.11,<3.12.0a0' + python_abi: 3.11.* + ruamel.yaml: '>=0.16,<0.18' + sortedcontainers: '>=2.3,<2.5' + tornado: '>=6.2,<7' + wsproto: 
'>=1.0,<1.3' + zstandard: '>=0.11,<0.22' + hash: + md5: a7d334de4fad6050bfe8898c69d98111 + sha256: cdf2b6bd3543f3b3ef466eb1b0b5b1488c482cddbd34c112b0d1b6a46b263e99 + manager: conda + name: mitmproxy + optional: false + platform: osx-64 + url: + https://conda.anaconda.org/conda-forge/osx-64/mitmproxy-10.1.0-py311h5e0f0e4_0.conda + version: 10.1.0 + - category: main + dependencies: + asgiref: '>=3.2.10,<3.8' + brotli-python: '>=1.0,<1.1' + certifi: '>=2019.9.11' + cryptography: '>=38,<41.1' + flask: '>=1.1.1,<2.4' + h11: '>=0.11,<0.15' + h2: '>=4.1,<5' + hyperframe: '>=6.0,<7' + kaitaistruct: '>=0.10,<0.11' + ldap3: '>=2.8,<2.10' + msgpack-python: '>=1.0.0,<1.1.0' + passlib: '>=1.6.5,<1.8' + protobuf: '>=3.14,<5' + publicsuffix2: '>=2.20190812,<3' + pylsqpack: '>=0.3.3,<0.4.0' + pyopenssl: '>=22.1,<23.2' + pyparsing: '>=2.4.2,<3.2' + pyperclip: '>=1.6.0,<1.9' + python: '>=3.11,<3.12.0a0' + python_abi: 3.11.* + ruamel.yaml: '>=0.16,<0.18' + sortedcontainers: '>=2.3,<2.5' + tornado: '>=6.2,<7' + wsproto: '>=1.0,<1.3' + zstandard: '>=0.11,<0.22' + hash: + md5: 0059bf2757092ddcd33e9da3343f8816 + sha256: 0c54d7ca50d4b46c937ceb67fa8702d7cd249b4789d30af590a134f9094336bf + manager: conda + name: mitmproxy + optional: false + platform: osx-arm64 + url: + https://conda.anaconda.org/conda-forge/osx-arm64/mitmproxy-10.1.0-py311h94f323b_0.conda + version: 10.1.0 - category: main dependencies: python: '>=3.9' hash: - md5: 9b1225d67235df5411dbd2c94a5876b7 - sha256: e017ede184823b12a194d058924ca26e1129975cee1cae47f69d6115c0478b55 + md5: 7c65a443d58beb0518c35b26c70e201d + sha256: d0c2253dcb1da6c235797b57d29de688dabc2e48cc49645b1cff2b52b7907428 manager: conda name: more-itertools optional: false platform: linux-64 url: - https://conda.anaconda.org/conda-forge/noarch/more-itertools-10.6.0-pyhd8ed1ab_0.conda - version: 10.6.0 + https://conda.anaconda.org/conda-forge/noarch/more-itertools-10.7.0-pyhd8ed1ab_0.conda + version: 10.7.0 - category: main dependencies: python: '>=3.9' hash: - md5: 9b1225d67235df5411dbd2c94a5876b7 - sha256: e017ede184823b12a194d058924ca26e1129975cee1cae47f69d6115c0478b55 + md5: 7c65a443d58beb0518c35b26c70e201d + sha256: d0c2253dcb1da6c235797b57d29de688dabc2e48cc49645b1cff2b52b7907428 manager: conda name: more-itertools optional: false platform: osx-64 url: - https://conda.anaconda.org/conda-forge/noarch/more-itertools-10.6.0-pyhd8ed1ab_0.conda - version: 10.6.0 + https://conda.anaconda.org/conda-forge/noarch/more-itertools-10.7.0-pyhd8ed1ab_0.conda + version: 10.7.0 - category: main dependencies: python: '>=3.9' hash: - md5: 9b1225d67235df5411dbd2c94a5876b7 - sha256: e017ede184823b12a194d058924ca26e1129975cee1cae47f69d6115c0478b55 + md5: 7c65a443d58beb0518c35b26c70e201d + sha256: d0c2253dcb1da6c235797b57d29de688dabc2e48cc49645b1cff2b52b7907428 manager: conda name: more-itertools optional: false platform: osx-arm64 url: - https://conda.anaconda.org/conda-forge/noarch/more-itertools-10.6.0-pyhd8ed1ab_0.conda - version: 10.6.0 + https://conda.anaconda.org/conda-forge/noarch/more-itertools-10.7.0-pyhd8ed1ab_0.conda + version: 10.7.0 - category: main dependencies: __glibc: '>=2.17,<3.0.a0' @@ -8748,15 +9729,15 @@ package: python: '>=3.11,<3.12.0a0' python_abi: 3.11.* hash: - md5: 682f76920687f7d9283039eb542fdacf - sha256: 9033fa7084cbfd10e1b7ed3b74cee17169a0731ec98244d05c372fc4a935d5c9 + md5: b7f5e94f0a10f39bea5ded40b9adb73c + sha256: f74a33bd12a538e0aa1f0289e4263e0a3d18b125985dc08dac28250e3b197f5d manager: conda name: msgpack-python optional: false platform: linux-64 url: - 
https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.1.0-py311hd18a35c_0.conda - version: 1.1.0 + https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.0.8-py311hd18a35c_1.conda + version: 1.0.8 - category: main dependencies: __osx: '>=10.13' @@ -8764,15 +9745,15 @@ package: python: '>=3.11,<3.12.0a0' python_abi: 3.11.* hash: - md5: 6804cd42195bf94efd1b892688c96412 - sha256: b56b1e7d156b88cc0c62734acf56d4ee809723614f659e4203028e7eeac16a78 + md5: d354e6628d55888eb84a036eed8f369d + sha256: 4358505cef3440617baf48ac379a3b31a327793fb1dd9a4086872b3553fa9cbd manager: conda name: msgpack-python optional: false platform: osx-64 url: - https://conda.anaconda.org/conda-forge/osx-64/msgpack-python-1.1.0-py311hf2f7c97_0.conda - version: 1.1.0 + https://conda.anaconda.org/conda-forge/osx-64/msgpack-python-1.0.8-py311hf2f7c97_1.conda + version: 1.0.8 - category: main dependencies: __osx: '>=11.0' @@ -8780,15 +9761,15 @@ package: python: '>=3.11,<3.12.0a0' python_abi: 3.11.* hash: - md5: 6c826762702474fb0def6cedd2db5316 - sha256: aafa8572c72283801148845772fd9d494765bdcf1b8ae6f435e1caff4f1c97f3 + md5: ce5a4c818a8ac6bc6ebff30f3d2814eb + sha256: 17b3cff3dff93eb16b889698ffd2edb28f3a0fbf94e1d6629531bd3a01b575ae manager: conda name: msgpack-python optional: false platform: osx-arm64 url: - https://conda.anaconda.org/conda-forge/osx-arm64/msgpack-python-1.1.0-py311h2c37856_0.conda - version: 1.1.0 + https://conda.anaconda.org/conda-forge/osx-arm64/msgpack-python-1.0.8-py311h2c37856_1.conda + version: 1.0.8 - category: main dependencies: certifi: '>=2017.4.17' @@ -9003,15 +9984,15 @@ package: python: '>=3.11,<3.12.0a0' python_abi: 3.11.* hash: - md5: 16ad2b996ea8064e0a7cb8b392d924fd - sha256: 4ff5f5ab2e0205d712fdc8b2950a2a4b2a063c47d0c9b08f7ea71ae246e47ac1 + md5: babce4d9841ebfcee64249d98eb4e0d4 + sha256: f28273a72d25f4d7d62a9ba031d5271082afc498121bd0f6783d72b4103dbbc7 manager: conda name: numpy optional: false platform: linux-64 url: - https://conda.anaconda.org/conda-forge/linux-64/numpy-2.2.4-py311h5d046bc_0.conda - version: 2.2.4 + https://conda.anaconda.org/conda-forge/linux-64/numpy-2.2.6-py311h5d046bc_0.conda + version: 2.2.6 - category: main dependencies: __osx: '>=10.13' @@ -9022,14 +10003,14 @@ package: python: '>=3.11,<3.12.0a0' python_abi: 3.11.* hash: - md5: 8cc792914f85f8a0f52eb010e1bc2841 - sha256: 9a6a463e5dc101a5bd80e1684a3d51b2f12cc6fd3dd353fb8b976826b72c5171 + md5: 8e850d1284fd8a90aeb4b5195a0116f3 + sha256: bcb2c6fd701f3591fd4cd04580ec62ad88622c09671139a98d82ca80e2ae365f manager: conda name: numpy optional: false platform: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/numpy-2.2.4-py311h27c81cd_0.conda - version: 2.2.4 + url: https://conda.anaconda.org/conda-forge/osx-64/numpy-2.2.6-py311h27c81cd_0.conda + version: 2.2.6 - category: main dependencies: __osx: '>=11.0' @@ -9040,15 +10021,15 @@ package: python: '>=3.11,<3.12.0a0' python_abi: 3.11.* hash: - md5: 602a97eb615fcf5c7d94da0282a35bb5 - sha256: 87c8b96560398a4f39dab87dee3c4aab3e7296744302dab2915c223094c0159d + md5: 9446d2629b529e92769dfb34c7c194bb + sha256: c6cd42960418a2bd60cfbc293f08d85076f7d8aacf7a94f516195381241d4d93 manager: conda name: numpy optional: false platform: osx-arm64 url: - https://conda.anaconda.org/conda-forge/osx-arm64/numpy-2.2.4-py311h762c074_0.conda - version: 2.2.4 + https://conda.anaconda.org/conda-forge/osx-arm64/numpy-2.2.6-py311h762c074_0.conda + version: 2.2.6 - category: main dependencies: blinker: '' @@ -9100,40 +10081,40 @@ package: ca-certificates: '' libgcc: '>=13' hash: 
- md5: 41adf927e746dc75ecf0ef841c454e48 - sha256: cbf62df3c79a5c2d113247ddea5658e9ff3697b6e741c210656e239ecaf1768f + md5: de356753cfdbffcde5bb1e86e3aa6cd0 + sha256: b4491077c494dbf0b5eaa6d87738c22f2154e9277e5293175ec187634bd808a0 manager: conda name: openssl optional: false platform: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/openssl-3.4.1-h7b32b05_0.conda - version: 3.4.1 + url: https://conda.anaconda.org/conda-forge/linux-64/openssl-3.5.0-h7b32b05_1.conda + version: 3.5.0 - category: main dependencies: __osx: '>=10.13' ca-certificates: '' hash: - md5: a7d63f8e7ab23f71327ea6d27e2d5eae - sha256: 505a46671dab5d66df8e684f99a9ae735a607816b12810b572d63caa512224df + md5: 919faa07b9647beb99a0e7404596a465 + sha256: bcac94cb82a458b4e3164da8d9bced08cc8c3da2bc3bd7330711a3689c1464a5 manager: conda name: openssl optional: false platform: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/openssl-3.4.1-hc426f3f_0.conda - version: 3.4.1 + url: https://conda.anaconda.org/conda-forge/osx-64/openssl-3.5.0-hc426f3f_1.conda + version: 3.5.0 - category: main dependencies: __osx: '>=11.0' ca-certificates: '' hash: - md5: 75f9f0c7b1740017e2db83a53ab9a28e - sha256: 4f8e2389e1b711b44182a075516d02c80fa7a3a7e25a71ff1b5ace9eae57a17a + md5: 5c7aef00ef60738a14e0e612cfc5bcde + sha256: 73d366c1597a10bcd5f3604b5f0734b31c23225536e03782c6a13f9be9d01bff manager: conda name: openssl optional: false platform: osx-arm64 - url: https://conda.anaconda.org/conda-forge/osx-arm64/openssl-3.4.1-h81ee809_0.conda - version: 3.4.1 + url: https://conda.anaconda.org/conda-forge/osx-arm64/openssl-3.5.0-h81ee809_1.conda + version: 3.5.0 - category: main dependencies: jsonschema: '' @@ -9180,84 +10161,84 @@ package: dependencies: __glibc: '>=2.17,<3.0.a0' libgcc: '>=13' - python: '>=3.11,<3.12.0a0' + python: '' python_abi: 3.11.* hash: - md5: a0b45a3ae748e3cce6409cb12664bf2b - sha256: 20ecfaf61224ea92dadb543ee5ca3757e4feff4524c9d6633de908d11f4c5cd4 + md5: 23fd932a9f391aaa6f4db0797f3c6b05 + sha256: 32b1ef80ec8e92820a0c9c61fd327c3426806a7ab7d2928f98c6566b3f4fa609 manager: conda name: orjson optional: false platform: linux-64 url: - https://conda.anaconda.org/conda-forge/linux-64/orjson-3.10.16-py311h9e33e62_0.conda - version: 3.10.16 + https://conda.anaconda.org/conda-forge/linux-64/orjson-3.10.18-py311hdae7d1d_1.conda + version: 3.10.18 - category: main dependencies: __osx: '>=10.13' - python: '>=3.11,<3.12.0a0' + python: '' python_abi: 3.11.* hash: - md5: cb8506609560dbdc5dfc31be0a11f3b5 - sha256: 02b79f1ab9a745a542fd9fa361890ea1b99764fb91a1637d7a64e5bd9c699c96 + md5: 29d1e4a4747de09fc608b8b20221a63a + sha256: 7950969c0df7f19bdc0c0f1d84da15257a270c1b1804829bda51897d70ac070a manager: conda name: orjson optional: false platform: osx-64 url: - https://conda.anaconda.org/conda-forge/osx-64/orjson-3.10.16-py311h3b9c2be_0.conda - version: 3.10.16 + https://conda.anaconda.org/conda-forge/osx-64/orjson-3.10.18-py311hab9d7c2_1.conda + version: 3.10.18 - category: main dependencies: __osx: '>=11.0' - python: '>=3.11,<3.12.0a0' + python: 3.11.* python_abi: 3.11.* hash: - md5: 8d735e647d8e06b1e33ecd296070de34 - sha256: 2f9f6d2541a903ce91f5af8bf31b39d678d7e569c802e73f7c2e2ca91eb763f2 + md5: 11832289b6a104f1ec72937afaadc784 + sha256: 443e23a42020377dcd37c88b73b167e012018c32ef57f9ee4fa3643d5344f7e2 manager: conda name: orjson optional: false platform: osx-arm64 url: - https://conda.anaconda.org/conda-forge/osx-arm64/orjson-3.10.16-py311h3ff9189_0.conda - version: 3.10.16 + 
https://conda.anaconda.org/conda-forge/osx-arm64/orjson-3.10.18-py311hc9d6b66_1.conda + version: 3.10.18 - category: main dependencies: - python: '>=3.8' + python: '' hash: - md5: 3bfed7e6228ebf2f7b9eaa47f1b4e2aa - sha256: da157b19bcd398b9804c5c52fc000fcb8ab0525bdb9c70f95beaa0bb42f85af1 + md5: 58335b26c38bf4a20f399384c33cbcf9 + sha256: 289861ed0c13a15d7bbb408796af4de72c2fe67e2bcb0de98f4c3fce259d7991 manager: conda name: packaging optional: false platform: linux-64 - url: https://conda.anaconda.org/conda-forge/noarch/packaging-24.2-pyhd8ed1ab_2.conda - version: '24.2' + url: https://conda.anaconda.org/conda-forge/noarch/packaging-25.0-pyh29332c3_1.conda + version: '25.0' - category: main dependencies: python: '>=3.8' hash: - md5: 3bfed7e6228ebf2f7b9eaa47f1b4e2aa - sha256: da157b19bcd398b9804c5c52fc000fcb8ab0525bdb9c70f95beaa0bb42f85af1 + md5: 58335b26c38bf4a20f399384c33cbcf9 + sha256: 289861ed0c13a15d7bbb408796af4de72c2fe67e2bcb0de98f4c3fce259d7991 manager: conda name: packaging optional: false platform: osx-64 - url: https://conda.anaconda.org/conda-forge/noarch/packaging-24.2-pyhd8ed1ab_2.conda - version: '24.2' + url: https://conda.anaconda.org/conda-forge/noarch/packaging-25.0-pyh29332c3_1.conda + version: '25.0' - category: main dependencies: python: '>=3.8' hash: - md5: 3bfed7e6228ebf2f7b9eaa47f1b4e2aa - sha256: da157b19bcd398b9804c5c52fc000fcb8ab0525bdb9c70f95beaa0bb42f85af1 + md5: 58335b26c38bf4a20f399384c33cbcf9 + sha256: 289861ed0c13a15d7bbb408796af4de72c2fe67e2bcb0de98f4c3fce259d7991 manager: conda name: packaging optional: false platform: osx-arm64 - url: https://conda.anaconda.org/conda-forge/noarch/packaging-24.2-pyhd8ed1ab_2.conda - version: '24.2' + url: https://conda.anaconda.org/conda-forge/noarch/packaging-25.0-pyh29332c3_1.conda + version: '25.0' - category: main dependencies: __glibc: '>=2.17,<3.0.a0' @@ -9266,20 +10247,20 @@ package: fonts-conda-ecosystem: '' freetype: '>=2.13.3,<3.0a0' fribidi: '>=1.0.10,<2.0a0' - harfbuzz: '>=10.4.0,<11.0a0' + harfbuzz: '>=11.0.0,<12.0a0' libexpat: '>=2.6.4,<3.0a0' libgcc: '>=13' - libglib: '>=2.82.2,<3.0a0' + libglib: '>=2.84.0,<3.0a0' libpng: '>=1.6.47,<1.7.0a0' libzlib: '>=1.3.1,<2.0a0' hash: - md5: 6d853ca33bc46bce99ce16ccd83d0466 - sha256: 6bc073dc2759cb00bc9e94c7142acab58432245c6e04d1cef179e8afd3b58d6f + md5: 21899b96828014270bd24fd266096612 + sha256: 9c00bbc8871b9ce00d1a1f0c1a64f76c032cf16a56a28984b9bb59e46af3932d manager: conda name: pango optional: false platform: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/pango-1.56.3-h861ebed_0.conda + url: https://conda.anaconda.org/conda-forge/linux-64/pango-1.56.3-h9ac818e_1.conda version: 1.56.3 - category: main dependencies: @@ -9289,19 +10270,19 @@ package: fonts-conda-ecosystem: '' freetype: '>=2.13.3,<3.0a0' fribidi: '>=1.0.10,<2.0a0' - harfbuzz: '>=10.4.0,<11.0a0' + harfbuzz: '>=11.0.0,<12.0a0' libexpat: '>=2.6.4,<3.0a0' - libglib: '>=2.82.2,<3.0a0' + libglib: '>=2.84.0,<3.0a0' libpng: '>=1.6.47,<1.7.0a0' libzlib: '>=1.3.1,<2.0a0' hash: - md5: 97db8d75606bc6923a0f5cc7fb9bbff3 - sha256: db1de2f9f004b8b7d743dbd073728a9b119955eded54d81b23f4de30fddaee1a + md5: 17bcc6d5206e8a1a13cc478a777d79e5 + sha256: ff2cc0b201ce1b68a9f38c1dc71dbd26f70eef103089ae4ee26b7e80d336f0ab manager: conda name: pango optional: false platform: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/pango-1.56.3-hf94f63b_0.conda + url: https://conda.anaconda.org/conda-forge/osx-64/pango-1.56.3-hae8941d_1.conda version: 1.56.3 - category: main dependencies: @@ -9311,19 +10292,19 @@ 
package: fonts-conda-ecosystem: '' freetype: '>=2.13.3,<3.0a0' fribidi: '>=1.0.10,<2.0a0' - harfbuzz: '>=10.4.0,<11.0a0' + harfbuzz: '>=11.0.0,<12.0a0' libexpat: '>=2.6.4,<3.0a0' - libglib: '>=2.82.2,<3.0a0' + libglib: '>=2.84.0,<3.0a0' libpng: '>=1.6.47,<1.7.0a0' libzlib: '>=1.3.1,<2.0a0' hash: - md5: aef5caa7ce0af969bfab789900982918 - sha256: dac976052589a8a0778db1c1b6f279da200572ed9f265f3a6b7a1f217af4a5f0 + md5: 2e5cef90f7d355790fa96f2459ee648f + sha256: 76e3843f37878629e744ec75d5f3acfc54a7bb23f9970139f4040f93209ef574 manager: conda name: pango optional: false platform: osx-arm64 - url: https://conda.anaconda.org/conda-forge/osx-arm64/pango-1.56.3-h73f1e88_0.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/pango-1.56.3-h5fd7515_1.conda version: 1.56.3 - category: main dependencies: @@ -9369,40 +10350,49 @@ package: version: 1.4.2 - category: main dependencies: - python: '>=2.7' + argon2-cffi: '>=19.2.0' + bcrypt: '>=3.1.0' + cryptography: '' + python: '>=3.9' hash: - md5: a4eea5bff523f26442405bc5d1f52adb - sha256: 9153f0f38c76a09da7688a61fdbf8f3d7504e2326bef53e4ec20d994311b15bd + md5: fba64c154edb7d7935af0d46d97ff536 + sha256: 2adfe01cdab93c39c4d8dfe3de74a31ae6fded21213f26925208ce6053cea93d manager: conda - name: pastel + name: passlib optional: false platform: linux-64 - url: https://conda.anaconda.org/conda-forge/noarch/pastel-0.2.1-pyhd8ed1ab_0.tar.bz2 - version: 0.2.1 + url: https://conda.anaconda.org/conda-forge/noarch/passlib-1.7.4-pyhd8ed1ab_2.conda + version: 1.7.4 - category: main dependencies: - python: '>=2.7' + argon2-cffi: '>=19.2.0' + bcrypt: '>=3.1.0' + cryptography: '' + python: '>=3.9' hash: - md5: a4eea5bff523f26442405bc5d1f52adb - sha256: 9153f0f38c76a09da7688a61fdbf8f3d7504e2326bef53e4ec20d994311b15bd + md5: fba64c154edb7d7935af0d46d97ff536 + sha256: 2adfe01cdab93c39c4d8dfe3de74a31ae6fded21213f26925208ce6053cea93d manager: conda - name: pastel + name: passlib optional: false platform: osx-64 - url: https://conda.anaconda.org/conda-forge/noarch/pastel-0.2.1-pyhd8ed1ab_0.tar.bz2 - version: 0.2.1 + url: https://conda.anaconda.org/conda-forge/noarch/passlib-1.7.4-pyhd8ed1ab_2.conda + version: 1.7.4 - category: main dependencies: - python: '>=2.7' + argon2-cffi: '>=19.2.0' + bcrypt: '>=3.1.0' + cryptography: '' + python: '>=3.9' hash: - md5: a4eea5bff523f26442405bc5d1f52adb - sha256: 9153f0f38c76a09da7688a61fdbf8f3d7504e2326bef53e4ec20d994311b15bd + md5: fba64c154edb7d7935af0d46d97ff536 + sha256: 2adfe01cdab93c39c4d8dfe3de74a31ae6fded21213f26925208ce6053cea93d manager: conda - name: pastel + name: passlib optional: false platform: osx-arm64 - url: https://conda.anaconda.org/conda-forge/noarch/pastel-0.2.1-pyhd8ed1ab_0.tar.bz2 - version: 0.2.1 + url: https://conda.anaconda.org/conda-forge/noarch/passlib-1.7.4-pyhd8ed1ab_2.conda + version: 1.7.4 - category: main dependencies: libgcc-ng: '>=9.3.0' @@ -9492,45 +10482,45 @@ package: dependencies: __glibc: '>=2.17,<3.0.a0' bzip2: '>=1.0.8,<2.0a0' - libgcc-ng: '>=12' + libgcc: '>=13' libzlib: '>=1.3.1,<2.0a0' hash: - md5: df359c09c41cd186fffb93a2d87aa6f5 - sha256: 1087716b399dab91cc9511d6499036ccdc53eb29a288bebcb19cf465c51d7c0d + md5: b90bece58b4c2bf25969b70f3be42d25 + sha256: 27c4014f616326240dcce17b5f3baca3953b6bc5f245ceb49c3fa1e6320571eb manager: conda name: pcre2 optional: false platform: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.44-hba22ea6_2.conda - version: '10.44' + url: https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.45-hc749103_0.conda + version: '10.45' - category: 
main dependencies: __osx: '>=10.13' bzip2: '>=1.0.8,<2.0a0' libzlib: '>=1.3.1,<2.0a0' hash: - md5: 58cde0663f487778bcd7a0c8daf50293 - sha256: 336057fce69d45e1059f138beb38d60eb87ba858c3ad729ed49d9ecafd23669f + md5: d9f1976154f2f45588251dcfc48bcdda + sha256: 5b2c93ee8714c17682cd926127f1e712efef00441a79732635a80b24f5adc212 manager: conda name: pcre2 optional: false platform: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/pcre2-10.44-h7634a1b_2.conda - version: '10.44' + url: https://conda.anaconda.org/conda-forge/osx-64/pcre2-10.45-hf733adb_0.conda + version: '10.45' - category: main dependencies: __osx: '>=11.0' bzip2: '>=1.0.8,<2.0a0' libzlib: '>=1.3.1,<2.0a0' hash: - md5: 147c83e5e44780c7492998acbacddf52 - sha256: 83153c7d8fd99cab33c92ce820aa7bfed0f1c94fc57010cf227b6e3c50cb7796 + md5: a52385b93558d8e6bbaeec5d61a21cd7 + sha256: e9ecb706b58b5a2047c077b3a1470e8554f3aad02e9c3c00cfa35d537420fea3 manager: conda name: pcre2 optional: false platform: osx-arm64 - url: https://conda.anaconda.org/conda-forge/osx-arm64/pcre2-10.44-h297a79d_2.conda - version: '10.44' + url: https://conda.anaconda.org/conda-forge/osx-arm64/pcre2-10.45-ha881caa_0.conda + version: '10.45' - category: main dependencies: libgcc-ng: '>=12' @@ -9613,82 +10603,82 @@ package: setuptools: '' wheel: '' hash: - md5: 79b5c1440aedc5010f687048d9103628 - sha256: 585940f09d87787f79f73ff5dff8eb2af8a67e5bec5eebf2f553cd26c840ba69 + md5: 32d0781ace05105cc99af55d36cbec7c + sha256: ebfa591d39092b111b9ebb3210eb42251be6da89e26c823ee03e5e838655a43e manager: conda name: pip optional: false platform: linux-64 - url: https://conda.anaconda.org/conda-forge/noarch/pip-25.0.1-pyh8b19718_0.conda - version: 25.0.1 + url: https://conda.anaconda.org/conda-forge/noarch/pip-25.1.1-pyh8b19718_0.conda + version: 25.1.1 - category: main dependencies: python: '>=3.9,<3.13.0a0' setuptools: '' wheel: '' hash: - md5: 79b5c1440aedc5010f687048d9103628 - sha256: 585940f09d87787f79f73ff5dff8eb2af8a67e5bec5eebf2f553cd26c840ba69 + md5: 32d0781ace05105cc99af55d36cbec7c + sha256: ebfa591d39092b111b9ebb3210eb42251be6da89e26c823ee03e5e838655a43e manager: conda name: pip optional: false platform: osx-64 - url: https://conda.anaconda.org/conda-forge/noarch/pip-25.0.1-pyh8b19718_0.conda - version: 25.0.1 + url: https://conda.anaconda.org/conda-forge/noarch/pip-25.1.1-pyh8b19718_0.conda + version: 25.1.1 - category: main dependencies: python: '>=3.9,<3.13.0a0' setuptools: '' wheel: '' hash: - md5: 79b5c1440aedc5010f687048d9103628 - sha256: 585940f09d87787f79f73ff5dff8eb2af8a67e5bec5eebf2f553cd26c840ba69 + md5: 32d0781ace05105cc99af55d36cbec7c + sha256: ebfa591d39092b111b9ebb3210eb42251be6da89e26c823ee03e5e838655a43e manager: conda name: pip optional: false platform: osx-arm64 - url: https://conda.anaconda.org/conda-forge/noarch/pip-25.0.1-pyh8b19718_0.conda - version: 25.0.1 + url: https://conda.anaconda.org/conda-forge/noarch/pip-25.1.1-pyh8b19718_0.conda + version: 25.1.1 - category: main dependencies: __glibc: '>=2.17,<3.0.a0' libgcc: '>=13' libstdcxx: '>=13' hash: - md5: 5e2a7acfa2c24188af39e7944e1b3604 - sha256: 747c58db800d5583fee78e76240bf89cbaeedf7ab1ef339c2990602332b9c4be + md5: d2f1c87d4416d1e7344cf92b1aaee1c4 + sha256: 1330c3fd424fa2deec6a30678f235049c0ed1b0fad8d2d81ef995c9322d5e49a manager: conda name: pixman optional: false platform: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/pixman-0.44.2-h29eaf8c_0.conda - version: 0.44.2 + url: https://conda.anaconda.org/conda-forge/linux-64/pixman-0.46.0-h29eaf8c_0.conda + version: 0.46.0 - 
category: main dependencies: __osx: '>=10.13' libcxx: '>=18' hash: - md5: 9d3ed4c1a6e21051bf4ce53851acdc96 - sha256: 7e5a9823e7e759355b954037f97d4aa53c26db1d73408571e749f8375b363743 + md5: 808d70603573b87f3427b61501fa376d + sha256: 4d8184a8d453e8218017ed2fe024496b6ccf5ba05b994d3a60a8871022ec7a76 manager: conda name: pixman optional: false platform: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/pixman-0.44.2-h1fd1274_0.conda - version: 0.44.2 + url: https://conda.anaconda.org/conda-forge/osx-64/pixman-0.46.0-h1fd1274_0.conda + version: 0.46.0 - category: main dependencies: __osx: '>=11.0' libcxx: '>=18' hash: - md5: fa8e429fdb9e5b757281f69b8cc4330b - sha256: 28855d4cb2d9fc9a6bd9196dadbaecd6868ec706394cec2f88824a61ba4b1bc0 + md5: d098a1cca9d588cd4d258d06a08a454e + sha256: ed22ffec308e798d50066286e5b184c64bb47a3787840883249377ae4e6d684b manager: conda name: pixman optional: false platform: osx-arm64 - url: https://conda.anaconda.org/conda-forge/osx-arm64/pixman-0.44.2-h2f9eb0b_0.conda - version: 0.44.2 + url: https://conda.anaconda.org/conda-forge/osx-arm64/pixman-0.46.0-h2f9eb0b_0.conda + version: 0.46.0 - category: main dependencies: python: '>=3.9' @@ -9771,77 +10761,77 @@ package: dependencies: python: '' hash: - md5: e57da6fe54bb3a5556cf36d199ff07d8 - sha256: ae7d3e58224d53d6b59e1f5ac5809803bb1972f0ac4fb10cd9b8c87d4122d3e0 + md5: 424844562f5d337077b445ec6b1398a7 + sha256: 0f48999a28019c329cd3f6fd2f01f09fc32cc832f7d6bbe38087ddac858feaa3 manager: conda name: platformdirs optional: false platform: linux-64 url: - https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.3.7-pyh29332c3_0.conda - version: 4.3.7 + https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.3.8-pyhe01879c_0.conda + version: 4.3.8 - category: main dependencies: python: '>=3.9' hash: - md5: e57da6fe54bb3a5556cf36d199ff07d8 - sha256: ae7d3e58224d53d6b59e1f5ac5809803bb1972f0ac4fb10cd9b8c87d4122d3e0 + md5: 424844562f5d337077b445ec6b1398a7 + sha256: 0f48999a28019c329cd3f6fd2f01f09fc32cc832f7d6bbe38087ddac858feaa3 manager: conda name: platformdirs optional: false platform: osx-64 url: - https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.3.7-pyh29332c3_0.conda - version: 4.3.7 + https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.3.8-pyhe01879c_0.conda + version: 4.3.8 - category: main dependencies: python: '>=3.9' hash: - md5: e57da6fe54bb3a5556cf36d199ff07d8 - sha256: ae7d3e58224d53d6b59e1f5ac5809803bb1972f0ac4fb10cd9b8c87d4122d3e0 + md5: 424844562f5d337077b445ec6b1398a7 + sha256: 0f48999a28019c329cd3f6fd2f01f09fc32cc832f7d6bbe38087ddac858feaa3 manager: conda name: platformdirs optional: false platform: osx-arm64 url: - https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.3.7-pyh29332c3_0.conda - version: 4.3.7 + https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.3.8-pyhe01879c_0.conda + version: 4.3.8 - category: main dependencies: python: '>=3.9' hash: - md5: e9dcbce5f45f9ee500e728ae58b605b6 - sha256: 122433fc5318816b8c69283aaf267c73d87aa2d09ce39f64c9805c9a3b264819 + md5: 7da7ccd349dbf6487a7778579d2bb971 + sha256: a8eb555eef5063bbb7ba06a379fa7ea714f57d9741fe0efdb9442dbbc2cccbcc manager: conda name: pluggy optional: false platform: linux-64 - url: https://conda.anaconda.org/conda-forge/noarch/pluggy-1.5.0-pyhd8ed1ab_1.conda - version: 1.5.0 + url: https://conda.anaconda.org/conda-forge/noarch/pluggy-1.6.0-pyhd8ed1ab_0.conda + version: 1.6.0 - category: main dependencies: python: '>=3.9' hash: - md5: e9dcbce5f45f9ee500e728ae58b605b6 - sha256: 
122433fc5318816b8c69283aaf267c73d87aa2d09ce39f64c9805c9a3b264819 + md5: 7da7ccd349dbf6487a7778579d2bb971 + sha256: a8eb555eef5063bbb7ba06a379fa7ea714f57d9741fe0efdb9442dbbc2cccbcc manager: conda name: pluggy optional: false platform: osx-64 - url: https://conda.anaconda.org/conda-forge/noarch/pluggy-1.5.0-pyhd8ed1ab_1.conda - version: 1.5.0 + url: https://conda.anaconda.org/conda-forge/noarch/pluggy-1.6.0-pyhd8ed1ab_0.conda + version: 1.6.0 - category: main dependencies: python: '>=3.9' hash: - md5: e9dcbce5f45f9ee500e728ae58b605b6 - sha256: 122433fc5318816b8c69283aaf267c73d87aa2d09ce39f64c9805c9a3b264819 + md5: 7da7ccd349dbf6487a7778579d2bb971 + sha256: a8eb555eef5063bbb7ba06a379fa7ea714f57d9741fe0efdb9442dbbc2cccbcc manager: conda name: pluggy optional: false platform: osx-arm64 - url: https://conda.anaconda.org/conda-forge/noarch/pluggy-1.5.0-pyhd8ed1ab_1.conda - version: 1.5.0 + url: https://conda.anaconda.org/conda-forge/noarch/pluggy-1.6.0-pyhd8ed1ab_0.conda + version: 1.6.0 - category: main dependencies: cfgv: '>=2.0.0' @@ -9941,54 +10931,112 @@ package: - category: main dependencies: __glibc: '>=2.17,<3.0.a0' + libabseil: '>=20240116.2,<20240117.0a0' libgcc: '>=13' + libprotobuf: '>=4.25.3,<4.25.4.0a0' + libstdcxx: '>=13' python: '>=3.11,<3.12.0a0' python_abi: 3.11.* + setuptools: '' hash: - md5: 1a390a54b2752169f5ba4ada5a8108e4 - sha256: 50d0944b59a9c6dfa6b99cc2632bf8bc9bef9c7c93710390ded6eac953f0182d + md5: 27089f71e28d01bcc070460d822d5acb + sha256: 3e06dcdd3ec2e73fb456d5c2fdf9c8829d7f70c15d724f9920a24276a0a1d6b5 manager: conda - name: psutil + name: protobuf optional: false platform: linux-64 url: - https://conda.anaconda.org/conda-forge/linux-64/psutil-7.0.0-py311h9ecbd09_0.conda - version: 7.0.0 + https://conda.anaconda.org/conda-forge/linux-64/protobuf-4.25.3-py311hbffca5d_1.conda + version: 4.25.3 - category: main dependencies: __osx: '>=10.13' + libabseil: '>=20240116.2,<20240117.0a0' + libcxx: '>=17' + libprotobuf: '>=4.25.3,<4.25.4.0a0' python: '>=3.11,<3.12.0a0' python_abi: 3.11.* + setuptools: '' hash: - md5: 7b5cdf63ced6576ead40a82ea0616322 - sha256: e290563f61f810f745b32d4c1ebe4ec87827323134f6bee2e8cc894391cbc548 + md5: 56584ed8b577c2e07f5c57a2bd3f5912 + sha256: d33640b917b5f2b0184142ece692a764de840b7f2fa44d211fd56c1170c57e7b manager: conda - name: psutil + name: protobuf optional: false platform: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/psutil-7.0.0-py311h4d7f069_0.conda - version: 7.0.0 + url: + https://conda.anaconda.org/conda-forge/osx-64/protobuf-4.25.3-py311h6b31176_1.conda + version: 4.25.3 - category: main dependencies: __osx: '>=11.0' + libabseil: '>=20240116.2,<20240117.0a0' + libcxx: '>=17' + libprotobuf: '>=4.25.3,<4.25.4.0a0' python: '>=3.11,<3.12.0a0' python_abi: 3.11.* + setuptools: '' hash: - md5: 12f8d65fb5a6bd03aedd5ac74391f1ea - sha256: 3ea107f769b3ac99411f6bd6d86f946566ba3983894cbeb0e43439934a90c2f5 + md5: dacdcae7ce1a0d2f10351fb7b406bf7e + sha256: 4c7221018c88b9979fd25f97369d4635dee16fc42dd6a9079362edf97eaa5a48 manager: conda - name: psutil + name: protobuf optional: false platform: osx-arm64 url: - https://conda.anaconda.org/conda-forge/osx-arm64/psutil-7.0.0-py311h917b07b_0.conda - version: 7.0.0 + https://conda.anaconda.org/conda-forge/osx-arm64/protobuf-4.25.3-py311hd7a3543_1.conda + version: 4.25.3 - category: main dependencies: __glibc: '>=2.17,<3.0.a0' libgcc: '>=13' + python: '>=3.11,<3.12.0a0' + python_abi: 3.11.* hash: - md5: b3c17d95b5a10c6e64a21fa17573e70e + md5: 1a390a54b2752169f5ba4ada5a8108e4 + sha256: 
50d0944b59a9c6dfa6b99cc2632bf8bc9bef9c7c93710390ded6eac953f0182d + manager: conda + name: psutil + optional: false + platform: linux-64 + url: + https://conda.anaconda.org/conda-forge/linux-64/psutil-7.0.0-py311h9ecbd09_0.conda + version: 7.0.0 + - category: main + dependencies: + __osx: '>=10.13' + python: '>=3.11,<3.12.0a0' + python_abi: 3.11.* + hash: + md5: 7b5cdf63ced6576ead40a82ea0616322 + sha256: e290563f61f810f745b32d4c1ebe4ec87827323134f6bee2e8cc894391cbc548 + manager: conda + name: psutil + optional: false + platform: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/psutil-7.0.0-py311h4d7f069_0.conda + version: 7.0.0 + - category: main + dependencies: + __osx: '>=11.0' + python: '>=3.11,<3.12.0a0' + python_abi: 3.11.* + hash: + md5: 12f8d65fb5a6bd03aedd5ac74391f1ea + sha256: 3ea107f769b3ac99411f6bd6d86f946566ba3983894cbeb0e43439934a90c2f5 + manager: conda + name: psutil + optional: false + platform: osx-arm64 + url: + https://conda.anaconda.org/conda-forge/osx-arm64/psutil-7.0.0-py311h917b07b_0.conda + version: 7.0.0 + - category: main + dependencies: + __glibc: '>=2.17,<3.0.a0' + libgcc: '>=13' + hash: + md5: b3c17d95b5a10c6e64a21fa17573e70e sha256: 9c88f8c64590e9567c6c80823f0328e58d3b1efb0e1c539c0315ceca764e0973 manager: conda name: pthread-stubs @@ -10036,58 +11084,97 @@ package: url: https://conda.anaconda.org/conda-forge/noarch/ptyprocess-0.7.0-pyhd8ed1ab_1.conda version: 0.7.0 + - category: main + dependencies: + python: '>=3.9' + hash: + md5: 6cffc01a8288495554ad5c5935e09d25 + sha256: db5e0d1d214ac2253556cbb1e242900ed75a3877eac1c4a68625ba45830a5942 + manager: conda + name: publicsuffix2 + optional: false + platform: linux-64 + url: + https://conda.anaconda.org/conda-forge/noarch/publicsuffix2-2.20191221-pyhd8ed1ab_1.conda + version: '2.20191221' + - category: main + dependencies: + python: '>=3.9' + hash: + md5: 6cffc01a8288495554ad5c5935e09d25 + sha256: db5e0d1d214ac2253556cbb1e242900ed75a3877eac1c4a68625ba45830a5942 + manager: conda + name: publicsuffix2 + optional: false + platform: osx-64 + url: + https://conda.anaconda.org/conda-forge/noarch/publicsuffix2-2.20191221-pyhd8ed1ab_1.conda + version: '2.20191221' + - category: main + dependencies: + python: '>=3.9' + hash: + md5: 6cffc01a8288495554ad5c5935e09d25 + sha256: db5e0d1d214ac2253556cbb1e242900ed75a3877eac1c4a68625ba45830a5942 + manager: conda + name: publicsuffix2 + optional: false + platform: osx-arm64 + url: + https://conda.anaconda.org/conda-forge/noarch/publicsuffix2-2.20191221-pyhd8ed1ab_1.conda + version: '2.20191221' - category: main dependencies: __glibc: '>=2.17,<3.0.a0' libgcc: '>=13' - liblief: 0.14.1 + liblief: 0.16.4 libstdcxx: '>=13' python: '>=3.11,<3.12.0a0' python_abi: 3.11.* hash: - md5: d526a5f49e1aba9c0be609532f59a3f9 - sha256: 6c443b60b70255a61c35680863de23fc141e07516fb87cc684c63cbdb7a920ce + md5: 6ae24e63dc3916c6b2b0c8c5739f90d1 + sha256: d2784fdd4dff7dbdee15fd542cd788e2d56680fd489174158f4841cb9209400e manager: conda name: py-lief optional: false platform: linux-64 url: - https://conda.anaconda.org/conda-forge/linux-64/py-lief-0.14.1-py311hfdbb021_2.conda - version: 0.14.1 + https://conda.anaconda.org/conda-forge/linux-64/py-lief-0.16.4-py311hfdbb021_0.conda + version: 0.16.4 - category: main dependencies: __osx: '>=10.13' - libcxx: '>=17' - liblief: 0.14.1 + libcxx: '>=18' + liblief: 0.16.4 python: '>=3.11,<3.12.0a0' python_abi: 3.11.* hash: - md5: 3ae70c55fef0bf23940c95bf5170fe75 - sha256: bd95024dd3668a78eb131c9bcb6a24983573b2756f0cb4fb614cd31c5ebfa537 + md5: 
4b880e07c8420779c2e4a64ba38113cf + sha256: e179e908228d122b7164c09a2885a0e678ec4d4691caf6a7ba1a6089dcabf614 manager: conda name: py-lief optional: false platform: osx-64 url: - https://conda.anaconda.org/conda-forge/osx-64/py-lief-0.14.1-py311hd89902b_2.conda - version: 0.14.1 + https://conda.anaconda.org/conda-forge/osx-64/py-lief-0.16.4-py311hc356e98_0.conda + version: 0.16.4 - category: main dependencies: __osx: '>=11.0' - libcxx: '>=17' - liblief: 0.14.1 + libcxx: '>=18' + liblief: 0.16.4 python: '>=3.11,<3.12.0a0' python_abi: 3.11.* hash: - md5: 49783632a9410a8f1bfa69b0d8d2f7cb - sha256: f3553cdfc03f772f4b6cc0f364c5b61ed85ec688920fdba22fa7a73e1c52af0b + md5: 0fc63c1b37f33a2e55844f82a0b40ed3 + sha256: a5a161d11cdd18556e69d465e2f1cf48d3ea9258960db3e1459c7bb452dc8953 manager: conda name: py-lief optional: false platform: osx-arm64 url: - https://conda.anaconda.org/conda-forge/osx-arm64/py-lief-0.14.1-py311h3f08180_2.conda - version: 0.14.1 + https://conda.anaconda.org/conda-forge/osx-arm64/py-lief-0.16.4-py311h155a34a_0.conda + version: 0.16.4 - category: main dependencies: __glibc: '>=2.17,<3.0.a0' @@ -10137,6 +11224,42 @@ package: url: https://conda.anaconda.org/conda-forge/osx-arm64/py-rattler-0.9.0-py311h8be0713_0.conda version: 0.9.0 + - category: main + dependencies: + python: '>=3.9' + hash: + md5: 09bb17ed307ad6ab2fd78d32372fdd4e + sha256: d06051df66e9ab753683d7423fcef873d78bb0c33bd112c3d5be66d529eddf06 + manager: conda + name: pyasn1 + optional: false + platform: linux-64 + url: https://conda.anaconda.org/conda-forge/noarch/pyasn1-0.6.1-pyhd8ed1ab_2.conda + version: 0.6.1 + - category: main + dependencies: + python: '>=3.9' + hash: + md5: 09bb17ed307ad6ab2fd78d32372fdd4e + sha256: d06051df66e9ab753683d7423fcef873d78bb0c33bd112c3d5be66d529eddf06 + manager: conda + name: pyasn1 + optional: false + platform: osx-64 + url: https://conda.anaconda.org/conda-forge/noarch/pyasn1-0.6.1-pyhd8ed1ab_2.conda + version: 0.6.1 + - category: main + dependencies: + python: '>=3.9' + hash: + md5: 09bb17ed307ad6ab2fd78d32372fdd4e + sha256: d06051df66e9ab753683d7423fcef873d78bb0c33bd112c3d5be66d529eddf06 + manager: conda + name: pyasn1 + optional: false + platform: osx-arm64 + url: https://conda.anaconda.org/conda-forge/noarch/pyasn1-0.6.1-pyhd8ed1ab_2.conda + version: 0.6.1 - category: main dependencies: {} hash: @@ -10260,15 +11383,15 @@ package: python: '>=3.11,<3.12.0a0' python_abi: 3.11.* hash: - md5: a0ab50817d4c688883f2127b1e952b3a - sha256: cf65582b21bd89e91d41041413fc0da3abd98af5135fa9ee71e90d5befa6fffb + md5: 9bed86f8fc59f1e9eace4ec48c6e432e + sha256: eff6dd2f6a3a7c1bd20bf884dee7b909d779e3b30fc232c9b1107477961e66f1 manager: conda name: pycryptodome optional: false platform: linux-64 url: - https://conda.anaconda.org/conda-forge/linux-64/pycryptodome-3.22.0-py311h35130b2_0.conda - version: 3.22.0 + https://conda.anaconda.org/conda-forge/linux-64/pycryptodome-3.23.0-py311h35130b2_0.conda + version: 3.23.0 - category: main dependencies: __osx: '>=10.13' @@ -10276,15 +11399,15 @@ package: python: '>=3.11,<3.12.0a0' python_abi: 3.11.* hash: - md5: 8b6f1c2fd3ed78c0d91f681361665972 - sha256: 6a13cb5db1b4b35d78dddc505c400fdfd251d20c995b3c84afa3bfb641cfbbb0 + md5: 5d47ce7c525b8bc72a35924cbbf5adb4 + sha256: 80678f11b07a60dae16844ef0b6e1a6baa1feb5c42de0a571f7abe76c246a72a manager: conda name: pycryptodome optional: false platform: osx-64 url: - https://conda.anaconda.org/conda-forge/osx-64/pycryptodome-3.22.0-py311he3c51cc_0.conda - version: 3.22.0 + 
https://conda.anaconda.org/conda-forge/osx-64/pycryptodome-3.23.0-py311he3c51cc_0.conda + version: 3.23.0 - category: main dependencies: __osx: '>=11.0' @@ -10292,154 +11415,205 @@ package: python: '>=3.11,<3.12.0a0' python_abi: 3.11.* hash: - md5: f4f6b814f5356ef2e5249ef1dc4272d1 - sha256: bcc2a2d5b93962524062ebfa83c1064dbcac73ca57b5a69bd4921a68f291274a + md5: 4ff491f9c7eef6fbb9034be1e7721d46 + sha256: 9f8b9e2213d37f567dbe460405dc8b6795aebfac938b9d43f2afa890027b8364 manager: conda name: pycryptodome optional: false platform: osx-arm64 url: - https://conda.anaconda.org/conda-forge/osx-arm64/pycryptodome-3.22.0-py311h5f4a806_0.conda - version: 3.22.0 + https://conda.anaconda.org/conda-forge/osx-arm64/pycryptodome-3.23.0-py311h5f4a806_0.conda + version: 3.23.0 - category: main dependencies: annotated-types: '>=0.6.0' - pydantic-core: 2.27.2 + pydantic-core: 2.33.2 python: '>=3.9' typing-extensions: '>=4.6.1' + typing-inspection: '>=0.4.0' typing_extensions: '>=4.12.2' hash: - md5: c69f87041cf24dfc8cb6bf64ca7133c7 - sha256: 9a78801a28959edeb945e8270a4e666577b52fac0cf4e35f88cf122f73d83e75 + md5: 8ad3ad8db5ce2ba470c9facc37af00a9 + sha256: a522473505ac6a9c10bb304d7338459a406ba22a6d3bb1a355c1b5283553a372 manager: conda name: pydantic optional: false platform: linux-64 - url: https://conda.anaconda.org/conda-forge/noarch/pydantic-2.10.6-pyh3cfb1c2_0.conda - version: 2.10.6 + url: https://conda.anaconda.org/conda-forge/noarch/pydantic-2.11.4-pyh3cfb1c2_0.conda + version: 2.11.4 - category: main dependencies: annotated-types: '>=0.6.0' - pydantic-core: 2.27.2 + pydantic-core: 2.33.2 python: '>=3.9' typing-extensions: '>=4.6.1' + typing-inspection: '>=0.4.0' typing_extensions: '>=4.12.2' hash: - md5: c69f87041cf24dfc8cb6bf64ca7133c7 - sha256: 9a78801a28959edeb945e8270a4e666577b52fac0cf4e35f88cf122f73d83e75 + md5: 8ad3ad8db5ce2ba470c9facc37af00a9 + sha256: a522473505ac6a9c10bb304d7338459a406ba22a6d3bb1a355c1b5283553a372 manager: conda name: pydantic optional: false platform: osx-64 - url: https://conda.anaconda.org/conda-forge/noarch/pydantic-2.10.6-pyh3cfb1c2_0.conda - version: 2.10.6 + url: https://conda.anaconda.org/conda-forge/noarch/pydantic-2.11.4-pyh3cfb1c2_0.conda + version: 2.11.4 - category: main dependencies: annotated-types: '>=0.6.0' - pydantic-core: 2.27.2 + pydantic-core: 2.33.2 python: '>=3.9' typing-extensions: '>=4.6.1' + typing-inspection: '>=0.4.0' typing_extensions: '>=4.12.2' hash: - md5: c69f87041cf24dfc8cb6bf64ca7133c7 - sha256: 9a78801a28959edeb945e8270a4e666577b52fac0cf4e35f88cf122f73d83e75 + md5: 8ad3ad8db5ce2ba470c9facc37af00a9 + sha256: a522473505ac6a9c10bb304d7338459a406ba22a6d3bb1a355c1b5283553a372 manager: conda name: pydantic optional: false platform: osx-arm64 - url: https://conda.anaconda.org/conda-forge/noarch/pydantic-2.10.6-pyh3cfb1c2_0.conda - version: 2.10.6 + url: https://conda.anaconda.org/conda-forge/noarch/pydantic-2.11.4-pyh3cfb1c2_0.conda + version: 2.11.4 - category: main dependencies: __glibc: '>=2.17,<3.0.a0' libgcc: '>=13' - python: '>=3.11,<3.12.0a0' + python: '' python_abi: 3.11.* typing-extensions: '>=4.6.0,!=4.7.0' hash: - md5: 675cb6079b6b3b4ef4f20399fedf6666 - sha256: 8ead97151b2f349cd327456fe4a6fcf7c51a3ab6c06f48f4330f86de0d848bd1 + md5: 484d0d62d4b069d5372680309fc5f00c + sha256: b48e5abb6debae4f559b08cdbaf0736c7806adc00c106ced2c98a622b7081d8f manager: conda name: pydantic-core optional: false platform: linux-64 url: - https://conda.anaconda.org/conda-forge/linux-64/pydantic-core-2.27.2-py311h9e33e62_0.conda - version: 2.27.2 + 
https://conda.anaconda.org/conda-forge/linux-64/pydantic-core-2.33.2-py311hdae7d1d_0.conda + version: 2.33.2 - category: main dependencies: __osx: '>=10.13' - python: '>=3.11,<3.12.0a0' + python: '' python_abi: 3.11.* typing-extensions: '>=4.6.0,!=4.7.0' hash: - md5: 0a9d04f67aa75f92236e7c25afc370ed - sha256: 51cee7f401be36e8492ce265f8543dbe790e49e63ff15b9c41ba93d06372558c + md5: 3453cc60caa35dda5903d7fa59553208 + sha256: 8eb7c76e4a55ec7a58aada7d5288a111e05a05817dd91e3c3a752a5b657b91fb manager: conda name: pydantic-core optional: false platform: osx-64 url: - https://conda.anaconda.org/conda-forge/osx-64/pydantic-core-2.27.2-py311h3b9c2be_0.conda - version: 2.27.2 + https://conda.anaconda.org/conda-forge/osx-64/pydantic-core-2.33.2-py311hd1a56c6_0.conda + version: 2.33.2 - category: main dependencies: __osx: '>=11.0' - python: '>=3.11,<3.12.0a0' + python: 3.11.* python_abi: 3.11.* typing-extensions: '>=4.6.0,!=4.7.0' hash: - md5: b96fba96baad08b81c57fd157b481b22 - sha256: 5163982ef229292ca5b9fe96e756ac29b6c6453d56c9f1dfaf48f5796de78d05 + md5: 05220abd84df3f4645f4fe2b8413582b + sha256: ecca273484dcd5bb463e8fbbc90760155de09fcb6435c5372f83e521d791f44a manager: conda name: pydantic-core optional: false platform: osx-arm64 url: - https://conda.anaconda.org/conda-forge/osx-arm64/pydantic-core-2.27.2-py311h3ff9189_0.conda - version: 2.27.2 + https://conda.anaconda.org/conda-forge/osx-arm64/pydantic-core-2.33.2-py311hf245fc6_0.conda + version: 2.33.2 - category: main dependencies: pydantic: '>=2.5.2' python: '>=3.9' hash: - md5: a59d358a1a8580cf65c9bf5f538cb011 - sha256: d1bb1838f0fd64577bd431e3cb3a4d7971e1e78a592f37917972fe2f415ee49f + md5: f8619d04cf6a514db8581100e8f3ec8c + sha256: 93079a64ef21d64eceedab7fc9eeeb815f5b443ab5bf3af87bdbf99422c6e3ce manager: conda name: pydantic-extra-types optional: false platform: linux-64 url: - https://conda.anaconda.org/conda-forge/noarch/pydantic-extra-types-2.10.3-pyh3cfb1c2_0.conda - version: 2.10.3 + https://conda.anaconda.org/conda-forge/noarch/pydantic-extra-types-2.10.4-pyh3cfb1c2_0.conda + version: 2.10.4 - category: main dependencies: pydantic: '>=2.5.2' python: '>=3.9' hash: - md5: a59d358a1a8580cf65c9bf5f538cb011 - sha256: d1bb1838f0fd64577bd431e3cb3a4d7971e1e78a592f37917972fe2f415ee49f + md5: f8619d04cf6a514db8581100e8f3ec8c + sha256: 93079a64ef21d64eceedab7fc9eeeb815f5b443ab5bf3af87bdbf99422c6e3ce manager: conda name: pydantic-extra-types optional: false platform: osx-64 url: - https://conda.anaconda.org/conda-forge/noarch/pydantic-extra-types-2.10.3-pyh3cfb1c2_0.conda - version: 2.10.3 + https://conda.anaconda.org/conda-forge/noarch/pydantic-extra-types-2.10.4-pyh3cfb1c2_0.conda + version: 2.10.4 - category: main dependencies: pydantic: '>=2.5.2' python: '>=3.9' hash: - md5: a59d358a1a8580cf65c9bf5f538cb011 - sha256: d1bb1838f0fd64577bd431e3cb3a4d7971e1e78a592f37917972fe2f415ee49f + md5: f8619d04cf6a514db8581100e8f3ec8c + sha256: 93079a64ef21d64eceedab7fc9eeeb815f5b443ab5bf3af87bdbf99422c6e3ce manager: conda name: pydantic-extra-types optional: false platform: osx-arm64 url: - https://conda.anaconda.org/conda-forge/noarch/pydantic-extra-types-2.10.3-pyh3cfb1c2_0.conda - version: 2.10.3 + https://conda.anaconda.org/conda-forge/noarch/pydantic-extra-types-2.10.4-pyh3cfb1c2_0.conda + version: 2.10.4 + - category: main + dependencies: + pydantic: '>=2.7.0' + python: '>=3.9' + python-dotenv: '>=0.21.0' + typing-inspection: '>=0.4.0' + hash: + md5: 29dd5c4ece2497b75b4050ec3c8d4044 + sha256: 
ea2f1027218e83e484fd581933e0ce60b9194486c56c98053b4277b0fb291364 + manager: conda + name: pydantic-settings + optional: false + platform: linux-64 + url: + https://conda.anaconda.org/conda-forge/noarch/pydantic-settings-2.9.1-pyh3cfb1c2_0.conda + version: 2.9.1 + - category: main + dependencies: + pydantic: '>=2.7.0' + python: '>=3.9' + python-dotenv: '>=0.21.0' + typing-inspection: '>=0.4.0' + hash: + md5: 29dd5c4ece2497b75b4050ec3c8d4044 + sha256: ea2f1027218e83e484fd581933e0ce60b9194486c56c98053b4277b0fb291364 + manager: conda + name: pydantic-settings + optional: false + platform: osx-64 + url: + https://conda.anaconda.org/conda-forge/noarch/pydantic-settings-2.9.1-pyh3cfb1c2_0.conda + version: 2.9.1 + - category: main + dependencies: + pydantic: '>=2.7.0' + python: '>=3.9' + python-dotenv: '>=0.21.0' + typing-inspection: '>=0.4.0' + hash: + md5: 29dd5c4ece2497b75b4050ec3c8d4044 + sha256: ea2f1027218e83e484fd581933e0ce60b9194486c56c98053b4277b0fb291364 + manager: conda + name: pydantic-settings + optional: false + platform: osx-arm64 + url: + https://conda.anaconda.org/conda-forge/noarch/pydantic-settings-2.9.1-pyh3cfb1c2_0.conda + version: 2.9.1 - category: main dependencies: __glibc: '>=2.17,<3.0.a0' @@ -10449,15 +11623,15 @@ package: python: '' python_abi: 3.11.* hash: - md5: 9ab10d1e6fd9456542867071e6d2eb34 - sha256: b536ce917c676382a504f6e1f88d7337f43b54ed6b50f686a623a5775faaf918 + md5: b89bf5c223e6c871c6aad6fc2d2100c8 + sha256: df9919217bb14c9774fa11aadba6f0d3ca62a645ddadd98e265fbd4862d0b02b manager: conda name: pygit2 optional: false platform: linux-64 url: - https://conda.anaconda.org/conda-forge/linux-64/pygit2-1.17.0-py311hdbb01f6_0.conda - version: 1.17.0 + https://conda.anaconda.org/conda-forge/linux-64/pygit2-1.18.0-py311hdbb01f6_0.conda + version: 1.18.0 - category: main dependencies: __osx: '>=10.13' @@ -10466,15 +11640,15 @@ package: python: '' python_abi: 3.11.* hash: - md5: 6eec62aaed7615f2bfa04cdf389a4169 - sha256: e8172858668756b9a39213134c85c38d0a59086c616c348acd71b5d0b5c96a9c + md5: bad7a9074d4a1a05066dd95963f31dd2 + sha256: 54893d5ccace4e6aebbec2ebb28cd6004d31d3a454285e33ca72a469eaea3801 manager: conda name: pygit2 optional: false platform: osx-64 url: - https://conda.anaconda.org/conda-forge/osx-64/pygit2-1.17.0-py311h493d26c_0.conda - version: 1.17.0 + https://conda.anaconda.org/conda-forge/osx-64/pygit2-1.18.0-py311h493d26c_0.conda + version: 1.18.0 - category: main dependencies: __osx: '>=11.0' @@ -10483,15 +11657,15 @@ package: python: 3.11.* python_abi: 3.11.* hash: - md5: bd3e3c1ceacb295e7d4b8eb056191427 - sha256: 86e6f9501c9cbfb214282c4b22e609ee1d84de2d5b9ae4e89af7b0e8d09663e2 + md5: 6f2580e0237569b7c0c0b9ade0f6828a + sha256: 8f93743a5f210a7be181ade556dc588b03829e33af373edd5c4e9671fd4c2944 manager: conda name: pygit2 optional: false platform: osx-arm64 url: - https://conda.anaconda.org/conda-forge/osx-arm64/pygit2-1.17.0-py311hd460543_0.conda - version: 1.17.0 + https://conda.anaconda.org/conda-forge/osx-arm64/pygit2-1.18.0-py311hd460543_0.conda + version: 1.18.0 - category: main dependencies: cryptography: '>=3.4.0' @@ -10626,40 +11800,50 @@ package: version: 2.10.1 - category: main dependencies: - python: '>=3.3' + __glibc: '>=2.17,<3.0.a0' + libgcc: '>=13' + python: '>=3.11,<3.12.0a0' + python_abi: 3.11.* hash: - md5: edf8651c4379d9d1495ad6229622d150 - sha256: 50bd91767686bfe769e50a5a1b883e238d944a6163fea43e7c0beaac54ca674f + md5: 869b15d0e4c7f7449c290336760a653f + sha256: 27ae4676bb24a09c7b571a032f5b6f2d0f7e990e4171b47e4ce2cdf8b4592000 manager: conda 
- name: pylev + name: pylsqpack optional: false platform: linux-64 - url: https://conda.anaconda.org/conda-forge/noarch/pylev-1.4.0-pyhd8ed1ab_0.tar.bz2 - version: 1.4.0 + url: + https://conda.anaconda.org/conda-forge/linux-64/pylsqpack-0.3.19-py311h9ecbd09_0.conda + version: 0.3.19 - category: main dependencies: - python: '>=3.3' + __osx: '>=10.13' + python: '>=3.11,<3.12.0a0' + python_abi: 3.11.* hash: - md5: edf8651c4379d9d1495ad6229622d150 - sha256: 50bd91767686bfe769e50a5a1b883e238d944a6163fea43e7c0beaac54ca674f + md5: ecc356b8988200c207571a5eea534f1e + sha256: 5c48e59a7db29829901ddf05c457a8122353d222ef48815b27b326521ecd346c manager: conda - name: pylev + name: pylsqpack optional: false platform: osx-64 - url: https://conda.anaconda.org/conda-forge/noarch/pylev-1.4.0-pyhd8ed1ab_0.tar.bz2 - version: 1.4.0 + url: + https://conda.anaconda.org/conda-forge/osx-64/pylsqpack-0.3.19-py311h4d7f069_0.conda + version: 0.3.19 - category: main dependencies: - python: '>=3.3' + __osx: '>=11.0' + python: '>=3.11,<3.12.0a0' + python_abi: 3.11.* hash: - md5: edf8651c4379d9d1495ad6229622d150 - sha256: 50bd91767686bfe769e50a5a1b883e238d944a6163fea43e7c0beaac54ca674f + md5: d1ea0c8738fc40b84477bfbae265b415 + sha256: a67074ad65ae4a5d7e3bbf26a4e486b296fee9910573950f178600e63ac73859 manager: conda - name: pylev + name: pylsqpack optional: false platform: osx-arm64 - url: https://conda.anaconda.org/conda-forge/noarch/pylev-1.4.0-pyhd8ed1ab_0.tar.bz2 - version: 1.4.0 + url: + https://conda.anaconda.org/conda-forge/osx-arm64/pylsqpack-0.3.19-py311h917b07b_0.conda + version: 0.3.19 - category: main dependencies: __glibc: '>=2.17,<3.0.a0' @@ -10810,6 +11994,195 @@ package: platform: osx-arm64 url: https://conda.anaconda.org/conda-forge/noarch/pynamodb-6.0.2-pyhd8ed1ab_0.conda version: 6.0.2 + - category: main + dependencies: + __osx: '>=10.13' + libffi: '>=3.4,<4.0a0' + python: '>=3.11,<3.12.0a0' + python_abi: 3.11.* + setuptools: '' + hash: + md5: 3b2f520d27fa7cf9c6c73fb43c69a321 + sha256: 7cc9dd5c836631c733173c88187231bfc0438135e0ddf94e866e45b3d10592bd + manager: conda + name: pyobjc-core + optional: false + platform: osx-64 + url: + https://conda.anaconda.org/conda-forge/osx-64/pyobjc-core-11.0-py311hfbc4093_0.conda + version: '11.0' + - category: main + dependencies: + __osx: '>=11.0' + libffi: '>=3.4,<4.0a0' + python: '>=3.11,<3.12.0a0' + python_abi: 3.11.* + setuptools: '' + hash: + md5: cc865b09e7a02328840b163fb8856731 + sha256: 7eb9c40a460ea769f024aaf45dae9fde7ca41137ca82154c50c8aead8a32ff88 + manager: conda + name: pyobjc-core + optional: false + platform: osx-arm64 + url: + https://conda.anaconda.org/conda-forge/osx-arm64/pyobjc-core-11.0-py311hab620ed_0.conda + version: '11.0' + - category: main + dependencies: + __osx: '>=10.13' + libffi: '>=3.4,<4.0a0' + pyobjc-core: 11.0.* + python: '>=3.11,<3.12.0a0' + python_abi: 3.11.* + hash: + md5: d16654f6b3f602bb0acab446c55bcafb + sha256: 94e00e4c9b5c5d8b2374321a0f908b7812b06ac8c9cb99242ddaa4ea0091f0be + manager: conda + name: pyobjc-framework-cocoa + optional: false + platform: osx-64 + url: + https://conda.anaconda.org/conda-forge/osx-64/pyobjc-framework-cocoa-11.0-py311hfbc4093_0.conda + version: '11.0' + - category: main + dependencies: + __osx: '>=11.0' + libffi: '>=3.4,<4.0a0' + pyobjc-core: 11.0.* + python: '>=3.11,<3.12.0a0' + python_abi: 3.11.* + hash: + md5: 39da4013010bd559600f775ebf6a5915 + sha256: 33635759c626103696963a4d439f01cc534fe94c318ce5a14c7b9ddbe8dfb78c + manager: conda + name: pyobjc-framework-cocoa + optional: false + platform: 
osx-arm64 + url: + https://conda.anaconda.org/conda-forge/osx-arm64/pyobjc-framework-cocoa-11.0-py311hab620ed_0.conda + version: '11.0' + - category: main + dependencies: + cryptography: '>=38.0.0,<41' + python: '>=3.6' + hash: + md5: 0b34aa3ab7e7ccb1765a03dd9ed29938 + sha256: 458428cb867f70f2af2a4ed59d382291ea3eb3f10490196070a15d1d71d5432a + manager: conda + name: pyopenssl + optional: false + platform: linux-64 + url: + https://conda.anaconda.org/conda-forge/noarch/pyopenssl-23.1.1-pyhd8ed1ab_0.conda + version: 23.1.1 + - category: main + dependencies: + cryptography: '>=38.0.0,<41' + python: '>=3.6' + hash: + md5: 0b34aa3ab7e7ccb1765a03dd9ed29938 + sha256: 458428cb867f70f2af2a4ed59d382291ea3eb3f10490196070a15d1d71d5432a + manager: conda + name: pyopenssl + optional: false + platform: osx-64 + url: + https://conda.anaconda.org/conda-forge/noarch/pyopenssl-23.1.1-pyhd8ed1ab_0.conda + version: 23.1.1 + - category: main + dependencies: + cryptography: '>=38.0.0,<41' + python: '>=3.6' + hash: + md5: 0b34aa3ab7e7ccb1765a03dd9ed29938 + sha256: 458428cb867f70f2af2a4ed59d382291ea3eb3f10490196070a15d1d71d5432a + manager: conda + name: pyopenssl + optional: false + platform: osx-arm64 + url: + https://conda.anaconda.org/conda-forge/noarch/pyopenssl-23.1.1-pyhd8ed1ab_0.conda + version: 23.1.1 + - category: main + dependencies: + python: '>=3.6' + hash: + md5: 4d91352a50949d049cf9714c8563d433 + sha256: 8714a83f1aeac278b3eb33c7cb880c95c9a5924e7a5feeb9e87e7d0837afa085 + manager: conda + name: pyparsing + optional: false + platform: linux-64 + url: https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.1.4-pyhd8ed1ab_0.conda + version: 3.1.4 + - category: main + dependencies: + python: '>=3.6' + hash: + md5: 4d91352a50949d049cf9714c8563d433 + sha256: 8714a83f1aeac278b3eb33c7cb880c95c9a5924e7a5feeb9e87e7d0837afa085 + manager: conda + name: pyparsing + optional: false + platform: osx-64 + url: https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.1.4-pyhd8ed1ab_0.conda + version: 3.1.4 + - category: main + dependencies: + python: '>=3.6' + hash: + md5: 4d91352a50949d049cf9714c8563d433 + sha256: 8714a83f1aeac278b3eb33c7cb880c95c9a5924e7a5feeb9e87e7d0837afa085 + manager: conda + name: pyparsing + optional: false + platform: osx-arm64 + url: https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.1.4-pyhd8ed1ab_0.conda + version: 3.1.4 + - category: main + dependencies: + __linux: '' + python: '>=3.6' + xclip: '' + xsel: '' + hash: + md5: 2acdfb68ee42274329494c93fcf92ce6 + sha256: c4404821044f2fd2c33a1159d525368672d04e369d8c16b8cc8488a4a8bd5be5 + manager: conda + name: pyperclip + optional: false + platform: linux-64 + url: https://conda.anaconda.org/conda-forge/noarch/pyperclip-1.8.2-pyha804496_3.conda + version: 1.8.2 + - category: main + dependencies: + __osx: '' + pyobjc-framework-cocoa: '' + python: '>=3.6' + hash: + md5: aca7616a492c45b55b97d1b4e882ebea + sha256: 026d26b5e624de4b1f1c359224f57421b7ceaaecba4ffe265b65ef6d1253f4cb + manager: conda + name: pyperclip + optional: false + platform: osx-64 + url: https://conda.anaconda.org/conda-forge/noarch/pyperclip-1.8.2-pyh534df25_3.conda + version: 1.8.2 + - category: main + dependencies: + __osx: '' + pyobjc-framework-cocoa: '' + python: '>=3.6' + hash: + md5: aca7616a492c45b55b97d1b4e882ebea + sha256: 026d26b5e624de4b1f1c359224f57421b7ceaaecba4ffe265b65ef6d1253f4cb + manager: conda + name: pyperclip + optional: false + platform: osx-arm64 + url: https://conda.anaconda.org/conda-forge/noarch/pyperclip-1.8.2-pyh534df25_3.conda + version: 1.8.2 - 
category: main dependencies: python: '>=3.9' @@ -10952,15 +12325,15 @@ package: python: '>=3.9' toml: '' hash: - md5: 79963c319d1be62c8fd3e34555816e01 - sha256: 09acac1974e10a639415be4be326dd21fa6d66ca51a01fb71532263fba6dccf6 + md5: 1e35d8f975bc0e984a19819aa91c440a + sha256: 9961a1524f63d10bc29efdc52013ec06b0e95fb2619a250e250ff3618261d5cd manager: conda name: pytest-cov optional: false platform: linux-64 url: - https://conda.anaconda.org/conda-forge/noarch/pytest-cov-6.0.0-pyhd8ed1ab_1.conda - version: 6.0.0 + https://conda.anaconda.org/conda-forge/noarch/pytest-cov-6.1.1-pyhd8ed1ab_0.conda + version: 6.1.1 - category: main dependencies: coverage: '>=7.5' @@ -10968,15 +12341,15 @@ package: python: '>=3.9' toml: '' hash: - md5: 79963c319d1be62c8fd3e34555816e01 - sha256: 09acac1974e10a639415be4be326dd21fa6d66ca51a01fb71532263fba6dccf6 + md5: 1e35d8f975bc0e984a19819aa91c440a + sha256: 9961a1524f63d10bc29efdc52013ec06b0e95fb2619a250e250ff3618261d5cd manager: conda name: pytest-cov optional: false platform: osx-64 url: - https://conda.anaconda.org/conda-forge/noarch/pytest-cov-6.0.0-pyhd8ed1ab_1.conda - version: 6.0.0 + https://conda.anaconda.org/conda-forge/noarch/pytest-cov-6.1.1-pyhd8ed1ab_0.conda + version: 6.1.1 - category: main dependencies: coverage: '>=7.5' @@ -10984,15 +12357,15 @@ package: python: '>=3.9' toml: '' hash: - md5: 79963c319d1be62c8fd3e34555816e01 - sha256: 09acac1974e10a639415be4be326dd21fa6d66ca51a01fb71532263fba6dccf6 + md5: 1e35d8f975bc0e984a19819aa91c440a + sha256: 9961a1524f63d10bc29efdc52013ec06b0e95fb2619a250e250ff3618261d5cd manager: conda name: pytest-cov optional: false platform: osx-arm64 url: - https://conda.anaconda.org/conda-forge/noarch/pytest-cov-6.0.0-pyhd8ed1ab_1.conda - version: 6.0.0 + https://conda.anaconda.org/conda-forge/noarch/pytest-cov-6.1.1-pyhd8ed1ab_0.conda + version: 6.1.1 - category: main dependencies: pytest: '>=7.4.2' @@ -11127,81 +12500,129 @@ package: version: 3.6.1 - category: main dependencies: - __glibc: '>=2.17,<3.0.a0' - bzip2: '>=1.0.8,<2.0a0' - ld_impl_linux-64: '>=2.36.1' - libexpat: '>=2.6.4,<3.0a0' - libffi: '>=3.4,<4.0a0' - libgcc: '>=13' - liblzma: '>=5.6.4,<6.0a0' - libnsl: '>=2.0.1,<2.1.0a0' + psutil: '' + pytest: '>=2.8' + python: '>=3.9' + hash: + md5: 8c469458938c01ef5c9cd1357bbc62ac + sha256: 8bc8e82da035fe5dc1966c9d5f5d9d290f5c77075e2b3b6873e0b270e8c61853 + manager: conda + name: pytest-xprocess + optional: false + platform: linux-64 + url: + https://conda.anaconda.org/conda-forge/noarch/pytest-xprocess-1.0.2-pyhd8ed1ab_1.conda + version: 1.0.2 + - category: main + dependencies: + psutil: '' + pytest: '>=2.8' + python: '>=3.9' + hash: + md5: 8c469458938c01ef5c9cd1357bbc62ac + sha256: 8bc8e82da035fe5dc1966c9d5f5d9d290f5c77075e2b3b6873e0b270e8c61853 + manager: conda + name: pytest-xprocess + optional: false + platform: osx-64 + url: + https://conda.anaconda.org/conda-forge/noarch/pytest-xprocess-1.0.2-pyhd8ed1ab_1.conda + version: 1.0.2 + - category: main + dependencies: + psutil: '' + pytest: '>=2.8' + python: '>=3.9' + hash: + md5: 8c469458938c01ef5c9cd1357bbc62ac + sha256: 8bc8e82da035fe5dc1966c9d5f5d9d290f5c77075e2b3b6873e0b270e8c61853 + manager: conda + name: pytest-xprocess + optional: false + platform: osx-arm64 + url: + https://conda.anaconda.org/conda-forge/noarch/pytest-xprocess-1.0.2-pyhd8ed1ab_1.conda + version: 1.0.2 + - category: main + dependencies: + __glibc: '>=2.17,<3.0.a0' + bzip2: '>=1.0.8,<2.0a0' + ld_impl_linux-64: '>=2.36.1' + libexpat: '>=2.7.0,<3.0a0' + libffi: '>=3.4.6,<3.5.0a0' + libgcc: '>=13' 
+ liblzma: '>=5.8.1,<6.0a0' + libnsl: '>=2.0.1,<2.1.0a0' libsqlite: '>=3.49.1,<4.0a0' libuuid: '>=2.38.1,<3.0a0' libxcrypt: '>=4.4.36' libzlib: '>=1.3.1,<2.0a0' ncurses: '>=6.5,<7.0a0' - openssl: '>=3.4.1,<4.0a0' + openssl: '>=3.5.0,<4.0a0' + pip: '' readline: '>=8.2,<9.0a0' tk: '>=8.6.13,<8.7.0a0' tzdata: '' hash: - md5: 81dd3e521f9b9eaa58d06213e28aaa9b - sha256: e0be7ad95a034d10e021f15317bf5c70fc1161564fa47844984c245505cde36c + md5: b61d4fbf583b8393d9d00ec106ad3658 + sha256: 028a03968eb101a681fa4966b2c52e93c8db1e934861f8d108224f51ba2c1bc9 manager: conda name: python optional: false platform: linux-64 url: - https://conda.anaconda.org/conda-forge/linux-64/python-3.11.11-h9e4cc4f_2_cpython.conda - version: 3.11.11 + https://conda.anaconda.org/conda-forge/linux-64/python-3.11.12-h9e4cc4f_0_cpython.conda + version: 3.11.12 - category: main dependencies: __osx: '>=10.13' bzip2: '>=1.0.8,<2.0a0' - libexpat: '>=2.6.4,<3.0a0' - libffi: '>=3.4,<4.0a0' - liblzma: '>=5.6.4,<6.0a0' + libexpat: '>=2.7.0,<3.0a0' + libffi: '>=3.4.6,<3.5.0a0' + liblzma: '>=5.8.1,<6.0a0' libsqlite: '>=3.49.1,<4.0a0' libzlib: '>=1.3.1,<2.0a0' ncurses: '>=6.5,<7.0a0' - openssl: '>=3.4.1,<4.0a0' + openssl: '>=3.5.0,<4.0a0' + pip: '' readline: '>=8.2,<9.0a0' tk: '>=8.6.13,<8.7.0a0' tzdata: '' hash: - md5: 8d73135b48597cc13715a34bc79654b7 - sha256: 2c34d988cdb364665478ca3d93a43b2a5bf149e822215ad3fa6a5342627374a9 + md5: cfa36957cba60dca8e79a974d09b6a2c + sha256: fcd4b8a9a206940321d1d6569ddac2e99f359f0d5864e48140374a85aed5c27f manager: conda name: python optional: false platform: osx-64 url: - https://conda.anaconda.org/conda-forge/osx-64/python-3.11.11-h9ccd52b_2_cpython.conda - version: 3.11.11 + https://conda.anaconda.org/conda-forge/osx-64/python-3.11.12-h9ccd52b_0_cpython.conda + version: 3.11.12 - category: main dependencies: __osx: '>=11.0' bzip2: '>=1.0.8,<2.0a0' - libexpat: '>=2.6.4,<3.0a0' - libffi: '>=3.4,<4.0a0' - liblzma: '>=5.6.4,<6.0a0' + libexpat: '>=2.7.0,<3.0a0' + libffi: '>=3.4.6,<3.5.0a0' + liblzma: '>=5.8.1,<6.0a0' libsqlite: '>=3.49.1,<4.0a0' libzlib: '>=1.3.1,<2.0a0' ncurses: '>=6.5,<7.0a0' - openssl: '>=3.4.1,<4.0a0' + openssl: '>=3.5.0,<4.0a0' + pip: '' readline: '>=8.2,<9.0a0' tk: '>=8.6.13,<8.7.0a0' tzdata: '' hash: - md5: 4bd51247ba4dd5958eb8f1e593edfe00 - sha256: 6f3c20b8666301fc27e6d1095f1e0f12a093bacf483e992cb56169127e989630 + md5: 6ab5f6a9e85f1b1848b6518e7eea63ee + sha256: ea91eb5bc7160cbc6f8110702f9250c87e378ff1dc83ab8daa8ae7832fb5d0de manager: conda name: python optional: false platform: osx-arm64 url: - https://conda.anaconda.org/conda-forge/osx-arm64/python-3.11.11-hc22306f_2_cpython.conda - version: 3.11.11 + https://conda.anaconda.org/conda-forge/osx-arm64/python-3.11.12-hc22306f_0_cpython.conda + version: 3.11.12 - category: main dependencies: colorama: '' @@ -11300,32 +12721,110 @@ package: version: 2.9.0.post0 - category: main dependencies: - cpython: 3.11.11.* + python: '' + hash: + md5: 27d816c6981a8d50090537b761de80f4 + sha256: 7d927317003544049c97e7108e8ca5f2be5ff0ea954f5c84c8bbeb243b663fc8 + manager: conda + name: python-dotenv + optional: false + platform: linux-64 + url: + https://conda.anaconda.org/conda-forge/noarch/python-dotenv-1.1.0-pyh29332c3_1.conda + version: 1.1.0 + - category: main + dependencies: + python: '>=3.9' + hash: + md5: 27d816c6981a8d50090537b761de80f4 + sha256: 7d927317003544049c97e7108e8ca5f2be5ff0ea954f5c84c8bbeb243b663fc8 + manager: conda + name: python-dotenv + optional: false + platform: osx-64 + url: + 
https://conda.anaconda.org/conda-forge/noarch/python-dotenv-1.1.0-pyh29332c3_1.conda + version: 1.1.0 + - category: main + dependencies: + python: '>=3.9' + hash: + md5: 27d816c6981a8d50090537b761de80f4 + sha256: 7d927317003544049c97e7108e8ca5f2be5ff0ea954f5c84c8bbeb243b663fc8 + manager: conda + name: python-dotenv + optional: false + platform: osx-arm64 + url: + https://conda.anaconda.org/conda-forge/noarch/python-dotenv-1.1.0-pyh29332c3_1.conda + version: 1.1.0 + - category: main + dependencies: + python: '>=3.9' + hash: + md5: 38e34d2d1d9dca4fb2b9a0a04f604e2c + sha256: 1b09a28093071c1874862422696429d0d35bd0b8420698003ac004746c5e82a2 + manager: conda + name: python-fastjsonschema + optional: false + platform: linux-64 + url: + https://conda.anaconda.org/conda-forge/noarch/python-fastjsonschema-2.21.1-pyhd8ed1ab_0.conda + version: 2.21.1 + - category: main + dependencies: + python: '>=3.9' + hash: + md5: 38e34d2d1d9dca4fb2b9a0a04f604e2c + sha256: 1b09a28093071c1874862422696429d0d35bd0b8420698003ac004746c5e82a2 + manager: conda + name: python-fastjsonschema + optional: false + platform: osx-64 + url: + https://conda.anaconda.org/conda-forge/noarch/python-fastjsonschema-2.21.1-pyhd8ed1ab_0.conda + version: 2.21.1 + - category: main + dependencies: + python: '>=3.9' + hash: + md5: 38e34d2d1d9dca4fb2b9a0a04f604e2c + sha256: 1b09a28093071c1874862422696429d0d35bd0b8420698003ac004746c5e82a2 + manager: conda + name: python-fastjsonschema + optional: false + platform: osx-arm64 + url: + https://conda.anaconda.org/conda-forge/noarch/python-fastjsonschema-2.21.1-pyhd8ed1ab_0.conda + version: 2.21.1 + - category: main + dependencies: + cpython: 3.11.12.* python_abi: '*' hash: - md5: ce36c654f337283c2738bdc71072ecf8 - sha256: f6398783819f90cf048161cbb1eac86fdc9d54c6be6a0ff771906be3636979a5 + md5: e070d88f4def6f8c44fc7d11593d65b5 + sha256: 4e51fa5a746126ec8839eff78e205bdae2a550a6a0287e51cc613407ebc959cb manager: conda name: python-gil optional: false platform: linux-64 url: - https://conda.anaconda.org/conda-forge/noarch/python-gil-3.11.11-hd8ed1ab_2.conda - version: 3.11.11 + https://conda.anaconda.org/conda-forge/noarch/python-gil-3.11.12-hd8ed1ab_0.conda + version: 3.11.12 - category: main dependencies: - cpython: 3.11.11.* + cpython: 3.11.12.* python_abi: '*' hash: - md5: ce36c654f337283c2738bdc71072ecf8 - sha256: f6398783819f90cf048161cbb1eac86fdc9d54c6be6a0ff771906be3636979a5 + md5: e070d88f4def6f8c44fc7d11593d65b5 + sha256: 4e51fa5a746126ec8839eff78e205bdae2a550a6a0287e51cc613407ebc959cb manager: conda name: python-gil optional: false platform: osx-64 url: - https://conda.anaconda.org/conda-forge/noarch/python-gil-3.11.11-hd8ed1ab_2.conda - version: 3.11.11 + https://conda.anaconda.org/conda-forge/noarch/python-gil-3.11.12-hd8ed1ab_0.conda + version: 3.11.12 - category: main dependencies: graphviz: '>=2.46.1' @@ -11368,48 +12867,126 @@ package: url: https://conda.anaconda.org/conda-forge/noarch/python-graphviz-0.20.3-pyh91182bf_2.conda version: 0.20.3 + - category: main + dependencies: + python: '>=3.9' + hash: + md5: e27480eebcdf247209e90da706ebef8d + sha256: f1fc3e9561b6d3bee2f738f5b1818b51124f45a2b28b3bf6c2174d629276e069 + manager: conda + name: python-installer + optional: false + platform: linux-64 + url: + https://conda.anaconda.org/conda-forge/noarch/python-installer-0.7.0-pyhff2d567_1.conda + version: 0.7.0 + - category: main + dependencies: + python: '>=3.9' + hash: + md5: e27480eebcdf247209e90da706ebef8d + sha256: f1fc3e9561b6d3bee2f738f5b1818b51124f45a2b28b3bf6c2174d629276e069 + 
manager: conda + name: python-installer + optional: false + platform: osx-64 + url: + https://conda.anaconda.org/conda-forge/noarch/python-installer-0.7.0-pyhff2d567_1.conda + version: 0.7.0 + - category: main + dependencies: + python: '>=3.9' + hash: + md5: e27480eebcdf247209e90da706ebef8d + sha256: f1fc3e9561b6d3bee2f738f5b1818b51124f45a2b28b3bf6c2174d629276e069 + manager: conda + name: python-installer + optional: false + platform: osx-arm64 + url: + https://conda.anaconda.org/conda-forge/noarch/python-installer-0.7.0-pyhff2d567_1.conda + version: 0.7.0 - category: main dependencies: libarchive: '' python: '' hash: - md5: ff2e149fc19d07d5765dd1b56a741681 - sha256: b09d623de9f992b8452524052fb9670675aa6a48515e83ce9e9523bb29d5ffaa + md5: 62f88e1e404f84d3ff7f68746ced7f0f + sha256: 916fa14bc3a810dd9d8c295da941c160f61e2a7d93f67b676707c5a9ee719605 manager: conda name: python-libarchive-c optional: false platform: linux-64 url: - https://conda.anaconda.org/conda-forge/noarch/python-libarchive-c-5.2-pyh29332c3_0.conda - version: '5.2' + https://conda.anaconda.org/conda-forge/noarch/python-libarchive-c-5.3-pyhe01879c_0.conda + version: '5.3' - category: main dependencies: libarchive: '' python: '>=3.9' hash: - md5: ff2e149fc19d07d5765dd1b56a741681 - sha256: b09d623de9f992b8452524052fb9670675aa6a48515e83ce9e9523bb29d5ffaa + md5: 62f88e1e404f84d3ff7f68746ced7f0f + sha256: 916fa14bc3a810dd9d8c295da941c160f61e2a7d93f67b676707c5a9ee719605 manager: conda name: python-libarchive-c optional: false platform: osx-64 url: - https://conda.anaconda.org/conda-forge/noarch/python-libarchive-c-5.2-pyh29332c3_0.conda - version: '5.2' + https://conda.anaconda.org/conda-forge/noarch/python-libarchive-c-5.3-pyhe01879c_0.conda + version: '5.3' - category: main dependencies: libarchive: '' python: '>=3.9' hash: - md5: ff2e149fc19d07d5765dd1b56a741681 - sha256: b09d623de9f992b8452524052fb9670675aa6a48515e83ce9e9523bb29d5ffaa + md5: 62f88e1e404f84d3ff7f68746ced7f0f + sha256: 916fa14bc3a810dd9d8c295da941c160f61e2a7d93f67b676707c5a9ee719605 manager: conda name: python-libarchive-c optional: false platform: osx-arm64 url: - https://conda.anaconda.org/conda-forge/noarch/python-libarchive-c-5.2-pyh29332c3_0.conda - version: '5.2' + https://conda.anaconda.org/conda-forge/noarch/python-libarchive-c-5.3-pyhe01879c_0.conda + version: '5.3' + - category: main + dependencies: + python: '>=3.9' + hash: + md5: a28c984e0429aff3ab7386f7de56de6f + sha256: 1b03678d145b1675b757cba165a0d9803885807792f7eb4495e48a38858c3cca + manager: conda + name: python-multipart + optional: false + platform: linux-64 + url: + https://conda.anaconda.org/conda-forge/noarch/python-multipart-0.0.20-pyhff2d567_0.conda + version: 0.0.20 + - category: main + dependencies: + python: '>=3.9' + hash: + md5: a28c984e0429aff3ab7386f7de56de6f + sha256: 1b03678d145b1675b757cba165a0d9803885807792f7eb4495e48a38858c3cca + manager: conda + name: python-multipart + optional: false + platform: osx-64 + url: + https://conda.anaconda.org/conda-forge/noarch/python-multipart-0.0.20-pyhff2d567_0.conda + version: 0.0.20 + - category: main + dependencies: + python: '>=3.9' + hash: + md5: a28c984e0429aff3ab7386f7de56de6f + sha256: 1b03678d145b1675b757cba165a0d9803885807792f7eb4495e48a38858c3cca + manager: conda + name: python-multipart + optional: false + platform: osx-arm64 + url: + https://conda.anaconda.org/conda-forge/noarch/python-multipart-0.0.20-pyhff2d567_0.conda + version: 0.0.20 - category: main dependencies: __glibc: '>=2.17,<3.0.a0' @@ -11504,35 +13081,35 @@ package: - 
category: main dependencies: {} hash: - md5: 139a8d40c8a2f430df31048949e450de - sha256: 2660b8059b3ee854bc5d3c6b1fce946e5bd2fe8fbca7827de2c5885ead6209de + md5: 6320dac78b3b215ceac35858b2cfdb70 + sha256: 705d06b15c497b585d235e7e87f6c893ffe5fbfdb3326e376e56c842879e0a09 manager: conda name: python_abi optional: false platform: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.11-5_cp311.conda + url: https://conda.anaconda.org/conda-forge/noarch/python_abi-3.11-7_cp311.conda version: '3.11' - category: main dependencies: {} hash: - md5: e6d62858c06df0be0e6255c753d74787 - sha256: 9b092850a268aca99600b724bae849f51209ecd5628e609b4699debc59ff1945 + md5: 6320dac78b3b215ceac35858b2cfdb70 + sha256: 705d06b15c497b585d235e7e87f6c893ffe5fbfdb3326e376e56c842879e0a09 manager: conda name: python_abi optional: false platform: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/python_abi-3.11-5_cp311.conda + url: https://conda.anaconda.org/conda-forge/noarch/python_abi-3.11-7_cp311.conda version: '3.11' - category: main dependencies: {} hash: - md5: 3b855e3734344134cb56c410f729c340 - sha256: adc05729b7e0aca7b436e60a86f10822a92185dfcb48d66d6444e3629d3a1f6a + md5: 6320dac78b3b215ceac35858b2cfdb70 + sha256: 705d06b15c497b585d235e7e87f6c893ffe5fbfdb3326e376e56c842879e0a09 manager: conda name: python_abi optional: false platform: osx-arm64 - url: https://conda.anaconda.org/conda-forge/osx-arm64/python_abi-3.11-5_cp311.conda + url: https://conda.anaconda.org/conda-forge/noarch/python_abi-3.11-7_cp311.conda version: '3.11' - category: main dependencies: @@ -11627,15 +13204,15 @@ package: python: '>=3.11,<3.12.0a0' python_abi: 3.11.* hash: - md5: 42ee075eb116643c593718f0a2d39b7c - sha256: 0fcb9e4ef1059900f8eccada604915d593136d29d264b73d572a4af5ae14eca1 + md5: 9f4a6ee8fe126c8afe71230d0febb9ec + sha256: c8cf544f1e8e0a50b1dd774062bb4d56adcb7b5f5f03dbf7ccfcb236639311a2 manager: conda name: rapidfuzz optional: false platform: linux-64 url: - https://conda.anaconda.org/conda-forge/linux-64/rapidfuzz-3.12.2-py311hfdbb021_0.conda - version: 3.12.2 + https://conda.anaconda.org/conda-forge/linux-64/rapidfuzz-3.13.0-py311hfdbb021_0.conda + version: 3.13.0 - category: main dependencies: __osx: '>=10.13' @@ -11644,15 +13221,15 @@ package: python: '>=3.11,<3.12.0a0' python_abi: 3.11.* hash: - md5: 0ddf5d64a6b78800fc0623f840bf6512 - sha256: a3da38a32897a1950498bbefe9e26713697d3e60375f0d71877051ef22dd9409 + md5: 146328b22cacf280e03aa790b1204a57 + sha256: 2171af9e1ba44d44858e31e7d1c8b639a6229c0fc395f6e6a8f9db3d3aa1a6fc manager: conda name: rapidfuzz optional: false platform: osx-64 url: - https://conda.anaconda.org/conda-forge/osx-64/rapidfuzz-3.12.2-py311hc356e98_0.conda - version: 3.12.2 + https://conda.anaconda.org/conda-forge/osx-64/rapidfuzz-3.13.0-py311hc356e98_0.conda + version: 3.13.0 - category: main dependencies: __osx: '>=11.0' @@ -11661,57 +13238,57 @@ package: python: '>=3.11,<3.12.0a0' python_abi: 3.11.* hash: - md5: 4e68ac18fc06ee587c533f8b657195a1 - sha256: e6c441554acdded9f4697593941706aef0d1fd5446f45bd1e0445178820bd3fa + md5: a7cee2a5e47a2cb6313029e3cadaa8bb + sha256: 353cae53d85ba723aa52832501ad980b30592d26c9e9f480d20da22bc5c78aa5 manager: conda name: rapidfuzz optional: false platform: osx-arm64 url: - https://conda.anaconda.org/conda-forge/osx-arm64/rapidfuzz-3.12.2-py311h155a34a_0.conda - version: 3.12.2 + https://conda.anaconda.org/conda-forge/osx-arm64/rapidfuzz-3.13.0-py311h155a34a_0.conda + version: 3.13.0 - category: main dependencies: __glibc: '>=2.17,<3.0.a0' libgcc: 
'>=13' - openssl: '>=3.4.1,<4.0a0' + openssl: '>=3.5.0,<4.0a0' patchelf: '' hash: - md5: a72e7d6e8ff13b2051aaffe74d211dce - sha256: a2a300747c1427628b95342ed74d0c375fb7753ecba0f8c96087526232cd367f + md5: 6be167e42bced9d1e4255fe25ec95ed7 + sha256: 3d1657518776ea7af56cd65cbf975155efab170ed657953a1d7211f145a638e4 manager: conda name: rattler-build optional: false platform: linux-64 url: - https://conda.anaconda.org/conda-forge/linux-64/rattler-build-0.39.0-h159367c_1.conda - version: 0.39.0 + https://conda.anaconda.org/conda-forge/linux-64/rattler-build-0.42.1-h2d22210_0.conda + version: 0.42.1 - category: main dependencies: __osx: '>=10.13' hash: - md5: af9bc8b95e98111a6d4f17c6eee100f3 - sha256: 1673c5523ef8c2f7fb24f4006603171535e307521750518d9d984538f1286842 + md5: 5ed3e1eb9c7dca7b5e07b0340c6f6a23 + sha256: dd5998ad7bca41966a1bbb7a93b0c1749f0b8dc033e4de3d245530e9574a9b9d manager: conda name: rattler-build optional: false platform: osx-64 url: - https://conda.anaconda.org/conda-forge/osx-64/rattler-build-0.39.0-h05de357_1.conda - version: 0.39.0 + https://conda.anaconda.org/conda-forge/osx-64/rattler-build-0.42.1-h05de357_0.conda + version: 0.42.1 - category: main dependencies: __osx: '>=11.0' hash: - md5: c8efb2fd0eb547705bbf7a2e9595e2ce - sha256: 7a718f3dff2d56acecba1451197c447e3ff81ddb4f7609135a00439e9a0d8e64 + md5: 31f97984064ff852c3724d6cc9541e71 + sha256: f3cf289e05295dc14d3027d9f61c56269afccc7410ad7e59522254791f519620 manager: conda name: rattler-build optional: false platform: osx-arm64 url: - https://conda.anaconda.org/conda-forge/osx-arm64/rattler-build-0.39.0-h8dba533_1.conda - version: 0.39.0 + https://conda.anaconda.org/conda-forge/osx-arm64/rattler-build-0.42.1-h8dba533_0.conda + version: 0.42.1 - category: main dependencies: conda-build: '' @@ -11722,15 +13299,15 @@ package: tomli: '' typing-extensions: '>=4.12.2,<5' hash: - md5: af7ab66f4506d06272ba24aed05e2ba9 - sha256: 12cceccb2ee4f118e977ad137982ac652a544bef3e00b87752eb1c93f2c7a95d + md5: aaae12cdd0b6fda94de6be5d57a90686 + sha256: 53d28953f0a2f93e489b6c4873710799b89f77ab466de546c39e5492506e1db8 manager: conda name: rattler-build-conda-compat optional: false platform: linux-64 url: - https://conda.anaconda.org/conda-forge/noarch/rattler-build-conda-compat-1.3.4-pyhd8ed1ab_0.conda - version: 1.3.4 + https://conda.anaconda.org/conda-forge/noarch/rattler-build-conda-compat-1.4.3-pyhd8ed1ab_0.conda + version: 1.4.3 - category: main dependencies: conda-build: '' @@ -11741,15 +13318,15 @@ package: tomli: '' typing-extensions: '>=4.12.2,<5' hash: - md5: af7ab66f4506d06272ba24aed05e2ba9 - sha256: 12cceccb2ee4f118e977ad137982ac652a544bef3e00b87752eb1c93f2c7a95d + md5: aaae12cdd0b6fda94de6be5d57a90686 + sha256: 53d28953f0a2f93e489b6c4873710799b89f77ab466de546c39e5492506e1db8 manager: conda name: rattler-build-conda-compat optional: false platform: osx-64 url: - https://conda.anaconda.org/conda-forge/noarch/rattler-build-conda-compat-1.3.4-pyhd8ed1ab_0.conda - version: 1.3.4 + https://conda.anaconda.org/conda-forge/noarch/rattler-build-conda-compat-1.4.3-pyhd8ed1ab_0.conda + version: 1.4.3 - category: main dependencies: conda-build: '' @@ -11760,15 +13337,15 @@ package: tomli: '' typing-extensions: '>=4.12.2,<5' hash: - md5: af7ab66f4506d06272ba24aed05e2ba9 - sha256: 12cceccb2ee4f118e977ad137982ac652a544bef3e00b87752eb1c93f2c7a95d + md5: aaae12cdd0b6fda94de6be5d57a90686 + sha256: 53d28953f0a2f93e489b6c4873710799b89f77ab466de546c39e5492506e1db8 manager: conda name: rattler-build-conda-compat optional: false platform: osx-arm64 url: - 
https://conda.anaconda.org/conda-forge/noarch/rattler-build-conda-compat-1.3.4-pyhd8ed1ab_0.conda - version: 1.3.4 + https://conda.anaconda.org/conda-forge/noarch/rattler-build-conda-compat-1.4.3-pyhd8ed1ab_0.conda + version: 1.4.3 - category: main dependencies: libgcc: '>=13' @@ -12075,21 +13652,63 @@ package: url: https://conda.anaconda.org/conda-forge/noarch/requests-oauthlib-2.0.0-pyhd8ed1ab_1.conda version: 2.0.0 + - category: main + dependencies: + python: '>=3.9' + requests: '>=2.0.1,<3.0.0' + hash: + md5: 66de8645e324fda0ea6ef28c2f99a2ab + sha256: c0b815e72bb3f08b67d60d5e02251bbb0164905b5f72942ff5b6d2a339640630 + manager: conda + name: requests-toolbelt + optional: false + platform: linux-64 + url: + https://conda.anaconda.org/conda-forge/noarch/requests-toolbelt-1.0.0-pyhd8ed1ab_1.conda + version: 1.0.0 + - category: main + dependencies: + python: '>=3.9' + requests: '>=2.0.1,<3.0.0' + hash: + md5: 66de8645e324fda0ea6ef28c2f99a2ab + sha256: c0b815e72bb3f08b67d60d5e02251bbb0164905b5f72942ff5b6d2a339640630 + manager: conda + name: requests-toolbelt + optional: false + platform: osx-64 + url: + https://conda.anaconda.org/conda-forge/noarch/requests-toolbelt-1.0.0-pyhd8ed1ab_1.conda + version: 1.0.0 + - category: main + dependencies: + python: '>=3.9' + requests: '>=2.0.1,<3.0.0' + hash: + md5: 66de8645e324fda0ea6ef28c2f99a2ab + sha256: c0b815e72bb3f08b67d60d5e02251bbb0164905b5f72942ff5b6d2a339640630 + manager: conda + name: requests-toolbelt + optional: false + platform: osx-arm64 + url: + https://conda.anaconda.org/conda-forge/noarch/requests-toolbelt-1.0.0-pyhd8ed1ab_1.conda + version: 1.0.0 - category: main dependencies: markdown-it-py: '>=2.2.0' pygments: '>=2.13.0,<3.0.0' - python: '>=3.9' + python: '' typing_extensions: '>=4.0.0,<5.0.0' hash: - md5: 7aed65d4ff222bfb7335997aa40b7da5 - sha256: 06a760c5ae572e72e865d5a87e9fe3cc171e1a9c996e63daf3db52ff1a0b4457 + md5: 202f08242192ce3ed8bdb439ba40c0fe + sha256: d10e2b66a557ec6296844e04686db87818b0df87d73c06388f2332fda3f7d2d5 manager: conda name: rich optional: false platform: linux-64 - url: https://conda.anaconda.org/conda-forge/noarch/rich-13.9.4-pyhd8ed1ab_1.conda - version: 13.9.4 + url: https://conda.anaconda.org/conda-forge/noarch/rich-14.0.0-pyh29332c3_0.conda + version: 14.0.0 - category: main dependencies: markdown-it-py: '>=2.2.0' @@ -12097,14 +13716,14 @@ package: python: '>=3.9' typing_extensions: '>=4.0.0,<5.0.0' hash: - md5: 7aed65d4ff222bfb7335997aa40b7da5 - sha256: 06a760c5ae572e72e865d5a87e9fe3cc171e1a9c996e63daf3db52ff1a0b4457 + md5: 202f08242192ce3ed8bdb439ba40c0fe + sha256: d10e2b66a557ec6296844e04686db87818b0df87d73c06388f2332fda3f7d2d5 manager: conda name: rich optional: false platform: osx-64 - url: https://conda.anaconda.org/conda-forge/noarch/rich-13.9.4-pyhd8ed1ab_1.conda - version: 13.9.4 + url: https://conda.anaconda.org/conda-forge/noarch/rich-14.0.0-pyh29332c3_0.conda + version: 14.0.0 - category: main dependencies: markdown-it-py: '>=2.2.0' @@ -12112,50 +13731,98 @@ package: python: '>=3.9' typing_extensions: '>=4.0.0,<5.0.0' hash: - md5: 7aed65d4ff222bfb7335997aa40b7da5 - sha256: 06a760c5ae572e72e865d5a87e9fe3cc171e1a9c996e63daf3db52ff1a0b4457 + md5: 202f08242192ce3ed8bdb439ba40c0fe + sha256: d10e2b66a557ec6296844e04686db87818b0df87d73c06388f2332fda3f7d2d5 manager: conda name: rich optional: false platform: osx-arm64 - url: https://conda.anaconda.org/conda-forge/noarch/rich-13.9.4-pyhd8ed1ab_1.conda - version: 13.9.4 + url: https://conda.anaconda.org/conda-forge/noarch/rich-14.0.0-pyh29332c3_0.conda + 
version: 14.0.0 + - category: main + dependencies: + click: '>=8.1.7' + python: '' + rich: '>=13.7.1' + typing_extensions: '>=4.12.2' + hash: + md5: 4ba15ae9388b67d09782798347481f69 + sha256: e558f8c254a9ff9164d069110da162fc79497d70c60f2c09a5d3d0d7101c5628 + manager: conda + name: rich-toolkit + optional: false + platform: linux-64 + url: + https://conda.anaconda.org/conda-forge/noarch/rich-toolkit-0.11.3-pyh29332c3_0.conda + version: 0.11.3 + - category: main + dependencies: + click: '>=8.1.7' + python: '>=3.9' + rich: '>=13.7.1' + typing_extensions: '>=4.12.2' + hash: + md5: 4ba15ae9388b67d09782798347481f69 + sha256: e558f8c254a9ff9164d069110da162fc79497d70c60f2c09a5d3d0d7101c5628 + manager: conda + name: rich-toolkit + optional: false + platform: osx-64 + url: + https://conda.anaconda.org/conda-forge/noarch/rich-toolkit-0.11.3-pyh29332c3_0.conda + version: 0.11.3 + - category: main + dependencies: + click: '>=8.1.7' + python: '>=3.9' + rich: '>=13.7.1' + typing_extensions: '>=4.12.2' + hash: + md5: 4ba15ae9388b67d09782798347481f69 + sha256: e558f8c254a9ff9164d069110da162fc79497d70c60f2c09a5d3d0d7101c5628 + manager: conda + name: rich-toolkit + optional: false + platform: osx-arm64 + url: + https://conda.anaconda.org/conda-forge/noarch/rich-toolkit-0.11.3-pyh29332c3_0.conda + version: 0.11.3 - category: main dependencies: __glibc: '>=2.17,<3.0.a0' libgcc: '>=13' hash: - md5: d214df5d083a659f75e7cdafd9500383 - sha256: fd9d568112a6cf6dfa00828a70c730289fa046a86b641899e298c5ed3fd50ca6 + md5: 845d9730024ab628f55d384631ceb499 + sha256: b8c3e26a804077fde11fc3579207c7a60db32c5367c2330fe6d620f8e49a6dc4 manager: conda name: ripgrep optional: false platform: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/ripgrep-14.1.1-h8fae777_0.conda + url: https://conda.anaconda.org/conda-forge/linux-64/ripgrep-14.1.1-h8fae777_1.conda version: 14.1.1 - category: main dependencies: __osx: '>=10.13' hash: - md5: 8457bf2f3a39b5ad494e4aa93594204c - sha256: 7c2539a29a163e96c9aa43079b09f92a52b51984309c528188591f9eb35a904d + md5: 4a5968c29202574e4ba15b0e17b65150 + sha256: 2619c551a09bbdd4659c846998aa115bccfe0c807fc88e51280da849552293c6 manager: conda name: ripgrep optional: false platform: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/ripgrep-14.1.1-h926acf8_0.conda + url: https://conda.anaconda.org/conda-forge/osx-64/ripgrep-14.1.1-h926acf8_1.conda version: 14.1.1 - category: main dependencies: __osx: '>=11.0' hash: - md5: a5ac0a2bfc92f3cb9b6cfc9caf9de5c9 - sha256: bea65d7f355ac3db84b046e2db3b203d78ac261451bf5dd7a5719fc8102fa73e + md5: 44d5815da71c4facfaed2f7d2d88f160 + sha256: 754c4a8ac866deb3372f97a54254e9bcc01b20d4be2f432c08de798ba6d12df8 manager: conda name: ripgrep optional: false platform: osx-arm64 - url: https://conda.anaconda.org/conda-forge/osx-arm64/ripgrep-14.1.1-h0ef69ab_0.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/ripgrep-14.1.1-h0ef69ab_1.conda version: 14.1.1 - category: main dependencies: @@ -12164,94 +13831,94 @@ package: python: '' python_abi: 3.11.* hash: - md5: e2fc6063859ff5fd62f983c31e4bf521 - sha256: a45aec5ad66dc54884bc782ac590cd26e00f738bfcf4f55b4d63c8ca22915a30 + md5: a82b805c84bca54329510d03656cf57b + sha256: 9654a1c11dda67b2782cad03f2a3793e18dbf5d9dbf5d2fdf86bdac3f2ad8a1d manager: conda name: rpds-py optional: false platform: linux-64 url: - https://conda.anaconda.org/conda-forge/linux-64/rpds-py-0.24.0-py311h687327b_0.conda - version: 0.24.0 + https://conda.anaconda.org/conda-forge/linux-64/rpds-py-0.25.1-py311hdae7d1d_0.conda + version: 
0.25.1 - category: main dependencies: __osx: '>=10.13' python: '' python_abi: 3.11.* hash: - md5: 31a54aa2037a6c581c6aeffc73f47085 - sha256: b9b2f76cde789dbddb4c80694fdde2b9b88efade7029afcd5b2f26b0647646f8 + md5: 2071cf0f0fd57946d37b825b227f5b02 + sha256: 87bab663373ff8b3461dbc73a963f86d3c4c4b442727c5efe89ba40d1d57e470 manager: conda name: rpds-py optional: false platform: osx-64 url: - https://conda.anaconda.org/conda-forge/osx-64/rpds-py-0.24.0-py311hab9d7c2_0.conda - version: 0.24.0 + https://conda.anaconda.org/conda-forge/osx-64/rpds-py-0.25.1-py311hd1a56c6_0.conda + version: 0.25.1 - category: main dependencies: __osx: '>=11.0' python: 3.11.* python_abi: 3.11.* hash: - md5: 9a2d45b17a80b5a11fbd04d3ed8db6ce - sha256: 502e0a47463bb66624abd968e3d42f264f8aafd556731f05c238b03c433320c0 + md5: 3c969fae89e5832566890421a074eb92 + sha256: 8928c4cacc668db0c62dd9a11415319f6fa7f06d01360e5398264941c0ab404d manager: conda name: rpds-py optional: false platform: osx-arm64 url: - https://conda.anaconda.org/conda-forge/osx-arm64/rpds-py-0.24.0-py311hc9d6b66_0.conda - version: 0.24.0 + https://conda.anaconda.org/conda-forge/osx-arm64/rpds-py-0.25.1-py311hf245fc6_0.conda + version: 0.25.1 - category: main dependencies: - __glibc: '>=2.17,<3.0.a0' - libgcc: '>=13' + libgcc-ng: '>=12' python: '>=3.11,<3.12.0a0' python_abi: 3.11.* ruamel.yaml.clib: '>=0.1.2' + setuptools: '' hash: - md5: a3188715e28c25f1404b84c702e6fdf4 - sha256: 11922e4b99d1d16a0ec18daccee4a1b83243000022d4e67ab957e15f3b4aa644 + md5: 59518a18bdf00ee4379797459d2c76ee + sha256: b33e0e83f834b948ce5c77c0df727b6cd027a388c3b4e4498b34b83751ba7c05 manager: conda name: ruamel.yaml optional: false platform: linux-64 url: - https://conda.anaconda.org/conda-forge/linux-64/ruamel.yaml-0.18.10-py311h9ecbd09_0.conda - version: 0.18.10 + https://conda.anaconda.org/conda-forge/linux-64/ruamel.yaml-0.17.40-py311h459d7ec_0.conda + version: 0.17.40 - category: main dependencies: - __osx: '>=10.13' python: '>=3.11,<3.12.0a0' python_abi: 3.11.* ruamel.yaml.clib: '>=0.1.2' + setuptools: '' hash: - md5: 7f11b35a61a8c90eea12a917b52895b9 - sha256: a623d6fdcaf22a6173b79dd167ee67b7dadf31f2f80081e70f3b2b8a84948299 + md5: 4796cc1d45c88ace8f5bb7950167a568 + sha256: 0260c295cdfae417f107fe2fae66bae4eb260195885d81d8b1cd4fc6d81c849b manager: conda name: ruamel.yaml optional: false platform: osx-64 url: - https://conda.anaconda.org/conda-forge/osx-64/ruamel.yaml-0.18.10-py311h4d7f069_0.conda - version: 0.18.10 + https://conda.anaconda.org/conda-forge/osx-64/ruamel.yaml-0.17.40-py311he705e18_0.conda + version: 0.17.40 - category: main dependencies: - __osx: '>=11.0' python: '>=3.11,<3.12.0a0' python_abi: 3.11.* ruamel.yaml.clib: '>=0.1.2' + setuptools: '' hash: - md5: 99b00011b5162250638eae2ea0b033e8 - sha256: 88ec95e9631b1eeec551455320f87e87cc3b8370379bc48aabc7eb550288c4c8 + md5: 640b6933c680860877b32a4c11076ab9 + sha256: bf44774cbd03eb8d396a078b4af1b194f8600f8ca7ea7ca0e3d61dc16b400cb4 manager: conda name: ruamel.yaml optional: false platform: osx-arm64 url: - https://conda.anaconda.org/conda-forge/osx-arm64/ruamel.yaml-0.18.10-py311h917b07b_0.conda - version: 0.18.10 + https://conda.anaconda.org/conda-forge/osx-arm64/ruamel.yaml-0.17.40-py311h05b510d_0.conda + version: 0.17.40 - category: main dependencies: __glibc: '>=2.17,<3.0.a0' @@ -12369,7 +14036,7 @@ package: libblas: '>=3.9.0,<4.0a0' libcblas: '>=3.9.0,<4.0a0' libcxx: '>=18' - libgfortran: 5.* + libgfortran: '>=5' libgfortran5: '>=13.2.0' liblapack: '>=3.9.0,<4.0a0' numpy: '>=1.23.5' @@ -12390,7 +14057,7 @@ package: 
libblas: '>=3.9.0,<4.0a0' libcblas: '>=3.9.0,<4.0a0' libcxx: '>=18' - libgfortran: 5.* + libgfortran: '>=5' libgfortran5: '>=13.2.0' liblapack: '>=3.9.0,<4.0a0' numpy: '>=1.23.5' @@ -12479,131 +14146,170 @@ package: dependencies: python: '>=3.9' hash: - md5: 9bddfdbf4e061821a1a443f93223be61 - sha256: 91d664ace7c22e787775069418daa9f232ee8bafdd0a6a080a5ed2395a6fa6b2 + md5: 982aa48accc06494cbd2b51af69e17c7 + sha256: 7d3f5531269e15cb533b60009aa2a950f9844acf31f38c1b55c8000dbb316676 + manager: conda + name: semver + optional: false + platform: linux-64 + url: https://conda.anaconda.org/conda-forge/noarch/semver-3.0.4-pyhd8ed1ab_0.conda + version: 3.0.4 + - category: main + dependencies: + python: '>=3.9' + hash: + md5: 982aa48accc06494cbd2b51af69e17c7 + sha256: 7d3f5531269e15cb533b60009aa2a950f9844acf31f38c1b55c8000dbb316676 + manager: conda + name: semver + optional: false + platform: osx-64 + url: https://conda.anaconda.org/conda-forge/noarch/semver-3.0.4-pyhd8ed1ab_0.conda + version: 3.0.4 + - category: main + dependencies: + python: '>=3.9' + hash: + md5: 982aa48accc06494cbd2b51af69e17c7 + sha256: 7d3f5531269e15cb533b60009aa2a950f9844acf31f38c1b55c8000dbb316676 + manager: conda + name: semver + optional: false + platform: osx-arm64 + url: https://conda.anaconda.org/conda-forge/noarch/semver-3.0.4-pyhd8ed1ab_0.conda + version: 3.0.4 + - category: main + dependencies: + python: '>=3.9' + hash: + md5: ea075e94dc0106c7212128b6a25bbc4c + sha256: 56ce31d15786e1df2f1105076f3650cd7c1892e0afeeb9aa92a08d2551af2e34 manager: conda name: setuptools optional: false platform: linux-64 url: - https://conda.anaconda.org/conda-forge/noarch/setuptools-75.8.2-pyhff2d567_0.conda - version: 75.8.2 + https://conda.anaconda.org/conda-forge/noarch/setuptools-80.8.0-pyhff2d567_0.conda + version: 80.8.0 - category: main dependencies: python: '>=3.9' hash: - md5: 9bddfdbf4e061821a1a443f93223be61 - sha256: 91d664ace7c22e787775069418daa9f232ee8bafdd0a6a080a5ed2395a6fa6b2 + md5: ea075e94dc0106c7212128b6a25bbc4c + sha256: 56ce31d15786e1df2f1105076f3650cd7c1892e0afeeb9aa92a08d2551af2e34 manager: conda name: setuptools optional: false platform: osx-64 url: - https://conda.anaconda.org/conda-forge/noarch/setuptools-75.8.2-pyhff2d567_0.conda - version: 75.8.2 + https://conda.anaconda.org/conda-forge/noarch/setuptools-80.8.0-pyhff2d567_0.conda + version: 80.8.0 - category: main dependencies: python: '>=3.9' hash: - md5: 9bddfdbf4e061821a1a443f93223be61 - sha256: 91d664ace7c22e787775069418daa9f232ee8bafdd0a6a080a5ed2395a6fa6b2 + md5: ea075e94dc0106c7212128b6a25bbc4c + sha256: 56ce31d15786e1df2f1105076f3650cd7c1892e0afeeb9aa92a08d2551af2e34 manager: conda name: setuptools optional: false platform: osx-arm64 url: - https://conda.anaconda.org/conda-forge/noarch/setuptools-75.8.2-pyhff2d567_0.conda - version: 75.8.2 + https://conda.anaconda.org/conda-forge/noarch/setuptools-80.8.0-pyhff2d567_0.conda + version: 80.8.0 - category: main dependencies: + importlib-metadata: '' packaging: '>=20.0' python: '>=3.9' setuptools: '>=45' tomli: '>=1.0.0' typing-extensions: '' hash: - md5: 1f960a50e0fba37b5f04ec766df10657 - sha256: 9a98abc0e600f48ce0be6aa412ca5e83a95b8f16f14a139212a1f15614c5dcaa + md5: 996376098e3648237b3efb0e0ad460c1 + sha256: f2c94e01f7998aab77edd996afc63482556b1d935e23fc14361889ee89424d16 manager: conda name: setuptools-scm optional: false platform: linux-64 url: - https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-8.2.1-pyhd8ed1ab_0.conda - version: 8.2.1 + 
https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-8.3.1-pyhd8ed1ab_0.conda + version: 8.3.1 - category: main dependencies: + importlib-metadata: '' packaging: '>=20.0' python: '>=3.9' setuptools: '>=45' tomli: '>=1.0.0' typing-extensions: '' hash: - md5: 1f960a50e0fba37b5f04ec766df10657 - sha256: 9a98abc0e600f48ce0be6aa412ca5e83a95b8f16f14a139212a1f15614c5dcaa + md5: 996376098e3648237b3efb0e0ad460c1 + sha256: f2c94e01f7998aab77edd996afc63482556b1d935e23fc14361889ee89424d16 manager: conda name: setuptools-scm optional: false platform: osx-64 url: - https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-8.2.1-pyhd8ed1ab_0.conda - version: 8.2.1 + https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-8.3.1-pyhd8ed1ab_0.conda + version: 8.3.1 - category: main dependencies: + importlib-metadata: '' packaging: '>=20.0' python: '>=3.9' setuptools: '>=45' tomli: '>=1.0.0' typing-extensions: '' hash: - md5: 1f960a50e0fba37b5f04ec766df10657 - sha256: 9a98abc0e600f48ce0be6aa412ca5e83a95b8f16f14a139212a1f15614c5dcaa + md5: 996376098e3648237b3efb0e0ad460c1 + sha256: f2c94e01f7998aab77edd996afc63482556b1d935e23fc14361889ee89424d16 manager: conda name: setuptools-scm optional: false platform: osx-arm64 url: - https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-8.2.1-pyhd8ed1ab_0.conda - version: 8.2.1 + https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-8.3.1-pyhd8ed1ab_0.conda + version: 8.3.1 - category: main dependencies: - setuptools-scm: '>=8.2.1,<8.2.2.0a0' + setuptools-scm: '>=8.3.1,<8.3.2.0a0' hash: - md5: ba6dd8f5a2d2b46fd52f03ac212e8c6a - sha256: 368da667755722f4fc70663c3589c192cf9a75487dd7636247bc06d7acc52605 + md5: 38ca080dff1a30a6fd3aec989062b255 + sha256: 726d9a8a626e4f87cfb58491d859949511499d20f4fc776a6cfbddfc35e06e50 manager: conda name: setuptools_scm optional: false platform: linux-64 url: - https://conda.anaconda.org/conda-forge/noarch/setuptools_scm-8.2.1-hd8ed1ab_0.conda - version: 8.2.1 + https://conda.anaconda.org/conda-forge/noarch/setuptools_scm-8.3.1-hd8ed1ab_0.conda + version: 8.3.1 - category: main dependencies: - setuptools-scm: '>=8.2.1,<8.2.2.0a0' + setuptools-scm: '>=8.3.1,<8.3.2.0a0' hash: - md5: ba6dd8f5a2d2b46fd52f03ac212e8c6a - sha256: 368da667755722f4fc70663c3589c192cf9a75487dd7636247bc06d7acc52605 + md5: 38ca080dff1a30a6fd3aec989062b255 + sha256: 726d9a8a626e4f87cfb58491d859949511499d20f4fc776a6cfbddfc35e06e50 manager: conda name: setuptools_scm optional: false platform: osx-64 url: - https://conda.anaconda.org/conda-forge/noarch/setuptools_scm-8.2.1-hd8ed1ab_0.conda - version: 8.2.1 + https://conda.anaconda.org/conda-forge/noarch/setuptools_scm-8.3.1-hd8ed1ab_0.conda + version: 8.3.1 - category: main dependencies: - setuptools-scm: '>=8.2.1,<8.2.2.0a0' + setuptools-scm: '>=8.3.1,<8.3.2.0a0' hash: - md5: ba6dd8f5a2d2b46fd52f03ac212e8c6a - sha256: 368da667755722f4fc70663c3589c192cf9a75487dd7636247bc06d7acc52605 + md5: 38ca080dff1a30a6fd3aec989062b255 + sha256: 726d9a8a626e4f87cfb58491d859949511499d20f4fc776a6cfbddfc35e06e50 manager: conda name: setuptools_scm optional: false platform: osx-arm64 url: - https://conda.anaconda.org/conda-forge/noarch/setuptools_scm-8.2.1-hd8ed1ab_0.conda - version: 8.2.1 + https://conda.anaconda.org/conda-forge/noarch/setuptools_scm-8.3.1-hd8ed1ab_0.conda + version: 8.3.1 - category: main dependencies: python: '>=3.9' @@ -12710,41 +14416,41 @@ package: libgcc: '>=13' libstdcxx: '>=13' hash: - md5: 2e2658fcb5ce6e27fd715e2017317490 - sha256: 
d80ba4542451e88f914857364688bf5fbe97a216c73e5a31fecb3b7e67f5212b + md5: 595c86db43ee33b84ae7c3569b70d824 + sha256: 7b81fead15b5fe4578ed335b0bb4d11054a6f431231c26c9c3cea223c466ffcf manager: conda name: simdjson optional: false platform: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/simdjson-3.12.2-h84d6215_0.conda - version: 3.12.2 + url: https://conda.anaconda.org/conda-forge/linux-64/simdjson-3.12.3-h84d6215_0.conda + version: 3.12.3 - category: main dependencies: __osx: '>=10.13' libcxx: '>=18' hash: - md5: ebf0bed97515ce08afc53ff15e08b9fc - sha256: 8fb91dc4dceb477cb9cf589099bdbd3a98fda188484c68e5442163b2262e8593 + md5: 7af452d0c1e685c7acbabf3f43c3e7d5 + sha256: f0b22fb3c81814875dc2d8e3d4f42b8a70746dc855d7a3cfb5bf050066e1ed53 manager: conda name: simdjson optional: false platform: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/simdjson-3.12.2-h9275861_0.conda - version: 3.12.2 + url: https://conda.anaconda.org/conda-forge/osx-64/simdjson-3.12.3-h9275861_0.conda + version: 3.12.3 - category: main dependencies: __osx: '>=11.0' libcxx: '>=18' hash: - md5: d929886990cae5aee962fb2002e00726 - sha256: 222eb76e16386ad370fcca9d58ad77660c13f3efcb2294cb286799a56be3eed1 + md5: 1e3350c8f56d5bc5c8ba2917f4a1e212 + sha256: 73d5dbf349d9c84b3be224b1ebabbef51d8c4c1bc34e9b4e6a5f6902a5cb1fb8 manager: conda name: simdjson optional: false platform: osx-arm64 url: - https://conda.anaconda.org/conda-forge/osx-arm64/simdjson-3.12.2-ha393de7_0.conda - version: 3.12.2 + https://conda.anaconda.org/conda-forge/osx-arm64/simdjson-3.12.3-ha393de7_0.conda + version: 3.12.3 - category: main dependencies: python: '>=3.9' @@ -12894,83 +14600,85 @@ package: version: 2.4.0 - category: main dependencies: - python: '>=3.8' + python: '>=3.9' hash: - md5: 3f144b2c34f8cb5a9abd9ed23a39c561 - sha256: 54ae221033db8fbcd4998ccb07f3c3828b4d77e73b0c72b18c1d6a507059059c + md5: fb32097c717486aa34b38a9db57eb49e + sha256: 7518506cce9a736042132f307b3f4abce63bf076f5fb07c1f4e506c0b214295a manager: conda name: soupsieve optional: false platform: linux-64 - url: https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.5-pyhd8ed1ab_1.conda - version: '2.5' + url: https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.7-pyhd8ed1ab_0.conda + version: '2.7' - category: main dependencies: - python: '>=3.8' + python: '>=3.9' hash: - md5: 3f144b2c34f8cb5a9abd9ed23a39c561 - sha256: 54ae221033db8fbcd4998ccb07f3c3828b4d77e73b0c72b18c1d6a507059059c + md5: fb32097c717486aa34b38a9db57eb49e + sha256: 7518506cce9a736042132f307b3f4abce63bf076f5fb07c1f4e506c0b214295a manager: conda name: soupsieve optional: false platform: osx-64 - url: https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.5-pyhd8ed1ab_1.conda - version: '2.5' + url: https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.7-pyhd8ed1ab_0.conda + version: '2.7' - category: main dependencies: - python: '>=3.8' + python: '>=3.9' hash: - md5: 3f144b2c34f8cb5a9abd9ed23a39c561 - sha256: 54ae221033db8fbcd4998ccb07f3c3828b4d77e73b0c72b18c1d6a507059059c + md5: fb32097c717486aa34b38a9db57eb49e + sha256: 7518506cce9a736042132f307b3f4abce63bf076f5fb07c1f4e506c0b214295a manager: conda name: soupsieve optional: false platform: osx-arm64 - url: https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.5-pyhd8ed1ab_1.conda - version: '2.5' + url: https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.7-pyhd8ed1ab_0.conda + version: '2.7' - category: main dependencies: - __glibc: '>=2.17,<3.0.a0' - fmt: '>=11.0.2,<12.0a0' - libgcc: '>=13' - libstdcxx: '>=13' + anyio: 
'>=3.6.2,<5' + python: '' + typing_extensions: '>=3.10.0' hash: - md5: 3666458a0c6a5c1ab099e0813ea2dc86 - sha256: 6a8fbb341a43c58d46cb57c6146f1443084be58dfa16583a53f87dbcbb8acea2 + md5: 36ec80c2b37e52760ab41be7c2bd1fd3 + sha256: d41b9b2719a2a0176930df21d7fec7b758058e7fafd53dc900b5706cd627fa3a manager: conda - name: spdlog + name: starlette optional: false platform: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/spdlog-1.15.1-hb29a8c4_0.conda - version: 1.15.1 + url: + https://conda.anaconda.org/conda-forge/noarch/starlette-0.46.2-pyh81abbef_0.conda + version: 0.46.2 - category: main dependencies: - __osx: '>=10.13' - fmt: '>=11.0.2,<12.0a0' - libcxx: '>=18' + anyio: '>=3.6.2,<5' + python: '>=3.9' + typing_extensions: '>=3.10.0' hash: - md5: 869e2924e6f2f44803b550b7059e7395 - sha256: ebebbfdcd5eb360c974af934c5d94c7237b57a9139343cbc8d0b2d9f7903a5cd + md5: 36ec80c2b37e52760ab41be7c2bd1fd3 + sha256: d41b9b2719a2a0176930df21d7fec7b758058e7fafd53dc900b5706cd627fa3a manager: conda - name: spdlog + name: starlette optional: false platform: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/spdlog-1.15.1-h65da0ee_0.conda - version: 1.15.1 + url: + https://conda.anaconda.org/conda-forge/noarch/starlette-0.46.2-pyh81abbef_0.conda + version: 0.46.2 - category: main dependencies: - __osx: '>=11.0' - fmt: '>=11.0.2,<12.0a0' - libcxx: '>=18' + anyio: '>=3.6.2,<5' + python: '>=3.9' + typing_extensions: '>=3.10.0' hash: - md5: 95277d613352000a8cd51533076bf1db - sha256: 076353705f6d9b530b24a795dd5cf3687bdd07e3bd63fff337dc3073e9bd7364 + md5: 36ec80c2b37e52760ab41be7c2bd1fd3 + sha256: d41b9b2719a2a0176930df21d7fec7b758058e7fafd53dc900b5706cd627fa3a manager: conda - name: spdlog + name: starlette optional: false platform: osx-arm64 - url: https://conda.anaconda.org/conda-forge/osx-arm64/spdlog-1.15.1-hed1c2b2_0.conda - version: 1.15.1 + url: + https://conda.anaconda.org/conda-forge/noarch/starlette-0.46.2-pyh81abbef_0.conda + version: 0.46.2 - category: main dependencies: python: '>=3.9' @@ -13169,38 +14877,38 @@ package: dependencies: python: '>=3.9' hash: - md5: 60ce69f73f3e75b21f1c27b1b471320c - sha256: 6869cd2e043426d30c84d0ff6619f176b39728f9c75dc95dca89db994548bb8a + md5: a15c62b8a306b8978f094f76da2f903f + sha256: a83c83f5e622a2f34fb1d179c55c3ff912429cd0a54f9f3190ae44a0fdba2ad2 manager: conda name: tblib optional: false platform: linux-64 - url: https://conda.anaconda.org/conda-forge/noarch/tblib-3.0.0-pyhd8ed1ab_1.conda - version: 3.0.0 + url: https://conda.anaconda.org/conda-forge/noarch/tblib-3.1.0-pyhd8ed1ab_0.conda + version: 3.1.0 - category: main dependencies: python: '>=3.9' hash: - md5: 60ce69f73f3e75b21f1c27b1b471320c - sha256: 6869cd2e043426d30c84d0ff6619f176b39728f9c75dc95dca89db994548bb8a + md5: a15c62b8a306b8978f094f76da2f903f + sha256: a83c83f5e622a2f34fb1d179c55c3ff912429cd0a54f9f3190ae44a0fdba2ad2 manager: conda name: tblib optional: false platform: osx-64 - url: https://conda.anaconda.org/conda-forge/noarch/tblib-3.0.0-pyhd8ed1ab_1.conda - version: 3.0.0 + url: https://conda.anaconda.org/conda-forge/noarch/tblib-3.1.0-pyhd8ed1ab_0.conda + version: 3.1.0 - category: main dependencies: python: '>=3.9' hash: - md5: 60ce69f73f3e75b21f1c27b1b471320c - sha256: 6869cd2e043426d30c84d0ff6619f176b39728f9c75dc95dca89db994548bb8a + md5: a15c62b8a306b8978f094f76da2f903f + sha256: a83c83f5e622a2f34fb1d179c55c3ff912429cd0a54f9f3190ae44a0fdba2ad2 manager: conda name: tblib optional: false platform: osx-arm64 - url: 
https://conda.anaconda.org/conda-forge/noarch/tblib-3.0.0-pyhd8ed1ab_1.conda - version: 3.0.0 + url: https://conda.anaconda.org/conda-forge/noarch/tblib-3.1.0-pyhd8ed1ab_0.conda + version: 3.1.0 - category: main dependencies: libgcc-ng: '>=12' @@ -13385,40 +15093,40 @@ package: version: 0.13.2 - category: main dependencies: - python: '>=3.7' + python: '>=3.9' hash: - md5: 2fcb582444635e2c402e8569bb94e039 - sha256: 22b0a9790317526e08609d5dfdd828210ae89e6d444a9e954855fc29012e90c6 + md5: 40d0ed782a8aaa16ef248e68c06c168d + sha256: eda38f423c33c2eaeca49ed946a8d3bf466cc3364970e083a65eb2fd85258d87 manager: conda name: toolz optional: false platform: linux-64 - url: https://conda.anaconda.org/conda-forge/noarch/toolz-0.12.1-pyhd8ed1ab_0.conda - version: 0.12.1 + url: https://conda.anaconda.org/conda-forge/noarch/toolz-1.0.0-pyhd8ed1ab_1.conda + version: 1.0.0 - category: main dependencies: - python: '>=3.7' + python: '>=3.9' hash: - md5: 2fcb582444635e2c402e8569bb94e039 - sha256: 22b0a9790317526e08609d5dfdd828210ae89e6d444a9e954855fc29012e90c6 + md5: 40d0ed782a8aaa16ef248e68c06c168d + sha256: eda38f423c33c2eaeca49ed946a8d3bf466cc3364970e083a65eb2fd85258d87 manager: conda name: toolz optional: false platform: osx-64 - url: https://conda.anaconda.org/conda-forge/noarch/toolz-0.12.1-pyhd8ed1ab_0.conda - version: 0.12.1 + url: https://conda.anaconda.org/conda-forge/noarch/toolz-1.0.0-pyhd8ed1ab_1.conda + version: 1.0.0 - category: main dependencies: - python: '>=3.7' + python: '>=3.9' hash: - md5: 2fcb582444635e2c402e8569bb94e039 - sha256: 22b0a9790317526e08609d5dfdd828210ae89e6d444a9e954855fc29012e90c6 + md5: 40d0ed782a8aaa16ef248e68c06c168d + sha256: eda38f423c33c2eaeca49ed946a8d3bf466cc3364970e083a65eb2fd85258d87 manager: conda name: toolz optional: false platform: osx-arm64 - url: https://conda.anaconda.org/conda-forge/noarch/toolz-0.12.1-pyhd8ed1ab_0.conda - version: 0.12.1 + url: https://conda.anaconda.org/conda-forge/noarch/toolz-1.0.0-pyhd8ed1ab_1.conda + version: 1.0.0 - category: main dependencies: __glibc: '>=2.17,<3.0.a0' @@ -13426,45 +15134,44 @@ package: python: '>=3.11,<3.12.0a0' python_abi: 3.11.* hash: - md5: df3aee9c3e44489257a840b8354e77b9 - sha256: afa3489113154b5cb0724b0bf120b62df91f426dabfe5d02f2ba09e90d346b28 + md5: e6aa9d8ca506982ed2a059b3c6057fc3 + sha256: d297d5c0cb91627b17d49b4c633d1bb923b8e76a8796edcc6176b0d4379508db manager: conda name: tornado optional: false platform: linux-64 url: - https://conda.anaconda.org/conda-forge/linux-64/tornado-6.4.2-py311h9ecbd09_0.conda - version: 6.4.2 + https://conda.anaconda.org/conda-forge/linux-64/tornado-6.5-py311h9ecbd09_0.conda + version: '6.5' - category: main dependencies: __osx: '>=10.13' python: '>=3.11,<3.12.0a0' python_abi: 3.11.* hash: - md5: 073c42a2b6b7e4219325b1f5983c7579 - sha256: 5273ba307489570df61d82a6b3365b2a27862765099cf4ef3830569fa4a30f27 + md5: 5776f9693ac1c592a26a2b8fb7220e4d + sha256: 5923cc6ffbef6da367c30d63d82e6f751b9503d5eef6d6488e92eab6d907a476 manager: conda name: tornado optional: false platform: osx-64 - url: - https://conda.anaconda.org/conda-forge/osx-64/tornado-6.4.2-py311h4d7f069_0.conda - version: 6.4.2 + url: https://conda.anaconda.org/conda-forge/osx-64/tornado-6.5-py311h4d7f069_0.conda + version: '6.5' - category: main dependencies: __osx: '>=11.0' python: '>=3.11,<3.12.0a0' python_abi: 3.11.* hash: - md5: 183b74c576dc7f920dae168997dbd1dd - sha256: 80b79a7d4ed8e16019b8c634cca66935d18fc98be358c76a6ead8c611306ee14 + md5: ff86e96e324c0e08a55aee1111d93582 + sha256: 
675c378b691d33a09f70ff684493c2395821c0d2934b37bea8ad9f260461a048 manager: conda name: tornado optional: false platform: osx-arm64 url: - https://conda.anaconda.org/conda-forge/osx-arm64/tornado-6.4.2-py311h917b07b_0.conda - version: 6.4.2 + https://conda.anaconda.org/conda-forge/osx-arm64/tornado-6.5-py311h917b07b_0.conda + version: '6.5' - category: main dependencies: colorama: '' @@ -13508,41 +15215,41 @@ package: dependencies: python: '>=3.9' hash: - md5: 4601bd3e415bf63bca30d00353a9440d - sha256: 8b6fe745925e55636a5f8bd1affde47fc385665486e3cf9ba12a73bd4cdf2df1 + md5: 764db08a8d868de9e377d88277c75d83 + sha256: 455b7b0dc0cf7e4a6fcc41455b4fd7f646b3b842e6dc0d894438366827d7d9b2 manager: conda name: trove-classifiers optional: false platform: linux-64 url: - https://conda.anaconda.org/conda-forge/noarch/trove-classifiers-2025.3.19.19-pyhd8ed1ab_0.conda - version: 2025.3.19.19 + https://conda.anaconda.org/conda-forge/noarch/trove-classifiers-2025.5.9.12-pyhd8ed1ab_0.conda + version: 2025.5.9.12 - category: main dependencies: python: '>=3.9' hash: - md5: 4601bd3e415bf63bca30d00353a9440d - sha256: 8b6fe745925e55636a5f8bd1affde47fc385665486e3cf9ba12a73bd4cdf2df1 + md5: 764db08a8d868de9e377d88277c75d83 + sha256: 455b7b0dc0cf7e4a6fcc41455b4fd7f646b3b842e6dc0d894438366827d7d9b2 manager: conda name: trove-classifiers optional: false platform: osx-64 url: - https://conda.anaconda.org/conda-forge/noarch/trove-classifiers-2025.3.19.19-pyhd8ed1ab_0.conda - version: 2025.3.19.19 + https://conda.anaconda.org/conda-forge/noarch/trove-classifiers-2025.5.9.12-pyhd8ed1ab_0.conda + version: 2025.5.9.12 - category: main dependencies: python: '>=3.9' hash: - md5: 4601bd3e415bf63bca30d00353a9440d - sha256: 8b6fe745925e55636a5f8bd1affde47fc385665486e3cf9ba12a73bd4cdf2df1 + md5: 764db08a8d868de9e377d88277c75d83 + sha256: 455b7b0dc0cf7e4a6fcc41455b4fd7f646b3b842e6dc0d894438366827d7d9b2 manager: conda name: trove-classifiers optional: false platform: osx-arm64 url: - https://conda.anaconda.org/conda-forge/noarch/trove-classifiers-2025.3.19.19-pyhd8ed1ab_0.conda - version: 2025.3.19.19 + https://conda.anaconda.org/conda-forge/noarch/trove-classifiers-2025.5.9.12-pyhd8ed1ab_0.conda + version: 2025.5.9.12 - category: main dependencies: python: '' @@ -13585,210 +15292,252 @@ package: - category: main dependencies: python: '' - typer-slim-standard: ==0.15.2 + typer-slim-standard: ==0.15.3 hash: - md5: 1152565b06e3dc27794c3c11f1050005 - sha256: fa6eeb42e3bddff74126dd61b01b21a3f4f4791368e93bc5a5775563542b2d4e + md5: b4bed8eb8dd4fe076f436e5506d31673 + sha256: 8cd849ceb5e2f50481b1f30f083ee134fac706a56d7879c61248f0aadad4ea5b manager: conda name: typer optional: false platform: linux-64 - url: https://conda.anaconda.org/conda-forge/noarch/typer-0.15.2-pyhff008b6_0.conda - version: 0.15.2 + url: https://conda.anaconda.org/conda-forge/noarch/typer-0.15.3-pyhf21524f_0.conda + version: 0.15.3 - category: main dependencies: python: '>=3.9' - typer-slim-standard: ==0.15.2 + typer-slim-standard: ==0.15.3 hash: - md5: 1152565b06e3dc27794c3c11f1050005 - sha256: fa6eeb42e3bddff74126dd61b01b21a3f4f4791368e93bc5a5775563542b2d4e + md5: b4bed8eb8dd4fe076f436e5506d31673 + sha256: 8cd849ceb5e2f50481b1f30f083ee134fac706a56d7879c61248f0aadad4ea5b manager: conda name: typer optional: false platform: osx-64 - url: https://conda.anaconda.org/conda-forge/noarch/typer-0.15.2-pyhff008b6_0.conda - version: 0.15.2 + url: https://conda.anaconda.org/conda-forge/noarch/typer-0.15.3-pyhf21524f_0.conda + version: 0.15.3 - category: main dependencies: python: 
'>=3.9' - typer-slim-standard: ==0.15.2 + typer-slim-standard: ==0.15.3 hash: - md5: 1152565b06e3dc27794c3c11f1050005 - sha256: fa6eeb42e3bddff74126dd61b01b21a3f4f4791368e93bc5a5775563542b2d4e + md5: b4bed8eb8dd4fe076f436e5506d31673 + sha256: 8cd849ceb5e2f50481b1f30f083ee134fac706a56d7879c61248f0aadad4ea5b manager: conda name: typer optional: false platform: osx-arm64 - url: https://conda.anaconda.org/conda-forge/noarch/typer-0.15.2-pyhff008b6_0.conda - version: 0.15.2 + url: https://conda.anaconda.org/conda-forge/noarch/typer-0.15.3-pyhf21524f_0.conda + version: 0.15.3 - category: main dependencies: click: '>=8.0.0' python: '' typing_extensions: '>=3.7.4.3' hash: - md5: 7c8d9609e2cfe08dd7672e10fe7e7de9 - sha256: c094713560bfacab0539c863010a5223171d9980cbd419cc799e474ae15aca08 + md5: 20c0f2ae932004d7118c172eeb035cea + sha256: 1768d1d9914d4237b0a1ae8bcb30dace44ac80b9ab1516a2d429d0b27ad70ab9 manager: conda name: typer-slim optional: false platform: linux-64 url: - https://conda.anaconda.org/conda-forge/noarch/typer-slim-0.15.2-pyh29332c3_0.conda - version: 0.15.2 + https://conda.anaconda.org/conda-forge/noarch/typer-slim-0.15.3-pyh29332c3_0.conda + version: 0.15.3 - category: main dependencies: click: '>=8.0.0' python: '>=3.9' typing_extensions: '>=3.7.4.3' hash: - md5: 7c8d9609e2cfe08dd7672e10fe7e7de9 - sha256: c094713560bfacab0539c863010a5223171d9980cbd419cc799e474ae15aca08 + md5: 20c0f2ae932004d7118c172eeb035cea + sha256: 1768d1d9914d4237b0a1ae8bcb30dace44ac80b9ab1516a2d429d0b27ad70ab9 manager: conda name: typer-slim optional: false platform: osx-64 url: - https://conda.anaconda.org/conda-forge/noarch/typer-slim-0.15.2-pyh29332c3_0.conda - version: 0.15.2 + https://conda.anaconda.org/conda-forge/noarch/typer-slim-0.15.3-pyh29332c3_0.conda + version: 0.15.3 - category: main dependencies: click: '>=8.0.0' python: '>=3.9' typing_extensions: '>=3.7.4.3' hash: - md5: 7c8d9609e2cfe08dd7672e10fe7e7de9 - sha256: c094713560bfacab0539c863010a5223171d9980cbd419cc799e474ae15aca08 + md5: 20c0f2ae932004d7118c172eeb035cea + sha256: 1768d1d9914d4237b0a1ae8bcb30dace44ac80b9ab1516a2d429d0b27ad70ab9 manager: conda name: typer-slim optional: false platform: osx-arm64 url: - https://conda.anaconda.org/conda-forge/noarch/typer-slim-0.15.2-pyh29332c3_0.conda - version: 0.15.2 + https://conda.anaconda.org/conda-forge/noarch/typer-slim-0.15.3-pyh29332c3_0.conda + version: 0.15.3 - category: main dependencies: rich: '' shellingham: '' - typer-slim: ==0.15.2 + typer-slim: ==0.15.3 hash: - md5: ea164fc4e03f61f7ff3c1166001969af - sha256: 79b6b34e90e50e041908939d53053f69285714b0082a0370fba6ab3b38315c8d + md5: 120216d3a2e51dfbb87bbba173ebf210 + sha256: 72f77e8e61b28058562f2782cf32ff84f14f6c11c6cea7a3fe2839d34654ea45 manager: conda name: typer-slim-standard optional: false platform: linux-64 url: - https://conda.anaconda.org/conda-forge/noarch/typer-slim-standard-0.15.2-h801b22e_0.conda - version: 0.15.2 + https://conda.anaconda.org/conda-forge/noarch/typer-slim-standard-0.15.3-h1a15894_0.conda + version: 0.15.3 - category: main dependencies: rich: '' shellingham: '' - typer-slim: ==0.15.2 + typer-slim: ==0.15.3 hash: - md5: ea164fc4e03f61f7ff3c1166001969af - sha256: 79b6b34e90e50e041908939d53053f69285714b0082a0370fba6ab3b38315c8d + md5: 120216d3a2e51dfbb87bbba173ebf210 + sha256: 72f77e8e61b28058562f2782cf32ff84f14f6c11c6cea7a3fe2839d34654ea45 manager: conda name: typer-slim-standard optional: false platform: osx-64 url: - https://conda.anaconda.org/conda-forge/noarch/typer-slim-standard-0.15.2-h801b22e_0.conda - 
version: 0.15.2 + https://conda.anaconda.org/conda-forge/noarch/typer-slim-standard-0.15.3-h1a15894_0.conda + version: 0.15.3 - category: main dependencies: rich: '' shellingham: '' - typer-slim: ==0.15.2 + typer-slim: ==0.15.3 hash: - md5: ea164fc4e03f61f7ff3c1166001969af - sha256: 79b6b34e90e50e041908939d53053f69285714b0082a0370fba6ab3b38315c8d + md5: 120216d3a2e51dfbb87bbba173ebf210 + sha256: 72f77e8e61b28058562f2782cf32ff84f14f6c11c6cea7a3fe2839d34654ea45 manager: conda name: typer-slim-standard optional: false platform: osx-arm64 url: - https://conda.anaconda.org/conda-forge/noarch/typer-slim-standard-0.15.2-h801b22e_0.conda - version: 0.15.2 + https://conda.anaconda.org/conda-forge/noarch/typer-slim-standard-0.15.3-h1a15894_0.conda + version: 0.15.3 - category: main dependencies: - typing_extensions: 4.12.2 + typing_extensions: ==4.13.2 hash: - md5: b6a408c64b78ec7b779a3e5c7a902433 - sha256: c8e9c1c467b5f960b627d7adc1c65fece8e929a3de89967e91ef0f726422fd32 + md5: 568ed1300869dca0ba09fb750cda5dbb + sha256: 4865fce0897d3cb0ffc8998219157a8325f6011c136e6fd740a9a6b169419296 manager: conda name: typing-extensions optional: false platform: linux-64 url: - https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.12.2-hd8ed1ab_1.conda - version: 4.12.2 + https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.13.2-h0e9735f_0.conda + version: 4.13.2 - category: main dependencies: - typing_extensions: 4.12.2 + typing_extensions: ==4.13.2 hash: - md5: b6a408c64b78ec7b779a3e5c7a902433 - sha256: c8e9c1c467b5f960b627d7adc1c65fece8e929a3de89967e91ef0f726422fd32 + md5: 568ed1300869dca0ba09fb750cda5dbb + sha256: 4865fce0897d3cb0ffc8998219157a8325f6011c136e6fd740a9a6b169419296 manager: conda name: typing-extensions optional: false platform: osx-64 url: - https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.12.2-hd8ed1ab_1.conda - version: 4.12.2 + https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.13.2-h0e9735f_0.conda + version: 4.13.2 - category: main dependencies: - typing_extensions: 4.12.2 + typing_extensions: ==4.13.2 hash: - md5: b6a408c64b78ec7b779a3e5c7a902433 - sha256: c8e9c1c467b5f960b627d7adc1c65fece8e929a3de89967e91ef0f726422fd32 + md5: 568ed1300869dca0ba09fb750cda5dbb + sha256: 4865fce0897d3cb0ffc8998219157a8325f6011c136e6fd740a9a6b169419296 manager: conda name: typing-extensions optional: false platform: osx-arm64 url: - https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.12.2-hd8ed1ab_1.conda - version: 4.12.2 + https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.13.2-h0e9735f_0.conda + version: 4.13.2 + - category: main + dependencies: + python: '>=3.9' + typing_extensions: '>=4.12.0' + hash: + md5: e0c3cd765dc15751ee2f0b03cd015712 + sha256: 4259a7502aea516c762ca8f3b8291b0d4114e094bdb3baae3171ccc0900e722f + manager: conda + name: typing-inspection + optional: false + platform: linux-64 + url: + https://conda.anaconda.org/conda-forge/noarch/typing-inspection-0.4.1-pyhd8ed1ab_0.conda + version: 0.4.1 + - category: main + dependencies: + python: '>=3.9' + typing_extensions: '>=4.12.0' + hash: + md5: e0c3cd765dc15751ee2f0b03cd015712 + sha256: 4259a7502aea516c762ca8f3b8291b0d4114e094bdb3baae3171ccc0900e722f + manager: conda + name: typing-inspection + optional: false + platform: osx-64 + url: + https://conda.anaconda.org/conda-forge/noarch/typing-inspection-0.4.1-pyhd8ed1ab_0.conda + version: 0.4.1 - category: main dependencies: python: '>=3.9' + typing_extensions: '>=4.12.0' + hash: + md5: 
e0c3cd765dc15751ee2f0b03cd015712 + sha256: 4259a7502aea516c762ca8f3b8291b0d4114e094bdb3baae3171ccc0900e722f + manager: conda + name: typing-inspection + optional: false + platform: osx-arm64 + url: + https://conda.anaconda.org/conda-forge/noarch/typing-inspection-0.4.1-pyhd8ed1ab_0.conda + version: 0.4.1 + - category: main + dependencies: + python: '' hash: - md5: d17f13df8b65464ca316cbc000a3cb64 - sha256: 337be7af5af8b2817f115b3b68870208b30c31d3439bec07bfb2d8f4823e3568 + md5: 83fc6ae00127671e301c9f44254c31b8 + sha256: a8aaf351e6461de0d5d47e4911257e25eec2fa409d71f3b643bb2f748bde1c08 manager: conda name: typing_extensions optional: false platform: linux-64 url: - https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.12.2-pyha770c72_1.conda - version: 4.12.2 + https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.13.2-pyh29332c3_0.conda + version: 4.13.2 - category: main dependencies: python: '>=3.9' hash: - md5: d17f13df8b65464ca316cbc000a3cb64 - sha256: 337be7af5af8b2817f115b3b68870208b30c31d3439bec07bfb2d8f4823e3568 + md5: 83fc6ae00127671e301c9f44254c31b8 + sha256: a8aaf351e6461de0d5d47e4911257e25eec2fa409d71f3b643bb2f748bde1c08 manager: conda name: typing_extensions optional: false platform: osx-64 url: - https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.12.2-pyha770c72_1.conda - version: 4.12.2 + https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.13.2-pyh29332c3_0.conda + version: 4.13.2 - category: main dependencies: python: '>=3.9' hash: - md5: d17f13df8b65464ca316cbc000a3cb64 - sha256: 337be7af5af8b2817f115b3b68870208b30c31d3439bec07bfb2d8f4823e3568 + md5: 83fc6ae00127671e301c9f44254c31b8 + sha256: a8aaf351e6461de0d5d47e4911257e25eec2fa409d71f3b643bb2f748bde1c08 manager: conda name: typing_extensions optional: false platform: osx-arm64 url: - https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.12.2-pyha770c72_1.conda - version: 4.12.2 + https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.13.2-pyh29332c3_0.conda + version: 4.13.2 - category: main dependencies: {} hash: @@ -13916,45 +15665,51 @@ package: - category: main dependencies: brotli-python: '>=1.0.9' + h2: '>=4,<5' pysocks: '>=1.5.6,<2.0,!=1.5.7' - python: '>=3.7' + python: '>=3.9' + zstandard: '>=0.18.0' hash: - md5: 6bb37c314b3cc1515dcf086ffe01c46e - sha256: 543ebab5241418a4e0d4d9e356ef13e4361504810a067a01481660bb35eb5643 + md5: c1e349028e0052c4eea844e94f773065 + sha256: a25403b76f7f03ca1a906e1ef0f88521edded991b9897e7fed56a3e334b3db8c manager: conda name: urllib3 optional: false platform: linux-64 - url: https://conda.anaconda.org/conda-forge/noarch/urllib3-1.26.19-pyhd8ed1ab_0.conda - version: 1.26.19 + url: https://conda.anaconda.org/conda-forge/noarch/urllib3-2.4.0-pyhd8ed1ab_0.conda + version: 2.4.0 - category: main dependencies: brotli-python: '>=1.0.9' + h2: '>=4,<5' pysocks: '>=1.5.6,<2.0,!=1.5.7' - python: '>=3.7' + python: '>=3.9' + zstandard: '>=0.18.0' hash: - md5: 6bb37c314b3cc1515dcf086ffe01c46e - sha256: 543ebab5241418a4e0d4d9e356ef13e4361504810a067a01481660bb35eb5643 + md5: c1e349028e0052c4eea844e94f773065 + sha256: a25403b76f7f03ca1a906e1ef0f88521edded991b9897e7fed56a3e334b3db8c manager: conda name: urllib3 optional: false platform: osx-64 - url: https://conda.anaconda.org/conda-forge/noarch/urllib3-1.26.19-pyhd8ed1ab_0.conda - version: 1.26.19 + url: https://conda.anaconda.org/conda-forge/noarch/urllib3-2.4.0-pyhd8ed1ab_0.conda + version: 2.4.0 - category: main dependencies: brotli-python: '>=1.0.9' + h2: '>=4,<5' pysocks: 
'>=1.5.6,<2.0,!=1.5.7' - python: '>=3.7' + python: '>=3.9' + zstandard: '>=0.18.0' hash: - md5: 6bb37c314b3cc1515dcf086ffe01c46e - sha256: 543ebab5241418a4e0d4d9e356ef13e4361504810a067a01481660bb35eb5643 + md5: c1e349028e0052c4eea844e94f773065 + sha256: a25403b76f7f03ca1a906e1ef0f88521edded991b9897e7fed56a3e334b3db8c manager: conda name: urllib3 optional: false platform: osx-arm64 - url: https://conda.anaconda.org/conda-forge/noarch/urllib3-1.26.19-pyhd8ed1ab_0.conda - version: 1.26.19 + url: https://conda.anaconda.org/conda-forge/noarch/urllib3-2.4.0-pyhd8ed1ab_0.conda + version: 2.4.0 - category: main dependencies: click: '' @@ -14000,40 +15755,197 @@ package: libgcc: '>=13' libstdcxx: '>=13' hash: - md5: d819b241035dc42d91c4672a82daea24 - sha256: 3664a844eb11b4ecadb46c129eca1ce31f77448d15b371dac071c4db50060689 + md5: ca22b78da353c6ffb3cb6ebfcd1f43b9 + sha256: 61808677fe2a0013ccb8a55516da29370d8ea20d7bbcfe31573d1804d1834298 manager: conda name: uv optional: false platform: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/uv-0.6.10-h0f3a69f_0.conda - version: 0.6.10 + url: https://conda.anaconda.org/conda-forge/linux-64/uv-0.7.7-h2f11bb8_0.conda + version: 0.7.7 - category: main dependencies: __osx: '>=10.13' libcxx: '>=18' hash: - md5: c6045cbc51fccfd84dae2439776b6dfe - sha256: 604c987ccde8b4009abfbd2146309e8a9ce7510d393ce4113dffe2b6963b4c35 + md5: 525d1f5b77edac542ee96fec3914af15 + sha256: 935b5f3d3558cc1a132c1d60bf8335cd076ffefa89d8bd9562d39cffc22910a1 manager: conda name: uv optional: false platform: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/uv-0.6.10-h8de1528_0.conda - version: 0.6.10 + url: https://conda.anaconda.org/conda-forge/osx-64/uv-0.7.7-h6914f18_0.conda + version: 0.7.7 - category: main dependencies: __osx: '>=11.0' libcxx: '>=18' hash: - md5: dfba53db3db2cec15df8ade779778880 - sha256: 2c7f724345cf01ce7ff76a9e49662bc60ce3db9ebee8f7d7c44e621ea4bfc718 + md5: d0c23950777c302192c325a3b1d5d91d + sha256: e5d5cc047f4d22d759a6d3249d5496f95643f05f66fe9b045f3d795fde37d499 manager: conda name: uv optional: false platform: osx-arm64 - url: https://conda.anaconda.org/conda-forge/osx-arm64/uv-0.6.10-h668ec48_0.conda - version: 0.6.10 + url: https://conda.anaconda.org/conda-forge/osx-arm64/uv-0.7.7-hb4c02be_0.conda + version: 0.7.7 + - category: main + dependencies: + __unix: '' + click: '>=7.0' + h11: '>=0.8' + python: '>=3.9' + typing_extensions: '>=4.0' + hash: + md5: 7e9f164470d693a5d2537c6b2ce1d9ea + sha256: d6c504920400354a89e597c5d355288e77481d638cca0489fea3530167895f15 + manager: conda + name: uvicorn + optional: false + platform: linux-64 + url: https://conda.anaconda.org/conda-forge/noarch/uvicorn-0.34.2-pyh31011fe_0.conda + version: 0.34.2 + - category: main + dependencies: + __unix: '' + click: '>=7.0' + h11: '>=0.8' + python: '>=3.9' + typing_extensions: '>=4.0' + hash: + md5: 7e9f164470d693a5d2537c6b2ce1d9ea + sha256: d6c504920400354a89e597c5d355288e77481d638cca0489fea3530167895f15 + manager: conda + name: uvicorn + optional: false + platform: osx-64 + url: https://conda.anaconda.org/conda-forge/noarch/uvicorn-0.34.2-pyh31011fe_0.conda + version: 0.34.2 + - category: main + dependencies: + __unix: '' + click: '>=7.0' + h11: '>=0.8' + python: '>=3.9' + typing_extensions: '>=4.0' + hash: + md5: 7e9f164470d693a5d2537c6b2ce1d9ea + sha256: d6c504920400354a89e597c5d355288e77481d638cca0489fea3530167895f15 + manager: conda + name: uvicorn + optional: false + platform: osx-arm64 + url: 
https://conda.anaconda.org/conda-forge/noarch/uvicorn-0.34.2-pyh31011fe_0.conda + version: 0.34.2 + - category: main + dependencies: + __unix: '' + httptools: '>=0.6.3' + python-dotenv: '>=0.13' + pyyaml: '>=5.1' + uvicorn: 0.34.2 + uvloop: '>=0.14.0,!=0.15.0,!=0.15.1' + watchfiles: '>=0.13' + websockets: '>=10.4' + hash: + md5: 62676324fa57eb76b542a6a2e85d35e2 + sha256: c323cc4986f4fea91dedbee68dce8071cade48be2e71cf9c575faf3f3ccc42a9 + manager: conda + name: uvicorn-standard + optional: false + platform: linux-64 + url: + https://conda.anaconda.org/conda-forge/noarch/uvicorn-standard-0.34.2-h31011fe_0.conda + version: 0.34.2 + - category: main + dependencies: + __unix: '' + httptools: '>=0.6.3' + python-dotenv: '>=0.13' + pyyaml: '>=5.1' + uvicorn: 0.34.2 + uvloop: '>=0.14.0,!=0.15.0,!=0.15.1' + watchfiles: '>=0.13' + websockets: '>=10.4' + hash: + md5: 62676324fa57eb76b542a6a2e85d35e2 + sha256: c323cc4986f4fea91dedbee68dce8071cade48be2e71cf9c575faf3f3ccc42a9 + manager: conda + name: uvicorn-standard + optional: false + platform: osx-64 + url: + https://conda.anaconda.org/conda-forge/noarch/uvicorn-standard-0.34.2-h31011fe_0.conda + version: 0.34.2 + - category: main + dependencies: + __unix: '' + httptools: '>=0.6.3' + python-dotenv: '>=0.13' + pyyaml: '>=5.1' + uvicorn: 0.34.2 + uvloop: '>=0.14.0,!=0.15.0,!=0.15.1' + watchfiles: '>=0.13' + websockets: '>=10.4' + hash: + md5: 62676324fa57eb76b542a6a2e85d35e2 + sha256: c323cc4986f4fea91dedbee68dce8071cade48be2e71cf9c575faf3f3ccc42a9 + manager: conda + name: uvicorn-standard + optional: false + platform: osx-arm64 + url: + https://conda.anaconda.org/conda-forge/noarch/uvicorn-standard-0.34.2-h31011fe_0.conda + version: 0.34.2 + - category: main + dependencies: + __glibc: '>=2.17,<3.0.a0' + libgcc: '>=13' + libuv: '>=1.49.2,<2.0a0' + python: '>=3.11,<3.12.0a0' + python_abi: 3.11.* + hash: + md5: 66890e34ed6a9bd84f1c189043a928f8 + sha256: 9421eeb1e15b99985bb15dec9cf0f337d332106cea584a147449c91c389a4418 + manager: conda + name: uvloop + optional: false + platform: linux-64 + url: + https://conda.anaconda.org/conda-forge/linux-64/uvloop-0.21.0-py311h9ecbd09_1.conda + version: 0.21.0 + - category: main + dependencies: + __osx: '>=10.13' + libuv: '>=1.49.2,<2.0a0' + python: '>=3.11,<3.12.0a0' + python_abi: 3.11.* + hash: + md5: fdf82f7e7a4561819bcbfca2c2e7031c + sha256: cc0fe2730e413fc449cb3d0b6b48576dd53283b099f182b844f34f97b459ac59 + manager: conda + name: uvloop + optional: false + platform: osx-64 + url: + https://conda.anaconda.org/conda-forge/osx-64/uvloop-0.21.0-py311h1314207_1.conda + version: 0.21.0 + - category: main + dependencies: + __osx: '>=11.0' + libuv: '>=1.49.2,<2.0a0' + python: '>=3.11,<3.12.0a0' + python_abi: 3.11.* + hash: + md5: bc9ca85e86e305b58432c4791b732ae6 + sha256: f42e2ca33beedef252d234d3aac7642432bf8545a6d37c11e58a69f6aee36898 + manager: conda + name: uvloop + optional: false + platform: osx-arm64 + url: + https://conda.anaconda.org/conda-forge/osx-arm64/uvloop-0.21.0-py311hae2e1ce_1.conda + version: 0.21.0 - category: main dependencies: distlib: '>=0.3.7,<1' @@ -14041,15 +15953,15 @@ package: platformdirs: '>=3.9.1,<5' python: '>=3.9' hash: - md5: 87a2061465e55be9a997dd8cf8b5a578 - sha256: f7b2cd8ee05769e57dab1f2e2206360cb03d15d4290ddb30442711700c430ba6 + md5: c0600c1b374efa7a1ff444befee108ca + sha256: 763dc774200b2eebdf5437b112834c5455a1dd1c9b605340696950277ff36729 manager: conda name: virtualenv optional: false platform: linux-64 url: - 
https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.29.3-pyhd8ed1ab_0.conda - version: 20.29.3 + https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.31.2-pyhd8ed1ab_0.conda + version: 20.31.2 - category: main dependencies: distlib: '>=0.3.7,<1' @@ -14057,15 +15969,15 @@ package: platformdirs: '>=3.9.1,<5' python: '>=3.9' hash: - md5: 87a2061465e55be9a997dd8cf8b5a578 - sha256: f7b2cd8ee05769e57dab1f2e2206360cb03d15d4290ddb30442711700c430ba6 + md5: c0600c1b374efa7a1ff444befee108ca + sha256: 763dc774200b2eebdf5437b112834c5455a1dd1c9b605340696950277ff36729 manager: conda name: virtualenv optional: false platform: osx-64 url: - https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.29.3-pyhd8ed1ab_0.conda - version: 20.29.3 + https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.31.2-pyhd8ed1ab_0.conda + version: 20.31.2 - category: main dependencies: distlib: '>=0.3.7,<1' @@ -14073,15 +15985,15 @@ package: platformdirs: '>=3.9.1,<5' python: '>=3.9' hash: - md5: 87a2061465e55be9a997dd8cf8b5a578 - sha256: f7b2cd8ee05769e57dab1f2e2206360cb03d15d4290ddb30442711700c430ba6 + md5: c0600c1b374efa7a1ff444befee108ca + sha256: 763dc774200b2eebdf5437b112834c5455a1dd1c9b605340696950277ff36729 manager: conda name: virtualenv optional: false platform: osx-arm64 url: - https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.29.3-pyhd8ed1ab_0.conda - version: 20.29.3 + https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.31.2-pyhd8ed1ab_0.conda + version: 20.31.2 - category: main dependencies: msrest: '>=0.6.0,<0.7.0' @@ -14130,58 +16042,153 @@ package: - category: main dependencies: __glibc: '>=2.17,<3.0.a0' - libexpat: '>=2.6.2,<3.0a0' - libffi: '>=3.4,<4.0a0' - libgcc-ng: '>=13' - libstdcxx-ng: '>=13' + anyio: '>=3.0.0' + libgcc: '>=13' + python: '>=3.11,<3.12.0a0' + python_abi: 3.11.* + hash: + md5: 896e5c200fddca2b878a0113a58345f9 + sha256: b639615b2b943dd2e01296e5f99af62f92453c5b93568fb4509cd85fc4139b93 + manager: conda + name: watchfiles + optional: false + platform: linux-64 + url: + https://conda.anaconda.org/conda-forge/linux-64/watchfiles-1.0.5-py311h9e33e62_0.conda + version: 1.0.5 + - category: main + dependencies: + __osx: '>=10.13' + anyio: '>=3.0.0' + python: '>=3.11,<3.12.0a0' + python_abi: 3.11.* hash: - md5: 0a732427643ae5e0486a727927791da1 - sha256: 0884b2023a32d2620192cf2e2fc6784b8d1e31cf9f137e49e00802d4daf7d1c1 + md5: c2741ddc8d98730e1b0940fd0179216a + sha256: dca0ef5b3d6b642dc1e21a7845a6ac5fcb590e53cf0fb5219d48111f37b5d30d + manager: conda + name: watchfiles + optional: false + platform: osx-64 + url: + https://conda.anaconda.org/conda-forge/osx-64/watchfiles-1.0.5-py311h3b9c2be_0.conda + version: 1.0.5 + - category: main + dependencies: + __osx: '>=11.0' + anyio: '>=3.0.0' + python: '>=3.11,<3.12.0a0' + python_abi: 3.11.* + hash: + md5: 5f46b36eba293a884fe980d0c5b37606 + sha256: 2cfb284d572c6b623ac22053ba6fc082d20c9d86c54c6b2697b41ba69c6859ea + manager: conda + name: watchfiles + optional: false + platform: osx-arm64 + url: + https://conda.anaconda.org/conda-forge/osx-arm64/watchfiles-1.0.5-py311h3ff9189_0.conda + version: 1.0.5 + - category: main + dependencies: + __glibc: '>=2.17,<3.0.a0' + libexpat: '>=2.7.0,<3.0a0' + libffi: '>=3.4.6,<3.5.0a0' + libgcc: '>=13' + libstdcxx: '>=13' + hash: + md5: a37843723437ba75f42c9270ffe800b1 + sha256: 73d809ec8056c2f08e077f9d779d7f4e4c2b625881cad6af303c33dc1562ea01 manager: conda name: wayland optional: false platform: linux-64 - url: 
https://conda.anaconda.org/conda-forge/linux-64/wayland-1.23.1-h3e06ad9_0.conda + url: https://conda.anaconda.org/conda-forge/linux-64/wayland-1.23.1-h3e06ad9_1.conda version: 1.23.1 - category: main dependencies: - python: '>=3.9' + __glibc: '>=2.17,<3.0.a0' + libgcc: '>=13' + python: '>=3.11,<3.12.0a0' + python_abi: 3.11.* hash: - md5: 2841eb5bfc75ce15e9a0054b98dcd64d - sha256: 19ff205e138bb056a46f9e3839935a2e60bd1cf01c8241a5e172a422fed4f9c6 + md5: 208bf8e44ef767b25a51ba1beef0613c + sha256: 3284007ea6eaadef71e475e93124e10362d0a3376e32d2d7023bfef01ee96a66 manager: conda - name: webencodings + name: websockets optional: false platform: linux-64 url: - https://conda.anaconda.org/conda-forge/noarch/webencodings-0.5.1-pyhd8ed1ab_3.conda - version: 0.5.1 + https://conda.anaconda.org/conda-forge/linux-64/websockets-15.0.1-py311h9ecbd09_0.conda + version: 15.0.1 - category: main dependencies: - python: '>=3.9' + __osx: '>=10.13' + python: '>=3.11,<3.12.0a0' + python_abi: 3.11.* hash: - md5: 2841eb5bfc75ce15e9a0054b98dcd64d - sha256: 19ff205e138bb056a46f9e3839935a2e60bd1cf01c8241a5e172a422fed4f9c6 + md5: 706ed76fe488bd51f542c1f68347059c + sha256: 785eee203cbf8f536d7f840c6ca4ea9883f8a86d62f29e8711c5a0bcd0bf4148 manager: conda - name: webencodings + name: websockets optional: false platform: osx-64 url: - https://conda.anaconda.org/conda-forge/noarch/webencodings-0.5.1-pyhd8ed1ab_3.conda - version: 0.5.1 + https://conda.anaconda.org/conda-forge/osx-64/websockets-15.0.1-py311h4d7f069_0.conda + version: 15.0.1 - category: main dependencies: - python: '>=3.9' + __osx: '>=11.0' + python: '>=3.11,<3.12.0a0' + python_abi: 3.11.* hash: - md5: 2841eb5bfc75ce15e9a0054b98dcd64d - sha256: 19ff205e138bb056a46f9e3839935a2e60bd1cf01c8241a5e172a422fed4f9c6 + md5: 603430a5c35ae5c87310e096e0868551 + sha256: 9452f7288c50c3fef9f2ccc850a2b5e8fbe6148a7ec36f16939d0734eb728f1e manager: conda - name: webencodings + name: websockets optional: false platform: osx-arm64 url: - https://conda.anaconda.org/conda-forge/noarch/webencodings-0.5.1-pyhd8ed1ab_3.conda - version: 0.5.1 + https://conda.anaconda.org/conda-forge/osx-arm64/websockets-15.0.1-py311h917b07b_0.conda + version: 15.0.1 + - category: main + dependencies: + markupsafe: '>=2.1.1' + python: '>=3.9' + hash: + md5: 0a9b57c159d56b508613cc39022c1b9e + sha256: cd9a603beae0b237be7d9dfae8ae0b36ad62666ac4bb073969bce7da6f55157c + manager: conda + name: werkzeug + optional: false + platform: linux-64 + url: https://conda.anaconda.org/conda-forge/noarch/werkzeug-3.1.3-pyhd8ed1ab_1.conda + version: 3.1.3 + - category: main + dependencies: + markupsafe: '>=2.1.1' + python: '>=3.9' + hash: + md5: 0a9b57c159d56b508613cc39022c1b9e + sha256: cd9a603beae0b237be7d9dfae8ae0b36ad62666ac4bb073969bce7da6f55157c + manager: conda + name: werkzeug + optional: false + platform: osx-64 + url: https://conda.anaconda.org/conda-forge/noarch/werkzeug-3.1.3-pyhd8ed1ab_1.conda + version: 3.1.3 + - category: main + dependencies: + markupsafe: '>=2.1.1' + python: '>=3.9' + hash: + md5: 0a9b57c159d56b508613cc39022c1b9e + sha256: cd9a603beae0b237be7d9dfae8ae0b36ad62666ac4bb073969bce7da6f55157c + manager: conda + name: werkzeug + optional: false + platform: osx-arm64 + url: https://conda.anaconda.org/conda-forge/noarch/werkzeug-3.1.3-pyhd8ed1ab_1.conda + version: 3.1.3 - category: main dependencies: libgcc-ng: '>=12' @@ -14312,6 +16319,45 @@ package: url: https://conda.anaconda.org/conda-forge/osx-arm64/wrapt-1.17.2-py311h917b07b_0.conda version: 1.17.2 + - category: main + dependencies: + h11: 
'>=0.9.0,<1.0' + python: '>=3.9' + hash: + md5: 2c7536a04d9c21e1dd05bd4a3b1e3a39 + sha256: 37b89ef8dc05b6e06c73b60d0bc130f81d1be3a8c8eed5807c27984484ec175e + manager: conda + name: wsproto + optional: false + platform: linux-64 + url: https://conda.anaconda.org/conda-forge/noarch/wsproto-1.2.0-pyhd8ed1ab_1.conda + version: 1.2.0 + - category: main + dependencies: + h11: '>=0.9.0,<1.0' + python: '>=3.9' + hash: + md5: 2c7536a04d9c21e1dd05bd4a3b1e3a39 + sha256: 37b89ef8dc05b6e06c73b60d0bc130f81d1be3a8c8eed5807c27984484ec175e + manager: conda + name: wsproto + optional: false + platform: osx-64 + url: https://conda.anaconda.org/conda-forge/noarch/wsproto-1.2.0-pyhd8ed1ab_1.conda + version: 1.2.0 + - category: main + dependencies: + h11: '>=0.9.0,<1.0' + python: '>=3.9' + hash: + md5: 2c7536a04d9c21e1dd05bd4a3b1e3a39 + sha256: 37b89ef8dc05b6e06c73b60d0bc130f81d1be3a8c8eed5807c27984484ec175e + manager: conda + name: wsproto + optional: false + platform: osx-arm64 + url: https://conda.anaconda.org/conda-forge/noarch/wsproto-1.2.0-pyhd8ed1ab_1.conda + version: 1.2.0 - category: main dependencies: python: '>=3.9' @@ -14348,21 +16394,67 @@ package: platform: osx-arm64 url: https://conda.anaconda.org/conda-forge/noarch/wurlitzer-3.1.1-pyhd8ed1ab_1.conda version: 3.1.1 + - category: main + dependencies: + __osx: '>=10.13' + cffi: '>=1.0.0' + python: '>=3.11,<3.12.0a0' + python_abi: 3.11.* + hash: + md5: 3015eff47d09161438823d9e5610b283 + sha256: e9a73c738778911ae30bccdd0993b67231a63b7e28d0a31e7ed137a71b282e4d + manager: conda + name: xattr + optional: false + platform: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/xattr-1.1.0-py311h3336109_1.conda + version: 1.1.0 + - category: main + dependencies: + __osx: '>=11.0' + cffi: '>=1.0.0' + python: '>=3.11,<3.12.0a0' + python_abi: 3.11.* + hash: + md5: 71526a76dc2b6fa730d9fa36d62b253a + sha256: 1d4d46468074fa050fd3d90c1c77ff468cccf127aa2dbb4fb0060402f353373d + manager: conda + name: xattr + optional: false + platform: osx-arm64 + url: + https://conda.anaconda.org/conda-forge/osx-arm64/xattr-1.1.0-py311h460d6c5_1.conda + version: 1.1.0 - category: main dependencies: __glibc: '>=2.17,<3.0.a0' libgcc: '>=13' xorg-libx11: '>=1.8.10,<2.0a0' + xorg-libxmu: '>=1.2.1,<2.0a0' + hash: + md5: 60617f7654d84993ff0ccdfc55209b69 + sha256: 7795c9b28a643a7279e6008dfe625cda3c8ee8fa6e178d390d7e213fe4291a5d + manager: conda + name: xclip + optional: false + platform: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/xclip-0.13-hb9d3cd8_4.conda + version: '0.13' + - category: main + dependencies: + __glibc: '>=2.17,<3.0.a0' + libgcc: '>=13' + xorg-libx11: '>=1.8.12,<2.0a0' hash: - md5: f725c7425d6d7c15e31f3b99a88ea02f - sha256: 0d89b5873515a1f05d311f37ea4e087bbccc0418afa38f2f6189e97280db3179 + md5: 7c91bfc90672888259675ad2ad28af9c + sha256: 83ad2be5eb1d359b4cd7d7a93a6b25cdbfdce9d27b37508e2a4efe90d3a4ed80 manager: conda name: xkeyboard-config optional: false platform: linux-64 url: - https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.43-hb9d3cd8_0.conda - version: '2.43' + https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.44-hb9d3cd8_0.conda + version: '2.44' - category: main dependencies: __glibc: '>=2.17,<3.0.a0' @@ -14550,6 +16642,23 @@ package: url: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxinerama-1.1.5-h5888daf_1.conda version: 1.1.5 + - category: main + dependencies: + __glibc: '>=2.17,<3.0.a0' + libgcc: '>=13' + xorg-libx11: '>=1.8.10,<2.0a0' + xorg-libxext: '>=1.3.6,<2.0a0' + xorg-libxt: 
'>=1.3.0,<2.0a0' + hash: + md5: f35a9a2da717ade815ffa70c0e8bdfbd + sha256: 467cba5106e628068487dcbc2ba2dbd6a434e75d752eaf0895086e9fe65e6a8d + manager: conda + name: xorg-libxmu + optional: false + platform: linux-64 + url: + https://conda.anaconda.org/conda-forge/linux-64/xorg-libxmu-1.2.1-hb9d3cd8_1.conda + version: 1.2.1 - category: main dependencies: __glibc: '>=2.17,<3.0.a0' @@ -14582,6 +16691,23 @@ package: url: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.12-hb9d3cd8_0.conda version: 0.9.12 + - category: main + dependencies: + __glibc: '>=2.17,<3.0.a0' + libgcc: '>=13' + xorg-libice: '>=1.1.1,<2.0a0' + xorg-libsm: '>=1.2.4,<2.0a0' + xorg-libx11: '>=1.8.10,<2.0a0' + hash: + md5: 279b0de5f6ba95457190a1c459a64e31 + sha256: a8afba4a55b7b530eb5c8ad89737d60d60bc151a03fbef7a2182461256953f0e + manager: conda + name: xorg-libxt + optional: false + platform: linux-64 + url: + https://conda.anaconda.org/conda-forge/linux-64/xorg-libxt-1.3.1-hb9d3cd8_0.conda + version: 1.3.1 - category: main dependencies: __glibc: '>=2.17,<3.0.a0' @@ -14599,6 +16725,20 @@ package: url: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxtst-1.2.5-hb9d3cd8_3.conda version: 1.2.5 + - category: main + dependencies: + __glibc: '>=2.17,<3.0.a0' + libgcc: '>=13' + xorg-libx11: '>=1.8.10,<2.0a0' + hash: + md5: 16566b426488305d7fc8b084d5db94e9 + sha256: e13cab6260ccf8619547fea51b403301ea9ed0f667fa7e9e4f39c7d016d8caa4 + manager: conda + name: xsel + optional: false + platform: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/xsel-1.2.1-hb9d3cd8_6.conda + version: 1.2.1 - category: main dependencies: libgcc-ng: '>=9.4.0' @@ -14635,40 +16775,43 @@ package: version: 0.2.5 - category: main dependencies: - libgcc-ng: '>=12' - libstdcxx-ng: '>=12' + __glibc: '>=2.17,<3.0.a0' + libgcc: '>=13' + libstdcxx: '>=13' hash: - md5: 965eaacd7c18eb8361fd12bb9e7a57d7 - sha256: a65bb5284369e548a15a44b14baf1f7ac34fa4718d7d987dd29032caba2ecf20 + md5: 92b90f5f7a322e74468bb4909c7354b5 + sha256: 4b0b713a4308864a59d5f0b66ac61b7960151c8022511cdc914c0c0458375eca manager: conda name: yaml-cpp optional: false platform: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/yaml-cpp-0.8.0-h59595ed_0.conda + url: https://conda.anaconda.org/conda-forge/linux-64/yaml-cpp-0.8.0-h3f2d84a_0.conda version: 0.8.0 - category: main dependencies: - libcxx: '>=15.0.7' + __osx: '>=10.13' + libcxx: '>=18' hash: - md5: 1bb3addc859ed1338370da6e2996ef47 - sha256: 6e5e4afa1011a1ad5a734e895b8d2b2ad0fbc9ef6538aac8f852b33b2ebe44a8 + md5: e15e9855092a8bdaaaed6ad5c173fffa + sha256: 67d25c3aa2b4ee54abc53060188542d6086b377878ebf3e2b262ae7379e05a6d manager: conda name: yaml-cpp optional: false platform: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/yaml-cpp-0.8.0-he965462_0.conda + url: https://conda.anaconda.org/conda-forge/osx-64/yaml-cpp-0.8.0-h92383a6_0.conda version: 0.8.0 - category: main dependencies: - libcxx: '>=15.0.7' + __osx: '>=11.0' + libcxx: '>=18' hash: - md5: e783a232972a5c7dca549111e63a78b2 - sha256: e65a52fb1c9821ba3a7a670d650314f8ff983865e77ba9f69f74e0906844943d + md5: 30475b3d0406587cf90386a283bb3cd0 + sha256: 66ba31cfb8014fdd3456f2b3b394df123bbd05d95b75328b7c4131639e299749 manager: conda name: yaml-cpp optional: false platform: osx-arm64 - url: https://conda.anaconda.org/conda-forge/osx-arm64/yaml-cpp-0.8.0-h13dd4ca_0.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/yaml-cpp-0.8.0-ha1acc90_0.conda version: 0.8.0 - category: main dependencies: @@ -14784,53 +16927,50 @@ 
package: version: 1.3.1 - category: main dependencies: - __glibc: '>=2.17,<3.0.a0' - cffi: '>=1.11' - libgcc: '>=13' + cffi: '>=1.8' + libgcc-ng: '>=12' python: '>=3.11,<3.12.0a0' python_abi: 3.11.* hash: - md5: 6d229edd907b6bb39961b74e3d52de9c - sha256: 1a824220227f356f35acec5ff6a4418b1ccd0238fd752ceebeb04a0bd37acf0f + md5: 056b3271f46abaa4673c8c6783283a07 + sha256: 8aac43cc4fbdcc420fe8a22c764b67f6ac9168b103bfd10d79a82b748304ddf6 manager: conda name: zstandard optional: false platform: linux-64 url: - https://conda.anaconda.org/conda-forge/linux-64/zstandard-0.23.0-py311h9ecbd09_1.conda - version: 0.23.0 + https://conda.anaconda.org/conda-forge/linux-64/zstandard-0.19.0-py311hd4cff14_0.tar.bz2 + version: 0.19.0 - category: main dependencies: - __osx: '>=10.13' - cffi: '>=1.11' + cffi: '>=1.8' python: '>=3.11,<3.12.0a0' python_abi: 3.11.* hash: - md5: 11d2b64d86f2e63f7233335a23936151 - sha256: 7810fa3c45a93679eb78b49f1a4db0397e644dbb0edc7ff6e956668343f4f67f + md5: 96e4e2aa960398abbe5c4a6cf22269b8 + sha256: b470229c05df4d96d27904def00660b5dfa7ad57bf2b9dfd826325233f9e8510 manager: conda name: zstandard optional: false platform: osx-64 url: - https://conda.anaconda.org/conda-forge/osx-64/zstandard-0.23.0-py311h4d7f069_1.conda - version: 0.23.0 + https://conda.anaconda.org/conda-forge/osx-64/zstandard-0.19.0-py311h5547dcb_0.tar.bz2 + version: 0.19.0 - category: main dependencies: - __osx: '>=11.0' - cffi: '>=1.11' + cffi: '>=1.8' python: '>=3.11,<3.12.0a0' python_abi: 3.11.* hash: - md5: be18ca5f35d991ab12342a6fc3f7a6f8 - sha256: 496189ea504358088128df526e545a96d7c8b597bea0747f09bc0e081a67a69b + md5: ece21cb47a93c985aa4b44219c4c8c8b + sha256: 43eaee70cd406468d96d1643b75d16e0da3955a9c1d37056767134b91b61d515 manager: conda name: zstandard optional: false platform: osx-arm64 url: - https://conda.anaconda.org/conda-forge/osx-arm64/zstandard-0.23.0-py311h917b07b_1.conda - version: 0.23.0 + https://conda.anaconda.org/conda-forge/osx-arm64/zstandard-0.19.0-py311he2be06e_0.tar.bz2 + version: 0.19.0 - category: main dependencies: __glibc: '>=2.17,<3.0.a0' diff --git a/conda_forge_tick/all_feedstocks.py b/conda_forge_tick/all_feedstocks.py index 1bde6e8cf..d4037a8e1 100644 --- a/conda_forge_tick/all_feedstocks.py +++ b/conda_forge_tick/all_feedstocks.py @@ -6,14 +6,14 @@ from conda_forge_tick.git_utils import github_client from .lazy_json_backends import dump, load +from .settings import settings logger = logging.getLogger(__name__) def get_all_feedstocks_from_github(): gh = github_client() - - org = gh.get_organization("conda-forge") + org = gh.get_organization(settings().conda_forge_org) archived = set() not_archived = set() default_branches = {} diff --git a/conda_forge_tick/auto_tick.py b/conda_forge_tick/auto_tick.py index 0eae9e69e..5882e6bd2 100644 --- a/conda_forge_tick/auto_tick.py +++ b/conda_forge_tick/auto_tick.py @@ -72,10 +72,10 @@ from .migrators_types import MigrationUidTypedDict from .models.pr_json import PullRequestData, PullRequestInfoSpecial, PullRequestState +from .settings import settings logger = logging.getLogger(__name__) -BOT_HOME_DIR: str = os.getcwd() START_TIME = None TIMEOUT = int(os.environ.get("TIMEOUT", 600)) @@ -155,22 +155,34 @@ def _prepare_feedstock_repository( Any errors are written to the pr_info attribute of the feedstock context and logged. - :param backend: The GitPlatformBackend instance to use. - :param context: The current context - :param branch: The branch to create in the forked repository. - :param base_branch: The base branch to branch from. 
- :return: True if the repository was successfully prepared, False otherwise. + Parameters + ---------- + backend + The GitPlatformBackend instance to use. + context + The current context + branch + The branch to create in the forked repository. + base_branch + The base branch to branch from. + + Returns + ------- + bool + True if the repository was successfully prepared, False otherwise. """ try: backend.fork(context.git_repo_owner, context.git_repo_name) except RepositoryNotFoundError: logger.warning( - f"Could not fork {context.git_repo_owner}/{context.git_repo_name}: Not Found" + "Could not fork %s/%s: Not Found", + context.git_repo_owner, + context.git_repo_name, ) error_message = f"{context.feedstock_name}: Git repository not found." logger.critical( - f"Failed to migrate {context.feedstock_name}, {error_message}", + "Failed to migrate %s, %s", context.feedstock_name, error_message ) with context.attrs["pr_info"] as pri: @@ -198,12 +210,24 @@ def _commit_migration( """ Commit a migration that has been run in the local clone of a feedstock repository. If an error occurs during the commit, it is logged. - :param cli: The GitCli instance to use. - :param context: The FeedstockContext instance. - :param commit_message: The commit message to use. - :param allow_empty_commits: Whether the migrator allows empty commits. - :param raise_commit_errors: Whether to raise an exception if an error occurs during the commit. - :raises GitCliError: If an error occurs during the commit and raise_commit_errors is True. + + Parameters + ---------- + cli + The GitCli instance to use. + context + The FeedstockContext instance. + commit_message + The commit message to use. + allow_empty_commits + Whether the migrator allows empty commits. + raise_commit_errors + Whether to raise an exception if an error occurs during the commit. + + Raises + ------ + GitCliError + If an error occurs during the commit and raise_commit_errors is True. """ cli.add( context.local_clone_dir, @@ -223,9 +247,7 @@ def _commit_migration( @dataclass(frozen=True) class _RerenderInfo: - """ - Additional information about a rerender operation. - """ + """Additional information about a rerender operation.""" nontrivial_changes: bool """ @@ -289,10 +311,17 @@ def _should_automerge(migrator: Migrator, context: FeedstockContext) -> bool: """ Determine if a migration should be auto merged based on the feedstock and migrator settings. - :param migrator: The migrator to check. - :param context: The feedstock context. + Parameters + ---------- + migrator + The migrator to check. + context + The feedstock context. - :return: True if the migrator should be auto merged, False otherwise. + Returns + ------- + bool + True if the migrator should be auto merged, False otherwise. """ if isinstance(migrator, Version): return context.automerge in [True, "version"] @@ -393,12 +422,20 @@ def _check_and_process_solvability( by setting the corresponding fields in the feedstock attributes. If the recipe is solvable, reset the fields that track the solvability check status. 
- :param migrator: The migrator that was run - :param context: The current FeedstockContext of the feedstock that was migrated - :param base_branch: The branch of the feedstock repository that is the migration target + Parameters + ---------- + migrator + The migrator that was run + context + The current FeedstockContext of the feedstock that was migrated + base_branch + The branch of the feedstock repository that is the migration target - :returns: True if the migration can proceed normally, False if a required solvability check failed and the migration - needs to be aborted + Returns + ------- + bool + True if the migration can proceed normally, False if a required solvability check failed and the migration + needs to be aborted """ if not _is_solvability_check_needed(migrator, context, base_branch): return True @@ -483,7 +520,7 @@ def run( base_branch: str = "main", **kwargs: typing.Any, ) -> tuple[MigrationUidTypedDict, dict] | tuple[Literal[False], Literal[False]]: - """For a given feedstock and migration run the migration + """For a given feedstock and migration run the migration. Parameters ---------- @@ -507,10 +544,6 @@ def run( pr_json: dict The PR json object for recreating the PR as needed """ - - # sometimes we get weird directory issues so make sure we reset - os.chdir(BOT_HOME_DIR) - migrator_name = get_migrator_name(migrator) is_version_migration = isinstance(migrator, Version) _increment_pre_pr_migrator_attempt( @@ -634,8 +667,11 @@ def run( except DuplicatePullRequestError: # This shouldn't happen too often anymore since we won't double PR logger.warning( - f"Attempted to create a duplicate PR for merging {git_backend.user}:{branch_name} " - f"into {context.git_repo_owner}:{base_branch}. Ignoring." + "Attempted to create a duplicate PR for merging %s:%s into %s:%s. 
Ignoring.", + git_backend.user, + branch_name, + context.git_repo_owner, + base_branch, ) # Don't update the PR data pr_data = None @@ -737,20 +773,14 @@ def _run_migrator_on_feedstock_branch( sync_version_pr_info = False is_version = isinstance(migrator, Version) try: - try: - fctx.attrs["new_version"] = attrs.get("version_pr_info", {}).get( - "new_version", None - ) - migrator_uid, pr_json = run_with_tmpdir( - context=fctx, - migrator=migrator, - git_backend=git_backend, - rerender=migrator.rerender, - base_branch=base_branch, - hash_type=attrs.get("hash_type", "sha256"), - ) - finally: - fctx.attrs.pop("new_version", None) + migrator_uid, pr_json = run_with_tmpdir( + context=fctx, + migrator=migrator, + git_backend=git_backend, + rerender=migrator.rerender, + base_branch=base_branch, + hash_type=attrs.get("hash_type", "sha256"), + ) # if migration successful if migrator_uid: @@ -959,6 +989,7 @@ def _is_migrator_done(_mg_start, good_prs, time_per, pr_limit, tried_prs): def _run_migrator(migrator, mctx, temp, time_per, git_backend: GitPlatformBackend): _mg_start = time.time() + initial_working_dir = os.getcwd() migrator_name = get_migrator_name(migrator) @@ -1047,6 +1078,7 @@ def _run_migrator(migrator, mctx, temp, time_per, git_backend: GitPlatformBacken fctx = FeedstockContext( feedstock_name=attrs["feedstock_name"], attrs=attrs, + git_repo_owner=settings().conda_forge_org, ) # map main to current default branch @@ -1101,7 +1133,7 @@ def _run_migrator(migrator, mctx, temp, time_per, git_backend: GitPlatformBacken gc.collect() # sometimes we get weird directory issues so make sure we reset - os.chdir(BOT_HOME_DIR) + os.chdir(initial_working_dir) # Write graph partially through dump_graph(mctx.graph) @@ -1130,8 +1162,7 @@ def _setup_limits(): def _update_nodes_with_bot_rerun(gx: nx.DiGraph): - """Go through all the open PRs and check if they are rerun""" - + """Go through all the open PRs and check if they are rerun.""" print("processing bot-rerun labels", flush=True) for i, (name, node) in enumerate(gx.nodes.items()): @@ -1154,7 +1185,9 @@ def _update_nodes_with_bot_rerun(gx: nx.DiGraph): # maybe add a pass check info here ? 
(if using DEBUG) except Exception as e: logger.error( - f"BOT-RERUN : could not proceed check with {node}, {e}", + "BOT-RERUN : could not proceed check with %s", + node, + exc_info=e, ) raise e # if there is a valid PR and it isn't currently listed as rerun @@ -1191,8 +1224,7 @@ def _filter_ignored_versions(attrs, version): def _update_nodes_with_new_versions(gx): - """Updates every node with it's new version (when available)""" - + """Update every node with it's new version (when available).""" print("updating nodes with new versions", flush=True) version_nodes = get_all_keys_for_hashmap("versions") diff --git a/conda_forge_tick/cf_tick_schema.json b/conda_forge_tick/cf_tick_schema.json index 7b1e84f8d..7b5a35c34 100644 --- a/conda_forge_tick/cf_tick_schema.json +++ b/conda_forge_tick/cf_tick_schema.json @@ -24,7 +24,7 @@ }, "BotConfigVersionUpdates": { "additionalProperties": false, - "description": "This dictates the behavior of the conda-forge auto-tick bot for version\nupdates", + "description": "Dictates the behavior of the conda-forge auto-tick bot for version\nupdates.", "properties": { "random_fraction_to_keep": { "anyOf": [ @@ -106,7 +106,7 @@ } }, "additionalProperties": false, - "description": "This dictates the behavior of the conda-forge auto-tick bot which issues\nautomatic version updates/migrations for feedstocks.", + "description": "Dictates the behavior of the conda-forge auto-tick bot which issues\nautomatic version updates/migrations for feedstocks.\n\nA valid example is:\n\n```yaml\nbot:\n # can the bot automerge PRs it makes on this feedstock\n automerge: true\n # only automerge on successful version PRs, migrations are not automerged\n automerge: 'version'\n # only automerge on successful migration PRs, versions are not automerged\n automerge: 'migration'\n\n # only open PRs if resulting environment is solvable, useful for tightly coupled packages\n check_solvable: true\n\n # The bot.inspection key in the conda-forge.yml can have one of seven possible values and controls\n # the bots behaviour for automatic dependency updates:\n inspection: hint # generate hints using source code (backwards compatible)\n inspection: hint-all # generate hints using all methods\n inspection: hint-source # generate hints using only source code\n inspection: hint-grayskull # generate hints using only grayskull\n inspection: update-all # update recipe using all methods\n inspection: update-source # update recipe using only source code\n inspection: update-grayskull # update recipe using only grayskull\n inspection: disabled # don't update recipe, don't generate hints\n\n # any branches listed in this section will get bot migration PRs in addition\n # to the default branch\n abi_migration_branches:\n - 'v1.10.x'\n\n version_updates:\n # use this for packages that are updated too frequently\n random_fraction_to_keep: 0.1 # keeps 10% of versions at random\n exclude:\n - '08.14'\n```\n\nThe `abi_migration_branches` feature is useful to, for example, add a\nlong-term support (LTS) branch for a package.", "properties": { "automerge": { "anyOf": [ diff --git a/conda_forge_tick/chaindb.py b/conda_forge_tick/chaindb.py index 09b083122..6f03feab6 100644 --- a/conda_forge_tick/chaindb.py +++ b/conda_forge_tick/chaindb.py @@ -1,6 +1,7 @@ """ -The code in this module is from xonsh (https://github.com/xonsh/xonsh/blob/main/xonsh/lib/collections.py) -under the folloiwing license: +The code in this module is from xonsh (https://github.com/xonsh/xonsh/blob/main/xonsh/lib/collections.py). 
+ +License: Copyright 2015-2016, the xonsh developers. All rights reserved. @@ -51,7 +52,8 @@ def __new__(cls): class ChainDB(ChainMap): """A ChainMap who's ``_getitem__`` returns either a ChainDB or - the result. The results resolve to the outermost mapping.""" + the result. The results resolve to the outermost mapping. + """ def __getitem__(self, key): res = None diff --git a/conda_forge_tick/cli.py b/conda_forge_tick/cli.py index 370448b69..d064c96c7 100644 --- a/conda_forge_tick/cli.py +++ b/conda_forge_tick/cli.py @@ -238,9 +238,7 @@ def make_import_to_package_mapping( ctx: CliContext, max_artifacts: int, ) -> None: - """ - Make the import to package mapping. - """ + """Make the import to package mapping.""" from . import import_to_pkg import_to_pkg.main(ctx, max_artifacts) @@ -251,9 +249,7 @@ def make_import_to_package_mapping( def make_migrators( ctx: CliContext, ) -> None: - """ - Make the migrators. - """ + """Make the migrators.""" from . import make_migrators as _make_migrators _make_migrators.main(ctx) @@ -281,9 +277,7 @@ def react_to_event( event: str, uid: str, ) -> None: - """ - React to an event. - """ + """React to an event.""" from .events import react_to_event react_to_event(ctx, event, uid) @@ -292,9 +286,7 @@ def react_to_event( @main.command(name="clean-disk-space") @click.option("--ci-service", required=True, type=click.Choice(["github-actions"])) def clean_disk_space(ci_service) -> None: - """ - Clean up disk space on CI services. - """ + """Clean up disk space on CI services.""" from .os_utils import clean_disk_space clean_disk_space(ci_service) diff --git a/conda_forge_tick/config_schema.py b/conda_forge_tick/config_schema.py index e4f97bf56..c5ad82cec 100644 --- a/conda_forge_tick/config_schema.py +++ b/conda_forge_tick/config_schema.py @@ -47,8 +47,8 @@ class BotConfigVersionUpdatesSourcesChoice(StrEnum): class BotConfigVersionUpdates(BaseModel): """ - This dictates the behavior of the conda-forge auto-tick bot for version - updates + Dictates the behavior of the conda-forge auto-tick bot for version + updates. """ model_config: ConfigDict = ConfigDict(extra="forbid") @@ -104,8 +104,48 @@ class BotConfigVersionUpdates(BaseModel): class BotConfig(BaseModel): """ - This dictates the behavior of the conda-forge auto-tick bot which issues + Dictates the behavior of the conda-forge auto-tick bot which issues automatic version updates/migrations for feedstocks. 
+ + A valid example is: + + ```yaml + bot: + # can the bot automerge PRs it makes on this feedstock + automerge: true + # only automerge on successful version PRs, migrations are not automerged + automerge: 'version' + # only automerge on successful migration PRs, versions are not automerged + automerge: 'migration' + + # only open PRs if resulting environment is solvable, useful for tightly coupled packages + check_solvable: true + + # The bot.inspection key in the conda-forge.yml can have one of seven possible values and controls + # the bots behaviour for automatic dependency updates: + inspection: hint # generate hints using source code (backwards compatible) + inspection: hint-all # generate hints using all methods + inspection: hint-source # generate hints using only source code + inspection: hint-grayskull # generate hints using only grayskull + inspection: update-all # update recipe using all methods + inspection: update-source # update recipe using only source code + inspection: update-grayskull # update recipe using only grayskull + inspection: disabled # don't update recipe, don't generate hints + + # any branches listed in this section will get bot migration PRs in addition + # to the default branch + abi_migration_branches: + - 'v1.10.x' + + version_updates: + # use this for packages that are updated too frequently + random_fraction_to_keep: 0.1 # keeps 10% of versions at random + exclude: + - '08.14' + ``` + + The `abi_migration_branches` feature is useful to, for example, add a + long-term support (LTS) branch for a package. """ model_config: ConfigDict = ConfigDict(extra="forbid") diff --git a/conda_forge_tick/container_cli.py b/conda_forge_tick/container_cli.py index 7b6f74483..b269e8e14 100644 --- a/conda_forge_tick/container_cli.py +++ b/conda_forge_tick/container_cli.py @@ -1,5 +1,5 @@ #!/usr/bin/env python -"""This file runs specific tasks for the bot. +"""Run specific tasks for the bot. All imports from the bot need to be guarded by putting them in the subcommands. 
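The YAML block added to the BotConfig docstring above can be validated directly with the Pydantic model. A hedged sketch, assuming the field names match the docstring example (the exact types and defaults live in config_schema.py):

```python
import textwrap

import yaml

from conda_forge_tick.config_schema import BotConfig

bot_section = yaml.safe_load(
    textwrap.dedent(
        """
        automerge: 'version'
        check_solvable: true
        inspection: hint-all
        abi_migration_branches:
          - 'v1.10.x'
        version_updates:
          random_fraction_to_keep: 0.1
          exclude:
            - '08.14'
        """
    )
)

cfg = BotConfig.model_validate(bot_section)  # Pydantic v2 API, matching the ConfigDict usage above
print(cfg.automerge, cfg.version_updates.random_fraction_to_keep)
# extra="forbid" means a misspelled key such as `automrge` raises a ValidationError
```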
This ensures that we can set important environment variables before any imports, @@ -45,7 +45,7 @@ @contextmanager def _setenv(name, value): - """set an environment variable temporarily""" + """Set an environment variable temporarily.""" old = os.environ.get(name) try: os.environ[name] = value @@ -83,7 +83,7 @@ def _get_existing_feedstock_node_attrs(existing_feedstock_node_attrs): return attrs -def _run_bot_task(func, *, log_level, existing_feedstock_node_attrs, **kwargs): +def _run_bot_task(func, *, log_level: str, existing_feedstock_node_attrs, **kwargs): with ( tempfile.TemporaryDirectory() as tmpdir_cbld, _setenv("CONDA_BLD_PATH", os.path.join(tmpdir_cbld, "conda-bld")), @@ -141,13 +141,15 @@ def _provide_source_code(): with tempfile.TemporaryDirectory() as tmpdir: input_recipe_dir = "/cf_feedstock_ops_dir/recipe_dir" logger.debug( - f"input container recipe dir {input_recipe_dir}: {os.listdir(input_recipe_dir)}" + "input container recipe dir %s: %s", + input_recipe_dir, + os.listdir(input_recipe_dir), ) recipe_dir = os.path.join(tmpdir, os.path.basename(input_recipe_dir)) sync_dirs(input_recipe_dir, recipe_dir, ignore_dot_git=True, update_git=False) logger.debug( - f"copied container recipe dir {recipe_dir}: {os.listdir(recipe_dir)}" + "copied container recipe dir %s: %s", recipe_dir, os.listdir(recipe_dir) ) output_source_code = "/cf_feedstock_ops_dir/source_dir" @@ -180,7 +182,7 @@ def _execute_git_cmds_and_report(*, cmds, cwd, msg): _output += gitret.stdout gitret.check_returncode() except Exception as e: - logger.error(f"{msg}\noutput: {_output}", exc_info=e) + logger.error("%s\noutput: %s", msg, _output, exc_info=e) raise e @@ -203,7 +205,9 @@ def _migrate_feedstock(*, feedstock_name, default_branch, attrs, input_kwargs): assert len(input_fs_dir) == 1, f"expected one feedstock, got {input_fs_dir}" input_fs_dir = input_fs_dir[0] logger.debug( - f"input container feedstock dir {input_fs_dir}: {os.listdir(input_fs_dir)}" + "input container feedstock dir %s: %s", + input_fs_dir, + os.listdir(input_fs_dir), ) input_permissions = os.path.join( "/cf_feedstock_ops_dir", @@ -214,7 +218,9 @@ def _migrate_feedstock(*, feedstock_name, default_branch, attrs, input_kwargs): fs_dir = os.path.join(tmpdir, os.path.basename(input_fs_dir)) sync_dirs(input_fs_dir, fs_dir, ignore_dot_git=True, update_git=False) - logger.debug(f"copied container feedstock dir {fs_dir}: {os.listdir(fs_dir)}") + logger.debug( + "copied container feedstock dir %s: %s", fs_dir, os.listdir(fs_dir) + ) reset_permissions_with_user_execute(fs_dir, input_permissions) @@ -257,7 +263,9 @@ def _update_version(*, version, hash_type): assert len(input_fs_dir) == 1, f"expected one feedstock, got {input_fs_dir}" input_fs_dir = input_fs_dir[0] logger.debug( - f"input container feedstock dir {input_fs_dir}: {os.listdir(input_fs_dir)}" + "input container feedstock dir %s: %s", + input_fs_dir, + os.listdir(input_fs_dir), ) input_permissions = os.path.join( "/cf_feedstock_ops_dir", @@ -268,7 +276,9 @@ def _update_version(*, version, hash_type): fs_dir = os.path.join(tmpdir, os.path.basename(input_fs_dir)) sync_dirs(input_fs_dir, fs_dir, ignore_dot_git=True, update_git=False) - logger.debug(f"copied container feedstock dir {fs_dir}: {os.listdir(fs_dir)}") + logger.debug( + "copied container feedstock dir %s: %s", fs_dir, os.listdir(fs_dir) + ) reset_permissions_with_user_execute(fs_dir, input_permissions) @@ -382,7 +392,8 @@ def _check_solvable( logger = logging.getLogger("conda_forge_tick.container") logger.debug( - f"input container 
feedstock dir /cf_feedstock_ops_dir: {os.listdir('/cf_feedstock_ops_dir')}" + "input container feedstock dir /cf_feedstock_ops_dir: %s", + os.listdir("/cf_feedstock_ops_dir"), ) data = {} @@ -434,7 +445,7 @@ def cli(): ) @click.option("--log-debug", is_flag=True, help="Log debug information.") def parse_meta_yaml( - log_level, + log_level: str, for_pinning, platform, arch, @@ -472,12 +483,14 @@ def parse_meta_yaml( "--cbc-path", type=str, default=None, help="The path to global pinning file." ) def parse_recipe_yaml( + log_level: str, for_pinning, platform_arch, cbc_path, ): return _run_bot_task( _parse_recipe_yaml, + log_level=log_level, existing_feedstock_node_attrs=None, for_pinning=for_pinning, platform_arch=platform_arch, diff --git a/conda_forge_tick/contexts.py b/conda_forge_tick/contexts.py index 3d1057326..78a1bb8e7 100644 --- a/conda_forge_tick/contexts.py +++ b/conda_forge_tick/contexts.py @@ -22,7 +22,7 @@ @dataclass class MigratorSessionContext: - """Singleton session context. There should generally only be one of these""" + """Singleton session context. There should generally only be one of these.""" graph: DiGraph = None smithy_version: str = "" @@ -37,6 +37,10 @@ class FeedstockContext: """ If not provided, this is set to a default branch read from all_feedstocks.json, or 'main'. """ + git_repo_owner: str = "conda-forge" + """ + The owner of the upstream git repository. + """ def __post_init__(self): if not self.default_branch: @@ -46,19 +50,13 @@ def __post_init__(self): DEFAULT_BRANCHES.get(self.feedstock_name, "main"), ) - @property - def git_repo_owner(self) -> str: - return "conda-forge" - @property def git_repo_name(self) -> str: return f"{self.feedstock_name}-feedstock" @property def git_http_ref(self) -> str: - """ - A link to the feedstock's GitHub repository. - """ + """A link to the feedstock's GitHub repository.""" return f"https://github.com/{self.git_repo_owner}/{self.git_repo_name}" @property diff --git a/conda_forge_tick/depfinder_api.py b/conda_forge_tick/depfinder_api.py index 4ea0c5eef..74524e4da 100644 --- a/conda_forge_tick/depfinder_api.py +++ b/conda_forge_tick/depfinder_api.py @@ -123,7 +123,7 @@ def simple_import_to_pkg_map( ignore=None, custom_namespaces=None, ): - """Provide the map between all the imports and their possible packages + """Provide the map between all the imports and their possible packages. Parameters ---------- diff --git a/conda_forge_tick/deploy.py b/conda_forge_tick/deploy.py index e3f99feb1..1caf5fabf 100644 --- a/conda_forge_tick/deploy.py +++ b/conda_forge_tick/deploy.py @@ -3,15 +3,14 @@ import subprocess import sys -from . 
import sensitive_env from .cli_context import CliContext -from .git_utils import delete_file_via_gh_api, push_file_via_gh_api +from .git_utils import delete_file_via_gh_api, get_bot_token, push_file_via_gh_api from .lazy_json_backends import ( CF_TICK_GRAPH_DATA_HASHMAPS, - CF_TICK_GRAPH_GITHUB_BACKEND_REPO, get_lazy_json_backends, ) from .os_utils import clean_disk_space +from .settings import settings from .utils import ( fold_log_lines, get_bot_run_url, @@ -148,22 +147,17 @@ def _deploy_batch(*, files_to_add, batch, n_added, max_per_batch=200): pass print(">>>>>>>>>>>> git push try", flush=True) - with sensitive_env() as env: - status = run_command_hiding_token( - [ - "git", - "push", - "https://{token}@github.com/{deploy_repo}.git".format( - token=env.get("BOT_TOKEN", ""), - deploy_repo="regro/cf-graph-countyfair", - ), - "master", - ], - token=env.get("BOT_TOKEN", ""), - ) + status = run_command_hiding_token( + [ + "git", + "push", + f"https://{get_bot_token()}@github.com/{settings().graph_github_backend_repo}.git", + settings().graph_repo_default_branch, + ], + token=get_bot_token(), + ) if status != 0: print(">>>>>>>>>>>> git push failed", flush=True) - num_try += 1 if status != 0 or not graph_ok: @@ -192,7 +186,7 @@ def _get_files_to_delete(): def _get_pth_commit_message(pth): - """make a nice message for stuff managed via LazyJson.""" + """Make a nice message for stuff managed via LazyJson.""" step_name = os.environ.get("GITHUB_WORKFLOW", "update graph") msg_pth = pth parts = pth.split("/") @@ -207,10 +201,11 @@ def _get_pth_commit_message(pth): def _reset_and_restore_file(pth): subprocess.run(["git", "reset", "--", pth], capture_output=True, text=True) subprocess.run(["git", "restore", "--", pth], capture_output=True, text=True) + subprocess.run(["git", "clean", "-f", "--", pth], capture_output=True, text=True) def deploy(ctx: CliContext, dirs_to_deploy: list[str] = None): - """Deploy the graph to GitHub""" + """Deploy the graph to GitHub.""" if ctx.dry_run: print("(dry run) deploying") return @@ -282,7 +277,7 @@ def deploy(ctx: CliContext, dirs_to_deploy: list[str] = None): msg = _get_pth_commit_message(pth) - push_file_via_gh_api(pth, CF_TICK_GRAPH_GITHUB_BACKEND_REPO, msg) + push_file_via_gh_api(pth, settings().graph_github_backend_repo, msg) except Exception as e: logger.warning( "git push via API failed - trying via git CLI", exc_info=e @@ -305,7 +300,7 @@ def deploy(ctx: CliContext, dirs_to_deploy: list[str] = None): # make a nice message for stuff managed via LazyJson msg = _get_pth_commit_message(pth) - delete_file_via_gh_api(pth, CF_TICK_GRAPH_GITHUB_BACKEND_REPO, msg) + delete_file_via_gh_api(pth, settings().graph_github_backend_repo, msg) except Exception as e: logger.warning( "git delete via API failed - trying via git CLI", exc_info=e diff --git a/conda_forge_tick/env_management.py b/conda_forge_tick/env_management.py index dc700f7c8..8f7a0e068 100644 --- a/conda_forge_tick/env_management.py +++ b/conda_forge_tick/env_management.py @@ -14,7 +14,7 @@ def __init__(self): self.classified_info = {} def hide_env_vars(self): - """Remove sensitive env vars""" + """Remove sensitive env vars.""" self.classified_info.update( { k: os.environ.pop(k, self.classified_info.get(k, None)) @@ -23,7 +23,7 @@ def hide_env_vars(self): ) def reveal_env_vars(self): - """Restore sensitive env vars""" + """Restore sensitive env vars.""" os.environ.update( **{k: v for k, v in self.classified_info.items() if v is not None} ) @@ -31,7 +31,8 @@ def reveal_env_vars(self): @contextmanager def 
sensitive_env(self): """Add sensitive keys to environ if needed, when ctx is finished remove keys and update the sensitive env - in case any were updated inside the ctx""" + in case any were updated inside the ctx. + """ self.reveal_env_vars() yield os.environ self.hide_env_vars() diff --git a/conda_forge_tick/events/__init__.py b/conda_forge_tick/events/__init__.py index c1a23fb7a..1f3ee6031 100644 --- a/conda_forge_tick/events/__init__.py +++ b/conda_forge_tick/events/__init__.py @@ -13,6 +13,11 @@ def react_to_event(ctx: CliContext, event: str, uid: str) -> None: uid : str The unique identifier of the event. It is the PR id for PR events or the feedstock name for push events. + + Raises + ------ + RuntimeError + If the event is not recognized. """ if event == "pr": from .pr_events import react_to_pr diff --git a/conda_forge_tick/executors.py b/conda_forge_tick/executors.py index 5623e808d..26ddffbea 100644 --- a/conda_forge_tick/executors.py +++ b/conda_forge_tick/executors.py @@ -24,11 +24,10 @@ def __exit__(self, *args, **kwargs): @contextlib.contextmanager def lock_git_operation(): """ - A context manager to lock git operations - it can be acquired once per thread, once per process, + Get a context manager to lock git operations - it can be acquired once per thread, once per process, and once per dask worker. Note that this is a reentrant lock, so it can be acquired multiple times by the same thread/process/worker. """ - with GIT_LOCK_THREAD, GIT_LOCK_PROCESS, GIT_LOCK_DASK: yield @@ -80,7 +79,7 @@ def _init_dask(lock): @contextlib.contextmanager def executor(kind: str, max_workers: int, daemon=True) -> typing.Iterator[Executor]: - """General purpose utility to get an executor with its as_completed handler + """General purpose utility to get an executor with its as_completed handler. This allows us to easily use other executors as needed. """ diff --git a/conda_forge_tick/feedstock_parser.py b/conda_forge_tick/feedstock_parser.py index 4e31ab44e..b8a1996d5 100644 --- a/conda_forge_tick/feedstock_parser.py +++ b/conda_forge_tick/feedstock_parser.py @@ -19,6 +19,12 @@ ) from requests.models import Response +from conda_forge_tick.settings import ( + ENV_CONDA_FORGE_ORG, + ENV_GRAPH_GITHUB_BACKEND_REPO, + settings, +) + if typing.TYPE_CHECKING: from mypy_extensions import TestTypedDict @@ -82,7 +88,7 @@ def _get_requirements( run: bool = True, outputs_to_keep: Optional[Set["PackageName"]] = None, ) -> "Set[PackageName]": - """Get the list of recipe requirements from a meta.yaml dict + """Get the list of recipe requirements from a meta.yaml dict. 
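Several hunks here (contexts.py, deploy.py, feedstock_parser.py) replace hard-coded values such as "conda-forge" and CF_TICK_GRAPH_GITHUB_BACKEND_REPO with a settings() accessor, and FeedstockContext gains a git_repo_owner field. A short usage sketch, assuming settings() exposes the attributes referenced in this diff; the feedstock name is a placeholder:

```python
from conda_forge_tick.contexts import FeedstockContext
from conda_forge_tick.settings import settings

# configuration is now read at call time, so integration tests can point the bot
# at a staging GitHub org and graph repository via environment variables
owner = settings().conda_forge_org                 # "conda-forge" in production
graph_repo = settings().graph_github_backend_repo  # the cf-graph deploy target

fctx = FeedstockContext(
    feedstock_name="example",  # placeholder
    attrs={},                  # the bot passes the node's attrs here
    git_repo_owner=owner,
)
print(fctx.git_http_ref)  # https://github.com/<owner>/example-feedstock
```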
Parameters ---------- @@ -122,7 +128,7 @@ def _parse_requirements( host: bool = True, run: bool = True, ) -> typing.MutableSet["PackageName"]: - """Flatten a YAML requirements section into a list of names""" + """Flatten a YAML requirements section into a list of names.""" if not req: # handle None as empty return set() if isinstance(req, list): # simple list goes to both host and run @@ -192,7 +198,7 @@ def _fetch_static_repo(name, dest): for branch in ["main", "master"]: try: r = requests.get( - f"https://github.com/conda-forge/{name}-feedstock/archive/{branch}.zip", + f"https://github.com/{settings().conda_forge_org}/{name}-feedstock/archive/{branch}.zip", ) r.raise_for_status() found_branch = branch @@ -202,7 +208,7 @@ def _fetch_static_repo(name, dest): if r.status_code != 200: logger.error( - f"Something odd happened when fetching feedstock {name}: {r.status_code}", + "Something odd happened when fetching feedstock %s: %d", name, r.status_code ) return r @@ -244,15 +250,36 @@ def populate_feedstock_attributes( """ Parse the various configuration information into the node_attrs of a feedstock. - :param name: The name of the feedstock - :param existing_node_attrs: The existing node_attrs of the feedstock. Pass an empty dict if none. - :param meta_yaml: The meta.yaml file as a string - :param recipe_yaml: The recipe.yaml file as a string - :param conda_forge_yaml: The conda-forge.yaml file as a string - :param mark_not_archived: If True, forcibly mark the feedstock as not archived in the node attrs, even if it is archived. - :param feedstock_dir: The directory where the feedstock is located. If None, some information will not be available. + Parameters + ---------- + name + The name of the feedstock. + existing_node_attrs + The existing node_attrs of the feedstock. Pass an empty dict if none. + meta_yaml + The meta.yaml file as a string. + recipe_yaml + The recipe.yaml file as a string. + conda_forge_yaml + The conda-forge.yaml file as a string. + mark_not_archived + If True, forcibly mark the feedstock as not archived in the node attrs, + even if it is archived. + feedstock_dir + The directory where the feedstock is located. If None, some information + will not be available. - :return: A dictionary with the new node_attrs of the feedstock, with only some fields populated. + Returns + ------- + dict[str, Any] + A dictionary with the new node_attrs of the feedstock, with only some + fields populated. + + Raises + ------ + ValueError + If both `meta_yaml` and `recipe_yaml` are provided. + If neither `meta_yaml` nor `recipe_yaml` are provided. 
""" from conda_forge_tick.chaindb import ChainDB, _convert_to_dict @@ -316,7 +343,7 @@ def populate_feedstock_attributes( variant_yamls = [] plat_archs = [] for cbc_path in ci_support_files: - logger.debug(f"parsing conda-build config: {cbc_path}") + logger.debug("parsing conda-build config: %s", cbc_path) cbc_name = cbc_path.name cbc_name_parts = cbc_name.replace(".yaml", "").split("_") plat = cbc_name_parts[0] @@ -367,7 +394,7 @@ def populate_feedstock_attributes( # sometimes the requirements come out to None or [None] # and this ruins the aggregated meta_yaml / breaks stuff - logger.debug(f"getting reqs for config: {cbc_path}") + logger.debug("getting reqs for config: %s", cbc_path) if "requirements" in variant_yamls[-1]: variant_yamls[-1]["requirements"] = _clean_req_nones( variant_yamls[-1]["requirements"], @@ -382,7 +409,7 @@ def populate_feedstock_attributes( ) # collapse them down - logger.debug(f"collapsing reqs for {name}") + logger.debug("collapsing reqs for %s", name) final_cfgs = {} for plat_arch, varyml in zip(plat_archs, variant_yamls): if plat_arch not in final_cfgs: @@ -428,7 +455,7 @@ def populate_feedstock_attributes( sorted_variant_yamls = [x for _, x in sorted(zip(plat_archs, variant_yamls))] yaml_dict = ChainDB(*sorted_variant_yamls) if not yaml_dict: - logger.error(f"Something odd happened when parsing recipe {name}") + logger.error("Something odd happened when parsing recipe %s", name) node_attrs["parsing_error"] = ( "feedstock parsing error: could not combine metadata dicts across platforms" ) @@ -552,6 +579,13 @@ def load_feedstock_local( ------- sub_graph : MutableMapping The sub_graph, now updated with the feedstock metadata + + Raises + ------ + ValueError + If both `meta_yaml` and `recipe_yaml` are provided. + If neither `meta_yaml` nor `recipe_yaml` are provided and no file is present in + the feedstock. """ new_sub_graph = {key: value for key, value in sub_graph.items()} @@ -672,6 +706,12 @@ def load_feedstock_containerized( args, json_loads=loads, input=json_blob, + extra_container_args=[ + "-e", + f"{ENV_CONDA_FORGE_ORG}={settings().conda_forge_org}", + "-e", + f"{ENV_GRAPH_GITHUB_BACKEND_REPO}={settings().graph_github_backend_repo}", + ], ) return data @@ -714,7 +754,6 @@ def load_feedstock( sub_graph : MutableMapping The sub_graph, now updated with the feedstock metadata """ - if should_use_container(use_container=use_container): return load_feedstock_containerized( name, diff --git a/conda_forge_tick/git_utils.py b/conda_forge_tick/git_utils.py index 14448f608..30d1e2dff 100644 --- a/conda_forge_tick/git_utils.py +++ b/conda_forge_tick/git_utils.py @@ -1,4 +1,4 @@ -"""Utilities for managing github repos""" +"""Utilities for managing github repos.""" import base64 import copy @@ -84,14 +84,27 @@ } -def get_bot_token(): +def get_bot_token() -> str: + """Get the bot token from the environment. + + Returns + ------- + str + The bot token. + """ with sensitive_env() as env: return env["BOT_TOKEN"] def github3_client() -> github3.GitHub: - """ + """Get the github3 client. + This will be removed in the future, use the GitHubBackend class instead. + + Returns + ------- + github3.GitHub + The github3 client. """ if not hasattr(GITHUB3_CLIENT, "client"): GITHUB3_CLIENT.client = github3.login(token=get_bot_token()) @@ -99,8 +112,14 @@ def github3_client() -> github3.GitHub: def github_client() -> github.Github: - """ + """Get the PyGithub client. + This will be removed in the future, use the GitHubBackend class instead. 
+ + Returns + ------- + github.Github + The PyGithub client. """ if not hasattr(GITHUB_CLIENT, "client"): GITHUB_CLIENT.client = github.Github( @@ -121,8 +140,7 @@ def __str__(self): class GitConnectionMode(enum.StrEnum): - """ - We don't need anything else than HTTPS for now, but this would be the place to + """We don't need anything else than HTTPS for now, but this would be the place to add more connection modes (e.g. SSH). """ @@ -130,40 +148,31 @@ class GitConnectionMode(enum.StrEnum): class GitCliError(Exception): - """ - A generic error that occurred while running a git CLI command. - """ + """A generic error that occurred while running a git CLI command.""" pass class GitPlatformError(Exception): - """ - A generic error that occurred while interacting with a git platform. - """ + """A generic error that occurred while interacting with a git platform.""" pass class DuplicatePullRequestError(GitPlatformError): - """ - Raised if a pull request already exists. - """ + """Raised if a pull request already exists.""" pass class RepositoryNotFoundError(Exception): - """ - Raised when a repository is not found. - """ + """Raised when a repository is not found.""" pass class GitCli: - """ - A simple wrapper around the git command line interface. + """A simple wrapper around the git command line interface. Git operations are locked (globally) to prevent operations from interfering with each other. If this does impact performance too much, we can consider a per-repository locking strategy. @@ -177,23 +186,44 @@ def _run_git_command( check_error: bool = True, suppress_all_output: bool = False, ) -> subprocess.CompletedProcess: - """ - Run a git command. stdout is by default only printed if the command fails. stderr is always printed by default. + """Run a git command. + + stdout is by default only printed if the command fails. stderr is always printed by default. stdout is, by default, always available in the returned CompletedProcess, stderr is never. - :param cmd: The command to run, as a list of strings. - :param working_directory: The directory to run the command in. If None, the command will be run in the current - working directory. - :param check_error: If True, raise a GitCliError if the git command fails. - :param suppress_all_output: If True, suppress all output (stdout and stderr). Also, the returned - CompletedProcess will have stdout and stderr set to None. Use this for sensitive commands. - :return: The result of the git command. - :raises GitCliError: If the git command fails and check_error is True. - :raises FileNotFoundError: If the working directory does not exist. + Parameters + ---------- + cmd + The command to run, as a list of strings. + working_directory + The directory to run the command in. If None, the command will be run in the current + working directory. + check_error + If True, raise a GitCliError if the git command fails. + suppress_all_output + If True, suppress all output (stdout and stderr). Also, the returned + CompletedProcess will have stdout and stderr set to None. Use this for sensitive commands. + + Returns + ------- + subprocess.CompletedProcess + The result of the git command. + + Raises + ------ + GitCliError + If the git command fails and check_error is True. + FileNotFoundError + If the working directory does not exist. """ + if working_directory is not None and not working_directory.exists(): + raise FileNotFoundError( + f"Working directory {working_directory} does not exist." 
+ ) git_command = ["git"] + cmd - logger.debug(f"Running git command: {git_command}") + if not suppress_all_output: + logger.debug("Running git command: %s", git_command) # stdout and stderr are piped to devnull if suppress_all_output is True stdout_args = ( @@ -214,7 +244,9 @@ def _run_git_command( ) except subprocess.CalledProcessError as e: logger.info( - f"Command '{' '.join(map(str, git_command))}' failed.\nstdout:\n{e.stdout or ''}\nend of stdout" + "Command '%s' failed.\nstdout:\n%s\nend of stdout", + " ".join(map(str, git_command)), + e.stdout or "", ) raise GitCliError(f"Error running git command: {repr(e)}") @@ -222,63 +254,114 @@ def _run_git_command( @lock_git_operation() def add(self, git_dir: Path, *pathspec: Path, all_: bool = False): - """ - Add files to the git index with `git add`. - :param git_dir: The directory of the git repository. - :param pathspec: The files to add. - :param all_: If True, not only add the files in pathspec, but also where the index already has an entry. - If all_ is set with empty pathspec, all files in the entire working tree are updated. - :raises ValueError: If pathspec is empty and all_ is False. - :raises GitCliError: If the git command fails. + """Add files to the git index with `git add`. + + Parameters + ---------- + git_dir : str + The directory of the git repository. + pathspec : str + The files to add. + all_ : bool, optional + If True, not only add the files in pathspec, but also where the index + already has an entry. If all_ is set with empty pathspec, all files + in the entire working tree are updated. + + Raises + ------ + ValueError + If pathspec is empty and all_ is False. + GitCliError + If the git command fails. """ if not pathspec and not all_: raise ValueError("Either pathspec or all_ must be set.") all_arg = ["--all"] if all_ else [] - self._run_git_command(["add", *all_arg, *pathspec], git_dir) + try: + self._run_git_command(["add", *all_arg, *pathspec], git_dir) + except GitCliError as e: + raise GitCliError("Adding files to git failed.") from e @lock_git_operation() def commit( self, git_dir: Path, message: str, all_: bool = False, allow_empty: bool = False ): - """ - Commit changes to the git repository with `git commit`. - :param git_dir: The directory of the git repository. - :param message: The commit message. - :param allow_empty: If True, allow an empty commit. - :param all_: Automatically stage files that have been modified and deleted, but new files are not affected. - :raises GitCliError: If the git command fails. + """Commit changes to the git repository with `git commit`. + + Parameters + ---------- + git_dir : Path + The directory of the git repository. + message : str + The commit message. + allow_empty : bool, optional + If True, allow an empty commit. + all_ : bool, optional + Automatically stage files that have been modified and deleted, but new + files are not affected. + + Raises + ------ + GitCliError + If the git command fails. """ all_arg = ["-a"] if all_ else [] allow_empty_arg = ["--allow-empty"] if allow_empty else [] - self._run_git_command( - ["commit", *all_arg, *allow_empty_arg, "-m", message], git_dir - ) + try: + self._run_git_command( + ["commit", *all_arg, *allow_empty_arg, "-m", message], git_dir + ) + except GitCliError as e: + raise GitCliError("Could not commit.") from e @lock_git_operation() def reset_hard(self, git_dir: Path, to_treeish: str = "HEAD"): + """Reset the git index of a directory to the state of the last commit with `git reset --hard HEAD`. 
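The GitCli hunks above and below follow a single convention: check the working directory up front (raising FileNotFoundError) and re-raise low-level GitCliError with a descriptive message via `raise ... from e`. A small illustration of what a caller observes; the path is made up:

```python
from pathlib import Path

from conda_forge_tick.git_utils import GitCli, GitCliError

cli = GitCli()
try:
    cli.reset_hard(Path("/tmp/definitely-not-a-repo"))  # hypothetical path
except FileNotFoundError as e:
    # the new precondition check fails fast when the directory is missing
    print(e)
except GitCliError as e:
    # if the directory exists but `git reset` fails, the wrapper message is shown
    # while the original command failure stays available as the chained cause
    print(e)            # "git reset failed"
    print(e.__cause__)  # "Error running git command: ..."
```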
+ + Parameters + ---------- + git_dir : Path + The directory to reset. + to_treeish : str, optional + The treeish to reset to. Defaults to "HEAD". + + Raises + ------ + GitCliError + If the git command fails. + FileNotFoundError + If the git_dir does not exist. """ - Reset the git index of a directory to the state of the last commit with `git reset --hard HEAD`. - :param git_dir: The directory to reset. - :param to_treeish: The treeish to reset to. Defaults to "HEAD". - :raises GitCliError: If the git command fails. - :raises FileNotFoundError: If the git_dir does not exist. - """ - self._run_git_command(["reset", "--quiet", "--hard", to_treeish], git_dir) + if not git_dir.exists(): + raise FileNotFoundError(f"git_dir {git_dir} does not exist.") + + try: + self._run_git_command(["reset", "--quiet", "--hard", to_treeish], git_dir) + except GitCliError as e: + raise GitCliError("git reset failed") from e @lock_git_operation() def clone_repo(self, origin_url: str, target_dir: Path): - """ - Clone a Git repository. - If target_dir exists and is non-empty, this method will fail with GitCliError. - If target_dir exists and is empty, it will work. - If target_dir does not exist, it will work. - :param target_dir: The directory to clone the repository into. - :param origin_url: The URL of the repository to clone. - :raises GitCliError: If the git command fails (e.g. because origin_url does not point to valid remote or - target_dir is not empty). + """Clone a Git repository. + + Parameters + ---------- + target_dir : Path + The directory to clone the repository into. + If the directory exists and is non-empty, this method will fail. + If the directory exists and is empty, it will work. + If the directory does not exist, it will work. + origin_url : str + The URL of the repository to clone. + + Raises + ------ + GitCliError + If the git command fails (e.g. because origin_url does not point to valid + remote or target_dir is not empty). """ try: self._run_git_command(["clone", "--quiet", origin_url, target_dir]) @@ -289,41 +372,73 @@ def clone_repo(self, origin_url: str, target_dir: Path): @lock_git_operation() def push_to_url(self, git_dir: Path, remote_url: str, branch: str): + """Push changes to a remote URL. + + Parameters + ---------- + git_dir + The directory of the git repository. + remote_url + The URL of the remote. + branch + The branch to push to. + + Raises + ------ + GitCliError + If the git command fails. """ - Push changes to a remote URL. - :param git_dir: The directory of the git repository. - :param remote_url: The URL of the remote. - :param branch: The branch to push to. - :raises GitCliError: If the git command fails. - """ - - self._run_git_command(["push", remote_url, branch], git_dir) + try: + self._run_git_command(["push", remote_url, branch], git_dir) + except GitCliError as e: + raise GitCliError("git push failed") from e @lock_git_operation() def add_remote(self, git_dir: Path, remote_name: str, remote_url: str): + """Add a remote to a git repository. + + Parameters + ---------- + remote_name + The name of the remote. + remote_url + The URL of the remote. + git_dir + The directory of the git repository. + + Raises + ------ + GitCliError + If the git command fails (e.g., the remote already exists). + FileNotFoundError + If git_dir does not exist. """ - Add a remote to a git repository. - :param remote_name: The name of the remote. - :param remote_url: The URL of the remote. - :param git_dir: The directory of the git repository. 
- :raises GitCliError: If the git command fails (e.g., the remote already exists). - :raises FileNotFoundError: If git_dir does not exist - """ - self._run_git_command(["remote", "add", remote_name, remote_url], git_dir) + if not git_dir.is_dir(): + raise FileNotFoundError(f"git directory does not exist: {git_dir}") + try: + self._run_git_command(["remote", "add", remote_name, remote_url], git_dir) + except GitCliError as e: + raise GitCliError(f"error adding remote {remote_name}") from e @lock_git_operation() def add_token(self, git_dir: Path, origin: str, token: str): - """ - Configures git with a local configuration to use the given token for the given origin. + """Configure git with a local configuration to use the given token for the given origin. + Internally, this sets the `http./.extraheader` git configuration key to `AUTHORIZATION: basic `. This is similar to how the GitHub Checkout action does it: - https://github.com/actions/checkout/blob/eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871/adrs/0153-checkout-v2.md#PAT + https://github.com/actions/checkout/blob/eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871/adrs/0153-checkout-v2.md#PAT. The CLI outputs of this command are suppressed to avoid leaking the token. - :param git_dir: The directory of the git repository. - :param origin: The origin to use the token for. Origin is SCHEME://HOST[:PORT] (without trailing slash). - :param token: The token to use. + + Parameters + ---------- + git_dir + The directory of the git repository. + origin + The origin to use the token for. Origin is SCHEME://HOST[:PORT] (without trailing slash). + token + The token to use. """ http_basic_token = base64.b64encode(f"x-access-token:{token}".encode()).decode() @@ -340,10 +455,14 @@ def add_token(self, git_dir: Path, origin: str, token: str): @lock_git_operation() def clear_token(self, git_dir, origin): - """ - Clear the token for the given origin. - :param git_dir: The directory of the git repository. - :param origin: The origin to clear the token for. + """Clear the token for the given origin. + + Parameters + ---------- + git_dir + The directory of the git repository. + origin + The origin to clear the token for. """ self._run_git_command( [ @@ -357,41 +476,76 @@ def clear_token(self, git_dir, origin): @lock_git_operation() def fetch_all(self, git_dir: Path): + """Fetch all changes from all remotes. + + Parameters + ---------- + git_dir + The directory of the git repository. + + Raises + ------ + GitCliError + If the git command fails. + FileNotFoundError + If git_dir does not exist. """ - Fetch all changes from all remotes. - :param git_dir: The directory of the git repository. - :raises GitCliError: If the git command fails. - :raises FileNotFoundError: If git_dir does not exist - """ - self._run_git_command(["fetch", "--all", "--quiet"], git_dir) + if not git_dir.is_dir(): + raise FileNotFoundError(f"git directory {git_dir} does not exist") + try: + self._run_git_command(["fetch", "--all", "--quiet"], git_dir) + except GitCliError as e: + raise GitCliError(f"error running git fetch --all in {git_dir}") from e def does_branch_exist(self, git_dir: Path, branch_name: str): - """ - Check if a branch exists in a git repository. - Note: If git_dir is not a git repository, this method will return False. + """Check if a branch exists in a git repository. + + If git_dir is not a git repository, this method will return False. Note: This method is intentionally not locked with lock_git_operation, as it only reads the git repository and does not modify it. 
- :param branch_name: The name of the branch. - :param git_dir: The directory of the git repository. - :return: True if the branch exists, False otherwise. - :raises GitCliError: If the git command fails. - :raises FileNotFoundError: If git_dir does not exist + + Parameters + ---------- + branch_name + The name of the branch. + git_dir + The directory of the git repository. + + Returns + ------- + bool + True if the branch exists, False otherwise. + + Raises + ------ + FileNotFoundError + If git_dir does not exist. """ + if not git_dir.is_dir(): + raise FileNotFoundError(f"git directory {git_dir} does not exist") + ret = self._run_git_command( ["show-ref", "--verify", "--quiet", f"refs/heads/{branch_name}"], git_dir, check_error=False, ) - return ret.returncode == 0 def does_remote_exist(self, remote_url: str) -> bool: - """ - Check if a remote exists. + """Check if a remote exists. + Note: This method is intentionally not locked with lock_git_operation, as it only reads a remote and does not modify a git repository. - :param remote_url: The URL of the remote. - :return: True if the remote exists, False otherwise. + + Parameters + ---------- + remote_url + The URL of the remote. + + Returns + ------- + bool + True if the remote exists, False otherwise. """ ret = self._run_git_command(["ls-remote", remote_url], check_error=False) @@ -404,56 +558,111 @@ def checkout_branch( branch: str, track: bool = False, ): + """Checkout a branch in a git repository. + + Parameters + ---------- + git_dir + The directory of the git repository. + branch + The branch to check out. + track + If True, set the branch to track the remote branch with the same name (sets the --track flag). + A new local branch will be created with the name inferred from branch. + For example, if branch is "upstream/main", the new branch will be "main". + + Raises + ------ + GitCliError + If the git command fails. + FileNotFoundError + If git_dir does not exist. """ - Checkout a branch in a git repository. - :param git_dir: The directory of the git repository. - :param branch: The branch to check out. - :param track: If True, set the branch to track the remote branch with the same name (sets the --track flag). - A new local branch will be created with the name inferred from branch. - For example, if branch is "upstream/main", the new branch will be "main". - :raises GitCliError: If the git command fails. - :raises FileNotFoundError: If git_dir does not exist - """ + if not git_dir.is_dir(): + raise FileNotFoundError(f"git directory {git_dir} does not exist") track_flag = ["--track"] if track else [] - self._run_git_command( - ["checkout", "--quiet"] + track_flag + [branch], - git_dir, - ) + + try: + self._run_git_command( + ["checkout", "--quiet"] + track_flag + [branch], + git_dir, + ) + except GitCliError as e: + raise GitCliError( + f"error running git checkout {' '.join(track_flag)} in {git_dir}" + ) from e @lock_git_operation() def checkout_new_branch( self, git_dir: Path, branch: str, start_point: str | None = None ): + """Checkout a new branch in a git repository. + + Parameters + ---------- + git_dir + The directory of the git repository. + branch + The name of the new branch. + start_point + The name of the branch to branch from, or None to branch from the current branch. + + Raises + ------ + FileNotFoundError + If git_dir does not exist. + GitCliError + If the git command fails. """ - Checkout a new branch in a git repository. - :param git_dir: The directory of the git repository. 
- :param branch: The name of the new branch. - :param start_point: The name of the branch to branch from, or None to branch from the current branch. - :raises FileNotFoundError: If git_dir does not exist - """ + if not git_dir.is_dir(): + raise FileNotFoundError(f"git directory {git_dir} does not exist") start_point_option = [start_point] if start_point else [] + cmd = ["checkout", "--quiet", "-b", branch] + start_point_option - self._run_git_command( - ["checkout", "--quiet", "-b", branch] + start_point_option, git_dir - ) + try: + self._run_git_command(cmd, git_dir) + except GitCliError as e: + raise GitCliError(f"error running git {' '.join(cmd)} in {git_dir}") from e def diffed_files( self, git_dir: Path, commit_a: str, commit_b: str = "HEAD" ) -> Iterator[Path]: - """ - Get the files that are different between two commits. - :param git_dir: The directory of the git repository. This should be the root of the repository. + """Get the files that are different between two commits. + + Parameters + ---------- + git_dir + The directory of the git repository. This should be the root of the repository. If it is a subdirectory, only the files in that subdirectory will be returned. - :param commit_a: The first commit. - :param commit_b: The second commit. - :return: An iterator over the files that are different between the two commits. + commit_a + The first commit. + commit_b + The second commit. + + Returns + ------- + Iterator[Path] + An iterator over the files that are different between the two commits. + + Raises + ------ + GitCliError + If the git command fails. + FileNotFoundError + If the git_dir does not exist. """ - + if not git_dir.is_dir(): + raise FileNotFoundError(f"git directory {git_dir} does not exist") # --relative ensures that we do not assemble invalid paths below if git_dir is a subdirectory - ret = self._run_git_command( - ["diff", "--name-only", "--relative", commit_a, commit_b], - git_dir, - ) + cmd = ["diff", "--name-only", "--relative", commit_a, commit_b] + + try: + ret = self._run_git_command( + cmd, + git_dir, + ) + except GitCliError as e: + raise GitCliError(f"error running git {' '.join(cmd)} in {git_dir}") from e return (git_dir / line for line in ret.stdout.splitlines()) @@ -466,8 +675,7 @@ def clone_fork_and_branch( new_branch: str, base_branch: str = "main", ): - """ - Convenience method to do the following: + """Do the following: 1. Clone the repository at origin_url into target_dir (resetting the directory if it already exists). 2. Add a remote named "upstream" with the URL upstream_url (ignoring if it already exists). 3. Fetch all changes from all remotes. @@ -477,13 +685,23 @@ def clone_fork_and_branch( This is usually used to create a new branch for a pull request. In this case, origin_url is the URL of the user's fork, and upstream_url is the URL of the upstream repository. - :param origin_url: The URL of the repository (fork) to clone. - :param target_dir: The directory to clone the repository into. - :param upstream_url: The URL of the upstream repository. - :param new_branch: The name of the branch to create. - :param base_branch: The name of the base branch to branch from. - - :raises GitCliError: If a git command fails. + Parameters + ---------- + origin_url + The URL of the repository (fork) to clone. + target_dir + The directory to clone the repository into. + upstream_url + The URL of the upstream repository. + new_branch + The name of the branch to create. + base_branch + The name of the base branch to branch from. 
+ + Raises + ------ + GitCliError + If a git command fails. """ try: self.clone_repo(origin_url, target_dir) @@ -493,9 +711,9 @@ def clone_fork_and_branch( f"Could not clone {origin_url} - does the remote exist?" ) logger.info( - f"Cloning {origin_url} into {target_dir} was not successful - " - f"trying to reset hard since the directory already exists. This will fail if the target directory is " - f"not a git repository." + "Cloning %s into %s was not successful - trying to reset hard since the directory already exists. This will fail if the target directory is not a git repository.", + origin_url, + target_dir, ) self.reset_hard(target_dir) @@ -532,13 +750,11 @@ def clone_fork_and_branch( try: logger.info( - f"Trying to checkout branch {new_branch} without creating a new branch" + "Trying to checkout branch %s without creating a new branch", new_branch ) self.checkout_branch(target_dir, new_branch) except GitCliError: - logger.info( - f"It seems branch {new_branch} does not exist. Creating it.", - ) + logger.info("It seems branch %s does not exist. Creating it.", new_branch) self.checkout_new_branch(target_dir, new_branch, start_point=base_branch) @@ -546,7 +762,9 @@ class GitPlatformBackend(ABC): """ A backend for interacting with a git platform (e.g. GitHub). - Implementation Note: If you wonder what should be in this class vs. the GitCli class, the GitPlatformBackend class + Implementation Note + ------------------ + If you wonder what should be in this class vs. the GitCli class, the GitPlatformBackend class should contain the logic for interacting with the platform (e.g. GitHub), while the GitCli class should contain the logic for interacting with the git repository itself. If you need to know anything specific about the platform, it should be in the GitPlatformBackend class. @@ -559,18 +777,30 @@ class GitPlatformBackend(ABC): GIT_PLATFORM_ORIGIN = "https://github.com" def __init__(self, git_cli: GitCli): - """ - Create a new GitPlatformBackend. - :param git_cli: The GitCli instance to use for interacting with git repositories. + """Create a new GitPlatformBackend. + + Parameters + ---------- + git_cli + The GitCli instance to use for interacting with git repositories. """ self.cli = git_cli @abstractmethod def does_repository_exist(self, owner: str, repo_name: str) -> bool: - """ - Check if a repository exists. - :param owner: The owner of the repository. - :param repo_name: The name of the repository. + """Check if a repository exists. + + Parameters + ---------- + owner + The owner of the repository. + repo_name + The name of the repository. + + Returns + ------- + bool + True if the repository exists, False otherwise. """ pass @@ -580,15 +810,30 @@ def get_remote_url( repo_name: str, connection_mode: GitConnectionMode = GitConnectionMode.HTTPS, ) -> str: - """ - Get the URL of a remote repository. - :param owner: The owner of the repository. - :param repo_name: The name of the repository. - :param connection_mode: The connection mode to use. - :raises ValueError: If the connection mode is not supported. - :raises RepositoryNotFoundError: If the repository does not exist. This is only raised if the backend relies on - the repository existing to generate the URL. - """ + """Get the URL of a remote repository. + + Parameters + ---------- + owner + The owner of the repository. + repo_name + The name of the repository. + connection_mode + The connection mode to use. + + Returns + ------- + str + The remote URL. 
+ + Raises + ------ + ValueError + If the connection mode is not supported. + RepositoryNotFoundError + If the repository does not exist. This is only raised if the backend relies + on the repository existing to generate the URL. + """ # noqa: DOC502 (RepositoryNotFoundError only raised by subclasses) match connection_mode: case GitConnectionMode.HTTPS: return f"{self.GIT_PLATFORM_ORIGIN}/{owner}/{repo_name}.git" @@ -598,26 +843,45 @@ def get_remote_url( @abstractmethod def push_to_repository( self, owner: str, repo_name: str, git_dir: Path, branch: str - ): - """ - Push changes to a repository. - :param owner: The owner of the repository. - :param repo_name: The name of the repository. - :param git_dir: The directory of the git repository. - :param branch: The branch to push to. - :raises GitPlatformError: If the push fails. + ) -> None: + """Push changes to a repository. + + Parameters + ---------- + owner + The owner of the repository. + repo_name + The name of the repository. + git_dir + The directory of the git repository. + branch + The branch to push to. + + Raises + ------ + GitPlatformError + If the push fails. """ pass @abstractmethod - def fork(self, owner: str, repo_name: str): - """ - Fork a repository. If the fork already exists, do nothing except syncing the default branch name. + def fork(self, owner: str, repo_name: str) -> None: + """Fork a repository. If the fork already exists, do nothing except syncing the default branch name. + Forks are created under the current user's account (see `self.user`). The name of the forked repository is the same as the original repository. - :param owner: The owner of the repository. - :param repo_name: The name of the repository. - :raises RepositoryNotFoundError: If the repository does not exist. + + Parameters + ---------- + owner + The owner of the repository. + repo_name + The name of the repository. + + Raises + ------ + RepositoryNotFoundError + If the repository does not exist. """ pass @@ -629,62 +893,95 @@ def clone_fork_and_branch( target_dir: Path, new_branch: str, base_branch: str = "main", - ): - """ - Identical to `GitCli::clone_fork_and_branch`, but generates the URLs from the repository name. + ) -> None: + """Clone a fork and create a new branch from the base branch. - :param upstream_owner: The owner of the upstream repository. - :param repo_name: The name of the repository. - :param target_dir: The directory to clone the repository into. - :param new_branch: The name of the branch to create. - :param base_branch: The name of the base branch to branch from. + Identical to `GitCli::clone_fork_and_branch`, but generates the URLs from the repository name. - :raises GitCliError: If a git command fails. + Parameters + ---------- + upstream_owner + The owner of the upstream repository. + repo_name + The name of the repository. + target_dir + The directory to clone the repository into. + new_branch + The name of the branch to create. + base_branch + The name of the base branch to branch from. + + Raises + ------ + GitCliError + If a git command fails. 
""" - self.cli.clone_fork_and_branch( - origin_url=self.get_remote_url(self.user, repo_name), - target_dir=target_dir, - upstream_url=self.get_remote_url(upstream_owner, repo_name), - new_branch=new_branch, - base_branch=base_branch, - ) + try: + self.cli.clone_fork_and_branch( + origin_url=self.get_remote_url(self.user, repo_name), + target_dir=target_dir, + upstream_url=self.get_remote_url(upstream_owner, repo_name), + new_branch=new_branch, + base_branch=base_branch, + ) + except GitCliError as e: + raise GitCliError( + f"error cloning a fork of {upstream_owner}/{repo_name} into {target_dir} or checking out the new branch {new_branch} from {base_branch}" + ) from e @property @abstractmethod def user(self) -> str: - """ - The username of the logged-in user, i.e. the owner of forked repositories. + """The username of the logged-in user, i.e. the owner of forked repositories. + + Returns + ------- + str + The username of the logged-in user. """ pass @abstractmethod - def _sync_default_branch(self, upstream_owner: str, upstream_repo: str): - """ - Sync the default branch of the forked repository with the upstream repository. - :param upstream_owner: The owner of the upstream repository. - :param upstream_repo: The name of the upstream repository. + def _sync_default_branch(self, upstream_owner: str, upstream_repo: str) -> None: + """Sync the default branch of the forked repository with the upstream repository. + + Parameters + ---------- + upstream_owner + The owner of the upstream repository. + upstream_repo + The name of the upstream repository. """ pass @abstractmethod def get_api_requests_left(self) -> int | Bound | None: - """ - Get the number of remaining API requests for the backend. - Returns `Bound.INFINITY` if the backend does not have a rate limit. - Returns None if an exception occurred while getting the rate limit. + """Get the number of remaining API requests for the backend. + Returns + ------- + int, Bound, or None + The number of remaining API requests. Returns `Bound.INFINITY` if the backend does not have a rate limit. + Returns None if an exception occurred while getting the rate limit. + + Notes + ----- Implementations may print diagnostic information about the API limit. """ pass def is_api_limit_reached(self) -> bool: - """ - Returns True if the API limit has been reached, False otherwise. + """Return True if the API limit has been reached, False otherwise. If an exception occurred while getting the rate limit, this method returns True, assuming the limit has been reached. Additionally, implementations may print diagnostic information about the API limit. + + Returns + ------- + bool + True if the API limit has been reached, False otherwise. """ return self.get_api_requests_left() in (0, None) @@ -698,18 +995,36 @@ def create_pull_request( title: str, body: str, ) -> PullRequestDataValid: - """ - Create a pull request from a forked repository. It is assumed that the forked repository is owned by the - current user and has the same name as the target repository. - :param target_owner: The owner of the target repository. - :param target_repo: The name of the target repository. - :param base_branch: The base branch of the pull request, located in the target repository. - :param head_branch: The head branch of the pull request, located in the forked repository. - :param title: The title of the pull request. - :param body: The body of the pull request. - :returns: The data of the created pull request. - :raises GitPlatformError: If the pull request could not be created. 
- :raises DuplicatePullRequestError: If a pull request already exists and the backend checks for it. + """Create a pull request from a forked repository. + + It is assumed that the forked repository is owned by the current user and has the same name as the target repository. + + Parameters + ---------- + target_owner + The owner of the target repository. + target_repo + The name of the target repository. + base_branch + The base branch of the pull request, located in the target repository. + head_branch + The head branch of the pull request, located in the forked repository. + title + The title of the pull request. + body + The body of the pull request. + + Returns + ------- + PullRequestDataValid + The data of the created pull request. + + Raises + ------ + GitPlatformError + If the pull request could not be created. + DuplicatePullRequestError + If a pull request already exists and the backend checks for it. """ pass @@ -717,22 +1032,31 @@ def create_pull_request( def comment_on_pull_request( self, repo_owner: str, repo_name: str, pr_number: int, comment: str ) -> None: - """ - Comment on an existing pull request. - :param repo_owner: The owner of the repository. - :param repo_name: The name of the repository. - :param pr_number: The number of the pull request. - :param comment: The comment to post. - :raises RepositoryNotFoundError: If the repository does not exist. - :raises GitPlatformError: If the comment could not be posted, including if the pull request does not exist. + """Comment on an existing pull request. + + Parameters + ---------- + repo_owner + The owner of the repository. + repo_name + The name of the repository. + pr_number + The number of the pull request. + comment + The comment to post. + + Raises + ------ + RepositoryNotFoundError + If the repository does not exist. + GitPlatformError + If the comment could not be posted, including if the pull request does not exist. """ pass class _Github3SessionWrapper: - """ - This is a wrapper around the github3.session.GitHubSession that allows us to intercept the response headers. - """ + """Wrapper around the github3.session.GitHubSession that allows us to intercept the response headers.""" def __init__(self, session: GitHubSession): super().__init__() @@ -771,15 +1095,21 @@ class GitHubBackend(GitPlatformBackend): def __init__( self, github3_client: github3.GitHub, pygithub_client: github.Github, token: str ): - """ - Create a new GitHubBackend. + """Create a new GitHubBackend. + Note: Because we need additional response headers, we wrap the github3 session of the github3 client with our own session wrapper and replace the github3 client's session with it. - :param github3_client: The github3 client to use for interacting with the GitHub API. - :param pygithub_client: The PyGithub client to use for interacting with the GitHub API. - :param token: The token used for writing to git repositories. Note that you need to authenticate github3 - and PyGithub yourself. Use the `from_token` class method to create an instance - that has all necessary clients set up. + + Parameters + ---------- + github3_client + The github3 client to use for interacting with the GitHub API. + pygithub_client + The PyGithub client to use for interacting with the GitHub API. + token + The token used for writing to git repositories. Note that you need to authenticate github3 + and PyGithub yourself. Use the `from_token` class method to create an instance + that has all necessary clients set up. 
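Taken together, the GitPlatformBackend methods documented above cover the bot's fork, branch, push, and pull-request flow. A hedged end-to-end sketch that only uses calls whose signatures appear in this diff; the feedstock name, branch, and PR text are placeholders:

```python
from pathlib import Path

from conda_forge_tick.git_utils import GitHubBackend, get_bot_token

backend = GitHubBackend.from_token(get_bot_token())

backend.fork("conda-forge", "example-feedstock")  # no-op if the fork already exists
backend.clone_fork_and_branch(
    upstream_owner="conda-forge",
    repo_name="example-feedstock",
    target_dir=Path("/tmp/example-feedstock"),
    new_branch="bot-update",
    base_branch="main",
)

# ... edit the recipe, then stage and commit via backend.cli (the GitCli instance) ...

backend.push_to_repository(
    backend.user, "example-feedstock", Path("/tmp/example-feedstock"), "bot-update"
)
pr = backend.create_pull_request(
    target_owner="conda-forge",
    target_repo="example-feedstock",
    base_branch="main",
    head_branch="bot-update",
    title="Example update",
    body="Opened from a documentation sketch, not by the real bot.",
)
```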
""" cli = GitCli() super().__init__(cli) @@ -808,14 +1138,27 @@ def _get_repo(self, owner: str, repo_name: str) -> None | github3.repos.Reposito ) except Exception as e: logger.warning( - f"GitHub API error fetching repo {owner}/{repo_name}.", - exc_info=e, + "GitHub API error fetching repo %s/%s.", owner, repo_name, exc_info=e ) raise e return repo def does_repository_exist(self, owner: str, repo_name: str) -> bool: + """Check if a repository exists. + + Parameters + ---------- + owner + The owner of the repository. + repo_name + The name of the repository. + + Returns + ------- + bool + True if the repository exists, False otherwise. + """ try: self._get_repo(owner, repo_name) return True @@ -848,7 +1191,7 @@ def fork(self, owner: str, repo_name: str): repo = self._get_repo(owner, repo_name) - logger.debug(f"Forking {owner}/{repo_name}.") + logger.debug("Forking %s/%s.", owner, repo_name) repo.create_fork() # Sleep to make sure the fork is created before we go after it @@ -865,7 +1208,11 @@ def _sync_default_branch(self, upstream_owner: str, repo_name: str): return logger.info( - f"Syncing default branch of {fork_owner}/{repo_name} with {upstream_owner}/{repo_name}..." + "Syncing default branch of %s/%s with %s/%s...", + fork_owner, + repo_name, + upstream_owner, + repo_name, ) fork.rename_branch(fork.default_branch, upstream_repo.default_branch) @@ -875,6 +1222,13 @@ def _sync_default_branch(self, upstream_owner: str, repo_name: str): @cached_property def user(self) -> str: + """The username of the logged-in user, i.e. the owner of forked repositories. + + Returns + ------- + str + The username of the logged-in user. + """ return self.pygithub_client.get_user().login def get_api_requests_left(self) -> int | None: @@ -905,8 +1259,8 @@ def get_api_requests_left(self) -> int | None: return remaining_limit logger.info( - "GitHub API limit reached, will reset at " - f"{datetime.utcfromtimestamp(reset_timestamp).strftime('%Y-%m-%dT%H:%M:%SZ')}" + "GitHub API limit reached, will reset at %s", + datetime.utcfromtimestamp(reset_timestamp).strftime("%Y-%m-%dT%H:%M:%SZ"), ) return remaining_limit @@ -1030,12 +1384,16 @@ def push_to_repository( self, owner: str, repo_name: str, git_dir: Path, branch: str ): logger.debug( - f"Dry Run: Pushing changes from {git_dir} to {owner}/{repo_name} on branch {branch}." + "Dry Run: Pushing changes from %s to %s/%s on branch %s.", + git_dir, + owner, + repo_name, + branch, ) def fork(self, owner: str, repo_name: str): if repo_name in self._repos: - logger.debug(f"Fork of {repo_name} already exists. Doing nothing.") + logger.debug("Fork of %s already exists. Doing nothing.", repo_name) return if not self.does_repository_exist(owner, repo_name): @@ -1044,13 +1402,13 @@ def fork(self, owner: str, repo_name: str): ) logger.debug( - f"Dry Run: Creating fork of {owner}/{repo_name} for user {self._USER}." + "Dry Run: Creating fork of %s/%s for user %s.", owner, repo_name, self._USER ) self._repos[repo_name] = owner def _sync_default_branch(self, upstream_owner: str, upstream_repo: str): logger.debug( - f"Dry Run: Syncing default branch of {upstream_owner}/{upstream_repo}." + "Dry Run: Syncing default branch of %s/%s.", upstream_owner, upstream_repo ) @property @@ -1059,11 +1417,15 @@ def user(self) -> str: @staticmethod def print_dry_run_message(title: str, data: dict[str, str]): - """ - Print a dry run output message. - :param title: The title of the message. - :param data: The data to print. The keys are the field names and the values are the field values. 
- Please capitalize the keys for consistency. + """Print a dry run output message. + + Parameters + ---------- + title + The title of the message. + data + The data to print. The keys are the field names and the values are the field values. + Please capitalize the keys for consistency. """ border = "==============================================================" output = textwrap.dedent( @@ -1139,6 +1501,8 @@ def comment_on_pull_request( def github_backend() -> GitHubBackend: """ + Return the GitHub backend. + This helper method will be removed in the future, use the GitHubBackend class directly. """ return GitHubBackend.from_token(get_bot_token()) diff --git a/conda_forge_tick/hashing.py b/conda_forge_tick/hashing.py index dd6649536..5dab4c330 100644 --- a/conda_forge_tick/hashing.py +++ b/conda_forge_tick/hashing.py @@ -89,7 +89,7 @@ def hash_url(url, timeout=None, progress=False, hash_type="sha256"): hash : str or None The hash, possibly None if the operation timed out or the url does not exist. - """ + """ # noqa: DOC501 _hash = None try: @@ -120,6 +120,7 @@ def hash_url(url, timeout=None, progress=False, hash_type="sha256"): raise e if isinstance(_hash, tuple): + # TODO: What is this? (remove noqa from above) raise eval(_hash[0]) return _hash diff --git a/conda_forge_tick/import_to_pkg.py b/conda_forge_tick/import_to_pkg.py index a11620fa9..11aedc2fa 100644 --- a/conda_forge_tick/import_to_pkg.py +++ b/conda_forge_tick/import_to_pkg.py @@ -18,7 +18,6 @@ from conda_forge_tick.cli_context import CliContext from conda_forge_tick.lazy_json_backends import ( CF_TICK_GRAPH_DATA_BACKENDS, - CF_TICK_GRAPH_GITHUB_BACKEND_BASE_URL, CF_TICK_GRAPH_GITHUB_BACKEND_NUM_DIRS, LazyJson, dump, @@ -26,6 +25,7 @@ lazy_json_override_backends, load, ) +from conda_forge_tick.settings import settings logger = logging.getLogger(__name__) @@ -80,7 +80,7 @@ def _get_head_letters(name): def _ranked_hubs_authorities() -> list[str]: req = requests.get( os.path.join( - CF_TICK_GRAPH_GITHUB_BACKEND_BASE_URL, + settings().graph_github_backend_raw_base_url, "ranked_hubs_authorities.json", ) ) @@ -99,7 +99,7 @@ def _import_to_pkg_maps_cache(import_first_letters: str) -> dict[str, set[str]]: else: req = requests.get( os.path.join( - CF_TICK_GRAPH_GITHUB_BACKEND_BASE_URL, + settings().graph_github_backend_raw_base_url, pth, ) ) @@ -152,7 +152,6 @@ def map_import_to_package(import_name: str) -> str: pkg_name : str The name of the package. """ - supplying_pkgs, found_import_name = get_pkgs_for_import(import_name) if supplying_pkgs is None: return found_import_name @@ -170,7 +169,7 @@ def map_import_to_package(import_name: str) -> str: def extract_pkg_from_import(name): """Provide the name of the package that matches with the import provided, - with the maps between the imports and artifacts and packages that matches + with the maps between the imports and artifacts and packages that matches. 
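# Hedged illustration (not from this module) of how the ranked hubs/authorities
# list fetched above might be used to choose a single supplying package for an
# import; the helper name and the tie-breaking rule are assumptions about the
# implementation.
def pick_supplying_package(supplying_pkgs: set, ranked: list) -> str:
    for pkg in ranked:  # ranked roughly from most to least central
        if pkg in supplying_pkgs:
            return pkg
    return sorted(supplying_pkgs)[0]  # deterministic fallback


# pick_supplying_package({"matplotlib", "matplotlib-base"},
#                        ["matplotlib-base", "matplotlib"])
# -> "matplotlib-base"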
Parameters ---------- @@ -197,22 +196,18 @@ def extract_pkg_from_import(name): def _fetch_arch(arch): # Generate a set a urls to generate for an channel/arch combo try: - logger.info(f"fetching {arch}") + logger.info("fetching %s", arch) r = requests.get( f"https://conda.anaconda.org/conda-forge/{arch}/repodata.json.bz2" ) r.raise_for_status() repodata = orjson.loads(bz2.BZ2File(io.BytesIO(r.content)).read()) except Exception as e: - logger.error(f"Failed to fetch {arch}: {e}") + logger.error("Failed to fetch %s: %s", arch, e) return - logger.info( - " found %d .conda artifacts" % (len(repodata["packages.conda"])), - ) - logger.info( - " found %d .tar.bz2 artifacts" % (len(repodata["packages"])), - ) + logger.info(" found %d .conda artifacts", len(repodata["packages.conda"])) + logger.info(" found %d .tar.bz2 artifacts", len(repodata["packages"])) for p in repodata["packages.conda"]: yield f"{arch}/{p}" @@ -282,7 +277,7 @@ def _get_imports_and_files(file): backend="oci", ) except Exception as e: - logger.error(f"Failed to get artifact info for {file}: {e}") + logger.error("Failed to get artifact info for %s: %s", file, e) data = None if data is None: @@ -327,9 +322,10 @@ def _main_import_to_pkg(max_artifacts: int): return new_files = all_files - indexed_files logger.info( - f"Found {len(new_files)} new files to index " - f"out of {len(all_files)} total " - f"({(1 - len(new_files) / len(all_files)) * 100:0.4}% indexed).", + "Found %d new files to index out of %d total (%.4f%% indexed).", + len(new_files), + len(all_files), + (1 - len(new_files) / len(all_files)) * 100 if len(all_files) else 0.0, ) with ProcessPoolExecutor(max_workers=4) as exc: diff --git a/conda_forge_tick/lazy_json_backends.py b/conda_forge_tick/lazy_json_backends.py index d6021c62f..27e0eea9f 100644 --- a/conda_forge_tick/lazy_json_backends.py +++ b/conda_forge_tick/lazy_json_backends.py @@ -31,6 +31,7 @@ from .cli_context import CliContext from .executors import lock_git_operation +from .settings import settings logger = logging.getLogger(__name__) @@ -56,15 +57,11 @@ "migrators", ] -CF_TICK_GRAPH_GITHUB_BACKEND_REPO = "regro/cf-graph-countyfair" -CF_TICK_GRAPH_GITHUB_BACKEND_BASE_URL = ( - f"https://github.com/{CF_TICK_GRAPH_GITHUB_BACKEND_REPO}/raw/master" -) CF_TICK_GRAPH_GITHUB_BACKEND_NUM_DIRS = 5 def get_sharded_path(file_path, n_dirs=CF_TICK_GRAPH_GITHUB_BACKEND_NUM_DIRS): - """computed a sharded location for the LazyJson file.""" + """Compute a sharded location for the LazyJson file.""" top_dir, file_name = os.path.split(file_path) if len(top_dir) == 0 or top_dir == "lazy_json": @@ -207,7 +204,7 @@ class GithubLazyJsonBackend(LazyJsonBackend): _n_requests = 0 def __init__(self) -> None: - self.base_url = CF_TICK_GRAPH_GITHUB_BACKEND_BASE_URL + self._base_url = settings().graph_github_backend_raw_base_url @property def base_url(self) -> str: @@ -231,7 +228,7 @@ def _inform_web_request(cls): cls._n_requests += 1 if cls._n_requests % 20 == 0: logger.info( - f"Made {cls._n_requests} requests to the GitHub online backend.", + "Made %d requests to the GitHub online backend.", cls._n_requests ) if cls._n_requests == 20: logger.warning( @@ -327,7 +324,7 @@ def __init__(self): from conda_forge_tick.git_utils import github_client self._gh = github_client() - self._repo = self._gh.get_repo(CF_TICK_GRAPH_GITHUB_BACKEND_REPO) + self._repo = self._gh.get_repo(settings().graph_github_backend_repo) @contextlib.contextmanager def transaction_context(self) -> "Iterator[FileLazyJsonBackend]": @@ -499,7 +496,7 @@ def hget(self, 
name: str, key: str) -> str: for tr in range(ntries): try: cnts = requests.get( - f"https://api.github.com/repos/{CF_TICK_GRAPH_GITHUB_BACKEND_REPO}/contents/{pth}", + f"https://api.github.com/repos/{settings().graph_github_backend_repo}/contents/{pth}", headers=hrds, ) cnts.raise_for_status() @@ -943,7 +940,7 @@ def sync_lazy_json_object( class LazyJson(MutableMapping): - """Lazy load a dict from a json file and save it when updated""" + """Lazy load a dict from a json file and save it when updated.""" def __init__(self, file_name: str): self.file_name = file_name @@ -1083,7 +1080,13 @@ def __eq__(self, other: object) -> bool: def default(obj: Any) -> Any: - """For custom object serialization.""" + """For custom object serialization. + + Raises + ------ + TypeError + If the object is not JSON serializable. + """ if isinstance(obj, LazyJson): return {"__lazy_json__": obj.file_name} elif isinstance(obj, Set): @@ -1112,7 +1115,7 @@ def dumps( obj: Any, default: "Callable[[Any], Any]" = default, ) -> str: - """Returns a JSON string from a Python object.""" + """Return a JSON string from a Python object.""" return orjson.dumps( obj, option=orjson.OPT_SORT_KEYS | orjson.OPT_INDENT_2, @@ -1125,7 +1128,7 @@ def dump( fp: IO[str], default: "Callable[[Any], Any]" = default, ) -> None: - """Returns a JSON string from a Python object.""" + """Return a JSON string from a Python object.""" return fp.write(dumps(obj, default=default)) @@ -1145,7 +1148,7 @@ def _call_object_hook( def loads(s: str, object_hook: "Callable[[dict], Any]" = object_hook) -> dict: - """Loads a string as JSON, with appropriate object hooks""" + """Load a string as JSON, with appropriate object hooks.""" data = orjson.loads(s) if object_hook is not None: data = _call_object_hook(data, object_hook) @@ -1156,7 +1159,7 @@ def load( fp: IO[str], object_hook: "Callable[[dict], Any]" = object_hook, ) -> dict: - """Loads a file object as JSON, with appropriate object hooks.""" + """Load a file object as JSON, with appropriate object hooks.""" return loads(fp.read()) diff --git a/conda_forge_tick/make_graph.py b/conda_forge_tick/make_graph.py index a6508e39d..3aa55e8c9 100644 --- a/conda_forge_tick/make_graph.py +++ b/conda_forge_tick/make_graph.py @@ -24,6 +24,7 @@ from .all_feedstocks import get_all_feedstocks, get_archived_feedstocks from .cli_context import CliContext from .executors import executor +from .settings import settings from .utils import as_iterable, dump_graph, load_graph, sanitize_string # from conda_forge_tick.profiler import profiling @@ -34,8 +35,6 @@ pin_sep_pat = re.compile(r" |>|<|=|\[") RNG = secrets.SystemRandom() -RANDOM_FRAC_TO_UPDATE = 0.1 - # AFAIK, go and rust do not have strong run exports and so do not need to # appear here COMPILER_STUBS_WITH_STRONG_EXPORTS = [ @@ -200,7 +199,7 @@ def _build_graph_process_pool( futures = { pool.submit(get_attrs, name, mark_not_archived=mark_not_archived): name for name in names - if RNG.random() < RANDOM_FRAC_TO_UPDATE + if RNG.random() < settings().frac_make_graph } logger.info("submitted all nodes") @@ -217,11 +216,14 @@ def _build_graph_process_pool( f.result() if n_left % 100 == 0: logger.info( - f"nodes left {n_left: >5d} - eta {int(eta): >5d}s: finished {name}" + "nodes left %5d - eta %5ds: finished %s", n_left, int(eta), name ) except Exception as e: logger.error( - f"nodes left {n_left: >5d} - eta {int(eta): >5d}s: error adding {name} to the graph", + "nodes left %5d - eta %5ds: error adding %s to the graph", + n_left, + int(eta), + name, exc_info=e, ) @@ -231,13 
+233,14 @@ def _build_graph_sequential( mark_not_archived=False, ) -> None: for name in names: - if RNG.random() >= RANDOM_FRAC_TO_UPDATE: + if RNG.random() >= settings().frac_make_graph: + logger.debug("skipping %s due to random fraction to update", name) continue try: get_attrs(name, mark_not_archived=mark_not_archived) except Exception as e: - logger.error(f"Error updating node {name}", exc_info=e) + logger.error("Error updating node %s", name, exc_info=e) def _get_all_deps_for_node(attrs, outputs_lut): @@ -333,7 +336,7 @@ def _update_graph_nodes( mark_not_archived=mark_not_archived, ) logger.info("feedstock fetch loop completed") - logger.info(f"memory usage: {psutil.virtual_memory()}") + logger.info("memory usage: %s", psutil.virtual_memory()) def _update_nodes_with_archived(names): @@ -366,9 +369,9 @@ def main( tot_names_for_this_job = _get_names_for_job(tot_names, job, n_jobs) names_for_this_job = _get_names_for_job(names, job, n_jobs) archived_names_for_this_job = _get_names_for_job(archived_names, job, n_jobs) - logger.info(f"total # of nodes across all backends: {len(tot_names)}") - logger.info(f"active nodes: {len(names)}") - logger.info(f"archived nodes: {len(archived_names)}") + logger.info("total # of nodes across all backends: %d", len(tot_names)) + logger.info("active nodes: %d", len(names)) + logger.info("archived nodes: %d", len(archived_names)) if update_nodes_and_edges: gx = load_graph() diff --git a/conda_forge_tick/make_migrators.py b/conda_forge_tick/make_migrators.py index 1dabc03d3..1751e98a4 100644 --- a/conda_forge_tick/make_migrators.py +++ b/conda_forge_tick/make_migrators.py @@ -6,17 +6,16 @@ import pprint import re import secrets +import sys import time import typing from concurrent.futures import as_completed from typing import ( + Any, List, Mapping, MutableMapping, MutableSequence, - MutableSet, - Sequence, - Set, Union, cast, ) @@ -46,12 +45,13 @@ CrossCompilationForARMAndPower, CrossPythonMigrator, CrossRBaseMigrator, + CrossRBaseWinMigrator, DependencyUpdateMigrator, DuplicateLinesCleanup, ExtraJinja2KeysCleanup, FlangMigrator, - GraphMigrator, GuardTestingMigrator, + GuardTestingWinMigrator, Jinja2VarsCleanup, LibboostMigrator, LicenseMigrator, @@ -71,25 +71,23 @@ StaticLibMigrator, StdlibMigrator, UpdateCMakeArgsMigrator, + UpdateCMakeArgsWinMigrator, UpdateConfigSubGuessMigrator, Version, YAMLRoundTrip, make_from_lazy_json_data, - skip_migrator_due_to_schema, -) -from conda_forge_tick.migrators.arch import OSXArm -from conda_forge_tick.migrators.migration_yaml import ( - MigrationYamlCreator, - create_rebuild_graph, ) +from conda_forge_tick.migrators.arch import OSXArm, WinArm64 +from conda_forge_tick.migrators.migration_yaml import MigrationYamlCreator from conda_forge_tick.os_utils import pushd from conda_forge_tick.utils import ( CB_CONFIG, fold_log_lines, - get_keys_default, + get_recipe_schema_version, load_existing_graph, parse_meta_yaml, parse_munged_run_export, + parse_recipe_yaml, pluck, yaml_safe_load, ) @@ -191,7 +189,7 @@ def add_replacement_migrator( rationale: str, alt_migrator: Union[Migrator, None] = None, ) -> None: - """Adds a migrator to replace one package with another. + """Add a migrator to replace one package with another. 
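# A hedged call sketch for add_replacement_migrator; only the trailing
# `rationale`/`alt_migrator` parameters are visible in this hunk, so the
# leading arguments and the package names below are illustrative assumptions:
#
#   add_replacement_migrator(
#       migrators,
#       gx,
#       old_pkg="matplotlib",
#       new_pkg="matplotlib-base",
#       rationale="matplotlib now depends on matplotlib-base",
#   )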
Parameters ---------- @@ -210,25 +208,6 @@ def add_replacement_migrator( """ with fold_log_lines(f"making replacement migrator for {old_pkg} -> {new_pkg}"): - total_graph = copy.deepcopy(gx) - - for node, node_attrs in gx.nodes.items(): - requirements = node_attrs["payload"].get("requirements", {}) - rq = ( - requirements.get("build", set()) - | requirements.get("host", set()) - | requirements.get("run", set()) - | requirements.get("test", set()) - ) - pkgs = {old_pkg} - old_pkg_c = pkgs.intersection(rq) - - if not old_pkg_c: - pluck(total_graph, node) - - # post plucking we can have several strange cases, lets remove all selfloops - total_graph.remove_edges_from(nx.selfloop_edges(total_graph)) - if alt_migrator is not None: migrators.append( alt_migrator( @@ -236,7 +215,7 @@ def add_replacement_migrator( new_pkg=new_pkg, rationale=rationale, pr_limit=PR_LIMIT, - graph=total_graph, + total_graph=gx, ), ) else: @@ -246,7 +225,7 @@ def add_replacement_migrator( new_pkg=new_pkg, rationale=rationale, pr_limit=PR_LIMIT, - graph=total_graph, + total_graph=gx, ), ) @@ -259,7 +238,7 @@ def add_replacement_migrator( def add_arch_migrate(migrators: MutableSequence[Migrator], gx: nx.DiGraph) -> None: - """Adds rebuild migrators. + """Add rebuild migrators. Parameters ---------- @@ -267,23 +246,19 @@ def add_arch_migrate(migrators: MutableSequence[Migrator], gx: nx.DiGraph) -> No The list of migrators to run. """ - total_graph = copy.deepcopy(gx) - with fold_log_lines("making aarch64+ppc64le migrator"): migrators.append( ArchRebuild( - graph=total_graph, + total_graph=gx, pr_limit=PR_LIMIT, - name="aarch64 and ppc64le addition", ), ) with fold_log_lines("making osx-arm64 migrator"): migrators.append( OSXArm( - graph=total_graph, + total_graph=gx, pr_limit=PR_LIMIT, - name="arm osx addition", piggy_back_migrations=[ UpdateConfigSubGuessMigrator(), CondaForgeYAMLCleanup(), @@ -298,14 +273,28 @@ def add_arch_migrate(migrators: MutableSequence[Migrator], gx: nx.DiGraph) -> No ), ) + with fold_log_lines("making win-arm64 migrator"): + migrators.append( + WinArm64( + total_graph=gx, + pr_limit=PR_LIMIT, + piggy_back_migrations=[ + CondaForgeYAMLCleanup(), + UpdateCMakeArgsWinMigrator(), + GuardTestingWinMigrator(), + CrossRBaseWinMigrator(), + CrossPythonMigrator(), + NoCondaInspectMigrator(), + MPIPinRunAsBuildCleanup(), + CombineV1ConditionsMigrator(), + ], + ), + ) + def add_rebuild_migration_yaml( migrators: MutableSequence[Migrator], gx: nx.DiGraph, - package_names: Sequence[str], - output_to_feedstock: Mapping[str, str], - excluded_feedstocks: MutableSet[str], - exclude_pinned_pkgs: bool, migration_yaml: str, config: dict, migration_name: str, @@ -314,7 +303,7 @@ def add_rebuild_migration_yaml( force_pr_after_solver_attempts: int = FORCE_PR_AFTER_SOLVER_ATTEMPTS, paused: bool = False, ) -> None: - """Adds rebuild migrator. + """Add rebuild migrator. Parameters ---------- @@ -322,14 +311,6 @@ def add_rebuild_migration_yaml( The list of migrators to run. 
gx : networkx.DiGraph The feedstock graph - package_names : list of str - The package who's pin was moved - output_to_feedstock : dict of str - Mapping of output name to feedstock name - excluded_feedstocks : set of str - Feedstock names which should never be included in the migration - exclude_pinned_pkgs : bool - Whether pinned packages should be excluded from the migration migration_yaml : str The raw yaml for the migration variant dict config: dict @@ -343,36 +324,6 @@ def add_rebuild_migration_yaml( paused : bool, optional Whether the migration is paused, defaults to False. """ - - total_graph = create_rebuild_graph( - gx, - package_names, - excluded_feedstocks, - exclude_pinned_pkgs=exclude_pinned_pkgs, - include_noarch=config.get("include_noarch", False), - include_build=config.get("include_build", False), - ) - - # Note at this point the graph is made of all packages that have a - # dependency on the pinned package via Host, run, or test. - # Some packages don't have a host section so we use their - # build section in its place. - - feedstock_names: Set[str] = set() - for p in package_names: - feedstock_names |= output_to_feedstock.get(p, {p}) - - feedstock_names = { - p for p in feedstock_names if p in gx.nodes - } - excluded_feedstocks - - top_level = { - node - for node in { - gx.successors(feedstock_name) for feedstock_name in feedstock_names - } - if (node in total_graph) and len(list(total_graph.predecessors(node))) == 0 - } piggy_back_migrations = [ CrossCompilationForARMAndPower(), StdlibMigrator(), @@ -399,17 +350,11 @@ def add_rebuild_migration_yaml( extra_mini_migrators=piggy_back_migrations ) - cycles = set() - for cyc in nx.simple_cycles(total_graph): - cycles |= set(cyc) - migrator = MigrationYaml( migration_yaml, name=migration_name, - graph=total_graph, + total_graph=gx, pr_limit=nominal_pr_limit, - top_level=top_level, - cycles=cycles, piggy_back_migrations=piggy_back_migrations, max_solver_attempts=max_solver_attempts, force_pr_after_solver_attempts=force_pr_after_solver_attempts, @@ -424,7 +369,7 @@ def add_rebuild_migration_yaml( ) migrator.pr_limit = pr_limit - print(f"migration yaml:\n{migration_yaml}", flush=True) + print(f"migration yaml:\n{migration_yaml.rstrip()}", flush=True) print(f"bump number: {migrator.bump_number}", flush=True) print( f"# of PRs made so far: {number_pred} ({frac_pred * 100:0.2f} percent)", @@ -434,7 +379,7 @@ def add_rebuild_migration_yaml( final_config.update(config) final_config["pr_limit"] = migrator.pr_limit final_config["max_solver_attempts"] = max_solver_attempts - print("final config:\n", pprint.pformat(final_config) + "\n\n", flush=True) + print("final config:\n", pprint.pformat(final_config), flush=True) migrators.append(migrator) @@ -463,16 +408,6 @@ def migration_factory( for yaml_file, _ in migration_yamls ] - output_to_feedstock = gx.graph["outputs_lut"] - all_package_names = set( - sum( - ( - list(node.get("payload", {}).get("outputs_names", set())) - for node in gx.nodes.values() - ), - [], - ), - ) for yaml_file, yaml_contents in migration_yamls: loaded_yaml = yaml_safe_load(yaml_contents) __mname = os.path.splitext(os.path.basename(yaml_file))[0] @@ -483,7 +418,6 @@ def migration_factory( with fold_log_lines(f"making {__mname} migrator"): migrator_config = loaded_yaml.get("__migrator", {}) paused = migrator_config.pop("paused", False) - excluded_feedstocks = set(migrator_config.get("exclude", [])) _pr_limit = min(migrator_config.pop("pr_limit", pr_limit), MAX_PR_LIMIT) max_solver_attempts = min( 
migrator_config.pop("max_solver_attempts", MAX_SOLVER_ATTEMPTS), @@ -497,14 +431,6 @@ def migration_factory( FORCE_PR_AFTER_SOLVER_ATTEMPTS, ) - if "override_cbc_keys" in migrator_config: - package_names = set(migrator_config.get("override_cbc_keys")) - else: - package_names = ( - set(loaded_yaml) | {ly.replace("_", "-") for ly in loaded_yaml} - ) & all_package_names - exclude_pinned_pkgs = migrator_config.get("exclude_pinned_pkgs", True) - age = time.time() - loaded_yaml.get("migrator_ts", time.time()) age /= 24 * 60 * 60 print( @@ -534,10 +460,6 @@ def migration_factory( add_rebuild_migration_yaml( migrators=migrators, gx=gx, - package_names=list(package_names), - output_to_feedstock=output_to_feedstock, - excluded_feedstocks=excluded_feedstocks, - exclude_pinned_pkgs=exclude_pinned_pkgs, migration_yaml=yaml_contents, migration_name=os.path.splitext(yaml_file)[0], config=migrator_config, @@ -552,6 +474,137 @@ def migration_factory( if paused: print(f"skipping migration {__mname} because it is paused", flush=True) + print("\n", flush=True) + sys.stdout.flush() + sys.stderr.flush() + + +def _get_max_pin_from_pinning_dict( + pinning_dict: Mapping[str, Any], recipe_version: int +): + """Given a pinning dictionary in the format returned by parse_munged_run_export, + return the value for max_pin. + + In recipe v0, this is the value of the key "max_pin". + In recipe v1, this is the value of the key "upper_bound", but only if it has the + format of a pinning spec and is not a hard-coded version string. + + Returns + ------- + str + The value for max_pin, or an empty string if not defined or not a pinning spec. + + Raises + ------ + ValueError + If the schema version of the recipe is neither 0 nor 1. + """ + pinning_spec_regex = re.compile(r"^(x\.)*x$") + + if recipe_version == 0: + value = pinning_dict.get("max_pin", "") + elif recipe_version == 1: + value = pinning_dict.get("upper_bound", "") + else: + raise ValueError(f"Unsupported schema version: {recipe_version}") + + if pinning_spec_regex.match(value): + return value + return "" + + +def _extract_most_stringent_pin_from_recipe( + feedstock_name: str, + package_name: str, + feedstock_attrs: Mapping[str, Any], + gx: nx.DiGraph, +) -> tuple[str, list[dict]]: + """Given the name of a package that is specified in the run_exports in a feedstock, + find the run_exports pinning specification that is most stringent for that package + in the feedstock recipe. + We do that by considering all run_exports sections from outputs of the feedstock. + The package must also be an output of the feedstock. + + Parameters + ---------- + feedstock_name + Name of the feedstock to analyze. + package_name + Name of the package that is specified as run_exports. + feedstock_attrs + Node attributes of the feedstock. + gx + Instance of the global cf-graph. + + Returns + ------- + tuple[str, list[dict]] + A tuple containing: + - The most stringent pinning spec found. If the package is not found in the recipe, + this will be an empty string. + - A list of all run_exports dictionaries found in the recipe, in the format + returned by parse_munged_run_export. + + Raises + ------ + ValueError + If the schema version of the recipe is neither 0 nor 1. 
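# Hedged examples of what _get_max_pin_from_pinning_dict (defined above)
# returns, following its pinning-spec regex ^(x\.)*x$; the dicts are
# illustrative:
#
#   _get_max_pin_from_pinning_dict({"package_name": "libfoo", "max_pin": "x.x"}, 0)
#   # -> "x.x"     (recipe v0, valid pinning spec)
#   _get_max_pin_from_pinning_dict({"package_name": "libfoo", "upper_bound": "x.x.x"}, 1)
#   # -> "x.x.x"   (recipe v1, valid pinning spec)
#   _get_max_pin_from_pinning_dict({"package_name": "libfoo", "upper_bound": "1.2.3"}, 1)
#   # -> ""        (hard-coded version, not a pinning spec)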
+ """ + schema_version = get_recipe_schema_version(feedstock_attrs) + # we need a special parsing for pinning stuff + if schema_version == 0: + meta_yaml = parse_meta_yaml( + feedstock_attrs["raw_meta_yaml"], + for_pinning=True, + ) + elif schema_version == 1: + meta_yaml = parse_recipe_yaml( + feedstock_attrs["raw_meta_yaml"], + for_pinning=True, + ) + else: + raise ValueError(f"Unsupported schema version: {schema_version}") + # find the most stringent max pin for this feedstock if any + pin_spec = "" + possible_p_dicts = [] + for block in [meta_yaml] + meta_yaml.get("outputs", []) or []: + build = block.get("build", {}) or {} + + # parse back to dict + if isinstance(build.get("run_exports", None), MutableMapping): + for _, v in build.get("run_exports", {}).items(): + for p in v: + possible_p_dicts.append(parse_munged_run_export(p)) + else: + for p in build.get("run_exports", []) or []: + possible_p_dicts.append(parse_munged_run_export(p)) + + # and check the exported package is within the feedstock + exports = [ + _get_max_pin_from_pinning_dict(p, schema_version) + for p in possible_p_dicts + # make certain not direct hard pin + if isinstance(p, MutableMapping) + # ensure the export is for this package + and p.get("package_name", "") == package_name + # ensure the pinned package is in an output of the + # parent feedstock + and ( + feedstock_name + in gx.graph["outputs_lut"].get( + p.get("package_name", ""), + set(), + ) + ) + ] + if not exports: + continue + # get the most stringent pin spec from the recipe block + max_pin = max(exports, key=len) + if len(max_pin) > len(pin_spec): + pin_spec = max_pin + return pin_spec, possible_p_dicts + def _outside_pin_range(pin_spec, current_pin, new_version): pin_level = len(pin_spec.split(".")) @@ -566,6 +619,39 @@ def _outside_pin_range(pin_spec, current_pin, new_version): return False +def _compute_approximate_pinning_migration_sizes( + gx, + pinning_names, + packages_to_migrate_together_mapping, + packages_to_migrate_together, + pinnings, +): + pinning_migration_sizes = {pinning_name: 0 for pinning_name in pinning_names} + for node in list(gx.nodes): + with gx.nodes[node]["payload"] as attrs: + for pinning_name in pinning_names: + if ( + pinning_name in packages_to_migrate_together_mapping + and pinning_name not in packages_to_migrate_together + ): + continue + + if pinning_name not in gx.graph["outputs_lut"]: + # conda_build_config.yaml can't have `-` unlike our package names + package_name = pinning_name.replace("_", "-") + else: + package_name = pinning_name + + requirements = attrs.get("requirements", {}) + host = requirements.get("host", set()) + build = requirements.get("build", set()) + bh = host or build + if package_name in bh: + pinning_migration_sizes[pinning_name] += 1 + + return pinning_migration_sizes + + def create_migration_yaml_creator( migrators: MutableSequence[Migrator], gx: nx.DiGraph, pin_to_debug=None ): @@ -573,6 +659,7 @@ def create_migration_yaml_creator( for node in list(cfp_gx.nodes): if node != "conda-forge-pinning": pluck(cfp_gx, node) + cfp_gx.remove_edges_from(nx.selfloop_edges(cfp_gx)) with pushd(os.environ["CONDA_PREFIX"]): pinnings = parse_config_file( @@ -595,6 +682,14 @@ def create_migration_yaml_creator( pinning_names = sorted(list(pinnings.keys())) + pinning_migration_sizes = _compute_approximate_pinning_migration_sizes( + gx, + pinning_names, + packages_to_migrate_together_mapping, + packages_to_migrate_together, + pinnings, + ) + feedstocks_to_be_repinned = [] for pinning_name in pinning_names: if ( @@ 
-624,165 +719,113 @@ def create_migration_yaml_creator( # replace sub-packages with their feedstock names # TODO - we are grabbing one element almost at random here # the sorted call makes it stable at least? - fs_name = next( + feedstock_name = next( iter( sorted(gx.graph["outputs_lut"].get(package_name, {package_name})), ), ) + if feedstock_name not in gx.nodes: + continue + feedstock_attrs = gx.nodes[feedstock_name]["payload"] + if ( - (fs_name in gx.nodes) - and not gx.nodes[fs_name]["payload"].get("archived", False) - and gx.nodes[fs_name]["payload"].get("version") - and fs_name not in feedstocks_to_be_repinned + feedstock_attrs.get("archived", False) + or not feedstock_attrs.get("version") + or feedstock_name in feedstocks_to_be_repinned ): - current_pins = list(map(str, package_pin_list)) - current_version = str(gx.nodes[fs_name]["payload"]["version"]) + continue - try: - # we need a special parsing for pinning stuff - meta_yaml = parse_meta_yaml( - gx.nodes[fs_name]["payload"]["raw_meta_yaml"], - for_pinning=True, - ) + current_pins = list(map(str, package_pin_list)) + current_version = str(feedstock_attrs["version"]) - # find the most stringent max pin for this feedstock if any - pin_spec = "" - for block in [meta_yaml] + meta_yaml.get("outputs", []) or []: - build = block.get("build", {}) or {} - - # parse back to dict - possible_p_dicts = [] - if isinstance(build.get("run_exports", None), MutableMapping): - for _, v in build.get("run_exports", {}).items(): - for p in v: - possible_p_dicts.append(parse_munged_run_export(p)) - else: - for p in build.get("run_exports", []) or []: - possible_p_dicts.append(parse_munged_run_export(p)) - - # and check the exported package is within the feedstock - exports = [ - p.get("max_pin", "") - for p in possible_p_dicts - # make certain not direct hard pin - if isinstance(p, MutableMapping) - # ensure the export is for this package - and p.get("package_name", "") == package_name - # ensure the pinned package is in an output of the - # parent feedstock - and ( - fs_name - in gx.graph["outputs_lut"].get( - p.get("package_name", ""), - set(), - ) - ) - ] - if not exports: - continue - # get the most stringent pin spec from the recipe block - max_pin = max(exports, key=len) - if len(max_pin) > len(pin_spec): - pin_spec = max_pin - - # fall back to the pinning file or "x" - if not pin_spec: - pin_spec = ( - pinnings["pin_run_as_build"] - .get(pinning_name, {}) - .get("max_pin", "x") - ) or "x" - - current_pins = list( - map(lambda x: re.sub("[^0-9.]", "", x).rstrip("."), current_pins), - ) - current_pins = [cp.strip() for cp in current_pins if cp.strip() != ""] - current_version = re.sub("[^0-9.]", "", current_version).rstrip(".") - if not current_pins or current_version == "": - continue + try: + pin_spec, possible_p_dicts = _extract_most_stringent_pin_from_recipe( + feedstock_name, package_name, feedstock_attrs, gx + ) - current_pin = str(max(map(VersionOrder, current_pins))) - # If the current pin and the current version is the same nothing - # to do even if the pin isn't accurate to the spec - if current_pin != current_version and _outside_pin_range( - pin_spec, - current_pin, - current_version, - ): - feedstocks_to_be_repinned.append(fs_name) - with fold_log_lines( - "making pinning migrator for %s" % pinning_name - ): - pinnings_together = packages_to_migrate_together.get( - pinning_name, [pinning_name] - ) - print(" %s:" % pinning_name, flush=True) - print(" package name:", package_name, flush=True) - print(" feedstock name:", fs_name, 
flush=True) - for p in possible_p_dicts: - print(" possible pin spec:", p, flush=True) - print( - " migrator:\n" - " curr version: %s\n" - " curr pin: %s\n" - " pin_spec: %s\n" - " pinnings: %s" - % ( - current_version, - current_pin, - pin_spec, - pinnings_together, - ), - flush=True, - ) - migrators.append( - MigrationYamlCreator( - pinning_name, - current_version, - current_pin, - pin_spec, - fs_name, - cfp_gx, - pinnings=pinnings_together, - full_graph=gx, - pr_limit=1, - ), - ) - except Exception as e: - with fold_log_lines( - "failed to make pinning migrator for %s" % pinning_name - ): - print(" %s:" % pinning_name, flush=True) - print(" package name:", package_name, flush=True) - print(" feedstock name:", fs_name, flush=True) - print(" error:", repr(e), flush=True) + # fall back to the pinning file or "x" + if not pin_spec: + # since this comes from conda_build_config.yaml, max_pin is correct for v1 as well + pin_spec = ( + pinnings["pin_run_as_build"] + .get(pinning_name, {}) + .get("max_pin", "x") + ) or "x" + + current_pins = list( + map(lambda x: re.sub("[^0-9.]", "", x).rstrip("."), current_pins), + ) + current_pins = [cp.strip() for cp in current_pins if cp.strip() != ""] + current_version = re.sub("[^0-9.]", "", current_version).rstrip(".") + if not current_pins or current_version == "": continue + current_pin = str(max(map(VersionOrder, current_pins))) + # If the current pin and the current version is the same nothing + # to do even if the pin isn't accurate to the spec + if current_pin != current_version and _outside_pin_range( + pin_spec, + current_pin, + current_version, + ): + feedstocks_to_be_repinned.append(feedstock_name) + with fold_log_lines("making pinning migrator for %s" % pinning_name): + pinnings_together = packages_to_migrate_together.get( + pinning_name, [pinning_name] + ) + print("%s:" % pinning_name, flush=True) + print(" package name:", package_name, flush=True) + print(" feedstock name:", feedstock_name, flush=True) + for p in possible_p_dicts: + print(" possible pin spec:", p, flush=True) + print( + " migrator:\n" + " curr version: %s\n" + " curr pin: %s\n" + " pin_spec: %s\n" + " pinnings: %s" + % ( + current_version, + current_pin, + pin_spec, + pinnings_together, + ), + flush=True, + ) + print(" ", flush=True) + migrators.append( + MigrationYamlCreator( + package_name=pinning_name, + new_pin_version=current_version, + current_pin=current_pin, + pin_spec=pin_spec, + feedstock_name=feedstock_name, + total_graph=cfp_gx, + pinnings=pinnings_together, + pr_limit=1, + pin_impact=pinning_migration_sizes[pinning_name], + ), + ) + except Exception as e: + with fold_log_lines( + "failed to make pinning migrator for %s" % pinning_name + ): + print("%s:" % pinning_name, flush=True) + print(" package name:", package_name, flush=True) + print(" feedstock name:", feedstock_name, flush=True) + print(" error:", repr(e), flush=True) + print(" ", flush=True) + continue + def add_noarch_python_min_migrator( migrators: MutableSequence[Migrator], gx: nx.DiGraph ): with fold_log_lines("making `noarch: python` migrator"): - gx2 = copy.deepcopy(gx) - for node in list(gx2.nodes): - has_noarch_python = False - with gx2.nodes[node]["payload"] as attrs: - skip_schema = skip_migrator_due_to_schema( - attrs, NoarchPythonMinMigrator.allowed_schema_versions - ) - for line in attrs.get("raw_meta_yaml", "").splitlines(): - if line.lstrip().startswith("noarch: python"): - has_noarch_python = True - break - if (not has_noarch_python) or skip_schema: - pluck(gx2, node) - - 
gx2.clear_edges() - migrators.append( NoarchPythonMinMigrator( - graph=gx2, + total_graph=gx, pr_limit=PR_LIMIT, piggy_back_migrations=_make_mini_migrators_with_defaults( extra_mini_migrators=[YAMLRoundTrip()], @@ -800,27 +843,9 @@ def add_noarch_python_min_migrator( def add_static_lib_migrator(migrators: MutableSequence[Migrator], gx: nx.DiGraph): with fold_log_lines("making static lib migrator"): - gx2 = copy.deepcopy(gx) - for node in list(gx2.nodes): - with gx2.nodes[node]["payload"] as attrs: - skip_schema = skip_migrator_due_to_schema( - attrs, StaticLibMigrator.allowed_schema_versions - ) - update_static_libs = get_keys_default( - attrs, - ["conda-forge.yml", "bot", "update_static_libs"], - {}, - False, - ) - - if (not update_static_libs) or skip_schema: - pluck(gx2, node) - - gx2.clear_edges() - migrators.append( StaticLibMigrator( - graph=gx2, + total_graph=gx, pr_limit=PR_LIMIT, piggy_back_migrations=_make_mini_migrators_with_defaults( extra_mini_migrators=[YAMLRoundTrip()], @@ -841,31 +866,10 @@ def add_nvtools_migrator( gx: nx.DiGraph, ): with fold_log_lines("making add nvtools migrator"): - gx2 = copy.deepcopy(gx) - for node in list(gx2.nodes): - with gx2.nodes[node]["payload"] as attrs: - skip_schema = skip_migrator_due_to_schema( - attrs, StaticLibMigrator.allowed_schema_versions - ) - has_nvidia = False - if "meta_yaml" in attrs and "source" in attrs["meta_yaml"]: - if isinstance(attrs["meta_yaml"]["source"], list): - src_list = attrs["meta_yaml"]["source"] - else: - src_list = [attrs["meta_yaml"]["source"]] - for src in src_list: - src_url = src.get("url", "") or "" - has_nvidia = has_nvidia or ( - "https://developer.download.nvidia.com" in src_url - ) - - if (not has_nvidia) or skip_schema: - pluck(gx2, node) - migrators.append( AddNVIDIATools( check_solvable=False, - graph=gx2, + total_graph=gx, pr_limit=PR_LIMIT, piggy_back_migrations=_make_mini_migrators_with_defaults( extra_mini_migrators=[YAMLRoundTrip()], @@ -915,16 +919,6 @@ def initialize_migrators( migration_factory(pinning_migrators, gx) create_migration_yaml_creator(migrators=pinning_migrators, gx=gx) - with fold_log_lines("migration graph sizes"): - print("rebuild migration graph sizes:", flush=True) - for m in migrators + pinning_migrators: - if isinstance(m, GraphMigrator): - print( - f" {getattr(m, 'name', m)} graph size: " - f"{len(getattr(m, 'graph', []))}", - flush=True, - ) - with fold_log_lines("making version migrator"): print("building package import maps and version migrator", flush=True) python_nodes = { @@ -940,7 +934,7 @@ def initialize_migrators( ) version_migrator = Version( python_nodes=python_nodes, - graph=gx, + total_graph=gx, pr_limit=PR_LIMIT * 2, piggy_back_migrations=_make_mini_migrators_with_defaults( extra_mini_migrators=[ @@ -954,6 +948,15 @@ def initialize_migrators( RNG.shuffle(pinning_migrators) migrators = [version_migrator] + migrators + pinning_migrators + with fold_log_lines("migration graph sizes"): + print("rebuild migration graph sizes:", flush=True) + for m in migrators: + print( + f" {getattr(m, 'name', m)} graph size: " + f"{len(getattr(m, 'graph', []))}", + flush=True, + ) + return migrators @@ -963,7 +966,7 @@ def _load(name): def load_migrators(skip_paused: bool = True) -> MutableSequence[Migrator]: - """Loads all current migrators. + """Load all current migrators. 
Parameters ---------- @@ -974,13 +977,18 @@ def load_migrators(skip_paused: bool = True) -> MutableSequence[Migrator]: ------- migrators : list of Migrator The list of migrators to run in the correct randomized order. + + Raises + ------ + RuntimeError + If no version migrator is found in the migrators directory. """ migrators = [] version_migrator = None pinning_migrators = [] longterm_migrators = [] all_names = get_all_keys_for_hashmap("migrators") - with executor("process", 4) as pool: + with executor("process", 2) as pool: futs = [pool.submit(_load, name) for name in all_names] for fut in tqdm.tqdm( @@ -1013,12 +1021,25 @@ def load_migrators(skip_paused: bool = True) -> MutableSequence[Migrator]: return migrators -def main(ctx: CliContext) -> None: - gx = load_existing_graph() - migrators = initialize_migrators( - gx, - dry_run=ctx.dry_run, - ) +def dump_migrators(migrators: MutableSequence[Migrator], dry_run: bool = False) -> None: + """Dump the current migrators to JSON. + + Parameters + ---------- + migrators : list of Migrator + The list of migrators to dump. + dry_run : bool, optional + Whether to perform a dry run, defaults to False. If True, no changes will be made. + + Raises + ------ + RuntimeError + If a duplicate migrator name is found. + """ + if dry_run: + print("dry run: dumping migrators to json", flush=True) + return + with ( fold_log_lines("dumping migrators to JSON"), lazy_json_override_backends( @@ -1042,8 +1063,20 @@ def main(ctx: CliContext) -> None: lzj.update(data) except Exception as e: - logger.error(f"Error dumping migrator {migrator} to JSON!", exc_info=e) + logger.error("Error dumping migrator %s to JSON!", migrator, exc_info=e) migrators_to_remove = old_migrators - new_migrators for migrator in migrators_to_remove: remove_key_for_hashmap("migrators", migrator) + + +def main(ctx: CliContext) -> None: + gx = load_existing_graph() + migrators = initialize_migrators( + gx, + dry_run=ctx.dry_run, + ) + dump_migrators( + migrators, + dry_run=ctx.dry_run, + ) diff --git a/conda_forge_tick/mappings.py b/conda_forge_tick/mappings.py index 65e5f2ffd..8e7c99ec7 100644 --- a/conda_forge_tick/mappings.py +++ b/conda_forge_tick/mappings.py @@ -2,5 +2,5 @@ def main(): - """Run all the mapping updaters""" + """Run all the mapping updaters.""" main_pypi_name_mapping() diff --git a/conda_forge_tick/migration_runner.py b/conda_forge_tick/migration_runner.py index ddb62317c..5e34e88eb 100644 --- a/conda_forge_tick/migration_runner.py +++ b/conda_forge_tick/migration_runner.py @@ -19,6 +19,11 @@ from conda_forge_tick.contexts import ClonedFeedstockContext from conda_forge_tick.lazy_json_backends import LazyJson, dumps +from conda_forge_tick.settings import ( + ENV_CONDA_FORGE_ORG, + ENV_GRAPH_GITHUB_BACKEND_REPO, + settings, +) logger = logging.getLogger(__name__) @@ -138,9 +143,13 @@ def run_migration_containerized( chmod_plus_rwX(tmpdir, recursive=True) - logger.debug(f"host feedstock dir {feedstock_dir}: {os.listdir(feedstock_dir)}") logger.debug( - f"copied host feedstock dir {tmp_feedstock_dir}: {os.listdir(tmp_feedstock_dir)}" + "host feedstock dir %s: %s", feedstock_dir, os.listdir(feedstock_dir) + ) + logger.debug( + "copied host feedstock dir %s: %s", + tmp_feedstock_dir, + os.listdir(tmp_feedstock_dir), ) mfile = os.path.join(tmpdir, "migrator.json") @@ -171,7 +180,14 @@ def run_migration_containerized( if isinstance(node_attrs, LazyJson) else dumps(node_attrs) ), - extra_container_args=["-e", "RUN_URL"], + extra_container_args=[ + "-e", + "RUN_URL", + "-e", + 
f"{ENV_CONDA_FORGE_ORG}={settings().conda_forge_org}", + "-e", + f"{ENV_GRAPH_GITHUB_BACKEND_REPO}={settings().graph_github_backend_repo}", + ], ) sync_dirs( @@ -228,9 +244,11 @@ def run_migration_local( - pr_title: The PR title for the migration. - pr_body: The PR body for the migration. """ - - # it would be better if we don't re-instantiate ClonedFeedstockContext ourselves and let - # FeedstockContext.reserve_clone_directory be the only way to create a ClonedFeedstockContext + # Instead of mimicking the ClonedFeedstockContext which is already available in the call hierarchy of this function, + # we should instead pass the ClonedFeedstockContext object to this function. This would allow the following issue. + # POSSIBLE BUG: The feedstock_ctx object is mimicked and any attributes not listed here might have incorrect + # default values that were actually overridden. FOR EXAMPLE, DO NOT use the git_repo_owner attribute of the + # feedstock_ctx object below. Instead, refactor to make this function accept a ClonedFeedstockContext object. feedstock_ctx = ClonedFeedstockContext( feedstock_name=feedstock_name, attrs=node_attrs, diff --git a/conda_forge_tick/migrators/__init__.py b/conda_forge_tick/migrators/__init__.py index 40eaddbcf..d4ffa5aa1 100644 --- a/conda_forge_tick/migrators/__init__.py +++ b/conda_forge_tick/migrators/__init__.py @@ -1,5 +1,5 @@ # flake8: noqa -from .arch import ArchRebuild, OSXArm +from .arch import ArchRebuild, OSXArm, WinArm64 from .broken_rebuild import RebuildBroken from .conda_forge_yaml_cleanup import CondaForgeYAMLCleanup from .core import ( @@ -15,9 +15,12 @@ CrossCompilationForARMAndPower, CrossPythonMigrator, CrossRBaseMigrator, + CrossRBaseWinMigrator, GuardTestingMigrator, + GuardTestingWinMigrator, NoCondaInspectMigrator, UpdateCMakeArgsMigrator, + UpdateCMakeArgsWinMigrator, UpdateConfigSubGuessMigrator, ) from .cstdlib import StdlibMigrator diff --git a/conda_forge_tick/migrators/arch.py b/conda_forge_tick/migrators/arch.py index b75543d49..49ce35c1c 100644 --- a/conda_forge_tick/migrators/arch.py +++ b/conda_forge_tick/migrators/arch.py @@ -9,13 +9,11 @@ from conda_forge_tick.contexts import ClonedFeedstockContext, FeedstockContext from conda_forge_tick.make_graph import ( get_deps_from_outputs_lut, - make_outputs_lut_from_graph, ) -from conda_forge_tick.migrators.core import GraphMigrator, _sanitized_muids +from conda_forge_tick.migrators.core import GraphMigrator, MiniMigrator, get_outputs_lut from conda_forge_tick.os_utils import pushd from conda_forge_tick.utils import ( as_iterable, - frozen_to_json_friendly, pluck, yaml_safe_dump, yaml_safe_load, @@ -26,11 +24,25 @@ if typing.TYPE_CHECKING: from conda_forge_tick.migrators_types import AttrsTypedDict, MigrationUidTypedDict -from .core import MiniMigrator +MIGRATION_SUPPORT_DIRS = [ + os.path.join( + os.environ["CONDA_PREFIX"], + "share", + "conda-forge", + "migration_support", + ), + # Deprecated + os.path.join( + os.environ["CONDA_PREFIX"], + "share", + "conda-forge", + "migrations", + ), +] def _filter_excluded_deps(graph, excluded_dependencies): - """filter out excluded dependencies from the graph + """Filter out excluded dependencies from the graph. This function removes any node that descends from an excluded dependency in addition to removing the excluded dependency itself. 
@@ -47,7 +59,7 @@ def _filter_excluded_deps(graph, excluded_dependencies): def _cut_to_target_packages(graph, target_packages): - """cut the graph to only the target packages + """Cut the graph to only the target packages. **operates in place** """ @@ -63,7 +75,7 @@ def _cut_to_target_packages(graph, target_packages): def _filter_stubby_and_ignored_nodes(graph, ignored_packages): - """remove any stub packages and ignored packages from the graph + """Remove any stub packages and ignored packages from the graph. **operates in place** """ @@ -84,9 +96,7 @@ def _filter_stubby_and_ignored_nodes(graph, ignored_packages): class ArchRebuild(GraphMigrator): - """ - A Migrator that add aarch64 and ppc64le builds to feedstocks - """ + """A Migrator that adds aarch64 and ppc64le builds to feedstocks.""" migrator_version = 1 rerender = True @@ -106,34 +116,31 @@ class ArchRebuild(GraphMigrator): def __init__( self, - graph: nx.DiGraph = None, - name: Optional[str] = None, + graph: nx.DiGraph | None = None, + name: str = "aarch64 and ppc64le addition", pr_limit: int = 0, piggy_back_migrations: Optional[Sequence[MiniMigrator]] = None, target_packages: Optional[Sequence[str]] = None, - effective_graph: nx.DiGraph = None, - _do_init: bool = True, + effective_graph: nx.DiGraph | None = None, + total_graph: nx.DiGraph | None = None, ): - if _do_init: + if total_graph is not None: if target_packages is None: # We are constraining the scope of this migrator - with open( - os.path.join( - os.environ["CONDA_PREFIX"], - "share", - "conda-forge", - "migrations", - "arch_rebuild.txt", - ) - ) as f: + fname = None + for d in MIGRATION_SUPPORT_DIRS: + fname = os.path.join(d, "arch_rebuild.txt") + if os.path.exists(fname): + break + + with open(fname) as f: target_packages = set(f.read().split()) - if "outputs_lut" not in graph.graph: - graph.graph["outputs_lut"] = make_outputs_lut_from_graph(graph) + outputs_lut = get_outputs_lut(total_graph, graph, effective_graph) # rebuild the graph to only use edges from the arm and power requirements - graph2 = nx.create_empty_copy(graph) - for node, attrs in graph.nodes(data="payload"): + graph2 = nx.create_empty_copy(total_graph) + for node, attrs in total_graph.nodes(data="payload"): for plat_arch in self.arches: deps = set().union( *attrs.get( @@ -141,20 +148,18 @@ def __init__( attrs.get("requirements", {}), ).values() ) - for dep in get_deps_from_outputs_lut( - deps, graph.graph["outputs_lut"] - ): + for dep in get_deps_from_outputs_lut(deps, outputs_lut): graph2.add_edge(dep, node) pass - graph = graph2 + total_graph = graph2 target_packages = set(target_packages) if target_packages: target_packages.add("python") # hack that is ~harmless? 
- _cut_to_target_packages(graph, target_packages) + _cut_to_target_packages(total_graph, target_packages) # filter out stub packages and ignored packages - _filter_stubby_and_ignored_nodes(graph, self.ignored_packages) + _filter_stubby_and_ignored_nodes(total_graph, self.ignored_packages) if not hasattr(self, "_init_args"): self._init_args = [] @@ -167,38 +172,21 @@ def __init__( "piggy_back_migrations": piggy_back_migrations, "target_packages": target_packages, "effective_graph": effective_graph, - "_do_init": False, + "total_graph": total_graph, } + self.target_packages = target_packages + super().__init__( graph=graph, pr_limit=pr_limit, check_solvable=False, piggy_back_migrations=piggy_back_migrations, effective_graph=effective_graph, + total_graph=total_graph, + name=name, ) - assert not self.check_solvable, "We don't want to check solvability for aarch!" - self.target_packages = target_packages - self.name = name - - if _do_init: - self._reset_effective_graph() - - def filter(self, attrs: "AttrsTypedDict", not_bad_str_start: str = "") -> bool: - if super().filter(attrs): - return True - muid = frozen_to_json_friendly(self.migrator_uid(attrs)) - for arch in self.arches: - configured_arch = ( - attrs.get("conda-forge.yml", {}).get("provider", {}).get(arch) - ) - if configured_arch: - return muid in _sanitized_muids( - attrs.get("pr_info", {}).get("PRed", []), - ) - else: - return False def migrate( self, recipe_dir: str, attrs: "AttrsTypedDict", **kwargs: Any @@ -247,12 +235,9 @@ def remote_branch(self, feedstock_ctx: FeedstockContext) -> str: return super().remote_branch(feedstock_ctx) + "_arch" -class OSXArm(GraphMigrator): - """ - A Migrator that add arm osx builds to feedstocks - """ +class _CrossCompileRebuild(GraphMigrator): + """A Migrator that adds arch platform builds to feedstocks.""" - migrator_version = 1 rerender = True # We purposefully don't want to bump build number for this migrator bump_number = 0 @@ -260,47 +245,47 @@ class OSXArm(GraphMigrator): ignored_packages = set() excluded_dependencies = set() - arches = ["osx_arm64"] - - additional_keys = { - "build_platform": {"osx_arm64": "osx_64"}, - "test": "native_and_emulated", - } + @property + def additional_keys(self): + return { + "build_platform": self.build_platform, + "test": "native_and_emulated", + } def __init__( self, - graph: nx.DiGraph = None, - name: Optional[str] = None, + graph: nx.DiGraph | None = None, pr_limit: int = 0, + name: str = "", piggy_back_migrations: Optional[Sequence[MiniMigrator]] = None, target_packages: Optional[Sequence[str]] = None, - effective_graph: nx.DiGraph = None, - _do_init: bool = True, + effective_graph: nx.DiGraph | None = None, + total_graph: nx.DiGraph | None = None, ): - if _do_init: + if total_graph is not None: if target_packages is None: # We are constraining the scope of this migrator - with open( - os.path.join( - os.environ["CONDA_PREFIX"], - "share", - "conda-forge", - "migrations", - "osx_arm64.txt", - ) - ) as f: + fname = None + for d in MIGRATION_SUPPORT_DIRS: + fname = os.path.join(d, self.pkg_list_filename) + if os.path.exists(fname): + break + + with open(fname) as f: target_packages = set(f.read().split()) - if "outputs_lut" not in graph.graph: - graph.graph["outputs_lut"] = make_outputs_lut_from_graph(graph) + outputs_lut = get_outputs_lut(total_graph, graph, effective_graph) - # rebuild the graph to only use edges from the arm osx requirements - graph2 = nx.create_empty_copy(graph) - for node, attrs in graph.nodes(data="payload"): - for plat_arch in 
self.arches: + # rebuild the graph to only use edges from the arch requirements + graph2 = nx.create_empty_copy(total_graph) + for node, attrs in total_graph.nodes(data="payload"): + for plat_arch, build_plat_arch in self.build_platform.items(): reqs = attrs.get( f"{plat_arch}_requirements", - attrs.get("osx_64_requirements", attrs.get("requirements", {})), + attrs.get( + f"{build_plat_arch}_requirements", + attrs.get("requirements", {}), + ), ) host_deps = set(as_iterable(reqs.get("host", set()))) run_deps = set(as_iterable(reqs.get("run", set()))) @@ -314,26 +299,27 @@ def __init__( if build_dep.endswith("_stub"): deps.add(build_dep) for dep in get_deps_from_outputs_lut( - deps, graph.graph["outputs_lut"] + deps, + outputs_lut, ): graph2.add_edge(dep, node) - graph = graph2 + total_graph = graph2 # Excluded dependencies need to be removed before non target_packages are # filtered out so that if a target_package is excluded, its dependencies # are not added to the graph - _filter_excluded_deps(graph, self.excluded_dependencies) + _filter_excluded_deps(total_graph, self.excluded_dependencies) target_packages = set(target_packages) # filter the graph down to the target packages if target_packages: target_packages.add("python") # hack that is ~harmless? - _cut_to_target_packages(graph, target_packages) + _cut_to_target_packages(total_graph, target_packages) # filter out stub packages and ignored packages - _filter_stubby_and_ignored_nodes(graph, self.ignored_packages) + _filter_stubby_and_ignored_nodes(total_graph, self.ignored_packages) if not hasattr(self, "_init_args"): self._init_args = [] @@ -346,40 +332,21 @@ def __init__( "piggy_back_migrations": piggy_back_migrations, "target_packages": target_packages, "effective_graph": effective_graph, - "_do_init": False, + "total_graph": total_graph, } + self.target_packages = target_packages + super().__init__( graph=graph, pr_limit=pr_limit, check_solvable=False, piggy_back_migrations=piggy_back_migrations, effective_graph=effective_graph, + total_graph=total_graph, + name=name, ) - - assert not self.check_solvable, ( - "We don't want to check solvability for arm osx!" - ) - self.target_packages = target_packages - self.name = name - - if _do_init: - self._reset_effective_graph() - - def filter(self, attrs: "AttrsTypedDict", not_bad_str_start: str = "") -> bool: - if super().filter(attrs): - return True - muid = frozen_to_json_friendly(self.migrator_uid(attrs)) - for arch in self.arches: - configured_arch = ( - attrs.get("conda-forge.yml", {}).get("provider", {}).get(arch) - ) - if configured_arch: - return muid in _sanitized_muids( - attrs.get("pr_info", {}).get("PRed", []), - ) - else: - return False + assert not self.check_solvable, "We don't want to check solvability!" 
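# With this refactor, another cross-compiled platform could plausibly be added
# by subclassing _CrossCompileRebuild and setting its class attributes, in the
# same way the OSXArm and WinArm64 classes below do. Everything in this sketch
# (platform keys, list filename, migrator name) is an illustrative assumption.
class LinuxRiscV64(_CrossCompileRebuild):
    migrator_version = 1
    build_platform = {"linux_riscv64": "linux_64"}
    pkg_list_filename = "linux_riscv64.txt"

    def __init__(self, *args, **kwargs):
        kwargs.setdefault("name", "support linux riscv64 platform")
        super().__init__(*args, **kwargs)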
def migrate( self, recipe_dir: str, attrs: "AttrsTypedDict", **kwargs: Any @@ -412,6 +379,18 @@ def migrate( return muid + +class OSXArm(_CrossCompileRebuild): + """A Migrator that adds osx-arm64 builds to feedstocks.""" + + migrator_version = 1 + build_platform = {"osx_arm64": "osx_64"} + pkg_list_filename = "osx_arm64.txt" + + def __init__(self, *args, **kwargs): + kwargs.setdefault("name", "arm osx addition") + super().__init__(*args, **kwargs) + def pr_title(self, feedstock_ctx: FeedstockContext) -> str: return "ARM OSX Migrator" @@ -431,3 +410,35 @@ def pr_body(self, feedstock_ctx: ClonedFeedstockContext) -> str: def remote_branch(self, feedstock_ctx: FeedstockContext) -> str: return super().remote_branch(feedstock_ctx) + "_arm_osx" + + +class WinArm64(_CrossCompileRebuild): + """A Migrator that adds win-arm64 builds to feedstocks.""" + + migrator_version = 1 + build_platform = {"win_arm64": "win_64"} + pkg_list_filename = "win_arm64.txt" + + def __init__(self, *args, **kwargs): + kwargs.setdefault("name", "support windows arm64 platform") + super().__init__(*args, **kwargs) + + def pr_title(self, feedstock_ctx: FeedstockContext) -> str: + return "Support Windows ARM64 platform" + + def pr_body(self, feedstock_ctx: ClonedFeedstockContext) -> str: + body = super().pr_body(feedstock_ctx) + body = body.format( + dedent( + """\ + This feedstock is being rebuilt as part of the windows arm migration. + + **Feel free to merge the PR if CI is all green, but please don't close it + without reaching out the the ARM Windows team first at @conda-forge/help-win-arm64.** + """, + ), + ) + return body + + def remote_branch(self, feedstock_ctx: FeedstockContext) -> str: + return super().remote_branch(feedstock_ctx) + "_arm64_win" diff --git a/conda_forge_tick/migrators/broken_rebuild.py b/conda_forge_tick/migrators/broken_rebuild.py index 310d225b0..1a9b1e225 100644 --- a/conda_forge_tick/migrators/broken_rebuild.py +++ b/conda_forge_tick/migrators/broken_rebuild.py @@ -4,7 +4,7 @@ import networkx as nx from conda_forge_tick.contexts import ClonedFeedstockContext -from conda_forge_tick.migrators.core import Migrator +from conda_forge_tick.migrators.core import Migrator, get_outputs_lut RNG = secrets.SystemRandom() @@ -297,8 +297,25 @@ """.splitlines() -def split_pkg(pkg): - """nice little code snippet from isuru and CJ""" +def split_pkg(pkg: str): + """Split a package filename into its components. + + Parameters + ---------- + pkg : str + The package filename. + + Returns + ------- + tuple[str, str, str, str] + The platform, package name, version, and build string. + + + Raises + ------ + RuntimeError + If the package filename does not end with ".tar.bz2". 
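# Hedged usage illustration for split_pkg, based only on the documented return
# value above; the artifact name is made up:
#
#   plat, name, version, build = split_pkg("linux-64/numpy-1.26.4-py312h_0.tar.bz2")
#   # expected: ("linux-64", "numpy", "1.26.4", "py312h_0")
#
#   split_pkg("numpy-1.26.4-py312h_0.conda")  # raises RuntimeError (not .tar.bz2)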
+ """ if not pkg.endswith(".tar.bz2"): raise RuntimeError("Can only process packages that end in .tar.bz2") pkg = pkg[:-8] @@ -326,34 +343,38 @@ class RebuildBroken(Migrator): def __init__( self, *, - outputs_lut, pr_limit: int = 0, - graph: nx.DiGraph = None, - effective_graph: nx.DiGraph = None, + total_graph: nx.DiGraph | None = None, + graph: nx.DiGraph | None = None, + effective_graph: nx.DiGraph | None = None, ): if not hasattr(self, "_init_args"): self._init_args = [] if not hasattr(self, "_init_kwargs"): self._init_kwargs = { - "outputs_lut": outputs_lut, "pr_limit": pr_limit, "graph": graph, "effective_graph": effective_graph, + "total_graph": total_graph, } - super().__init__( - 1, check_solvable=False, graph=graph, effective_graph=effective_graph - ) self.name = "rebuild-broken" outputs_to_migrate = {split_pkg(pkg)[1] for pkg in BROKEN_PACKAGES} self.feedstocks_to_migrate = set() + outputs_lut = get_outputs_lut(total_graph, graph, effective_graph) for output in outputs_to_migrate: for fs in outputs_lut.get(output, {output}): self.feedstocks_to_migrate |= {fs} - self._reset_effective_graph() + super().__init__( + pr_limit=pr_limit, + check_solvable=False, + graph=graph, + effective_graph=effective_graph, + total_graph=total_graph, + ) def order( self, @@ -362,11 +383,12 @@ def order( ): return sorted(list(graph.nodes), key=lambda x: RNG.random()) - def filter(self, attrs) -> bool: - return ( - super().filter(attrs) - or attrs["feedstock_name"] not in self.feedstocks_to_migrate - ) + def filter_not_in_migration(self, attrs, not_bad_str_start=""): + if super().filter_not_in_migration(attrs, not_bad_str_start): + return True + + not_broken = attrs["feedstock_name"] not in self.feedstocks_to_migrate + return not_broken def migrate(self, recipe_dir, attrs, **kwargs): self.set_build_number(os.path.join(recipe_dir, "meta.yaml")) diff --git a/conda_forge_tick/migrators/conda_forge_yaml_cleanup.py b/conda_forge_tick/migrators/conda_forge_yaml_cleanup.py index afae9257e..124a07c87 100644 --- a/conda_forge_tick/migrators/conda_forge_yaml_cleanup.py +++ b/conda_forge_tick/migrators/conda_forge_yaml_cleanup.py @@ -26,7 +26,7 @@ class CondaForgeYAMLCleanup(MiniMigrator): ] def filter(self, attrs: "AttrsTypedDict", not_bad_str_start: str = "") -> bool: - """remove recipes without a conda-forge.yml file that has the keys to remove or change""" + """Remove recipes without a conda-forge.yml file that has the keys to remove or change.""" if super().filter(attrs): return True diff --git a/conda_forge_tick/migrators/core.py b/conda_forge_tick/migrators/core.py index bb0242198..11f4eb9ad 100644 --- a/conda_forge_tick/migrators/core.py +++ b/conda_forge_tick/migrators/core.py @@ -1,5 +1,6 @@ -"""Base classes for migrating repos""" +"""Base classes for migrating repos.""" +import contextlib import copy import logging import re @@ -12,12 +13,13 @@ from conda_forge_tick.contexts import ClonedFeedstockContext, FeedstockContext from conda_forge_tick.lazy_json_backends import LazyJson -from conda_forge_tick.make_graph import make_outputs_lut_from_graph from conda_forge_tick.update_recipe import update_build_number, v1_recipe from conda_forge_tick.utils import ( frozen_to_json_friendly, get_bot_run_url, get_keys_default, + get_recipe_schema_version, + pluck, ) if typing.TYPE_CHECKING: @@ -35,12 +37,7 @@ def skip_migrator_due_to_schema( attrs: "AttrsTypedDict", allowed_schema_versions: List[int] ) -> bool: __name = attrs.get("name", "") - schema_version = get_keys_default( - attrs, - ["meta_yaml", 
"schema_version"], - {}, - 0, - ) + schema_version = get_recipe_schema_version(attrs) if schema_version not in allowed_schema_versions: logger.debug( "%s: schema version not allowed - %r not in %r", @@ -53,26 +50,71 @@ def skip_migrator_due_to_schema( return False -def _make_effective_graph(graph, migrator): +def get_outputs_lut( + total_graph: nx.DiGraph | None, + graph: nx.DiGraph | None, + effective_graph: nx.DiGraph | None, +) -> dict[str, str]: + outputs_lut = None + for gx in [total_graph, graph, effective_graph]: + if gx is not None and "outputs_lut" in gx.graph: + return gx.graph["outputs_lut"] + if outputs_lut is None: + raise ValueError( + "Either `total_graph` or both `graph` and `effective_graph` " + "must be provided and must contain `outputs_lut` in their " + "`.graph` attribute." + ) + + +@contextlib.contextmanager +def _lazy_json_or_dict(data): + if isinstance(data, LazyJson): + with data as _data: + yield _data + else: + yield data + + +def _make_migrator_graph(graph, migrator, effective=False, pluck_nodes=True): """Prune graph only to nodes that need rebuilds.""" gx2 = copy.deepcopy(graph) # Prune graph to only things that need builds right now + nodes_to_pluck = set() for node in list(gx2.nodes): - if isinstance(graph.nodes[node]["payload"], LazyJson): - with graph.nodes[node]["payload"] as _attrs: - attrs = copy.deepcopy(_attrs.data) + if "payload" not in gx2.nodes[node]: + logger.critical("node %s: no payload, removing", node) + nodes_to_pluck.add(node) + continue + + with _lazy_json_or_dict(graph.nodes[node]["payload"]) as attrs: + had_orig_branch = "branch" in attrs + orig_branch = attrs.get("branch") + try: + base_branches = migrator.get_possible_feedstock_branches(attrs) + filters = [] + for base_branch in base_branches: + attrs["branch"] = base_branch + if effective: + filters.append(migrator.filter_node_migrated(attrs)) + else: + filters.append(migrator.filter_not_in_migration(attrs)) + if filters and all(filters): + nodes_to_pluck.add(node) + finally: + if had_orig_branch: + attrs["branch"] = orig_branch + else: + del attrs["branch"] + + # the plucking + for node in nodes_to_pluck: + if pluck_nodes: + pluck(gx2, node) else: - attrs = copy.deepcopy(graph.nodes[node]["payload"]) - base_branches = migrator.get_possible_feedstock_branches(attrs) - filters = [] - for base_branch in base_branches: - attrs["branch"] = base_branch - filters.append(migrator.filter(attrs)) - - if filters and all(filters): gx2.remove_node(node) - + gx2.remove_edges_from(nx.selfloop_edges(gx2)) return gx2 @@ -85,7 +127,7 @@ def _sanitized_muids(pred: List[dict]) -> List["JsonFriendly"]: def _parse_bad_attr(attrs: "AttrsTypedDict", not_bad_str_start: str) -> bool: - """Overlook some bad entries""" + """Overlook some bad entries.""" bad = attrs.get("pr_info", {}).get("bad", False) if isinstance(bad, str): bad_bool = not bad.startswith(not_bad_str_start) @@ -111,7 +153,7 @@ def _gen_active_feedstocks_payloads(nodes, gx): yield node, payload -def _migratror_hash(klass, args, kwargs): +def _migrator_hash(klass, args, kwargs): import hashlib from conda_forge_tick.lazy_json_backends import dumps @@ -134,7 +176,7 @@ def _make_migrator_lazy_json_name(mgr, data): "" if len(mgr._init_args) == 0 and len(mgr._init_kwargs) == 0 else "_h" - + _migratror_hash( + + _migrator_hash( data["class"], data["args"], data["kwargs"], @@ -175,7 +217,7 @@ def __init__(self): self._init_kwargs = {} def filter(self, attrs: "AttrsTypedDict", not_bad_str_start: str = "") -> bool: - """If true don't act upon node + """If 
true don't act upon node. Parameters ---------- @@ -190,7 +232,7 @@ def filter(self, attrs: "AttrsTypedDict", not_bad_str_start: str = "") -> bool: return skip_migrator_due_to_schema(attrs, self.allowed_schema_versions) def migrate(self, recipe_dir: str, attrs: "AttrsTypedDict", **kwargs: Any) -> None: - """Perform the migration, updating the ``meta.yaml`` + """Perform the migration, updating the ``meta.yaml``. Parameters ---------- @@ -198,11 +240,6 @@ def migrate(self, recipe_dir: str, attrs: "AttrsTypedDict", **kwargs: Any) -> No The directory of the recipe attrs : dict The node attributes - - Returns - ------- - namedtuple or bool: - If namedtuple continue with PR, if False scrap local folder """ return @@ -219,29 +256,14 @@ def to_lazy_json_data(self): class Migrator: - """Base class for Migrators - - Inheritors - ---------- - Subclasses of Migrator should have at least the following in their __init__ function: - - ```python - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self._reset_effective_graph() - ``` + """Base class for Migrators. Initialization of Instances --------------------------- When a migrator is initialized, you need to supply at least the following items - pr_limit: The number of PRs the migrator can open in a given run of the bot. - - graph: The graph of feedstocks to be migrated. - - The graph you feed the migrator should be the entire graph of feedstocks the - migrator could ever run on. If a migrator skips a feedstock because that migrator - does not apply to that feedstock, then it should not be in the graph passed to the - migrator. If you do not do this, the status page statistics will be incorrect. + - total_graph: The entire graph of conda-forge feedstocks. """ name: str @@ -257,6 +279,8 @@ def __init__(self, *args, **kwargs): allowed_schema_versions = [0] + pluck_nodes = True + build_patterns = ( (re.compile(r"(\s*?)number:\s*([0-9]+)"), "number: {}"), ( @@ -271,13 +295,15 @@ def __init__(self, *args, **kwargs): def __init__( self, + total_graph: nx.DiGraph | None = None, + graph: nx.DiGraph | None = None, + effective_graph: nx.DiGraph | None = None, + *, pr_limit: int = 0, # TODO: Validate this? obj_version: int | None = None, piggy_back_migrations: Sequence[MiniMigrator] | None = None, check_solvable: bool = True, - graph: nx.DiGraph | None = None, - effective_graph: nx.DiGraph | None = None, ): if not hasattr(self, "_init_args"): self._init_args = [] @@ -288,25 +314,49 @@ def __init__( "obj_version": obj_version, "piggy_back_migrations": piggy_back_migrations, "check_solvable": check_solvable, - "graph": graph, - "effective_graph": effective_graph, } self.piggy_back_migrations = piggy_back_migrations or [] self._pr_limit = pr_limit self.obj_version = obj_version self.check_solvable = check_solvable + self.graph = graph + self.effective_graph = effective_graph + self.total_graph = total_graph - if graph is None: - self.graph = nx.DiGraph() - else: + if total_graph is not None: + if graph is not None or effective_graph is not None: + raise ValueError( + "Cannot pass both `total_graph` and `graph` or " + "`effective_graph` to the Migrator." 
+ ) + + graph = _make_migrator_graph( + total_graph, self, effective=False, pluck_nodes=self.pluck_nodes + ) self.graph = graph + self._init_kwargs["graph"] = graph - self.effective_graph = effective_graph + effective_graph = _make_migrator_graph( + self.graph, self, effective=True, pluck_nodes=self.pluck_nodes + ) + self.effective_graph = effective_graph + self._init_kwargs["effective_graph"] = effective_graph + + # do not need this any more + self._init_kwargs["total_graph"] = None + else: + if graph is None or effective_graph is None: + raise ValueError( + "Must pass graph and effective_graph " + "to the Migrator if total_graph is not passed." + ) + self._init_kwargs["graph"] = graph + self._init_kwargs["effective_graph"] = effective_graph + self._init_kwargs["total_graph"] = total_graph def to_lazy_json_data(self): """Serialize the migrator to LazyJson-compatible data.""" - kwargs = copy.deepcopy(self._init_kwargs) if ( "piggy_back_migrations" in kwargs @@ -327,13 +377,6 @@ def to_lazy_json_data(self): data["name"] = _make_migrator_lazy_json_name(self, data) return data - def _reset_effective_graph(self, force=False): - """This method is meant to be called by an non-abstract child class at the end - of its __init__ method.""" - if self.effective_graph is None or force: - self.effective_graph = _make_effective_graph(self.graph, self) - self._init_kwargs["effective_graph"] = self.effective_graph - @property def pr_limit(self): return self._pr_limit @@ -349,7 +392,7 @@ def downstream_children( feedstock_ctx: FeedstockContext, limit: int = 5, ) -> List["PackageName"]: - """Utility method for getting a list of follow on packages""" + """Get a list of follow on packages.""" return [ a[1] for a in list( @@ -358,7 +401,7 @@ def downstream_children( ][:limit] def filter(self, attrs: "AttrsTypedDict", not_bad_str_start: str = "") -> bool: - """If true don't act upon node + """If True don't act upon a node. 
Parameters ---------- @@ -374,47 +417,21 @@ def filter(self, attrs: "AttrsTypedDict", not_bad_str_start: str = "") -> bool: bool : True if node is to be skipped """ + return self.filter_not_in_migration( + attrs, not_bad_str_start + ) or self.filter_node_migrated(attrs, not_bad_str_start) + + def filter_not_in_migration( + self, attrs: "AttrsTypedDict", not_bad_str_start: str = "" + ) -> bool: + """If true don't act upon node because it is not in the migration.""" # never run on archived feedstocks - # don't run on things we've already done # don't run on bad nodes __name = attrs.get("name", "") - def parse_already_pred() -> bool: - pr_data = frozen_to_json_friendly(self.migrator_uid(attrs)) - migrator_uid: "MigrationUidTypedDict" = typing.cast( - "MigrationUidTypedDict", - pr_data["data"], - ) - already_migrated_uids: typing.Iterable["MigrationUidTypedDict"] = list( - z["data"] for z in attrs.get("pr_info", {}).get("PRed", []) - ) - already_pred = migrator_uid in already_migrated_uids - if already_pred: - ind = already_migrated_uids.index(migrator_uid) - logger.debug(f"{__name}: already PRed: uid: {migrator_uid}") - if "PR" in attrs.get("pr_info", {}).get("PRed", [])[ind]: - if isinstance( - attrs.get("pr_info", {}).get("PRed", [])[ind]["PR"], - LazyJson, - ): - with attrs.get("pr_info", {}).get("PRed", [])[ind][ - "PR" - ] as mg_attrs: - logger.debug( - "{}: already PRed: PR file: {}".format( - __name, mg_attrs.file_name - ), - ) - - html_url = mg_attrs.get("html_url", "no url") - - logger.debug(f"{__name}: already PRed: url: {html_url}") - - return already_pred - if attrs.get("archived", False): - logger.debug("%s: archived" % __name) + logger.debug("%s: archived", __name) bad_attr = _parse_bad_attr(attrs, not_bad_str_start) if bad_attr: @@ -422,11 +439,48 @@ def parse_already_pred() -> bool: return ( attrs.get("archived", False) - or parse_already_pred() or bad_attr or skip_migrator_due_to_schema(attrs, self.allowed_schema_versions) ) + def filter_node_migrated( + self, attrs: "AttrsTypedDict", not_bad_str_start: str = "" + ) -> bool: + """If true don't act upon node because it is already migrated.""" + # don't run on things we've already done + + __name = attrs.get("name", "") + + pr_data = frozen_to_json_friendly(self.migrator_uid(attrs)) + migrator_uid: "MigrationUidTypedDict" = typing.cast( + "MigrationUidTypedDict", + pr_data["data"], + ) + already_migrated_uids: typing.Iterable["MigrationUidTypedDict"] = list( + z["data"] for z in attrs.get("pr_info", {}).get("PRed", []) + ) + already_pred = migrator_uid in already_migrated_uids + if already_pred: + ind = already_migrated_uids.index(migrator_uid) + logger.debug("%s: already PRed: uid: %s", __name, migrator_uid) + if "PR" in attrs.get("pr_info", {}).get("PRed", [])[ind]: + if isinstance( + attrs.get("pr_info", {}).get("PRed", [])[ind]["PR"], + LazyJson, + ): + with attrs.get("pr_info", {}).get("PRed", [])[ind][ + "PR" + ] as mg_attrs: + logger.debug( + "%s: already PRed: PR file: %s", __name, mg_attrs.file_name + ) + + html_url = mg_attrs.get("html_url", "no url") + + logger.debug("%s: already PRed: url: %s", __name, html_url) + + return already_pred + def get_possible_feedstock_branches(self, attrs: "AttrsTypedDict") -> List[str]: """Return the valid possible branches to which to apply this migration to for the given attrs. 
@@ -450,7 +504,7 @@ def get_possible_feedstock_branches(self, attrs: "AttrsTypedDict") -> List[str]: [], ) except Exception: - logger.exception(f"Invalid value for {attrs.get('conda-forge.yml', {})=}") + logger.exception("Invalid value for %r", attrs.get("conda-forge.yml", {})) # make sure this is always a string return [str(b) for b in branches] @@ -495,7 +549,7 @@ def run_post_piggyback_migrations( def migrate( self, recipe_dir: str, attrs: "AttrsTypedDict", **kwargs: Any ) -> "MigrationUidTypedDict": - """Perform the migration, updating the ``meta.yaml`` + """Perform the migration, updating the ``meta.yaml``. Parameters ---------- @@ -514,13 +568,14 @@ def migrate( def pr_body( self, feedstock_ctx: ClonedFeedstockContext, add_label_text=True ) -> str: - """Create a PR message body + """Create a PR message body. Returns ------- - body: str + body The body of the PR message - :param feedstock_ctx: + feedstock_ctx + The current ClonedFeedstockContext. """ body = "{}\n\n" @@ -548,25 +603,19 @@ def pr_body( return body def commit_message(self, feedstock_ctx: FeedstockContext) -> str: - """Create a commit message - :param feedstock_ctx: - """ + """Create a commit message.""" return f"migration: {self.__class__.__name__}" def pr_title(self, feedstock_ctx: FeedstockContext) -> str: - """Title for PR - :param feedstock_ctx: - """ + """Get the PR title.""" return "PR from Regro-cf-autotick-bot" def remote_branch(self, feedstock_ctx: FeedstockContext) -> str: - """Branch to use on local and remote - :param feedstock_context: - """ + """Get branch to use on local and remote.""" return "bot-pr" def migrator_uid(self, attrs: "AttrsTypedDict") -> "MigrationUidTypedDict": - """Make a unique id for this migrator and node attrs + """Make a unique id for this migrator and node attrs. 
Parameters ---------- @@ -599,8 +648,7 @@ def order( graph: nx.DiGraph, total_graph: nx.DiGraph, ) -> Sequence["PackageName"]: - """Run the order by number of decedents, ties are resolved by package name""" - + """Run the order by number of decedents, ties are resolved by package name.""" if hasattr(self, "name"): assert isinstance(self.name, str) migrator_name = self.name.lower().replace(" ", "") @@ -695,6 +743,7 @@ class GraphMigrator(Migrator): def __init__( self, *, + total_graph: nx.DiGraph | None = None, name: str | None = None, graph: nx.DiGraph | None = None, pr_limit: int = 0, @@ -721,28 +770,24 @@ def __init__( "check_solvable": check_solvable, "ignored_deps_per_node": ignored_deps_per_node, "effective_graph": effective_graph, + "total_graph": total_graph, } + self.name = name + self.top_level = top_level or set() + self.cycles = set(cycles or []) + self.ignored_deps_per_node = ignored_deps_per_node or {} + super().__init__( - pr_limit, - obj_version, - piggy_back_migrations, + pr_limit=pr_limit, + obj_version=obj_version, + piggy_back_migrations=piggy_back_migrations, check_solvable=check_solvable, graph=graph, effective_graph=effective_graph, + total_graph=total_graph, ) - # IDK if this will be there so I am going to make it if needed - if "outputs_lut" in self.graph.graph: - self.outputs_lut = self.graph.graph["outputs_lut"] - else: - self.outputs_lut = make_outputs_lut_from_graph(self.graph) - - self.name = name - self.top_level = top_level or set() - self.cycles = set(cycles or []) - self.ignored_deps_per_node = ignored_deps_per_node or {} - def all_predecessors_issued(self, attrs: "AttrsTypedDict") -> bool: # Check if all upstreams have been issue and are stale for node, payload in _gen_active_feedstocks_payloads( @@ -785,7 +830,7 @@ def predecessors_not_yet_built(self, attrs: "AttrsTypedDict") -> bool: if muid not in _sanitized_muids( payload.get("pr_info", {}).get("PRed", []), ): - logger.debug("not yet built: %s" % node) + logger.debug("not yet built: %s", node) return True # This is due to some PRed_json loss due to bad graph deploy outage @@ -801,44 +846,50 @@ def predecessors_not_yet_built(self, attrs: "AttrsTypedDict") -> bool: m_pred_json and m_pred_json.get("PR", {"state": "open"}).get("state", "") == "open" ): - logger.debug("not yet built: %s" % node) + logger.debug("not yet built: %s", node) return True return False - def filter(self, attrs: "AttrsTypedDict", not_bad_str_start: str = "") -> bool: - name = attrs.get("name", "") - - if super().filter(attrs, "Upstream:"): - logger.debug( - "filter %s: archived or done or bad attr or schema_version not allowed", - name, - ) + def filter_not_in_migration(self, attrs, not_bad_str_start=""): + if super().filter_not_in_migration(attrs, not_bad_str_start): return True - if attrs.get("feedstock_name", None) not in self.graph: + name = attrs.get("name", "") + _gx = self.total_graph or self.graph + not_in_migration = attrs.get("feedstock_name", None) not in _gx + + if not_in_migration: logger.debug("filter %s: node not in graph", name) - return True - # If in top level or in a cycle don't check for upstreams just build - if (attrs["feedstock_name"] in self.top_level) or ( - attrs["feedstock_name"] in self.cycles - ): - return False + return not_in_migration - # once all PRs are issued (not merged), propose the change in pin - if name == "conda-forge-pinning" and self.all_predecessors_issued( - attrs=attrs, - ): - logger.debug("not filtered %s: pinning parents issued", name) - return False - - # Check if all upstreams have 
been built - if self.predecessors_not_yet_built(attrs=attrs): - logger.debug("filter %s: parents not built", name) - return True + def filter_node_migrated(self, attrs, not_bad_str_start=""): + name = attrs.get("name", "") - return False + # If in top level or in a cycle don't check for upstreams just build + is_top_level = (attrs["feedstock_name"] in self.top_level) or ( + attrs["feedstock_name"] in self.cycles + ) + if is_top_level: + logger.debug("not filtered %s: top level", name) + node_is_ready = True + else: + if name == "conda-forge-pinning": + if self.all_predecessors_issued(attrs=attrs): + node_is_ready = True + else: + logger.debug("filtered %s: pinning parents not issued", name) + node_is_ready = False + else: + # Check if all upstreams have been built + if self.predecessors_not_yet_built(attrs=attrs): + logger.debug("filter %s: parents not built", name) + node_is_ready = False + else: + node_is_ready = True + + return (not node_is_ready) or super().filter_node_migrated(attrs, "Upstream:") def migrator_uid(self, attrs: "AttrsTypedDict") -> "MigrationUidTypedDict": n = super().migrator_uid(attrs) diff --git a/conda_forge_tick/migrators/cross_compile.py b/conda_forge_tick/migrators/cross_compile.py index 9c63a1937..3c8bfd146 100644 --- a/conda_forge_tick/migrators/cross_compile.py +++ b/conda_forge_tick/migrators/cross_compile.py @@ -148,6 +148,38 @@ def migrate(self, recipe_dir: str, attrs: "AttrsTypedDict", **kwargs: Any) -> No f.write("".join(lines)) +class GuardTestingWinMigrator(CrossCompilationMigratorBase): + def migrate(self, recipe_dir: str, attrs: "AttrsTypedDict", **kwargs: Any) -> None: + with pushd(recipe_dir): + if not os.path.exists("bld.bat"): + return + with open("bld.bat") as f: + lines = list(f.readlines()) + + for i, line in enumerate(lines): + if "CONDA_BUILD_CROSS_COMPILATION" in line: + return + if ( + line.strip().startswith("make check") + or line.strip().startswith("ctest") + or line.strip().startswith("make test") + ): + lines.insert(i, 'if not "%CONDA_BUILD_SKIP_TESTS%"=="1" (\n') + insert_after = i + 1 + while len(lines) > insert_after and lines[insert_after].endswith( + "\\\n", + ): + insert_after += 1 + if lines[insert_after][-1] != "\n": + lines[insert_after] += "\n" + lines.insert(insert_after + 1, ")\n") + break + else: + return + with open("bld.bat", "w") as f: + f.write("".join(lines)) + + class CrossPythonMigrator(MiniMigrator): allowed_schema_versions = {0, 1} post_migration = True @@ -171,6 +203,8 @@ def migrate(self, recipe_dir: str, attrs: "AttrsTypedDict", **kwargs: Any) -> No with open(recipe_file) as f: lines = f.readlines() in_reqs = False + if any("cross-python" in line for line in lines): + return for i, line in enumerate(lines): if line.strip().startswith("requirements:"): in_reqs = True @@ -259,6 +293,35 @@ def migrate(self, recipe_dir: str, attrs: "AttrsTypedDict", **kwargs: Any) -> No f.write("".join(lines)) +class UpdateCMakeArgsWinMigrator(CrossCompilationMigratorBase): + def filter(self, attrs: "AttrsTypedDict", not_bad_str_start: str = "") -> bool: + build_reqs = attrs.get("requirements", {}).get("build", set()) + return "cmake" not in build_reqs or skip_migrator_due_to_schema( + attrs, self.allowed_schema_versions + ) + + def migrate(self, recipe_dir: str, attrs: "AttrsTypedDict", **kwargs: Any) -> None: + with pushd(recipe_dir): + if not os.path.exists("bld.bat"): + return + with open("bld.bat") as f: + lines = list(f.readlines()) + + for i, line in enumerate(lines): + if "%CMAKE_ARGS%" in line: + return + + for i, line in 
enumerate(lines): + if line.startswith("cmake "): + lines[i] = "cmake %CMAKE_ARGS% " + line[len("cmake ") :] + break + else: + return + + with open("bld.bat", "w") as f: + f.write("".join(lines)) + + class Build2HostMigrator(MiniMigrator): allowed_schema_versions = {0, 1} post_migration = False @@ -345,6 +408,12 @@ def migrate(self, recipe_dir: str, attrs: "AttrsTypedDict", **kwargs: Any) -> No """ +CRAN_BLD_BAT = """\ +"%R%" CMD INSTALL --build . %R_ARGS% +IF %ERRORLEVEL% NEQ 0 exit 1 +""" + + class CrossRBaseMigrator(MiniMigrator): allowed_schema_versions = {0, 1} post_migration = True @@ -421,6 +490,17 @@ def migrate(self, recipe_dir: str, attrs: "AttrsTypedDict", **kwargs: Any) -> No f.write(CRAN_BUILD_SH) +class CrossRBaseWinMigrator(CrossRBaseMigrator): + allowed_schema_versions = {0, 1} + post_migration = True + + def migrate(self, recipe_dir: str, attrs: "AttrsTypedDict", **kwargs: Any) -> None: + with pushd(recipe_dir): + if os.path.exists("bld.bat"): + with open("bld.bat", "w") as f: + f.write(CRAN_BLD_BAT) + + class CrossCompilationForARMAndPower(MiniMigrator): allowed_schema_versions = {0, 1} post_migration = True @@ -432,7 +512,7 @@ def migrate(self, recipe_dir: str, attrs: "AttrsTypedDict", **kwargs: Any) -> No with pushd(recipe_dir): if not os.path.exists("../conda-forge.yml"): name = attrs.get("feedstock_name") - logger.info(f"no conda-forge.yml for {name}") + logger.info("no conda-forge.yml for %s", name) return with open("../conda-forge.yml") as f: @@ -447,7 +527,7 @@ def migrate(self, recipe_dir: str, attrs: "AttrsTypedDict", **kwargs: Any) -> No config["build_platform"][arch] = "linux_64" with open("../conda-forge.yml", "w") as f: name = attrs.get("feedstock_name") - logger.info(f"new conda-forge.yml for {name}:={config}") + logger.info("new conda-forge.yml for %s:=%s", name, config) yaml_safe_dump(config, f) if not os.path.exists("build.sh"): diff --git a/conda_forge_tick/migrators/cstdlib.py b/conda_forge_tick/migrators/cstdlib.py index bac801e0f..577e9bb7f 100644 --- a/conda_forge_tick/migrators/cstdlib.py +++ b/conda_forge_tick/migrators/cstdlib.py @@ -33,6 +33,12 @@ def _process_section(output_index, attrs, lines): - where there's no host-section, add it If we find `sysroot_linux-64 2.17`, remove those lines and write the spec to CBC. + + Raises + ------ + RuntimeError + If the output given by output_index could not be found in attrs. + Also, if an assertion fails. """ write_stdlib_to_cbc = False # remove occurrences of __osx due to MACOSX_DEPLOYMENT_TARGET (see migrate() below) diff --git a/conda_forge_tick/migrators/libboost.py b/conda_forge_tick/migrators/libboost.py index 2b8d6d663..122d4783a 100644 --- a/conda_forge_tick/migrators/libboost.py +++ b/conda_forge_tick/migrators/libboost.py @@ -16,6 +16,12 @@ def _slice_into_output_sections(meta_yaml_lines, attrs): the list of lines where this output is described in the meta.yaml. The result will always contain an index -1 for the top-level section ( == everything if there are no other outputs). + + Raises + ------ + RuntimeError + If the recipe contains list-style outputs, or if the number of + sections found does not match the number of outputs. """ outputs_token_pos = None re_output_start = None @@ -119,6 +125,11 @@ def _process_section(output_index, attrs, lines): - if boost-cpp is only a host-dep, rename to libboost-headers - if boost-cpp is _also_ a run-dep, rename it to libboost in host and remove it in run. + + Raises + ------ + RuntimeError + If the output given by output_index cannot be found in attrs. 
""" outputs = attrs["meta_yaml"].get("outputs", []) if output_index == -1: @@ -191,9 +202,7 @@ def _process_section(output_index, attrs, lines): def _replacer(lines, from_this, to_that, max_times=None): - """ - Replaces one pattern with a string in a set of lines, up to max_times - """ + """Replace one pattern with a string in a set of lines, up to max_times.""" i = 0 new_lines = [] pat = re.compile(from_this) diff --git a/conda_forge_tick/migrators/license.py b/conda_forge_tick/migrators/license.py index 19475a1ec..264d8fc09 100644 --- a/conda_forge_tick/migrators/license.py +++ b/conda_forge_tick/migrators/license.py @@ -25,7 +25,7 @@ def replace_in_file(pattern, new, fname, leading_whitespace=True): - """Replaces a given pattern in a file. If leading whitespace is True, + """Replace a given pattern in a file. If leading whitespace is True, whitespace at the beginning of a line will be captured and preserved. Otherwise, the pattern itself must contain all leading whitespace. @@ -80,7 +80,7 @@ def replace_in_file(pattern, new, fname, leading_whitespace=True): def _to_spdx(lic): """ - we are munging this stuff from conda-build + We are munging this stuff from conda-build. d_license = {'agpl3': ['AGPL-3', 'AGPL (>= 3)', 'AGPL', 'GNU Affero General Public License'], @@ -184,7 +184,7 @@ def _scrape_license_string(pkg): if pkg.startswith("r-"): pkg = pkg[2:] - logger.info("LICENSE running cran skeleton for pkg %s" % pkg) + logger.info("LICENSE running cran skeleton for pkg %s", pkg) with tempfile.TemporaryDirectory() as tmpdir, pushd(tmpdir): subprocess.run( @@ -242,7 +242,7 @@ def _scrape_license_string(pkg): def _do_r_license_munging(pkg, recipe_dir): try: d = _scrape_license_string(pkg) - logger.info("LICENSE R package license data: %s" % d) + logger.info("LICENSE R package license data: %s", d) with open(os.path.join(recipe_dir, "meta.yaml")) as fp: cmeta = CondaMetaYAML(fp.read()) @@ -259,7 +259,7 @@ def _do_r_license_munging(pkg, recipe_dir): cmeta.dump(fp) except Exception as e: - logger.info("LICENSE R license ERROR: %s" % repr(e)) + logger.info("LICENSE R license ERROR: %s", repr(e)) pass diff --git a/conda_forge_tick/migrators/migration_yaml.py b/conda_forge_tick/migrators/migration_yaml.py index 063d17d4e..be486ba2b 100644 --- a/conda_forge_tick/migrators/migration_yaml.py +++ b/conda_forge_tick/migrators/migration_yaml.py @@ -6,20 +6,24 @@ import time import typing from collections import defaultdict -from typing import Any, List, MutableSet, Optional, Sequence, Set +from typing import Any, List, Optional, Sequence, Set import networkx as nx from conda_forge_tick.contexts import ClonedFeedstockContext, FeedstockContext from conda_forge_tick.feedstock_parser import PIN_SEP_PAT from conda_forge_tick.make_graph import get_deps_from_outputs_lut -from conda_forge_tick.migrators.core import GraphMigrator, Migrator, MiniMigrator +from conda_forge_tick.migrators.core import ( + GraphMigrator, + Migrator, + MiniMigrator, + get_outputs_lut, +) from conda_forge_tick.os_utils import pushd from conda_forge_tick.utils import ( get_bot_run_url, get_keys_default, get_migrator_name, - pluck, yaml_safe_dump, yaml_safe_load, ) @@ -82,7 +86,7 @@ def _patch_dict(cfg, patches): def merge_migrator_cbc(migrator_yaml: str, conda_build_config_yaml: str): - """Merge a migrator_yaml with the conda_build_config_yaml""" + """Merge a migrator_yaml with the conda_build_config_yaml.""" migrator_keys = defaultdict(list) current_key = None regex = re.compile(r"\w") @@ -116,6 +120,46 @@ def 
merge_migrator_cbc(migrator_yaml: str, conda_build_config_yaml: str): return "\n".join(outbound_cbc) +def _trim_edges_for_abi_rebuild( + total_graph: nx.DiGraph, migrator: Migrator, outputs_lut: dict[str, str] +) -> nx.DiGraph: + migrator_payload = migrator.loaded_yaml.get("__migrator", {}) + include_build = migrator_payload.get("include_build", False) + + for node, node_attrs in total_graph.nodes.items(): + # do not trim any edges for pinnings repo + if node == "conda-forge-pinning": + continue + + with node_attrs["payload"] as attrs: + in_migration = not migrator.filter_not_in_migration(attrs) + + requirements = attrs.get("requirements", {}) + host = requirements.get("host", set()) + build = requirements.get("build", set()) + if include_build: + bh = host | build + else: + bh = host or build + + # get host/build, run and test and launder them through outputs + # this should fix outputs related issues (eg gdal) + all_reqs = requirements.get("run", set()) + if in_migration: + all_reqs = all_reqs | requirements.get("test", set()) + all_reqs = all_reqs | bh + rq = get_deps_from_outputs_lut( + all_reqs, + outputs_lut, + ) + + for e in list(total_graph.in_edges(node)): + if e[0] not in rq: + total_graph.remove_edge(*e) + + return total_graph + + class MigrationYaml(GraphMigrator): """Migrator for bumping the build number.""" @@ -129,7 +173,9 @@ def __init__( self, yaml_contents: str, name: str, - graph: nx.DiGraph = None, + package_names: set[str] | None = None, + total_graph: nx.DiGraph | None = None, + graph: nx.DiGraph | None = None, pr_limit: int = 0, top_level: Set["PackageName"] = None, cycles: Optional[Sequence["PackageName"]] = None, @@ -141,7 +187,7 @@ def __init__( conda_forge_yml_patches=None, ignored_deps_per_node=None, max_solver_attempts=3, - effective_graph: nx.DiGraph = None, + effective_graph: nx.DiGraph | None = None, force_pr_after_solver_attempts=10, longterm=False, paused=False, @@ -152,6 +198,7 @@ def __init__( if not hasattr(self, "_init_kwargs"): self._init_kwargs = { + "total_graph": total_graph, "graph": graph, "pr_limit": pr_limit, "top_level": top_level, @@ -168,21 +215,12 @@ def __init__( "longterm": longterm, "force_pr_after_solver_attempts": force_pr_after_solver_attempts, "paused": paused, + "package_names": package_names, } self._init_kwargs.update(copy.deepcopy(kwargs)) - super().__init__( - graph=graph, - pr_limit=pr_limit, - obj_version=migration_number, - piggy_back_migrations=piggy_back_migrations, - check_solvable=check_solvable, - ignored_deps_per_node=ignored_deps_per_node, - effective_graph=effective_graph, - ) self.yaml_contents = yaml_contents assert isinstance(name, str) - self.name = name self.top_level = top_level or set() self.cycles = set(cycles or []) self.automerge = automerge @@ -193,20 +231,138 @@ def __init__( self.longterm = longterm self.force_pr_after_solver_attempts = force_pr_after_solver_attempts self.paused = paused + self.package_names = package_names or set() + + # special init steps to be done on total_graph + # - compute package names to find in host to indicate needs migration + # - trim edges to only those for host|run|test deps - must be done before plucking + # - add pinning as child of all nodes in graph + if total_graph is not None: + # compute package names for migrating + migrator_payload = self.loaded_yaml.get("__migrator", {}) + all_package_names = set( + sum( + ( + list(node.get("payload", {}).get("outputs_names", set())) + for node in total_graph.nodes.values() + ), + [], + ), + ) + if "override_cbc_keys" in 
migrator_payload: + package_names = set(migrator_payload.get("override_cbc_keys")) + else: + package_names = ( + set(self.loaded_yaml) + | {ly.replace("_", "-") for ly in self.loaded_yaml} + ) & all_package_names + self.package_names = package_names + self._init_kwargs["package_names"] = package_names + + # compute excluded pinned feedstocks no matter what + outputs_lut = get_outputs_lut(total_graph, graph, effective_graph) + + self.excluded_pinned_feedstocks = set() + for _node in self.package_names: + self.excluded_pinned_feedstocks.update(outputs_lut.get(_node, {_node})) + + # finish special init steps + if total_graph is not None: + # needed so that we can filter nodes not in migration + self.graph = None + total_graph = copy.deepcopy(total_graph) + self.total_graph = total_graph + _trim_edges_for_abi_rebuild(total_graph, self, outputs_lut) + total_graph.add_edges_from( + [(n, "conda-forge-pinning") for n in total_graph.nodes] + ) + delattr(self, "total_graph") + delattr(self, "graph") + + super().__init__( + graph=graph, + pr_limit=pr_limit, + obj_version=migration_number, + piggy_back_migrations=piggy_back_migrations, + check_solvable=check_solvable, + ignored_deps_per_node=ignored_deps_per_node, + effective_graph=effective_graph, + total_graph=total_graph, + name=name, + ) + + if total_graph is not None: + # recompute top-level nodes and cycles after cutting to graph of all rebuilds + # these computations have to go after the call to super which turns the + # total graph into the graph of all possible rebuilds (stored in self.graph) + migrator_payload = self.loaded_yaml.get("__migrator", {}) + excluded_feedstocks = set(migrator_payload.get("exclude", [])) + feedstock_names = { + p for p in self.excluded_pinned_feedstocks if p in total_graph.nodes + } - excluded_feedstocks + + top_level = { + node + for node in { + total_graph.successors(feedstock_name) + for feedstock_name in feedstock_names + } + if (node in self.graph) + and len(list(self.graph.predecessors(node))) == 0 + } + + cycles = set() + for cyc in nx.simple_cycles(self.graph): + cycles |= set(cyc) + + self.top_level = self.top_level | top_level + self._init_kwargs["top_level"] = top_level + self.cycles = self.cycles | cycles + self._init_kwargs["cycles"] = cycles + + def filter_not_in_migration(self, attrs, not_bad_str_start=""): + if super().filter_not_in_migration(attrs, not_bad_str_start): + return True - self._reset_effective_graph() + node = attrs["feedstock_name"] - def filter(self, attrs: "AttrsTypedDict", not_bad_str_start: str = "") -> bool: - """ - Determine whether migrator needs to be filtered out. + if node == "conda-forge-pinning": + # conda-forge-pinning is always included in migration + return False - Return value of True means to skip migrator, False means to go ahead. - Calls up the MRO until Migrator.filter, see docstring there (./core.py). - """ migrator_payload = self.loaded_yaml.get("__migrator", {}) - platform_allowlist = migrator_payload.get("platform_allowlist", []) - wait_for_migrators = migrator_payload.get("wait_for_migrators", []) + include_noarch = migrator_payload.get("include_noarch", False) + include_build = migrator_payload.get("include_build", False) + excluded_feedstocks = set(migrator_payload.get("exclude", [])) + exclude_pinned_pkgs = migrator_payload.get("exclude_pinned_pkgs", True) + # Generally, the packages themselves should be excluded from the migration; + # an example for exceptions are migrations for new python versions + # where numpy needs to be rebuilt despite being pinned. 
+ if exclude_pinned_pkgs: + excluded_feedstocks.update(self.excluded_pinned_feedstocks) + + requirements = attrs.get("requirements", {}) + host = requirements.get("host", set()) + build = requirements.get("build", set()) + if include_build: + bh = host | build + else: + bh = host or build + only_python = "python" in self.package_names + inclusion_criteria = bh & set(self.package_names) and ( + include_noarch or not all_noarch(attrs, only_python=only_python) + ) + + if not inclusion_criteria: + logger.debug( + "filter %s: pin %s not in host/build %s", + node, + self.package_names, + bh, + ) + + platform_allowlist = migrator_payload.get("platform_allowlist", []) platform_filtered = False if platform_allowlist: # migrator.platform_allowlist allows both styles: "osx-64" & "osx_64"; @@ -218,6 +374,30 @@ def filter(self, attrs: "AttrsTypedDict", not_bad_str_start: str = "") -> bool: intersection = set(attrs.get("platforms", {})) & set(platform_allowlist) platform_filtered = not bool(intersection) + if platform_filtered: + logger.debug( + "filter %s: platform(s) %s not in %s", + node, + attrs.get("platforms", {}), + platform_allowlist, + ) + + if node in excluded_feedstocks: + logger.debug( + "filter %s: excluded feedstock", + node, + ) + + return ( + platform_filtered + or (not inclusion_criteria) + or (node in excluded_feedstocks) + ) + + def filter_node_migrated(self, attrs, not_bad_str_start=""): + migrator_payload = self.loaded_yaml.get("__migrator", {}) + wait_for_migrators = migrator_payload.get("wait_for_migrators", []) + need_to_wait = False if wait_for_migrators: found_migrators = set() @@ -231,20 +411,14 @@ def filter(self, attrs: "AttrsTypedDict", not_bad_str_start: str = "") -> bool: need_to_wait = True if set(wait_for_migrators) - found_migrators: need_to_wait = True + logger.debug( "filter %s: need to wait for %s", attrs.get("name", ""), wait_for_migrators, ) - return ( - platform_filtered - or need_to_wait - or super().filter( - attrs=attrs, - not_bad_str_start=not_bad_str_start, - ) - ) + return need_to_wait or super().filter_node_migrated(attrs, not_bad_str_start) def migrate( self, recipe_dir: str, attrs: "AttrsTypedDict", **kwargs: Any @@ -392,6 +566,38 @@ def migrator_uid(self, attrs: "AttrsTypedDict") -> "MigrationUidTypedDict": return n +def _compute_pin_impact( + total_graph: nx.DiGraph, package_names: tuple[str], outputs_lut: dict[str, str] +) -> int: + # Generally, the packages themselves should be excluded from the migration; + # an example for exceptions are migrations for new python versions + # where numpy needs to be rebuilt despite being pinned. + excluded_feedstocks = set() + for node in package_names: + excluded_feedstocks.update(outputs_lut.get(node, {node})) + + included_nodes = 0 + + for node, node_attrs in total_graph.nodes.items(): + # always keep pinning + if node == "conda-forge-pinning": + included_nodes += 1 + else: + with node_attrs["payload"] as attrs: + requirements = attrs.get("requirements", {}) + host = requirements.get("host", set()) + build = requirements.get("build", set()) + bh = host or build + only_python = "python" in package_names + inclusion_criteria = bh & set(package_names) and ( + not all_noarch(attrs, only_python=only_python) + ) + if inclusion_criteria and node not in excluded_feedstocks: + included_nodes += 1 + + return included_nodes + + class MigrationYamlCreator(Migrator): """Migrator creating migration yaml files.""" @@ -402,14 +608,15 @@ class MigrationYamlCreator(Migrator): # TODO: make yaml_contents an arg? 
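Both `_compute_pin_impact` above and the `filter_not_in_migration` logic earlier hinge on the same test: does any pinned output appear in a feedstock's host requirements (falling back to build when there is no host section)? A stripped-down sketch of that predicate with made-up inputs; the real code additionally honours `include_build`, `include_noarch`, and the noarch check:

    def needs_rebuild(requirements: dict, package_names: set) -> bool:
        # simplified inclusion criterion: a feedstock is part of the rebuild
        # when a pinned package shows up in host (or, failing that, build) deps
        host = requirements.get("host", set())
        build = requirements.get("build", set())
        bh = host or build
        return bool(bh & package_names)

    # a feedstock whose host deps include the pinned package is picked up
    assert needs_rebuild({"host": {"libboost-devel", "python"}}, {"libboost-devel"})
    # a pure run-dependency does not trigger a rebuild for an ABI migration
    assert not needs_rebuild({"run": {"libboost-devel"}}, {"libboost-devel"})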
def __init__( self, + *, package_name: str, new_pin_version: str, current_pin: str, pin_spec: str, feedstock_name: str, - graph: nx.DiGraph, + total_graph: nx.DiGraph | None = None, + graph: nx.DiGraph | None = None, pin_impact: Optional[int] = None, - full_graph: Optional[nx.DiGraph] = None, pr_limit: int = 0, bump_number: int = 1, effective_graph: nx.DiGraph = None, @@ -420,36 +627,34 @@ def __init__( pinnings = [package_name] if pin_impact is None: - if full_graph is not None: - pin_impact = len(create_rebuild_graph(full_graph, tuple(pinnings))) - full_graph = None + if total_graph is not None: + outputs_lut = get_outputs_lut(total_graph, graph, effective_graph) + pin_impact = _compute_pin_impact( + total_graph, tuple(pinnings), outputs_lut + ) else: pin_impact = -1 if not hasattr(self, "_init_args"): - self._init_args = [ - package_name, - new_pin_version, - current_pin, - pin_spec, - feedstock_name, - graph, - ] + self._init_args = [] if not hasattr(self, "_init_kwargs"): self._init_kwargs = { + "package_name": package_name, + "new_pin_version": new_pin_version, + "current_pin": current_pin, + "pin_spec": pin_spec, + "feedstock_name": feedstock_name, + "graph": graph, "pr_limit": pr_limit, "bump_number": bump_number, "pin_impact": pin_impact, - "full_graph": full_graph, "effective_graph": effective_graph, "pinnings": pinnings, + "total_graph": total_graph, } self._init_kwargs.update(copy.deepcopy(kwargs)) - super().__init__( - pr_limit=pr_limit, graph=graph, effective_graph=effective_graph - ) self.feedstock_name = feedstock_name self.pin_spec = pin_spec self.current_pin = current_pin @@ -459,18 +664,24 @@ def __init__( self.package_name = package_name self.bump_number = bump_number self.name = package_name + " pinning" - self.pin_impact = pin_impact + self.pin_impact = pin_impact or -1 self.pinnings = pinnings - self._reset_effective_graph() + super().__init__( + pr_limit=pr_limit, + graph=graph, + effective_graph=effective_graph, + total_graph=total_graph, + ) - def filter(self, attrs: "AttrsTypedDict", not_bad_str_start: str = "") -> bool: + def filter_not_in_migration(self, attrs, not_bad_str_start=""): if ( - not super().filter(attrs, not_bad_str_start) - and attrs.get("name", "") == "conda-forge-pinning" + attrs.get("name", "") == "conda-forge-pinning" + or attrs.get("feedstock_name", "") == "conda-forge-pinning" ): - return False - return True + return super().filter_not_in_migration(attrs, not_bad_str_start) + else: + return True def migrate( self, recipe_dir: str, attrs: "AttrsTypedDict", **kwargs: Any @@ -554,7 +765,7 @@ def order( graph: nx.DiGraph, total_graph: nx.DiGraph, ) -> Sequence["PackageName"]: - """Run the order by number of decedents, ties are resolved by package name""" + """Run the order by number of decedents, ties are resolved by package name.""" return sorted( list(graph.nodes), key=lambda x: (len(nx.descendants(total_graph, x)), RNG.random()), @@ -613,71 +824,3 @@ def all_noarch(attrs, only_python=False): all_noarch = all_noarch and _all_noarch return all_noarch - - -def create_rebuild_graph( - gx: nx.DiGraph, - package_names: Sequence[str], - excluded_feedstocks: MutableSet[str] = None, - exclude_pinned_pkgs: bool = True, - include_noarch: bool = False, - include_build: bool = False, -) -> nx.DiGraph: - total_graph = copy.deepcopy(gx) - excluded_feedstocks = set() if excluded_feedstocks is None else excluded_feedstocks - # Generally, the packages themselves should be excluded from the migration; - # an example for exceptions are migrations for new python 
versions - # where numpy needs to be rebuilt despite being pinned. - if exclude_pinned_pkgs: - for node in package_names: - excluded_feedstocks.update(gx.graph["outputs_lut"].get(node, {node})) - - included_nodes = set() - - for node, node_attrs in gx.nodes.items(): - # always keep pinning - if node == "conda-forge-pinning": - continue - attrs: "AttrsTypedDict" = node_attrs["payload"] - requirements = attrs.get("requirements", {}) - host = requirements.get("host", set()) - build = requirements.get("build", set()) - if include_build: - bh = host | build - else: - bh = host or build - only_python = "python" in package_names - inclusion_criteria = bh & set(package_names) and ( - include_noarch or not all_noarch(attrs, only_python=only_python) - ) - # get host/build, run and test and launder them through outputs - # this should fix outputs related issues (eg gdal) - all_reqs = requirements.get("run", set()) - if inclusion_criteria: - all_reqs = all_reqs | requirements.get("test", set()) - all_reqs = all_reqs | bh - rq = get_deps_from_outputs_lut( - all_reqs, - gx.graph["outputs_lut"], - ) - - for e in list(total_graph.in_edges(node)): - if e[0] not in rq: - total_graph.remove_edge(*e) - if inclusion_criteria: - included_nodes.add(node) - - # all nodes have the conda-forge-pinning as child package - total_graph.add_edges_from([(n, "conda-forge-pinning") for n in total_graph.nodes]) - included_nodes.add("conda-forge-pinning") # it does not get added above - - # finally remove all nodes that should not be built from the graph - for node in list(total_graph.nodes): - # if there isn't a strict dependency or if the feedstock is excluded, - # remove it while retaining the edges to its parents and children - if (node not in included_nodes) or (node in excluded_feedstocks): - pluck(total_graph, node) - - # post plucking we can have several strange cases, lets remove all selfloops - total_graph.remove_edges_from(nx.selfloop_edges(total_graph)) - return total_graph diff --git a/conda_forge_tick/migrators/noarch_python_min.py b/conda_forge_tick/migrators/noarch_python_min.py index d970bae35..c426414fc 100644 --- a/conda_forge_tick/migrators/noarch_python_min.py +++ b/conda_forge_tick/migrators/noarch_python_min.py @@ -1,3 +1,4 @@ +import copy import functools import logging import os @@ -14,7 +15,6 @@ from conda_forge_tick.migrators.core import ( Migrator, MiniMigrator, - skip_migrator_due_to_schema, ) from conda_forge_tick.migrators.libboost import _slice_into_output_sections from conda_forge_tick.os_utils import pushd @@ -421,8 +421,9 @@ def __init__( self, *, pr_limit: int = 0, - graph: nx.DiGraph = None, - effective_graph: nx.DiGraph = None, + graph: nx.DiGraph | None = None, + effective_graph: nx.DiGraph | None = None, + total_graph: nx.DiGraph | None = None, piggy_back_migrations: Sequence[MiniMigrator] | None = None, ): if not hasattr(self, "_init_args"): @@ -434,30 +435,34 @@ def __init__( "graph": graph, "effective_graph": effective_graph, "piggy_back_migrations": piggy_back_migrations, + "total_graph": total_graph, } + self.name = "noarch_python_min" + + if total_graph is not None: + total_graph = copy.deepcopy(total_graph) + total_graph.clear_edges() + super().__init__( - pr_limit, + pr_limit=pr_limit, graph=graph, effective_graph=effective_graph, piggy_back_migrations=piggy_back_migrations, + total_graph=total_graph, ) - self.name = "noarch_python_min" - self._reset_effective_graph() + def filter_not_in_migration(self, attrs, not_bad_str_start=""): + if super().filter_not_in_migration(attrs, 
not_bad_str_start): + return True - def filter(self, attrs) -> bool: has_noarch_python = False for line in attrs.get("raw_meta_yaml", "").splitlines(): if line.lstrip().startswith("noarch: python"): has_noarch_python = True break - return ( - super().filter(attrs) - or (not has_noarch_python) - or skip_migrator_due_to_schema(attrs, self.allowed_schema_versions) - ) + return not has_noarch_python def migrate(self, recipe_dir, attrs, **kwargs): # if the feedstock has already been updated, return a migration ID diff --git a/conda_forge_tick/migrators/nvtools.py b/conda_forge_tick/migrators/nvtools.py index 4310513a3..6a0e6b5e6 100644 --- a/conda_forge_tick/migrators/nvtools.py +++ b/conda_forge_tick/migrators/nvtools.py @@ -13,6 +13,8 @@ from .core import Migrator +logger = logging.getLogger(__name__) + def _file_contains(filename: str, string: str) -> bool: """Return whether the given file contains the given string.""" @@ -81,27 +83,10 @@ class AddNVIDIATools(Migrator): allowed_schema_versions = [0] - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self._reset_effective_graph() - - def filter(self, attrs: "AttrsTypedDict", not_bad_str_start: str = "") -> bool: - """If true don't act upon node - - Parameters - ---------- - attrs : dict - The node attributes - not_bad_str_start : str, optional - If the 'bad' notice starts with the string then it is not - to be excluded. For example, rebuild migrations don't need - to worry about if the upstream can be fetched. Defaults to ``''`` + def filter_not_in_migration(self, attrs, not_bad_str_start=""): + if super().filter_not_in_migration(attrs, not_bad_str_start): + return True - Returns - ------- - bool : - True if node is to be skipped - """ has_nvidia = False if "meta_yaml" in attrs and "source" in attrs["meta_yaml"]: if isinstance(attrs["meta_yaml"]["source"], list): @@ -114,12 +99,12 @@ def filter(self, attrs: "AttrsTypedDict", not_bad_str_start: str = "") -> bool: "https://developer.download.nvidia.com" in src_url ) - return super().filter(attrs) or attrs["archived"] or (not has_nvidia) + return not has_nvidia def migrate( self, recipe_dir: str, attrs: AttrsTypedDict, **kwargs: Any ) -> MigrationUidTypedDict: - """Perform the migration, updating the ``meta.yaml`` + """Perform the migration, updating the ``meta.yaml``. Parameters ---------- @@ -140,17 +125,17 @@ def migrate( # STEP 1: Add cf-nvidia-tools to build requirements if _file_contains(meta, "cf-nvidia-tools"): - logging.debug("cf-nvidia-tools already in meta.yaml; not adding again.") + logger.debug("cf-nvidia-tools already in meta.yaml; not adding again.") else: if _insert_subsection( meta, "requirements", "build", - " - cf-nvidia-tools 1 # [linux]\n", + [" - cf-nvidia-tools 1 # [linux]\n"], ): - logging.debug("cf-nvidia-tools added to meta.yaml.") + logger.debug("cf-nvidia-tools added to meta.yaml.") else: - logging.warning( + logger.warning( "cf-nvidia-tools migration failed to add cf-nvidia-tools to meta.yaml. Manual migration required." ) @@ -158,30 +143,28 @@ def migrate( build = os.path.join(recipe_dir, "build.sh") if os.path.isfile(build): if _file_contains(build, "check-glibc"): - logging.debug( - "build.sh already contains check-glibc; not adding again." 
- ) + logger.debug("build.sh already contains check-glibc; not adding again.") else: with open(build, "a") as file: file.write( '\ncheck-glibc "$PREFIX"/lib*/*.so.* "$PREFIX"/bin/* "$PREFIX"/targets/*/lib*/*.so.* "$PREFIX"/targets/*/bin/*\n' ) - logging.debug("Added check-glibc to build.sh") + logger.debug("Added check-glibc to build.sh") else: if _file_contains(meta, "check-glibc"): - logging.debug( + logger.debug( "meta.yaml already contains check-glibc; not adding again." ) else: if _insert_subsection( meta, + "requirements", "build", - "script", - ' - check-glibc "$PREFIX"/lib*/*.so.* "$PREFIX"/bin/* "$PREFIX"/targets/*/lib*/*.so.* "$PREFIX"/targets/*/bin/* # [linux]\n', + [" - check-glibc # [linux]\n"], ): - logging.debug("Added check-glibc to meta.yaml") + logger.debug("Added check-glibc to meta.yaml") else: - logging.warning( + logger.warning( "cf-nvidia-tools migration failed to add check-glibc to meta.yaml. Manual migration required." ) @@ -203,13 +186,14 @@ def pr_title(self, feedstock_ctx: FeedstockContext) -> str: def pr_body( self, feedstock_ctx: ClonedFeedstockContext, add_label_text=True ) -> str: - """Create a PR message body + """Create a PR message body. Returns ------- - body: str + body The body of the PR message - :param feedstock_ctx: + feedstock_ctx + The current ClonedFeedstockContext """ body = """\ In order to ensure that NVIDIA's redistributed binaries (redists) are being packaged @@ -270,13 +254,9 @@ def pr_body( return body def commit_message(self, feedstock_ctx: FeedstockContext) -> str: - """Create a commit message - :param feedstock_ctx: - """ + """Create a commit message.""" return "BLD: Try to automatically add cf-nvidia-tols and check glibc" def remote_branch(self, feedstock_ctx: FeedstockContext) -> str: - """Branch to use on local and remote - :param feedstock_context: - """ + """Branch to use on local and remote.""" return "add-cf-nvidia-tools" diff --git a/conda_forge_tick/migrators/pip_check.py b/conda_forge_tick/migrators/pip_check.py index 719e527bd..3598cb4b7 100644 --- a/conda_forge_tick/migrators/pip_check.py +++ b/conda_forge_tick/migrators/pip_check.py @@ -147,7 +147,7 @@ def _adjust_test_dict(meta, key, mapping, groups, parent_group=None): class PipCheckMigrator(MiniMigrator): def filter(self, attrs: "AttrsTypedDict", not_bad_str_start: str = "") -> bool: - """run pip check if we see python in any host sections""" + """Run pip check if we see python in any host sections.""" build_host = ( attrs["requirements"].get("host", set()) or attrs["requirements"].get("build", set()) diff --git a/conda_forge_tick/migrators/pip_wheel_dep.py b/conda_forge_tick/migrators/pip_wheel_dep.py index 6e150fd6e..e9e490670 100644 --- a/conda_forge_tick/migrators/pip_wheel_dep.py +++ b/conda_forge_tick/migrators/pip_wheel_dep.py @@ -8,9 +8,9 @@ import requests from ruamel.yaml import YAML -from conda_forge_tick.lazy_json_backends import CF_TICK_GRAPH_GITHUB_BACKEND_BASE_URL from conda_forge_tick.migrators.core import MiniMigrator, skip_migrator_due_to_schema from conda_forge_tick.os_utils import pushd +from conda_forge_tick.settings import settings from conda_forge_tick.utils import get_keys_default if typing.TYPE_CHECKING: @@ -21,14 +21,17 @@ @functools.lru_cache() def pypi_conda_mapping() -> Dict[str, str]: - """Retrieves the most recent version of the pypi-conda name mapping dictionary. + """Retrieve the most recent version of the pypi-conda name mapping dictionary. 
- Result is a dictionary {pypi_name: conda_name} + Returns + ------- + Dict[str, str] + Format: {pypi_name: conda_name} """ yaml = YAML() content = requests.get( os.path.join( - CF_TICK_GRAPH_GITHUB_BACKEND_BASE_URL, + settings().graph_github_backend_raw_base_url, "mappings", "pypi", "grayskull_pypi_mapping.yaml", ) @@ -71,7 +74,7 @@ def filter(self, attrs: "AttrsTypedDict", not_bad_str_start: str = "") -> bool: return True version: str = self._get_version(attrs) - logger.debug(f"Checking if PyPI has a wheel for {version}") + logger.debug("Checking if PyPI has a wheel for %s", version) wheel_url, _ = self.determine_wheel(source_url, version) if wheel_url is None: diff --git a/conda_forge_tick/migrators/r_ucrt.py b/conda_forge_tick/migrators/r_ucrt.py index 04fc18bf5..9bab5bed1 100644 --- a/conda_forge_tick/migrators/r_ucrt.py +++ b/conda_forge_tick/migrators/r_ucrt.py @@ -41,7 +41,7 @@ def _cleanup_raw_yaml(raw_yaml): class RUCRTCleanup(MiniMigrator): - """Cleanup the R recipes for ucrt""" + """Cleanup the R recipes for ucrt.""" def filter(self, attrs: "AttrsTypedDict", not_bad_str_start: str = "") -> bool: return ( diff --git a/conda_forge_tick/migrators/recipe_v1.py b/conda_forge_tick/migrators/recipe_v1.py index f50f4f9b2..2a3707682 100644 --- a/conda_forge_tick/migrators/recipe_v1.py +++ b/conda_forge_tick/migrators/recipe_v1.py @@ -1,10 +1,11 @@ import logging +import re import typing from pathlib import Path from typing import Any from jinja2 import Environment -from jinja2.nodes import Compare, Node, Not +from jinja2.nodes import And, Compare, Node, Not from jinja2.parser import Parser from conda_forge_tick.migrators.core import MiniMigrator @@ -64,6 +65,36 @@ def is_negated_condition(a: Node, b: Node) -> bool: return False +def is_sub_condition(sub_node: Node, super_node: Node) -> bool: + return isinstance(sub_node, And) and super_node in (sub_node.left, sub_node.right) + + +def get_new_sub_condition(sub_cond: str, super_cond: str) -> str | None: + l_cond_re = re.compile( + r"^\s* (?P<p1> \( )? \s*" # optional "(" + + re.escape(super_cond) # super_cond + + r"\s* (?(p1) \) ) \s*" # matching ")" + r"and \s* (?P<p2> \( )? \s*" # "and", optional "(" + r"(?P<new_cond>.*?)" # new_cond + r"\s* (?(p2) \) ) \s*$", # matching ")" + re.VERBOSE, + ) + r_cond_re = re.compile( + r"^\s* (?P<p1> \( )? \s*" # optional "(" + r"(?P<new_cond>.*?)" # new_cond + r"\s* (?(p1) \) ) \s*" # matching ")" + r"and \s* (?P<p2> \( )? 
\s*" # "and", optional "(" + + re.escape(super_cond) # super_cond + + r"\s* (?(p2) \) ) \s*$", # matching ")" + re.VERBOSE, + ) + if match := l_cond_re.match(sub_cond): + return match.group("new_cond") + if match := r_cond_re.match(sub_cond): + return match.group("new_cond") + return None + + def fold_branch(source: Any, dest: Any, branch: str, dest_branch: str) -> None: if branch not in source: return @@ -84,13 +115,43 @@ def fold_branch(source: Any, dest: Any, branch: str, dest_branch: str) -> None: def combine_conditions(node: Any): - """Breadth first recursive call to combine list conditions""" - + """Breadth first recursive call to combine list conditions.""" # recursion is breadth first because we go through each element here # before calling `combine_conditions` on any element in the node if isinstance(node, list): # iterate in reverse order, so we can remove elements on the fly # start at index 1, since we can only fold to the previous node + + # fold subconditions into superconditions in the first run, since + # they can enable us to further combine same/opposite conditions later + for i in reversed(range(1, len(node))): + node_cond = get_condition(node[i]) + prev_cond = get_condition(node[i - 1]) + if node_cond is None or prev_cond is None: + continue + + if is_sub_condition(sub_node=node_cond, super_node=prev_cond): + new_cond = get_new_sub_condition( + sub_cond=node[i]["if"], super_cond=node[i - 1]["if"] + ) + if new_cond is not None: + node[i]["if"] = new_cond + if isinstance(node[i - 1]["then"], str): + node[i - 1]["then"] = [node[i - 1]["then"]] + node[i - 1]["then"].append(node[i]) + del node[i] + elif is_sub_condition(sub_node=prev_cond, super_node=node_cond): + new_cond = get_new_sub_condition( + sub_cond=node[i - 1]["if"], super_cond=node[i]["if"] + ) + if new_cond is not None: + node[i - 1]["if"] = new_cond + if isinstance(node[i]["then"], str): + node[i]["then"] = [node[i]["then"]] + node[i]["then"].insert(0, node[i - 1]) + del node[i - 1] + + # now combine same-level conditions for i in reversed(range(1, len(node))): node_cond = get_condition(node[i]) prev_cond = get_condition(node[i - 1]) diff --git a/conda_forge_tick/migrators/replacement.py b/conda_forge_tick/migrators/replacement.py index bc24826a0..f29774cc1 100644 --- a/conda_forge_tick/migrators/replacement.py +++ b/conda_forge_tick/migrators/replacement.py @@ -47,10 +47,11 @@ def __init__( old_pkg: "PackageName", new_pkg: "PackageName", rationale: str, - graph: nx.DiGraph = None, + graph: nx.DiGraph | None = None, pr_limit: int = 0, check_solvable=True, - effective_graph: nx.DiGraph = None, + effective_graph: nx.DiGraph | None = None, + total_graph: nx.DiGraph | None = None, ): if not hasattr(self, "_init_args"): self._init_args = [] @@ -64,14 +65,9 @@ def __init__( "pr_limit": pr_limit, "check_solvable": check_solvable, "effective_graph": effective_graph, + "total_graph": total_graph, } - super().__init__( - pr_limit, - check_solvable=check_solvable, - graph=graph, - effective_graph=effective_graph, - ) self.old_pkg = old_pkg self.new_pkg = new_pkg self.pattern = re.compile(r"\s*-\s*(%s)(\s+|$)" % old_pkg) @@ -79,9 +75,18 @@ def __init__( self.rationale = rationale self.name = f"{old_pkg}-to-{new_pkg}" - self._reset_effective_graph() + super().__init__( + pr_limit=pr_limit, + check_solvable=check_solvable, + graph=graph, + effective_graph=effective_graph, + total_graph=total_graph, + ) + + def filter_not_in_migration(self, attrs, not_bad_str_start=""): + if super().filter_not_in_migration(attrs, 
not_bad_str_start): + return True - def filter(self, attrs: "AttrsTypedDict", not_bad_str_start: str = "") -> bool: requirements = attrs.get("requirements", {}) rq = ( requirements.get("build", set()) @@ -89,7 +94,8 @@ def filter(self, attrs: "AttrsTypedDict", not_bad_str_start: str = "") -> bool: | requirements.get("run", set()) | requirements.get("test", set()) ) - return super().filter(attrs) or len(rq & self.packages) == 0 + + return len(rq & self.packages) == 0 def migrate( self, recipe_dir: str, attrs: "AttrsTypedDict", **kwargs: Any diff --git a/conda_forge_tick/migrators/staticlib.py b/conda_forge_tick/migrators/staticlib.py index a716c365f..bacd65e2c 100644 --- a/conda_forge_tick/migrators/staticlib.py +++ b/conda_forge_tick/migrators/staticlib.py @@ -1,3 +1,4 @@ +import copy import logging import os import re @@ -25,6 +26,7 @@ extract_section_from_yaml_text, get_keys_default, get_migrator_name, + get_recipe_schema_version, ) if typing.TYPE_CHECKING: @@ -39,7 +41,8 @@ def _left_gt_right_rec(lrec, rrec): """Compare two records, declaring the left one bigger if - the version and/or build number is bigger.""" + the version and/or build number is bigger. + """ lver = VersionOrder(lrec.version) lbuild = lrec.build_number rver = VersionOrder(rrec.version) @@ -238,7 +241,7 @@ def any_static_libs_out_of_date( raw_meta_yaml: str, schema_version: int = 0, ) -> (bool, dict[str, dict[str, str]]): - """check if any static libs are out of date for a given recipe and set of platforms. + """Check if any static libs are out of date for a given recipe and set of platforms. Parameters ---------- @@ -402,16 +405,17 @@ class StaticLibMigrator(GraphMigrator): def __init__( self, - graph: nx.DiGraph = None, + graph: nx.DiGraph | None = None, pr_limit: int = 0, bump_number: int = 1, piggy_back_migrations: Optional[Sequence[MiniMigrator]] = None, check_solvable=True, max_solver_attempts=3, - effective_graph: nx.DiGraph = None, + effective_graph: nx.DiGraph | None = None, force_pr_after_solver_attempts=10, longterm=False, paused=False, + total_graph: nx.DiGraph | None = None, ): if not hasattr(self, "_init_args"): self._init_args = [] @@ -428,8 +432,21 @@ def __init__( "longterm": longterm, "force_pr_after_solver_attempts": force_pr_after_solver_attempts, "paused": paused, + "total_graph": total_graph, } + self.top_level = set() + self.cycles = set() + self.bump_number = bump_number + self.max_solver_attempts = max_solver_attempts + self.longterm = longterm + self.force_pr_after_solver_attempts = force_pr_after_solver_attempts + self.paused = paused + + if total_graph is not None: + total_graph = copy.deepcopy(total_graph) + total_graph.clear_edges() + super().__init__( graph=graph, pr_limit=pr_limit, @@ -438,16 +455,8 @@ def __init__( check_solvable=check_solvable, effective_graph=effective_graph, name="static_lib_migrator", + total_graph=total_graph, ) - self.top_level = set() - self.cycles = set() - self.bump_number = bump_number - self.max_solver_attempts = max_solver_attempts - self.longterm = longterm - self.force_pr_after_solver_attempts = force_pr_after_solver_attempts - self.paused = paused - - self._reset_effective_graph() def predecessors_not_yet_built(self, attrs: "AttrsTypedDict") -> bool: # Check if all upstreams have been built @@ -470,17 +479,15 @@ def predecessors_not_yet_built(self, attrs: "AttrsTypedDict") -> bool: platform_arches=platform_arches, raw_meta_yaml=payload.get("raw_meta_yaml") or "", )[0]: - logger.debug("not yet built for new static libs: %s" % node) + logger.debug("not yet 
built for new static libs: %s", node) return True return False - def filter(self, attrs: "AttrsTypedDict", not_bad_str_start: str = "") -> bool: - """Determine whether feedstock needs to be filtered out. + def filter_not_in_migration(self, attrs, not_bad_str_start=""): + if super().filter_not_in_migration(attrs, not_bad_str_start): + return True - Return True to skip ("filter") the feedstock from the migration. - Return False to include the feedstock in the migration. - """ update_static_libs = get_keys_default( attrs, ["conda-forge.yml", "bot", "update_static_libs"], @@ -493,31 +500,24 @@ def filter(self, attrs: "AttrsTypedDict", not_bad_str_start: str = "") -> bool: "filter %s: static lib updates not enabled", attrs.get("name") or "", ) - return True - platform_arches = tuple(attrs.get("platforms") or []) - static_libs_out_of_date, slrep = any_static_libs_out_of_date( - platform_arches=platform_arches, - raw_meta_yaml=attrs.get("raw_meta_yaml") or "", - ) - if not static_libs_out_of_date: - logger.debug( - "filter %s: no static libs out of date\nmapping: %s", - attrs.get("name") or "", - static_libs_out_of_date, - slrep, + if update_static_libs: + platform_arches = tuple(attrs.get("platforms") or []) + static_libs_out_of_date, slrep = any_static_libs_out_of_date( + platform_arches=platform_arches, + raw_meta_yaml=attrs.get("raw_meta_yaml") or "", ) + if not static_libs_out_of_date: + logger.debug( + "filter %s: no static libs out of date\nmapping: %s", + attrs.get("name") or "", + slrep, + ) + _read_repodata.cache_clear() + else: + static_libs_out_of_date = False - retval = ( - (not update_static_libs) - or (not static_libs_out_of_date) - or super().filter( - attrs=attrs, - not_bad_str_start=not_bad_str_start, - ) - ) - _read_repodata.cache_clear() - return retval + return (not update_static_libs) or (not static_libs_out_of_date) def migrate( self, recipe_dir: str, attrs: "AttrsTypedDict", **kwargs: Any @@ -534,12 +534,7 @@ def migrate( with open("meta.yaml") as f: raw_meta_yaml = f.read() - schema_version = get_keys_default( - attrs, - ["meta_yaml", "schema_version"], - {}, - 0, - ) + schema_version = get_recipe_schema_version(attrs) needs_update, static_lib_replacements = any_static_libs_out_of_date( platform_arches=platform_arches, diff --git a/conda_forge_tick/migrators/version.py b/conda_forge_tick/migrators/version.py index 60f4c3bcf..d0e3a1633 100644 --- a/conda_forge_tick/migrators/version.py +++ b/conda_forge_tick/migrators/version.py @@ -18,7 +18,11 @@ from conda_forge_tick.models.pr_info import MigratorName from conda_forge_tick.update_deps import get_dep_updates_and_hints from conda_forge_tick.update_recipe import update_version, update_version_v1 -from conda_forge_tick.utils import get_keys_default, sanitize_string +from conda_forge_tick.utils import ( + get_keys_default, + get_recipe_schema_version, + sanitize_string, +) if typing.TYPE_CHECKING: from conda_forge_tick.migrators_types import ( @@ -56,74 +60,97 @@ def _fmt_error_message(errors, version): class Version(Migrator): - """Migrator for version bumping of packages""" + """Migrator for version bumping of packages.""" max_num_prs = 3 migrator_version = 0 rerender = True - name = MigratorName.VERSION + name = str(MigratorName.VERSION) allowed_schema_versions = {0, 1} + pluck_nodes = False - def __init__(self, python_nodes, *args, **kwargs): + def __init__( + self, + python_nodes, + *args, + total_graph: nx.DiGraph | None = None, + graph: nx.DiGraph | None = None, + effective_graph: nx.DiGraph | None = None, + **kwargs, 
+ ): if not hasattr(self, "_init_args"): self._init_args = [python_nodes, *args] if not hasattr(self, "_init_kwargs"): self._init_kwargs = copy.deepcopy(kwargs) + self._init_kwargs["total_graph"] = total_graph + self._init_kwargs["graph"] = graph + self._init_kwargs["effective_graph"] = effective_graph self.python_nodes = python_nodes if "check_solvable" in kwargs: kwargs.pop("check_solvable") - super().__init__(*args, **kwargs, check_solvable=False) - self._new_version = None + super().__init__( + *args, + **kwargs, + check_solvable=False, + total_graph=total_graph, + graph=graph, + effective_graph=effective_graph, + ) - self._reset_effective_graph() + def filter_not_in_migration(self, attrs, not_bad_str_start="", new_version=None): + if super().filter_not_in_migration(attrs, not_bad_str_start): + return True - def filter( - self, - attrs: "AttrsTypedDict", - not_bad_str_start: str = "", - new_version=None, - ) -> bool: # if no new version do nothing if new_version is None: vpri = attrs.get("version_pr_info", {}) if "new_version" not in vpri or not vpri["new_version"]: - return True - new_version = vpri["new_version"] - self._new_version = new_version + no_new_version = True + else: + new_version = vpri["new_version"] + no_new_version = False + else: + no_new_version = False - # if no jinja2 version, then move on + if no_new_version: + return True - schema_version = get_keys_default( - attrs, - ["meta_yaml", "schema_version"], - {}, - 0, + fs_name = ( + attrs.get("feedstock_name", "") + or attrs.get("name", "") + or "!!NO_FEEDSTOCK_NAME!!" ) + + # if no jinja2 version, then move on + no_jinja2_ver = False + schema_version = get_recipe_schema_version(attrs) if schema_version == 0: if "raw_meta_yaml" not in attrs: - return True + no_jinja2_ver = True if "{% set version" not in attrs["raw_meta_yaml"]: - return True + no_jinja2_ver = True elif schema_version == 1: # load yaml and check if context is there if "raw_meta_yaml" not in attrs: - return True + no_jinja2_ver = True yaml = load_yaml(attrs["raw_meta_yaml"]) if "context" not in yaml: - return True + no_jinja2_ver = True if "version" not in yaml["context"]: - return True + no_jinja2_ver = True else: raise NotImplementedError("Schema version not implemented!") - conditional = super().filter(attrs) - result = bool( - conditional # if archived/finished/schema version skip - or len( + if no_jinja2_ver: + logger.debug("No jinja2 version found for feedstock %s, skipping!", fs_name) + return True + + too_many_prs = ( + len( [ k for k in attrs.get("pr_info", {}).get("PRed", []) @@ -133,9 +160,15 @@ def filter( ], ) > self.max_num_prs - or not new_version, # if no new version ) + if too_many_prs: + logger.debug( + "Too many PRs open for feedstock %s, skipping!", + fs_name, + ) + return True + try: version_filter = ( # if new version is less than current version @@ -153,12 +186,17 @@ def filter( ) ) except conda.exceptions.InvalidVersionSpec as e: - name = attrs.get("name", "") warnings.warn( - f"Failed to filter to to invalid version for {name}\nException: {e}", + f"Failed to order versions to invalid version for {fs_name}, skipping!\nException: {e}", ) version_filter = True + if version_filter: + logger.debug( + "Version filter failed for feedstock %s, skipping!", + fs_name, + ) + skip_filter = False random_fraction_to_keep = get_keys_default( attrs, @@ -166,22 +204,25 @@ def filter( {}, None, ) - logger.debug("random_fraction_to_keep: %r", random_fraction_to_keep) + logger.debug( + "%s: random_fraction_to_keep: %r", fs_name, 
random_fraction_to_keep + ) if random_fraction_to_keep is not None: curr_state = random.getstate() try: frac = float(random_fraction_to_keep) # the seeding here makes the filter stable given new version - random.seed(a=self._new_version.replace("-", ".")) + random.seed(a=new_version.replace("-", ".")) urand = random.uniform(0, 1) if urand >= frac: skip_filter = True logger.info( - "random version skip: version=%s, fraction=%f, urand=%f, skip=%r", - self._new_version.replace("-", "."), + "%s: random version skip: version=%s, fraction=%f, urand=%f, skip=%r", + fs_name, + new_version.replace("-", "."), frac, urand, skip_filter, @@ -189,6 +230,12 @@ def filter( finally: random.setstate(curr_state) + if skip_filter: + logger.debug( + "Skip due to random version skips for feedstock %s, skipping!", fs_name + ) + return True + ignore_filter = False versions_to_ignore = get_keys_default( attrs, @@ -202,6 +249,14 @@ def filter( ): ignore_filter = True + if ignore_filter: + logger.debug( + "Skip due to ignored version %s for feedstock %s, skipping!", + new_version, + fs_name, + ) + return True + skip_me = get_keys_default( attrs, ["conda-forge.yml", "bot", "version_updates", "skip"], @@ -209,8 +264,22 @@ def filter( False, ) - self._new_version = None - return result or version_filter or skip_filter or ignore_filter or skip_me + if skip_me: + logger.debug( + "Skip due to skipped flag for feedstock %s, skipping!", fs_name + ) + return True + + return ( + no_new_version + or (not new_version) + or no_jinja2_ver + or too_many_prs + or version_filter + or skip_filter + or ignore_filter + or skip_me + ) def migrate( self, @@ -219,7 +288,8 @@ def migrate( hash_type: str = "sha256", **kwargs: Any, ) -> "MigrationUidTypedDict": - version = attrs["new_version"] + version = attrs.get("version_pr_info", {}).get("new_version", None) + recipe_dir = Path(recipe_dir) recipe_path = None recipe_path_v0 = recipe_dir / "meta.yaml" @@ -399,12 +469,18 @@ def _hint_and_maybe_update_deps(self, feedstock_ctx: ClonedFeedstockContext): return hint def commit_message(self, feedstock_ctx: FeedstockContext) -> str: - assert isinstance(feedstock_ctx.attrs["new_version"], str) - return "updated v" + feedstock_ctx.attrs["new_version"] + new_version = feedstock_ctx.attrs.get("version_pr_info", {}).get( + "new_version", None + ) + assert isinstance(new_version, str) + return "updated v" + new_version def pr_title(self, feedstock_ctx: FeedstockContext) -> str: - assert isinstance(feedstock_ctx.attrs["new_version"], str) - # TODO: turn False to True when we default to automerge + new_version = feedstock_ctx.attrs.get("version_pr_info", {}).get( + "new_version", None + ) + assert isinstance(new_version, str) + amerge = get_keys_default( feedstock_ctx.attrs, ["conda-forge.yml", "bot", "automerge"], @@ -416,12 +492,7 @@ def pr_title(self, feedstock_ctx: FeedstockContext) -> str: else: add_slug = "" - return ( - add_slug - + feedstock_ctx.feedstock_name - + " v" - + feedstock_ctx.attrs["new_version"] - ) + return add_slug + feedstock_ctx.feedstock_name + " v" + new_version def remote_branch(self, feedstock_ctx: FeedstockContext) -> str: assert isinstance(feedstock_ctx.attrs["version_pr_info"]["new_version"], str) @@ -429,10 +500,7 @@ def remote_branch(self, feedstock_ctx: FeedstockContext) -> str: def migrator_uid(self, attrs: "AttrsTypedDict") -> "MigrationUidTypedDict": n = super().migrator_uid(attrs) - if self._new_version is not None: - new_version = self._new_version - else: - new_version = attrs["new_version"] + new_version = 
attrs.get("version_pr_info", {}).get("new_version", None) n["version"] = new_version return n diff --git a/conda_forge_tick/models/common.py b/conda_forge_tick/models/common.py index 77b548fae..2d2dce425 100644 --- a/conda_forge_tick/models/common.py +++ b/conda_forge_tick/models/common.py @@ -30,8 +30,12 @@ class ValidatedBaseModel(BaseModel): def before_validator_ensure_dict(value: Any) -> dict: - """ - Ensure that a value is a dictionary. If it is not, raise a ValueError. + """Ensure that a value is a dictionary. + + Raises + ------ + ValueError + If the value is not a dictionary. """ if not isinstance(value, dict): raise ValueError( @@ -53,9 +57,7 @@ class Set(StrictBaseModel, Generic[T]): def none_to_empty_list(value: T | None) -> T | list[Never]: - """ - Convert `None` to an empty list. Everything else is kept as is. - """ + """Convert `None` to an empty list. Everything else is kept as is.""" if value is None: return [] return value @@ -70,9 +72,7 @@ def none_to_empty_list(value: T | None) -> T | list[Never]: def none_to_empty_dict(value: T | None) -> T | dict[Never]: - """ - Convert `None` to an empty dict. Everything else is kept as is. - """ + """Convert `None` to an empty dict. Everything else is kept as is.""" if value is None: return {} return value @@ -87,9 +87,7 @@ def none_to_empty_dict(value: T | None) -> T | dict[Never]: def convert_to_list(value: T) -> list[T]: - """ - Convert a single value to a list. - """ + """Convert a single value to a list.""" return [value] @@ -100,8 +98,12 @@ def convert_to_list(value: T) -> list[T]: def empty_string_to_none(value: Any) -> None: - """ - Convert an empty string to `None`. None is kept as is. + """Convert an empty string to `None`. None is kept as is. + + Raises + ------ + ValueError + If the value is neither an empty string nor `None`. """ if value is None or value == "": return None @@ -117,8 +119,12 @@ def empty_string_to_none(value: Any) -> None: def split_string_newline(value: Any) -> list[str]: - """ - Split a string by newlines. + """Split a string by newlines. + + Raises + ------ + ValueError + If the value is not a string. """ if not isinstance(value, str): raise ValueError("value must be a string") @@ -132,8 +138,12 @@ def split_string_newline(value: Any) -> list[str]: def false_to_none(value: Any) -> None: - """ - Convert `False` to `None`. Keep `None` as is. + """Convert `False` to `None`. Keep `None` as is. + + Raises + ------ + ValueError + If the value is not `False` or `None`. """ if value is False or value is None: return None @@ -183,17 +193,13 @@ def try_parse_conda_version(value: str) -> str: class PrInfoLazyJsonReference(StrictBaseModel): - """ - A lazy reference to a pr_info JSON object. - """ + """A lazy reference to a pr_info JSON object.""" json_reference: str = Field(pattern=r"pr_info/.*\.json$", alias="__lazy_json__") class VersionPrInfoLazyJsonReference(StrictBaseModel): - """ - A lazy reference to a version_pr_info JSON object. - """ + """A lazy reference to a version_pr_info JSON object.""" json_reference: str = Field( pattern=r"version_pr_info/.*\.json$", alias="__lazy_json__" @@ -201,8 +207,6 @@ class VersionPrInfoLazyJsonReference(StrictBaseModel): class PrJsonLazyJsonReference(StrictBaseModel): - """ - A lazy reference to a pr_json JSON object. 
- """ + """A lazy reference to a pr_json JSON object.""" json_reference: str = Field(pattern=r"pr_json/.*\.json$", alias="__lazy_json__") diff --git a/conda_forge_tick/models/meta_yaml.py b/conda_forge_tick/models/meta_yaml.py index 950bc40d4..c9a5edc8c 100644 --- a/conda_forge_tick/models/meta_yaml.py +++ b/conda_forge_tick/models/meta_yaml.py @@ -76,17 +76,13 @@ class LocalPathSource(BaseSource): class PatchesOnlySource(BaseSource, StrictBaseModel): - """ - This case happens due to selectors and rendering of the `source` field. - """ + """Happens due to selectors and rendering of the `source` field.""" pass class FilenameOnlySource(BaseSource, StrictBaseModel): - """ - This case happens due to selectors and rendering of the `source` field. - """ + """Happens due to selectors and rendering of the `source` field.""" filename: str | None = Field(None, alias="fn") diff --git a/conda_forge_tick/models/node_attributes.py b/conda_forge_tick/models/node_attributes.py index 98b9c90aa..303f7fcff 100644 --- a/conda_forge_tick/models/node_attributes.py +++ b/conda_forge_tick/models/node_attributes.py @@ -147,10 +147,17 @@ class NodeAttributesValid(StrictBaseModel): @classmethod def validate_platform_info(cls, data: Any) -> Any: """ + Validate the `platform_info` field. + The current autotick-bot implementation makes use of `PLATFORM_meta_yaml` and `PLATFORM_requirements` fields that are present in this model, where PLATFORM is a build platform present in `platforms`. This data model is a bit too complex for what it does, so we transform it into a simpler model that is easier to work with. See platform_info above for the new model. + + Raises + ------ + ValueError + If the `platform_info` field is present in the old model. """ data = before_validator_ensure_dict(data) @@ -174,8 +181,12 @@ def validate_platform_info(cls, data: Any) -> Any: @model_validator(mode="after") def check_all_platform_infos_present(self) -> Self: - """ - Ensure that the `platform_info` field is present for all build platforms in the `platforms` field. + """Ensure that the `platform_info` field is present for all build platforms in the `platforms` field. + + Raises + ------ + ValueError + If the condition is violated. """ if set(self.platform_info.keys()) != self.platforms: raise ValueError( @@ -187,10 +198,7 @@ def check_all_platform_infos_present(self) -> Self: def serialize_platform_info( self, wrapped_serializer: SerializerFunctionWrapHandler ) -> dict[str, Any]: - """ - Serialize the `platform_info` field into the old model. - """ - + """Serialize the `platform_info` field into the old model.""" serialized_model: dict[str, Any] = wrapped_serializer(self) serialized_model.update( @@ -288,8 +296,14 @@ def check_version_match(self) -> Self: Ensure that the version field matches the version field in the meta_yaml field. If the top-level version is None, all outputs must specify their own versions. - The top-level version should match at least one of the outputs, but may not match all of them. + + Raises + ------ + ValueError + If the top-level version is None, but not all outputs specify their own versions. + If the top-level version is None, but does not match at least one of the outputs. + If the version field does not match the package.version field in the meta_yaml field. """ if self.meta_yaml.package.version is None: output_versions = set() @@ -324,9 +338,7 @@ def check_version_match(self) -> Self: class NodeAttributesError(ValidatedBaseModel): - """ - If a parsing error occurred, any number of fields can be missing. 
- """ + """If a parsing error occurred, any number of fields can be missing.""" parsing_error: str """ diff --git a/conda_forge_tick/models/pr_info.py b/conda_forge_tick/models/pr_info.py index 2245f2434..e497ef4c2 100644 --- a/conda_forge_tick/models/pr_info.py +++ b/conda_forge_tick/models/pr_info.py @@ -46,13 +46,12 @@ def one_plus_to_one(value: Any) -> int: class MigratorName(StrEnum): - """ - Each value here corresponds to a subclass of migrators.core.Migrator in the codebase. - """ + """Each value here corresponds to a subclass of migrators.core.Migrator in the codebase.""" VERSION = "Version" ARCH_REBUILD = "ArchRebuild" OSX_ARM = "OSXArm" + WIN_ARM64 = "WinArm64" MIGRATION_YAML = "MigrationYaml" REBUILD = "Rebuild" BLAS_REBUILD = "BlasRebuild" @@ -92,9 +91,7 @@ class MigratorName(StrEnum): class MigrationPullRequestData(StrictBaseModel): - """ - Sometimes, this object is called `migrator_uid` or `MigrationUidTypedDict` in the code. - """ + """Sometimes, this object is called `migrator_uid` or `MigrationUidTypedDict` in the code.""" bot_rerun: bool | datetime """ @@ -201,9 +198,17 @@ class MigrationPullRequest(StrictBaseModel): @classmethod def validate_keys(cls, input_data: Any) -> Any: """ + Validate the keys field against the data field. + The current implementation uses a field "keys" which is a list of all keys present in the MigrationPullRequestData object, duplicating them. This list is redundant and should be removed. The consistency of this field is validated here, after which it is removed. + + Raises + ------ + ValueError + If the keys field or the data field is missing, has the wrong type + or the keys field does not exactly match the keys of the data field. """ input_data = before_validator_ensure_dict(input_data) @@ -229,9 +234,7 @@ def validate_keys(cls, input_data: Any) -> Any: class ExceptionInfo(StrictBaseModel): - """ - Information about an exception that occurred while performing migrations. - """ + """Information about an exception that occurred while performing migrations.""" exception: str """ diff --git a/conda_forge_tick/models/pr_json.py b/conda_forge_tick/models/pr_json.py index be0f884e5..d005b603f 100644 --- a/conda_forge_tick/models/pr_json.py +++ b/conda_forge_tick/models/pr_json.py @@ -88,6 +88,7 @@ class GithubPullRequestBase(ValidatedBaseModel): class PullRequestDataValid(ValidatedBaseModel): """ Information about a pull request, as retrieved from the GitHub API. + Refer to git_utils.PR_KEYS_TO_KEEP for the keys that are kept in the PR object. ALSO UPDATE PR_KEYS_TO_KEEP IF YOU CHANGE THIS CLASS! diff --git a/conda_forge_tick/os_utils.py b/conda_forge_tick/os_utils.py index 7663155e6..e48a410d2 100644 --- a/conda_forge_tick/os_utils.py +++ b/conda_forge_tick/os_utils.py @@ -34,7 +34,7 @@ def override_env(name, value): def eval_cmd(cmd: list[str], **kwargs) -> str: - """run a command capturing stdout + """Run a command capturing stdout. stderr is printed for debugging any kwargs are added to the env @@ -63,11 +63,17 @@ def clean_disk_space(ci_service: str = "github-actions") -> None: ci_service : str, optional The CI service to clean up disk space for. Currently only "github-actions" is supported. Default is "github-actions". + + Raises + ------ + ValueError + If the provided ci_service is not recognized. 
""" with tempfile.TemporaryDirectory() as tempdir, pushd(tempdir): with open("clean_disk.sh", "w") as f: if ci_service == "github-actions": - f.write("""\ + f.write( + """\ #!/bin/bash # clean disk space @@ -84,14 +90,16 @@ def clean_disk_space(ci_service: str = "github-actions") -> None: ; do sudo rsync --stats -a --delete /opt/empty_dir/ $d || true done - sudo apt-get purge -y -f firefox \ + # dpkg does not fail if the package is not installed + sudo dpkg --remove -y -f firefox \ google-chrome-stable \ microsoft-edge-stable sudo apt-get autoremove -y >& /dev/null sudo apt-get autoclean -y >& /dev/null sudo docker image prune --all --force df -h -""") +""" + ) else: raise ValueError(f"Unknown CI service: {ci_service}") diff --git a/conda_forge_tick/path_lengths.py b/conda_forge_tick/path_lengths.py index e902ea560..9768b4374 100644 --- a/conda_forge_tick/path_lengths.py +++ b/conda_forge_tick/path_lengths.py @@ -1,7 +1,4 @@ -""" -Functions to find the longest paths between nodes in a graph. - -""" +"""Functions to find the longest paths between nodes in a graph.""" from collections import defaultdict from copy import deepcopy @@ -34,7 +31,6 @@ def cyclic_topological_sort(graph: DiGraph, sources: Iterable[T]) -> Sequence[T] The nodes of `graph` in topological sort order. """ - g2 = deepcopy(graph) order: List[T] = [] for source in sorted(sources): @@ -68,7 +64,6 @@ def get_longest_paths(graph: DiGraph, source: str) -> Dict[str, float]: values are the lengths of the longest path from `source`. """ - dist = {node: -float("inf") for node in graph} dist[source] = 0 visited = [] @@ -101,7 +96,6 @@ def get_levels(graph: DiGraph, source: str) -> DefaultDict[float, Set[str]]: nodes in `graph` with longest path length equal to the key. """ - g2 = deepcopy(graph) desc = nx.algorithms.descendants(graph, source) for node in graph.nodes: diff --git a/conda_forge_tick/provide_source_code.py b/conda_forge_tick/provide_source_code.py index 28f6f83c6..4ea70e918 100644 --- a/conda_forge_tick/provide_source_code.py +++ b/conda_forge_tick/provide_source_code.py @@ -14,6 +14,12 @@ ) from conda_forge_feedstock_ops.os_utils import chmod_plus_rwX, sync_dirs +from conda_forge_tick.settings import ( + ENV_CONDA_FORGE_ORG, + ENV_GRAPH_GITHUB_BACKEND_REPO, + settings, +) + logger = logging.getLogger(__name__) CONDA_BUILD_SPECIAL_KEYS = ( @@ -39,8 +45,8 @@ def provide_source_code(recipe_dir, use_container=None): variable `CF_FEEDSTOCK_OPS_IN_CONTAINER` is 'false'. This feature can be used to avoid container in container calls. - Returns - ------- + Yields + ------ str The path to the source code directory. """ @@ -64,8 +70,8 @@ def provide_source_code_containerized(recipe_dir): recipe_dir : str The path to the recipe directory. - Returns - ------- + Yields + ------ str The path to the source code directory. 
""" @@ -75,9 +81,11 @@ def provide_source_code_containerized(recipe_dir): chmod_plus_rwX(tmpdir) - logger.debug(f"host recipe dir {recipe_dir}: {os.listdir(recipe_dir)}") + logger.debug("host recipe dir %s: %s", recipe_dir, os.listdir(recipe_dir)) logger.debug( - f"copied host recipe dir {tmp_recipe_dir}: {os.listdir(tmp_recipe_dir)}" + "copied host recipe dir %s: %s", + tmp_recipe_dir, + os.listdir(tmp_recipe_dir), ) tmp_source_dir = os.path.join(tmpdir, "source_dir") @@ -92,6 +100,12 @@ def provide_source_code_containerized(recipe_dir): args, mount_readonly=False, mount_dir=tmpdir, + extra_container_args=[ + "-e", + f"{ENV_CONDA_FORGE_ORG}={settings().conda_forge_org}", + "-e", + f"{ENV_GRAPH_GITHUB_BACKEND_REPO}={settings().graph_github_backend_repo}", + ], ) yield tmp_source_dir @@ -116,6 +130,11 @@ def provide_source_code_local(recipe_dir): ------- str The path to the source code directory. + + Raises + ------ + RuntimeError + If there is an error in getting the conda build source code or printing it. """ out = None diff --git a/conda_forge_tick/pypi_name_mapping.py b/conda_forge_tick/pypi_name_mapping.py index ddb090c58..d544311f2 100644 --- a/conda_forge_tick/pypi_name_mapping.py +++ b/conda_forge_tick/pypi_name_mapping.py @@ -1,5 +1,5 @@ """ -Builds and maintains mapping of pypi-names to conda-forge names +Builds and maintains mapping of pypi-names to conda-forge names. 1: Packages should be build from a `https://pypi.io/packages/` source 2: Packages MUST have a test: imports section importing it @@ -23,12 +23,12 @@ from .import_to_pkg import IMPORT_TO_PKG_DIR_CLOBBERING from .lazy_json_backends import ( CF_TICK_GRAPH_DATA_BACKENDS, - CF_TICK_GRAPH_GITHUB_BACKEND_BASE_URL, LazyJson, dump, get_all_keys_for_hashmap, loads, ) +from .settings import settings from .utils import as_iterable, load_existing_graph @@ -107,7 +107,7 @@ def _imports_to_canonical_import( split_imports: Set[Tuple[str, ...]], parent_prefix=(), ) -> Union[Tuple[str, ...], Literal[""]]: - """Extract the canonical import name from a list of imports + """Extract the canonical import name from a list of imports. We have two rules. @@ -190,7 +190,8 @@ def convert_to_grayskull_style_yaml( best_imports: Dict[str, Mapping], ) -> Dict[PypiName, Mapping]: """Convert our list style mapping to the pypi-centric version - required by grayskull by reindexing on the PyPI name""" + required by grayskull by reindexing on the PyPI name. + """ package_mappings = best_imports.values() sorted_mappings = sorted(package_mappings, key=lambda mapping: mapping["pypi_name"]) @@ -244,6 +245,11 @@ def resolve_collisions(collisions: List[Mapping]) -> Mapping: """Given a list of colliding mappings, try to resolve the collision by picking out the unique mapping whose source is from the static mappings file. If there is a problem, then make a guess, print a warning, and continue. + + Raises + ------ + ValueError + If there are no collisions to resolve. """ if len(collisions) == 0: raise ValueError("No collisions to resolve!") @@ -320,7 +326,7 @@ def determine_best_matches_for_pypi_import( clobberers = loads( requests.get( os.path.join( - CF_TICK_GRAPH_GITHUB_BACKEND_BASE_URL, + settings().graph_github_backend_raw_base_url, IMPORT_TO_PKG_DIR_CLOBBERING, ) ).text, @@ -347,7 +353,9 @@ def determine_best_matches_for_pypi_import( } def _score(conda_name, conda_name_is_feedstock_name=True, pkg_clobbers=False): - """A higher score means less preferred""" + """Get the score. + A higher score means less preferred. 
+ """ mapping_src = map_by_conda_name.get(conda_name, {}).get( "mapping_source", "other", @@ -367,7 +375,7 @@ def _score(conda_name, conda_name_is_feedstock_name=True, pkg_clobbers=False): ) def score(pkg_name): - """Base the score on + """Score a package name. Packages that are hubs are preferred. In the event of ties, fall back to the one with the lower authority score diff --git a/conda_forge_tick/pypi_name_mapping_static.yaml b/conda_forge_tick/pypi_name_mapping_static.yaml index 96a8d814c..771288d7d 100644 --- a/conda_forge_tick/pypi_name_mapping_static.yaml +++ b/conda_forge_tick/pypi_name_mapping_static.yaml @@ -123,3 +123,11 @@ - pypi_name: pyyaml import_name: yaml conda_name: pyyaml + +- pypi_name: memory_allocator + import_name: memory_allocator + conda_name: memory-allocator + +- pypi_name: symengine + import_name: symengine + conda_name: python-symengine diff --git a/conda_forge_tick/recipe_parser/_parser.py b/conda_forge_tick/recipe_parser/_parser.py index a0f3374ab..44921df42 100644 --- a/conda_forge_tick/recipe_parser/_parser.py +++ b/conda_forge_tick/recipe_parser/_parser.py @@ -48,7 +48,7 @@ def _get_yaml_parser(typ="jinja2"): - """yaml parser that is jinja2 aware""" + """Yaml parser that is jinja2 aware.""" # using a function here so settings are always the same def represent_none(self, data): @@ -164,7 +164,7 @@ def _parse_jinja2_variables(meta_yaml: str) -> dict: def _munge_line(line: str) -> str: - """turn lines like + """Turn lines like. key: val # [sel] @@ -194,7 +194,7 @@ def _munge_line(line: str) -> str: def _unmunge_line(line: str) -> str: - """turn lines like + """Turn lines like. key__###conda-selector###__sel: val @@ -249,7 +249,7 @@ def _unmunge_split_key_value_pairs_with_selectors(lines): def _munge_multiline_jinja2(lines): - """puts a comment slug in front of any multiline jinja2 statements""" + """Put a comment slug in front of any multiline jinja2 statements.""" in_statement = False special_end_slug_re = [] new_lines = [] @@ -285,7 +285,7 @@ def _munge_multiline_jinja2(lines): def _unmunge_multiline_jinja2(lines): - """removes a comment slug in front of any multiline jinja2 statements""" + """Remove a comment slug in front of any multiline jinja2 statements.""" start_slug = "# {# " + JINJA2_ML_SLUG start = len(start_slug) stop = len(" #}\n") @@ -299,8 +299,8 @@ def _unmunge_multiline_jinja2(lines): def _demunge_jinja2_vars(meta: Union[dict, list], sentinel: str) -> Union[dict, list]: - """recursively iterate through dictionary / list and replace any instance - in any string of `<{` with '{{' + """Recursively iterate through dictionary / list and replace any instance + in any string of `<{` with '{{'. """ if isinstance(meta, collections.abc.MutableMapping): for key, val in meta.items(): @@ -317,8 +317,8 @@ def _demunge_jinja2_vars(meta: Union[dict, list], sentinel: str) -> Union[dict, def _remunge_jinja2_vars(meta: Union[dict, list], sentinel: str) -> Union[dict, list]: - """recursively iterate through dictionary / list and replace any instance - in any string of `{{` with '<{' + """Recursively iterate through dictionary / list and replace any instance + in any string of `{{` with '<{'. """ if isinstance(meta, collections.abc.MutableMapping): for key, val in meta.items(): @@ -460,7 +460,7 @@ def _build_jinja2_expr_tmp(jinja2_exprs): def _remove_quoted_jinja2_vars(lines): - """Remove any quoted jinja2 vars from the lines. + r"""Remove any quoted jinja2 vars from the lines. 
Sometimes people write @@ -516,6 +516,17 @@ def _remove_bad_jinja2_set_statements(lines): return new_lines +def _munge_jinj2_comments(lines): + """Turn any jinja2 comments: `{# #}` into yaml comments.""" + new_lines = [] + for line in lines: + if line.lstrip().startswith("{#") and line.rstrip().endswith("#}"): + line = line.replace("{#", "#").replace("#}", "") + line = line.rstrip() + "\n" + new_lines.append(line) + return new_lines + + class CondaMetaYAML: """Crude parsing of conda recipes. @@ -570,6 +581,9 @@ def __init__(self, meta_yaml: str): # pre-munge odd syntax that we do not want lines = list(io.StringIO(meta_yaml).readlines()) + # turn jinja2 comments in yaml ones + lines = _munge_jinj2_comments(lines) + # remove bad jinja2 set statements lines = _remove_bad_jinja2_set_statements(lines) @@ -606,8 +620,8 @@ def __init__(self, meta_yaml: str): self.meta = _demunge_jinja2_vars(self.meta, self._jinja2_sentinel) def eval_jinja2_exprs(self, jinja2_vars): - """Using a set of values for the jinja2 vars, evaluate the - jinja2 template to get any jinja2 expression values. + """Evaluate the jinja2 template to get any jinja2 expression values, + using a set of values for the jinja2 vars. Parameters ---------- @@ -653,7 +667,7 @@ def eval_jinja2_exprs(self, jinja2_vars): return _parser.load(jinja2.Template(tmpl).render(**jinja2_vars)) def dumps(self): - """Dump the recipe to a string""" + """Dump the recipe to a string.""" buff = io.StringIO() self.dump(buff) buff.seek(0) diff --git a/conda_forge_tick/settings.py b/conda_forge_tick/settings.py new file mode 100644 index 000000000..470099073 --- /dev/null +++ b/conda_forge_tick/settings.py @@ -0,0 +1,129 @@ +import contextlib +from typing import Annotated + +from pydantic import Field +from pydantic_settings import BaseSettings, SettingsConfigDict + +ENVIRONMENT_PREFIX = "CF_TICK_" +""" +All environment variables are expected to be prefixed with this. +""" + +ENV_CONDA_FORGE_ORG = ENVIRONMENT_PREFIX + "CONDA_FORGE_ORG" +""" +The environment variable used to set the `conda_forge_org` setting. +Note: This must match the field name in the `BotSettings` class. +""" + +ENV_GRAPH_GITHUB_BACKEND_REPO = ENVIRONMENT_PREFIX + "GRAPH_GITHUB_BACKEND_REPO" +""" +The environment variable used to set the `graph_github_backend_repo` setting. +Note: This must match the field name in the `BotSettings` class. +""" + +Fraction = Annotated[float, Field(ge=0.0, le=1.0)] + + +class BotSettings(BaseSettings): + """ + The global settings for the bot. + + To configure a settings value, set the corresponding environment variable with the prefix `CF_TICK_`. + For example, to set the `graph_github_backend_repo` setting, set the environment variable + `CF_TICK_GRAPH_GITHUB_BACKEND_REPO`. + + To access the current settings object, please use the `settings()` function. + + Note: There still exists a significant amount of settings that are not yet exposed here. + All new settings should go here, and the other ones should eventually be migrated. + """ + + model_config = SettingsConfigDict(env_prefix=ENVIRONMENT_PREFIX) + + conda_forge_org: str = Field("conda-forge", pattern=r"^[\w\.-]+$") + """ + The GitHub organization containing all feedstocks. Default: "conda-forge". + If you change the field name, you must also update the `ENV_CONDA_FORGE_ORG` constant. + """ + + graph_github_backend_repo: str = Field( + "regro/cf-graph-countyfair", pattern=r"^[\w\.-]+/[\w\.-]+$" + ) + """ + The GitHub repository to deploy to. Default: "regro/cf-graph-countyfair". 
+ If you change the field name, you must also update the `ENV_GRAPH_GITHUB_BACKEND_REPO` constant. + """ + + graph_repo_default_branch: str = "master" + """ + The default branch of the graph_github_backend_repo repository. + """ + + @property + def graph_github_backend_raw_base_url(self) -> str: + """ + The base URL for the GitHub raw view of the graph_github_backend_repo repository. + Example: https://github.com/regro/cf-graph-countyfair/raw/master. + """ + return f"https://github.com/{self.graph_github_backend_repo}/raw/{self.graph_repo_default_branch}/" + + github_runner_debug: bool = Field(False, alias="RUNNER_DEBUG") + """ + Whether we are executing within a GitHub Actions run with debug logging enabled. Default: False. + This is set automatically by GitHub Actions. + https://docs.github.com/en/actions/writing-workflows/choosing-what-your-workflow-does/store-information-in-variables#default-environment-variables + """ + + frac_update_upstream_versions: Fraction = 0.1 + """ + The fraction of feedstocks (randomly selected) to update in the update-upstream-versions job. + This is currently only respected when running concurrently (via process pool), not in sequential mode. + Therefore, you don't need to set this when debugging locally. + """ + + frac_make_graph: Fraction = 0.1 + """ + The fraction of feedstocks (randomly selected) to update in the make-graph job. + In tests or when debugging, you probably need to set this to 1.0 to update all feedstocks. + """ + + +_use_settings_override: BotSettings | None = None +""" +If not None, the application should use this settings object instead of generating a new one. +""" + + +def settings() -> BotSettings: + """Get the current settings object.""" + if _use_settings_override: + return _use_settings_override.model_copy() # prevent side-effects + return BotSettings() + + +@contextlib.contextmanager +def use_settings(s: BotSettings | None): + """ + Context manager that overrides the application settings with the values set in the provided settings object. + The new settings are used within the context of the `with` statement. + After exiting the context, the original settings are restored. + + DO NOT call this function within multithreading contexts, as it will override the settings for all threads, + and lead to unpredictable behavior. + + Parameters + ---------- + s + The settings object to use. None stands for the default settings behavior. The default settings + behavior reads the environment variables every time the settings are accessed. 
+ """ + global _use_settings_override + + old_settings = ( + _use_settings_override.model_copy() if _use_settings_override else None + ) + _use_settings_override = s.model_copy() if s else None + + yield + + _use_settings_override = old_settings diff --git a/conda_forge_tick/solver_checks.py b/conda_forge_tick/solver_checks.py index 4052da737..5a8e08573 100644 --- a/conda_forge_tick/solver_checks.py +++ b/conda_forge_tick/solver_checks.py @@ -11,6 +11,12 @@ ) from conda_forge_feedstock_ops.os_utils import sync_dirs +from conda_forge_tick.settings import ( + ENV_CONDA_FORGE_ORG, + ENV_GRAPH_GITHUB_BACKEND_REPO, + settings, +) + logger = logging.getLogger(__name__) @@ -71,8 +77,9 @@ def is_recipe_solvable( str(logging.getLevelName(logger.getEffectiveLevel())).upper() ) logger.debug( - f"is_recipe_solver log-level={logging.getLevelName(logger.getEffectiveLevel())}" - f" -> verbosity={verbosity}" + "is_recipe_solver log-level=%d -> verbosity=%d", + logging.getLevelName(logger.getEffectiveLevel()), + verbosity, ) if should_use_container(use_container=use_container): @@ -110,7 +117,6 @@ def _is_recipe_solvable_containerized( See the docstring of `is_recipe_solvable` for inputs and outputs. """ - args = [ "conda-forge-tick-container", "check-solvable", @@ -133,15 +139,25 @@ def _is_recipe_solvable_containerized( feedstock_dir, tmp_feedstock_dir, ignore_dot_git=True, update_git=False ) - logger.debug(f"host feedstock dir {feedstock_dir}: {os.listdir(feedstock_dir)}") logger.debug( - f"copied host feedstock dir {tmp_feedstock_dir}: {os.listdir(tmp_feedstock_dir)}" + "host feedstock dir %s: %s", feedstock_dir, os.listdir(feedstock_dir) + ) + logger.debug( + "copied host feedstock dir %s: %s", + tmp_feedstock_dir, + os.listdir(tmp_feedstock_dir), ) data = run_container_operation( args, mount_readonly=True, mount_dir=tmp_feedstock_dir, + extra_container_args=[ + "-e", + f"{ENV_CONDA_FORGE_ORG}={settings().conda_forge_org}", + "-e", + f"{ENV_GRAPH_GITHUB_BACKEND_REPO}={settings().graph_github_backend_repo}", + ], ) # When tempfile removes tempdir, it tries to reset permissions on subdirs. diff --git a/conda_forge_tick/status_report.py b/conda_forge_tick/status_report.py index d815f47eb..4d9eecf83 100644 --- a/conda_forge_tick/status_report.py +++ b/conda_forge_tick/status_report.py @@ -30,6 +30,7 @@ OSXArm, Replacement, Version, + WinArm64, ) from conda_forge_tick.os_utils import eval_cmd from conda_forge_tick.path_lengths import cyclic_topological_sort @@ -53,7 +54,15 @@ def _sorted_set_json(obj: Any) -> Any: - """For custom object serialization.""" + """If obj is a set, return sorted(obj). Else, raise TypeError. + + Used for custom object serialization. + + Raises + ------ + TypeError + If obj is not a set. 
+ """ if isinstance(obj, Set): return sorted(obj) raise TypeError(repr(obj) + " is not JSON serializable") @@ -64,8 +73,7 @@ def _ok_version(ver): def write_version_migrator_status(migrator, mctx): - """write the status of the version migrator""" - + """Write the status of the version migrator.""" out: Dict[str, Dict[str, str]] = { "queued": {}, # name -> pending version "errors": {}, # name -> error @@ -100,11 +108,26 @@ def write_version_migrator_status(migrator, mctx): else: new_version = vpri.get("new_version", False) + try: + if "new_version" in vpri: + old_vpri_version = vpri["new_version"] + had_vpri_version = True + else: + had_vpri_version = False + + vpri["new_version"] = new_version + + new_version_is_ok = _ok_version( + new_version + ) and not migrator.filter(attrs) + finally: + if had_vpri_version: + vpri["new_version"] = old_vpri_version + else: + del vpri["new_version"] + # run filter with new_version - if _ok_version(new_version) and not migrator.filter( - attrs, - new_version=new_version, - ): + if new_version_is_ok: attempts = vpri.get("new_version_attempts", {}).get(new_version, 0) if attempts == 0: out["queued"][node] = new_version @@ -144,8 +167,7 @@ def graph_migrator_status( migrator: Migrator, gx: nx.DiGraph, ) -> Tuple[dict, list, nx.DiGraph]: - """Gets the migrator progress for a given migrator""" - + """Get the migrator progress for a given migrator.""" migrator_name = get_migrator_name(migrator) num_viz = 0 @@ -455,6 +477,7 @@ def main() -> None: mgconf.get("longterm", False) or isinstance(migrator, ArchRebuild) or isinstance(migrator, OSXArm) + or isinstance(migrator, WinArm64) ): longterm_status[migrator_name] = f"{migrator.name} Migration Status" else: diff --git a/conda_forge_tick/update_deps.py b/conda_forge_tick/update_deps.py index 5bda5f11d..b3a34d5d3 100644 --- a/conda_forge_tick/update_deps.py +++ b/conda_forge_tick/update_deps.py @@ -11,12 +11,12 @@ from conda_forge_tick.depfinder_api import simple_import_to_pkg_map from conda_forge_tick.feedstock_parser import load_feedstock -from conda_forge_tick.lazy_json_backends import CF_TICK_GRAPH_GITHUB_BACKEND_BASE_URL from conda_forge_tick.make_graph import COMPILER_STUBS_WITH_STRONG_EXPORTS from conda_forge_tick.os_utils import pushd from conda_forge_tick.provide_source_code import provide_source_code from conda_forge_tick.pypi_name_mapping import _KNOWN_NAMESPACE_PACKAGES from conda_forge_tick.recipe_parser import CONDA_SELECTOR, CondaMetaYAML +from conda_forge_tick.settings import settings try: from grayskull.main import create_python_recipe @@ -67,7 +67,7 @@ for _ in range(10): r = requests.get( os.path.join( - CF_TICK_GRAPH_GITHUB_BACKEND_BASE_URL, + settings().graph_github_backend_raw_base_url, "ranked_hubs_authorities.json", ) ) @@ -198,6 +198,12 @@ def get_dep_updates_and_hints( to understand its contents. hint : str The dependency update hint. + + + Raises + ------ + ValueError + If the update kind is not supported. 
""" if update_deps == "disabled": # no dependency updates or hinting diff --git a/conda_forge_tick/update_prs.py b/conda_forge_tick/update_prs.py index ce6d5d4a4..e6d912b82 100644 --- a/conda_forge_tick/update_prs.py +++ b/conda_forge_tick/update_prs.py @@ -118,23 +118,20 @@ def _update_pr(update_function, dry_run, gx, job, n_jobs): with pr_json as attrs: attrs.update(**res) except (github3.GitHubError, github.GithubException) as e: - logger.error(f"GITHUB ERROR ON FEEDSTOCK: {name}") + logger.error("GITHUB ERROR ON FEEDSTOCK: %s", name) failed_refresh += 1 if is_github_api_limit_reached(): logger.warning("GitHub API error", exc_info=e) break except (github3.exceptions.ConnectionError, github.GithubException): - logger.error(f"GITHUB ERROR ON FEEDSTOCK: {name}") + logger.error("GITHUB ERROR ON FEEDSTOCK: %s", name) failed_refresh += 1 except Exception: - import traceback - logger.critical( - "ERROR ON FEEDSTOCK: {}: {} - {}".format( - name, - gx.nodes[name]["payload"]["pr_info"]["PRed"][i], - traceback.format_exc(), - ), + "ERROR ON FEEDSTOCK: %s: %s", + name, + gx.nodes[name]["payload"]["pr_info"]["PRed"][i], + exc_info=True, ) raise @@ -151,8 +148,8 @@ def update_pr_combined( _combined_update_function, dry_run, gx, job, n_jobs ) - logger.info(f"JSON Refresh failed for {failed_refresh} PRs") - logger.info(f"JSON Refresh succeed for {succeeded_refresh} PRs") + logger.info("JSON Refresh failed for %d PRs", failed_refresh) + logger.info("JSON Refresh succeed for %d PRs", succeeded_refresh) return gx diff --git a/conda_forge_tick/update_recipe/v1_recipe/build_number.py b/conda_forge_tick/update_recipe/v1_recipe/build_number.py index 131e88f10..bbf30c2ac 100644 --- a/conda_forge_tick/update_recipe/v1_recipe/build_number.py +++ b/conda_forge_tick/update_recipe/v1_recipe/build_number.py @@ -25,8 +25,8 @@ def old_build_number(recipe_text: str) -> int: ---------- * `recipe_text` - The recipe text. - Returns: - -------- + Returns + ------- * The build number. """ match = re.search(RE_PATTERN, recipe_text) @@ -85,8 +85,8 @@ def update_build_number(file: Path, new_build_number: int | Callable = 0) -> str * `file` - The path to the recipe file. * `new_build_number` - The new build number to use. (default: 0) - Returns: - -------- + Returns + ------- * The updated recipe as a string. """ data = _load_yaml(file) diff --git a/conda_forge_tick/update_recipe/version.py b/conda_forge_tick/update_recipe/version.py index b7aacf1fa..701488f69 100644 --- a/conda_forge_tick/update_recipe/version.py +++ b/conda_forge_tick/update_recipe/version.py @@ -30,6 +30,11 @@ from conda_forge_tick.hashing import hash_url from conda_forge_tick.lazy_json_backends import loads from conda_forge_tick.recipe_parser import CONDA_SELECTOR, CondaMetaYAML +from conda_forge_tick.settings import ( + ENV_CONDA_FORGE_ORG, + ENV_GRAPH_GITHUB_BACKEND_REPO, + settings, +) from conda_forge_tick.url_transforms import gen_transformed_urls from conda_forge_tick.utils import sanitize_string @@ -168,7 +173,6 @@ def _try_pypi_api(url_tmpl: str, context: MutableMapping, hash_type: str, cmeta: new_hash : str or None The new hash if found. 
""" - if "version" not in context: return None, None @@ -642,9 +646,13 @@ def _update_version_feedstock_dir_containerized(feedstock_dir, version, hash_typ chmod_plus_rwX(tmpdir, recursive=True) - logger.debug(f"host feedstock dir {feedstock_dir}: {os.listdir(feedstock_dir)}") logger.debug( - f"copied host feedstock dir {tmp_feedstock_dir}: {os.listdir(tmp_feedstock_dir)}" + "host feedstock dir %s: %s", feedstock_dir, os.listdir(feedstock_dir) + ) + logger.debug( + "copied host feedstock dir %s: %s", + tmp_feedstock_dir, + os.listdir(tmp_feedstock_dir), ) args = [ @@ -662,6 +670,12 @@ def _update_version_feedstock_dir_containerized(feedstock_dir, version, hash_typ mount_readonly=False, mount_dir=tmpdir, json_loads=loads, + extra_container_args=[ + "-e", + f"{ENV_CONDA_FORGE_ORG}={settings().conda_forge_org}", + "-e", + f"{ENV_GRAPH_GITHUB_BACKEND_REPO}={settings().graph_github_backend_repo}", + ], ) sync_dirs( diff --git a/conda_forge_tick/update_sources.py b/conda_forge_tick/update_sources.py index a772cf56e..b0794d79e 100644 --- a/conda_forge_tick/update_sources.py +++ b/conda_forge_tick/update_sources.py @@ -20,7 +20,7 @@ # TODO: parse_version has bad type annotations from pkg_resources import parse_version -from conda_forge_tick.utils import parse_meta_yaml +from conda_forge_tick.utils import parse_meta_yaml, parse_recipe_yaml from .hashing import hash_url @@ -247,7 +247,7 @@ def init(self) -> None: CRAN_INDEX = self._get_cran_index(session) logger.debug("Cran source initialized") except Exception: - logger.error("Cran initialization failed", exc_info=True) + logger.exception("Cran initialization failed") CRAN_INDEX = {} def _get_cran_index(self, session: requests.Session) -> dict: @@ -332,7 +332,7 @@ def init(self) -> None: ROS_DISTRO_INDEX = self.parse_idx("melodic") logger.info("ROS Distro source initialized") except Exception: - logger.error("ROS Distro initialization failed", exc_info=True) + logger.exception("ROS Distro initialization failed") ROS_DISTRO_INDEX = {} def get_url(self, meta_yaml: "RecipeTypedDict") -> Optional[str]: @@ -373,11 +373,11 @@ def get_sha256(url: str) -> Optional[str]: return None -def url_exists(url: str, timeout=2) -> bool: +def url_exists(url: str, timeout=5) -> bool: """ We use curl/wget here, as opposed requests.head, because - github urls redirect with a 3XX code even if the file doesn't exist - - requests cannot handle ftp + - requests cannot handle ftp. 
""" if not any(slug in url for slug in CURL_ONLY_URL_SLUGS): try: @@ -427,14 +427,14 @@ class BaseRawURL(AbstractSource): name = "BaseRawURL" next_ver_func = None - def get_url(self, meta_yaml) -> Optional[str]: - if "feedstock_name" not in meta_yaml: + def get_url(self, attrs) -> Optional[str]: + if "feedstock_name" not in attrs: return None - if "version" not in meta_yaml: + if "version" not in attrs: return None # TODO: pull this from the graph itself - content = meta_yaml["raw_meta_yaml"] + content = attrs["raw_meta_yaml"] if any(ln.startswith("{% set version") for ln in content.splitlines()): has_version_jinja2 = True @@ -443,9 +443,9 @@ def get_url(self, meta_yaml) -> Optional[str]: # this while statement runs until a bad version is found # then it uses the previous one - orig_urls = urls_from_meta(meta_yaml["meta_yaml"]) + orig_urls = urls_from_meta(attrs["meta_yaml"]) logger.debug("orig urls: %s", orig_urls) - current_ver = meta_yaml["version"] + current_ver = attrs["version"] current_sha256 = None orig_ver = current_ver found = True @@ -469,7 +469,10 @@ def get_url(self, meta_yaml) -> Optional[str]: new_content = "\n".join(_new_lines) else: new_content = content.replace(orig_ver, next_ver) - new_meta = parse_meta_yaml(new_content) + if attrs["meta_yaml"].get("schema_version", 0) == 0: + new_meta = parse_meta_yaml(new_content) + else: + new_meta = parse_recipe_yaml(new_content) new_urls = urls_from_meta(new_meta) if len(new_urls) == 0: logger.debug("No URL in meta.yaml") @@ -481,7 +484,7 @@ def get_url(self, meta_yaml) -> Optional[str]: # this URL looks bad if these things happen if ( str(new_meta["package"]["version"]) != next_ver - or meta_yaml["url"] == url + or attrs.get("url", "") == url or url in orig_urls ): logger.debug( @@ -491,7 +494,7 @@ def get_url(self, meta_yaml) -> Optional[str]: 'str(new_meta["package"]["version"]) != next_ver', str(new_meta["package"]["version"]) != next_ver, 'meta_yaml["url"] == url', - meta_yaml["url"] == url, + attrs.get("url", "") == url, "url in orig_urls", url in orig_urls, ) @@ -562,12 +565,13 @@ def set_version_prefix(self, version: str, split_url: list[str]): self.version_prefix = self.get_version_prefix(version, split_url) if self.version_prefix is None: return - logger.debug(f"Found version prefix from url: {self.version_prefix}") + logger.debug("Found version prefix from url: %s", self.version_prefix) self.ver_prefix_remove = [self.version_prefix] + self.ver_prefix_remove def get_version_prefix(self, version: str, split_url: list[str]): - """Returns prefix for the first split that contains version. If prefix - is empty - returns None.""" + """Return prefix for the first split that contains version. If prefix + is empty - returns None. + """ r = re.compile(rf"^(.*){version}") for split in split_url: match = r.match(split) @@ -753,7 +757,12 @@ def get_version(self, url: str) -> Optional[str]: def _tier_directory(package: str) -> str: """Depending on the length of the package name, the tier directory structure will differ. - Documented here: https://doc.rust-lang.org/cargo/reference/registry-index.html#index-files + Documented here: https://doc.rust-lang.org/cargo/reference/registry-index.html#index-files. + + Raises + ------ + ValueError + If the package name is empty. 
""" if not package: raise ValueError("Package name cannot be empty") diff --git a/conda_forge_tick/update_upstream_versions.py b/conda_forge_tick/update_upstream_versions.py index abc5ac355..26c68ff47 100644 --- a/conda_forge_tick/update_upstream_versions.py +++ b/conda_forge_tick/update_upstream_versions.py @@ -30,6 +30,11 @@ from conda_forge_tick.cli_context import CliContext from conda_forge_tick.executors import executor from conda_forge_tick.lazy_json_backends import LazyJson, dumps +from conda_forge_tick.settings import ( + ENV_CONDA_FORGE_ORG, + ENV_GRAPH_GITHUB_BACKEND_REPO, + settings, +) from conda_forge_tick.update_sources import ( CRAN, NPM, @@ -52,15 +57,21 @@ RNG = secrets.SystemRandom() -RANDOM_FRAC_TO_UPDATE = 0.1 - def ignore_version(attrs: Mapping[str, Any], version: str) -> bool: - """ - Check if a version should be ignored based on the `conda-forge.yml` file. - :param attrs: The node attributes - :param version: The version to check - :return: True if the version should be ignored, False otherwise + """Check if a version should be ignored based on the `conda-forge.yml` file. + + Parameters + ---------- + attrs + The node attributes. + version + The version to check. + + Returns + ------- + bool + True if the version should be ignored, False otherwise. """ versions_to_ignore = get_keys_default( attrs, @@ -78,8 +89,7 @@ def get_latest_version_local( attrs: Mapping[str, Any], sources: Iterable[AbstractSource], ) -> Dict[str, Union[Literal[False], str]]: - """ - Given a package, return the new version information to be written into the cf-graph. + """Given a package, return the new version information to be written into the cf-graph. Parameters ---------- @@ -92,7 +102,7 @@ def get_latest_version_local( Returns ------- - version_data : dict + dict The new version information. """ version_data: Dict[str, Union[Literal[False], str]] = {"new_version": False} @@ -122,19 +132,25 @@ def get_latest_version_local( break else: logger.warning( - f"Package {name} requests version source '{vs}' which is not available. Skipping.", + "Package %s requests version source '%s' which is not available. 
Skipping.", + name, + vs, ) sources_to_use = sources_to_use_list logger.debug( - f"{name} defines the following custom version sources: {[source.name for source in sources_to_use]}", + "%s defines the following custom version sources: %s", + name, + [source.name for source in sources_to_use], ) skipped_sources = [ source.name for source in sources if source not in sources_to_use ] if skipped_sources: - logger.debug(f"Therefore, we skip the following sources: {skipped_sources}") + logger.debug( + "Therefore, we skip the following sources: %s", skipped_sources + ) else: logger.debug("No sources are skipped.") @@ -144,21 +160,24 @@ def get_latest_version_local( exceptions = [] for source in sources_to_use: try: - logger.debug(f"Fetching latest version for {name} from {source.name}...") + logger.debug("Fetching latest version for %s from %s...", name, source.name) url = source.get_url(attrs) if url is None: continue - logger.debug(f"Using URL {url}") + logger.debug("Using URL %s", url) ver = source.get_version(url) if not ver: - logger.debug(f"Upstream: Could not find version on {source.name}") + logger.debug("Upstream: Could not find version on %s", source.name) continue - logger.debug(f"Found version {ver} on {source.name}") + logger.debug("Found version %s on %s", ver, source.name) version_data["new_version"] = ver break except Exception as e: logger.error( - f"An exception occurred while fetching {name} from {source.name}: {e}", + "An exception occurred while fetching %s from %s.", + name, + source.name, + exc_info=e, ) exceptions.append(e) @@ -176,7 +195,7 @@ def get_latest_version_local( if ignore_version(attrs, new_version): logger.debug( - f"Ignoring version {new_version} because it is in the exclude list.", + "Ignoring version %s because it is in the exclude list.", new_version ) version_data["new_version"] = False @@ -188,8 +207,7 @@ def get_latest_version_containerized( attrs: Mapping[str, Any], sources: Iterable[AbstractSource], ) -> Dict[str, Union[Literal[False], str]]: - """ - Given a package, return the new version information to be written into the cf-graph. + """Given a package, return the new version information to be written into the cf-graph. **This function runs the version parsing in a container.** @@ -204,7 +222,7 @@ def get_latest_version_containerized( Returns ------- - version_data : dict + dict The new version information. """ if "feedstock_name" not in attrs: @@ -225,6 +243,12 @@ def get_latest_version_containerized( return run_container_operation( args, input=json_blob, + extra_container_args=[ + "-e", + f"{ENV_CONDA_FORGE_ORG}={settings().conda_forge_org}", + "-e", + f"{ENV_GRAPH_GITHUB_BACKEND_REPO}={settings().graph_github_backend_repo}", + ], ) @@ -234,8 +258,7 @@ def get_latest_version( sources: Iterable[AbstractSource], use_container: bool | None = None, ) -> Dict[str, Union[Literal[False], str]]: - """ - Given a package, return the new version information to be written into the cf-graph. + """Given a package, return the new version information to be written into the cf-graph. Parameters ---------- @@ -253,7 +276,7 @@ def get_latest_version( Returns ------- - version_data : dict + dict The new version information. """ if should_use_container(use_container=use_container): @@ -263,6 +286,20 @@ def get_latest_version( def get_job_number_for_package(name: str, n_jobs: int): + """Get the job number for a package. + + Parameters + ---------- + name + The name of the package. + n_jobs + The total number of jobs. 
+ + Returns + ------- + int + The job number for the package. + """ return abs(int(hashlib.sha1(name.encode("utf-8")).hexdigest(), 16)) % n_jobs + 1 @@ -271,48 +308,72 @@ def filter_nodes_for_job( job: int, n_jobs: int, ) -> Iterator[Tuple[str, T]]: + """Filter nodes for a specific job. + + Parameters + ---------- + all_nodes + All nodes to filter. + job + The job number. + n_jobs + The total number of jobs. + + Returns + ------- + Iterator[Tuple[str, T]] + The filtered nodes. + """ return (t for t in all_nodes if get_job_number_for_package(t[0], n_jobs) == job) def include_node(package_name: str, payload_attrs: Mapping) -> bool: - """ - Given a package name and its node attributes, determine whether + """Given a package name and its node attributes, determine whether the package should be included in the update process. Also log the reason why a package is not included. - :param package_name: The name of the package - :param payload_attrs: The cf-graph node payload attributes for the package - :return: True if the package should be included, False otherwise + Parameters + ---------- + package_name + The name of the package. + payload_attrs + The cf-graph node payload attributes for the package. + + Returns + ------- + bool + True if the package should be included, False otherwise. """ pr_info = payload_attrs.get("pr_info", {}) if payload_attrs.get("parsing_error"): logger.debug( - f"Skipping {package_name} because it is marked as having a parsing error. The error is printed below.\n" - f"{payload_attrs['parsing_error']}", + "Skipping %s because it is marked as having a parsing error. The error is printed below.\n%s", + package_name, + payload_attrs["parsing_error"], ) return False if payload_attrs.get("archived"): - logger.debug( - f"Skipping {package_name} because it is marked as archived.", - ) + logger.debug("Skipping %s because it is marked as archived.", package_name) return False if pr_info.get("bad") and "Upstream" not in pr_info.get("bad"): logger.debug( - f"Skipping {package_name} because its corresponding Pull Request is " - f"marked as bad with a non-upstream issue. The error is printed below.\n" - f"{pr_info['bad']}", + "Skipping %s because its corresponding Pull Request is " + "marked as bad with a non-upstream issue. The error is printed below.\n%s", + package_name, + pr_info["bad"], ) return False if pr_info.get("bad"): logger.debug( - f"Note: {package_name} has a bad Pull Request, but this is marked as an upstream issue. " - f"Therefore, it will be included in the update process. The error is printed below.\n" - f"{pr_info['bad']}", + "Note: %s has a bad Pull Request, but this is marked as an upstream issue. " + "Therefore, it will be included in the update process. 
The error is printed below.\n%s", + package_name, + pr_info["bad"], ) # no return here @@ -337,12 +398,17 @@ def _update_upstream_versions_sequential( se = repr(e) except Exception as ee: se = f"Bad exception string: {ee}" - logger.warning(f"Warning: Error getting upstream version of {node}: {se}") + logger.warning( + "Warning: Error getting upstream version of %s: %s", node, se + ) version_data["bad"] = "Upstream: Error getting upstream version" else: logger.info( - f"# {node_count:<5} - {node} - {attrs.get('version')} " - f"-> {version_data.get('new_version')}", + "# %-5s - %s - %s -> %s", + node_count, + node, + attrs.get("version"), + version_data.get("new_version"), ) logger.debug("writing out file") @@ -365,7 +431,7 @@ def _update_upstream_versions_process_pool( ncols=80, desc="submitting version update jobs", ): - if RNG.random() >= RANDOM_FRAC_TO_UPDATE: + if RNG.random() >= settings().frac_update_upstream_versions: continue futures.update( @@ -398,21 +464,21 @@ def _update_upstream_versions_process_pool( except Exception as ee: se = f"Bad exception string: {ee}" logger.error( - "itr % 5d - eta % 5ds: " - "Error getting upstream version of %s: %s" - % (n_left, eta, node, se), + "itr % 5d - eta % 5ds: Error getting upstream version of %s: %s", + n_left, + eta, + node, + se, ) version_data["bad"] = "Upstream: Error getting upstream version" else: logger.info( - "itr % 5d - eta % 5ds: %s - %s -> %s" - % ( - n_left, - eta, - node, - attrs.get("version", ""), - version_data["new_version"], - ), + "itr % 5d - eta % 5ds: %s - %s -> %s", + n_left, + eta, + node, + attrs.get("version", ""), + version_data["new_version"], ) # writing out file lazyjson = LazyJson(f"versions/{node}.json") @@ -445,17 +511,25 @@ def update_upstream_versions( n_jobs=1, package: Optional[str] = None, ) -> None: - """ - Update the upstream versions of packages. - :param gx: The conda forge graph - :param sources: The sources to use for fetching the upstream versions - :param debug: Whether to run in debug mode - :param job: The job number - :param n_jobs: The total number of jobs - :param package: The package to update. If None, update all packages. + """Update the upstream versions of packages. + + Parameters + ---------- + gx + The conda forge graph. + sources + The sources to use for fetching the upstream versions. + debug + Whether to run in debug mode. + job + The job number. + n_jobs + The total number of jobs. + package + The package to update. If None, update all packages. """ if package and package not in gx.nodes: - logger.error(f"Package {package} not found in graph. Exiting.") + logger.error("Package %s not found in graph. Exiting.", package) return # In the future, we should have some sort of typed graph structure @@ -466,7 +540,7 @@ def update_upstream_versions( job_nodes = filter_nodes_for_job(all_nodes, job, n_jobs) if not job_nodes: - logger.info(f"No packages to update for job {job}") + logger.info("No packages to update for job %d", job) return def extract_payload(node: Tuple[str, Mapping[str, Mapping]]) -> Tuple[str, Mapping]: @@ -502,12 +576,20 @@ def main( n_jobs: int = 1, package: Optional[str] = None, ) -> None: - """ - Main function for updating the upstream versions of packages. - :param ctx: The CLI context. - :param job: The job number. - :param n_jobs: The total number of jobs. - :param package: The package to update. If None, update all packages. + """Update the upstream version of packages. + + This is the main entry point for the update function. 
+ + Parameters + ---------- + ctx + The CLI context. + job + The job number. + n_jobs + The total number of jobs. + package + The package to update. If None, update all packages. """ logger.info("Reading graph") # Graph enabled for inspection diff --git a/conda_forge_tick/utils.py b/conda_forge_tick/utils.py index ae169528f..df3ab6645 100644 --- a/conda_forge_tick/utils.py +++ b/conda_forge_tick/utils.py @@ -41,6 +41,7 @@ from . import sensitive_env from .lazy_json_backends import LazyJson from .recipe_parser import CondaMetaYAML +from .settings import ENV_CONDA_FORGE_ORG, ENV_GRAPH_GITHUB_BACKEND_REPO, settings if typing.TYPE_CHECKING: from mypy_extensions import TypedDict @@ -183,12 +184,12 @@ def fold_log_lines(title): def yaml_safe_load(stream): - """Load a yaml doc safely""" + """Load a yaml doc safely.""" return ruamel.yaml.YAML(typ="safe", pure=True).load(stream) def yaml_safe_dump(data, stream=None): - """Dump a yaml object""" + """Dump a yaml object.""" yaml = ruamel.yaml.YAML(typ="safe", pure=True) yaml.default_flow_style = False return yaml.dump(data, stream=stream) @@ -208,7 +209,6 @@ def _render_meta_yaml(text: str, for_pinning: bool = False, **kwargs) -> str: The text of the meta.yaml with Jinja2 variables replaced. """ - cfg = dict(**kwargs) env = jinja2.sandbox.SandboxedEnvironment(undefined=NullUndefined) @@ -324,6 +324,12 @@ def parse_recipe_yaml_containerized( args, input=text, mount_readonly=True, + extra_container_args=[ + "-e", + f"{ENV_CONDA_FORGE_ORG}={settings().conda_forge_org}", + "-e", + f"{ENV_GRAPH_GITHUB_BACKEND_REPO}={settings().graph_github_backend_repo}", + ], ) @@ -407,8 +413,12 @@ def parse_recipe_yaml_local( dict : The parsed YAML dict. If parsing fails, returns an empty dict. May raise for some errors. Have fun. - """ + Raises + ------ + RuntimeError + If the recipe YAML rendering fails or no output recipes are found. + """ rendered_recipes = _render_recipe_yaml( text, cbc_path=cbc_path, platform_arch=platform_arch ) @@ -451,7 +461,7 @@ def _render_recipe_yaml( cbc_path: str | None = None, ) -> list[dict[str, Any]]: """ - Renders the given recipe YAML text using the `rattler-build` command-line tool. + Render the given recipe YAML text using the `rattler-build` command-line tool. Parameters ---------- @@ -514,7 +524,7 @@ def replace_name_key(d: dict[str, Any]) -> Any: def _parse_recipes( validated_recipes: list[dict[str, Any]], ) -> "RecipeTypedDict": - """Parses validated recipes and transform them to fit `RecipeTypedDict` + """Parse validated recipes and transform them to fit `RecipeTypedDict`. Parameters ---------- @@ -632,8 +642,7 @@ def _parse_recipes( def _parse_recipe_yaml_requirements(requirements) -> None: - """Parse requirement section of render by rattler-build to fit `RecipeTypedDict` - + """Parse requirement section of render by rattler-build to fit `RecipeTypedDict`. When rendering the recipe by rattler build, `requirements["run_exports"]["weak"]` gives a list looking like: @@ -686,6 +695,39 @@ def _remove_none_values(d): return {k: _remove_none_values(v) for k, v in d.items() if v is not None} +def get_recipe_schema_version(feedstock_attrs: Mapping[str, Any]) -> int: + """ + Get the recipe schema version from the feedstock attributes. + + Parameters + ---------- + feedstock_attrs : Mapping[str, Any] + The feedstock attributes. + + Returns + ------- + int + The recipe version. If it does not exist in the feedstock attributes, + it defaults to 0. + + Raises + ------ + ValueError + If the recipe version is not an integer, i.e. 
the attributes are invalid. + """ + version = get_keys_default( + feedstock_attrs, + ["meta_yaml", "schema_version"], + {}, + 0, + ) + + if not isinstance(version, int): + raise ValueError("Recipe version is not an integer") + + return version + + def parse_meta_yaml( text: str, for_pinning=False, @@ -811,6 +853,12 @@ def _run(_args, _mount_dir): input=text, mount_readonly=True, mount_dir=_mount_dir, + extra_container_args=[ + "-e", + f"{ENV_CONDA_FORGE_ORG}={settings().conda_forge_org}", + "-e", + f"{ENV_GRAPH_GITHUB_BACKEND_REPO}={settings().graph_github_backend_repo}", + ], ) if (cbc_path is not None and os.path.exists(cbc_path)) or ( @@ -872,6 +920,11 @@ def parse_meta_yaml_local( dict : The parsed YAML dict. If parsing fails, returns an empty dict. May raise for some errors. Have fun. + + Raises + ------ + RuntimeError + If parsing fails. """ def _run(*, use_orig_cbc_path): @@ -957,12 +1010,10 @@ def _parse_meta_yaml_impl( def _run_parsing(): logger.debug( - "parsing for platform %s with cbc %s and arch %s" - % ( - platform, - cbc_path, - arch, - ), + "parsing for platform %s with cbc %s and arch %s", + platform, + cbc_path, + arch, ) config = conda_build.config.get_or_merge_config( None, @@ -1149,8 +1200,15 @@ def load_existing_graph(filename: str = DEFAULT_GRAPH_FILENAME) -> nx.DiGraph: If empty JSON is encountered, a ValueError is raised. If you expect the graph to be possibly empty JSON (i.e. not initialized), use load_graph. - :return: the graph - :raises ValueError if the file contains empty JSON (or did not exist before) + Returns + ------- + nx.DiGraph + The graph loaded from the file. + + Raises + ------ + ValueError + If the file contains empty JSON. """ gx = load_graph(filename) if gx is None: @@ -1159,13 +1217,14 @@ def load_existing_graph(filename: str = DEFAULT_GRAPH_FILENAME) -> nx.DiGraph: def load_graph(filename: str = DEFAULT_GRAPH_FILENAME) -> Optional[nx.DiGraph]: - """ - Load the graph from a file using the lazy json backend. + """Load the graph from a file using the lazy json backend. If the file does not exist, it is initialized with empty JSON. If you expect the graph to be non-empty JSON, use load_existing_graph. - :return: the graph, or None if the file is empty JSON (or - :raises FileNotFoundError if the file does not exist + Returns + ------- + nx.DiGraph or None + The graph, or None if the file is empty JSON """ dta = copy.deepcopy(LazyJson(filename).data) if dta: @@ -1223,16 +1282,19 @@ def as_iterable(x: T) -> Tuple[T]: ... @typing.no_type_check def as_iterable(iterable_or_scalar): - """Utility for converting an object to an iterable. + """Convert an object into an iterable. + Parameters ---------- iterable_or_scalar : anything + Returns ------- l : iterable If `obj` was None, return the empty tuple. If `obj` was not iterable returns a 1-tuple containing `obj`. 
Otherwise return `obj` + Notes ----- Although both string types and dictionaries are iterable in Python, we are @@ -1240,7 +1302,7 @@ def as_iterable(iterable_or_scalar): returns (dict, ) and as_iterable(string) returns (string, ) Examples - --------- + -------- >>> as_iterable(1) (1,) >>> as_iterable([1, 2, 3]) @@ -1250,7 +1312,6 @@ def as_iterable(iterable_or_scalar): >>> as_iterable({'a': 1}) ({'a': 1}, ) """ - if iterable_or_scalar is None: return () elif isinstance(iterable_or_scalar, (str, bytes)): @@ -1314,20 +1375,39 @@ def change_log_level(logger, new_level): logger.setLevel(saved_logger_level) -def run_command_hiding_token(args: list[str], token: str) -> int: - """ - Run a command and hide the token in the output. +def run_command_hiding_token(args: list[str], token: str, **kwargs) -> int: + """Run a command and hide the token in the output. Prints the outputs (stdout and stderr) of the subprocess.CompletedProcess object. The token or tokens will be replaced with a string of asterisks of the same length. If stdout or stderr is None, it will not be printed. - :param args: The command to run. - :param token: The token to hide in the output. - :return: The return code of the command. + Parameters + ---------- + args + The command to run. + token + The token to hide in the output. + kwargs + additional arguments for subprocess.run + + Returns + ------- + int + The return code of the command. + + Raises + ------ + ValueError + If the kwargs contain 'text', 'stdout', or 'stderr'. """ - p = subprocess.run(args, text=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + if kwargs.keys() & {"text", "stdout", "stderr"}: + raise ValueError("text, stdout, and stderr are not allowed in kwargs") + + p = subprocess.run( + args, text=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, **kwargs + ) out, err = p.stdout, p.stderr @@ -1351,7 +1431,7 @@ def extract_section_from_yaml_text( section_name: str, exclude_requirements: bool = False, ) -> list[str]: - """Extract a section from YAML as text + """Extract a section from YAML as text. Parameters ---------- @@ -1370,7 +1450,15 @@ def extract_section_from_yaml_text( A list of strings for the extracted sections. """ # normalize the indents etc. - yaml_text = CondaMetaYAML(yaml_text).dumps() + try: + yaml_text = CondaMetaYAML(yaml_text).dumps() + except Exception as e: + logger.debug( + "Failed to normalize the YAML text due to error %s. 
We will try to parse anyways!", + repr(e), + ) + pass + lines = yaml_text.splitlines() in_requirements = False diff --git a/docs/runner_allocation.md b/docs/runner_allocation.md index fd418678d..a7d6218fb 100644 --- a/docs/runner_allocation.md +++ b/docs/runner_allocation.md @@ -18,7 +18,7 @@ These are split across our workflows as follows: - `bot-update-nodes` - 3 runners - `bot-versions` - 6 runners - `docker` (on demand) - 1 runner -- `keepalive` - 1 runner +- `bot-keepalive` (periodic) - 1 runner - `relock` - 1 runner - `test-model` (daily for ~4 minutes, on demand) - 1 runner - `tests` (on demand) - 1 runner diff --git a/environment.yml b/environment.yml index 3a0a72649..f73feab82 100644 --- a/environment.yml +++ b/environment.yml @@ -14,7 +14,7 @@ dependencies: - conda - conda-lock - conda-forge-feedstock-check-solvable >=0.8.0 - - conda-forge-feedstock-ops >=0.9.0 + - conda-forge-feedstock-ops >=0.12.0 - conda-forge-pinning - conda-libmamba-solver - conda-forge-metadata >=0.3.0 @@ -24,6 +24,7 @@ dependencies: - curl - depfinder - distributed + - fastapi - feedparser - frozendict - git @@ -32,6 +33,7 @@ dependencies: - jinja2 - lockfile - mamba >=0.23 + - mitmproxy - msgpack-python - networkx !=2.8.1 - numpy @@ -40,6 +42,7 @@ dependencies: - psutil - pydantic - pydantic-extra-types + - pydantic-settings - pygithub - pymongo - pynamodb @@ -62,6 +65,7 @@ dependencies: - yaml - pip - pytest <8.1.0 + - pytest-xprocess - codecov - requests-mock - pre-commit diff --git a/pyproject.toml b/pyproject.toml index de2226870..049549c49 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -39,8 +39,9 @@ write_to_template = "__version__ = '{version}'\n" extend-exclude = ["conda_forge_tick/migrators/disabled/legacy.py"] [tool.ruff.lint] -select = ["E", "F", "I", "W"] -ignore = ["E501"] +select = ["E", "F", "I", "W", "LOG", "G", "D2", "D3", "D4", "DOC202", "DOC403", "DOC5"] +ignore = ["E501", "D205"] +pydocstyle.convention = "numpy" preview = true [tool.ruff.lint.pycodestyle] diff --git a/tests/conftest.py b/tests/conftest.py index 4cdb1bad3..a0c8a9043 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,10 +1,13 @@ import os +import tempfile from types import TracebackType from typing import Self +import networkx as nx import pytest from conda_forge_tick import global_sensitive_env +from conda_forge_tick.lazy_json_backends import LazyJson @pytest.fixture @@ -103,8 +106,31 @@ def data(self): return self +@pytest.fixture +def test_graph(): + with tempfile.TemporaryDirectory() as tmpdir: + gx = nx.DiGraph() + lzj = LazyJson(os.path.join(tmpdir, "conda.json")) + with lzj as attrs: + attrs.update({"reqs": ["python"]}) + gx.add_node("conda", payload=lzj) + gx.graph["outputs_lut"] = {} + + yield gx + + def pytest_configure(config): config.addinivalue_line( "markers", "mongodb: mark tests that run with mongodb", ) + + +@pytest.fixture +def temporary_environment(): + try: + old_env = os.environ.copy() + yield + finally: + os.environ.clear() + os.environ.update(old_env) diff --git a/tests/test_auto_tick.py b/tests/test_auto_tick.py index 1676e0b4b..e0c9680e2 100644 --- a/tests/test_auto_tick.py +++ b/tests/test_auto_tick.py @@ -100,10 +100,7 @@ def test_prepare_feedstock_repository_repository_not_found(caplog): def test_prepare_feedstock_repository_complete_dry_run(): - """ - This test really clones the repository using the DryRunBackend. 
- """ - + """Really clones the repository using the DryRunBackend.""" backend = DryRunBackend() context = FeedstockContext( @@ -130,10 +127,7 @@ def test_prepare_feedstock_repository_complete_dry_run(): def test_prepare_feedstock_repository_complete_fail(): - """ - This test really clones the repository using the DryRunBackend. - """ - + """Really clones the repository using the DryRunBackend.""" backend = DryRunBackend() context = FeedstockContext( diff --git a/tests/test_cfyaml_cleanup_migrator.py b/tests/test_cfyaml_cleanup_migrator.py index 5e1f9af0f..636f4a837 100644 --- a/tests/test_cfyaml_cleanup_migrator.py +++ b/tests/test_cfyaml_cleanup_migrator.py @@ -1,14 +1,18 @@ import os +import networkx as nx import pytest from ruamel.yaml import YAML from test_migrators import run_test_migration from conda_forge_tick.migrators import CondaForgeYAMLCleanup, Version +TOTAL_GRAPH = nx.DiGraph() +TOTAL_GRAPH.graph["outputs_lut"] = {} VERSION_CF = Version( set(), piggy_back_migrations=[CondaForgeYAMLCleanup()], + total_graph=TOTAL_GRAPH, ) YAML_PATHS = [ diff --git a/tests/test_container_tasks.py b/tests/test_container_tasks.py index 878bf62d4..97c7f5a2c 100644 --- a/tests/test_container_tasks.py +++ b/tests/test_container_tasks.py @@ -9,6 +9,7 @@ import tempfile import conda_smithy +import networkx as nx import pytest from conda.models.version import VersionOrder from conda_forge_feedstock_ops.container_utils import ( @@ -48,7 +49,9 @@ parse_meta_yaml_containerized, ) -VERSION = Version(set()) +TOTAL_GRAPH = nx.DiGraph() +TOTAL_GRAPH.graph["outputs_lut"] = {} +VERSION = Version(set(), total_graph=TOTAL_GRAPH) YAML_PATH = os.path.join(os.path.dirname(__file__), "test_yaml") @@ -608,7 +611,7 @@ def test_container_tasks_is_recipe_solvable_containerized(use_containers): assert res_cont == res_local -yaml_rebuild = MigrationYaml(yaml_contents="{}", name="hi") +yaml_rebuild = MigrationYaml(yaml_contents="{}", name="hi", total_graph=TOTAL_GRAPH) yaml_rebuild.cycles = [] @@ -732,7 +735,7 @@ def test_migration_runner_run_migration_containerized_version( pmy["req"] |= set(_set) pmy["raw_meta_yaml"] = inp pmy.update(kwargs) - pmy["new_version"] = new_ver + pmy["version_pr_info"] = {"new_version": new_ver} data = run_migration_containerized( migrator=m, diff --git a/tests/test_contexts.py b/tests/test_contexts.py index 6b5091e23..6e51d907c 100644 --- a/tests/test_contexts.py +++ b/tests/test_contexts.py @@ -50,6 +50,11 @@ def test_feedstock_context_git_repo_owner(): context = FeedstockContext("TEST-FEEDSTOCK-NAME", demo_attrs) assert context.git_repo_owner == "conda-forge" + override_git_repo_owner_context = FeedstockContext( + "TEST-FEEDSTOCK-NAME", demo_attrs, git_repo_owner="GIT_REPO_OWNER" + ) + assert override_git_repo_owner_context.git_repo_owner == "GIT_REPO_OWNER" + def test_feedstock_context_git_repo_name(): context = FeedstockContext("TEST-FEEDSTOCK-NAME", demo_attrs) diff --git a/tests/test_cos7_config_migrator.py b/tests/test_cos7_config_migrator.py index cff9aa4b6..13ee9fca0 100644 --- a/tests/test_cos7_config_migrator.py +++ b/tests/test_cos7_config_migrator.py @@ -1,14 +1,18 @@ import os +import networkx as nx import pytest from test_migrators import run_test_migration from conda_forge_tick.migrators import Cos7Config, Version from conda_forge_tick.migrators.cos7 import REQUIRED_RE_LINES, _has_line_set +TOTAL_GRAPH = nx.DiGraph() +TOTAL_GRAPH.graph["outputs_lut"] = {} VERSION_COS7 = Version( set(), piggy_back_migrations=[Cos7Config()], + total_graph=TOTAL_GRAPH, ) YAML_PATHS = [ diff --git 
a/tests/test_cross_compile.py b/tests/test_cross_compile.py index 8e8530538..52d907973 100644 --- a/tests/test_cross_compile.py +++ b/tests/test_cross_compile.py @@ -1,5 +1,6 @@ from pathlib import Path +import networkx as nx import pytest from flaky import flaky from test_migrators import run_test_migration @@ -10,8 +11,10 @@ CrossPythonMigrator, CrossRBaseMigrator, GuardTestingMigrator, + GuardTestingWinMigrator, NoCondaInspectMigrator, UpdateCMakeArgsMigrator, + UpdateCMakeArgsWinMigrator, UpdateConfigSubGuessMigrator, Version, ) @@ -22,9 +25,14 @@ ] YAML_PATH = YAML_PATHS[0] +TOTAL_GRAPH = nx.DiGraph() +TOTAL_GRAPH.graph["outputs_lut"] = {} + config_migrator = UpdateConfigSubGuessMigrator() guard_testing_migrator = GuardTestingMigrator() +guard_testing_win_migrator = GuardTestingWinMigrator() cmake_migrator = UpdateCMakeArgsMigrator() +cmake_win_migrator = UpdateCMakeArgsWinMigrator() cross_python_migrator = CrossPythonMigrator() cross_rbase_migrator = CrossRBaseMigrator() b2h_migrator = Build2HostMigrator() @@ -33,36 +41,51 @@ version_migrator_autoconf = Version( set(), - piggy_back_migrations=[config_migrator, cmake_migrator, guard_testing_migrator], + piggy_back_migrations=[ + config_migrator, + cmake_migrator, + cmake_win_migrator, + guard_testing_migrator, + guard_testing_win_migrator, + ], + total_graph=TOTAL_GRAPH, ) version_migrator_cmake = Version( set(), piggy_back_migrations=[ cmake_migrator, + cmake_win_migrator, guard_testing_migrator, + guard_testing_win_migrator, cross_rbase_migrator, cross_python_migrator, ], + total_graph=TOTAL_GRAPH, ) version_migrator_python = Version( set(), piggy_back_migrations=[cross_python_migrator], + total_graph=TOTAL_GRAPH, ) version_migrator_rbase = Version( set(), piggy_back_migrations=[cross_rbase_migrator], + total_graph=TOTAL_GRAPH, ) version_migrator_b2h = Version( set(), piggy_back_migrations=[b2h_migrator], + total_graph=TOTAL_GRAPH, ) version_migrator_nci = Version( set(), piggy_back_migrations=[nci_migrator], + total_graph=TOTAL_GRAPH, ) version_migrator_arm_and_power = Version( set(), piggy_back_migrations=[arm_and_power_migrator], + total_graph=TOTAL_GRAPH, ) @@ -124,6 +147,8 @@ def test_cmake(tmp_path): tmp_path.joinpath("recipe").mkdir() with open(tmp_path / "recipe/build.sh", "w") as f: f.write("#!/bin/bash\ncmake ..\nctest") + with open(tmp_path / "recipe/bld.bat", "w") as f: + f.write("cmake ..\nctest") run_test_migration( m=version_migrator_cmake, inp=YAML_PATH.joinpath("config_recipe.yaml").read_text(), @@ -137,16 +162,25 @@ def test_cmake(tmp_path): }, tmp_path=tmp_path, ) - expected = [ + expected_unix = [ "#!/bin/bash\n", "cmake ${CMAKE_ARGS} ..\n", 'if [[ "${CONDA_BUILD_CROSS_COMPILATION:-}" != "1" || "${CROSSCOMPILING_EMULATOR}" != "" ]]; then\n', "ctest\n", "fi\n", ] + expected_win = [ + "cmake %CMAKE_ARGS% ..\n", + 'if not "%CONDA_BUILD_SKIP_TESTS%"=="1" (\n', + "ctest\n", + ")\n", + ] with open(tmp_path / "recipe/build.sh") as f: lines = f.readlines() - assert lines == expected + assert lines == expected_unix + with open(tmp_path / "recipe/bld.bat") as f: + lines = f.readlines() + assert lines == expected_win @pytest.mark.parametrize("recipe_version", [0, 1]) diff --git a/tests/test_duplicate_lines_cleanup_migrator.py b/tests/test_duplicate_lines_cleanup_migrator.py index 577bc912d..b67e1dd96 100644 --- a/tests/test_duplicate_lines_cleanup_migrator.py +++ b/tests/test_duplicate_lines_cleanup_migrator.py @@ -1,13 +1,17 @@ import os +import networkx as nx import pytest from test_migrators import run_test_migration 
from conda_forge_tick.migrators import DuplicateLinesCleanup, Version +TOTAL_GRAPH = nx.DiGraph() +TOTAL_GRAPH.graph["outputs_lut"] = {} VERSION_DLC = Version( set(), piggy_back_migrations=[DuplicateLinesCleanup()], + total_graph=TOTAL_GRAPH, ) YAML_PATH = os.path.join(os.path.dirname(__file__), "test_yaml") diff --git a/tests/test_extra_jinja2_keys_migrator.py b/tests/test_extra_jinja2_keys_migrator.py index db2feb2e5..f9286c645 100644 --- a/tests/test_extra_jinja2_keys_migrator.py +++ b/tests/test_extra_jinja2_keys_migrator.py @@ -1,13 +1,17 @@ import os +import networkx as nx from flaky import flaky from test_migrators import run_test_migration from conda_forge_tick.migrators import ExtraJinja2KeysCleanup, Version +TOTAL_GRAPH = nx.DiGraph() +TOTAL_GRAPH.graph["outputs_lut"] = {} VERSION_CF = Version( set(), piggy_back_migrations=[ExtraJinja2KeysCleanup()], + total_graph=TOTAL_GRAPH, ) YAML_PATH = os.path.join(os.path.dirname(__file__), "test_yaml") diff --git a/tests/test_files_make_migrators/aws-c-io_node_attrs.json b/tests/test_files_make_migrators/aws-c-io_node_attrs.json new file mode 100644 index 000000000..c16fdc5ab --- /dev/null +++ b/tests/test_files_make_migrators/aws-c-io_node_attrs.json @@ -0,0 +1,987 @@ +{ + "archived": false, + "branch": "main", + "conda-forge.yml": { + "bot": { + "automerge": true + }, + "build_platform": { + "linux_aarch64": "linux_64", + "linux_ppc64le": "linux_64", + "osx_arm64": "osx_64" + }, + "conda_build": { + "pkg_format": "2" + }, + "conda_build_tool": "rattler-build", + "conda_forge_output_validation": true, + "conda_install_tool": "pixi", + "github": { + "branch_name": "main", + "tooling_branch_name": "main" + }, + "test": "native_and_emulated" + }, + "feedstock_name": "aws-c-io", + "hash_type": "sha256", + "linux_64_meta_yaml": { + "about": { + "home": "https://github.com/awslabs/aws-c-io", + "license": "Apache-2.0", + "license_family": "Apache-2.0", + "license_file": "LICENSE", + "summary": "This is a module for the AWS SDK for C. It handles all IO and TLS work for application protocols." 
+ }, + "build": { + "number": "0", + "run_exports": { + "weak": [ + "aws-c-io" + ] + } + }, + "extra": { + "recipe-maintainers": [ + "xhochy", + "conda-forge/aws-sdk-cpp" + ] + }, + "outputs": [ + { + "build": { + "strong": [], + "weak": [ + "aws-c-io" + ] + }, + "name": "aws-c-io", + "requirements": { + "build": [ + "cmake !=3.19.0,!=3.19.1", + "c_compiler_stub", + "c_stdlib_stub", + "ninja" + ], + "host": [ + "aws-c-common", + "aws-c-cal", + "s2n" + ], + "run": [] + }, + "tests": [ + { + "script": [ + "test -f $PREFIX/lib/libaws-c-io${SHLIB_EXT}", + "test -f $PREFIX/include/aws/io/io.h" + ] + } + ] + } + ], + "package": { + "name": "aws-c-io", + "version": "0.18.0" + }, + "requirements": { + "build": [ + "cmake !=3.19.0,!=3.19.1", + "c_compiler_stub", + "c_stdlib_stub", + "ninja" + ], + "host": [ + "aws-c-common", + "aws-c-cal", + "s2n" + ] + }, + "schema_version": 1, + "source": { + "sha256": "c65a9f059dfe3208dbc92b7fc11f6d846d15e1a14cd0dabf98041ce9627cadda", + "url": "https://github.com/awslabs/aws-c-io/archive/v0.18.0.tar.gz" + } + }, + "linux_64_requirements": { + "build": { + "__set__": true, + "elements": [ + "c_compiler_stub", + "c_stdlib_stub", + "cmake", + "ninja" + ] + }, + "host": { + "__set__": true, + "elements": [ + "aws-c-cal", + "aws-c-common", + "s2n" + ] + }, + "run": { + "__set__": true, + "elements": [] + }, + "test": { + "__set__": true, + "elements": [] + } + }, + "linux_aarch64_meta_yaml": { + "about": { + "home": "https://github.com/awslabs/aws-c-io", + "license": "Apache-2.0", + "license_family": "Apache-2.0", + "license_file": "LICENSE", + "summary": "This is a module for the AWS SDK for C. It handles all IO and TLS work for application protocols." + }, + "build": { + "number": "0", + "run_exports": { + "weak": [ + "aws-c-io" + ] + } + }, + "extra": { + "recipe-maintainers": [ + "xhochy", + "conda-forge/aws-sdk-cpp" + ] + }, + "outputs": [ + { + "build": { + "strong": [], + "weak": [ + "aws-c-io" + ] + }, + "name": "aws-c-io", + "requirements": { + "build": [ + "cmake !=3.19.0,!=3.19.1", + "c_compiler_stub", + "c_stdlib_stub", + "ninja" + ], + "host": [ + "aws-c-common", + "aws-c-cal", + "s2n" + ], + "run": [] + }, + "tests": [ + { + "script": [ + "test -f $PREFIX/lib/libaws-c-io${SHLIB_EXT}", + "test -f $PREFIX/include/aws/io/io.h" + ] + } + ] + } + ], + "package": { + "name": "aws-c-io", + "version": "0.18.0" + }, + "requirements": { + "build": [ + "cmake !=3.19.0,!=3.19.1", + "c_compiler_stub", + "c_stdlib_stub", + "ninja" + ], + "host": [ + "aws-c-common", + "aws-c-cal", + "s2n" + ] + }, + "schema_version": 1, + "source": { + "sha256": "c65a9f059dfe3208dbc92b7fc11f6d846d15e1a14cd0dabf98041ce9627cadda", + "url": "https://github.com/awslabs/aws-c-io/archive/v0.18.0.tar.gz" + } + }, + "linux_aarch64_requirements": { + "build": { + "__set__": true, + "elements": [ + "c_compiler_stub", + "c_stdlib_stub", + "cmake", + "ninja" + ] + }, + "host": { + "__set__": true, + "elements": [ + "aws-c-cal", + "aws-c-common", + "s2n" + ] + }, + "run": { + "__set__": true, + "elements": [] + }, + "test": { + "__set__": true, + "elements": [] + } + }, + "linux_ppc64le_meta_yaml": { + "about": { + "home": "https://github.com/awslabs/aws-c-io", + "license": "Apache-2.0", + "license_family": "Apache-2.0", + "license_file": "LICENSE", + "summary": "This is a module for the AWS SDK for C. It handles all IO and TLS work for application protocols." 
+ }, + "build": { + "number": "0", + "run_exports": { + "weak": [ + "aws-c-io" + ] + } + }, + "extra": { + "recipe-maintainers": [ + "xhochy", + "conda-forge/aws-sdk-cpp" + ] + }, + "outputs": [ + { + "build": { + "strong": [], + "weak": [ + "aws-c-io" + ] + }, + "name": "aws-c-io", + "requirements": { + "build": [ + "cmake !=3.19.0,!=3.19.1", + "c_compiler_stub", + "c_stdlib_stub", + "ninja" + ], + "host": [ + "aws-c-common", + "aws-c-cal", + "s2n" + ], + "run": [] + }, + "tests": [ + { + "script": [ + "test -f $PREFIX/lib/libaws-c-io${SHLIB_EXT}", + "test -f $PREFIX/include/aws/io/io.h" + ] + } + ] + } + ], + "package": { + "name": "aws-c-io", + "version": "0.18.0" + }, + "requirements": { + "build": [ + "cmake !=3.19.0,!=3.19.1", + "c_compiler_stub", + "c_stdlib_stub", + "ninja" + ], + "host": [ + "aws-c-common", + "aws-c-cal", + "s2n" + ] + }, + "schema_version": 1, + "source": { + "sha256": "c65a9f059dfe3208dbc92b7fc11f6d846d15e1a14cd0dabf98041ce9627cadda", + "url": "https://github.com/awslabs/aws-c-io/archive/v0.18.0.tar.gz" + } + }, + "linux_ppc64le_requirements": { + "build": { + "__set__": true, + "elements": [ + "c_compiler_stub", + "c_stdlib_stub", + "cmake", + "ninja" + ] + }, + "host": { + "__set__": true, + "elements": [ + "aws-c-cal", + "aws-c-common", + "s2n" + ] + }, + "run": { + "__set__": true, + "elements": [] + }, + "test": { + "__set__": true, + "elements": [] + } + }, + "meta_yaml": { + "about": { + "home": "https://github.com/awslabs/aws-c-io", + "license": "Apache-2.0", + "license_family": "Apache-2.0", + "license_file": "LICENSE", + "summary": "This is a module for the AWS SDK for C. It handles all IO and TLS work for application protocols." + }, + "build": { + "number": "0", + "run_exports": { + "weak": [ + "aws-c-io" + ] + } + }, + "extra": { + "recipe-maintainers": [ + "xhochy", + "conda-forge/aws-sdk-cpp" + ] + }, + "outputs": [ + { + "build": { + "strong": [], + "weak": [ + "aws-c-io" + ] + }, + "name": "aws-c-io", + "requirements": { + "build": [ + "cmake !=3.19.0,!=3.19.1", + "c_compiler_stub", + "c_stdlib_stub", + "ninja" + ], + "host": [ + "aws-c-common", + "aws-c-cal", + "s2n" + ], + "run": [] + }, + "tests": [ + { + "script": [ + "test -f $PREFIX/lib/libaws-c-io${SHLIB_EXT}", + "test -f $PREFIX/include/aws/io/io.h" + ] + } + ] + }, + { + "build": { + "strong": [], + "weak": [ + "aws-c-io" + ] + }, + "name": "aws-c-io", + "requirements": { + "build": [ + "cmake !=3.19.0,!=3.19.1", + "c_compiler_stub", + "c_stdlib_stub", + "ninja" + ], + "host": [ + "aws-c-common", + "aws-c-cal", + "s2n" + ], + "run": [] + }, + "tests": [ + { + "script": [ + "test -f $PREFIX/lib/libaws-c-io${SHLIB_EXT}", + "test -f $PREFIX/include/aws/io/io.h" + ] + } + ] + }, + { + "build": { + "strong": [], + "weak": [ + "aws-c-io" + ] + }, + "name": "aws-c-io", + "requirements": { + "build": [ + "cmake !=3.19.0,!=3.19.1", + "c_compiler_stub", + "c_stdlib_stub", + "ninja" + ], + "host": [ + "aws-c-common", + "aws-c-cal", + "s2n" + ], + "run": [] + }, + "tests": [ + { + "script": [ + "test -f $PREFIX/lib/libaws-c-io${SHLIB_EXT}", + "test -f $PREFIX/include/aws/io/io.h" + ] + } + ] + }, + { + "build": { + "strong": [], + "weak": [ + "aws-c-io" + ] + }, + "name": "aws-c-io", + "requirements": { + "build": [ + "cmake !=3.19.0,!=3.19.1", + "c_compiler_stub", + "c_stdlib_stub", + "ninja" + ], + "host": [ + "aws-c-common", + "aws-c-cal", + "s2n" + ], + "run": [] + }, + "tests": [ + { + "script": [ + "test -f $PREFIX/lib/libaws-c-io${SHLIB_EXT}", + "test -f $PREFIX/include/aws/io/io.h" + ] + } 
+ ] + }, + { + "build": { + "strong": [], + "weak": [ + "aws-c-io" + ] + }, + "name": "aws-c-io", + "requirements": { + "build": [ + "cmake !=3.19.0,!=3.19.1", + "c_compiler_stub", + "c_stdlib_stub", + "ninja" + ], + "host": [ + "aws-c-common", + "aws-c-cal", + "s2n" + ], + "run": [] + }, + "tests": [ + { + "script": [ + "test -f $PREFIX/lib/libaws-c-io${SHLIB_EXT}", + "test -f $PREFIX/include/aws/io/io.h" + ] + } + ] + }, + { + "build": { + "strong": [], + "weak": [ + "aws-c-io" + ] + }, + "name": "aws-c-io", + "requirements": { + "build": [ + "cmake !=3.19.0,!=3.19.1", + "c_compiler_stub", + "c_stdlib_stub", + "ninja" + ], + "host": [ + "aws-c-common", + "aws-c-cal", + "s2n" + ], + "run": [] + }, + "tests": [ + { + "script": [ + "test -f $PREFIX/lib/libaws-c-io${SHLIB_EXT}", + "test -f $PREFIX/include/aws/io/io.h" + ] + } + ] + } + ], + "package": { + "name": "aws-c-io", + "version": "0.18.0" + }, + "requirements": { + "build": [ + "cmake !=3.19.0,!=3.19.1", + "c_compiler_stub", + "c_stdlib_stub", + "ninja" + ], + "host": [ + "aws-c-common", + "aws-c-cal", + "s2n" + ] + }, + "schema_version": 1, + "source": { + "sha256": "c65a9f059dfe3208dbc92b7fc11f6d846d15e1a14cd0dabf98041ce9627cadda", + "url": "https://github.com/awslabs/aws-c-io/archive/v0.18.0.tar.gz" + } + }, + "name": "aws-c-io", + "osx_64_meta_yaml": { + "about": { + "home": "https://github.com/awslabs/aws-c-io", + "license": "Apache-2.0", + "license_family": "Apache-2.0", + "license_file": "LICENSE", + "summary": "This is a module for the AWS SDK for C. It handles all IO and TLS work for application protocols." + }, + "build": { + "number": "0", + "run_exports": { + "weak": [ + "aws-c-io" + ] + } + }, + "extra": { + "recipe-maintainers": [ + "xhochy", + "conda-forge/aws-sdk-cpp" + ] + }, + "outputs": [ + { + "build": { + "strong": [], + "weak": [ + "aws-c-io" + ] + }, + "name": "aws-c-io", + "requirements": { + "build": [ + "cmake !=3.19.0,!=3.19.1", + "c_compiler_stub", + "c_stdlib_stub", + "ninja" + ], + "host": [ + "aws-c-common", + "aws-c-cal", + "s2n" + ], + "run": [] + }, + "tests": [ + { + "script": [ + "test -f $PREFIX/lib/libaws-c-io${SHLIB_EXT}", + "test -f $PREFIX/include/aws/io/io.h" + ] + } + ] + } + ], + "package": { + "name": "aws-c-io", + "version": "0.18.0" + }, + "requirements": { + "build": [ + "cmake !=3.19.0,!=3.19.1", + "c_compiler_stub", + "c_stdlib_stub", + "ninja" + ], + "host": [ + "aws-c-common", + "aws-c-cal", + "s2n" + ] + }, + "schema_version": 1, + "source": { + "sha256": "c65a9f059dfe3208dbc92b7fc11f6d846d15e1a14cd0dabf98041ce9627cadda", + "url": "https://github.com/awslabs/aws-c-io/archive/v0.18.0.tar.gz" + } + }, + "osx_64_requirements": { + "build": { + "__set__": true, + "elements": [ + "c_compiler_stub", + "c_stdlib_stub", + "cmake", + "ninja" + ] + }, + "host": { + "__set__": true, + "elements": [ + "aws-c-cal", + "aws-c-common", + "s2n" + ] + }, + "run": { + "__set__": true, + "elements": [] + }, + "test": { + "__set__": true, + "elements": [] + } + }, + "osx_arm64_meta_yaml": { + "about": { + "home": "https://github.com/awslabs/aws-c-io", + "license": "Apache-2.0", + "license_family": "Apache-2.0", + "license_file": "LICENSE", + "summary": "This is a module for the AWS SDK for C. It handles all IO and TLS work for application protocols." 
+ }, + "build": { + "number": "0", + "run_exports": { + "weak": [ + "aws-c-io" + ] + } + }, + "extra": { + "recipe-maintainers": [ + "xhochy", + "conda-forge/aws-sdk-cpp" + ] + }, + "outputs": [ + { + "build": { + "strong": [], + "weak": [ + "aws-c-io" + ] + }, + "name": "aws-c-io", + "requirements": { + "build": [ + "cmake !=3.19.0,!=3.19.1", + "c_compiler_stub", + "c_stdlib_stub", + "ninja" + ], + "host": [ + "aws-c-common", + "aws-c-cal", + "s2n" + ], + "run": [] + }, + "tests": [ + { + "script": [ + "test -f $PREFIX/lib/libaws-c-io${SHLIB_EXT}", + "test -f $PREFIX/include/aws/io/io.h" + ] + } + ] + } + ], + "package": { + "name": "aws-c-io", + "version": "0.18.0" + }, + "requirements": { + "build": [ + "cmake !=3.19.0,!=3.19.1", + "c_compiler_stub", + "c_stdlib_stub", + "ninja" + ], + "host": [ + "aws-c-common", + "aws-c-cal", + "s2n" + ] + }, + "schema_version": 1, + "source": { + "sha256": "c65a9f059dfe3208dbc92b7fc11f6d846d15e1a14cd0dabf98041ce9627cadda", + "url": "https://github.com/awslabs/aws-c-io/archive/v0.18.0.tar.gz" + } + }, + "osx_arm64_requirements": { + "build": { + "__set__": true, + "elements": [ + "c_compiler_stub", + "c_stdlib_stub", + "cmake", + "ninja" + ] + }, + "host": { + "__set__": true, + "elements": [ + "aws-c-cal", + "aws-c-common", + "s2n" + ] + }, + "run": { + "__set__": true, + "elements": [] + }, + "test": { + "__set__": true, + "elements": [] + } + }, + "outputs_names": { + "__set__": true, + "elements": [ + "aws-c-io" + ] + }, + "parsing_error": false, + "platforms": [ + "linux_64", + "linux_aarch64", + "linux_ppc64le", + "osx_64", + "osx_arm64", + "win_64" + ], + "pr_info": { + "__lazy_json__": "pr_info/aws-c-io.json" + }, + "raw_meta_yaml": "schema_version: 1\n\ncontext:\n version: \"0.18.0\"\n\npackage:\n name: aws-c-io\n version: ${{ version }}\n\nsource:\n url: https://github.com/awslabs/aws-c-io/archive/v${{ version }}.tar.gz\n sha256: c65a9f059dfe3208dbc92b7fc11f6d846d15e1a14cd0dabf98041ce9627cadda\n\nbuild:\n number: 0\n\nrequirements:\n build:\n - cmake !=3.19.0,!=3.19.1\n - ${{ compiler('c') }}\n - ${{ stdlib(\"c\") }}\n - ninja\n host:\n - aws-c-common\n - aws-c-cal\n - if: linux\n then: s2n\n run_exports:\n - ${{ pin_subpackage(\"aws-c-io\", upper_bound=\"x.x.x\") }}\n\ntests:\n - script:\n - if: unix\n then:\n - test -f $PREFIX/lib/libaws-c-io${SHLIB_EXT}\n - test -f $PREFIX/include/aws/io/io.h\n - if: win\n then:\n - if not exist %LIBRARY_INC%\\\\aws\\\\io\\\\io.h exit 1\n - if not exist %PREFIX%\\\\Library\\\\bin\\\\aws-c-io.dll exit 1\nabout:\n license: Apache-2.0\n license_file: LICENSE\n summary: This is a module for the AWS SDK for C. 
It handles all IO and TLS work for application protocols.\n homepage: https://github.com/awslabs/aws-c-io\n\nextra:\n recipe-maintainers:\n - xhochy\n - conda-forge/aws-sdk-cpp\n", + "req": { + "__set__": true, + "elements": [ + "aws-c-cal", + "aws-c-common", + "c_compiler_stub", + "c_stdlib_stub", + "cmake", + "ninja", + "s2n" + ] + }, + "requirements": { + "build": { + "__set__": true, + "elements": [ + "c_compiler_stub", + "c_stdlib_stub", + "cmake", + "ninja" + ] + }, + "host": { + "__set__": true, + "elements": [ + "aws-c-cal", + "aws-c-common", + "c_compiler_stub", + "c_stdlib_stub", + "s2n" + ] + }, + "run": { + "__set__": true, + "elements": [ + "c_compiler_stub", + "c_stdlib_stub" + ] + }, + "test": { + "__set__": true, + "elements": [] + } + }, + "strong_exports": false, + "total_requirements": { + "build": { + "__set__": true, + "elements": [ + "c_compiler_stub", + "c_stdlib_stub", + "cmake !=3.19.0,!=3.19.1", + "ninja" + ] + }, + "host": { + "__set__": true, + "elements": [ + "aws-c-cal", + "aws-c-common", + "s2n" + ] + }, + "run": { + "__set__": true, + "elements": [] + }, + "test": { + "__set__": true, + "elements": [] + } + }, + "url": "https://github.com/awslabs/aws-c-io/archive/v0.18.0.tar.gz", + "version": "0.18.0", + "version_pr_info": { + "__lazy_json__": "version_pr_info/aws-c-io.json" + }, + "win_64_meta_yaml": { + "about": { + "home": "https://github.com/awslabs/aws-c-io", + "license": "Apache-2.0", + "license_family": "Apache-2.0", + "license_file": "LICENSE", + "summary": "This is a module for the AWS SDK for C. It handles all IO and TLS work for application protocols." + }, + "build": { + "number": "0", + "run_exports": { + "weak": [ + "aws-c-io" + ] + } + }, + "extra": { + "recipe-maintainers": [ + "xhochy", + "conda-forge/aws-sdk-cpp" + ] + }, + "outputs": [ + { + "build": { + "strong": [], + "weak": [ + "aws-c-io" + ] + }, + "name": "aws-c-io", + "requirements": { + "build": [ + "cmake !=3.19.0,!=3.19.1", + "c_compiler_stub", + "c_stdlib_stub", + "ninja" + ], + "host": [ + "aws-c-common", + "aws-c-cal", + "s2n" + ], + "run": [] + }, + "tests": [ + { + "script": [ + "test -f $PREFIX/lib/libaws-c-io${SHLIB_EXT}", + "test -f $PREFIX/include/aws/io/io.h" + ] + } + ] + } + ], + "package": { + "name": "aws-c-io", + "version": "0.18.0" + }, + "requirements": { + "build": [ + "cmake !=3.19.0,!=3.19.1", + "c_compiler_stub", + "c_stdlib_stub", + "ninja" + ], + "host": [ + "aws-c-common", + "aws-c-cal", + "s2n" + ] + }, + "schema_version": 1, + "source": { + "sha256": "c65a9f059dfe3208dbc92b7fc11f6d846d15e1a14cd0dabf98041ce9627cadda", + "url": "https://github.com/awslabs/aws-c-io/archive/v0.18.0.tar.gz" + } + }, + "win_64_requirements": { + "build": { + "__set__": true, + "elements": [ + "c_compiler_stub", + "c_stdlib_stub", + "cmake", + "ninja" + ] + }, + "host": { + "__set__": true, + "elements": [ + "aws-c-cal", + "aws-c-common", + "s2n" + ] + }, + "run": { + "__set__": true, + "elements": [] + }, + "test": { + "__set__": true, + "elements": [] + } + } +} diff --git a/tests/test_files_make_migrators/conda-forge-pinning_node_attrs.json b/tests/test_files_make_migrators/conda-forge-pinning_node_attrs.json new file mode 100644 index 000000000..7da708d08 --- /dev/null +++ b/tests/test_files_make_migrators/conda-forge-pinning_node_attrs.json @@ -0,0 +1,202 @@ +{ + "archived": false, + "branch": "main", + "conda-forge.yml": { + "conda_build": { + "pkg_format": "2" + }, + "conda_forge_output_validation": true, + "github": { + "branch_name": "main", + "tooling_branch_name": 
"main" + }, + "provider": { + "linux_64": "github_actions" + }, + "upload_on_branch": "main" + }, + "feedstock_name": "conda-forge-pinning", + "linux_64_meta_yaml": { + "about": { + "home": "https://conda-forge.org/docs/maintainer/infrastructure.html#conda-forge-pinning", + "license": "BSD-3-Clause", + "license_family": "BSD", + "license_file": "LICENSE.txt", + "summary": "The baseline versions of software for the conda-forge ecosystem" + }, + "build": { + "noarch": "generic", + "number": "0", + "script": [ + "cp conda_build_config.yaml $PREFIX", + "mkdir -p $PREFIX/share/conda-forge/migrations", + "cp migrations/*.yaml $PREFIX/share/conda-forge/migrations/", + "cp migrations/*.txt $PREFIX/share/conda-forge/migrations/", + "cp migrations/*.exyaml $PREFIX/share/conda-forge/migrations/", + "mkdir -p $PREFIX/share/conda-forge/migration_support", + "cp migration_support/* $PREFIX/share/conda-forge/migration_support/" + ] + }, + "extra": { + "recipe-maintainers": [ + "conda-forge/Core" + ] + }, + "package": { + "name": "conda-forge-pinning", + "version": "2025.04.01.13.38.21" + }, + "schema_version": 0, + "source": { + "path": "." + }, + "test": { + "commands": [ + "test -f $PREFIX/conda_build_config.yaml", + "test -f $PREFIX/share/conda-forge/migrations/example.exyaml", + "test -d $PREFIX/share/conda-forge/migration_support", + "if [[ $(ls -1 $PREFIX/share/conda-forge/migrations/*.yml) != \"\" ]]; then echo \"All migration files must end in .yaml\" && exit 1; fi" + ], + "requires": [ + "python >=3.8", + "pyyaml" + ] + } + }, + "linux_64_requirements": { + "build": { + "__set__": true, + "elements": [] + }, + "host": { + "__set__": true, + "elements": [] + }, + "run": { + "__set__": true, + "elements": [] + }, + "test": { + "__set__": true, + "elements": [ + "python", + "pyyaml" + ] + } + }, + "meta_yaml": { + "about": { + "home": "https://conda-forge.org/docs/maintainer/infrastructure.html#conda-forge-pinning", + "license": "BSD-3-Clause", + "license_family": "BSD", + "license_file": "LICENSE.txt", + "summary": "The baseline versions of software for the conda-forge ecosystem" + }, + "build": { + "noarch": "generic", + "number": "0", + "script": [ + "cp conda_build_config.yaml $PREFIX", + "mkdir -p $PREFIX/share/conda-forge/migrations", + "cp migrations/*.yaml $PREFIX/share/conda-forge/migrations/", + "cp migrations/*.txt $PREFIX/share/conda-forge/migrations/", + "cp migrations/*.exyaml $PREFIX/share/conda-forge/migrations/", + "mkdir -p $PREFIX/share/conda-forge/migration_support", + "cp migration_support/* $PREFIX/share/conda-forge/migration_support/" + ] + }, + "extra": { + "recipe-maintainers": [ + "conda-forge/Core" + ] + }, + "package": { + "name": "conda-forge-pinning", + "version": "2025.04.01.13.38.21" + }, + "schema_version": 0, + "source": { + "path": "." 
+ }, + "test": { + "commands": [ + "test -f $PREFIX/conda_build_config.yaml", + "test -f $PREFIX/share/conda-forge/migrations/example.exyaml", + "test -d $PREFIX/share/conda-forge/migration_support", + "if [[ $(ls -1 $PREFIX/share/conda-forge/migrations/*.yml) != \"\" ]]; then echo \"All migration files must end in .yaml\" && exit 1; fi" + ], + "requires": [ + "python >=3.8", + "pyyaml" + ] + } + }, + "name": "conda-forge-pinning", + "outputs_names": { + "__set__": true, + "elements": [ + "conda-forge-pinning" + ] + }, + "parsing_error": false, + "platforms": [ + "linux_64" + ], + "pr_info": { + "__lazy_json__": "pr_info/conda-forge-pinning.json" + }, + "raw_meta_yaml": "{% set version = datetime.datetime.utcnow().strftime('%Y.%m.%d.%H.%M.%S') %}\n\npackage:\n name: conda-forge-pinning\n version: {{ version }}\n\nsource:\n path: .\n\nbuild:\n number: 0\n noarch: generic\n script:\n - cp conda_build_config.yaml $PREFIX # [unix]\n - mkdir -p $PREFIX/share/conda-forge/migrations # [unix]\n - cp migrations/*.yaml $PREFIX/share/conda-forge/migrations/ # [unix]\n - cp migrations/*.txt $PREFIX/share/conda-forge/migrations/ # [unix]\n - cp migrations/*.exyaml $PREFIX/share/conda-forge/migrations/ # [unix]\n - mkdir -p $PREFIX/share/conda-forge/migration_support # [unix]\n - cp migration_support/* $PREFIX/share/conda-forge/migration_support/ # [unix]\n - echo \"This package can't be built on windows\" # [win]\n - exit 1 # [win]\n\ntest:\n requires:\n - python >=3.8\n - pyyaml\n commands:\n - test -f $PREFIX/conda_build_config.yaml # [unix]\n - test -f $PREFIX/share/conda-forge/migrations/example.exyaml # [unix]\n - test -d $PREFIX/share/conda-forge/migration_support # [unix]\n - >- # [unix]\n if [[ $(ls -1 $PREFIX/share/conda-forge/migrations/*.yml) != \"\" ]]; then # [unix]\n echo \"All migration files must end in .yaml\" && exit 1; # [unix]\n fi # [unix]\n\nabout:\n summary: The baseline versions of software for the conda-forge ecosystem\n license: BSD-3-Clause\n license_family: BSD\n license_file: LICENSE.txt\n home: https://conda-forge.org/docs/maintainer/infrastructure.html#conda-forge-pinning\n\nextra:\n recipe-maintainers:\n - conda-forge/Core\n", + "req": { + "__set__": true, + "elements": [] + }, + "requirements": { + "build": { + "__set__": true, + "elements": [] + }, + "host": { + "__set__": true, + "elements": [] + }, + "run": { + "__set__": true, + "elements": [] + }, + "test": { + "__set__": true, + "elements": [ + "python", + "pyyaml" + ] + } + }, + "strong_exports": false, + "time": 1568135286.5968125, + "total_requirements": { + "build": { + "__set__": true, + "elements": [] + }, + "host": { + "__set__": true, + "elements": [] + }, + "run": { + "__set__": true, + "elements": [] + }, + "test": { + "__set__": true, + "elements": [ + "python >=3.8", + "pyyaml" + ] + } + }, + "url": null, + "version": "2025.04.01.13.38.21", + "version_pr_info": { + "__lazy_json__": "version_pr_info/conda-forge-pinning.json" + } +} diff --git a/tests/test_files_make_migrators/numpy_node_attrs.json b/tests/test_files_make_migrators/numpy_node_attrs.json new file mode 100644 index 000000000..bb26a8825 --- /dev/null +++ b/tests/test_files_make_migrators/numpy_node_attrs.json @@ -0,0 +1,3366 @@ +{ + "archived": false, + "branch": "main", + "conda-forge.yml": { + "azure": { + "settings_win": { + "variables": { + "SET_PAGEFILE": "True" + } + } + }, + "build_platform": { + "linux_aarch64": "linux_64", + "linux_ppc64le": "linux_64", + "osx_arm64": "osx_64" + }, + "conda_build": { + "pkg_format": "2" + }, + 
"conda_forge_output_validation": true, + "github": { + "branch_name": "main", + "tooling_branch_name": "main" + }, + "provider": { + "win": "azure" + }, + "test": "native_and_emulated" + }, + "feedstock_name": "numpy", + "hash_type": "sha256", + "linux_64_meta_yaml": { + "about": { + "dev_url": "https://github.com/numpy/numpy", + "doc_url": "https://numpy.org/doc/stable/", + "home": "http://numpy.org/", + "license": "BSD-3-Clause", + "license_file": "LICENSE.txt", + "summary": "The fundamental package for scientific computing with Python." + }, + "build": { + "entry_points": null, + "number": "0", + "run_exports": [ + "numpy >=1.21,<3" + ] + }, + "extra": { + "recipe-maintainers": [ + "jakirkham", + "msarahan", + "pelson", + "rgommers", + "ocefpaf", + "isuruf", + "xhochy", + "h-vetinari" + ] + }, + "package": { + "name": "numpy", + "version": "2.2.4" + }, + "requirements": { + "build": [ + "c_compiler_stub", + "c_stdlib_stub", + "cxx_compiler_stub" + ], + "host": [ + "pip", + "meson-python", + "ninja", + "pkg-config", + "python", + "python-build", + "cython", + "libblas", + "libcblas", + "liblapack" + ], + "run": [ + "python" + ], + "run_constrained": [ + "numpy-base <0a0" + ] + }, + "schema_version": 0, + "source": [ + { + "sha256": "011a9483768b14bb00c6dab1c666f6d86f95b23036eb1e4b2b65db700f98c9a3", + "url": "https://github.com/numpy/numpy/archive/refs/tags/v2.2.4.tar.gz" + }, + { + "folder": "numpy/_core/src/highway", + "git_rev": "0b696633f9ad89497dd5532b55eaa01625ad71ca", + "git_url": "https://github.com/google/highway.git" + }, + { + "folder": "numpy/_core/src/common/pythoncapi-compat", + "git_rev": "0f1d42a10a3f594ad48894912396df31b2c2d55d", + "git_url": "https://github.com/python/pythoncapi-compat.git" + }, + { + "folder": "numpy/_core/src/npysort/x86-simd-sort", + "git_rev": "9a1b616d5cd4eaf49f7664fb86ccc1d18bad2b8d", + "git_url": "https://github.com/intel/x86-simd-sort.git" + }, + { + "folder": "numpy/_core/src/umath/svml", + "git_rev": "32bf2a98420762a63ab418aaa0a7d6e17eb9627a", + "git_url": "https://github.com/numpy/SVML.git" + }, + { + "folder": "numpy/fft/pocketfft", + "git_rev": "33ae5dc94c9cdc7f1c78346504a85de87cadaa12", + "git_url": "https://github.com/mreineck/pocketfft.git" + }, + { + "folder": "vendored-meson/meson", + "git_rev": "0d93515fb826440d19707eee47fd92655fe2f166", + "git_url": "https://github.com/numpy/meson.git" + }, + { + "sha256": "011a9483768b14bb00c6dab1c666f6d86f95b23036eb1e4b2b65db700f98c9a3", + "url": "https://github.com/numpy/numpy/archive/refs/tags/v2.2.4.tar.gz" + }, + { + "folder": "numpy/_core/src/highway", + "git_rev": "0b696633f9ad89497dd5532b55eaa01625ad71ca", + "git_url": "https://github.com/google/highway.git" + }, + { + "folder": "numpy/_core/src/common/pythoncapi-compat", + "git_rev": "0f1d42a10a3f594ad48894912396df31b2c2d55d", + "git_url": "https://github.com/python/pythoncapi-compat.git" + }, + { + "folder": "numpy/_core/src/npysort/x86-simd-sort", + "git_rev": "9a1b616d5cd4eaf49f7664fb86ccc1d18bad2b8d", + "git_url": "https://github.com/intel/x86-simd-sort.git" + }, + { + "folder": "numpy/_core/src/umath/svml", + "git_rev": "32bf2a98420762a63ab418aaa0a7d6e17eb9627a", + "git_url": "https://github.com/numpy/SVML.git" + }, + { + "folder": "numpy/fft/pocketfft", + "git_rev": "33ae5dc94c9cdc7f1c78346504a85de87cadaa12", + "git_url": "https://github.com/mreineck/pocketfft.git" + }, + { + "folder": "vendored-meson/meson", + "git_rev": "0d93515fb826440d19707eee47fd92655fe2f166", + "git_url": "https://github.com/numpy/meson.git" + }, + { + "sha256": 
"011a9483768b14bb00c6dab1c666f6d86f95b23036eb1e4b2b65db700f98c9a3", + "url": "https://github.com/numpy/numpy/archive/refs/tags/v2.2.4.tar.gz" + }, + { + "folder": "numpy/_core/src/highway", + "git_rev": "0b696633f9ad89497dd5532b55eaa01625ad71ca", + "git_url": "https://github.com/google/highway.git" + }, + { + "folder": "numpy/_core/src/common/pythoncapi-compat", + "git_rev": "0f1d42a10a3f594ad48894912396df31b2c2d55d", + "git_url": "https://github.com/python/pythoncapi-compat.git" + }, + { + "folder": "numpy/_core/src/npysort/x86-simd-sort", + "git_rev": "9a1b616d5cd4eaf49f7664fb86ccc1d18bad2b8d", + "git_url": "https://github.com/intel/x86-simd-sort.git" + }, + { + "folder": "numpy/_core/src/umath/svml", + "git_rev": "32bf2a98420762a63ab418aaa0a7d6e17eb9627a", + "git_url": "https://github.com/numpy/SVML.git" + }, + { + "folder": "numpy/fft/pocketfft", + "git_rev": "33ae5dc94c9cdc7f1c78346504a85de87cadaa12", + "git_url": "https://github.com/mreineck/pocketfft.git" + }, + { + "folder": "vendored-meson/meson", + "git_rev": "0d93515fb826440d19707eee47fd92655fe2f166", + "git_url": "https://github.com/numpy/meson.git" + }, + { + "sha256": "011a9483768b14bb00c6dab1c666f6d86f95b23036eb1e4b2b65db700f98c9a3", + "url": "https://github.com/numpy/numpy/archive/refs/tags/v2.2.4.tar.gz" + }, + { + "folder": "numpy/_core/src/highway", + "git_rev": "0b696633f9ad89497dd5532b55eaa01625ad71ca", + "git_url": "https://github.com/google/highway.git" + }, + { + "folder": "numpy/_core/src/common/pythoncapi-compat", + "git_rev": "0f1d42a10a3f594ad48894912396df31b2c2d55d", + "git_url": "https://github.com/python/pythoncapi-compat.git" + }, + { + "folder": "numpy/_core/src/npysort/x86-simd-sort", + "git_rev": "9a1b616d5cd4eaf49f7664fb86ccc1d18bad2b8d", + "git_url": "https://github.com/intel/x86-simd-sort.git" + }, + { + "folder": "numpy/_core/src/umath/svml", + "git_rev": "32bf2a98420762a63ab418aaa0a7d6e17eb9627a", + "git_url": "https://github.com/numpy/SVML.git" + }, + { + "folder": "numpy/fft/pocketfft", + "git_rev": "33ae5dc94c9cdc7f1c78346504a85de87cadaa12", + "git_url": "https://github.com/mreineck/pocketfft.git" + }, + { + "folder": "vendored-meson/meson", + "git_rev": "0d93515fb826440d19707eee47fd92655fe2f166", + "git_url": "https://github.com/numpy/meson.git" + }, + { + "sha256": "011a9483768b14bb00c6dab1c666f6d86f95b23036eb1e4b2b65db700f98c9a3", + "url": "https://github.com/numpy/numpy/archive/refs/tags/v2.2.4.tar.gz" + }, + { + "folder": "numpy/_core/src/highway", + "git_rev": "0b696633f9ad89497dd5532b55eaa01625ad71ca", + "git_url": "https://github.com/google/highway.git" + }, + { + "folder": "numpy/_core/src/common/pythoncapi-compat", + "git_rev": "0f1d42a10a3f594ad48894912396df31b2c2d55d", + "git_url": "https://github.com/python/pythoncapi-compat.git" + }, + { + "folder": "numpy/_core/src/npysort/x86-simd-sort", + "git_rev": "9a1b616d5cd4eaf49f7664fb86ccc1d18bad2b8d", + "git_url": "https://github.com/intel/x86-simd-sort.git" + }, + { + "folder": "numpy/_core/src/umath/svml", + "git_rev": "32bf2a98420762a63ab418aaa0a7d6e17eb9627a", + "git_url": "https://github.com/numpy/SVML.git" + }, + { + "folder": "numpy/fft/pocketfft", + "git_rev": "33ae5dc94c9cdc7f1c78346504a85de87cadaa12", + "git_url": "https://github.com/mreineck/pocketfft.git" + }, + { + "folder": "vendored-meson/meson", + "git_rev": "0d93515fb826440d19707eee47fd92655fe2f166", + "git_url": "https://github.com/numpy/meson.git" + } + ], + "test": { + "commands": [ + "f2py -h", + "python -c \"import numpy, sys; sys.exit(not numpy.test(verbose=1, 
label='full', tests=None, extra_argv=['-k', 'not (_not_a_real_test or Test_ARM_Features or Test_POWER_Features or test_new_policy or (test_basic_property and float32) or (test_regression and test_gh25784) or test_einsum_sums_float32 or test_ufunc_noncontiguous[matvec] or test_accelerate_framework_sgemv_fix )', '-nauto', '--timeout=3000', '--durations=50', '--maxfail=100']))\"" + ], + "imports": [ + "numpy", + "numpy.ctypeslib", + "numpy.distutils", + "numpy.dtypes", + "numpy.exceptions", + "numpy.f2py", + "numpy.fft", + "numpy.lib", + "numpy.lib.format", + "numpy.lib.mixins", + "numpy.lib.recfunctions", + "numpy.lib.scimath", + "numpy.lib.stride_tricks", + "numpy.lib.npyio", + "numpy.lib.introspect", + "numpy.lib.array_utils", + "numpy.linalg", + "numpy.ma", + "numpy.ma.extras", + "numpy.ma.mrecords", + "numpy.polynomial", + "numpy.polynomial.chebyshev", + "numpy.polynomial.hermite", + "numpy.polynomial.hermite_e", + "numpy.polynomial.laguerre", + "numpy.polynomial.legendre", + "numpy.polynomial.polynomial", + "numpy.random", + "numpy.testing", + "numpy.testing.overrides", + "numpy.typing", + "numpy.typing.mypy_plugin", + "numpy.version", + "numpy.core.multiarray", + "numpy.core.numeric", + "numpy.core.umath", + "numpy.linalg.lapack_lite", + "numpy.random.mtrand" + ], + "requires": [ + "pytest", + "pytest-timeout", + "pytest-xdist", + "cython", + "hypothesis", + "meson", + "pytz", + "setuptools <60.0.0", + "typing_extensions", + "c_compiler_stub", + "cxx_compiler_stub", + "fortran_compiler_stub", + "pkg-config" + ] + } + }, + "linux_64_requirements": { + "build": { + "__set__": true, + "elements": [ + "c_compiler_stub", + "c_stdlib_stub", + "cxx_compiler_stub" + ] + }, + "host": { + "__set__": true, + "elements": [ + "cython", + "libblas", + "libcblas", + "liblapack", + "meson-python", + "ninja", + "pip", + "pkg-config", + "python", + "python-build" + ] + }, + "run": { + "__set__": true, + "elements": [ + "python" + ] + }, + "test": { + "__set__": true, + "elements": [ + "c_compiler_stub", + "cxx_compiler_stub", + "cython", + "fortran_compiler_stub", + "hypothesis", + "meson", + "pkg-config", + "pytest", + "pytest-timeout", + "pytest-xdist", + "pytz", + "setuptools", + "typing_extensions" + ] + } + }, + "linux_aarch64_meta_yaml": { + "about": { + "dev_url": "https://github.com/numpy/numpy", + "doc_url": "https://numpy.org/doc/stable/", + "home": "http://numpy.org/", + "license": "BSD-3-Clause", + "license_file": "LICENSE.txt", + "summary": "The fundamental package for scientific computing with Python." 
+ }, + "build": { + "entry_points": null, + "number": "0", + "run_exports": [ + "numpy >=1.21,<3" + ] + }, + "extra": { + "recipe-maintainers": [ + "jakirkham", + "msarahan", + "pelson", + "rgommers", + "ocefpaf", + "isuruf", + "xhochy", + "h-vetinari" + ] + }, + "package": { + "name": "numpy", + "version": "2.2.4" + }, + "requirements": { + "build": [ + "c_compiler_stub", + "c_stdlib_stub", + "cxx_compiler_stub" + ], + "host": [ + "pip", + "meson-python", + "ninja", + "pkg-config", + "python", + "python-build", + "cython", + "libblas", + "libcblas", + "liblapack" + ], + "run": [ + "python" + ], + "run_constrained": [ + "numpy-base <0a0" + ] + }, + "schema_version": 0, + "source": [ + { + "sha256": "011a9483768b14bb00c6dab1c666f6d86f95b23036eb1e4b2b65db700f98c9a3", + "url": "https://github.com/numpy/numpy/archive/refs/tags/v2.2.4.tar.gz" + }, + { + "folder": "numpy/_core/src/highway", + "git_rev": "0b696633f9ad89497dd5532b55eaa01625ad71ca", + "git_url": "https://github.com/google/highway.git" + }, + { + "folder": "numpy/_core/src/common/pythoncapi-compat", + "git_rev": "0f1d42a10a3f594ad48894912396df31b2c2d55d", + "git_url": "https://github.com/python/pythoncapi-compat.git" + }, + { + "folder": "numpy/_core/src/npysort/x86-simd-sort", + "git_rev": "9a1b616d5cd4eaf49f7664fb86ccc1d18bad2b8d", + "git_url": "https://github.com/intel/x86-simd-sort.git" + }, + { + "folder": "numpy/_core/src/umath/svml", + "git_rev": "32bf2a98420762a63ab418aaa0a7d6e17eb9627a", + "git_url": "https://github.com/numpy/SVML.git" + }, + { + "folder": "numpy/fft/pocketfft", + "git_rev": "33ae5dc94c9cdc7f1c78346504a85de87cadaa12", + "git_url": "https://github.com/mreineck/pocketfft.git" + }, + { + "folder": "vendored-meson/meson", + "git_rev": "0d93515fb826440d19707eee47fd92655fe2f166", + "git_url": "https://github.com/numpy/meson.git" + }, + { + "sha256": "011a9483768b14bb00c6dab1c666f6d86f95b23036eb1e4b2b65db700f98c9a3", + "url": "https://github.com/numpy/numpy/archive/refs/tags/v2.2.4.tar.gz" + }, + { + "folder": "numpy/_core/src/highway", + "git_rev": "0b696633f9ad89497dd5532b55eaa01625ad71ca", + "git_url": "https://github.com/google/highway.git" + }, + { + "folder": "numpy/_core/src/common/pythoncapi-compat", + "git_rev": "0f1d42a10a3f594ad48894912396df31b2c2d55d", + "git_url": "https://github.com/python/pythoncapi-compat.git" + }, + { + "folder": "numpy/_core/src/npysort/x86-simd-sort", + "git_rev": "9a1b616d5cd4eaf49f7664fb86ccc1d18bad2b8d", + "git_url": "https://github.com/intel/x86-simd-sort.git" + }, + { + "folder": "numpy/_core/src/umath/svml", + "git_rev": "32bf2a98420762a63ab418aaa0a7d6e17eb9627a", + "git_url": "https://github.com/numpy/SVML.git" + }, + { + "folder": "numpy/fft/pocketfft", + "git_rev": "33ae5dc94c9cdc7f1c78346504a85de87cadaa12", + "git_url": "https://github.com/mreineck/pocketfft.git" + }, + { + "folder": "vendored-meson/meson", + "git_rev": "0d93515fb826440d19707eee47fd92655fe2f166", + "git_url": "https://github.com/numpy/meson.git" + }, + { + "sha256": "011a9483768b14bb00c6dab1c666f6d86f95b23036eb1e4b2b65db700f98c9a3", + "url": "https://github.com/numpy/numpy/archive/refs/tags/v2.2.4.tar.gz" + }, + { + "folder": "numpy/_core/src/highway", + "git_rev": "0b696633f9ad89497dd5532b55eaa01625ad71ca", + "git_url": "https://github.com/google/highway.git" + }, + { + "folder": "numpy/_core/src/common/pythoncapi-compat", + "git_rev": "0f1d42a10a3f594ad48894912396df31b2c2d55d", + "git_url": "https://github.com/python/pythoncapi-compat.git" + }, + { + "folder": "numpy/_core/src/npysort/x86-simd-sort", 
+ "git_rev": "9a1b616d5cd4eaf49f7664fb86ccc1d18bad2b8d", + "git_url": "https://github.com/intel/x86-simd-sort.git" + }, + { + "folder": "numpy/_core/src/umath/svml", + "git_rev": "32bf2a98420762a63ab418aaa0a7d6e17eb9627a", + "git_url": "https://github.com/numpy/SVML.git" + }, + { + "folder": "numpy/fft/pocketfft", + "git_rev": "33ae5dc94c9cdc7f1c78346504a85de87cadaa12", + "git_url": "https://github.com/mreineck/pocketfft.git" + }, + { + "folder": "vendored-meson/meson", + "git_rev": "0d93515fb826440d19707eee47fd92655fe2f166", + "git_url": "https://github.com/numpy/meson.git" + }, + { + "sha256": "011a9483768b14bb00c6dab1c666f6d86f95b23036eb1e4b2b65db700f98c9a3", + "url": "https://github.com/numpy/numpy/archive/refs/tags/v2.2.4.tar.gz" + }, + { + "folder": "numpy/_core/src/highway", + "git_rev": "0b696633f9ad89497dd5532b55eaa01625ad71ca", + "git_url": "https://github.com/google/highway.git" + }, + { + "folder": "numpy/_core/src/common/pythoncapi-compat", + "git_rev": "0f1d42a10a3f594ad48894912396df31b2c2d55d", + "git_url": "https://github.com/python/pythoncapi-compat.git" + }, + { + "folder": "numpy/_core/src/npysort/x86-simd-sort", + "git_rev": "9a1b616d5cd4eaf49f7664fb86ccc1d18bad2b8d", + "git_url": "https://github.com/intel/x86-simd-sort.git" + }, + { + "folder": "numpy/_core/src/umath/svml", + "git_rev": "32bf2a98420762a63ab418aaa0a7d6e17eb9627a", + "git_url": "https://github.com/numpy/SVML.git" + }, + { + "folder": "numpy/fft/pocketfft", + "git_rev": "33ae5dc94c9cdc7f1c78346504a85de87cadaa12", + "git_url": "https://github.com/mreineck/pocketfft.git" + }, + { + "folder": "vendored-meson/meson", + "git_rev": "0d93515fb826440d19707eee47fd92655fe2f166", + "git_url": "https://github.com/numpy/meson.git" + }, + { + "sha256": "011a9483768b14bb00c6dab1c666f6d86f95b23036eb1e4b2b65db700f98c9a3", + "url": "https://github.com/numpy/numpy/archive/refs/tags/v2.2.4.tar.gz" + }, + { + "folder": "numpy/_core/src/highway", + "git_rev": "0b696633f9ad89497dd5532b55eaa01625ad71ca", + "git_url": "https://github.com/google/highway.git" + }, + { + "folder": "numpy/_core/src/common/pythoncapi-compat", + "git_rev": "0f1d42a10a3f594ad48894912396df31b2c2d55d", + "git_url": "https://github.com/python/pythoncapi-compat.git" + }, + { + "folder": "numpy/_core/src/npysort/x86-simd-sort", + "git_rev": "9a1b616d5cd4eaf49f7664fb86ccc1d18bad2b8d", + "git_url": "https://github.com/intel/x86-simd-sort.git" + }, + { + "folder": "numpy/_core/src/umath/svml", + "git_rev": "32bf2a98420762a63ab418aaa0a7d6e17eb9627a", + "git_url": "https://github.com/numpy/SVML.git" + }, + { + "folder": "numpy/fft/pocketfft", + "git_rev": "33ae5dc94c9cdc7f1c78346504a85de87cadaa12", + "git_url": "https://github.com/mreineck/pocketfft.git" + }, + { + "folder": "vendored-meson/meson", + "git_rev": "0d93515fb826440d19707eee47fd92655fe2f166", + "git_url": "https://github.com/numpy/meson.git" + } + ], + "test": { + "commands": [ + "f2py -h", + "python -c \"import numpy, sys; sys.exit(not numpy.test(verbose=1, label='full', tests=None, extra_argv=['-k', 'not (_not_a_real_test or Test_ARM_Features or Test_POWER_Features or test_new_policy or (test_basic_property and float32) or (test_regression and test_gh25784) or test_einsum_sums_float32 or test_ufunc_noncontiguous[matvec] or test_accelerate_framework_sgemv_fix )', '-nauto', '--timeout=3000', '--durations=50', '--maxfail=100']))\"" + ], + "imports": [ + "numpy", + "numpy.ctypeslib", + "numpy.distutils", + "numpy.dtypes", + "numpy.exceptions", + "numpy.f2py", + "numpy.fft", + "numpy.lib", + 
"numpy.lib.format", + "numpy.lib.mixins", + "numpy.lib.recfunctions", + "numpy.lib.scimath", + "numpy.lib.stride_tricks", + "numpy.lib.npyio", + "numpy.lib.introspect", + "numpy.lib.array_utils", + "numpy.linalg", + "numpy.ma", + "numpy.ma.extras", + "numpy.ma.mrecords", + "numpy.polynomial", + "numpy.polynomial.chebyshev", + "numpy.polynomial.hermite", + "numpy.polynomial.hermite_e", + "numpy.polynomial.laguerre", + "numpy.polynomial.legendre", + "numpy.polynomial.polynomial", + "numpy.random", + "numpy.testing", + "numpy.testing.overrides", + "numpy.typing", + "numpy.typing.mypy_plugin", + "numpy.version", + "numpy.core.multiarray", + "numpy.core.numeric", + "numpy.core.umath", + "numpy.linalg.lapack_lite", + "numpy.random.mtrand" + ], + "requires": [ + "pytest", + "pytest-timeout", + "pytest-xdist", + "cython", + "hypothesis", + "meson", + "pytz", + "setuptools <60.0.0", + "typing_extensions", + "c_compiler_stub", + "cxx_compiler_stub", + "fortran_compiler_stub", + "pkg-config" + ] + } + }, + "linux_aarch64_requirements": { + "build": { + "__set__": true, + "elements": [ + "c_compiler_stub", + "c_stdlib_stub", + "cxx_compiler_stub" + ] + }, + "host": { + "__set__": true, + "elements": [ + "cython", + "libblas", + "libcblas", + "liblapack", + "meson-python", + "ninja", + "pip", + "pkg-config", + "python", + "python-build" + ] + }, + "run": { + "__set__": true, + "elements": [ + "python" + ] + }, + "test": { + "__set__": true, + "elements": [ + "c_compiler_stub", + "cxx_compiler_stub", + "cython", + "fortran_compiler_stub", + "hypothesis", + "meson", + "pkg-config", + "pytest", + "pytest-timeout", + "pytest-xdist", + "pytz", + "setuptools", + "typing_extensions" + ] + } + }, + "linux_ppc64le_meta_yaml": { + "about": { + "dev_url": "https://github.com/numpy/numpy", + "doc_url": "https://numpy.org/doc/stable/", + "home": "http://numpy.org/", + "license": "BSD-3-Clause", + "license_file": "LICENSE.txt", + "summary": "The fundamental package for scientific computing with Python." 
+ }, + "build": { + "entry_points": null, + "number": "0", + "run_exports": [ + "numpy >=1.21,<3" + ] + }, + "extra": { + "recipe-maintainers": [ + "jakirkham", + "msarahan", + "pelson", + "rgommers", + "ocefpaf", + "isuruf", + "xhochy", + "h-vetinari" + ] + }, + "package": { + "name": "numpy", + "version": "2.2.4" + }, + "requirements": { + "build": [ + "c_compiler_stub", + "c_stdlib_stub", + "cxx_compiler_stub" + ], + "host": [ + "pip", + "meson-python", + "ninja", + "pkg-config", + "python", + "python-build", + "cython", + "libblas", + "libcblas", + "liblapack" + ], + "run": [ + "python" + ], + "run_constrained": [ + "numpy-base <0a0" + ] + }, + "schema_version": 0, + "source": [ + { + "sha256": "011a9483768b14bb00c6dab1c666f6d86f95b23036eb1e4b2b65db700f98c9a3", + "url": "https://github.com/numpy/numpy/archive/refs/tags/v2.2.4.tar.gz" + }, + { + "folder": "numpy/_core/src/highway", + "git_rev": "0b696633f9ad89497dd5532b55eaa01625ad71ca", + "git_url": "https://github.com/google/highway.git" + }, + { + "folder": "numpy/_core/src/common/pythoncapi-compat", + "git_rev": "0f1d42a10a3f594ad48894912396df31b2c2d55d", + "git_url": "https://github.com/python/pythoncapi-compat.git" + }, + { + "folder": "numpy/_core/src/npysort/x86-simd-sort", + "git_rev": "9a1b616d5cd4eaf49f7664fb86ccc1d18bad2b8d", + "git_url": "https://github.com/intel/x86-simd-sort.git" + }, + { + "folder": "numpy/_core/src/umath/svml", + "git_rev": "32bf2a98420762a63ab418aaa0a7d6e17eb9627a", + "git_url": "https://github.com/numpy/SVML.git" + }, + { + "folder": "numpy/fft/pocketfft", + "git_rev": "33ae5dc94c9cdc7f1c78346504a85de87cadaa12", + "git_url": "https://github.com/mreineck/pocketfft.git" + }, + { + "folder": "vendored-meson/meson", + "git_rev": "0d93515fb826440d19707eee47fd92655fe2f166", + "git_url": "https://github.com/numpy/meson.git" + }, + { + "sha256": "011a9483768b14bb00c6dab1c666f6d86f95b23036eb1e4b2b65db700f98c9a3", + "url": "https://github.com/numpy/numpy/archive/refs/tags/v2.2.4.tar.gz" + }, + { + "folder": "numpy/_core/src/highway", + "git_rev": "0b696633f9ad89497dd5532b55eaa01625ad71ca", + "git_url": "https://github.com/google/highway.git" + }, + { + "folder": "numpy/_core/src/common/pythoncapi-compat", + "git_rev": "0f1d42a10a3f594ad48894912396df31b2c2d55d", + "git_url": "https://github.com/python/pythoncapi-compat.git" + }, + { + "folder": "numpy/_core/src/npysort/x86-simd-sort", + "git_rev": "9a1b616d5cd4eaf49f7664fb86ccc1d18bad2b8d", + "git_url": "https://github.com/intel/x86-simd-sort.git" + }, + { + "folder": "numpy/_core/src/umath/svml", + "git_rev": "32bf2a98420762a63ab418aaa0a7d6e17eb9627a", + "git_url": "https://github.com/numpy/SVML.git" + }, + { + "folder": "numpy/fft/pocketfft", + "git_rev": "33ae5dc94c9cdc7f1c78346504a85de87cadaa12", + "git_url": "https://github.com/mreineck/pocketfft.git" + }, + { + "folder": "vendored-meson/meson", + "git_rev": "0d93515fb826440d19707eee47fd92655fe2f166", + "git_url": "https://github.com/numpy/meson.git" + }, + { + "sha256": "011a9483768b14bb00c6dab1c666f6d86f95b23036eb1e4b2b65db700f98c9a3", + "url": "https://github.com/numpy/numpy/archive/refs/tags/v2.2.4.tar.gz" + }, + { + "folder": "numpy/_core/src/highway", + "git_rev": "0b696633f9ad89497dd5532b55eaa01625ad71ca", + "git_url": "https://github.com/google/highway.git" + }, + { + "folder": "numpy/_core/src/common/pythoncapi-compat", + "git_rev": "0f1d42a10a3f594ad48894912396df31b2c2d55d", + "git_url": "https://github.com/python/pythoncapi-compat.git" + }, + { + "folder": "numpy/_core/src/npysort/x86-simd-sort", 
+ "git_rev": "9a1b616d5cd4eaf49f7664fb86ccc1d18bad2b8d", + "git_url": "https://github.com/intel/x86-simd-sort.git" + }, + { + "folder": "numpy/_core/src/umath/svml", + "git_rev": "32bf2a98420762a63ab418aaa0a7d6e17eb9627a", + "git_url": "https://github.com/numpy/SVML.git" + }, + { + "folder": "numpy/fft/pocketfft", + "git_rev": "33ae5dc94c9cdc7f1c78346504a85de87cadaa12", + "git_url": "https://github.com/mreineck/pocketfft.git" + }, + { + "folder": "vendored-meson/meson", + "git_rev": "0d93515fb826440d19707eee47fd92655fe2f166", + "git_url": "https://github.com/numpy/meson.git" + }, + { + "sha256": "011a9483768b14bb00c6dab1c666f6d86f95b23036eb1e4b2b65db700f98c9a3", + "url": "https://github.com/numpy/numpy/archive/refs/tags/v2.2.4.tar.gz" + }, + { + "folder": "numpy/_core/src/highway", + "git_rev": "0b696633f9ad89497dd5532b55eaa01625ad71ca", + "git_url": "https://github.com/google/highway.git" + }, + { + "folder": "numpy/_core/src/common/pythoncapi-compat", + "git_rev": "0f1d42a10a3f594ad48894912396df31b2c2d55d", + "git_url": "https://github.com/python/pythoncapi-compat.git" + }, + { + "folder": "numpy/_core/src/npysort/x86-simd-sort", + "git_rev": "9a1b616d5cd4eaf49f7664fb86ccc1d18bad2b8d", + "git_url": "https://github.com/intel/x86-simd-sort.git" + }, + { + "folder": "numpy/_core/src/umath/svml", + "git_rev": "32bf2a98420762a63ab418aaa0a7d6e17eb9627a", + "git_url": "https://github.com/numpy/SVML.git" + }, + { + "folder": "numpy/fft/pocketfft", + "git_rev": "33ae5dc94c9cdc7f1c78346504a85de87cadaa12", + "git_url": "https://github.com/mreineck/pocketfft.git" + }, + { + "folder": "vendored-meson/meson", + "git_rev": "0d93515fb826440d19707eee47fd92655fe2f166", + "git_url": "https://github.com/numpy/meson.git" + }, + { + "sha256": "011a9483768b14bb00c6dab1c666f6d86f95b23036eb1e4b2b65db700f98c9a3", + "url": "https://github.com/numpy/numpy/archive/refs/tags/v2.2.4.tar.gz" + }, + { + "folder": "numpy/_core/src/highway", + "git_rev": "0b696633f9ad89497dd5532b55eaa01625ad71ca", + "git_url": "https://github.com/google/highway.git" + }, + { + "folder": "numpy/_core/src/common/pythoncapi-compat", + "git_rev": "0f1d42a10a3f594ad48894912396df31b2c2d55d", + "git_url": "https://github.com/python/pythoncapi-compat.git" + }, + { + "folder": "numpy/_core/src/npysort/x86-simd-sort", + "git_rev": "9a1b616d5cd4eaf49f7664fb86ccc1d18bad2b8d", + "git_url": "https://github.com/intel/x86-simd-sort.git" + }, + { + "folder": "numpy/_core/src/umath/svml", + "git_rev": "32bf2a98420762a63ab418aaa0a7d6e17eb9627a", + "git_url": "https://github.com/numpy/SVML.git" + }, + { + "folder": "numpy/fft/pocketfft", + "git_rev": "33ae5dc94c9cdc7f1c78346504a85de87cadaa12", + "git_url": "https://github.com/mreineck/pocketfft.git" + }, + { + "folder": "vendored-meson/meson", + "git_rev": "0d93515fb826440d19707eee47fd92655fe2f166", + "git_url": "https://github.com/numpy/meson.git" + } + ], + "test": { + "commands": [ + "f2py -h", + "python -c \"import numpy, sys; sys.exit(not numpy.test(verbose=1, label='full', tests=None, extra_argv=['-k', 'not (_not_a_real_test or Test_ARM_Features or Test_POWER_Features or test_new_policy or (test_basic_property and float32) or (test_regression and test_gh25784) or test_einsum_sums_float32 or test_ufunc_noncontiguous[matvec] or test_accelerate_framework_sgemv_fix )', '-nauto', '--timeout=3000', '--durations=50', '--maxfail=100']))\"" + ], + "imports": [ + "numpy", + "numpy.ctypeslib", + "numpy.distutils", + "numpy.dtypes", + "numpy.exceptions", + "numpy.f2py", + "numpy.fft", + "numpy.lib", + 
"numpy.lib.format", + "numpy.lib.mixins", + "numpy.lib.recfunctions", + "numpy.lib.scimath", + "numpy.lib.stride_tricks", + "numpy.lib.npyio", + "numpy.lib.introspect", + "numpy.lib.array_utils", + "numpy.linalg", + "numpy.ma", + "numpy.ma.extras", + "numpy.ma.mrecords", + "numpy.polynomial", + "numpy.polynomial.chebyshev", + "numpy.polynomial.hermite", + "numpy.polynomial.hermite_e", + "numpy.polynomial.laguerre", + "numpy.polynomial.legendre", + "numpy.polynomial.polynomial", + "numpy.random", + "numpy.testing", + "numpy.testing.overrides", + "numpy.typing", + "numpy.typing.mypy_plugin", + "numpy.version", + "numpy.core.multiarray", + "numpy.core.numeric", + "numpy.core.umath", + "numpy.linalg.lapack_lite", + "numpy.random.mtrand" + ], + "requires": [ + "pytest", + "pytest-timeout", + "pytest-xdist", + "cython", + "hypothesis", + "meson", + "pytz", + "setuptools <60.0.0", + "typing_extensions", + "c_compiler_stub", + "cxx_compiler_stub", + "fortran_compiler_stub", + "pkg-config" + ] + } + }, + "linux_ppc64le_requirements": { + "build": { + "__set__": true, + "elements": [ + "c_compiler_stub", + "c_stdlib_stub", + "cxx_compiler_stub" + ] + }, + "host": { + "__set__": true, + "elements": [ + "cython", + "libblas", + "libcblas", + "liblapack", + "meson-python", + "ninja", + "pip", + "pkg-config", + "python", + "python-build" + ] + }, + "run": { + "__set__": true, + "elements": [ + "python" + ] + }, + "test": { + "__set__": true, + "elements": [ + "c_compiler_stub", + "cxx_compiler_stub", + "cython", + "fortran_compiler_stub", + "hypothesis", + "meson", + "pkg-config", + "pytest", + "pytest-timeout", + "pytest-xdist", + "pytz", + "setuptools", + "typing_extensions" + ] + } + }, + "meta_yaml": { + "about": { + "dev_url": "https://github.com/numpy/numpy", + "doc_url": "https://numpy.org/doc/stable/", + "home": "http://numpy.org/", + "license": "BSD-3-Clause", + "license_file": "LICENSE.txt", + "summary": "The fundamental package for scientific computing with Python." 
+ }, + "build": { + "entry_points": [ + "f2py = numpy.f2py.f2py2e:main", + "numpy-config = numpy._configtool:main" + ], + "number": "0", + "run_exports": [ + "numpy >=1.21,<3" + ] + }, + "extra": { + "recipe-maintainers": [ + "jakirkham", + "msarahan", + "pelson", + "rgommers", + "ocefpaf", + "isuruf", + "xhochy", + "h-vetinari" + ] + }, + "package": { + "name": "numpy", + "version": "2.2.4" + }, + "requirements": { + "build": [ + "c_compiler_stub", + "c_stdlib_stub", + "cxx_compiler_stub" + ], + "host": [ + "pip", + "meson-python", + "ninja", + "pkg-config", + "python", + "python-build", + "cython", + "libblas", + "libcblas", + "liblapack" + ], + "run": [ + "python" + ], + "run_constrained": [ + "numpy-base <0a0" + ] + }, + "schema_version": 0, + "source": [ + { + "sha256": "011a9483768b14bb00c6dab1c666f6d86f95b23036eb1e4b2b65db700f98c9a3", + "url": "https://github.com/numpy/numpy/archive/refs/tags/v2.2.4.tar.gz" + }, + { + "folder": "numpy/_core/src/highway", + "git_rev": "0b696633f9ad89497dd5532b55eaa01625ad71ca", + "git_url": "https://github.com/google/highway.git" + }, + { + "folder": "numpy/_core/src/common/pythoncapi-compat", + "git_rev": "0f1d42a10a3f594ad48894912396df31b2c2d55d", + "git_url": "https://github.com/python/pythoncapi-compat.git" + }, + { + "folder": "numpy/_core/src/npysort/x86-simd-sort", + "git_rev": "9a1b616d5cd4eaf49f7664fb86ccc1d18bad2b8d", + "git_url": "https://github.com/intel/x86-simd-sort.git" + }, + { + "folder": "numpy/_core/src/umath/svml", + "git_rev": "32bf2a98420762a63ab418aaa0a7d6e17eb9627a", + "git_url": "https://github.com/numpy/SVML.git" + }, + { + "folder": "numpy/fft/pocketfft", + "git_rev": "33ae5dc94c9cdc7f1c78346504a85de87cadaa12", + "git_url": "https://github.com/mreineck/pocketfft.git" + }, + { + "folder": "vendored-meson/meson", + "git_rev": "0d93515fb826440d19707eee47fd92655fe2f166", + "git_url": "https://github.com/numpy/meson.git" + }, + { + "sha256": "011a9483768b14bb00c6dab1c666f6d86f95b23036eb1e4b2b65db700f98c9a3", + "url": "https://github.com/numpy/numpy/archive/refs/tags/v2.2.4.tar.gz" + }, + { + "folder": "numpy/_core/src/highway", + "git_rev": "0b696633f9ad89497dd5532b55eaa01625ad71ca", + "git_url": "https://github.com/google/highway.git" + }, + { + "folder": "numpy/_core/src/common/pythoncapi-compat", + "git_rev": "0f1d42a10a3f594ad48894912396df31b2c2d55d", + "git_url": "https://github.com/python/pythoncapi-compat.git" + }, + { + "folder": "numpy/_core/src/npysort/x86-simd-sort", + "git_rev": "9a1b616d5cd4eaf49f7664fb86ccc1d18bad2b8d", + "git_url": "https://github.com/intel/x86-simd-sort.git" + }, + { + "folder": "numpy/_core/src/umath/svml", + "git_rev": "32bf2a98420762a63ab418aaa0a7d6e17eb9627a", + "git_url": "https://github.com/numpy/SVML.git" + }, + { + "folder": "numpy/fft/pocketfft", + "git_rev": "33ae5dc94c9cdc7f1c78346504a85de87cadaa12", + "git_url": "https://github.com/mreineck/pocketfft.git" + }, + { + "folder": "vendored-meson/meson", + "git_rev": "0d93515fb826440d19707eee47fd92655fe2f166", + "git_url": "https://github.com/numpy/meson.git" + }, + { + "sha256": "011a9483768b14bb00c6dab1c666f6d86f95b23036eb1e4b2b65db700f98c9a3", + "url": "https://github.com/numpy/numpy/archive/refs/tags/v2.2.4.tar.gz" + }, + { + "folder": "numpy/_core/src/highway", + "git_rev": "0b696633f9ad89497dd5532b55eaa01625ad71ca", + "git_url": "https://github.com/google/highway.git" + }, + { + "folder": "numpy/_core/src/common/pythoncapi-compat", + "git_rev": "0f1d42a10a3f594ad48894912396df31b2c2d55d", + "git_url": 
"https://github.com/python/pythoncapi-compat.git" + }, + { + "folder": "numpy/_core/src/npysort/x86-simd-sort", + "git_rev": "9a1b616d5cd4eaf49f7664fb86ccc1d18bad2b8d", + "git_url": "https://github.com/intel/x86-simd-sort.git" + }, + { + "folder": "numpy/_core/src/umath/svml", + "git_rev": "32bf2a98420762a63ab418aaa0a7d6e17eb9627a", + "git_url": "https://github.com/numpy/SVML.git" + }, + { + "folder": "numpy/fft/pocketfft", + "git_rev": "33ae5dc94c9cdc7f1c78346504a85de87cadaa12", + "git_url": "https://github.com/mreineck/pocketfft.git" + }, + { + "folder": "vendored-meson/meson", + "git_rev": "0d93515fb826440d19707eee47fd92655fe2f166", + "git_url": "https://github.com/numpy/meson.git" + }, + { + "sha256": "011a9483768b14bb00c6dab1c666f6d86f95b23036eb1e4b2b65db700f98c9a3", + "url": "https://github.com/numpy/numpy/archive/refs/tags/v2.2.4.tar.gz" + }, + { + "folder": "numpy/_core/src/highway", + "git_rev": "0b696633f9ad89497dd5532b55eaa01625ad71ca", + "git_url": "https://github.com/google/highway.git" + }, + { + "folder": "numpy/_core/src/common/pythoncapi-compat", + "git_rev": "0f1d42a10a3f594ad48894912396df31b2c2d55d", + "git_url": "https://github.com/python/pythoncapi-compat.git" + }, + { + "folder": "numpy/_core/src/npysort/x86-simd-sort", + "git_rev": "9a1b616d5cd4eaf49f7664fb86ccc1d18bad2b8d", + "git_url": "https://github.com/intel/x86-simd-sort.git" + }, + { + "folder": "numpy/_core/src/umath/svml", + "git_rev": "32bf2a98420762a63ab418aaa0a7d6e17eb9627a", + "git_url": "https://github.com/numpy/SVML.git" + }, + { + "folder": "numpy/fft/pocketfft", + "git_rev": "33ae5dc94c9cdc7f1c78346504a85de87cadaa12", + "git_url": "https://github.com/mreineck/pocketfft.git" + }, + { + "folder": "vendored-meson/meson", + "git_rev": "0d93515fb826440d19707eee47fd92655fe2f166", + "git_url": "https://github.com/numpy/meson.git" + }, + { + "sha256": "011a9483768b14bb00c6dab1c666f6d86f95b23036eb1e4b2b65db700f98c9a3", + "url": "https://github.com/numpy/numpy/archive/refs/tags/v2.2.4.tar.gz" + }, + { + "folder": "numpy/_core/src/highway", + "git_rev": "0b696633f9ad89497dd5532b55eaa01625ad71ca", + "git_url": "https://github.com/google/highway.git" + }, + { + "folder": "numpy/_core/src/common/pythoncapi-compat", + "git_rev": "0f1d42a10a3f594ad48894912396df31b2c2d55d", + "git_url": "https://github.com/python/pythoncapi-compat.git" + }, + { + "folder": "numpy/_core/src/npysort/x86-simd-sort", + "git_rev": "9a1b616d5cd4eaf49f7664fb86ccc1d18bad2b8d", + "git_url": "https://github.com/intel/x86-simd-sort.git" + }, + { + "folder": "numpy/_core/src/umath/svml", + "git_rev": "32bf2a98420762a63ab418aaa0a7d6e17eb9627a", + "git_url": "https://github.com/numpy/SVML.git" + }, + { + "folder": "numpy/fft/pocketfft", + "git_rev": "33ae5dc94c9cdc7f1c78346504a85de87cadaa12", + "git_url": "https://github.com/mreineck/pocketfft.git" + }, + { + "folder": "vendored-meson/meson", + "git_rev": "0d93515fb826440d19707eee47fd92655fe2f166", + "git_url": "https://github.com/numpy/meson.git" + }, + { + "sha256": "011a9483768b14bb00c6dab1c666f6d86f95b23036eb1e4b2b65db700f98c9a3", + "url": "https://github.com/numpy/numpy/archive/refs/tags/v2.2.4.tar.gz" + }, + { + "folder": "numpy/_core/src/highway", + "git_rev": "0b696633f9ad89497dd5532b55eaa01625ad71ca", + "git_url": "https://github.com/google/highway.git" + }, + { + "folder": "numpy/_core/src/common/pythoncapi-compat", + "git_rev": "0f1d42a10a3f594ad48894912396df31b2c2d55d", + "git_url": "https://github.com/python/pythoncapi-compat.git" + }, + { + "folder": 
"numpy/_core/src/npysort/x86-simd-sort", + "git_rev": "9a1b616d5cd4eaf49f7664fb86ccc1d18bad2b8d", + "git_url": "https://github.com/intel/x86-simd-sort.git" + }, + { + "folder": "numpy/_core/src/umath/svml", + "git_rev": "32bf2a98420762a63ab418aaa0a7d6e17eb9627a", + "git_url": "https://github.com/numpy/SVML.git" + }, + { + "folder": "numpy/fft/pocketfft", + "git_rev": "33ae5dc94c9cdc7f1c78346504a85de87cadaa12", + "git_url": "https://github.com/mreineck/pocketfft.git" + }, + { + "folder": "vendored-meson/meson", + "git_rev": "0d93515fb826440d19707eee47fd92655fe2f166", + "git_url": "https://github.com/numpy/meson.git" + }, + { + "sha256": "011a9483768b14bb00c6dab1c666f6d86f95b23036eb1e4b2b65db700f98c9a3", + "url": "https://github.com/numpy/numpy/archive/refs/tags/v2.2.4.tar.gz" + }, + { + "folder": "numpy/_core/src/highway", + "git_rev": "0b696633f9ad89497dd5532b55eaa01625ad71ca", + "git_url": "https://github.com/google/highway.git" + }, + { + "folder": "numpy/_core/src/common/pythoncapi-compat", + "git_rev": "0f1d42a10a3f594ad48894912396df31b2c2d55d", + "git_url": "https://github.com/python/pythoncapi-compat.git" + }, + { + "folder": "numpy/_core/src/npysort/x86-simd-sort", + "git_rev": "9a1b616d5cd4eaf49f7664fb86ccc1d18bad2b8d", + "git_url": "https://github.com/intel/x86-simd-sort.git" + }, + { + "folder": "numpy/_core/src/umath/svml", + "git_rev": "32bf2a98420762a63ab418aaa0a7d6e17eb9627a", + "git_url": "https://github.com/numpy/SVML.git" + }, + { + "folder": "numpy/fft/pocketfft", + "git_rev": "33ae5dc94c9cdc7f1c78346504a85de87cadaa12", + "git_url": "https://github.com/mreineck/pocketfft.git" + }, + { + "folder": "vendored-meson/meson", + "git_rev": "0d93515fb826440d19707eee47fd92655fe2f166", + "git_url": "https://github.com/numpy/meson.git" + }, + { + "sha256": "011a9483768b14bb00c6dab1c666f6d86f95b23036eb1e4b2b65db700f98c9a3", + "url": "https://github.com/numpy/numpy/archive/refs/tags/v2.2.4.tar.gz" + }, + { + "folder": "numpy/_core/src/highway", + "git_rev": "0b696633f9ad89497dd5532b55eaa01625ad71ca", + "git_url": "https://github.com/google/highway.git" + }, + { + "folder": "numpy/_core/src/common/pythoncapi-compat", + "git_rev": "0f1d42a10a3f594ad48894912396df31b2c2d55d", + "git_url": "https://github.com/python/pythoncapi-compat.git" + }, + { + "folder": "numpy/_core/src/npysort/x86-simd-sort", + "git_rev": "9a1b616d5cd4eaf49f7664fb86ccc1d18bad2b8d", + "git_url": "https://github.com/intel/x86-simd-sort.git" + }, + { + "folder": "numpy/_core/src/umath/svml", + "git_rev": "32bf2a98420762a63ab418aaa0a7d6e17eb9627a", + "git_url": "https://github.com/numpy/SVML.git" + }, + { + "folder": "numpy/fft/pocketfft", + "git_rev": "33ae5dc94c9cdc7f1c78346504a85de87cadaa12", + "git_url": "https://github.com/mreineck/pocketfft.git" + }, + { + "folder": "vendored-meson/meson", + "git_rev": "0d93515fb826440d19707eee47fd92655fe2f166", + "git_url": "https://github.com/numpy/meson.git" + }, + { + "sha256": "011a9483768b14bb00c6dab1c666f6d86f95b23036eb1e4b2b65db700f98c9a3", + "url": "https://github.com/numpy/numpy/archive/refs/tags/v2.2.4.tar.gz" + }, + { + "folder": "numpy/_core/src/highway", + "git_rev": "0b696633f9ad89497dd5532b55eaa01625ad71ca", + "git_url": "https://github.com/google/highway.git" + }, + { + "folder": "numpy/_core/src/common/pythoncapi-compat", + "git_rev": "0f1d42a10a3f594ad48894912396df31b2c2d55d", + "git_url": "https://github.com/python/pythoncapi-compat.git" + }, + { + "folder": "numpy/_core/src/npysort/x86-simd-sort", + "git_rev": "9a1b616d5cd4eaf49f7664fb86ccc1d18bad2b8d", + 
"git_url": "https://github.com/intel/x86-simd-sort.git" + }, + { + "folder": "numpy/_core/src/umath/svml", + "git_rev": "32bf2a98420762a63ab418aaa0a7d6e17eb9627a", + "git_url": "https://github.com/numpy/SVML.git" + }, + { + "folder": "numpy/fft/pocketfft", + "git_rev": "33ae5dc94c9cdc7f1c78346504a85de87cadaa12", + "git_url": "https://github.com/mreineck/pocketfft.git" + }, + { + "folder": "vendored-meson/meson", + "git_rev": "0d93515fb826440d19707eee47fd92655fe2f166", + "git_url": "https://github.com/numpy/meson.git" + }, + { + "sha256": "011a9483768b14bb00c6dab1c666f6d86f95b23036eb1e4b2b65db700f98c9a3", + "url": "https://github.com/numpy/numpy/archive/refs/tags/v2.2.4.tar.gz" + }, + { + "folder": "numpy/_core/src/highway", + "git_rev": "0b696633f9ad89497dd5532b55eaa01625ad71ca", + "git_url": "https://github.com/google/highway.git" + }, + { + "folder": "numpy/_core/src/common/pythoncapi-compat", + "git_rev": "0f1d42a10a3f594ad48894912396df31b2c2d55d", + "git_url": "https://github.com/python/pythoncapi-compat.git" + }, + { + "folder": "numpy/_core/src/npysort/x86-simd-sort", + "git_rev": "9a1b616d5cd4eaf49f7664fb86ccc1d18bad2b8d", + "git_url": "https://github.com/intel/x86-simd-sort.git" + }, + { + "folder": "numpy/_core/src/umath/svml", + "git_rev": "32bf2a98420762a63ab418aaa0a7d6e17eb9627a", + "git_url": "https://github.com/numpy/SVML.git" + }, + { + "folder": "numpy/fft/pocketfft", + "git_rev": "33ae5dc94c9cdc7f1c78346504a85de87cadaa12", + "git_url": "https://github.com/mreineck/pocketfft.git" + }, + { + "folder": "vendored-meson/meson", + "git_rev": "0d93515fb826440d19707eee47fd92655fe2f166", + "git_url": "https://github.com/numpy/meson.git" + }, + { + "sha256": "011a9483768b14bb00c6dab1c666f6d86f95b23036eb1e4b2b65db700f98c9a3", + "url": "https://github.com/numpy/numpy/archive/refs/tags/v2.2.4.tar.gz" + }, + { + "folder": "numpy/_core/src/highway", + "git_rev": "0b696633f9ad89497dd5532b55eaa01625ad71ca", + "git_url": "https://github.com/google/highway.git" + }, + { + "folder": "numpy/_core/src/common/pythoncapi-compat", + "git_rev": "0f1d42a10a3f594ad48894912396df31b2c2d55d", + "git_url": "https://github.com/python/pythoncapi-compat.git" + }, + { + "folder": "numpy/_core/src/npysort/x86-simd-sort", + "git_rev": "9a1b616d5cd4eaf49f7664fb86ccc1d18bad2b8d", + "git_url": "https://github.com/intel/x86-simd-sort.git" + }, + { + "folder": "numpy/_core/src/umath/svml", + "git_rev": "32bf2a98420762a63ab418aaa0a7d6e17eb9627a", + "git_url": "https://github.com/numpy/SVML.git" + }, + { + "folder": "numpy/fft/pocketfft", + "git_rev": "33ae5dc94c9cdc7f1c78346504a85de87cadaa12", + "git_url": "https://github.com/mreineck/pocketfft.git" + }, + { + "folder": "vendored-meson/meson", + "git_rev": "0d93515fb826440d19707eee47fd92655fe2f166", + "git_url": "https://github.com/numpy/meson.git" + }, + { + "sha256": "011a9483768b14bb00c6dab1c666f6d86f95b23036eb1e4b2b65db700f98c9a3", + "url": "https://github.com/numpy/numpy/archive/refs/tags/v2.2.4.tar.gz" + }, + { + "folder": "numpy/_core/src/highway", + "git_rev": "0b696633f9ad89497dd5532b55eaa01625ad71ca", + "git_url": "https://github.com/google/highway.git" + }, + { + "folder": "numpy/_core/src/common/pythoncapi-compat", + "git_rev": "0f1d42a10a3f594ad48894912396df31b2c2d55d", + "git_url": "https://github.com/python/pythoncapi-compat.git" + }, + { + "folder": "numpy/_core/src/npysort/x86-simd-sort", + "git_rev": "9a1b616d5cd4eaf49f7664fb86ccc1d18bad2b8d", + "git_url": "https://github.com/intel/x86-simd-sort.git" + }, + { + "folder": 
"numpy/_core/src/umath/svml", + "git_rev": "32bf2a98420762a63ab418aaa0a7d6e17eb9627a", + "git_url": "https://github.com/numpy/SVML.git" + }, + { + "folder": "numpy/fft/pocketfft", + "git_rev": "33ae5dc94c9cdc7f1c78346504a85de87cadaa12", + "git_url": "https://github.com/mreineck/pocketfft.git" + }, + { + "folder": "vendored-meson/meson", + "git_rev": "0d93515fb826440d19707eee47fd92655fe2f166", + "git_url": "https://github.com/numpy/meson.git" + }, + { + "sha256": "011a9483768b14bb00c6dab1c666f6d86f95b23036eb1e4b2b65db700f98c9a3", + "url": "https://github.com/numpy/numpy/archive/refs/tags/v2.2.4.tar.gz" + }, + { + "folder": "numpy/_core/src/highway", + "git_rev": "0b696633f9ad89497dd5532b55eaa01625ad71ca", + "git_url": "https://github.com/google/highway.git" + }, + { + "folder": "numpy/_core/src/common/pythoncapi-compat", + "git_rev": "0f1d42a10a3f594ad48894912396df31b2c2d55d", + "git_url": "https://github.com/python/pythoncapi-compat.git" + }, + { + "folder": "numpy/_core/src/npysort/x86-simd-sort", + "git_rev": "9a1b616d5cd4eaf49f7664fb86ccc1d18bad2b8d", + "git_url": "https://github.com/intel/x86-simd-sort.git" + }, + { + "folder": "numpy/_core/src/umath/svml", + "git_rev": "32bf2a98420762a63ab418aaa0a7d6e17eb9627a", + "git_url": "https://github.com/numpy/SVML.git" + }, + { + "folder": "numpy/fft/pocketfft", + "git_rev": "33ae5dc94c9cdc7f1c78346504a85de87cadaa12", + "git_url": "https://github.com/mreineck/pocketfft.git" + }, + { + "folder": "vendored-meson/meson", + "git_rev": "0d93515fb826440d19707eee47fd92655fe2f166", + "git_url": "https://github.com/numpy/meson.git" + }, + { + "sha256": "011a9483768b14bb00c6dab1c666f6d86f95b23036eb1e4b2b65db700f98c9a3", + "url": "https://github.com/numpy/numpy/archive/refs/tags/v2.2.4.tar.gz" + }, + { + "folder": "numpy/_core/src/highway", + "git_rev": "0b696633f9ad89497dd5532b55eaa01625ad71ca", + "git_url": "https://github.com/google/highway.git" + }, + { + "folder": "numpy/_core/src/common/pythoncapi-compat", + "git_rev": "0f1d42a10a3f594ad48894912396df31b2c2d55d", + "git_url": "https://github.com/python/pythoncapi-compat.git" + }, + { + "folder": "numpy/_core/src/npysort/x86-simd-sort", + "git_rev": "9a1b616d5cd4eaf49f7664fb86ccc1d18bad2b8d", + "git_url": "https://github.com/intel/x86-simd-sort.git" + }, + { + "folder": "numpy/_core/src/umath/svml", + "git_rev": "32bf2a98420762a63ab418aaa0a7d6e17eb9627a", + "git_url": "https://github.com/numpy/SVML.git" + }, + { + "folder": "numpy/fft/pocketfft", + "git_rev": "33ae5dc94c9cdc7f1c78346504a85de87cadaa12", + "git_url": "https://github.com/mreineck/pocketfft.git" + }, + { + "folder": "vendored-meson/meson", + "git_rev": "0d93515fb826440d19707eee47fd92655fe2f166", + "git_url": "https://github.com/numpy/meson.git" + }, + { + "sha256": "011a9483768b14bb00c6dab1c666f6d86f95b23036eb1e4b2b65db700f98c9a3", + "url": "https://github.com/numpy/numpy/archive/refs/tags/v2.2.4.tar.gz" + }, + { + "folder": "numpy/_core/src/highway", + "git_rev": "0b696633f9ad89497dd5532b55eaa01625ad71ca", + "git_url": "https://github.com/google/highway.git" + }, + { + "folder": "numpy/_core/src/common/pythoncapi-compat", + "git_rev": "0f1d42a10a3f594ad48894912396df31b2c2d55d", + "git_url": "https://github.com/python/pythoncapi-compat.git" + }, + { + "folder": "numpy/_core/src/npysort/x86-simd-sort", + "git_rev": "9a1b616d5cd4eaf49f7664fb86ccc1d18bad2b8d", + "git_url": "https://github.com/intel/x86-simd-sort.git" + }, + { + "folder": "numpy/_core/src/umath/svml", + "git_rev": "32bf2a98420762a63ab418aaa0a7d6e17eb9627a", + "git_url": 
"https://github.com/numpy/SVML.git" + }, + { + "folder": "numpy/fft/pocketfft", + "git_rev": "33ae5dc94c9cdc7f1c78346504a85de87cadaa12", + "git_url": "https://github.com/mreineck/pocketfft.git" + }, + { + "folder": "vendored-meson/meson", + "git_rev": "0d93515fb826440d19707eee47fd92655fe2f166", + "git_url": "https://github.com/numpy/meson.git" + }, + { + "sha256": "011a9483768b14bb00c6dab1c666f6d86f95b23036eb1e4b2b65db700f98c9a3", + "url": "https://github.com/numpy/numpy/archive/refs/tags/v2.2.4.tar.gz" + }, + { + "folder": "numpy/_core/src/highway", + "git_rev": "0b696633f9ad89497dd5532b55eaa01625ad71ca", + "git_url": "https://github.com/google/highway.git" + }, + { + "folder": "numpy/_core/src/common/pythoncapi-compat", + "git_rev": "0f1d42a10a3f594ad48894912396df31b2c2d55d", + "git_url": "https://github.com/python/pythoncapi-compat.git" + }, + { + "folder": "numpy/_core/src/npysort/x86-simd-sort", + "git_rev": "9a1b616d5cd4eaf49f7664fb86ccc1d18bad2b8d", + "git_url": "https://github.com/intel/x86-simd-sort.git" + }, + { + "folder": "numpy/_core/src/umath/svml", + "git_rev": "32bf2a98420762a63ab418aaa0a7d6e17eb9627a", + "git_url": "https://github.com/numpy/SVML.git" + }, + { + "folder": "numpy/fft/pocketfft", + "git_rev": "33ae5dc94c9cdc7f1c78346504a85de87cadaa12", + "git_url": "https://github.com/mreineck/pocketfft.git" + }, + { + "folder": "vendored-meson/meson", + "git_rev": "0d93515fb826440d19707eee47fd92655fe2f166", + "git_url": "https://github.com/numpy/meson.git" + }, + { + "sha256": "011a9483768b14bb00c6dab1c666f6d86f95b23036eb1e4b2b65db700f98c9a3", + "url": "https://github.com/numpy/numpy/archive/refs/tags/v2.2.4.tar.gz" + }, + { + "folder": "numpy/_core/src/highway", + "git_rev": "0b696633f9ad89497dd5532b55eaa01625ad71ca", + "git_url": "https://github.com/google/highway.git" + }, + { + "folder": "numpy/_core/src/common/pythoncapi-compat", + "git_rev": "0f1d42a10a3f594ad48894912396df31b2c2d55d", + "git_url": "https://github.com/python/pythoncapi-compat.git" + }, + { + "folder": "numpy/_core/src/npysort/x86-simd-sort", + "git_rev": "9a1b616d5cd4eaf49f7664fb86ccc1d18bad2b8d", + "git_url": "https://github.com/intel/x86-simd-sort.git" + }, + { + "folder": "numpy/_core/src/umath/svml", + "git_rev": "32bf2a98420762a63ab418aaa0a7d6e17eb9627a", + "git_url": "https://github.com/numpy/SVML.git" + }, + { + "folder": "numpy/fft/pocketfft", + "git_rev": "33ae5dc94c9cdc7f1c78346504a85de87cadaa12", + "git_url": "https://github.com/mreineck/pocketfft.git" + }, + { + "folder": "vendored-meson/meson", + "git_rev": "0d93515fb826440d19707eee47fd92655fe2f166", + "git_url": "https://github.com/numpy/meson.git" + }, + { + "sha256": "011a9483768b14bb00c6dab1c666f6d86f95b23036eb1e4b2b65db700f98c9a3", + "url": "https://github.com/numpy/numpy/archive/refs/tags/v2.2.4.tar.gz" + }, + { + "folder": "numpy/_core/src/highway", + "git_rev": "0b696633f9ad89497dd5532b55eaa01625ad71ca", + "git_url": "https://github.com/google/highway.git" + }, + { + "folder": "numpy/_core/src/common/pythoncapi-compat", + "git_rev": "0f1d42a10a3f594ad48894912396df31b2c2d55d", + "git_url": "https://github.com/python/pythoncapi-compat.git" + }, + { + "folder": "numpy/_core/src/npysort/x86-simd-sort", + "git_rev": "9a1b616d5cd4eaf49f7664fb86ccc1d18bad2b8d", + "git_url": "https://github.com/intel/x86-simd-sort.git" + }, + { + "folder": "numpy/_core/src/umath/svml", + "git_rev": "32bf2a98420762a63ab418aaa0a7d6e17eb9627a", + "git_url": "https://github.com/numpy/SVML.git" + }, + { + "folder": "numpy/fft/pocketfft", + "git_rev": 
"33ae5dc94c9cdc7f1c78346504a85de87cadaa12", + "git_url": "https://github.com/mreineck/pocketfft.git" + }, + { + "folder": "vendored-meson/meson", + "git_rev": "0d93515fb826440d19707eee47fd92655fe2f166", + "git_url": "https://github.com/numpy/meson.git" + }, + { + "sha256": "011a9483768b14bb00c6dab1c666f6d86f95b23036eb1e4b2b65db700f98c9a3", + "url": "https://github.com/numpy/numpy/archive/refs/tags/v2.2.4.tar.gz" + }, + { + "folder": "numpy/_core/src/highway", + "git_rev": "0b696633f9ad89497dd5532b55eaa01625ad71ca", + "git_url": "https://github.com/google/highway.git" + }, + { + "folder": "numpy/_core/src/common/pythoncapi-compat", + "git_rev": "0f1d42a10a3f594ad48894912396df31b2c2d55d", + "git_url": "https://github.com/python/pythoncapi-compat.git" + }, + { + "folder": "numpy/_core/src/npysort/x86-simd-sort", + "git_rev": "9a1b616d5cd4eaf49f7664fb86ccc1d18bad2b8d", + "git_url": "https://github.com/intel/x86-simd-sort.git" + }, + { + "folder": "numpy/_core/src/umath/svml", + "git_rev": "32bf2a98420762a63ab418aaa0a7d6e17eb9627a", + "git_url": "https://github.com/numpy/SVML.git" + }, + { + "folder": "numpy/fft/pocketfft", + "git_rev": "33ae5dc94c9cdc7f1c78346504a85de87cadaa12", + "git_url": "https://github.com/mreineck/pocketfft.git" + }, + { + "folder": "vendored-meson/meson", + "git_rev": "0d93515fb826440d19707eee47fd92655fe2f166", + "git_url": "https://github.com/numpy/meson.git" + }, + { + "sha256": "011a9483768b14bb00c6dab1c666f6d86f95b23036eb1e4b2b65db700f98c9a3", + "url": "https://github.com/numpy/numpy/archive/refs/tags/v2.2.4.tar.gz" + }, + { + "folder": "numpy/_core/src/highway", + "git_rev": "0b696633f9ad89497dd5532b55eaa01625ad71ca", + "git_url": "https://github.com/google/highway.git" + }, + { + "folder": "numpy/_core/src/common/pythoncapi-compat", + "git_rev": "0f1d42a10a3f594ad48894912396df31b2c2d55d", + "git_url": "https://github.com/python/pythoncapi-compat.git" + }, + { + "folder": "numpy/_core/src/npysort/x86-simd-sort", + "git_rev": "9a1b616d5cd4eaf49f7664fb86ccc1d18bad2b8d", + "git_url": "https://github.com/intel/x86-simd-sort.git" + }, + { + "folder": "numpy/_core/src/umath/svml", + "git_rev": "32bf2a98420762a63ab418aaa0a7d6e17eb9627a", + "git_url": "https://github.com/numpy/SVML.git" + }, + { + "folder": "numpy/fft/pocketfft", + "git_rev": "33ae5dc94c9cdc7f1c78346504a85de87cadaa12", + "git_url": "https://github.com/mreineck/pocketfft.git" + }, + { + "folder": "vendored-meson/meson", + "git_rev": "0d93515fb826440d19707eee47fd92655fe2f166", + "git_url": "https://github.com/numpy/meson.git" + }, + { + "sha256": "011a9483768b14bb00c6dab1c666f6d86f95b23036eb1e4b2b65db700f98c9a3", + "url": "https://github.com/numpy/numpy/archive/refs/tags/v2.2.4.tar.gz" + }, + { + "folder": "numpy/_core/src/highway", + "git_rev": "0b696633f9ad89497dd5532b55eaa01625ad71ca", + "git_url": "https://github.com/google/highway.git" + }, + { + "folder": "numpy/_core/src/common/pythoncapi-compat", + "git_rev": "0f1d42a10a3f594ad48894912396df31b2c2d55d", + "git_url": "https://github.com/python/pythoncapi-compat.git" + }, + { + "folder": "numpy/_core/src/npysort/x86-simd-sort", + "git_rev": "9a1b616d5cd4eaf49f7664fb86ccc1d18bad2b8d", + "git_url": "https://github.com/intel/x86-simd-sort.git" + }, + { + "folder": "numpy/_core/src/umath/svml", + "git_rev": "32bf2a98420762a63ab418aaa0a7d6e17eb9627a", + "git_url": "https://github.com/numpy/SVML.git" + }, + { + "folder": "numpy/fft/pocketfft", + "git_rev": "33ae5dc94c9cdc7f1c78346504a85de87cadaa12", + "git_url": "https://github.com/mreineck/pocketfft.git" + 
}, + { + "folder": "vendored-meson/meson", + "git_rev": "0d93515fb826440d19707eee47fd92655fe2f166", + "git_url": "https://github.com/numpy/meson.git" + }, + { + "sha256": "011a9483768b14bb00c6dab1c666f6d86f95b23036eb1e4b2b65db700f98c9a3", + "url": "https://github.com/numpy/numpy/archive/refs/tags/v2.2.4.tar.gz" + }, + { + "folder": "numpy/_core/src/highway", + "git_rev": "0b696633f9ad89497dd5532b55eaa01625ad71ca", + "git_url": "https://github.com/google/highway.git" + }, + { + "folder": "numpy/_core/src/common/pythoncapi-compat", + "git_rev": "0f1d42a10a3f594ad48894912396df31b2c2d55d", + "git_url": "https://github.com/python/pythoncapi-compat.git" + }, + { + "folder": "numpy/_core/src/npysort/x86-simd-sort", + "git_rev": "9a1b616d5cd4eaf49f7664fb86ccc1d18bad2b8d", + "git_url": "https://github.com/intel/x86-simd-sort.git" + }, + { + "folder": "numpy/_core/src/umath/svml", + "git_rev": "32bf2a98420762a63ab418aaa0a7d6e17eb9627a", + "git_url": "https://github.com/numpy/SVML.git" + }, + { + "folder": "numpy/fft/pocketfft", + "git_rev": "33ae5dc94c9cdc7f1c78346504a85de87cadaa12", + "git_url": "https://github.com/mreineck/pocketfft.git" + }, + { + "folder": "vendored-meson/meson", + "git_rev": "0d93515fb826440d19707eee47fd92655fe2f166", + "git_url": "https://github.com/numpy/meson.git" + }, + { + "sha256": "011a9483768b14bb00c6dab1c666f6d86f95b23036eb1e4b2b65db700f98c9a3", + "url": "https://github.com/numpy/numpy/archive/refs/tags/v2.2.4.tar.gz" + }, + { + "folder": "numpy/_core/src/highway", + "git_rev": "0b696633f9ad89497dd5532b55eaa01625ad71ca", + "git_url": "https://github.com/google/highway.git" + }, + { + "folder": "numpy/_core/src/common/pythoncapi-compat", + "git_rev": "0f1d42a10a3f594ad48894912396df31b2c2d55d", + "git_url": "https://github.com/python/pythoncapi-compat.git" + }, + { + "folder": "numpy/_core/src/npysort/x86-simd-sort", + "git_rev": "9a1b616d5cd4eaf49f7664fb86ccc1d18bad2b8d", + "git_url": "https://github.com/intel/x86-simd-sort.git" + }, + { + "folder": "numpy/_core/src/umath/svml", + "git_rev": "32bf2a98420762a63ab418aaa0a7d6e17eb9627a", + "git_url": "https://github.com/numpy/SVML.git" + }, + { + "folder": "numpy/fft/pocketfft", + "git_rev": "33ae5dc94c9cdc7f1c78346504a85de87cadaa12", + "git_url": "https://github.com/mreineck/pocketfft.git" + }, + { + "folder": "vendored-meson/meson", + "git_rev": "0d93515fb826440d19707eee47fd92655fe2f166", + "git_url": "https://github.com/numpy/meson.git" + }, + { + "sha256": "011a9483768b14bb00c6dab1c666f6d86f95b23036eb1e4b2b65db700f98c9a3", + "url": "https://github.com/numpy/numpy/archive/refs/tags/v2.2.4.tar.gz" + }, + { + "folder": "numpy/_core/src/highway", + "git_rev": "0b696633f9ad89497dd5532b55eaa01625ad71ca", + "git_url": "https://github.com/google/highway.git" + }, + { + "folder": "numpy/_core/src/common/pythoncapi-compat", + "git_rev": "0f1d42a10a3f594ad48894912396df31b2c2d55d", + "git_url": "https://github.com/python/pythoncapi-compat.git" + }, + { + "folder": "numpy/_core/src/npysort/x86-simd-sort", + "git_rev": "9a1b616d5cd4eaf49f7664fb86ccc1d18bad2b8d", + "git_url": "https://github.com/intel/x86-simd-sort.git" + }, + { + "folder": "numpy/_core/src/umath/svml", + "git_rev": "32bf2a98420762a63ab418aaa0a7d6e17eb9627a", + "git_url": "https://github.com/numpy/SVML.git" + }, + { + "folder": "numpy/fft/pocketfft", + "git_rev": "33ae5dc94c9cdc7f1c78346504a85de87cadaa12", + "git_url": "https://github.com/mreineck/pocketfft.git" + }, + { + "folder": "vendored-meson/meson", + "git_rev": "0d93515fb826440d19707eee47fd92655fe2f166", + 
"git_url": "https://github.com/numpy/meson.git" + }, + { + "sha256": "011a9483768b14bb00c6dab1c666f6d86f95b23036eb1e4b2b65db700f98c9a3", + "url": "https://github.com/numpy/numpy/archive/refs/tags/v2.2.4.tar.gz" + }, + { + "folder": "numpy/_core/src/highway", + "git_rev": "0b696633f9ad89497dd5532b55eaa01625ad71ca", + "git_url": "https://github.com/google/highway.git" + }, + { + "folder": "numpy/_core/src/common/pythoncapi-compat", + "git_rev": "0f1d42a10a3f594ad48894912396df31b2c2d55d", + "git_url": "https://github.com/python/pythoncapi-compat.git" + }, + { + "folder": "numpy/_core/src/npysort/x86-simd-sort", + "git_rev": "9a1b616d5cd4eaf49f7664fb86ccc1d18bad2b8d", + "git_url": "https://github.com/intel/x86-simd-sort.git" + }, + { + "folder": "numpy/_core/src/umath/svml", + "git_rev": "32bf2a98420762a63ab418aaa0a7d6e17eb9627a", + "git_url": "https://github.com/numpy/SVML.git" + }, + { + "folder": "numpy/fft/pocketfft", + "git_rev": "33ae5dc94c9cdc7f1c78346504a85de87cadaa12", + "git_url": "https://github.com/mreineck/pocketfft.git" + }, + { + "folder": "vendored-meson/meson", + "git_rev": "0d93515fb826440d19707eee47fd92655fe2f166", + "git_url": "https://github.com/numpy/meson.git" + }, + { + "sha256": "011a9483768b14bb00c6dab1c666f6d86f95b23036eb1e4b2b65db700f98c9a3", + "url": "https://github.com/numpy/numpy/archive/refs/tags/v2.2.4.tar.gz" + }, + { + "folder": "numpy/_core/src/highway", + "git_rev": "0b696633f9ad89497dd5532b55eaa01625ad71ca", + "git_url": "https://github.com/google/highway.git" + }, + { + "folder": "numpy/_core/src/common/pythoncapi-compat", + "git_rev": "0f1d42a10a3f594ad48894912396df31b2c2d55d", + "git_url": "https://github.com/python/pythoncapi-compat.git" + }, + { + "folder": "numpy/_core/src/npysort/x86-simd-sort", + "git_rev": "9a1b616d5cd4eaf49f7664fb86ccc1d18bad2b8d", + "git_url": "https://github.com/intel/x86-simd-sort.git" + }, + { + "folder": "numpy/_core/src/umath/svml", + "git_rev": "32bf2a98420762a63ab418aaa0a7d6e17eb9627a", + "git_url": "https://github.com/numpy/SVML.git" + }, + { + "folder": "numpy/fft/pocketfft", + "git_rev": "33ae5dc94c9cdc7f1c78346504a85de87cadaa12", + "git_url": "https://github.com/mreineck/pocketfft.git" + }, + { + "folder": "vendored-meson/meson", + "git_rev": "0d93515fb826440d19707eee47fd92655fe2f166", + "git_url": "https://github.com/numpy/meson.git" + }, + { + "sha256": "011a9483768b14bb00c6dab1c666f6d86f95b23036eb1e4b2b65db700f98c9a3", + "url": "https://github.com/numpy/numpy/archive/refs/tags/v2.2.4.tar.gz" + }, + { + "folder": "numpy/_core/src/highway", + "git_rev": "0b696633f9ad89497dd5532b55eaa01625ad71ca", + "git_url": "https://github.com/google/highway.git" + }, + { + "folder": "numpy/_core/src/common/pythoncapi-compat", + "git_rev": "0f1d42a10a3f594ad48894912396df31b2c2d55d", + "git_url": "https://github.com/python/pythoncapi-compat.git" + }, + { + "folder": "numpy/_core/src/npysort/x86-simd-sort", + "git_rev": "9a1b616d5cd4eaf49f7664fb86ccc1d18bad2b8d", + "git_url": "https://github.com/intel/x86-simd-sort.git" + }, + { + "folder": "numpy/_core/src/umath/svml", + "git_rev": "32bf2a98420762a63ab418aaa0a7d6e17eb9627a", + "git_url": "https://github.com/numpy/SVML.git" + }, + { + "folder": "numpy/fft/pocketfft", + "git_rev": "33ae5dc94c9cdc7f1c78346504a85de87cadaa12", + "git_url": "https://github.com/mreineck/pocketfft.git" + }, + { + "folder": "vendored-meson/meson", + "git_rev": "0d93515fb826440d19707eee47fd92655fe2f166", + "git_url": "https://github.com/numpy/meson.git" + }, + { + "sha256": 
"011a9483768b14bb00c6dab1c666f6d86f95b23036eb1e4b2b65db700f98c9a3", + "url": "https://github.com/numpy/numpy/archive/refs/tags/v2.2.4.tar.gz" + }, + { + "folder": "numpy/_core/src/highway", + "git_rev": "0b696633f9ad89497dd5532b55eaa01625ad71ca", + "git_url": "https://github.com/google/highway.git" + }, + { + "folder": "numpy/_core/src/common/pythoncapi-compat", + "git_rev": "0f1d42a10a3f594ad48894912396df31b2c2d55d", + "git_url": "https://github.com/python/pythoncapi-compat.git" + }, + { + "folder": "numpy/_core/src/npysort/x86-simd-sort", + "git_rev": "9a1b616d5cd4eaf49f7664fb86ccc1d18bad2b8d", + "git_url": "https://github.com/intel/x86-simd-sort.git" + }, + { + "folder": "numpy/_core/src/umath/svml", + "git_rev": "32bf2a98420762a63ab418aaa0a7d6e17eb9627a", + "git_url": "https://github.com/numpy/SVML.git" + }, + { + "folder": "numpy/fft/pocketfft", + "git_rev": "33ae5dc94c9cdc7f1c78346504a85de87cadaa12", + "git_url": "https://github.com/mreineck/pocketfft.git" + }, + { + "folder": "vendored-meson/meson", + "git_rev": "0d93515fb826440d19707eee47fd92655fe2f166", + "git_url": "https://github.com/numpy/meson.git" + }, + { + "sha256": "011a9483768b14bb00c6dab1c666f6d86f95b23036eb1e4b2b65db700f98c9a3", + "url": "https://github.com/numpy/numpy/archive/refs/tags/v2.2.4.tar.gz" + }, + { + "folder": "numpy/_core/src/highway", + "git_rev": "0b696633f9ad89497dd5532b55eaa01625ad71ca", + "git_url": "https://github.com/google/highway.git" + }, + { + "folder": "numpy/_core/src/common/pythoncapi-compat", + "git_rev": "0f1d42a10a3f594ad48894912396df31b2c2d55d", + "git_url": "https://github.com/python/pythoncapi-compat.git" + }, + { + "folder": "numpy/_core/src/npysort/x86-simd-sort", + "git_rev": "9a1b616d5cd4eaf49f7664fb86ccc1d18bad2b8d", + "git_url": "https://github.com/intel/x86-simd-sort.git" + }, + { + "folder": "numpy/_core/src/umath/svml", + "git_rev": "32bf2a98420762a63ab418aaa0a7d6e17eb9627a", + "git_url": "https://github.com/numpy/SVML.git" + }, + { + "folder": "numpy/fft/pocketfft", + "git_rev": "33ae5dc94c9cdc7f1c78346504a85de87cadaa12", + "git_url": "https://github.com/mreineck/pocketfft.git" + }, + { + "folder": "vendored-meson/meson", + "git_rev": "0d93515fb826440d19707eee47fd92655fe2f166", + "git_url": "https://github.com/numpy/meson.git" + }, + { + "sha256": "011a9483768b14bb00c6dab1c666f6d86f95b23036eb1e4b2b65db700f98c9a3", + "url": "https://github.com/numpy/numpy/archive/refs/tags/v2.2.4.tar.gz" + }, + { + "folder": "numpy/_core/src/highway", + "git_rev": "0b696633f9ad89497dd5532b55eaa01625ad71ca", + "git_url": "https://github.com/google/highway.git" + }, + { + "folder": "numpy/_core/src/common/pythoncapi-compat", + "git_rev": "0f1d42a10a3f594ad48894912396df31b2c2d55d", + "git_url": "https://github.com/python/pythoncapi-compat.git" + }, + { + "folder": "numpy/_core/src/npysort/x86-simd-sort", + "git_rev": "9a1b616d5cd4eaf49f7664fb86ccc1d18bad2b8d", + "git_url": "https://github.com/intel/x86-simd-sort.git" + }, + { + "folder": "numpy/_core/src/umath/svml", + "git_rev": "32bf2a98420762a63ab418aaa0a7d6e17eb9627a", + "git_url": "https://github.com/numpy/SVML.git" + }, + { + "folder": "numpy/fft/pocketfft", + "git_rev": "33ae5dc94c9cdc7f1c78346504a85de87cadaa12", + "git_url": "https://github.com/mreineck/pocketfft.git" + }, + { + "folder": "vendored-meson/meson", + "git_rev": "0d93515fb826440d19707eee47fd92655fe2f166", + "git_url": "https://github.com/numpy/meson.git" + } + ], + "test": { + "commands": [ + "f2py -h", + "python -c \"import numpy, sys; sys.exit(not numpy.test(verbose=1, 
label='full', tests=None, extra_argv=['-k', 'not (_not_a_real_test or Test_ARM_Features or Test_POWER_Features or test_new_policy or (test_basic_property and float32) or (test_regression and test_gh25784) or test_einsum_sums_float32 or test_ufunc_noncontiguous[matvec] or test_accelerate_framework_sgemv_fix )', '-nauto', '--timeout=3000', '--durations=50', '--maxfail=100']))\"" + ], + "imports": [ + "numpy", + "numpy.ctypeslib", + "numpy.distutils", + "numpy.dtypes", + "numpy.exceptions", + "numpy.f2py", + "numpy.fft", + "numpy.lib", + "numpy.lib.format", + "numpy.lib.mixins", + "numpy.lib.recfunctions", + "numpy.lib.scimath", + "numpy.lib.stride_tricks", + "numpy.lib.npyio", + "numpy.lib.introspect", + "numpy.lib.array_utils", + "numpy.linalg", + "numpy.ma", + "numpy.ma.extras", + "numpy.ma.mrecords", + "numpy.polynomial", + "numpy.polynomial.chebyshev", + "numpy.polynomial.hermite", + "numpy.polynomial.hermite_e", + "numpy.polynomial.laguerre", + "numpy.polynomial.legendre", + "numpy.polynomial.polynomial", + "numpy.random", + "numpy.testing", + "numpy.testing.overrides", + "numpy.typing", + "numpy.typing.mypy_plugin", + "numpy.version", + "numpy.core.multiarray", + "numpy.core.numeric", + "numpy.core.umath", + "numpy.linalg.lapack_lite", + "numpy.random.mtrand" + ], + "requires": [ + "pytest", + "pytest-timeout", + "pytest-xdist", + "cython", + "hypothesis", + "meson", + "pytz", + "setuptools <60.0.0", + "typing_extensions", + "c_compiler_stub", + "cxx_compiler_stub", + "fortran_compiler_stub", + "pkg-config" + ] + } + }, + "name": "numpy", + "osx_64_meta_yaml": { + "about": { + "dev_url": "https://github.com/numpy/numpy", + "doc_url": "https://numpy.org/doc/stable/", + "home": "http://numpy.org/", + "license": "BSD-3-Clause", + "license_file": "LICENSE.txt", + "summary": "The fundamental package for scientific computing with Python." 
+ }, + "build": { + "entry_points": null, + "number": "0", + "run_exports": [ + "numpy >=1.21,<3" + ] + }, + "extra": { + "recipe-maintainers": [ + "jakirkham", + "msarahan", + "pelson", + "rgommers", + "ocefpaf", + "isuruf", + "xhochy", + "h-vetinari" + ] + }, + "package": { + "name": "numpy", + "version": "2.2.4" + }, + "requirements": { + "build": [ + "c_compiler_stub", + "c_stdlib_stub", + "cxx_compiler_stub" + ], + "host": [ + "pip", + "meson-python", + "ninja", + "pkg-config", + "python", + "python-build", + "cython", + "libblas", + "libcblas", + "liblapack" + ], + "run": [ + "python" + ], + "run_constrained": [ + "numpy-base <0a0" + ] + }, + "schema_version": 0, + "source": [ + { + "sha256": "011a9483768b14bb00c6dab1c666f6d86f95b23036eb1e4b2b65db700f98c9a3", + "url": "https://github.com/numpy/numpy/archive/refs/tags/v2.2.4.tar.gz" + }, + { + "folder": "numpy/_core/src/highway", + "git_rev": "0b696633f9ad89497dd5532b55eaa01625ad71ca", + "git_url": "https://github.com/google/highway.git" + }, + { + "folder": "numpy/_core/src/common/pythoncapi-compat", + "git_rev": "0f1d42a10a3f594ad48894912396df31b2c2d55d", + "git_url": "https://github.com/python/pythoncapi-compat.git" + }, + { + "folder": "numpy/_core/src/npysort/x86-simd-sort", + "git_rev": "9a1b616d5cd4eaf49f7664fb86ccc1d18bad2b8d", + "git_url": "https://github.com/intel/x86-simd-sort.git" + }, + { + "folder": "numpy/_core/src/umath/svml", + "git_rev": "32bf2a98420762a63ab418aaa0a7d6e17eb9627a", + "git_url": "https://github.com/numpy/SVML.git" + }, + { + "folder": "numpy/fft/pocketfft", + "git_rev": "33ae5dc94c9cdc7f1c78346504a85de87cadaa12", + "git_url": "https://github.com/mreineck/pocketfft.git" + }, + { + "folder": "vendored-meson/meson", + "git_rev": "0d93515fb826440d19707eee47fd92655fe2f166", + "git_url": "https://github.com/numpy/meson.git" + }, + { + "sha256": "011a9483768b14bb00c6dab1c666f6d86f95b23036eb1e4b2b65db700f98c9a3", + "url": "https://github.com/numpy/numpy/archive/refs/tags/v2.2.4.tar.gz" + }, + { + "folder": "numpy/_core/src/highway", + "git_rev": "0b696633f9ad89497dd5532b55eaa01625ad71ca", + "git_url": "https://github.com/google/highway.git" + }, + { + "folder": "numpy/_core/src/common/pythoncapi-compat", + "git_rev": "0f1d42a10a3f594ad48894912396df31b2c2d55d", + "git_url": "https://github.com/python/pythoncapi-compat.git" + }, + { + "folder": "numpy/_core/src/npysort/x86-simd-sort", + "git_rev": "9a1b616d5cd4eaf49f7664fb86ccc1d18bad2b8d", + "git_url": "https://github.com/intel/x86-simd-sort.git" + }, + { + "folder": "numpy/_core/src/umath/svml", + "git_rev": "32bf2a98420762a63ab418aaa0a7d6e17eb9627a", + "git_url": "https://github.com/numpy/SVML.git" + }, + { + "folder": "numpy/fft/pocketfft", + "git_rev": "33ae5dc94c9cdc7f1c78346504a85de87cadaa12", + "git_url": "https://github.com/mreineck/pocketfft.git" + }, + { + "folder": "vendored-meson/meson", + "git_rev": "0d93515fb826440d19707eee47fd92655fe2f166", + "git_url": "https://github.com/numpy/meson.git" + }, + { + "sha256": "011a9483768b14bb00c6dab1c666f6d86f95b23036eb1e4b2b65db700f98c9a3", + "url": "https://github.com/numpy/numpy/archive/refs/tags/v2.2.4.tar.gz" + }, + { + "folder": "numpy/_core/src/highway", + "git_rev": "0b696633f9ad89497dd5532b55eaa01625ad71ca", + "git_url": "https://github.com/google/highway.git" + }, + { + "folder": "numpy/_core/src/common/pythoncapi-compat", + "git_rev": "0f1d42a10a3f594ad48894912396df31b2c2d55d", + "git_url": "https://github.com/python/pythoncapi-compat.git" + }, + { + "folder": "numpy/_core/src/npysort/x86-simd-sort", 
+ "git_rev": "9a1b616d5cd4eaf49f7664fb86ccc1d18bad2b8d", + "git_url": "https://github.com/intel/x86-simd-sort.git" + }, + { + "folder": "numpy/_core/src/umath/svml", + "git_rev": "32bf2a98420762a63ab418aaa0a7d6e17eb9627a", + "git_url": "https://github.com/numpy/SVML.git" + }, + { + "folder": "numpy/fft/pocketfft", + "git_rev": "33ae5dc94c9cdc7f1c78346504a85de87cadaa12", + "git_url": "https://github.com/mreineck/pocketfft.git" + }, + { + "folder": "vendored-meson/meson", + "git_rev": "0d93515fb826440d19707eee47fd92655fe2f166", + "git_url": "https://github.com/numpy/meson.git" + }, + { + "sha256": "011a9483768b14bb00c6dab1c666f6d86f95b23036eb1e4b2b65db700f98c9a3", + "url": "https://github.com/numpy/numpy/archive/refs/tags/v2.2.4.tar.gz" + }, + { + "folder": "numpy/_core/src/highway", + "git_rev": "0b696633f9ad89497dd5532b55eaa01625ad71ca", + "git_url": "https://github.com/google/highway.git" + }, + { + "folder": "numpy/_core/src/common/pythoncapi-compat", + "git_rev": "0f1d42a10a3f594ad48894912396df31b2c2d55d", + "git_url": "https://github.com/python/pythoncapi-compat.git" + }, + { + "folder": "numpy/_core/src/npysort/x86-simd-sort", + "git_rev": "9a1b616d5cd4eaf49f7664fb86ccc1d18bad2b8d", + "git_url": "https://github.com/intel/x86-simd-sort.git" + }, + { + "folder": "numpy/_core/src/umath/svml", + "git_rev": "32bf2a98420762a63ab418aaa0a7d6e17eb9627a", + "git_url": "https://github.com/numpy/SVML.git" + }, + { + "folder": "numpy/fft/pocketfft", + "git_rev": "33ae5dc94c9cdc7f1c78346504a85de87cadaa12", + "git_url": "https://github.com/mreineck/pocketfft.git" + }, + { + "folder": "vendored-meson/meson", + "git_rev": "0d93515fb826440d19707eee47fd92655fe2f166", + "git_url": "https://github.com/numpy/meson.git" + }, + { + "sha256": "011a9483768b14bb00c6dab1c666f6d86f95b23036eb1e4b2b65db700f98c9a3", + "url": "https://github.com/numpy/numpy/archive/refs/tags/v2.2.4.tar.gz" + }, + { + "folder": "numpy/_core/src/highway", + "git_rev": "0b696633f9ad89497dd5532b55eaa01625ad71ca", + "git_url": "https://github.com/google/highway.git" + }, + { + "folder": "numpy/_core/src/common/pythoncapi-compat", + "git_rev": "0f1d42a10a3f594ad48894912396df31b2c2d55d", + "git_url": "https://github.com/python/pythoncapi-compat.git" + }, + { + "folder": "numpy/_core/src/npysort/x86-simd-sort", + "git_rev": "9a1b616d5cd4eaf49f7664fb86ccc1d18bad2b8d", + "git_url": "https://github.com/intel/x86-simd-sort.git" + }, + { + "folder": "numpy/_core/src/umath/svml", + "git_rev": "32bf2a98420762a63ab418aaa0a7d6e17eb9627a", + "git_url": "https://github.com/numpy/SVML.git" + }, + { + "folder": "numpy/fft/pocketfft", + "git_rev": "33ae5dc94c9cdc7f1c78346504a85de87cadaa12", + "git_url": "https://github.com/mreineck/pocketfft.git" + }, + { + "folder": "vendored-meson/meson", + "git_rev": "0d93515fb826440d19707eee47fd92655fe2f166", + "git_url": "https://github.com/numpy/meson.git" + } + ], + "test": { + "commands": [ + "f2py -h", + "python -c \"import numpy, sys; sys.exit(not numpy.test(verbose=1, label='full', tests=None, extra_argv=['-k', 'not (_not_a_real_test or Test_ARM_Features or Test_POWER_Features or test_new_policy or (test_basic_property and float32) or (test_regression and test_gh25784) or test_einsum_sums_float32 or test_ufunc_noncontiguous[matvec] or test_accelerate_framework_sgemv_fix )', '-nauto', '--timeout=3000', '--durations=50', '--maxfail=100']))\"" + ], + "imports": [ + "numpy", + "numpy.ctypeslib", + "numpy.distutils", + "numpy.dtypes", + "numpy.exceptions", + "numpy.f2py", + "numpy.fft", + "numpy.lib", + 
"numpy.lib.format", + "numpy.lib.mixins", + "numpy.lib.recfunctions", + "numpy.lib.scimath", + "numpy.lib.stride_tricks", + "numpy.lib.npyio", + "numpy.lib.introspect", + "numpy.lib.array_utils", + "numpy.linalg", + "numpy.ma", + "numpy.ma.extras", + "numpy.ma.mrecords", + "numpy.polynomial", + "numpy.polynomial.chebyshev", + "numpy.polynomial.hermite", + "numpy.polynomial.hermite_e", + "numpy.polynomial.laguerre", + "numpy.polynomial.legendre", + "numpy.polynomial.polynomial", + "numpy.random", + "numpy.testing", + "numpy.testing.overrides", + "numpy.typing", + "numpy.typing.mypy_plugin", + "numpy.version", + "numpy.core.multiarray", + "numpy.core.numeric", + "numpy.core.umath", + "numpy.linalg.lapack_lite", + "numpy.random.mtrand" + ], + "requires": [ + "pytest", + "pytest-timeout", + "pytest-xdist", + "cython", + "hypothesis", + "meson", + "pytz", + "setuptools <60.0.0", + "typing_extensions", + "c_compiler_stub", + "cxx_compiler_stub", + "pkg-config" + ] + } + }, + "osx_64_requirements": { + "build": { + "__set__": true, + "elements": [ + "c_compiler_stub", + "c_stdlib_stub", + "cxx_compiler_stub" + ] + }, + "host": { + "__set__": true, + "elements": [ + "cython", + "libblas", + "libcblas", + "liblapack", + "meson-python", + "ninja", + "pip", + "pkg-config", + "python", + "python-build" + ] + }, + "run": { + "__set__": true, + "elements": [ + "python" + ] + }, + "test": { + "__set__": true, + "elements": [ + "c_compiler_stub", + "cxx_compiler_stub", + "cython", + "hypothesis", + "meson", + "pkg-config", + "pytest", + "pytest-timeout", + "pytest-xdist", + "pytz", + "setuptools", + "typing_extensions" + ] + } + }, + "osx_arm64_meta_yaml": { + "about": { + "dev_url": "https://github.com/numpy/numpy", + "doc_url": "https://numpy.org/doc/stable/", + "home": "http://numpy.org/", + "license": "BSD-3-Clause", + "license_file": "LICENSE.txt", + "summary": "The fundamental package for scientific computing with Python." 
+ }, + "build": { + "entry_points": null, + "number": "0", + "run_exports": [ + "numpy >=1.21,<3" + ] + }, + "extra": { + "recipe-maintainers": [ + "jakirkham", + "msarahan", + "pelson", + "rgommers", + "ocefpaf", + "isuruf", + "xhochy", + "h-vetinari" + ] + }, + "package": { + "name": "numpy", + "version": "2.2.4" + }, + "requirements": { + "build": [ + "c_compiler_stub", + "c_stdlib_stub", + "cxx_compiler_stub" + ], + "host": [ + "pip", + "meson-python", + "ninja", + "pkg-config", + "python", + "python-build", + "cython", + "libblas", + "libcblas", + "liblapack" + ], + "run": [ + "python" + ], + "run_constrained": [ + "numpy-base <0a0" + ] + }, + "schema_version": 0, + "source": [ + { + "sha256": "011a9483768b14bb00c6dab1c666f6d86f95b23036eb1e4b2b65db700f98c9a3", + "url": "https://github.com/numpy/numpy/archive/refs/tags/v2.2.4.tar.gz" + }, + { + "folder": "numpy/_core/src/highway", + "git_rev": "0b696633f9ad89497dd5532b55eaa01625ad71ca", + "git_url": "https://github.com/google/highway.git" + }, + { + "folder": "numpy/_core/src/common/pythoncapi-compat", + "git_rev": "0f1d42a10a3f594ad48894912396df31b2c2d55d", + "git_url": "https://github.com/python/pythoncapi-compat.git" + }, + { + "folder": "numpy/_core/src/npysort/x86-simd-sort", + "git_rev": "9a1b616d5cd4eaf49f7664fb86ccc1d18bad2b8d", + "git_url": "https://github.com/intel/x86-simd-sort.git" + }, + { + "folder": "numpy/_core/src/umath/svml", + "git_rev": "32bf2a98420762a63ab418aaa0a7d6e17eb9627a", + "git_url": "https://github.com/numpy/SVML.git" + }, + { + "folder": "numpy/fft/pocketfft", + "git_rev": "33ae5dc94c9cdc7f1c78346504a85de87cadaa12", + "git_url": "https://github.com/mreineck/pocketfft.git" + }, + { + "folder": "vendored-meson/meson", + "git_rev": "0d93515fb826440d19707eee47fd92655fe2f166", + "git_url": "https://github.com/numpy/meson.git" + }, + { + "sha256": "011a9483768b14bb00c6dab1c666f6d86f95b23036eb1e4b2b65db700f98c9a3", + "url": "https://github.com/numpy/numpy/archive/refs/tags/v2.2.4.tar.gz" + }, + { + "folder": "numpy/_core/src/highway", + "git_rev": "0b696633f9ad89497dd5532b55eaa01625ad71ca", + "git_url": "https://github.com/google/highway.git" + }, + { + "folder": "numpy/_core/src/common/pythoncapi-compat", + "git_rev": "0f1d42a10a3f594ad48894912396df31b2c2d55d", + "git_url": "https://github.com/python/pythoncapi-compat.git" + }, + { + "folder": "numpy/_core/src/npysort/x86-simd-sort", + "git_rev": "9a1b616d5cd4eaf49f7664fb86ccc1d18bad2b8d", + "git_url": "https://github.com/intel/x86-simd-sort.git" + }, + { + "folder": "numpy/_core/src/umath/svml", + "git_rev": "32bf2a98420762a63ab418aaa0a7d6e17eb9627a", + "git_url": "https://github.com/numpy/SVML.git" + }, + { + "folder": "numpy/fft/pocketfft", + "git_rev": "33ae5dc94c9cdc7f1c78346504a85de87cadaa12", + "git_url": "https://github.com/mreineck/pocketfft.git" + }, + { + "folder": "vendored-meson/meson", + "git_rev": "0d93515fb826440d19707eee47fd92655fe2f166", + "git_url": "https://github.com/numpy/meson.git" + }, + { + "sha256": "011a9483768b14bb00c6dab1c666f6d86f95b23036eb1e4b2b65db700f98c9a3", + "url": "https://github.com/numpy/numpy/archive/refs/tags/v2.2.4.tar.gz" + }, + { + "folder": "numpy/_core/src/highway", + "git_rev": "0b696633f9ad89497dd5532b55eaa01625ad71ca", + "git_url": "https://github.com/google/highway.git" + }, + { + "folder": "numpy/_core/src/common/pythoncapi-compat", + "git_rev": "0f1d42a10a3f594ad48894912396df31b2c2d55d", + "git_url": "https://github.com/python/pythoncapi-compat.git" + }, + { + "folder": "numpy/_core/src/npysort/x86-simd-sort", 
+ "git_rev": "9a1b616d5cd4eaf49f7664fb86ccc1d18bad2b8d", + "git_url": "https://github.com/intel/x86-simd-sort.git" + }, + { + "folder": "numpy/_core/src/umath/svml", + "git_rev": "32bf2a98420762a63ab418aaa0a7d6e17eb9627a", + "git_url": "https://github.com/numpy/SVML.git" + }, + { + "folder": "numpy/fft/pocketfft", + "git_rev": "33ae5dc94c9cdc7f1c78346504a85de87cadaa12", + "git_url": "https://github.com/mreineck/pocketfft.git" + }, + { + "folder": "vendored-meson/meson", + "git_rev": "0d93515fb826440d19707eee47fd92655fe2f166", + "git_url": "https://github.com/numpy/meson.git" + }, + { + "sha256": "011a9483768b14bb00c6dab1c666f6d86f95b23036eb1e4b2b65db700f98c9a3", + "url": "https://github.com/numpy/numpy/archive/refs/tags/v2.2.4.tar.gz" + }, + { + "folder": "numpy/_core/src/highway", + "git_rev": "0b696633f9ad89497dd5532b55eaa01625ad71ca", + "git_url": "https://github.com/google/highway.git" + }, + { + "folder": "numpy/_core/src/common/pythoncapi-compat", + "git_rev": "0f1d42a10a3f594ad48894912396df31b2c2d55d", + "git_url": "https://github.com/python/pythoncapi-compat.git" + }, + { + "folder": "numpy/_core/src/npysort/x86-simd-sort", + "git_rev": "9a1b616d5cd4eaf49f7664fb86ccc1d18bad2b8d", + "git_url": "https://github.com/intel/x86-simd-sort.git" + }, + { + "folder": "numpy/_core/src/umath/svml", + "git_rev": "32bf2a98420762a63ab418aaa0a7d6e17eb9627a", + "git_url": "https://github.com/numpy/SVML.git" + }, + { + "folder": "numpy/fft/pocketfft", + "git_rev": "33ae5dc94c9cdc7f1c78346504a85de87cadaa12", + "git_url": "https://github.com/mreineck/pocketfft.git" + }, + { + "folder": "vendored-meson/meson", + "git_rev": "0d93515fb826440d19707eee47fd92655fe2f166", + "git_url": "https://github.com/numpy/meson.git" + }, + { + "sha256": "011a9483768b14bb00c6dab1c666f6d86f95b23036eb1e4b2b65db700f98c9a3", + "url": "https://github.com/numpy/numpy/archive/refs/tags/v2.2.4.tar.gz" + }, + { + "folder": "numpy/_core/src/highway", + "git_rev": "0b696633f9ad89497dd5532b55eaa01625ad71ca", + "git_url": "https://github.com/google/highway.git" + }, + { + "folder": "numpy/_core/src/common/pythoncapi-compat", + "git_rev": "0f1d42a10a3f594ad48894912396df31b2c2d55d", + "git_url": "https://github.com/python/pythoncapi-compat.git" + }, + { + "folder": "numpy/_core/src/npysort/x86-simd-sort", + "git_rev": "9a1b616d5cd4eaf49f7664fb86ccc1d18bad2b8d", + "git_url": "https://github.com/intel/x86-simd-sort.git" + }, + { + "folder": "numpy/_core/src/umath/svml", + "git_rev": "32bf2a98420762a63ab418aaa0a7d6e17eb9627a", + "git_url": "https://github.com/numpy/SVML.git" + }, + { + "folder": "numpy/fft/pocketfft", + "git_rev": "33ae5dc94c9cdc7f1c78346504a85de87cadaa12", + "git_url": "https://github.com/mreineck/pocketfft.git" + }, + { + "folder": "vendored-meson/meson", + "git_rev": "0d93515fb826440d19707eee47fd92655fe2f166", + "git_url": "https://github.com/numpy/meson.git" + } + ], + "test": { + "commands": [ + "f2py -h", + "python -c \"import numpy, sys; sys.exit(not numpy.test(verbose=1, label='full', tests=None, extra_argv=['-k', 'not (_not_a_real_test or Test_ARM_Features or Test_POWER_Features or test_new_policy or (test_basic_property and float32) or (test_regression and test_gh25784) or test_einsum_sums_float32 or test_ufunc_noncontiguous[matvec] or test_accelerate_framework_sgemv_fix )', '-nauto', '--timeout=3000', '--durations=50', '--maxfail=100']))\"" + ], + "imports": [ + "numpy", + "numpy.ctypeslib", + "numpy.distutils", + "numpy.dtypes", + "numpy.exceptions", + "numpy.f2py", + "numpy.fft", + "numpy.lib", + 
"numpy.lib.format", + "numpy.lib.mixins", + "numpy.lib.recfunctions", + "numpy.lib.scimath", + "numpy.lib.stride_tricks", + "numpy.lib.npyio", + "numpy.lib.introspect", + "numpy.lib.array_utils", + "numpy.linalg", + "numpy.ma", + "numpy.ma.extras", + "numpy.ma.mrecords", + "numpy.polynomial", + "numpy.polynomial.chebyshev", + "numpy.polynomial.hermite", + "numpy.polynomial.hermite_e", + "numpy.polynomial.laguerre", + "numpy.polynomial.legendre", + "numpy.polynomial.polynomial", + "numpy.random", + "numpy.testing", + "numpy.testing.overrides", + "numpy.typing", + "numpy.typing.mypy_plugin", + "numpy.version", + "numpy.core.multiarray", + "numpy.core.numeric", + "numpy.core.umath", + "numpy.linalg.lapack_lite", + "numpy.random.mtrand" + ], + "requires": [ + "pytest", + "pytest-timeout", + "pytest-xdist", + "cython", + "hypothesis", + "meson", + "pytz", + "setuptools <60.0.0", + "typing_extensions", + "c_compiler_stub", + "cxx_compiler_stub", + "pkg-config" + ] + } + }, + "osx_arm64_requirements": { + "build": { + "__set__": true, + "elements": [ + "c_compiler_stub", + "c_stdlib_stub", + "cxx_compiler_stub" + ] + }, + "host": { + "__set__": true, + "elements": [ + "cython", + "libblas", + "libcblas", + "liblapack", + "meson-python", + "ninja", + "pip", + "pkg-config", + "python", + "python-build" + ] + }, + "run": { + "__set__": true, + "elements": [ + "python" + ] + }, + "test": { + "__set__": true, + "elements": [ + "c_compiler_stub", + "cxx_compiler_stub", + "cython", + "hypothesis", + "meson", + "pkg-config", + "pytest", + "pytest-timeout", + "pytest-xdist", + "pytz", + "setuptools", + "typing_extensions" + ] + } + }, + "outputs_names": { + "__set__": true, + "elements": [ + "numpy" + ] + }, + "parsing_error": false, + "platforms": [ + "linux_64", + "linux_aarch64", + "linux_ppc64le", + "osx_64", + "osx_arm64", + "win_64" + ], + "pr_info": { + "__lazy_json__": "pr_info/numpy.json" + }, + "raw_meta_yaml": "{% set version = \"2.2.4\" %}\n{% set dev = \"\" %}\n# numpy will by default use the ABI feature level for the first numpy version\n# that added support for the oldest currently-supported CPython version; see\n# https://github.com/numpy/numpy/blob/v2.0.0rc1/numpy/_core/include/numpy/numpyconfig.h#L124\n{% set default_abi_level = \"1.21\" %}\n\n# ensure is_freethreading gets detected as a used variable\n# [is_freethreading]\n\npackage:\n name: numpy\n version: {{ version }}{{ dev }}\n\nsource:\n # The sdist distributed by numpy contains submodules; by taking the tarball\n # from github we can everything pythran ourselves, but manually need to include\n # the submodules (not in tarball due to dear-github/dear-github#214); for the\n # list of modules see https://github.com/numpy/numpy/blob/main/.gitmodules\n - url: https://github.com/numpy/numpy/archive/refs/tags/v{{ version }}{{ dev }}.tar.gz\n sha256: 011a9483768b14bb00c6dab1c666f6d86f95b23036eb1e4b2b65db700f98c9a3\n # https://github.com/numpy/numpy/tree/v{{ version }}/numpy/_core/src\n - folder: numpy/_core/src/highway\n git_url: https://github.com/google/highway.git\n git_rev: 0b696633f9ad89497dd5532b55eaa01625ad71ca\n # https://github.com/numpy/numpy/tree/v{{ version }}/numpy/_core/src/common\n - folder: numpy/_core/src/common/pythoncapi-compat\n git_url: https://github.com/python/pythoncapi-compat.git\n git_rev: 0f1d42a10a3f594ad48894912396df31b2c2d55d\n # https://github.com/numpy/numpy/tree/v{{ version }}/numpy/_core/src/npysort\n - folder: numpy/_core/src/npysort/x86-simd-sort\n git_url: https://github.com/intel/x86-simd-sort.git\n 
git_rev: 9a1b616d5cd4eaf49f7664fb86ccc1d18bad2b8d\n # https://github.com/numpy/numpy/tree/v{{ version }}/numpy/_core/src/umath\n - folder: numpy/_core/src/umath/svml\n git_url: https://github.com/numpy/SVML.git\n git_rev: 32bf2a98420762a63ab418aaa0a7d6e17eb9627a\n # https://github.com/numpy/numpy/tree/v{{ version }}/numpy/fft\n - folder: numpy/fft/pocketfft\n git_url: https://github.com/mreineck/pocketfft.git\n git_rev: 33ae5dc94c9cdc7f1c78346504a85de87cadaa12\n # https://github.com/numpy/numpy/tree/v{{ version }}/vendored-meson\n - folder: vendored-meson/meson\n git_url: https://github.com/numpy/meson.git\n git_rev: 0d93515fb826440d19707eee47fd92655fe2f166\n\nbuild:\n number: 0\n skip: true # [py<310]\n entry_points:\n - f2py = numpy.f2py.f2py2e:main # [win]\n - numpy-config = numpy._configtool:main # [win]\n run_exports:\n - numpy >={{ default_abi_level }},<3\n\nrequirements:\n build:\n - python # [build_platform != target_platform]\n - cross-python_{{ target_platform }} # [build_platform != target_platform]\n - cython # [build_platform != target_platform]\n - ninja # [build_platform != target_platform]\n - pkg-config # [build_platform != target_platform]\n - {{ compiler('c') }}\n - {{ stdlib('c') }}\n - {{ compiler('cxx') }}\n host:\n - pip\n # temporarily vendored by numpy\n # - meson\n - meson-python\n - ninja\n - pkg-config\n - python\n - python-build\n - cython\n - libblas\n - libcblas\n - liblapack\n run:\n - python\n{% if dev != '' %}\n - _numpy_rc\n{% endif %}\n run_constrained:\n # enforce eviction of package from anaconda defaults\n - numpy-base <0a0\n\n{% set tests_to_skip = \"_not_a_real_test\" %}\n# there are some tests that cannot really work in emulation, see e.g. numpy/numpy#20445\n{% set tests_to_skip = tests_to_skip + \" or Test_ARM_Features\" %} # [build_platform != target_platform]\n{% set tests_to_skip = tests_to_skip + \" or Test_POWER_Features\" %} # [build_platform != target_platform]\n# test_new_policy reruns part of test suite; including a CPU feature test that fails in emulation\n{% set tests_to_skip = tests_to_skip + \" or test_new_policy\" %} # [build_platform != target_platform]\n# emulation problems (apparently) on aarch\n{% set tests_to_skip = tests_to_skip + \" or (test_basic_property and float32)\" %} # [aarch64]\n# https://github.com/numpy/numpy/issues/27045\n{% set tests_to_skip = tests_to_skip + \" or (test_regression and test_gh25784)\" %} # [osx]\n# new test failures for 2.2.4; possibly emulation-related; see\n# https://github.com/numpy/numpy/issues/28548\n{% set tests_to_skip = tests_to_skip + \" or test_einsum_sums_float32\" %} # [ppc64le]\n{% set tests_to_skip = tests_to_skip + \" or test_ufunc_noncontiguous[matvec]\" %} # [ppc64le]\n{% set tests_to_skip = tests_to_skip + \" or test_accelerate_framework_sgemv_fix \" %} # [ppc64le]\n\ntest:\n requires:\n - pytest\n - pytest-timeout\n - pytest-xdist\n # (mostly) optional test requirements (except wheel, python-cov, mypy), see\n # https://github.com/numpy/numpy/blob/main/requirements/test_requirements.txt\n - cython\n - hypothesis\n - meson\n - pytz\n - setuptools <60.0.0 # [py<=311]\n - setuptools # [py>=312]\n - typing_extensions\n # some linux tests need a C/C++ compiler;\n # extra f2py tests need a fortran compiler\n - {{ compiler('c') }} # [unix]\n - {{ compiler('cxx') }} # [unix]\n - {{ compiler('fortran') }} # [linux]\n # For some cython tests\n - pkg-config\n commands:\n - f2py -h\n # numpy.test will show SIMD features of agent (in the past, there have been\n # failures that occurred 
depending on presence/absence of e.g. AVX512);\n # for signature of numpy.test see the following (note default: label='fast'),\n # https://github.com/numpy/numpy/blob/maintenance/1.22.x/numpy/_pytesttester.py#L81-L82\n {% set param = \"verbose=1, label='full', tests=None\" %}\n {% set extra = \"extra_argv=['-k', 'not (\" + tests_to_skip + \")', '-nauto', '--timeout=3000', '--durations=50', '--maxfail=100']\" %}\n - python -c \"import numpy, sys; sys.exit(not numpy.test({{ param }}, {{ extra }}))\"\n imports:\n - numpy\n # reference for public API is effectively PUBLIC_MODULES under\n # https://github.com/numpy/numpy/blame/main/numpy/tests/test_public_api.py\n - numpy.ctypeslib\n - numpy.distutils # [py<312]\n - numpy.dtypes\n - numpy.exceptions\n - numpy.f2py\n - numpy.fft\n - numpy.lib\n - numpy.lib.format\n - numpy.lib.mixins\n - numpy.lib.recfunctions\n - numpy.lib.scimath\n - numpy.lib.stride_tricks\n - numpy.lib.npyio\n - numpy.lib.introspect\n - numpy.lib.array_utils\n - numpy.linalg\n - numpy.ma\n - numpy.ma.extras\n - numpy.ma.mrecords\n - numpy.polynomial\n - numpy.polynomial.chebyshev\n - numpy.polynomial.hermite\n - numpy.polynomial.hermite_e\n - numpy.polynomial.laguerre\n - numpy.polynomial.legendre\n - numpy.polynomial.polynomial\n - numpy.random\n - numpy.testing\n - numpy.testing.overrides\n - numpy.typing\n - numpy.typing.mypy_plugin\n - numpy.version\n # some private modules that were once upon a time\n # determined to be useful packaging checks\n - numpy.core.multiarray\n - numpy.core.numeric\n - numpy.core.umath\n - numpy.linalg.lapack_lite\n - numpy.random.mtrand\n\nabout:\n home: http://numpy.org/\n license: BSD-3-Clause\n license_file: LICENSE.txt\n summary: The fundamental package for scientific computing with Python.\n doc_url: https://numpy.org/doc/stable/\n dev_url: https://github.com/numpy/numpy\n\nextra:\n recipe-maintainers:\n - jakirkham\n - msarahan\n - pelson\n - rgommers\n - ocefpaf\n - isuruf\n - xhochy\n - h-vetinari\n", + "req": { + "__set__": true, + "elements": [ + "c_compiler_stub", + "c_stdlib_stub", + "cxx_compiler_stub", + "cython", + "libblas", + "libcblas", + "liblapack", + "meson-python", + "ninja", + "pip", + "pkg-config", + "python", + "python-build" + ] + }, + "requirements": { + "build": { + "__set__": true, + "elements": [ + "c_compiler_stub", + "c_stdlib_stub", + "cxx_compiler_stub" + ] + }, + "host": { + "__set__": true, + "elements": [ + "c_compiler_stub", + "c_stdlib_stub", + "cxx_compiler_stub", + "cython", + "fortran_compiler_stub", + "libblas", + "libcblas", + "liblapack", + "meson-python", + "ninja", + "pip", + "pkg-config", + "python", + "python-build" + ] + }, + "run": { + "__set__": true, + "elements": [ + "c_compiler_stub", + "c_stdlib_stub", + "cxx_compiler_stub", + "fortran_compiler_stub", + "python" + ] + }, + "test": { + "__set__": true, + "elements": [ + "c_compiler_stub", + "cxx_compiler_stub", + "cython", + "fortran_compiler_stub", + "hypothesis", + "meson", + "pkg-config", + "pytest", + "pytest-timeout", + "pytest-xdist", + "pytz", + "setuptools", + "typing_extensions" + ] + } + }, + "strong_exports": false, + "total_requirements": { + "build": { + "__set__": true, + "elements": [ + "c_compiler_stub", + "c_stdlib_stub", + "cxx_compiler_stub" + ] + }, + "host": { + "__set__": true, + "elements": [ + "cython", + "libblas", + "libcblas", + "liblapack", + "meson-python", + "ninja", + "pip", + "pkg-config", + "python", + "python-build" + ] + }, + "run": { + "__set__": true, + "elements": [ + "python" + ] + }, + "test": { + 
"__set__": true, + "elements": [ + "c_compiler_stub", + "cxx_compiler_stub", + "cython", + "fortran_compiler_stub", + "hypothesis", + "meson", + "pkg-config", + "pytest", + "pytest-timeout", + "pytest-xdist", + "pytz", + "setuptools <60.0.0", + "typing_extensions" + ] + } + }, + "url": "https://github.com/numpy/numpy/archive/refs/tags/v2.2.4.tar.gz", + "version": "2.2.4", + "version_pr_info": { + "__lazy_json__": "version_pr_info/numpy.json" + }, + "win_64_meta_yaml": { + "about": { + "dev_url": "https://github.com/numpy/numpy", + "doc_url": "https://numpy.org/doc/stable/", + "home": "http://numpy.org/", + "license": "BSD-3-Clause", + "license_file": "LICENSE.txt", + "summary": "The fundamental package for scientific computing with Python." + }, + "build": { + "entry_points": [ + "f2py = numpy.f2py.f2py2e:main", + "numpy-config = numpy._configtool:main" + ], + "number": "0", + "run_exports": [ + "numpy >=1.21,<3" + ] + }, + "extra": { + "recipe-maintainers": [ + "jakirkham", + "msarahan", + "pelson", + "rgommers", + "ocefpaf", + "isuruf", + "xhochy", + "h-vetinari" + ] + }, + "package": { + "name": "numpy", + "version": "2.2.4" + }, + "requirements": { + "build": [ + "c_compiler_stub", + "c_stdlib_stub", + "cxx_compiler_stub" + ], + "host": [ + "pip", + "meson-python", + "ninja", + "pkg-config", + "python", + "python-build", + "cython", + "libblas", + "libcblas", + "liblapack" + ], + "run": [ + "python" + ], + "run_constrained": [ + "numpy-base <0a0" + ] + }, + "schema_version": 0, + "source": [ + { + "sha256": "011a9483768b14bb00c6dab1c666f6d86f95b23036eb1e4b2b65db700f98c9a3", + "url": "https://github.com/numpy/numpy/archive/refs/tags/v2.2.4.tar.gz" + }, + { + "folder": "numpy/_core/src/highway", + "git_rev": "0b696633f9ad89497dd5532b55eaa01625ad71ca", + "git_url": "https://github.com/google/highway.git" + }, + { + "folder": "numpy/_core/src/common/pythoncapi-compat", + "git_rev": "0f1d42a10a3f594ad48894912396df31b2c2d55d", + "git_url": "https://github.com/python/pythoncapi-compat.git" + }, + { + "folder": "numpy/_core/src/npysort/x86-simd-sort", + "git_rev": "9a1b616d5cd4eaf49f7664fb86ccc1d18bad2b8d", + "git_url": "https://github.com/intel/x86-simd-sort.git" + }, + { + "folder": "numpy/_core/src/umath/svml", + "git_rev": "32bf2a98420762a63ab418aaa0a7d6e17eb9627a", + "git_url": "https://github.com/numpy/SVML.git" + }, + { + "folder": "numpy/fft/pocketfft", + "git_rev": "33ae5dc94c9cdc7f1c78346504a85de87cadaa12", + "git_url": "https://github.com/mreineck/pocketfft.git" + }, + { + "folder": "vendored-meson/meson", + "git_rev": "0d93515fb826440d19707eee47fd92655fe2f166", + "git_url": "https://github.com/numpy/meson.git" + }, + { + "sha256": "011a9483768b14bb00c6dab1c666f6d86f95b23036eb1e4b2b65db700f98c9a3", + "url": "https://github.com/numpy/numpy/archive/refs/tags/v2.2.4.tar.gz" + }, + { + "folder": "numpy/_core/src/highway", + "git_rev": "0b696633f9ad89497dd5532b55eaa01625ad71ca", + "git_url": "https://github.com/google/highway.git" + }, + { + "folder": "numpy/_core/src/common/pythoncapi-compat", + "git_rev": "0f1d42a10a3f594ad48894912396df31b2c2d55d", + "git_url": "https://github.com/python/pythoncapi-compat.git" + }, + { + "folder": "numpy/_core/src/npysort/x86-simd-sort", + "git_rev": "9a1b616d5cd4eaf49f7664fb86ccc1d18bad2b8d", + "git_url": "https://github.com/intel/x86-simd-sort.git" + }, + { + "folder": "numpy/_core/src/umath/svml", + "git_rev": "32bf2a98420762a63ab418aaa0a7d6e17eb9627a", + "git_url": "https://github.com/numpy/SVML.git" + }, + { + "folder": "numpy/fft/pocketfft", + 
"git_rev": "33ae5dc94c9cdc7f1c78346504a85de87cadaa12", + "git_url": "https://github.com/mreineck/pocketfft.git" + }, + { + "folder": "vendored-meson/meson", + "git_rev": "0d93515fb826440d19707eee47fd92655fe2f166", + "git_url": "https://github.com/numpy/meson.git" + }, + { + "sha256": "011a9483768b14bb00c6dab1c666f6d86f95b23036eb1e4b2b65db700f98c9a3", + "url": "https://github.com/numpy/numpy/archive/refs/tags/v2.2.4.tar.gz" + }, + { + "folder": "numpy/_core/src/highway", + "git_rev": "0b696633f9ad89497dd5532b55eaa01625ad71ca", + "git_url": "https://github.com/google/highway.git" + }, + { + "folder": "numpy/_core/src/common/pythoncapi-compat", + "git_rev": "0f1d42a10a3f594ad48894912396df31b2c2d55d", + "git_url": "https://github.com/python/pythoncapi-compat.git" + }, + { + "folder": "numpy/_core/src/npysort/x86-simd-sort", + "git_rev": "9a1b616d5cd4eaf49f7664fb86ccc1d18bad2b8d", + "git_url": "https://github.com/intel/x86-simd-sort.git" + }, + { + "folder": "numpy/_core/src/umath/svml", + "git_rev": "32bf2a98420762a63ab418aaa0a7d6e17eb9627a", + "git_url": "https://github.com/numpy/SVML.git" + }, + { + "folder": "numpy/fft/pocketfft", + "git_rev": "33ae5dc94c9cdc7f1c78346504a85de87cadaa12", + "git_url": "https://github.com/mreineck/pocketfft.git" + }, + { + "folder": "vendored-meson/meson", + "git_rev": "0d93515fb826440d19707eee47fd92655fe2f166", + "git_url": "https://github.com/numpy/meson.git" + }, + { + "sha256": "011a9483768b14bb00c6dab1c666f6d86f95b23036eb1e4b2b65db700f98c9a3", + "url": "https://github.com/numpy/numpy/archive/refs/tags/v2.2.4.tar.gz" + }, + { + "folder": "numpy/_core/src/highway", + "git_rev": "0b696633f9ad89497dd5532b55eaa01625ad71ca", + "git_url": "https://github.com/google/highway.git" + }, + { + "folder": "numpy/_core/src/common/pythoncapi-compat", + "git_rev": "0f1d42a10a3f594ad48894912396df31b2c2d55d", + "git_url": "https://github.com/python/pythoncapi-compat.git" + }, + { + "folder": "numpy/_core/src/npysort/x86-simd-sort", + "git_rev": "9a1b616d5cd4eaf49f7664fb86ccc1d18bad2b8d", + "git_url": "https://github.com/intel/x86-simd-sort.git" + }, + { + "folder": "numpy/_core/src/umath/svml", + "git_rev": "32bf2a98420762a63ab418aaa0a7d6e17eb9627a", + "git_url": "https://github.com/numpy/SVML.git" + }, + { + "folder": "numpy/fft/pocketfft", + "git_rev": "33ae5dc94c9cdc7f1c78346504a85de87cadaa12", + "git_url": "https://github.com/mreineck/pocketfft.git" + }, + { + "folder": "vendored-meson/meson", + "git_rev": "0d93515fb826440d19707eee47fd92655fe2f166", + "git_url": "https://github.com/numpy/meson.git" + }, + { + "sha256": "011a9483768b14bb00c6dab1c666f6d86f95b23036eb1e4b2b65db700f98c9a3", + "url": "https://github.com/numpy/numpy/archive/refs/tags/v2.2.4.tar.gz" + }, + { + "folder": "numpy/_core/src/highway", + "git_rev": "0b696633f9ad89497dd5532b55eaa01625ad71ca", + "git_url": "https://github.com/google/highway.git" + }, + { + "folder": "numpy/_core/src/common/pythoncapi-compat", + "git_rev": "0f1d42a10a3f594ad48894912396df31b2c2d55d", + "git_url": "https://github.com/python/pythoncapi-compat.git" + }, + { + "folder": "numpy/_core/src/npysort/x86-simd-sort", + "git_rev": "9a1b616d5cd4eaf49f7664fb86ccc1d18bad2b8d", + "git_url": "https://github.com/intel/x86-simd-sort.git" + }, + { + "folder": "numpy/_core/src/umath/svml", + "git_rev": "32bf2a98420762a63ab418aaa0a7d6e17eb9627a", + "git_url": "https://github.com/numpy/SVML.git" + }, + { + "folder": "numpy/fft/pocketfft", + "git_rev": "33ae5dc94c9cdc7f1c78346504a85de87cadaa12", + "git_url": 
"https://github.com/mreineck/pocketfft.git" + }, + { + "folder": "vendored-meson/meson", + "git_rev": "0d93515fb826440d19707eee47fd92655fe2f166", + "git_url": "https://github.com/numpy/meson.git" + } + ], + "test": { + "commands": [ + "f2py -h", + "python -c \"import numpy, sys; sys.exit(not numpy.test(verbose=1, label='full', tests=None, extra_argv=['-k', 'not (_not_a_real_test or Test_ARM_Features or Test_POWER_Features or test_new_policy or (test_basic_property and float32) or (test_regression and test_gh25784) or test_einsum_sums_float32 or test_ufunc_noncontiguous[matvec] or test_accelerate_framework_sgemv_fix )', '-nauto', '--timeout=3000', '--durations=50', '--maxfail=100']))\"" + ], + "imports": [ + "numpy", + "numpy.ctypeslib", + "numpy.distutils", + "numpy.dtypes", + "numpy.exceptions", + "numpy.f2py", + "numpy.fft", + "numpy.lib", + "numpy.lib.format", + "numpy.lib.mixins", + "numpy.lib.recfunctions", + "numpy.lib.scimath", + "numpy.lib.stride_tricks", + "numpy.lib.npyio", + "numpy.lib.introspect", + "numpy.lib.array_utils", + "numpy.linalg", + "numpy.ma", + "numpy.ma.extras", + "numpy.ma.mrecords", + "numpy.polynomial", + "numpy.polynomial.chebyshev", + "numpy.polynomial.hermite", + "numpy.polynomial.hermite_e", + "numpy.polynomial.laguerre", + "numpy.polynomial.legendre", + "numpy.polynomial.polynomial", + "numpy.random", + "numpy.testing", + "numpy.testing.overrides", + "numpy.typing", + "numpy.typing.mypy_plugin", + "numpy.version", + "numpy.core.multiarray", + "numpy.core.numeric", + "numpy.core.umath", + "numpy.linalg.lapack_lite", + "numpy.random.mtrand" + ], + "requires": [ + "pytest", + "pytest-timeout", + "pytest-xdist", + "cython", + "hypothesis", + "meson", + "pytz", + "setuptools <60.0.0", + "typing_extensions", + "pkg-config" + ] + } + }, + "win_64_requirements": { + "build": { + "__set__": true, + "elements": [ + "c_compiler_stub", + "c_stdlib_stub", + "cxx_compiler_stub" + ] + }, + "host": { + "__set__": true, + "elements": [ + "cython", + "libblas", + "libcblas", + "liblapack", + "meson-python", + "ninja", + "pip", + "pkg-config", + "python", + "python-build" + ] + }, + "run": { + "__set__": true, + "elements": [ + "python" + ] + }, + "test": { + "__set__": true, + "elements": [ + "cython", + "hypothesis", + "meson", + "pkg-config", + "pytest", + "pytest-timeout", + "pytest-xdist", + "pytz", + "setuptools", + "typing_extensions" + ] + } + } +} diff --git a/tests/test_files_make_migrators/test_conda_build_config.yaml b/tests/test_files_make_migrators/test_conda_build_config.yaml new file mode 100644 index 000000000..99553d39d --- /dev/null +++ b/tests/test_files_make_migrators/test_conda_build_config.yaml @@ -0,0 +1,47 @@ +zip_keys: + - + - python + - numpy + - python_impl + - is_python_min + + +pin_run_as_build: + libblst: + max_pin: x.x + netcdf-cxx4: + max_pin: x.x + vlfeat: + max_pin: x.x.x + +# Pinning packages +numpy: + # part of a zip_keys: python, python_impl, numpy + - 1.22 + - 1.22 + - 1.23 + - 1.26 +python: + # part of a zip_keys: python, python_impl, numpy, is_python_min + - 3.9.* *_cpython + - 3.10.* *_cpython + - 3.11.* *_cpython + - 3.12.* *_cpython +python_impl: + # part of a zip_keys: python, python_impl, numpy, is_python_min + - cpython + - cpython + - cpython + - cpython +python_min: + # minimum supported python version per CFEP-25 + # bump to next minor version when we drop python versions + - '3.9' +is_python_min: + - true + - false + - false + - false + +aws_c_io: + - 0.15.3 diff --git a/tests/test_files_make_migrators/test_graph.json 
b/tests/test_files_make_migrators/test_graph.json new file mode 100644 index 000000000..2c4e48fb9 --- /dev/null +++ b/tests/test_files_make_migrators/test_graph.json @@ -0,0 +1,63 @@ +{ + "directed": true, + "graph": { + "outputs_lut": { + "conda-forge-pinning": { + "__set__": true, + "elements": [ + "conda-forge-pinning" + ] + }, + "pydantic": { + "__set__": true, + "elements": [ + "pydantic" + ] + }, + "numpy": { + "__set__": true, + "elements": [ + "numpy" + ] + }, + "aws-c-io": { + "__set__": true, + "elements": [ + "aws-c-io" + ] + } + }, + "strong_exports": { + "__set__": true, + "elements": [] + } + }, + "links": [], + "multigraph": false, + "nodes": [ + { + "id": "conda-forge-pinning", + "payload": { + "__lazy_json__": "node_attrs/conda-forge-pinning.json" + } + }, + { + "id": "pydantic", + "payload": { + "__lazy_json__": "node_attrs/pydantic.json" + } + }, + { + "id": "numpy", + "payload": { + "__lazy_json__": "node_attrs/numpy.json" + } + }, + { + "id": "aws-c-io", + "payload": { + "__lazy_json__": "node_attrs/aws-c-io.json" + } + } + ] +} diff --git a/tests/test_flang.py b/tests/test_flang.py index 87d3c39c0..59197cf5a 100644 --- a/tests/test_flang.py +++ b/tests/test_flang.py @@ -1,5 +1,6 @@ import os +import networkx as nx import pytest from test_migrators import run_test_migration @@ -8,10 +9,13 @@ TEST_YAML_PATH = os.path.join(os.path.dirname(__file__), "test_yaml") +TOTAL_GRAPH = nx.DiGraph() +TOTAL_GRAPH.graph["outputs_lut"] = {} FLANG = FlangMigrator() VERSION_WITH_FLANG = Version( set(), piggy_back_migrations=[FLANG], + total_graph=TOTAL_GRAPH, ) diff --git a/tests/test_git_utils.py b/tests/test_git_utils.py index 14877d94e..b8f310056 100644 --- a/tests/test_git_utils.py +++ b/tests/test_git_utils.py @@ -47,11 +47,11 @@ @mock.patch("subprocess.run") @pytest.mark.parametrize("check_error", [True, False]) def test_git_cli_run_git_command_no_error( - subprocess_run_mock: MagicMock, check_error: bool + subprocess_run_mock: MagicMock, check_error: bool, tmp_path: Path ): cli = GitCli() - working_directory = Path("TEST_DIR") + working_directory = tmp_path cli._run_git_command( ["GIT_COMMAND", "ARG1", "ARG2"], working_directory, check_error @@ -67,10 +67,10 @@ def test_git_cli_run_git_command_no_error( @mock.patch("subprocess.run") -def test_git_cli_run_git_command_error(subprocess_run_mock: MagicMock): +def test_git_cli_run_git_command_error(subprocess_run_mock: MagicMock, tmp_path: Path): cli = GitCli() - working_directory = Path("TEST_DIR") + working_directory = tmp_path subprocess_run_mock.side_effect = subprocess.CalledProcessError( returncode=1, cmd="" @@ -84,14 +84,15 @@ def test_git_cli_run_git_command_error(subprocess_run_mock: MagicMock): @pytest.mark.parametrize("check_error", [True, False]) @mock.patch("subprocess.run") def test_git_cli_run_git_command_mock( - subprocess_run_mock: MagicMock, check_error: bool, suppress_all_output: bool + subprocess_run_mock: MagicMock, + check_error: bool, + suppress_all_output: bool, + tmp_path: Path, ): - """ - This test checks if all parameters are passed correctly to the subprocess.run function. 
- """ + """Check if all parameters are passed correctly to the subprocess.run function.""" cli = GitCli() - working_directory = Path("TEST_DIR") + working_directory = tmp_path cli._run_git_command( ["COMMAND", "ARG1", "ARG2"], working_directory, check_error, suppress_all_output @@ -116,9 +117,7 @@ def test_git_cli_run_git_command_mock( @pytest.mark.parametrize("check_error", [True, False]) def test_git_cli_run_git_command_stdout_captured(capfd, check_error: bool): - """ - Verify that the stdout of the git command is captured and not printed to the console. - """ + """Verify that the stdout of the git command is captured and not printed to the console.""" cli = GitCli() p = cli._run_git_command(["version"], check_error=check_error) @@ -130,9 +129,7 @@ def test_git_cli_run_git_command_stdout_captured(capfd, check_error: bool): def test_git_cli_run_git_command_stderr_not_captured(capfd): - """ - Verify that the stderr of the git command is not captured if no token is hidden. - """ + """Verify that the stderr of the git command is not captured if no token is hidden.""" cli = GitCli() p = cli._run_git_command(["non-existing-command"], check_error=False) @@ -190,11 +187,11 @@ def init_temp_git_repo(git_dir: Path, bare: bool = False): ) @mock.patch("conda_forge_tick.git_utils.GitCli._run_git_command") def test_git_cli_add_success_mock( - run_git_command_mock: MagicMock, n_paths: int, all_: bool + run_git_command_mock: MagicMock, n_paths: int, all_: bool, tmp_path: Path ): cli = GitCli() - git_dir = Path("TEST_DIR") + git_dir = tmp_path paths = [Path(f"test{i}.txt") for i in range(n_paths)] cli.add(git_dir, *paths, all_=all_) @@ -207,10 +204,12 @@ def test_git_cli_add_success_mock( @mock.patch("conda_forge_tick.git_utils.GitCli._run_git_command") -def test_git_cli_add_no_arguments_error(run_git_command_mock: MagicMock): +def test_git_cli_add_no_arguments_error( + run_git_command_mock: MagicMock, tmp_path: Path +): cli = GitCli() - git_dir = Path("TEST_DIR") + git_dir = tmp_path with pytest.raises(ValueError, match="Either pathspec or all_ must be set"): cli.add(git_dir) @@ -253,9 +252,9 @@ def test_git_cli_add_success(n_paths: int, all_: bool): @pytest.mark.parametrize("all_", [True, False]) @mock.patch("conda_forge_tick.git_utils.GitCli._run_git_command") def test_git_cli_commit_success_mock( - run_git_command_mock: MagicMock, all_: bool, allow_empty: bool + run_git_command_mock: MagicMock, all_: bool, allow_empty: bool, tmp_path: Path ): - git_dir = Path("GIT_DIR") + git_dir = tmp_path message = "COMMIT_MESSAGE" cli = GitCli() @@ -329,10 +328,10 @@ def test_git_cli_reset_hard_already_reset(): @mock.patch("conda_forge_tick.git_utils.GitCli._run_git_command") -def test_git_cli_reset_hard_mock(run_git_command_mock: MagicMock): +def test_git_cli_reset_hard_mock(run_git_command_mock: MagicMock, tmp_path: Path): cli = GitCli() - git_dir = Path("TEST_DIR") + git_dir = tmp_path cli.reset_hard(git_dir) @@ -449,10 +448,10 @@ def test_git_cli_clone_repo_mock_error(run_git_command_mock: MagicMock): @mock.patch("conda_forge_tick.git_utils.GitCli._run_git_command") -def test_git_cli_add_remote_mock(run_git_command_mock: MagicMock): +def test_git_cli_add_remote_mock(run_git_command_mock: MagicMock, tmp_path: Path): cli = GitCli() - git_dir = Path("TEST_DIR") + git_dir = tmp_path remote_name = "origin" remote_url = "https://git-repository.com/repo.git" @@ -485,10 +484,10 @@ def test_git_cli_add_remote(): @mock.patch("conda_forge_tick.git_utils.GitCli._run_git_command") -def 
test_git_cli_push_to_url_mock(run_git_command_mock: MagicMock): +def test_git_cli_push_to_url_mock(run_git_command_mock: MagicMock, tmp_path: Path): cli = GitCli() - git_dir = Path("TEST_DIR") + git_dir = tmp_path remote_url = "https://git-repository.com/repo.git" cli.push_to_url(git_dir, remote_url, "BRANCH_NAME") @@ -499,15 +498,15 @@ def test_git_cli_push_to_url_mock(run_git_command_mock: MagicMock): @mock.patch("conda_forge_tick.git_utils.GitCli._run_git_command") -def test_git_cli_push_to_url_mock_error(run_git_command_mock: MagicMock): +def test_git_cli_push_to_url_mock_error( + run_git_command_mock: MagicMock, tmp_path: Path +): cli = GitCli() run_git_command_mock.side_effect = GitCliError("Error") with pytest.raises(GitCliError): - cli.push_to_url( - Path("TEST_DIR"), "https://git-repository.com/repo.git", "BRANCH_NAME" - ) + cli.push_to_url(tmp_path, "https://git-repository.com/repo.git", "BRANCH_NAME") def test_git_cli_push_to_url_local_repository(): @@ -547,10 +546,10 @@ def test_git_cli_push_to_url_local_repository(): @mock.patch("conda_forge_tick.git_utils.GitCli._run_git_command") -def test_git_cli_add_token_mock(run_git_command_mock: MagicMock): +def test_git_cli_add_token_mock(run_git_command_mock: MagicMock, tmp_path: Path): cli = GitCli() - git_dir = Path("TEST_DIR") + git_dir = tmp_path origin = "https://git-repository.com" token = "TOKEN" @@ -572,10 +571,10 @@ def test_git_cli_add_token_mock(run_git_command_mock: MagicMock): @mock.patch("conda_forge_tick.git_utils.GitCli._run_git_command") -def test_git_cli_clear_token_mock(run_git_command_mock: MagicMock): +def test_git_cli_clear_token_mock(run_git_command_mock: MagicMock, tmp_path: Path): cli = GitCli() - git_dir = Path("TEST_DIR") + git_dir = tmp_path origin = "https://git-repository.com" @@ -593,10 +592,10 @@ def test_git_cli_clear_token_mock(run_git_command_mock: MagicMock): @mock.patch("conda_forge_tick.git_utils.GitCli._run_git_command") -def test_git_cli_fetch_all_mock(run_git_command_mock: MagicMock): +def test_git_cli_fetch_all_mock(run_git_command_mock: MagicMock, tmp_path: Path): cli = GitCli() - git_dir = Path("TEST_DIR") + git_dir = tmp_path cli.fetch_all(git_dir) @@ -637,11 +636,11 @@ def test_git_cli_does_branch_exist(): @mock.patch("conda_forge_tick.git_utils.GitCli._run_git_command") @pytest.mark.parametrize("does_exist", [True, False]) def test_git_cli_does_branch_exist_mock( - run_git_command_mock: MagicMock, does_exist: bool + run_git_command_mock: MagicMock, does_exist: bool, tmp_path: Path ): cli = GitCli() - git_dir = Path("TEST_DIR") + git_dir = tmp_path branch_name = "main" run_git_command_mock.return_value = ( @@ -699,11 +698,13 @@ def test_git_cli_does_remote_exist_mock( @mock.patch("conda_forge_tick.git_utils.GitCli._run_git_command") @pytest.mark.parametrize("track", [True, False]) -def test_git_cli_checkout_branch_mock(run_git_command_mock: MagicMock, track: bool): +def test_git_cli_checkout_branch_mock( + run_git_command_mock: MagicMock, track: bool, tmp_path: Path +): branch_name = "BRANCH_NAME" cli = GitCli() - git_dir = Path("TEST_DIR") + git_dir = tmp_path cli.checkout_branch(git_dir, branch_name, track=track) @@ -783,10 +784,10 @@ def test_git_cli_diffed_files_no_diff(): @mock.patch("conda_forge_tick.git_utils.GitCli._run_git_command") -def test_git_cli_diffed_files_mock(run_git_command_mock: MagicMock): +def test_git_cli_diffed_files_mock(run_git_command_mock: MagicMock, tmp_path: Path): cli = GitCli() - git_dir = Path("TEST_DIR") + git_dir = tmp_path commit = "COMMIT" 
run_git_command_mock.return_value = subprocess.CompletedProcess( @@ -969,10 +970,11 @@ def test_git_platform_backend_clone_fork_and_branch( clone_fork_and_branch_mock: MagicMock, user_mock: MagicMock, backend: GitPlatformBackend, + tmp_path: Path, ): upstream_owner = "UPSTREAM-OWNER" repo_name = "REPO" - target_dir = Path("TARGET_DIR") + target_dir = tmp_path new_branch = "NEW_BRANCH" base_branch = "BASE_BRANCH" @@ -1093,11 +1095,14 @@ def test_github_backend_fork_not_exists_repo_found( @mock.patch("conda_forge_tick.git_utils.GitCli.add_token") @mock.patch("conda_forge_tick.git_utils.GitCli.push_to_url") def test_github_backend_push_to_repository( - push_to_url_mock: MagicMock, add_token_mock: MagicMock, clear_token_mock: MagicMock + push_to_url_mock: MagicMock, + add_token_mock: MagicMock, + clear_token_mock: MagicMock, + tmp_path: Path, ): backend = GitHubBackend.from_token("THIS_IS_THE_TOKEN") - git_dir = Path("GIT_DIR") + git_dir = tmp_path backend.push_to_repository("OWNER", "REPO", git_dir, "BRANCH_NAME") @@ -1398,9 +1403,7 @@ def test_github_backend_create_pull_request_validation_error( github_response_get_repo: dict, github_response_create_pull_validation_error: dict, ): - """ - Test that other GitHub API 422 validation errors are not caught as DuplicatePullRequestError. - """ + """Test that other GitHub API 422 validation errors are not caught as DuplicatePullRequestError.""" def request_side_effect(method, _url, **_kwargs): response = requests.Response() @@ -1657,17 +1660,17 @@ def test_dry_run_backend_get_remote_url_existing_fork(): assert url == "https://github.com/conda-forge/pytest-feedstock.git" -def test_dry_run_backend_push_to_repository(caplog): +def test_dry_run_backend_push_to_repository(caplog, tmp_path: Path): caplog.set_level(logging.DEBUG) backend = DryRunBackend() - git_dir = Path("GIT_DIR") + git_dir = tmp_path backend.push_to_repository("OWNER", "REPO", git_dir, "BRANCH_NAME") assert ( - "Dry Run: Pushing changes from GIT_DIR to OWNER/REPO on branch BRANCH_NAME" + f"Dry Run: Pushing changes from {git_dir} to OWNER/REPO on branch BRANCH_NAME" in caplog.text ) diff --git a/tests/test_lazy_json_backends.py b/tests/test_lazy_json_backends.py index 44ea8a8c0..e1566e1fb 100644 --- a/tests/test_lazy_json_backends.py +++ b/tests/test_lazy_json_backends.py @@ -16,7 +16,6 @@ import conda_forge_tick.utils from conda_forge_tick.git_utils import github_client from conda_forge_tick.lazy_json_backends import ( - CF_TICK_GRAPH_GITHUB_BACKEND_BASE_URL, LAZY_JSON_BACKENDS, GithubLazyJsonBackend, LazyJson, @@ -37,6 +36,7 @@ touch_all_lazy_json_refs, ) from conda_forge_tick.os_utils import pushd +from conda_forge_tick.settings import settings HAVE_MONGODB = ( "MONGODB_CONNECTION_STRING" in conda_forge_tick.global_sensitive_env.classified_info @@ -624,7 +624,7 @@ def test_lazy_json_backends_hashmap(tmpdir): def test_github_base_url() -> None: github_backend = GithubLazyJsonBackend() - assert github_backend.base_url == CF_TICK_GRAPH_GITHUB_BACKEND_BASE_URL + "/" + assert github_backend.base_url == settings().graph_github_backend_raw_base_url github_backend.base_url = "https://github.com/lorem/ipsum" assert github_backend.base_url == "https://github.com/lorem/ipsum" + "/" diff --git a/tests/test_libboost.py b/tests/test_libboost.py index 15b2ba12e..b8f0340ab 100644 --- a/tests/test_libboost.py +++ b/tests/test_libboost.py @@ -1,5 +1,6 @@ import os +import networkx as nx import pytest from test_migrators import run_test_migration @@ -9,10 +10,13 @@ TEST_YAML_PATH = 
os.path.join(os.path.dirname(__file__), "test_yaml") +TOTAL_GRAPH = nx.DiGraph() +TOTAL_GRAPH.graph["outputs_lut"] = {} LIBBOOST = LibboostMigrator() VERSION_WITH_LIBBOOST = Version( set(), piggy_back_migrations=[LIBBOOST], + total_graph=TOTAL_GRAPH, ) diff --git a/tests/test_license_migrator.py b/tests/test_license_migrator.py index 4bde56ba4..e75cbefd6 100644 --- a/tests/test_license_migrator.py +++ b/tests/test_license_migrator.py @@ -1,11 +1,14 @@ +import networkx as nx from flaky import flaky from test_migrators import run_test_migration from conda_forge_tick.migrators import LicenseMigrator, Version from conda_forge_tick.migrators.license import _munge_licenses +TOTAL_GRAPH = nx.DiGraph() +TOTAL_GRAPH.graph["outputs_lut"] = {} LM = LicenseMigrator() -VER_LM = Version(set(), piggy_back_migrations=[LM]) +VER_LM = Version(set(), piggy_back_migrations=[LM], total_graph=TOTAL_GRAPH) version_license = """\ {% set version = "0.8" %} diff --git a/tests/test_make_migrators.py b/tests/test_make_migrators.py new file mode 100644 index 000000000..0f9d06113 --- /dev/null +++ b/tests/test_make_migrators.py @@ -0,0 +1,198 @@ +import os +import shutil +import subprocess +import tempfile +from pathlib import Path + +import networkx as nx +import pytest +from pytest import FixtureRequest + +from conda_forge_tick.lazy_json_backends import ( + get_sharded_path, + lazy_json_override_backends, +) +from conda_forge_tick.make_migrators import ( + create_migration_yaml_creator, + dump_migrators, + initialize_migrators, + load_migrators, +) +from conda_forge_tick.migrators import MigrationYaml, MigrationYamlCreator +from conda_forge_tick.os_utils import pushd +from conda_forge_tick.utils import load_existing_graph, load_graph, pluck + +TEST_FILES_DIR = Path(__file__).parent / "test_files_make_migrators" +TEST_GRAPH_FILE = TEST_FILES_DIR / "test_graph.json" +TEST_CONDA_BUILD_CONFIG_FILE = TEST_FILES_DIR / "test_conda_build_config.yaml" + +CONDA_FORGE_PINNINGS_ATTRS_FILE = TEST_FILES_DIR / "conda-forge-pinning_node_attrs.json" +NUMPY_NODE_ATTRS_FILE = TEST_FILES_DIR / "numpy_node_attrs.json" + + +@pytest.mark.parametrize("enable_containers", [True, False]) +class TestCreateMigrationYamlCreator: + @pytest.fixture + def inject_conda_build_config(self): + with tempfile.TemporaryDirectory(prefix="cf-graph") as s_tmpdir: + conda_prefix_dir = Path(s_tmpdir) + shutil.copy( + TEST_CONDA_BUILD_CONFIG_FILE, + conda_prefix_dir / "conda_build_config.yaml", + ) + + old_conda_prefix = os.environ["CONDA_PREFIX"] + os.environ["CONDA_PREFIX"] = str(conda_prefix_dir) + yield + os.environ["CONDA_PREFIX"] = old_conda_prefix + + @pytest.fixture + def prepared_graph(self, request: pytest.FixtureRequest): + """ + Get the graph with the node attrs files that should be present in the + graph as indirect fixture parameters. 
+ """ + node_attrs_files: list[str] = request.param + + with tempfile.TemporaryDirectory(prefix="cf-graph") as s_tmpdir: + cf_graph_dir = Path(s_tmpdir) + shutil.copy(TEST_GRAPH_FILE, cf_graph_dir / "graph.json") + + for node_attrs_file in node_attrs_files: + origin = TEST_FILES_DIR / f"{node_attrs_file}_node_attrs.json" + dest = cf_graph_dir / get_sharded_path( + f"node_attrs/{node_attrs_file}.json" + ) + os.makedirs(dest.parent, exist_ok=True) + shutil.copy(origin, dest) + + old_cwd = os.getcwd() + os.chdir(cf_graph_dir) + yield load_existing_graph() + os.chdir(old_cwd) + + @pytest.mark.parametrize( + "prepared_graph", [["conda-forge-pinning", "numpy"]], indirect=True + ) + def test_successful_recipe_v0( + self, + prepared_graph: nx.DiGraph, + inject_conda_build_config, + enable_containers: bool, + request: FixtureRequest, + ): + if enable_containers: + request.getfixturevalue("use_containers") + + # feedstock under test: numpy + migrators: list[MigrationYamlCreator] = [] + create_migration_yaml_creator(migrators, prepared_graph) + + assert len(migrators) == 1 + migrator = migrators[0] + + assert migrator.feedstock_name == "numpy" + assert migrator.package_name == "numpy" + assert migrator.current_pin == "1.26" + assert migrator.new_pin_version == "2" + assert migrator.pin_spec == "x" + + assert len(migrator.effective_graph) == 1 + assert "conda-forge-pinning" in migrator.effective_graph + + @pytest.mark.parametrize( + "prepared_graph", [["conda-forge-pinning", "aws-c-io"]], indirect=True + ) + def test_successful_recipe_v1( + self, + prepared_graph: nx.DiGraph, + inject_conda_build_config, + enable_containers: bool, + request: FixtureRequest, + ): + if enable_containers: + request.getfixturevalue("use_containers") + + # feedstock under test: aws-c-io + migrators: list[MigrationYamlCreator] = [] + create_migration_yaml_creator(migrators, prepared_graph) + + assert len(migrators) == 1 + migrator = migrators[0] + + assert migrator.feedstock_name == "aws-c-io" + assert migrator.package_name == "aws_c_io" + assert migrator.current_pin == "0.15.3" + assert migrator.new_pin_version == "0.18.0" + assert migrator.pin_spec == "x.x.x" + + assert len(migrator.effective_graph) == 1 + assert "conda-forge-pinning" in migrator.effective_graph + + +def test_make_migrators_initialize_migrators(): + with tempfile.TemporaryDirectory() as tmpdir: + subprocess.run( + [ + "git", + "clone", + "--depth=1", + "https://github.com/regro/cf-graph-countyfair.git", + ], + cwd=tmpdir, + check=True, + ) + with ( + pushd(os.path.join(tmpdir, "cf-graph-countyfair")), + lazy_json_override_backends(["file"], use_file_cache=True), + ): + gx = load_graph() + + assert "payload" in gx.nodes["conda-forge-pinning"], ( + "Payload not found for conda-forge-pinning!" + ) + + nodes_to_keep = set() + # random selection of packages to cut the graph down + nodes_to_test = [ + "ngmix", + "ultraplot", + "r-semaphore", + "r-tidyverse", + "conda-forge-pinning", + ] + while nodes_to_test: + pkg = nodes_to_test.pop(0) + if pkg in gx.nodes: + nodes_to_keep.add(pkg) + for n in gx.predecessors(pkg): + if n not in nodes_to_keep: + nodes_to_test.append(n) + + for pkg in set(gx.nodes) - nodes_to_keep: + pluck(gx, pkg) + + # post plucking cleanup + gx.remove_edges_from(nx.selfloop_edges(gx)) + + print( + "Number of nodes in the graph after plucking:", + len(gx.nodes), + flush=True, + ) + + migrators = initialize_migrators(gx) + + assert len(migrators) > 0, "No migrators found!" 
+ for migrator in migrators: + assert migrator is not None, "Migrator is None!" + assert hasattr(migrator, "effective_graph"), ( + "Migrator has no effective graph!" + ) + assert hasattr(migrator, "graph"), "Migrator has no graph attribute!" + if isinstance(migrator, MigrationYaml): + assert "conda-forge-pinning" in migrator.graph.nodes + + # dump and load the migrators + dump_migrators(migrators) + load_migrators(skip_paused=False) diff --git a/tests/test_matplotlib_base.py b/tests/test_matplotlib_base.py index cea900f14..f844b950d 100644 --- a/tests/test_matplotlib_base.py +++ b/tests/test_matplotlib_base.py @@ -1,10 +1,13 @@ import os +import networkx as nx import pytest from test_migrators import run_test_migration from conda_forge_tick.migrators import MatplotlibBase +TOTAL_GRAPH = nx.DiGraph() +TOTAL_GRAPH.graph["outputs_lut"] = {} MPLB = MatplotlibBase( old_pkg="matplotlib", new_pkg="matplotlib-base", @@ -12,6 +15,7 @@ "Unless you need `pyqt`, recipes should depend only on `matplotlib-base`." ), pr_limit=5, + total_graph=TOTAL_GRAPH, ) diff --git a/tests/test_migration_runner.py b/tests/test_migration_runner.py index ef875fa6d..511853106 100644 --- a/tests/test_migration_runner.py +++ b/tests/test_migration_runner.py @@ -2,6 +2,7 @@ import pprint import subprocess +import networkx as nx from test_migrators import sample_yaml_rebuild, updated_yaml_rebuild from conda_forge_tick.migration_runner import run_migration_local @@ -19,7 +20,9 @@ class _MigrationYaml(NoFilter, MigrationYaml): pass -yaml_rebuild = _MigrationYaml(yaml_contents="{}", name="hi") +TOTAL_GRAPH = nx.DiGraph() +TOTAL_GRAPH.graph["outputs_lut"] = {} +yaml_rebuild = _MigrationYaml(yaml_contents="{}", name="hi", total_graph=TOTAL_GRAPH) yaml_rebuild.cycles = [] diff --git a/tests/test_migration_yaml_migration.py b/tests/test_migration_yaml_migration.py index bf764460f..6c4b1a55c 100644 --- a/tests/test_migration_yaml_migration.py +++ b/tests/test_migration_yaml_migration.py @@ -3,7 +3,6 @@ import re from unittest import mock -import networkx as nx import pytest from conda_forge_tick.feedstock_parser import populate_feedstock_attributes @@ -11,9 +10,6 @@ from conda_forge_tick.os_utils import eval_cmd, pushd from conda_forge_tick.utils import frozen_to_json_friendly, parse_meta_yaml -G = nx.DiGraph() -G.add_node("conda", reqs=["python"], payload={}) -G.graph["outputs_lut"] = {} os.environ["RUN_URL"] = "hi world" YAML_PATH = os.path.join(os.path.dirname(__file__), "test_yaml") @@ -124,7 +120,7 @@ [(IN_YAML, OUT_YAML), (IN_YAML_TODAY, OUT_YAML_TODAY)], ) @mock.patch("time.time") -def test_migration_yaml_migration(tmock, in_out_yaml, caplog, tmp_path): +def test_migration_yaml_migration(tmock, in_out_yaml, caplog, tmp_path, test_graph): caplog.set_level( logging.DEBUG, logger="conda_forge_tick.migrators.migration_yaml", @@ -136,14 +132,13 @@ def test_migration_yaml_migration(tmock, in_out_yaml, caplog, tmp_path): pin_spec = "x.x" MYM = MigrationYamlCreator( - pname, - pin_ver, - curr_pin, - pin_spec, - "hi", - G, - G, + package_name=pname, + new_pin_version=pin_ver, + current_pin=curr_pin, + pin_spec=pin_spec, + feedstock_name="hi", pinnings=["libboost_devel", "libboost_python_devel"], + total_graph=test_graph, ) with pushd(tmp_path): diff --git a/tests/test_migrator_nvtools.py b/tests/test_migrator_nvtools.py index 19cd08a8e..8112e9440 100644 --- a/tests/test_migrator_nvtools.py +++ b/tests/test_migrator_nvtools.py @@ -28,13 +28,13 @@ def write_file_contents(filename, buffer): file.write(buffer) -def test_nvtools_migrate(): 
+def test_nvtools_migrate(test_graph): backups = [ store_file_contents(os.path.join(mock_recipe_dir, f)) for f in ["build.sh", "meta.yaml", "../conda-forge.yml"] ] - migrator = AddNVIDIATools() + migrator = AddNVIDIATools(total_graph=test_graph) migrator.migrate( mock_recipe_dir, mock_node_attrs, diff --git a/tests/test_migrator_to_json.py b/tests/test_migrator_to_json.py index 7c9953f62..dbb38c2bf 100644 --- a/tests/test_migrator_to_json.py +++ b/tests/test_migrator_to_json.py @@ -3,11 +3,15 @@ import pprint import networkx as nx +import pytest import conda_forge_tick.migrators from conda_forge_tick.lazy_json_backends import dumps, loads from conda_forge_tick.migrators import make_from_lazy_json_data +TOTAL_GRAPH = nx.DiGraph() +TOTAL_GRAPH.graph["outputs_lut"] = {} + def test_migrator_to_json_dep_update_minimigrator(): python_nodes = ["blah"] @@ -82,6 +86,7 @@ def test_migrator_to_json_version(): conda_forge_tick.migrators.DuplicateLinesCleanup(), conda_forge_tick.migrators.MiniReplacement(old_pkg="foo", new_pkg="bar"), ], + total_graph=TOTAL_GRAPH, ) data = migrator.to_lazy_json_data() pprint.pprint(data) @@ -100,24 +105,19 @@ def test_migrator_to_json_version(): assert dumps(migrator2.to_lazy_json_data()) == lzj_data -def test_migrator_to_json_migration_yaml_creator(): - gx = nx.DiGraph() - gx.add_node("conda", reqs=["python"], payload={}, blah="foo") - gx.graph["outputs_lut"] = {} - +def test_migrator_to_json_migration_yaml_creator(test_graph): pname = "boost" pin_ver = "1.99.0" curr_pin = "1.70.0" pin_spec = "x.x" migrator = conda_forge_tick.migrators.MigrationYamlCreator( - pname, - pin_ver, - curr_pin, - pin_spec, - "hi", - gx, - gx, + package_name=pname, + new_pin_version=pin_ver, + current_pin=curr_pin, + pin_spec=pin_spec, + feedstock_name="hi", + total_graph=test_graph, blah="foo", ) data = migrator.to_lazy_json_data() @@ -147,6 +147,7 @@ def test_migrator_to_json_matplotlib_base(): "Unless you need `pyqt`, recipes should depend only on `matplotlib-base`." ), pr_limit=5, + total_graph=TOTAL_GRAPH, ) data = migrator.to_lazy_json_data() pprint.pprint(data) @@ -166,9 +167,10 @@ def test_migrator_to_json_matplotlib_base(): def test_migrator_to_json_migration_yaml(): migrator = conda_forge_tick.migrators.MigrationYaml( - yaml_contents="hello world", + yaml_contents="{}", name="hi", blah="foo", + total_graph=TOTAL_GRAPH, ) data = migrator.to_lazy_json_data() @@ -189,7 +191,7 @@ def test_migrator_to_json_migration_yaml(): assert dumps(migrator2.to_lazy_json_data()) == lzj_data -def test_migrator_to_json_rebuild(): +def test_migrator_to_json_replacement(): migrator = conda_forge_tick.migrators.Replacement( old_pkg="matplotlib", new_pkg="matplotlib-base", @@ -197,6 +199,7 @@ def test_migrator_to_json_rebuild(): "Unless you need `pyqt`, recipes should depend only on `matplotlib-base`." 
), pr_limit=5, + total_graph=TOTAL_GRAPH, ) data = migrator.to_lazy_json_data() @@ -218,12 +221,12 @@ def test_migrator_to_json_rebuild(): def test_migrator_to_json_arch(): gx = nx.DiGraph() gx.add_node("conda", reqs=["python"], payload={}, blah="foo") + gx.graph["outputs_lut"] = {} migrator = conda_forge_tick.migrators.ArchRebuild( target_packages=["python"], - graph=gx, pr_limit=5, - name="aarch64 and ppc64le addition", + total_graph=gx, ) data = migrator.to_lazy_json_data() @@ -245,12 +248,12 @@ def test_migrator_to_json_arch(): def test_migrator_to_json_osx_arm(): gx = nx.DiGraph() gx.add_node("conda", reqs=["python"], payload={}, blah="foo") + gx.graph["outputs_lut"] = {} migrator = conda_forge_tick.migrators.OSXArm( target_packages=["python"], - graph=gx, + total_graph=gx, pr_limit=5, - name="arm osx addition", ) data = migrator.to_lazy_json_data() @@ -267,3 +270,56 @@ def test_migrator_to_json_osx_arm(): ] assert isinstance(migrator2, conda_forge_tick.migrators.OSXArm) assert dumps(migrator2.to_lazy_json_data()) == lzj_data + + +def test_migrator_to_json_win_arm64(): + gx = nx.DiGraph() + gx.add_node("conda", reqs=["python"], payload={}, blah="foo") + gx.graph["outputs_lut"] = {} + + migrator = conda_forge_tick.migrators.WinArm64( + target_packages=["python"], + total_graph=gx, + pr_limit=5, + ) + + data = migrator.to_lazy_json_data() + pprint.pprint(data) + lzj_data = dumps(data) + print("lazy json data:\n", lzj_data) + assert data["__migrator__"] is True + assert data["class"] == "WinArm64" + assert data["name"] == "support_windows_arm64_platform" + + migrator2 = make_from_lazy_json_data(loads(lzj_data)) + assert [pgm.__class__.__name__ for pgm in migrator2.piggy_back_migrations] == [ + pgm.__class__.__name__ for pgm in migrator.piggy_back_migrations + ] + assert isinstance(migrator2, conda_forge_tick.migrators.WinArm64) + assert dumps(migrator2.to_lazy_json_data()) == lzj_data + + +@pytest.mark.parametrize( + "klass", + [ + conda_forge_tick.migrators.AddNVIDIATools, + conda_forge_tick.migrators.RebuildBroken, + ], +) +def test_migrator_to_json_others(klass): + migrator = klass(total_graph=TOTAL_GRAPH) + + data = migrator.to_lazy_json_data() + pprint.pprint(data) + lzj_data = dumps(data) + print("lazy json data:\n", lzj_data) + assert data["__migrator__"] is True + assert data["class"] == klass.__name__ + assert data["name"] == migrator.name.replace(" ", "_") + + migrator2 = make_from_lazy_json_data(loads(lzj_data)) + assert [pgm.__class__.__name__ for pgm in migrator2.piggy_back_migrations] == [ + pgm.__class__.__name__ for pgm in migrator.piggy_back_migrations + ] + assert isinstance(migrator2, klass) + assert dumps(migrator2.to_lazy_json_data()) == lzj_data diff --git a/tests/test_migrators.py b/tests/test_migrators.py index e4d50eafd..1f0ee1f76 100644 --- a/tests/test_migrators.py +++ b/tests/test_migrators.py @@ -5,6 +5,7 @@ import subprocess from pathlib import Path +import networkx as nx import pytest import yaml @@ -261,12 +262,15 @@ class _MigrationYaml(NoFilter, MigrationYaml): pass -yaml_rebuild = _MigrationYaml(yaml_contents="hello world", name="hi") +TOTAL_GRAPH = nx.DiGraph() +TOTAL_GRAPH.graph["outputs_lut"] = {} +yaml_rebuild = _MigrationYaml(yaml_contents="{}", name="hi", total_graph=TOTAL_GRAPH) yaml_rebuild.cycles = [] yaml_rebuild_no_build_number = _MigrationYaml( - yaml_contents="hello world", + yaml_contents="{}", name="hi", bump_number=0, + total_graph=TOTAL_GRAPH, ) yaml_rebuild_no_build_number.cycles = [] @@ -449,7 +453,7 @@ def 
test_yaml_migration_rebuild_no_buildno(tmp_path): - kthyng """ -version = Version(set()) +version = Version(set(), total_graph=TOTAL_GRAPH) matplotlib = Replacement( old_pkg="matplotlib", @@ -458,6 +462,7 @@ def test_yaml_migration_rebuild_no_buildno(tmp_path): "Unless you need `pyqt`, recipes should depend only on `matplotlib-base`." ), pr_limit=5, + total_graph=TOTAL_GRAPH, ) @@ -545,6 +550,8 @@ def run_test_migration( name = parse_meta_yaml(inp, cbc_path=conda_build_config_file)["package"][ "name" ] + if name is None: + name = "blah" except Exception: name = "blah" @@ -566,6 +573,8 @@ def run_test_migration( name = parse_recipe_yaml(inp, cbc_path=conda_build_config_file)["package"][ "name" ] + if name is None: + name = "blah" except Exception: name = "blah" @@ -578,14 +587,14 @@ def run_test_migration( ) pmy["version"] = pmy["meta_yaml"]["package"]["version"] pmy["raw_meta_yaml"] = inp - pmy.update(kwargs) - try: - if "new_version" in kwargs: - pmy["version_pr_info"] = {"new_version": kwargs["new_version"]} - assert m.filter(pmy) == should_filter - finally: - pmy.pop("version_pr_info", None) + if "new_version" in kwargs: + pmy["version_pr_info"] = {"new_version": kwargs.pop("new_version")} + + pmy.update(kwargs) + + assert m.filter(pmy) == should_filter + if should_filter: return pmy @@ -602,7 +611,7 @@ def run_test_migration( hash_type=pmy.get("hash_type", "sha256"), ) - if make_body: + if make_body or prb: fctx = ClonedFeedstockContext( feedstock_name=name, attrs=pmy, @@ -610,7 +619,9 @@ def run_test_migration( ) m.effective_graph.add_node(name) m.effective_graph.nodes[name]["payload"] = MockLazyJson({}) - m.pr_body(fctx) + prb_from_m = m.pr_body(fctx) + else: + prb_from_m = None assert mr_out == mr if not mr: @@ -628,17 +639,11 @@ def run_test_migration( output = pat.sub("", output) assert actual_output == output - # TODO: fix subgraph here (need this to be xsh file) - if isinstance(m, Version): - pass - else: - assert prb in m.pr_body(None) - try: - if "new_version" in kwargs: - pmy["version_pr_info"] = {"new_version": kwargs["new_version"]} - assert m.filter(pmy) is True - finally: - pmy.pop("version_pr_info", None) + + if prb_from_m and prb: + assert prb in prb_from_m + + assert m.filter(pmy) is True return pmy diff --git a/tests/test_migrators_v1.py b/tests/test_migrators_v1.py index 4e6a142ed..18706b586 100644 --- a/tests/test_migrators_v1.py +++ b/tests/test_migrators_v1.py @@ -2,6 +2,7 @@ import os +import networkx as nx from test_migrators import run_test_migration, run_test_yaml_migration from test_recipe_yaml_parsing import TEST_RECIPE_YAML_PATH @@ -21,12 +22,15 @@ class _MigrationYaml(NoFilter, MigrationYaml): pass -yaml_rebuild = _MigrationYaml(yaml_contents="hello world", name="hi") +TOTAL_GRAPH = nx.DiGraph() +TOTAL_GRAPH.graph["outputs_lut"] = {} +yaml_rebuild = _MigrationYaml(yaml_contents="{}", name="hi", total_graph=TOTAL_GRAPH) yaml_rebuild.cycles = [] yaml_rebuild_no_build_number = _MigrationYaml( - yaml_contents="hello world", + yaml_contents="{}", name="hi", bump_number=0, + total_graph=TOTAL_GRAPH, ) yaml_rebuild_no_build_number.cycles = [] @@ -38,7 +42,7 @@ def sample_yaml_rebuild() -> str: def test_yaml_migration_rebuild(tmp_path): - """Test that the build number is bumped""" + """Test that the build number is bumped.""" sample = sample_yaml_rebuild() updated_yaml_rebuild = sample.replace("number: 0", "number: 1") @@ -83,7 +87,7 @@ def test_yaml_migration_rebuild_no_buildno(tmp_path): # Run Matplotlib mini-migrator ### 
################################################################## -version = Version(set()) +version = Version(set(), total_graph=TOTAL_GRAPH) matplotlib = Replacement( old_pkg="matplotlib", @@ -92,6 +96,7 @@ def test_yaml_migration_rebuild_no_buildno(tmp_path): "Unless you need `pyqt`, recipes should depend only on `matplotlib-base`." ), pr_limit=5, + total_graph=TOTAL_GRAPH, ) diff --git a/tests/test_minireplacement.py b/tests/test_minireplacement.py index e10227af6..aeff9ff7a 100644 --- a/tests/test_minireplacement.py +++ b/tests/test_minireplacement.py @@ -1,5 +1,6 @@ from pathlib import Path +import networkx as nx import pytest from test_migrators import run_test_migration @@ -9,17 +10,23 @@ JPEGJPEGTURBO = MiniReplacement(old_pkg="jpeg", new_pkg="libjpeg-turbo") QTQTMAIN = MiniReplacement(old_pkg="qt", new_pkg="qt-main") +TOTAL_GRAPH = nx.DiGraph() +TOTAL_GRAPH.graph["outputs_lut"] = {} + VERSION_WITH_XZLIBLZMADEVEL = Version( set(), piggy_back_migrations=[XZLIBLZMADEVEL], + total_graph=TOTAL_GRAPH, ) VERSION_WITH_JPEGTURBO = Version( set(), piggy_back_migrations=[JPEGJPEGTURBO], + total_graph=TOTAL_GRAPH, ) VERSION_WITH_QTQTMAIN = Version( set(), piggy_back_migrations=[QTQTMAIN], + total_graph=TOTAL_GRAPH, ) YAML_PATHS = [ diff --git a/tests/test_mpi_pin_run_as_build_cleanup_migrator.py b/tests/test_mpi_pin_run_as_build_cleanup_migrator.py index 0f40ad332..d81ed2e55 100644 --- a/tests/test_mpi_pin_run_as_build_cleanup_migrator.py +++ b/tests/test_mpi_pin_run_as_build_cleanup_migrator.py @@ -1,5 +1,6 @@ import os +import networkx as nx import pytest from ruamel.yaml import YAML from test_migrators import run_test_migration @@ -7,9 +8,12 @@ from conda_forge_tick.migrators import MPIPinRunAsBuildCleanup, Version from conda_forge_tick.migrators.mpi_pin_run_as_build import MPIS +TOTAL_GRAPH = nx.DiGraph() +TOTAL_GRAPH.graph["outputs_lut"] = {} VERSION_CF = Version( set(), piggy_back_migrations=[MPIPinRunAsBuildCleanup()], + total_graph=TOTAL_GRAPH, ) YAML_PATH = os.path.join(os.path.dirname(__file__), "test_yaml") diff --git a/tests/test_noarch_python_min_migrator.py b/tests/test_noarch_python_min_migrator.py index c0fe3a7d9..d10c54b48 100644 --- a/tests/test_noarch_python_min_migrator.py +++ b/tests/test_noarch_python_min_migrator.py @@ -2,6 +2,7 @@ import tempfile import textwrap +import networkx as nx import pytest from test_migrators import run_test_migration @@ -21,6 +22,9 @@ + str(int(GLOBAL_PYTHON_MIN.split(".")[1]) + 1) ) +TOTAL_GRAPH = nx.DiGraph() +TOTAL_GRAPH.graph["outputs_lut"] = {} + @pytest.mark.parametrize("replace_host_with_build", [True, False]) @pytest.mark.parametrize( @@ -561,7 +565,7 @@ def test_noarch_python_min_migrator(tmp_path, name): os.path.join(TEST_YAML_PATH, f"noarch_python_min_{name}_after_meta.yaml") ) as f: recipe_after = f.read() - m = NoarchPythonMinMigrator() + m = NoarchPythonMinMigrator(total_graph=TOTAL_GRAPH) run_test_migration( m=m, inp=recipe_before, diff --git a/tests/test_pip_check_migrator.py b/tests/test_pip_check_migrator.py index 81d5f6da7..ee618e776 100644 --- a/tests/test_pip_check_migrator.py +++ b/tests/test_pip_check_migrator.py @@ -1,12 +1,15 @@ import os +import networkx as nx import pytest from test_migrators import run_test_migration from conda_forge_tick.migrators import PipCheckMigrator, Version +TOTAL_GRAPH = nx.DiGraph() +TOTAL_GRAPH.graph["outputs_lut"] = {} PC = PipCheckMigrator() -VERSION_PC = Version(set(), piggy_back_migrations=[PC]) +VERSION_PC = Version(set(), piggy_back_migrations=[PC], total_graph=TOTAL_GRAPH) 
YAML_PATH = os.path.join(os.path.dirname(__file__), "test_yaml") diff --git a/tests/test_pypi_org.py b/tests/test_pypi_org.py index d61f79c90..32389d37d 100644 --- a/tests/test_pypi_org.py +++ b/tests/test_pypi_org.py @@ -1,5 +1,6 @@ import os +import networkx as nx import pytest from test_migrators import run_test_migration @@ -7,11 +8,13 @@ TEST_YAML_PATH = os.path.join(os.path.dirname(__file__), "test_yaml") - +TOTAL_GRAPH = nx.DiGraph() +TOTAL_GRAPH.graph["outputs_lut"] = {} PYPI_ORG = PyPIOrgMigrator() VERSION_WITH_PYPI_ORG = Version( set(), piggy_back_migrations=[PYPI_ORG], + total_graph=TOTAL_GRAPH, ) diff --git a/tests/test_pypi_whl_mini.py b/tests/test_pypi_whl_mini.py index 0081ede62..36dd5b3af 100644 --- a/tests/test_pypi_whl_mini.py +++ b/tests/test_pypi_whl_mini.py @@ -1,5 +1,6 @@ import os +import networkx as nx import pytest import requests from flaky import flaky @@ -7,11 +8,15 @@ from conda_forge_tick.migrators import PipWheelMigrator, Version +TOTAL_GRAPH = nx.DiGraph() +TOTAL_GRAPH.graph["outputs_lut"] = {} + wheel_mig = PipWheelMigrator() version_migrator_whl = Version( set(), piggy_back_migrations=[wheel_mig], + total_graph=TOTAL_GRAPH, ) YAML_PATH = os.path.join(os.path.dirname(__file__), "test_yaml") @@ -115,9 +120,8 @@ def test_migrate_opentelemetry(tmp_dir_with_conf): @pytest.mark.parametrize("package", ["icu", "pipcheck_simple"]) @flaky def test_migrate_non_python(tmp_dir_with_conf, package): - """Shouldn't run for non-python recipes or recipes that - have not opted in to the wheel migrator. - """ + # the migrator shouldn't run for non-python recipes or recipes that + # have not opted in to the wheel migrator. with open(os.path.join(YAML_PATH, f"version_{package}.yaml")) as fp: in_yaml = fp.read() @@ -133,7 +137,7 @@ def test_migrate_non_python(tmp_dir_with_conf, package): @flaky def test_migrate_thrift(tmp_dir_with_conf): - """Packages without a wheel should be filtered out""" + """Packages without a wheel should be filtered out.""" url = ( "https://raw.githubusercontent.com/conda-forge/thrift-feedstock/" "e0327f2a8b75151428e22c722b311a4ac9fccf41/recipe/meta.yaml" @@ -152,7 +156,7 @@ def test_migrate_thrift(tmp_dir_with_conf): @flaky def test_migrate_psutil(tmp_dir_with_conf): - """Packages with many wheels should be filtered out""" + """Packages with many wheels should be filtered out.""" url = ( "https://raw.githubusercontent.com/conda-forge/psutil-feedstock/" "0cfe57ff0dd639ed872e6e1d220a297ddc3b9100/recipe/meta.yaml" @@ -171,7 +175,7 @@ def test_migrate_psutil(tmp_dir_with_conf): @flaky def test_migrate_black(tmp_dir_with_conf): - """Black has a wheel so this minimigrator should attempt to run""" + """Black has a wheel so this minimigrator should attempt to run.""" url = ( "https://raw.githubusercontent.com/conda-forge/black-feedstock/" "fc15d64cbd793b31a26cae5347dedcf42f562f1c/recipe/meta.yaml" @@ -191,7 +195,7 @@ def test_migrate_black(tmp_dir_with_conf): @flaky def test_migrate_black_no_conf(tmp_path): - """Without enabling the feature, don't run for black""" + """Without enabling the feature, don't run for black.""" url = ( "https://raw.githubusercontent.com/conda-forge/black-feedstock/" "fc15d64cbd793b31a26cae5347dedcf42f562f1c/recipe/meta.yaml" diff --git a/tests/test_r_ucrt.py b/tests/test_r_ucrt.py index febd43b5e..f4c558c62 100644 --- a/tests/test_r_ucrt.py +++ b/tests/test_r_ucrt.py @@ -1,12 +1,16 @@ +import networkx as nx from flaky import flaky from test_migrators import run_test_migration from conda_forge_tick.migrators import RUCRTCleanup, 
Version +TOTAL_GRAPH = nx.DiGraph() +TOTAL_GRAPH.graph["outputs_lut"] = {} r_ucrt_migrator = RUCRTCleanup() version_migrator_rbase = Version( set(), piggy_back_migrations=[r_ucrt_migrator], + total_graph=TOTAL_GRAPH, ) rbase_recipe = """\ diff --git a/tests/test_recipe_parser.py b/tests/test_recipe_parser.py index 3312a366c..26c9c5448 100644 --- a/tests/test_recipe_parser.py +++ b/tests/test_recipe_parser.py @@ -36,6 +36,8 @@ def test_parsing_ml_jinja2(): {% set build = build + 100 %} {% endif %} +{# this is a comment #} + package: name: {{ name|lower }} @@ -98,6 +100,8 @@ def test_parsing_ml_jinja2(): {% set build = build + 100 %} {% endif %} +# this is a comment + package: name: {{ name|lower }} @@ -235,6 +239,8 @@ def test_parsing_ml_jinja2(): {% set build = build + 100 %} {% endif %} +# this is a comment + package: name: {{ name|lower }} diff --git a/tests/test_recipe_v1.py b/tests/test_recipe_v1.py index ed07543ed..089ea748a 100644 --- a/tests/test_recipe_v1.py +++ b/tests/test_recipe_v1.py @@ -1,5 +1,6 @@ from pathlib import Path +import networkx as nx import pytest from flaky import flaky from test_migrators import run_test_migration @@ -10,14 +11,19 @@ ) from conda_forge_tick.migrators.recipe_v1 import ( get_condition, + get_new_sub_condition, is_negated_condition, + is_sub_condition, ) YAML_PATH = Path(__file__).parent / "test_v1_yaml" +TOTAL_GRAPH = nx.DiGraph() +TOTAL_GRAPH.graph["outputs_lut"] = {} combine_conditions_migrator = Version( set(), piggy_back_migrations=[CombineV1ConditionsMigrator()], + total_graph=TOTAL_GRAPH, ) @@ -68,6 +74,74 @@ def test_not_is_negated_condition(a, b): assert not is_negated_condition(b_cond, a_cond) +@pytest.mark.parametrize( + "sub_cond,super_cond,new_sub", + [ + ( + "build_platform != target_platform and megabuild", + "build_platform != target_platform", + "megabuild", + ), + ( + "build_platform != target_platform and not megabuild", + "build_platform != target_platform", + "not megabuild", + ), + ( + 'cuda_compiler_version != "None" and linux', + 'cuda_compiler_version != "None"', + "linux", + ), + ( + 'linux and cuda_compiler_version != "None"', + 'cuda_compiler_version != "None"', + "linux", + ), + ("a and b", "a", "b"), + ("a and b", "b", "a"), + ("(a or b) and c", "c", "a or b"), + ("(a or b) and c", "(a or b)", "c"), + ("(a or b) and (c or d)", "(a or b)", "c or d"), + ("(a or b) and (c or d)", "(c or d)", "a or b"), + ("(a or b) and c", "a or b", "c"), + ("(a or b) and (c or d)", "a or b", "c or d"), + ("(a or b) and (c or d)", "c or d", "a or b"), + ("a and b and c", "a and b", "c"), + ("a and b and c", "c", "a and b"), + ("a and (b and c)", "a", "b and c"), + ("a and (b and c)", "(b and c)", "a"), + ("a and (b and c)", "b and c", "a"), + ], +) +def test_sub_condition(sub_cond, super_cond, new_sub): + sub_node = get_condition({"if": sub_cond}) + super_node = get_condition({"if": super_cond}) + assert is_sub_condition(sub_node=sub_node, super_node=super_node) + assert not is_sub_condition(sub_node=super_node, super_node=sub_node) + assert get_new_sub_condition(sub_cond=sub_cond, super_cond=super_cond) == new_sub + assert get_new_sub_condition(sub_cond=super_cond, super_cond=sub_cond) is None + + +@pytest.mark.parametrize( + "sub_cond,super_cond", + [ + ("a or b and c", "a"), + ("a or b and c", "c"), + # jinja2 interprets this as (a and b) and c, but we handle only + # the top-most node + ("a and b and c", "a"), + ("a and b and c", "b and c"), + ("a and bar", "a and b"), + ("not (a and b)", "a and b"), + ], +) +def 
test_not_sub_condition(sub_cond, super_cond): + sub_node = get_condition({"if": sub_cond}) + super_node = get_condition({"if": super_cond}) + assert not is_sub_condition(sub_node=sub_node, super_node=super_node) + assert not is_sub_condition(sub_node=super_node, super_node=sub_node) + + @flaky def test_combine_v1_conditions(tmp_path): run_test_migration( diff --git a/tests/test_recipe_yaml_parsing.py b/tests/test_recipe_yaml_parsing.py index 159503e29..7f6d8f77d 100644 --- a/tests/test_recipe_yaml_parsing.py +++ b/tests/test_recipe_yaml_parsing.py @@ -184,6 +184,11 @@ def __init__( Args: recipe_path: Path to the recipe YAML file ci_support_files: Optional sequence of CI support configuration files + + Raises + ------ + FileNotFoundError + If the recipe file or CI support files are not found """ if not recipe_path.is_file(): raise FileNotFoundError(f"Recipe file not found: {recipe_path}") @@ -202,14 +207,28 @@ def _create_directory_structure(self) -> None: (self.root / ".ci_support").mkdir(parents=True, exist_ok=True) def _setup_recipe(self) -> None: - """Copy recipe file to temporary directory.""" + """Copy recipe file to temporary directory. + + Raises + ------ + OSError + If copying the recipe file fails. + """ try: shutil.copy2(self.recipe_path, self.root / "recipe" / "recipe.yaml") except OSError as e: raise OSError(f"Failed to copy recipe file: {e}") def _setup_ci_support(self, ci_support_files: list[Path] | None) -> None: - """Setup CI support configuration files.""" + """Set up CI support configuration files. + + Raises + ------ + FileNotFoundError + If the CI support file is not found. + OSError + If copying the CI support files fails. + """ if ci_support_files: for file in ci_support_files: if not file.is_file(): diff --git a/tests/test_settings.py b/tests/test_settings.py new file mode 100644 index 000000000..00a9930a3 --- /dev/null +++ b/tests/test_settings.py @@ -0,0 +1,120 @@ +import os + +import pytest +from pydantic import ValidationError + +from conda_forge_tick.settings import ( + ENV_CONDA_FORGE_ORG, + BotSettings, + settings, + use_settings, +) + + +class TestBotSettings: + def test_parse(self, temporary_environment): + os.environ["CF_TICK_CONDA_FORGE_ORG"] = "myorg" + os.environ["CF_TICK_GRAPH_GITHUB_BACKEND_REPO"] = "graph-owner/graph-repo" + os.environ["CF_TICK_GRAPH_REPO_DEFAULT_BRANCH"] = "mybranch" + os.environ["RUNNER_DEBUG"] = "1" + os.environ["CF_TICK_FRAC_UPDATE_UPSTREAM_VERSIONS"] = "0.5" + os.environ["CF_TICK_FRAC_MAKE_GRAPH"] = "0.7" + + bot_settings = BotSettings() + + assert bot_settings.conda_forge_org == "myorg" + assert bot_settings.graph_github_backend_repo == "graph-owner/graph-repo" + assert bot_settings.graph_repo_default_branch == "mybranch" + assert ( + bot_settings.graph_github_backend_raw_base_url + == "https://github.com/graph-owner/graph-repo/raw/mybranch/" + ) + assert bot_settings.github_runner_debug is True + assert bot_settings.frac_update_upstream_versions == 0.5 + assert bot_settings.frac_make_graph == 0.7 + + def test_defaults(self, temporary_environment): + os.environ.clear() + + bot_settings = BotSettings() + + assert bot_settings.conda_forge_org == "conda-forge" + assert bot_settings.graph_github_backend_repo == "regro/cf-graph-countyfair" + assert bot_settings.graph_repo_default_branch == "master" + assert bot_settings.github_runner_debug is False + assert bot_settings.frac_update_upstream_versions == 0.1 + assert bot_settings.frac_make_graph == 0.1 + + def test_env_conda_forge_org(self, temporary_environment): + os.environ.clear() 
+ + os.environ[ENV_CONDA_FORGE_ORG] = "myorg" + + bot_settings = BotSettings() + + assert bot_settings.conda_forge_org == "myorg" + + def test_reject_invalid_conda_forge_org(self, temporary_environment): + os.environ.clear() + + os.environ["CF_TICK_CONDA_FORGE_ORG"] = "invalid org" + + with pytest.raises(ValidationError, match="should match pattern"): + BotSettings() + + def test_reject_invalid_repo_pattern(self, temporary_environment): + os.environ.clear() + + os.environ["CF_TICK_GRAPH_GITHUB_BACKEND_REPO"] = "no-owner-repo" + + with pytest.raises(ValidationError, match="should match pattern"): + BotSettings() + + @pytest.mark.parametrize("value", [-0.1, 1.1]) + @pytest.mark.parametrize( + "attribute", ["FRAC_UPDATE_UPSTREAM_VERSIONS", "FRAC_MAKE_GRAPH"] + ) + def test_reject_invalid_fraction( + self, attribute: str, value: float, temporary_environment + ): + os.environ.clear() + + os.environ[f"CF_TICK_{attribute}"] = str(value) + + with pytest.raises(ValidationError, match="Input should be (greater|less)"): + BotSettings() + + @pytest.mark.parametrize("value", [0.0, 1.0]) + @pytest.mark.parametrize( + "attribute", ["FRAC_UPDATE_UPSTREAM_VERSIONS", "FRAC_MAKE_GRAPH"] + ) + def test_accept_valid_fraction( + self, attribute: str, value: float, temporary_environment + ): + os.environ.clear() + + os.environ[f"CF_TICK_{attribute}"] = str(value) + + bot_settings = BotSettings() + + assert getattr(bot_settings, attribute.lower()) == value + + +def test_use_settings(temporary_environment): + os.environ.clear() + bot_settings = settings() + bot_settings.github_runner_debug = True + + with use_settings(bot_settings): + ret_settings = settings() + assert ret_settings.github_runner_debug is True + + # there should be no side effects + bot_settings.github_runner_debug = False + ret_settings.github_runner_debug = False + + side_effect_check_settings = settings() + assert side_effect_check_settings.github_runner_debug is True + + # the settings should be restored + assert settings().github_runner_debug is False diff --git a/tests/test_staticlib.py b/tests/test_staticlib.py index 0a4d0feb1..b5bb63d1c 100644 --- a/tests/test_staticlib.py +++ b/tests/test_staticlib.py @@ -19,6 +19,9 @@ TEST_YAML_PATH = os.path.join(os.path.dirname(__file__), "test_yaml") TEST_YAML_PATH_V1 = os.path.join(os.path.dirname(__file__), "test_v1_yaml") +TOTAL_GRAPH = nx.DiGraph() +TOTAL_GRAPH.graph["outputs_lut"] = {} + @pytest.mark.parametrize( "spec,res", @@ -290,8 +293,9 @@ def test_staticlib_migrator_llvmlite(tmp_path, yaml_path): graph = nx.DiGraph() graph.add_node(name, payload=pmy) + graph.graph["outputs_lut"] = {} m = StaticLibMigrator( - graph=graph, + total_graph=graph, ) run_test_migration( m=m, diff --git a/tests/test_stdlib.py b/tests/test_stdlib.py index 652956e35..9b86c1851 100644 --- a/tests/test_stdlib.py +++ b/tests/test_stdlib.py @@ -1,6 +1,7 @@ import os import re +import networkx as nx import pytest from test_migrators import run_test_migration @@ -9,10 +10,13 @@ TEST_YAML_PATH = os.path.join(os.path.dirname(__file__), "test_yaml") +TOTAL_GRAPH = nx.DiGraph() +TOTAL_GRAPH.graph["outputs_lut"] = {} STDLIB = StdlibMigrator() VERSION_WITH_STDLIB = Version( set(), piggy_back_migrations=[STDLIB], + total_graph=TOTAL_GRAPH, ) diff --git a/tests/test_update_deps.py b/tests/test_update_deps.py index 9780cc7f0..4e5d17537 100644 --- a/tests/test_update_deps.py +++ b/tests/test_update_deps.py @@ -3,6 +3,7 @@ import tempfile from pathlib import Path +import networkx as nx import pytest from flaky import flaky from 
test_migrators import run_test_migration @@ -20,9 +21,12 @@ make_grayskull_recipe, ) +TOTAL_GRAPH = nx.DiGraph() +TOTAL_GRAPH.graph["outputs_lut"] = {} VERSION = Version( set(), piggy_back_migrations=[DependencyUpdateMigrator(set())], + total_graph=TOTAL_GRAPH, ) diff --git a/tests/test_upstream_versions.py b/tests/test_upstream_versions.py index e1c21c072..5f5a678c1 100644 --- a/tests/test_upstream_versions.py +++ b/tests/test_upstream_versions.py @@ -15,6 +15,7 @@ from conda_forge_tick.cli_context import CliContext from conda_forge_tick.lazy_json_backends import LazyJson +from conda_forge_tick.settings import settings, use_settings from conda_forge_tick.update_sources import ( NPM, NVIDIA, @@ -548,7 +549,7 @@ def test_latest_version_skip_error_success(caplog): assert "Using URL https://source-a.com" in caplog.text assert ( - "An exception occurred while fetching crazy-package from source a:" + "An exception occurred while fetching crazy-package from source a" in caplog.text ) assert "source a error" in caplog.text @@ -579,7 +580,7 @@ def test_latest_version_error_and_no_new_version(caplog): assert "Using URL https://source-a.com" in caplog.text assert ( - "An exception occurred while fetching crazy-package from source a:" + "An exception occurred while fetching crazy-package from source a" in caplog.text ) assert "source a error" in caplog.text @@ -1356,11 +1357,22 @@ def test_update_upstream_versions_run_parallel_custom_sources( ) == ("source a", "source b") -@mock.patch("conda_forge_tick.update_upstream_versions.RANDOM_FRAC_TO_UPDATE", new=1.1) +@pytest.fixture +def version_update_frac_always(): + new_settings = settings() + + new_settings.frac_update_upstream_versions = True + with use_settings(new_settings): + yield + + @mock.patch("conda_forge_tick.update_upstream_versions.get_latest_version") @mock.patch("conda_forge_tick.update_upstream_versions.LazyJson") def test_update_upstream_versions_sequential_error( - lazy_json_mock: MagicMock, get_latest_version_mock: MagicMock, caplog + lazy_json_mock: MagicMock, + get_latest_version_mock: MagicMock, + version_update_frac_always, + caplog, ): caplog.set_level(logging.DEBUG) source_a = Mock(AbstractSource) @@ -1394,11 +1406,13 @@ def __repr__(self): raise Exception("broken exception") -@mock.patch("conda_forge_tick.update_upstream_versions.RANDOM_FRAC_TO_UPDATE", new=1.1) @mock.patch("conda_forge_tick.update_upstream_versions.get_latest_version") @mock.patch("conda_forge_tick.update_upstream_versions.LazyJson") def test_update_upstream_versions_sequential_exception_repr_exception( - lazy_json_mock: MagicMock, get_latest_version_mock: MagicMock, caplog + lazy_json_mock: MagicMock, + get_latest_version_mock: MagicMock, + version_update_frac_always, + caplog, ): caplog.set_level(logging.DEBUG) source_a = Mock(AbstractSource) @@ -1429,11 +1443,13 @@ def test_update_upstream_versions_sequential_exception_repr_exception( ) -@mock.patch("conda_forge_tick.update_upstream_versions.RANDOM_FRAC_TO_UPDATE", new=1.1) @mock.patch("conda_forge_tick.update_upstream_versions.get_latest_version") @mock.patch("conda_forge_tick.update_upstream_versions.LazyJson") def test_update_upstream_versions_sequential( - lazy_json_mock: MagicMock, get_latest_version_mock: MagicMock, caplog + lazy_json_mock: MagicMock, + get_latest_version_mock: MagicMock, + version_update_frac_always, + caplog, ): caplog.set_level(logging.DEBUG) source_a = Mock(AbstractSource) @@ -1480,11 +1496,13 @@ def test_update_upstream_versions_sequential( assert "# 1 - testpackage2 - 1.2.4 -> 
1.2.5" in caplog.text -@mock.patch("conda_forge_tick.update_upstream_versions.RANDOM_FRAC_TO_UPDATE", new=1.1) @mock.patch("conda_forge_tick.update_upstream_versions.executor") @mock.patch("conda_forge_tick.update_upstream_versions.LazyJson") def test_update_upstream_versions_process_pool( - lazy_json_mock: MagicMock, executor_mock: MagicMock, caplog + lazy_json_mock: MagicMock, + executor_mock: MagicMock, + version_update_frac_always, + caplog, ): caplog.set_level(logging.DEBUG) source_a = Mock(AbstractSource) @@ -1541,11 +1559,13 @@ def test_update_upstream_versions_process_pool( assert "testpackage - 2.2.3 -> 2.2.4" in caplog.text -@mock.patch("conda_forge_tick.update_upstream_versions.RANDOM_FRAC_TO_UPDATE", new=1.1) @mock.patch("conda_forge_tick.update_upstream_versions.executor") @mock.patch("conda_forge_tick.update_upstream_versions.LazyJson") def test_update_upstream_versions_process_pool_exception( - lazy_json_mock: MagicMock, executor_mock: MagicMock, caplog + lazy_json_mock: MagicMock, + executor_mock: MagicMock, + version_update_frac_always, + caplog, ): caplog.set_level(logging.DEBUG) source_a = Mock(AbstractSource) @@ -1585,11 +1605,13 @@ def test_update_upstream_versions_process_pool_exception( assert "source a error" in caplog.text -@mock.patch("conda_forge_tick.update_upstream_versions.RANDOM_FRAC_TO_UPDATE", new=1.1) @mock.patch("conda_forge_tick.update_upstream_versions.executor") @mock.patch("conda_forge_tick.update_upstream_versions.LazyJson") def test_update_upstream_versions_process_pool_exception_repr_exception( - lazy_json_mock: MagicMock, executor_mock: MagicMock, caplog + lazy_json_mock: MagicMock, + executor_mock: MagicMock, + version_update_frac_always, + caplog, ): caplog.set_level(logging.DEBUG) source_a = Mock(AbstractSource) @@ -1742,9 +1764,7 @@ def test_github_releases(tmpdir, url, feedstock_version): def test_github_releases_unusual_version( tmp_path: Path, url: str, feedstock_version: str, regex: str ): - """ - Tests that the GitHubReleases source can handle unusual version strings such as timestamps. 
- """ + """Tests that the GitHubReleases source can handle unusual version strings such as timestamps.""" meta_yaml = LazyJson(str(tmp_path / "cf-scripts-test.json")) with meta_yaml as _meta_yaml: _meta_yaml.update( diff --git a/tests/test_url_exists.py b/tests/test_url_exists.py index edc842ac0..f1428a2d4 100644 --- a/tests/test_url_exists.py +++ b/tests/test_url_exists.py @@ -21,7 +21,11 @@ def delay_rerun(*args): "http://ftp.openbsd.org/pub/OpenBSD/OpenSSH/portable/openssh-7.6p1.tar.gz", True, ), - ("https://eups.lsst.codes/stack/src/tags/w_2021_07.list", True), + pytest.param( + "https://eups.lsst.codes/stack/src/tags/w_2021_07.list", + True, + marks=pytest.mark.xfail(reason="expired HTTPS certificate"), + ), pytest.param( "https://downloads.sourceforge.net/project/healpix/Healpix_3.31/Healpix_3.31_2016Aug26.tar.gz", # noqa True, diff --git a/tests/test_use_pip.py b/tests/test_use_pip.py index 1e101fda7..d07b44bcf 100644 --- a/tests/test_use_pip.py +++ b/tests/test_use_pip.py @@ -1,12 +1,15 @@ import os +import networkx as nx import pytest from test_migrators import run_test_migration from conda_forge_tick.migrators import PipMigrator, Version +TOTAL_GRAPH = nx.DiGraph() +TOTAL_GRAPH.graph["outputs_lut"] = {} PC = PipMigrator() -VERSION_PC = Version(set(), piggy_back_migrations=[PC]) +VERSION_PC = Version(set(), piggy_back_migrations=[PC], total_graph=TOTAL_GRAPH) YAML_PATH = os.path.join(os.path.dirname(__file__), "test_yaml") diff --git a/tests/test_utils.py b/tests/test_utils.py index 265bcbe8b..609d17e44 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -11,6 +11,7 @@ _munge_dict_repr, extract_section_from_yaml_text, get_keys_default, + get_recipe_schema_version, load_existing_graph, load_graph, parse_munged_run_export, @@ -121,7 +122,9 @@ def test_load_graph_file_does_not_exist(exists_mock: MagicMock): mock_file.assert_has_calls([mock.call(DEFAULT_GRAPH_FILENAME, "w")]) -def test_load_existing_graph(): +@mock.patch("os.path.isfile") +def test_load_existing_graph(isfile_mock: MagicMock): + isfile_mock.return_value = True with mock.patch("builtins.open", mock_open(read_data=DEMO_GRAPH)) as mock_file: gx = load_existing_graph() @@ -155,6 +158,35 @@ def test_munge_dict_repr(): assert parse_munged_run_export(_munge_dict_repr(d)) == d +@pytest.mark.parametrize("version", [0, 1]) +def test_get_recipe_schema_version_valid(version: int): + attrs = { + "meta_yaml": { + "schema_version": version, + } + if version is not None + else {}, + } + + assert get_recipe_schema_version(attrs) == version + + +def test_get_recipe_schema_version_missing_keys_1(): + attrs = {"meta_yaml": {}} + assert get_recipe_schema_version(attrs) == 0 + + +def test_get_recipe_schema_version_missing_keys_2(): + attrs = {} + assert get_recipe_schema_version(attrs) == 0 + + +def test_get_recipe_schema_version_invalid(): + attrs = {"meta_yaml": {"schema_version": "invalid"}} + with pytest.raises(ValueError, match="Recipe version is not an integer"): + get_recipe_schema_version(attrs) + + def test_run_command_hiding_token(): cmd = ["python", "-c", "print('stdTOKEN.out')"] diff --git a/tests/test_v1_yaml/version_pytorch.yaml b/tests/test_v1_yaml/version_pytorch.yaml index 1bba42164..fe8d2cb13 100644 --- a/tests/test_v1_yaml/version_pytorch.yaml +++ b/tests/test_v1_yaml/version_pytorch.yaml @@ -71,10 +71,10 @@ outputs: then: rsync host: # GPU requirements - - if: "cuda_compiler_version != \"None\"" - then: cudnn - if: "cuda_compiler_version != \"None\" and linux" then: nccl + - if: "cuda_compiler_version != 
\"None\"" + then: cudnn - if: "cuda_compiler_version != \"None\"" then: magma - if: "cuda_compiler_version != \"None\"" diff --git a/tests/test_v1_yaml/version_pytorch_correct.yaml b/tests/test_v1_yaml/version_pytorch_correct.yaml index 778dd9386..4aa00b332 100644 --- a/tests/test_v1_yaml/version_pytorch_correct.yaml +++ b/tests/test_v1_yaml/version_pytorch_correct.yaml @@ -69,19 +69,16 @@ outputs: - rsync host: # GPU requirements - - if: "cuda_compiler_version != \"None\"" - then: cudnn - - if: "cuda_compiler_version != \"None\" and linux" - then: nccl - if: "cuda_compiler_version != \"None\"" then: + - if: "linux" + then: nccl + - cudnn - magma - cuda-version ${{ cuda_compiler_version }} - nvtx-c - - if: "linux and cuda_compiler_version != \"None\"" - then: cuda-driver-dev - - if: "cuda_compiler_version != \"None\"" - then: + - if: "linux" + then: cuda-driver-dev - cuda-cudart-dev - cuda-cupti-dev - cuda-nvrtc-dev @@ -91,10 +88,8 @@ outputs: - cusparselt - libcublas-dev - libcudss-dev - - if: "linux and cuda_compiler_version != \"None\"" - then: libcufile-dev - - if: "cuda_compiler_version != \"None\"" - then: + - if: "linux" + then: libcufile-dev - libcufft-dev - libcurand-dev - libcusolver-dev @@ -183,19 +178,16 @@ outputs: host: - ${{ pin_subpackage('libtorch', exact=True) }} # GPU requirements - - if: "cuda_compiler_version != \"None\"" - then: cudnn - - if: "cuda_compiler_version != \"None\" and linux" - then: nccl - if: "cuda_compiler_version != \"None\"" then: + - cudnn + - if: "linux" + then: nccl - cuda-version ${{ cuda_compiler_version }} - nvtx-c - magma - - if: "linux and cuda_compiler_version != \"None\"" - then: cuda-driver-dev - - if: "cuda_compiler_version != \"None\"" - then: + - if: "linux" + then: cuda-driver-dev - cuda-cudart-dev - cuda-cupti-dev - cuda-nvrtc-dev @@ -205,10 +197,8 @@ outputs: - cusparselt - libcublas-dev - libcudss-dev - - if: "linux and cuda_compiler_version != \"None\"" - then: libcufile-dev - - if: "cuda_compiler_version != \"None\"" - then: + - if: "linux" + then: libcufile-dev - libcufft-dev - libcurand-dev - libcusolver-dev @@ -253,12 +243,12 @@ outputs: then: libblas * *${{ blas_impl }} else: nomkl - if: "cuda_compiler_version != \"None\"" - then: ${{ pin_compatible('cudnn') }} - - if: "cuda_compiler_version != \"None\" and not win" - then: triton =${{ triton }} + then: + - ${{ pin_compatible('cudnn') }} + - if: "not win" + then: triton =${{ triton }} # avoid that people without GPUs needlessly download ~0.5-1GB - - if: "cuda_compiler_version != \"None\"" - then: __cuda + - __cuda - python # other requirements, see https://github.com/pytorch/pytorch/blame/main/requirements.txt - filelock diff --git a/tests/test_version_migrator.py b/tests/test_version_migrator.py index d573c8cc8..1ec437e9b 100644 --- a/tests/test_version_migrator.py +++ b/tests/test_version_migrator.py @@ -3,6 +3,7 @@ import random from pathlib import Path +import networkx as nx import pytest from flaky import flaky from test_migrators import run_test_migration @@ -10,7 +11,9 @@ from conda_forge_tick.migrators import Version from conda_forge_tick.migrators.version import VersionMigrationError -VERSION = Version(set()) +TOTAL_GRAPH = nx.DiGraph() +TOTAL_GRAPH.graph["outputs_lut"] = {} +VERSION = Version(set(), total_graph=TOTAL_GRAPH) YAML_PATH = Path(__file__).parent / "test_yaml" YAML_V1_PATH = Path(__file__).parent / "test_v1_yaml" diff --git a/tests/test_yaml/version_jinja2expr.yaml b/tests/test_yaml/version_jinja2expr.yaml index dd9d4cae9..bd3ac7812 100644 --- 
a/tests/test_yaml/version_jinja2expr.yaml +++ b/tests/test_yaml/version_jinja2expr.yaml @@ -3,7 +3,7 @@ {% set version = "0.10.4" %} {% set name = goname.split('/')[-1] %} -{% set pkg_src = ('src/'+goname).replace("/",os.sep) %} +{% set pkg_src = ('src/'+goname).replace("/","/") %} {% set x4_plugin = name + '_v' + version + "_x4" + (".exe" if win else "")%} diff --git a/tests/test_yaml/version_jinja2expr_correct.yaml b/tests/test_yaml/version_jinja2expr_correct.yaml index 28a0f9a3f..e773f7ab9 100644 --- a/tests/test_yaml/version_jinja2expr_correct.yaml +++ b/tests/test_yaml/version_jinja2expr_correct.yaml @@ -3,7 +3,7 @@ {% set version = "1.1.1" %} {% set name = goname.split('/')[-1] %} -{% set pkg_src = ('src/'+goname).replace("/",os.sep) %} +{% set pkg_src = ('src/'+goname).replace("/","/") %} {% set x4_plugin = name + '_v' + version + "_x4" + (".exe" if win else "")%} diff --git a/tests/test_yaml/version_python.yaml b/tests/test_yaml/version_python.yaml index 3d0997e00..433d3cc48 100644 --- a/tests/test_yaml/version_python.yaml +++ b/tests/test_yaml/version_python.yaml @@ -151,7 +151,6 @@ outputs: # - lib/python{{ ver2 }}/lib-dynload/_hashlib.cpython-{{ ver2nd }}-x86_64-linux-gnu.so # [linux] # - lib/libpython3.dylib # [osx] # match python.org compiler standard - skip: True # [win and int(float(vc)) < 14] skip_compile_pyc: - '*.py' # [osx and arm64] string: {{ dev_ }}h{{ PKG_HASH }}_{{ PKG_BUILDNUM }}{{ linkage_nature }}{{ debug }}_cpython # ["conda-forge" in (channel_targets or "")] diff --git a/tests/test_yaml/version_python_correct.yaml b/tests/test_yaml/version_python_correct.yaml index 814a85ddb..3360197af 100644 --- a/tests/test_yaml/version_python_correct.yaml +++ b/tests/test_yaml/version_python_correct.yaml @@ -151,7 +151,6 @@ outputs: # - lib/python{{ ver2 }}/lib-dynload/_hashlib.cpython-{{ ver2nd }}-x86_64-linux-gnu.so # [linux] # - lib/libpython3.dylib # [osx] # match python.org compiler standard - skip: true # [win and int(float(vc)) < 14] skip_compile_pyc: - '*.py' # [osx and arm64] string: {{ dev_ }}h{{ PKG_HASH }}_{{ PKG_BUILDNUM }}{{ linkage_nature }}{{ debug }}_cpython # ["conda-forge" in (channel_targets or "")] diff --git a/tests_integration/.mitmproxy/.gitignore b/tests_integration/.mitmproxy/.gitignore new file mode 100644 index 000000000..72eeef24f --- /dev/null +++ b/tests_integration/.mitmproxy/.gitignore @@ -0,0 +1,9 @@ +# mitmproxy certificate files (for integration tests) +mitmproxy-ca.key +mitmproxy-ca.crt +mitmproxy-ca.pem +mitmproxy-cert-bundle.pem +mitmproxy-dhparam.pem + +# trust script generated by CI +mitmproxy_trust_script.sh diff --git a/tests_integration/.mitmproxy/README.md b/tests_integration/.mitmproxy/README.md new file mode 100644 index 000000000..9704504bf --- /dev/null +++ b/tests_integration/.mitmproxy/README.md @@ -0,0 +1,4 @@ +# mitmproxy confdir + +This directory is intended to be used as configuration directory for `mitmproxy`. +Check the README in [the parent directory](..) for more information. diff --git a/tests_integration/README.md b/tests_integration/README.md new file mode 100644 index 000000000..8be3d97f7 --- /dev/null +++ b/tests_integration/README.md @@ -0,0 +1,173 @@ +# Integration Tests + +This directory contains integration tests for the autotick-bot. +The tests are run against actual GitHub repositories, and are used to verify that the +bot works as expected in an environment closely resembling production. 
+ +## Test Environment +The integration tests operate in a testing environment consisting of three real GitHub entities: + +- [conda-forge-bot-staging](https://github.com/conda-forge-bot-staging) (organization) mimics the +[conda-forge](https://github.com/conda-forge) organization and will contain a selection of test feedstocks +(see below how we create them) +- [regro-cf-autotick-bot-staging](https://github.com/regro-cf-autotick-bot-staging) (user) mimics the +[regro-cf-autotick-bot](https://github.com/regro-cf-autotick-bot) account and is a test environment in which the bot +will create forks of the conda-forge-bot-staging repositories +- [regro-staging](https://github.com/regro-staging) (organization) (named after the [regro](https://github.com/regro) +account) contains a special version of the [cf-graph-countyfair](https://github.com/regro/cf-graph-countyfair) which +the bot uses during testing. + +## Test Cases Definition +The integration tests are defined in the [lib/_definitions](lib/_definitions) directory. The following directory structure is +used (using `pydantic` and `llvmdev` as example feedstocks): + +```text +definitions/ +├── pydantic/ +│ ├── resources/ +│ │ ├── feedstock +│ │ └── ... (entirely custom) +│ └── __init__.py +├── llvmdev/ +│ ├── resources/ +│ └── __init__.py +└── ... +``` + +Each feedstock has its own Python module containing the test cases for that feedstock. +**The test cases are always defined in the top-level `__init__.py` file of the feedstock directory.** + +For storing resources, a `resources` directory is used for each feedstock directory. +Inside the `resources` directory, you can use an arbitrary directory structure to store the resources. + +Usually, we include a specific revision of the original feedstock as a submodule in the `resources` directory. + +A test case always tests the entire pipeline of the bot and not any intermediate states that could be checked +in the cf-graph. See the [pytest test definition](test_integration.py) for more details. +Also, a test case is always bound to one specific feedstock. + +### Test Case Definition +To define a test case, create a subclass of `tests_integration.lib.TestCase` in the `__init__.py` file of +your feedstock. You can name it arbitrarily. +Referring to the minimal `VersionUpdate` test case in the +[pydantic module](lib/_definitions/pydantic/__init__.py), +your class has to implement three methods: + +1. `get_router()` should return an `APIRouter` object to define mock responses for specific HTTP requests. All web requests are intercepted by an HTTP proxy. +Refer to `tests_integration.lib.get_transparent_urls` to define URLs that should not be intercepted. + +2. `prepare(helper: AbstractIntegrationTestHelper)` for setting up your test case. Usually, you will want to +overwrite the feedstock repository in the test environment. The `AbstractIntegrationTestHelper` provides methods to interact +with the test environment. + +3. A function `validate(helper: AbstractIntegrationTestHelper)` for validating the state after the bot has run. +The `AbstractIntegrationTestHelper` provides convenience methods such as `assert_version_pr_present` to check for the presence +of a version update PR. + +The creation of GitHub repositories in the test environment is done automatically based on the directory structure. + +### Adding to Test Case Lists + +> [!IMPORTANT] +> Please make sure to add any added test cases to the `ALL_TEST_CASES` list in the respective `__init__.py` file of the feedstock. 
+> You also need to add any added feedstock to the `TEST_CASE_MAPPING` dictionary in the `definitions/__init__.py` file. + +### How Test Cases are Run + +Importantly, the integration test workflow does not execute the test cases directly. +Instead, it groups them into test scenarios and executes those at once. +A test scenario assigns one test case to every feedstock and runs them in parallel. + +Thus, test cases of different feedstocks can run simultaneously, but the different test cases for the same feedstock +are always run sequentially. + +The generation of test scenarios is done in [_collect_test_scenarios.py](lib/_collect_test_scenarios.py). It is pseudo-random, +ensuring that faulty interactions between test cases are detected eventually. + +In detail, the process of collecting test scenarios is as follows: + +#### 1. Collect Test Cases +For each feedstock, collect the available test cases in lexically sorted order. + +| Feedstock A | Feedstock B | +|-------------|-------------| +| Test Case 1 | Test Case 1 | +| Test Case 2 | Test Case 2 | +| Test Case 3 | | +| Test Case 4 | | +| Test Case 5 | | + + +#### 2. Fill Test Scenarios +The number of test scenarios is equal to the maximum number of test cases for a feedstock. +Feedstocks that have fewer test cases repeat their test cases to supply exactly one test case per scenario. +In the example below, the last instance of `test_case_2.py` for Feedstock B is not needed and thus discarded. + + +| Feedstock A | Feedstock B | +|------------------------------|------------------------------| +| Test Case 1 | Test Case 1 | +| Test Case 2 | Test Case 2 | +| Test Case 3 | Test Case 1 | +| Test Case 4 | Test Case 2 | +| Test Case 5 | Test Case 1 | +| ✂️ everything is cut here ✂️ | ✂️ everything is cut here ✂️ | +| | Test Case 2 (discarded 🗑️) | + +#### 3. Shuffle Test Scenarios +For each feedstock, we shuffle the test cases (rows) individually to ensure a random combination of test cases. +The shuffling is done pseudo-randomly based on `GITHUB_RUN_ID` (which persists for re-runs of the same workflow). + +Finally, we get the test scenarios as the rows of the table below. +Each test scenario executes exactly one test case per feedstock, in parallel. + +| | Feedstock A | Feedstock B | +|------------|-------------|-------------| +| Scenario 1 | Test Case 3 | Test Case 2 | +| Scenario 2 | Test Case 1 | Test Case 1 | +| Scenario 3 | Test Case 4 | Test Case 1 | +| Scenario 4 | Test Case 2 | Test Case 2 | +| Scenario 5 | Test Case 5 | Test Case 1 | + + +## Environment Variables +The tests expect the following environment variables: + +| Variable | Description | +|--------------------|-----------------------------------------------------------------------------------------------------------------------------------| +| `BOT_TOKEN` | Classic PAT for `cf-regro-autotick-bot-staging`. Used to interact with the test environment. | +| `TEST_SETUP_TOKEN` | Classic PAT for `cf-regro-autotick-bot-staging` used to setup the test environment. Typically, this is identical to `BOT_TOKEN`. | +| `GITHUB_RUN_ID` | Set by GitHub. ID of the current run. Used as random seed. | + + +We do not use `BOT_TOKEN` instead of `TEST_SETUP_TOKEN` for setting up the test environment to allow for future separation of the two tokens. +Furthermore, `BOT_TOKEN` is hidden by the sensitive env logic of `conda_forge_tick` and we want the test environment to not need to rely on this logic. 
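For readers who want to see the scenario construction above in action, here is a small, self-contained sketch of the fill-and-shuffle procedure. It is illustrative only: the feedstock and test case names are made up, and the bot's actual implementation lives in `lib/_collect_test_scenarios.py` further down.

```python
import random

# Made-up inputs mirroring the example tables above; the real mapping is
# TEST_CASE_MAPPING from lib/_definitions.
TEST_CASES = {
    "feedstock-a": ["case_1", "case_2", "case_3", "case_4", "case_5"],
    "feedstock-b": ["case_1", "case_2"],
}


def build_scenarios(run_id: int) -> list[dict[str, str]]:
    random.seed(run_id)  # seeding with GITHUB_RUN_ID keeps re-runs reproducible
    n_scenarios = max(len(cases) for cases in TEST_CASES.values())
    columns = {}
    for feedstock, cases in TEST_CASES.items():
        # step 2: repeat the cases until the column has n_scenarios entries,
        # then discard the surplus
        repeated = (cases * (n_scenarios // len(cases) + 1))[:n_scenarios]
        # step 3: shuffle each feedstock's column independently
        random.shuffle(repeated)
        columns[feedstock] = repeated
    # scenario i is row i of the resulting table: one test case per feedstock
    return [
        {feedstock: column[i] for feedstock, column in columns.items()}
        for i in range(n_scenarios)
    ]


for i, scenario in enumerate(build_scenarios(run_id=42)):
    print(f"Scenario {i}: {scenario}")
```

Because the columns are shuffled with a seed-initialized random generator, the same run ID always yields the same assignment of test cases to scenarios.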
+ + +### GitHub Token Permissions +The bot token (which you can also use as the test setup token) should have the following scopes: `repo`, `workflow`, `delete_repo`. + +## Running the Integration Tests Locally + +To run the integration tests locally, you currently need to have a valid token for the `cf-regro-autotick-bot-staging` account. +Besides that, run the following setup wizard to set up self-signed certificates for the HTTP proxy: + +```bash +./mitmproxy_setup_wizard.sh +``` + +After that, run the following command to execute the tests +(you need to be in the `tests_integration` directory): + +```bash +pytest -s -v --dist=no tests_integration +``` + +Remember to set the environment variables from above beforehand. + +## Debugging CI Issues + +The proxy setup of the integration tests is quite complex, and you can experience issues that only occur on GitHub Actions +and not locally. + +To debug them, consider [using vscode-server-action](https://gist.github.com/ytausch/612106cfbc2cc660130d247fa2f3a673). diff --git a/tests_integration/__init__.py b/tests_integration/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests_integration/lib/__init__.py b/tests_integration/lib/__init__.py new file mode 100644 index 000000000..98f3be6ef --- /dev/null +++ b/tests_integration/lib/__init__.py @@ -0,0 +1,37 @@ +from ._collect_test_scenarios import get_all_test_scenario_ids, get_test_scenario +from ._definitions.base_classes import AbstractIntegrationTestHelper, TestCase +from ._integration_test_helper import IntegrationTestHelper +from ._run_test_cases import ( + close_all_open_pull_requests, + reset_cf_graph, + run_all_prepare_functions, + run_all_validate_functions, +) +from ._setup_repositories import prepare_all_accounts +from ._shared import ( + ENV_TEST_SCENARIO_ID, + VIRTUAL_PROXY_HOSTNAME, + VIRTUAL_PROXY_PORT, + get_global_router, + get_transparent_urls, + setup_logging, +) + +__all__ = [ + "get_all_test_scenario_ids", + "get_test_scenario", + "IntegrationTestHelper", + "close_all_open_pull_requests", + "reset_cf_graph", + "run_all_prepare_functions", + "run_all_validate_functions", + "prepare_all_accounts", + "get_global_router", + "get_transparent_urls", + "setup_logging", + "ENV_TEST_SCENARIO_ID", + "VIRTUAL_PROXY_HOSTNAME", + "VIRTUAL_PROXY_PORT", + "AbstractIntegrationTestHelper", + "TestCase", +] diff --git a/tests_integration/lib/_collect_test_scenarios.py b/tests_integration/lib/_collect_test_scenarios.py new file mode 100644 index 000000000..25f3c99be --- /dev/null +++ b/tests_integration/lib/_collect_test_scenarios.py @@ -0,0 +1,61 @@ +import os +import random + +from ._definitions import TEST_CASE_MAPPING, TestCase +from ._shared import ENV_GITHUB_RUN_ID + + +def get_number_of_test_scenarios() -> int: + return max(len(test_cases) for test_cases in TEST_CASE_MAPPING.values()) + + +def get_all_test_scenario_ids() -> list[int]: + return list(range(get_number_of_test_scenarios())) + + +def init_random(): + random.seed(int(os.environ.get(ENV_GITHUB_RUN_ID, 0))) + + +def get_test_scenario(scenario_id: int) -> dict[str, TestCase]: + """ + Get the test scenario for the given ID. + The scenario is a dictionary with the feedstock name as key and the test case name as value. + + Test scenarios are pseudo-randomly generated with the GitHub run ID as seed. + + Raises + ------ + ValueError + If the scenario ID is invalid (i.e. not between 0 and n_scenarios - 1).
+    """
+    init_random()
+
+    n_scenarios = get_number_of_test_scenarios()
+
+    if scenario_id < 0 or scenario_id >= n_scenarios:
+        raise ValueError(
+            f"Invalid scenario ID: {scenario_id}. Must be between 0 and {n_scenarios - 1}."
+        )
+
+    # make sure that each feedstock has exactly n_scenarios test cases:
+    # repeat the test cases as needed and discard any excess entries beyond n_scenarios
+    test_cases_extended = {
+        feedstock: (
+            test_cases
+            * (n_scenarios // len(test_cases) + (n_scenarios % len(test_cases) > 0))
+        )[:n_scenarios]
+        for feedstock, test_cases in TEST_CASE_MAPPING.items()
+    }
+
+    for test_cases in test_cases_extended.values():
+        # in-place
+        random.shuffle(test_cases)
+
+    # At this point, test_cases_extended[feedstock][i] is the test case for
+    # feedstock "feedstock" in the i-th test scenario.
+    # We need to return the i-th test scenario, so we set i to scenario_id.
+    return {
+        feedstock: test_cases_extended[feedstock][scenario_id]
+        for feedstock in test_cases_extended
+    }
diff --git a/tests_integration/lib/_definitions/__init__.py b/tests_integration/lib/_definitions/__init__.py
new file mode 100644
index 000000000..b5340e6f5
--- /dev/null
+++ b/tests_integration/lib/_definitions/__init__.py
@@ -0,0 +1,17 @@
+from . import conda_forge_pinning, pydantic
+from .base_classes import AbstractIntegrationTestHelper, GitHubAccount, TestCase
+
+TEST_CASE_MAPPING: dict[str, list[TestCase]] = {
+    "conda-forge-pinning": conda_forge_pinning.ALL_TEST_CASES,
+    "pydantic": pydantic.ALL_TEST_CASES,
+}
+"""
+Maps from feedstock name to a list of all test cases for that feedstock.
+"""
+
+__all__ = [
+    "AbstractIntegrationTestHelper",
+    "GitHubAccount",
+    "TestCase",
+    "TEST_CASE_MAPPING",
+]
diff --git a/tests_integration/lib/_definitions/base_classes.py b/tests_integration/lib/_definitions/base_classes.py
new file mode 100644
index 000000000..2e17e8593
--- /dev/null
+++ b/tests_integration/lib/_definitions/base_classes.py
@@ -0,0 +1,121 @@
+"""
+Module providing base classes for the integration tests.
+
+Both _definitions and lib refer to this module.
+"""
+
+from abc import ABC
+from enum import StrEnum
+from pathlib import Path
+
+from fastapi import APIRouter
+
+
+class GitHubAccount(StrEnum):
+    CONDA_FORGE_ORG = "conda-forge-bot-staging"
+    BOT_USER = "regro-cf-autotick-bot-staging"
+    REGRO_ORG = "regro-staging"
+
+
+class AbstractIntegrationTestHelper(ABC):
+    """Abstract base class for the IntegrationTestHelper in tests_integration.lib.
+    Without this class, we cannot refer to IntegrationTestHelper in the definitions module
+    because it would create a circular import. So we refer to this class instead
+    and make sure that IntegrationTestHelper inherits from this class.
+    """
+
+    def overwrite_feedstock_contents(
+        self, feedstock_name: str, source_dir: Path, branch: str = "main"
+    ):
+        """
+        Overwrite the contents of the feedstock with the contents of the source directory.
+        This prunes the entire git history.
+
+        Parameters
+        ----------
+        feedstock_name
+            The name of the feedstock repository, without the "-feedstock" suffix.
+        source_dir
+            The directory containing the new contents of the feedstock.
+        branch
+            The branch to overwrite.
+        """
+        pass
+
+    def overwrite_github_repository(
+        self,
+        owner_account: GitHubAccount,
+        repo_name: str,
+        source_dir: Path,
+        branch: str = "main",
+    ):
+        """
+        Overwrite the contents of the repository with the contents of the source directory.
+        This prunes the entire git history.
+ + Parameters + ---------- + owner_account + The owner of the repository. + repo_name + The name of the repository. + source_dir + The directory containing the new contents of the repository. + branch + The branch to overwrite. + """ + pass + + def assert_version_pr_present( + self, + feedstock: str, + new_version: str, + new_hash: str, + old_version: str, + old_hash: str, + ) -> None: + """ + Assert that the bot has opened a version update PR. + + Parameters + ---------- + feedstock + The feedstock we expect the PR for, without the -feedstock suffix. + new_version + The new version that is expected. + new_hash + The new SHA-256 source artifact hash. + old_version + The old version of the feedstock, to check that it no longer appears in the recipe. + old_hash + The old SHA-256 source artifact hash, to check that it no longer appears in the recipe. + + + Raises + ------ + AssertionError + If the assertion fails. + """ + pass + + +class TestCase(ABC): + """ + Abstract base class for a single test case in a scenario. + Per test case, there is exactly one instance of this class statically created + in the definition of the ALL_TEST_CASES list of the feedstock module. + Note that a test case (i.e. an instance of this class) might be run multiple times, + so be careful with state you keep in the instance. + """ + + def get_router(self) -> APIRouter: + """Return the FastAPI router for the test case.""" + pass + + def prepare(self, helper: AbstractIntegrationTestHelper): + """Prepare the test case using the given helper.""" + pass + + def validate(self, helper: AbstractIntegrationTestHelper): + """Validate the test case using the given helper.""" + pass diff --git a/tests_integration/lib/_definitions/conda_forge_pinning/__init__.py b/tests_integration/lib/_definitions/conda_forge_pinning/__init__.py new file mode 100644 index 000000000..4d0aa7eec --- /dev/null +++ b/tests_integration/lib/_definitions/conda_forge_pinning/__init__.py @@ -0,0 +1,20 @@ +from pathlib import Path + +from fastapi import APIRouter + +from ..base_classes import AbstractIntegrationTestHelper, TestCase + + +class SetupPinnings(TestCase): + def get_router(self) -> APIRouter: + return APIRouter() + + def prepare(self, helper: AbstractIntegrationTestHelper): + feedstock_dir = Path(__file__).parent / "resources" / "feedstock" + helper.overwrite_feedstock_contents("conda-forge-pinning", feedstock_dir) + + def validate(self, helper: AbstractIntegrationTestHelper): + pass + + +ALL_TEST_CASES: list[TestCase] = [SetupPinnings()] diff --git a/tests_integration/lib/_definitions/conda_forge_pinning/resources/feedstock b/tests_integration/lib/_definitions/conda_forge_pinning/resources/feedstock new file mode 160000 index 000000000..3e96d074a --- /dev/null +++ b/tests_integration/lib/_definitions/conda_forge_pinning/resources/feedstock @@ -0,0 +1 @@ +Subproject commit 3e96d074a01bb5790dead82728f02f9acc809104 diff --git a/tests_integration/lib/_definitions/pydantic/__init__.py b/tests_integration/lib/_definitions/pydantic/__init__.py new file mode 100644 index 000000000..fecddb524 --- /dev/null +++ b/tests_integration/lib/_definitions/pydantic/__init__.py @@ -0,0 +1,45 @@ +import json +from pathlib import Path + +from fastapi import APIRouter + +from ..base_classes import AbstractIntegrationTestHelper, TestCase + +PYPI_SIMPLE_API_RESPONSE = json.loads( + Path(__file__) + .parent.joinpath("resources/pypi_simple_api_response.json") + .read_text() +) + + +class VersionUpdate(TestCase): + def get_router(self) -> APIRouter: + router = 
APIRouter() + + @router.get("/pypi.org/pypi/pydantic/json") + def handle_pypi_json_api(): + return { + # rest omitted + "info": {"name": "pydantic", "version": "2.10.2"} + } + + return router + + def prepare(self, helper: AbstractIntegrationTestHelper): + feedstock_dir = Path(__file__).parent / "resources" / "feedstock" + helper.overwrite_feedstock_contents("pydantic", feedstock_dir) + + feedstock_v1_dir = Path(__file__).parent / "resources" / "feedstock_v1" + helper.overwrite_feedstock_contents("pydantic", feedstock_v1_dir, branch="1.x") + + def validate(self, helper: AbstractIntegrationTestHelper): + helper.assert_version_pr_present( + "pydantic", + new_version="2.10.2", + new_hash="2bc2d7f17232e0841cbba4641e65ba1eb6fafb3a08de3a091ff3ce14a197c4fa", + old_version="2.10.1", + old_hash="a4daca2dc0aa429555e0656d6bf94873a7dc5f54ee42b1f5873d666fb3f35560", + ) + + +ALL_TEST_CASES: list[TestCase] = [VersionUpdate()] diff --git a/tests_integration/lib/_definitions/pydantic/resources/feedstock b/tests_integration/lib/_definitions/pydantic/resources/feedstock new file mode 160000 index 000000000..d347fade3 --- /dev/null +++ b/tests_integration/lib/_definitions/pydantic/resources/feedstock @@ -0,0 +1 @@ +Subproject commit d347fade36be6541e73e649fcc448316c7e7e696 diff --git a/tests_integration/lib/_definitions/pydantic/resources/feedstock_v1 b/tests_integration/lib/_definitions/pydantic/resources/feedstock_v1 new file mode 160000 index 000000000..7e5827b31 --- /dev/null +++ b/tests_integration/lib/_definitions/pydantic/resources/feedstock_v1 @@ -0,0 +1 @@ +Subproject commit 7e5827b317b91157fed6a2a8587b5fbcde0f409c diff --git a/tests_integration/lib/_definitions/pydantic/resources/pypi_simple_api_response.json b/tests_integration/lib/_definitions/pydantic/resources/pypi_simple_api_response.json new file mode 100644 index 000000000..5e0e6eb36 --- /dev/null +++ b/tests_integration/lib/_definitions/pydantic/resources/pypi_simple_api_response.json @@ -0,0 +1,78 @@ +{ + "alternate-locations": [], + "files": [ + { + "core-metadata": { + "sha256": "8891ad1ba9f1dd99bdad181d0ad82ef51517615ed7065bf934397a45ef8d91a3" + }, + "data-dist-info-metadata": { + "sha256": "8891ad1ba9f1dd99bdad181d0ad82ef51517615ed7065bf934397a45ef8d91a3" + }, + "filename": "pydantic-2.10.1-py3-none-any.whl", + "hashes": { + "sha256": "a8d20db84de64cf4a7d59e899c2caf0fe9d660c7cfc482528e7020d7dd189a7e" + }, + "provenance": "https://pypi.org/integrity/pydantic/2.10.1/pydantic-2.10.1-py3-none-any.whl/provenance", + "requires-python": ">=3.8", + "size": 455329, + "upload-time": "2024-11-22T00:58:40.347020Z", + "url": "https://files.pythonhosted.org/packages/e0/fc/fda48d347bd50a788dd2a0f318a52160f911b86fc2d8b4c86f4d7c9bceea/pydantic-2.10.1-py3-none-any.whl", + "yanked": false + }, + { + "core-metadata": false, + "data-dist-info-metadata": false, + "filename": "pydantic-2.10.1.tar.gz", + "hashes": { + "sha256": "a4daca2dc0aa429555e0656d6bf94873a7dc5f54ee42b1f5873d666fb3f35560" + }, + "provenance": "https://pypi.org/integrity/pydantic/2.10.1/pydantic-2.10.1.tar.gz/provenance", + "requires-python": ">=3.8", + "size": 783717, + "upload-time": "2024-11-22T00:58:43.709945Z", + "url": "https://files.pythonhosted.org/packages/c4/bd/7fc610993f616d2398958d0028d15eaf53bde5f80cb2edb7aa4f1feaf3a7/pydantic-2.10.1.tar.gz", + "yanked": false + }, + { + "core-metadata": { + "sha256": "4065bed9e76ea559db6875b14516a3dca5e9dda81fa1d296efa542f90422c558" + }, + "data-dist-info-metadata": { + "sha256": 
"4065bed9e76ea559db6875b14516a3dca5e9dda81fa1d296efa542f90422c558" + }, + "filename": "pydantic-2.10.2-py3-none-any.whl", + "hashes": { + "sha256": "cfb96e45951117c3024e6b67b25cdc33a3cb7b2fa62e239f7af1378358a1d99e" + }, + "provenance": "https://pypi.org/integrity/pydantic/2.10.2/pydantic-2.10.2-py3-none-any.whl/provenance", + "requires-python": ">=3.8", + "size": 456364, + "upload-time": "2024-11-26T13:02:27.147110Z", + "url": "https://files.pythonhosted.org/packages/d5/74/da832196702d0c56eb86b75bfa346db9238617e29b0b7ee3b8b4eccfe654/pydantic-2.10.2-py3-none-any.whl", + "yanked": false + }, + { + "core-metadata": false, + "data-dist-info-metadata": false, + "filename": "pydantic-2.10.2.tar.gz", + "hashes": { + "sha256": "2bc2d7f17232e0841cbba4641e65ba1eb6fafb3a08de3a091ff3ce14a197c4fa" + }, + "provenance": "https://pypi.org/integrity/pydantic/2.10.2/pydantic-2.10.2.tar.gz/provenance", + "requires-python": ">=3.8", + "size": 785401, + "upload-time": "2024-11-26T13:02:29.793774Z", + "url": "https://files.pythonhosted.org/packages/41/86/a03390cb12cf64e2a8df07c267f3eb8d5035e0f9a04bb20fb79403d2a00e/pydantic-2.10.2.tar.gz", + "yanked": false + } + ], + "meta": { + "_last-serial": 27108195, + "api-version": "1.3" + }, + "name": "pydantic", + "versions": [ + "2.10.1", + "2.10.2" + ] +} diff --git a/tests_integration/lib/_integration_test_helper.py b/tests_integration/lib/_integration_test_helper.py new file mode 100644 index 000000000..c5f2c06b2 --- /dev/null +++ b/tests_integration/lib/_integration_test_helper.py @@ -0,0 +1,141 @@ +import logging +import shutil +import subprocess +import tempfile +from pathlib import Path +from tempfile import TemporaryDirectory + +from github import Github + +from conda_forge_tick.git_utils import GitCli +from conda_forge_tick.utils import ( + run_command_hiding_token, +) + +from ._definitions import AbstractIntegrationTestHelper, GitHubAccount +from ._shared import ( + FEEDSTOCK_SUFFIX, + get_github_token, +) + +LOGGER = logging.getLogger(__name__) + + +class IntegrationTestHelper(AbstractIntegrationTestHelper): + def overwrite_feedstock_contents( + self, feedstock_name: str, source_dir: Path, branch: str = "main" + ): + self.overwrite_github_repository( + GitHubAccount.CONDA_FORGE_ORG, + feedstock_name + FEEDSTOCK_SUFFIX, + source_dir, + branch, + ) + + def overwrite_github_repository( + self, + owner_account: GitHubAccount, + repo_name: str, + source_dir: Path, + branch: str = "main", + ): + # We execute all git operations in a separate temporary directory to avoid side effects. 
+ with TemporaryDirectory(repo_name) as tmpdir_str: + tmpdir = Path(tmpdir_str) + self._overwrite_github_repository_with_tmpdir( + owner_account, repo_name, source_dir, tmpdir, branch + ) + + @staticmethod + def _overwrite_github_repository_with_tmpdir( + owner_account: GitHubAccount, + repo_name: str, + source_dir: Path, + tmpdir: Path, + branch: str = "main", + ): + """See `overwrite_github_repository`.""" + dest_dir = tmpdir / repo_name + shutil.copytree(source_dir, dest_dir) + + # Remove the .git directory (if it exists) + shutil.rmtree(dest_dir / ".git", ignore_errors=True) + dest_dir.joinpath(".git").unlink(missing_ok=True) # if it is a file + + # Initialize a new git repository and commit everything + subprocess.run( + ["git", "init", f"--initial-branch={branch}"], cwd=dest_dir, check=True + ) + subprocess.run(["git", "add", "--all"], cwd=dest_dir, check=True) + subprocess.run( + ["git", "commit", "-m", "Overwrite Repository Contents"], + cwd=dest_dir, + check=True, + ) + + # Push the new contents to the repository + push_token = get_github_token(owner_account) + run_command_hiding_token( + [ + "git", + "push", + f"https://{push_token}@github.com/{owner_account}/{repo_name}.git", + branch, + "--force", + ], + token=push_token, + cwd=dest_dir, + check=True, + ) + + LOGGER.info( + "Repository contents of %s have been overwritten successfully.", repo_name + ) + + def assert_version_pr_present( + self, + feedstock: str, + new_version: str, + new_hash: str, + old_version: str, + old_hash: str, + ): + gh = Github(get_github_token(GitHubAccount.CONDA_FORGE_ORG)) + + full_feedstock_name = feedstock + FEEDSTOCK_SUFFIX + repo = gh.get_organization(GitHubAccount.CONDA_FORGE_ORG).get_repo( + full_feedstock_name + ) + matching_prs = [ + pr for pr in repo.get_pulls(state="open") if f"v{new_version}" in pr.title + ] + + assert len(matching_prs) == 1, ( + f"Found {len(matching_prs)} matching version PRs, but exactly 1 must be present." 
+ ) + + matching_pr = matching_prs[0] + + assert matching_pr.head.repo.owner.login == GitHubAccount.BOT_USER + assert matching_pr.head.repo.name == full_feedstock_name + + cli = GitCli() + + with tempfile.TemporaryDirectory() as tmpdir: + target_dir = Path(tmpdir) / full_feedstock_name + cli.clone_repo(matching_pr.head.repo.clone_url, target_dir) + cli.checkout_branch(target_dir, matching_pr.head.ref) + + with open(target_dir / "recipe" / "meta.yaml") as f: + meta = f.read() + + assert f'{{% set version = "{new_version}" %}}' in meta + assert f"sha256: {new_hash}" in meta + assert old_version not in meta + assert old_hash not in meta + + LOGGER.info( + "Version PR for %s v%s validated successfully.", + feedstock, + new_version, + ) diff --git a/tests_integration/lib/_run_test_cases.py b/tests_integration/lib/_run_test_cases.py new file mode 100644 index 000000000..12234ea47 --- /dev/null +++ b/tests_integration/lib/_run_test_cases.py @@ -0,0 +1,51 @@ +import logging +from importlib import resources + +from github import Github + +from conda_forge_tick.settings import settings + +from ._definitions import GitHubAccount, TestCase +from ._integration_test_helper import IntegrationTestHelper +from ._shared import FEEDSTOCK_SUFFIX, get_github_token + +LOGGER = logging.getLogger(__name__) +EMPTY_GRAPH_DIR = resources.files("tests_integration.resources").joinpath("empty-graph") + + +def close_all_open_pull_requests(): + github = Github(get_github_token(GitHubAccount.CONDA_FORGE_ORG)) + org = github.get_organization(GitHubAccount.CONDA_FORGE_ORG) + + for repo in org.get_repos(): + if not repo.name.endswith(FEEDSTOCK_SUFFIX): + continue + for pr in repo.get_pulls(state="open"): + pr.create_issue_comment( + "Closing this PR because it is a leftover from a previous test run." + ) + pr.edit(state="closed") + + +def reset_cf_graph(): + with resources.as_file(EMPTY_GRAPH_DIR) as empty_graph_dir: + IntegrationTestHelper().overwrite_github_repository( + GitHubAccount.REGRO_ORG, + "cf-graph-countyfair", + empty_graph_dir, + branch=settings().graph_repo_default_branch, + ) + + +def run_all_prepare_functions(scenario: dict[str, TestCase]): + test_helper = IntegrationTestHelper() + for feedstock_name, test_case in scenario.items(): + LOGGER.info("Preparing %s...", feedstock_name) + test_case.prepare(test_helper) + + +def run_all_validate_functions(scenario: dict[str, TestCase]): + test_helper = IntegrationTestHelper() + for feedstock_name, test_case in scenario.items(): + LOGGER.info("Validating %s...", feedstock_name) + test_case.validate(test_helper) diff --git a/tests_integration/lib/_setup_repositories.py b/tests_integration/lib/_setup_repositories.py new file mode 100644 index 000000000..b0627c9fa --- /dev/null +++ b/tests_integration/lib/_setup_repositories.py @@ -0,0 +1,193 @@ +""" +Module used by the integration tests to set up the GitHub repositories +that are needed for running the tests. + +We do not *create* any repositories within the bot's user account here. This is handled in the prepare function of the +test cases themselves because tests could purposefully rely on the actual bot itself to create repositories. + +However, we do delete unnecessary feedstocks from the bot's user account. 
+""" + +import logging +from collections.abc import Iterable +from dataclasses import dataclass +from typing import Protocol + +from github import Github +from github.Repository import Repository + +from ._definitions import TEST_CASE_MAPPING, GitHubAccount +from ._shared import ( + FEEDSTOCK_SUFFIX, + REGRO_ACCOUNT_REPOS, + get_github_token, + is_user_account, +) + +LOGGER = logging.getLogger(__name__) + + +@dataclass(frozen=True) +class GitHubAccountSetup: + """Information about the setup of a GitHub account for the integration tests.""" + + account: GitHubAccount + """ + The GitHub account for which the setup is done. + """ + + target_names: set[str] + """ + The names of the repositories that should exist after the preparation (excluding the suffix). + """ + + suffix: str | None = None + """ + If given, only repositories with the given suffix are considered for deletion and the target names + are extended with the suffix. + """ + + delete_only: bool = False + """ + If True, only delete unnecessary repositories and do not create any new ones. + """ + + +class RepositoryOwner(Protocol): + def create_repo(self, name: str) -> Repository: + pass + + def get_repo(self, name: str) -> Repository: + pass + + def get_repos(self) -> Iterable[Repository]: + pass + + +def get_test_feedstock_names() -> set[str]: + """Return the list of feedstock names that are needed for the integration tests. + + The names do not include the "-feedstock" suffix. + """ + return set(TEST_CASE_MAPPING.keys()) + + +def _or_empty_set(value: set[str]) -> set[str] | str: + """Return "{}" if the given set is empty, otherwise return the set itself.""" + return value or "{}" + + +def prepare_repositories( + owner: RepositoryOwner, + owner_name: str, + existing_repos: Iterable[Repository], + target_names: Iterable[str], + delete_only: bool, + suffix: str | None = None, +) -> None: + """Prepare the repositories of a certain owner for the integration tests. + Unnecessary repositories are deleted and missing repositories are created. + + Parameters + ---------- + owner + The owner of the repositories. + owner_name + The name of the owner (for logging). + existing_repos + The existing repositories of the owner. + target_names + The names of the repositories that should exist after the preparation (excluding the suffix). + suffix + If given, only repositories with the given suffix are considered for deletion and the target names + are extended with the suffix. + delete_only + If True, only delete unnecessary repositories and do not create any new ones. + """ + existing_names = {repo.name for repo in existing_repos} + target_names = set(target_names) + + if suffix: + existing_names = {name for name in existing_names if name.endswith(suffix)} + target_names = {name + suffix for name in target_names} + + to_delete = existing_names - target_names + to_create = target_names - existing_names + + LOGGER.info( + "Deleting the following repositories for %s: %s", + owner_name, + _or_empty_set(to_delete), + ) + for name in to_delete: + owner.get_repo(name).delete() + + if delete_only: + return + + LOGGER.info( + "Creating the following repositories for %s: %s", + owner_name, + _or_empty_set(to_create), + ) + for name in to_create: + owner.create_repo(name) + + +def prepare_accounts(setup_infos: Iterable[GitHubAccountSetup]): + """Prepare the repositories of all GitHub accounts for the integration tests. + + Raises + ------ + ValueError + If a token is not for associated user. 
+ """ + for setup_info in setup_infos: + # for each account, we need to create a separate GitHub instance because different tokens are needed + github = Github(get_github_token(setup_info.account)) + + owner: RepositoryOwner + existing_repos: Iterable[Repository] + if is_user_account(setup_info.account): + current_user = github.get_user() + if current_user.login != setup_info.account: + raise ValueError("The token is not for the expected user") + owner = current_user + existing_repos = current_user.get_repos(type="owner") + else: + owner = github.get_organization(setup_info.account) + existing_repos = owner.get_repos() + + prepare_repositories( + owner=owner, + owner_name=setup_info.account, + existing_repos=existing_repos, + target_names=setup_info.target_names, + delete_only=setup_info.delete_only, + suffix=setup_info.suffix, + ) + + +def prepare_all_accounts(): + test_feedstock_names = get_test_feedstock_names() + LOGGER.info("Test feedstock names: %s", _or_empty_set(test_feedstock_names)) + + setup_infos: list[GitHubAccountSetup] = [ + GitHubAccountSetup( + GitHubAccount.CONDA_FORGE_ORG, + target_names=test_feedstock_names, + suffix=FEEDSTOCK_SUFFIX, + ), + GitHubAccountSetup( + GitHubAccount.BOT_USER, + target_names=set(), + suffix=FEEDSTOCK_SUFFIX, + delete_only=True, # see the top-level comment for the reason + ), + GitHubAccountSetup( + GitHubAccount.REGRO_ORG, + REGRO_ACCOUNT_REPOS, + ), + ] + + prepare_accounts(setup_infos) diff --git a/tests_integration/lib/_shared.py b/tests_integration/lib/_shared.py new file mode 100644 index 000000000..3f9fa7d73 --- /dev/null +++ b/tests_integration/lib/_shared.py @@ -0,0 +1,109 @@ +import logging +import os + +from fastapi import APIRouter + +from conda_forge_tick.settings import settings + +from ._definitions import GitHubAccount + +GITHUB_TOKEN_ENV_VARS: dict[GitHubAccount, str] = { + GitHubAccount.CONDA_FORGE_ORG: "TEST_SETUP_TOKEN", + GitHubAccount.BOT_USER: "TEST_SETUP_TOKEN", + GitHubAccount.REGRO_ORG: "TEST_SETUP_TOKEN", +} + +IS_USER_ACCOUNT: dict[GitHubAccount, bool] = { + GitHubAccount.CONDA_FORGE_ORG: False, + GitHubAccount.BOT_USER: True, + GitHubAccount.REGRO_ORG: False, +} + +REGRO_ACCOUNT_REPOS = {"cf-graph-countyfair"} + +ENV_GITHUB_RUN_ID = "GITHUB_RUN_ID" +""" +Used as a random seed for the integration tests. +""" +ENV_TEST_SCENARIO_ID = "SCENARIO_ID" + +FEEDSTOCK_SUFFIX = "-feedstock" + + +def setup_logging(default_level: int): + """ + Set up the Python logging module. + Uses the passed log level as the default level. + If running within GitHub Actions and the workflow runs in debug mode, the log level is never set above DEBUG. + """ + if settings().github_runner_debug and default_level > logging.DEBUG: + level = logging.DEBUG + else: + level = default_level + logging.basicConfig(level=level) + + +def get_github_token(account: GitHubAccount) -> str: + return os.environ[GITHUB_TOKEN_ENV_VARS[account]] + + +def is_user_account(account: GitHubAccount) -> bool: + return IS_USER_ACCOUNT[account] + + +def get_transparent_urls() -> set[str]: + """ + Get URLs which should be forwarded to the actual upstream URLs in the tests. 
+ + Unix filename patterns (provided by fnmatch) are used to specify wildcards: + https://docs.python.org/3/library/fnmatch.html + """ + # this is not a constant because the graph_repo_default_branch setting is dynamic + graph_repo_default_branch = settings().graph_repo_default_branch + transparent_urls = { + f"https://raw.githubusercontent.com/regro/cf-graph-countyfair/{graph_repo_default_branch}/mappings/pypi/name_mapping.yaml", + f"https://raw.githubusercontent.com/regro/cf-graph-countyfair/{graph_repo_default_branch}/mappings/pypi/grayskull_pypi_mapping.json", + "https://raw.githubusercontent.com/regro/cf-scripts/refs/heads/main/conda_forge_tick/cf_tick_schema.json", + "https://raw.githubusercontent.com/conda-forge/conda-smithy/refs/heads/main/conda_smithy/data/conda-forge.json", + "https://api.github.com/*", + "https://github.com/regro-staging/*", + "https://github.com/conda-forge-bot-staging/*", + "https://github.com/regro-cf-autotick-bot-staging/*", + "https://raw.githubusercontent.com/regro-staging/*", + "https://raw.githubusercontent.com/conda-forge-bot-staging/*", + "https://raw.githubusercontent.com/regro-cf-autotick-bot-staging/*", + "https://pypi.io/packages/source/*", + "https://pypi.org/packages/source/*", + "https://files.pythonhosted.org/packages/*", + "https://api.anaconda.org/package/conda-forge/conda-forge-pinning", + "https://api.anaconda.org/download/conda-forge/conda-forge-pinning/*", + "https://binstar-cio-packages-prod.s3.amazonaws.com/*", + } + + # this is to protect against mistakes and typos, adjust if it ever becomes too strict + assert all(url.startswith("https://") for url in transparent_urls) + + # silence the PyCharm warning about using http instead of https + # noinspection HttpUrlsUsage + http_urls = {url.replace("https://", "http://", 1) for url in transparent_urls} + + return transparent_urls | http_urls + + +def get_global_router(): + """Return the global FastAPI router to be included in all test scenarios.""" + router = APIRouter() + + @router.get("/cran.r-project.org/src/contrib/") + def handle_cran_index(): + return "" + + @router.get("/cran.r-project.org/src/contrib/Archive/") + def handle_cran_index_archive(): + return "" + + return router + + +VIRTUAL_PROXY_HOSTNAME = "virtual.proxy" +VIRTUAL_PROXY_PORT = 80 diff --git a/tests_integration/mitmproxy_setup_wizard.sh b/tests_integration/mitmproxy_setup_wizard.sh new file mode 100755 index 000000000..5b6f2776f --- /dev/null +++ b/tests_integration/mitmproxy_setup_wizard.sh @@ -0,0 +1,74 @@ +#!/usr/bin/env bash + +set -euo pipefail + +echo "=== mitmproxy certificates setup wizard ===" +echo "Use this shell script to setup the mitmproxy certificates for the integration tests on your machine." + +# we could also add openssl to the conda environment, but this should be available on most systems +if ! command -v openssl &> /dev/null; then + echo "error: openssl is not installed. Please install it first." + exit 1 +fi + +script_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +mitmproxy_dir="${script_dir}/.mitmproxy" +# the mitmproxy_dir should already exist +cd "${mitmproxy_dir}" + +# Headless Mode is used in GitHub Actions only +headless_mode="${MITMPROXY_WIZARD_HEADLESS:-false}" + +if [ "${headless_mode}" = "true" ]; then + echo "Running in headless mode." 
+    echo "The mitmproxy certificates will be generated in the directory: ${mitmproxy_dir}"
+
+    # path to a script that will be executed after the certificates have been generated
+    # the script should add the mitmproxy-ca.crt certificate to the system's trust store
+    # the first argument is the path to the mitmproxy-ca.crt certificate
+    headless_mode_trust_script="${MITMPROXY_WIZARD_HEADLESS_TRUST_SCRIPT}"
+else
+    echo "The mitmproxy certificates will be generated in the directory: ${mitmproxy_dir}"
+    echo "Press enter to continue or Ctrl+C to cancel."
+    read -r
+fi
+
+openssl genrsa -out mitmproxy-ca.key 4096
+openssl req -x509 -new -nodes -key mitmproxy-ca.key -sha256 -days 365 -out mitmproxy-ca.crt -addext keyUsage=critical,keyCertSign -subj "/C=US/ST=cf-scripts/L=cf-scripts/O=cf-scripts/OU=cf-scripts/CN=cf-scripts"
+cat mitmproxy-ca.key mitmproxy-ca.crt > mitmproxy-ca.pem
+
+echo "The mitmproxy certificates have been generated successfully."
+echo "The root certificate will be valid for 365 days."
+
+mitmproxy_ca_crt_file="${mitmproxy_dir}/mitmproxy-ca.crt"
+
+if [ "${headless_mode}" = "true" ]; then
+    echo "Executing the headless mode trust script..."
+    bash "${headless_mode_trust_script}" "${mitmproxy_ca_crt_file}"
+else
+    echo "You now need to trust the mitmproxy-ca.crt certificate in your system's trust store."
+    echo "The exact process depends on your operating system."
+    if [[ -f "/etc/debian_version" ]]; then
+        echo "On Debian-based systems, you can use the following command to trust the certificate:"
+        echo "sudo cp ${mitmproxy_ca_crt_file} /usr/local/share/ca-certificates/mitmproxy-ca.crt"
+        echo "sudo update-ca-certificates"
+    elif [[ "$OSTYPE" == "darwin"* ]]; then
+        echo "On macOS, drag and drop the mitmproxy-ca.crt file into the Keychain Access app while having the 'Login' keychain selected."
+        echo "Then, double-click the certificate in the keychain and set 'Always Trust' in the 'Trust' section."
+    fi
+    echo "The certificate is located at: ${mitmproxy_ca_crt_file}"
+    echo "After you're done, press enter to continue."
+    read -r
+fi
+
+echo "Generating the certificate bundle mitmproxy-cert-bundle.pem to pass to Python..."
+cp "$(python -m certifi)" mitmproxy-cert-bundle.pem
+
+{
+    echo ""
+    echo "# cf-scripts self-signed certificate"
+    cat mitmproxy-ca.crt
+} >> mitmproxy-cert-bundle.pem
+
+echo "The certificate bundle has been generated successfully."
+echo "The mitmproxy certificate setup wizard has been completed successfully."
diff --git a/tests_integration/mock_proxy_start.sh b/tests_integration/mock_proxy_start.sh
new file mode 100755
index 000000000..1e29979dd
--- /dev/null
+++ b/tests_integration/mock_proxy_start.sh
@@ -0,0 +1,25 @@
+#!/usr/bin/env bash
+
+set -euo pipefail
+
+# If debugging locally, set PROXY_DEBUG_LOGGING to true to show full HTTP request/response details.
+# We can't enable this in GitHub Actions because it will expose GitHub secrets.
+PROXY_DEBUG_LOGGING=${PROXY_DEBUG_LOGGING:-false}
+
+if [[ "${PROXY_DEBUG_LOGGING}" == "true" ]]; then
+    flow_detail=4
+else
+    flow_detail=0
+fi
+
+if [[ -z "${MITMPROXY_CONFDIR:-}" ]]; then
+    echo "Set MITMPROXY_CONFDIR to a directory containing a mitmproxy-ca.pem CA certificate to intercept HTTPS traffic."
+ exit 1 +fi + +# You might need to set PYTHONPATH to the root of cf-scripts +mitmdump -s ./mock_server_addon.py \ + --flow-detail "$flow_detail" \ + --set confdir="$MITMPROXY_CONFDIR" \ + --set connection_strategy=lazy \ + --set upstream_cert=false 2>&1 | tee /tmp/mitmproxy.log diff --git a/tests_integration/mock_server_addon.py b/tests_integration/mock_server_addon.py new file mode 100644 index 000000000..b2425821b --- /dev/null +++ b/tests_integration/mock_server_addon.py @@ -0,0 +1,59 @@ +#!/usr/bin/env mitmdump -s +""" +Start this file with `mitmdump -s mock_server_addon.py`. + +This file expects an environment variable to be set to the ID of the test scenario to run. +The name of this variable is defined in `ENV_TEST_SCENARIO_ID`. + +Starting mitmdump from a Python script is not officially supported. +""" + +import fnmatch +import logging +import os + +from fastapi import FastAPI +from mitmproxy.addons import asgiapp +from mitmproxy.http import HTTPFlow + +from tests_integration.lib import ( + ENV_TEST_SCENARIO_ID, + VIRTUAL_PROXY_HOSTNAME, + VIRTUAL_PROXY_PORT, + get_global_router, + get_test_scenario, + get_transparent_urls, +) + +LOGGER = logging.getLogger(__name__) + + +def request(flow: HTTPFlow): + if any( + fnmatch.fnmatch(flow.request.url, pattern) for pattern in get_transparent_urls() + ): + return + flow.request.path = f"/{flow.request.host}{flow.request.path}" + flow.request.host = VIRTUAL_PROXY_HOSTNAME + flow.request.port = VIRTUAL_PROXY_PORT + flow.request.scheme = "http" + + +def _setup_fastapi(): + scenario_id = int(os.environ[ENV_TEST_SCENARIO_ID]) + scenario = get_test_scenario(scenario_id) + + app = FastAPI() + + app.include_router(get_global_router()) + + for feedstock_name, test_case in scenario.items(): + LOGGER.info("Setting up mocks for %s...", feedstock_name) + app.include_router(test_case.get_router()) + + return app + + +addons = [ + asgiapp.ASGIApp(_setup_fastapi(), VIRTUAL_PROXY_HOSTNAME, VIRTUAL_PROXY_PORT), +] diff --git a/tests_integration/resources/__init__.py b/tests_integration/resources/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests_integration/resources/empty-graph/README.md b/tests_integration/resources/empty-graph/README.md new file mode 100644 index 000000000..917fd56dd --- /dev/null +++ b/tests_integration/resources/empty-graph/README.md @@ -0,0 +1,4 @@ +# cf-graph-countyfair (For Integration Tests) + +This repository is used in integration tests of [cf-scripts](https://github.com/regro/cf-scripts) to set up a clean initial state for the +`cf-graph-countyfair` repository. diff --git a/tests_integration/resources/empty-graph/graph.json b/tests_integration/resources/empty-graph/graph.json new file mode 100644 index 000000000..2f3b20c61 --- /dev/null +++ b/tests_integration/resources/empty-graph/graph.json @@ -0,0 +1,9 @@ +{ + "directed": true, + "graph": { + "outputs_lut": {} + }, + "links": [], + "multigraph": false, + "nodes": [] +} diff --git a/tests_integration/test_integration.py b/tests_integration/test_integration.py new file mode 100644 index 000000000..6901f1655 --- /dev/null +++ b/tests_integration/test_integration.py @@ -0,0 +1,321 @@ +""" +Pytest entry point for the integration tests. +Please refer to the README.md in the tests_integration (i.e., parent) directory +for more information. 
+""" + +import contextlib +import logging +import os +import socket +import subprocess +import tempfile +from pathlib import Path + +import pytest +from xprocess import ProcessStarter, XProcess + +from conda_forge_tick.settings import settings, use_settings +from tests_integration.lib import ( + TestCase, + close_all_open_pull_requests, + get_all_test_scenario_ids, + get_test_scenario, + prepare_all_accounts, + reset_cf_graph, + run_all_prepare_functions, + run_all_validate_functions, + setup_logging, +) + +TESTS_INTEGRATION_DIR = Path(__file__).parent +CF_SCRIPTS_ROOT_DIR = TESTS_INTEGRATION_DIR.parent +MITMPROXY_CONFDIR = TESTS_INTEGRATION_DIR / ".mitmproxy" +MITMPROXY_CERT_BUNDLE_FILE = MITMPROXY_CONFDIR / "mitmproxy-cert-bundle.pem" + + +@pytest.fixture(scope="module", autouse=True) +def global_environment_setup(): + """Set up the global environment variables for the tests.""" + # Make sure to also set BOT_TOKEN, we cannot validate this here! + assert os.environ.get("TEST_SETUP_TOKEN"), "TEST_SETUP_TOKEN must be set." + + # In Python 3.13, this might break. https://stackoverflow.com/a/79124282 + os.environ["MITMPROXY_CONFDIR"] = str(MITMPROXY_CONFDIR.resolve()) + os.environ["SSL_CERT_FILE"] = str(MITMPROXY_CERT_BUNDLE_FILE.resolve()) + os.environ["REQUESTS_CA_BUNDLE"] = str(MITMPROXY_CERT_BUNDLE_FILE.resolve()) + os.environ["GIT_SSL_CAINFO"] = str(MITMPROXY_CERT_BUNDLE_FILE.resolve()) + + github_run_id = os.environ.get("GITHUB_RUN_ID", "GITHUB_RUN_ID_NOT_SET") + os.environ["RUN_URL"] = ( + f"https://github.com/regro/cf-scripts/actions/runs/{github_run_id}" + ) + + # by default, we enable container mode because it is the default in the bot + os.environ["CF_FEEDSTOCK_OPS_IN_CONTAINER"] = "false" + + # set if not set + os.environ.setdefault("CF_FEEDSTOCK_OPS_CONTAINER_NAME", "conda-forge-tick") + os.environ.setdefault("CF_FEEDSTOCK_OPS_CONTAINER_TAG", "test") + + new_settings = settings() + + new_settings.frac_make_graph = 1.0 # do not skip nodes due to randomness + new_settings.frac_update_upstream_versions = 1.0 + new_settings.graph_github_backend_repo = "regro-staging/cf-graph-countyfair" + new_settings.conda_forge_org = "conda-forge-bot-staging" + + with use_settings(new_settings): + setup_logging(logging.INFO) + yield + + +@pytest.fixture +def disable_container_mode(monkeypatch): + """Disable container mode for the test.""" + monkeypatch.setenv("CF_FEEDSTOCK_OPS_IN_CONTAINER", "true") + + +@pytest.fixture(scope="module") +def repositories_setup(): + """Set up the repositories for the tests.""" + prepare_all_accounts() + + +@pytest.fixture(params=get_all_test_scenario_ids()) +def scenario(request) -> tuple[int, dict[str, TestCase]]: + scenario_id: int = request.param + close_all_open_pull_requests() + reset_cf_graph() + + scenario = get_test_scenario(scenario_id) + scenario_pretty_print = { + feedstock_name: test_case.__class__.__name__ + for feedstock_name, test_case in scenario.items() + } + + print(f"Preparing test scenario {scenario_id}...") + print(f"Scenario: {scenario_pretty_print}") + + run_all_prepare_functions(scenario) + + return scenario_id, scenario + + +def is_proxy_running(port: int, timeout: float = 2.0) -> bool: + """Return if the proxy is running on localhost:port. + + Parameters + ---------- + port + The port to check. + timeout + The timeout in seconds. + + Returns + ------- + bool + True if the proxy is running, False otherwise. 
+ """ + with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock: + sock.settimeout(timeout) + return sock.connect_ex(("localhost", port)) == 0 + + +@pytest.fixture +def mitmproxy(xprocess: XProcess, scenario: tuple[int, dict[str, TestCase]]): + scenario_id, _ = scenario + + class MitmproxyStarter(ProcessStarter): + args = ["./mock_proxy_start.sh"] + timeout = 60 + popen_kwargs = {"cwd": TESTS_INTEGRATION_DIR} + env = os.environ | { + "SCENARIO_ID": str(scenario_id), + "PYTHONPATH": str(CF_SCRIPTS_ROOT_DIR.resolve()), + } + + def startup_check(self): + return is_proxy_running(port=8080) + + xprocess.ensure("mitmproxy", MitmproxyStarter) + + yield + + xprocess.getinfo("mitmproxy").terminate() + + +@contextlib.contextmanager +def in_fresh_cf_graph(): + """ + Context manager to execute code within the context with a new clone + of cf-graph in a temporary directory. + """ + old_working_dir = os.getcwd() + + with tempfile.TemporaryDirectory() as tmpdir_s: + tmpdir = Path(tmpdir_s) + + cf_graph_repo = settings().graph_github_backend_repo + + # --depth=5 is the same value as used in prod (see autotick-bot/install_bot_code.sh) + subprocess.run( + [ + "git", + "clone", + "--depth=5", + f"https://github.com/{cf_graph_repo}.git", + "cf-graph", + ], + check=True, + cwd=tmpdir, + ) + + cf_graph_dir = tmpdir / "cf-graph" + + subprocess.run( + [ + "git", + "config", + "user.name", + "regro-cf-autotick-bot-staging", + ], + check=True, + cwd=cf_graph_dir, + ) + + subprocess.run( + [ + "git", + "config", + "user.email", + "regro-cf-autotick-bot-staging@users.noreply.github.com", + ], + check=True, + cwd=cf_graph_dir, + ) + + subprocess.run( + [ + "git", + "config", + "pull.rebase", + "false", + ], + check=True, + cwd=cf_graph_dir, + ) + + subprocess.run( + ["git", "config", 'http."https://github.com".proxy', '""'], + check=True, + cwd=cf_graph_dir, + ) + + os.chdir(cf_graph_dir) + yield + + os.chdir(old_working_dir) + + +@contextlib.contextmanager +def mitmproxy_env(): + """Set environment variables for bot steps that should be piped through mitmproxy.""" + old_env = os.environ.copy() + + os.environ["http_proxy"] = "http://127.0.0.1:8080" + os.environ["https_proxy"] = "http://127.0.0.1:8080" + os.environ["CF_FEEDSTOCK_OPS_CONTAINER_PROXY_MODE"] = "true" + + yield + + os.environ.clear() + os.environ.update(old_env) + + +def invoke_bot_command(args: list[str]): + """Invoke the bot command with the given arguments.""" + from conda_forge_tick import cli + + cli.main(args, standalone_mode=False) + + +@pytest.mark.parametrize("use_containers", [False, True]) +def test_scenario( + use_containers: bool, + scenario: tuple[int, dict[str, TestCase]], + repositories_setup, + mitmproxy, + request: pytest.FixtureRequest, +): + """ + Execute the test scenario given by the scenario fixture (note that the fixture is + parameterized, and therefore we run this for all scenarios). + All steps of the bot are executed in sequence to test its end-to-end functionality. + + A test scenario assigns one test case to each feedstock. For details on + the testing setup, please refer to the README.md in the tests_integration + (i.e., parent) directory. + + Parameters + ---------- + use_containers + Whether container mode is enabled or not. + scenario + The test scenario to run. This is a tuple of (scenario_id, scenario), + where scenario is a dictionary with the feedstock name as key and the test + case name as value. + repositories_setup + The fixture that sets up the repositories. 
+ mitmproxy + The fixture that sets up the mitmproxy. + request + The pytest fixture request object. + """ + _, scenario = scenario + + if not use_containers: + request.getfixturevalue("disable_container_mode") + + with in_fresh_cf_graph(): + invoke_bot_command(["--debug", "gather-all-feedstocks"]) + invoke_bot_command(["--debug", "deploy-to-github"]) + + with in_fresh_cf_graph(): + invoke_bot_command(["--debug", "make-graph", "--update-nodes-and-edges"]) + invoke_bot_command(["--debug", "deploy-to-github"]) + + with in_fresh_cf_graph(): + invoke_bot_command(["--debug", "make-graph"]) + invoke_bot_command(["--debug", "deploy-to-github"]) + + with in_fresh_cf_graph(): + with mitmproxy_env(): + invoke_bot_command(["--debug", "update-upstream-versions"]) + invoke_bot_command(["--debug", "deploy-to-github"]) + + with in_fresh_cf_graph(): + with mitmproxy_env(): + invoke_bot_command(["--debug", "make-migrators"]) + invoke_bot_command(["--debug", "deploy-to-github"]) + + with in_fresh_cf_graph(): + with mitmproxy_env(): + invoke_bot_command(["--debug", "auto-tick"]) + invoke_bot_command(["--debug", "deploy-to-github"]) + + with in_fresh_cf_graph(): + # because of an implementation detail in the bot, we need to run make-migrators twice + # for changes to be picked up + with mitmproxy_env(): + invoke_bot_command(["--debug", "make-migrators"]) + invoke_bot_command(["--debug", "deploy-to-github"]) + + with in_fresh_cf_graph(): + # due to a similar implementation detail, we need to run auto-tick twice + # for changes to be picked up + with mitmproxy_env(): + invoke_bot_command(["--debug", "auto-tick"]) + invoke_bot_command(["--debug", "deploy-to-github"]) + + run_all_validate_functions(scenario)