diff --git a/.github/dependabot.yml b/.github/dependabot.yml index fb22da93960d..ceb5bdd9abbb 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -25,7 +25,7 @@ updates: schedule: interval: "weekly" day: "tuesday" - open-pull-requests-limit: 10 + open-pull-requests-limit: 30 rebase-strategy: auto labels: - dependencies diff --git a/.github/workflows/benchmarks.yml b/.github/workflows/benchmarks.yml index 6f7b51645230..d4a7402b3916 100644 --- a/.github/workflows/benchmarks.yml +++ b/.github/workflows/benchmarks.yml @@ -47,7 +47,7 @@ jobs: build: name: Benchmarks - runs-on: benchmark + runs-on: ${{ github.repository_owner == 'Chia-Network' && 'benchmark' || 'ubuntu-latest' }} needs: - setup container: @@ -69,7 +69,7 @@ jobs: uses: Chia-Network/actions/git-mark-workspace-safe@main - name: Checkout Code - uses: actions/checkout@v4 + uses: actions/checkout@v5 with: fetch-depth: 0 @@ -78,7 +78,7 @@ jobs: mode: poetry - name: Checkout test blocks and plots - uses: actions/checkout@v4 + uses: actions/checkout@v5 with: repository: "Chia-Network/test-cache" path: ".chia" diff --git a/.github/workflows/build-linux-installer-deb.yml b/.github/workflows/build-linux-installer-deb.yml index 21a4f4500edf..d6df3e8fe6bd 100644 --- a/.github/workflows/build-linux-installer-deb.yml +++ b/.github/workflows/build-linux-installer-deb.yml @@ -59,7 +59,7 @@ jobs: madmax-suffix: "x86-64" bladebit-suffix: "ubuntu-x86-64.tar.gz" arch-artifact-name: intel - - runs-on: [Linux, ARM64] + - runs-on: ubuntu-24.04-arm arch: arm64 madmax-suffix: "arm64" bladebit-suffix: "ubuntu-arm64.tar.gz" @@ -77,7 +77,7 @@ jobs: uses: Chia-Network/actions/git-mark-workspace-safe@main - name: Checkout Code - uses: actions/checkout@v4 + uses: actions/checkout@v5 with: fetch-depth: 0 submodules: recursive @@ -254,7 +254,7 @@ jobs: matrix: os: - runs-on: - arm: [linux, arm64] + arm: [ubuntu-24.04-arm] intel: [ubuntu-latest] distribution: - name: debian:bookworm diff --git a/.github/workflows/build-linux-installer-rpm.yml b/.github/workflows/build-linux-installer-rpm.yml index 78074041381d..54d8f6f6dca9 100644 --- a/.github/workflows/build-linux-installer-rpm.yml +++ b/.github/workflows/build-linux-installer-rpm.yml @@ -67,7 +67,7 @@ jobs: uses: Chia-Network/actions/git-mark-workspace-safe@main - name: Checkout Code - uses: actions/checkout@v4 + uses: actions/checkout@v5 with: fetch-depth: 0 submodules: recursive diff --git a/.github/workflows/build-macos-installers.yml b/.github/workflows/build-macos-installers.yml index 49a700d8c32a..0e4a7068a398 100644 --- a/.github/workflows/build-macos-installers.yml +++ b/.github/workflows/build-macos-installers.yml @@ -53,11 +53,11 @@ jobs: matrix: python-version: ["3.10"] os: - - runs-on: macos-13 + - runs-on: ${{ github.repository_owner == 'Chia-Network' && 'macos-13-intel' || 'macos-13' }} name: intel bladebit-suffix: macos-x86-64.tar.gz arch-artifact-name: intel - - runs-on: macos-13-arm64 + - runs-on: ${{ github.repository_owner == 'Chia-Network' && 'macos-13-arm64' || 'macos-15' }} name: m1 bladebit-suffix: macos-arm64.tar.gz arch-artifact-name: arm @@ -71,7 +71,7 @@ jobs: - uses: Chia-Network/actions/clean-workspace@main - name: Checkout Code - uses: actions/checkout@v4 + uses: actions/checkout@v5 with: fetch-depth: 0 submodules: recursive @@ -296,7 +296,7 @@ jobs: - name: 13 matrix: 13 runs-on: - intel: macos-13 + intel: ${{ github.repository_owner == 'Chia-Network' && 'macos-13-intel' || 'macos-13' }} - name: 14 matrix: 14 runs-on: diff --git 
a/.github/workflows/build-windows-installer.yml b/.github/workflows/build-windows-installer.yml index 85162ee43c1f..f15707979a01 100644 --- a/.github/workflows/build-windows-installer.yml +++ b/.github/workflows/build-windows-installer.yml @@ -60,7 +60,7 @@ jobs: steps: - name: Checkout Code - uses: actions/checkout@v4 + uses: actions/checkout@v5 with: fetch-depth: 0 submodules: recursive diff --git a/.github/workflows/check-commit-signing.yml b/.github/workflows/check-commit-signing.yml index fa34811a67a6..c19cf367828c 100644 --- a/.github/workflows/check-commit-signing.yml +++ b/.github/workflows/check-commit-signing.yml @@ -22,7 +22,7 @@ jobs: steps: - name: Checkout Code - uses: actions/checkout@v4 + uses: actions/checkout@v5 with: fetch-depth: 0 diff --git a/.github/workflows/check_wheel_availability.yaml b/.github/workflows/check_wheel_availability.yaml index 2ea0fac39998..45f1642aac35 100644 --- a/.github/workflows/check_wheel_availability.yaml +++ b/.github/workflows/check_wheel_availability.yaml @@ -29,12 +29,12 @@ jobs: matrix: linux runs-on: intel: ubuntu-latest - arm: [linux, arm64] + arm: ubuntu-24.04-arm - name: macOS matrix: macos runs-on: - intel: macos-13 - arm: macos-13-arm64 + intel: ${{ github.repository_owner == 'Chia-Network' && 'macos-13-intel' || 'macos-13' }} + arm: ${{ github.repository_owner == 'Chia-Network' && 'macos-13-arm64' || 'macos-15' }} - name: Windows matrix: windows runs-on: @@ -55,7 +55,7 @@ jobs: - uses: Chia-Network/actions/clean-workspace@main - name: Checkout Code - uses: actions/checkout@v4 + uses: actions/checkout@v5 - uses: Chia-Network/actions/setup-python@main with: diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index 52876326edd2..3e6442930c8e 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -29,6 +29,7 @@ concurrency: jobs: analyze: + if: github.repository_owner == 'Chia-Network' name: Analyze runs-on: ubuntu-latest @@ -42,7 +43,7 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v4 + uses: actions/checkout@v5 # Initializes the CodeQL tools for scanning. 
- name: Initialize CodeQL diff --git a/.github/workflows/dependency-review.yml b/.github/workflows/dependency-review.yml index 83674fa77482..b7852e3db538 100644 --- a/.github/workflows/dependency-review.yml +++ b/.github/workflows/dependency-review.yml @@ -13,10 +13,11 @@ permissions: jobs: dependency-review: + if: github.repository_owner == 'Chia-Network' runs-on: ubuntu-latest steps: - name: "Checkout Repository" - uses: actions/checkout@v4 + uses: actions/checkout@v5 - name: "Dependency Review" uses: actions/dependency-review-action@v4 diff --git a/.github/workflows/pre-commit.yml b/.github/workflows/pre-commit.yml index becd6fb7d295..9cff6196c492 100644 --- a/.github/workflows/pre-commit.yml +++ b/.github/workflows/pre-commit.yml @@ -25,12 +25,12 @@ jobs: matrix: linux runs-on: intel: ubuntu-latest - arm: [linux, arm64] + arm: ubuntu-24.04-arm - name: macOS matrix: macos runs-on: - intel: macos-13 - arm: macos-13-arm64 + intel: ${{ github.repository_owner == 'Chia-Network' && 'macos-13-intel' || 'macos-13' }} + arm: ${{ github.repository_owner == 'Chia-Network' && 'macos-13-arm64' || 'macos-15' }} - name: Windows matrix: windows runs-on: @@ -50,6 +50,11 @@ jobs: matrix: windows arch: matrix: arm + # Back to self hosted mac intel runners, which we have very few of + - os: + matrix: macos + arch: + matrix: intel steps: - name: Clean workspace @@ -61,7 +66,7 @@ jobs: run: | git config --global core.autocrlf false - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 - uses: Chia-Network/actions/setup-python@main with: diff --git a/.github/workflows/reflow-publish-installer.yml b/.github/workflows/reflow-publish-installer.yml index 86973e689157..2b42259d04dc 100644 --- a/.github/workflows/reflow-publish-installer.yml +++ b/.github/workflows/reflow-publish-installer.yml @@ -27,6 +27,7 @@ permissions: jobs: publish: + if: github.repository_owner == 'Chia-Network' name: Publish ${{ matrix.arch.name }} ${{ matrix.mode.name }} ${{ matrix.os.file-type.name }} runs-on: ubuntu-latest timeout-minutes: 5 diff --git a/.github/workflows/reflow-version.yml b/.github/workflows/reflow-version.yml index ee8e366e8bd6..2458374f614c 100644 --- a/.github/workflows/reflow-version.yml +++ b/.github/workflows/reflow-version.yml @@ -28,7 +28,7 @@ jobs: steps: - name: Checkout Code - uses: actions/checkout@v4 + uses: actions/checkout@v5 with: fetch-depth: 0 diff --git a/.github/workflows/require-labels.yml b/.github/workflows/require-labels.yml index 10106b7a5cf3..630c34035559 100644 --- a/.github/workflows/require-labels.yml +++ b/.github/workflows/require-labels.yml @@ -4,6 +4,7 @@ on: types: [opened, labeled, unlabeled, synchronize] jobs: check-labels: + if: github.repository_owner == 'Chia-Network' runs-on: ubuntu-latest permissions: checks: write diff --git a/.github/workflows/stale-issue.yml b/.github/workflows/stale-issue.yml index 0bfa6b108b87..9a9bdca80d54 100644 --- a/.github/workflows/stale-issue.yml +++ b/.github/workflows/stale-issue.yml @@ -5,6 +5,7 @@ on: jobs: stale: + if: github.repository_owner == 'Chia-Network' runs-on: ubuntu-latest steps: - uses: actions/stale@v9 diff --git a/.github/workflows/start-release.yml b/.github/workflows/start-release.yml index 9d5d739f7e01..c05a73e01fbf 100644 --- a/.github/workflows/start-release.yml +++ b/.github/workflows/start-release.yml @@ -11,6 +11,7 @@ permissions: jobs: start_release: + if: github.repository_owner == 'Chia-Network' name: Starts release process in Glue API runs-on: [glue-notify] steps: diff --git a/.github/workflows/start-sync-test.yml 
b/.github/workflows/start-sync-test.yml index 010f605a1cdd..1bb1c484f9e9 100644 --- a/.github/workflows/start-sync-test.yml +++ b/.github/workflows/start-sync-test.yml @@ -11,6 +11,7 @@ permissions: jobs: start_release: + if: github.repository_owner == 'Chia-Network' name: Starts Sync Test runs-on: ubuntu-latest steps: diff --git a/.github/workflows/super-linter.yml b/.github/workflows/super-linter.yml index c62f9e6e0bba..3f481b304945 100644 --- a/.github/workflows/super-linter.yml +++ b/.github/workflows/super-linter.yml @@ -50,7 +50,7 @@ jobs: # Checkout the code base # ########################## - name: Checkout Code - uses: actions/checkout@v4 + uses: actions/checkout@v5 with: fetch-depth: 0 diff --git a/.github/workflows/test-install-scripts.yml b/.github/workflows/test-install-scripts.yml index 1b77104ffd5f..8d5052c8a048 100644 --- a/.github/workflows/test-install-scripts.yml +++ b/.github/workflows/test-install-scripts.yml @@ -34,14 +34,14 @@ jobs: - name: Linux emoji: 🐧 runs-on: - arm: [Linux, ARM64] + arm: ubuntu-24.04-arm intel: ubuntu-latest matrix: linux - name: macOS emoji: 🍎 runs-on: - arm: macos-latest - intel: macos-13 + arm: macos-15 + intel: ${{ github.repository_owner == 'Chia-Network' && 'macos-13-intel' || 'macos-13' }} matrix: macos - name: Windows emoji: 🪟 @@ -75,7 +75,7 @@ jobs: steps: - name: Checkout Code - uses: actions/checkout@v4 + uses: actions/checkout@v5 with: fetch-depth: 0 @@ -145,7 +145,7 @@ jobs: matrix: linux runs-on: intel: ubuntu-latest - arm: [linux, arm64] + arm: ubuntu-24.04-arm distribution: - name: amazonlinux:2023 type: amazon @@ -245,7 +245,7 @@ jobs: # after installing git so we use that copy - name: Checkout Code - uses: actions/checkout@v4 + uses: actions/checkout@v5 with: fetch-depth: 0 diff --git a/.github/workflows/test-single.yml b/.github/workflows/test-single.yml index fc18c48dbfc7..f60e2990e680 100644 --- a/.github/workflows/test-single.yml +++ b/.github/workflows/test-single.yml @@ -134,7 +134,7 @@ jobs: git config --global core.autocrlf false - name: Checkout code - uses: actions/checkout@v4 + uses: actions/checkout@v5 with: fetch-depth: 0 diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 594041d155ba..9302e1c19b26 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -47,7 +47,7 @@ on: type: boolean run-macos-intel: description: "run macOS-intel tests" - default: true + default: false required: false type: boolean run-macos-arm: @@ -79,7 +79,7 @@ jobs: steps: - name: Checkout code - uses: actions/checkout@v4 + uses: actions/checkout@v5 - name: Setup Python environment uses: Chia-Network/actions/setup-python@main @@ -104,12 +104,24 @@ jobs: || 'main' }} >> "$GITHUB_OUTPUT" + - name: Generate mac intel configuration + id: macintel + run: | + echo mac_intel=${{ + ( github.event_name == 'schedule' || inputs.run-macos-intel ) + || ( github.repository_owner == 'Chia-Network' && github.repository == 'Chia-Network/chia-blockchain' && startsWith(github.ref, 'refs/heads/release/') ) + || ( github.repository_owner == 'Chia-Network' && github.repository == 'Chia-Network/chia-blockchain' && startsWith(github.base_ref, 'release/') ) + && 'true' + || 'false' + }} >> "$GITHUB_OUTPUT" + outputs: configuration: ${{ steps.configure.outputs.configuration }} matrix_mode: ${{ steps.configure.outputs.matrix_mode }} + mac_intel: ${{ steps.macintel.outputs.mac_intel }} macos-intel: - if: github.event_name != 'workflow_dispatch' || inputs.run-macos-intel + if: needs.configure.outputs.mac_intel == 'true' uses: 
./.github/workflows/test-single.yml needs: configure with: @@ -120,7 +132,7 @@ jobs: concurrency-name: macos-intel configuration: ${{ needs.configure.outputs.configuration }} matrix_mode: ${{ needs.configure.outputs.matrix_mode }} - runs-on: macos-13 + runs-on: ${{ github.repository_owner == 'Chia-Network' && 'macos-13-intel' || 'macos-13' }} arch: intel arch-emoji: 🌀 macos-arm: @@ -173,9 +185,11 @@ jobs: collect-junit: false coverage: + if: always() && github.repository_owner == 'Chia-Network' name: ${{ matrix.os.emoji }} Coverage - ${{ matrix.python.name }} runs-on: ${{ matrix.os.runs-on }} needs: + - configure - macos-intel - macos-arm - ubuntu @@ -196,7 +210,13 @@ jobs: matrix: "3.10" steps: - - uses: actions/checkout@v4 + - uses: re-actors/alls-green@release/v1.2 + id: alls-green + with: + allowed-skips: ${{ needs.configure.outputs.mac_intel != 'true' && 'macos-intel' || '' }} + jobs: ${{ toJSON(needs) }} + + - uses: actions/checkout@v5 with: fetch-depth: 0 @@ -252,13 +272,13 @@ jobs: - uses: chia-network/actions/activate-venv@main - name: Add time out assert results to workflow summary - if: always() && false + if: always() && steps.alls-green.outputs.success == 'true' && false run: | python -m chia._tests.process_junit --limit 50 --type time_out_assert --xml junit-results/junit.xml --markdown --link-prefix ${{ github.event.repository.html_url }}/blob/${{ github.sha }}/ --link-line-separator \#L >> "$GITHUB_STEP_SUMMARY" python -m chia._tests.process_junit --type time_out_assert --xml junit-results/junit.xml --markdown --link-prefix ${{ github.event.repository.html_url }}/blob/${{ github.sha }}/ --link-line-separator \#L >> junit-results/time_out_assert.md - name: Publish JUnit results - if: always() && false + if: always() && steps.alls-green.outputs.success == 'true' && false uses: actions/upload-artifact@v4 with: name: junit-results @@ -272,18 +292,18 @@ jobs: coverage html --rcfile=.coveragerc --data-file=coverage-reports/.coverage --directory coverage-reports/html/ - uses: coverallsapp/github-action@v2 - if: always() + if: always() && steps.alls-green.outputs.success == 'true' env: COVERALLS_REPO_TOKEN: ${{ secrets.COVERALLS_REPO_TOKEN }} - name: Coverage report (chia/) - if: always() + if: always() && steps.alls-green.outputs.success == 'true' run: | set -o pipefail coverage report --rcfile=.coveragerc --data-file=coverage-reports/.coverage --include='chia/**/*' --omit='chia/_tests/**/*' --show-missing | tee coverage-reports/coverage-chia-stdout - name: Coverage report (chia/_tests/) - if: always() + if: always() && steps.alls-green.outputs.success == 'true' run: | set -o pipefail coverage report --rcfile=.coveragerc --data-file=coverage-reports/.coverage --include='chia/_tests/**/*' --show-missing | tee coverage-reports/coverage-tests-stdout @@ -294,7 +314,7 @@ jobs: echo "hash=$(git rev-parse HEAD~1)" >> "$GITHUB_OUTPUT" - name: Coverage report (diff) - if: (github.base_ref != '' || github.event.before != '') && always() + if: (github.base_ref != '' || github.event.before != '') && always() && steps.alls-green.outputs.success == 'true' env: compare-branch: ${{ github.base_ref == '' && steps.parent-commit.outputs.hash || format('origin/{0}', github.base_ref) }} run: | @@ -307,7 +327,7 @@ jobs: fi - name: Remove previous coverage report comment and label from PR - if: github.base_ref != '' && always() + if: github.base_ref != '' && always() && steps.alls-green.outputs.success == 'true' shell: bash env: COV_STATUS: ${{ env.COV_STATUS }} @@ -328,7 +348,7 @@ jobs: fi - name: Add 
diff coverage report comment to PR - if: github.base_ref != '' && always() + if: github.base_ref != '' && always() && steps.alls-green.outputs.success == 'true' env: BRANCH_NAME: ${{ github.sha }} COV_STATUS: ${{ env.COV_STATUS }} @@ -378,12 +398,12 @@ jobs: fi - name: Add diff coverage report to workflow summary - if: (github.base_ref != '' || github.event.before != '') && always() + if: (github.base_ref != '' || github.event.before != '') && always() && steps.alls-green.outputs.success == 'true' run: | cat coverage-reports/diff-cover.md >> "$GITHUB_STEP_SUMMARY" - name: Publish coverage reports - if: always() + if: always() && steps.alls-green.outputs.success == 'true' uses: actions/upload-artifact@v4 with: name: coverage-reports diff --git a/.github/workflows/trigger-docker-dev.yml b/.github/workflows/trigger-docker-dev.yml index f4f62a5402d0..8a533255ee1f 100644 --- a/.github/workflows/trigger-docker-dev.yml +++ b/.github/workflows/trigger-docker-dev.yml @@ -21,6 +21,7 @@ permissions: jobs: trigger: + if: github.repository_owner == 'Chia-Network' name: Trigger building a new dev tag for the chia-docker image runs-on: ubuntu-latest steps: diff --git a/.github/workflows/trigger-docker-main.yml b/.github/workflows/trigger-docker-main.yml index 7f506b4d3583..898f366c7c15 100644 --- a/.github/workflows/trigger-docker-main.yml +++ b/.github/workflows/trigger-docker-main.yml @@ -17,6 +17,7 @@ permissions: jobs: trigger: + if: github.repository_owner == 'Chia-Network' name: Trigger building a new `main` tag for the chia-docker image runs-on: ubuntu-latest steps: diff --git a/.github/workflows/update-mozilla-ca-cert.yaml b/.github/workflows/update-mozilla-ca-cert.yaml index 727585fafd50..2dbf56f6bfd0 100644 --- a/.github/workflows/update-mozilla-ca-cert.yaml +++ b/.github/workflows/update-mozilla-ca-cert.yaml @@ -19,7 +19,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout code - uses: actions/checkout@v4 + uses: actions/checkout@v5 - name: Download latest Mozilla CA cert.pem run: | @@ -32,10 +32,14 @@ jobs: passphrase: ${{ secrets.CHIA_AUTOMATION_PRIVATE_GPG_PASSPHRASE }} - name: Create Pull Request if cacert.pem changed - uses: peter-evans/create-pull-request@v6 + uses: peter-evans/create-pull-request@v7 with: + base: main commit-message: "Update cacert.pem from Mozilla CA bundle" title: "Update cacert.pem from Mozilla CA bundle" body: "Automated update of cacert.pem from https://curl.se/ca/cacert.pem." 
branch: update-cacert-pem add-paths: chia/ssl/cacert.pem + delete-branch: true + committer: "ChiaAutomation " + author: "ChiaAutomation " diff --git a/.github/workflows/upload-pypi-source.yml b/.github/workflows/upload-pypi-source.yml index 76314e035a3b..94a563349484 100644 --- a/.github/workflows/upload-pypi-source.yml +++ b/.github/workflows/upload-pypi-source.yml @@ -26,6 +26,7 @@ permissions: jobs: mypy: + if: github.repository_owner == 'Chia-Network' name: ${{ matrix.os.emoji }} ${{ matrix.arch.emoji }} ${{ matrix.check.name }} - ${{ matrix.os.name }} ${{ matrix.python.major_dot_minor }} runs-on: ${{ matrix.os.runs-on[matrix.arch.matrix] }} timeout-minutes: 20 @@ -38,12 +39,12 @@ jobs: emoji: 🐧 runs-on: intel: ubuntu-latest - arm: [linux, arm64] + arm: ubuntu-24.04-arm - name: macOS matrix: macos emoji: 🍎 runs-on: - intel: macos-13 + intel: macos-13-intel arm: macos-13-arm64 - name: Windows matrix: windows @@ -73,6 +74,11 @@ jobs: matrix: windows arch: matrix: arm + # Back to self hosted mac intel runners, which we have very few of + - os: + matrix: macos + arch: + matrix: intel steps: - uses: chia-network/actions/clean-workspace@main @@ -80,7 +86,7 @@ jobs: - name: Add safe git directory uses: chia-network/actions/git-mark-workspace-safe@main - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 with: fetch-depth: 0 submodules: recursive @@ -101,6 +107,7 @@ jobs: ${{ matrix.check.command }} check: + if: github.repository_owner == 'Chia-Network' name: ${{ matrix.os.emoji }} ${{ matrix.check.name }} - ${{ matrix.os.name }} ${{ matrix.arch.name }} ${{ matrix.python.major_dot_minor }} runs-on: ${{ matrix.os.runs-on[matrix.arch.matrix] }} timeout-minutes: 20 @@ -113,7 +120,7 @@ jobs: emoji: 🐧 runs-on: intel: ubuntu-latest - arm: [linux, arm64] + arm: ubuntu-24.04-arm arch: - name: Intel matrix: intel @@ -145,7 +152,7 @@ jobs: - name: Add safe git directory uses: chia-network/actions/git-mark-workspace-safe@main - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 with: fetch-depth: 0 submodules: recursive @@ -166,6 +173,7 @@ jobs: ${{ matrix.check.command }} upload_source_dist: + if: github.repository_owner == 'Chia-Network' name: Lint and Upload source distribution runs-on: ${{ matrix.os }} timeout-minutes: 30 @@ -183,7 +191,7 @@ jobs: uses: chia-network/actions/git-mark-workspace-safe@main - name: Checkout Code - uses: actions/checkout@v4 + uses: actions/checkout@v5 with: fetch-depth: 0 submodules: recursive diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 0f35151344fe..01b7da6b4d62 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -49,12 +49,12 @@ repos: - id: prettier types_or: [ini, json, toml, yaml, markdown] - repo: https://github.com/scop/pre-commit-shfmt - rev: v3.8.0-1 + rev: v3.12.0-2 hooks: - id: shfmt args: ["--diff", "--write", "-i", "2"] - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.3.0 + rev: v6.0.0 hooks: - id: check-yaml - id: mixed-line-ending diff --git a/CHANGELOG.md b/CHANGELOG.md index 2321c3a91f0b..6de804945301 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,6 +6,109 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project does not yet adhere to [Semantic Versioning](https://semver.org/spec/v2.0.0.html) for setuptools_scm/PEP 440 reasons. +## 2.5.5 Chia blockchain 2025-08-19 + +Note that protocol changes between node, farmer, and harvester will require all entities to be upgraded +at the same time to 2.5.5. 
On a simple one-machine setup, this will be handled by the installer. +But if you have more complicated topologies with remote farmers or harvesters, you will need to upgrade +all components. + +2.5.5 will also make changes to the blockchain database that are incompatible with previous versions. If you +run 2.5.5 but later wish to downgrade to an earlier version, you will need to downgrade your database schema by +running the following command: + +``` +python -c "import sqlite3, sys, os; conn = sqlite3.connect(os.path.expanduser(sys.argv[1])); cursor = conn.execute('UPDATE coin_record SET spent_index = 0 WHERE spent_index = -1'); print(f'Updated {cursor.rowcount} records'); conn.commit(); conn.close()" <path-to-your-database> +``` + +Replace `<path-to-your-database>` with your actual database path. + +## What's Changed + +### Added + +- Add `chia dev mempool` CLI commands to import, export, and benchmark the mempool +- Add new error logging if the block cost fails to compute +- Add dicts to streamable +- Add independent Full Node RPC Validation Tool (`tools/validate_rpcs.py`) +- Add Revocable CAT support to wallet (CHIP-38) +- Add mempool optimizations and fixes in spend deduplication and singleton fast forward +- Add mempool vault fast forward support +- Add unit tests to cover singleton fast forward dynamics +- Add new optional block creation algorithm to maximize transactions (set config.yaml `full_node:block_creation` to `1`) +- Add new config setting for block creation timeout (`full_node:block_creation_timeout`) +- Add preparation for new plot format and expected hard fork (CHIP-48) +- Add canonical CLVM serialization requirement after expected hard fork + +### Changed + +- Harvester<->Farmer protocol change: compute plot filter on the harvester (CHIP-48 prep) +- Farmer<->Node protocol change (CHIP-48 prep) +- Backwards-incompatible schema change for mempool fast-forward support +- Remove problematic `coins_added_at_height_cache` cache +- Remove no longer needed `item_inclusion_filter` in the mempool +- Significantly speed up mempool manager tests by not forcing them to request unneeded fixtures +- Refactor and add test for WSM puzzle hash endpoints +- Remove unneeded call to compute block cost +- Add puzzle hash generation to action scopes +- Remove direct secret key access from pool wallet +- Add ContextManager to FullNodePeers +- Simplify DB checks in `invariant_check_mempool` +- Log how long it took to call `peak_post_processing_2` +- Move `chia.types.aliases` to `chia.server.aliases` +- Move tx processing queue entry classes +- Improve the serialisation of AddressManager +- Remove the dependency of `chia.consensus` on the module `chia.types` +- Adapt `test_check_removals_with_block_creation` to cover both block generator creation versions +- Use upstream miniupnpc 2.3.3 +- Don't return addition coin records in CoinStore's new_block +- Avoid recomputing coin record names in `rollback_to_block` when we have access to them +- Move SP broadcast outside of blockchain mutex +- Use `pyproject.toml` `[project]` section (again) +- Simplify `test_set_spent` +- Leverage `execute_fetchall` in CoinStore's `rollback_to_block` +- Insert DB values in CoinStore's `new_block` without creating coin records +- Also log the VDF field being compacted (thanks @xearl4) +- Avoid recomputing coin IDs in `run_new_block_benchmark` +- Simplify `test_rollback` +- Extract `_add_coin_records` out of CoinStore and simplify it +- Simplify `test_num_unspent` +- Simplify `test_basic_coin_store` +- Optimize rolled back state construction in
`_reconsider_peak` +- Leverage CoinStore's `new_block` in SpendSim's `farm_block` instead of custom coin store manipulation +- Port NFT, pooling, DID, and Datalayer RPCs to `@marshal` decorator +- Simplify SpendSim's `farm_block` +- Migrate away from `clvm` imports +- Pass coin IDs from Blockchain's `_reconsider_peak` to CoinStore's `new_block` to avoid recomputing them +- Unify fork peak and reward coins handling between ForkInfo's `include_spends` and `include_block` +- Replace `CATWallet.create_new_cat_wallet` in `test_cat_wallet.py` +- Change minimum Node.js version to 20 and npm version to 10 +- Timelord: don't skip same peak if in unfinished cache +- Set app minimum macOS version to macOS 13 +- Bump `chia-rs` to `0.27.0` +- Bump `chiavdf` to `1.1.11` +- Bump `clvm` to `0.9.14` +- Bump `clvm-tools-rs` to `0.1.48` + +### Fixed + +- Enable keccak softfork in the wallet (fixes #19480) +- Add some checks when trying to join the same pool already joined (fixes #7592) +- Allow DIDs from other wallets with NIL recovery lists (fixes #18947) +- Set AGG_SIG_ME_ADDITIONAL_DATA in config.yaml for simulator +- Use index when fetching SP +- Redact daemon websocket message logging +- Reduce response failure error consumption +- Fixed some typos in comments (thanks @timesince) +- Fixed more typos in comments (thanks @racerole) +- Fixed yet more typos in comments (thanks @yetyear) +- Fixed typo in CONTRIBUTING.md (thanks @ctrlaltdel) + +### Removed + +- Testing and support for Ubuntu LTS 20.04 +- Testing and support for Debian 11 "Bullseye" + ## 2.5.4 Chia blockchain 2025-05-28 ## What's Changed diff --git a/README.md b/README.md index 490b15dfad2c..90409b1db5aa 100644 --- a/README.md +++ b/README.md @@ -44,7 +44,7 @@ Once installed, an [Intro to Chia][link-intro] guide is available in the [Chia D [badge-commits]: https://img.shields.io/github/commit-activity/w/Chia-Network/chia-blockchain?logo=GitHub [badge-contributors]: https://img.shields.io/github/contributors/Chia-Network/chia-blockchain?logo=GitHub [badge-coverage]: https://img.shields.io/coverallsCoverage/github/Chia-Network/chia-blockchain?logo=Coveralls&logoColor=red&labelColor=%23212F39 -[badge-discord]: https://dcbadge.vercel.app/api/server/chia?style=flat-square&theme=full-presence +[badge-discord]: https://img.shields.io/badge/discord-Chia%20Network-green.svg [badge-discord2]: https://img.shields.io/discord/1034523881404370984.svg?label=Discord&logo=discord&colorB=1e2b2f [badge-downloads]: https://img.shields.io/github/downloads/Chia-Network/chia-blockchain/total?logo=GitHub [badge-rc]: https://img.shields.io/badge/dynamic/json?url=https%3A%2F%2Fdownload.chia.net%2Flatest%2Fbadge-data-rc.json&query=%24.message&logo=chianetwork&logoColor=white&label=Latest%20RC&labelColor=%230d3349&color=%23474748 diff --git a/build_scripts/build_linux_deb-2-installer.sh b/build_scripts/build_linux_deb-2-installer.sh index 0fe2f0990e4f..1d70340f4f1d 100644 --- a/build_scripts/build_linux_deb-2-installer.sh +++ b/build_scripts/build_linux_deb-2-installer.sh @@ -91,11 +91,6 @@ if [ "$PLATFORM" = "arm64" ]; then # https://github.com/jordansissel/fpm/issues/1801#issuecomment-919877499 # @TODO Consolidates the process to amd64 if the issue of electron-builder is resolved sudo apt-get -y install ruby ruby-dev - # ERROR: Error installing fpm: - # The last version of dotenv (>= 0) to support your Ruby & RubyGems was 2.8.1. Try installing it with `gem install dotenv -v 2.8.1` and then running the current command again - # dotenv requires Ruby version >= 3.0.
The current ruby version is 2.7.0.0. - # @TODO Once ruby 3.0 can be installed on `apt install ruby`, installing dotenv below should be removed. - sudo gem install dotenv -v 2.8.1 sudo gem install fpm echo USE_SYSTEM_FPM=true "${NPM_PATH}/electron-builder" build --linux deb --arm64 \ --config.extraMetadata.name=chia-blockchain \ diff --git a/build_scripts/npm_linux/package-lock.json b/build_scripts/npm_linux/package-lock.json index c36b05a0bd9c..fdc6dabdf677 100644 --- a/build_scripts/npm_linux/package-lock.json +++ b/build_scripts/npm_linux/package-lock.json @@ -2360,14 +2360,11 @@ } }, "node_modules/tmp": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.2.1.tgz", - "integrity": "sha512-76SUhtfqR2Ijn+xllcI5P1oyannHNHByD80W1q447gU3mp9G9PSpGdWmjUOHRDPiHYacIk66W7ubDTuPF3BEtQ==", - "dependencies": { - "rimraf": "^3.0.0" - }, + "version": "0.2.5", + "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.2.5.tgz", + "integrity": "sha512-voyz6MApa1rQGUxT3E+BK7/ROe8itEx7vD8/HEvt4xwXucvQ5G5oeEiHkmHZJuBO21RpOf+YYm9MOivj709jow==", "engines": { - "node": ">=8.17.0" + "node": ">=14.14" } }, "node_modules/tmp-promise": { @@ -2378,20 +2375,6 @@ "tmp": "^0.2.0" } }, - "node_modules/tmp/node_modules/rimraf": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", - "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", - "dependencies": { - "glob": "^7.1.3" - }, - "bin": { - "rimraf": "bin.js" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, "node_modules/truncate-utf8-bytes": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/truncate-utf8-bytes/-/truncate-utf8-bytes-1.0.2.tgz", @@ -4335,22 +4318,9 @@ } }, "tmp": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.2.1.tgz", - "integrity": "sha512-76SUhtfqR2Ijn+xllcI5P1oyannHNHByD80W1q447gU3mp9G9PSpGdWmjUOHRDPiHYacIk66W7ubDTuPF3BEtQ==", - "requires": { - "rimraf": "^3.0.0" - }, - "dependencies": { - "rimraf": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", - "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", - "requires": { - "glob": "^7.1.3" - } - } - } + "version": "0.2.5", + "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.2.5.tgz", + "integrity": "sha512-voyz6MApa1rQGUxT3E+BK7/ROe8itEx7vD8/HEvt4xwXucvQ5G5oeEiHkmHZJuBO21RpOf+YYm9MOivj709jow==" }, "tmp-promise": { "version": "3.0.3", diff --git a/build_scripts/npm_macos/package-lock.json b/build_scripts/npm_macos/package-lock.json index a45f3013050d..66e0f5a6d3c2 100644 --- a/build_scripts/npm_macos/package-lock.json +++ b/build_scripts/npm_macos/package-lock.json @@ -738,6 +738,18 @@ "js-yaml": "bin/js-yaml.js" } }, + "node_modules/call-bind-apply-helpers": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz", + "integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==", + "dependencies": { + "es-errors": "^1.3.0", + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, "node_modules/chalk": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", @@ -1063,6 +1075,19 @@ "resolved": "https://registry.npmjs.org/dotenv-expand/-/dotenv-expand-5.1.0.tgz", "integrity": 
"sha512-YXQl1DSa4/PQyRfgrv6aoNjhasp/p4qs9FjJ4q4cQk+8m4r6k4ZSiEyytKG8f8W9gi8WsQtIObNmKd+tMzNTmA==" }, + "node_modules/dunder-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", + "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==", + "dependencies": { + "call-bind-apply-helpers": "^1.0.1", + "es-errors": "^1.3.0", + "gopd": "^1.2.0" + }, + "engines": { + "node": ">= 0.4" + } + }, "node_modules/eastasianwidth": { "version": "0.2.0", "resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz", @@ -1179,6 +1204,47 @@ "resolved": "https://registry.npmjs.org/err-code/-/err-code-2.0.3.tgz", "integrity": "sha512-2bmlRpNKBxT/CRmPOlyISQpNj+qSeYvcym/uT0Jx2bMOlKLtSy1ZmLuVxSEKKyor/N5yhvp/ZiG1oE3DEYMSFA==" }, + "node_modules/es-define-property": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz", + "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-errors": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", + "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-object-atoms": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.1.1.tgz", + "integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==", + "dependencies": { + "es-errors": "^1.3.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-set-tostringtag": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.1.0.tgz", + "integrity": "sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==", + "dependencies": { + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.6", + "has-tostringtag": "^1.0.2", + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + } + }, "node_modules/escalade": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz", @@ -1259,12 +1325,14 @@ } }, "node_modules/form-data": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz", - "integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==", + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.4.tgz", + "integrity": "sha512-KrGhL9Q4zjj0kiUt5OO4Mr/A/jlI2jDYs5eHBpYHPcBEVSiipAvn2Ko2HnPe20rmcuuvMHNdZFp+4IlGTMF0Ow==", "dependencies": { "asynckit": "^0.4.0", "combined-stream": "^1.0.8", + "es-set-tostringtag": "^2.1.0", + "hasown": "^2.0.2", "mime-types": "^2.1.12" }, "engines": { @@ -1306,6 +1374,14 @@ "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==" }, + "node_modules/function-bind": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", + "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, 
"node_modules/get-caller-file": { "version": "2.0.5", "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", @@ -1314,6 +1390,41 @@ "node": "6.* || 8.* || >= 10.*" } }, + "node_modules/get-intrinsic": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz", + "integrity": "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==", + "dependencies": { + "call-bind-apply-helpers": "^1.0.2", + "es-define-property": "^1.0.1", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.1.1", + "function-bind": "^1.1.2", + "get-proto": "^1.0.1", + "gopd": "^1.2.0", + "has-symbols": "^1.1.0", + "hasown": "^2.0.2", + "math-intrinsics": "^1.1.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz", + "integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==", + "dependencies": { + "dunder-proto": "^1.0.1", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + } + }, "node_modules/glob": { "version": "7.2.3", "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", @@ -1333,6 +1444,17 @@ "url": "https://github.com/sponsors/isaacs" } }, + "node_modules/gopd": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz", + "integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/graceful-fs": { "version": "4.2.11", "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", @@ -1346,6 +1468,42 @@ "node": ">=8" } }, + "node_modules/has-symbols": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz", + "integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-tostringtag": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz", + "integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==", + "dependencies": { + "has-symbols": "^1.0.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/hasown": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", + "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", + "dependencies": { + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, "node_modules/hosted-git-info": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-4.1.0.tgz", @@ -1636,6 +1794,14 @@ "node": ">=10" } }, + "node_modules/math-intrinsics": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", + "integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==", + "engines": { + "node": ">= 0.4" + } + }, "node_modules/mime": { "version": 
"2.6.0", "resolved": "https://registry.npmjs.org/mime/-/mime-2.6.0.tgz", @@ -1928,20 +2094,6 @@ "node": ">= 4" } }, - "node_modules/rimraf": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", - "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", - "dependencies": { - "glob": "^7.1.3" - }, - "bin": { - "rimraf": "bin.js" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, "node_modules/safe-buffer": { "version": "5.2.1", "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", @@ -2191,14 +2343,11 @@ } }, "node_modules/tmp": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.2.1.tgz", - "integrity": "sha512-76SUhtfqR2Ijn+xllcI5P1oyannHNHByD80W1q447gU3mp9G9PSpGdWmjUOHRDPiHYacIk66W7ubDTuPF3BEtQ==", - "dependencies": { - "rimraf": "^3.0.0" - }, + "version": "0.2.4", + "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.2.4.tgz", + "integrity": "sha512-UdiSoX6ypifLmrfQ/XfiawN6hkjSBpCjhKxxZcWlUUmoXLaCKQU0bx4HF/tdDK2uzRuchf1txGvrWBzYREssoQ==", "engines": { - "node": ">=8.17.0" + "node": ">=14.14" } }, "node_modules/tmp-promise": { @@ -2970,6 +3119,15 @@ "sax": "^1.2.4" } }, + "call-bind-apply-helpers": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz", + "integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==", + "requires": { + "es-errors": "^1.3.0", + "function-bind": "^1.1.2" + } + }, "chalk": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", @@ -3205,6 +3363,16 @@ "resolved": "https://registry.npmjs.org/dotenv-expand/-/dotenv-expand-5.1.0.tgz", "integrity": "sha512-YXQl1DSa4/PQyRfgrv6aoNjhasp/p4qs9FjJ4q4cQk+8m4r6k4ZSiEyytKG8f8W9gi8WsQtIObNmKd+tMzNTmA==" }, + "dunder-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", + "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==", + "requires": { + "call-bind-apply-helpers": "^1.0.1", + "es-errors": "^1.3.0", + "gopd": "^1.2.0" + } + }, "eastasianwidth": { "version": "0.2.0", "resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz", @@ -3281,6 +3449,35 @@ "resolved": "https://registry.npmjs.org/err-code/-/err-code-2.0.3.tgz", "integrity": "sha512-2bmlRpNKBxT/CRmPOlyISQpNj+qSeYvcym/uT0Jx2bMOlKLtSy1ZmLuVxSEKKyor/N5yhvp/ZiG1oE3DEYMSFA==" }, + "es-define-property": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz", + "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==" + }, + "es-errors": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", + "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==" + }, + "es-object-atoms": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.1.1.tgz", + "integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==", + "requires": { + "es-errors": "^1.3.0" + } + }, + "es-set-tostringtag": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.1.0.tgz", + "integrity": 
"sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==", + "requires": { + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.6", + "has-tostringtag": "^1.0.2", + "hasown": "^2.0.2" + } + }, "escalade": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz", @@ -3344,12 +3541,14 @@ } }, "form-data": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz", - "integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==", + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.4.tgz", + "integrity": "sha512-KrGhL9Q4zjj0kiUt5OO4Mr/A/jlI2jDYs5eHBpYHPcBEVSiipAvn2Ko2HnPe20rmcuuvMHNdZFp+4IlGTMF0Ow==", "requires": { "asynckit": "^0.4.0", "combined-stream": "^1.0.8", + "es-set-tostringtag": "^2.1.0", + "hasown": "^2.0.2", "mime-types": "^2.1.12" } }, @@ -3382,11 +3581,42 @@ "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==" }, + "function-bind": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", + "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==" + }, "get-caller-file": { "version": "2.0.5", "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==" }, + "get-intrinsic": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz", + "integrity": "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==", + "requires": { + "call-bind-apply-helpers": "^1.0.2", + "es-define-property": "^1.0.1", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.1.1", + "function-bind": "^1.1.2", + "get-proto": "^1.0.1", + "gopd": "^1.2.0", + "has-symbols": "^1.1.0", + "hasown": "^2.0.2", + "math-intrinsics": "^1.1.0" + } + }, + "get-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz", + "integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==", + "requires": { + "dunder-proto": "^1.0.1", + "es-object-atoms": "^1.0.0" + } + }, "glob": { "version": "7.2.3", "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", @@ -3400,6 +3630,11 @@ "path-is-absolute": "^1.0.0" } }, + "gopd": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz", + "integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==" + }, "graceful-fs": { "version": "4.2.11", "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", @@ -3410,6 +3645,27 @@ "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==" }, + "has-symbols": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz", + "integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==" + }, + "has-tostringtag": { + "version": "1.0.2", + "resolved": 
"https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz", + "integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==", + "requires": { + "has-symbols": "^1.0.3" + } + }, + "hasown": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", + "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", + "requires": { + "function-bind": "^1.1.2" + } + }, "hosted-git-info": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-4.1.0.tgz", @@ -3630,6 +3886,11 @@ "yallist": "^4.0.0" } }, + "math-intrinsics": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", + "integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==" + }, "mime": { "version": "2.6.0", "resolved": "https://registry.npmjs.org/mime/-/mime-2.6.0.tgz", @@ -3850,14 +4111,6 @@ "resolved": "https://registry.npmjs.org/retry/-/retry-0.12.0.tgz", "integrity": "sha512-9LkiTwjUh6rT555DtE9rTX+BKByPfrMzEAtnlEtdEwr3Nkffwiihqe2bWADg+OQRjt9gl6ICdmB/ZFDCGAtSow==" }, - "rimraf": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", - "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", - "requires": { - "glob": "^7.1.3" - } - }, "safe-buffer": { "version": "5.2.1", "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", @@ -4041,12 +4294,9 @@ } }, "tmp": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.2.1.tgz", - "integrity": "sha512-76SUhtfqR2Ijn+xllcI5P1oyannHNHByD80W1q447gU3mp9G9PSpGdWmjUOHRDPiHYacIk66W7ubDTuPF3BEtQ==", - "requires": { - "rimraf": "^3.0.0" - } + "version": "0.2.4", + "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.2.4.tgz", + "integrity": "sha512-UdiSoX6ypifLmrfQ/XfiawN6hkjSBpCjhKxxZcWlUUmoXLaCKQU0bx4HF/tdDK2uzRuchf1txGvrWBzYREssoQ==" }, "tmp-promise": { "version": "3.0.3", diff --git a/build_scripts/npm_windows/package-lock.json b/build_scripts/npm_windows/package-lock.json index 2162f043bb22..8e19efb19d75 100644 --- a/build_scripts/npm_windows/package-lock.json +++ b/build_scripts/npm_windows/package-lock.json @@ -2328,14 +2328,11 @@ } }, "node_modules/tmp": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.2.1.tgz", - "integrity": "sha512-76SUhtfqR2Ijn+xllcI5P1oyannHNHByD80W1q447gU3mp9G9PSpGdWmjUOHRDPiHYacIk66W7ubDTuPF3BEtQ==", - "dependencies": { - "rimraf": "^3.0.0" - }, + "version": "0.2.4", + "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.2.4.tgz", + "integrity": "sha512-UdiSoX6ypifLmrfQ/XfiawN6hkjSBpCjhKxxZcWlUUmoXLaCKQU0bx4HF/tdDK2uzRuchf1txGvrWBzYREssoQ==", "engines": { - "node": ">=8.17.0" + "node": ">=14.14" } }, "node_modules/tmp-promise": { @@ -2346,20 +2343,6 @@ "tmp": "^0.2.0" } }, - "node_modules/tmp/node_modules/rimraf": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", - "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", - "dependencies": { - "glob": "^7.1.3" - }, - "bin": { - "rimraf": "bin.js" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, "node_modules/truncate-utf8-bytes": { "version": "1.0.2", "resolved": 
"https://registry.npmjs.org/truncate-utf8-bytes/-/truncate-utf8-bytes-1.0.2.tgz", @@ -4310,22 +4293,9 @@ } }, "tmp": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.2.1.tgz", - "integrity": "sha512-76SUhtfqR2Ijn+xllcI5P1oyannHNHByD80W1q447gU3mp9G9PSpGdWmjUOHRDPiHYacIk66W7ubDTuPF3BEtQ==", - "requires": { - "rimraf": "^3.0.0" - }, - "dependencies": { - "rimraf": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", - "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", - "requires": { - "glob": "^7.1.3" - } - } - } + "version": "0.2.4", + "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.2.4.tgz", + "integrity": "sha512-UdiSoX6ypifLmrfQ/XfiawN6hkjSBpCjhKxxZcWlUUmoXLaCKQU0bx4HF/tdDK2uzRuchf1txGvrWBzYREssoQ==" }, "tmp-promise": { "version": "3.0.3", diff --git a/build_scripts/pyinstaller.spec b/build_scripts/pyinstaller.spec index ff778fe4d2c2..ae18dbb48357 100644 --- a/build_scripts/pyinstaller.spec +++ b/build_scripts/pyinstaller.spec @@ -173,7 +173,7 @@ add_binary("chia", f"{ROOT}/chia/cmds/chia.py", COLLECT_ARGS) add_binary("daemon", f"{ROOT}/chia/daemon/server.py", COLLECT_ARGS) for server in SERVERS: - add_binary(f"start_{server}", f"{ROOT}/chia/server/start_{server}.py", COLLECT_ARGS) + add_binary(f"start_{server}", f"{ROOT}/chia/{server}/start_{server}.py", COLLECT_ARGS) add_binary("start_crawler", f"{ROOT}/chia/seeder/start_crawler.py", COLLECT_ARGS) add_binary("start_seeder", f"{ROOT}/chia/seeder/dns_server.py", COLLECT_ARGS) diff --git a/chia-blockchain-gui b/chia-blockchain-gui index 1c6fb425c5b5..5d57f5dc6094 160000 --- a/chia-blockchain-gui +++ b/chia-blockchain-gui @@ -1 +1 @@ -Subproject commit 1c6fb425c5b5f03e7c10cd11d0ff6003866a6e0c +Subproject commit 5d57f5dc60945dda2c94dcbde1ec80fb2d59c2d7 diff --git a/chia/_tests/blockchain/blockchain_test_utils.py b/chia/_tests/blockchain/blockchain_test_utils.py index 3de9ff90553d..b2cd67e2d258 100644 --- a/chia/_tests/blockchain/blockchain_test_utils.py +++ b/chia/_tests/blockchain/blockchain_test_utils.py @@ -76,7 +76,7 @@ async def _validate_and_add_block( conds = None else: # fake the signature validation. Just say True here. 
- conds = SpendBundleConditions([], 0, 0, 0, None, None, [], 0, 0, 0, True, 0, 0) + conds = SpendBundleConditions([], 0, 0, 0, None, None, [], 0, 0, 0, True, 0, 0, 0, 0, 0) results = PreValidationResult(None, uint64(1), conds, uint32(0)) else: future = await pre_validate_block( @@ -116,11 +116,10 @@ async def _validate_and_add_block( if err is not None: # Got an error raise AssertionError(err) - else: - # Here we will enforce checking of the exact error - if err != expected_error: - # Did not get the right error, or did not get an error - raise AssertionError(f"Expected {expected_error} but got {err}") + # Here we will enforce checking of the exact error + elif err != expected_error: + # Did not get the right error, or did not get an error + raise AssertionError(f"Expected {expected_error} but got {err}") if expected_result is not None and expected_result != result: raise AssertionError(f"Expected {expected_result} but got {result}") diff --git a/chia/_tests/blockchain/test_augmented_chain.py b/chia/_tests/blockchain/test_augmented_chain.py index c9d751950ef7..cb7a729e5258 100644 --- a/chia/_tests/blockchain/test_augmented_chain.py +++ b/chia/_tests/blockchain/test_augmented_chain.py @@ -1,5 +1,6 @@ from __future__ import annotations +import re from dataclasses import dataclass, field from typing import TYPE_CHECKING, ClassVar, Optional, cast @@ -93,7 +94,7 @@ async def test_augmented_chain(default_10000_blocks: list[FullBlock]) -> None: with pytest.raises(KeyError): await abc.prev_block_hash([blocks[2].header_hash]) - with pytest.raises(ValueError, match="Err.GENERATOR_REF_HAS_NO_GENERATOR"): + with pytest.raises(ValueError, match=re.escape(Err.GENERATOR_REF_HAS_NO_GENERATOR.name)): await abc.lookup_block_generators(blocks[3].header_hash, {uint32(3)}) block_records = [] @@ -105,11 +106,11 @@ async def test_augmented_chain(default_10000_blocks: list[FullBlock]) -> None: assert abc.height_to_block_record(uint32(1)) == block_records[1] - with pytest.raises(ValueError, match="Err.GENERATOR_REF_HAS_NO_GENERATOR"): + with pytest.raises(ValueError, match=re.escape(Err.GENERATOR_REF_HAS_NO_GENERATOR.name)): await abc.lookup_block_generators(blocks[10].header_hash, {uint32(3), uint32(10)}) # block 1 exists in the chain, but it doesn't have a generator - with pytest.raises(ValueError, match="Err.GENERATOR_REF_HAS_NO_GENERATOR"): + with pytest.raises(ValueError, match=re.escape(Err.GENERATOR_REF_HAS_NO_GENERATOR.name)): await abc.lookup_block_generators(blocks[1].header_hash, {uint32(1)}) expect_gen = blocks[2].transactions_generator diff --git a/chia/_tests/blockchain/test_blockchain.py b/chia/_tests/blockchain/test_blockchain.py index 0b8d639a6d87..e0de389d4d3a 100644 --- a/chia/_tests/blockchain/test_blockchain.py +++ b/chia/_tests/blockchain/test_blockchain.py @@ -4,6 +4,7 @@ import copy import logging import random +import re import time from collections.abc import AsyncIterator, Awaitable from contextlib import asynccontextmanager @@ -4158,24 +4159,24 @@ async def test_lookup_block_generators( # make sure we don't cross the forks if clear_cache: b.clean_block_records() - with pytest.raises(ValueError, match="Err.GENERATOR_REF_HAS_NO_GENERATOR"): + with pytest.raises(ValueError, match=re.escape(Err.GENERATOR_REF_HAS_NO_GENERATOR.name)): await b.lookup_block_generators(peak_1.prev_header_hash, {uint32(516)}) if clear_cache: b.clean_block_records() - with pytest.raises(ValueError, match="Err.GENERATOR_REF_HAS_NO_GENERATOR"): + with pytest.raises(ValueError, 
match=re.escape(Err.GENERATOR_REF_HAS_NO_GENERATOR.name)): await b.lookup_block_generators(peak_2.prev_header_hash, {uint32(503)}) # make sure we fail when looking up a non-transaction block from the main # chain, regardless of which chain we start at if clear_cache: b.clean_block_records() - with pytest.raises(ValueError, match="Err.GENERATOR_REF_HAS_NO_GENERATOR"): + with pytest.raises(ValueError, match=re.escape(Err.GENERATOR_REF_HAS_NO_GENERATOR.name)): await b.lookup_block_generators(peak_1.prev_header_hash, {uint32(8)}) if clear_cache: b.clean_block_records() - with pytest.raises(ValueError, match="Err.GENERATOR_REF_HAS_NO_GENERATOR"): + with pytest.raises(ValueError, match=re.escape(Err.GENERATOR_REF_HAS_NO_GENERATOR.name)): await b.lookup_block_generators(peak_2.prev_header_hash, {uint32(8)}) # if we try to look up generators starting from a disconnected block, we @@ -4336,6 +4337,7 @@ async def test_include_spends_same_as_parent( 0, 0, 0, + b"", ) ], 0, @@ -4350,6 +4352,9 @@ async def test_include_spends_same_as_parent( True, 0, 0, + 0, + 0, + 0, ) # Now let's run the test test_setup.fork_info.include_spends(conds, test_setup.test_block, test_setup.test_block.header_hash) diff --git a/chia/_tests/clvm/coin_store.py b/chia/_tests/clvm/coin_store.py index 8fdeac2217aa..110434de5484 100644 --- a/chia/_tests/clvm/coin_store.py +++ b/chia/_tests/clvm/coin_store.py @@ -73,7 +73,7 @@ def validate_spend_bundle(self, spend_bundle: SpendBundle, now: CoinTimestamp, m assert result.conds is not None for spend in result.conds.spends: for puzzle_hash, amount, hint in spend.create_coin: - coin = Coin(bytes32(spend.coin_id), bytes32(puzzle_hash), uint64(amount)) + coin = Coin(spend.coin_id, puzzle_hash, uint64(amount)) name = coin.name() ephemeral_db[name] = CoinRecord( coin, diff --git a/chia/_tests/cmds/cmd_test_utils.py b/chia/_tests/cmds/cmd_test_utils.py index 44672b148cce..8234fb838f03 100644 --- a/chia/_tests/cmds/cmd_test_utils.py +++ b/chia/_tests/cmds/cmd_test_utils.py @@ -7,7 +7,7 @@ from pathlib import Path from typing import Any, Optional, cast -from chia_rs import BlockRecord, Coin, G2Element +from chia_rs import BlockRecord, Coin, G1Element, G2Element from chia_rs.sized_bytes import bytes32 from chia_rs.sized_ints import uint8, uint16, uint32, uint64 @@ -35,6 +35,8 @@ from chia.wallet.util.wallet_types import WalletType from chia.wallet.wallet_request_types import ( GetSyncStatusResponse, + GetTransaction, + GetTransactionResponse, GetWallets, GetWalletsResponse, NFTCalculateRoyalties, @@ -42,6 +44,10 @@ NFTGetInfo, NFTGetInfoResponse, SendTransactionMultiResponse, + SignMessageByAddress, + SignMessageByAddressResponse, + SignMessageByID, + SignMessageByIDResponse, WalletInfoResponse, ) from chia.wallet.wallet_rpc_client import WalletRpcClient @@ -115,45 +121,67 @@ async def get_wallets(self, request: GetWallets) -> GetWalletsResponse: raise ValueError(f"Invalid fingerprint: {self.fingerprint}") return GetWalletsResponse([WalletInfoResponse(id=uint32(1), name="", type=uint8(w_type.value), data="")]) - async def get_transaction(self, transaction_id: bytes32) -> TransactionRecord: - self.add_to_log("get_transaction", (transaction_id,)) - return TransactionRecord( - confirmed_at_height=uint32(1), - created_at_time=uint64(1234), - to_puzzle_hash=bytes32([1] * 32), - amount=uint64(12345678), - fee_amount=uint64(1234567), - confirmed=False, - sent=uint32(0), - spend_bundle=WalletSpendBundle([], G2Element()), - additions=[Coin(bytes32([1] * 32), bytes32([2] * 32), uint64(12345678))], - 
removals=[Coin(bytes32([2] * 32), bytes32([4] * 32), uint64(12345678))], - wallet_id=uint32(1), - sent_to=[("aaaaa", uint8(1), None)], - trade_id=None, - type=uint32(TransactionType.OUTGOING_TX.value), - name=bytes32([2] * 32), - memos={bytes32([3] * 32): [bytes([4] * 32)]}, - valid_times=ConditionValidTimes(), + async def get_transaction(self, request: GetTransaction) -> GetTransactionResponse: + self.add_to_log("get_transaction", (request,)) + return GetTransactionResponse( + TransactionRecord( + confirmed_at_height=uint32(1), + created_at_time=uint64(1234), + to_puzzle_hash=bytes32([1] * 32), + to_address=encode_puzzle_hash(bytes32([1] * 32), "xch"), + amount=uint64(12345678), + fee_amount=uint64(1234567), + confirmed=False, + sent=uint32(0), + spend_bundle=WalletSpendBundle([], G2Element()), + additions=[Coin(bytes32([1] * 32), bytes32([2] * 32), uint64(12345678))], + removals=[Coin(bytes32([2] * 32), bytes32([4] * 32), uint64(12345678))], + wallet_id=uint32(1), + sent_to=[("aaaaa", uint8(1), None)], + trade_id=None, + type=uint32(TransactionType.OUTGOING_TX.value), + name=bytes32([2] * 32), + memos={bytes32([3] * 32): [bytes([4] * 32)]}, + valid_times=ConditionValidTimes(), + ), + bytes32([2] * 32), ) async def get_cat_name(self, wallet_id: int) -> str: self.add_to_log("get_cat_name", (wallet_id,)) return "test" + str(wallet_id) - async def sign_message_by_address(self, address: str, message: str) -> tuple[str, str, str]: - self.add_to_log("sign_message_by_address", (address, message)) - pubkey = bytes([3] * 48).hex() - signature = bytes([6] * 576).hex() + async def sign_message_by_address(self, request: SignMessageByAddress) -> SignMessageByAddressResponse: + self.add_to_log("sign_message_by_address", (request.address, request.message)) + pubkey = G1Element.from_bytes( + bytes.fromhex( + "b5acf3599bc5fa5da1c00f6cc3d5bcf1560def67778b7f50a8c373a83f78761505b6250ab776e38a292e26628009aec4" + ) + ) + signature = G2Element.from_bytes( + bytes.fromhex( + "c00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000" + "000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000" + ) + ) signing_mode = SigningMode.CHIP_0002.value - return pubkey, signature, signing_mode + return SignMessageByAddressResponse(pubkey, signature, signing_mode) - async def sign_message_by_id(self, id: str, message: str) -> tuple[str, str, str]: - self.add_to_log("sign_message_by_id", (id, message)) - pubkey = bytes([4] * 48).hex() - signature = bytes([7] * 576).hex() + async def sign_message_by_id(self, request: SignMessageByID) -> SignMessageByIDResponse: + self.add_to_log("sign_message_by_id", (request.id, request.message)) + pubkey = G1Element.from_bytes( + bytes.fromhex( + "a9e652cb551d5978a9ee4b7aa52a4e826078a54b08a3d903c38611cb8a804a9a29c926e4f8549314a079e04ecde10cc1" + ) + ) + signature = G2Element.from_bytes( + bytes.fromhex( + "c00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000" + "000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000" + ) + ) signing_mode = SigningMode.CHIP_0002.value - return pubkey, signature, signing_mode + return SignMessageByIDResponse(pubkey, signature, bytes32.zeros, signing_mode) async def cat_asset_id_to_name(self, asset_id: bytes32) -> Optional[tuple[Optional[uint32], str]]: """ @@ -244,14 +272,6 @@ async def get_spendable_coins( unconfirmed_additions = [Coin(bytes32([7] * 32), bytes32([8] * 32), uint64(1234580000))] 
return confirmed_records, unconfirmed_removals, unconfirmed_additions - async def get_next_address(self, wallet_id: int, new_address: bool) -> str: - self.add_to_log("get_next_address", (wallet_id, new_address)) - addr = encode_puzzle_hash(bytes32([self.wallet_index] * 32), "xch") - self.wallet_index += 1 - if self.wallet_index > 254: - self.wallet_index = 1 - return addr - async def send_transaction_multi( self, wallet_id: int, @@ -271,6 +291,7 @@ async def send_transaction_multi( confirmed_at_height=uint32(1), created_at_time=uint64(1234), to_puzzle_hash=bytes32([1] * 32), + to_address=encode_puzzle_hash(bytes32([1] * 32), "xch"), amount=uint64(12345678), fee_amount=uint64(1234567), confirmed=False, diff --git a/chia/_tests/cmds/test_farm_cmd.py b/chia/_tests/cmds/test_farm_cmd.py index 968152331e7b..e72cb86bc927 100644 --- a/chia/_tests/cmds/test_farm_cmd.py +++ b/chia/_tests/cmds/test_farm_cmd.py @@ -8,10 +8,12 @@ from chia._tests.util.time_out_assert import time_out_assert from chia.cmds.farm_funcs import summary from chia.farmer.farmer import Farmer +from chia.farmer.farmer_service import FarmerService from chia.harvester.harvester import Harvester -from chia.server.aliases import FarmerService, HarvesterService, WalletService +from chia.harvester.harvester_service import HarvesterService from chia.simulator.block_tools import BlockTools from chia.simulator.start_simulator import SimulatorFullNodeService +from chia.wallet.wallet_service import WalletService @pytest.mark.anyio @@ -46,7 +48,15 @@ async def receiver_available() -> bool: wallet_rpc_port = wallet_service.rpc_server.webserver.listen_port farmer_rpc_port = farmer_service.rpc_server.webserver.listen_port - await summary(full_node_rpc_port, wallet_rpc_port, None, farmer_rpc_port, bt.root_path) + # Test with include_pool_rewards=False (original test) + await summary( + rpc_port=full_node_rpc_port, + wallet_rpc_port=wallet_rpc_port, + harvester_rpc_port=None, + farmer_rpc_port=farmer_rpc_port, + include_pool_rewards=False, + root_path=bt.root_path, + ) captured = capsys.readouterr() match = re.search(r"^.+(Farming status:.+)$", captured.out, re.DOTALL) @@ -64,3 +74,47 @@ async def receiver_available() -> bool: assert "e (effective)" in lines[8] assert "Estimated network space:" in lines[9] assert "Expected time to win:" in lines[10] + + # Test with include_pool_rewards=True + await summary( + rpc_port=full_node_rpc_port, + wallet_rpc_port=wallet_rpc_port, + harvester_rpc_port=None, + farmer_rpc_port=farmer_rpc_port, + include_pool_rewards=True, + root_path=bt.root_path, + ) + + captured = capsys.readouterr() + match = re.search(r"Farming status:.*", captured.out, re.DOTALL) + assert match, "no 'Farming status:' line" + output = match.group(0).strip() + lines = [line.strip() for line in output.splitlines()] + + # always check these first six lines + assert lines[0].startswith("Farming status:") + assert lines[1].startswith("Total chia farmed:") + assert lines[2].startswith("User transaction fees:") + assert lines[3].startswith("Farmer rewards:") + assert lines[4].startswith("Pool rewards:") + assert lines[5].startswith("Total rewards:") + + # decide where the harvester section starts + if "Current/Last height farmed:" in output: + # we saw the height-farmed block, so it occupies lines[6-8] + assert lines[6].startswith("Current/Last height farmed:") + assert lines[7].startswith("Blocks since last farmed:") + assert lines[8].startswith("Time since last farmed:") + harvester_idx = 9 + else: + # no height block, so harvester begins 
at line 6 + harvester_idx = 6 + + # now the harvester lines + assert lines[harvester_idx] == "Local Harvester" + assert "plots of size" in lines[harvester_idx + 1] + assert lines[harvester_idx + 2].startswith("Plot count for all harvesters:") + assert lines[harvester_idx + 3].startswith("Total size of plots:") + assert lines[harvester_idx + 4].startswith("Estimated network space:") + assert lines[harvester_idx + 5].startswith("Expected time to win:") + assert lines[harvester_idx + 6].startswith("Note:") diff --git a/chia/_tests/cmds/wallet/test_consts.py b/chia/_tests/cmds/wallet/test_consts.py index c5c06e406ca6..42cf60c50f01 100644 --- a/chia/_tests/cmds/wallet/test_consts.py +++ b/chia/_tests/cmds/wallet/test_consts.py @@ -4,6 +4,7 @@ from chia_rs.sized_bytes import bytes32 from chia_rs.sized_ints import uint32, uint64 +from chia.util.bech32m import encode_puzzle_hash from chia.wallet.conditions import ConditionValidTimes from chia.wallet.signer_protocol import KeyHints, SigningInstructions, TransactionInfo, UnsignedTransaction from chia.wallet.transaction_record import TransactionRecord @@ -27,6 +28,7 @@ def get_bytes32(bytes_index: int) -> bytes32: confirmed_at_height=uint32(1), created_at_time=uint64(1234), to_puzzle_hash=get_bytes32(1), + to_address=encode_puzzle_hash(get_bytes32(1), "xch"), amount=uint64(12345678), fee_amount=uint64(1234567), confirmed=False, diff --git a/chia/_tests/cmds/wallet/test_did.py b/chia/_tests/cmds/wallet/test_did.py index 4fefab7e2fd6..5144cde7f71b 100644 --- a/chia/_tests/cmds/wallet/test_did.py +++ b/chia/_tests/cmds/wallet/test_did.py @@ -13,7 +13,6 @@ from chia.types.blockchain_format.program import NIL, Program from chia.types.signing_mode import SigningMode from chia.util.bech32m import encode_puzzle_hash -from chia.util.config import load_config from chia.wallet.conditions import Condition, ConditionValidTimes, CreateCoinAnnouncement, CreatePuzzleAnnouncement from chia.wallet.did_wallet.did_info import did_recovery_is_nil from chia.wallet.util.curry_and_treehash import NIL_TREEHASH @@ -117,8 +116,8 @@ def test_did_sign_message(capsys: object, get_test_cli_clients: tuple[TestRpcCli # these are various things that should be in the output assert_list = [ f"Message: {message.hex()}", - f"Public Key: {bytes([4] * 48).hex()}", - f"Signature: {bytes([7] * 576).hex()}", + "Public Key: a9e652cb551d5978a9ee4b7aa52a4e826078a54b08a3d903c38611cb8a804a9a29c926e4f8549314a079e04ecde10cc1", + "Signature: c0" + "00" * (42 - 1), f"Signing Mode: {SigningMode.CHIP_0002.value}", ] run_cli_command_and_assert(capsys, root_dir, [*command_args, f"-i{did_id}"], assert_list) @@ -421,14 +420,10 @@ async def did_transfer_did( "150", ] # these are various things that should be in the output - config = load_config( - root_dir, - "config.yaml", - ) assert_list = [ f"Successfully transferred DID to {t_address}", f"Transaction ID: {get_bytes32(2).hex()}", - f"Transaction: {STD_TX.to_json_dict_convenience(config)}", + f"Transaction: {STD_TX.to_json_dict()}", ] run_cli_command_and_assert(capsys, root_dir, command_args, assert_list) expected_calls: logType = { diff --git a/chia/_tests/cmds/wallet/test_nft.py b/chia/_tests/cmds/wallet/test_nft.py index 43f10363f273..1ff5fe62834e 100644 --- a/chia/_tests/cmds/wallet/test_nft.py +++ b/chia/_tests/cmds/wallet/test_nft.py @@ -64,19 +64,19 @@ def test_nft_sign_message(capsys: object, get_test_cli_clients: tuple[TestRpcCli inst_rpc_client = TestWalletRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client - did_id = 
encode_puzzle_hash(get_bytes32(1), "nft") + nft_id = encode_puzzle_hash(get_bytes32(1), "nft") message = b"hello nft world!!" - command_args = ["wallet", "did", "sign_message", FINGERPRINT_ARG, f"-m{message.hex()}"] + command_args = ["wallet", "nft", "sign_message", FINGERPRINT_ARG, f"-m{message.hex()}"] # these are various things that should be in the output assert_list = [ f"Message: {message.hex()}", - f"Public Key: {bytes([4] * 48).hex()}", - f"Signature: {bytes([7] * 576).hex()}", + "Public Key: a9e652cb551d5978a9ee4b7aa52a4e826078a54b08a3d903c38611cb8a804a9a29c926e4f8549314a079e04ecde10cc1", + "Signature: c0" + "00" * (42 - 1), f"Signing Mode: {SigningMode.CHIP_0002.value}", ] - run_cli_command_and_assert(capsys, root_dir, [*command_args, f"-i{did_id}"], assert_list) + run_cli_command_and_assert(capsys, root_dir, [*command_args, f"-i{nft_id}"], assert_list) expected_calls: logType = { - "sign_message_by_id": [(did_id, message.hex())], # xch std + "sign_message_by_id": [(nft_id, message.hex())], # xch std } test_rpc_clients.wallet_rpc_client.check_log(expected_calls) diff --git a/chia/_tests/cmds/wallet/test_notifications.py b/chia/_tests/cmds/wallet/test_notifications.py index 40bf0afde1f4..cde0ca62b0c1 100644 --- a/chia/_tests/cmds/wallet/test_notifications.py +++ b/chia/_tests/cmds/wallet/test_notifications.py @@ -1,7 +1,7 @@ from __future__ import annotations from pathlib import Path -from typing import Optional, cast +from typing import cast from chia_rs.sized_bytes import bytes32 from chia_rs.sized_ints import uint32, uint64 @@ -12,7 +12,7 @@ from chia.wallet.conditions import ConditionValidTimes from chia.wallet.notification_store import Notification from chia.wallet.transaction_record import TransactionRecord -from chia.wallet.wallet_request_types import GetNotifications, GetNotificationsResponse +from chia.wallet.wallet_request_types import DeleteNotifications, GetNotifications, GetNotificationsResponse test_condition_valid_times: ConditionValidTimes = ConditionValidTimes(min_time=uint64(100), max_time=uint64(150)) @@ -111,15 +111,31 @@ def test_notifications_delete(capsys: object, get_test_cli_clients: tuple[TestRp # set RPC Client class NotificationsDeleteRpcClient(TestWalletRpcClient): - async def delete_notifications(self, ids: Optional[list[bytes32]] = None) -> bool: - self.add_to_log("delete_notifications", (ids,)) - return True + async def delete_notifications(self, request: DeleteNotifications) -> None: + self.add_to_log("delete_notifications", (request.ids,)) inst_rpc_client = NotificationsDeleteRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client + # Try all first command_args = ["wallet", "notifications", "delete", FINGERPRINT_ARG, "--all"] # these are various things that should be in the output - assert_list = ["Success: True"] + assert_list = ["Success!"] run_cli_command_and_assert(capsys, root_dir, command_args, assert_list) expected_calls: logType = {"delete_notifications": [(None,)]} test_rpc_clients.wallet_rpc_client.check_log(expected_calls) + # Next try specifying IDs + command_args = [ + "wallet", + "notifications", + "delete", + FINGERPRINT_ARG, + "--id", + bytes32.zeros.hex(), + "--id", + bytes32.zeros.hex(), + ] + # these are various things that should be in the output + assert_list = ["Success!"] + run_cli_command_and_assert(capsys, root_dir, command_args, assert_list) + expected_calls = {"delete_notifications": [([bytes32.zeros, bytes32.zeros],)]} + test_rpc_clients.wallet_rpc_client.check_log(expected_calls) diff --git 
a/chia/_tests/cmds/wallet/test_wallet.py b/chia/_tests/cmds/wallet/test_wallet.py index 38a7f2e35ba8..073fd806928d 100644 --- a/chia/_tests/cmds/wallet/test_wallet.py +++ b/chia/_tests/cmds/wallet/test_wallet.py @@ -9,7 +9,7 @@ import pytest from chia_rs import Coin, G2Element from chia_rs.sized_bytes import bytes32 -from chia_rs.sized_ints import uint8, uint16, uint32, uint64 +from chia_rs.sized_ints import uint8, uint16, uint32, uint64, uint128 from click.testing import CliRunner from chia._tests.cmds.cmd_test_utils import TestRpcClients, TestWalletRpcClient, logType, run_cli_command_and_assert @@ -29,31 +29,48 @@ from chia.types.blockchain_format.program import Program from chia.types.signing_mode import SigningMode from chia.util.bech32m import encode_puzzle_hash -from chia.wallet.conditions import ConditionValidTimes +from chia.wallet.conditions import Condition, ConditionValidTimes from chia.wallet.trade_record import TradeRecord from chia.wallet.trading.offer import Offer from chia.wallet.trading.trade_status import TradeStatus from chia.wallet.transaction_record import TransactionRecord from chia.wallet.transaction_sorting import SortKey -from chia.wallet.util.query_filter import HashFilter, TransactionTypeFilter +from chia.wallet.util.query_filter import HashFilter from chia.wallet.util.transaction_type import TransactionType from chia.wallet.util.tx_config import DEFAULT_TX_CONFIG, TXConfig from chia.wallet.util.wallet_types import WalletType from chia.wallet.wallet_coin_store import GetCoinRecords from chia.wallet.wallet_request_types import ( + BalanceResponse, CancelOfferResponse, CATSpendResponse, + ClawbackPuzzleDecoratorOverride, CreateOfferForIDsResponse, + DeleteUnconfirmedTransactions, + ExtendDerivationIndex, + ExtendDerivationIndexResponse, FungibleAsset, + GetCurrentDerivationIndexResponse, GetHeightInfoResponse, + GetNextAddress, + GetNextAddressResponse, + GetTransaction, + GetTransactions, + GetTransactionsResponse, + GetWalletBalance, + GetWalletBalanceResponse, GetWallets, GetWalletsResponse, NFTCalculateRoyalties, NFTGetWalletDID, NFTGetWalletDIDResponse, RoyaltyAsset, + SendTransaction, SendTransactionResponse, + SpendClawbackCoins, + SpendClawbackCoinsResponse, TakeOfferResponse, + TransactionRecordWithMetadata, WalletInfoResponse, ) from chia.wallet.wallet_spend_bundle import WalletSpendBundle @@ -97,9 +114,9 @@ def test_get_transaction(capsys: object, get_test_cli_clients: tuple[TestRpcClie "get_wallets": [(GetWallets(type=None, include_data=True),)] * 3, "get_cat_name": [(1,)], "get_transaction": [ - (bytes32.from_hexstr(bytes32_hexstr),), - (bytes32.from_hexstr(bytes32_hexstr),), - (bytes32.from_hexstr(bytes32_hexstr),), + (GetTransaction(bytes32.from_hexstr(bytes32_hexstr)),), + (GetTransaction(bytes32.from_hexstr(bytes32_hexstr)),), + (GetTransaction(bytes32.from_hexstr(bytes32_hexstr)),), ], } test_rpc_clients.wallet_rpc_client.check_log(expected_calls) @@ -110,27 +127,17 @@ def test_get_transactions(capsys: object, get_test_cli_clients: tuple[TestRpcCli # set RPC Client class GetTransactionsWalletRpcClient(TestWalletRpcClient): - async def get_transactions( - self, - wallet_id: int, - start: int, - end: int, - sort_key: Optional[SortKey] = None, - reverse: bool = False, - to_address: Optional[str] = None, - type_filter: Optional[TransactionTypeFilter] = None, - confirmed: Optional[bool] = None, - ) -> list[TransactionRecord]: - self.add_to_log( - "get_transactions", (wallet_id, start, end, sort_key, reverse, to_address, type_filter, confirmed) - ) + 
async def get_transactions(self, request: GetTransactions) -> GetTransactionsResponse: + self.add_to_log("get_transactions", (request,)) l_tx_rec = [] - for i in range(start, end): - t_type = TransactionType.INCOMING_CLAWBACK_SEND if i == end - 1 else TransactionType.INCOMING_TX - tx_rec = TransactionRecord( + assert request.start is not None and request.end is not None + for i in range(request.start, request.end): + t_type = TransactionType.INCOMING_CLAWBACK_SEND if i == request.end - 1 else TransactionType.INCOMING_TX + tx_rec = TransactionRecordWithMetadata( confirmed_at_height=uint32(1 + i), created_at_time=uint64(1234 + i), to_puzzle_hash=bytes32([1 + i] * 32), + to_address=encode_puzzle_hash(bytes32([1 + i] * 32), "xch"), amount=uint64(12345678 + i), fee_amount=uint64(1234567 + i), confirmed=False, @@ -148,7 +155,7 @@ async def get_transactions( ) l_tx_rec.append(tx_rec) - return l_tx_rec + return GetTransactionsResponse(l_tx_rec, request.wallet_id) async def get_coin_records(self, request: GetCoinRecords) -> dict[str, Any]: self.add_to_log("get_coin_records", (request,)) @@ -197,8 +204,8 @@ async def get_coin_records(self, request: GetCoinRecords) -> dict[str, Any]: expected_calls: logType = { "get_wallets": [(GetWallets(type=None, include_data=True),)] * 2, "get_transactions": [ - (1, 2, 4, SortKey.RELEVANCE, True, None, None, None), - (1, 2, 4, SortKey.RELEVANCE, True, None, None, None), + (GetTransactions(uint32(1), uint16(2), uint16(4), SortKey.RELEVANCE.name, True, None, None, None),), + (GetTransactions(uint32(1), uint16(2), uint16(4), SortKey.RELEVANCE.name, True, None, None, None),), ], "get_coin_records": [ (GetCoinRecords(coin_id_filter=HashFilter.include([expected_coin_id])),), @@ -242,19 +249,23 @@ async def get_height_info(self) -> GetHeightInfoResponse: self.add_to_log("get_height_info", ()) return GetHeightInfoResponse(uint32(10)) - async def get_wallet_balance(self, wallet_id: int) -> dict[str, uint64]: - self.add_to_log("get_wallet_balance", (wallet_id,)) - if wallet_id == 1: - amount = uint64(1000000000) - elif wallet_id == 2: - amount = uint64(2000000000) + async def get_wallet_balance(self, request: GetWalletBalance) -> GetWalletBalanceResponse: + self.add_to_log("get_wallet_balance", (request,)) + if request.wallet_id == 1: + amount = uint128(1000000000) + elif request.wallet_id == 2: + amount = uint128(2000000000) else: - amount = uint64(1) - return { - "confirmed_wallet_balance": amount, - "spendable_balance": amount, - "unconfirmed_wallet_balance": uint64(0), - } + amount = uint128(1) + return GetWalletBalanceResponse( + BalanceResponse( + wallet_id=request.wallet_id, + wallet_type=uint8(0), # Doesn't matter + confirmed_wallet_balance=amount, + spendable_balance=amount, + unconfirmed_wallet_balance=uint128(0), + ) + ) async def get_nft_wallet_did(self, request: NFTGetWalletDID) -> NFTGetWalletDIDResponse: self.add_to_log("get_nft_wallet_did", (request.wallet_id,)) @@ -307,7 +318,12 @@ async def get_connections( ], "get_sync_status": [(), ()], "get_height_info": [(), ()], - "get_wallet_balance": [(1,), (2,), (3,), (2,)], + "get_wallet_balance": [ + (GetWalletBalance(wallet_id=uint32(1)),), + (GetWalletBalance(wallet_id=uint32(2)),), + (GetWalletBalance(wallet_id=uint32(3)),), + (GetWalletBalance(wallet_id=uint32(2)),), + ], "get_nft_wallet_did": [(3,)], "get_connections": [(None,), (None,)], } @@ -321,25 +337,31 @@ def test_send(capsys: object, get_test_cli_clients: tuple[TestRpcClients, Path]) class SendWalletRpcClient(TestWalletRpcClient): async def 
send_transaction( self, - wallet_id: int, - amount: uint64, - address: str, + request: SendTransaction, tx_config: TXConfig, - fee: uint64 = uint64(0), - memos: Optional[list[str]] = None, - puzzle_decorator_override: Optional[list[dict[str, Union[str, int, bool]]]] = None, - push: bool = True, + extra_conditions: tuple[Condition, ...] = tuple(), timelock_info: ConditionValidTimes = ConditionValidTimes(), ) -> SendTransactionResponse: self.add_to_log( "send_transaction", - (wallet_id, amount, address, tx_config, fee, memos, puzzle_decorator_override, push, timelock_info), + ( + request.wallet_id, + request.amount, + request.address, + tx_config, + request.fee, + request.memos, + request.puzzle_decorator, + request.push, + timelock_info, + ), ) name = get_bytes32(2) tx_rec = TransactionRecord( confirmed_at_height=uint32(1), created_at_time=uint64(1234), to_puzzle_hash=get_bytes32(1), + to_address=encode_puzzle_hash(get_bytes32(1), "xch"), amount=uint64(12345678), fee_amount=uint64(1234567), confirmed=False, @@ -450,7 +472,7 @@ async def cat_spend( ), 500000000000, ["0x6262626262626262626262626262626262626262626262626262626262626262"], - [{"decorator": "CLAWBACK", "clawback_timelock": 60}], + [ClawbackPuzzleDecoratorOverride(decorator="CLAWBACK", clawback_timelock=uint64(60))], True, test_condition_valid_times, ) @@ -476,7 +498,7 @@ async def cat_spend( test_condition_valid_times, ) ], - "get_transaction": [(get_bytes32(2),), (get_bytes32(2),)], + "get_transaction": [(GetTransaction(get_bytes32(2)),), (GetTransaction(get_bytes32(2)),)], } test_rpc_clients.wallet_rpc_client.check_log(expected_calls) @@ -486,11 +508,11 @@ def test_get_address(capsys: object, get_test_cli_clients: tuple[TestRpcClients, # set RPC Client class GetAddressWalletRpcClient(TestWalletRpcClient): - async def get_next_address(self, wallet_id: int, new_address: bool) -> str: - self.add_to_log("get_next_address", (wallet_id, new_address)) - if new_address: - return encode_puzzle_hash(get_bytes32(3), "xch") - return encode_puzzle_hash(get_bytes32(4), "xch") + async def get_next_address(self, request: GetNextAddress) -> GetNextAddressResponse: + self.add_to_log("get_next_address", (request.wallet_id, request.new_address)) + if request.new_address: + return GetNextAddressResponse(request.wallet_id, encode_puzzle_hash(get_bytes32(3), "xch")) + return GetNextAddressResponse(request.wallet_id, encode_puzzle_hash(get_bytes32(4), "xch")) inst_rpc_client = GetAddressWalletRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client @@ -519,30 +541,25 @@ def test_clawback(capsys: object, get_test_cli_clients: tuple[TestRpcClients, Pa class ClawbackWalletRpcClient(TestWalletRpcClient): async def spend_clawback_coins( self, - coin_ids: list[bytes32], - fee: int = 0, - force: bool = False, - push: bool = True, + request: SpendClawbackCoins, + tx_config: TXConfig, + extra_conditions: tuple[Condition, ...] 
= tuple(), timelock_info: ConditionValidTimes = ConditionValidTimes(), - ) -> dict[str, Any]: - self.add_to_log("spend_clawback_coins", (coin_ids, fee, force, push, timelock_info)) - tx_hex_list = [get_bytes32(6).hex(), get_bytes32(7).hex(), get_bytes32(8).hex()] - return { - "transaction_ids": tx_hex_list, - "transactions": [ - STD_TX.to_json_dict_convenience( - { - "selected_network": "mainnet", - "network_overrides": {"config": {"mainnet": {"address_prefix": "xch"}}}, - } - ) - ], - } + ) -> SpendClawbackCoinsResponse: + self.add_to_log( + "spend_clawback_coins", (request.coin_ids, request.fee, request.force, request.push, timelock_info) + ) + tx_list = [get_bytes32(6), get_bytes32(7), get_bytes32(8)] + return SpendClawbackCoinsResponse( + transaction_ids=tx_list, + transactions=[STD_TX], + unsigned_transactions=[STD_UTX], + ) inst_rpc_client = ClawbackWalletRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client tx_ids = [get_bytes32(3), get_bytes32(4), get_bytes32(5)] - r_tx_ids_hex = [get_bytes32(6).hex(), get_bytes32(7).hex(), get_bytes32(8).hex()] + r_tx_ids_hex = ["0x" + get_bytes32(6).hex(), "0x" + get_bytes32(7).hex(), "0x" + get_bytes32(8).hex()] command_args = [ "wallet", "clawback", @@ -556,7 +573,7 @@ async def spend_clawback_coins( "--expires-at", "150", ] - run_cli_command_and_assert(capsys, root_dir, command_args, ["transaction_ids", str(r_tx_ids_hex)]) + run_cli_command_and_assert(capsys, root_dir, command_args, ["transaction_ids", *r_tx_ids_hex]) # these are various things that should be in the output expected_calls: logType = { "spend_clawback_coins": [(tx_ids, 500000000000, False, True, test_condition_valid_times)], @@ -569,8 +586,8 @@ def test_del_unconfirmed_tx(capsys: object, get_test_cli_clients: tuple[TestRpcC # set RPC Client class UnconfirmedTxRpcClient(TestWalletRpcClient): - async def delete_unconfirmed_transactions(self, wallet_id: int) -> None: - self.add_to_log("delete_unconfirmed_transactions", (wallet_id,)) + async def delete_unconfirmed_transactions(self, request: DeleteUnconfirmedTransactions) -> None: + self.add_to_log("delete_unconfirmed_transactions", (request.wallet_id,)) inst_rpc_client = UnconfirmedTxRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client @@ -594,9 +611,9 @@ def test_get_derivation_index(capsys: object, get_test_cli_clients: tuple[TestRp # set RPC Client class GetDerivationIndexRpcClient(TestWalletRpcClient): - async def get_current_derivation_index(self) -> str: + async def get_current_derivation_index(self) -> GetCurrentDerivationIndexResponse: self.add_to_log("get_current_derivation_index", ()) - return str(520) + return GetCurrentDerivationIndexResponse(uint32(520)) inst_rpc_client = GetDerivationIndexRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client @@ -625,8 +642,9 @@ def test_sign_message(capsys: object, get_test_cli_clients: tuple[TestRpcClients # these are various things that should be in the output assert_list = [ f"Message: {message.hex()}", - f"Public Key: {bytes([3] * 48).hex()}", - f"Signature: {bytes([6] * 576).hex()}", + "Public Key: b5acf3599bc5fa5da1c00f6cc3d5bcf1560def67778b7f50a8c373a83f78761505b6250ab776e38a292e26628009aec4", + "Signature: c00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000" + "000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", f"Signing Mode: {SigningMode.CHIP_0002.value}", ] run_cli_command_and_assert(capsys, root_dir, [*command_args, f"-a{xch_addr}"], assert_list) 
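# editor's sketch (not part of the patch): the sign-message tests above replace
# the old placeholder assertion f"Signature: {bytes([6] * 576).hex()}" with a
# literal hex string. That literal is the BLS12-381 compressed encoding of the
# G2 identity ("infinity") point, which the default G2Element() constructor in
# chia_rs produces, per the cmd_test_utils.py hunks earlier in this patch. A
# minimal check, assuming bytes(G2Element()) yields the compressed form:
from chia_rs import G2Element

sig = G2Element()  # identity element, used here as a placeholder signature
raw = bytes(sig)
assert len(raw) == 96  # compressed G2 points serialize to 96 bytes
# header byte 0xc0 = "compressed" flag (0x80) | "point at infinity" flag (0x40);
# the remaining 95 bytes are zero -- hence the "c0" + 190 zero hex chars in the
# full literals above, and the shorter 'Signature: c0' + "00" * (42 - 1) prefix
# match used by the DID/NFT CLI tests
assert raw.hex() == "c0" + "00" * 95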
@@ -641,9 +659,9 @@ def test_update_derivation_index(capsys: object, get_test_cli_clients: tuple[Tes # set RPC Client class UpdateDerivationIndexRpcClient(TestWalletRpcClient): - async def extend_derivation_index(self, index: int) -> str: - self.add_to_log("extend_derivation_index", (index,)) - return str(index) + async def extend_derivation_index(self, request: ExtendDerivationIndex) -> ExtendDerivationIndexResponse: + self.add_to_log("extend_derivation_index", (request.index,)) + return ExtendDerivationIndexResponse(request.index) inst_rpc_client = UpdateDerivationIndexRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client diff --git a/chia/_tests/conftest.py b/chia/_tests/conftest.py index c01042c453c1..8ce767056c49 100644 --- a/chia/_tests/conftest.py +++ b/chia/_tests/conftest.py @@ -50,19 +50,14 @@ from chia._tests.util.spend_sim import CostLogger from chia._tests.util.time_out_assert import time_out_assert from chia.farmer.farmer_rpc_client import FarmerRpcClient +from chia.farmer.farmer_service import FarmerService from chia.full_node.full_node_api import FullNodeAPI +from chia.full_node.full_node_service import FullNodeService from chia.harvester.harvester_rpc_client import HarvesterRpcClient +from chia.harvester.harvester_service import HarvesterService +from chia.seeder.crawler_service import CrawlerService from chia.seeder.dns_server import DNSServer -from chia.server.aliases import ( - CrawlerService, - FarmerService, - FullNodeService, - HarvesterService, - TimelordService, - WalletService, -) from chia.server.server import ChiaServer -from chia.server.start_service import Service from chia.simulator.full_node_simulator import FullNodeSimulator from chia.simulator.setup_services import ( setup_crawler, @@ -70,11 +65,14 @@ setup_full_node, setup_introducer, setup_seeder, + setup_solver, setup_timelord, ) from chia.simulator.start_simulator import SimulatorFullNodeService from chia.simulator.wallet_tools import WalletTool -from chia.types.peer_info import PeerInfo +from chia.solver.solver_service import SolverService +from chia.timelord.timelord_service import TimelordService +from chia.types.peer_info import PeerInfo, UnresolvedPeerInfo from chia.util.config import create_default_chia_config, lock_and_load_config from chia.util.db_wrapper import generate_in_memory_db_uri from chia.util.keychain import Keychain @@ -82,6 +80,7 @@ from chia.util.task_timing import start_task_instrumentation, stop_task_instrumentation from chia.wallet.wallet_node import WalletNode from chia.wallet.wallet_rpc_client import WalletRpcClient +from chia.wallet.wallet_service import WalletService # TODO: review how this is now after other imports and before some stdlib imports... :[ # Set spawn after stdlib imports, but before other imports @@ -233,8 +232,8 @@ def blockchain_constants(consensus_mode: ConsensusMode) -> ConsensusConstants: @pytest.fixture(scope="session", name="bt") async def block_tools_fixture(get_keychain, blockchain_constants, anyio_backend) -> BlockTools: # Note that this causes a lot of CPU and disk traffic - disk, DB, ports, process creation ... 
- _shared_block_tools = await create_block_tools_async(constants=blockchain_constants, keychain=get_keychain) - return _shared_block_tools + shared_block_tools = await create_block_tools_async(constants=blockchain_constants, keychain=get_keychain) + return shared_block_tools # if you have a system that has an unusual hostname for localhost and you want @@ -790,7 +789,7 @@ async def three_nodes_two_wallets(blockchain_constants: ConsensusConstants): @pytest.fixture(scope="function") async def one_node( blockchain_constants: ConsensusConstants, -) -> AsyncIterator[tuple[list[Service], list[FullNodeSimulator], BlockTools]]: +) -> AsyncIterator[tuple[list[SimulatorFullNodeService], list[WalletService], BlockTools]]: async with setup_simulators_and_wallets_service(1, 0, blockchain_constants) as _: yield _ @@ -884,10 +883,27 @@ async def farmer_one_harvester(tmp_path: Path, get_b_tools: BlockTools) -> Async yield _ +FarmerOneHarvesterSolver = tuple[list[HarvesterService], FarmerService, SolverService, BlockTools] + + +@pytest.fixture(scope="function") +async def farmer_one_harvester_solver( + tmp_path: Path, get_b_tools: BlockTools +) -> AsyncIterator[FarmerOneHarvesterSolver]: + async with setup_farmer_multi_harvester(get_b_tools, 1, tmp_path, get_b_tools.constants, start_services=True) as ( + harvester_services, + farmer_service, + bt, + ): + farmer_peer = UnresolvedPeerInfo(bt.config["self_hostname"], farmer_service._server.get_port()) + async with setup_solver(tmp_path / "solver", bt, bt.constants, farmer_peer=farmer_peer) as solver_service: + yield harvester_services, farmer_service, solver_service, bt + + @pytest.fixture(scope="function") async def farmer_one_harvester_not_started( tmp_path: Path, get_b_tools: BlockTools -) -> AsyncIterator[tuple[list[Service], Service]]: +) -> AsyncIterator[tuple[list[HarvesterService], FarmerService, BlockTools]]: async with setup_farmer_multi_harvester(get_b_tools, 1, tmp_path, get_b_tools.constants, start_services=False) as _: yield _ @@ -895,7 +911,7 @@ async def farmer_one_harvester_not_started( @pytest.fixture(scope="function") async def farmer_two_harvester_not_started( tmp_path: Path, get_b_tools: BlockTools -) -> AsyncIterator[tuple[list[Service], Service]]: +) -> AsyncIterator[tuple[list[HarvesterService], FarmerService, BlockTools]]: async with setup_farmer_multi_harvester(get_b_tools, 2, tmp_path, get_b_tools.constants, start_services=False) as _: yield _ @@ -903,7 +919,7 @@ async def farmer_two_harvester_not_started( @pytest.fixture(scope="function") async def farmer_three_harvester_not_started( tmp_path: Path, get_b_tools: BlockTools -) -> AsyncIterator[tuple[list[Service], Service]]: +) -> AsyncIterator[tuple[list[HarvesterService], FarmerService, BlockTools]]: async with setup_farmer_multi_harvester(get_b_tools, 3, tmp_path, get_b_tools.constants, start_services=False) as _: yield _ diff --git a/chia/_tests/core/consensus/test_pot_iterations.py b/chia/_tests/core/consensus/test_pot_iterations.py index 2d2ddaeba182..7c96604b5947 100644 --- a/chia/_tests/core/consensus/test_pot_iterations.py +++ b/chia/_tests/core/consensus/test_pot_iterations.py @@ -1,5 +1,6 @@ from __future__ import annotations +import pytest from chia_rs import PlotSize from chia_rs.sized_ints import uint8, uint16, uint32, uint64, uint128 from pytest import raises @@ -83,67 +84,96 @@ def test_calculate_ip_iters(self): assert ip_iters == (sp_iters + test_constants.NUM_SP_INTERVALS_EXTRA * sp_interval_iters + required_iters) % ssi assert sp_iters > ip_iters - def 
test_win_percentage(self): + @pytest.mark.parametrize( + "height", + [ + uint32(0), + test_constants.HARD_FORK2_HEIGHT - 1, + test_constants.HARD_FORK2_HEIGHT, + test_constants.HARD_FORK2_HEIGHT + test_constants.PLOT_V1_PHASE_OUT, + test_constants.HARD_FORK2_HEIGHT + test_constants.PLOT_V1_PHASE_OUT + 1, + ], + ) + def test_win_percentage(self, height: uint32): """ Tests that the percentage of blocks won is proportional to the space of each farmer, with the assumption that all farmers have access to the same VDF speed. """ farmer_ks = { - uint8(32): 100, - uint8(33): 100, - uint8(34): 100, - uint8(35): 100, - uint8(36): 100, + PlotSize.make_v1(32): 100, + PlotSize.make_v1(33): 100, + PlotSize.make_v1(34): 100, + PlotSize.make_v1(35): 100, + PlotSize.make_v1(36): 100, + PlotSize.make_v2(28): 200, + PlotSize.make_v2(30): 200, + PlotSize.make_v2(32): 200, } - farmer_space = {k: _expected_plot_size(uint8(k)) * count for k, count in farmer_ks.items()} - total_space = sum(farmer_space.values()) - percentage_space = {k: float(sp / total_space) for k, sp in farmer_space.items()} + farmer_space = {k: _expected_plot_size(k) * count for k, count in farmer_ks.items()} wins = {k: 0 for k in farmer_ks.keys()} + + constants = test_constants.replace(DIFFICULTY_CONSTANT_FACTOR=uint128(2**25)) total_slots = 50 num_sps = 16 - sp_interval_iters = uint64(100000000 // 32) + sub_slot_iters = uint64(100000000) + sp_interval_iters = calculate_sp_interval_iters(constants, sub_slot_iters) difficulty = uint64(500000000000) - constants = test_constants.replace(DIFFICULTY_CONSTANT_FACTOR=uint128(2**25)) for slot_index in range(total_slots): total_wins_in_slot = 0 for sp_index in range(num_sps): sp_hash = std_hash(slot_index.to_bytes(4, "big") + sp_index.to_bytes(4, "big")) for k, count in farmer_ks.items(): for farmer_index in range(count): - quality = std_hash(slot_index.to_bytes(4, "big") + k.to_bytes(1, "big") + bytes(farmer_index)) - # TODO: todo_v2_plots + plot_k_val = k.size_v1 if k.size_v2 is None else k.size_v2 + assert plot_k_val is not None + quality = std_hash( + slot_index.to_bytes(4, "big") + plot_k_val.to_bytes(1, "big") + bytes(farmer_index) + ) required_iters = calculate_iterations_quality( - constants, quality, PlotSize.make_v1(k), difficulty, sp_hash, uint64(100000000), uint32(0) + constants, quality, k, difficulty, sp_hash, sub_slot_iters, height ) if required_iters < sp_interval_iters: wins[k] += 1 total_wins_in_slot += 1 + if height < test_constants.HARD_FORK2_HEIGHT + test_constants.PLOT_V1_PHASE_OUT: + total_space = sum(farmer_space.values()) + percentage_space = {k: float(sp / total_space) for k, sp in farmer_space.items()} + else: + # after the phase-out, v1 plots don't count + # all wins are by v2 plots + total_space = sum(0 if k.size_v2 is None else sp for k, sp in farmer_space.items()) + percentage_space = { + k: 0.0 if k.size_v2 is None else float(sp / total_space) for k, sp in farmer_space.items() + } + win_percentage = {k: wins[k] / sum(wins.values()) for k in farmer_ks.keys()} for k in farmer_ks.keys(): # Win rate is proportional to percentage of space assert abs(win_percentage[k] - percentage_space[k]) < 0.01 - def test_calculate_phase_out(self): + @pytest.mark.parametrize("sp_interval", [uint64(6250000000), uint64(1), uint64(2), uint64(10), uint64(10000000000)]) + def test_calculate_phase_out(self, sp_interval: uint64): constants = test_constants - sub_slot_iters = uint64(100000000000) - sp_interval = calculate_sp_interval_iters(constants, sub_slot_iters) + sub_slot_iters = 
uint64(sp_interval * constants.NUM_SPS_SUB_SLOT) # Before or at HARD_FORK2_HEIGHT, should return 0 - assert calculate_phase_out(constants, sub_slot_iters, constants.HARD_FORK2_HEIGHT - 1) == 0 + assert calculate_phase_out(constants, sub_slot_iters, uint32(constants.HARD_FORK2_HEIGHT - 1)) == 0 assert calculate_phase_out(constants, sub_slot_iters, constants.HARD_FORK2_HEIGHT) == 0 # after HARD_FORK2_HEIGHT, should return value = delta/phase_out_period * sp_interval assert ( - calculate_phase_out(constants, sub_slot_iters, constants.HARD_FORK2_HEIGHT + 1) + calculate_phase_out(constants, sub_slot_iters, uint32(constants.HARD_FORK2_HEIGHT + 1)) == sp_interval // constants.PLOT_V1_PHASE_OUT ) assert ( calculate_phase_out( - constants, sub_slot_iters, constants.HARD_FORK2_HEIGHT + constants.PLOT_V1_PHASE_OUT // 2 + constants, sub_slot_iters, uint32(constants.HARD_FORK2_HEIGHT + constants.PLOT_V1_PHASE_OUT // 2) ) == sp_interval // 2 ) assert ( - calculate_phase_out(constants, sub_slot_iters, constants.HARD_FORK2_HEIGHT + constants.PLOT_V1_PHASE_OUT) + calculate_phase_out( + constants, sub_slot_iters, uint32(constants.HARD_FORK2_HEIGHT + constants.PLOT_V1_PHASE_OUT) + ) == sp_interval ) @@ -151,3 +181,19 @@ def test_calculate_phase_out(self): max_uint32_height = uint32(0xFFFFFFFF) result_max_height = calculate_phase_out(constants, sub_slot_iters, max_uint32_height) assert result_max_height == sp_interval # Should cap at sp_interval + + +def test_expected_plot_size_v1() -> None: + last_size = 2_400_000 + for k in range(18, 50): + plot_size = _expected_plot_size(PlotSize.make_v1(k)) + assert plot_size > last_size * 2 + last_size = plot_size + + +def test_expected_plot_size_v2() -> None: + last_size = 100_000 + for k in range(16, 32, 2): + plot_size = _expected_plot_size(PlotSize.make_v2(k)) + assert plot_size > last_size * 2 + last_size = plot_size diff --git a/chia/_tests/core/custom_types/test_proof_of_space.py b/chia/_tests/core/custom_types/test_proof_of_space.py index ed8520f36051..7bddd83d3696 100644 --- a/chia/_tests/core/custom_types/test_proof_of_space.py +++ b/chia/_tests/core/custom_types/test_proof_of_space.py @@ -5,15 +5,17 @@ from typing import Optional import pytest -from chia_rs import G1Element, PlotSize, ProofOfSpace +from chia_rs import G1Element, PlotSize from chia_rs.sized_bytes import bytes32, bytes48 from chia_rs.sized_ints import uint8, uint32 from chia._tests.util.misc import Marks, datacases from chia.consensus.default_constants import DEFAULT_CONSTANTS from chia.types.blockchain_format.proof_of_space import ( - calculate_plot_difficulty, calculate_prefix_bits, + calculate_required_plot_strength, + check_plot_size, + make_pos, passes_plot_filter, verify_and_get_quality_string, ) @@ -23,7 +25,7 @@ class ProofOfSpaceCase: id: str pos_challenge: bytes32 - plot_size: uint8 + plot_size: PlotSize plot_public_key: G1Element pool_public_key: Optional[G1Element] = None pool_contract_puzzle_hash: Optional[bytes32] = None @@ -45,14 +47,14 @@ def b32(key: str) -> bytes32: ProofOfSpaceCase( id="Neither pool public key nor pool contract puzzle hash", pos_challenge=bytes32(b"1" * 32), - plot_size=uint8(0), + plot_size=PlotSize.make_v1(0), plot_public_key=G1Element(), expected_error="Expected pool public key or pool contract puzzle hash but got neither", ), ProofOfSpaceCase( id="Both pool public key and pool contract puzzle hash", pos_challenge=bytes32(b"1" * 32), - plot_size=uint8(0), + plot_size=PlotSize.make_v1(0), plot_public_key=G1Element(), pool_public_key=G1Element(), 
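# editor's sketch (not part of the patch): the surrounding test cases replace
# the legacy raw-uint8 plot-size encoding with explicit PlotSize constructors.
# In the old scheme the high bit (0x80) marked a v2 plot and the low 7 bits
# carried k, e.g. uint8(0x80 | 34) -> PlotSize.make_v2(34) in the hunks below
# and uint8(42) -> PlotSize.make_v1(42) above. A hedged round-trip helper
# (illustrative only; make_v1/make_v2 and the size_v1/size_v2 fields are the
# chia_rs API this file already uses):
from chia_rs import PlotSize

def plot_size_from_legacy_byte(raw: int) -> PlotSize:
    # high bit set -> v2 plot with k in the low 7 bits; otherwise a v1 k value
    if raw & 0x80:
        return PlotSize.make_v2(raw & 0x7F)
    return PlotSize.make_v1(raw)

ps = plot_size_from_legacy_byte(0x80 | 34)
assert ps.size_v2 == 34 and ps.size_v1 is None
ps = plot_size_from_legacy_byte(42)
assert ps.size_v1 == 42 and ps.size_v2 is None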
pool_contract_puzzle_hash=bytes32(b"1" * 32), @@ -61,7 +63,7 @@ def b32(key: str) -> bytes32: ProofOfSpaceCase( id="Lower than minimum plot size", pos_challenge=bytes32(b"1" * 32), - plot_size=uint8(31), + plot_size=PlotSize.make_v1(31), plot_public_key=G1Element(), pool_public_key=G1Element(), expected_error="Plot size is lower than the minimum", @@ -69,7 +71,7 @@ def b32(key: str) -> bytes32: ProofOfSpaceCase( id="Higher than maximum plot size", pos_challenge=bytes32(b"1" * 32), - plot_size=uint8(51), + plot_size=PlotSize.make_v1(51), plot_public_key=G1Element(), pool_public_key=G1Element(), expected_error="Plot size is higher than the maximum", @@ -77,7 +79,7 @@ def b32(key: str) -> bytes32: ProofOfSpaceCase( id="Different challenge", pos_challenge=bytes32(b"1" * 32), - plot_size=uint8(42), + plot_size=PlotSize.make_v1(42), pool_public_key=G1Element(), plot_public_key=G1Element(), expected_error="Calculated pos challenge doesn't match the provided one", @@ -85,7 +87,7 @@ def b32(key: str) -> bytes32: ProofOfSpaceCase( id="Not passing the plot filter with size 9", pos_challenge=b32("08b23cc2844dfb92d2eedaa705a1ce665d571ee753bd81cbb67b92caa6d34722"), - plot_size=uint8(42), + plot_size=PlotSize.make_v1(42), pool_public_key=g1( "b6449c2c68df97c19e884427e42ee7350982d4020571ead08732615ff39bd216bfd630b6460784982bec98b49fea79d0" ), @@ -98,7 +100,7 @@ def b32(key: str) -> bytes32: ProofOfSpaceCase( id="Passing the plot filter with size 8", pos_challenge=b32("08b23cc2844dfb92d2eedaa705a1ce665d571ee753bd81cbb67b92caa6d34722"), - plot_size=uint8(42), + plot_size=PlotSize.make_v1(42), pool_public_key=g1( "b6449c2c68df97c19e884427e42ee7350982d4020571ead08732615ff39bd216bfd630b6460784982bec98b49fea79d0" ), @@ -110,7 +112,7 @@ def b32(key: str) -> bytes32: ProofOfSpaceCase( id="v2 plot size 0", pos_challenge=bytes32(b"1" * 32), - plot_size=uint8(0x80), + plot_size=PlotSize.make_v2(0), plot_public_key=G1Element(), pool_public_key=G1Element(), expected_error="Plot size is lower than the minimum", @@ -118,7 +120,7 @@ def b32(key: str) -> bytes32: ProofOfSpaceCase( id="v2 plot size 34", pos_challenge=bytes32(b"1" * 32), - plot_size=uint8(0x80 | 34), + plot_size=PlotSize.make_v2(34), plot_public_key=G1Element(), pool_public_key=G1Element(), expected_error="Plot size is higher than the maximum", @@ -126,7 +128,7 @@ def b32(key: str) -> bytes32: ProofOfSpaceCase( id="Not passing the plot filter v2", pos_challenge=b32("3d29ea79d19b3f7e99ebf764ae53697cbe143603909873946af6ab1ece606861"), - plot_size=uint8(0x80 | 32), + plot_size=PlotSize.make_v2(32), pool_public_key=g1( "b6449c2c68df97c19e884427e42ee7350982d4020571ead08732615ff39bd216bfd630b6460784982bec98b49fea79d0" ), @@ -137,7 +139,7 @@ def b32(key: str) -> bytes32: ), ) def test_verify_and_get_quality_string(caplog: pytest.LogCaptureFixture, case: ProofOfSpaceCase) -> None: - pos = ProofOfSpace( + pos = make_pos( challenge=case.pos_challenge, pool_public_key=case.pool_public_key, pool_contract_puzzle_hash=case.pool_contract_puzzle_hash, @@ -159,7 +161,7 @@ def test_verify_and_get_quality_string(caplog: pytest.LogCaptureFixture, case: P @datacases( ProofOfSpaceCase( id="v2 plot are not implemented", - plot_size=uint8(0x80 | 30), + plot_size=PlotSize.make_v2(30), pos_challenge=b32("47deb938e145d25d7b3b3c85ca9e3972b76c01aeeb78a02fe5d3b040d282317e"), plot_public_key=g1( "afa3aaf09c03885154be49216ee7fb2e4581b9c4a4d7e9cc402e27280bf0cfdbdf1b9ba674e301fd1d1450234b3b1868" @@ -171,7 +173,7 @@ def test_verify_and_get_quality_string(caplog: pytest.LogCaptureFixture, 
case: P ), ) def test_verify_and_get_quality_string_v2(caplog: pytest.LogCaptureFixture, case: ProofOfSpaceCase) -> None: - pos = ProofOfSpace( + pos = make_pos( challenge=case.pos_challenge, pool_public_key=case.pool_public_key, pool_contract_puzzle_hash=case.pool_contract_puzzle_hash, @@ -199,26 +201,54 @@ def test_verify_and_get_quality_string_v2(caplog: pytest.LogCaptureFixture, case @pytest.mark.parametrize( - "height, difficulty", + "height, strength", [ (0, 2), (DEFAULT_CONSTANTS.HARD_FORK_HEIGHT, 2), (DEFAULT_CONSTANTS.HARD_FORK2_HEIGHT, 2), - (DEFAULT_CONSTANTS.PLOT_DIFFICULTY_4_HEIGHT - 1, 2), - (DEFAULT_CONSTANTS.PLOT_DIFFICULTY_4_HEIGHT, 4), - (DEFAULT_CONSTANTS.PLOT_DIFFICULTY_5_HEIGHT - 1, 4), - (DEFAULT_CONSTANTS.PLOT_DIFFICULTY_5_HEIGHT, 5), - (DEFAULT_CONSTANTS.PLOT_DIFFICULTY_6_HEIGHT - 1, 5), - (DEFAULT_CONSTANTS.PLOT_DIFFICULTY_6_HEIGHT, 6), - (DEFAULT_CONSTANTS.PLOT_DIFFICULTY_7_HEIGHT - 1, 6), - (DEFAULT_CONSTANTS.PLOT_DIFFICULTY_7_HEIGHT, 7), - (DEFAULT_CONSTANTS.PLOT_DIFFICULTY_8_HEIGHT - 1, 7), - (DEFAULT_CONSTANTS.PLOT_DIFFICULTY_8_HEIGHT, 8), - (DEFAULT_CONSTANTS.PLOT_DIFFICULTY_8_HEIGHT + 1000000, 8), + (DEFAULT_CONSTANTS.PLOT_STRENGTH_4_HEIGHT - 1, 2), + (DEFAULT_CONSTANTS.PLOT_STRENGTH_4_HEIGHT, 4), + (DEFAULT_CONSTANTS.PLOT_STRENGTH_5_HEIGHT - 1, 4), + (DEFAULT_CONSTANTS.PLOT_STRENGTH_5_HEIGHT, 5), + (DEFAULT_CONSTANTS.PLOT_STRENGTH_6_HEIGHT - 1, 5), + (DEFAULT_CONSTANTS.PLOT_STRENGTH_6_HEIGHT, 6), + (DEFAULT_CONSTANTS.PLOT_STRENGTH_7_HEIGHT - 1, 6), + (DEFAULT_CONSTANTS.PLOT_STRENGTH_7_HEIGHT, 7), + (DEFAULT_CONSTANTS.PLOT_STRENGTH_8_HEIGHT - 1, 7), + (DEFAULT_CONSTANTS.PLOT_STRENGTH_8_HEIGHT, 8), + (DEFAULT_CONSTANTS.PLOT_STRENGTH_8_HEIGHT + 1000000, 8), ], ) -def test_calculate_plot_difficulty(height: uint32, difficulty: uint8) -> None: - assert calculate_plot_difficulty(DEFAULT_CONSTANTS, height) == difficulty +def test_calculate_plot_strength(height: uint32, strength: uint8) -> None: + assert calculate_required_plot_strength(DEFAULT_CONSTANTS, height) == strength + + +@pytest.mark.parametrize( + "size, valid", + [ + (PlotSize.make_v1(31), False), # too small + (PlotSize.make_v1(32), True), + (PlotSize.make_v1(33), True), + (PlotSize.make_v1(34), True), + (PlotSize.make_v1(35), True), + (PlotSize.make_v1(36), True), + (PlotSize.make_v1(37), True), + (PlotSize.make_v1(49), True), + (PlotSize.make_v1(50), True), + (PlotSize.make_v1(51), False), # too large + (PlotSize.make_v2(26), False), # too small + (PlotSize.make_v2(27), False), # too small (and odd) + (PlotSize.make_v2(28), True), + (PlotSize.make_v2(29), False), # odd + (PlotSize.make_v2(30), True), + (PlotSize.make_v2(31), False), # odd + (PlotSize.make_v2(32), True), + (PlotSize.make_v2(33), False), # too large (and odd) + (PlotSize.make_v2(34), False), # too large + ], +) +def test_check_plot_size(size: PlotSize, valid: bool) -> None: + assert check_plot_size(DEFAULT_CONSTANTS, size) == valid class TestProofOfSpace: diff --git a/chia/_tests/core/data_layer/test_data_rpc.py b/chia/_tests/core/data_layer/test_data_rpc.py index 7740ed1366a9..bae6c8cf2219 100644 --- a/chia/_tests/core/data_layer/test_data_rpc.py +++ b/chia/_tests/core/data_layer/test_data_rpc.py @@ -45,6 +45,7 @@ from chia.data_layer.data_layer_errors import KeyNotFoundError, OfferIntegrityError from chia.data_layer.data_layer_rpc_api import DataLayerRpcApi from chia.data_layer.data_layer_rpc_client import DataLayerRpcClient +from chia.data_layer.data_layer_service import DataLayerService from chia.data_layer.data_layer_util import ( 
HashOnlyProof, OfferStore, @@ -58,8 +59,7 @@ ) from chia.data_layer.data_layer_wallet import DataLayerWallet, verify_offer from chia.data_layer.data_store import DataStore -from chia.server.aliases import DataLayerService, WalletService -from chia.server.start_data_layer import create_data_layer_service +from chia.data_layer.start_data_layer import create_data_layer_service from chia.simulator.block_tools import BlockTools from chia.simulator.full_node_simulator import FullNodeSimulator from chia.simulator.simulator_protocol import FarmNewBlockProtocol @@ -77,6 +77,7 @@ from chia.wallet.wallet_node import WalletNode from chia.wallet.wallet_request_types import DLLatestSingleton from chia.wallet.wallet_rpc_api import WalletRpcApi +from chia.wallet.wallet_service import WalletService pytestmark = pytest.mark.data_layer nodes = tuple[WalletNode, FullNodeSimulator] @@ -193,7 +194,7 @@ async def is_transaction_confirmed(api: WalletRpcApi, tx_id: bytes32) -> bool: except ValueError: # pragma: no cover return False - return True if TransactionRecord.from_json_dict_convenience(val["transaction"]).confirmed else False # mypy + return True if TransactionRecord.from_json_dict(val["transaction"]).confirmed else False # mypy async def farm_block_with_spend( @@ -2307,7 +2308,7 @@ async def test_unsubscribe_unknown( bare_data_layer_api: DataLayerRpcApi, seeded_random: random.Random, ) -> None: - with pytest.raises(RuntimeError, match="No subscription found for the given store_id."): + with pytest.raises(RuntimeError, match="No subscription found for the given store_id"): await bare_data_layer_api.unsubscribe(request={"id": bytes32.random(seeded_random).hex(), "retain": False}) diff --git a/chia/_tests/core/full_node/stores/test_block_store.py b/chia/_tests/core/full_node/stores/test_block_store.py index 0bf2ba048f5a..5cb22d158f7d 100644 --- a/chia/_tests/core/full_node/stores/test_block_store.py +++ b/chia/_tests/core/full_node/stores/test_block_store.py @@ -418,8 +418,8 @@ def generator(i: int) -> SerializedProgram: store = await BlockStore.create(db_wrapper, use_cache=use_cache) new_blocks = [] - for i, block in enumerate(blocks): - block = block.replace(transactions_generator=generator(i)) + for i, original_block in enumerate(blocks): + block = original_block.replace(transactions_generator=generator(i)) block_record = header_block_to_sub_block_record( DEFAULT_CONSTANTS, uint64(0), block, uint64(0), False, uint8(0), uint32(max(0, block.height - 1)), None ) diff --git a/chia/_tests/core/full_node/test_address_manager.py b/chia/_tests/core/full_node/test_address_manager.py index 7163f98c7764..ef791ddd7469 100644 --- a/chia/_tests/core/full_node/test_address_manager.py +++ b/chia/_tests/core/full_node/test_address_manager.py @@ -560,7 +560,7 @@ async def check_retrieved_peers(self, wanted_peers: list[ExtendedPeerInfo], addr # use tmp_path pytest fixture to create a temporary directory async def test_serialization(self, tmp_path: Path): addrman = AddressManagerTest() - now = int(math.floor(time.time())) + now = math.floor(time.time()) t_peer1 = TimestampedPeerInfo("250.7.1.1", uint16(8333), uint64(now - 10000)) t_peer2 = TimestampedPeerInfo("1050:0000:0000:0000:0005:0600:300c:326b", uint16(9999), uint64(now - 20000)) t_peer3 = TimestampedPeerInfo("250.7.3.3", uint16(9999), uint64(now - 30000)) @@ -587,7 +587,7 @@ async def test_serialization(self, tmp_path: Path): @pytest.mark.anyio async def test_bad_ip_encoding(self, tmp_path: Path): addrman = AddressManagerTest() - now = int(math.floor(time.time())) + 
now = math.floor(time.time()) t_peer1 = TimestampedPeerInfo("250.7.1.1", uint16(8333), uint64(now - 10000)) t_peer2 = TimestampedPeerInfo("1050:0000:0000:0000:0005:0600:300c:326b", uint16(9999), uint64(now - 20000)) t_peer3 = TimestampedPeerInfo("250.7.3.3", uint16(9999), uint64(now - 30000)) @@ -725,7 +725,7 @@ async def old_serialize(address_manager: AddressManager, peers_file_path: Path) # create a file with the old serialization, then migrate to new serialization addrman = AddressManagerTest() - now = int(math.floor(time.time())) + now = math.floor(time.time()) t_peer1 = TimestampedPeerInfo("250.7.1.1", uint16(8333), uint64(now - 10000)) t_peer2 = TimestampedPeerInfo("1050:0000:0000:0000:0005:0600:300c:326b", uint16(9999), uint64(now - 20000)) t_peer3 = TimestampedPeerInfo("250.7.3.3", uint16(9999), uint64(now - 30000)) diff --git a/chia/_tests/core/full_node/test_full_node.py b/chia/_tests/core/full_node/test_full_node.py index 268a270b9306..95f37f722609 100644 --- a/chia/_tests/core/full_node/test_full_node.py +++ b/chia/_tests/core/full_node/test_full_node.py @@ -112,11 +112,11 @@ def test_pre_validation_result() -> None: - conds = SpendBundleConditions([], 0, 0, 0, None, None, [], 0, 0, 0, True, 0, 0) + conds = SpendBundleConditions([], 0, 0, 0, None, None, [], 0, 0, 0, True, 0, 0, 0, 0, 0) results = PreValidationResult(None, uint64(1), conds, uint32(0)) assert results.validated_signature is True - conds = SpendBundleConditions([], 0, 0, 0, None, None, [], 0, 0, 0, False, 0, 0) + conds = SpendBundleConditions([], 0, 0, 0, None, None, [], 0, 0, 0, False, 0, 0, 0, 0, 0) results = PreValidationResult(None, uint64(1), conds, uint32(0)) assert results.validated_signature is False @@ -2678,16 +2678,15 @@ async def test_long_reorg_nodes( blocks = default_10000_blocks[: 1600 - chain_length] reorg_blocks = test_long_reorg_blocks_light[: 1600 - chain_length] reorg_height = 2000 - else: - if fork_point == 1500: - blocks = default_10000_blocks[: 1900 - chain_length] - reorg_blocks = test_long_reorg_1500_blocks[: 1900 - chain_length] - reorg_height = 2300 - else: # pragma: no cover - pytest.skip("We rely on the light-blocks test for a 0 forkpoint") - blocks = default_10000_blocks[: 1100 - chain_length] - # reorg_blocks = test_long_reorg_blocks[: 1100 - chain_length] - reorg_height = 1600 + elif fork_point == 1500: + blocks = default_10000_blocks[: 1900 - chain_length] + reorg_blocks = test_long_reorg_1500_blocks[: 1900 - chain_length] + reorg_height = 2300 + else: # pragma: no cover + pytest.skip("We rely on the light-blocks test for a 0 forkpoint") + blocks = default_10000_blocks[: 1100 - chain_length] + # reorg_blocks = test_long_reorg_blocks[: 1100 - chain_length] + reorg_height = 1600 # this is a pre-requisite for a reorg to happen assert default_10000_blocks[reorg_height].weight > reorg_blocks[-1].weight @@ -3163,15 +3162,14 @@ async def declare_pos_unfinished_block( challenge_chain_sp = block.reward_chain_block.challenge_chain_sp_vdf.output.get_hash() if block.reward_chain_block.reward_chain_sp_vdf is not None: reward_chain_sp = block.reward_chain_block.reward_chain_sp_vdf.output.get_hash() + elif len(block.finished_sub_slots) > 0: + reward_chain_sp = block.finished_sub_slots[-1].reward_chain.get_hash() else: - if len(block.finished_sub_slots) > 0: - reward_chain_sp = block.finished_sub_slots[-1].reward_chain.get_hash() - else: - curr = blockchain.block_record(block.prev_header_hash) - while not curr.first_in_sub_slot: - curr = blockchain.block_record(curr.prev_hash) - assert 
curr.finished_reward_slot_hashes is not None - reward_chain_sp = curr.finished_reward_slot_hashes[-1] + curr = blockchain.block_record(block.prev_header_hash) + while not curr.first_in_sub_slot: + curr = blockchain.block_record(curr.prev_hash) + assert curr.finished_reward_slot_hashes is not None + reward_chain_sp = curr.finished_reward_slot_hashes[-1] farmer_reward_address = block.foliage.foliage_block_data.farmer_reward_puzzle_hash pool_target = block.foliage.foliage_block_data.pool_target pool_target_signature = block.foliage.foliage_block_data.pool_signature diff --git a/chia/_tests/core/full_node/test_generator_tools.py b/chia/_tests/core/full_node/test_generator_tools.py index 00ed05d9cfcd..4321e103b82a 100644 --- a/chia/_tests/core/full_node/test_generator_tools.py +++ b/chia/_tests/core/full_node/test_generator_tools.py @@ -8,7 +8,6 @@ get_spends_for_trusted_block, get_spends_for_trusted_block_with_conditions, ) -from chia_rs.sized_bytes import bytes32 from chia_rs.sized_ints import uint32, uint64 from chia.consensus.generator_tools import tx_removals_and_additions @@ -47,6 +46,7 @@ 0, execution_cost=0, condition_cost=0, + fingerprint=b"", ), SpendConditions( coin_ids[1], @@ -74,19 +74,20 @@ 0, execution_cost=0, condition_cost=0, + fingerprint=b"", ), ] def test_tx_removals_and_additions() -> None: conditions = SpendBundleConditions( - spends, uint64(0), uint32(0), uint64(0), None, None, [], uint64(0), 0, 0, False, 0, 0 + spends, uint64(0), uint32(0), uint64(0), None, None, [], uint64(0), 0, 0, False, 0, 0, 0, 0, 0 ) expected_rems = [coin_ids[0], coin_ids[1]] expected_additions = [] for spend in spends: for puzzle_hash, am, _ in spend.create_coin: - expected_additions.append(Coin(bytes32(spend.coin_id), bytes32(puzzle_hash), uint64(am))) + expected_additions.append(Coin(spend.coin_id, puzzle_hash, uint64(am))) rems, adds = tx_removals_and_additions(conditions) assert rems == expected_rems assert adds == expected_additions @@ -109,7 +110,7 @@ def test_get_spends_for_block(caplog: pytest.LogCaptureFixture) -> None: conditions = get_spends_for_trusted_block( test_constants, TEST_GENERATOR.program, TEST_GENERATOR.generator_refs, 100 ) - assert conditions[0]["block_spends"] == [] + assert conditions["block_spends"] == [] def test_get_spends_for_block_with_conditions(caplog: pytest.LogCaptureFixture) -> None: diff --git a/chia/_tests/core/mempool/test_mempool.py b/chia/_tests/core/mempool/test_mempool.py index 52b0a2c57673..1527a5abd057 100644 --- a/chia/_tests/core/mempool/test_mempool.py +++ b/chia/_tests/core/mempool/test_mempool.py @@ -119,7 +119,7 @@ def make_item( return MempoolItem( SpendBundle([], G2Element()), fee, - SpendBundleConditions([], 0, 0, 0, None, None, [], cost, 0, 0, False, 0, 0), + SpendBundleConditions([], 0, 0, 0, None, None, [], cost, 0, 0, False, 0, 0, 0, 0, 0), spend_bundle_name, uint32(0), assert_height, @@ -3197,7 +3197,9 @@ def add_spend_bundles(spend_bundles: list[SpendBundle]) -> None: def test_get_puzzle_and_solution_for_coin_failure() -> None: with pytest.raises( - ValueError, match=f"Failed to get puzzle and solution for coin {TEST_COIN}, error: \\('coin not found', '80'\\)" + ValueError, + match=f"Failed to get puzzle and solution for coin {TEST_COIN}, " + "error: \\('InvalidOperatorArg: coin not found', '80'\\)", ): try: get_puzzle_and_solution_for_coin( @@ -3211,6 +3213,97 @@ def test_get_puzzle_and_solution_for_coin_failure() -> None: raise ValueError(f"Failed to get puzzle and solution for coin {TEST_COIN}, error: {e}") from e +# this puzzle just 
creates coins, however many are requested by the solution +# (mod (A) +# (defun loop (n) +# (if (= n 1) +# (list) +# (c (list 51 0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff n) (loop (- n 1)))) +# ) +# (loop A) +# ) +create_coins_loop: str = ( + "ff02ffff01ff02ff02ffff04ff02ffff04ff05ff80808080ffff04ffff01ff02" + "ffff03ffff09ff05ffff010180ff80ffff01ff04ffff04ffff0133ffff04ffff" + "01a0ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff" + "ffffffff04ff05ff80808080ffff02ff02ffff04ff02ffff04ffff11ff05ffff" + "010180ff808080808080ff0180ff018080" +) + + +# this test uses artificial puzzles just to exercise the block creation. These +# spends are expected not to verify any signatures +# This is to keep the test simple. +@pytest.mark.parametrize( + "puzzle, solution", + [ + # create 2000 coins + (create_coins_loop, "ff8207d180"), + # create 1000 coins + (create_coins_loop, "ff8203e980"), + # create 500 coins + (create_coins_loop, "ff8201f580"), + ], +) +@pytest.mark.parametrize("old", [True, False]) +def test_create_block_generator_custom_spend(puzzle: str, solution: str, old: bool) -> None: + mempool_info = MempoolInfo( + CLVMCost(uint64(11000000000 * 3)), + FeeRate(uint64(1000000)), + CLVMCost(uint64(11000000000)), + ) + + fee_estimator = create_bitcoin_fee_estimator(test_constants.MAX_BLOCK_COST_CLVM) + solution_str = SerializedProgram.fromhex(solution) + puzzle_reveal = SerializedProgram.fromhex(puzzle) + puzzle_hash = puzzle_reveal.get_tree_hash() + mempool = Mempool(mempool_info, fee_estimator) + coins = [Coin(bytes32.random(), puzzle_hash, uint64(amount)) for amount in range(100000000, 100000022)] + + spend_bundles = [ + SpendBundle( + coin_spends=[CoinSpend(coin, puzzle_reveal=puzzle_reveal, solution=solution_str)], + aggregated_signature=G2Element(), + ) + for coin in coins + ] + + for sb in spend_bundles: + mi = mempool_item_from_spendbundle(sb) + mempool.add_to_pool(mi) + invariant_check_mempool(mempool) + + create_block = mempool.create_block_generator if old else mempool.create_block_generator2 + generator = create_block(test_constants, test_constants.HARD_FORK2_HEIGHT, 10.0) + assert generator is not None + + assert generator.signature == G2Element() + + removals = set(generator.removals) + + err, conds = run_block_generator2( + bytes(generator.program), + generator.generator_refs, + test_constants.MAX_BLOCK_COST_CLVM, + 0, + generator.signature, + None, + test_constants, + ) + + assert err is None + assert conds is not None + + assert len(conds.spends) == len(removals) + + for spend in conds.spends: + removal = Coin(spend.parent_id, spend.puzzle_hash, uint64(spend.coin_amount)) + assert removal in coins + assert removal in removals + + invariant_check_mempool(mempool) + + @pytest.mark.parametrize("old", [True, False]) def test_create_block_generator(old: bool) -> None: mempool = construct_mempool() diff --git a/chia/_tests/core/mempool/test_mempool_fee_estimator.py b/chia/_tests/core/mempool/test_mempool_fee_estimator.py index b18f7c109294..91f0f12b9de6 100644 --- a/chia/_tests/core/mempool/test_mempool_fee_estimator.py +++ b/chia/_tests/core/mempool/test_mempool_fee_estimator.py @@ -23,7 +23,6 @@ async def test_basics() -> None: cost = uint64(5000000) for i in range(300, 700): - i = uint32(i) items = [] for _ in range(2, 100): fee = uint64(10000000) @@ -50,7 +49,7 @@ async def test_basics() -> None: ) items.append(mempool_item2) - fee_tracker.process_block(i, items) + fee_tracker.process_block(uint32(i), items) short, med, long = 
fee_tracker.estimate_fees() @@ -72,7 +71,6 @@ async def test_fee_increase() -> None: estimator = SmartFeeEstimator(fee_tracker, uint64(test_constants.MAX_BLOCK_COST_CLVM)) random = Random(x=1) for i in range(300, 700): - i = uint32(i) items = [] for _ in range(20): fee = uint64(0) @@ -85,7 +83,7 @@ async def test_fee_increase() -> None: ) items.append(mempool_item) - fee_tracker.process_block(i, items) + fee_tracker.process_block(uint32(i), items) short, med, long = fee_tracker.estimate_fees() mempool_info = mempool_manager.mempool.fee_estimator.get_mempool_info() diff --git a/chia/_tests/core/mempool/test_mempool_manager.py b/chia/_tests/core/mempool/test_mempool_manager.py index a2197ccd29e1..a443f8bb2cf8 100644 --- a/chia/_tests/core/mempool/test_mempool_manager.py +++ b/chia/_tests/core/mempool/test_mempool_manager.py @@ -41,6 +41,7 @@ QUOTE_BYTES, QUOTE_EXECUTION_COST, MempoolManager, + NewPeakItem, TimelockConditions, can_replace, check_removals, @@ -359,6 +360,7 @@ def make_test_conds( flags, execution_cost=0, condition_cost=0, + fingerprint=b"", ) for coin_id, parent_id, puzzle_hash, amount, flags, create_coin in spend_info ], @@ -374,6 +376,9 @@ def make_test_conds( False, 0, 0, + 0, + 0, + 0, ) @@ -589,7 +594,9 @@ def make_bundle_spends_map_and_fee( def mempool_item_from_spendbundle(spend_bundle: SpendBundle) -> MempoolItem: - conds = get_conditions_from_spendbundle(spend_bundle, INFINITE_COST, DEFAULT_CONSTANTS, uint32(0)) + conds = get_conditions_from_spendbundle( + spend_bundle, INFINITE_COST, DEFAULT_CONSTANTS, DEFAULT_CONSTANTS.HARD_FORK2_HEIGHT + ) bundle_coin_spends, fee = make_bundle_spends_map_and_fee(spend_bundle, conds) return MempoolItem( spend_bundle=spend_bundle, @@ -3151,3 +3158,155 @@ def test_get_items_by_coin_ids(coin_ids: list[bytes32]) -> list[MempoolItem]: assert err == expected_err assert len(conflicts) == len(expected_conflicts) assert set(conflicts) == set(expected_conflicts) + + +@pytest.mark.anyio +async def test_new_peak_deferred_ff_items() -> None: + """ + Covers the case where we update lineage info for multiple fast forward + singletons at new peak. 
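+ Two FF singleton spends are submitted alongside regular coins, then one new + block advances both singleton lineages at once; each mempool item is + expected to end up pointing at its own new latest unspent.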
+ """ + singleton_spend1 = make_singleton_spend(bytes32([1] * 32)) + singleton1_id = singleton_spend1.coin.name() + singleton_spend2 = make_singleton_spend(bytes32([2] * 32)) + singleton2_id = singleton_spend2.coin.name() + coins = TestCoins( + [singleton_spend1.coin, singleton_spend2.coin, TEST_COIN, TEST_COIN2], + { + singleton_spend1.coin.puzzle_hash: singleton_spend1.coin, + singleton_spend2.coin.puzzle_hash: singleton_spend2.coin, + }, + ) + mempool_manager = await setup_mempool(coins) + # Let's submit the two singletons transactions to the mempool + sb_names = [] + for singleton_spend, regular_coin in [(singleton_spend1, TEST_COIN), (singleton_spend2, TEST_COIN2)]: + sb = SpendBundle([singleton_spend, mk_coin_spend(regular_coin)], G2Element()) + sb_name = sb.name() + await mempool_manager.add_spend_bundle( + sb, + make_test_conds(spend_ids=[(singleton_spend.coin, ELIGIBLE_FOR_FF), (regular_coin, 0)], cost=1337), + sb_name, + uint32(1), + ) + assert mempool_manager.get_mempool_item(sb_name) is not None + sb_names.append(sb_name) + # Let's advance the mempool by spending these singletons into new lineages + singleton1_new_latest = Coin(singleton1_id, singleton_spend1.coin.puzzle_hash, singleton_spend1.coin.amount) + coins.update_lineage(singleton_spend1.coin.puzzle_hash, singleton1_new_latest) + singleton2_new_latest = Coin(singleton2_id, singleton_spend2.coin.puzzle_hash, singleton_spend2.coin.amount) + coins.update_lineage(singleton_spend2.coin.puzzle_hash, singleton2_new_latest) + await advance_mempool(mempool_manager, [singleton1_id, singleton2_id], use_optimization=True) + # Both items should get updated with their related latest lineages + mi1 = mempool_manager.get_mempool_item(sb_names[0]) + assert mi1 is not None + latest_singleton_lineage1 = mi1.bundle_coin_spends[singleton1_id].latest_singleton_lineage + assert latest_singleton_lineage1 is not None + assert latest_singleton_lineage1.coin_id == singleton1_new_latest.name() + mi2 = mempool_manager.get_mempool_item(sb_names[1]) + assert mi2 is not None + latest_singleton_lineage2 = mi2.bundle_coin_spends[singleton2_id].latest_singleton_lineage + assert latest_singleton_lineage2 is not None + assert latest_singleton_lineage2.coin_id == singleton2_new_latest.name() + + +@pytest.mark.anyio +async def test_different_ff_versions() -> None: + """ + Covers the case where we send an item with an older ff singleton version + while the mempool is aware of a newer lineage. 
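+ Both spends share the same launcher, so the mempool should resolve each of + them to the newest lineage it knows about, and keep both updated as that + lineage advances further.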
+ """ + launcher_id = bytes32([1] * 32) + singleton_spend1 = make_singleton_spend(launcher_id, bytes32([2] * 32)) + version1_id = singleton_spend1.coin.name() + singleton_spend2 = make_singleton_spend(launcher_id, bytes32([3] * 32)) + version2_id = singleton_spend2.coin.name() + singleton_ph = singleton_spend2.coin.puzzle_hash + coins = TestCoins( + [singleton_spend1.coin, singleton_spend2.coin, TEST_COIN, TEST_COIN2], {singleton_ph: singleton_spend2.coin} + ) + mempool_manager = await setup_mempool(coins) + mempool_items: list[MempoolItem] = [] + for singleton_spend, regular_coin in [(singleton_spend1, TEST_COIN), (singleton_spend2, TEST_COIN2)]: + sb = SpendBundle([singleton_spend, mk_coin_spend(regular_coin)], G2Element()) + sb_name = sb.name() + await mempool_manager.add_spend_bundle( + sb, + make_test_conds(spend_ids=[(singleton_spend.coin, ELIGIBLE_FOR_FF), (regular_coin, 0)], cost=1337), + sb_name, + uint32(1), + ) + mi = mempool_manager.get_mempool_item(sb_name) + assert mi is not None + mempool_items.append(mi) + [mi1, mi2] = mempool_items + latest_lineage_id = version2_id + assert latest_lineage_id != version1_id + # Bundle coin spends key points to version 1 but the lineage is latest (v2) + latest_singleton_lineage1 = mi1.bundle_coin_spends[version1_id].latest_singleton_lineage + assert latest_singleton_lineage1 is not None + assert latest_singleton_lineage1.coin_id == latest_lineage_id + # Both the bundle coin spends key and the lineage point to latest (v2) + latest_singleton_lineage2 = mi2.bundle_coin_spends[version2_id].latest_singleton_lineage + assert latest_singleton_lineage2 is not None + assert latest_singleton_lineage2.coin_id == latest_lineage_id + # Let's update the lineage with a new version of the singleton + new_latest_lineage = Coin(version2_id, singleton_ph, singleton_spend2.coin.amount) + new_latest_lineage_id = new_latest_lineage.name() + coins.update_lineage(singleton_ph, new_latest_lineage) + await advance_mempool(mempool_manager, [version1_id, version2_id], use_optimization=True) + # Both items should get updated with the latest lineage + new_mi1 = mempool_manager.get_mempool_item(mi1.spend_bundle_name) + assert new_mi1 is not None + latest_singleton_lineage1 = new_mi1.bundle_coin_spends[version1_id].latest_singleton_lineage + assert latest_singleton_lineage1 is not None + assert latest_singleton_lineage1.coin_id == new_latest_lineage_id + new_mi2 = mempool_manager.get_mempool_item(mi2.spend_bundle_name) + assert new_mi2 is not None + latest_singleton_lineage2 = new_mi2.bundle_coin_spends[version2_id].latest_singleton_lineage + assert latest_singleton_lineage2 is not None + assert latest_singleton_lineage2.coin_id == new_latest_lineage_id + + +@pytest.mark.anyio +@pytest.mark.parametrize( + "condition_and_error", + [ + (ConditionOpcode.ASSERT_HEIGHT_RELATIVE, Err.ASSERT_HEIGHT_RELATIVE_FAILED), + (ConditionOpcode.ASSERT_HEIGHT_ABSOLUTE, Err.ASSERT_HEIGHT_ABSOLUTE_FAILED), + ], +) +@pytest.mark.parametrize("optimized_path", [True, False]) +async def test_new_peak_txs_added(condition_and_error: tuple[ConditionOpcode, Err], optimized_path: bool) -> None: + """ + Tests that deferred transactions because of time-lock are retried once the + time-lock allows them to be reconsidered. 
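+ A peak at exactly the asserted height is not enough; the condition only + passes once the peak is strictly higher, so the item is re-added one block + later.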
+ """ + coins = TestCoins([TEST_COIN], {}) + mempool_manager = await setup_mempool(coins) + # Add an item that should go to the pending cache + assert mempool_manager.peak is not None + condition_height = mempool_manager.peak.height + 1 + condition, expected_error = condition_and_error + sb, sb_name, result = await generate_and_add_spendbundle(mempool_manager, [[condition, condition_height]]) + _, status, error = result + assert status == MempoolInclusionStatus.PENDING + assert error == expected_error + # Advance the mempool beyond the asserted height to retry the test item + if optimized_path: + spent_coins: Optional[list[bytes32]] = [] + new_peak_info = await mempool_manager.new_peak( + create_test_block_record(height=uint32(condition_height)), spent_coins + ) + # We're not there yet (needs to be higher, not equal) + assert mempool_manager.get_mempool_item(sb_name, include_pending=False) is None + assert new_peak_info.items == [] + else: + spent_coins = None + new_peak_info = await mempool_manager.new_peak( + create_test_block_record(height=uint32(condition_height + 1)), spent_coins + ) + # The item gets retried successfully now + mi = mempool_manager.get_mempool_item(sb_name, include_pending=False) + assert mi is not None + assert new_peak_info.items == [NewPeakItem(sb_name, sb, mi.conds)] diff --git a/chia/_tests/core/mempool/test_singleton_fast_forward.py b/chia/_tests/core/mempool/test_singleton_fast_forward.py index ccbb367cbc39..3d6a01a38b6a 100644 --- a/chia/_tests/core/mempool/test_singleton_fast_forward.py +++ b/chia/_tests/core/mempool/test_singleton_fast_forward.py @@ -79,7 +79,7 @@ def test_process_fast_forward_spends_unknown_ff() -> None: singleton_ff = SingletonFastForward() # We have no fast forward records yet, so we'll process this coin for the # first time here, but the item's latest singleton lineage returns None - with pytest.raises(ValueError, match="Cannot proceed with singleton spend fast forward."): + with pytest.raises(ValueError, match="Cannot proceed with singleton spend fast forward"): singleton_ff.process_fast_forward_spends( mempool_item=internal_mempool_item, height=TEST_HEIGHT, constants=DEFAULT_CONSTANTS ) @@ -663,4 +663,4 @@ async def test_double_spend_ff_spend_no_latest_unspent() -> None: status, error = await make_and_send_spend_bundle(sim, sim_client, [singleton_coin_spend], aggsig=sig) # It fails validation because it doesn't currently have a latest unspent assert status == MempoolInclusionStatus.FAILED - assert error == Err.DOUBLE_SPEND + assert error == Err.UNKNOWN_UNSPENT diff --git a/chia/_tests/core/server/test_rate_limits.py b/chia/_tests/core/server/test_rate_limits.py index 080114c08af3..bf4cef41c768 100644 --- a/chia/_tests/core/server/test_rate_limits.py +++ b/chia/_tests/core/server/test_rate_limits.py @@ -1,22 +1,25 @@ from __future__ import annotations -import asyncio +from dataclasses import dataclass +from typing import Any, cast import pytest from chia_rs.sized_ints import uint32 from chia._tests.conftest import node_with_params +from chia._tests.util.misc import boolean_datacases from chia._tests.util.time_out_assert import time_out_assert from chia.protocols.full_node_protocol import RejectBlock, RejectBlocks, RespondBlock, RespondBlocks from chia.protocols.outbound_message import make_msg from chia.protocols.protocol_message_types import ProtocolMessageTypes from chia.protocols.shared_protocol import Capability -from chia.server.rate_limit_numbers import compose_rate_limits, get_rate_limits_to_use +from 
chia.server.rate_limit_numbers import RLSettings, compose_rate_limits, get_rate_limits_to_use from chia.server.rate_limit_numbers import rate_limits as rl_numbers from chia.server.rate_limits import RateLimiter from chia.server.server import ChiaServer from chia.server.ws_connection import WSChiaConnection from chia.simulator.block_tools import BlockTools +from chia.simulator.full_node_simulator import FullNodeSimulator from chia.types.peer_info import PeerInfo rl_v2 = [Capability.BASE, Capability.BLOCK_HEADERS, Capability.RATE_LIMITS_V2] @@ -25,355 +28,448 @@ test_different_versions_results: list[int] = [] -class TestRateLimits: - @pytest.mark.anyio - async def test_get_rate_limits_to_use(self): - assert get_rate_limits_to_use(rl_v2, rl_v2) != get_rate_limits_to_use(rl_v2, rl_v1) - assert get_rate_limits_to_use(rl_v1, rl_v1) == get_rate_limits_to_use(rl_v2, rl_v1) - assert get_rate_limits_to_use(rl_v1, rl_v1) == get_rate_limits_to_use(rl_v1, rl_v2) - - @pytest.mark.anyio - async def test_too_many_messages(self): - # Too many messages - r = RateLimiter(incoming=True) - new_tx_message = make_msg(ProtocolMessageTypes.new_transaction, bytes([1] * 40)) - for i in range(4999): - assert r.process_msg_and_check(new_tx_message, rl_v2, rl_v2) is None - - saw_disconnect = False - for i in range(4999): - response = r.process_msg_and_check(new_tx_message, rl_v2, rl_v2) - if response is not None: - saw_disconnect = True - assert saw_disconnect - - # Non-tx message - r = RateLimiter(incoming=True) - new_peak_message = make_msg(ProtocolMessageTypes.new_peak, bytes([1] * 40)) - for i in range(200): - assert r.process_msg_and_check(new_peak_message, rl_v2, rl_v2) is None - - saw_disconnect = False - for i in range(200): - response = r.process_msg_and_check(new_peak_message, rl_v2, rl_v2) - if response is not None: - saw_disconnect = True - assert saw_disconnect - - @pytest.mark.anyio - async def test_large_message(self): - # Large tx - small_tx_message = make_msg(ProtocolMessageTypes.respond_transaction, bytes([1] * 500 * 1024)) - large_tx_message = make_msg(ProtocolMessageTypes.new_transaction, bytes([1] * 3 * 1024 * 1024)) - - r = RateLimiter(incoming=True) - assert r.process_msg_and_check(small_tx_message, rl_v2, rl_v2) is None - assert r.process_msg_and_check(large_tx_message, rl_v2, rl_v2) is not None - - small_vdf_message = make_msg(ProtocolMessageTypes.respond_signage_point, bytes([1] * 5 * 1024)) - large_vdf_message = make_msg(ProtocolMessageTypes.respond_signage_point, bytes([1] * 600 * 1024)) - large_blocks_message = make_msg(ProtocolMessageTypes.respond_blocks, bytes([1] * 51 * 1024 * 1024)) - r = RateLimiter(incoming=True) - assert r.process_msg_and_check(small_vdf_message, rl_v2, rl_v2) is None - assert r.process_msg_and_check(small_vdf_message, rl_v2, rl_v2) is None - assert r.process_msg_and_check(large_vdf_message, rl_v2, rl_v2) is not None - # this limit applies even though this message type is unlimited - assert r.process_msg_and_check(large_blocks_message, rl_v2, rl_v2) is not None - - @pytest.mark.anyio - async def test_too_much_data(self): - # Too much data - r = RateLimiter(incoming=True) - tx_message = make_msg(ProtocolMessageTypes.respond_transaction, bytes([1] * 500 * 1024)) - for i in range(40): - assert r.process_msg_and_check(tx_message, rl_v2, rl_v2) is None - - saw_disconnect = False - for i in range(300): - response = r.process_msg_and_check(tx_message, rl_v2, rl_v2) - if response is not None: - saw_disconnect = True - assert saw_disconnect - - r = RateLimiter(incoming=True) 
- block_message = make_msg(ProtocolMessageTypes.respond_unfinished_block, bytes([1] * 1024 * 1024)) - for i in range(10): - assert r.process_msg_and_check(block_message, rl_v2, rl_v2) is None - - saw_disconnect = False - for i in range(40): - response = r.process_msg_and_check(block_message, rl_v2, rl_v2) - if response is not None: - saw_disconnect = True - assert saw_disconnect - - @pytest.mark.anyio - async def test_non_tx_aggregate_limits(self): - # Frequency limits - r = RateLimiter(incoming=True) - message_1 = make_msg(ProtocolMessageTypes.coin_state_update, bytes([1] * 32)) - message_2 = make_msg(ProtocolMessageTypes.request_blocks, bytes([1] * 64)) - message_3 = make_msg(ProtocolMessageTypes.plot_sync_start, bytes([1] * 64)) - - for i in range(500): - assert r.process_msg_and_check(message_1, rl_v2, rl_v2) is None - - for i in range(500): - assert r.process_msg_and_check(message_2, rl_v2, rl_v2) is None - - saw_disconnect = False - for i in range(500): - response = r.process_msg_and_check(message_3, rl_v2, rl_v2) - if response is not None: - saw_disconnect = True - assert saw_disconnect - - # Size limits - r = RateLimiter(incoming=True) - message_4 = make_msg(ProtocolMessageTypes.respond_proof_of_weight, bytes([1] * 49 * 1024 * 1024)) - message_5 = make_msg(ProtocolMessageTypes.request_blocks, bytes([1] * 49 * 1024 * 1024)) - - for i in range(2): - assert r.process_msg_and_check(message_4, rl_v2, rl_v2) is None - - saw_disconnect = False - for i in range(2): - response = r.process_msg_and_check(message_5, rl_v2, rl_v2) - if response is not None: - saw_disconnect = True - assert saw_disconnect - - @pytest.mark.anyio - async def test_periodic_reset(self): - r = RateLimiter(True, 5) - tx_message = make_msg(ProtocolMessageTypes.respond_transaction, bytes([1] * 500 * 1024)) - for i in range(10): - assert r.process_msg_and_check(tx_message, rl_v2, rl_v2) is None - - saw_disconnect = False - for i in range(300): - response = r.process_msg_and_check(tx_message, rl_v2, rl_v2) - if response is not None: - saw_disconnect = True - assert saw_disconnect - assert r.process_msg_and_check(tx_message, rl_v2, rl_v2) is not None - await asyncio.sleep(6) +@dataclass +class SimClock: + current_time: float = 1000.0 + + def monotonic(self) -> float: + return self.current_time + + def advance(self, duration: float) -> None: + self.current_time += duration + + +@pytest.mark.anyio +async def test_get_rate_limits_to_use() -> None: + assert get_rate_limits_to_use(rl_v2, rl_v2) != get_rate_limits_to_use(rl_v2, rl_v1) + assert get_rate_limits_to_use(rl_v1, rl_v1) == get_rate_limits_to_use(rl_v2, rl_v1) + assert get_rate_limits_to_use(rl_v1, rl_v1) == get_rate_limits_to_use(rl_v1, rl_v2) + + +# we want to exercise every possible limit we may hit +# they are: +# * total number of messages / 60 seconds for non-transaction messages +# * total number of bytes / 60 seconds for non-transaction messages +# * number of messages / 60 seconds for "transaction" messages +# * number of bytes / 60 seconds for transaction messages + + +@pytest.mark.anyio +@boolean_datacases(name="incoming", true="incoming", false="outgoing") +@boolean_datacases(name="tx_msg", true="tx", false="non-tx") +@boolean_datacases(name="limit_size", true="size-limit", false="count-limit") +async def test_limits_v2(incoming: bool, tx_msg: bool, limit_size: bool, monkeypatch: pytest.MonkeyPatch) -> None: + # this test uses a single message type, and alters the rate limit settings + # for it to hit the different cases + + count = 1000 + message_data =
b"\0" * 1024 + msg_type = ProtocolMessageTypes.new_transaction + + limits: dict[str, Any] = {} + + if limit_size: + limits.update( + { + # this is the rate limit across all (non-tx) messages + "non_tx_freq": count * 2, + # this is the byte size limit across all (non-tx) messages + "non_tx_max_total_size": count * len(message_data), + } + ) + else: + limits.update( + { + # this is the rate limit across all (non-tx) messages + "non_tx_freq": count, + # this is the byte size limit across all (non-tx) messages + "non_tx_max_total_size": count * 2 * len(message_data), + } + ) + + if limit_size: + rate_limit = {msg_type: RLSettings(count * 2, 1024, count * len(message_data))} + else: + rate_limit = {msg_type: RLSettings(count, 1024, count * 2 * len(message_data))} + + if tx_msg: + limits.update({"rate_limits_tx": rate_limit, "rate_limits_other": {}}) + else: + limits.update({"rate_limits_other": rate_limit, "rate_limits_tx": {}}) + + def mock_get_limits(our_capabilities: list[Capability], peer_capabilities: list[Capability]) -> dict[str, Any]: + return limits + + import chia.server.rate_limits + + monkeypatch.setattr(chia.server.rate_limits, "get_rate_limits_to_use", mock_get_limits) + + r = RateLimiter(incoming=incoming, get_time=lambda: 0) + msg = make_msg(msg_type, message_data) + + for i in range(count): + assert r.process_msg_and_check(msg, rl_v2, rl_v2) is None + + expected_msg = "" + + if limit_size: + if not tx_msg: + expected_msg += "non-tx size:" + else: + expected_msg += "cumulative size:" + expected_msg += f" {(count + 1) * len(message_data)} > {count * len(message_data) * 1.0}" + else: + if not tx_msg: + expected_msg += "non-tx count:" + else: + expected_msg += "message count:" + expected_msg += f" {count + 1} > {count * 1.0}" + expected_msg += " (scale factor: 1.0)" + + response = r.process_msg_and_check(msg, rl_v2, rl_v2) + assert response == expected_msg + + for _ in range(10): + response = r.process_msg_and_check(msg, rl_v2, rl_v2) + # we can't stop incoming messages from arriving, counters keep + # increasing for incoming messages. 
For outgoing messages, we expect + # them not to be sent when hitting the rate limit, so those counters in + # the returned message stay the same + if incoming: + assert response is not None + else: + assert response == expected_msg + + +@pytest.mark.anyio +async def test_large_message() -> None: + # Large tx + small_tx_message = make_msg(ProtocolMessageTypes.respond_transaction, bytes([1] * 500 * 1024)) + large_tx_message = make_msg(ProtocolMessageTypes.new_transaction, bytes([1] * 3 * 1024 * 1024)) + + r = RateLimiter(incoming=True, get_time=lambda: 0) + assert r.process_msg_and_check(small_tx_message, rl_v2, rl_v2) is None + assert r.process_msg_and_check(large_tx_message, rl_v2, rl_v2) is not None + + small_vdf_message = make_msg(ProtocolMessageTypes.respond_signage_point, bytes([1] * 5 * 1024)) + large_vdf_message = make_msg(ProtocolMessageTypes.respond_signage_point, bytes([1] * 600 * 1024)) + large_blocks_message = make_msg(ProtocolMessageTypes.respond_blocks, bytes([1] * 51 * 1024 * 1024)) + r = RateLimiter(incoming=True, get_time=lambda: 0) + assert r.process_msg_and_check(small_vdf_message, rl_v2, rl_v2) is None + assert r.process_msg_and_check(small_vdf_message, rl_v2, rl_v2) is None + assert r.process_msg_and_check(large_vdf_message, rl_v2, rl_v2) is not None + # this limit applies even though this message type is unlimited + assert r.process_msg_and_check(large_blocks_message, rl_v2, rl_v2) is not None + + +@pytest.mark.anyio +async def test_too_much_data() -> None: + # Too much data + r = RateLimiter(incoming=True, get_time=lambda: 0) + tx_message = make_msg(ProtocolMessageTypes.respond_transaction, bytes([1] * 500 * 1024)) + for i in range(40): + assert r.process_msg_and_check(tx_message, rl_v2, rl_v2) is None + + saw_disconnect = False + for i in range(300): + response = r.process_msg_and_check(tx_message, rl_v2, rl_v2) + if response is not None: + saw_disconnect = True + assert saw_disconnect + + r = RateLimiter(incoming=True, get_time=lambda: 0) + block_message = make_msg(ProtocolMessageTypes.respond_unfinished_block, bytes([1] * 1024 * 1024)) + for i in range(10): + assert r.process_msg_and_check(block_message, rl_v2, rl_v2) is None + + saw_disconnect = False + for i in range(40): + response = r.process_msg_and_check(block_message, rl_v2, rl_v2) + if response is not None: + saw_disconnect = True + assert saw_disconnect + + +@pytest.mark.anyio +async def test_non_tx_aggregate_limits() -> None: + # Frequency limits + r = RateLimiter(incoming=True, get_time=lambda: 0) + message_1 = make_msg(ProtocolMessageTypes.coin_state_update, bytes([1] * 32)) + message_2 = make_msg(ProtocolMessageTypes.request_blocks, bytes([1] * 64)) + message_3 = make_msg(ProtocolMessageTypes.plot_sync_start, bytes([1] * 64)) + + for i in range(500): + assert r.process_msg_and_check(message_1, rl_v2, rl_v2) is None + + for i in range(500): + assert r.process_msg_and_check(message_2, rl_v2, rl_v2) is None + + saw_disconnect = False + for i in range(500): + response = r.process_msg_and_check(message_3, rl_v2, rl_v2) + if response is not None: + saw_disconnect = True + assert saw_disconnect + + # Size limits + r = RateLimiter(incoming=True, get_time=lambda: 0) + message_4 = make_msg(ProtocolMessageTypes.respond_proof_of_weight, bytes([1] * 49 * 1024 * 1024)) + message_5 = make_msg(ProtocolMessageTypes.request_blocks, bytes([1] * 49 * 1024 * 1024)) + + for i in range(2): + assert r.process_msg_and_check(message_4, rl_v2, rl_v2) is None + + saw_disconnect = False + for i in range(2): + response = 
r.process_msg_and_check(message_5, rl_v2, rl_v2) + if response is not None: + saw_disconnect = True + assert saw_disconnect + + +@pytest.mark.anyio +async def test_periodic_reset() -> None: + timer = SimClock() + r = RateLimiter(True, 5, get_time=timer.monotonic) + tx_message = make_msg(ProtocolMessageTypes.respond_transaction, bytes([1] * 500 * 1024)) + for i in range(10): assert r.process_msg_and_check(tx_message, rl_v2, rl_v2) is None - # Counts reset also - r = RateLimiter(True, 5) - new_tx_message = make_msg(ProtocolMessageTypes.new_transaction, bytes([1] * 40)) - for i in range(4999): - assert r.process_msg_and_check(new_tx_message, rl_v2, rl_v2) is None - - saw_disconnect = False - for i in range(4999): - response = r.process_msg_and_check(new_tx_message, rl_v2, rl_v2) - if response is not None: - saw_disconnect = True - assert saw_disconnect - await asyncio.sleep(6) + saw_disconnect = False + for i in range(300): + response = r.process_msg_and_check(tx_message, rl_v2, rl_v2) + if response is not None: + saw_disconnect = True + assert saw_disconnect + assert r.process_msg_and_check(tx_message, rl_v2, rl_v2) is not None + timer.advance(6) + assert r.process_msg_and_check(tx_message, rl_v2, rl_v2) is None + + # Counts reset also + r = RateLimiter(True, 5, get_time=timer.monotonic) + new_tx_message = make_msg(ProtocolMessageTypes.new_transaction, bytes([1] * 40)) + for i in range(4999): assert r.process_msg_and_check(new_tx_message, rl_v2, rl_v2) is None - @pytest.mark.anyio - async def test_percentage_limits(self): - r = RateLimiter(True, 60, 40) - new_peak_message = make_msg(ProtocolMessageTypes.new_peak, bytes([1] * 40)) - for i in range(50): - assert r.process_msg_and_check(new_peak_message, rl_v2, rl_v2) is None - - saw_disconnect = False - for i in range(50): - response = r.process_msg_and_check(new_peak_message, rl_v2, rl_v2) - if response is not None: - saw_disconnect = True - assert saw_disconnect - - r = RateLimiter(True, 60, 40) - block_message = make_msg(ProtocolMessageTypes.respond_unfinished_block, bytes([1] * 1024 * 1024)) - for i in range(5): - assert r.process_msg_and_check(block_message, rl_v2, rl_v2) is None - - saw_disconnect = False - for i in range(5): - response = r.process_msg_and_check(block_message, rl_v2, rl_v2) - if response is not None: - saw_disconnect = True - assert saw_disconnect - - # Aggregate percentage limit count - r = RateLimiter(True, 60, 40) - message_1 = make_msg(ProtocolMessageTypes.coin_state_update, bytes([1] * 5)) - message_2 = make_msg(ProtocolMessageTypes.request_blocks, bytes([1] * 32)) - message_3 = make_msg(ProtocolMessageTypes.plot_sync_start, bytes([1] * 32)) - - for i in range(180): - assert r.process_msg_and_check(message_1, rl_v2, rl_v2) is None - for i in range(180): - assert r.process_msg_and_check(message_2, rl_v2, rl_v2) is None - - saw_disconnect = False - for i in range(100): - response = r.process_msg_and_check(message_3, rl_v2, rl_v2) - if response is not None: - saw_disconnect = True - assert saw_disconnect - - # Aggregate percentage limit max total size - r = RateLimiter(True, 60, 40) - message_4 = make_msg(ProtocolMessageTypes.respond_proof_of_weight, bytes([1] * 18 * 1024 * 1024)) - message_5 = make_msg(ProtocolMessageTypes.respond_unfinished_block, bytes([1] * 24 * 1024 * 1024)) - - for i in range(2): - assert r.process_msg_and_check(message_4, rl_v2, rl_v2) is None - - saw_disconnect = False - for i in range(2): - response = r.process_msg_and_check(message_5, rl_v2, rl_v2) - if response is not None: - saw_disconnect 
= True - assert saw_disconnect - - @pytest.mark.anyio - async def test_too_many_outgoing_messages(self): - # Too many messages - r = RateLimiter(incoming=False) - new_peers_message = make_msg(ProtocolMessageTypes.respond_peers, bytes([1])) - non_tx_freq = get_rate_limits_to_use(rl_v2, rl_v2)["non_tx_freq"] - - passed = 0 - blocked = 0 - for i in range(non_tx_freq): - if r.process_msg_and_check(new_peers_message, rl_v2, rl_v2) is None: - passed += 1 - else: - blocked += 1 - - assert passed == 10 - assert blocked == non_tx_freq - passed - - # ensure that *another* message type is not blocked because of this - - new_signatures_message = make_msg(ProtocolMessageTypes.respond_signatures, bytes([1])) - assert r.process_msg_and_check(new_signatures_message, rl_v2, rl_v2) is None - - @pytest.mark.anyio - async def test_too_many_incoming_messages(self): - # Too many messages - r = RateLimiter(incoming=True) - new_peers_message = make_msg(ProtocolMessageTypes.respond_peers, bytes([1])) - non_tx_freq = get_rate_limits_to_use(rl_v2, rl_v2)["non_tx_freq"] - - passed = 0 - blocked = 0 - for i in range(non_tx_freq): - if r.process_msg_and_check(new_peers_message, rl_v2, rl_v2) is None: - passed += 1 - else: - blocked += 1 - - assert passed == 10 - assert blocked == non_tx_freq - passed - - # ensure that other message types *are* blocked because of this - - new_signatures_message = make_msg(ProtocolMessageTypes.respond_signatures, bytes([1])) - assert r.process_msg_and_check(new_signatures_message, rl_v2, rl_v2) is not None - - @pytest.mark.parametrize( - "node_with_params", - [ - pytest.param( - dict( - disable_capabilities=[Capability.BLOCK_HEADERS, Capability.RATE_LIMITS_V2], - ), - id="V1", + saw_disconnect = False + for i in range(4999): + response = r.process_msg_and_check(new_tx_message, rl_v2, rl_v2) + if response is not None: + saw_disconnect = True + assert saw_disconnect + timer.advance(6) + assert r.process_msg_and_check(new_tx_message, rl_v2, rl_v2) is None + + +@pytest.mark.anyio +async def test_percentage_limits() -> None: + r = RateLimiter(True, 60, 40, get_time=lambda: 0) + new_peak_message = make_msg(ProtocolMessageTypes.new_peak, bytes([1] * 40)) + for i in range(50): + assert r.process_msg_and_check(new_peak_message, rl_v2, rl_v2) is None + + saw_disconnect = False + for i in range(50): + response = r.process_msg_and_check(new_peak_message, rl_v2, rl_v2) + if response is not None: + saw_disconnect = True + assert saw_disconnect + + r = RateLimiter(True, 60, 40, get_time=lambda: 0) + block_message = make_msg(ProtocolMessageTypes.respond_unfinished_block, bytes([1] * 1024 * 1024)) + for i in range(5): + assert r.process_msg_and_check(block_message, rl_v2, rl_v2) is None + + saw_disconnect = False + for i in range(5): + response = r.process_msg_and_check(block_message, rl_v2, rl_v2) + if response is not None: + saw_disconnect = True + assert saw_disconnect + + # Aggregate percentage limit count + r = RateLimiter(True, 60, 40, get_time=lambda: 0) + message_1 = make_msg(ProtocolMessageTypes.coin_state_update, bytes([1] * 5)) + message_2 = make_msg(ProtocolMessageTypes.request_blocks, bytes([1] * 32)) + message_3 = make_msg(ProtocolMessageTypes.plot_sync_start, bytes([1] * 32)) + + for i in range(180): + assert r.process_msg_and_check(message_1, rl_v2, rl_v2) is None + for i in range(180): + assert r.process_msg_and_check(message_2, rl_v2, rl_v2) is None + + saw_disconnect = False + for i in range(100): + response = r.process_msg_and_check(message_3, rl_v2, rl_v2) + if response is not None: 
+ saw_disconnect = True + assert saw_disconnect + + # Aggregate percentage limit max total size + r = RateLimiter(True, 60, 40, get_time=lambda: 0) + message_4 = make_msg(ProtocolMessageTypes.respond_proof_of_weight, bytes([1] * 18 * 1024 * 1024)) + message_5 = make_msg(ProtocolMessageTypes.respond_unfinished_block, bytes([1] * 24 * 1024 * 1024)) + + for i in range(2): + assert r.process_msg_and_check(message_4, rl_v2, rl_v2) is None + + saw_disconnect = False + for i in range(2): + response = r.process_msg_and_check(message_5, rl_v2, rl_v2) + if response is not None: + saw_disconnect = True + assert saw_disconnect + + +@pytest.mark.anyio +async def test_too_many_outgoing_messages() -> None: + # Too many messages + r = RateLimiter(incoming=False, get_time=lambda: 0) + new_peers_message = make_msg(ProtocolMessageTypes.respond_peers, bytes([1])) + non_tx_freq = get_rate_limits_to_use(rl_v2, rl_v2)["non_tx_freq"] + + passed = 0 + blocked = 0 + for i in range(non_tx_freq): + if r.process_msg_and_check(new_peers_message, rl_v2, rl_v2) is None: + passed += 1 + else: + blocked += 1 + + assert passed == 10 + assert blocked == non_tx_freq - passed + + # ensure that *another* message type is not blocked because of this + + new_signatures_message = make_msg(ProtocolMessageTypes.respond_signatures, bytes([1])) + assert r.process_msg_and_check(new_signatures_message, rl_v2, rl_v2) is None + + +@pytest.mark.anyio +async def test_too_many_incoming_messages() -> None: + # Too many messages + r = RateLimiter(incoming=True, get_time=lambda: 0) + new_peers_message = make_msg(ProtocolMessageTypes.respond_peers, bytes([1])) + non_tx_freq = get_rate_limits_to_use(rl_v2, rl_v2)["non_tx_freq"] + + passed = 0 + blocked = 0 + for i in range(non_tx_freq): + if r.process_msg_and_check(new_peers_message, rl_v2, rl_v2) is None: + passed += 1 + else: + blocked += 1 + + assert passed == 10 + assert blocked == non_tx_freq - passed + + # ensure that other message types *are* blocked because of this + + new_signatures_message = make_msg(ProtocolMessageTypes.respond_signatures, bytes([1])) + assert r.process_msg_and_check(new_signatures_message, rl_v2, rl_v2) is not None + + +@pytest.mark.parametrize( + "node_with_params", + [ + pytest.param( + dict( + disable_capabilities=[Capability.BLOCK_HEADERS, Capability.RATE_LIMITS_V2], ), - pytest.param( - dict( - disable_capabilities=[], - ), - id="V2", + id="V1", + ), + pytest.param( + dict( + disable_capabilities=[], ), - ], - indirect=True, - ) - @pytest.mark.parametrize( - "node_with_params_b", - [ - pytest.param( - dict( - disable_capabilities=[Capability.BLOCK_HEADERS, Capability.RATE_LIMITS_V2], - ), - id="V1", + id="V2", + ), + ], + indirect=True, +) +@pytest.mark.parametrize( + "node_with_params_b", + [ + pytest.param( + dict( + disable_capabilities=[Capability.BLOCK_HEADERS, Capability.RATE_LIMITS_V2], ), - pytest.param( - dict( - disable_capabilities=[], - ), - id="V2", + id="V1", + ), + pytest.param( + dict( + disable_capabilities=[], ), - ], - indirect=True, - ) - @pytest.mark.anyio - @pytest.mark.limit_consensus_modes(reason="save time") - async def test_different_versions(self, node_with_params, node_with_params_b, self_hostname): - node_a = node_with_params - node_b = node_with_params_b - - full_node_server_a: ChiaServer = node_a.full_node.server - full_node_server_b: ChiaServer = node_b.full_node.server - - await full_node_server_b.start_client(PeerInfo(self_hostname, full_node_server_a.get_port()), None) + id="V2", + ), + ], + indirect=True, +) +@pytest.mark.anyio 
+@pytest.mark.limit_consensus_modes(reason="save time") +async def test_different_versions( + node_with_params: FullNodeSimulator, node_with_params_b: FullNodeSimulator, self_hostname: str +) -> None: + node_a = node_with_params + node_b = node_with_params_b - assert len(full_node_server_b.get_connections()) == 1 - assert len(full_node_server_a.get_connections()) == 1 + full_node_server_a: ChiaServer = node_a.full_node.server + full_node_server_b: ChiaServer = node_b.full_node.server - a_con: WSChiaConnection = full_node_server_a.get_connections()[0] - b_con: WSChiaConnection = full_node_server_b.get_connections()[0] + await full_node_server_b.start_client(PeerInfo(self_hostname, full_node_server_a.get_port()), None) - print(a_con.local_capabilities, a_con.peer_capabilities) - print(b_con.local_capabilities, b_con.peer_capabilities) + assert len(full_node_server_b.get_connections()) == 1 + assert len(full_node_server_a.get_connections()) == 1 - # The two nodes will use the same rate limits even if their versions are different - assert get_rate_limits_to_use(a_con.local_capabilities, a_con.peer_capabilities) == get_rate_limits_to_use( - b_con.local_capabilities, b_con.peer_capabilities - ) + a_con: WSChiaConnection = full_node_server_a.get_connections()[0] + b_con: WSChiaConnection = full_node_server_b.get_connections()[0] - # The following code checks whether all of the runs resulted in the same number of items in "rate_limits_tx", - # which would mean the same rate limits are always used. This should not happen, since two nodes with V2 - # will use V2. - total_tx_msg_count = len( - get_rate_limits_to_use(a_con.local_capabilities, a_con.peer_capabilities)["rate_limits_tx"] - ) + print(a_con.local_capabilities, a_con.peer_capabilities) + print(b_con.local_capabilities, b_con.peer_capabilities) - test_different_versions_results.append(total_tx_msg_count) - if len(test_different_versions_results) >= 4: - assert len(set(test_different_versions_results)) >= 2 + # The two nodes will use the same rate limits even if their versions are different + assert get_rate_limits_to_use(a_con.local_capabilities, a_con.peer_capabilities) == get_rate_limits_to_use( + b_con.local_capabilities, b_con.peer_capabilities + ) - @pytest.mark.anyio - async def test_compose(self): - rl_1 = rl_numbers[1] - rl_2 = rl_numbers[2] - assert ProtocolMessageTypes.respond_children in rl_1["rate_limits_other"] - assert ProtocolMessageTypes.respond_children not in rl_1["rate_limits_tx"] - assert ProtocolMessageTypes.respond_children not in rl_2["rate_limits_other"] - assert ProtocolMessageTypes.respond_children in rl_2["rate_limits_tx"] + # The following code checks whether all of the runs resulted in the same number of items in "rate_limits_tx", + # which would mean the same rate limits are always used. This should not happen, since two nodes with V2 + # will use V2. 
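+ # (Only the V2/V2 pairing yields the composed V2 table; V1/V1 and the mixed + # pairings all fall back to V1, so the four parametrized runs must produce + # at least two distinct counts.)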
+ total_tx_msg_count = len( + get_rate_limits_to_use(a_con.local_capabilities, a_con.peer_capabilities)["rate_limits_tx"] + ) - assert ProtocolMessageTypes.request_block in rl_1["rate_limits_other"] - assert ProtocolMessageTypes.request_block not in rl_1["rate_limits_tx"] - assert ProtocolMessageTypes.request_block not in rl_2["rate_limits_other"] - assert ProtocolMessageTypes.request_block not in rl_2["rate_limits_tx"] + test_different_versions_results.append(total_tx_msg_count) + if len(test_different_versions_results) >= 4: + assert len(set(test_different_versions_results)) >= 2 - comps = compose_rate_limits(rl_1, rl_2) - # v2 limits are used if present - assert ProtocolMessageTypes.respond_children not in comps["rate_limits_other"] - assert ProtocolMessageTypes.respond_children in comps["rate_limits_tx"] - # Otherwise, fall back to v1 - assert ProtocolMessageTypes.request_block in rl_1["rate_limits_other"] - assert ProtocolMessageTypes.request_block not in rl_1["rate_limits_tx"] +@pytest.mark.anyio +async def test_compose() -> None: + rl_1 = rl_numbers[1] + rl_2 = rl_numbers[2] + rl_1_rate_limits_other = cast(dict[ProtocolMessageTypes, RLSettings], rl_1["rate_limits_other"]) + rl_2_rate_limits_other = cast(dict[ProtocolMessageTypes, RLSettings], rl_2["rate_limits_other"]) + rl_1_rate_limits_tx = cast(dict[ProtocolMessageTypes, RLSettings], rl_1["rate_limits_tx"]) + rl_2_rate_limits_tx = cast(dict[ProtocolMessageTypes, RLSettings], rl_2["rate_limits_tx"]) + assert ProtocolMessageTypes.respond_children in rl_1_rate_limits_other + assert ProtocolMessageTypes.respond_children not in rl_1_rate_limits_tx + assert ProtocolMessageTypes.respond_children not in rl_2_rate_limits_other + assert ProtocolMessageTypes.respond_children in rl_2_rate_limits_tx + + assert ProtocolMessageTypes.request_block in rl_1_rate_limits_other + assert ProtocolMessageTypes.request_block not in rl_1_rate_limits_tx + assert ProtocolMessageTypes.request_block not in rl_2_rate_limits_other + assert ProtocolMessageTypes.request_block not in rl_2_rate_limits_tx + + comps = compose_rate_limits(rl_1, rl_2) + # v2 limits are used if present + assert ProtocolMessageTypes.respond_children not in comps["rate_limits_other"] + assert ProtocolMessageTypes.respond_children in comps["rate_limits_tx"] + + # Otherwise, fall back to v1 + assert ProtocolMessageTypes.request_block in rl_1_rate_limits_other + assert ProtocolMessageTypes.request_block not in rl_1_rate_limits_tx @pytest.mark.anyio @@ -386,8 +482,8 @@ async def test_compose(self): (ProtocolMessageTypes.reject_block, 90), ], ) -async def test_unlimited(msg_type: ProtocolMessageTypes, size: int): - r = RateLimiter(incoming=False) +async def test_unlimited(msg_type: ProtocolMessageTypes, size: int) -> None: + r = RateLimiter(incoming=False, get_time=lambda: 0) message = make_msg(msg_type, bytes([1] * size)) @@ -443,8 +539,12 @@ async def test_unlimited(msg_type: ProtocolMessageTypes, size: int): indirect=True, ) async def test_unsolicited_responses( - node_with_params, node_with_params_b, self_hostname: str, msg_type: ProtocolMessageTypes, bt: BlockTools -): + node_with_params: FullNodeSimulator, + node_with_params_b: FullNodeSimulator, + self_hostname: str, + msg_type: ProtocolMessageTypes, + bt: BlockTools, +) -> None: node_a = node_with_params node_b = node_with_params_b diff --git a/chia/_tests/core/server/test_server.py b/chia/_tests/core/server/test_server.py index 90ce91c3b65c..4585a4221673 100644 --- a/chia/_tests/core/server/test_server.py +++ 
b/chia/_tests/core/server/test_server.py @@ -14,6 +14,7 @@ from chia._tests.util.setup_nodes import SimulatorsAndWalletsServices from chia._tests.util.time_out_assert import time_out_assert from chia.full_node.full_node_api import FullNodeAPI +from chia.full_node.start_full_node import create_full_node_service from chia.protocols.full_node_protocol import RejectBlock, RequestBlock, RequestTransaction from chia.protocols.outbound_message import NodeType, make_msg from chia.protocols.protocol_message_types import ProtocolMessageTypes @@ -21,12 +22,11 @@ from chia.protocols.wallet_protocol import RejectHeaderRequest from chia.server.api_protocol import ApiMetadata from chia.server.server import ChiaServer -from chia.server.start_full_node import create_full_node_service -from chia.server.start_wallet import create_wallet_service from chia.server.ws_connection import WSChiaConnection, error_response_version from chia.simulator.block_tools import BlockTools from chia.types.peer_info import PeerInfo from chia.util.errors import ApiError, Err +from chia.wallet.start_wallet import create_wallet_service @dataclass diff --git a/chia/_tests/core/services/test_services.py b/chia/_tests/core/services/test_services.py index 841c2cf81913..e87da2e6339e 100644 --- a/chia/_tests/core/services/test_services.py +++ b/chia/_tests/core/services/test_services.py @@ -86,15 +86,15 @@ async def test_daemon_terminates(signal_number: signal.Signals, chia_root: ChiaR @pytest.mark.parametrize( argnames=["create_service", "module_path", "service_config_name"], argvalues=[ - [DataLayerRpcClient.create_as_context, "chia.server.start_data_layer", "data_layer"], - [FarmerRpcClient.create_as_context, "chia.server.start_farmer", "farmer"], - [FullNodeRpcClient.create_as_context, "chia.server.start_full_node", "full_node"], - [HarvesterRpcClient.create_as_context, "chia.server.start_harvester", "harvester"], - [WalletRpcClient.create_as_context, "chia.server.start_wallet", "wallet"], - [None, "chia.server.start_introducer", "introducer"], + [DataLayerRpcClient.create_as_context, "chia.data_layer.start_data_layer", "data_layer"], + [FarmerRpcClient.create_as_context, "chia.farmer.start_farmer", "farmer"], + [FullNodeRpcClient.create_as_context, "chia.full_node.start_full_node", "full_node"], + [HarvesterRpcClient.create_as_context, "chia.harvester.start_harvester", "harvester"], + [WalletRpcClient.create_as_context, "chia.wallet.start_wallet", "wallet"], + [None, "chia.introducer.start_introducer", "introducer"], # TODO: fails... 
make it not do that # [None, "chia.seeder.start_crawler", "crawler"], - [None, "chia.server.start_timelord", "timelord"], + [None, "chia.timelord.start_timelord", "timelord"], pytest.param( None, "chia.timelord.timelord_launcher", diff --git a/chia/_tests/core/test_crawler.py b/chia/_tests/core/test_crawler.py index 865944b165db..38e6bb8eea07 100644 --- a/chia/_tests/core/test_crawler.py +++ b/chia/_tests/core/test_crawler.py @@ -16,8 +16,8 @@ from chia.protocols.outbound_message import make_msg from chia.protocols.protocol_message_types import ProtocolMessageTypes from chia.protocols.wallet_protocol import RequestChildren +from chia.seeder.crawler_service import CrawlerService from chia.seeder.peer_record import PeerRecord, PeerReliability -from chia.server.aliases import CrawlerService from chia.types.peer_info import PeerInfo diff --git a/chia/_tests/core/test_full_node_rpc.py b/chia/_tests/core/test_full_node_rpc.py index 4df0ef30b2ee..8bde0aea2b04 100644 --- a/chia/_tests/core/test_full_node_rpc.py +++ b/chia/_tests/core/test_full_node_rpc.py @@ -457,9 +457,9 @@ async def test_signage_points( full_node_service_1.config, ) as client: # Only provide one - with pytest.raises(ValueError, match="sp_hash or challenge_hash must be provided."): + with pytest.raises(ValueError, match="sp_hash or challenge_hash must be provided"): await client.get_recent_signage_point_or_eos(None, None) - with pytest.raises(ValueError, match="Either sp_hash or challenge_hash must be provided, not both."): + with pytest.raises(ValueError, match="Either sp_hash or challenge_hash must be provided, not both"): await client.get_recent_signage_point_or_eos(std_hash(b"0"), std_hash(b"1")) # Not found with pytest.raises(ValueError, match="in cache"): @@ -548,6 +548,7 @@ async def test_signage_points( # Signage point is no longer in the blockchain res = await client.get_recent_signage_point_or_eos(sp.cc_vdf.output.get_hash(), None) + assert res is not None assert res["reverted"] assert res["signage_point"] == sp assert "eos" not in res diff --git a/chia/_tests/core/util/test_keychain.py b/chia/_tests/core/util/test_keychain.py index c0ba58e99886..b252667c5f21 100644 --- a/chia/_tests/core/util/test_keychain.py +++ b/chia/_tests/core/util/test_keychain.py @@ -220,9 +220,9 @@ def test_bip39_test_vectors_short(self): test_vectors_path = importlib_resources.files(chia._tests.util.__name__).joinpath("bip39_test_vectors.json") all_vectors = json.loads(test_vectors_path.read_text(encoding="utf-8")) - for idx, [entropy_hex, full_mnemonic, seed, short_mnemonic] in enumerate(all_vectors["english"]): + for idx, [entropy_hex, full_mnemonic, seed_hex, short_mnemonic] in enumerate(all_vectors["english"]): entropy_bytes = bytes.fromhex(entropy_hex) - seed = bytes.fromhex(seed) + seed = bytes.fromhex(seed_hex) assert mnemonic_from_short_words(short_mnemonic) == full_mnemonic assert bytes_from_mnemonic(short_mnemonic) == entropy_bytes diff --git a/chia/_tests/core/util/test_streamable.py b/chia/_tests/core/util/test_streamable.py index b75aaddfe867..665491d9793a 100644 --- a/chia/_tests/core/util/test_streamable.py +++ b/chia/_tests/core/util/test_streamable.py @@ -3,6 +3,7 @@ import io import re from dataclasses import dataclass, field, fields +from enum import Enum from typing import Any, Callable, ClassVar, Optional, get_type_hints import pytest @@ -27,6 +28,7 @@ function_to_parse_one_item, function_to_stream_one_item, is_type_Dict, + is_type_Enum, is_type_List, is_type_SpecificOptional, is_type_Tuple, @@ -39,6 +41,7 @@ parse_uint32, 
recurse_jsonify, streamable, + streamable_enum, streamable_from_dict, write_uint32, ) @@ -376,6 +379,25 @@ def test_basic_optional() -> None: assert not is_type_SpecificOptional(list[int]) +class BasicEnum(Enum): + A = 1 + B = 2 + + +def test_basic_enum() -> None: + assert is_type_Enum(BasicEnum) + assert not is_type_Enum(list[int]) + + +def test_enum_needs_proxy() -> None: + with pytest.raises(UnsupportedType): + + @streamable + @dataclass(frozen=True) + class EnumStreamable(Streamable): + enum: BasicEnum + + @streamable @dataclass(frozen=True) class PostInitTestClassBasic(Streamable): @@ -423,6 +445,25 @@ class PostInitTestClassDict(Streamable): b: dict[bytes32, dict[uint8, str]] +@streamable_enum(uint32) +class IntegerEnum(Enum): + A = 1 + B = 2 + + +@streamable_enum(str) +class StringEnum(Enum): + A = "foo" + B = "bar" + + +@streamable +@dataclass(frozen=True) +class PostInitTestClassEnum(Streamable): + a: IntegerEnum + b: StringEnum + + @pytest.mark.parametrize( "test_class, args", [ @@ -433,6 +474,7 @@ class PostInitTestClassDict(Streamable): (PostInitTestClassTuple, ((1, "test"), ((200, "test_2"), b"\xba" * 32))), (PostInitTestClassDict, ({1: "bar"}, {bytes32.zeros: {1: "bar"}})), (PostInitTestClassOptional, (12, None, 13, None)), + (PostInitTestClassEnum, (IntegerEnum.A, StringEnum.B)), ], ) def test_post_init_valid(test_class: type[Any], args: tuple[Any, ...]) -> None: @@ -453,6 +495,8 @@ def validate_item_type(type_in: type[Any], item: object) -> bool: return validate_item_type(key_type, next(iter(item.keys()))) and validate_item_type( value_type, next(iter(item.values())) ) + if is_type_Enum(type_in): + return validate_item_type(type_in._streamable_proxy, type_in._streamable_proxy(item.value)) # type: ignore[attr-defined] return isinstance(item, type_in) test_object = test_class(*args) @@ -497,6 +541,8 @@ class TestClass(Streamable): f: Optional[uint32] g: tuple[uint32, str, bytes] h: dict[uint32, str] + i: IntegerEnum + j: StringEnum # we want to test invalid here, hence the ignore. 
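+ # (the enum fields below are assigned valid members here; invalid enum + # payloads are covered by the deserialization tests further down)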
a = TestClass( @@ -508,6 +554,8 @@ class TestClass(Streamable): None, (uint32(383), "hello", b"goodbye"), {uint32(1): "foo"}, + IntegerEnum.A, + StringEnum.B, ) b: bytes = bytes(a) @@ -619,10 +667,21 @@ class TestClassUint(Streamable): a: uint32 # Does not have the required uint size - with pytest.raises(ValueError): + with pytest.raises(ValueError, match=re.escape("uint32.from_bytes() requires 4 bytes but got: 2")): TestClassUint.from_bytes(b"\x00\x00") +def test_ambiguous_deserialization_int_enum() -> None: + @streamable + @dataclass(frozen=True) + class TestClassIntegerEnum(Streamable): + a: IntegerEnum + + # passed bytes are incorrect size for serialization proxy + with pytest.raises(ValueError, match=re.escape("uint32.from_bytes() requires 4 bytes but got: 2")): + TestClassIntegerEnum.from_bytes(b"\x00\x00") + + def test_ambiguous_deserialization_list() -> None: @streamable @dataclass(frozen=True) @@ -656,6 +715,28 @@ class TestClassStr(Streamable): TestClassStr.from_bytes(bytes([0, 0, 100, 24, 52])) +def test_ambiguous_deserialization_str_enum() -> None: + @streamable + @dataclass(frozen=True) + class TestClassStr(Streamable): + a: StringEnum + + # passed bytes are incorrect size for serialization proxy + with pytest.raises(AssertionError): + TestClassStr.from_bytes(bytes([0, 0, 100, 24, 52])) + + +def test_deserialization_to_invalid_enum() -> None: + @streamable + @dataclass(frozen=True) + class TestClassStr(Streamable): + a: StringEnum + + # encodes the string "baz" which is not a valid value for StringEnum + with pytest.raises(ValueError, match=re.escape("'baz' is not a valid StringEnum")): + TestClassStr.from_bytes(bytes([0, 0, 0, 3, 98, 97, 122])) + + def test_ambiguous_deserialization_bytes() -> None: @streamable @dataclass(frozen=True) diff --git a/chia/_tests/environments/wallet.py b/chia/_tests/environments/wallet.py index b964a03a1ef6..df38388c2102 100644 --- a/chia/_tests/environments/wallet.py +++ b/chia/_tests/environments/wallet.py @@ -25,6 +25,7 @@ from chia.wallet.wallet import Wallet from chia.wallet.wallet_node import Balance, WalletNode from chia.wallet.wallet_node_api import WalletNodeAPI +from chia.wallet.wallet_request_types import GetWalletBalance from chia.wallet.wallet_rpc_api import WalletRpcApi from chia.wallet.wallet_rpc_client import WalletRpcClient from chia.wallet.wallet_state_manager import WalletStateManager @@ -169,7 +170,9 @@ async def check_balances(self, additional_balance_info: dict[Union[int, str], di else {} ), } - balance_response: dict[str, int] = await self.rpc_client.get_wallet_balance(wallet_id) + balance_response: dict[str, int] = ( + await self.rpc_client.get_wallet_balance(GetWalletBalance(wallet_id)) + ).wallet_balance.to_json_dict() if not expected_result.items() <= balance_response.items(): for key, value in expected_result.items(): diff --git a/chia/_tests/farmer_harvester/test_farmer.py b/chia/_tests/farmer_harvester/test_farmer.py index fa5a0f125fda..252da1d14a48 100644 --- a/chia/_tests/farmer_harvester/test_farmer.py +++ b/chia/_tests/farmer_harvester/test_farmer.py @@ -9,7 +9,7 @@ from unittest.mock import ANY import pytest -from chia_rs import AugSchemeMPL, G1Element, G2Element, PrivateKey, ProofOfSpace +from chia_rs import AugSchemeMPL, G1Element, G2Element, PlotSize, PrivateKey, ProofOfSpace from chia_rs.sized_bytes import bytes32 from chia_rs.sized_ints import uint8, uint16, uint32, uint64 from pytest_mock import MockerFixture @@ -21,15 +21,17 @@ from chia._tests.util.misc import DataCase, Marks, datacases from 
chia.consensus.default_constants import DEFAULT_CONSTANTS from chia.farmer.farmer import UPDATE_POOL_FARMER_INFO_INTERVAL, Farmer, increment_pool_stats, strip_old_entries +from chia.farmer.farmer_service import FarmerService +from chia.harvester.harvester_service import HarvesterService from chia.pools.pool_config import PoolWalletConfig from chia.protocols import farmer_protocol, harvester_protocol from chia.protocols.harvester_protocol import NewProofOfSpace, RespondSignatures from chia.protocols.pool_protocol import PoolErrorCode -from chia.server.aliases import FarmerService, HarvesterService from chia.server.ws_connection import WSChiaConnection from chia.simulator.block_tools import BlockTools from chia.types.blockchain_format.proof_of_space import ( generate_plot_public_key, + make_pos, verify_and_get_quality_string, ) from chia.util.config import load_config, save_config @@ -132,7 +134,7 @@ class NewProofOfSpaceCase: plot_identifier: str signage_point_index: uint8 plot_id: bytes32 - plot_size: uint8 + plot_size: PlotSize plot_challenge: bytes32 plot_public_key: G1Element pool_public_key: Optional[G1Element] @@ -187,7 +189,7 @@ def create_verified_quality_case( plot_identifier="test", signage_point_index=uint8(1), plot_id=bytes32.fromhex("baaa6780c53d4b3739b8807b4ae79a76644ddf0d9e03dc7d0a6a0e613e764d9f"), - plot_size=uint8(32), + plot_size=PlotSize.make_v1(32), plot_challenge=bytes32.fromhex("7580e4c366dc2c94c37ce44943f9629a3cd6e027d7b24cd014adeaa578d4b0a2"), plot_public_key=G1Element.from_bytes( bytes.fromhex( @@ -573,7 +575,7 @@ async def test_farmer_new_proof_of_space_for_pool_stats( peak_height=uint32(1), last_tx_height=uint32(0), ) - pos = ProofOfSpace( + pos = make_pos( challenge=case.plot_challenge, pool_public_key=case.pool_public_key, pool_contract_puzzle_hash=case.pool_contract_puzzle_hash, @@ -714,7 +716,7 @@ def create_valid_pos(farmer: Farmer) -> tuple[farmer_protocol.NewSignagePoint, P peak_height=uint32(1), last_tx_height=uint32(0), ) - pos = ProofOfSpace( + pos = make_pos( challenge=case.plot_challenge, pool_public_key=case.pool_public_key, pool_contract_puzzle_hash=case.pool_contract_puzzle_hash, diff --git a/chia/_tests/farmer_harvester/test_farmer_harvester.py b/chia/_tests/farmer_harvester/test_farmer_harvester.py index bd3a4b0edbb4..9d25e6815f5a 100644 --- a/chia/_tests/farmer_harvester/test_farmer_harvester.py +++ b/chia/_tests/farmer_harvester/test_farmer_harvester.py @@ -1,9 +1,11 @@ from __future__ import annotations import asyncio +import unittest.mock from math import floor from pathlib import Path from typing import Any, Optional +from unittest.mock import AsyncMock, Mock import pytest from chia_rs import G1Element @@ -15,19 +17,41 @@ from chia._tests.util.time_out_assert import time_out_assert from chia.cmds.cmds_util import get_any_service_client from chia.farmer.farmer import Farmer +from chia.farmer.farmer_service import FarmerService from chia.harvester.harvester_rpc_client import HarvesterRpcClient +from chia.harvester.harvester_service import HarvesterService from chia.plotting.util import PlotsRefreshParameter -from chia.protocols import farmer_protocol, harvester_protocol +from chia.protocols import farmer_protocol, harvester_protocol, solver_protocol from chia.protocols.outbound_message import NodeType, make_msg from chia.protocols.protocol_message_types import ProtocolMessageTypes -from chia.server.aliases import FarmerService, HarvesterService from chia.simulator.block_tools import BlockTools +from chia.solver.solver_service import 
SolverService from chia.types.peer_info import UnresolvedPeerInfo from chia.util.config import load_config from chia.util.hash import std_hash from chia.util.keychain import generate_mnemonic +async def get_harvester_peer(farmer: Farmer) -> Any: + """wait for harvester connection and return the peer""" + + def has_harvester_connection() -> bool: + return len(farmer.server.get_connections(NodeType.HARVESTER)) > 0 + + await time_out_assert(10, has_harvester_connection, True) + return farmer.server.get_connections(NodeType.HARVESTER)[0] + + +async def get_solver_peer(farmer: Farmer) -> Any: + """wait for solver connection and return the peer""" + + def has_solver_connection() -> bool: + return len(farmer.server.get_connections(NodeType.SOLVER)) > 0 + + await time_out_assert(60, has_solver_connection, True) + return farmer.server.get_connections(NodeType.SOLVER)[0] + + def farmer_is_started(farmer: Farmer) -> bool: return farmer.started @@ -143,9 +167,6 @@ async def test_farmer_respond_signatures( # messages even though it didn't request them, to cover when the farmer doesn't know # about an sp_hash, so it fails at the sp record check. - def log_is_ready() -> bool: - return len(caplog.text) > 0 - _, _, harvester_service, _, _ = harvester_farmer_environment # We won't have an sp record for this one challenge_hash = bytes32(b"1" * 32) @@ -160,11 +181,16 @@ def log_is_ready() -> bool: include_source_signature_data=False, farmer_reward_address_override=None, ) + + expected_error = f"Do not have challenge hash {challenge_hash}" + + def expected_log_is_ready() -> bool: + return expected_error in caplog.text + msg = make_msg(ProtocolMessageTypes.respond_signatures, response) await harvester_service._node.server.send_to_all([msg], NodeType.FARMER) - await time_out_assert(5, log_is_ready) - # We fail the sps record check - expected_error = f"Do not have challenge hash {challenge_hash}" + await time_out_assert(10, expected_log_is_ready) + # We should find the error message assert expected_error in caplog.text @@ -297,3 +323,275 @@ async def test_harvester_has_no_server( harvester_server = harvesters[0]._server assert harvester_server.webserver is None + + +@pytest.mark.anyio +async def test_v2_partial_proofs_new_sp_hash( + farmer_one_harvester_solver: tuple[list[HarvesterService], FarmerService, SolverService, BlockTools], +) -> None: + _, farmer_service, _solver_service, _bt = farmer_one_harvester_solver + farmer_api = farmer_service._api + farmer = farmer_api.farmer + + sp_hash = bytes32(b"1" * 32) + partial_proofs = harvester_protocol.PartialProofsData( + challenge_hash=bytes32(b"2" * 32), + sp_hash=sp_hash, + plot_identifier="test_plot_id", + partial_proofs=[b"test_partial_proof_1"], + signage_point_index=uint8(0), + plot_size=uint8(32), + pool_public_key=None, + pool_contract_puzzle_hash=bytes32(b"4" * 32), + plot_public_key=G1Element(), + ) + + harvester_peer = await get_harvester_peer(farmer) + await farmer_api.partial_proofs(partial_proofs, harvester_peer) + + assert sp_hash in farmer.number_of_responses + assert farmer.number_of_responses[sp_hash] == 0 + assert sp_hash in farmer.cache_add_time + + +@pytest.mark.anyio +async def test_v2_partial_proofs_missing_sp_hash( + caplog: pytest.LogCaptureFixture, + farmer_one_harvester_solver: tuple[list[HarvesterService], FarmerService, SolverService, BlockTools], +) -> None: + _, farmer_service, _, _ = farmer_one_harvester_solver + farmer_api = farmer_service._api + + sp_hash = bytes32(b"1" * 32) + partial_proofs = harvester_protocol.PartialProofsData( 
+ challenge_hash=bytes32(b"2" * 32), + sp_hash=sp_hash, + plot_identifier="test_plot_id", + partial_proofs=[b"test_partial_proof_1"], + signage_point_index=uint8(0), + plot_size=uint8(32), + pool_public_key=None, + pool_contract_puzzle_hash=bytes32(b"4" * 32), + plot_public_key=G1Element(), + ) + + harvester_peer = await get_harvester_peer(farmer_api.farmer) + await farmer_api.partial_proofs(partial_proofs, harvester_peer) + + assert f"Received partial proofs for a signage point that we do not have {sp_hash}" in caplog.text + + +@pytest.mark.anyio +async def test_v2_partial_proofs_with_existing_sp( + farmer_one_harvester_solver: tuple[list[HarvesterService], FarmerService, SolverService, BlockTools], +) -> None: + _, farmer_service, _, _ = farmer_one_harvester_solver + farmer_api = farmer_service._api + farmer = farmer_api.farmer + + sp_hash = bytes32(b"1" * 32) + challenge_hash = bytes32(b"2" * 32) + + sp = farmer_protocol.NewSignagePoint( + challenge_hash=challenge_hash, + challenge_chain_sp=sp_hash, + reward_chain_sp=std_hash(b"1"), + difficulty=uint64(1000), + sub_slot_iters=uint64(1000), + signage_point_index=uint8(0), + peak_height=uint32(1), + last_tx_height=uint32(0), + ) + + farmer.sps[sp_hash] = [sp] + + partial_proofs = harvester_protocol.PartialProofsData( + challenge_hash=challenge_hash, + sp_hash=sp_hash, + plot_identifier="test_plot_id", + partial_proofs=[b"test_partial_proof_1", b"test_partial_proof_2"], + signage_point_index=uint8(0), + plot_size=uint8(32), + pool_public_key=G1Element(), + pool_contract_puzzle_hash=bytes32(b"4" * 32), + plot_public_key=G1Element(), + ) + + harvester_peer = await get_harvester_peer(farmer) + await farmer_api.partial_proofs(partial_proofs, harvester_peer) + + # should store 2 pending requests (one per partial proof) + assert len(farmer.pending_solver_requests) == 2 + assert sp_hash in farmer.cache_add_time + + +@pytest.mark.anyio +async def test_solution_response_handler( + farmer_one_harvester_solver: tuple[list[HarvesterService], FarmerService, SolverService, BlockTools], +) -> None: + _, farmer_service, _, _ = farmer_one_harvester_solver + farmer_api = farmer_service._api + farmer = farmer_api.farmer + + # set up a pending request + sp_hash = bytes32(b"1" * 32) + challenge_hash = bytes32(b"2" * 32) + + partial_proofs = harvester_protocol.PartialProofsData( + challenge_hash=challenge_hash, + sp_hash=sp_hash, + plot_identifier="test_plot_id", + partial_proofs=[b"test_partial_proof_for_quality"], + signage_point_index=uint8(0), + plot_size=uint8(32), + pool_public_key=G1Element(), + pool_contract_puzzle_hash=bytes32(b"4" * 32), + plot_public_key=G1Element(), + ) + + harvester_peer = await get_harvester_peer(farmer) + + # manually add pending request + farmer.pending_solver_requests[partial_proofs.partial_proofs[0]] = { + "proof_data": partial_proofs, + "peer": harvester_peer, + } + + # create solution response + solution_response = solver_protocol.SolverResponse( + partial_proof=partial_proofs.partial_proofs[0], proof=b"test_proof_from_solver" + ) + solver_peer = Mock() + solver_peer.peer_node_id = "solver_peer" + + with unittest.mock.patch.object(farmer_api, "new_proof_of_space", new_callable=AsyncMock) as mock_new_proof: + await farmer_api.solution_response(solution_response, solver_peer) + + # verify new_proof_of_space was called with correct proof + mock_new_proof.assert_called_once() + call_args = mock_new_proof.call_args[0] + new_proof_of_space = call_args[0] + original_peer = call_args[1] + + assert new_proof_of_space.proof.proof == 
b"test_proof_from_solver" + assert original_peer == harvester_peer + + # verify pending request was removed + assert partial_proofs.partial_proofs[0] not in farmer.pending_solver_requests + + +@pytest.mark.anyio +async def test_solution_response_unknown_quality( + farmer_one_harvester_solver: tuple[list[HarvesterService], FarmerService, SolverService, BlockTools], +) -> None: + _, farmer_service, _, _ = farmer_one_harvester_solver + farmer_api = farmer_service._api + farmer = farmer_api.farmer + + # get real solver peer connection + solver_peer = await get_solver_peer(farmer) + + # create solution response with unknown quality + solution_response = solver_protocol.SolverResponse(partial_proof=bytes(b"1" * 32), proof=b"test_proof") + + with unittest.mock.patch.object(farmer_api, "new_proof_of_space", new_callable=AsyncMock) as mock_new_proof: + await farmer_api.solution_response(solution_response, solver_peer) + # verify new_proof_of_space was NOT called + mock_new_proof.assert_not_called() + # verify pending requests unchanged + assert len(farmer.pending_solver_requests) == 0 + + +@pytest.mark.anyio +async def test_solution_response_empty_proof( + farmer_one_harvester_solver: tuple[list[HarvesterService], FarmerService, SolverService, BlockTools], +) -> None: + _, farmer_service, _solver_service, _ = farmer_one_harvester_solver + farmer_api = farmer_service._api + farmer = farmer_api.farmer + + # set up a pending request + sp_hash = bytes32(b"1" * 32) + challenge_hash = bytes32(b"2" * 32) + + partial_proofs = harvester_protocol.PartialProofsData( + challenge_hash=challenge_hash, + sp_hash=sp_hash, + plot_identifier="test_plot_id", + partial_proofs=[b"test_partial_proof_for_quality"], + signage_point_index=uint8(0), + plot_size=uint8(32), + pool_public_key=G1Element(), + pool_contract_puzzle_hash=bytes32(b"4" * 32), + plot_public_key=G1Element(), + ) + + harvester_peer = Mock() + harvester_peer.peer_node_id = "harvester_peer" + + # manually add pending request + farmer.pending_solver_requests[partial_proofs.partial_proofs[0]] = { + "proof_data": partial_proofs.partial_proofs[0], + "peer": harvester_peer, + } + + # get real solver peer connection + solver_peer = await get_solver_peer(farmer) + + # create solution response with empty proof + solution_response = solver_protocol.SolverResponse(partial_proof=partial_proofs.partial_proofs[0], proof=b"") + + with unittest.mock.patch.object(farmer_api, "new_proof_of_space", new_callable=AsyncMock) as mock_new_proof: + await farmer_api.solution_response(solution_response, solver_peer) + + # verify new_proof_of_space was NOT called + mock_new_proof.assert_not_called() + + # verify pending request was removed (cleanup still happens) + assert partial_proofs.partial_proofs[0] not in farmer.pending_solver_requests + + +@pytest.mark.anyio +async def test_v2_partial_proofs_solver_exception( + farmer_one_harvester_solver: tuple[list[HarvesterService], FarmerService, SolverService, BlockTools], +) -> None: + _, farmer_service, _solver_service, _ = farmer_one_harvester_solver + farmer_api = farmer_service._api + farmer = farmer_api.farmer + + sp_hash = bytes32(b"1" * 32) + challenge_hash = bytes32(b"2" * 32) + + sp = farmer_protocol.NewSignagePoint( + challenge_hash=challenge_hash, + challenge_chain_sp=sp_hash, + reward_chain_sp=std_hash(b"1"), + difficulty=uint64(1000), + sub_slot_iters=uint64(1000), + signage_point_index=uint8(0), + peak_height=uint32(1), + last_tx_height=uint32(0), + ) + + farmer.sps[sp_hash] = [sp] + + partial_proofs = 
harvester_protocol.PartialProofsData( + challenge_hash=challenge_hash, + sp_hash=sp_hash, + plot_identifier="test_plot_id", + partial_proofs=[b"test_partial_proof_1"], + signage_point_index=uint8(0), + plot_size=uint8(32), + pool_public_key=G1Element(), + pool_contract_puzzle_hash=bytes32(b"4" * 32), + plot_public_key=G1Element(), + ) + + harvester_peer = await get_harvester_peer(farmer) + + # Mock send_to_all to raise an exception + with unittest.mock.patch.object(farmer.server, "send_to_all", side_effect=Exception("Solver connection failed")): + await farmer_api.partial_proofs(partial_proofs, harvester_peer) + + # verify pending request was cleaned up after exception + assert partial_proofs.partial_proofs[0] not in farmer.pending_solver_requests diff --git a/chia/_tests/farmer_harvester/test_filter_prefix_bits.py b/chia/_tests/farmer_harvester/test_filter_prefix_bits.py index 1d965c2603a5..b4bfedd016f0 100644 --- a/chia/_tests/farmer_harvester/test_filter_prefix_bits.py +++ b/chia/_tests/farmer_harvester/test_filter_prefix_bits.py @@ -16,8 +16,8 @@ from chia.farmer.farmer_api import FarmerAPI from chia.farmer.farmer_rpc_client import FarmerRpcClient from chia.harvester.harvester_rpc_client import HarvesterRpcClient +from chia.harvester.harvester_service import HarvesterService from chia.protocols import farmer_protocol -from chia.server.aliases import HarvesterService from chia.simulator.block_tools import create_block_tools_async, test_constants from chia.types.blockchain_format.proof_of_space import get_plot_id, passes_plot_filter from chia.util.keychain import Keychain diff --git a/chia/_tests/farmer_harvester/test_third_party_harvesters.py b/chia/_tests/farmer_harvester/test_third_party_harvesters.py index 690ce85e7aa2..99cec77f8699 100644 --- a/chia/_tests/farmer_harvester/test_third_party_harvesters.py +++ b/chia/_tests/farmer_harvester/test_third_party_harvesters.py @@ -31,15 +31,17 @@ from chia.consensus.multiprocess_validation import PreValidationResult, pre_validate_block from chia.farmer.farmer import Farmer, calculate_harvester_fee_quality from chia.farmer.farmer_api import FarmerAPI +from chia.farmer.farmer_service import FarmerService from chia.full_node.full_node import FullNode from chia.full_node.full_node_api import FullNodeAPI +from chia.full_node.full_node_service import FullNodeService from chia.harvester.harvester import Harvester from chia.harvester.harvester_api import HarvesterAPI +from chia.harvester.harvester_service import HarvesterService from chia.protocols import farmer_protocol, full_node_protocol, harvester_protocol, timelord_protocol from chia.protocols.harvester_protocol import ProofOfSpaceFeeInfo, RespondSignatures, SigningDataKind from chia.protocols.outbound_message import Message, NodeType, make_msg from chia.protocols.protocol_message_types import ProtocolMessageTypes -from chia.server.aliases import FarmerService, FullNodeService, HarvesterService from chia.server.server import ChiaServer from chia.server.ws_connection import WSChiaConnection from chia.simulator.block_tools import BlockTools diff --git a/chia/_tests/fee_estimation/test_fee_estimation_integration.py b/chia/_tests/fee_estimation/test_fee_estimation_integration.py index 31d3f0b504f1..4d10f323fc46 100644 --- a/chia/_tests/fee_estimation/test_fee_estimation_integration.py +++ b/chia/_tests/fee_estimation/test_fee_estimation_integration.py @@ -40,7 +40,7 @@ def make_mempoolitem() -> MempoolItem: fee = uint64(10000000) spends: list[SpendConditions] = [] - conds = 
SpendBundleConditions(spends, 0, 0, 0, None, None, [], cost, 0, 0, False, 0, 0) + conds = SpendBundleConditions(spends, 0, 0, 0, None, None, [], cost, 0, 0, False, 0, 0, 0, 0, 0) mempool_item = MempoolItem( spend_bundle, fee, diff --git a/chia/_tests/fee_estimation/test_fee_estimation_rpc.py b/chia/_tests/fee_estimation/test_fee_estimation_rpc.py index 22aa26682081..92aa00ea6405 100644 --- a/chia/_tests/fee_estimation/test_fee_estimation_rpc.py +++ b/chia/_tests/fee_estimation/test_fee_estimation_rpc.py @@ -10,7 +10,6 @@ from chia.full_node.full_node_rpc_api import FullNodeRpcApi from chia.full_node.full_node_rpc_client import FullNodeRpcClient -from chia.server.aliases import WalletService from chia.simulator.block_tools import BlockTools from chia.simulator.full_node_simulator import FullNodeSimulator from chia.simulator.simulator_protocol import FarmNewBlockProtocol @@ -18,6 +17,7 @@ from chia.simulator.wallet_tools import WalletTool from chia.types.blockchain_format.coin import Coin from chia.wallet.util.tx_config import DEFAULT_TX_CONFIG +from chia.wallet.wallet_service import WalletService @pytest.fixture(scope="function") diff --git a/chia/_tests/fee_estimation/test_fee_estimation_unit_tests.py b/chia/_tests/fee_estimation/test_fee_estimation_unit_tests.py index 27830d0b791f..cd91c3def305 100644 --- a/chia/_tests/fee_estimation/test_fee_estimation_unit_tests.py +++ b/chia/_tests/fee_estimation/test_fee_estimation_unit_tests.py @@ -68,8 +68,7 @@ def test_steady_fee_pressure() -> None: estimates_during = [] start_from = 250 for height in range(start, end): - height = uint32(height) - items = make_block(height, 1, cost, fee, num_blocks_wait_in_mempool) + items = make_block(uint32(height), 1, cost, fee, num_blocks_wait_in_mempool) estimator.new_block(FeeBlockInfo(uint32(height), items)) if height >= start_from: estimation = estimator.estimate_fee_rate(time_offset_seconds=time_offset_seconds * (height - start_from)) diff --git a/chia/_tests/generator/test_rom.py b/chia/_tests/generator/test_rom.py index 7358dd81b4fe..fb2a2aa7f420 100644 --- a/chia/_tests/generator/test_rom.py +++ b/chia/_tests/generator/test_rom.py @@ -146,7 +146,7 @@ def test_get_name_puzzle_conditions(self, softfork_height: int) -> None: before_seconds_relative=None, birth_height=None, birth_seconds=None, - create_coin=[(bytes([0] * 31 + [1]), 500, None)], + create_coin=[(bytes32([0] * 31 + [1]), 500, None)], agg_sig_me=[], agg_sig_parent=[], agg_sig_puzzle=[], @@ -159,6 +159,7 @@ def test_get_name_puzzle_conditions(self, softfork_height: int) -> None: # execution cost, just in run_block_generator2() execution_cost=0 if softfork_height < DEFAULT_CONSTANTS.HARD_FORK_HEIGHT else 44, condition_cost=1800000, + fingerprint=b"", ) assert npc_result.conds.spends == [spend] diff --git a/chia/_tests/harvester/__init__.py b/chia/_tests/harvester/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/chia/_tests/harvester/config.py b/chia/_tests/harvester/config.py new file mode 100644 index 000000000000..b593bfe59ade --- /dev/null +++ b/chia/_tests/harvester/config.py @@ -0,0 +1,4 @@ +from __future__ import annotations + +job_timeout = 70 +checkout_blocks_and_plots = True diff --git a/chia/_tests/harvester/test_harvester_api.py b/chia/_tests/harvester/test_harvester_api.py new file mode 100644 index 000000000000..5c9e6d59ba2c --- /dev/null +++ b/chia/_tests/harvester/test_harvester_api.py @@ -0,0 +1,157 @@ +from __future__ import annotations + +from collections.abc import AsyncGenerator, Iterator +from 
contextlib import contextmanager +from dataclasses import dataclass +from pathlib import Path +from unittest.mock import MagicMock, patch + +import pytest +from chia_rs import ConsensusConstants, FullBlock, ProofOfSpace +from chia_rs.sized_bytes import bytes32 +from chia_rs.sized_ints import uint64 + +from chia._tests.conftest import HarvesterFarmerEnvironment +from chia._tests.plotting.util import get_test_plots +from chia._tests.util.time_out_assert import time_out_assert +from chia.harvester.harvester_api import HarvesterAPI +from chia.plotting.util import PlotInfo +from chia.protocols import harvester_protocol +from chia.protocols.harvester_protocol import PoolDifficulty +from chia.protocols.protocol_message_types import ProtocolMessageTypes +from chia.server.ws_connection import WSChiaConnection + + +@dataclass +class HarvesterTestEnvironment: + """Test environment with real plots loaded for harvester testing.""" + + harvester_api: HarvesterAPI + plot_info: PlotInfo + plot_path: Path + + +@pytest.fixture(scope="function") +async def harvester_environment( + harvester_farmer_environment: HarvesterFarmerEnvironment, +) -> AsyncGenerator[HarvesterTestEnvironment, None]: + """Create a test environment with real plots loaded.""" + _, _, harvester_service, _, _ = harvester_farmer_environment + harvester_api = harvester_service._server.api + assert isinstance(harvester_api, HarvesterAPI) + test_plots = get_test_plots() + assert len(test_plots) > 0, "no test plots available" + plot_manager = harvester_api.harvester.plot_manager + plot_manager.start_refreshing() + await time_out_assert(10, lambda: len(plot_manager.plots) > 0, True) + plot_path, plot_info = next(iter(plot_manager.plots.items())) + yield HarvesterTestEnvironment(harvester_api, plot_info, plot_path) + plot_manager.stop_refreshing() + + +def signage_point_from_block( + block: FullBlock, constants: ConsensusConstants +) -> harvester_protocol.NewSignagePointHarvester2: + sp_index = block.reward_chain_block.signage_point_index + challenge_hash = block.reward_chain_block.pos_ss_cc_challenge_hash + sp_hash = ( + block.reward_chain_block.reward_chain_sp_vdf.output.get_hash() + if block.reward_chain_block.reward_chain_sp_vdf + else challenge_hash + ) + return harvester_protocol.NewSignagePointHarvester2( + challenge_hash=challenge_hash, + difficulty=uint64(constants.DIFFICULTY_STARTING), + sub_slot_iters=uint64(constants.SUB_SLOT_ITERS_STARTING), + signage_point_index=sp_index, + sp_hash=sp_hash, + pool_difficulties=[], + peak_height=block.height, + last_tx_height=block.height, + ) + + +def create_test_setup( + harvester_environment: HarvesterTestEnvironment, + default_400_blocks: list[FullBlock], + blockchain_constants: ConsensusConstants, +) -> tuple[HarvesterTestEnvironment, harvester_protocol.NewSignagePointHarvester2, MagicMock]: + env = harvester_environment + block = default_400_blocks[2] + new_challenge = signage_point_from_block(block, blockchain_constants) + mock_peer = MagicMock(spec=WSChiaConnection) + return env, new_challenge, mock_peer + + +@contextmanager +def mock_successful_proof(plot_info: PlotInfo) -> Iterator[None]: + with patch.object(plot_info.prover, "get_full_proof") as mock_get_proof: + mock_proof = MagicMock(spec=ProofOfSpace) + mock_get_proof.return_value = mock_proof, None + yield + + +def assert_farming_info_sent(mock_peer: MagicMock) -> None: + mock_peer.send_message.assert_called() + farming_info_calls = [ + call + for call in mock_peer.send_message.call_args_list + if call[0][0].type == 
ProtocolMessageTypes.farming_info.value + ] + assert len(farming_info_calls) == 1 + + +@pytest.mark.anyio +async def test_new_signage_point_harvester( + harvester_environment: HarvesterTestEnvironment, + default_400_blocks: list[FullBlock], + blockchain_constants: ConsensusConstants, +) -> None: + env, new_challenge, mock_peer = create_test_setup(harvester_environment, default_400_blocks, blockchain_constants) + with mock_successful_proof(env.plot_info): + await env.harvester_api.new_signage_point_harvester(new_challenge, mock_peer) + assert_farming_info_sent(mock_peer) + + +@pytest.mark.anyio +async def test_new_signage_point_harvester_pool_difficulty( + harvester_environment: HarvesterTestEnvironment, + default_400_blocks: list[FullBlock], + blockchain_constants: ConsensusConstants, +) -> None: + env, new_challenge, mock_peer = create_test_setup(harvester_environment, default_400_blocks, blockchain_constants) + pool_puzzle_hash = bytes32(b"pool" + b"0" * 28) + env.plot_info.pool_contract_puzzle_hash = pool_puzzle_hash + pool_difficulty = PoolDifficulty( + pool_contract_puzzle_hash=pool_puzzle_hash, + difficulty=uint64(500), + sub_slot_iters=uint64(67108864), + ) + + new_challenge = harvester_protocol.NewSignagePointHarvester2( + challenge_hash=new_challenge.challenge_hash, + difficulty=new_challenge.difficulty, + sub_slot_iters=new_challenge.sub_slot_iters, + signage_point_index=new_challenge.signage_point_index, + sp_hash=new_challenge.sp_hash, + pool_difficulties=[pool_difficulty], # add pool difficulty + peak_height=new_challenge.peak_height, + last_tx_height=new_challenge.last_tx_height, + ) + + with mock_successful_proof(env.plot_info): + await env.harvester_api.new_signage_point_harvester(new_challenge, mock_peer) + + assert_farming_info_sent(mock_peer) + + +@pytest.mark.anyio +async def test_new_signage_point_harvester_prover_error( + harvester_environment: HarvesterTestEnvironment, + default_400_blocks: list[FullBlock], + blockchain_constants: ConsensusConstants, +) -> None: + env, new_challenge, mock_peer = create_test_setup(harvester_environment, default_400_blocks, blockchain_constants) + with patch.object(env.plot_info.prover, "get_qualities_for_challenge", side_effect=RuntimeError("test error")): + # should not raise exception, should handle error gracefully + await env.harvester_api.new_signage_point_harvester(new_challenge, mock_peer) diff --git a/chia/_tests/plot_sync/test_plot_sync.py b/chia/_tests/plot_sync/test_plot_sync.py index 9e76d26b7a5a..58d79295600d 100644 --- a/chia/_tests/plot_sync/test_plot_sync.py +++ b/chia/_tests/plot_sync/test_plot_sync.py @@ -20,7 +20,9 @@ from chia._tests.util.split_managers import SplitAsyncManager, split_async_manager from chia._tests.util.time_out_assert import time_out_assert from chia.farmer.farmer import Farmer +from chia.farmer.farmer_service import FarmerService from chia.harvester.harvester import Harvester +from chia.harvester.harvester_service import HarvesterService from chia.plot_sync.delta import Delta, PathListDelta, PlotListDelta from chia.plot_sync.receiver import Receiver from chia.plot_sync.sender import Sender @@ -29,7 +31,6 @@ from chia.plotting.util import add_plot_directory, remove_plot_directory from chia.protocols.harvester_protocol import Plot from chia.protocols.protocol_message_types import ProtocolMessageTypes -from chia.server.aliases import FarmerService, HarvesterService from chia.simulator.block_tools import BlockTools from chia.util.config import create_default_chia_config, lock_and_load_config, 
save_config from chia.util.streamable import _T_Streamable @@ -194,7 +195,8 @@ async def plot_sync_callback(self, peer_id: bytes32, delta: Optional[Delta]) -> plot = harvester.plot_manager.plots.get(Path(path), None) assert plot is not None assert plot.prover.get_filename() == delta.valid.additions[path].filename - assert plot.prover.get_size() == delta.valid.additions[path].size + # TODO: todo_v2_plots support v2 plots + assert plot.prover.get_size().size_v1 == delta.valid.additions[path].size assert plot.prover.get_id() == delta.valid.additions[path].plot_id assert plot.prover.get_compression_level() == delta.valid.additions[path].compression_level assert plot.pool_public_key == delta.valid.additions[path].pool_public_key @@ -255,7 +257,8 @@ async def run_sync_test(self) -> None: for path, plot_info in plot_manager.plots.items(): assert str(path) in receiver.plots() assert plot_info.prover.get_filename() == receiver.plots()[str(path)].filename - assert plot_info.prover.get_size() == receiver.plots()[str(path)].size + # TODO: todo_v2_plots support v2 plots + assert plot_info.prover.get_size().size_v1 == receiver.plots()[str(path)].size assert plot_info.prover.get_id() == receiver.plots()[str(path)].plot_id assert plot_info.prover.get_compression_level() == receiver.plots()[str(path)].compression_level assert plot_info.pool_public_key == receiver.plots()[str(path)].pool_public_key diff --git a/chia/_tests/plot_sync/test_receiver.py b/chia/_tests/plot_sync/test_receiver.py index 489d916af781..0d155b579435 100644 --- a/chia/_tests/plot_sync/test_receiver.py +++ b/chia/_tests/plot_sync/test_receiver.py @@ -7,7 +7,7 @@ from typing import Any, Callable, Union import pytest -from chia_rs import G1Element +from chia_rs import G1Element, PlotSize from chia_rs.sized_bytes import bytes32 from chia_rs.sized_ints import uint8, uint32, uint64 @@ -185,8 +185,12 @@ def plot_sync_setup(seeded_random: random.Random) -> tuple[Receiver, list[SyncSt # Manually add the plots we want to remove in tests receiver._plots = {plot_info.filename: plot_info for plot_info in plot_info_list[0:10]} receiver._total_plot_size = sum(plot.file_size for plot in receiver.plots().values()) + # TODO: todo_v2_plots support v2 plots receiver._total_effective_plot_size = int( - sum(UI_ACTUAL_SPACE_CONSTANT_FACTOR * int(_expected_plot_size(plot.size)) for plot in receiver.plots().values()) + sum( + UI_ACTUAL_SPACE_CONSTANT_FACTOR * int(_expected_plot_size(PlotSize.make_v1(plot.size))) + for plot in receiver.plots().values() + ) ) sync_steps: list[SyncStepData] = [ SyncStepData( @@ -266,7 +270,11 @@ async def test_to_dict(counts_only: bool, seeded_random: random.Random) -> None: assert get_list_or_len(plot_sync_dict_1["duplicates"], not counts_only) == 0 assert plot_sync_dict_1["total_plot_size"] == sum(plot.file_size for plot in receiver.plots().values()) assert plot_sync_dict_1["total_effective_plot_size"] == int( - sum(UI_ACTUAL_SPACE_CONSTANT_FACTOR * int(_expected_plot_size(plot.size)) for plot in receiver.plots().values()) + # TODO: todo_v2_plots support v2 plots + sum( + UI_ACTUAL_SPACE_CONSTANT_FACTOR * int(_expected_plot_size(PlotSize.make_v1(plot.size))) + for plot in receiver.plots().values() + ) ) assert plot_sync_dict_1["syncing"] is None assert plot_sync_dict_1["last_sync_time"] is None @@ -312,8 +320,12 @@ async def test_to_dict(counts_only: bool, seeded_random: random.Random) -> None: assert get_list_or_len(sync_steps[State.duplicates].args[0], counts_only) == plot_sync_dict_3["duplicates"] assert 
plot_sync_dict_3["total_plot_size"] == sum(plot.file_size for plot in receiver.plots().values()) + # TODO: todo_v2_plots support v2 plots assert plot_sync_dict_3["total_effective_plot_size"] == int( - sum(UI_ACTUAL_SPACE_CONSTANT_FACTOR * int(_expected_plot_size(plot.size)) for plot in receiver.plots().values()) + sum( + UI_ACTUAL_SPACE_CONSTANT_FACTOR * int(_expected_plot_size(PlotSize.make_v1(plot.size))) + for plot in receiver.plots().values() + ) ) assert plot_sync_dict_3["last_sync_time"] > 0 assert plot_sync_dict_3["syncing"] is None diff --git a/chia/_tests/plot_sync/test_sync_simulated.py b/chia/_tests/plot_sync/test_sync_simulated.py index 96d838f9d0e7..8a4cc517fdad 100644 --- a/chia/_tests/plot_sync/test_sync_simulated.py +++ b/chia/_tests/plot_sync/test_sync_simulated.py @@ -20,7 +20,9 @@ from chia._tests.plot_sync.util import start_harvester_service from chia._tests.util.time_out_assert import time_out_assert from chia.farmer.farmer import Farmer +from chia.farmer.farmer_service import FarmerService from chia.harvester.harvester import Harvester +from chia.harvester.harvester_service import HarvesterService from chia.plot_sync.receiver import Receiver from chia.plot_sync.sender import Sender from chia.plot_sync.util import Constants @@ -30,7 +32,6 @@ from chia.protocols.harvester_protocol import PlotSyncError, PlotSyncResponse from chia.protocols.outbound_message import make_msg from chia.protocols.protocol_message_types import ProtocolMessageTypes -from chia.server.aliases import FarmerService, HarvesterService from chia.server.ws_connection import WSChiaConnection from chia.simulator.block_tools import BlockTools from chia.util.batches import to_batches diff --git a/chia/_tests/plot_sync/util.py b/chia/_tests/plot_sync/util.py index e619e759436e..aaf238e1330b 100644 --- a/chia/_tests/plot_sync/util.py +++ b/chia/_tests/plot_sync/util.py @@ -11,11 +11,12 @@ from chia._tests.util.split_managers import SplitAsyncManager, split_async_manager from chia._tests.util.time_out_assert import time_out_assert +from chia.farmer.farmer_service import FarmerService from chia.harvester.harvester import Harvester +from chia.harvester.harvester_service import HarvesterService from chia.plot_sync.sender import Sender from chia.protocols.harvester_protocol import PlotSyncIdentifier from chia.protocols.outbound_message import Message, NodeType -from chia.server.aliases import FarmerService, HarvesterService from chia.types.peer_info import PeerInfo, UnresolvedPeerInfo diff --git a/chia/_tests/plotting/test_plot_manager.py b/chia/_tests/plotting/test_plot_manager.py index 3108da756943..d326da25e4d3 100644 --- a/chia/_tests/plotting/test_plot_manager.py +++ b/chia/_tests/plotting/test_plot_manager.py @@ -117,10 +117,9 @@ def refresh_callback(self, event: PlotRefreshEvents, refresh_result: PlotRefresh if plot_info.prover.get_filename() == value.prover.get_filename(): values_found += 1 continue - else: - if value in expected_list: - values_found += 1 - continue + elif value in expected_list: + values_found += 1 + continue if values_found != len(expected_list): log.error(f"{name} invalid: values_found {values_found} expected {len(expected_list)}") return @@ -509,7 +508,8 @@ async def test_plot_info_caching(environment, bt): assert plot_manager.plots[path].prover.get_filename() == plot_info.prover.get_filename() assert plot_manager.plots[path].prover.get_id() == plot_info.prover.get_id() assert plot_manager.plots[path].prover.get_memo() == plot_info.prover.get_memo() - assert 
plot_manager.plots[path].prover.get_size() == plot_info.prover.get_size() + assert plot_manager.plots[path].prover.get_size().size_v1 == plot_info.prover.get_size().size_v1 + assert plot_manager.plots[path].prover.get_size().size_v2 == plot_info.prover.get_size().size_v2 assert plot_manager.plots[path].prover.get_compression_level() == plot_info.prover.get_compression_level() assert plot_manager.plots[path].pool_public_key == plot_info.pool_public_key assert plot_manager.plots[path].pool_contract_puzzle_hash == plot_info.pool_contract_puzzle_hash diff --git a/chia/_tests/plotting/test_prover.py b/chia/_tests/plotting/test_prover.py index 592280d2df52..c5ee38e6fee6 100644 --- a/chia/_tests/plotting/test_prover.py +++ b/chia/_tests/plotting/test_prover.py @@ -5,6 +5,8 @@ from unittest.mock import MagicMock, patch import pytest +from chia_rs.sized_bytes import bytes32 +from chia_rs.sized_ints import uint8 from chia.plotting.prover import PlotVersion, V1Prover, V2Prover, get_prover_from_bytes, get_prover_from_file @@ -25,10 +27,9 @@ def test_v2_prover_get_memo_raises_error(self) -> None: with pytest.raises(NotImplementedError, match="V2 plot format is not yet implemented"): prover.get_memo() - def test_v2_prover_get_compression_level_raises_error(self) -> None: + def test_v2_prover_get_compression_level(self) -> None: prover = V2Prover("/nonexistent/path/test.plot2") - with pytest.raises(NotImplementedError, match="V2 plot format is not yet implemented"): - prover.get_compression_level() + assert prover.get_compression_level() == uint8(0) def test_v2_prover_get_id_raises_error(self) -> None: prover = V2Prover("/nonexistent/path/test.plot2") @@ -37,13 +38,15 @@ def test_v2_prover_get_id_raises_error(self) -> None: def test_v2_prover_get_qualities_for_challenge_raises_error(self) -> None: prover = V2Prover("/nonexistent/path/test.plot2") - with pytest.raises(NotImplementedError, match="V2 plot format is not yet implemented"): - prover.get_qualities_for_challenge(b"challenge") + with pytest.raises( + AssertionError, match="V2 plot format does not support qualities directly, use partial proofs" + ): + prover.get_qualities_for_challenge(bytes32(b"1" * 32)) def test_v2_prover_get_full_proof_raises_error(self) -> None: prover = V2Prover("/nonexistent/path/test.plot2") - with pytest.raises(NotImplementedError, match="V2 plot format is not yet implemented"): - prover.get_full_proof(b"challenge", 0) + with pytest.raises(AssertionError, match="V2 plot format require solver to get full proof"): + prover.get_full_proof(bytes32(b"1" * 32), 0) def test_v2_prover_bytes_raises_error(self) -> None: prover = V2Prover("/nonexistent/path/test.plot2") diff --git a/chia/_tests/pools/test_pool_cmdline.py b/chia/_tests/pools/test_pool_cmdline.py index 2dd84d1cbb5e..5684c2a11ce5 100644 --- a/chia/_tests/pools/test_pool_cmdline.py +++ b/chia/_tests/pools/test_pool_cmdline.py @@ -1,6 +1,7 @@ from __future__ import annotations import json +import re from dataclasses import dataclass from io import StringIO from typing import Optional, cast @@ -484,7 +485,7 @@ async def test_plotnft_cli_join( wallet_id = await create_new_plotnft(wallet_environments) # Test joining the same pool again - with pytest.raises(click.ClickException, match="already farming to pool http://pool.example.com"): + with pytest.raises(click.ClickException, match=re.escape("already farming to pool http://pool.example.com")): await JoinPlotNFTCMD( rpc_info=NeedsWalletRPC( client_info=client_info, diff --git 
a/chia/_tests/pools/test_pool_puzzles_lifecycle.py b/chia/_tests/pools/test_pool_puzzles_lifecycle.py index 2838bc790741..ea3a7dc73275 100644 --- a/chia/_tests/pools/test_pool_puzzles_lifecycle.py +++ b/chia/_tests/pools/test_pool_puzzles_lifecycle.py @@ -1,6 +1,7 @@ from __future__ import annotations import copy +import re from unittest import TestCase import pytest @@ -31,6 +32,7 @@ from chia.types.blockchain_format.coin import Coin from chia.types.blockchain_format.program import Program from chia.types.coin_spend import make_spend +from chia.util.errors import Err from chia.wallet.puzzles import singleton_top_layer from chia.wallet.puzzles.p2_conditions import puzzle_for_conditions from chia.wallet.puzzles.p2_delegated_puzzle_or_hidden_puzzle import ( @@ -242,7 +244,8 @@ def test_pool_lifecycle(self): ) # Spend it and hope it fails! with pytest.raises( - BadSpendBundleError, match="condition validation failure Err.ASSERT_ANNOUNCE_CONSUMED_FAILED" + BadSpendBundleError, + match=re.escape(f"condition validation failure {Err.ASSERT_ANNOUNCE_CONSUMED_FAILED!s}"), ): coin_db.update_coin_store_for_spend_bundle( SpendBundle([singleton_coinsol], G2Element()), time, DEFAULT_CONSTANTS.MAX_BLOCK_COST_CLVM @@ -269,7 +272,8 @@ def test_pool_lifecycle(self): ) # Spend it and hope it fails! with pytest.raises( - BadSpendBundleError, match="condition validation failure Err.ASSERT_ANNOUNCE_CONSUMED_FAILED" + BadSpendBundleError, + match=re.escape(f"condition validation failure {Err.ASSERT_ANNOUNCE_CONSUMED_FAILED!s}"), ): coin_db.update_coin_store_for_spend_bundle( SpendBundle([singleton_coinsol, bad_coinsol], G2Element()), time, DEFAULT_CONSTANTS.MAX_BLOCK_COST_CLVM @@ -320,7 +324,10 @@ def test_pool_lifecycle(self): (data + singleton.name() + DEFAULT_CONSTANTS.AGG_SIG_ME_ADDITIONAL_DATA), ) # Spend it and hope it fails! 
- with pytest.raises(BadSpendBundleError, match="condition validation failure Err.ASSERT_HEIGHT_RELATIVE_FAILED"): + with pytest.raises( + BadSpendBundleError, + match=re.escape(f"condition validation failure {Err.ASSERT_HEIGHT_RELATIVE_FAILED!s}"), + ): coin_db.update_coin_store_for_spend_bundle( SpendBundle([return_coinsol], sig), time, DEFAULT_CONSTANTS.MAX_BLOCK_COST_CLVM ) diff --git a/chia/_tests/pools/test_pool_rpc.py b/chia/_tests/pools/test_pool_rpc.py index 8e7d2460fe41..4ab209b99a28 100644 --- a/chia/_tests/pools/test_pool_rpc.py +++ b/chia/_tests/pools/test_pool_rpc.py @@ -24,7 +24,6 @@ from chia._tests.util.time_out_assert import time_out_assert from chia.pools.pool_wallet_info import PoolSingletonState, PoolWalletInfo from chia.rpc.rpc_client import ResponseFailureError -from chia.server.aliases import WalletService from chia.simulator.add_blocks_in_batches import add_blocks_in_batches from chia.simulator.block_tools import BlockTools, get_plot_dir from chia.simulator.full_node_simulator import FullNodeSimulator @@ -41,8 +40,19 @@ from chia.wallet.util.tx_config import DEFAULT_TX_CONFIG from chia.wallet.util.wallet_types import WalletType from chia.wallet.wallet_node import WalletNode -from chia.wallet.wallet_request_types import GetWallets, PWAbsorbRewards, PWJoinPool, PWSelfPool, PWStatus +from chia.wallet.wallet_request_types import ( + DeleteUnconfirmedTransactions, + GetTransactions, + GetWalletBalance, + GetWallets, + PWAbsorbRewards, + PWJoinPool, + PWSelfPool, + PWStatus, + SendTransaction, +) from chia.wallet.wallet_rpc_client import WalletRpcClient +from chia.wallet.wallet_service import WalletService from chia.wallet.wallet_state_manager import WalletStateManager # TODO: Compare deducted fees in all tests against reported total_fee @@ -455,7 +465,7 @@ async def pw_created(check_wallet_id: int) -> bool: def mempool_empty() -> bool: return full_node_api.full_node.mempool_manager.mempool.size() == 0 - await client.delete_unconfirmed_transactions(1) + await client.delete_unconfirmed_transactions(DeleteUnconfirmedTransactions(uint32(1))) await full_node_api.process_all_wallet_transactions(wallet=wallet) await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node, timeout=20) @@ -468,8 +478,8 @@ def mempool_empty() -> bool: assert len(asset_id) > 0 await full_node_api.process_all_wallet_transactions(wallet=wallet) await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node, timeout=20) - bal_0 = await client.get_wallet_balance(cat_0_id) - assert bal_0["confirmed_wallet_balance"] == 20 + bal_0 = (await client.get_wallet_balance(GetWalletBalance(cat_0_id))).wallet_balance + assert bal_0.confirmed_wallet_balance == 20 # Test creation of many pool wallets. Use untrusted since that is the more complicated protocol, but don't # run this code more than once, since it's slow. 
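The hunks above and below migrate test_pool_rpc.py from the old dict-based wallet RPC client calls to typed request objects from chia.wallet.wallet_request_types. A minimal sketch of the before/after pattern, using only names that appear in this diff (expected stands in for a concrete amount):

# before: plain int wallet id in, untyped dict out
bal = await client.get_wallet_balance(2)
assert bal["confirmed_wallet_balance"] == expected

# after: a typed GetWalletBalance request in, a typed response out
bal = (await client.get_wallet_balance(GetWalletBalance(uint32(2)))).wallet_balance
assert bal.confirmed_wallet_balance == expected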
@@ -535,8 +545,8 @@ async def test_absorb_self( await add_blocks_in_batches(blocks[-3:], full_node_api.full_node) await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node, timeout=20) - bal = await client.get_wallet_balance(2) - assert bal["confirmed_wallet_balance"] == 2 * 1_750_000_000_000 + bal = (await client.get_wallet_balance(GetWalletBalance(uint32(2)))).wallet_balance + assert bal.confirmed_wallet_balance == 2 * 1_750_000_000_000 # Claim 2 * 1.75, and farm a new 1.75 absorb_txs = ( @@ -561,8 +571,8 @@ async def test_absorb_self( new_status: PoolWalletInfo = (await client.pw_status(PWStatus(uint32(2)))).state assert status.current == new_status.current assert status.tip_singleton_coin_id != new_status.tip_singleton_coin_id - bal = await client.get_wallet_balance(2) - assert bal["confirmed_wallet_balance"] == 1 * 1_750_000_000_000 + bal = (await client.get_wallet_balance(GetWalletBalance(uint32(2)))).wallet_balance + assert bal.confirmed_wallet_balance == 1 * 1_750_000_000_000 # Claim another 1.75 absorb_txs1 = ( @@ -575,14 +585,20 @@ async def test_absorb_self( await full_node_api.farm_blocks_to_puzzlehash(count=2, farm_to=our_ph, guarantee_transaction_blocks=True) await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node, timeout=20) - bal = await client.get_wallet_balance(2) - assert bal["confirmed_wallet_balance"] == 0 + bal = (await client.get_wallet_balance(GetWalletBalance(uint32(2)))).wallet_balance + assert bal.confirmed_wallet_balance == 0 assert len(await wallet_node.wallet_state_manager.tx_store.get_unconfirmed_for_wallet(2)) == 0 tr: TransactionRecord = ( await client.send_transaction( - 1, uint64(100), encode_puzzle_hash(status.p2_singleton_puzzle_hash, "txch"), DEFAULT_TX_CONFIG + SendTransaction( + wallet_id=uint32(1), + amount=uint64(100), + address=encode_puzzle_hash(status.p2_singleton_puzzle_hash, "txch"), + push=True, + ), + DEFAULT_TX_CONFIG, ) ).transaction @@ -590,15 +606,15 @@ async def test_absorb_self( await full_node_api.farm_blocks_to_puzzlehash(count=2, farm_to=our_ph, guarantee_transaction_blocks=True) # Balance ignores non coinbase TX - bal = await client.get_wallet_balance(2) - assert bal["confirmed_wallet_balance"] == 0 + bal = (await client.get_wallet_balance(GetWalletBalance(uint32(2)))).wallet_balance + assert bal.confirmed_wallet_balance == 0 with pytest.raises(ValueError): await client.pw_absorb_rewards( PWAbsorbRewards(wallet_id=uint32(2), fee=uint64(fee), push=True), DEFAULT_TX_CONFIG ) - tx1 = await client.get_transactions(1) + tx1 = (await client.get_transactions(GetTransactions(uint32(1)))).transactions assert (250_000_000_000 + fee) in [tx.amount for tx in tx1] @pytest.mark.anyio @@ -626,8 +642,8 @@ async def test_absorb_self_multiple_coins( pool_expected_confirmed_balance = 0 await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node, timeout=20) - main_bal = await client.get_wallet_balance(1) - assert main_bal["confirmed_wallet_balance"] == main_expected_confirmed_balance + main_bal = (await client.get_wallet_balance(GetWalletBalance(uint32(1)))).wallet_balance + assert main_bal.confirmed_wallet_balance == main_expected_confirmed_balance status: PoolWalletInfo = (await client.pw_status(PWStatus(uint32(2)))).state assert status.current.state == PoolSingletonState.SELF_POOLING.value @@ -650,10 +666,10 @@ async def test_absorb_self_multiple_coins( pool_expected_confirmed_balance += block_count * 1_750_000_000_000 main_expected_confirmed_balance += block_count * 250_000_000_000 - main_bal = await 
client.get_wallet_balance(1) - assert main_bal["confirmed_wallet_balance"] == main_expected_confirmed_balance - bal = await client.get_wallet_balance(2) - assert bal["confirmed_wallet_balance"] == pool_expected_confirmed_balance + main_bal = (await client.get_wallet_balance(GetWalletBalance(uint32(1)))).wallet_balance + assert main_bal.confirmed_wallet_balance == main_expected_confirmed_balance + bal = (await client.get_wallet_balance(GetWalletBalance(uint32(2)))).wallet_balance + assert bal.confirmed_wallet_balance == pool_expected_confirmed_balance # Claim absorb_txs = ( @@ -671,10 +687,10 @@ async def test_absorb_self_multiple_coins( new_status: PoolWalletInfo = (await client.pw_status(PWStatus(uint32(2)))).state assert status.current == new_status.current assert status.tip_singleton_coin_id != new_status.tip_singleton_coin_id - main_bal = await client.get_wallet_balance(1) - pool_bal = await client.get_wallet_balance(2) - assert pool_bal["confirmed_wallet_balance"] == pool_expected_confirmed_balance - assert main_bal["confirmed_wallet_balance"] == main_expected_confirmed_balance # 10499999999999 + main_bal = (await client.get_wallet_balance(GetWalletBalance(uint32(1)))).wallet_balance + pool_bal = (await client.get_wallet_balance(GetWalletBalance(uint32(2)))).wallet_balance + assert pool_bal.confirmed_wallet_balance == pool_expected_confirmed_balance + assert main_bal.confirmed_wallet_balance == main_expected_confirmed_balance # 10499999999999 @pytest.mark.anyio async def test_absorb_pooling( @@ -726,8 +742,8 @@ async def farming_to_pool() -> bool: await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node, timeout=20) # Pooled plots don't have balance main_expected_confirmed_balance += block_count * 250_000_000_000 - bal = await client.get_wallet_balance(2) - assert bal["confirmed_wallet_balance"] == 0 + bal = (await client.get_wallet_balance(GetWalletBalance(uint32(2)))).wallet_balance + assert bal.confirmed_wallet_balance == 0 # Claim block_count * 1.75 ret = await client.pw_absorb_rewards( @@ -751,12 +767,12 @@ async def status_updated() -> bool: await time_out_assert(20, status_updated) new_status = (await client.pw_status(PWStatus(uint32(2)))).state - bal = await client.get_wallet_balance(2) - assert bal["confirmed_wallet_balance"] == 0 + bal = (await client.get_wallet_balance(GetWalletBalance(uint32(2)))).wallet_balance + assert bal.confirmed_wallet_balance == 0 await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node, timeout=20) - bal = await client.get_wallet_balance(2) - assert bal["confirmed_wallet_balance"] == 0 + bal = (await client.get_wallet_balance(GetWalletBalance(uint32(2)))).wallet_balance + assert bal.confirmed_wallet_balance == 0 assert len(await wallet_node.wallet_state_manager.tx_store.get_unconfirmed_for_wallet(2)) == 0 peak = full_node_api.full_node.blockchain.get_peak() assert peak is not None @@ -798,8 +814,8 @@ async def status_updated() -> bool: status = (await client.pw_status(PWStatus(uint32(2)))).state assert ret.fee_transaction is None - bal2 = await client.get_wallet_balance(2) - assert bal2["confirmed_wallet_balance"] == 0 + bal2 = (await client.get_wallet_balance(GetWalletBalance(uint32(2)))).wallet_balance + assert bal2.confirmed_wallet_balance == 0 @pytest.mark.anyio async def test_self_pooling_to_pooling(self, setup: Setup, fee: uint64, self_hostname: str) -> None: @@ -1252,7 +1268,7 @@ async def test_join_pool_unsynced( mock.return_value = False # Test joining the same pool via the RPC client - with 
pytest.raises(ResponseFailureError, match="Wallet needs to be fully synced."): + with pytest.raises(ResponseFailureError, match="Wallet needs to be fully synced"): await wallet_rpc.pw_join_pool( PWJoinPool( wallet_id=uint32(wallet_id), diff --git a/chia/_tests/solver/__init__.py b/chia/_tests/solver/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/chia/_tests/solver/config.py b/chia/_tests/solver/config.py new file mode 100644 index 000000000000..b593bfe59ade --- /dev/null +++ b/chia/_tests/solver/config.py @@ -0,0 +1,4 @@ +from __future__ import annotations + +job_timeout = 70 +checkout_blocks_and_plots = True diff --git a/chia/_tests/solver/test_solver_service.py b/chia/_tests/solver/test_solver_service.py new file mode 100644 index 000000000000..ccb683a9ef39 --- /dev/null +++ b/chia/_tests/solver/test_solver_service.py @@ -0,0 +1,29 @@ +from __future__ import annotations + +from pathlib import Path +from unittest.mock import patch + +import pytest +from chia_rs import ConsensusConstants + +from chia.protocols.outbound_message import Message +from chia.protocols.solver_protocol import SolverInfo +from chia.simulator.block_tools import create_block_tools_async +from chia.simulator.keyring import TempKeyring +from chia.simulator.setup_services import setup_solver + + +@pytest.mark.anyio +async def test_solver_api_methods(blockchain_constants: ConsensusConstants, tmp_path: Path) -> None: + with TempKeyring(populate=True) as keychain: + bt = await create_block_tools_async(constants=blockchain_constants, keychain=keychain) + async with setup_solver(tmp_path, bt, blockchain_constants) as solver_service: + solver = solver_service._node + solver_api = solver_service._api + assert solver_api.ready() is True + test_info = SolverInfo(partial_proof=b"test_partial_proof_42") + expected_proof = b"test_proof_data_12345" + with patch.object(solver, "solve", return_value=expected_proof): + api_result = await solver_api.solve(test_info) + assert api_result is not None + assert isinstance(api_result, Message) diff --git a/chia/_tests/timelord/test_new_peak.py b/chia/_tests/timelord/test_new_peak.py index a11cd3e2b832..7fd284ccfa31 100644 --- a/chia/_tests/timelord/test_new_peak.py +++ b/chia/_tests/timelord/test_new_peak.py @@ -13,8 +13,8 @@ from chia.consensus.blockchain import Blockchain from chia.consensus.difficulty_adjustment import get_next_sub_slot_iters_and_difficulty from chia.consensus.make_sub_epoch_summary import next_sub_epoch_summary +from chia.full_node.full_node_service import FullNodeService from chia.protocols import timelord_protocol -from chia.server.aliases import FullNodeService from chia.server.server import ChiaServer from chia.simulator.block_tools import BlockTools from chia.simulator.full_node_simulator import FullNodeSimulator diff --git a/chia/_tests/timelord/test_timelord.py b/chia/_tests/timelord/test_timelord.py index 7e3dbec459ea..80ca02ed057e 100644 --- a/chia/_tests/timelord/test_timelord.py +++ b/chia/_tests/timelord/test_timelord.py @@ -2,7 +2,7 @@ import pytest -from chia.server.aliases import TimelordService +from chia.timelord.timelord_service import TimelordService @pytest.mark.anyio diff --git a/chia/_tests/util/build_network_protocol_files.py b/chia/_tests/util/build_network_protocol_files.py index 8e2f6b34c217..de5990c7095d 100644 --- a/chia/_tests/util/build_network_protocol_files.py +++ b/chia/_tests/util/build_network_protocol_files.py @@ -33,6 +33,12 @@ def visit_farmer_protocol(visitor: Callable[[Any, str], None]) -> None: 
visitor(request_signed_values, "request_signed_values") visitor(farming_info, "farming_info") visitor(signed_values, "signed_values") + visitor(partial_proof, "partial_proof") + + +def visit_solver_protocol(visitor: Callable[[Any, str], None]) -> None: + visitor(solver_info, "solver_info") + visitor(solver_response, "solver_response") def visit_full_node(visitor: Callable[[Any, str], None]) -> None: @@ -117,6 +123,7 @@ def visit_harvester_protocol(visitor: Callable[[Any, str], None]) -> None: visitor(pool_difficulty, "pool_difficulty") visitor(harvester_handhsake, "harvester_handhsake") visitor(new_signage_point_harvester, "new_signage_point_harvester") + visitor(new_signage_point_harvester2, "new_signage_point_harvester2") visitor(new_proof_of_space, "new_proof_of_space") visitor(request_signatures, "request_signatures") visitor(respond_signatures, "respond_signatures") @@ -170,6 +177,7 @@ def visit_all_messages(visitor: Callable[[Any, str], None]) -> None: visit_pool_protocol(visitor) visit_timelord_protocol(visitor) visit_shared_protocol(visitor) + visit_solver_protocol(visitor) def get_protocol_bytes() -> bytes: diff --git a/chia/_tests/util/network_protocol_data.py b/chia/_tests/util/network_protocol_data.py index 7e0ca731697c..465a6834aa25 100644 --- a/chia/_tests/util/network_protocol_data.py +++ b/chia/_tests/util/network_protocol_data.py @@ -37,6 +37,7 @@ harvester_protocol, introducer_protocol, pool_protocol, + solver_protocol, timelord_protocol, wallet_protocol, ) @@ -150,6 +151,26 @@ ), ) +partial_proof = harvester_protocol.PartialProofsData( + bytes32.fromhex("42743566108589c11bb3811b347900b6351fd3e25bad6c956c0bf1c05a4d93fb"), + bytes32.fromhex("8a346e8dc02e9b44c0571caa74fd99f163d4c5d7deaedac87125528721493f7a"), + "plot-filename", + [b"partial-proof1", b"partial-proof2"], + uint8(4), + uint8(32), + G1Element.from_bytes( + bytes.fromhex( + "a04c6b5ac7dfb935f6feecfdd72348ccf1d4be4fe7e26acf271ea3b7d308da61e0a308f7a62495328a81f5147b66634c" + ), + ), + bytes32.fromhex("91240fbacdf93b44c0571caa74fd99f163d4c5d7deaedac87125528721493f7a"), + G1Element.from_bytes( + bytes.fromhex( + "a04c6b5ac7dfb935f6feecfdd72348ccf1d4be4fe7e26acf271ea3b7d308da61e0a308f7a62495328a81f5147b66634c" + ), + ), +) + # FULL NODE PROTOCOL. 
new_peak = full_node_protocol.NewPeak( @@ -789,6 +810,16 @@ ) new_signage_point_harvester = harvester_protocol.NewSignagePointHarvester( + bytes32(bytes.fromhex("e342c21b4aeaa52349d42492be934692db58494ca9bce4a8697d06fdf8e583bb")), + uint64(15615706268399948682), + uint64(10520767421667792980), + uint8(148), + bytes32(bytes.fromhex("b78c9fca155e9742df835cbe84bb7e518bee70d78b6be6e39996c0a02e0cfe4c")), + [pool_difficulty], + uint8(3), +) + +new_signage_point_harvester2 = harvester_protocol.NewSignagePointHarvester2( bytes32(bytes.fromhex("e342c21b4aeaa52349d42492be934692db58494ca9bce4a8697d06fdf8e583bb")), uint64(15615706268399948682), uint64(10520767421667792980), @@ -799,6 +830,7 @@ uint32(0), ) + new_proof_of_space = harvester_protocol.NewProofOfSpace( bytes32.fromhex("1b64ec6bf3fe33bb80eca5b64ff1c88be07771eaed1e98a7199510522087e56e"), bytes32.fromhex("ad1f8a74376ce8c5c93b7fbb355c2fb6d689ae4f4a7134166593d95265a3da30"), @@ -1082,3 +1114,8 @@ uint32(386395693), uint8(224), ) + +# SOLVER PROTOCOL +solver_info = solver_protocol.SolverInfo(partial_proof=b"partial-proof") + +solver_response = solver_protocol.SolverResponse(b"partial-proof", b"full-proof") diff --git a/chia/_tests/util/protocol_messages_bytes-v1.0 b/chia/_tests/util/protocol_messages_bytes-v1.0 index 0d9160933489..9b3c128ca799 100644 Binary files a/chia/_tests/util/protocol_messages_bytes-v1.0 and b/chia/_tests/util/protocol_messages_bytes-v1.0 differ diff --git a/chia/_tests/util/protocol_messages_json.py b/chia/_tests/util/protocol_messages_json.py index c89601f7c90e..dcac00927b95 100644 --- a/chia/_tests/util/protocol_messages_json.py +++ b/chia/_tests/util/protocol_messages_json.py @@ -65,6 +65,18 @@ "foliage_transaction_block_signature": "0xc00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", } +partial_proof_json: dict[str, Any] = { + "challenge_hash": "0x42743566108589c11bb3811b347900b6351fd3e25bad6c956c0bf1c05a4d93fb", + "sp_hash": "0x8a346e8dc02e9b44c0571caa74fd99f163d4c5d7deaedac87125528721493f7a", + "plot_identifier": "plot-filename", + "partial_proofs": ["0x7061727469616c2d70726f6f6631", "0x7061727469616c2d70726f6f6632"], + "signage_point_index": 4, + "plot_size": 32, + "pool_public_key": "0xa04c6b5ac7dfb935f6feecfdd72348ccf1d4be4fe7e26acf271ea3b7d308da61e0a308f7a62495328a81f5147b66634c", + "pool_contract_puzzle_hash": "0x91240fbacdf93b44c0571caa74fd99f163d4c5d7deaedac87125528721493f7a", + "plot_public_key": "0xa04c6b5ac7dfb935f6feecfdd72348ccf1d4be4fe7e26acf271ea3b7d308da61e0a308f7a62495328a81f5147b66634c", +} + new_peak_json: dict[str, Any] = { "header_hash": "0x8a346e8dc02e9b44c0571caa74fd99f163d4c5d7deae9f8ddb00528721493f7a", "height": 2653549198, @@ -2148,6 +2160,22 @@ } new_signage_point_harvester_json: dict[str, Any] = { + "challenge_hash": "0xe342c21b4aeaa52349d42492be934692db58494ca9bce4a8697d06fdf8e583bb", + "difficulty": 15615706268399948682, + "sub_slot_iters": 10520767421667792980, + "signage_point_index": 148, + "sp_hash": "0xb78c9fca155e9742df835cbe84bb7e518bee70d78b6be6e39996c0a02e0cfe4c", + "pool_difficulties": [ + { + "difficulty": 14819251421858580996, + "sub_slot_iters": 12852879676624401630, + "pool_contract_puzzle_hash": "0xc9423123ea65e6923e973b95531b4874570dae942cb757a2daec4a6971753886", + } + ], + "filter_prefix_bits": 3, +} + +new_signage_point_harvester2_json: dict[str, Any] = { "challenge_hash": 
"0xe342c21b4aeaa52349d42492be934692db58494ca9bce4a8697d06fdf8e583bb", "difficulty": 15615706268399948682, "sub_slot_iters": 10520767421667792980, @@ -2701,3 +2729,10 @@ error_without_data_json: dict[str, Any] = {"code": 1, "message": "Unknown", "data": None} error_with_data_json: dict[str, Any] = {"code": 1, "message": "Unknown", "data": "0x65787472612064617461"} + +solver_info_json: dict[str, Any] = {"partial_proof": "0x7061727469616c2d70726f6f66"} + +solver_response_json: dict[str, Any] = { + "partial_proof": "0x7061727469616c2d70726f6f66", + "proof": "0x66756c6c2d70726f6f66", +} diff --git a/chia/_tests/util/setup_nodes.py b/chia/_tests/util/setup_nodes.py index f8aa40ab3f0f..80a9c0d78487 100644 --- a/chia/_tests/util/setup_nodes.py +++ b/chia/_tests/util/setup_nodes.py @@ -18,11 +18,13 @@ from chia._tests.environments.wallet import WalletEnvironment from chia.daemon.server import WebSocketServer from chia.farmer.farmer import Farmer +from chia.farmer.farmer_service import FarmerService from chia.full_node.full_node_api import FullNodeAPI +from chia.full_node.full_node_service import FullNodeService from chia.harvester.harvester import Harvester +from chia.harvester.harvester_service import HarvesterService from chia.introducer.introducer_api import IntroducerAPI from chia.protocols.shared_protocol import Capability -from chia.server.aliases import FarmerService, FullNodeService, HarvesterService, TimelordService, WalletService from chia.server.server import ChiaServer from chia.simulator.block_tools import BlockTools, create_block_tools_async from chia.simulator.full_node_simulator import FullNodeSimulator @@ -33,6 +35,7 @@ setup_full_node, setup_harvester, setup_introducer, + setup_solver, setup_timelord, setup_vdf_client, setup_vdf_clients, @@ -40,12 +43,15 @@ ) from chia.simulator.socket import find_available_listen_port from chia.simulator.start_simulator import SimulatorFullNodeService +from chia.solver.solver_service import SolverService +from chia.timelord.timelord_service import TimelordService from chia.types.peer_info import UnresolvedPeerInfo from chia.util.hash import std_hash from chia.util.keychain import Keychain from chia.util.timing import adjusted_timeout, backoff_times from chia.wallet.wallet_node import WalletNode from chia.wallet.wallet_rpc_client import WalletRpcClient +from chia.wallet.wallet_service import WalletService OldSimulatorsAndWallets = tuple[list[FullNodeSimulator], list[tuple[WalletNode, ChiaServer]], BlockTools] SimulatorsAndWalletsServices = tuple[list[SimulatorFullNodeService], list[WalletService], BlockTools] @@ -60,6 +66,7 @@ class FullSystem: introducer: IntroducerAPI timelord: TimelordService timelord_bluebox: TimelordService + solver: SolverService daemon: WebSocketServer @@ -335,6 +342,74 @@ async def setup_farmer_multi_harvester( yield harvester_services, farmer_service, block_tools +@asynccontextmanager +async def setup_farmer_multi_harvester_with_solver( + block_tools: BlockTools, + harvester_count: int, + temp_dir: Path, + consensus_constants: ConsensusConstants, + *, + start_services: bool, +) -> AsyncIterator[tuple[list[HarvesterService], FarmerService, SolverService, BlockTools]]: + async with AsyncExitStack() as async_exit_stack: + farmer_service = await async_exit_stack.enter_async_context( + setup_farmer( + block_tools, + temp_dir / "farmer", + block_tools.config["self_hostname"], + consensus_constants, + port=uint16(0), + start_service=start_services, + ) + ) + if start_services: + farmer_peer = 
UnresolvedPeerInfo(block_tools.config["self_hostname"], farmer_service._server.get_port()) + else: + farmer_peer = None + harvester_services = [ + await async_exit_stack.enter_async_context( + setup_harvester( + block_tools, + temp_dir / f"harvester_{i}", + farmer_peer, + consensus_constants, + start_service=start_services, + ) + ) + for i in range(harvester_count) + ] + + # Setup solver with farmer peer - CRITICAL: use same BlockTools root path for SSL CA consistency + solver_service = await async_exit_stack.enter_async_context( + setup_solver( + temp_dir / "solver", # Use temp_dir like harvester, not block_tools.root_path + block_tools, # Pass BlockTools so SSL CA can be consistent + consensus_constants, + start_service=start_services, + farmer_peer=farmer_peer, + ) + ) + + # Wait for farmer to be fully started before expecting solver connection + if start_services: + import asyncio + + # Wait for farmer to be fully initialized + timeout = 30 + for i in range(timeout): + if farmer_service._node.started: + print(f"Farmer fully started after {i} seconds") + break + await asyncio.sleep(1) + else: + print(f"WARNING: Farmer not started after {timeout} seconds") + + # Give solver additional time to connect + await asyncio.sleep(3) + + yield harvester_services, farmer_service, solver_service, block_tools + + @asynccontextmanager async def setup_full_system( consensus_constants: ConsensusConstants, @@ -469,6 +544,15 @@ async def setup_full_system_inner( await asyncio.sleep(backoff) + solver_service = await async_exit_stack.enter_async_context( + setup_solver( + shared_b_tools.root_path / "solver", + shared_b_tools, + consensus_constants, + True, + ) + ) + full_system = FullSystem( node_1=node_1, node_2=node_2, @@ -477,6 +561,7 @@ async def setup_full_system_inner( introducer=introducer, timelord=timelord, timelord_bluebox=timelord_bluebox_service, + solver=solver_service, daemon=daemon_ws, ) yield full_system diff --git a/chia/_tests/util/test_condition_tools.py b/chia/_tests/util/test_condition_tools.py index e561da8763ca..61301095dc2f 100644 --- a/chia/_tests/util/test_condition_tools.py +++ b/chia/_tests/util/test_condition_tools.py @@ -51,8 +51,9 @@ def mk_agg_sig_conditions( flags=0, execution_cost=0, condition_cost=0, + fingerprint=b"", ) - return SpendBundleConditions([spend], 0, 0, 0, None, None, agg_sig_unsafe_data, 0, 0, 0, False, 0, 0) + return SpendBundleConditions([spend], 0, 0, 0, None, None, agg_sig_unsafe_data, 0, 0, 0, False, 0, 0, 0, 0, 0) @pytest.mark.parametrize( @@ -99,7 +100,7 @@ def test_pkm_pairs_vs_for_conditions_dict(opcode: ConditionOpcode) -> None: class TestPkmPairs: def test_empty_list(self) -> None: - conds = SpendBundleConditions([], 0, 0, 0, None, None, [], 0, 0, 0, False, 0, 0) + conds = SpendBundleConditions([], 0, 0, 0, None, None, [], 0, 0, 0, False, 0, 0, 0, 0, 0) pks, msgs = pkm_pairs(conds, b"foobar") assert pks == [] assert msgs == [] diff --git a/chia/_tests/util/test_full_block_utils.py b/chia/_tests/util/test_full_block_utils.py index 7dcfb9168398..3a3b3ed23970 100644 --- a/chia/_tests/util/test_full_block_utils.py +++ b/chia/_tests/util/test_full_block_utils.py @@ -67,14 +67,15 @@ def vdf_proof() -> VDFProof: def get_proof_of_space() -> Generator[ProofOfSpace, None, None]: for pool_pk in [g1(), None]: for plot_hash in [hsh(), None]: - yield ProofOfSpace( - hsh(), # challenge - pool_pk, - plot_hash, - g1(), # plot_public_key - uint8(32), - random.randbytes(8 * 32), - ) + for pos_version in [0, 0x80]: + yield ProofOfSpace( + hsh(), # challenge + 
pool_pk, + plot_hash, + g1(), # plot_public_key + uint8(pos_version | 32), # this is version and k-size + random.randbytes(8 * 32), + ) def get_reward_chain_block(height: uint32) -> Generator[RewardChainBlock, None, None]: diff --git a/chia/_tests/util/test_misc.py b/chia/_tests/util/test_misc.py index 7978eb19881d..62556508de56 100644 --- a/chia/_tests/util/test_misc.py +++ b/chia/_tests/util/test_misc.py @@ -182,7 +182,7 @@ def test_split_manager_raises_on_second_entry() -> None: split = SplitManager(manager=sync_manager(y=x), object=None) split.enter() - with pytest.raises(Exception, match="^already entered$"): + with pytest.raises(Exception, match=r"^already entered$"): split.enter() @@ -193,7 +193,7 @@ def test_split_manager_raises_on_second_entry_after_exiting() -> None: split.enter() split.exit() - with pytest.raises(Exception, match="^already entered, already exited$"): + with pytest.raises(Exception, match=r"^already entered, already exited$"): split.enter() @@ -204,7 +204,7 @@ def test_split_manager_raises_on_second_exit() -> None: split.enter() split.exit() - with pytest.raises(Exception, match="^already exited$"): + with pytest.raises(Exception, match=r"^already exited$"): split.exit() @@ -213,7 +213,7 @@ def test_split_manager_raises_on_exit_without_entry() -> None: split = SplitManager(manager=sync_manager(y=x), object=None) - with pytest.raises(Exception, match="^not yet entered$"): + with pytest.raises(Exception, match=r"^not yet entered$"): split.exit() @@ -274,7 +274,7 @@ async def test_split_async_manager_raises_on_second_entry() -> None: split = SplitAsyncManager(manager=async_manager(y=x), object=None) await split.enter() - with pytest.raises(Exception, match="^already entered$"): + with pytest.raises(Exception, match=r"^already entered$"): await split.enter() @@ -286,7 +286,7 @@ async def test_split_async_manager_raises_on_second_entry_after_exiting() -> Non await split.enter() await split.exit() - with pytest.raises(Exception, match="^already entered, already exited$"): + with pytest.raises(Exception, match=r"^already entered, already exited$"): await split.enter() @@ -298,7 +298,7 @@ async def test_split_async_manager_raises_on_second_exit() -> None: await split.enter() await split.exit() - with pytest.raises(Exception, match="^already exited$"): + with pytest.raises(Exception, match=r"^already exited$"): await split.exit() @@ -308,7 +308,7 @@ async def test_split_async_manager_raises_on_exit_without_entry() -> None: split = SplitAsyncManager(manager=async_manager(y=x), object=None) - with pytest.raises(Exception, match="^not yet entered$"): + with pytest.raises(Exception, match=r"^not yet entered$"): await split.exit() @@ -390,7 +390,7 @@ async def test_valued_event_set_again_raises_and_does_not_change_value() -> None value = 37 valued_event.set(value) - with pytest.raises(Exception, match="^Value already set$"): + with pytest.raises(Exception, match=r"^Value already set$"): valued_event.set(value + 1) with anyio.fail_after(adjusted_timeout(10)): @@ -404,7 +404,7 @@ async def test_valued_event_wait_raises_if_not_set() -> None: valued_event = ValuedEvent[int]() valued_event._event.set() - with pytest.raises(Exception, match="^Value not set despite event being set$"): + with pytest.raises(Exception, match=r"^Value not set despite event being set$"): with anyio.fail_after(adjusted_timeout(10)): await valued_event.wait() diff --git a/chia/_tests/util/test_network_protocol_files.py b/chia/_tests/util/test_network_protocol_files.py index 071a263e8366..359f1c477af6 100644 
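A note on the `uint8(pos_version | 32)` construction in the hunk above: the test now folds a proof-of-space format version into the same byte as the k-size, with the high bit (`0x80`) selecting the versioned format. A minimal sketch of that packing under the convention the test implies; the helper names here are illustrative, not from the chia codebase:

```python
# Sketch of the size-byte convention implied by `uint8(pos_version | 32)`:
# low 7 bits carry the k-size, the high bit flags the PoS format version.
# pack_size_byte / unpack_size_byte are hypothetical helpers for illustration.

def pack_size_byte(k_size: int, versioned: bool) -> int:
    assert 0 <= k_size < 0x80, "k-size must fit in the low 7 bits"
    return (0x80 if versioned else 0x00) | k_size

def unpack_size_byte(size_byte: int) -> tuple[int, bool]:
    return size_byte & 0x7F, bool(size_byte & 0x80)

assert pack_size_byte(32, True) == (0x80 | 32)  # matches the test's construction
assert unpack_size_byte(0x80 | 32) == (32, True)
assert unpack_size_byte(32) == (32, False)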
--- a/chia/_tests/util/test_network_protocol_files.py +++ b/chia/_tests/util/test_network_protocol_files.py @@ -51,528 +51,548 @@ def test_protocol_bytes() -> None: assert bytes(message_4) == bytes(signed_values) message_bytes, input_bytes = parse_blob(input_bytes) - message_5 = type(new_peak).from_bytes(message_bytes) - assert message_5 == new_peak - assert bytes(message_5) == bytes(new_peak) + message_5 = type(partial_proof).from_bytes(message_bytes) + assert message_5 == partial_proof + assert bytes(message_5) == bytes(partial_proof) message_bytes, input_bytes = parse_blob(input_bytes) - message_6 = type(new_transaction).from_bytes(message_bytes) - assert message_6 == new_transaction - assert bytes(message_6) == bytes(new_transaction) + message_6 = type(new_peak).from_bytes(message_bytes) + assert message_6 == new_peak + assert bytes(message_6) == bytes(new_peak) message_bytes, input_bytes = parse_blob(input_bytes) - message_7 = type(request_transaction).from_bytes(message_bytes) - assert message_7 == request_transaction - assert bytes(message_7) == bytes(request_transaction) + message_7 = type(new_transaction).from_bytes(message_bytes) + assert message_7 == new_transaction + assert bytes(message_7) == bytes(new_transaction) message_bytes, input_bytes = parse_blob(input_bytes) - message_8 = type(respond_transaction).from_bytes(message_bytes) - assert message_8 == respond_transaction - assert bytes(message_8) == bytes(respond_transaction) + message_8 = type(request_transaction).from_bytes(message_bytes) + assert message_8 == request_transaction + assert bytes(message_8) == bytes(request_transaction) message_bytes, input_bytes = parse_blob(input_bytes) - message_9 = type(request_proof_of_weight).from_bytes(message_bytes) - assert message_9 == request_proof_of_weight - assert bytes(message_9) == bytes(request_proof_of_weight) + message_9 = type(respond_transaction).from_bytes(message_bytes) + assert message_9 == respond_transaction + assert bytes(message_9) == bytes(respond_transaction) message_bytes, input_bytes = parse_blob(input_bytes) - message_10 = type(respond_proof_of_weight).from_bytes(message_bytes) - assert message_10 == respond_proof_of_weight - assert bytes(message_10) == bytes(respond_proof_of_weight) + message_10 = type(request_proof_of_weight).from_bytes(message_bytes) + assert message_10 == request_proof_of_weight + assert bytes(message_10) == bytes(request_proof_of_weight) message_bytes, input_bytes = parse_blob(input_bytes) - message_11 = type(request_block).from_bytes(message_bytes) - assert message_11 == request_block - assert bytes(message_11) == bytes(request_block) + message_11 = type(respond_proof_of_weight).from_bytes(message_bytes) + assert message_11 == respond_proof_of_weight + assert bytes(message_11) == bytes(respond_proof_of_weight) message_bytes, input_bytes = parse_blob(input_bytes) - message_12 = type(reject_block).from_bytes(message_bytes) - assert message_12 == reject_block - assert bytes(message_12) == bytes(reject_block) + message_12 = type(request_block).from_bytes(message_bytes) + assert message_12 == request_block + assert bytes(message_12) == bytes(request_block) message_bytes, input_bytes = parse_blob(input_bytes) - message_13 = type(request_blocks).from_bytes(message_bytes) - assert message_13 == request_blocks - assert bytes(message_13) == bytes(request_blocks) + message_13 = type(reject_block).from_bytes(message_bytes) + assert message_13 == reject_block + assert bytes(message_13) == bytes(reject_block) message_bytes, input_bytes = 
parse_blob(input_bytes) - message_14 = type(respond_blocks).from_bytes(message_bytes) - assert message_14 == respond_blocks - assert bytes(message_14) == bytes(respond_blocks) + message_14 = type(request_blocks).from_bytes(message_bytes) + assert message_14 == request_blocks + assert bytes(message_14) == bytes(request_blocks) message_bytes, input_bytes = parse_blob(input_bytes) - message_15 = type(reject_blocks).from_bytes(message_bytes) - assert message_15 == reject_blocks - assert bytes(message_15) == bytes(reject_blocks) + message_15 = type(respond_blocks).from_bytes(message_bytes) + assert message_15 == respond_blocks + assert bytes(message_15) == bytes(respond_blocks) message_bytes, input_bytes = parse_blob(input_bytes) - message_16 = type(respond_block).from_bytes(message_bytes) - assert message_16 == respond_block - assert bytes(message_16) == bytes(respond_block) + message_16 = type(reject_blocks).from_bytes(message_bytes) + assert message_16 == reject_blocks + assert bytes(message_16) == bytes(reject_blocks) message_bytes, input_bytes = parse_blob(input_bytes) - message_17 = type(new_unfinished_block).from_bytes(message_bytes) - assert message_17 == new_unfinished_block - assert bytes(message_17) == bytes(new_unfinished_block) + message_17 = type(respond_block).from_bytes(message_bytes) + assert message_17 == respond_block + assert bytes(message_17) == bytes(respond_block) message_bytes, input_bytes = parse_blob(input_bytes) - message_18 = type(request_unfinished_block).from_bytes(message_bytes) - assert message_18 == request_unfinished_block - assert bytes(message_18) == bytes(request_unfinished_block) + message_18 = type(new_unfinished_block).from_bytes(message_bytes) + assert message_18 == new_unfinished_block + assert bytes(message_18) == bytes(new_unfinished_block) message_bytes, input_bytes = parse_blob(input_bytes) - message_19 = type(respond_unfinished_block).from_bytes(message_bytes) - assert message_19 == respond_unfinished_block - assert bytes(message_19) == bytes(respond_unfinished_block) + message_19 = type(request_unfinished_block).from_bytes(message_bytes) + assert message_19 == request_unfinished_block + assert bytes(message_19) == bytes(request_unfinished_block) message_bytes, input_bytes = parse_blob(input_bytes) - message_20 = type(new_signage_point_or_end_of_subslot).from_bytes(message_bytes) - assert message_20 == new_signage_point_or_end_of_subslot - assert bytes(message_20) == bytes(new_signage_point_or_end_of_subslot) + message_20 = type(respond_unfinished_block).from_bytes(message_bytes) + assert message_20 == respond_unfinished_block + assert bytes(message_20) == bytes(respond_unfinished_block) message_bytes, input_bytes = parse_blob(input_bytes) - message_21 = type(request_signage_point_or_end_of_subslot).from_bytes(message_bytes) - assert message_21 == request_signage_point_or_end_of_subslot - assert bytes(message_21) == bytes(request_signage_point_or_end_of_subslot) + message_21 = type(new_signage_point_or_end_of_subslot).from_bytes(message_bytes) + assert message_21 == new_signage_point_or_end_of_subslot + assert bytes(message_21) == bytes(new_signage_point_or_end_of_subslot) message_bytes, input_bytes = parse_blob(input_bytes) - message_22 = type(respond_signage_point).from_bytes(message_bytes) - assert message_22 == respond_signage_point - assert bytes(message_22) == bytes(respond_signage_point) + message_22 = type(request_signage_point_or_end_of_subslot).from_bytes(message_bytes) + assert message_22 == request_signage_point_or_end_of_subslot + 
assert bytes(message_22) == bytes(request_signage_point_or_end_of_subslot) message_bytes, input_bytes = parse_blob(input_bytes) - message_23 = type(respond_end_of_subslot).from_bytes(message_bytes) - assert message_23 == respond_end_of_subslot - assert bytes(message_23) == bytes(respond_end_of_subslot) + message_23 = type(respond_signage_point).from_bytes(message_bytes) + assert message_23 == respond_signage_point + assert bytes(message_23) == bytes(respond_signage_point) message_bytes, input_bytes = parse_blob(input_bytes) - message_24 = type(request_mempool_transaction).from_bytes(message_bytes) - assert message_24 == request_mempool_transaction - assert bytes(message_24) == bytes(request_mempool_transaction) + message_24 = type(respond_end_of_subslot).from_bytes(message_bytes) + assert message_24 == respond_end_of_subslot + assert bytes(message_24) == bytes(respond_end_of_subslot) message_bytes, input_bytes = parse_blob(input_bytes) - message_25 = type(new_compact_vdf).from_bytes(message_bytes) - assert message_25 == new_compact_vdf - assert bytes(message_25) == bytes(new_compact_vdf) + message_25 = type(request_mempool_transaction).from_bytes(message_bytes) + assert message_25 == request_mempool_transaction + assert bytes(message_25) == bytes(request_mempool_transaction) message_bytes, input_bytes = parse_blob(input_bytes) - message_26 = type(request_compact_vdf).from_bytes(message_bytes) - assert message_26 == request_compact_vdf - assert bytes(message_26) == bytes(request_compact_vdf) + message_26 = type(new_compact_vdf).from_bytes(message_bytes) + assert message_26 == new_compact_vdf + assert bytes(message_26) == bytes(new_compact_vdf) message_bytes, input_bytes = parse_blob(input_bytes) - message_27 = type(respond_compact_vdf).from_bytes(message_bytes) - assert message_27 == respond_compact_vdf - assert bytes(message_27) == bytes(respond_compact_vdf) + message_27 = type(request_compact_vdf).from_bytes(message_bytes) + assert message_27 == request_compact_vdf + assert bytes(message_27) == bytes(request_compact_vdf) message_bytes, input_bytes = parse_blob(input_bytes) - message_28 = type(request_peers).from_bytes(message_bytes) - assert message_28 == request_peers - assert bytes(message_28) == bytes(request_peers) + message_28 = type(respond_compact_vdf).from_bytes(message_bytes) + assert message_28 == respond_compact_vdf + assert bytes(message_28) == bytes(respond_compact_vdf) message_bytes, input_bytes = parse_blob(input_bytes) - message_29 = type(respond_peers).from_bytes(message_bytes) - assert message_29 == respond_peers - assert bytes(message_29) == bytes(respond_peers) + message_29 = type(request_peers).from_bytes(message_bytes) + assert message_29 == request_peers + assert bytes(message_29) == bytes(request_peers) message_bytes, input_bytes = parse_blob(input_bytes) - message_30 = type(new_unfinished_block2).from_bytes(message_bytes) - assert message_30 == new_unfinished_block2 - assert bytes(message_30) == bytes(new_unfinished_block2) + message_30 = type(respond_peers).from_bytes(message_bytes) + assert message_30 == respond_peers + assert bytes(message_30) == bytes(respond_peers) message_bytes, input_bytes = parse_blob(input_bytes) - message_31 = type(request_unfinished_block2).from_bytes(message_bytes) - assert message_31 == request_unfinished_block2 - assert bytes(message_31) == bytes(request_unfinished_block2) + message_31 = type(new_unfinished_block2).from_bytes(message_bytes) + assert message_31 == new_unfinished_block2 + assert bytes(message_31) == 
bytes(new_unfinished_block2) message_bytes, input_bytes = parse_blob(input_bytes) - message_32 = type(request_puzzle_solution).from_bytes(message_bytes) - assert message_32 == request_puzzle_solution - assert bytes(message_32) == bytes(request_puzzle_solution) + message_32 = type(request_unfinished_block2).from_bytes(message_bytes) + assert message_32 == request_unfinished_block2 + assert bytes(message_32) == bytes(request_unfinished_block2) message_bytes, input_bytes = parse_blob(input_bytes) - message_33 = type(puzzle_solution_response).from_bytes(message_bytes) - assert message_33 == puzzle_solution_response - assert bytes(message_33) == bytes(puzzle_solution_response) + message_33 = type(request_puzzle_solution).from_bytes(message_bytes) + assert message_33 == request_puzzle_solution + assert bytes(message_33) == bytes(request_puzzle_solution) message_bytes, input_bytes = parse_blob(input_bytes) - message_34 = type(respond_puzzle_solution).from_bytes(message_bytes) - assert message_34 == respond_puzzle_solution - assert bytes(message_34) == bytes(respond_puzzle_solution) + message_34 = type(puzzle_solution_response).from_bytes(message_bytes) + assert message_34 == puzzle_solution_response + assert bytes(message_34) == bytes(puzzle_solution_response) message_bytes, input_bytes = parse_blob(input_bytes) - message_35 = type(reject_puzzle_solution).from_bytes(message_bytes) - assert message_35 == reject_puzzle_solution - assert bytes(message_35) == bytes(reject_puzzle_solution) + message_35 = type(respond_puzzle_solution).from_bytes(message_bytes) + assert message_35 == respond_puzzle_solution + assert bytes(message_35) == bytes(respond_puzzle_solution) message_bytes, input_bytes = parse_blob(input_bytes) - message_36 = type(send_transaction).from_bytes(message_bytes) - assert message_36 == send_transaction - assert bytes(message_36) == bytes(send_transaction) + message_36 = type(reject_puzzle_solution).from_bytes(message_bytes) + assert message_36 == reject_puzzle_solution + assert bytes(message_36) == bytes(reject_puzzle_solution) message_bytes, input_bytes = parse_blob(input_bytes) - message_37 = type(transaction_ack).from_bytes(message_bytes) - assert message_37 == transaction_ack - assert bytes(message_37) == bytes(transaction_ack) + message_37 = type(send_transaction).from_bytes(message_bytes) + assert message_37 == send_transaction + assert bytes(message_37) == bytes(send_transaction) message_bytes, input_bytes = parse_blob(input_bytes) - message_38 = type(new_peak_wallet).from_bytes(message_bytes) - assert message_38 == new_peak_wallet - assert bytes(message_38) == bytes(new_peak_wallet) + message_38 = type(transaction_ack).from_bytes(message_bytes) + assert message_38 == transaction_ack + assert bytes(message_38) == bytes(transaction_ack) message_bytes, input_bytes = parse_blob(input_bytes) - message_39 = type(request_block_header).from_bytes(message_bytes) - assert message_39 == request_block_header - assert bytes(message_39) == bytes(request_block_header) + message_39 = type(new_peak_wallet).from_bytes(message_bytes) + assert message_39 == new_peak_wallet + assert bytes(message_39) == bytes(new_peak_wallet) message_bytes, input_bytes = parse_blob(input_bytes) - message_40 = type(request_block_headers).from_bytes(message_bytes) - assert message_40 == request_block_headers - assert bytes(message_40) == bytes(request_block_headers) + message_40 = type(request_block_header).from_bytes(message_bytes) + assert message_40 == request_block_header + assert bytes(message_40) == 
bytes(request_block_header) message_bytes, input_bytes = parse_blob(input_bytes) - message_41 = type(respond_header_block).from_bytes(message_bytes) - assert message_41 == respond_header_block - assert bytes(message_41) == bytes(respond_header_block) + message_41 = type(request_block_headers).from_bytes(message_bytes) + assert message_41 == request_block_headers + assert bytes(message_41) == bytes(request_block_headers) message_bytes, input_bytes = parse_blob(input_bytes) - message_42 = type(respond_block_headers).from_bytes(message_bytes) - assert message_42 == respond_block_headers - assert bytes(message_42) == bytes(respond_block_headers) + message_42 = type(respond_header_block).from_bytes(message_bytes) + assert message_42 == respond_header_block + assert bytes(message_42) == bytes(respond_header_block) message_bytes, input_bytes = parse_blob(input_bytes) - message_43 = type(reject_header_request).from_bytes(message_bytes) - assert message_43 == reject_header_request - assert bytes(message_43) == bytes(reject_header_request) + message_43 = type(respond_block_headers).from_bytes(message_bytes) + assert message_43 == respond_block_headers + assert bytes(message_43) == bytes(respond_block_headers) message_bytes, input_bytes = parse_blob(input_bytes) - message_44 = type(request_removals).from_bytes(message_bytes) - assert message_44 == request_removals - assert bytes(message_44) == bytes(request_removals) + message_44 = type(reject_header_request).from_bytes(message_bytes) + assert message_44 == reject_header_request + assert bytes(message_44) == bytes(reject_header_request) message_bytes, input_bytes = parse_blob(input_bytes) - message_45 = type(respond_removals).from_bytes(message_bytes) - assert message_45 == respond_removals - assert bytes(message_45) == bytes(respond_removals) + message_45 = type(request_removals).from_bytes(message_bytes) + assert message_45 == request_removals + assert bytes(message_45) == bytes(request_removals) message_bytes, input_bytes = parse_blob(input_bytes) - message_46 = type(reject_removals_request).from_bytes(message_bytes) - assert message_46 == reject_removals_request - assert bytes(message_46) == bytes(reject_removals_request) + message_46 = type(respond_removals).from_bytes(message_bytes) + assert message_46 == respond_removals + assert bytes(message_46) == bytes(respond_removals) message_bytes, input_bytes = parse_blob(input_bytes) - message_47 = type(request_additions).from_bytes(message_bytes) - assert message_47 == request_additions - assert bytes(message_47) == bytes(request_additions) + message_47 = type(reject_removals_request).from_bytes(message_bytes) + assert message_47 == reject_removals_request + assert bytes(message_47) == bytes(reject_removals_request) message_bytes, input_bytes = parse_blob(input_bytes) - message_48 = type(respond_additions).from_bytes(message_bytes) - assert message_48 == respond_additions - assert bytes(message_48) == bytes(respond_additions) + message_48 = type(request_additions).from_bytes(message_bytes) + assert message_48 == request_additions + assert bytes(message_48) == bytes(request_additions) message_bytes, input_bytes = parse_blob(input_bytes) - message_49 = type(reject_additions).from_bytes(message_bytes) - assert message_49 == reject_additions - assert bytes(message_49) == bytes(reject_additions) + message_49 = type(respond_additions).from_bytes(message_bytes) + assert message_49 == respond_additions + assert bytes(message_49) == bytes(respond_additions) message_bytes, input_bytes = parse_blob(input_bytes) 
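The long run of renumbered asserts in this hunk is mechanical: `partial_proof` was inserted as the fifth message in the serialized fixture stream and the solver messages were appended at the end, so every subsequent `message_N` index shifts by one against `protocol_messages_bytes-v1.0`. The stream is walked by `parse_blob`, whose implementation is not shown in this diff; below is a sketch of the kind of length-prefixed framing it implies, assuming a 4-byte big-endian length prefix per message (the prefix width and endianness are assumptions, not taken from the source):

```python
# Hypothetical re-implementation of parse_blob for illustration only; the
# real helper lives in the chia test utilities and its exact framing is
# an assumption here.

def parse_blob(data: bytes) -> tuple[bytes, bytes]:
    size = int.from_bytes(data[:4], "big")
    return data[4 : 4 + size], data[4 + size :]

stream = b"".join(len(m).to_bytes(4, "big") + m for m in (b"msg-a", b"msg-b"))
first, stream = parse_blob(stream)
second, stream = parse_blob(stream)
assert (first, second, stream) == (b"msg-a", b"msg-b", b"")
```

This is why appending new messages (like `solver_info` and `solver_response`) at the end of the stream is cheap, while inserting one early forces the wholesale renumbering seen here.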
- message_50 = type(request_header_blocks).from_bytes(message_bytes) - assert message_50 == request_header_blocks - assert bytes(message_50) == bytes(request_header_blocks) + message_50 = type(reject_additions).from_bytes(message_bytes) + assert message_50 == reject_additions + assert bytes(message_50) == bytes(reject_additions) message_bytes, input_bytes = parse_blob(input_bytes) - message_51 = type(reject_header_blocks).from_bytes(message_bytes) - assert message_51 == reject_header_blocks - assert bytes(message_51) == bytes(reject_header_blocks) + message_51 = type(request_header_blocks).from_bytes(message_bytes) + assert message_51 == request_header_blocks + assert bytes(message_51) == bytes(request_header_blocks) message_bytes, input_bytes = parse_blob(input_bytes) - message_52 = type(respond_header_blocks).from_bytes(message_bytes) - assert message_52 == respond_header_blocks - assert bytes(message_52) == bytes(respond_header_blocks) + message_52 = type(reject_header_blocks).from_bytes(message_bytes) + assert message_52 == reject_header_blocks + assert bytes(message_52) == bytes(reject_header_blocks) message_bytes, input_bytes = parse_blob(input_bytes) - message_53 = type(coin_state).from_bytes(message_bytes) - assert message_53 == coin_state - assert bytes(message_53) == bytes(coin_state) + message_53 = type(respond_header_blocks).from_bytes(message_bytes) + assert message_53 == respond_header_blocks + assert bytes(message_53) == bytes(respond_header_blocks) message_bytes, input_bytes = parse_blob(input_bytes) - message_54 = type(register_for_ph_updates).from_bytes(message_bytes) - assert message_54 == register_for_ph_updates - assert bytes(message_54) == bytes(register_for_ph_updates) + message_54 = type(coin_state).from_bytes(message_bytes) + assert message_54 == coin_state + assert bytes(message_54) == bytes(coin_state) message_bytes, input_bytes = parse_blob(input_bytes) - message_55 = type(reject_block_headers).from_bytes(message_bytes) - assert message_55 == reject_block_headers - assert bytes(message_55) == bytes(reject_block_headers) + message_55 = type(register_for_ph_updates).from_bytes(message_bytes) + assert message_55 == register_for_ph_updates + assert bytes(message_55) == bytes(register_for_ph_updates) message_bytes, input_bytes = parse_blob(input_bytes) - message_56 = type(respond_to_ph_updates).from_bytes(message_bytes) - assert message_56 == respond_to_ph_updates - assert bytes(message_56) == bytes(respond_to_ph_updates) + message_56 = type(reject_block_headers).from_bytes(message_bytes) + assert message_56 == reject_block_headers + assert bytes(message_56) == bytes(reject_block_headers) message_bytes, input_bytes = parse_blob(input_bytes) - message_57 = type(register_for_coin_updates).from_bytes(message_bytes) - assert message_57 == register_for_coin_updates - assert bytes(message_57) == bytes(register_for_coin_updates) + message_57 = type(respond_to_ph_updates).from_bytes(message_bytes) + assert message_57 == respond_to_ph_updates + assert bytes(message_57) == bytes(respond_to_ph_updates) message_bytes, input_bytes = parse_blob(input_bytes) - message_58 = type(respond_to_coin_updates).from_bytes(message_bytes) - assert message_58 == respond_to_coin_updates - assert bytes(message_58) == bytes(respond_to_coin_updates) + message_58 = type(register_for_coin_updates).from_bytes(message_bytes) + assert message_58 == register_for_coin_updates + assert bytes(message_58) == bytes(register_for_coin_updates) message_bytes, input_bytes = parse_blob(input_bytes) - message_59 = 
type(coin_state_update).from_bytes(message_bytes) - assert message_59 == coin_state_update - assert bytes(message_59) == bytes(coin_state_update) + message_59 = type(respond_to_coin_updates).from_bytes(message_bytes) + assert message_59 == respond_to_coin_updates + assert bytes(message_59) == bytes(respond_to_coin_updates) message_bytes, input_bytes = parse_blob(input_bytes) - message_60 = type(request_children).from_bytes(message_bytes) - assert message_60 == request_children - assert bytes(message_60) == bytes(request_children) + message_60 = type(coin_state_update).from_bytes(message_bytes) + assert message_60 == coin_state_update + assert bytes(message_60) == bytes(coin_state_update) message_bytes, input_bytes = parse_blob(input_bytes) - message_61 = type(respond_children).from_bytes(message_bytes) - assert message_61 == respond_children - assert bytes(message_61) == bytes(respond_children) + message_61 = type(request_children).from_bytes(message_bytes) + assert message_61 == request_children + assert bytes(message_61) == bytes(request_children) message_bytes, input_bytes = parse_blob(input_bytes) - message_62 = type(request_ses_info).from_bytes(message_bytes) - assert message_62 == request_ses_info - assert bytes(message_62) == bytes(request_ses_info) + message_62 = type(respond_children).from_bytes(message_bytes) + assert message_62 == respond_children + assert bytes(message_62) == bytes(respond_children) message_bytes, input_bytes = parse_blob(input_bytes) - message_63 = type(respond_ses_info).from_bytes(message_bytes) - assert message_63 == respond_ses_info - assert bytes(message_63) == bytes(respond_ses_info) + message_63 = type(request_ses_info).from_bytes(message_bytes) + assert message_63 == request_ses_info + assert bytes(message_63) == bytes(request_ses_info) message_bytes, input_bytes = parse_blob(input_bytes) - message_64 = type(coin_state_filters).from_bytes(message_bytes) - assert message_64 == coin_state_filters - assert bytes(message_64) == bytes(coin_state_filters) + message_64 = type(respond_ses_info).from_bytes(message_bytes) + assert message_64 == respond_ses_info + assert bytes(message_64) == bytes(respond_ses_info) message_bytes, input_bytes = parse_blob(input_bytes) - message_65 = type(request_remove_puzzle_subscriptions).from_bytes(message_bytes) - assert message_65 == request_remove_puzzle_subscriptions - assert bytes(message_65) == bytes(request_remove_puzzle_subscriptions) + message_65 = type(coin_state_filters).from_bytes(message_bytes) + assert message_65 == coin_state_filters + assert bytes(message_65) == bytes(coin_state_filters) message_bytes, input_bytes = parse_blob(input_bytes) - message_66 = type(respond_remove_puzzle_subscriptions).from_bytes(message_bytes) - assert message_66 == respond_remove_puzzle_subscriptions - assert bytes(message_66) == bytes(respond_remove_puzzle_subscriptions) + message_66 = type(request_remove_puzzle_subscriptions).from_bytes(message_bytes) + assert message_66 == request_remove_puzzle_subscriptions + assert bytes(message_66) == bytes(request_remove_puzzle_subscriptions) message_bytes, input_bytes = parse_blob(input_bytes) - message_67 = type(request_remove_coin_subscriptions).from_bytes(message_bytes) - assert message_67 == request_remove_coin_subscriptions - assert bytes(message_67) == bytes(request_remove_coin_subscriptions) + message_67 = type(respond_remove_puzzle_subscriptions).from_bytes(message_bytes) + assert message_67 == respond_remove_puzzle_subscriptions + assert bytes(message_67) == 
bytes(respond_remove_puzzle_subscriptions) message_bytes, input_bytes = parse_blob(input_bytes) - message_68 = type(respond_remove_coin_subscriptions).from_bytes(message_bytes) - assert message_68 == respond_remove_coin_subscriptions - assert bytes(message_68) == bytes(respond_remove_coin_subscriptions) + message_68 = type(request_remove_coin_subscriptions).from_bytes(message_bytes) + assert message_68 == request_remove_coin_subscriptions + assert bytes(message_68) == bytes(request_remove_coin_subscriptions) message_bytes, input_bytes = parse_blob(input_bytes) - message_69 = type(request_puzzle_state).from_bytes(message_bytes) - assert message_69 == request_puzzle_state - assert bytes(message_69) == bytes(request_puzzle_state) + message_69 = type(respond_remove_coin_subscriptions).from_bytes(message_bytes) + assert message_69 == respond_remove_coin_subscriptions + assert bytes(message_69) == bytes(respond_remove_coin_subscriptions) message_bytes, input_bytes = parse_blob(input_bytes) - message_70 = type(reject_puzzle_state).from_bytes(message_bytes) - assert message_70 == reject_puzzle_state - assert bytes(message_70) == bytes(reject_puzzle_state) + message_70 = type(request_puzzle_state).from_bytes(message_bytes) + assert message_70 == request_puzzle_state + assert bytes(message_70) == bytes(request_puzzle_state) message_bytes, input_bytes = parse_blob(input_bytes) - message_71 = type(respond_puzzle_state).from_bytes(message_bytes) - assert message_71 == respond_puzzle_state - assert bytes(message_71) == bytes(respond_puzzle_state) + message_71 = type(reject_puzzle_state).from_bytes(message_bytes) + assert message_71 == reject_puzzle_state + assert bytes(message_71) == bytes(reject_puzzle_state) message_bytes, input_bytes = parse_blob(input_bytes) - message_72 = type(request_coin_state).from_bytes(message_bytes) - assert message_72 == request_coin_state - assert bytes(message_72) == bytes(request_coin_state) + message_72 = type(respond_puzzle_state).from_bytes(message_bytes) + assert message_72 == respond_puzzle_state + assert bytes(message_72) == bytes(respond_puzzle_state) message_bytes, input_bytes = parse_blob(input_bytes) - message_73 = type(respond_coin_state).from_bytes(message_bytes) - assert message_73 == respond_coin_state - assert bytes(message_73) == bytes(respond_coin_state) + message_73 = type(request_coin_state).from_bytes(message_bytes) + assert message_73 == request_coin_state + assert bytes(message_73) == bytes(request_coin_state) message_bytes, input_bytes = parse_blob(input_bytes) - message_74 = type(reject_coin_state).from_bytes(message_bytes) - assert message_74 == reject_coin_state - assert bytes(message_74) == bytes(reject_coin_state) + message_74 = type(respond_coin_state).from_bytes(message_bytes) + assert message_74 == respond_coin_state + assert bytes(message_74) == bytes(respond_coin_state) message_bytes, input_bytes = parse_blob(input_bytes) - message_75 = type(request_cost_info).from_bytes(message_bytes) - assert message_75 == request_cost_info - assert bytes(message_75) == bytes(request_cost_info) + message_75 = type(reject_coin_state).from_bytes(message_bytes) + assert message_75 == reject_coin_state + assert bytes(message_75) == bytes(reject_coin_state) message_bytes, input_bytes = parse_blob(input_bytes) - message_76 = type(respond_cost_info).from_bytes(message_bytes) - assert message_76 == respond_cost_info - assert bytes(message_76) == bytes(respond_cost_info) + message_76 = type(request_cost_info).from_bytes(message_bytes) + assert message_76 == 
request_cost_info + assert bytes(message_76) == bytes(request_cost_info) message_bytes, input_bytes = parse_blob(input_bytes) - message_77 = type(pool_difficulty).from_bytes(message_bytes) - assert message_77 == pool_difficulty - assert bytes(message_77) == bytes(pool_difficulty) + message_77 = type(respond_cost_info).from_bytes(message_bytes) + assert message_77 == respond_cost_info + assert bytes(message_77) == bytes(respond_cost_info) message_bytes, input_bytes = parse_blob(input_bytes) - message_78 = type(harvester_handhsake).from_bytes(message_bytes) - assert message_78 == harvester_handhsake - assert bytes(message_78) == bytes(harvester_handhsake) + message_78 = type(pool_difficulty).from_bytes(message_bytes) + assert message_78 == pool_difficulty + assert bytes(message_78) == bytes(pool_difficulty) message_bytes, input_bytes = parse_blob(input_bytes) - message_79 = type(new_signage_point_harvester).from_bytes(message_bytes) - assert message_79 == new_signage_point_harvester - assert bytes(message_79) == bytes(new_signage_point_harvester) + message_79 = type(harvester_handhsake).from_bytes(message_bytes) + assert message_79 == harvester_handhsake + assert bytes(message_79) == bytes(harvester_handhsake) message_bytes, input_bytes = parse_blob(input_bytes) - message_80 = type(new_proof_of_space).from_bytes(message_bytes) - assert message_80 == new_proof_of_space - assert bytes(message_80) == bytes(new_proof_of_space) + message_80 = type(new_signage_point_harvester).from_bytes(message_bytes) + assert message_80 == new_signage_point_harvester + assert bytes(message_80) == bytes(new_signage_point_harvester) message_bytes, input_bytes = parse_blob(input_bytes) - message_81 = type(request_signatures).from_bytes(message_bytes) - assert message_81 == request_signatures - assert bytes(message_81) == bytes(request_signatures) + message_81 = type(new_signage_point_harvester2).from_bytes(message_bytes) + assert message_81 == new_signage_point_harvester2 + assert bytes(message_81) == bytes(new_signage_point_harvester2) message_bytes, input_bytes = parse_blob(input_bytes) - message_82 = type(respond_signatures).from_bytes(message_bytes) - assert message_82 == respond_signatures - assert bytes(message_82) == bytes(respond_signatures) + message_82 = type(new_proof_of_space).from_bytes(message_bytes) + assert message_82 == new_proof_of_space + assert bytes(message_82) == bytes(new_proof_of_space) message_bytes, input_bytes = parse_blob(input_bytes) - message_83 = type(plot).from_bytes(message_bytes) - assert message_83 == plot - assert bytes(message_83) == bytes(plot) + message_83 = type(request_signatures).from_bytes(message_bytes) + assert message_83 == request_signatures + assert bytes(message_83) == bytes(request_signatures) message_bytes, input_bytes = parse_blob(input_bytes) - message_84 = type(request_plots).from_bytes(message_bytes) - assert message_84 == request_plots - assert bytes(message_84) == bytes(request_plots) + message_84 = type(respond_signatures).from_bytes(message_bytes) + assert message_84 == respond_signatures + assert bytes(message_84) == bytes(respond_signatures) message_bytes, input_bytes = parse_blob(input_bytes) - message_85 = type(respond_plots).from_bytes(message_bytes) - assert message_85 == respond_plots - assert bytes(message_85) == bytes(respond_plots) + message_85 = type(plot).from_bytes(message_bytes) + assert message_85 == plot + assert bytes(message_85) == bytes(plot) message_bytes, input_bytes = parse_blob(input_bytes) - message_86 = 
type(request_peers_introducer).from_bytes(message_bytes) - assert message_86 == request_peers_introducer - assert bytes(message_86) == bytes(request_peers_introducer) + message_86 = type(request_plots).from_bytes(message_bytes) + assert message_86 == request_plots + assert bytes(message_86) == bytes(request_plots) message_bytes, input_bytes = parse_blob(input_bytes) - message_87 = type(respond_peers_introducer).from_bytes(message_bytes) - assert message_87 == respond_peers_introducer - assert bytes(message_87) == bytes(respond_peers_introducer) + message_87 = type(respond_plots).from_bytes(message_bytes) + assert message_87 == respond_plots + assert bytes(message_87) == bytes(respond_plots) message_bytes, input_bytes = parse_blob(input_bytes) - message_88 = type(authentication_payload).from_bytes(message_bytes) - assert message_88 == authentication_payload - assert bytes(message_88) == bytes(authentication_payload) + message_88 = type(request_peers_introducer).from_bytes(message_bytes) + assert message_88 == request_peers_introducer + assert bytes(message_88) == bytes(request_peers_introducer) message_bytes, input_bytes = parse_blob(input_bytes) - message_89 = type(get_pool_info_response).from_bytes(message_bytes) - assert message_89 == get_pool_info_response - assert bytes(message_89) == bytes(get_pool_info_response) + message_89 = type(respond_peers_introducer).from_bytes(message_bytes) + assert message_89 == respond_peers_introducer + assert bytes(message_89) == bytes(respond_peers_introducer) message_bytes, input_bytes = parse_blob(input_bytes) - message_90 = type(post_partial_payload).from_bytes(message_bytes) - assert message_90 == post_partial_payload - assert bytes(message_90) == bytes(post_partial_payload) + message_90 = type(authentication_payload).from_bytes(message_bytes) + assert message_90 == authentication_payload + assert bytes(message_90) == bytes(authentication_payload) message_bytes, input_bytes = parse_blob(input_bytes) - message_91 = type(post_partial_request).from_bytes(message_bytes) - assert message_91 == post_partial_request - assert bytes(message_91) == bytes(post_partial_request) + message_91 = type(get_pool_info_response).from_bytes(message_bytes) + assert message_91 == get_pool_info_response + assert bytes(message_91) == bytes(get_pool_info_response) message_bytes, input_bytes = parse_blob(input_bytes) - message_92 = type(post_partial_response).from_bytes(message_bytes) - assert message_92 == post_partial_response - assert bytes(message_92) == bytes(post_partial_response) + message_92 = type(post_partial_payload).from_bytes(message_bytes) + assert message_92 == post_partial_payload + assert bytes(message_92) == bytes(post_partial_payload) message_bytes, input_bytes = parse_blob(input_bytes) - message_93 = type(get_farmer_response).from_bytes(message_bytes) - assert message_93 == get_farmer_response - assert bytes(message_93) == bytes(get_farmer_response) + message_93 = type(post_partial_request).from_bytes(message_bytes) + assert message_93 == post_partial_request + assert bytes(message_93) == bytes(post_partial_request) message_bytes, input_bytes = parse_blob(input_bytes) - message_94 = type(post_farmer_payload).from_bytes(message_bytes) - assert message_94 == post_farmer_payload - assert bytes(message_94) == bytes(post_farmer_payload) + message_94 = type(post_partial_response).from_bytes(message_bytes) + assert message_94 == post_partial_response + assert bytes(message_94) == bytes(post_partial_response) message_bytes, input_bytes = parse_blob(input_bytes) - 
message_95 = type(post_farmer_request).from_bytes(message_bytes) - assert message_95 == post_farmer_request - assert bytes(message_95) == bytes(post_farmer_request) + message_95 = type(get_farmer_response).from_bytes(message_bytes) + assert message_95 == get_farmer_response + assert bytes(message_95) == bytes(get_farmer_response) message_bytes, input_bytes = parse_blob(input_bytes) - message_96 = type(post_farmer_response).from_bytes(message_bytes) - assert message_96 == post_farmer_response - assert bytes(message_96) == bytes(post_farmer_response) + message_96 = type(post_farmer_payload).from_bytes(message_bytes) + assert message_96 == post_farmer_payload + assert bytes(message_96) == bytes(post_farmer_payload) message_bytes, input_bytes = parse_blob(input_bytes) - message_97 = type(put_farmer_payload).from_bytes(message_bytes) - assert message_97 == put_farmer_payload - assert bytes(message_97) == bytes(put_farmer_payload) + message_97 = type(post_farmer_request).from_bytes(message_bytes) + assert message_97 == post_farmer_request + assert bytes(message_97) == bytes(post_farmer_request) message_bytes, input_bytes = parse_blob(input_bytes) - message_98 = type(put_farmer_request).from_bytes(message_bytes) - assert message_98 == put_farmer_request - assert bytes(message_98) == bytes(put_farmer_request) + message_98 = type(post_farmer_response).from_bytes(message_bytes) + assert message_98 == post_farmer_response + assert bytes(message_98) == bytes(post_farmer_response) message_bytes, input_bytes = parse_blob(input_bytes) - message_99 = type(put_farmer_response).from_bytes(message_bytes) - assert message_99 == put_farmer_response - assert bytes(message_99) == bytes(put_farmer_response) + message_99 = type(put_farmer_payload).from_bytes(message_bytes) + assert message_99 == put_farmer_payload + assert bytes(message_99) == bytes(put_farmer_payload) message_bytes, input_bytes = parse_blob(input_bytes) - message_100 = type(error_response).from_bytes(message_bytes) - assert message_100 == error_response - assert bytes(message_100) == bytes(error_response) + message_100 = type(put_farmer_request).from_bytes(message_bytes) + assert message_100 == put_farmer_request + assert bytes(message_100) == bytes(put_farmer_request) message_bytes, input_bytes = parse_blob(input_bytes) - message_101 = type(new_peak_timelord).from_bytes(message_bytes) - assert message_101 == new_peak_timelord - assert bytes(message_101) == bytes(new_peak_timelord) + message_101 = type(put_farmer_response).from_bytes(message_bytes) + assert message_101 == put_farmer_response + assert bytes(message_101) == bytes(put_farmer_response) message_bytes, input_bytes = parse_blob(input_bytes) - message_102 = type(new_unfinished_block_timelord).from_bytes(message_bytes) - assert message_102 == new_unfinished_block_timelord - assert bytes(message_102) == bytes(new_unfinished_block_timelord) + message_102 = type(error_response).from_bytes(message_bytes) + assert message_102 == error_response + assert bytes(message_102) == bytes(error_response) message_bytes, input_bytes = parse_blob(input_bytes) - message_103 = type(new_infusion_point_vdf).from_bytes(message_bytes) - assert message_103 == new_infusion_point_vdf - assert bytes(message_103) == bytes(new_infusion_point_vdf) + message_103 = type(new_peak_timelord).from_bytes(message_bytes) + assert message_103 == new_peak_timelord + assert bytes(message_103) == bytes(new_peak_timelord) message_bytes, input_bytes = parse_blob(input_bytes) - message_104 = 
type(new_signage_point_vdf).from_bytes(message_bytes) - assert message_104 == new_signage_point_vdf - assert bytes(message_104) == bytes(new_signage_point_vdf) + message_104 = type(new_unfinished_block_timelord).from_bytes(message_bytes) + assert message_104 == new_unfinished_block_timelord + assert bytes(message_104) == bytes(new_unfinished_block_timelord) message_bytes, input_bytes = parse_blob(input_bytes) - message_105 = type(new_end_of_sub_slot_bundle).from_bytes(message_bytes) - assert message_105 == new_end_of_sub_slot_bundle - assert bytes(message_105) == bytes(new_end_of_sub_slot_bundle) + message_105 = type(new_infusion_point_vdf).from_bytes(message_bytes) + assert message_105 == new_infusion_point_vdf + assert bytes(message_105) == bytes(new_infusion_point_vdf) message_bytes, input_bytes = parse_blob(input_bytes) - message_106 = type(request_compact_proof_of_time).from_bytes(message_bytes) - assert message_106 == request_compact_proof_of_time - assert bytes(message_106) == bytes(request_compact_proof_of_time) + message_106 = type(new_signage_point_vdf).from_bytes(message_bytes) + assert message_106 == new_signage_point_vdf + assert bytes(message_106) == bytes(new_signage_point_vdf) message_bytes, input_bytes = parse_blob(input_bytes) - message_107 = type(respond_compact_proof_of_time).from_bytes(message_bytes) - assert message_107 == respond_compact_proof_of_time - assert bytes(message_107) == bytes(respond_compact_proof_of_time) + message_107 = type(new_end_of_sub_slot_bundle).from_bytes(message_bytes) + assert message_107 == new_end_of_sub_slot_bundle + assert bytes(message_107) == bytes(new_end_of_sub_slot_bundle) message_bytes, input_bytes = parse_blob(input_bytes) - message_108 = type(error_without_data).from_bytes(message_bytes) - assert message_108 == error_without_data - assert bytes(message_108) == bytes(error_without_data) + message_108 = type(request_compact_proof_of_time).from_bytes(message_bytes) + assert message_108 == request_compact_proof_of_time + assert bytes(message_108) == bytes(request_compact_proof_of_time) message_bytes, input_bytes = parse_blob(input_bytes) - message_109 = type(error_with_data).from_bytes(message_bytes) - assert message_109 == error_with_data - assert bytes(message_109) == bytes(error_with_data) + message_109 = type(respond_compact_proof_of_time).from_bytes(message_bytes) + assert message_109 == respond_compact_proof_of_time + assert bytes(message_109) == bytes(respond_compact_proof_of_time) + + message_bytes, input_bytes = parse_blob(input_bytes) + message_110 = type(error_without_data).from_bytes(message_bytes) + assert message_110 == error_without_data + assert bytes(message_110) == bytes(error_without_data) + + message_bytes, input_bytes = parse_blob(input_bytes) + message_111 = type(error_with_data).from_bytes(message_bytes) + assert message_111 == error_with_data + assert bytes(message_111) == bytes(error_with_data) + + message_bytes, input_bytes = parse_blob(input_bytes) + message_112 = type(solver_info).from_bytes(message_bytes) + assert message_112 == solver_info + assert bytes(message_112) == bytes(solver_info) + + message_bytes, input_bytes = parse_blob(input_bytes) + message_113 = type(solver_response).from_bytes(message_bytes) + assert message_113 == solver_response + assert bytes(message_113) == bytes(solver_response) assert input_bytes == b"" diff --git a/chia/_tests/util/test_network_protocol_json.py b/chia/_tests/util/test_network_protocol_json.py index 3acb9240a83e..958f1453284c 100644 --- 
a/chia/_tests/util/test_network_protocol_json.py +++ b/chia/_tests/util/test_network_protocol_json.py @@ -18,6 +18,8 @@ def test_protocol_json() -> None: assert type(farming_info).from_json_dict(farming_info_json) == farming_info assert str(signed_values_json) == str(signed_values.to_json_dict()) assert type(signed_values).from_json_dict(signed_values_json) == signed_values + assert str(partial_proof_json) == str(partial_proof.to_json_dict()) + assert type(partial_proof).from_json_dict(partial_proof_json) == partial_proof assert str(new_peak_json) == str(new_peak.to_json_dict()) assert type(new_peak).from_json_dict(new_peak_json) == new_peak assert str(new_transaction_json) == str(new_transaction.to_json_dict()) @@ -194,6 +196,11 @@ def test_protocol_json() -> None: type(new_signage_point_harvester).from_json_dict(new_signage_point_harvester_json) == new_signage_point_harvester ) + assert str(new_signage_point_harvester2_json) == str(new_signage_point_harvester2.to_json_dict()) + assert ( + type(new_signage_point_harvester2).from_json_dict(new_signage_point_harvester2_json) + == new_signage_point_harvester2 + ) assert str(new_proof_of_space_json) == str(new_proof_of_space.to_json_dict()) assert type(new_proof_of_space).from_json_dict(new_proof_of_space_json) == new_proof_of_space assert str(request_signatures_json) == str(request_signatures.to_json_dict()) @@ -265,3 +272,7 @@ def test_protocol_json() -> None: assert type(error_without_data).from_json_dict(error_without_data_json) == error_without_data assert str(error_with_data_json) == str(error_with_data.to_json_dict()) assert type(error_with_data).from_json_dict(error_with_data_json) == error_with_data + assert str(solver_info_json) == str(solver_info.to_json_dict()) + assert type(solver_info).from_json_dict(solver_info_json) == solver_info + assert str(solver_response_json) == str(solver_response.to_json_dict()) + assert type(solver_response).from_json_dict(solver_response_json) == solver_response diff --git a/chia/_tests/util/test_network_protocol_test.py b/chia/_tests/util/test_network_protocol_test.py index 93a73b7d2528..0adb46afa5e6 100644 --- a/chia/_tests/util/test_network_protocol_test.py +++ b/chia/_tests/util/test_network_protocol_test.py @@ -12,6 +12,7 @@ pool_protocol, protocol_message_types, shared_protocol, + solver_protocol, timelord_protocol, wallet_protocol, ) @@ -58,6 +59,7 @@ def test_message_ids() -> None: assert isinstance(entry.value, ast.Constant) assert isinstance(entry.targets[0], ast.Name) message_id = entry.value.value + assert isinstance(message_id, int), f"message ID must be an int, got {type(message_id).__name__}" message_name = entry.targets[0].id if message_id in message_ids: # pragma: no cover raise AssertionError( @@ -172,6 +174,7 @@ def test_missing_messages() -> None: "ProofOfSpaceFeeInfo", "NewProofOfSpace", "NewSignagePointHarvester", + "NewSignagePointHarvester2", "Plot", "PlotSyncDone", "PlotSyncError", @@ -187,6 +190,7 @@ def test_missing_messages() -> None: "RequestSignatures", "RespondPlots", "RespondSignatures", + "PartialProofsData", } introducer_msgs = {"RequestPeersIntroducer", "RespondPeersIntroducer"} @@ -218,6 +222,8 @@ def test_missing_messages() -> None: "RespondCompactProofOfTime", } + solver_msgs = {"SolverInfo", "SolverResponse"} + shared_msgs = {"Handshake", "Capability", "Error"} # if these asserts fail, make sure to add the new network protocol messages @@ -251,6 +257,10 @@ def test_missing_messages() -> None: f"message types were added or removed from timelord_protocol. 
{STANDARD_ADVICE}" ) + assert types_in_module(solver_protocol) == solver_msgs, ( + f"message types were added or removed from shared_protocol. {STANDARD_ADVICE}" + ) + assert types_in_module(shared_protocol) == shared_msgs, ( f"message types were added or removed from shared_protocol. {STANDARD_ADVICE}" ) diff --git a/chia/_tests/util/test_replace_str_to_bytes.py b/chia/_tests/util/test_replace_str_to_bytes.py index e171ac6d8e12..20525ac9ef87 100644 --- a/chia/_tests/util/test_replace_str_to_bytes.py +++ b/chia/_tests/util/test_replace_str_to_bytes.py @@ -65,12 +65,12 @@ PLOT_FILTER_128_HEIGHT=uint32(10542000), PLOT_FILTER_64_HEIGHT=uint32(15592000), PLOT_FILTER_32_HEIGHT=uint32(20643000), - PLOT_DIFFICULTY_INITIAL=uint8(2), - PLOT_DIFFICULTY_4_HEIGHT=uint32(0xFFFFFFFF), - PLOT_DIFFICULTY_5_HEIGHT=uint32(0xFFFFFFFF), - PLOT_DIFFICULTY_6_HEIGHT=uint32(0xFFFFFFFF), - PLOT_DIFFICULTY_7_HEIGHT=uint32(0xFFFFFFFF), - PLOT_DIFFICULTY_8_HEIGHT=uint32(0xFFFFFFFF), + PLOT_STRENGTH_INITIAL=uint8(2), + PLOT_STRENGTH_4_HEIGHT=uint32(0xFFFFFFFF), + PLOT_STRENGTH_5_HEIGHT=uint32(0xFFFFFFFF), + PLOT_STRENGTH_6_HEIGHT=uint32(0xFFFFFFFF), + PLOT_STRENGTH_7_HEIGHT=uint32(0xFFFFFFFF), + PLOT_STRENGTH_8_HEIGHT=uint32(0xFFFFFFFF), ) diff --git a/chia/_tests/wallet/cat_wallet/test_cat_wallet.py b/chia/_tests/wallet/cat_wallet/test_cat_wallet.py index 93d04a608149..4fc1d16baf41 100644 --- a/chia/_tests/wallet/cat_wallet/test_cat_wallet.py +++ b/chia/_tests/wallet/cat_wallet/test_cat_wallet.py @@ -44,7 +44,7 @@ from chia.wallet.wallet_info import WalletInfo from chia.wallet.wallet_interested_store import WalletInterestedStore from chia.wallet.wallet_node import WalletNode -from chia.wallet.wallet_request_types import GetTransactionMemo, PushTX +from chia.wallet.wallet_request_types import GetTransactionMemo, PushTX, SendTransaction from chia.wallet.wallet_state_manager import WalletStateManager @@ -377,9 +377,9 @@ async def test_cat_spend(wallet_environments: WalletTestFramework, wallet_type: if tx_record.spend_bundle is not None: tx_id = tx_record.name assert tx_id is not None - memos = await env_1.rpc_client.get_transaction_memo(GetTransactionMemo(transaction_id=tx_id)) - assert len(memos.coins_with_memos) == 2 - assert cat_2_hash in {coin_w_memos.memos[0] for coin_w_memos in memos.coins_with_memos} + memo_response = await env_1.rpc_client.get_transaction_memo(GetTransactionMemo(transaction_id=tx_id)) + assert len(memo_response.memo_dict) == 2 + assert cat_2_hash in {memos[0] for memos in memo_response.memo_dict.values()} await wallet_environments.process_pending_states( [ @@ -454,9 +454,9 @@ async def test_cat_spend(wallet_environments: WalletTestFramework, wallet_type: assert len(coins) == 1 coin = coins.pop() tx_id = coin.name() - memos = await env_2.rpc_client.get_transaction_memo(GetTransactionMemo(transaction_id=tx_id)) - assert len(memos.coins_with_memos) == 2 - assert cat_2_hash in {coin_w_memos.memos[0] for coin_w_memos in memos.coins_with_memos} + memo_response = await env_2.rpc_client.get_transaction_memo(GetTransactionMemo(transaction_id=tx_id)) + assert len(memo_response.memo_dict) == 2 + assert cat_2_hash in {memos[0] for memos in memo_response.memo_dict.values()} async with cat_wallet.wallet_state_manager.new_action_scope( wallet_environments.tx_config, push=True ) as action_scope: @@ -1153,7 +1153,7 @@ async def check_all_there() -> bool: assert action_scope.side_effects.transactions[0].amount == uint64(max_sent_amount) # 3) Generate transaction that is greater than limit - with 
pytest.raises(ValueError, match="Can't select amount higher than our spendable balance."): + with pytest.raises(ValueError, match="Can't select amount higher than our spendable balance"): async with cat_wallet.wallet_state_manager.new_action_scope( wallet_environments.tx_config, push=False ) as action_scope: @@ -1429,7 +1429,12 @@ async def test_cat_change_detection(wallet_environments: WalletTestFramework, wa cat_amount_0 = uint64(100) cat_amount_1 = uint64(5) - tx = (await env.rpc_client.send_transaction(1, cat_amount_0, addr, wallet_environments.tx_config)).transaction + tx = ( + await env.rpc_client.send_transaction( + SendTransaction(wallet_id=uint32(1), amount=cat_amount_0, address=addr, push=True), + wallet_environments.tx_config, + ) + ).transaction spend_bundle = tx.spend_bundle assert spend_bundle is not None diff --git a/chia/_tests/wallet/cat_wallet/test_trades.py b/chia/_tests/wallet/cat_wallet/test_trades.py index 17de5c60bdf6..9ffecf46d9f3 100644 --- a/chia/_tests/wallet/cat_wallet/test_trades.py +++ b/chia/_tests/wallet/cat_wallet/test_trades.py @@ -1629,8 +1629,9 @@ async def assert_trade_tx_number(wallet_node: WalletNode, trade_id: bytes32, num indirect=True, ) @pytest.mark.limit_consensus_modes(reason="irrelevant") +@pytest.mark.parametrize("wallet_type", [CATWallet, RCATWallet]) @pytest.mark.anyio -async def test_trade_cancellation(wallet_environments: WalletTestFramework) -> None: +async def test_trade_cancellation(wallet_environments: WalletTestFramework, wallet_type: type[CATWallet]) -> None: env_maker = wallet_environments.environments[0] env_taker = wallet_environments.environments[1] @@ -1645,16 +1646,15 @@ async def test_trade_cancellation(wallet_environments: WalletTestFramework) -> N xch_to_cat_amount = uint64(100) - async with env_maker.wallet_state_manager.new_action_scope( - wallet_environments.tx_config, push=True - ) as action_scope: - cat_wallet_maker = await CATWallet.create_new_cat_wallet( - env_maker.wallet_state_manager, - env_maker.xch_wallet, - {"identifier": "genesis_by_id"}, - xch_to_cat_amount, - action_scope, - ) + cat_wallet_maker = await mint_cat( + wallet_environments, + env_maker, + "xch", + "cat", + xch_to_cat_amount, + wallet_type, + "cat", + ) await wallet_environments.process_pending_states( [ @@ -1941,8 +1941,9 @@ async def test_trade_cancellation(wallet_environments: WalletTestFramework) -> N indirect=True, ) @pytest.mark.limit_consensus_modes(reason="irrelevant") +@pytest.mark.parametrize("wallet_type", [CATWallet, RCATWallet]) @pytest.mark.anyio -async def test_trade_conflict(wallet_environments: WalletTestFramework) -> None: +async def test_trade_conflict(wallet_environments: WalletTestFramework, wallet_type: type[CATWallet]) -> None: env_maker = wallet_environments.environments[0] env_taker = wallet_environments.environments[1] env_trader = wallet_environments.environments[2] @@ -1963,16 +1964,15 @@ async def test_trade_conflict(wallet_environments: WalletTestFramework) -> None: xch_to_cat_amount = uint64(100) fee = uint64(10) - async with env_maker.wallet_state_manager.new_action_scope( - wallet_environments.tx_config, push=True - ) as action_scope: - await CATWallet.create_new_cat_wallet( - env_maker.wallet_state_manager, - env_maker.xch_wallet, - {"identifier": "genesis_by_id"}, - xch_to_cat_amount, - action_scope, - ) + await mint_cat( + wallet_environments, + env_maker, + "xch", + "cat", + xch_to_cat_amount, + wallet_type, + "cat", + ) await wallet_environments.process_pending_states( [ @@ -2138,8 +2138,9 @@ async def 
test_trade_conflict(wallet_environments: WalletTestFramework) -> None: indirect=True, ) @pytest.mark.limit_consensus_modes(reason="irrelevant") +@pytest.mark.parametrize("wallet_type", [CATWallet, RCATWallet]) @pytest.mark.anyio -async def test_trade_bad_spend(wallet_environments: WalletTestFramework) -> None: +async def test_trade_bad_spend(wallet_environments: WalletTestFramework, wallet_type: type[CATWallet]) -> None: env_maker = wallet_environments.environments[0] env_taker = wallet_environments.environments[1] @@ -2154,16 +2155,15 @@ async def test_trade_bad_spend(wallet_environments: WalletTestFramework) -> None xch_to_cat_amount = uint64(100) - async with env_maker.wallet_state_manager.new_action_scope( - wallet_environments.tx_config, push=True - ) as action_scope: - await CATWallet.create_new_cat_wallet( - env_maker.wallet_state_manager, - env_maker.xch_wallet, - {"identifier": "genesis_by_id"}, - xch_to_cat_amount, - action_scope, - ) + await mint_cat( + wallet_environments, + env_maker, + "xch", + "cat", + xch_to_cat_amount, + wallet_type, + "cat", + ) await wallet_environments.process_pending_states( [ @@ -2260,8 +2260,9 @@ def check_wallet_cache_empty() -> bool: indirect=True, ) @pytest.mark.limit_consensus_modes(reason="irrelevant") +@pytest.mark.parametrize("wallet_type", [CATWallet, RCATWallet]) @pytest.mark.anyio -async def test_trade_high_fee(wallet_environments: WalletTestFramework) -> None: +async def test_trade_high_fee(wallet_environments: WalletTestFramework, wallet_type: type[CATWallet]) -> None: env_maker = wallet_environments.environments[0] env_taker = wallet_environments.environments[1] @@ -2276,16 +2277,15 @@ async def test_trade_high_fee(wallet_environments: WalletTestFramework) -> None: xch_to_cat_amount = uint64(100) - async with env_maker.wallet_state_manager.new_action_scope( - wallet_environments.tx_config, push=True - ) as action_scope: - await CATWallet.create_new_cat_wallet( - env_maker.wallet_state_manager, - env_maker.xch_wallet, - {"identifier": "genesis_by_id"}, - xch_to_cat_amount, - action_scope, - ) + await mint_cat( + wallet_environments, + env_maker, + "xch", + "cat", + xch_to_cat_amount, + wallet_type, + "cat", + ) await wallet_environments.process_pending_states( [ @@ -2405,8 +2405,9 @@ async def test_trade_high_fee(wallet_environments: WalletTestFramework) -> None: indirect=True, ) @pytest.mark.limit_consensus_modes(reason="irrelevant") +@pytest.mark.parametrize("wallet_type", [CATWallet, RCATWallet]) @pytest.mark.anyio -async def test_aggregated_trade_state(wallet_environments: WalletTestFramework) -> None: +async def test_aggregated_trade_state(wallet_environments: WalletTestFramework, wallet_type: type[CATWallet]) -> None: env_maker = wallet_environments.environments[0] env_taker = wallet_environments.environments[1] @@ -2421,16 +2422,15 @@ async def test_aggregated_trade_state(wallet_environments: WalletTestFramework) xch_to_cat_amount = uint64(100) - async with env_maker.wallet_state_manager.new_action_scope( - wallet_environments.tx_config, push=True - ) as action_scope: - await CATWallet.create_new_cat_wallet( - env_maker.wallet_state_manager, - env_maker.xch_wallet, - {"identifier": "genesis_by_id"}, - xch_to_cat_amount, - action_scope, - ) + await mint_cat( + wallet_environments, + env_maker, + "xch", + "cat", + xch_to_cat_amount, + wallet_type, + "cat", + ) await wallet_environments.process_pending_states( [ diff --git a/chia/_tests/wallet/did_wallet/test_did.py b/chia/_tests/wallet/did_wallet/test_did.py index 
0fe6677d7990..45fc25b39cb8 100644 --- a/chia/_tests/wallet/did_wallet/test_did.py +++ b/chia/_tests/wallet/did_wallet/test_did.py @@ -23,6 +23,7 @@ from chia.types.peer_info import PeerInfo from chia.types.signing_mode import CHIP_0002_SIGN_MESSAGE_PREFIX from chia.util.bech32m import decode_puzzle_hash, encode_puzzle_hash +from chia.util.byte_types import hexstr_to_bytes from chia.wallet.did_wallet.did_wallet import DIDWallet from chia.wallet.singleton import ( create_singleton_puzzle, @@ -791,7 +792,7 @@ async def test_get_info(wallet_environments: WalletTestFramework): coin = (await wallet_0.select_coins(uint64(1), action_scope)).pop() assert coin.amount % 2 == 1 coin_id = coin.name() - with pytest.raises(ValueError, match="The coin is not a DID."): + with pytest.raises(ValueError, match="The coin is not a DID"): await api_0.get_did_info(DIDGetInfo(coin_id.hex())) # Test multiple odd coins @@ -845,7 +846,7 @@ async def test_get_info(wallet_environments: WalletTestFramework): ] ) - with pytest.raises(ValueError, match="This is not a singleton, multiple children coins found."): + with pytest.raises(ValueError, match=r"This is not a singleton, multiple children coins found\."): await api_0.get_did_info(DIDGetInfo(coin_1.name().hex())) @@ -1091,9 +1092,9 @@ async def test_did_sign_message(wallet_environments: WalletTestFramework): ) puzzle: Program = Program.to((CHIP_0002_SIGN_MESSAGE_PREFIX, message)) assert AugSchemeMPL.verify( - G1Element.from_bytes(bytes.fromhex(response["pubkey"])), + G1Element.from_bytes(hexstr_to_bytes(response["pubkey"])), puzzle.get_tree_hash(), - G2Element.from_bytes(bytes.fromhex(response["signature"])), + G2Element.from_bytes(hexstr_to_bytes(response["signature"])), ) # Test hex string message = "0123456789ABCDEF" @@ -1107,9 +1108,9 @@ puzzle = Program.to((CHIP_0002_SIGN_MESSAGE_PREFIX, bytes.fromhex(message))) assert AugSchemeMPL.verify( - G1Element.from_bytes(bytes.fromhex(response["pubkey"])), + G1Element.from_bytes(hexstr_to_bytes(response["pubkey"])), puzzle.get_tree_hash(), - G2Element.from_bytes(bytes.fromhex(response["signature"])), + G2Element.from_bytes(hexstr_to_bytes(response["signature"])), ) # Test BLS sign string @@ -1119,15 +1120,15 @@ { "id": encode_puzzle_hash(did_wallet_1.did_info.origin_coin.name(), AddressType.DID.value), "message": message, - "is_hex": "False", - "safe_mode": "False", + "is_hex": False, + "safe_mode": False, } ) assert AugSchemeMPL.verify( - G1Element.from_bytes(bytes.fromhex(response["pubkey"])), bytes(message, "utf-8"), - G2Element.from_bytes(bytes.fromhex(response["signature"])), + G1Element.from_bytes(hexstr_to_bytes(response["pubkey"])), + bytes(message, "utf-8"), + G2Element.from_bytes(hexstr_to_bytes(response["signature"])), ) # Test BLS sign hex message = "0123456789ABCDEF" @@ -1142,9 +1143,9 @@ ) assert AugSchemeMPL.verify( - G1Element.from_bytes(bytes.fromhex(response["pubkey"])), - bytes.fromhex(message), - G2Element.from_bytes(bytes.fromhex(response["signature"])), + G1Element.from_bytes(hexstr_to_bytes(response["pubkey"])), + hexstr_to_bytes(message), + G2Element.from_bytes(hexstr_to_bytes(response["signature"])), ) diff --git a/chia/_tests/wallet/nft_wallet/test_nft_1_offers.py b/chia/_tests/wallet/nft_wallet/test_nft_1_offers.py index e8af3626d7f7..0b09bc0d2c5b 100644 ---
a/chia/_tests/wallet/nft_wallet/test_nft_1_offers.py +++ b/chia/_tests/wallet/nft_wallet/test_nft_1_offers.py @@ -9,8 +9,10 @@ from chia._tests.environments.wallet import WalletStateTransition, WalletTestFramework from chia._tests.util.time_out_assert import time_out_assert +from chia._tests.wallet.cat_wallet.test_cat_wallet import mint_cat from chia.types.blockchain_format.program import Program from chia.wallet.cat_wallet.cat_wallet import CATWallet +from chia.wallet.cat_wallet.r_cat_wallet import RCATWallet from chia.wallet.did_wallet.did_wallet import DIDWallet from chia.wallet.nft_wallet.nft_wallet import NFTWallet from chia.wallet.outer_puzzles import create_asset_id, match_puzzle @@ -777,8 +779,11 @@ async def test_nft_offer_sell_did_to_did(wallet_environments: WalletTestFramewor @pytest.mark.limit_consensus_modes @pytest.mark.parametrize("wallet_environments", [{"num_environments": 2, "blocks_needed": [1, 1]}], indirect=True) @pytest.mark.parametrize("zero_royalties", [True, False]) +@pytest.mark.parametrize("wallet_type", [CATWallet, RCATWallet]) @pytest.mark.anyio -async def test_nft_offer_sell_nft_for_cat(wallet_environments: WalletTestFramework, zero_royalties: bool) -> None: +async def test_nft_offer_sell_nft_for_cat( + wallet_environments: WalletTestFramework, zero_royalties: bool, wallet_type: type[CATWallet] +) -> None: env_maker = wallet_environments.environments[0] env_taker = wallet_environments.environments[1] wallet_maker = env_maker.xch_wallet @@ -910,46 +915,24 @@ async def test_nft_offer_sell_nft_for_cat(wallet_environments: WalletTestFramewo # Create new CAT and wallets for maker and taker # Trade them between maker and taker to ensure multiple coins for each cat - cats_to_mint = 100000 + cats_to_mint = uint64(100000) cats_to_trade = uint64(10000) - async with wallet_maker.wallet_state_manager.new_action_scope( - wallet_environments.tx_config, push=True - ) as action_scope: - cat_wallet_maker = await CATWallet.create_new_cat_wallet( - env_maker.wallet_state_manager, - wallet_maker, - {"identifier": "genesis_by_id"}, - uint64(cats_to_mint), - action_scope, - ) - - await wallet_environments.process_pending_states( - [ - WalletStateTransition( - pre_block_balance_updates={ - "xch": { - "set_remainder": True, - }, - "cat": { - "init": True, - "set_remainder": True, - }, - }, - post_block_balance_updates={ - "xch": { - "set_remainder": True, - }, - "cat": { - "set_remainder": True, - }, - }, - ), - WalletStateTransition(), - ] + cat_wallet_maker = await mint_cat( + wallet_environments, + env_maker, + "xch", + "cat", + cats_to_mint, + wallet_type, + "cat", ) - cat_wallet_taker: CATWallet = await CATWallet.get_or_create_wallet_for_cat( - env_taker.wallet_state_manager, wallet_taker, cat_wallet_maker.get_asset_id() + if wallet_type is RCATWallet: + extra_args: Any = (bytes32.zeros,) + else: + extra_args = tuple() + cat_wallet_taker: CATWallet = await wallet_type.get_or_create_wallet_for_cat( + env_taker.wallet_state_manager, wallet_taker, cat_wallet_maker.get_asset_id(), *extra_args ) await env_taker.change_balances({"cat": {"init": True}}) @@ -1120,8 +1103,11 @@ async def test_nft_offer_sell_nft_for_cat(wallet_environments: WalletTestFramewo @pytest.mark.limit_consensus_modes @pytest.mark.parametrize("wallet_environments", [{"num_environments": 2, "blocks_needed": [1, 1]}], indirect=True) @pytest.mark.parametrize("test_change", [True, False]) +@pytest.mark.parametrize("wallet_type", [CATWallet, RCATWallet]) @pytest.mark.anyio -async def 
test_nft_offer_request_nft_for_cat(wallet_environments: WalletTestFramework, test_change: bool) -> None: +async def test_nft_offer_request_nft_for_cat( + wallet_environments: WalletTestFramework, test_change: bool, wallet_type: type[CATWallet] +) -> None: env_maker = wallet_environments.environments[0] env_taker = wallet_environments.environments[1] wallet_maker = env_maker.xch_wallet @@ -1255,46 +1241,24 @@ async def test_nft_offer_request_nft_for_cat(wallet_environments: WalletTestFram # Create new CAT and wallets for maker and taker # Trade them between maker and taker to ensure multiple coins for each cat - cats_to_mint = 100000 + cats_to_mint = uint64(100000) cats_to_trade = uint64(20000) - async with wallet_maker.wallet_state_manager.new_action_scope( - wallet_environments.tx_config, push=True - ) as action_scope: - cat_wallet_maker = await CATWallet.create_new_cat_wallet( - env_maker.wallet_state_manager, - wallet_maker, - {"identifier": "genesis_by_id"}, - uint64(cats_to_mint), - action_scope, - ) - - await wallet_environments.process_pending_states( - [ - WalletStateTransition( - pre_block_balance_updates={ - "xch": { - "set_remainder": True, - }, - "cat": { - "init": True, - "set_remainder": True, - }, - }, - post_block_balance_updates={ - "xch": { - "set_remainder": True, - }, - "cat": { - "set_remainder": True, - }, - }, - ), - WalletStateTransition(), - ] + cat_wallet_maker = await mint_cat( + wallet_environments, + env_maker, + "xch", + "cat", + cats_to_mint, + wallet_type, + "cat", ) - await CATWallet.get_or_create_wallet_for_cat( - env_taker.wallet_state_manager, wallet_taker, cat_wallet_maker.get_asset_id() + if wallet_type is RCATWallet: + extra_args: Any = (bytes32.zeros,) + else: + extra_args = tuple() + await wallet_type.get_or_create_wallet_for_cat( + env_taker.wallet_state_manager, wallet_taker, cat_wallet_maker.get_asset_id(), *extra_args ) await env_taker.change_balances({"cat": {"init": True}}) @@ -1690,8 +1654,11 @@ async def get_trade_and_status(trade_manager: Any, trade: Any) -> TradeStatus: [{"num_environments": 2, "blocks_needed": [3, 3], "config_overrides": {"automatically_add_unknown_cats": True}}], indirect=True, ) +@pytest.mark.parametrize("wallet_type", [CATWallet, RCATWallet]) @pytest.mark.anyio -async def test_complex_nft_offer(wallet_environments: WalletTestFramework, royalty_pts: tuple[int, int, int]) -> None: +async def test_complex_nft_offer( + wallet_environments: WalletTestFramework, royalty_pts: tuple[int, int, int], wallet_type: type[CATWallet] +) -> None: """ This test is going to create an offer where the maker offers 1 NFT and 1 CAT for 2 NFTs, an XCH and a CAT """ @@ -1729,20 +1696,24 @@ async def test_complex_nft_offer(wallet_environments: WalletTestFramework, royal ph_taker = await action_scope.get_puzzle_hash(wallet_taker.wallet_state_manager) CAT_AMOUNT = uint64(100000000) - async with wallet_maker.wallet_state_manager.new_action_scope( - wallet_environments.tx_config, push=True - ) as action_scope: - cat_wallet_maker = await CATWallet.create_new_cat_wallet( - wsm_maker, wallet_maker, {"identifier": "genesis_by_id"}, CAT_AMOUNT, action_scope - ) - async with wallet_taker.wallet_state_manager.new_action_scope( - wallet_environments.tx_config, push=True - ) as action_scope: - cat_wallet_taker = await CATWallet.create_new_cat_wallet( - wsm_taker, wallet_taker, {"identifier": "genesis_by_id"}, CAT_AMOUNT, action_scope - ) - await env_maker.change_balances({"cat_maker": {"init": True}}) - await env_taker.change_balances({"cat_taker": 
{"init": True}}) + cat_wallet_maker = await mint_cat( + wallet_environments, + env_maker, + "xch", + "cat_maker", + CAT_AMOUNT, + wallet_type, + "cat_maker", + ) + cat_wallet_taker = await mint_cat( + wallet_environments, + env_taker, + "xch", + "cat_taker", + CAT_AMOUNT, + wallet_type, + "cat_taker", + ) # We'll need these later basic_nft_wallet_maker = await NFTWallet.create_new_nft_wallet(wsm_maker, wallet_maker, name="NFT WALLET MAKER") @@ -2025,6 +1996,11 @@ async def test_complex_nft_offer(wallet_environments: WalletTestFramework, royal { "type": "CAT", "tail": "0x" + cat_wallet_taker.get_asset_id(), + **( + {} + if wallet_type is CATWallet + else {"also": {"type": "revocation layer", "hidden_puzzle_hash": "0x" + bytes32.zeros.hex()}} + ), } ), } @@ -2090,7 +2066,8 @@ async def test_complex_nft_offer(wallet_environments: WalletTestFramework, royal + taker_royalty_summary[nft_to_offer_asset_id_taker_2][0]["amount"] ) - xch_coins = int(XCH_REQUESTED / 1_750_000_000_000) + 2 + # in the zero royalty case, exact change ends up being selected which complicates things a bit + xch_coins = int(XCH_REQUESTED / 1_750_000_000_000) + 2 - (1 if royalty_basis_pts_maker == 0 else 0) fee_coins = int(FEE / 1_750_000_000_000) + 1 if FEE > 1_750_000_000_000 else 1 await wallet_environments.process_pending_states( [ @@ -2151,7 +2128,7 @@ async def test_complex_nft_offer(wallet_environments: WalletTestFramework, royal "unconfirmed_wallet_balance": -XCH_REQUESTED - maker_xch_royalties_expected - FEE, "<=#spendable_balance": -XCH_REQUESTED - maker_xch_royalties_expected - FEE, "<=#max_send_amount": -XCH_REQUESTED - maker_xch_royalties_expected - FEE, - ">=#pending_change": 1, + ">=#pending_change": 0, "pending_coin_removal_count": xch_coins + fee_coins, }, "cat_taker": { @@ -2173,9 +2150,9 @@ async def test_complex_nft_offer(wallet_environments: WalletTestFramework, royal post_block_balance_updates={ "xch": { "confirmed_wallet_balance": -XCH_REQUESTED - maker_xch_royalties_expected - FEE, - ">=#spendable_balance": 1, - ">=#max_send_amount": 1, - "<=#pending_change": -1, + ">=#spendable_balance": 0, + ">=#max_send_amount": 0, + "<=#pending_change": 0, "pending_coin_removal_count": -fee_coins - xch_coins, # Parametrizations make unspent_coin_count too complicated "set_remainder": True, diff --git a/chia/_tests/wallet/nft_wallet/test_nft_offers.py b/chia/_tests/wallet/nft_wallet/test_nft_offers.py index 80eb211f06cf..816f17f8b6ba 100644 --- a/chia/_tests/wallet/nft_wallet/test_nft_offers.py +++ b/chia/_tests/wallet/nft_wallet/test_nft_offers.py @@ -5,11 +5,14 @@ import pytest from chia_rs.sized_bytes import bytes32 from chia_rs.sized_ints import uint64 +from typing_extensions import Any from chia._tests.environments.wallet import WalletStateTransition, WalletTestFramework from chia._tests.util.time_out_assert import time_out_assert +from chia._tests.wallet.cat_wallet.test_cat_wallet import mint_cat from chia.types.blockchain_format.program import Program from chia.wallet.cat_wallet.cat_wallet import CATWallet +from chia.wallet.cat_wallet.r_cat_wallet import RCATWallet from chia.wallet.nft_wallet.nft_wallet import NFTWallet from chia.wallet.outer_puzzles import create_asset_id, match_puzzle from chia.wallet.puzzle_drivers import PuzzleInfo @@ -696,8 +699,9 @@ async def test_nft_offer_with_metadata_update(wallet_environments: WalletTestFra @pytest.mark.limit_consensus_modes(reason="irrelevant") @pytest.mark.parametrize("wallet_environments", [{"num_environments": 2, "blocks_needed": [1, 1]}], indirect=True) 
+@pytest.mark.parametrize("wallet_type", [CATWallet, RCATWallet]) @pytest.mark.anyio -async def test_nft_offer_nft_for_cat(wallet_environments: WalletTestFramework) -> None: +async def test_nft_offer_nft_for_cat(wallet_environments: WalletTestFramework, wallet_type: type[CATWallet]) -> None: env_0 = wallet_environments.environments[0] env_1 = wallet_environments.environments[1] wallet_maker = env_0.xch_wallet @@ -785,73 +789,42 @@ async def test_nft_offer_nft_for_cat(wallet_environments: WalletTestFramework) - assert await nft_wallet_taker.get_nft_count() == 0 # Create two new CATs and wallets for maker and taker - cats_to_mint = 10000 - async with wallet_maker.wallet_state_manager.new_action_scope( - wallet_environments.tx_config, push=True - ) as action_scope: - cat_wallet_maker = await CATWallet.create_new_cat_wallet( - env_0.wallet_state_manager, - wallet_maker, - {"identifier": "genesis_by_id"}, - uint64(cats_to_mint), - action_scope, - ) - - async with wallet_taker.wallet_state_manager.new_action_scope( - wallet_environments.tx_config, push=True - ) as action_scope: - cat_wallet_taker = await CATWallet.create_new_cat_wallet( - env_1.wallet_state_manager, - wallet_taker, - {"identifier": "genesis_by_id"}, - uint64(cats_to_mint), - action_scope, - ) + cats_to_mint = uint64(10000) + cat_wallet_maker = await mint_cat( + wallet_environments, + env_0, + "xch", + "maker cat", + cats_to_mint, + wallet_type, + "maker cat", + ) - # mostly set_remainder here as minting CATs is tested elsewhere - await wallet_environments.process_pending_states( - [ - WalletStateTransition( - pre_block_balance_updates={ - "xch": {"set_remainder": True}, - "maker cat": { - "init": True, - "set_remainder": True, - }, - }, - post_block_balance_updates={ - "xch": {"set_remainder": True}, - "maker cat": { - "set_remainder": True, - }, - }, - ), - WalletStateTransition( - pre_block_balance_updates={ - "xch": {"set_remainder": True}, - "taker cat": { - "init": True, - "set_remainder": True, - }, - }, - post_block_balance_updates={ - "xch": {"set_remainder": True}, - "taker cat": { - "set_remainder": True, - }, - }, - ), - ] + cat_wallet_taker = await mint_cat( + wallet_environments, + env_1, + "xch", + "taker cat", + cats_to_mint, + wallet_type, + "taker cat", ) - wallet_maker_for_taker_cat: CATWallet = await CATWallet.get_or_create_wallet_for_cat( - env_0.wallet_state_manager, wallet_maker, cat_wallet_taker.get_asset_id() + if wallet_type is RCATWallet: + extra_args: Any = (bytes32.zeros,) + else: + extra_args = tuple() + wallet_maker_for_taker_cat: CATWallet = await wallet_type.get_or_create_wallet_for_cat( + env_0.wallet_state_manager, wallet_maker, cat_wallet_taker.get_asset_id(), *extra_args ) - await CATWallet.get_or_create_wallet_for_cat( - env_1.wallet_state_manager, wallet_taker, cat_wallet_maker.get_asset_id() + await wallet_type.get_or_create_wallet_for_cat( + env_1.wallet_state_manager, wallet_taker, cat_wallet_maker.get_asset_id(), *extra_args ) + await env_0.change_balances({"taker cat": {"init": True}}) + await env_1.change_balances({"maker cat": {"init": True}}) + # MAKE FIRST TRADE: 1 NFT for 10 taker cats nft_to_offer = coins_maker[0] nft_info: Optional[PuzzleInfo] = match_puzzle(uncurry_puzzle(nft_to_offer.full_puzzle)) @@ -902,9 +875,7 @@ async def test_nft_offer_nft_for_cat(wallet_environments: WalletTestFramework) - "<=#max_send_amount": -maker_fee, "pending_coin_removal_count": 1, }, - "taker cat": { - "init": True, - }, + "taker cat": {}, "nft": { "pending_coin_removal_count": 1, }, @@ -940,9 
+911,7 @@ async def test_nft_offer_nft_for_cat(wallet_environments: WalletTestFramework) - ">=#pending_change": 1, "pending_coin_removal_count": 1, }, - "maker cat": { - "init": True, - }, + "maker cat": {}, "taker cat": { "unconfirmed_wallet_balance": -taker_cat_offered, "<=#spendable_balance": -taker_cat_offered, @@ -1367,8 +1336,11 @@ async def test_nft_offer_nft_for_nft(wallet_environments: WalletTestFramework) - @pytest.mark.limit_consensus_modes(reason="irrelevant") @pytest.mark.parametrize("wallet_environments", [{"num_environments": 2, "blocks_needed": [1, 1]}], indirect=True) +@pytest.mark.parametrize("wallet_type", [CATWallet, RCATWallet]) @pytest.mark.anyio -async def test_nft_offer_nft0_and_xch_for_cat(wallet_environments: WalletTestFramework) -> None: +async def test_nft_offer_nft0_and_xch_for_cat( + wallet_environments: WalletTestFramework, wallet_type: type[CATWallet] +) -> None: env_0 = wallet_environments.environments[0] env_1 = wallet_environments.environments[1] wallet_maker = env_0.xch_wallet @@ -1456,80 +1428,41 @@ async def test_nft_offer_nft0_and_xch_for_cat(wallet_environments: WalletTestFra assert await nft_wallet_taker.get_nft_count() == 0 # Create two new CATs and wallets for maker and taker - cats_to_mint = 10000 - async with wallet_maker.wallet_state_manager.new_action_scope( - wallet_environments.tx_config, push=True - ) as action_scope: - cat_wallet_maker = await CATWallet.create_new_cat_wallet( - env_0.wallet_state_manager, - wallet_maker, - {"identifier": "genesis_by_id"}, - uint64(cats_to_mint), - action_scope, - ) + cats_to_mint = uint64(10000) + cat_wallet_maker = await mint_cat( + wallet_environments, + env_0, + "xch", + "maker cat", + cats_to_mint, + wallet_type, + "maker cat", + ) - async with wallet_taker.wallet_state_manager.new_action_scope( - wallet_environments.tx_config, push=True - ) as action_scope: - cat_wallet_taker = await CATWallet.create_new_cat_wallet( - env_1.wallet_state_manager, - wallet_taker, - {"identifier": "genesis_by_id"}, - uint64(cats_to_mint), - action_scope, - ) + cat_wallet_taker = await mint_cat( + wallet_environments, + env_1, + "xch", + "taker cat", + cats_to_mint, + wallet_type, + "taker cat", + ) + if wallet_type is RCATWallet: + extra_args: Any = (bytes32.zeros,) + else: + extra_args = tuple() - wallet_maker_for_taker_cat: CATWallet = await CATWallet.get_or_create_wallet_for_cat( - env_0.wallet_state_manager, wallet_maker, cat_wallet_taker.get_asset_id() + wallet_maker_for_taker_cat: CATWallet = await wallet_type.get_or_create_wallet_for_cat( + env_0.wallet_state_manager, wallet_maker, cat_wallet_taker.get_asset_id(), *extra_args ) - await CATWallet.get_or_create_wallet_for_cat( - env_1.wallet_state_manager, wallet_taker, cat_wallet_maker.get_asset_id() + await wallet_type.get_or_create_wallet_for_cat( + env_1.wallet_state_manager, wallet_taker, cat_wallet_maker.get_asset_id(), *extra_args ) - # mostly set_remainder here as minting CATs is tested elsewhere - await wallet_environments.process_pending_states( - [ - WalletStateTransition( - pre_block_balance_updates={ - "xch": {"set_remainder": True}, - "maker cat": { - "init": True, - "set_remainder": True, - }, - "taker cat": { - "init": True, - "set_remainder": True, - }, - }, - post_block_balance_updates={ - "xch": {"set_remainder": True}, - "maker cat": { - "set_remainder": True, - }, - }, - ), - WalletStateTransition( - pre_block_balance_updates={ - "xch": {"set_remainder": True}, - "maker cat": { - "init": True, - "set_remainder": True, - }, - "taker cat": { - "init": True, - "set_remainder": True, - }, - }, - post_block_balance_updates={ - "xch": {"set_remainder": True}, - "taker cat": { - "set_remainder":
True, - }, - }, - ), - ] - ) + await env_0.change_balances({"taker cat": {"init": True}}) + await env_1.change_balances({"maker cat": {"init": True}}) # MAKE FIRST TRADE: 1 NFT for 10 taker cats nft_to_offer = coins_maker[0] diff --git a/chia/_tests/wallet/nft_wallet/test_nft_wallet.py b/chia/_tests/wallet/nft_wallet/test_nft_wallet.py index 87d793bd5273..6bdb43e48e48 100644 --- a/chia/_tests/wallet/nft_wallet/test_nft_wallet.py +++ b/chia/_tests/wallet/nft_wallet/test_nft_wallet.py @@ -5,7 +5,7 @@ from typing import Any, Callable import pytest -from chia_rs import AugSchemeMPL, G1Element, G2Element +from chia_rs import AugSchemeMPL from chia_rs.sized_bytes import bytes32 from chia_rs.sized_ints import uint16, uint32, uint64 from clvm_tools.binutils import disassemble @@ -41,6 +41,7 @@ NFTTransferBulk, NFTTransferNFT, NFTWalletWithDID, + SignMessageByID, ) from chia.wallet.wallet_rpc_api import MAX_NFT_CHUNK_SIZE from chia.wallet.wallet_state_manager import WalletStateManager @@ -528,7 +529,7 @@ async def test_nft_wallet_creation_and_transfer(wallet_environments: WalletTestF await time_out_assert(30, get_nft_count, 1, nft_wallet_1) # Test an error case - with pytest.raises(ResponseFailureError, match="The NFT doesn't support setting a DID."): + with pytest.raises(ResponseFailureError, match="The NFT doesn't support setting a DID"): await env_1.rpc_client.set_nft_did( NFTSetNFTDID( wallet_id=uint32(env_1.wallet_aliases["nft"]), @@ -659,7 +660,7 @@ async def test_nft_wallet_rpc_creation_and_list(wallet_environments: WalletTestF # test counts assert (await env.rpc_client.count_nfts(NFTCountNFTs(uint32(env.wallet_aliases["nft"])))).count == 2 assert (await env.rpc_client.count_nfts(NFTCountNFTs())).count == 2 - with pytest.raises(ResponseFailureError, match="Wallet 50 not found."): + with pytest.raises(ResponseFailureError, match="Wallet 50 not found"): await env.rpc_client.count_nfts(NFTCountNFTs(uint32(50))) @@ -2757,51 +2758,55 @@ async def test_nft_sign_message(wallet_environments: WalletTestFramework) -> Non assert not coin.pending_transaction # Test general string message = "Hello World" - pubkey, sig, _ = await env.rpc_client.sign_message_by_id( - id=encode_puzzle_hash(coin.launcher_id, AddressType.NFT.value), message=message + sign_by_id_res = await env.rpc_client.sign_message_by_id( + SignMessageByID(id=encode_puzzle_hash(coin.launcher_id, AddressType.NFT.value), message=message) ) puzzle = Program.to((CHIP_0002_SIGN_MESSAGE_PREFIX, message)) assert AugSchemeMPL.verify( - G1Element.from_bytes(bytes.fromhex(pubkey)), + sign_by_id_res.pubkey, puzzle.get_tree_hash(), - G2Element.from_bytes(bytes.fromhex(sig)), + sign_by_id_res.signature, ) # Test hex string message = "0123456789ABCDEF" - pubkey, sig, _ = await env.rpc_client.sign_message_by_id( - id=encode_puzzle_hash(coin.launcher_id, AddressType.NFT.value), message=message, is_hex=True + sign_by_id_res = await env.rpc_client.sign_message_by_id( + SignMessageByID(id=encode_puzzle_hash(coin.launcher_id, AddressType.NFT.value), message=message, is_hex=True) ) puzzle = Program.to((CHIP_0002_SIGN_MESSAGE_PREFIX, bytes.fromhex(message))) assert AugSchemeMPL.verify( - G1Element.from_bytes(bytes.fromhex(pubkey)), + sign_by_id_res.pubkey, puzzle.get_tree_hash(), - G2Element.from_bytes(bytes.fromhex(sig)), + sign_by_id_res.signature, ) # Test BLS sign string message = "Hello World" - pubkey, sig, _ = await env.rpc_client.sign_message_by_id( - id=encode_puzzle_hash(coin.launcher_id, AddressType.NFT.value), - message=message, - is_hex=False, - 
safe_mode=False, + sign_by_id_res = await env.rpc_client.sign_message_by_id( + SignMessageByID( + id=encode_puzzle_hash(coin.launcher_id, AddressType.NFT.value), + message=message, + is_hex=False, + safe_mode=False, + ) ) assert AugSchemeMPL.verify( - G1Element.from_bytes(bytes.fromhex(pubkey)), + sign_by_id_res.pubkey, bytes(message, "utf-8"), - G2Element.from_bytes(bytes.fromhex(sig)), + sign_by_id_res.signature, ) # Test BLS sign hex message = "0123456789ABCDEF" - pubkey, sig, _ = await env.rpc_client.sign_message_by_id( - id=encode_puzzle_hash(coin.launcher_id, AddressType.NFT.value), - message=message, - is_hex=True, - safe_mode=False, + sign_by_id_res = await env.rpc_client.sign_message_by_id( + SignMessageByID( + id=encode_puzzle_hash(coin.launcher_id, AddressType.NFT.value), + message=message, + is_hex=True, + safe_mode=False, + ) ) assert AugSchemeMPL.verify( - G1Element.from_bytes(bytes.fromhex(pubkey)), + sign_by_id_res.pubkey, bytes.fromhex(message), - G2Element.from_bytes(bytes.fromhex(sig)), + sign_by_id_res.signature, ) diff --git a/chia/_tests/wallet/rpc/test_wallet_rpc.py b/chia/_tests/wallet/rpc/test_wallet_rpc.py index a41d7d6484a1..1c60c5ffcc8e 100644 --- a/chia/_tests/wallet/rpc/test_wallet_rpc.py +++ b/chia/_tests/wallet/rpc/test_wallet_rpc.py @@ -8,7 +8,7 @@ import random from collections.abc import AsyncIterator from operator import attrgetter -from typing import Any, Optional, cast +from typing import Any, Optional from unittest.mock import patch import aiosqlite @@ -52,7 +52,6 @@ from chia.consensus.block_rewards import calculate_base_farmer_reward, calculate_pool_reward from chia.full_node.full_node_rpc_client import FullNodeRpcClient from chia.rpc.rpc_client import ResponseFailureError -from chia.server.aliases import WalletService from chia.server.server import ChiaServer from chia.simulator.full_node_simulator import FullNodeSimulator from chia.simulator.simulator_protocol import FarmNewBlockProtocol @@ -106,9 +105,12 @@ from chia.wallet.wallet_request_types import ( AddKey, CheckDeleteKey, + ClawbackPuzzleDecoratorOverride, CombineCoins, DefaultCAT, DeleteKey, + DeleteNotifications, + DeleteUnconfirmedTransactions, DIDCreateBackupFile, DIDGetDID, DIDGetMetadata, @@ -119,10 +121,16 @@ DIDTransferDID, DIDUpdateMetadata, FungibleAsset, + GetNextAddress, GetNotifications, GetPrivateKey, GetSyncStatusResponse, GetTimestampForHeight, + GetTransaction, + GetTransactionCount, + GetTransactions, + GetWalletBalance, + GetWalletBalances, GetWallets, LogIn, NFTCalculateRoyalties, @@ -133,13 +141,16 @@ PushTransactions, PushTX, RoyaltyAsset, + SendTransaction, SetWalletResyncOnStartup, + SpendClawbackCoins, SplitCoins, VerifySignature, VerifySignatureResponse, ) from chia.wallet.wallet_rpc_api import WalletRpcApi from chia.wallet.wallet_rpc_client import WalletRpcClient +from chia.wallet.wallet_service import WalletService from chia.wallet.wallet_spend_bundle import WalletSpendBundle log = logging.getLogger(__name__) @@ -190,9 +201,11 @@ async def farm_transaction( async def generate_funds(full_node_api: FullNodeSimulator, wallet_bundle: WalletBundle, num_blocks: int = 1) -> int: - wallet_id = 1 - initial_balances = await wallet_bundle.rpc_client.get_wallet_balance(wallet_id) - ph: bytes32 = decode_puzzle_hash(await wallet_bundle.rpc_client.get_next_address(wallet_id, True)) + wallet_id = uint32(1) + initial_balances = (await wallet_bundle.rpc_client.get_wallet_balance(GetWalletBalance(wallet_id))).wallet_balance + ph: bytes32 = decode_puzzle_hash( + (await 
wallet_bundle.rpc_client.get_next_address(GetNextAddress(wallet_id, True))).address + ) generated_funds = 0 for _ in range(num_blocks): await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph)) @@ -203,8 +216,8 @@ async def generate_funds(full_node_api: FullNodeSimulator, wallet_bundle: Wallet # Farm a dummy block to confirm the created funds await farm_transaction_block(full_node_api, wallet_bundle.node) - expected_confirmed = initial_balances["confirmed_wallet_balance"] + generated_funds - expected_unconfirmed = initial_balances["unconfirmed_wallet_balance"] + generated_funds + expected_confirmed = initial_balances.confirmed_wallet_balance + generated_funds + expected_unconfirmed = initial_balances.unconfirmed_wallet_balance + generated_funds await time_out_assert(20, get_confirmed_balance, expected_confirmed, wallet_bundle.rpc_client, wallet_id) await time_out_assert(20, get_unconfirmed_balance, expected_unconfirmed, wallet_bundle.rpc_client, wallet_id) await time_out_assert(20, check_client_synced, True, wallet_bundle.rpc_client) @@ -326,30 +339,35 @@ async def assert_push_tx_error(node_rpc: FullNodeRpcClient, tx: TransactionRecor async def assert_get_balance(rpc_client: WalletRpcClient, wallet_node: WalletNode, wallet: WalletProtocol[Any]) -> None: expected_balance = await wallet_node.get_balance(wallet.id()) expected_balance_dict = expected_balance.to_json_dict() + expected_balance_dict.setdefault("pending_approval_balance", None) expected_balance_dict["wallet_id"] = wallet.id() expected_balance_dict["wallet_type"] = wallet.type() expected_balance_dict["fingerprint"] = wallet_node.logged_in_fingerprint if wallet.type() in {WalletType.CAT, WalletType.CRCAT}: assert isinstance(wallet, CATWallet) - expected_balance_dict["asset_id"] = wallet.get_asset_id() - assert await rpc_client.get_wallet_balance(wallet.id()) == expected_balance_dict + expected_balance_dict["asset_id"] = "0x" + wallet.get_asset_id() + else: + expected_balance_dict["asset_id"] = None + assert ( + await rpc_client.get_wallet_balance(GetWalletBalance(wallet.id())) + ).wallet_balance.to_json_dict() == expected_balance_dict async def tx_in_mempool(client: WalletRpcClient, transaction_id: bytes32) -> bool: - tx = await client.get_transaction(transaction_id) + tx = (await client.get_transaction(GetTransaction(transaction_id))).transaction return tx.is_in_mempool() async def get_confirmed_balance(client: WalletRpcClient, wallet_id: int) -> uint128: - balance = await client.get_wallet_balance(wallet_id) - # TODO: casting due to lack of type checked deserialization - return cast(uint128, balance["confirmed_wallet_balance"]) + return ( + await client.get_wallet_balance(GetWalletBalance(uint32(wallet_id))) + ).wallet_balance.confirmed_wallet_balance async def get_unconfirmed_balance(client: WalletRpcClient, wallet_id: int) -> uint128: - balance = await client.get_wallet_balance(wallet_id) - # TODO: casting due to lack of type checked deserialization - return cast(uint128, balance["unconfirmed_wallet_balance"]) + return ( + await client.get_wallet_balance(GetWalletBalance(uint32(wallet_id))) + ).wallet_balance.unconfirmed_wallet_balance @pytest.mark.anyio @@ -367,24 +385,25 @@ async def test_send_transaction(wallet_rpc_environment: WalletRpcTestEnvironment addr = encode_puzzle_hash(await action_scope.get_puzzle_hash(wallet_2.wallet_state_manager), "txch") tx_amount = uint64(15600000) with pytest.raises(ValueError): - await client.send_transaction(1, uint64(100000000000000001), addr, DEFAULT_TX_CONFIG) + await 
client.send_transaction( + SendTransaction(wallet_id=uint32(1), amount=uint64(100000000000000001), address=addr, push=True), + DEFAULT_TX_CONFIG, + ) # Tests sending a basic transaction extra_conditions = (Remark(Program.to(("test", None))),) non_existent_coin = Coin(bytes32.zeros, bytes32.zeros, uint64(0)) tx_no_push = ( await client.send_transaction( - 1, - tx_amount, - addr, - memos=["this is a basic tx"], + SendTransaction( + wallet_id=uint32(1), amount=tx_amount, address=addr, memos=["this is a basic tx"], push=False + ), tx_config=DEFAULT_TX_CONFIG.override( excluded_coin_amounts=[uint64(250000000000)], excluded_coin_ids=[non_existent_coin.name()], reuse_puzhash=True, ), extra_conditions=extra_conditions, - push=False, ) ).transaction response = await client.fetch( @@ -406,7 +425,7 @@ async def test_send_transaction(wallet_rpc_environment: WalletRpcTestEnvironment }, ) assert response["success"] - tx = TransactionRecord.from_json_dict_convenience(response["transactions"][0]) + tx = TransactionRecord.from_json_dict(response["transactions"][0]) [ byte_deserialize_clvm_streamable( bytes.fromhex(utx), UnsignedTransaction, translation_layer=BLIND_SIGNER_TRANSLATION @@ -424,7 +443,7 @@ async def test_send_transaction(wallet_rpc_environment: WalletRpcTestEnvironment await farm_transaction(full_node_api, wallet_node, spend_bundle) # Checks that the memo can be retrieved - tx_confirmed = await client.get_transaction(transaction_id) + tx_confirmed = (await client.get_transaction(GetTransaction(transaction_id))).transaction assert tx_confirmed.confirmed assert len(tx_confirmed.memos) == 1 assert [b"this is a basic tx"] in tx_confirmed.memos.values() @@ -458,9 +477,7 @@ async def test_push_transactions(wallet_rpc_environment: WalletRpcTestEnvironmen PushTransactions(transactions=[tx], fee=uint64(10)), DEFAULT_TX_CONFIG, ) - resp = await client.fetch( - "push_transactions", {"transactions": [tx.to_json_dict_convenience(wallet_node.config)], "fee": 10} - ) + resp = await client.fetch("push_transactions", {"transactions": [tx.to_json_dict()], "fee": 10}) assert resp["success"] resp = await client.fetch("push_transactions", {"transactions": [bytes(tx).hex()], "fee": 10}) assert resp["success"] @@ -472,7 +489,7 @@ async def test_push_transactions(wallet_rpc_environment: WalletRpcTestEnvironmen await farm_transaction(full_node_api, wallet_node, spend_bundle) for tx in resp_client.transactions: - assert (await client.get_transaction(transaction_id=tx.name)).confirmed + assert (await client.get_transaction(GetTransaction(transaction_id=tx.name))).transaction.confirmed # Just testing NOT failure here really (parsing) await client.push_tx(PushTX(spend_bundle)) @@ -817,7 +834,6 @@ async def test_spend_clawback_coins(wallet_rpc_environment: WalletRpcTestEnviron wallet_1 = wallet_1_node.wallet_state_manager.main_wallet wallet_2 = wallet_2_node.wallet_state_manager.main_wallet full_node_api: FullNodeSimulator = env.full_node.api - wallet_2_api = WalletRpcApi(wallet_2_node) generated_funds = await generate_funds(full_node_api, env.wallet_1, 1) await generate_funds(full_node_api, env.wallet_2, 1) @@ -828,12 +844,14 @@ async def test_spend_clawback_coins(wallet_rpc_environment: WalletRpcTestEnviron wallet_2_puzhash = await action_scope.get_puzzle_hash(wallet_2.wallet_state_manager) tx = ( await wallet_1_rpc.send_transaction( - wallet_id=1, - amount=uint64(500), - address=encode_puzzle_hash(wallet_2_puzhash, "txch"), + SendTransaction( + wallet_id=uint32(1), + amount=uint64(500), + 
address=encode_puzzle_hash(wallet_2_puzhash, "txch"), + puzzle_decorator=[ClawbackPuzzleDecoratorOverride(decorator="CLAWBACK", clawback_timelock=uint64(5))], + push=True, + ), tx_config=DEFAULT_TX_CONFIG, - fee=uint64(0), - puzzle_decorator_override=[{"decorator": "CLAWBACK", "clawback_timelock": 5}], ) ).transaction clawback_coin_id_1 = tx.additions[0].name() @@ -842,12 +860,14 @@ async def test_spend_clawback_coins(wallet_rpc_environment: WalletRpcTestEnviron await full_node_api.wait_for_wallet_synced(wallet_node=wallet_2_node, timeout=20) tx = ( await wallet_2_rpc.send_transaction( - wallet_id=1, - amount=uint64(500), - address=encode_puzzle_hash(wallet_1_puzhash, "txch"), + SendTransaction( + wallet_id=uint32(1), + amount=uint64(500), + address=encode_puzzle_hash(wallet_1_puzhash, "txch"), + puzzle_decorator=[ClawbackPuzzleDecoratorOverride(decorator="CLAWBACK", clawback_timelock=uint64(5))], + push=True, + ), tx_config=DEFAULT_TX_CONFIG, - fee=uint64(0), - puzzle_decorator_override=[{"decorator": "CLAWBACK", "clawback_timelock": 5}], ) ).transaction assert tx.spend_bundle is not None @@ -856,32 +876,28 @@ async def test_spend_clawback_coins(wallet_rpc_environment: WalletRpcTestEnviron await time_out_assert(20, get_confirmed_balance, generated_funds - 500, wallet_1_rpc, 1) await time_out_assert(20, get_confirmed_balance, generated_funds - 500, wallet_2_rpc, 1) await asyncio.sleep(10) - # Test missing coin_ids - has_exception = False - try: - await wallet_2_api.spend_clawback_coins({}) - except ValueError: - has_exception = True - assert has_exception # Test coin ID is not a Clawback coin invalid_coin_id = tx.removals[0].name() - resp = await wallet_2_rpc.spend_clawback_coins([invalid_coin_id], 500) - assert resp["success"] - assert resp["transaction_ids"] == [] + resp = await wallet_2_rpc.spend_clawback_coins( + SpendClawbackCoins(coin_ids=[invalid_coin_id], fee=uint64(500), push=True), tx_config=DEFAULT_TX_CONFIG + ) + assert resp.transaction_ids == [] # Test unsupported wallet coin_record = await wallet_1_node.wallet_state_manager.coin_store.get_coin_record(clawback_coin_id_1) assert coin_record is not None await wallet_1_node.wallet_state_manager.coin_store.add_coin_record( dataclasses.replace(coin_record, wallet_type=WalletType.CAT) ) - resp = await wallet_1_rpc.spend_clawback_coins([clawback_coin_id_1], 100) - assert resp["success"] - assert len(resp["transaction_ids"]) == 0 + resp = await wallet_1_rpc.spend_clawback_coins( + SpendClawbackCoins(coin_ids=[clawback_coin_id_1], fee=uint64(100), push=True), tx_config=DEFAULT_TX_CONFIG + ) + assert len(resp.transaction_ids) == 0 # Test missing metadata await wallet_1_node.wallet_state_manager.coin_store.add_coin_record(dataclasses.replace(coin_record, metadata=None)) - resp = await wallet_1_rpc.spend_clawback_coins([clawback_coin_id_1], 100) - assert resp["success"] - assert len(resp["transaction_ids"]) == 0 + resp = await wallet_1_rpc.spend_clawback_coins( + SpendClawbackCoins(coin_ids=[clawback_coin_id_1], fee=uint64(100), push=True), tx_config=DEFAULT_TX_CONFIG + ) + assert len(resp.transaction_ids) == 0 # Test missing incoming tx coin_record = await wallet_1_node.wallet_state_manager.coin_store.get_coin_record(clawback_coin_id_2) assert coin_record is not None @@ -889,8 +905,10 @@ async def test_spend_clawback_coins(wallet_rpc_environment: WalletRpcTestEnviron await wallet_1_node.wallet_state_manager.coin_store.add_coin_record( dataclasses.replace(coin_record, coin=fake_coin) ) - resp = await 
wallet_1_rpc.spend_clawback_coins([fake_coin.name()], 100) - assert resp["transaction_ids"] == [] + resp = await wallet_1_rpc.spend_clawback_coins( + SpendClawbackCoins(coin_ids=[fake_coin.name()], fee=uint64(100), push=True), tx_config=DEFAULT_TX_CONFIG + ) + assert resp.transaction_ids == [] # Test coin puzzle hash doesn't match the puzzle farmed_tx = (await wallet_1.wallet_state_manager.tx_store.get_farming_rewards())[0] await wallet_1.wallet_state_manager.tx_store.add_transaction_record( @@ -899,8 +917,10 @@ async def test_spend_clawback_coins(wallet_rpc_environment: WalletRpcTestEnviron await wallet_1_node.wallet_state_manager.coin_store.add_coin_record( dataclasses.replace(coin_record, coin=fake_coin) ) - resp = await wallet_1_rpc.spend_clawback_coins([fake_coin.name()], 100) - assert resp["transaction_ids"] == [] + resp = await wallet_1_rpc.spend_clawback_coins( + SpendClawbackCoins(coin_ids=[fake_coin.name()], fee=uint64(100), push=True), tx_config=DEFAULT_TX_CONFIG + ) + assert resp.transaction_ids == [] # Test claim spend await wallet_2_rpc.set_auto_claim( AutoClaimSettings( @@ -910,11 +930,12 @@ async def test_spend_clawback_coins(wallet_rpc_environment: WalletRpcTestEnviron batch_size=uint16(1), ) ) - resp = await wallet_2_rpc.spend_clawback_coins([clawback_coin_id_1, clawback_coin_id_2], 100) - assert resp["success"] - assert len(resp["transaction_ids"]) == 2 - for _tx in resp["transactions"]: - clawback_tx = TransactionRecord.from_json_dict_convenience(_tx) + resp = await wallet_2_rpc.spend_clawback_coins( + SpendClawbackCoins(coin_ids=[clawback_coin_id_1, clawback_coin_id_2], fee=uint64(100), push=True), + tx_config=DEFAULT_TX_CONFIG, + ) + assert len(resp.transaction_ids) == 2 + for clawback_tx in resp.transactions: if clawback_tx.spend_bundle is not None: await time_out_assert_not_none( 10, full_node_api.full_node.mempool_manager.get_spendbundle, clawback_tx.spend_bundle.name() @@ -922,9 +943,10 @@ async def test_spend_clawback_coins(wallet_rpc_environment: WalletRpcTestEnviron await farm_transaction_block(full_node_api, wallet_2_node) await time_out_assert(20, get_confirmed_balance, generated_funds + 300, wallet_2_rpc, 1) # Test spent coin - resp = await wallet_2_rpc.spend_clawback_coins([clawback_coin_id_1], 500) - assert resp["success"] - assert resp["transaction_ids"] == [] + resp = await wallet_2_rpc.spend_clawback_coins( + SpendClawbackCoins(coin_ids=[clawback_coin_id_1], fee=uint64(500), push=True), tx_config=DEFAULT_TX_CONFIG + ) + assert resp.transaction_ids == [] @pytest.mark.anyio @@ -966,7 +988,7 @@ async def test_send_transaction_multi(wallet_rpc_environment: WalletRpcTestEnvir await time_out_assert(20, get_confirmed_balance, generated_funds - amount_outputs - amount_fee, client, 1) # Checks that the memo can be retrieved - tx_confirmed = await client.get_transaction(send_tx_res.name) + tx_confirmed = (await client.get_transaction(GetTransaction(send_tx_res.name))).transaction assert tx_confirmed.confirmed memos = tx_confirmed.memos assert len(memos) == len(outputs) @@ -989,18 +1011,20 @@ async def test_get_transactions(wallet_rpc_environment: WalletRpcTestEnvironment await generate_funds(full_node_api, env.wallet_1, 5) - all_transactions = await client.get_transactions(1) + all_transactions = (await client.get_transactions(GetTransactions(uint32(1)))).transactions assert len(all_transactions) >= 10 # Test transaction pagination - some_transactions = await client.get_transactions(1, 0, 5) - some_transactions_2 = await client.get_transactions(1, 5, 10) + 
some_transactions = (await client.get_transactions(GetTransactions(uint32(1), uint16(0), uint16(5)))).transactions + some_transactions_2 = ( + await client.get_transactions(GetTransactions(uint32(1), uint16(5), uint16(10))) + ).transactions assert some_transactions == all_transactions[0:5] assert some_transactions_2 == all_transactions[5:10] # Testing sorts # Test the default sort (CONFIRMED_AT_HEIGHT) assert all_transactions == sorted(all_transactions, key=attrgetter("confirmed_at_height")) - all_transactions = await client.get_transactions(1, reverse=True) + all_transactions = (await client.get_transactions(GetTransactions(uint32(1), reverse=True))).transactions assert all_transactions == sorted(all_transactions, key=attrgetter("confirmed_at_height"), reverse=True) # Test RELEVANCE @@ -1008,16 +1032,24 @@ async def test_get_transactions(wallet_rpc_environment: WalletRpcTestEnvironment puzhash = await action_scope.get_puzzle_hash(wallet.wallet_state_manager) await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node, timeout=20) await client.send_transaction( - 1, uint64(1), encode_puzzle_hash(puzhash, "txch"), DEFAULT_TX_CONFIG + SendTransaction(wallet_id=uint32(1), amount=uint64(1), address=encode_puzzle_hash(puzhash, "txch"), push=True), + DEFAULT_TX_CONFIG, ) # Create a pending tx - all_transactions = await client.get_transactions(1, sort_key=SortKey.RELEVANCE) + with pytest.raises(ValueError, match="There is no known sort foo"): + await client.get_transactions(GetTransactions(uint32(1), sort_key="foo")) + + all_transactions = ( + await client.get_transactions(GetTransactions(uint32(1), sort_key=SortKey.RELEVANCE.name)) + ).transactions sorted_transactions = sorted(all_transactions, key=attrgetter("created_at_time"), reverse=True) sorted_transactions = sorted(sorted_transactions, key=attrgetter("confirmed_at_height"), reverse=True) sorted_transactions = sorted(sorted_transactions, key=attrgetter("confirmed")) assert all_transactions == sorted_transactions - all_transactions = await client.get_transactions(1, sort_key=SortKey.RELEVANCE, reverse=True) + all_transactions = ( + await client.get_transactions(GetTransactions(uint32(1), sort_key=SortKey.RELEVANCE.name, reverse=True)) + ).transactions sorted_transactions = sorted(all_transactions, key=attrgetter("created_at_time")) sorted_transactions = sorted(sorted_transactions, key=attrgetter("confirmed_at_height")) sorted_transactions = sorted(sorted_transactions, key=attrgetter("confirmed"), reverse=True) @@ -1027,23 +1059,32 @@ async def test_get_transactions(wallet_rpc_environment: WalletRpcTestEnvironment async with wallet.wallet_state_manager.new_action_scope(DEFAULT_TX_CONFIG, push=True) as action_scope: ph_by_addr = await action_scope.get_puzzle_hash(wallet.wallet_state_manager) await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node, timeout=20) - await client.send_transaction(1, uint64(1), encode_puzzle_hash(ph_by_addr, "txch"), DEFAULT_TX_CONFIG) + await client.send_transaction( + SendTransaction( + wallet_id=uint32(1), amount=uint64(1), address=encode_puzzle_hash(ph_by_addr, "txch"), push=True + ), + DEFAULT_TX_CONFIG, + ) await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node, timeout=20) - tx_for_address = await client.get_transactions(1, to_address=encode_puzzle_hash(ph_by_addr, "txch")) + tx_for_address = ( + await client.get_transactions(GetTransactions(uint32(1), to_address=encode_puzzle_hash(ph_by_addr, "txch"))) + ).transactions assert len(tx_for_address) == 1 assert 
tx_for_address[0].to_puzzle_hash == ph_by_addr # Test type filter - all_transactions = await client.get_transactions( - 1, type_filter=TransactionTypeFilter.include([TransactionType.COINBASE_REWARD]) - ) + all_transactions = ( + await client.get_transactions( + GetTransactions(uint32(1), type_filter=TransactionTypeFilter.include([TransactionType.COINBASE_REWARD])) + ) + ).transactions assert len(all_transactions) == 5 assert all(transaction.type == TransactionType.COINBASE_REWARD.value for transaction in all_transactions) # Test confirmed filter - all_transactions = await client.get_transactions(1, confirmed=True) + all_transactions = (await client.get_transactions(GetTransactions(uint32(1), confirmed=True))).transactions assert len(all_transactions) == 10 assert all(transaction.confirmed for transaction in all_transactions) - all_transactions = await client.get_transactions(1, confirmed=False) + all_transactions = (await client.get_transactions(GetTransactions(uint32(1), confirmed=False))).transactions assert len(all_transactions) == 2 assert all(not transaction.confirmed for transaction in all_transactions) @@ -1051,9 +1092,15 @@ async def test_get_transactions(wallet_rpc_environment: WalletRpcTestEnvironment await wallet.wallet_state_manager.tx_store.add_transaction_record( dataclasses.replace(all_transactions[0], type=uint32(TransactionType.INCOMING_CLAWBACK_SEND)) ) - all_transactions = await client.get_transactions( - 1, type_filter=TransactionTypeFilter.include([TransactionType.INCOMING_CLAWBACK_SEND]), confirmed=False - ) + all_transactions = ( + await client.get_transactions( + GetTransactions( + uint32(1), + type_filter=TransactionTypeFilter.include([TransactionType.INCOMING_CLAWBACK_SEND]), + confirmed=False, + ) + ) + ).transactions assert len(all_transactions) == 1 @@ -1066,16 +1113,18 @@ async def test_get_transaction_count(wallet_rpc_environment: WalletRpcTestEnviro await generate_funds(full_node_api, env.wallet_1) - all_transactions = await client.get_transactions(1) + all_transactions = (await client.get_transactions(GetTransactions(uint32(1)))).transactions assert len(all_transactions) > 0 - transaction_count = await client.get_transaction_count(1) - assert transaction_count == len(all_transactions) - transaction_count = await client.get_transaction_count(1, confirmed=False) - assert transaction_count == 0 - transaction_count = await client.get_transaction_count( - 1, type_filter=TransactionTypeFilter.include([TransactionType.INCOMING_CLAWBACK_SEND]) + transaction_count_response = await client.get_transaction_count(GetTransactionCount(uint32(1))) + assert transaction_count_response.count == len(all_transactions) + transaction_count_response = await client.get_transaction_count(GetTransactionCount(uint32(1), confirmed=False)) + assert transaction_count_response.count == 0 + transaction_count_response = await client.get_transaction_count( + GetTransactionCount( + uint32(1), type_filter=TransactionTypeFilter.include([TransactionType.INCOMING_CLAWBACK_SEND]) + ) ) - assert transaction_count == 0 + assert transaction_count_response.count == 0 @pytest.mark.parametrize( @@ -1133,13 +1182,13 @@ async def test_cat_endpoints(wallet_environments: WalletTestFramework, wallet_ty "cat1", ) - cat_0_id = env_0.wallet_aliases["cat0"] + cat_0_id = uint32(env_0.wallet_aliases["cat0"]) # The RPC response contains more than just the balance info but all the # balance info should match. We're leveraging the `<=` operator to check # for subset on `dict` `.items()`. 
assert ( env_0.wallet_states[uint32(env_0.wallet_aliases["cat0"])].balance.to_json_dict().items() - <= (await env_0.rpc_client.get_wallet_balance(cat_0_id)).items() + <= (await env_0.rpc_client.get_wallet_balance(GetWalletBalance(cat_0_id))).wallet_balance.to_json_dict().items() ) asset_id = await env_0.rpc_client.get_cat_asset_id(cat_0_id) assert (await env_0.rpc_client.get_cat_name(cat_0_id)) == wallet_type.default_wallet_name_for_unknown_cat( @@ -1182,8 +1231,8 @@ async def test_cat_endpoints(wallet_environments: WalletTestFramework, wallet_ty ] ) - addr_0 = await env_0.rpc_client.get_next_address(cat_0_id, False) - addr_1 = await env_1.rpc_client.get_next_address(cat_1_id, False) + addr_0 = (await env_0.rpc_client.get_next_address(GetNextAddress(cat_0_id, False))).address + addr_1 = (await env_1.rpc_client.get_next_address(GetNextAddress(cat_1_id, False))).address assert addr_0 != addr_1 @@ -1389,7 +1438,7 @@ async def test_offer_endpoints(wallet_environments: WalletTestFramework, wallet_ # Creates a wallet for the same CAT on wallet_2 and send 4 CAT from wallet_1 to it await env_2.rpc_client.create_wallet_for_existing_cat(cat_asset_id) - wallet_2_address = await env_2.rpc_client.get_next_address(cat_wallet_id, False) + wallet_2_address = (await env_2.rpc_client.get_next_address(GetNextAddress(cat_wallet_id, False))).address adds = [{"puzzle_hash": decode_puzzle_hash(wallet_2_address), "amount": uint64(4), "memos": ["the cat memo"]}] tx_res = ( await env_1.rpc_client.send_transaction_multi( @@ -1767,7 +1816,12 @@ async def test_get_coin_records_by_names(wallet_rpc_environment: WalletRpcTestEn await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node, timeout=20) # Spend half of it back to the same wallet get some spent coins in the wallet - tx = (await client.send_transaction(1, uint64(generated_funds / 2), address, DEFAULT_TX_CONFIG)).transaction + tx = ( + await client.send_transaction( + SendTransaction(wallet_id=uint32(1), amount=uint64(generated_funds / 2), address=address, push=True), + DEFAULT_TX_CONFIG, + ) + ).transaction assert tx.spend_bundle is not None await time_out_assert(20, tx_in_mempool, True, client, tx.name) await farm_transaction(full_node_api, wallet_node, tx.spend_bundle) @@ -2118,7 +2172,7 @@ async def test_key_and_address_endpoints(wallet_rpc_environment: WalletRpcTestEn wallet_node: WalletNode = env.wallet_1.node client: WalletRpcClient = env.wallet_1.rpc_client - address = await client.get_next_address(1, True) + address = (await client.get_next_address(GetNextAddress(uint32(1), True))).address assert len(address) > 10 pks = (await client.get_public_keys()).pk_fingerprints @@ -2133,11 +2187,15 @@ async def test_key_and_address_endpoints(wallet_rpc_environment: WalletRpcTestEn addr = encode_puzzle_hash(ph, "txch") tx_amount = uint64(15600000) await env.full_node.api.wait_for_wallet_synced(wallet_node=wallet_node, timeout=20) - created_tx = (await client.send_transaction(1, tx_amount, addr, DEFAULT_TX_CONFIG)).transaction + created_tx = ( + await client.send_transaction( + SendTransaction(wallet_id=uint32(1), amount=tx_amount, address=addr, push=True), DEFAULT_TX_CONFIG + ) + ).transaction await time_out_assert(20, tx_in_mempool, True, client, created_tx.name) assert len(await wallet.wallet_state_manager.tx_store.get_unconfirmed_for_wallet(1)) == 1 - await client.delete_unconfirmed_transactions(1) + await client.delete_unconfirmed_transactions(DeleteUnconfirmedTransactions(uint32(1))) assert len(await 
wallet.wallet_state_manager.tx_store.get_unconfirmed_for_wallet(1)) == 0 sk_resp = await client.get_private_key(GetPrivateKey(pks[0])) @@ -2196,7 +2254,10 @@ async def test_key_and_address_endpoints(wallet_rpc_environment: WalletRpcTestEn assert await get_unconfirmed_balance(client, int(wallets[0].id)) == 0 with pytest.raises(ValueError): - await client.send_transaction(wallets[0].id, uint64(100), addr, DEFAULT_TX_CONFIG) + await client.send_transaction( + SendTransaction(wallet_id=uint32(wallets[0].id), amount=uint64(100), address=addr, push=True), + DEFAULT_TX_CONFIG, + ) # Delete all keys await client.delete_all_keys() @@ -2222,7 +2283,11 @@ async def test_select_coins_rpc(wallet_rpc_environment: WalletRpcTestEnvironment for tx_amount in tx_amounts: funds -= tx_amount # create coins for tests - tx = (await client.send_transaction(1, tx_amount, addr, DEFAULT_TX_CONFIG)).transaction + tx = ( + await client.send_transaction( + SendTransaction(wallet_id=uint32(1), amount=tx_amount, address=addr, push=True), DEFAULT_TX_CONFIG + ) + ).transaction spend_bundle = tx.spend_bundle assert spend_bundle is not None for coin in spend_bundle.additions(): @@ -2526,7 +2591,7 @@ async def test_notification_rpcs(wallet_rpc_environment: WalletRpcTestEnvironmen assert [notification] == (await client_2.get_notifications(GetNotifications(None, None, uint32(1)))).notifications assert [] == (await client_2.get_notifications(GetNotifications(None, uint32(1), None))).notifications assert [notification] == (await client_2.get_notifications(GetNotifications(None, None, None))).notifications - assert await client_2.delete_notifications() + await client_2.delete_notifications(DeleteNotifications()) assert [] == (await client_2.get_notifications(GetNotifications([notification.id]))).notifications async with wallet_2.wallet_state_manager.new_action_scope(DEFAULT_TX_CONFIG, push=True) as action_scope: @@ -2548,7 +2613,7 @@ async def test_notification_rpcs(wallet_rpc_environment: WalletRpcTestEnvironmen await time_out_assert(20, env.wallet_2.wallet.get_confirmed_balance, uint64(200000000000)) notification = (await client_2.get_notifications(GetNotifications())).notifications[0] - assert await client_2.delete_notifications([notification.id]) + await client_2.delete_notifications(DeleteNotifications([notification.id])) assert [] == (await client_2.get_notifications(GetNotifications([notification.id]))).notifications @@ -2762,7 +2827,7 @@ async def test_set_wallet_resync_on_startup(wallet_rpc_environment: WalletRpcTes nft_wallet = await wc.create_new_nft_wallet(None) nft_wallet_id = nft_wallet["wallet_id"] - address = await wc.get_next_address(env.wallet_1.wallet.id(), True) + address = (await wc.get_next_address(GetNextAddress(env.wallet_1.wallet.id(), True))).address await wc.mint_nft( request=NFTMintNFTRequest( wallet_id=nft_wallet_id, @@ -2783,12 +2848,14 @@ async def test_set_wallet_resync_on_startup(wallet_rpc_environment: WalletRpcTes # Test Clawback resync tx = ( await wc.send_transaction( - wallet_id=1, - amount=uint64(500), - address=address, + SendTransaction( + wallet_id=uint32(1), + amount=uint64(500), + address=address, + puzzle_decorator=[ClawbackPuzzleDecoratorOverride(decorator="CLAWBACK", clawback_timelock=uint64(5))], + push=True, + ), tx_config=DEFAULT_TX_CONFIG, - fee=uint64(0), - puzzle_decorator_override=[{"decorator": "CLAWBACK", "clawback_timelock": 5}], ) ).transaction clawback_coin_id = tx.additions[0].name() @@ -2796,12 +2863,11 @@ async def 
test_set_wallet_resync_on_startup(wallet_rpc_environment: WalletRpcTes await farm_transaction(full_node_api, wallet_node, tx.spend_bundle) await time_out_assert(20, check_client_synced, True, wc) await asyncio.sleep(10) - resp = await wc.spend_clawback_coins([clawback_coin_id], 0) - assert resp["success"] - assert len(resp["transaction_ids"]) == 1 - await time_out_assert_not_none( - 10, full_node_api.full_node.mempool_manager.get_spendbundle, bytes32.from_hexstr(resp["transaction_ids"][0]) + resp = await wc.spend_clawback_coins( + SpendClawbackCoins(coin_ids=[clawback_coin_id], fee=uint64(0), push=True), tx_config=DEFAULT_TX_CONFIG ) + assert len(resp.transaction_ids) == 1 + await time_out_assert_not_none(10, full_node_api.full_node.mempool_manager.get_spendbundle, resp.transaction_ids[0]) await farm_transaction_block(full_node_api, wallet_node) await time_out_assert(20, check_client_synced, True, wc) wallet_node_2._close() @@ -2931,7 +2997,11 @@ async def test_cat_spend_run_tail(wallet_rpc_environment: WalletRpcTestEnvironme ) tx_amount = uint64(100) - tx = (await client.send_transaction(1, tx_amount, addr, DEFAULT_TX_CONFIG)).transaction + tx = ( + await client.send_transaction( + SendTransaction(wallet_id=uint32(1), amount=tx_amount, address=addr, push=True), DEFAULT_TX_CONFIG + ) + ).transaction transaction_id = tx.name spend_bundle = tx.spend_bundle assert spend_bundle is not None @@ -3011,15 +3081,15 @@ async def test_get_balances(wallet_rpc_environment: WalletRpcTestEnvironment) -> await time_out_assert(5, check_mempool_spend_count, True, full_node_api, 2) await farm_transaction_block(full_node_api, wallet_node) await time_out_assert(20, check_client_synced, True, client) - bal = await client.get_wallet_balances() - assert len(bal) == 3 - assert bal["1"]["confirmed_wallet_balance"] == 1999999999880 - assert bal["2"]["confirmed_wallet_balance"] == 100 - assert bal["3"]["confirmed_wallet_balance"] == 20 - bal_ids = await client.get_wallet_balances([3, 2]) - assert len(bal_ids) == 2 - assert bal["2"]["confirmed_wallet_balance"] == 100 - assert bal["3"]["confirmed_wallet_balance"] == 20 + bals_response = await client.get_wallet_balances(GetWalletBalances()) + assert len(bals_response.wallet_balances) == 3 + assert bals_response.wallet_balances[uint32(1)].confirmed_wallet_balance == 1999999999880 + assert bals_response.wallet_balances[uint32(2)].confirmed_wallet_balance == 100 + assert bals_response.wallet_balances[uint32(3)].confirmed_wallet_balance == 20 + bals_response = await client.get_wallet_balances(GetWalletBalances([uint32(3), uint32(2)])) + assert len(bals_response.wallet_balances) == 2 + assert bals_response.wallet_balances[uint32(2)].confirmed_wallet_balance == 100 + assert bals_response.wallet_balances[uint32(3)].confirmed_wallet_balance == 20 @pytest.mark.parametrize( diff --git a/chia/_tests/wallet/test_new_wallet_protocol.py b/chia/_tests/wallet/test_new_wallet_protocol.py index 948eba40fc8a..66df0c728e86 100644 --- a/chia/_tests/wallet/test_new_wallet_protocol.py +++ b/chia/_tests/wallet/test_new_wallet_protocol.py @@ -21,7 +21,6 @@ from chia.protocols.outbound_message import Message, NodeType from chia.protocols.protocol_message_types import ProtocolMessageTypes from chia.protocols.shared_protocol import Capability -from chia.server.aliases import WalletService from chia.server.ws_connection import WSChiaConnection from chia.simulator import simulator_protocol from chia.simulator.block_tools import BlockTools @@ -30,6 +29,7 @@ from chia.types.coin_record import 
CoinRecord from chia.types.mempool_inclusion_status import MempoolInclusionStatus from chia.util.hash import std_hash +from chia.wallet.wallet_service import WalletService IDENTITY_PUZZLE = Program.to(1) IDENTITY_PUZZLE_HASH = IDENTITY_PUZZLE.get_tree_hash() diff --git a/chia/_tests/wallet/test_signer_protocol.py b/chia/_tests/wallet/test_signer_protocol.py index 29668eec8d48..c1d7a0dc590e 100644 --- a/chia/_tests/wallet/test_signer_protocol.py +++ b/chia/_tests/wallet/test_signer_protocol.py @@ -848,8 +848,8 @@ def run(self) -> None: with open("some file", "wb") as file: file.write(byte_serialize_clvm_streamable(coin, translation_layer=FOO_COIN_TRANSLATION)) - with open("some file2", "wb") as file: - file.write(byte_serialize_clvm_streamable(coin, translation_layer=FOO_COIN_TRANSLATION)) + with open("some file2", "wb") as file2: + file2.write(byte_serialize_clvm_streamable(coin, translation_layer=FOO_COIN_TRANSLATION)) result = runner.invoke( cmd, ["temp_cmd", "--signer-protocol-input", "some file", "--signer-protocol-input", "some file2"] diff --git a/chia/_tests/wallet/test_singleton_lifecycle_fast.py b/chia/_tests/wallet/test_singleton_lifecycle_fast.py index bc92c86ff59b..4f9e0b708748 100644 --- a/chia/_tests/wallet/test_singleton_lifecycle_fast.py +++ b/chia/_tests/wallet/test_singleton_lifecycle_fast.py @@ -65,10 +65,9 @@ def satisfies_hint(obj: T, type_hint: type[T]) -> bool: object_hint_pairs.extend((v, args[1]) for v in obj.values()) else: raise NotImplementedError(f"Type {origin} is not yet supported") - else: - # Handle concrete types - if type(obj) is not type_hint: - return False + # Handle concrete types + elif type(obj) is not type_hint: + return False return True diff --git a/chia/_tests/wallet/test_transaction_store.py b/chia/_tests/wallet/test_transaction_store.py index 00b89f1f9d70..fc2078481435 100644 --- a/chia/_tests/wallet/test_transaction_store.py +++ b/chia/_tests/wallet/test_transaction_store.py @@ -11,6 +11,7 @@ from chia._tests.util.db_connection import DBConnection from chia.types.blockchain_format.coin import Coin from chia.types.mempool_inclusion_status import MempoolInclusionStatus +from chia.util.bech32m import encode_puzzle_hash from chia.util.errors import Err from chia.wallet.conditions import ConditionValidTimes from chia.wallet.transaction_record import TransactionRecord, TransactionRecordOld, minimum_send_attempts @@ -32,7 +33,7 @@ tr1 = TransactionRecord( uint32(0), # confirmed height uint64(1000), # created_at_time - bytes32(bytes32.random(module_seeded_random)), # to_puzzle_hash + bytes32(bytes32.zeros), # to_puzzle_hash uint64(1234), # amount uint64(12), # fee_amount False, # confirmed @@ -47,13 +48,19 @@ bytes32(bytes32.random(module_seeded_random)), # name {}, # memos ConditionValidTimes(), + encode_puzzle_hash(bytes32(bytes32.zeros), "txch"), ) +MINIMUM_CONFIG = { + "network_overrides": {"config": {"testnet": {"address_prefix": "txch"}}}, + "selected_network": "testnet", +} + @pytest.mark.anyio async def test_add() -> None: async with DBConnection(1) as db_wrapper: - store = await WalletTransactionStore.create(db_wrapper) + store = await WalletTransactionStore.create(db_wrapper, MINIMUM_CONFIG) assert await store.get_transaction_record(tr1.name) is None await store.add_transaction_record(tr1) @@ -63,7 +70,7 @@ async def test_add() -> None: @pytest.mark.anyio async def test_delete() -> None: async with DBConnection(1) as db_wrapper: - store = await WalletTransactionStore.create(db_wrapper) + store = await 
WalletTransactionStore.create(db_wrapper, MINIMUM_CONFIG) await store.add_transaction_record(tr1) assert await store.get_transaction_record(tr1.name) == tr1 @@ -74,7 +81,7 @@ async def test_delete() -> None: @pytest.mark.anyio async def test_set_confirmed() -> None: async with DBConnection(1) as db_wrapper: - store = await WalletTransactionStore.create(db_wrapper) + store = await WalletTransactionStore.create(db_wrapper, MINIMUM_CONFIG) await store.add_transaction_record(tr1) await store.set_confirmed(tr1.name, uint32(100)) @@ -87,7 +94,7 @@ async def test_set_confirmed() -> None: @pytest.mark.anyio async def test_increment_sent_noop(seeded_random: random.Random) -> None: async with DBConnection(1) as db_wrapper: - store = await WalletTransactionStore.create(db_wrapper) + store = await WalletTransactionStore.create(db_wrapper, MINIMUM_CONFIG) assert ( await store.increment_sent( @@ -100,7 +107,7 @@ async def test_increment_sent_noop(seeded_random: random.Random) -> None: @pytest.mark.anyio async def test_increment_sent() -> None: async with DBConnection(1) as db_wrapper: - store = await WalletTransactionStore.create(db_wrapper) + store = await WalletTransactionStore.create(db_wrapper, MINIMUM_CONFIG) await store.add_transaction_record(tr1) tr = await store.get_transaction_record(tr1.name) @@ -126,7 +133,7 @@ async def test_increment_sent() -> None: @pytest.mark.anyio async def test_increment_sent_error() -> None: async with DBConnection(1) as db_wrapper: - store = await WalletTransactionStore.create(db_wrapper) + store = await WalletTransactionStore.create(db_wrapper, MINIMUM_CONFIG) await store.add_transaction_record(tr1) tr = await store.get_transaction_record(tr1.name) @@ -156,7 +163,7 @@ def test_filter_ok_mempool_status() -> None: @pytest.mark.anyio async def test_tx_reorged_update() -> None: async with DBConnection(1) as db_wrapper: - store = await WalletTransactionStore.create(db_wrapper) + store = await WalletTransactionStore.create(db_wrapper, MINIMUM_CONFIG) tr = dataclasses.replace(tr1, sent=uint32(2), sent_to=[("peer1", uint8(1), None), ("peer2", uint8(1), None)]) await store.add_transaction_record(tr) @@ -173,7 +180,7 @@ async def test_tx_reorged_update() -> None: @pytest.mark.anyio async def test_tx_reorged_add() -> None: async with DBConnection(1) as db_wrapper: - store = await WalletTransactionStore.create(db_wrapper) + store = await WalletTransactionStore.create(db_wrapper, MINIMUM_CONFIG) tr = dataclasses.replace(tr1, sent=uint32(2), sent_to=[("peer1", uint8(1), None), ("peer2", uint8(1), None)]) @@ -187,7 +194,7 @@ async def test_tx_reorged_add() -> None: @pytest.mark.anyio async def test_get_tx_record(seeded_random: random.Random) -> None: async with DBConnection(1) as db_wrapper: - store = await WalletTransactionStore.create(db_wrapper) + store = await WalletTransactionStore.create(db_wrapper, MINIMUM_CONFIG) tr2 = dataclasses.replace(tr1, name=bytes32.random(seeded_random)) tr3 = dataclasses.replace(tr1, name=bytes32.random(seeded_random)) @@ -212,7 +219,7 @@ async def test_get_tx_record(seeded_random: random.Random) -> None: @pytest.mark.anyio async def test_get_farming_rewards(seeded_random: random.Random) -> None: async with DBConnection(1) as db_wrapper: - store = await WalletTransactionStore.create(db_wrapper) + store = await WalletTransactionStore.create(db_wrapper, MINIMUM_CONFIG) test_trs: list[TransactionRecord] = [] # tr1 is type OUTGOING_TX @@ -249,7 +256,7 @@ async def test_get_farming_rewards(seeded_random: random.Random) -> None: @pytest.mark.anyio async 
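WalletTransactionStore.create gains a config argument because TransactionRecord now carries a bech32m to_address alongside to_puzzle_hash; judging by the shape of MINIMUM_CONFIG above, the store needs just enough config to resolve the address prefix. A quick illustration of the relationship between the two fields, using helpers these tests already import:

from chia_rs.sized_bytes import bytes32
from chia.util.bech32m import decode_puzzle_hash, encode_puzzle_hash

ph = bytes32.zeros
address = encode_puzzle_hash(ph, "txch")  # prefix taken from MINIMUM_CONFIG's network override
assert decode_puzzle_hash(address) == ph  # round-trips: to_address is derived data
print(address)  # a "txch1..." string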
def test_get_all_unconfirmed(seeded_random: random.Random) -> None: async with DBConnection(1) as db_wrapper: - store = await WalletTransactionStore.create(db_wrapper) + store = await WalletTransactionStore.create(db_wrapper, MINIMUM_CONFIG) tr2 = dataclasses.replace( tr1, name=bytes32.random(seeded_random), confirmed=True, confirmed_at_height=uint32(100) @@ -263,7 +270,7 @@ async def test_get_all_unconfirmed(seeded_random: random.Random) -> None: @pytest.mark.anyio async def test_get_unconfirmed_for_wallet(seeded_random: random.Random) -> None: async with DBConnection(1) as db_wrapper: - store = await WalletTransactionStore.create(db_wrapper) + store = await WalletTransactionStore.create(db_wrapper, MINIMUM_CONFIG) tr2 = dataclasses.replace( tr1, name=bytes32.random(seeded_random), confirmed=True, confirmed_at_height=uint32(100) @@ -282,7 +289,7 @@ async def test_get_unconfirmed_for_wallet(seeded_random: random.Random) -> None: @pytest.mark.anyio async def test_transaction_count_for_wallet(seeded_random: random.Random) -> None: async with DBConnection(1) as db_wrapper: - store = await WalletTransactionStore.create(db_wrapper) + store = await WalletTransactionStore.create(db_wrapper, MINIMUM_CONFIG) tr2 = dataclasses.replace(tr1, name=bytes32.random(seeded_random), wallet_id=uint32(2)) @@ -322,7 +329,7 @@ async def test_transaction_count_for_wallet(seeded_random: random.Random) -> Non @pytest.mark.anyio async def test_all_transactions_for_wallet(seeded_random: random.Random) -> None: async with DBConnection(1) as db_wrapper: - store = await WalletTransactionStore.create(db_wrapper) + store = await WalletTransactionStore.create(db_wrapper, MINIMUM_CONFIG) test_trs: list[TransactionRecord] = [] for wallet_id in [1, 2]: @@ -373,7 +380,7 @@ def cmp(lhs: list[Any], rhs: list[Any]) -> bool: @pytest.mark.anyio async def test_get_all_transactions(seeded_random: random.Random) -> None: async with DBConnection(1) as db_wrapper: - store = await WalletTransactionStore.create(db_wrapper) + store = await WalletTransactionStore.create(db_wrapper, MINIMUM_CONFIG) test_trs: list[TransactionRecord] = [] assert await store.get_all_transactions() == [] @@ -390,7 +397,7 @@ async def test_get_all_transactions(seeded_random: random.Random) -> None: @pytest.mark.anyio async def test_get_transaction_above(seeded_random: random.Random) -> None: async with DBConnection(1) as db_wrapper: - store = await WalletTransactionStore.create(db_wrapper) + store = await WalletTransactionStore.create(db_wrapper, MINIMUM_CONFIG) test_trs: list[TransactionRecord] = [] assert await store.get_transaction_above(uint32(0)) == [] @@ -410,7 +417,7 @@ async def test_get_transaction_above(seeded_random: random.Random) -> None: @pytest.mark.anyio async def test_get_tx_by_trade_id(seeded_random: random.Random) -> None: async with DBConnection(1) as db_wrapper: - store = await WalletTransactionStore.create(db_wrapper) + store = await WalletTransactionStore.create(db_wrapper, MINIMUM_CONFIG) tr2 = dataclasses.replace(tr1, name=bytes32.random(seeded_random), trade_id=bytes32.random(seeded_random)) tr3 = dataclasses.replace(tr1, name=bytes32.random(seeded_random), trade_id=bytes32.random(seeded_random)) @@ -442,7 +449,7 @@ async def test_get_tx_by_trade_id(seeded_random: random.Random) -> None: @pytest.mark.anyio async def test_rollback_to_block(seeded_random: random.Random) -> None: async with DBConnection(1) as db_wrapper: - store = await WalletTransactionStore.create(db_wrapper) + store = await WalletTransactionStore.create(db_wrapper, 
MINIMUM_CONFIG) test_trs: list[TransactionRecord] = [] for height in range(10): @@ -465,7 +472,7 @@ async def test_rollback_to_block(seeded_random: random.Random) -> None: @pytest.mark.anyio async def test_delete_unconfirmed(seeded_random: random.Random) -> None: async with DBConnection(1) as db_wrapper: - store = await WalletTransactionStore.create(db_wrapper) + store = await WalletTransactionStore.create(db_wrapper, MINIMUM_CONFIG) tr2 = dataclasses.replace(tr1, name=bytes32.random(seeded_random), confirmed=True) tr3 = dataclasses.replace(tr1, name=bytes32.random(seeded_random), confirmed=True, wallet_id=uint32(2)) @@ -493,7 +500,7 @@ async def test_delete_unconfirmed(seeded_random: random.Random) -> None: @pytest.mark.anyio async def test_get_transactions_between_confirmed(seeded_random: random.Random) -> None: async with DBConnection(1) as db_wrapper: - store = await WalletTransactionStore.create(db_wrapper) + store = await WalletTransactionStore.create(db_wrapper, MINIMUM_CONFIG) tr2 = dataclasses.replace( tr1, name=bytes32.random(seeded_random), confirmed=True, confirmed_at_height=uint32(1) @@ -585,7 +592,7 @@ async def test_get_transactions_between_confirmed(seeded_random: random.Random) @pytest.mark.anyio async def test_get_transactions_between_relevance(seeded_random: random.Random) -> None: async with DBConnection(1) as db_wrapper: - store = await WalletTransactionStore.create(db_wrapper) + store = await WalletTransactionStore.create(db_wrapper, MINIMUM_CONFIG) t1 = dataclasses.replace( tr1, @@ -716,22 +723,24 @@ async def test_get_transactions_between_relevance(seeded_random: random.Random) @pytest.mark.anyio async def test_get_transactions_between_to_puzzle_hash(seeded_random: random.Random) -> None: async with DBConnection(1) as db_wrapper: - store = await WalletTransactionStore.create(db_wrapper) + store = await WalletTransactionStore.create(db_wrapper, MINIMUM_CONFIG) ph1 = bytes32.random(seeded_random) + ad1 = encode_puzzle_hash(ph1, "txch") ph2 = bytes32.random(seeded_random) + ad2 = encode_puzzle_hash(ph2, "txch") tr2 = dataclasses.replace( - tr1, name=bytes32.random(seeded_random), confirmed_at_height=uint32(1), to_puzzle_hash=ph1 + tr1, name=bytes32.random(seeded_random), confirmed_at_height=uint32(1), to_puzzle_hash=ph1, to_address=ad1 ) tr3 = dataclasses.replace( - tr1, name=bytes32.random(seeded_random), confirmed_at_height=uint32(2), to_puzzle_hash=ph1 + tr1, name=bytes32.random(seeded_random), confirmed_at_height=uint32(2), to_puzzle_hash=ph1, to_address=ad1 ) tr4 = dataclasses.replace( - tr1, name=bytes32.random(seeded_random), confirmed_at_height=uint32(3), to_puzzle_hash=ph2 + tr1, name=bytes32.random(seeded_random), confirmed_at_height=uint32(3), to_puzzle_hash=ph2, to_address=ad2 ) tr5 = dataclasses.replace( - tr1, name=bytes32.random(seeded_random), confirmed_at_height=uint32(4), to_puzzle_hash=ph2 + tr1, name=bytes32.random(seeded_random), confirmed_at_height=uint32(4), to_puzzle_hash=ph2, to_address=ad2 ) await store.add_transaction_record(tr1) @@ -762,7 +771,7 @@ async def test_get_transactions_between_to_puzzle_hash(seeded_random: random.Ran @pytest.mark.anyio async def test_get_not_sent(seeded_random: random.Random) -> None: async with DBConnection(1) as db_wrapper: - store = await WalletTransactionStore.create(db_wrapper) + store = await WalletTransactionStore.create(db_wrapper, MINIMUM_CONFIG) tr2 = dataclasses.replace( tr1, name=bytes32.random(seeded_random), confirmed=True, confirmed_at_height=uint32(1) @@ -881,7 +890,7 @@ async def 
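Nearly every test in this file now repeats the same two-line store setup. One possible cleanup, sketched as a hypothetical tx_store helper (not part of this patch) against the names already defined in the file and the anyio marker the suite already uses:

from contextlib import asynccontextmanager

import pytest

@asynccontextmanager
async def tx_store():
    # hypothetical helper: bundles the DBConnection + store creation repeated above
    async with DBConnection(1) as db_wrapper:
        yield await WalletTransactionStore.create(db_wrapper, MINIMUM_CONFIG)

@pytest.mark.anyio
async def test_add_with_helper() -> None:
    async with tx_store() as store:
        assert await store.get_transaction_record(tr1.name) is None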
test_valid_times_migration() -> None: ), ) - store = await WalletTransactionStore.create(db_wrapper) + store = await WalletTransactionStore.create(db_wrapper, MINIMUM_CONFIG) rec = await store.get_transaction_record(old_record.name) assert rec is not None assert rec.valid_times == ConditionValidTimes() @@ -890,7 +899,7 @@ async def test_valid_times_migration() -> None: @pytest.mark.anyio async def test_large_tx_record_query() -> None: async with DBConnection(1) as db_wrapper: - store = await WalletTransactionStore.create(db_wrapper) + store = await WalletTransactionStore.create(db_wrapper, MINIMUM_CONFIG) tx_records_to_insert = [] for _ in range(db_wrapper.host_parameter_limit + 1): name = bytes32.random() diff --git a/chia/_tests/wallet/test_wallet.py b/chia/_tests/wallet/test_wallet.py index 72acf55da663..1bd42dbd512c 100644 --- a/chia/_tests/wallet/test_wallet.py +++ b/chia/_tests/wallet/test_wallet.py @@ -19,6 +19,7 @@ from chia.types.peer_info import PeerInfo from chia.types.signing_mode import CHIP_0002_SIGN_MESSAGE_PREFIX from chia.util.bech32m import encode_puzzle_hash +from chia.util.byte_types import hexstr_to_bytes from chia.util.errors import Err from chia.wallet.conditions import ConditionValidTimes from chia.wallet.derive_keys import master_sk_to_wallet_sk @@ -395,9 +396,7 @@ async def test_wallet_clawback_clawback(self, wallet_environments: WalletTestFra assert len(txs["transactions"]) == 1 assert not txs["transactions"][0]["confirmed"] assert txs["transactions"][0]["metadata"]["recipient_puzzle_hash"][2:] == normal_puzhash.hex() - assert txs["transactions"][0]["metadata"]["coin_id"] == merkle_coin.name().hex() - with pytest.raises(ValueError): - await api_0.spend_clawback_coins({}) + assert txs["transactions"][0]["metadata"]["coin_id"] == "0x" + merkle_coin.name().hex() test_fee = 10 resp = await api_0.spend_clawback_coins( @@ -407,7 +406,6 @@ async def test_wallet_clawback_clawback(self, wallet_environments: WalletTestFra **wallet_environments.tx_config.to_json_dict(), } ) - assert resp["success"] assert len(resp["transaction_ids"]) == 1 await wallet_environments.process_pending_states( @@ -540,7 +538,6 @@ async def test_wallet_clawback_sent_self(self, wallet_environments: WalletTestFr **wallet_environments.tx_config.to_json_dict(), } ) - assert resp["success"] assert len(resp["transaction_ids"]) == 1 # Wait mempool update await wallet_environments.process_pending_states( @@ -677,7 +674,6 @@ async def test_wallet_clawback_claim_manual(self, wallet_environments: WalletTes **wallet_environments.tx_config.to_json_dict(), } ) - assert resp["success"] assert len(resp["transaction_ids"]) == 1 await wallet_environments.process_pending_states( @@ -1093,10 +1089,8 @@ async def test_clawback_resync(self, self_hostname: str, wallet_environments: Wa await time_out_assert(20, wsm_2.coin_store.count_small_unspent, 1, 1000, CoinType.CLAWBACK) # clawback merkle coin resp = await api_1.spend_clawback_coins({"coin_ids": [clawback_coin_id_1.hex()], "fee": 0}) - assert resp["success"] assert len(resp["transaction_ids"]) == 1 resp = await api_1.spend_clawback_coins({"coin_ids": [clawback_coin_id_2.hex()], "fee": 0}) - assert resp["success"] assert len(resp["transaction_ids"]) == 1 await wallet_environments.process_pending_states( @@ -1542,9 +1536,9 @@ async def test_wallet_make_transaction_with_memo(self, wallet_environments: Wall fees = estimate_fees(tx.spend_bundle) assert fees == tx_fee - memos = await env_0.rpc_client.get_transaction_memo(GetTransactionMemo(transaction_id=tx.name)) - 
assert len(memos.coins_with_memos) == 1 - assert memos.coins_with_memos[0].memos[0] == ph_2 + memo_response = await env_0.rpc_client.get_transaction_memo(GetTransactionMemo(transaction_id=tx.name)) + assert len(memo_response.memo_dict) == 1 + assert next(iter(memo_response.memo_dict.values()))[0] == ph_2 await wallet_environments.process_pending_states( [ @@ -1589,13 +1583,9 @@ async def test_wallet_make_transaction_with_memo(self, wallet_environments: Wall if coin.amount == tx_amount: tx_id = coin.name() assert tx_id is not None - memos = await env_1.rpc_client.get_transaction_memo(GetTransactionMemo(transaction_id=tx_id)) - assert len(memos.coins_with_memos) == 1 - assert memos.coins_with_memos[0].memos[0] == ph_2 - # test json serialization - assert memos.to_json_dict() == { - tx_id.hex(): {memos.coins_with_memos[0].coin_id.hex(): [memos.coins_with_memos[0].memos[0].hex()]} - } + memo_response = await env_1.rpc_client.get_transaction_memo(GetTransactionMemo(transaction_id=tx_id)) + assert len(memo_response.memo_dict) == 1 + assert next(iter(memo_response.memo_dict.values()))[0] == ph_2 @pytest.mark.parametrize( "wallet_environments", @@ -1721,6 +1711,7 @@ async def test_wallet_prevent_fee_theft(self, wallet_environments: WalletTestFra confirmed_at_height=uint32(0), created_at_time=uint64(0), to_puzzle_hash=bytes32(32 * b"0"), + to_address=encode_puzzle_hash(bytes32(32 * b"0"), "txch"), amount=uint64(0), fee_amount=uint64(0), confirmed=False, @@ -2011,9 +2002,9 @@ async def test_sign_message(self, wallet_environments: WalletTestFramework) -> N puzzle: Program = Program.to((CHIP_0002_SIGN_MESSAGE_PREFIX, message)) assert AugSchemeMPL.verify( - G1Element.from_bytes(bytes.fromhex(response["pubkey"])), + G1Element.from_bytes(hexstr_to_bytes(response["pubkey"])), puzzle.get_tree_hash(), - G2Element.from_bytes(bytes.fromhex(response["signature"])), + G2Element.from_bytes(hexstr_to_bytes(response["signature"])), ) # Test hex string message = "0123456789ABCDEF" @@ -2023,9 +2014,9 @@ async def test_sign_message(self, wallet_environments: WalletTestFramework) -> N puzzle = Program.to((CHIP_0002_SIGN_MESSAGE_PREFIX, bytes.fromhex(message))) assert AugSchemeMPL.verify( - G1Element.from_bytes(bytes.fromhex(response["pubkey"])), + G1Element.from_bytes(hexstr_to_bytes(response["pubkey"])), puzzle.get_tree_hash(), - G2Element.from_bytes(bytes.fromhex(response["signature"])), + G2Element.from_bytes(hexstr_to_bytes(response["signature"])), ) # Test informal input message = "0123456789ABCDEF" @@ -2035,9 +2026,9 @@ async def test_sign_message(self, wallet_environments: WalletTestFramework) -> N puzzle = Program.to((CHIP_0002_SIGN_MESSAGE_PREFIX, bytes.fromhex(message))) assert AugSchemeMPL.verify( - G1Element.from_bytes(bytes.fromhex(response["pubkey"])), + G1Element.from_bytes(hexstr_to_bytes(response["pubkey"])), puzzle.get_tree_hash(), - G2Element.from_bytes(bytes.fromhex(response["signature"])), + G2Element.from_bytes(hexstr_to_bytes(response["signature"])), ) # Test BLS sign string message = "Hello World" @@ -2046,9 +2037,9 @@ async def test_sign_message(self, wallet_environments: WalletTestFramework) -> N ) assert AugSchemeMPL.verify( - G1Element.from_bytes(bytes.fromhex(response["pubkey"])), + G1Element.from_bytes(hexstr_to_bytes(response["pubkey"])), bytes(message, "utf-8"), - G2Element.from_bytes(bytes.fromhex(response["signature"])), + G2Element.from_bytes(hexstr_to_bytes(response["signature"])), ) # Test BLS sign hex message = "0123456789ABCDEF" @@ -2057,9 +2048,9 @@ async def 
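get_transaction_memo's response also changed shape: the coins_with_memos list (and its nested JSON form) is replaced by a memo_dict. The assertions above imply it maps each coin id to that coin's memo list; a consumption sketch under that assumption (the request class is assumed to live in chia.wallet.wallet_request_types with the others):

from chia.wallet.wallet_request_types import GetTransactionMemo

async def dump_memos(rpc_client, tx_id) -> None:
    memo_response = await rpc_client.get_transaction_memo(GetTransactionMemo(transaction_id=tx_id))
    for coin_id, memos in memo_response.memo_dict.items():
        # assumed shape: coin id keys, list-of-memo-bytes values (memos[0] == ph_2 above)
        print(coin_id.hex(), [memo.hex() for memo in memos])

The parallel swap from bytes.fromhex to hexstr_to_bytes in the signing assertions makes them tolerant of an optional 0x prefix, which bytes.fromhex rejects.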
test_sign_message(self, wallet_environments: WalletTestFramework) -> N ) assert AugSchemeMPL.verify( - G1Element.from_bytes(bytes.fromhex(response["pubkey"])), - bytes.fromhex(message), - G2Element.from_bytes(bytes.fromhex(response["signature"])), + G1Element.from_bytes(hexstr_to_bytes(response["pubkey"])), + hexstr_to_bytes(message), + G2Element.from_bytes(hexstr_to_bytes(response["signature"])), ) @pytest.mark.parametrize( diff --git a/chia/_tests/wallet/test_wallet_state_manager.py b/chia/_tests/wallet/test_wallet_state_manager.py index 0429e6090dba..1289f37cca53 100644 --- a/chia/_tests/wallet/test_wallet_state_manager.py +++ b/chia/_tests/wallet/test_wallet_state_manager.py @@ -21,7 +21,7 @@ from chia.wallet.transaction_record import TransactionRecord from chia.wallet.util.transaction_type import TransactionType from chia.wallet.util.wallet_types import WalletType -from chia.wallet.wallet_request_types import PushTransactions +from chia.wallet.wallet_request_types import ExtendDerivationIndex, PushTransactions from chia.wallet.wallet_rpc_api import MAX_DERIVATION_INDEX_DELTA from chia.wallet.wallet_spend_bundle import WalletSpendBundle from chia.wallet.wallet_state_manager import WalletStateManager @@ -286,6 +286,11 @@ async def get_puzzle_hash_state() -> PuzzleHashState: expected_state = await get_puzzle_hash_state() + # Quick test of this RPC + assert ( + await wallet_environments.environments[0].rpc_client.get_current_derivation_index() + ).index == expected_state.highest_index + # `create_more_puzzle_hashes` # No-op result = await wsm.create_more_puzzle_hashes() @@ -420,7 +425,7 @@ async def get_puzzle_hash_state() -> PuzzleHashState: (0,), ) with pytest.raises(ValueError): - await rpc_client.extend_derivation_index(0) + await rpc_client.extend_derivation_index(ExtendDerivationIndex(uint32(0))) # Reset to a normal state await wsm.puzzle_store.delete_wallet(wsm.main_wallet.id()) @@ -431,15 +436,17 @@ async def get_puzzle_hash_state() -> PuzzleHashState: # Test an index already created with pytest.raises(ValueError): - await rpc_client.extend_derivation_index(0) + await rpc_client.extend_derivation_index(ExtendDerivationIndex(uint32(0))) # Test an index too far in the future with pytest.raises(ValueError): - await rpc_client.extend_derivation_index(MAX_DERIVATION_INDEX_DELTA + expected_state.highest_index + 1) + await rpc_client.extend_derivation_index( + ExtendDerivationIndex(uint32(MAX_DERIVATION_INDEX_DELTA + expected_state.highest_index + 1)) + ) # Test the actual functionality - assert await rpc_client.extend_derivation_index(expected_state.highest_index + 5) == str( - expected_state.highest_index + 5 - ) + assert ( + await rpc_client.extend_derivation_index(ExtendDerivationIndex(uint32(expected_state.highest_index + 5))) + ).index == expected_state.highest_index + 5 expected_state = PuzzleHashState(expected_state.highest_index + 5, expected_state.used_up_to_index) assert await get_puzzle_hash_state() == expected_state diff --git a/chia/_tests/wallet/vc_wallet/test_vc_wallet.py b/chia/_tests/wallet/vc_wallet/test_vc_wallet.py index 2776f4951dcb..b76cfe3ecab2 100644 --- a/chia/_tests/wallet/vc_wallet/test_vc_wallet.py +++ b/chia/_tests/wallet/vc_wallet/test_vc_wallet.py @@ -7,7 +7,7 @@ import pytest from chia_rs import G2Element from chia_rs.sized_bytes import bytes32 -from chia_rs.sized_ints import uint8, uint16, uint64 +from chia_rs.sized_ints import uint8, uint16, uint32, uint64 from typing_extensions import Literal from chia._tests.environments.wallet import 
WalletEnvironment, WalletStateTransition, WalletTestFramework @@ -31,6 +31,7 @@ from chia.wallet.wallet import Wallet from chia.wallet.wallet_node import WalletNode from chia.wallet.wallet_request_types import ( + GetTransactions, GetWallets, VCAddProofs, VCGet, @@ -454,13 +455,17 @@ async def test_vc_lifecycle(wallet_environments: WalletTestFramework) -> None: assert await wallet_node_1.wallet_state_manager.wallets[env_1.dealias_wallet_id("crcat")].match_hinted_coin( next(c for tx in txs for c in tx.additions if c.amount == 90), wallet_1_ph ) - pending_tx = await client_1.get_transactions( - env_1.dealias_wallet_id("crcat"), - 0, - 1, - reverse=True, - type_filter=TransactionTypeFilter.include([TransactionType.INCOMING_CRCAT_PENDING]), - ) + pending_tx = ( + await client_1.get_transactions( + GetTransactions( + uint32(env_1.dealias_wallet_id("crcat")), + uint16(0), + uint16(1), + reverse=True, + type_filter=TransactionTypeFilter.include([TransactionType.INCOMING_CRCAT_PENDING]), + ) + ) + ).transactions assert len(pending_tx) == 1 # Send the VC to wallet_1 to use for the CR-CATs diff --git a/chia/apis.py b/chia/apis.py index 3445b7dd15c6..f30eac27fafb 100644 --- a/chia/apis.py +++ b/chia/apis.py @@ -6,6 +6,7 @@ from chia.introducer.introducer_api import IntroducerAPI from chia.protocols.outbound_message import NodeType from chia.server.api_protocol import ApiProtocol +from chia.solver.solver_api import SolverAPI from chia.timelord.timelord_api import TimelordAPI from chia.wallet.wallet_node_api import WalletNodeAPI @@ -16,4 +17,5 @@ NodeType.TIMELORD: TimelordAPI, NodeType.FARMER: FarmerAPI, NodeType.HARVESTER: HarvesterAPI, + NodeType.SOLVER: SolverAPI, } diff --git a/chia/cmds/beta.py b/chia/cmds/beta.py index 6b44a5e2a8e8..4b9dc58a47b5 100644 --- a/chia/cmds/beta.py +++ b/chia/cmds/beta.py @@ -92,10 +92,14 @@ def enable_cmd(ctx: click.Context, force: bool, path: Optional[str]) -> None: current_path = config.get("beta", {}).get("path") current_path = None if current_path is None else Path(current_path) - if path is None and current_path is None: - beta_root_path = prompt_for_beta_path(current_path or default_beta_root_path()) + path_to_use: Optional[Path] = None if path is None else Path(path) + if path_to_use is None: + path_to_use = current_path + + if path_to_use is None: + beta_root_path = prompt_for_beta_path(default_beta_root_path()) else: - beta_root_path = Path(path or current_path) + beta_root_path = path_to_use validate_beta_path(beta_root_path) update_beta_config(True, beta_root_path, metrics_log_interval_default, config) diff --git a/chia/cmds/chia.py b/chia/cmds/chia.py index 988d95f78a8d..0420676e2672 100644 --- a/chia/cmds/chia.py +++ b/chia/cmds/chia.py @@ -24,6 +24,7 @@ from chia.cmds.plotters import plotters_cmd from chia.cmds.rpc import rpc_cmd from chia.cmds.show import show_cmd +from chia.cmds.solver import solver_cmd from chia.cmds.start import start_cmd from chia.cmds.stop import stop_cmd from chia.cmds.wallet import wallet_cmd @@ -127,6 +128,7 @@ def run_daemon_cmd(ctx: click.Context, wait_for_unlock: bool) -> None: cli.add_command(init_cmd) cli.add_command(rpc_cmd) cli.add_command(show_cmd) +cli.add_command(solver_cmd) cli.add_command(start_cmd) cli.add_command(stop_cmd) cli.add_command(netspace_cmd) diff --git a/chia/cmds/cmd_classes.py b/chia/cmds/cmd_classes.py index 4f9dc5ed5e79..e52e1421066d 100644 --- a/chia/cmds/cmd_classes.py +++ b/chia/cmds/cmd_classes.py @@ -181,10 +181,10 @@ def _generate_command_parser(cls: type[ChiaCommand]) -> _CommandParsingStage: 
needs_context: bool = False hints = get_type_hints(cls) - _fields = fields(cls) # type: ignore[arg-type] + cls_fields = fields(cls) # type: ignore[arg-type] - for _field in _fields: - field_name = _field.name + for cls_field in cls_fields: + field_name = cls_field.name if getattr(hints[field_name], COMMAND_HELPER_ATTRIBUTE_NAME, False): members[field_name] = _generate_command_parser(hints[field_name]) elif field_name == "context": @@ -193,9 +193,9 @@ def _generate_command_parser(cls: type[ChiaCommand]) -> _CommandParsingStage: else: needs_context = True kwarg_names.append(field_name) - elif "option_args" in _field.metadata: + elif "option_args" in cls_field.metadata: option_args: dict[str, Any] = {"multiple": False, "required": False} - option_args.update(_field.metadata["option_args"]) + option_args.update(cls_field.metadata["option_args"]) if "type" not in option_args: origin = get_origin(hints[field_name]) diff --git a/chia/cmds/cmds_util.py b/chia/cmds/cmds_util.py index 1362a3b83529..0ba57ea62717 100644 --- a/chia/cmds/cmds_util.py +++ b/chia/cmds/cmds_util.py @@ -23,6 +23,7 @@ from chia.harvester.harvester_rpc_client import HarvesterRpcClient from chia.rpc.rpc_client import ResponseFailureError, RpcClient from chia.simulator.simulator_full_node_rpc_client import SimulatorFullNodeRpcClient +from chia.solver.solver_rpc_client import SolverRpcClient from chia.types.mempool_submission_status import MempoolSubmissionStatus from chia.util.config import load_config from chia.util.errors import CliRpcConnectionError, InvalidPathError @@ -42,6 +43,7 @@ "harvester": HarvesterRpcClient, "data_layer": DataLayerRpcClient, "simulator": SimulatorFullNodeRpcClient, + "solver": SolverRpcClient, } node_config_section_names: dict[type[RpcClient], str] = { @@ -52,6 +54,7 @@ HarvesterRpcClient: "harvester", DataLayerRpcClient: "data_layer", SimulatorFullNodeRpcClient: "full_node", + SolverRpcClient: "solver", } _T_RpcClient = TypeVar("_T_RpcClient", bound=RpcClient) diff --git a/chia/cmds/farm.py b/chia/cmds/farm.py index e14658a4b797..2fd131e39866 100644 --- a/chia/cmds/farm.py +++ b/chia/cmds/farm.py @@ -49,6 +49,13 @@ def farm_cmd() -> None: default=None, show_default=True, ) +@click.option( + "-i", + "--include-pool-rewards", + help="Include pool farming rewards in the total farmed amount", + is_flag=True, + default=False, +) @click.pass_context def summary_cmd( ctx: click.Context, @@ -56,6 +63,7 @@ def summary_cmd( wallet_rpc_port: Optional[int], harvester_rpc_port: Optional[int], farmer_rpc_port: Optional[int], + include_pool_rewards: bool, ) -> None: import asyncio @@ -67,6 +75,7 @@ def summary_cmd( wallet_rpc_port, harvester_rpc_port, farmer_rpc_port, + include_pool_rewards, root_path=ChiaCliContext.set_default(ctx).root_path, ) ) diff --git a/chia/cmds/farm_funcs.py b/chia/cmds/farm_funcs.py index b251edded023..560a65b39b76 100644 --- a/chia/cmds/farm_funcs.py +++ b/chia/cmds/farm_funcs.py @@ -49,9 +49,13 @@ async def get_average_block_time(rpc_port: Optional[int], root_path: Path) -> fl return (curr.timestamp - past_curr.timestamp) / (curr.height - past_curr.height) -async def get_wallets_stats(wallet_rpc_port: Optional[int], root_path: Path) -> Optional[dict[str, Any]]: +async def get_wallets_stats( + wallet_rpc_port: Optional[int], + root_path: Path, + include_pool_rewards: bool, +) -> Optional[dict[str, Any]]: async with get_any_service_client(WalletRpcClient, root_path, wallet_rpc_port) as (wallet_client, _): - return await wallet_client.get_farmed_amount() + return await 
wallet_client.get_farmed_amount(include_pool_rewards) async def get_challenges(root_path: Path, farmer_rpc_port: Optional[int]) -> Optional[list[dict[str, Any]]]: @@ -80,6 +84,7 @@ async def summary( wallet_rpc_port: Optional[int], harvester_rpc_port: Optional[int], farmer_rpc_port: Optional[int], + include_pool_rewards: bool, root_path: Path, ) -> None: harvesters_summary = await get_harvesters_summary(farmer_rpc_port, root_path) @@ -97,7 +102,7 @@ async def summary( wallet_not_ready: bool = False amounts = None try: - amounts = await get_wallets_stats(wallet_rpc_port, root_path) + amounts = await get_wallets_stats(wallet_rpc_port, root_path, include_pool_rewards) except CliRpcConnectionError: wallet_not_ready = True except Exception: @@ -120,8 +125,21 @@ async def summary( if amounts is not None: print(f"Total chia farmed: {amounts['farmed_amount'] / units['chia']}") print(f"User transaction fees: {amounts['fee_amount'] / units['chia']}") - print(f"Block rewards: {(amounts['farmer_reward_amount'] + amounts['pool_reward_amount']) / units['chia']}") - print(f"Last height farmed: {amounts['last_height_farmed']}") + if include_pool_rewards: + print(f"Farmer rewards: {amounts['farmer_reward_amount'] / units['chia']}") + print(f"Pool rewards: {amounts['pool_reward_amount'] / units['chia']}") + print(f"Total rewards: {(amounts['farmer_reward_amount'] + amounts['pool_reward_amount']) / units['chia']}") + if blockchain_state is not None and blockchain_state["peak"] is not None: + peak_height = blockchain_state["peak"].height + blocks_since_last_farm = peak_height - amounts["last_height_farmed"] + print(f"Current/Last height farmed: {peak_height}/{amounts['last_height_farmed']}") + print(f"Blocks since last farmed: {blocks_since_last_farm}") + print( + f"Time since last farmed: {format_minutes(int((blocks_since_last_farm * SECONDS_PER_BLOCK) / 60))}" + ) + else: + print(f"Block rewards: {(amounts['farmer_reward_amount'] + amounts['pool_reward_amount']) / units['chia']}") + print(f"Last height farmed: {amounts['last_height_farmed']}") class PlotStats: total_plot_size = 0 diff --git a/chia/cmds/keys_funcs.py b/chia/cmds/keys_funcs.py index 5b0eef7b7a3f..c4ad0053f7d5 100644 --- a/chia/cmds/keys_funcs.py +++ b/chia/cmds/keys_funcs.py @@ -743,11 +743,10 @@ def derive_child_key( if non_observer_derivation: assert current_sk is not None # semantics above guarantee this current_sk = _derive_path(current_sk, path_indices) + elif current_sk is not None: + current_sk = _derive_path_unhardened(current_sk, path_indices) else: - if current_sk is not None: - current_sk = _derive_path_unhardened(current_sk, path_indices) - else: - current_pk = _derive_pk_unhardened(current_pk, path_indices) + current_pk = _derive_pk_unhardened(current_pk, path_indices) derivation_root_sk = current_sk derivation_root_pk = current_pk @@ -768,13 +767,12 @@ def derive_child_key( assert derivation_root_sk is not None # semantics above guarantee this sk = _derive_path(derivation_root_sk, [i]) pk = sk.get_g1() + elif derivation_root_sk is not None: + sk = _derive_path_unhardened(derivation_root_sk, [i]) + pk = sk.get_g1() else: - if derivation_root_sk is not None: - sk = _derive_path_unhardened(derivation_root_sk, [i]) - pk = sk.get_g1() - else: - sk = None - pk = _derive_pk_unhardened(derivation_root_pk, [i]) + sk = None + pk = _derive_pk_unhardened(derivation_root_pk, [i]) hd_path: str = ( " (" + hd_path_root + str(i) + ("n" if non_observer_derivation else "") + ")" if show_hd_path else "" ) diff --git a/chia/cmds/plotnft_funcs.py 
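The new --include-pool-rewards branch of the farm summary derives freshness purely from block arithmetic. A worked example, with SECONDS_PER_BLOCK assumed to be Chia's target block time (4608 blocks per day, i.e. 18.75 seconds per block), matching the constant the hunk references:

SECONDS_PER_BLOCK = 24 * 3600 / 4608  # 18.75s target block time (assumed value)

peak_height = 5_000_000
last_height_farmed = 4_995_392
blocks_since_last_farm = peak_height - last_height_farmed              # 4608
minutes_behind = int(blocks_since_last_farm * SECONDS_PER_BLOCK / 60)  # 1440, roughly one day
print(f"Blocks since last farmed: {blocks_since_last_farm}")
print(f"Time since last farmed: ~{minutes_behind} minutes")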
b/chia/cmds/plotnft_funcs.py index 329b59a6fdac..6b3bd830776a 100644 --- a/chia/cmds/plotnft_funcs.py +++ b/chia/cmds/plotnft_funcs.py @@ -43,6 +43,8 @@ from chia.wallet.util.tx_config import DEFAULT_TX_CONFIG from chia.wallet.util.wallet_types import WalletType from chia.wallet.wallet_request_types import ( + GetTransaction, + GetWalletBalance, GetWallets, PWAbsorbRewards, PWJoinPool, @@ -120,7 +122,7 @@ async def create( start = time.time() while time.time() - start < 10: await asyncio.sleep(0.1) - tx = await wallet_info.client.get_transaction(tx_record.name) + tx = (await wallet_info.client.get_transaction(GetTransaction(tx_record.name))).transaction if len(tx.sent_to) > 0: print(transaction_submitted_msg(tx)) print(transaction_status_msg(wallet_info.fingerprint, tx_record.name)) @@ -161,8 +163,8 @@ async def pprint_pool_wallet_state( print(f"Target state: {PoolSingletonState(pool_wallet_info.target.state).name}") print(f"Target pool URL: {pool_wallet_info.target.pool_url}") if pool_wallet_info.current.state == PoolSingletonState.SELF_POOLING.value: - balances: dict[str, Any] = await wallet_client.get_wallet_balance(wallet_id) - balance = balances["confirmed_wallet_balance"] + balances = (await wallet_client.get_wallet_balance(GetWalletBalance(uint32(wallet_id)))).wallet_balance + balance = balances.confirmed_wallet_balance typ = WalletType(int(WalletType.POOLING_WALLET)) address_prefix, scale = wallet_coin_unit(typ, address_prefix) print(f"Claimable balance: {print_balance(balance, scale, address_prefix)}") @@ -285,7 +287,7 @@ async def submit_tx_with_confirmation( continue while time.time() - start < 10: await asyncio.sleep(0.1) - tx = await wallet_client.get_transaction(tx_record.name) + tx = (await wallet_client.get_transaction(GetTransaction(tx_record.name))).transaction if len(tx.sent_to) > 0: print(transaction_submitted_msg(tx)) print(transaction_status_msg(fingerprint, tx_record.name)) @@ -444,8 +446,10 @@ async def change_payout_instructions(launcher_id: bytes32, address: CliAddress, for pool_config in old_configs: if pool_config.launcher_id == launcher_id: id_found = True - pool_config = replace(pool_config, payout_instructions=puzzle_hash.hex()) - new_pool_configs.append(pool_config) + new_pool_config = replace(pool_config, payout_instructions=puzzle_hash.hex()) + else: + new_pool_config = pool_config + new_pool_configs.append(new_pool_config) if id_found: print(f"Launcher Id: {launcher_id.hex()} Found, Updating Config.") await update_pool_config(root_path, new_pool_configs) diff --git a/chia/cmds/rpc.py b/chia/cmds/rpc.py index d04cdb41e346..0b2f474ab037 100644 --- a/chia/cmds/rpc.py +++ b/chia/cmds/rpc.py @@ -13,7 +13,17 @@ from chia.cmds.cmd_classes import ChiaCliContext from chia.util.config import load_config -services: list[str] = ["crawler", "daemon", "farmer", "full_node", "harvester", "timelord", "wallet", "data_layer"] +services: list[str] = [ + "crawler", + "daemon", + "farmer", + "full_node", + "harvester", + "timelord", + "wallet", + "data_layer", + "solver", +] async def call_endpoint( diff --git a/chia/cmds/show_funcs.py b/chia/cmds/show_funcs.py index 845eb2bc43e3..8d5a68a7a117 100644 --- a/chia/cmds/show_funcs.py +++ b/chia/cmds/show_funcs.py @@ -39,7 +39,7 @@ async def print_blockchain_state(node_client: FullNodeRpcClient, config: dict[st if synced: print("Current Blockchain Status: Full Node Synced") - print("\nPeak: Hash:", bytes32(peak.header_hash) if peak is not None else "") + print("\nPeak: Hash:", peak.header_hash if peak is not None else "") elif 
peak is not None and sync_mode: sync_max_block = blockchain_state["sync"]["sync_tip_height"] sync_current_block = blockchain_state["sync"]["sync_progress_height"] @@ -48,7 +48,7 @@ async def print_blockchain_state(node_client: FullNodeRpcClient, config: dict[st f"({sync_max_block - sync_current_block} behind). " f"({sync_current_block * 100.0 / sync_max_block:2.2f}% synced)" ) - print("Peak: Hash:", bytes32(peak.header_hash) if peak is not None else "") + print("Peak: Hash:", peak.header_hash if peak is not None else "") elif peak is not None: print(f"Current Blockchain Status: Not Synced. Peak height: {peak.height}") else: @@ -59,7 +59,7 @@ async def print_blockchain_state(node_client: FullNodeRpcClient, config: dict[st if peak.is_transaction_block: peak_time = peak.timestamp else: - peak_hash = bytes32(peak.header_hash) + peak_hash = peak.header_hash curr = await node_client.get_block_record(peak_hash) while curr is not None and not curr.is_transaction_block: curr = await node_client.get_block_record(curr.prev_hash) @@ -88,7 +88,7 @@ async def print_blockchain_state(node_client: FullNodeRpcClient, config: dict[st curr = await node_client.get_block_record(curr.prev_hash) for b in added_blocks: - print(f"{b.height:>9} | {bytes32(b.header_hash)}") + print(f"{b.height:>9} | {b.header_hash}") else: print("Blockchain has no blocks yet") return False @@ -125,7 +125,7 @@ async def print_block_from_hash( cost = str(full_block.transactions_info.cost) tx_filter_hash: Union[str, bytes32] = "Not a transaction block" if full_block.foliage_transaction_block: - tx_filter_hash = bytes32(full_block.foliage_transaction_block.filter_hash) + tx_filter_hash = full_block.foliage_transaction_block.filter_hash fees: Any = block.fees else: block_time_string = "Not a transaction block" diff --git a/chia/cmds/solver.py b/chia/cmds/solver.py new file mode 100644 index 000000000000..4b34a5e7b14e --- /dev/null +++ b/chia/cmds/solver.py @@ -0,0 +1,33 @@ +from __future__ import annotations + +from typing import Optional + +import click + +from chia.cmds.cmd_classes import ChiaCliContext + + +@click.group("solver", help="Manage your solver") +def solver_cmd() -> None: + pass + + +@solver_cmd.command("get_state", help="Get current solver state") +@click.option( + "-sp", + "--solver-rpc-port", + help="Set the port where the Solver is hosting the RPC interface. 
See the rpc_port under solver in config.yaml", + type=int, + default=None, + show_default=True, +) +@click.pass_context +def get_state_cmd( + ctx: click.Context, + solver_rpc_port: Optional[int], +) -> None: + import asyncio + + from chia.cmds.solver_funcs import get_state + + asyncio.run(get_state(ChiaCliContext.set_default(ctx), solver_rpc_port)) diff --git a/chia/cmds/solver_funcs.py b/chia/cmds/solver_funcs.py new file mode 100644 index 000000000000..91da321ba5ae --- /dev/null +++ b/chia/cmds/solver_funcs.py @@ -0,0 +1,21 @@ +from __future__ import annotations + +import json +from typing import Optional + +from chia.cmds.cmd_classes import ChiaCliContext +from chia.cmds.cmds_util import get_any_service_client +from chia.solver.solver_rpc_client import SolverRpcClient + + +async def get_state( + ctx: ChiaCliContext, + solver_rpc_port: Optional[int] = None, +) -> None: + """Get solver state via RPC.""" + try: + async with get_any_service_client(SolverRpcClient, ctx.root_path, solver_rpc_port) as (client, _): + response = await client.get_state() + print(json.dumps(response, indent=2)) + except Exception as e: + print(f"Failed to get solver state: {e}") diff --git a/chia/cmds/wallet_funcs.py b/chia/cmds/wallet_funcs.py index 9cc877999db8..f111500df6e3 100644 --- a/chia/cmds/wallet_funcs.py +++ b/chia/cmds/wallet_funcs.py @@ -40,11 +40,15 @@ from chia.wallet.util.puzzle_decorator_type import PuzzleDecoratorType from chia.wallet.util.query_filter import HashFilter, TransactionTypeFilter from chia.wallet.util.transaction_type import CLAWBACK_INCOMING_TRANSACTION_TYPES, TransactionType +from chia.wallet.util.tx_config import DEFAULT_TX_CONFIG from chia.wallet.util.wallet_types import WalletType from chia.wallet.vc_wallet.vc_store import VCProofs from chia.wallet.wallet_coin_store import GetCoinRecords from chia.wallet.wallet_request_types import ( CATSpendResponse, + ClawbackPuzzleDecoratorOverride, + DeleteNotifications, + DeleteUnconfirmedTransactions, DIDFindLostDID, DIDGetDID, DIDGetInfo, @@ -52,8 +56,13 @@ DIDSetWalletName, DIDTransferDID, DIDUpdateMetadata, + ExtendDerivationIndex, FungibleAsset, + GetNextAddress, GetNotifications, + GetTransaction, + GetTransactions, + GetWalletBalance, GetWallets, NFTAddURI, NFTCalculateRoyalties, @@ -65,7 +74,13 @@ NFTSetNFTDID, NFTTransferNFT, RoyaltyAsset, + SendTransaction, SendTransactionResponse, + SignMessageByAddress, + SignMessageByAddressResponse, + SignMessageByID, + SignMessageByIDResponse, + SpendClawbackCoins, VCAddProofs, VCGet, VCGetList, @@ -178,10 +193,12 @@ async def get_unit_name_for_wallet_id( async def get_transaction( *, root_path: pathlib.Path, wallet_rpc_port: Optional[int], fingerprint: Optional[int], tx_id: str, verbose: int ) -> None: - async with get_wallet_client(root_path, wallet_rpc_port, fingerprint) as (wallet_client, fingerprint, config): + async with get_wallet_client(root_path, wallet_rpc_port, fingerprint) as (wallet_client, _, config): transaction_id = bytes32.from_hexstr(tx_id) address_prefix = selected_network_address_prefix(config) - tx: TransactionRecord = await wallet_client.get_transaction(transaction_id=transaction_id) + tx: TransactionRecord = ( + await wallet_client.get_transaction(GetTransaction(transaction_id=transaction_id)) + ).transaction try: wallet_type = await get_wallet_type(wallet_id=tx.wallet_id, wallet_client=wallet_client) @@ -229,9 +246,18 @@ async def get_transactions( [TransactionType.INCOMING_CLAWBACK_RECEIVE, TransactionType.INCOMING_CLAWBACK_SEND] ) ) - txs: list[TransactionRecord] = 
await wallet_client.get_transactions( - wallet_id, start=offset, end=(offset + limit), sort_key=sort_key, reverse=reverse, type_filter=type_filter - ) + txs = ( + await wallet_client.get_transactions( + GetTransactions( + uint32(wallet_id), + start=uint16(offset), + end=uint16(offset + limit), + sort_key=sort_key.name, + reverse=reverse, + type_filter=type_filter, + ) + ) + ).transactions address_prefix = selected_network_address_prefix(config) if len(txs) == 0: @@ -264,7 +290,9 @@ async def get_transactions( if len(coin_records["coin_records"]) > 0: coin_record = coin_records["coin_records"][0] else: - j -= 1 + # Ignoring PLW2901 here: reassigning the loop variable is intentional, + # though restructuring the loop would be cleaner + j -= 1 # noqa: PLW2901 skipped += 1 continue print_transaction( @@ -311,7 +339,7 @@ async def send( ) -> list[TransactionRecord]: async with get_wallet_client(root_path, wallet_rpc_port, fp) as (wallet_client, fingerprint, config): if memo is None: - memos = None + memos = [] else: memos = [memo] @@ -340,23 +368,29 @@ async def send( if typ == WalletType.STANDARD_WALLET: print("Submitting transaction...") res: Union[CATSpendResponse, SendTransactionResponse] = await wallet_client.send_transaction( - wallet_id, - final_amount, - address.original_address, - CMDTXConfigLoader( + SendTransaction( + wallet_id=uint32(wallet_id), + amount=final_amount, + address=address.original_address, + fee=fee, + memos=memos, + push=push, + puzzle_decorator=( + [ + ClawbackPuzzleDecoratorOverride( + PuzzleDecoratorType.CLAWBACK.name, clawback_timelock=uint64(clawback_time_lock) + ) + ] + if clawback_time_lock > 0 + else None + ), + ), + tx_config=CMDTXConfigLoader( min_coin_amount=min_coin_amount, max_coin_amount=max_coin_amount, excluded_coin_ids=list(excluded_coin_ids), reuse_puzhash=reuse_puzhash, ).to_tx_config(mojo_per_unit, config, fingerprint), - fee, - memos, - puzzle_decorator_override=( - [{"decorator": PuzzleDecoratorType.CLAWBACK.name, "clawback_timelock": clawback_time_lock}] - if clawback_time_lock > 0 - else None - ), - push=push, timelock_info=condition_valid_times, ) elif typ in {WalletType.CAT, WalletType.CRCAT, WalletType.RCAT}: @@ -385,7 +419,7 @@ async def send( start = time.time() while time.time() - start < 10: await asyncio.sleep(0.1) - tx = await wallet_client.get_transaction(tx_id) + tx = (await wallet_client.get_transaction(GetTransaction(tx_id))).transaction if len(tx.sent_to) > 0: print(transaction_submitted_msg(tx)) print(transaction_status_msg(fingerprint, tx_id)) @@ -402,7 +436,7 @@ async def get_address( root_path: pathlib.Path, wallet_rpc_port: Optional[int], fp: Optional[int], wallet_id: int, new_address: bool ) -> None: async with get_wallet_client(root_path, wallet_rpc_port, fp) as (wallet_client, _, _): - res = await wallet_client.get_next_address(wallet_id, new_address) + res = (await wallet_client.get_next_address(GetNextAddress(uint32(wallet_id), new_address))).address print(res) @@ -410,14 +444,14 @@ async def delete_unconfirmed_transactions( root_path: pathlib.Path, wallet_rpc_port: Optional[int], fp: Optional[int], wallet_id: int ) -> None: async with get_wallet_client(root_path, wallet_rpc_port, fp) as (wallet_client, fingerprint, _): - await wallet_client.delete_unconfirmed_transactions(wallet_id) + await wallet_client.delete_unconfirmed_transactions(DeleteUnconfirmedTransactions(uint32(wallet_id))) print(f"Successfully deleted all unconfirmed transactions for wallet id {wallet_id} on key {fingerprint}") async def get_derivation_index(root_path:
pathlib.Path, wallet_rpc_port: Optional[int], fp: Optional[int]) -> None: async with get_wallet_client(root_path, wallet_rpc_port, fp) as (wallet_client, _, _): res = await wallet_client.get_current_derivation_index() - print(f"Last derivation index: {res}") + print(f"Last derivation index: {res.index}") async def update_derivation_index( @@ -425,8 +459,8 @@ async def update_derivation_index( ) -> None: async with get_wallet_client(root_path, wallet_rpc_port, fp) as (wallet_client, _, _): print("Updating derivation index... This may take a while.") - res = await wallet_client.extend_derivation_index(index) - print(f"Updated derivation index: {res}") + res = await wallet_client.extend_derivation_index(ExtendDerivationIndex(uint32(index))) + print(f"Updated derivation index: {res.index}") print("Your balances may take a while to update.") @@ -435,13 +469,18 @@ async def add_token( ) -> None: async with get_wallet_client(root_path, wallet_rpc_port, fp) as (wallet_client, fingerprint, _): existing_info: Optional[tuple[Optional[uint32], str]] = await wallet_client.cat_asset_id_to_name(asset_id) - if existing_info is None or existing_info[0] is None: + if existing_info is None: + wallet_id = None + old_name = None + else: + wallet_id, old_name = existing_info + + if wallet_id is None: response = await wallet_client.create_wallet_for_existing_cat(asset_id) wallet_id = response["wallet_id"] await wallet_client.set_cat_name(wallet_id, token_name) print(f"Successfully added {token_name} with wallet id {wallet_id} on key {fingerprint}") else: - wallet_id, old_name = existing_info await wallet_client.set_cat_name(wallet_id, token_name) print(f"Successfully renamed {old_name} with wallet_id {wallet_id} on key {fingerprint} to {token_name}") @@ -939,14 +978,14 @@ async def print_balances( # A future RPC update may split them apart, but for now we'll show the first 32 bytes (64 chars) asset_id = summary.data[:64] wallet_id = summary.id - balances = await wallet_client.get_wallet_balance(wallet_id) + balances = (await wallet_client.get_wallet_balance(GetWalletBalance(uint32(wallet_id)))).wallet_balance typ = WalletType(int(summary.type)) address_prefix, scale = wallet_coin_unit(typ, address_prefix) - total_balance: str = print_balance(balances["confirmed_wallet_balance"], scale, address_prefix) + total_balance: str = print_balance(balances.confirmed_wallet_balance, scale, address_prefix) unconfirmed_wallet_balance: str = print_balance( - balances["unconfirmed_wallet_balance"], scale, address_prefix + balances.unconfirmed_wallet_balance, scale, address_prefix ) - spendable_balance: str = print_balance(balances["spendable_balance"], scale, address_prefix) + spendable_balance: str = print_balance(balances.spendable_balance, scale, address_prefix) my_did: Optional[str] = None ljust = 23 if typ == WalletType.CRCAT: @@ -955,9 +994,10 @@ async def print_balances( print(f"{summary.name}:") print(f"{indent}{'-Total Balance:'.ljust(ljust)} {total_balance}") if typ == WalletType.CRCAT: + assert balances.pending_approval_balance is not None print( f"{indent}{'-Balance Pending VC Approval:'.ljust(ljust)} " - f"{print_balance(balances['pending_approval_balance'], scale, address_prefix)}" + f"{print_balance(balances.pending_approval_balance, scale, address_prefix)}" ) print(f"{indent}{'-Pending Total Balance:'.ljust(ljust)} {unconfirmed_wallet_balance}") print(f"{indent}{'-Spendable:'.ljust(ljust)} {spendable_balance}") @@ -1152,7 +1192,7 @@ async def transfer_did( if push: print(f"Successfully transferred DID to 
{target_address}") print(f"Transaction ID: {response.transaction_id.hex()}") - print(f"Transaction: {response.transaction.to_json_dict_convenience(config)}") + print(f"Transaction: {response.transaction.to_json_dict()}") return response.transactions except Exception as e: print(f"Failed to transfer DID: {e}") @@ -1237,9 +1277,8 @@ async def mint_nft( raise ValueError("Disabling DID ownership is not supported for this NFT wallet, it does have a DID") else: did_id = None - else: - if not wallet_has_did: - did_id = "" + elif not wallet_has_did: + did_id = "" mint_response = await wallet_client.mint_nft( request=NFTMintNFTRequest( @@ -1578,9 +1617,11 @@ async def delete_notifications( ) -> None: async with get_wallet_client(root_path, wallet_rpc_port, fp) as (wallet_client, _, _): if delete_all: - print(f"Success: {await wallet_client.delete_notifications()}") + await wallet_client.delete_notifications(DeleteNotifications()) + print("Success!") else: - print(f"Success: {await wallet_client.delete_notifications(ids=list(ids))}") + await wallet_client.delete_notifications(DeleteNotifications(ids=list(ids))) + print("Success!") async def sign_message( @@ -1595,31 +1636,32 @@ async def sign_message( nft_id: Optional[CliAddress] = None, ) -> None: async with get_wallet_client(root_path, wallet_rpc_port, fp) as (wallet_client, _, _): + response: Union[SignMessageByAddressResponse, SignMessageByIDResponse] if addr_type == AddressType.XCH: if address is None: print("Address is required for XCH address type.") return - pubkey, signature, signing_mode = await wallet_client.sign_message_by_address( - address.original_address, message + response = await wallet_client.sign_message_by_address( + SignMessageByAddress(address.original_address, message) ) elif addr_type == AddressType.DID: if did_id is None: print("DID id is required for DID address type.") return - pubkey, signature, signing_mode = await wallet_client.sign_message_by_id(did_id.original_address, message) + response = await wallet_client.sign_message_by_id(SignMessageByID(did_id.original_address, message)) elif addr_type == AddressType.NFT: if nft_id is None: print("NFT id is required for NFT address type.") return - pubkey, signature, signing_mode = await wallet_client.sign_message_by_id(nft_id.original_address, message) + response = await wallet_client.sign_message_by_id(SignMessageByID(nft_id.original_address, message)) else: print("Invalid wallet type.") return print("") print(f"Message: {message}") - print(f"Public Key: {pubkey}") - print(f"Signature: {signature}") - print(f"Signing Mode: {signing_mode}") + print(f"Public Key: {response.pubkey!s}") + print(f"Signature: {response.signature!s}") + print(f"Signing Mode: {response.signing_mode}") async def spend_clawback( @@ -1644,14 +1686,12 @@ async def spend_clawback( print("Batch fee cannot be negative.") return [] response = await wallet_client.spend_clawback_coins( - tx_ids, - fee, - force, - push=push, + SpendClawbackCoins(coin_ids=tx_ids, fee=fee, force=force, push=push), + tx_config=DEFAULT_TX_CONFIG, timelock_info=condition_valid_times, ) print(str(response)) - return [TransactionRecord.from_json_dict_convenience(tx) for tx in response["transactions"]] + return response.transactions async def mint_vc( @@ -1858,7 +1898,7 @@ async def approve_r_cats( push: bool, condition_valid_times: ConditionValidTimes, ) -> list[TransactionRecord]: - async with get_wallet_client(root_path, wallet_rpc_port, fingerprint) as (wallet_client, fingerprint, config): + async with 
get_wallet_client(root_path, wallet_rpc_port, fingerprint) as (wallet_client, fp, config): if wallet_client is None: return txs = await wallet_client.crcat_approve_pending( @@ -1869,7 +1909,7 @@ async def approve_r_cats( min_coin_amount=min_coin_amount, max_coin_amount=max_coin_amount, reuse_puzhash=reuse, - ).to_tx_config(units["cat"], config, fingerprint), + ).to_tx_config(units["cat"], config, fp), push=push, timelock_info=condition_valid_times, ) diff --git a/chia/consensus/block_body_validation.py b/chia/consensus/block_body_validation.py index 81898af94782..c113184cf9c6 100644 --- a/chia/consensus/block_body_validation.py +++ b/chia/consensus/block_body_validation.py @@ -116,9 +116,9 @@ def include_spends(self, conds: Optional[SpendBundleConditions], block: FullBloc timestamp = block.foliage_transaction_block.timestamp for spend in conds.spends: spend_coin_id = bytes32(spend.coin_id) - self.removals_since_fork[spend_coin_id] = ForkRem(bytes32(spend.puzzle_hash), block.height) + self.removals_since_fork[spend_coin_id] = ForkRem(spend.puzzle_hash, block.height) for puzzle_hash, amount, hint in spend.create_coin: - coin = Coin(spend_coin_id, bytes32(puzzle_hash), uint64(amount)) + coin = Coin(spend_coin_id, puzzle_hash, uint64(amount)) same_as_parent = coin.puzzle_hash == spend.puzzle_hash and amount == spend.coin_amount self.additions_since_fork[coin.name()] = ForkAdd( coin, block.height, timestamp, hint=hint, is_coinbase=False, same_as_parent=same_as_parent @@ -137,8 +137,8 @@ def include_block( timestamp = block.foliage_transaction_block.timestamp spent_coins: dict[bytes32, Coin] = {} for spend_id, spend in removals: - spent_coins[bytes32(spend_id)] = spend - self.removals_since_fork[bytes32(spend_id)] = ForkRem(bytes32(spend.puzzle_hash), block.height) + spent_coins[spend_id] = spend + self.removals_since_fork[spend_id] = ForkRem(spend.puzzle_hash, block.height) for coin, hint in additions: parent = spent_coins.get(coin.parent_coin_info) assert parent is not None @@ -334,9 +334,8 @@ async def validate_block_body( if block.transactions_generator is not None: if std_hash(bytes(block.transactions_generator)) != block.transactions_info.generator_root: return Err.INVALID_TRANSACTIONS_GENERATOR_HASH - else: - if block.transactions_info.generator_root != bytes([0] * 32): - return Err.INVALID_TRANSACTIONS_GENERATOR_HASH + elif block.transactions_info.generator_root != bytes([0] * 32): + return Err.INVALID_TRANSACTIONS_GENERATOR_HASH # 8a. 
The generator_ref_list must be the hash of the serialized bytes of # the generator ref list for this block (or 'one' bytes [0x01] if no generator) @@ -382,9 +381,9 @@ async def validate_block_body( for spend in conds.spends: removals.append(bytes32(spend.coin_id)) - removals_puzzle_dic[bytes32(spend.coin_id)] = bytes32(spend.puzzle_hash) + removals_puzzle_dic[spend.coin_id] = spend.puzzle_hash for puzzle_hash, amount, _ in spend.create_coin: - c = Coin(bytes32(spend.coin_id), bytes32(puzzle_hash), uint64(amount)) + c = Coin(spend.coin_id, puzzle_hash, uint64(amount)) additions.append((c, c.name())) else: assert conds is None diff --git a/chia/consensus/block_creation.py b/chia/consensus/block_creation.py index 7ad94421aea2..45b14f63feb9 100644 --- a/chia/consensus/block_creation.py +++ b/chia/consensus/block_creation.py @@ -354,17 +354,16 @@ def create_unfinished_block( else: if new_sub_slot: rc_sp_hash = finished_sub_slots[-1].reward_chain.get_hash() + elif is_genesis: + rc_sp_hash = constants.GENESIS_CHALLENGE else: - if is_genesis: - rc_sp_hash = constants.GENESIS_CHALLENGE - else: - assert prev_block is not None - assert blocks is not None - curr = prev_block - while not curr.first_in_sub_slot: - curr = blocks.block_record(curr.prev_hash) - assert curr.finished_reward_slot_hashes is not None - rc_sp_hash = curr.finished_reward_slot_hashes[-1] + assert prev_block is not None + assert blocks is not None + curr = prev_block + while not curr.first_in_sub_slot: + curr = blocks.block_record(curr.prev_hash) + assert curr.finished_reward_slot_hashes is not None + rc_sp_hash = curr.finished_reward_slot_hashes[-1] signage_point = SignagePoint(None, None, None, None) cc_sp_signature: Optional[G2Element] = get_plot_signature(cc_sp_hash, proof_of_space.plot_public_key) diff --git a/chia/consensus/block_header_validation.py b/chia/consensus/block_header_validation.py index 9fb5174a8d8b..3ec214a04736 100644 --- a/chia/consensus/block_header_validation.py +++ b/chia/consensus/block_header_validation.py @@ -138,13 +138,12 @@ def validate_unfinished_header_block( if not curr.finished_challenge_slot_hashes[-1] == challenge_hash: print(curr.finished_challenge_slot_hashes[-1], challenge_hash) return None, ValidationError(Err.INVALID_PREV_CHALLENGE_SLOT_HASH) - else: - # 2c. check sub-slot challenge hash for empty slot - if ( - not header_block.finished_sub_slots[finished_sub_slot_n - 1].challenge_chain.get_hash() - == challenge_hash - ): - return None, ValidationError(Err.INVALID_PREV_CHALLENGE_SLOT_HASH) + # 2c. check sub-slot challenge hash for empty slot + elif ( + not header_block.finished_sub_slots[finished_sub_slot_n - 1].challenge_chain.get_hash() + == challenge_hash + ): + return None, ValidationError(Err.INVALID_PREV_CHALLENGE_SLOT_HASH) if genesis_block: # 2d. 
Validate that genesis block has no ICC @@ -176,20 +175,19 @@ def validate_unfinished_header_block( icc_vdf_input = ClassgroupElement.get_default_element() else: icc_vdf_input = prev_b.infused_challenge_vdf_output - else: - # This is not the first sub slot after the last block, so we might not have an ICC - if ( - header_block.finished_sub_slots[finished_sub_slot_n - 1].reward_chain.deficit - < constants.MIN_BLOCKS_PER_CHALLENGE_BLOCK - ): - finished_ss = header_block.finished_sub_slots[finished_sub_slot_n - 1] - assert finished_ss.infused_challenge_chain is not None + # This is not the first sub slot after the last block, so we might not have an ICC + elif ( + header_block.finished_sub_slots[finished_sub_slot_n - 1].reward_chain.deficit + < constants.MIN_BLOCKS_PER_CHALLENGE_BLOCK + ): + finished_ss = header_block.finished_sub_slots[finished_sub_slot_n - 1] + assert finished_ss.infused_challenge_chain is not None - # Only sets the icc iff the previous sub slots deficit is 4 or less - icc_challenge_hash = finished_ss.infused_challenge_chain.get_hash() - icc_iters_committed = prev_b.sub_slot_iters - icc_iters_proof = icc_iters_committed - icc_vdf_input = ClassgroupElement.get_default_element() + # Only sets the icc iff the previous sub slots deficit is 4 or less + icc_challenge_hash = finished_ss.infused_challenge_chain.get_hash() + icc_iters_committed = prev_b.sub_slot_iters + icc_iters_proof = icc_iters_committed + icc_vdf_input = ClassgroupElement.get_default_element() # 2e. Validate that there is not icc iff icc_challenge hash is None assert (sub_slot.infused_challenge_chain is None) == (icc_challenge_hash is None) @@ -241,10 +239,9 @@ def validate_unfinished_header_block( != sub_slot.challenge_chain.infused_challenge_chain_sub_slot_hash ): return None, ValidationError(Err.INVALID_ICC_HASH_CC) - else: - # 2h. Check infused challenge sub-slot hash not included for other deficits - if sub_slot.challenge_chain.infused_challenge_chain_sub_slot_hash is not None: - return None, ValidationError(Err.INVALID_ICC_HASH_CC) + # 2h. Check infused challenge sub-slot hash not included for other deficits + elif sub_slot.challenge_chain.infused_challenge_chain_sub_slot_hash is not None: + return None, ValidationError(Err.INVALID_ICC_HASH_CC) # 2i. Check infused challenge sub-slot hash in reward sub-slot if ( @@ -396,10 +393,9 @@ def validate_unfinished_header_block( f"{sub_slot.reward_chain.deficit}", ), ) - else: - # 2t. Otherwise, deficit stays the same at the slot ends, cannot reset until 0 - if sub_slot.reward_chain.deficit != prev_b.deficit: - return None, ValidationError(Err.INVALID_DEFICIT, "deficit is wrong at slot end") + # 2t. Otherwise, deficit stays the same at the slot ends, cannot reset until 0 + elif sub_slot.reward_chain.deficit != prev_b.deficit: + return None, ValidationError(Err.INVALID_DEFICIT, "deficit is wrong at slot end") # 3. 
Check sub-epoch summary # Note that the subepoch summary is the summary of the previous subepoch (not the one that just finished) @@ -635,16 +631,15 @@ def validate_unfinished_header_block( return None, ValidationError(Err.INVALID_RC_SP_VDF) if new_sub_slot: rc_sp_hash = header_block.finished_sub_slots[-1].reward_chain.get_hash() + elif genesis_block: + rc_sp_hash = constants.GENESIS_CHALLENGE else: - if genesis_block: - rc_sp_hash = constants.GENESIS_CHALLENGE - else: - assert prev_b is not None - curr = prev_b - while not curr.first_in_sub_slot: - curr = blocks.block_record(curr.prev_hash) - assert curr.finished_reward_slot_hashes is not None - rc_sp_hash = curr.finished_reward_slot_hashes[-1] + assert prev_b is not None + curr = prev_b + while not curr.first_in_sub_slot: + curr = blocks.block_record(curr.prev_hash) + assert curr.finished_reward_slot_hashes is not None + rc_sp_hash = curr.finished_reward_slot_hashes[-1] # 12. Check reward chain sp signature if not AugSchemeMPL.verify( @@ -761,25 +756,24 @@ def validate_unfinished_header_block( != constants.GENESIS_PRE_FARM_FARMER_PUZZLE_HASH ): return None, ValidationError(Err.INVALID_PREFARM) + # 20b. If pospace has a pool pk, check pool target signature. Should not check this for genesis block. + elif header_block.reward_chain_block.proof_of_space.pool_public_key is not None: + assert header_block.reward_chain_block.proof_of_space.pool_contract_puzzle_hash is None + assert header_block.foliage.foliage_block_data.pool_signature is not None + if not AugSchemeMPL.verify( + header_block.reward_chain_block.proof_of_space.pool_public_key, + bytes(header_block.foliage.foliage_block_data.pool_target), + header_block.foliage.foliage_block_data.pool_signature, + ): + return None, ValidationError(Err.INVALID_POOL_SIGNATURE) else: - # 20b. If pospace has a pool pk, heck pool target signature. Should not check this for genesis block. - if header_block.reward_chain_block.proof_of_space.pool_public_key is not None: - assert header_block.reward_chain_block.proof_of_space.pool_contract_puzzle_hash is None - assert header_block.foliage.foliage_block_data.pool_signature is not None - if not AugSchemeMPL.verify( - header_block.reward_chain_block.proof_of_space.pool_public_key, - bytes(header_block.foliage.foliage_block_data.pool_target), - header_block.foliage.foliage_block_data.pool_signature, - ): - return None, ValidationError(Err.INVALID_POOL_SIGNATURE) - else: - # 20c. Otherwise, the plot is associated with a contract puzzle hash, not a public key - assert header_block.reward_chain_block.proof_of_space.pool_contract_puzzle_hash is not None - if ( - header_block.foliage.foliage_block_data.pool_target.puzzle_hash - != header_block.reward_chain_block.proof_of_space.pool_contract_puzzle_hash - ): - return None, ValidationError(Err.INVALID_POOL_TARGET) + # 20c. Otherwise, the plot is associated with a contract puzzle hash, not a public key + assert header_block.reward_chain_block.proof_of_space.pool_contract_puzzle_hash is not None + if ( + header_block.foliage.foliage_block_data.pool_target.puzzle_hash + != header_block.reward_chain_block.proof_of_space.pool_contract_puzzle_hash + ): + return None, ValidationError(Err.INVALID_POOL_TARGET) # 21. Check extension data if applicable. None for mainnet. # 22. Check if foliage block is present @@ -928,18 +922,17 @@ def validate_finished_header_block( # 29. 
Check challenge chain infusion point VDF if new_sub_slot: cc_vdf_challenge = header_block.finished_sub_slots[-1].challenge_chain.get_hash() + # Not first block in slot + elif genesis_block: + # genesis block + cc_vdf_challenge = constants.GENESIS_CHALLENGE else: - # Not first block in slot - if genesis_block: - # genesis block - cc_vdf_challenge = constants.GENESIS_CHALLENGE - else: - assert prev_b is not None - # Not genesis block, go back to first block in slot - curr = prev_b - while curr.finished_challenge_slot_hashes is None: - curr = blocks.block_record(curr.prev_hash) - cc_vdf_challenge = curr.finished_challenge_slot_hashes[-1] + assert prev_b is not None + # Not genesis block, go back to first block in slot + curr = prev_b + while curr.finished_challenge_slot_hashes is None: + curr = blocks.block_record(curr.prev_hash) + cc_vdf_challenge = curr.finished_challenge_slot_hashes[-1] cc_target_vdf_info = VDFInfo( cc_vdf_challenge, @@ -1047,9 +1040,8 @@ def validate_finished_header_block( icc_target_vdf_info, ): return None, ValidationError(Err.INVALID_ICC_VDF, "invalid icc proof") - else: - if header_block.infused_challenge_chain_ip_proof is not None: - return None, ValidationError(Err.INVALID_ICC_VDF) + elif header_block.infused_challenge_chain_ip_proof is not None: + return None, ValidationError(Err.INVALID_ICC_VDF) # 32. Check reward block hash if header_block.foliage.reward_block_hash != header_block.reward_chain_block.get_hash(): diff --git a/chia/consensus/blockchain.py b/chia/consensus/blockchain.py index 2117f963f308..00d9df5d1a4c 100644 --- a/chia/consensus/blockchain.py +++ b/chia/consensus/blockchain.py @@ -708,9 +708,8 @@ async def validate_unfinished_block_header( if block.transactions_generator is not None: if std_hash(bytes(block.transactions_generator)) != block.transactions_info.generator_root: return None, Err.INVALID_TRANSACTIONS_GENERATOR_HASH - else: - if block.transactions_info.generator_root != bytes([0] * 32): - return None, Err.INVALID_TRANSACTIONS_GENERATOR_HASH + elif block.transactions_info.generator_root != bytes([0] * 32): + return None, Err.INVALID_TRANSACTIONS_GENERATOR_HASH if ( block.foliage_transaction_block is None diff --git a/chia/consensus/default_constants.py b/chia/consensus/default_constants.py index 32654008213c..7d2b93fe5ad4 100644 --- a/chia/consensus/default_constants.py +++ b/chia/consensus/default_constants.py @@ -91,12 +91,12 @@ PLOT_FILTER_64_HEIGHT=uint32(15592000), # June 2033 PLOT_FILTER_32_HEIGHT=uint32(20643000), - PLOT_DIFFICULTY_INITIAL=uint8(2), - PLOT_DIFFICULTY_4_HEIGHT=uint32(0xFFFFFFFB), - PLOT_DIFFICULTY_5_HEIGHT=uint32(0xFFFFFFFC), - PLOT_DIFFICULTY_6_HEIGHT=uint32(0xFFFFFFFD), - PLOT_DIFFICULTY_7_HEIGHT=uint32(0xFFFFFFFE), - PLOT_DIFFICULTY_8_HEIGHT=uint32(0xFFFFFFFF), + PLOT_STRENGTH_INITIAL=uint8(2), + PLOT_STRENGTH_4_HEIGHT=uint32(0xFFFFFFFB), + PLOT_STRENGTH_5_HEIGHT=uint32(0xFFFFFFFC), + PLOT_STRENGTH_6_HEIGHT=uint32(0xFFFFFFFD), + PLOT_STRENGTH_7_HEIGHT=uint32(0xFFFFFFFE), + PLOT_STRENGTH_8_HEIGHT=uint32(0xFFFFFFFF), ) diff --git a/chia/consensus/generator_tools.py b/chia/consensus/generator_tools.py index 3de68924ce9f..4b4906d3dfd3 100644 --- a/chia/consensus/generator_tools.py +++ b/chia/consensus/generator_tools.py @@ -65,6 +65,6 @@ def tx_removals_and_additions(results: Optional[SpendBundleConditions]) -> tuple for spend in results.spends: removals.append(bytes32(spend.coin_id)) for puzzle_hash, amount, _ in spend.create_coin: - additions.append(Coin(bytes32(spend.coin_id), bytes32(puzzle_hash), uint64(amount))) 
+ additions.append(Coin(spend.coin_id, puzzle_hash, uint64(amount))) return removals, additions diff --git a/chia/consensus/get_block_challenge.py b/chia/consensus/get_block_challenge.py index de4a6952f040..9064218d104f 100644 --- a/chia/consensus/get_block_challenge.py +++ b/chia/consensus/get_block_challenge.py @@ -72,34 +72,33 @@ def get_block_challenge( else: # No overflow, new slot with a new challenge challenge = header_block.finished_sub_slots[-1].challenge_chain.get_hash() + elif genesis_block: + challenge = constants.GENESIS_CHALLENGE else: - if genesis_block: - challenge = constants.GENESIS_CHALLENGE - else: - if overflow: - if skip_overflow_last_ss_validation: - # Overflow infusion without the new slot, so get the last challenge - challenges_to_look_for = 1 - else: - # Overflow infusion, so get the second to last challenge. skip_overflow_last_ss_validation is False, - # Which means no sub slots are omitted - challenges_to_look_for = 2 - else: + if overflow: + if skip_overflow_last_ss_validation: + # Overflow infusion without the new slot, so get the last challenge challenges_to_look_for = 1 - reversed_challenge_hashes: list[bytes32] = [] - curr: BlockRecord = blocks.block_record(header_block.prev_header_hash) - while len(reversed_challenge_hashes) < challenges_to_look_for: - if curr.first_in_sub_slot: - assert curr.finished_challenge_slot_hashes is not None - reversed_challenge_hashes += reversed(curr.finished_challenge_slot_hashes) - if len(reversed_challenge_hashes) >= challenges_to_look_for: - break - if curr.height == 0: - assert curr.finished_challenge_slot_hashes is not None - assert len(curr.finished_challenge_slot_hashes) > 0 + else: + # Overflow infusion, so get the second to last challenge. skip_overflow_last_ss_validation is False, + # Which means no sub slots are omitted + challenges_to_look_for = 2 + else: + challenges_to_look_for = 1 + reversed_challenge_hashes: list[bytes32] = [] + curr: BlockRecord = blocks.block_record(header_block.prev_header_hash) + while len(reversed_challenge_hashes) < challenges_to_look_for: + if curr.first_in_sub_slot: + assert curr.finished_challenge_slot_hashes is not None + reversed_challenge_hashes += reversed(curr.finished_challenge_slot_hashes) + if len(reversed_challenge_hashes) >= challenges_to_look_for: break - curr = blocks.block_record(curr.prev_hash) - challenge = reversed_challenge_hashes[challenges_to_look_for - 1] + if curr.height == 0: + assert curr.finished_challenge_slot_hashes is not None + assert len(curr.finished_challenge_slot_hashes) > 0 + break + curr = blocks.block_record(curr.prev_hash) + challenge = reversed_challenge_hashes[challenges_to_look_for - 1] return challenge diff --git a/chia/consensus/pos_quality.py b/chia/consensus/pos_quality.py index 5527a3bb5c6a..c6f3bc403fb4 100644 --- a/chia/consensus/pos_quality.py +++ b/chia/consensus/pos_quality.py @@ -1,13 +1,28 @@ from __future__ import annotations +from chia_rs import PlotSize from chia_rs.sized_ints import uint64 # The actual space in bytes of a plot, is _expected_plot_size(k) * UI_ACTUAL_SPACE_CONSTANT_FACTOR # This is not used in consensus, only for display purposes UI_ACTUAL_SPACE_CONSTANT_FACTOR = 0.78 +# TODO: todo_v2_plots these values are preliminary. 
When the plotter is complete, +# replace this table with a closed form formula +v2_plot_sizes: dict[int, uint64] = { + 16: uint64(222_863), + 18: uint64(1_048_737), + 20: uint64(4_824_084), + 22: uint64(21_812_958), + 24: uint64(97_318_160), + 26: uint64(429_539_960), + 28: uint64(1_879_213_114), + 30: uint64(8_161_097_549), + 32: uint64(35_221_370_574), +} -def _expected_plot_size(k: int) -> uint64: + +def _expected_plot_size(size: PlotSize) -> uint64: """ Given the plot size parameter k (which is between 32 and 59), computes the expected size of the plot in bytes (times a constant factor). This is based on efficient encoding @@ -16,4 +31,15 @@ def _expected_plot_size(k: int) -> uint64: is necessary to store the entries in the plot. """ - return uint64(((2 * k) + 1) * (2 ** (k - 1))) + k: int + if size.size_v1 is not None: + k = size.size_v1 + return uint64(((2 * k) + 1) * (2 ** (k - 1))) + else: + assert size.size_v2 is not None + k = size.size_v2 + if k in v2_plot_sizes: + return v2_plot_sizes[k] + else: + # TODO: todo_v2_plots support test plots with lower k-values + return uint64(0) diff --git a/chia/consensus/pot_iterations.py b/chia/consensus/pot_iterations.py index b1dbba146edd..f61a5feca883 100644 --- a/chia/consensus/pot_iterations.py +++ b/chia/consensus/pot_iterations.py @@ -110,17 +110,18 @@ def calculate_iterations_quality( """ if size.size_v1 is not None: assert size.size_v2 is None - sp_quality_string: bytes32 = std_hash(quality_string + cc_sp_output_hash) phase_out = calculate_phase_out(constants, ssi, prev_transaction_block_height) - iters = uint64( - ( - int(difficulty) - * int(constants.DIFFICULTY_CONSTANT_FACTOR) - * int.from_bytes(sp_quality_string, "big", signed=False) - // (int(pow(2, 256)) * int(_expected_plot_size(size.size_v1))) - ) - + phase_out - ) - return max(iters, uint64(1)) else: - raise NotImplementedError + phase_out = uint64(0) + + sp_quality_string: bytes32 = std_hash(quality_string + cc_sp_output_hash) + iters = uint64( + ( + int(difficulty) + * int(constants.DIFFICULTY_CONSTANT_FACTOR) + * int.from_bytes(sp_quality_string, "big", signed=False) + // (int(pow(2, 256)) * int(_expected_plot_size(size))) + ) + + phase_out + ) + return max(iters, uint64(1)) diff --git a/chia/daemon/keychain_proxy.py b/chia/daemon/keychain_proxy.py index 2f89dc0d5720..bff3124bf56d 100644 --- a/chia/daemon/keychain_proxy.py +++ b/chia/daemon/keychain_proxy.py @@ -384,6 +384,11 @@ async def get_key_for_fingerprint( self.log.error(f"{err}") raise KeychainMalformedResponse(f"{err}") elif private: + if ent is None: + err = f"Missing ent in {response.get('command')} response" + self.log.error(f"{err}") + raise KeychainMalformedResponse(f"{err}") + mnemonic = bytes_to_mnemonic(bytes.fromhex(ent)) seed = mnemonic_to_seed(mnemonic) private_key = AugSchemeMPL.key_gen(seed) diff --git a/chia/daemon/server.py b/chia/daemon/server.py index c03fdecf5ff9..0189650cd47b 100644 --- a/chia/daemon/server.py +++ b/chia/daemon/server.py @@ -1373,9 +1373,8 @@ async def register_service(self, websocket: WebSocketResponse, request: dict[str "service": service, "queue": self.extract_plot_queue(), } - else: - if self.ping_job is None: - self.ping_job = create_referenced_task(self.ping_task()) + elif self.ping_job is None: + self.ping_job = create_referenced_task(self.ping_task()) self.log.info(f"registered for service {service}") log.info(f"{response}") return response diff --git a/chia/data_layer/data_layer_service.py b/chia/data_layer/data_layer_service.py new file mode 100644 index 
000000000000..310d672bc7c5 --- /dev/null +++ b/chia/data_layer/data_layer_service.py @@ -0,0 +1,8 @@ +from __future__ import annotations + +from chia.data_layer.data_layer import DataLayer +from chia.data_layer.data_layer_api import DataLayerAPI +from chia.data_layer.data_layer_rpc_api import DataLayerRpcApi +from chia.server.start_service import Service + +DataLayerService = Service[DataLayer, DataLayerAPI, DataLayerRpcApi] diff --git a/chia/data_layer/data_layer_wallet.py b/chia/data_layer/data_layer_wallet.py index 27a2e755bb65..6a6abb9a931e 100644 --- a/chia/data_layer/data_layer_wallet.py +++ b/chia/data_layer/data_layer_wallet.py @@ -539,6 +539,7 @@ async def create_update_state_spend( confirmed_at_height=uint32(0), created_at_time=uint64(time.time()), to_puzzle_hash=new_puz_hash, + to_address=self.wallet_state_manager.encode_puzzle_hash(new_puz_hash), amount=uint64(singleton_record.lineage_proof.amount), fee_amount=fee, confirmed=False, @@ -590,15 +591,14 @@ async def generate_signed_transaction( if coins is None or len(coins) == 0: if launcher_id is None: raise ValueError("Not enough info to know which DL coin to send") + elif len(coins) != 1: + raise ValueError("The wallet can only send one DL coin at a time") else: - if len(coins) != 1: - raise ValueError("The wallet can only send one DL coin at a time") + record = await self.wallet_state_manager.dl_store.get_singleton_record(next(iter(coins)).name()) + if record is None: + raise ValueError("The specified coin is not a tracked DL") else: - record = await self.wallet_state_manager.dl_store.get_singleton_record(next(iter(coins)).name()) - if record is None: - raise ValueError("The specified coin is not a tracked DL") - else: - launcher_id = record.launcher_id + launcher_id = record.launcher_id if len(amounts) != 1 or len(puzzle_hashes) != 1: raise ValueError("The wallet can only send one DL coin to one place at a time") @@ -734,6 +734,7 @@ async def delete_mirror( confirmed_at_height=uint32(0), created_at_time=uint64(time.time()), to_puzzle_hash=new_puzhash, + to_address=self.wallet_state_manager.encode_puzzle_hash(new_puzhash), amount=uint64(mirror_coin.amount), fee_amount=fee, confirmed=False, @@ -1089,12 +1090,13 @@ async def finish_graftroot_solutions(offer: Offer, solver: Solver) -> Offer: # Create all of the new solutions new_spends: list[CoinSpend] = [] for spend in offer.coin_spends(): + spend_to_add = spend solution = Program.from_serialized(spend.solution) if match_dl_singleton(spend.puzzle_reveal)[0]: try: graftroot: Program = solution.at("rrffrf") except EvalError: - new_spends.append(spend) + new_spends.append(spend_to_add) continue mod, curried_args_prg = graftroot.uncurry() if mod == GRAFTROOT_DL_OFFERS: @@ -1137,9 +1139,9 @@ async def finish_graftroot_solutions(offer: Offer, solver: Solver) -> Offer: ] ) ) - new_spend: CoinSpend = spend.replace(solution=new_solution.to_serialized()) - spend = new_spend - new_spends.append(spend) + spend_to_add = spend.replace(solution=new_solution.to_serialized()) + + new_spends.append(spend_to_add) return Offer({}, WalletSpendBundle(new_spends, offer.aggregated_signature()), offer.driver_dict) diff --git a/chia/data_layer/data_store.py b/chia/data_layer/data_store.py index 58b162924e11..882579f69a5e 100644 --- a/chia/data_layer/data_store.py +++ b/chia/data_layer/data_store.py @@ -1818,13 +1818,12 @@ async def get_subscriptions(self) -> list[Subscription]: ) else: subscriptions.append(Subscription(store_id, [])) - else: - if url is not None and num_consecutive_failures is not None 
and ignore_till is not None: - new_servers_info = subscription.servers_info - new_servers_info.append(ServerInfo(url, num_consecutive_failures, ignore_till)) - new_subscription = replace(subscription, servers_info=new_servers_info) - subscriptions.remove(subscription) - subscriptions.append(new_subscription) + elif url is not None and num_consecutive_failures is not None and ignore_till is not None: + new_servers_info = subscription.servers_info + new_servers_info.append(ServerInfo(url, num_consecutive_failures, ignore_till)) + new_subscription = replace(subscription, servers_info=new_servers_info) + subscriptions.remove(subscription) + subscriptions.append(new_subscription) return subscriptions diff --git a/chia/server/start_data_layer.py b/chia/data_layer/start_data_layer.py similarity index 97% rename from chia/server/start_data_layer.py rename to chia/data_layer/start_data_layer.py index 7991546a8ea1..706c56300c05 100644 --- a/chia/server/start_data_layer.py +++ b/chia/data_layer/start_data_layer.py @@ -12,10 +12,10 @@ from chia.data_layer.data_layer import DataLayer from chia.data_layer.data_layer_api import DataLayerAPI from chia.data_layer.data_layer_rpc_api import DataLayerRpcApi +from chia.data_layer.data_layer_service import DataLayerService from chia.data_layer.data_layer_util import PluginRemote from chia.data_layer.util.plugin import load_plugin_configurations from chia.protocols.outbound_message import NodeType -from chia.server.aliases import DataLayerService, WalletService from chia.server.signal_handlers import SignalHandlers from chia.server.start_service import RpcInfo, Service, async_run from chia.ssl.create_ssl import create_all_ssl @@ -24,6 +24,7 @@ from chia.util.default_root import resolve_root_path from chia.util.task_timing import maybe_manage_task_instrumentation from chia.wallet.wallet_rpc_client import WalletRpcClient +from chia.wallet.wallet_service import WalletService # See: https://bugs.python.org/issue29288 "".encode("idna") diff --git a/chia/farmer/farmer.py b/chia/farmer/farmer.py index 5c15be8a4ffc..1634eff2619c 100644 --- a/chia/farmer/farmer.py +++ b/chia/farmer/farmer.py @@ -143,6 +143,9 @@ def __init__( # Quality string to plot identifier and challenge_hash, for use with harvester.RequestSignatures self.quality_str_to_identifiers: dict[bytes32, tuple[str, bytes32, bytes32, bytes32]] = {} + # Track pending solver requests, keyed by partial proof + self.pending_solver_requests: dict[bytes, dict[str, Any]] = {} + # number of responses to each signage point self.number_of_responses: dict[bytes32, int] = {} diff --git a/chia/farmer/farmer_api.py b/chia/farmer/farmer_api.py index 3a084212964d..58255d11252d 100644 --- a/chia/farmer/farmer_api.py +++ b/chia/farmer/farmer_api.py @@ -6,9 +6,10 @@ from typing import TYPE_CHECKING, Any, ClassVar, Optional, Union, cast import aiohttp -from chia_rs import AugSchemeMPL, G2Element, PoolTarget, PrivateKey +from chia_rs import AugSchemeMPL, G2Element, PlotSize, PoolTarget, PrivateKey, ProofOfSpace from chia_rs.sized_bytes import bytes32 from chia_rs.sized_ints import uint8, uint16, uint32, uint64 +from packaging.version import Version from chia import __version__ from chia.consensus.pot_iterations import calculate_iterations_quality, calculate_sp_interval_iters @@ -17,6 +18,7 @@ from chia.protocols import farmer_protocol, harvester_protocol from chia.protocols.farmer_protocol import DeclareProofOfSpace, SignedValues from chia.protocols.harvester_protocol import ( + PartialProofsData, PlotSyncDone, PlotSyncPathList, 
PlotSyncPlotList, @@ -33,11 +35,13 @@ get_current_authentication_token, ) from chia.protocols.protocol_message_types import ProtocolMessageTypes +from chia.protocols.solver_protocol import SolverInfo, SolverResponse from chia.server.api_protocol import ApiMetadata from chia.server.server import ssl_context_for_root from chia.server.ws_connection import WSChiaConnection from chia.ssl.create_ssl import get_mozilla_ca_crt from chia.types.blockchain_format.proof_of_space import ( + calculate_prefix_bits, generate_plot_public_key, generate_taproot_sk, get_plot_id, @@ -478,6 +482,103 @@ async def new_proof_of_space( return + @metadata.request(peer_required=True) + async def partial_proofs(self, partial_proof_data: PartialProofsData, peer: WSChiaConnection) -> None: + """ + This is a response from the harvester for V2 plots, containing only partial proof data. + We send these to the solver service and wait for a response with the full proof. + """ + if partial_proof_data.sp_hash not in self.farmer.number_of_responses: + self.farmer.number_of_responses[partial_proof_data.sp_hash] = 0 + self.farmer.cache_add_time[partial_proof_data.sp_hash] = uint64(time.time()) + + if partial_proof_data.sp_hash not in self.farmer.sps: + self.farmer.log.warning( + f"Received partial proofs for a signage point that we do not have {partial_proof_data.sp_hash}" + ) + return None + + self.farmer.cache_add_time[partial_proof_data.sp_hash] = uint64(time.time()) + + self.farmer.log.info( + f"Received V2 partial proof collection with {len(partial_proof_data.partial_proofs)} partial proofs " + f"for plot {partial_proof_data.plot_identifier[:10]}... from {peer.peer_node_id}" + ) + + # Process each partial proof chain through the solver service to get full proofs + for partial_proof in partial_proof_data.partial_proofs: + solver_info = SolverInfo(partial_proof=partial_proof) + + try: + # store pending request data for matching with response + self.farmer.pending_solver_requests[partial_proof] = { + "proof_data": partial_proof_data, + "peer": peer, + } + + # send solve request to all solver connections + msg = make_msg(ProtocolMessageTypes.solve, solver_info) + await self.farmer.server.send_to_all([msg], NodeType.SOLVER) + self.farmer.log.debug(f"Sent solve request for partial proof {partial_proof.hex()[:10]}...") + + except Exception as e: + self.farmer.log.error( + f"Failed to call solver service for partial proof {partial_proof.hex()[:10]}...: {e}" + ) + # clean up pending request + if partial_proof in self.farmer.pending_solver_requests: + del self.farmer.pending_solver_requests[partial_proof] + + @metadata.request() + async def solution_response(self, response: SolverResponse, peer: WSChiaConnection) -> None: + """ + Handle solution response from solver service. + This is called when a solver responds to a solve request. 
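+ Responses are matched back to their originating request via the partial + proof bytes stored in self.farmer.pending_solver_requests; unknown or + empty proofs are logged and dropped.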
+ """ + self.farmer.log.debug(f"Received solution response: {len(response.proof)} bytes from {peer.peer_node_id}") + + # find the matching pending request using partial_proof + + if response.partial_proof not in self.farmer.pending_solver_requests: + self.farmer.log.warning( + f"Received solver response for unknown partial proof {response.partial_proof.hex()}" + ) + return + + # get the original request data + request_data = self.farmer.pending_solver_requests.pop(response.partial_proof) + proof_data = request_data["proof_data"] + original_peer = request_data["peer"] + partial_proof = response.partial_proof + + # create the proof of space with the solver's proof + proof_bytes = response.proof + if proof_bytes is None or len(proof_bytes) == 0: + self.farmer.log.warning(f"Received empty proof from solver for proof {partial_proof.hex()}...") + return + + sp_challenge_hash = proof_data.challenge_hash + new_proof_of_space = harvester_protocol.NewProofOfSpace( + proof_data.challenge_hash, + proof_data.sp_hash, + proof_data.plot_identifier, + ProofOfSpace( + sp_challenge_hash, + proof_data.pool_public_key, + proof_data.pool_contract_puzzle_hash, + proof_data.plot_public_key, + proof_data.plot_size, + proof_bytes, + ), + proof_data.signage_point_index, + include_source_signature_data=False, + farmer_reward_address_override=None, + fee_info=None, + ) + + # process the proof of space + await self.new_proof_of_space(new_proof_of_space, original_peer) + @metadata.request() async def respond_signatures(self, response: harvester_protocol.RespondSignatures) -> None: request = self._process_respond_signatures(response) @@ -528,7 +629,8 @@ async def new_signage_point(self, new_signage_point: farmer_protocol.NewSignageP p2_singleton_puzzle_hash, ) ) - message = harvester_protocol.NewSignagePointHarvester( + + message2 = harvester_protocol.NewSignagePointHarvester2( new_signage_point.challenge_hash, new_signage_point.difficulty, new_signage_point.sub_slot_iters, @@ -539,8 +641,31 @@ async def new_signage_point(self, new_signage_point: farmer_protocol.NewSignageP new_signage_point.last_tx_height, ) - msg = make_msg(ProtocolMessageTypes.new_signage_point_harvester, message) - await self.farmer.server.send_to_all([msg], NodeType.HARVESTER) + # The plot size in the call to calculate_prefix_bits is only used + # to distinguish v1 and v2 plots. 
The value does not matter + message1 = harvester_protocol.NewSignagePointHarvester( + new_signage_point.challenge_hash, + new_signage_point.difficulty, + new_signage_point.sub_slot_iters, + new_signage_point.signage_point_index, + new_signage_point.challenge_chain_sp, + pool_difficulties, + uint8( + calculate_prefix_bits(self.farmer.constants, new_signage_point.peak_height, PlotSize.make_v1(32)) + ), + ) + + def old_harvesters(conn: WSChiaConnection) -> bool: + return conn.protocol_version <= Version("0.0.36") + + def new_harvesters(conn: WSChiaConnection) -> bool: + return conn.protocol_version > Version("0.0.36") + + msg1 = make_msg(ProtocolMessageTypes.new_signage_point_harvester, message1) + await self.farmer.server.send_to_all_if([msg1], NodeType.HARVESTER, old_harvesters) + + msg2 = make_msg(ProtocolMessageTypes.new_signage_point_harvester, message2) + await self.farmer.server.send_to_all_if([msg2], NodeType.HARVESTER, new_harvesters) except Exception as exception: # Remove here, as we want to reprocess the SP should it be sent again self.farmer.sps[new_signage_point.challenge_chain_sp].remove(new_signage_point) diff --git a/chia/farmer/farmer_service.py b/chia/farmer/farmer_service.py new file mode 100644 index 000000000000..967bd0ec870e --- /dev/null +++ b/chia/farmer/farmer_service.py @@ -0,0 +1,8 @@ +from __future__ import annotations + +from chia.farmer.farmer import Farmer +from chia.farmer.farmer_api import FarmerAPI +from chia.farmer.farmer_rpc_api import FarmerRpcApi +from chia.server.start_service import Service + +FarmerService = Service[Farmer, FarmerAPI, FarmerRpcApi] diff --git a/chia/server/start_farmer.py b/chia/farmer/start_farmer.py similarity index 98% rename from chia/server/start_farmer.py rename to chia/farmer/start_farmer.py index 58913eaf58b5..bd12c4e466a9 100644 --- a/chia/server/start_farmer.py +++ b/chia/farmer/start_farmer.py @@ -13,8 +13,8 @@ from chia.farmer.farmer import Farmer from chia.farmer.farmer_api import FarmerAPI from chia.farmer.farmer_rpc_api import FarmerRpcApi +from chia.farmer.farmer_service import FarmerService from chia.protocols.outbound_message import NodeType -from chia.server.aliases import FarmerService from chia.server.resolve_peer_info import get_unresolved_peer_infos from chia.server.signal_handlers import SignalHandlers from chia.server.start_service import RpcInfo, Service, async_run diff --git a/chia/full_node/coin_store.py b/chia/full_node/coin_store.py index f2da3b66f644..4767dd5e749e 100644 --- a/chia/full_node/coin_store.py +++ b/chia/full_node/coin_store.py @@ -30,6 +30,9 @@ class CoinStore: """ db_wrapper: DBWrapper2 + # Fall back to the `coin_puzzle_hash` index if the ff unspent index + # does not exist. 
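+ # create() switches this to `coin_record_ph_ff_unspent_idx` when that index + # exists; the selected name is injected via INDEXED BY in + # get_unspent_lineage_info_for_puzzle_hash.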
+ _unspent_lineage_for_ph_idx: str = "coin_puzzle_hash" @classmethod async def create(cls, db_wrapper: DBWrapper2) -> CoinStore: @@ -82,6 +85,12 @@ async def create(cls, db_wrapper: DBWrapper2) -> CoinStore: WHERE spent_index = -1 """ ) + async with conn.execute( + "SELECT 1 FROM sqlite_master WHERE type = 'index' AND name = 'coin_record_ph_ff_unspent_idx'" + ) as cursor: + has_ff_unspent_idx = await cursor.fetchone() is not None + if has_ff_unspent_idx: + self._unspent_lineage_for_ph_idx = "coin_record_ph_ff_unspent_idx" return self @@ -154,13 +163,21 @@ async def new_block( await self._set_spent(tx_removals, height) end = time.monotonic() - log.log( - logging.WARNING if end - start > 10 else logging.DEBUG, + took_too_long = end - start > 10 + + message = ( f"Height {height}: It took {end - start:0.2f}s to apply {len(tx_additions)} additions and " - + f"{len(tx_removals)} removals to the coin store. Make sure " - + "blockchain database is on a fast drive", + + f"{len(tx_removals)} removals to the coin store." ) + if took_too_long: + level = logging.WARNING + message += " Make sure blockchain database is on a fast drive" + else: + level = logging.DEBUG + + log.log(level, message) + # Checks DB and DiffStores for CoinRecord with coin_name and returns it async def get_coin_record(self, coin_name: bytes32) -> Optional[CoinRecord]: async with self.db_wrapper.reader_no_transaction() as conn: @@ -639,6 +656,7 @@ async def get_unspent_lineage_info_for_puzzle_hash(self, puzzle_hash: bytes32) - "unspent.coin_parent, " "parent.coin_parent " "FROM coin_record AS unspent " + f"INDEXED BY {self._unspent_lineage_for_ph_idx} " "LEFT JOIN coin_record AS parent ON unspent.coin_parent = parent.coin_name " "WHERE unspent.spent_index = -1 " "AND parent.spent_index > 0 " diff --git a/chia/full_node/eligible_coin_spends.py b/chia/full_node/eligible_coin_spends.py index 5a93fb22708d..559b682a2514 100644 --- a/chia/full_node/eligible_coin_spends.py +++ b/chia/full_node/eligible_coin_spends.py @@ -277,11 +277,11 @@ def process_fast_forward_spends( # Run the new spend bundle to make sure it remains valid. What we # care about here is whether this call throws or not. 
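# Validation failures surface as a ValueError carrying an Err code at # e.args[1]; the except clause below re-raises it with a readable message.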
get_conditions_from_spendbundle(new_sb, mempool_item.conds.cost, constants, height) - # get_conditions_from_spendbundle raises a TypeError with an error code - except TypeError as e: + # get_conditions_from_spendbundle raises a ValueError with an error code + except ValueError as e: # Convert that to a ValidationError - if len(e.args) > 0: - error = Err(e.args[0]) + if len(e.args) > 1: + error = Err(e.args[1]) raise ValueError(f"Mempool item became invalid after singleton fast forward with error {error}.") else: raise ValueError( diff --git a/chia/full_node/full_node_api.py b/chia/full_node/full_node_api.py index dc435c436ced..21113fee45f4 100644 --- a/chia/full_node/full_node_api.py +++ b/chia/full_node/full_node_api.py @@ -72,6 +72,7 @@ from chia.types.peer_info import PeerInfo from chia.util.batches import to_batches from chia.util.db_wrapper import SQLITE_MAX_VARIABLE_NUMBER +from chia.util.errors import Err, ValidationError from chia.util.hash import std_hash from chia.util.limited_semaphore import LimitedSemaphoreFullError from chia.util.task_referencer import create_referenced_task @@ -1133,6 +1134,12 @@ async def signed_values( try: await self.full_node.add_unfinished_block(new_candidate, None, True) except Exception as e: + if isinstance(e, ValidationError) and e.code == Err.NO_OVERFLOWS_IN_FIRST_SUB_SLOT_NEW_EPOCH: + self.full_node.log.info( + f"Failed to farm block {e}. Consensus rules prevent this block from being farmed. Not retrying" + ) + return None + # If we have an error with this block, try making an empty block self.full_node.log.error(f"Error farming block {e} {new_candidate}") candidate_tuple = self.full_node.full_node_store.get_candidate_block( @@ -1393,14 +1400,11 @@ async def send_transaction( error_name = error.name if error is not None else None if status == MempoolInclusionStatus.SUCCESS: response = wallet_protocol.TransactionAck(spend_name, uint8(status.value), error_name) + # If it failed/pending, but it previously succeeded (in mempool), this is idempotence, return SUCCESS + elif self.full_node.mempool_manager.get_spendbundle(spend_name) is not None: + response = wallet_protocol.TransactionAck(spend_name, uint8(MempoolInclusionStatus.SUCCESS.value), None) else: - # If it failed/pending, but it previously succeeded (in mempool), this is idempotence, return SUCCESS - if self.full_node.mempool_manager.get_spendbundle(spend_name) is not None: - response = wallet_protocol.TransactionAck( - spend_name, uint8(MempoolInclusionStatus.SUCCESS.value), None - ) - else: - response = wallet_protocol.TransactionAck(spend_name, uint8(status.value), error_name) + response = wallet_protocol.TransactionAck(spend_name, uint8(status.value), error_name) return make_msg(ProtocolMessageTypes.transaction_ack, response) @metadata.request() diff --git a/chia/full_node/full_node_rpc_api.py b/chia/full_node/full_node_rpc_api.py index cd08ce424dfa..3330583ee67a 100644 --- a/chia/full_node/full_node_rpc_api.py +++ b/chia/full_node/full_node_rpc_api.py @@ -2,6 +2,7 @@ import asyncio import time +from concurrent.futures import ThreadPoolExecutor from datetime import datetime, timezone from typing import TYPE_CHECKING, Any, ClassVar, Optional, cast @@ -89,6 +90,8 @@ async def get_average_block_time( class FullNodeRpcApi: + executor: ThreadPoolExecutor + if TYPE_CHECKING: from chia.rpc.rpc_server import RpcApiProtocol @@ -98,6 +101,7 @@ def __init__(self, service: FullNode) -> None: self.service = service self.service_name = "chia_full_node" self.cached_blockchain_state: Optional[dict[str, 
Any]] = None + self.executor = ThreadPoolExecutor(max_workers=2, thread_name_prefix="node-rpc-") def get_routes(self) -> dict[str, Endpoint]: return { @@ -481,21 +485,21 @@ async def get_block_spends(self, request: dict[str, Any]) -> EndpointResult: if full_block is None: raise ValueError(f"Block {header_hash.hex()} not found") - spends: list[dict[str, list[CoinSpend]]] = [] block_generator = await get_block_generator(self.service.blockchain.lookup_block_generators, full_block) if block_generator is None: # if block is not a transaction block. - return {"block_spends": spends} + return {"block_spends": []} - spends = get_spends_for_trusted_block( + flags = get_flags_for_height_and_constants(full_block.height, self.service.constants) + spends = await asyncio.get_running_loop().run_in_executor( + self.executor, + get_spends_for_trusted_block, self.service.constants, block_generator.program, block_generator.generator_refs, - get_flags_for_height_and_constants(full_block.height, self.service.constants), + flags, ) - # chia_rs returning a list is a mistake that will be fixed in the next release - # it ought to be returning a dict of {"block_spends": [spends]} - return spends[0] + return spends async def get_block_spends_with_conditions(self, request: dict[str, Any]) -> EndpointResult: if "header_hash" not in request: @@ -509,11 +513,14 @@ async def get_block_spends_with_conditions(self, request: dict[str, Any]) -> End if block_generator is None: # if block is not a transaction block. return {"block_spends_with_conditions": []} - spends_with_conditions = get_spends_for_trusted_block_with_conditions( + flags = get_flags_for_height_and_constants(full_block.height, self.service.constants) + spends_with_conditions = await asyncio.get_running_loop().run_in_executor( + self.executor, + get_spends_for_trusted_block_with_conditions, self.service.constants, block_generator.program, block_generator.generator_refs, - get_flags_for_height_and_constants(full_block.height, self.service.constants), + flags, ) return {"block_spends_with_conditions": spends_with_conditions} @@ -888,7 +895,7 @@ async def create_block_generator(self, _: dict[str, Any]) -> EndpointResult: if maybe_gen is not None: # this also validates the signature err, conds = await asyncio.get_running_loop().run_in_executor( - self.service.blockchain.pool, + self.executor, run_block_generator2, bytes(gen.program), gen.generator_refs, @@ -1004,6 +1011,7 @@ async def get_fee_estimate(self, request: dict[str, Any]) -> dict[str, Any]: while last_tx_block is None or last_peak_timestamp is None: peak_with_timestamp -= 1 last_tx_block = self.service.blockchain.height_to_block_record(peak_with_timestamp) + assert last_tx_block.timestamp is not None # mypy last_peak_timestamp = last_tx_block.timestamp assert last_tx_block is not None # mypy diff --git a/chia/full_node/full_node_service.py b/chia/full_node/full_node_service.py new file mode 100644 index 000000000000..f6fb53398fe0 --- /dev/null +++ b/chia/full_node/full_node_service.py @@ -0,0 +1,8 @@ +from __future__ import annotations + +from chia.full_node.full_node import FullNode +from chia.full_node.full_node_api import FullNodeAPI +from chia.full_node.full_node_rpc_api import FullNodeRpcApi +from chia.server.start_service import Service + +FullNodeService = Service[FullNode, FullNodeAPI, FullNodeRpcApi] diff --git a/chia/full_node/mempool.py b/chia/full_node/mempool.py index e4ea1afc0efe..25c256b44814 100644 --- a/chia/full_node/mempool.py +++ b/chia/full_node/mempool.py @@ -750,6 +750,7 @@ def 
create_block_generator2( f"cost: {batch_cost} total cost: {block_cost}" ) else: + log.info(f"Skipping transaction batch cumulative cost: {block_cost} batch cost: {batch_cost}") skipped_items += 1 batch_cost = 0 @@ -775,12 +776,12 @@ def create_block_generator2( if len(batch_transactions) > 0: added, _ = builder.add_spend_bundles(batch_transactions, uint64(batch_cost), constants) + log.info(f"trying to add residual batch: {len(batch_transactions)} batch cost: {batch_cost} added: {added}") if added: added_spends += batch_spends additions.extend(batch_additions) removals.extend([cs.coin for sb in batch_transactions for cs in sb.coin_spends]) - block_cost = builder.cost() log.info( f"adding TX batch, additions: {len(batch_additions)} removals: {batch_spends} " f"cost: {batch_cost} total cost: {block_cost}" @@ -797,7 +798,6 @@ def create_block_generator2( f"create_block_generator2() took {duration:0.4f} seconds. " f"block cost: {cost} spends: {added_spends} additions: {len(additions)}", ) - assert block_cost == cost return NewBlockGenerator( SerializedProgram.from_bytes(block_program), @@ -806,5 +806,5 @@ def create_block_generator2( signature, additions, removals, - uint64(block_cost), + uint64(cost), ) diff --git a/chia/full_node/mempool_manager.py b/chia/full_node/mempool_manager.py index 1943be4dc1b0..ac70ddfce5e8 100644 --- a/chia/full_node/mempool_manager.py +++ b/chia/full_node/mempool_manager.py @@ -11,10 +11,12 @@ from chia_rs import ( ELIGIBLE_FOR_DEDUP, ELIGIBLE_FOR_FF, + MEMPOOL_MODE, BLSCache, ConsensusConstants, SpendBundle, SpendBundleConditions, + get_flags_for_height_and_constants, supports_fast_forward, validate_clvm_and_signature, ) @@ -24,7 +26,6 @@ from chia.consensus.block_record import BlockRecordProtocol from chia.consensus.check_time_locks import check_time_locks -from chia.consensus.cost_calculator import NPCResult from chia.full_node.bitcoin_fee_estimator import create_bitcoin_fee_estimator from chia.full_node.fee_estimation import FeeBlockInfo, MempoolInfo, MempoolItemInfo from chia.full_node.fee_estimator_interface import FeeEstimatorInterface @@ -257,7 +258,22 @@ def check_removals( for item in conflicting_items: if item in conflicts: continue - conflict_bcs = item.bundle_coin_spends[coin_id] + conflict_bcs = item.bundle_coin_spends.get(coin_id) + if conflict_bcs is None: + # Check if this is an item that spends an older ff singleton + # version with a latest version that matches our coin ID. 
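+ # FF items are indexed in the mempool under the singleton's latest coin + # ID, while bundle_coin_spends is keyed by the originally spent coin ID, + # so fall back to scanning the item's spends for a matching + # latest_singleton_lineage.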
+ conflict_bcs = next( + ( + bcs + for bcs in item.bundle_coin_spends.values() + if bcs.latest_singleton_lineage is not None and bcs.latest_singleton_lineage.coin_id == coin_id + ), + None, + ) + # We're not expected to get here but let's handle it gracefully + if conflict_bcs is None: + log.warning(f"Coin ID {coin_id} expected but not found in mempool item {item.name}") + return Err.INVALID_SPEND_BUNDLE, [] # if the spend we're adding to the mempool is not DEDUP nor FF, it's # just a regular conflict if not coin_bcs.eligible_for_fast_forward and not coin_bcs.eligible_for_dedup: @@ -440,19 +456,20 @@ async def pre_validate_spendbundle( self._worker_queue_size += 1 try: + flags = get_flags_for_height_and_constants(self.peak.height, self.constants) sbc, new_cache_entries, duration = await asyncio.get_running_loop().run_in_executor( self.pool, validate_clvm_and_signature, spend_bundle, self.max_tx_clvm_cost, self.constants, - self.peak.height, + flags | MEMPOOL_MODE, ) - # validate_clvm_and_signature raises a TypeError with an error code - except Exception as e: + # validate_clvm_and_signature raises a ValueError with an error code + except ValueError as e: # Convert that to a ValidationError - if len(e.args) > 0: - error = Err(e.args[0]) + if len(e.args) > 1: + error = Err(e.args[1]) raise ValidationError(error) else: raise ValidationError(Err.UNKNOWN) # pragma: no cover @@ -462,8 +479,6 @@ async def pre_validate_spendbundle( if bls_cache is not None: bls_cache.update(new_cache_entries) - ret = NPCResult(None, sbc) - if spend_bundle_id is None: spend_bundle_id = spend_bundle.name() @@ -472,10 +487,7 @@ async def pre_validate_spendbundle( f"pre_validate_spendbundle took {duration:0.4f} seconds " f"for {spend_bundle_id} (queue-size: {self._worker_queue_size})", ) - if ret.error is not None: - raise ValidationError(Err(ret.error), "pre_validate_spendbundle failed") - assert ret.conds is not None - return ret.conds + return sbc async def add_spend_bundle( self, @@ -489,14 +501,16 @@ async def add_spend_bundle( ] = None, ) -> SpendBundleAddInfo: """ - Validates and adds to mempool a new_spend with the given NPCResult, and spend_name, and the current mempool. - The mempool should be locked during this call (blockchain lock). If there are mempool conflicts, the conflicting - spends might be removed (if the new spend is a superset of the previous). Otherwise, the new spend might be + Validates and adds to mempool a new_spend with the given + SpendBundleConditions, and spend_name, and the current mempool. The mempool + should be locked during this call (blockchain lock). If there are mempool + conflicts, the conflicting spends might be removed (if the new spend is + a superset of the previous). Otherwise, the new spend might be added to the potential pool. Args: new_spend: spend bundle to validate and add - conds: result of running the clvm transaction in a fake block + conds: SpendBundleConditions resulting from running the clvm in the spend bundle's coin spends spend_name: hash of the spend bundle data, passed in as an optimization Returns: @@ -607,11 +621,16 @@ async def validate_spend_bundle( eligible_for_ff = bool(spend_conds.flags & ELIGIBLE_FOR_FF) and supports_fast_forward(coin_spend) if eligible_for_ff: # Make sure the fast forward spend still has a version that is - # still unspent, because if the singleton has been melted, the - # fast forward spend will never become valid. 
- lineage_info = await get_unspent_lineage_info_for_puzzle_hash(bytes32(spend_conds.puzzle_hash)) + # still unspent, because if the singleton has been spent in a + # non-FF spend, this fast forward spend will never become valid. + # So treat this as a normal spend, which requires the exact coin + # to exist and be unspent. + # Singletons that were created before the optimization of using + # spent_index will also fail this test, and such spends will + # fall back to be treated as non-FF spends. + lineage_info = await get_unspent_lineage_info_for_puzzle_hash(spend_conds.puzzle_hash) if lineage_info is None: - return Err.DOUBLE_SPEND, None, [] + eligible_for_ff = False spend_additions = [] for puzzle_hash, amount, _ in spend_conds.create_coin: @@ -834,7 +853,7 @@ async def new_peak( # rebasing a fast forward spend is more expensive than to just # evict the item. So, any FF spend we may need to rebase, defer # them until after we've gone through all spends - deferred_ff_items: set[tuple[bytes32, bytes32]] = set() + deferred_ff_items: set[tuple[bytes32, MempoolItem]] = set() for spend in spent_coins: items = self.mempool.get_items_by_coin_id(spend) @@ -857,7 +876,7 @@ async def new_peak( spendbundle_ids_to_remove.add(item_name) continue - deferred_ff_items.add((spend, item_name)) + deferred_ff_items.add((spend, item)) # fast forward spends are indexed under the latest singleton coin ID # if it's spent, we need to update the index in the mempool. This @@ -865,7 +884,8 @@ async def new_peak( # new_coin_id, current_coin_id, mempool item name spends_to_update: list[tuple[bytes32, bytes32, bytes32]] = [] - for spend, item_name in deferred_ff_items: + for spend, item in deferred_ff_items: + item_name = item.spend_bundle_name if item_name in spendbundle_ids_to_remove: continue # there may be multiple matching spends in the mempool diff --git a/chia/server/start_full_node.py b/chia/full_node/start_full_node.py similarity index 98% rename from chia/server/start_full_node.py rename to chia/full_node/start_full_node.py index 0afac9320527..05bbb556ff8d 100644 --- a/chia/server/start_full_node.py +++ b/chia/full_node/start_full_node.py @@ -15,8 +15,8 @@ from chia.full_node.full_node import FullNode from chia.full_node.full_node_api import FullNodeAPI from chia.full_node.full_node_rpc_api import FullNodeRpcApi +from chia.full_node.full_node_service import FullNodeService from chia.protocols.outbound_message import NodeType -from chia.server.aliases import FullNodeService from chia.server.resolve_peer_info import get_unresolved_peer_infos from chia.server.signal_handlers import SignalHandlers from chia.server.start_service import RpcInfo, Service, async_run diff --git a/chia/full_node/subscriptions.py b/chia/full_node/subscriptions.py index 1921e7ff3c6b..cd80e84ee98e 100644 --- a/chia/full_node/subscriptions.py +++ b/chia/full_node/subscriptions.py @@ -225,11 +225,11 @@ def peers_for_spend_bundle( for spend in conds.spends: coin_ids.add(bytes32(spend.coin_id)) - puzzle_hashes.add(bytes32(spend.puzzle_hash)) + puzzle_hashes.add(spend.puzzle_hash) for puzzle_hash, amount, memo in spend.create_coin: coin_ids.add(Coin(spend.coin_id, puzzle_hash, uint64(amount)).name()) - puzzle_hashes.add(bytes32(puzzle_hash)) + puzzle_hashes.add(puzzle_hash) if memo is not None and len(memo) == 32: puzzle_hashes.add(bytes32(memo)) diff --git a/chia/full_node/weight_proof.py b/chia/full_node/weight_proof.py index 71886ad759fe..24b45b0694db 100644 --- a/chia/full_node/weight_proof.py +++ b/chia/full_node/weight_proof.py @@ 
-411,17 +411,16 @@ async def __first_sub_slot_vdfs( while not curr_sub_rec.sub_epoch_summary_included: curr_sub_rec = blocks[curr_sub_rec.prev_hash] first_rc_end_of_slot_vdf = self.first_rc_end_of_slot_vdf(header_block, blocks, header_blocks) + elif header_block_sub_rec.overflow and header_block_sub_rec.first_in_sub_slot: + sub_slots_num = 2 + while sub_slots_num > 0 and curr_sub_rec.height > 0: + if curr_sub_rec.first_in_sub_slot: + assert curr_sub_rec.finished_challenge_slot_hashes is not None + sub_slots_num -= len(curr_sub_rec.finished_challenge_slot_hashes) + curr_sub_rec = blocks[curr_sub_rec.prev_hash] else: - if header_block_sub_rec.overflow and header_block_sub_rec.first_in_sub_slot: - sub_slots_num = 2 - while sub_slots_num > 0 and curr_sub_rec.height > 0: - if curr_sub_rec.first_in_sub_slot: - assert curr_sub_rec.finished_challenge_slot_hashes is not None - sub_slots_num -= len(curr_sub_rec.finished_challenge_slot_hashes) - curr_sub_rec = blocks[curr_sub_rec.prev_hash] - else: - while not curr_sub_rec.first_in_sub_slot and curr_sub_rec.height > 0: - curr_sub_rec = blocks[curr_sub_rec.prev_hash] + while not curr_sub_rec.first_in_sub_slot and curr_sub_rec.height > 0: + curr_sub_rec = blocks[curr_sub_rec.prev_hash] curr = header_blocks[curr_sub_rec.header_hash] sub_slots_data: list[SubSlotData] = [] @@ -1533,10 +1532,10 @@ def _get_last_ses_hash( ) -> tuple[Optional[bytes32], uint32]: for idx, block in enumerate(reversed(recent_reward_chain)): if (block.reward_chain_block.height % constants.SUB_EPOCH_BLOCKS) == 0: - idx = len(recent_reward_chain) - 1 - idx # reverse + original_idx = len(recent_reward_chain) - 1 - idx # reverse # find first block after sub slot end - while idx < len(recent_reward_chain): - curr = recent_reward_chain[idx] + while original_idx < len(recent_reward_chain): + curr = recent_reward_chain[original_idx] if len(curr.finished_sub_slots) > 0: for slot in curr.finished_sub_slots: if slot.challenge_chain.subepoch_summary_hash is not None: @@ -1544,7 +1543,7 @@ def _get_last_ses_hash( slot.challenge_chain.subepoch_summary_hash, curr.reward_chain_block.height, ) - idx += 1 + original_idx += 1 return None, uint32(0) diff --git a/chia/harvester/harvester.py b/chia/harvester/harvester.py index cfeccb3905a9..04fb200e2e9a 100644 --- a/chia/harvester/harvester.py +++ b/chia/harvester/harvester.py @@ -190,10 +190,17 @@ def get_plots(self) -> tuple[list[dict[str, Any]], list[str], list[str]]: with self.plot_manager: for path, plot_info in self.plot_manager.plots.items(): prover = plot_info.prover + size = prover.get_size() + if size.size_v1 is not None: + k = size.size_v1 + else: + assert size.size_v2 is not None + k = size.size_v2 + # TODO: todo_v2_plots support v2 plots in RPC response response_plots.append( { "filename": str(path), - "size": prover.get_size(), + "size": k, "plot_id": prover.get_id(), "pool_public_key": plot_info.pool_public_key, "pool_contract_puzzle_hash": plot_info.pool_contract_puzzle_hash, diff --git a/chia/harvester/harvester_api.py b/chia/harvester/harvester_api.py index 994dbb0313a5..5438d19c4289 100644 --- a/chia/harvester/harvester_api.py +++ b/chia/harvester/harvester_api.py @@ -3,22 +3,24 @@ import asyncio import logging import time +from collections.abc import Awaitable, Sequence from pathlib import Path from typing import TYPE_CHECKING, ClassVar, Optional, cast -from chia_rs import AugSchemeMPL, G1Element, G2Element, PlotSize, ProofOfSpace +from chia_rs import AugSchemeMPL, G1Element, G2Element, ProofOfSpace from chia_rs.sized_bytes import 
bytes32 -from chia_rs.sized_ints import uint8, uint32, uint64 +from chia_rs.sized_ints import uint32, uint64 from chia.consensus.pot_iterations import ( calculate_iterations_quality, calculate_sp_interval_iters, ) from chia.harvester.harvester import Harvester +from chia.plotting.prover import PlotVersion from chia.plotting.util import PlotInfo, parse_plot_info from chia.protocols import harvester_protocol from chia.protocols.farmer_protocol import FarmingInfo -from chia.protocols.harvester_protocol import Plot, PlotSyncResponse +from chia.protocols.harvester_protocol import PartialProofsData, Plot, PlotSyncResponse from chia.protocols.outbound_message import Message, make_msg from chia.protocols.protocol_message_types import ProtocolMessageTypes from chia.server.api_protocol import ApiMetadata @@ -26,8 +28,11 @@ from chia.types.blockchain_format.proof_of_space import ( calculate_pos_challenge, calculate_prefix_bits, + calculate_required_plot_strength, generate_plot_public_key, + make_pos, passes_plot_filter, + quality_for_partial_proof, ) from chia.wallet.derive_keys import master_sk_to_local_sk @@ -49,6 +54,60 @@ def __init__(self, harvester: Harvester): def ready(self) -> bool: return True + def _plot_passes_filter(self, plot_info: PlotInfo, challenge: harvester_protocol.NewSignagePointHarvester2) -> bool: + filter_prefix_bits = calculate_prefix_bits( + self.harvester.constants, + challenge.peak_height, + plot_info.prover.get_size(), + ) + return passes_plot_filter( + filter_prefix_bits, + plot_info.prover.get_id(), + challenge.challenge_hash, + challenge.sp_hash, + ) + + async def _handle_v1_responses( + self, + awaitables: Sequence[Awaitable[tuple[Path, list[harvester_protocol.NewProofOfSpace]]]], + start_time: float, + peer: WSChiaConnection, + ) -> int: + proofs_found = 0 + for filename_sublist_awaitable in asyncio.as_completed(awaitables): + filename, sublist = await filename_sublist_awaitable + time_taken = time.monotonic() - start_time + if time_taken > 8: + self.harvester.log.warning( + f"Looking up qualities on {filename} took: {time_taken}. This should be below 8 seconds" + f" to minimize risk of losing rewards." + ) + for response in sublist: + proofs_found += 1 + msg = make_msg(ProtocolMessageTypes.new_proof_of_space, response) + await peer.send_message(msg) + return proofs_found + + async def _handle_v2_responses( + self, v2_awaitables: Sequence[Awaitable[Optional[PartialProofsData]]], start_time: float, peer: WSChiaConnection + ) -> int: + partial_proofs_found = 0 + for quality_awaitable in asyncio.as_completed(v2_awaitables): + partial_proofs_data = await quality_awaitable + if partial_proofs_data is None: + continue + time_taken = time.monotonic() - start_time + if time_taken > 8: + self.harvester.log.warning( + f"Looking up partial proofs on {partial_proofs_data.plot_identifier}" + f" took: {time_taken}. This should be below 8 seconds" + f" to minimize risk of losing rewards." + ) + partial_proofs_found += len(partial_proofs_data.partial_proofs) + msg = make_msg(ProtocolMessageTypes.partial_proofs, partial_proofs_data) + await peer.send_message(msg) + return partial_proofs_found +
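+ # Note: the two handlers above differ in message granularity. _handle_v1_responses sends one new_proof_of_space message per proof found, while _handle_v2_responses sends a single partial_proofs message per plot, batching every partial proof found for that plot; turning those partial proofs into full proofs is deferred to solve_proof() or the solver service (see chia/plotting/prover.py further down).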
@metadata.request(peer_required=True) async def harvester_handshake( self, harvester_handshake: harvester_protocol.HarvesterHandshake, peer: WSChiaConnection @@ -65,9 +124,9 @@ async def harvester_handshake( await self.harvester.plot_sync_sender.start() self.harvester.plot_manager.start_refreshing() - @metadata.request(peer_required=True) + @metadata.request(peer_required=True, request_type=ProtocolMessageTypes.new_signage_point_harvester) async def new_signage_point_harvester( - self, new_challenge: harvester_protocol.NewSignagePointHarvester, peer: WSChiaConnection + self, new_challenge: harvester_protocol.NewSignagePointHarvester2, peer: WSChiaConnection ) -> None: """ The harvester receives a new signage point from the farmer, this happens at the start of each slot. @@ -94,8 +153,79 @@ async def new_signage_point_harvester( start = time.monotonic() assert len(new_challenge.challenge_hash) == 32 + required_plot_strength = calculate_required_plot_strength( + self.harvester.constants, new_challenge.last_tx_height + ) + loop = asyncio.get_running_loop() + def blocking_lookup_v2_partial_proofs(filename: Path, plot_info: PlotInfo) -> Optional[PartialProofsData]: + # Uses the V2 prover object to look up partial proofs only. No full proofs are generated. + try: + plot_id = plot_info.prover.get_id() + sp_challenge_hash = calculate_pos_challenge( + plot_id, + new_challenge.challenge_hash, + new_challenge.sp_hash, + ) + partial_proofs = plot_info.prover.get_partial_proofs_for_challenge( + sp_challenge_hash, required_plot_strength + ) + + # If no partial proofs are found, return None + if len(partial_proofs) == 0: + return None + + # Get the appropriate difficulty for this plot + difficulty = new_challenge.difficulty + sub_slot_iters = new_challenge.sub_slot_iters + if plot_info.pool_contract_puzzle_hash is not None: + # Check for pool-specific difficulty + for pool_difficulty in new_challenge.pool_difficulties: + if pool_difficulty.pool_contract_puzzle_hash == plot_info.pool_contract_puzzle_hash: + difficulty = pool_difficulty.difficulty + sub_slot_iters = pool_difficulty.sub_slot_iters + break + + # Keep only the partial proofs whose quality passes the required_iters check (same as the V1 flow) + good_partial_proofs = [] + sp_interval_iters = calculate_sp_interval_iters(self.harvester.constants, sub_slot_iters) + + for partial_proof in partial_proofs: + quality_str = quality_for_partial_proof(partial_proof, new_challenge.challenge_hash) + required_iters: uint64 = calculate_iterations_quality( + self.harvester.constants, + quality_str, + plot_info.prover.get_size(), + difficulty, + new_challenge.sp_hash, + sub_slot_iters, + new_challenge.last_tx_height, + ) + + if required_iters < sp_interval_iters: + good_partial_proofs.append(partial_proof) + + if len(good_partial_proofs) == 0: + return None + + size = plot_info.prover.get_size().size_v2 + assert size is not None + return PartialProofsData( + new_challenge.challenge_hash, + new_challenge.sp_hash, + good_partial_proofs[0].hex() + str(filename.resolve()), + good_partial_proofs, + new_challenge.signage_point_index, + size, + plot_info.pool_public_key, + plot_info.pool_contract_puzzle_hash, + plot_info.plot_public_key, + ) + except Exception: + self.harvester.log.exception("Failed V2 partial proof lookup") + return None + def blocking_lookup(filename: Path, 
plot_info: PlotInfo) -> list[tuple[bytes32, ProofOfSpace]]: # Uses the Prover object to lookup qualities. This is a blocking call, # so it should be run in a thread pool. @@ -147,11 +278,10 @@ def blocking_lookup(filename: Path, plot_info: PlotInfo) -> list[tuple[bytes32, # Found proofs of space (on average 1 is expected per plot) for index, quality_str in enumerate(quality_strings): - # TODO: todo_v2_plots required_iters: uint64 = calculate_iterations_quality( self.harvester.constants, quality_str, - PlotSize.make_v1(plot_info.prover.get_size()), + plot_info.prover.get_size(), difficulty, new_challenge.sp_hash, sub_slot_iters, @@ -200,12 +330,12 @@ def blocking_lookup(filename: Path, plot_info: PlotInfo) -> list[tuple[bytes32, responses.append( ( quality_str, - ProofOfSpace( + make_pos( sp_challenge_hash, plot_info.pool_public_key, plot_info.pool_contract_puzzle_hash, plot_info.plot_public_key, - uint8(plot_info.prover.get_size()), + plot_info.prover.get_size(), proof_xs, ), ) @@ -241,6 +371,7 @@ async def lookup_challenge( return filename, all_responses awaitables = [] + v2_awaitables = [] passed = 0 total = 0 with self.harvester.plot_manager: @@ -249,21 +380,19 @@ async def lookup_challenge( # Passes the plot filter (does not check sp filter yet though, since we have not reached sp) # This is being executed at the beginning of the slot total += 1 - - # TODO: todo_v2_plots support v2 plots in PlotManager - filter_prefix_bits = uint8( - calculate_prefix_bits( - self.harvester.constants, - new_challenge.peak_height, - PlotSize.make_v1(try_plot_info.prover.get_size()), + if not self._plot_passes_filter(try_plot_info, new_challenge): + continue + if try_plot_info.prover.get_version() == PlotVersion.V2: + v2_awaitables.append( + loop.run_in_executor( + self.harvester.executor, + blocking_lookup_v2_partial_proofs, + try_plot_filename, + try_plot_info, + ) ) - ) - if passes_plot_filter( - filter_prefix_bits, - try_plot_info.prover.get_id(), - new_challenge.challenge_hash, - new_challenge.sp_hash, - ): + passed += 1 + else: passed += 1 awaitables.append(lookup_challenge(try_plot_filename, try_plot_info)) self.harvester.log.debug(f"new_signage_point_harvester {passed} plots passed the plot filter") @@ -271,21 +400,24 @@ async def lookup_challenge( # Concurrently executes all lookups on disk, to take advantage of multiple disk parallelism time_taken = time.monotonic() - start total_proofs_found = 0 - for filename_sublist_awaitable in asyncio.as_completed(awaitables): - filename, sublist = await filename_sublist_awaitable - time_taken = time.monotonic() - start - if time_taken > 8: - self.harvester.log.warning( - f"Looking up qualities on {filename} took: {time_taken}. This should be below 8 seconds" - f" to minimize risk of losing rewards." 
- ) - else: - pass - # self.harvester.log.info(f"Looking up qualities on {filename} took: {time_taken}") - for response in sublist: - total_proofs_found += 1 - msg = make_msg(ProtocolMessageTypes.new_proof_of_space, response) - await peer.send_message(msg) + total_v2_partial_proofs_found = 0 + + # run both response handlers concurrently + tasks = [] + if awaitables: + tasks.append(self._handle_v1_responses(awaitables, start, peer)) + if v2_awaitables: + tasks.append(self._handle_v2_responses(v2_awaitables, start, peer)) + + if tasks: + results = await asyncio.gather(*tasks) + if len(results) == 2: + total_proofs_found, total_v2_partial_proofs_found = results + elif len(results) == 1: + if awaitables: + total_proofs_found = results[0] + else: + total_v2_partial_proofs_found = results[0] now = uint64(time.time()) @@ -302,9 +434,10 @@ async def lookup_challenge( await peer.send_message(pass_msg) self.harvester.log.info( - f"{len(awaitables)} plots were eligible for farming {new_challenge.challenge_hash.hex()[:10]}..." - f" Found {total_proofs_found} proofs. Time: {time_taken:.5f} s. " - f"Total {self.harvester.plot_manager.plot_count()} plots" + f"challenge_hash: {new_challenge.challenge_hash.hex()[:10]}... " + f"{len(awaitables) + len(v2_awaitables)} plots were eligible for farming this challenge. " + f"Found {total_proofs_found} V1 proofs and {total_v2_partial_proofs_found} V2 partial proofs." + f" Time: {time_taken:.5f} s. Total {self.harvester.plot_manager.plot_count()} plots" ) self.harvester.state_changed( "farming_info", @@ -312,7 +445,8 @@ async def lookup_challenge( "challenge_hash": new_challenge.challenge_hash.hex(), "total_plots": self.harvester.plot_manager.plot_count(), "found_proofs": total_proofs_found, - "eligible_plots": len(awaitables), + "found_v2_partial_proofs": total_v2_partial_proofs_found, + "eligible_plots": len(awaitables) + len(v2_awaitables), "time": time_taken, }, ) diff --git a/chia/harvester/harvester_service.py b/chia/harvester/harvester_service.py new file mode 100644 index 000000000000..60779c8f2cbe --- /dev/null +++ b/chia/harvester/harvester_service.py @@ -0,0 +1,8 @@ +from __future__ import annotations + +from chia.harvester.harvester import Harvester +from chia.harvester.harvester_api import HarvesterAPI +from chia.harvester.harvester_rpc_api import HarvesterRpcApi +from chia.server.start_service import Service + +HarvesterService = Service[Harvester, HarvesterAPI, HarvesterRpcApi] diff --git a/chia/server/start_harvester.py b/chia/harvester/start_harvester.py similarity index 98% rename from chia/server/start_harvester.py rename to chia/harvester/start_harvester.py index 4a31227bff48..1ad4dc934c68 100644 --- a/chia/server/start_harvester.py +++ b/chia/harvester/start_harvester.py @@ -13,8 +13,8 @@ from chia.harvester.harvester import Harvester from chia.harvester.harvester_api import HarvesterAPI from chia.harvester.harvester_rpc_api import HarvesterRpcApi +from chia.harvester.harvester_service import HarvesterService from chia.protocols.outbound_message import NodeType -from chia.server.aliases import HarvesterService from chia.server.resolve_peer_info import get_unresolved_peer_infos from chia.server.signal_handlers import SignalHandlers from chia.server.start_service import RpcInfo, Service, async_run diff --git a/chia/introducer/introducer_service.py b/chia/introducer/introducer_service.py new file mode 100644 index 000000000000..31abc6fc25f3 --- /dev/null +++ b/chia/introducer/introducer_service.py @@ -0,0 +1,8 @@ +from __future__ import annotations + +from 
chia.full_node.full_node_rpc_api import FullNodeRpcApi +from chia.introducer.introducer import Introducer +from chia.introducer.introducer_api import IntroducerAPI +from chia.server.start_service import Service + +IntroducerService = Service[Introducer, IntroducerAPI, FullNodeRpcApi] diff --git a/chia/server/start_introducer.py b/chia/introducer/start_introducer.py similarity index 97% rename from chia/server/start_introducer.py rename to chia/introducer/start_introducer.py index a93b95cf7e12..619a48e9257c 100644 --- a/chia/server/start_introducer.py +++ b/chia/introducer/start_introducer.py @@ -8,8 +8,8 @@ from chia.apis import ApiProtocolRegistry from chia.introducer.introducer import Introducer from chia.introducer.introducer_api import IntroducerAPI +from chia.introducer.introducer_service import IntroducerService from chia.protocols.outbound_message import NodeType -from chia.server.aliases import IntroducerService from chia.server.signal_handlers import SignalHandlers from chia.server.start_service import Service, async_run from chia.util.chia_logging import initialize_service_logging diff --git a/chia/plot_sync/receiver.py b/chia/plot_sync/receiver.py index 39c18d266eaf..bec063b03d5e 100644 --- a/chia/plot_sync/receiver.py +++ b/chia/plot_sync/receiver.py @@ -6,6 +6,7 @@ from dataclasses import dataclass, field from typing import Any, Callable, Optional, Union +from chia_rs import PlotSize from chia_rs.sized_bytes import bytes32 from chia_rs.sized_ints import int16, uint32, uint64 from typing_extensions import Protocol @@ -349,7 +350,11 @@ async def _sync_done(self, data: PlotSyncDone) -> None: self._duplicates = self._current_sync.delta.duplicates.additions.copy() self._total_plot_size = sum(plot.file_size for plot in self._plots.values()) self._total_effective_plot_size = int( - sum(UI_ACTUAL_SPACE_CONSTANT_FACTOR * int(_expected_plot_size(plot.size)) for plot in self._plots.values()) + # TODO: todo_v2_plots support v2 plots + sum( + UI_ACTUAL_SPACE_CONSTANT_FACTOR * int(_expected_plot_size(PlotSize.make_v1(plot.size))) + for plot in self._plots.values() + ) ) # Save current sync as last sync and create a new current sync self._last_sync = self._current_sync diff --git a/chia/plot_sync/sender.py b/chia/plot_sync/sender.py index 707d5b7d8731..46189c2c53a0 100644 --- a/chia/plot_sync/sender.py +++ b/chia/plot_sync/sender.py @@ -37,10 +37,13 @@ def _convert_plot_info_list(plot_infos: list[PlotInfo]) -> list[Plot]: converted: list[Plot] = [] for plot_info in plot_infos: + # TODO: todo_v2_plots support v2 plots + k = plot_info.prover.get_size().size_v1 + assert k is not None converted.append( Plot( filename=plot_info.prover.get_filename(), - size=plot_info.prover.get_size(), + size=k, plot_id=plot_info.prover.get_id(), pool_public_key=plot_info.pool_public_key, pool_contract_puzzle_hash=plot_info.pool_contract_puzzle_hash, diff --git a/chia/plotting/cache.py b/chia/plotting/cache.py index d2cbd5000403..d97698df5071 100644 --- a/chia/plotting/cache.py +++ b/chia/plotting/cache.py @@ -165,28 +165,33 @@ def load(self) -> None: # it's here to filter invalid cache entries coming from bladebit RAM plotting. 
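# Worked example (illustrative): for k=32 the estimate below gives ceil(2**32 / 100_000_000) * ceil(32 / 8) = 43 * 4 = 172 bytes, so check_size is roughly 172 + memo_size + 2000, and a larger prover_data blob is treated as suspicious.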
# Related: - https://github.com/Chia-Network/chia-blockchain/issues/13084 # - https://github.com/Chia-Network/chiapos/pull/337 - k = new_entry.prover.get_size() - if k not in estimated_c2_sizes: - estimated_c2_sizes[k] = ceil(2**k / 100_000_000) * ceil(k / 8) - memo_size = len(new_entry.prover.get_memo()) - prover_size = len(cache_entry.prover_data) - # Estimated C2 size + memo size + 2000 (static data + path) - # static data: version(2) + table pointers (<=96) + id(32) + k(1) => ~130 - # path: up to ~1870, all above will lead to false positive. - # See https://github.com/Chia-Network/chiapos/blob/3ee062b86315823dd775453ad320b8be892c7df3/src/prover_disk.hpp#L282-L287 # noqa: E501 - - # Use experimental measurements if more than estimates - # https://github.com/Chia-Network/chia-blockchain/issues/16063 - check_size = estimated_c2_sizes[k] + memo_size + 2000 - if k in measured_sizes: - check_size = max(check_size, measured_sizes[k]) - - if prover_size > check_size: - log.warning( - "Suspicious cache entry dropped. Recommended: stop the harvester, remove " - f"{self._path}, restart. Entry: size {prover_size}, path {path}" - ) - else: + ps = new_entry.prover.get_size() + if ps.size_v1 is not None: + k = ps.size_v1 + if k not in estimated_c2_sizes: + estimated_c2_sizes[k] = ceil(2**k / 100_000_000) * ceil(k / 8) + memo_size = len(new_entry.prover.get_memo()) + prover_size = len(cache_entry.prover_data) + # Estimated C2 size + memo size + 2000 (static data + path) + # static data: version(2) + table pointers (<=96) + id(32) + k(1) => ~130 + # path: up to ~1870, all above will lead to false positive. + # See https://github.com/Chia-Network/chiapos/blob/3ee062b86315823dd775453ad320b8be892c7df3/src/prover_disk.hpp#L282-L287 # noqa: E501 + + # Use experimental measurements if more than estimates + # https://github.com/Chia-Network/chia-blockchain/issues/16063 + check_size = estimated_c2_sizes[k] + memo_size + 2000 + if k in measured_sizes: + check_size = max(check_size, measured_sizes[k]) + + if prover_size > check_size: + log.warning( + "Suspicious cache entry dropped. Recommended: stop the harvester, remove " + f"{self._path}, restart. 
Entry: size {prover_size}, path {path}" + ) + else: + self._data[Path(path)] = new_entry + elif ps.size_v2 is not None: + # TODO: todo_v2_plots validate prover size self._data[Path(path)] = new_entry log.info(f"Parsed {len(self._data)} cache entries in {time.time() - start:.2f}s") diff --git a/chia/plotting/check_plots.py b/chia/plotting/check_plots.py index 0fe7c2567fa5..443510e5f734 100644 --- a/chia/plotting/check_plots.py +++ b/chia/plotting/check_plots.py @@ -5,7 +5,7 @@ from collections import Counter from pathlib import Path from threading import Lock -from time import sleep, time +from time import monotonic, sleep from typing import Optional from chia_rs import G1Element @@ -133,14 +133,16 @@ def check_plots( log.info("") log.info("") log.info(f"Starting to test each plot with {num} challenges each\n") - total_good_plots: Counter[uint8] = Counter() + total_good_plots_v1: Counter[uint8] = Counter() + total_good_plots_v2: Counter[uint8] = Counter() total_size = 0 bad_plots_list: list[Path] = [] with plot_manager: def process_plot(plot_path: Path, plot_info: PlotInfo, num_start: int, num_end: int, lock: Lock) -> None: - nonlocal total_good_plots + nonlocal total_good_plots_v1 + nonlocal total_good_plots_v2 nonlocal total_size nonlocal bad_plots_list @@ -170,44 +172,16 @@ def process_plot(plot_path: Path, plot_info: PlotInfo, num_start: int, num_end: challenge = std_hash(i.to_bytes(32, "big")) # Some plot errors cause get_qualities_for_challenge to throw a RuntimeError try: - quality_start_time = int(round(time() * 1000)) - for index, quality_str in enumerate(pr.get_qualities_for_challenge(challenge)): - quality_spent_time = int(round(time() * 1000)) - quality_start_time - if quality_spent_time > 8000: - log.warning( - f"\tLooking up qualities took: {quality_spent_time} ms. This should be below 8 seconds " - f"to minimize risk of losing rewards. Filepath: {plot_path}" - ) - else: - log.info(f"\tLooking up qualities took: {quality_spent_time} ms. Filepath: {plot_path}") - - # Other plot errors cause get_full_proof or validate_proof to throw an AssertionError - try: - proof_start_time = int(round(time() * 1000)) - proof = pr.get_full_proof(challenge, index, parallel_read) - proof_spent_time = int(round(time() * 1000)) - proof_start_time - if proof_spent_time > 15000: - log.warning( - f"\tFinding proof took: {proof_spent_time} ms. This should be below 15 seconds " - f"to minimize risk of losing rewards. Filepath: {plot_path}" - ) - else: - log.info(f"\tFinding proof took: {proof_spent_time} ms. Filepath: {plot_path}") - - ver_quality_str = v.validate_proof(pr.get_id(), pr.get_size(), challenge, proof) - if quality_str == ver_quality_str: - total_proofs += 1 - else: - log.warning( - f"\tQuality doesn't match with proof. Filepath: {plot_path} " - "This can occasionally happen with a compressed plot." - ) - except AssertionError as e: - log.error( - f"{type(e)}: {e} error in proving/verifying for plot {plot_path}. Filepath: {plot_path}" - ) - caught_exception = True - quality_start_time = int(round(time() * 1000)) + quality_start_time = round(monotonic() * 1000) + qualities = pr.get_qualities_for_challenge(challenge) + quality_spent_time = round(monotonic() * 1000) - quality_start_time + if quality_spent_time > 8000: + log.warning( + f"\tLooking up qualities took: {quality_spent_time} ms. This should be below 8 seconds " + f"to minimize risk of losing rewards. Filepath: {plot_path}" + ) + else: + log.info(f"\tLooking up qualities took: {quality_spent_time} ms. 
Filepath: {plot_path}") except KeyboardInterrupt: log.warning("Interrupted, closing") return @@ -221,9 +195,40 @@ def process_plot(plot_path: Path, plot_info: PlotInfo, num_start: int, num_end: else: log.error(f"{type(e)}: {e} error in getting challenge qualities for plot {plot_path}") caught_exception = True + continue except Exception as e: log.error(f"{type(e)}: {e} error in getting challenge qualities for plot {plot_path}") caught_exception = True + break + + for index, quality_str in enumerate(qualities): + # Other plot errors cause get_full_proof or validate_proof to throw an AssertionError + try: + proof_start_time = round(monotonic() * 1000) + # TODO : todo_v2_plots handle v2 plots + proof = pr.get_full_proof(challenge, index, parallel_read) + proof_spent_time = round(monotonic() * 1000) - proof_start_time + if proof_spent_time > 15000: + log.warning( + f"\tFinding proof took: {proof_spent_time} ms. This should be below 15 seconds " + f"to minimize risk of losing rewards. Filepath: {plot_path}" + ) + else: + log.info(f"\tFinding proof took: {proof_spent_time} ms. Filepath: {plot_path}") + + ver_quality_str = v.validate_proof(pr.get_id(), pr.get_size().size_v1, challenge, proof) + if quality_str == ver_quality_str: + total_proofs += 1 + else: + log.warning( + f"\tQuality doesn't match with proof. Filepath: {plot_path} " + "This can occasionally happen with a compressed plot." + ) + except AssertionError as e: + log.error( + f"{type(e)}: {e} error in proving/verifying for plot {plot_path}. Filepath: {plot_path}" + ) + caught_exception = True if caught_exception is True: break @@ -232,8 +237,16 @@ def process_plot(plot_path: Path, plot_info: PlotInfo, num_start: int, num_end: f"\tProofs {total_proofs} / {challenges}, {round(total_proofs / float(challenges), 4)}. 
" f"Filepath: {plot_path}" ) - total_good_plots[pr.get_size()] += 1 - total_size += plot_path.stat().st_size + version_and_size = pr.get_size() + if version_and_size.size_v1 is not None: + k = version_and_size.size_v1 + total_good_plots_v1[k] += 1 + total_size += plot_path.stat().st_size + else: + assert version_and_size.size_v2 is not None + k = version_and_size.size_v2 + total_good_plots_v2[k] += 1 + total_size += plot_path.stat().st_size else: log.error( f"\tProofs {total_proofs} / {challenges}, {round(total_proofs / float(challenges), 4)} " @@ -255,10 +268,12 @@ def process_plot(plot_path: Path, plot_info: PlotInfo, num_start: int, num_end: log.info("") log.info("") log.info("Summary") - total_plots: int = sum(list(total_good_plots.values())) + total_plots: int = sum(list(total_good_plots_v1.values()) + list(total_good_plots_v2.values())) log.info(f"Found {total_plots} valid plots, total size {total_size / (1024 * 1024 * 1024 * 1024):.5f} TiB") - for k, count in sorted(dict(total_good_plots).items()): - log.info(f"{count} plots of size {k}") + for k, count in sorted(dict(total_good_plots_v1).items()): + log.info(f"{count} v1 plots of size {k}") + for k, count in sorted(dict(total_good_plots_v2).items()): + log.info(f"{count} v2 plots of size {k}") grand_total_bad = len(bad_plots_list) + len(plot_manager.failed_to_open_filenames) if grand_total_bad > 0: log.warning(f"{grand_total_bad} invalid plots found:") diff --git a/chia/plotting/create_plots.py b/chia/plotting/create_plots.py index 08369c9c2c54..14b056c88046 100644 --- a/chia/plotting/create_plots.py +++ b/chia/plotting/create_plots.py @@ -83,10 +83,9 @@ async def resolve(self) -> PlotKeys: if self.pool_contract_address is not None: raise RuntimeError("Choose one of pool_contract_address and pool_public_key") pool_public_key = G1Element.from_bytes(bytes.fromhex(self.pool_public_key)) - else: - if self.pool_contract_address is None: - # If nothing is set, farms to the provided key (or the first key) - pool_public_key = await self.get_pool_public_key(keychain_proxy) + elif self.pool_contract_address is None: + # If nothing is set, farms to the provided key (or the first key) + pool_public_key = await self.get_pool_public_key(keychain_proxy) self.resolved_keys = PlotKeys(farmer_public_key, pool_public_key, self.pool_contract_address) finally: diff --git a/chia/plotting/manager.py b/chia/plotting/manager.py index 5cd9acb6eb08..6ba06c8fa005 100644 --- a/chia/plotting/manager.py +++ b/chia/plotting/manager.py @@ -333,16 +333,20 @@ def process_file(file_path: Path) -> Optional[PlotInfo]: # TODO: consider checking if the file was just written to (which would mean that the file is still # being copied). A segfault might happen in this edge case. + k = prover.get_size() level = prover.get_compression_level() - if level == 0: - if prover.get_size() >= 30 and stat_info.st_size < 0.98 * expected_size: - log.warning( - f"Not farming plot {file_path}. " - f"Size is {stat_info.st_size / (1024**3)} GiB, " - f"but expected at least: {expected_size / (1024**3)} GiB. " - "We assume the file is being copied." - ) - return None + if ( + level == 0 + and stat_info.st_size < 0.98 * expected_size + and ((k.size_v1 is not None and k.size_v1 >= 30) or (k.size_v2 is not None and k.size_v2 >= 28)) + ): + log.warning( + f"Not farming plot {file_path}. " + f"Size is {stat_info.st_size / (1024**3)} GiB, " + f"but expected at least: {expected_size / (1024**3)} GiB. " + "We assume the file is being copied." 
+ ) + return None cache_entry = CacheEntry.from_prover(prover) self.cache.update(file_path, cache_entry) diff --git a/chia/plotting/prover.py b/chia/plotting/prover.py index cd9474b0d9cd..06173338eff2 100644 --- a/chia/plotting/prover.py +++ b/chia/plotting/prover.py @@ -3,6 +3,7 @@ from enum import IntEnum from typing import TYPE_CHECKING, ClassVar, Protocol, cast +from chia_rs import PlotSize from chia_rs.sized_bytes import bytes32 from chia_rs.sized_ints import uint8 from chiapos import DiskProver @@ -20,14 +21,20 @@ class PlotVersion(IntEnum): class ProverProtocol(Protocol): def get_filename(self) -> str: ... - def get_size(self) -> uint8: ... + def get_size(self) -> PlotSize: ... def get_memo(self) -> bytes: ... def get_compression_level(self) -> uint8: ... def get_version(self) -> PlotVersion: ... def __bytes__(self) -> bytes: ... def get_id(self) -> bytes32: ... def get_qualities_for_challenge(self, challenge: bytes32) -> list[bytes32]: ... - def get_full_proof(self, challenge: bytes, index: int, parallel_read: bool = True) -> bytes: ... + + # this is only supported by v2 plots + def get_partial_proofs_for_challenge(self, challenge: bytes32, required_plot_strength: uint8) -> list[bytes]: ... + + # this is only supported by v1 plots. v2 plots first get the partial proof + # and turn it into a full proof by calling solve_proof(), or pass it to the solver service + def get_full_proof(self, challenge: bytes32, index: int, parallel_read: bool = True) -> bytes: ... @classmethod def from_bytes(cls, data: bytes) -> ProverProtocol: ... @@ -45,7 +52,7 @@ def __init__(self, filename: str): def get_filename(self) -> str: return str(self._filename) - def get_size(self) -> uint8: + def get_size(self) -> PlotSize: # TODO: todo_v2_plots get k size from plot raise NotImplementedError("V2 plot format is not yet implemented") @@ -54,8 +61,8 @@ def get_memo(self) -> bytes: raise NotImplementedError("V2 plot format is not yet implemented") def get_compression_level(self) -> uint8: - # TODO: todo_v2_plots implement compression level retrieval - raise NotImplementedError("V2 plot format is not yet implemented") + # v2 plots are never compressed + return uint8(0) def get_version(self) -> PlotVersion: return PlotVersion.V2 @@ -68,14 +75,16 @@ def get_id(self) -> bytes32: # TODO: Extract plot ID from V2 plot file raise NotImplementedError("V2 plot format is not yet implemented") - def get_qualities_for_challenge(self, challenge: bytes) -> list[bytes32]: - # TODO: todo_v2_plots Implement plot quality lookup - raise NotImplementedError("V2 plot format is not yet implemented") + def get_qualities_for_challenge(self, challenge: bytes32) -> list[bytes32]: + raise AssertionError("V2 plot format does not support qualities directly, use partial proofs") - def get_full_proof(self, challenge: bytes, index: int, parallel_read: bool = True) -> bytes: - # TODO: todo_v2_plots Implement plot proof generation + def get_partial_proofs_for_challenge(self, challenge: bytes32, required_plot_strength: uint8) -> list[bytes]: + # TODO: todo_v2_plots Implement plot partial proof lookup raise NotImplementedError("V2 plot format is not yet implemented") + def get_full_proof(self, challenge: bytes32, index: int, parallel_read: bool = True) -> bytes: + raise AssertionError("V2 plot format requires a solver to get the full proof") + @classmethod def from_bytes(cls, data: bytes) -> V2Prover: # TODO: todo_v2_plots Implement prover deserialization from cache @@ -94,8 +103,8 @@ def __init__(self, disk_prover: DiskProver) -> None: def 
get_filename(self) -> str: return str(self._disk_prover.get_filename()) - def get_size(self) -> uint8: - return uint8(self._disk_prover.get_size()) + def get_size(self) -> PlotSize: + return PlotSize.make_v1(uint8(self._disk_prover.get_size())) def get_memo(self) -> bytes: return bytes(self._disk_prover.get_memo()) @@ -115,7 +124,10 @@ def get_id(self) -> bytes32: def get_qualities_for_challenge(self, challenge: bytes32) -> list[bytes32]: return [bytes32(quality) for quality in self._disk_prover.get_qualities_for_challenge(challenge)] - def get_full_proof(self, challenge: bytes, index: int, parallel_read: bool = True) -> bytes: + def get_partial_proofs_for_challenge(self, challenge: bytes32, required_plot_strength: uint8) -> list[bytes]: + raise AssertionError("V1 plot format doesn't use partial proofs") + + def get_full_proof(self, challenge: bytes32, index: int, parallel_read: bool = True) -> bytes: return bytes(self._disk_prover.get_full_proof(challenge, index, parallel_read)) @classmethod diff --git a/chia/pools/pool_wallet.py b/chia/pools/pool_wallet.py index 1f0661548e88..bd664f513530 100644 --- a/chia/pools/pool_wallet.py +++ b/chia/pools/pool_wallet.py @@ -527,6 +527,7 @@ async def generate_travel_transactions(self, fee: uint64, action_scope: WalletAc confirmed_at_height=uint32(0), created_at_time=uint64(time.time()), to_puzzle_hash=new_full_puzzle.get_tree_hash(), + to_address=self.wallet_state_manager.encode_puzzle_hash(new_full_puzzle.get_tree_hash()), amount=uint64(1), fee_amount=fee, confirmed=False, @@ -800,6 +801,7 @@ async def claim_pool_rewards( confirmed_at_height=uint32(0), created_at_time=current_time, to_puzzle_hash=current_state.current.target_puzzle_hash, + to_address=self.wallet_state_manager.encode_puzzle_hash(current_state.current.target_puzzle_hash), amount=uint64(total_amount), fee_amount=fee, # This will not be double counted in self.standard_wallet confirmed=False, diff --git a/chia/protocols/harvester_protocol.py b/chia/protocols/harvester_protocol.py index 5554d5c95f9a..ff1213c82544 100644 --- a/chia/protocols/harvester_protocol.py +++ b/chia/protocols/harvester_protocol.py @@ -34,6 +34,20 @@ class HarvesterHandshake(Streamable): @streamable @dataclass(frozen=True) class NewSignagePointHarvester(Streamable): + challenge_hash: bytes32 + difficulty: uint64 + sub_slot_iters: uint64 + signage_point_index: uint8 + sp_hash: bytes32 + pool_difficulties: list[PoolDifficulty] + filter_prefix_bits: uint8 + + +# this message has the same message ID as NewSignagePointHarvester, but this +# message format is used if the protocol version is 0.0.37 or higher +@streamable +@dataclass(frozen=True) +class NewSignagePointHarvester2(Streamable): challenge_hash: bytes32 difficulty: uint64 sub_slot_iters: uint64 @@ -63,6 +77,20 @@ class NewProofOfSpace(Streamable): fee_info: Optional[ProofOfSpaceFeeInfo] +@streamable +@dataclass(frozen=True) +class PartialProofsData(Streamable): + challenge_hash: bytes32 + sp_hash: bytes32 + plot_identifier: str + partial_proofs: list[bytes] # 16 * k bits blobs instead of 32-byte quality strings + signage_point_index: uint8 + plot_size: uint8 + pool_public_key: Optional[G1Element] + pool_contract_puzzle_hash: Optional[bytes32] + plot_public_key: G1Element + + # Source data corresponding to the hash that is sent to the Harvester for signing class SigningDataKind(IntEnum): FOLIAGE_BLOCK_DATA = 1 diff --git a/chia/protocols/outbound_message.py b/chia/protocols/outbound_message.py index e3632fa459d4..bf75f2f6a9e0 100644 --- 
a/chia/protocols/outbound_message.py +++ b/chia/protocols/outbound_message.py @@ -18,6 +18,7 @@ class NodeType(IntEnum): INTRODUCER = 5 WALLET = 6 DATA_LAYER = 7 + SOLVER = 8 @streamable diff --git a/chia/protocols/protocol_message_types.py b/chia/protocols/protocol_message_types.py index 3aea02990a45..b3824dcffb2f 100644 --- a/chia/protocols/protocol_message_types.py +++ b/chia/protocols/protocol_message_types.py @@ -13,6 +13,7 @@ class ProtocolMessageTypes(Enum): new_proof_of_space = 5 request_signatures = 6 respond_signatures = 7 + partial_proofs = 110 # Farmer protocol (farmer <-> full_node) new_signage_point = 8 @@ -136,4 +137,10 @@ class ProtocolMessageTypes(Enum): request_cost_info = 106 respond_cost_info = 107 + # new farmer protocol messages + solution_response = 108 + + # solver protocol + solve = 109 + error = 255 diff --git a/chia/protocols/shared_protocol.py b/chia/protocols/shared_protocol.py index b628ca7ddb2b..ead02792fc79 100644 --- a/chia/protocols/shared_protocol.py +++ b/chia/protocols/shared_protocol.py @@ -17,6 +17,7 @@ NodeType.INTRODUCER: "0.0.36", NodeType.WALLET: "0.0.38", NodeType.DATA_LAYER: "0.0.36", + NodeType.SOLVER: "0.0.37", } """ @@ -65,6 +66,7 @@ class Capability(IntEnum): NodeType.INTRODUCER: _capabilities, NodeType.WALLET: _capabilities, NodeType.DATA_LAYER: _capabilities, + NodeType.SOLVER: _capabilities, } diff --git a/chia/protocols/solver_protocol.py b/chia/protocols/solver_protocol.py new file mode 100644 index 000000000000..891bfb846bb9 --- /dev/null +++ b/chia/protocols/solver_protocol.py @@ -0,0 +1,18 @@ +from __future__ import annotations + +from dataclasses import dataclass + +from chia.util.streamable import Streamable, streamable + + +@streamable +@dataclass(frozen=True) +class SolverInfo(Streamable): + partial_proof: bytes # 16 * k bits blob, k (plot size) can be derived from this + + +@streamable +@dataclass(frozen=True) +class SolverResponse(Streamable): + partial_proof: bytes + proof: bytes diff --git a/chia/seeder/crawl_store.py b/chia/seeder/crawl_store.py index dde1a7a18df8..be565efcea81 100644 --- a/chia/seeder/crawl_store.py +++ b/chia/seeder/crawl_store.py @@ -195,11 +195,10 @@ async def get_peers_to_crawl(self, min_batch_size: int, max_batch_size: int) -> counter += 1 if reliability.ignore_till < now and reliability.ban_till < now: add = True - else: - if reliability.ban_till >= now: - self.banned_peers += 1 - elif reliability.ignore_till >= now: - self.ignored_peers += 1 + elif reliability.ban_till >= now: + self.banned_peers += 1 + elif reliability.ignore_till >= now: + self.ignored_peers += 1 record = self.host_to_records[peer_id] if record.last_try_timestamp == 0 and record.connected_timestamp == 0: add = True @@ -342,9 +341,6 @@ def load_host_to_version(self) -> tuple[dict[str, str], dict[str, uint64]]: handshake = {} for host, record in self.host_to_records.items(): - if host not in self.host_to_records: - continue - record = self.host_to_records[host] if record.version == "undefined": continue if record.handshake_time < time.time() - 5 * 24 * 3600: diff --git a/chia/seeder/crawler_service.py b/chia/seeder/crawler_service.py new file mode 100644 index 000000000000..d0fb88d15f64 --- /dev/null +++ b/chia/seeder/crawler_service.py @@ -0,0 +1,8 @@ +from __future__ import annotations + +from chia.seeder.crawler import Crawler +from chia.seeder.crawler_api import CrawlerAPI +from chia.seeder.crawler_rpc_api import CrawlerRpcApi +from chia.server.start_service import Service + +CrawlerService = Service[Crawler, CrawlerAPI, 
CrawlerRpcApi] diff --git a/chia/seeder/start_crawler.py b/chia/seeder/start_crawler.py index 3799acaf4bb0..abc621b8a370 100644 --- a/chia/seeder/start_crawler.py +++ b/chia/seeder/start_crawler.py @@ -15,7 +15,7 @@ from chia.seeder.crawler import Crawler from chia.seeder.crawler_api import CrawlerAPI from chia.seeder.crawler_rpc_api import CrawlerRpcApi -from chia.server.aliases import CrawlerService +from chia.seeder.crawler_service import CrawlerService from chia.server.signal_handlers import SignalHandlers from chia.server.start_service import RpcInfo, Service, async_run from chia.util.chia_logging import initialize_service_logging diff --git a/chia/server/address_manager.py b/chia/server/address_manager.py index 4a3cb6f9f913..3a3130f03f90 100644 --- a/chia/server/address_manager.py +++ b/chia/server/address_manager.py @@ -177,7 +177,7 @@ def get_bucket_position(self, key: int, is_new: bool, nBucket: int) -> int: def is_terrible(self, now: Optional[int] = None) -> bool: if now is None: - now = int(math.floor(time.time())) + now = math.floor(time.time()) # never remove things tried in the last minute if self.last_try > 0 and self.last_try >= now - 60: return False @@ -202,7 +202,7 @@ def is_terrible(self, now: Optional[int] = None) -> bool: def get_selection_chance(self, now: Optional[int] = None) -> float: if now is None: - now = int(math.floor(time.time())) + now = math.floor(time.time()) chance = 1.0 since_last_try = max(now - self.last_try, 0) # deprioritize very recent attempts away @@ -371,9 +371,8 @@ def _set_new_matrix(self, row: int, col: int, value: int) -> None: if value == -1: if (row, col) in self.used_new_matrix_positions: self.used_new_matrix_positions.remove((row, col)) - else: - if (row, col) not in self.used_new_matrix_positions: - self.used_new_matrix_positions.add((row, col)) + elif (row, col) not in self.used_new_matrix_positions: + self.used_new_matrix_positions.add((row, col)) # Use only this method for modifying tried matrix. 
def _set_tried_matrix(self, row: int, col: int, value: int) -> None: @@ -381,9 +380,8 @@ def _set_tried_matrix(self, row: int, col: int, value: int) -> None: if value == -1: if (row, col) in self.used_tried_matrix_positions: self.used_tried_matrix_positions.remove((row, col)) - else: - if (row, col) not in self.used_tried_matrix_positions: - self.used_tried_matrix_positions.add((row, col)) + elif (row, col) not in self.used_tried_matrix_positions: + self.used_tried_matrix_positions.add((row, col)) def load_used_table_positions(self) -> None: self.used_new_matrix_positions = set() @@ -587,10 +585,9 @@ def add_to_new_table_(self, addr: TimestampedPeerInfo, source: Optional[PeerInfo info.ref_count += 1 if node_id is not None: self._set_new_matrix(new_bucket, new_bucket_pos, node_id) - else: - if info.ref_count == 0: - if node_id is not None: - self.delete_new_entry_(node_id) + elif info.ref_count == 0: + if node_id is not None: + self.delete_new_entry_(node_id) return is_unique def attempt_(self, addr: PeerInfo, count_failures: bool, timestamp: int) -> None: @@ -737,7 +734,7 @@ def get_peers_(self) -> list[TimestampedPeerInfo]: return addr def cleanup(self, max_timestamp_difference: int, max_consecutive_failures: int) -> None: - now = int(math.floor(time.time())) + now = math.floor(time.time()) for bucket in range(NEW_BUCKET_COUNT): for pos in range(BUCKET_SIZE): if self.new_matrix[bucket][pos] != -1: diff --git a/chia/server/aliases.py b/chia/server/aliases.py deleted file mode 100644 index 7eb09a1eb4f5..000000000000 --- a/chia/server/aliases.py +++ /dev/null @@ -1,35 +0,0 @@ -from __future__ import annotations - -from chia.data_layer.data_layer import DataLayer -from chia.data_layer.data_layer_api import DataLayerAPI -from chia.data_layer.data_layer_rpc_api import DataLayerRpcApi -from chia.farmer.farmer import Farmer -from chia.farmer.farmer_api import FarmerAPI -from chia.farmer.farmer_rpc_api import FarmerRpcApi -from chia.full_node.full_node import FullNode -from chia.full_node.full_node_api import FullNodeAPI -from chia.full_node.full_node_rpc_api import FullNodeRpcApi -from chia.harvester.harvester import Harvester -from chia.harvester.harvester_api import HarvesterAPI -from chia.harvester.harvester_rpc_api import HarvesterRpcApi -from chia.introducer.introducer import Introducer -from chia.introducer.introducer_api import IntroducerAPI -from chia.seeder.crawler import Crawler -from chia.seeder.crawler_api import CrawlerAPI -from chia.seeder.crawler_rpc_api import CrawlerRpcApi -from chia.server.start_service import Service -from chia.timelord.timelord import Timelord -from chia.timelord.timelord_api import TimelordAPI -from chia.timelord.timelord_rpc_api import TimelordRpcApi -from chia.wallet.wallet_node import WalletNode -from chia.wallet.wallet_node_api import WalletNodeAPI -from chia.wallet.wallet_rpc_api import WalletRpcApi - -CrawlerService = Service[Crawler, CrawlerAPI, CrawlerRpcApi] -DataLayerService = Service[DataLayer, DataLayerAPI, DataLayerRpcApi] -FarmerService = Service[Farmer, FarmerAPI, FarmerRpcApi] -FullNodeService = Service[FullNode, FullNodeAPI, FullNodeRpcApi] -HarvesterService = Service[Harvester, HarvesterAPI, HarvesterRpcApi] -IntroducerService = Service[Introducer, IntroducerAPI, FullNodeRpcApi] -TimelordService = Service[Timelord, TimelordAPI, TimelordRpcApi] -WalletService = Service[WalletNode, WalletNodeAPI, WalletRpcApi] diff --git a/chia/server/node_discovery.py b/chia/server/node_discovery.py index b34c4bbb0d02..2d5593cca2f2 100644 --- 
a/chia/server/node_discovery.py +++ b/chia/server/node_discovery.py @@ -228,12 +228,11 @@ async def start_client_async(self, addr: PeerInfo, is_feeler: bool) -> None: if self.server.is_duplicate_or_self_connection(addr): # Mark it as a softer attempt, without counting the failures. await self.address_manager.attempt(addr, False) + elif client_connected is True: + await self.address_manager.mark_good(addr) + await self.address_manager.connect(addr) else: - if client_connected is True: - await self.address_manager.mark_good(addr) - await self.address_manager.connect(addr) - else: - await self.address_manager.attempt(addr, True) + await self.address_manager.attempt(addr, True) self.pending_outbound_connections.remove(addr.host) except Exception as e: if addr.host in self.pending_outbound_connections: diff --git a/chia/server/rate_limit_numbers.py b/chia/server/rate_limit_numbers.py index 3edccf729370..b7eea59ae8c2 100644 --- a/chia/server/rate_limit_numbers.py +++ b/chia/server/rate_limit_numbers.py @@ -24,6 +24,8 @@ class RLSettings: # this class is used to indicate that a message type is not subject to a rate # limit, but just a per-message size limit. This may be appropriate for response # messages that are implicitly limited by their corresponding request message +# Unlimited message types are also not subject to the overall limit across all +# messages (just like messages in the "tx" category) @dataclasses.dataclass(frozen=True) class Unlimited: max_size: int # Max size of each request diff --git a/chia/server/rate_limits.py b/chia/server/rate_limits.py index e8670288dac6..685ed142517b 100644 --- a/chia/server/rate_limits.py +++ b/chia/server/rate_limits.py @@ -4,7 +4,7 @@ import logging import time from collections import Counter -from typing import Optional +from typing import Callable, Optional from chia.protocols.outbound_message import Message from chia.protocols.protocol_message_types import ProtocolMessageTypes @@ -18,14 +18,22 @@ class RateLimiter: incoming: bool reset_seconds: int - current_minute: int + current_slot: int message_counts: Counter[ProtocolMessageTypes] message_cumulative_sizes: Counter[ProtocolMessageTypes] percentage_of_limit: int non_tx_message_counts: int = 0 non_tx_cumulative_size: int = 0 + get_time: Callable[[], float] - def __init__(self, incoming: bool, reset_seconds: int = 60, percentage_of_limit: int = 100): + def __init__( + self, + incoming: bool, + reset_seconds: int = 60, + percentage_of_limit: int = 100, + *, + get_time: Callable[[], float] = time.monotonic, + ): """ The incoming parameter affects whether counters are incremented unconditionally or not. For incoming messages, the counters are always @@ -33,9 +41,10 @@ def __init__(self, incoming: bool, reset_seconds: int = 60, percentage_of_limit: if they are allowed to be sent by the rate limiter, since we won't send the messages otherwise. """ + self.get_time = get_time self.incoming = incoming self.reset_seconds = reset_seconds - self.current_minute = int(time.time() // reset_seconds) + self.current_slot = int(get_time() // reset_seconds) self.message_counts = Counter() self.message_cumulative_sizes = Counter() self.percentage_of_limit = percentage_of_limit @@ -51,9 +60,9 @@ def process_msg_and_check( hit and the message is good to be sent or received. 
""" - current_minute = int(time.time() // self.reset_seconds) - if current_minute != self.current_minute: - self.current_minute = current_minute + current_slot = int(self.get_time() // self.reset_seconds) + if current_slot != self.current_slot: + self.current_slot = current_slot self.message_counts = Counter() self.message_cumulative_sizes = Counter() self.non_tx_message_counts = 0 @@ -74,7 +83,7 @@ def process_msg_and_check( rate_limits = get_rate_limits_to_use(our_capabilities, peer_capabilities) try: - limits: RLSettings = rate_limits["default_settings"] + limits: RLSettings if message_type in rate_limits["rate_limits_tx"]: limits = rate_limits["rate_limits_tx"][message_type] elif message_type in rate_limits["rate_limits_other"]: @@ -104,6 +113,7 @@ def process_msg_and_check( log.warning( f"Message type {message_type} not found in rate limits (scale factor: {proportion_of_limit})", ) + limits = rate_limits["default_settings"] if isinstance(limits, Unlimited): # this message type is not rate limited. This is used for @@ -121,9 +131,9 @@ def process_msg_and_check( if new_message_counts > limits.frequency * proportion_of_limit: return " ".join( [ - f"message count: {new_message_counts}" - f"> {limits.frequency * proportion_of_limit}" - f"(scale factor: {proportion_of_limit})" + f"message count: {new_message_counts}", + f"> {limits.frequency * proportion_of_limit}", + f"(scale factor: {proportion_of_limit})", ] ) if len(message.data) > limits.max_size: diff --git a/chia/server/server.py b/chia/server/server.py index 7b64c7877afa..f6f97b50791c 100644 --- a/chia/server/server.py +++ b/chia/server/server.py @@ -172,8 +172,14 @@ def create( private_cert_path, private_key_path = None, None public_cert_path, public_key_path = None, None - authenticated_client_types = {NodeType.HARVESTER} - authenticated_server_types = {NodeType.HARVESTER, NodeType.FARMER, NodeType.WALLET, NodeType.DATA_LAYER} + authenticated_client_types = {NodeType.HARVESTER, NodeType.SOLVER} + authenticated_server_types = { + NodeType.HARVESTER, + NodeType.FARMER, + NodeType.WALLET, + NodeType.DATA_LAYER, + NodeType.SOLVER, + } if local_type in authenticated_client_types: # Authenticated clients @@ -260,9 +266,8 @@ async def garbage_collect_connections_task(self) -> None: if is_crawler is not None: if time.time() - connection.creation_time > 5: to_remove.append(connection) - else: - if time.time() - connection.last_message_time > 1800: - to_remove.append(connection) + elif time.time() - connection.last_message_time > 1800: + to_remove.append(connection) for connection in to_remove: self.log.debug(f"Garbage collecting connection {connection.peer_info.host} due to inactivity") if connection.closed: diff --git a/chia/simulator/block_tools.py b/chia/simulator/block_tools.py index e51143c08e16..77b8ed9f0811 100644 --- a/chia/simulator/block_tools.py +++ b/chia/simulator/block_tools.py @@ -14,7 +14,7 @@ from dataclasses import dataclass, replace from pathlib import Path from random import Random -from typing import Any, Callable, Optional +from typing import Any, Callable, Optional, Union import anyio from chia_puzzles_py.programs import CHIALISP_DESERIALISATION, ROM_BOOTSTRAP_GENERATOR @@ -28,7 +28,6 @@ G1Element, G2Element, InfusedChallengeChainSubSlot, - PlotSize, PoolTarget, PrivateKey, ProofOfSpace, @@ -65,6 +64,7 @@ from chia.full_node.bundle_tools import simple_solution_generator, simple_solution_generator_backrefs from chia.plotting.create_plots import PlotKeys, create_plots from chia.plotting.manager import PlotManager +from 
chia.plotting.prover import PlotVersion from chia.plotting.util import ( Params, PlotRefreshEvents, @@ -92,9 +92,13 @@ from chia.types.blockchain_format.proof_of_space import ( calculate_pos_challenge, calculate_prefix_bits, + calculate_required_plot_strength, generate_plot_public_key, generate_taproot_sk, + make_pos, passes_plot_filter, + quality_for_partial_proof, + solve_proof, ) from chia.types.blockchain_format.serialized_program import SerializedProgram from chia.types.blockchain_format.vdf import VDFInfo, VDFProof @@ -898,11 +902,10 @@ def get_consecutive_blocks( # address, so continue until a proof of space tied to a pk is found continue pool_target = PoolTarget(proof_of_space.pool_contract_puzzle_hash, uint32(0)) + elif pool_reward_puzzle_hash is not None: + pool_target = PoolTarget(pool_reward_puzzle_hash, uint32(0)) else: - if pool_reward_puzzle_hash is not None: - pool_target = PoolTarget(pool_reward_puzzle_hash, uint32(0)) - else: - pool_target = PoolTarget(self.pool_ph, uint32(0)) + pool_target = PoolTarget(self.pool_ph, uint32(0)) new_gen = self.setup_new_gen( tx_block_heights, @@ -966,7 +969,6 @@ def get_consecutive_blocks( block_list.append(full_block) if include_transactions: - prev_tx_height = full_block.height for coin in full_block.get_included_reward_coins(): if coin.puzzle_hash == self.farmer_ph: pending_rewards.append(coin) @@ -980,6 +982,7 @@ def get_consecutive_blocks( if full_block.transactions_generator is not None: tx_block_heights.append(full_block.height) + prev_tx_height = full_block.height blocks_added_this_sub_slot += 1 blocks[full_block.header_hash] = block_record @@ -1193,11 +1196,10 @@ def get_consecutive_blocks( # address, so continue until a proof of space tied to a pk is found continue pool_target = PoolTarget(proof_of_space.pool_contract_puzzle_hash, uint32(0)) + elif pool_reward_puzzle_hash is not None: + pool_target = PoolTarget(pool_reward_puzzle_hash, uint32(0)) else: - if pool_reward_puzzle_hash is not None: - pool_target = PoolTarget(pool_reward_puzzle_hash, uint32(0)) - else: - pool_target = PoolTarget(self.pool_ph, uint32(0)) + pool_target = PoolTarget(self.pool_ph, uint32(0)) new_gen = self.setup_new_gen( tx_block_heights, @@ -1261,7 +1263,6 @@ def get_consecutive_blocks( block_list.append(full_block) if include_transactions: - prev_tx_height = full_block.height for coin in full_block.get_included_reward_coins(): if coin.puzzle_hash == self.farmer_ph: pending_rewards.append(coin) @@ -1275,6 +1276,7 @@ def get_consecutive_blocks( if full_block.transactions_generator is not None: tx_block_heights.append(full_block.height) + prev_tx_height = full_block.height blocks_added_this_sub_slot += 1 blocks[full_block.header_hash] = block_record @@ -1502,53 +1504,81 @@ def get_pospaces_for_challenge( found_proofs: list[tuple[uint64, ProofOfSpace]] = [] rng = random.Random() rng.seed(seed) + + required_plot_strength = calculate_required_plot_strength(constants, prev_transaction_b_height) + for plot_info in self.plot_manager.plots.values(): plot_id: bytes32 = plot_info.prover.get_id() if force_plot_id is not None and plot_id != force_plot_id: continue - # TODO: todo_v2_plots support v2 plots in the plot manager - prefix_bits = calculate_prefix_bits(constants, height, PlotSize.make_v1(plot_info.prover.get_size())) - if passes_plot_filter(prefix_bits, plot_id, challenge_hash, signage_point): - new_challenge: bytes32 = calculate_pos_challenge(plot_id, challenge_hash, signage_point) - qualities = plot_info.prover.get_qualities_for_challenge(new_challenge) - - 
for proof_index, quality_str in enumerate(qualities): - required_iters = calculate_iterations_quality( - constants, - quality_str, - # TODO: todo_v2_plots support v2 plots in the plot manager - PlotSize.make_v1(plot_info.prover.get_size()), - difficulty, - signage_point, - sub_slot_iters, - prev_transaction_b_height, - ) - if required_iters < calculate_sp_interval_iters(constants, sub_slot_iters): - proof_xs: bytes = plot_info.prover.get_full_proof(new_challenge, proof_index) + prefix_bits = calculate_prefix_bits(constants, height, plot_info.prover.get_size()) + if not passes_plot_filter(prefix_bits, plot_id, challenge_hash, signage_point): + continue - # Look up local_sk from plot to save locked memory - ( - pool_public_key_or_puzzle_hash, - farmer_public_key, - local_master_sk, - ) = parse_plot_info(plot_info.prover.get_memo()) - local_sk = master_sk_to_local_sk(local_master_sk) - - if isinstance(pool_public_key_or_puzzle_hash, G1Element): - include_taproot = False - else: - assert isinstance(pool_public_key_or_puzzle_hash, bytes32) - include_taproot = True - plot_pk = generate_plot_public_key(local_sk.get_g1(), farmer_public_key, include_taproot) - proof_of_space: ProofOfSpace = ProofOfSpace( - new_challenge, - plot_info.pool_public_key, - plot_info.pool_contract_puzzle_hash, - plot_pk, - plot_info.prover.get_size(), - proof_xs, - ) - found_proofs.append((required_iters, proof_of_space)) + # v2 plots aren't valid until after the hard fork + if ( + prev_transaction_b_height < constants.HARD_FORK2_HEIGHT + and plot_info.prover.get_version() == PlotVersion.V2 + ): + continue + + new_challenge: bytes32 = calculate_pos_challenge(plot_id, challenge_hash, signage_point) + + # these are either qualities (v1) or partial proofs (v2) + proofs: Sequence[Union[bytes32, bytes]] + v = plot_info.prover.get_version() + if v == PlotVersion.V1: + proofs = plot_info.prover.get_qualities_for_challenge(new_challenge) + else: + proofs = plot_info.prover.get_partial_proofs_for_challenge(new_challenge, required_plot_strength) + + for proof_index, proof in enumerate(proofs): + if v == PlotVersion.V2: + quality_str = quality_for_partial_proof(proof, new_challenge) + elif v == PlotVersion.V1: + quality_str = bytes32(proof) + + required_iters = calculate_iterations_quality( + constants, + quality_str, + plot_info.prover.get_size(), + difficulty, + signage_point, + sub_slot_iters, + prev_transaction_b_height, + ) + if required_iters >= calculate_sp_interval_iters(constants, sub_slot_iters): + continue + + proof_xs: bytes + if v == PlotVersion.V1: + proof_xs = plot_info.prover.get_full_proof(new_challenge, proof_index) + else: + proof_xs = solve_proof(proof) + + # Look up local_sk from plot to save locked memory + ( + pool_public_key_or_puzzle_hash, + farmer_public_key, + local_master_sk, + ) = parse_plot_info(plot_info.prover.get_memo()) + local_sk = master_sk_to_local_sk(local_master_sk) + + if isinstance(pool_public_key_or_puzzle_hash, G1Element): + include_taproot = False + else: + assert isinstance(pool_public_key_or_puzzle_hash, bytes32) + include_taproot = True + plot_pk = generate_plot_public_key(local_sk.get_g1(), farmer_public_key, include_taproot) + proof_of_space: ProofOfSpace = make_pos( + new_challenge, + plot_info.pool_public_key, + plot_info.pool_contract_puzzle_hash, + plot_pk, + plot_info.prover.get_size(), + proof_xs, + ) + found_proofs.append((required_iters, proof_of_space)) random_sample = found_proofs if len(found_proofs) >= 1: if rng.random() < 0.1: @@ -1759,6 +1789,7 @@ def load_block_list( 
sub_slot_iters = uint64(constants.SUB_SLOT_ITERS_STARTING) height_to_hash: dict[uint32, bytes32] = {} blocks: dict[bytes32, BlockRecord] = {} + prev_transaction_b_height = uint32(0) for full_block in block_list: if full_block.height != 0: if len(full_block.finished_sub_slots) > 0: @@ -1775,7 +1806,6 @@ def load_block_list( sp_hash = full_block.reward_chain_block.challenge_chain_sp_vdf.output.get_hash() cache = BlockCache(blocks) - prev_transaction_b_height = uint32(0) # TODO: todo_v2_plots required_iters = validate_pospace_and_get_required_iters( constants, @@ -1789,6 +1819,9 @@ def load_block_list( ) assert required_iters is not None + if full_block.is_transaction_block(): + prev_transaction_b_height = full_block.height + blocks[full_block.header_hash] = block_to_block_record( constants, cache, diff --git a/chia/simulator/setup_services.py b/chia/simulator/setup_services.py index d7c299d33c28..9b95e02acfbc 100644 --- a/chia/simulator/setup_services.py +++ b/chia/simulator/setup_services.py @@ -18,33 +18,31 @@ from chia.cmds.init_funcs import init from chia.consensus.constants import replace_str_to_bytes from chia.daemon.server import WebSocketServer, daemon_launch_lock_path +from chia.farmer.farmer_service import FarmerService +from chia.farmer.start_farmer import create_farmer_service +from chia.full_node.full_node_service import FullNodeService +from chia.full_node.start_full_node import create_full_node_service +from chia.harvester.harvester_service import HarvesterService +from chia.harvester.start_harvester import create_harvester_service +from chia.introducer.introducer_service import IntroducerService +from chia.introducer.start_introducer import create_introducer_service from chia.protocols.outbound_message import NodeType from chia.protocols.shared_protocol import Capability, default_capabilities +from chia.seeder.crawler_service import CrawlerService from chia.seeder.dns_server import DNSServer, create_dns_server_service from chia.seeder.start_crawler import create_full_node_crawler_service -from chia.server.aliases import ( - CrawlerService, - FarmerService, - FullNodeService, - HarvesterService, - IntroducerService, - TimelordService, - WalletService, -) from chia.server.resolve_peer_info import set_peer_info from chia.server.signal_handlers import SignalHandlers -from chia.server.start_farmer import create_farmer_service -from chia.server.start_full_node import create_full_node_service -from chia.server.start_harvester import create_harvester_service -from chia.server.start_introducer import create_introducer_service -from chia.server.start_timelord import create_timelord_service -from chia.server.start_wallet import create_wallet_service from chia.simulator.block_tools import BlockTools, test_constants from chia.simulator.keyring import TempKeyring from chia.simulator.ssl_certs import get_next_nodes_certs_and_keys, get_next_private_ca_cert_and_key from chia.simulator.start_simulator import SimulatorFullNodeService, create_full_node_simulator_service +from chia.solver.solver_service import SolverService +from chia.solver.start_solver import create_solver_service from chia.ssl.create_ssl import create_all_ssl +from chia.timelord.start_timelord import create_timelord_service from chia.timelord.timelord_launcher import VDFClientProcessMgr, find_vdf_client, spawn_process +from chia.timelord.timelord_service import TimelordService from chia.types.peer_info import UnresolvedPeerInfo from chia.util.bech32m import encode_puzzle_hash from chia.util.config import 
config_path_for_filename, load_config, lock_and_load_config, save_config @@ -52,6 +50,8 @@ from chia.util.keychain import bytes_to_mnemonic from chia.util.lock import Lockfile from chia.util.task_referencer import create_referenced_task +from chia.wallet.start_wallet import create_wallet_service +from chia.wallet.wallet_service import WalletService log = logging.getLogger(__name__) @@ -506,3 +506,30 @@ async def setup_timelord( async with service.manage(): yield service + + +@asynccontextmanager +async def setup_solver( + root_path: Path, + b_tools: BlockTools, + consensus_constants: ConsensusConstants, + start_service: bool = True, + farmer_peer: Optional[UnresolvedPeerInfo] = None, +) -> AsyncGenerator[SolverService, None]: + with create_lock_and_load_config(b_tools.root_path / "config" / "ssl" / "ca", root_path) as config: + config["logging"]["log_stdout"] = True + config["solver"]["enable_upnp"] = True + config["solver"]["selected_network"] = "testnet0" + config["solver"]["port"] = 0 + config["solver"]["rpc_port"] = 0 + config["solver"]["num_threads"] = 1 + save_config(root_path, "config.yaml", config) + service = create_solver_service( + root_path, + config, + consensus_constants, + farmer_peers={farmer_peer} if farmer_peer is not None else set(), + ) + + async with service.manage(start=start_service): + yield service diff --git a/chia/solver/__init__.py b/chia/solver/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/chia/solver/solver.py b/chia/solver/solver.py new file mode 100644 index 000000000000..8b213a80be83 --- /dev/null +++ b/chia/solver/solver.py @@ -0,0 +1,90 @@ +from __future__ import annotations + +import asyncio +import contextlib +import logging +from collections.abc import AsyncIterator +from concurrent.futures.thread import ThreadPoolExecutor +from pathlib import Path +from typing import TYPE_CHECKING, Any, ClassVar, Optional, cast + +from chia_rs import ConsensusConstants + +from chia.protocols.outbound_message import NodeType +from chia.rpc.rpc_server import StateChangedProtocol, default_get_connections +from chia.server.server import ChiaServer +from chia.server.ws_connection import WSChiaConnection +from chia.types.blockchain_format.proof_of_space import solve_proof + +log = logging.getLogger(__name__) + + +class Solver: + if TYPE_CHECKING: + from chia.rpc.rpc_server import RpcServiceProtocol + + _protocol_check: ClassVar[RpcServiceProtocol] = cast("Solver", None) + + root_path: Path + _server: Optional[ChiaServer] + _shut_down: bool + started: bool = False + executor: ThreadPoolExecutor + state_changed_callback: Optional[StateChangedProtocol] = None + constants: ConsensusConstants + event_loop: asyncio.events.AbstractEventLoop + + @property + def server(self) -> ChiaServer: + if self._server is None: + raise RuntimeError("server not assigned") + + return self._server + + def __init__(self, root_path: Path, config: dict[str, Any], constants: ConsensusConstants): + self.log = log + self.root_path = root_path + self._shut_down = False + num_threads = config["num_threads"] + self.log.info(f"Initializing solver with {num_threads} threads") + self.executor = ThreadPoolExecutor(max_workers=num_threads, thread_name_prefix="solver-") + self._server = None + self.constants = constants + self.state_changed_callback: Optional[StateChangedProtocol] = None + self.log.info("Solver initialization complete") + + @contextlib.asynccontextmanager + async def manage(self) -> AsyncIterator[None]: + try: + self.log.info("Starting solver service") + 
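One design note on the class being defined here: `solve_proof()` is CPU-bound, which is why `Solver` owns a `ThreadPoolExecutor` sized by the `num_threads` config entry. A sketch of how an async caller might use that pool (hypothetical helper, not part of this patch; `Solver` is the class above):

```python
import asyncio
from typing import Optional

async def solve_off_loop(solver: Solver, partial_proof: bytes) -> Optional[bytes]:
    # Run the blocking solve on the solver's worker pool so the event loop stays responsive.
    loop = asyncio.get_running_loop()
    return await loop.run_in_executor(solver.executor, solver.solve, partial_proof)
```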
self.started = True + self.log.info("Solver service started successfully") + yield + finally: + self.log.info("Shutting down solver service") + self._shut_down = True + self.executor.shutdown(wait=True) + self.log.info("Solver service shutdown complete") + + def solve(self, partial_proof: bytes) -> Optional[bytes]: + self.log.debug(f"Solve request: partial={partial_proof.hex()}") + try: + return solve_proof(partial_proof) + except Exception: + self.log.exception("solve_proof()") + return None + + def get_connections(self, request_node_type: Optional[NodeType]) -> list[dict[str, Any]]: + return default_get_connections(server=self.server, request_node_type=request_node_type) + + async def on_connect(self, connection: WSChiaConnection) -> None: + pass + + async def on_disconnect(self, connection: WSChiaConnection) -> None: + self.log.info(f"peer disconnected {connection.get_peer_logging()}") + + def set_server(self, server: ChiaServer) -> None: + self._server = server + + def _set_state_changed_callback(self, callback: StateChangedProtocol) -> None: + self.state_changed_callback = callback diff --git a/chia/solver/solver_api.py b/chia/solver/solver_api.py new file mode 100644 index 000000000000..4ae365fc17c1 --- /dev/null +++ b/chia/solver/solver_api.py @@ -0,0 +1,59 @@ +from __future__ import annotations + +import logging +from typing import TYPE_CHECKING, ClassVar, Optional, cast + +from chia.protocols.outbound_message import Message, make_msg +from chia.protocols.protocol_message_types import ProtocolMessageTypes +from chia.protocols.solver_protocol import SolverInfo, SolverResponse +from chia.server.api_protocol import ApiMetadata +from chia.solver.solver import Solver + + +class SolverAPI: + if TYPE_CHECKING: + from chia.server.api_protocol import ApiProtocol + + _protocol_check: ClassVar[ApiProtocol] = cast("SolverAPI", None) + + log: logging.Logger + solver: Solver + metadata: ClassVar[ApiMetadata] = ApiMetadata() + + def __init__(self, solver: Solver) -> None: + self.log = logging.getLogger(__name__) + self.solver = solver + + def ready(self) -> bool: + return self.solver.started + + @metadata.request(peer_required=False, reply_types=[ProtocolMessageTypes.solution_response]) + async def solve( + self, + request: SolverInfo, + ) -> Optional[Message]: + """ + Solve a V2 plot partial proof to get the full proof of space. + This is called by the farmer when it receives V2 partial proofs from the harvester.
+ """ + if not self.solver.started: + self.log.error("Solver is not started") + return None + + self.log.debug(f"Solving partial {request.partial_proof.hex()}") + + try: + proof = self.solver.solve(request.partial_proof) + if proof is None: + self.log.warning(f"Solver returned no proof for parital {request.partial_proof.hex()}") + return None + + self.log.debug(f"Successfully solved partial proof, returning {len(proof)} byte proof") + return make_msg( + ProtocolMessageTypes.solution_response, + SolverResponse(proof=proof, partial_proof=request.partial_proof), + ) + + except Exception as e: + self.log.error(f"Error solving parital {request.partial_proof.hex()}: {e}") + return None diff --git a/chia/solver/solver_rpc_api.py b/chia/solver/solver_rpc_api.py new file mode 100644 index 000000000000..b427964371df --- /dev/null +++ b/chia/solver/solver_rpc_api.py @@ -0,0 +1,31 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING, Any, ClassVar, Optional, cast + +from chia.rpc.rpc_server import Endpoint, EndpointResult +from chia.solver.solver import Solver +from chia.util.ws_message import WsRpcMessage + + +class SolverRpcApi: + if TYPE_CHECKING: + from chia.rpc.rpc_server import RpcApiProtocol + + _protocol_check: ClassVar[RpcApiProtocol] = cast("SolverRpcApi", None) + + def __init__(self, solver: Solver): + self.service = solver + self.service_name = "chia_solver" + + def get_routes(self) -> dict[str, Endpoint]: + return { + "/get_state": self.get_state, + } + + async def _state_changed(self, change: str, change_data: Optional[dict[str, Any]] = None) -> list[WsRpcMessage]: + return [] + + async def get_state(self, _: dict[str, Any]) -> EndpointResult: + return { + "started": self.service.started, + } diff --git a/chia/solver/solver_rpc_client.py b/chia/solver/solver_rpc_client.py new file mode 100644 index 000000000000..44c2a8201ba0 --- /dev/null +++ b/chia/solver/solver_rpc_client.py @@ -0,0 +1,16 @@ +from __future__ import annotations + +from typing import Any + +from chia.rpc.rpc_client import RpcClient + + +class SolverRpcClient(RpcClient): + """ + Client to Chia RPC, connects to a local solver. Uses HTTP/JSON, and converts back from + JSON into native python objects before returning. All api calls use POST requests. 
+ """ + + async def get_state(self) -> dict[str, Any]: + """Get solver state.""" + return await self.fetch("get_state", {}) diff --git a/chia/solver/solver_service.py b/chia/solver/solver_service.py new file mode 100644 index 000000000000..aa3fdf67f71d --- /dev/null +++ b/chia/solver/solver_service.py @@ -0,0 +1,8 @@ +from __future__ import annotations + +from chia.server.start_service import Service +from chia.solver.solver import Solver +from chia.solver.solver_api import SolverAPI +from chia.solver.solver_rpc_api import SolverRpcApi + +SolverService = Service[Solver, SolverAPI, SolverRpcApi] diff --git a/chia/solver/start_solver.py b/chia/solver/start_solver.py new file mode 100644 index 000000000000..78b0901502be --- /dev/null +++ b/chia/solver/start_solver.py @@ -0,0 +1,105 @@ +from __future__ import annotations + +import os +import pathlib +import sys +from multiprocessing import freeze_support +from typing import Any, Optional + +from chia_rs import ConsensusConstants +from chia_rs.sized_ints import uint16 + +from chia.apis import ApiProtocolRegistry +from chia.consensus.constants import replace_str_to_bytes +from chia.consensus.default_constants import DEFAULT_CONSTANTS, update_testnet_overrides +from chia.protocols.outbound_message import NodeType +from chia.server.signal_handlers import SignalHandlers +from chia.server.start_service import Service, async_run +from chia.solver.solver import Solver +from chia.solver.solver_api import SolverAPI +from chia.solver.solver_rpc_api import SolverRpcApi +from chia.solver.solver_service import SolverService +from chia.types.peer_info import UnresolvedPeerInfo +from chia.util.chia_logging import initialize_service_logging +from chia.util.config import load_config, load_config_cli +from chia.util.default_root import resolve_root_path +from chia.util.task_timing import maybe_manage_task_instrumentation + +# See: https://bugs.python.org/issue29288 +"".encode("idna") + +SERVICE_NAME = "solver" + + +def create_solver_service( + root_path: pathlib.Path, + config: dict[str, Any], + consensus_constants: ConsensusConstants, + farmer_peers: set[UnresolvedPeerInfo] = set(), + connect_to_daemon: bool = True, + override_capabilities: Optional[list[tuple[uint16, str]]] = None, +) -> SolverService: + service_config = config[SERVICE_NAME] + + network_id = service_config["selected_network"] + upnp_list = [] + if service_config["enable_upnp"]: + upnp_list = [service_config["port"]] + + node = Solver(root_path, service_config, consensus_constants) + peer_api = SolverAPI(node) + network_id = service_config["selected_network"] + + rpc_info = None + if service_config.get("start_rpc_server", True): + rpc_info = (SolverRpcApi, service_config["rpc_port"]) + + return Service( + root_path=root_path, + config=config, + node=node, + peer_api=peer_api, + node_type=NodeType.SOLVER, + advertised_port=service_config["port"], + service_name=SERVICE_NAME, + upnp_ports=upnp_list, + on_connect_callback=node.on_connect, + connect_peers=farmer_peers, + network_id=network_id, + rpc_info=rpc_info, + connect_to_daemon=connect_to_daemon, + override_capabilities=override_capabilities, + class_for_type=ApiProtocolRegistry, + ) + + +async def async_main(service_config: dict[str, Any], root_path: pathlib.Path) -> int: + config = load_config(root_path, "config.yaml") + config[SERVICE_NAME] = service_config + network_id = service_config["selected_network"] + overrides = service_config["network_overrides"]["constants"][network_id] + update_testnet_overrides(network_id, overrides) + 
updated_constants = replace_str_to_bytes(DEFAULT_CONSTANTS, **overrides) + initialize_service_logging(service_name=SERVICE_NAME, config=config, root_path=root_path) + + service = create_solver_service(root_path, config, updated_constants) + async with SignalHandlers.manage() as signal_handlers: + await service.setup_process_global_state(signal_handlers=signal_handlers) + await service.run() + + return 0 + + +def main() -> int: + freeze_support() + root_path = resolve_root_path(override=None) + + with maybe_manage_task_instrumentation( + enable=os.environ.get(f"CHIA_INSTRUMENT_{SERVICE_NAME.upper()}") is not None + ): + service_config = load_config_cli(root_path, "config.yaml", SERVICE_NAME) + return async_run(coro=async_main(service_config, root_path=root_path)) + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/chia/ssl/create_ssl.py b/chia/ssl/create_ssl.py index fc2f63b35975..bd4589f2fa11 100644 --- a/chia/ssl/create_ssl.py +++ b/chia/ssl/create_ssl.py @@ -24,6 +24,7 @@ "crawler", "data_layer", "daemon", + "solver", ] _all_public_node_names: list[str] = ["full_node", "wallet", "farmer", "introducer", "timelord", "data_layer"] diff --git a/chia/server/start_timelord.py b/chia/timelord/start_timelord.py similarity index 98% rename from chia/server/start_timelord.py rename to chia/timelord/start_timelord.py index 77e5e1d8cd29..5c716978742b 100644 --- a/chia/server/start_timelord.py +++ b/chia/timelord/start_timelord.py @@ -11,13 +11,13 @@ from chia.consensus.constants import replace_str_to_bytes from chia.consensus.default_constants import DEFAULT_CONSTANTS, update_testnet_overrides from chia.protocols.outbound_message import NodeType -from chia.server.aliases import TimelordService from chia.server.resolve_peer_info import get_unresolved_peer_infos from chia.server.signal_handlers import SignalHandlers from chia.server.start_service import RpcInfo, Service, async_run from chia.timelord.timelord import Timelord from chia.timelord.timelord_api import TimelordAPI from chia.timelord.timelord_rpc_api import TimelordRpcApi +from chia.timelord.timelord_service import TimelordService from chia.util.chia_logging import initialize_service_logging from chia.util.config import load_config, load_config_cli from chia.util.default_root import resolve_root_path diff --git a/chia/timelord/timelord.py b/chia/timelord/timelord.py index f9f490cb9f55..aa559a32f2c5 100644 --- a/chia/timelord/timelord.py +++ b/chia/timelord/timelord.py @@ -160,20 +160,19 @@ async def manage(self) -> AsyncIterator[None]: slow_bluebox = self.config.get("slow_bluebox", False) if not self.bluebox_mode: self.main_loop = create_referenced_task(self._manage_chains()) + elif os.name == "nt" or slow_bluebox: + # `vdf_client` doesn't build on windows, use `prove()` from chiavdf. + workers = self.config.get("slow_bluebox_process_count", 1) + self._executor_shutdown_tempfile = _create_shutdown_file() + self.bluebox_pool = ThreadPoolExecutor( + max_workers=workers, + thread_name_prefix="blue-box-", + ) + self.main_loop = create_referenced_task( + self._start_manage_discriminant_queue_sanitizer_slow(self.bluebox_pool, workers) + ) else: - if os.name == "nt" or slow_bluebox: - # `vdf_client` doesn't build on windows, use `prove()` from chiavdf. 
- workers = self.config.get("slow_bluebox_process_count", 1) - self._executor_shutdown_tempfile = _create_shutdown_file() - self.bluebox_pool = ThreadPoolExecutor( - max_workers=workers, - thread_name_prefix="blue-box-", - ) - self.main_loop = create_referenced_task( - self._start_manage_discriminant_queue_sanitizer_slow(self.bluebox_pool, workers) - ) - else: - self.main_loop = create_referenced_task(self._manage_discriminant_queue_sanitizer()) + self.main_loop = create_referenced_task(self._manage_discriminant_queue_sanitizer()) log.info(f"Started timelord, listening on port {self.get_vdf_server_port()}") try: yield diff --git a/chia/timelord/timelord_service.py b/chia/timelord/timelord_service.py new file mode 100644 index 000000000000..994a79118a06 --- /dev/null +++ b/chia/timelord/timelord_service.py @@ -0,0 +1,8 @@ +from __future__ import annotations + +from chia.server.start_service import Service +from chia.timelord.timelord import Timelord +from chia.timelord.timelord_api import TimelordAPI +from chia.timelord.timelord_rpc_api import TimelordRpcApi + +TimelordService = Service[Timelord, TimelordAPI, TimelordRpcApi] diff --git a/chia/types/blockchain_format/proof_of_space.py b/chia/types/blockchain_format/proof_of_space.py index 3befb30fc6d9..cb1f63128963 100644 --- a/chia/types/blockchain_format/proof_of_space.py +++ b/chia/types/blockchain_format/proof_of_space.py @@ -13,23 +13,64 @@ log = logging.getLogger(__name__) - -def get_plot_id(pos: ProofOfSpace) -> bytes32: - assert pos.pool_public_key is None or pos.pool_contract_puzzle_hash is None - if pos.pool_public_key is None: - assert pos.pool_contract_puzzle_hash is not None - return calculate_plot_id_ph(pos.pool_contract_puzzle_hash, pos.plot_public_key) - return calculate_plot_id_pk(pos.pool_public_key, pos.plot_public_key) +# These are temporary stubs for chiapos2 that we build against until it's ready to be integrated. # returns quality string for v2 plot, or None if invalid def validate_proof_v2( - plot_id: bytes32, size: uint8, difficulty: uint8, challenge: bytes32, proof: bytes + plot_id: bytes32, size: uint8, required_plot_strength: uint8, challenge: bytes32, proof: bytes ) -> Optional[bytes32]: # TODO: todo_v2_plots call into new chiapos library raise NotImplementedError +# this is compute-intensive: solving a partial proof yields the full proof +def solve_proof(partial_proof: bytes) -> bytes: + # TODO: todo_v2_plots call into new chiapos library + raise NotImplementedError + + +# given a partial proof, computes the quality. This is used to compute required iters.
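To make that comment concrete: the quality derived from a partial proof feeds the same eligibility check that V1 qualities do. A sketch, with names as used in `get_pospaces_for_challenge` in `block_tools.py` above:

```python
# How a V2 partial proof becomes a farming-eligibility decision (sketch).
quality_str = quality_for_partial_proof(partial_proof, new_challenge)
required_iters = calculate_iterations_quality(
    constants,
    quality_str,
    plot_size,
    difficulty,
    signage_point,
    sub_slot_iters,
    prev_transaction_b_height,
)
eligible = required_iters < calculate_sp_interval_iters(constants, sub_slot_iters)
```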
+def quality_for_partial_proof(partial_proof: bytes, challenge: bytes32) -> bytes32: + # TODO: todo_v2_plots call into new chiapos library + return std_hash(partial_proof + challenge) + + +def make_pos( + challenge: bytes32, + pool_public_key: Optional[G1Element], + pool_contract_puzzle_hash: Optional[bytes32], + plot_public_key: G1Element, + version_and_size: PlotSize, + proof: bytes, +) -> ProofOfSpace: + k: int + if version_and_size.size_v1 is not None: + k = version_and_size.size_v1 + else: + assert version_and_size.size_v2 is not None + k = version_and_size.size_v2 + assert k is not None + k |= 0x80 + + return ProofOfSpace( + challenge, + pool_public_key, + pool_contract_puzzle_hash, + plot_public_key, + uint8(k), + proof, + ) + + +def get_plot_id(pos: ProofOfSpace) -> bytes32: + assert pos.pool_public_key is None or pos.pool_contract_puzzle_hash is None + if pos.pool_public_key is None: + assert pos.pool_contract_puzzle_hash is not None + return calculate_plot_id_ph(pos.pool_contract_puzzle_hash, pos.plot_public_key) + return calculate_plot_id_pk(pos.pool_public_key, pos.plot_public_key) + + def check_plot_size(constants: ConsensusConstants, ps: PlotSize) -> bool: size_v1 = ps.size_v1 if size_v1 is not None: @@ -50,6 +91,9 @@ def check_plot_size(constants: ConsensusConstants, ps: PlotSize) -> bool: if size_v2 > constants.MAX_PLOT_SIZE_V2: log.error("Plot size is higher than the maximum") return False + if (size_v2 & 1) == 1: + log.error("Plot size is odd") + return False return True @@ -98,8 +142,8 @@ def verify_and_get_quality_string( # === V2 plots === assert plot_size.size_v2 is not None - plot_difficulty = calculate_plot_difficulty(constants, height) - return validate_proof_v2(plot_id, plot_size.size_v2, plot_difficulty, pos.challenge, bytes(pos.proof)) + required_plot_strength = calculate_required_plot_strength(constants, height) + return validate_proof_v2(plot_id, plot_size.size_v2, required_plot_strength, pos.challenge, bytes(pos.proof)) def passes_plot_filter( @@ -137,16 +181,16 @@ def calculate_prefix_bits(constants: ConsensusConstants, height: uint32, plot_si return max(0, prefix_bits) -def calculate_plot_difficulty(constants: ConsensusConstants, height: uint32) -> uint8: - if height < constants.PLOT_DIFFICULTY_4_HEIGHT: - return constants.PLOT_DIFFICULTY_INITIAL - if height < constants.PLOT_DIFFICULTY_5_HEIGHT: +def calculate_required_plot_strength(constants: ConsensusConstants, height: uint32) -> uint8: + if height < constants.PLOT_STRENGTH_4_HEIGHT: + return constants.PLOT_STRENGTH_INITIAL + if height < constants.PLOT_STRENGTH_5_HEIGHT: return uint8(4) - if height < constants.PLOT_DIFFICULTY_6_HEIGHT: + if height < constants.PLOT_STRENGTH_6_HEIGHT: return uint8(5) - if height < constants.PLOT_DIFFICULTY_7_HEIGHT: + if height < constants.PLOT_STRENGTH_7_HEIGHT: return uint8(6) - if height < constants.PLOT_DIFFICULTY_8_HEIGHT: + if height < constants.PLOT_STRENGTH_8_HEIGHT: return uint8(7) else: return uint8(8) diff --git a/chia/types/internal_mempool_item.py b/chia/types/internal_mempool_item.py index 991ae8c34f26..80889e56681b 100644 --- a/chia/types/internal_mempool_item.py +++ b/chia/types/internal_mempool_item.py @@ -14,5 +14,5 @@ class InternalMempoolItem: spend_bundle: SpendBundle conds: SpendBundleConditions height_added_to_mempool: uint32 - # Map of coin ID to coin spend data between the bundle and its NPCResult + # Map of coin ID to coin spend data between the bundle and its SpendBundleConditions bundle_coin_spends: dict[bytes32, BundleCoinSpend] diff --git 
a/chia/util/db_wrapper.py b/chia/util/db_wrapper.py index da19b0db961f..2591eb637865 100644 --- a/chia/util/db_wrapper.py +++ b/chia/util/db_wrapper.py @@ -124,15 +124,14 @@ def get_host_parameter_limit() -> int: limit_number = sqlite3.SQLITE_LIMIT_VARIABLE_NUMBER host_parameter_limit = connection.getlimit(limit_number) - else: - # guessing based on defaults, seems you can't query + # guessing based on defaults, seems you can't query - # https://www.sqlite.org/changes.html#version_3_32_0 - # Increase the default upper bound on the number of parameters from 999 to 32766. - if sqlite3.sqlite_version_info >= (3, 32, 0): - host_parameter_limit = 32766 - else: - host_parameter_limit = 999 + # https://www.sqlite.org/changes.html#version_3_32_0 + # Increase the default upper bound on the number of parameters from 999 to 32766. + elif sqlite3.sqlite_version_info >= (3, 32, 0): + host_parameter_limit = 32766 + else: + host_parameter_limit = 999 return host_parameter_limit diff --git a/chia/util/initial-config.yaml b/chia/util/initial-config.yaml index ecd65c1bf5c8..6aad76064883 100644 --- a/chia/util/initial-config.yaml +++ b/chia/util/initial-config.yaml @@ -629,6 +629,34 @@ wallet: auto_sign_txs: True +solver: + # The solver server will run on this port + port: 8666 + + # Enable or disable UPnP port forwarding + enable_upnp: False + + # Logging configuration + logging: *logging + + # Network overrides and selected network + network_overrides: *network_overrides + selected_network: *selected_network + + # Number of threads for solver operations + num_threads: 4 + + # RPC server configuration + rpc_port: 8667 + start_rpc_server: True + + # SSL configuration + ssl: + private_crt: "config/ssl/solver/private_solver.crt" + private_key: "config/ssl/solver/private_solver.key" + public_crt: "config/ssl/solver/public_solver.crt" + public_key: "config/ssl/solver/public_solver.key" + data_layer: # TODO: consider name # TODO: organize consistently with other sections diff --git a/chia/util/service_groups.py b/chia/util/service_groups.py index b5c05ddce360..037112cd147c 100644 --- a/chia/util/service_groups.py +++ b/chia/util/service_groups.py @@ -12,6 +12,7 @@ "chia_wallet", "chia_data_layer", "chia_data_layer_http", + "chia_solver", ], "daemon": [], # TODO: should this be `data_layer`? 
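A quick way to sanity-check the group changes in this file, as a sketch using the existing `services_for_groups` helper (the farmer-group change appears in the next hunk):

```python
from chia.util.service_groups import services_for_groups

# After this change the farmer group also launches the solver,
# and "solver" is addressable as its own group:
assert "chia_solver" in set(services_for_groups(["farmer"]))
assert set(services_for_groups(["solver"])) == {"chia_solver"}
```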
@@ -19,7 +20,7 @@ "data_layer_http": ["chia_data_layer_http"], "node": ["chia_full_node"], "harvester": ["chia_harvester"], - "farmer": ["chia_harvester", "chia_farmer", "chia_full_node", "chia_wallet"], + "farmer": ["chia_harvester", "chia_farmer", "chia_full_node", "chia_wallet", "chia_solver"], "farmer-no-wallet": ["chia_harvester", "chia_farmer", "chia_full_node"], "farmer-only": ["chia_farmer"], "timelord": ["chia_timelord_launcher", "chia_timelord", "chia_full_node"], @@ -31,6 +32,7 @@ "crawler": ["chia_crawler"], "seeder": ["chia_crawler", "chia_seeder"], "seeder-only": ["chia_seeder"], + "solver": ["chia_solver"], } diff --git a/chia/util/streamable.py b/chia/util/streamable.py index 7d04210a01c1..ccca8ecb5d48 100644 --- a/chia/util/streamable.py +++ b/chia/util/streamable.py @@ -8,7 +8,7 @@ import pprint import traceback from collections.abc import Collection -from enum import Enum +from enum import Enum, EnumMeta from typing import TYPE_CHECKING, Any, BinaryIO, Callable, ClassVar, Optional, TypeVar, Union, get_type_hints from chia_rs.sized_bytes import bytes32 @@ -130,6 +130,10 @@ def is_type_Dict(f_type: object) -> bool: return get_origin(f_type) is dict or f_type is dict +def is_type_Enum(f_type: object) -> bool: + return type(f_type) is EnumMeta + + def convert_optional(convert_func: ConvertFunctionType, item: Any) -> Any: if item is None: return None @@ -307,11 +311,10 @@ def recurse_jsonify( val, None, **next_recursion_env ) return new_dict - + elif isinstance(d, Enum): + return next_recursion_step(d.value, None, **next_recursion_env) elif issubclass(type(d), bytes): return f"0x{bytes(d).hex()}" - elif isinstance(d, Enum): - return d.name elif isinstance(d, bool): return d elif isinstance(d, int): @@ -439,6 +442,10 @@ def function_to_parse_one_item(f_type: type[Any]) -> ParseFunctionType: key_parse_inner_type_f = function_to_parse_one_item(inner_types[0]) value_parse_inner_type_f = function_to_parse_one_item(inner_types[1]) return lambda f: parse_dict(f, key_parse_inner_type_f, value_parse_inner_type_f) + if is_type_Enum(f_type): + if not hasattr(f_type, "_streamable_proxy"): + raise UnsupportedType(f"Using Enum ({f_type}) in streamable requires a 'streamable_enum' wrapper.") + return lambda f: f_type(function_to_parse_one_item(f_type._streamable_proxy)(f)) if f_type is str: return parse_str raise UnsupportedType(f"Type {f_type} does not have parse") @@ -529,6 +536,13 @@ def function_to_stream_one_item(f_type: type[Any]) -> StreamFunctionType: key_stream_inner_type_func = function_to_stream_one_item(inner_types[0]) value_stream_inner_type_func = function_to_stream_one_item(inner_types[1]) return lambda item, f: stream_dict(key_stream_inner_type_func, value_stream_inner_type_func, item, f) + elif is_type_Enum(f_type): + if not hasattr(f_type, "_streamable_proxy"): + raise UnsupportedType(f"Using Enum ({f_type}) in streamable requires a 'streamable_enum' wrapper.") + return lambda item, f: function_to_stream_one_item(f_type._streamable_proxy)( + f_type._streamable_proxy(item.value), # type: ignore[attr-defined] + f, + ) elif f_type is str: return stream_str elif f_type is bool: @@ -700,3 +714,15 @@ class UInt32Range(Streamable): class UInt64Range(Streamable): start: uint64 = uint64(0) stop: uint64 = uint64.MAXIMUM + + +_T_Enum = TypeVar("_T_Enum", bound=EnumMeta) + + +def streamable_enum(proxy: type[object]) -> Callable[[_T_Enum], _T_Enum]: + def streamable_enum_wrapper(cls: _T_Enum) -> _T_Enum: + setattr(cls, "_streamable_proxy", proxy) + setattr(cls, "_ignore_", 
["_streamable_proxy"]) + return cls + + return streamable_enum_wrapper diff --git a/chia/wallet/cat_wallet/cat_outer_puzzle.py b/chia/wallet/cat_wallet/cat_outer_puzzle.py index 1b20cd2d829f..f5a83d9f8581 100644 --- a/chia/wallet/cat_wallet/cat_outer_puzzle.py +++ b/chia/wallet/cat_wallet/cat_outer_puzzle.py @@ -105,8 +105,11 @@ def solve(self, constructor: PuzzleInfo, solver: Solver, inner_puzzle: Program, parent_coin: Coin = parent_spend.coin also = constructor.also() if also is not None: - solution = self._solve(also, solver, puzzle, solution) - puzzle = self._construct(also, puzzle) + constructed_solution = self._solve(also, solver, puzzle, solution) + constructed_puzzle = self._construct(also, puzzle) + else: + constructed_solution = solution + constructed_puzzle = puzzle args = match_cat_puzzle(uncurry_puzzle(parent_spend.puzzle_reveal)) assert args is not None _, _, parent_inner_puzzle = args @@ -114,8 +117,8 @@ def solve(self, constructor: PuzzleInfo, solver: Solver, inner_puzzle: Program, SpendableCAT( coin, tail_hash, - puzzle, - solution, + constructed_puzzle, + constructed_solution, lineage_proof=LineageProof( parent_coin.parent_coin_info, parent_inner_puzzle.get_tree_hash(), uint64(parent_coin.amount) ), diff --git a/chia/wallet/cat_wallet/cat_wallet.py b/chia/wallet/cat_wallet/cat_wallet.py index 82c7c530943d..112b97216f5b 100644 --- a/chia/wallet/cat_wallet/cat_wallet.py +++ b/chia/wallet/cat_wallet/cat_wallet.py @@ -172,10 +172,12 @@ async def create_new_cat_wallet( raise ValueError("Internal Error, unable to generate new CAT coin") cat_pid: bytes32 = cat_coin.parent_coin_info + converted_ph = await self.convert_puzzle_hash(cat_coin.puzzle_hash) cat_record = TransactionRecord( confirmed_at_height=uint32(0), created_at_time=uint64(time.time()), - to_puzzle_hash=(await self.convert_puzzle_hash(cat_coin.puzzle_hash)), + to_puzzle_hash=converted_ph, + to_address=self.wallet_state_manager.encode_puzzle_hash(converted_ph), amount=uint64(cat_coin.amount), fee_amount=fee, confirmed=False, @@ -796,6 +798,7 @@ async def generate_signed_transaction( confirmed_at_height=uint32(0), created_at_time=uint64(time.time()), to_puzzle_hash=puzzle_hashes[0], + to_address=self.wallet_state_manager.encode_puzzle_hash(puzzle_hashes[0]), amount=uint64(payment_sum), fee_amount=fee, confirmed=False, diff --git a/chia/wallet/did_wallet/did_wallet.py b/chia/wallet/did_wallet/did_wallet.py index 7ce21f0f72a1..6e1540629a27 100644 --- a/chia/wallet/did_wallet/did_wallet.py +++ b/chia/wallet/did_wallet/did_wallet.py @@ -648,12 +648,12 @@ async def create_update_spend( action_scope, extra_conditions=(AssertCoinAnnouncement(asserted_id=coin_name, asserted_msg=coin_name),), ) + to_ph = await action_scope.get_puzzle_hash(self.wallet_state_manager, override_reuse_puzhash_with=True) did_record = TransactionRecord( confirmed_at_height=uint32(0), created_at_time=uint64(time.time()), - to_puzzle_hash=await action_scope.get_puzzle_hash( - self.wallet_state_manager, override_reuse_puzhash_with=True - ), + to_puzzle_hash=to_ph, + to_address=self.wallet_state_manager.encode_puzzle_hash(to_ph), amount=uint64(coin.amount), fee_amount=uint64(0), confirmed=False, @@ -734,12 +734,12 @@ async def transfer_did( action_scope, extra_conditions=(AssertCoinAnnouncement(asserted_id=coin_name, asserted_msg=coin_name),), ) + to_ph = await action_scope.get_puzzle_hash(self.wallet_state_manager, override_reuse_puzhash_with=True) did_record = TransactionRecord( confirmed_at_height=uint32(0), created_at_time=uint64(time.time()), - 
to_puzzle_hash=await action_scope.get_puzzle_hash( - self.wallet_state_manager, override_reuse_puzhash_with=True - ), + to_puzzle_hash=to_ph, + to_address=self.wallet_state_manager.encode_puzzle_hash(to_ph), amount=uint64(coin.amount), fee_amount=fee, confirmed=False, @@ -819,6 +819,7 @@ async def create_message_spend( confirmed_at_height=uint32(0), created_at_time=uint64(time.time()), to_puzzle_hash=p2_ph, + to_address=self.wallet_state_manager.encode_puzzle_hash(p2_ph), amount=uint64(coin.amount), fee_amount=uint64(0), confirmed=False, @@ -1024,13 +1025,13 @@ async def generate_new_decentralised_id( assert self.did_info.origin_coin is not None assert self.did_info.current_inner is not None + to_ph = await action_scope.get_puzzle_hash(self.wallet_state_manager, override_reuse_puzhash_with=True) did_record = TransactionRecord( confirmed_at_height=uint32(0), created_at_time=uint64(time.time()), amount=uint64(amount), - to_puzzle_hash=await action_scope.get_puzzle_hash( - self.wallet_state_manager, override_reuse_puzhash_with=True - ), + to_puzzle_hash=to_ph, + to_address=self.wallet_state_manager.encode_puzzle_hash(to_ph), fee_amount=fee, confirmed=False, sent=uint32(0), diff --git a/chia/wallet/nft_wallet/nft_wallet.py b/chia/wallet/nft_wallet/nft_wallet.py index a3f2a288d019..631d4c6cacb2 100644 --- a/chia/wallet/nft_wallet/nft_wallet.py +++ b/chia/wallet/nft_wallet/nft_wallet.py @@ -550,9 +550,9 @@ async def create_from_puzzle_info( name: Optional[str] = None, ) -> Any: # Off the bat we don't support multiple profile but when we do this will have to change - for wallet in wallet_state_manager.wallets.values(): - if wallet.type() == WalletType.NFT.value: - return wallet + for wsm_wallet in wallet_state_manager.wallets.values(): + if wsm_wallet.type() == WalletType.NFT.value: + return wsm_wallet # TODO: These are not the arguments to this function yet but they will be return await cls.create_new_nft_wallet( @@ -617,6 +617,7 @@ async def generate_signed_transaction( confirmed_at_height=uint32(0), created_at_time=uint64(time.time()), to_puzzle_hash=puzzle_hashes[0], + to_address=self.wallet_state_manager.encode_puzzle_hash(puzzle_hashes[0]), amount=uint64(payment_sum), fee_amount=fee, confirmed=False, @@ -1406,6 +1407,7 @@ async def mint_from_did( confirmed_at_height=uint32(0), created_at_time=uint64(time.time()), to_puzzle_hash=innerpuz.get_tree_hash(), + to_address=self.wallet_state_manager.encode_puzzle_hash(innerpuz.get_tree_hash()), amount=uint64(1), fee_amount=fee, confirmed=False, diff --git a/chia/wallet/puzzle_drivers.py b/chia/wallet/puzzle_drivers.py index 746c304f9efc..f99f60de7322 100644 --- a/chia/wallet/puzzle_drivers.py +++ b/chia/wallet/puzzle_drivers.py @@ -65,15 +65,14 @@ def check_type(self, types: list[str]) -> bool: return True else: return False - else: - if self.type() == types[0]: - types.pop(0) - if self.also(): - return self.also().check_type(types) # type: ignore - else: - return self.check_type(types) + elif self.type() == types[0]: + types.pop(0) + if self.also(): + return self.also().check_type(types) # type: ignore else: - return False + return self.check_type(types) + else: + return False @dataclass(frozen=True) diff --git a/chia/server/start_wallet.py b/chia/wallet/start_wallet.py similarity index 98% rename from chia/server/start_wallet.py rename to chia/wallet/start_wallet.py index aa8b1b32739b..624dcbb749e7 100644 --- a/chia/server/start_wallet.py +++ b/chia/wallet/start_wallet.py @@ -12,7 +12,6 @@ from chia.consensus.constants import replace_str_to_bytes 
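Stepping back to the `streamable_enum` decorator added to `chia/util/streamable.py` above, its intended usage looks like this (sketch with a hypothetical enum; assumes the rest of the streamable plumbing accepts the proxy type):

```python
from __future__ import annotations

from dataclasses import dataclass
from enum import Enum

from chia_rs.sized_ints import uint8

from chia.util.streamable import Streamable, streamable, streamable_enum

@streamable_enum(uint8)  # the proxy type used for parsing and streaming
class ExampleStatus(Enum):  # hypothetical enum, for illustration only
    PENDING = 1
    CONFIRMED = 2

@streamable
@dataclass(frozen=True)
class ExampleRecord(Streamable):
    status: ExampleStatus

rec = ExampleRecord(ExampleStatus.CONFIRMED)
assert ExampleRecord.from_bytes(bytes(rec)) == rec  # one uint8 byte on the wire
assert rec.to_json_dict() == {"status": 2}  # JSON now carries the value, not the name
```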
from chia.consensus.default_constants import DEFAULT_CONSTANTS, update_testnet_overrides from chia.protocols.outbound_message import NodeType -from chia.server.aliases import WalletService from chia.server.resolve_peer_info import get_unresolved_peer_infos from chia.server.signal_handlers import SignalHandlers from chia.server.start_service import RpcInfo, Service, async_run @@ -26,6 +25,7 @@ # See: https://bugs.python.org/issue29288 from chia.wallet.wallet_node_api import WalletNodeAPI from chia.wallet.wallet_rpc_api import WalletRpcApi +from chia.wallet.wallet_service import WalletService "".encode("idna") diff --git a/chia/wallet/trade_manager.py b/chia/wallet/trade_manager.py index e19f2065d86e..cddaacf85f49 100644 --- a/chia/wallet/trade_manager.py +++ b/chia/wallet/trade_manager.py @@ -361,6 +361,7 @@ async def cancel_pending_offers( confirmed_at_height=uint32(0), created_at_time=uint64(time.time()), to_puzzle_hash=new_ph, + to_address=self.wallet_state_manager.encode_puzzle_hash(new_ph), amount=uint64(coin.amount), fee_amount=fee, confirmed=False, @@ -728,6 +729,7 @@ async def calculate_tx_records_for_offer(self, offer: Offer, validate: bool) -> confirmed_at_height=uint32(0), created_at_time=uint64(time.time()), to_puzzle_hash=to_puzzle_hash, + to_address=self.wallet_state_manager.encode_puzzle_hash(to_puzzle_hash), amount=uint64(addition.amount), fee_amount=uint64(0), confirmed=False, @@ -788,6 +790,7 @@ async def calculate_tx_records_for_offer(self, offer: Offer, validate: bool) -> confirmed_at_height=uint32(0), created_at_time=uint64(time.time()), to_puzzle_hash=to_puzzle_hash, + to_address=self.wallet_state_manager.encode_puzzle_hash(to_puzzle_hash), amount=uint64(sent_amount), fee_amount=all_fees, confirmed=False, diff --git a/chia/wallet/transaction_record.py b/chia/wallet/transaction_record.py index 2c3d2bd97205..c11c25d6cbef 100644 --- a/chia/wallet/transaction_record.py +++ b/chia/wallet/transaction_record.py @@ -1,8 +1,7 @@ from __future__ import annotations -import builtins from dataclasses import dataclass -from typing import Any, Generic, Optional, TypeVar +from typing import Generic, Optional, TypeVar from chia_rs import SpendBundle from chia_rs.sized_bytes import bytes32 @@ -11,7 +10,7 @@ from chia.consensus.coinbase import farmer_parent_id, pool_parent_id from chia.types.blockchain_format.coin import Coin from chia.types.mempool_inclusion_status import MempoolInclusionStatus -from chia.util.bech32m import decode_puzzle_hash, encode_puzzle_hash +from chia.util.bech32m import decode_puzzle_hash from chia.util.errors import Err from chia.util.streamable import Streamable, streamable from chia.wallet.conditions import ConditionValidTimes @@ -19,7 +18,6 @@ from chia.wallet.wallet_spend_bundle import WalletSpendBundle T = TypeVar("T") -_T_TransactionRecord = TypeVar("_T_TransactionRecord", bound="TransactionRecordOld") minimum_send_attempts = 6 @@ -81,31 +79,6 @@ def height_farmed(self, genesis_challenge: bytes32) -> Optional[uint32]: return uint32(block_index) return None - @classmethod - def from_json_dict_convenience( - cls: builtins.type[_T_TransactionRecord], modified_tx_input: dict - ) -> _T_TransactionRecord: - modified_tx = modified_tx_input.copy() - if "to_address" in modified_tx: - modified_tx["to_puzzle_hash"] = decode_puzzle_hash(modified_tx["to_address"]).hex() - if "to_address" in modified_tx: - del modified_tx["to_address"] - return cls.from_json_dict(modified_tx) - - @classmethod - def from_json_dict(cls: builtins.type[_T_TransactionRecord], json_dict: 
dict[str, Any]) -> _T_TransactionRecord: - try: - return super().from_json_dict(json_dict) - except Exception: - return cls.from_json_dict_convenience(json_dict) - - def to_json_dict_convenience(self, config: dict) -> dict: - selected = config["selected_network"] - prefix = config["network_overrides"]["config"][selected]["address_prefix"] - formatted = self.to_json_dict() - formatted["to_address"] = encode_puzzle_hash(self.to_puzzle_hash, prefix) - return formatted - def is_valid(self) -> bool: if len(self.sent_to) < minimum_send_attempts: # we haven't tried enough peers yet @@ -130,6 +103,12 @@ def hint_dict(self) -> dict[bytes32, bytes32]: @dataclass(frozen=True) class TransactionRecord(TransactionRecordOld): valid_times: ConditionValidTimes + to_address: str + + def __post_init__(self) -> None: + if decode_puzzle_hash(self.to_address) != self.to_puzzle_hash: + raise ValueError("Invalid tx record initialization, to_address must match to_puzzle_hash") + return super().__post_init__() @streamable diff --git a/chia/wallet/util/merkle_utils.py b/chia/wallet/util/merkle_utils.py index 9a8df4adc711..d15569479d7c 100644 --- a/chia/wallet/util/merkle_utils.py +++ b/chia/wallet/util/merkle_utils.py @@ -39,9 +39,9 @@ def build_merkle_tree_from_binary_tree(tuples: TupleTree) -> tuple[bytes32, dict proof.append(right_root) new_proofs[name] = (path, proof) for name, (path, proof) in right_proofs.items(): - path |= 1 << len(proof) + appended_path = path | (1 << len(proof)) proof.append(left_root) - new_proofs[name] = (path, proof) + new_proofs[name] = (appended_path, proof) return new_root, new_proofs diff --git a/chia/wallet/vc_wallet/cr_cat_wallet.py b/chia/wallet/vc_wallet/cr_cat_wallet.py index bdd25baf6f21..a6c11b79f998 100644 --- a/chia/wallet/vc_wallet/cr_cat_wallet.py +++ b/chia/wallet/vc_wallet/cr_cat_wallet.py @@ -253,6 +253,7 @@ async def add_crcat_coin(self, coin_spend: CoinSpend, coin: Coin, height: uint32 confirmed_at_height=height, created_at_time=uint64(created_timestamp), to_puzzle_hash=hint_dict[coin.name()], + to_address=self.wallet_state_manager.encode_puzzle_hash(hint_dict[coin.name()]), amount=uint64(coin.amount), fee_amount=uint64(0), confirmed=True, @@ -662,6 +663,7 @@ async def generate_signed_transaction( confirmed_at_height=uint32(0), created_at_time=uint64(time.time()), to_puzzle_hash=payment.puzzle_hash, + to_address=self.wallet_state_manager.encode_puzzle_hash(payment.puzzle_hash), amount=payment.amount, fee_amount=fee, confirmed=False, @@ -793,6 +795,7 @@ async def claim_pending_approval_balance( confirmed_at_height=uint32(0), created_at_time=uint64(time.time()), to_puzzle_hash=to_puzzle_hash, + to_address=self.wallet_state_manager.encode_puzzle_hash(to_puzzle_hash), amount=uint64(sum(c.amount for c in coins)), fee_amount=fee, confirmed=False, diff --git a/chia/wallet/vc_wallet/vc_wallet.py b/chia/wallet/vc_wallet/vc_wallet.py index 76417e6031c0..bf37b2bd57a2 100644 --- a/chia/wallet/vc_wallet/vc_wallet.py +++ b/chia/wallet/vc_wallet/vc_wallet.py @@ -210,6 +210,7 @@ async def launch_new_vc( confirmed_at_height=uint32(0), created_at_time=now, to_puzzle_hash=inner_puzzle_hash, + to_address=self.wallet_state_manager.encode_puzzle_hash(inner_puzzle_hash), amount=uint64(1), fee_amount=uint64(fee), confirmed=False, @@ -347,6 +348,7 @@ async def generate_signed_transaction( confirmed_at_height=uint32(0), created_at_time=now, to_puzzle_hash=puzzle_hashes[0], + to_address=self.wallet_state_manager.encode_puzzle_hash(puzzle_hashes[0]), amount=uint64(1), fee_amount=uint64(fee), 
confirmed=False, @@ -453,12 +455,10 @@ async def add_vc_authorization( crcat_spends.append(crcat_spend) if spend in offer._bundle.coin_spends: spends_to_fix[spend.coin.name()] = spend - else: - if spend in offer._bundle.coin_spends: # pragma: no cover - other_spends.append(spend) - else: - if spend in offer._bundle.coin_spends: + elif spend in offer._bundle.coin_spends: # pragma: no cover other_spends.append(spend) + elif spend in offer._bundle.coin_spends: + other_spends.append(spend) # Figure out what VC announcements are needed announcements_to_make: dict[bytes32, list[CreatePuzzleAnnouncement]] = {} diff --git a/chia/wallet/wallet.py b/chia/wallet/wallet.py index 6001d7972dd0..96652747d8e4 100644 --- a/chia/wallet/wallet.py +++ b/chia/wallet/wallet.py @@ -431,12 +431,14 @@ async def generate_signed_transaction( else: assert output_amount == input_amount + to_ph = add_list[0].puzzle_hash if len(add_list) > 0 else bytes32.zeros async with action_scope.use() as interface: interface.side_effects.transactions.append( TransactionRecord( confirmed_at_height=uint32(0), created_at_time=now, - to_puzzle_hash=add_list[0].puzzle_hash if len(add_list) > 0 else bytes32.zeros, + to_puzzle_hash=to_ph, + to_address=self.wallet_state_manager.encode_puzzle_hash(to_ph), amount=uint64(non_change_amount), fee_amount=uint64(fee), confirmed=False, diff --git a/chia/wallet/wallet_node.py b/chia/wallet/wallet_node.py index 88dee1ef39b1..99a87c50b57a 100644 --- a/chia/wallet/wallet_node.py +++ b/chia/wallet/wallet_node.py @@ -911,14 +911,13 @@ async def add_states_from_peer( ): # only one peer told us to rollback so only clear for that peer await self.perform_atomic_rollback(fork_height, cache=cache) - else: - if fork_height is not None: - # only one peer told us to rollback so only clear for that peer - cache.clear_after_height(fork_height) - self.log.info(f"clear_after_height {fork_height} for peer {peer}") - if not trusted: - # Rollback race_cache not in clear_after_height to avoid applying rollbacks from new peak processing - cache.rollback_race_cache(fork_height=fork_height) + elif fork_height is not None: + # only one peer told us to rollback so only clear for that peer + cache.clear_after_height(fork_height) + self.log.info(f"clear_after_height {fork_height} for peer {peer}") + if not trusted: + # Rollback race_cache not in clear_after_height to avoid applying rollbacks from new peak processing + cache.rollback_race_cache(fork_height=fork_height) all_tasks: list[asyncio.Task[None]] = [] target_concurrent_tasks: int = 30 @@ -989,18 +988,17 @@ async def validate_and_add(inner_states: list[CoinState], inner_idx_start: int) ) if not await self.wallet_state_manager.add_coin_states(batch.entries, peer, fork_height): return False + elif fork_height is not None: + cache.add_states_to_race_cache(batch.entries) else: - if fork_height is not None: - cache.add_states_to_race_cache(batch.entries) - else: - while len(all_tasks) >= target_concurrent_tasks: - all_tasks = [task for task in all_tasks if not task.done()] - await asyncio.sleep(0.1) - if self._shut_down: - self.log.info("Terminating receipt and validation due to shut down request") - await asyncio.gather(*all_tasks) - return False - all_tasks.append(create_referenced_task(validate_and_add(batch.entries, idx))) + while len(all_tasks) >= target_concurrent_tasks: + all_tasks = [task for task in all_tasks if not task.done()] + await asyncio.sleep(0.1) + if self._shut_down: + self.log.info("Terminating receipt and validation due to shut down request") + await 
asyncio.gather(*all_tasks) + return False + all_tasks.append(create_referenced_task(validate_and_add(batch.entries, idx))) idx += len(batch.entries) still_connected = self._server is not None and peer.peer_node_id in self.server.all_connections @@ -1158,9 +1156,8 @@ async def new_peak_wallet(self, new_peak: NewPeakWallet, peer: WSChiaConnection) await self.new_peak_from_trusted( new_peak_hb, latest_timestamp, peer, new_peak.fork_point_with_previous_peak ) - else: - if not await self.new_peak_from_untrusted(new_peak_hb, peer): - return + elif not await self.new_peak_from_untrusted(new_peak_hb, peer): + return # todo why do we call this if there was an exception / the sync is not finished async with self.wallet_state_manager.lock: @@ -1272,10 +1269,9 @@ async def sync_from_untrusted_close_to_peak(self, new_peak_hb: HeaderBlock, peer ) if success: self.synced_peers.add(peer.peer_node_id) - else: - if peak_hb is not None and new_peak_hb.weight <= peak_hb.weight: - # Don't process blocks at the same weight - return False + elif peak_hb is not None and new_peak_hb.weight <= peak_hb.weight: + # Don't process blocks at the same weight + return False # For every block, we need to apply the cache from race_cache for potential_height in range(backtrack_fork_height + 1, new_peak_hb.height + 1): @@ -1663,10 +1659,9 @@ async def validate_block_inclusion( if not prev_block_rc_hash == reversed_slots[-1].reward_chain.end_of_slot_vdf.challenge: self.log.error("Failed validation 7") return False - else: - if not prev_block_rc_hash == reward_chain_hash: - self.log.error("Failed validation 8") - return False + elif not prev_block_rc_hash == reward_chain_hash: + self.log.error("Failed validation 8") + return False blocks_to_cache.append((reward_chain_hash, en_block.height)) agg_sig: G2Element = AugSchemeMPL.aggregate([sig for (_, _, sig) in pk_m_sig]) diff --git a/chia/wallet/wallet_request_types.py b/chia/wallet/wallet_request_types.py index 7a3d18e4b97b..dc76faa16e4e 100644 --- a/chia/wallet/wallet_request_types.py +++ b/chia/wallet/wallet_request_types.py @@ -2,11 +2,11 @@ import sys from dataclasses import dataclass, field -from typing import Any, Optional, final +from typing import Any, BinaryIO, Optional, final from chia_rs import Coin, G1Element, G2Element, PrivateKey from chia_rs.sized_bytes import bytes32 -from chia_rs.sized_ints import uint16, uint32, uint64 +from chia_rs.sized_ints import uint8, uint16, uint32, uint64 from typing_extensions import Self, dataclass_transform from chia.data_layer.data_layer_wallet import Mirror @@ -15,7 +15,7 @@ from chia.types.blockchain_format.program import Program from chia.util.byte_types import hexstr_to_bytes from chia.util.streamable import Streamable, streamable -from chia.wallet.conditions import Condition, ConditionValidTimes +from chia.wallet.conditions import Condition, ConditionValidTimes, conditions_to_json_dicts from chia.wallet.nft_wallet.nft_info import NFTInfo from chia.wallet.notification_store import Notification from chia.wallet.signer_protocol import ( @@ -28,10 +28,14 @@ from chia.wallet.trade_record import TradeRecord from chia.wallet.trading.offer import Offer from chia.wallet.transaction_record import TransactionRecord +from chia.wallet.transaction_sorting import SortKey from chia.wallet.util.clvm_streamable import json_deserialize_with_clvm_streamable +from chia.wallet.util.puzzle_decorator_type import PuzzleDecoratorType +from chia.wallet.util.query_filter import TransactionTypeFilter from chia.wallet.util.tx_config import TXConfig from 
chia.wallet.vc_wallet.vc_store import VCProofs, VCRecord from chia.wallet.wallet_info import WalletInfo +from chia.wallet.wallet_node import Balance from chia.wallet.wallet_spend_bundle import WalletSpendBundle @@ -221,6 +225,121 @@ class GetWalletsResponse(Streamable): fingerprint: Optional[uint32] = None +@streamable +@dataclass(frozen=True) +class GetWalletBalance(Streamable): + wallet_id: uint32 + + +@streamable +@dataclass(frozen=True) +class GetWalletBalances(Streamable): + wallet_ids: Optional[list[uint32]] = None + + +# utility for GetWalletBalanceResponse(s) +@streamable +@kw_only_dataclass +class BalanceResponse(Balance): + wallet_id: uint32 = field(default_factory=default_raise) + wallet_type: uint8 = field(default_factory=default_raise) + fingerprint: Optional[uint32] = None + asset_id: Optional[bytes32] = None + pending_approval_balance: Optional[uint64] = None + + +@streamable +@dataclass(frozen=True) +class GetWalletBalanceResponse(Streamable): + wallet_balance: BalanceResponse + + +@streamable +@dataclass(frozen=True) +class GetWalletBalancesResponse(Streamable): + wallet_balances: dict[uint32, BalanceResponse] + + +@streamable +@dataclass(frozen=True) +class GetTransaction(Streamable): + transaction_id: bytes32 + + +@streamable +@dataclass(frozen=True) +class GetTransactionResponse(Streamable): + transaction: TransactionRecord + transaction_id: bytes32 + + +@streamable +@dataclass(frozen=True) +class GetTransactions(Streamable): + wallet_id: uint32 + start: Optional[uint16] = None + end: Optional[uint16] = None + sort_key: Optional[str] = None + reverse: bool = False + to_address: Optional[str] = None + type_filter: Optional[TransactionTypeFilter] = None + confirmed: Optional[bool] = None + + def __post_init__(self) -> None: + if self.sort_key is not None and not hasattr(SortKey, self.sort_key): + raise ValueError(f"There is no known sort {self.sort_key}") + + +# utility for GetTransactionsResponse +# this class cannot be a dataclass because if it is, streamable will assume it knows how to serialize it +# TODO: We should put some thought into deprecating this and separating the metadata more reasonably +class TransactionRecordMetadata: + content: dict[str, Any] + coin_id: bytes32 + spent: bool + + def __init__(self, content: dict[str, Any], coin_id: bytes32, spent: bool) -> None: + self.content = content + self.coin_id = coin_id + self.spent = spent + + def __bytes__(self) -> bytes: + raise NotImplementedError("Should not be serializing this object as bytes, it's only for RPC") + + @classmethod + def parse(cls, f: BinaryIO) -> TransactionRecordMetadata: + raise NotImplementedError("Should not be deserializing this object from a stream, it's only for RPC") + + def to_json_dict(self) -> dict[str, Any]: + return { + **self.content, + "coin_id": "0x" + self.coin_id.hex(), + "spent": self.spent, + } + + @classmethod + def from_json_dict(cls, json_dict: dict[str, Any]) -> TransactionRecordMetadata: + return TransactionRecordMetadata( + coin_id=bytes32.from_hexstr(json_dict["coin_id"]), + spent=json_dict["spent"], + content={k: v for k, v in json_dict.items() if k not in {"coin_id", "spent"}}, + ) + + +# utility for GetTransactionsResponse +@streamable +@dataclass(frozen=True) +class TransactionRecordWithMetadata(TransactionRecord): + metadata: Optional[TransactionRecordMetadata] = None + + +@streamable +@dataclass(frozen=True) +class GetTransactionsResponse(Streamable): + transactions: list[TransactionRecordWithMetadata] + wallet_id: uint32 + + @streamable 
 @dataclass(frozen=True)
 class GetNotifications(Streamable):
@@ -235,6 +354,12 @@ class GetNotificationsResponse(Streamable):
     notifications: list[Notification]


+@streamable
+@dataclass(frozen=True)
+class DeleteNotifications(Streamable):
+    ids: Optional[list[bytes32]] = None
+
+
 @streamable
 @dataclass(frozen=True)
 class VerifySignature(Streamable):
@@ -254,43 +379,125 @@ class VerifySignatureResponse(Streamable):

 @streamable
 @dataclass(frozen=True)
-class GetTransactionMemo(Streamable):
-    transaction_id: bytes32
+class SignMessageByAddress(Streamable):
+    address: str
+    message: str
+    is_hex: bool = False
+    safe_mode: bool = True


-# utility type for GetTransactionMemoResponse
 @streamable
 @dataclass(frozen=True)
-class CoinIDWithMemos(Streamable):
-    coin_id: bytes32
-    memos: list[bytes]
+class SignMessageByAddressResponse(Streamable):
+    pubkey: G1Element
+    signature: G2Element
+    signing_mode: str


 @streamable
 @dataclass(frozen=True)
-class GetTransactionMemoResponse(Streamable):
+class SignMessageByID(Streamable):
+    id: str
+    message: str
+    is_hex: bool = False
+    safe_mode: bool = True
+
+
+@streamable
+@dataclass(frozen=True)
+class SignMessageByIDResponse(Streamable):
+    pubkey: G1Element
+    signature: G2Element
+    latest_coin_id: bytes32
+    signing_mode: str
+
+
+@streamable
+@dataclass(frozen=True)
+class GetTransactionMemo(Streamable):
     transaction_id: bytes32
-    coins_with_memos: list[CoinIDWithMemos]
+
+
+@streamable
+@dataclass(frozen=True)
+class GetTransactionMemoResponse(Streamable):
+    transaction_memos: dict[bytes32, dict[bytes32, list[bytes]]]
+
+    @property
+    def memo_dict(self) -> dict[bytes32, list[bytes]]:
+        return next(iter(self.transaction_memos.values()))

     # TODO: deprecate the kinda silly format of this RPC and delete these functions
     def to_json_dict(self) -> dict[str, Any]:
-        return {
-            self.transaction_id.hex(): {
-                cwm.coin_id.hex(): [memo.hex() for memo in cwm.memos] for cwm in self.coins_with_memos
-            }
-        }
+        # This is semantically guaranteed but mypy can't know that
+        return super().to_json_dict()["transaction_memos"]  # type: ignore[no-any-return]

     @classmethod
     def from_json_dict(cls, json_dict: dict[str, Any]) -> GetTransactionMemoResponse:
-        return cls(
-            bytes32.from_hexstr(next(iter(json_dict.keys()))),
-            [
-                CoinIDWithMemos(bytes32.from_hexstr(coin_id), [bytes32.from_hexstr(memo) for memo in memos])
-                for coin_id, memos in next(iter(json_dict.values())).items()
-            ],
+        return super().from_json_dict(
+            # We have to filter out the "success" key here
+            # because it doesn't match our `transaction_memos` hint
+            #
+            # We do this by only allowing the keys with "0x"
+            # which we can assume exist because we serialize all responses
+            {"transaction_memos": {key: value for key, value in json_dict.items() if key.startswith("0x")}}
         )


+@streamable
+@dataclass(frozen=True)
+class GetTransactionCount(Streamable):
+    wallet_id: uint32
+    confirmed: Optional[bool] = None
+    type_filter: Optional[TransactionTypeFilter] = None
+
+
+@streamable
+@dataclass(frozen=True)
+class GetTransactionCountResponse(Streamable):
+    wallet_id: uint32
+    count: uint16
+
+
+@streamable
+@dataclass(frozen=True)
+class GetNextAddress(Streamable):
+    wallet_id: uint32
+    new_address: bool = False
+    save_derivations: bool = True
+
+
+@streamable
+@dataclass(frozen=True)
+class GetNextAddressResponse(Streamable):
+    wallet_id: uint32
+    address: str
+
+
+@streamable
+@dataclass(frozen=True)
+class DeleteUnconfirmedTransactions(Streamable):
+    wallet_id: uint32
+
+
+@streamable
+@dataclass(frozen=True)
+class GetCurrentDerivationIndexResponse(Streamable):
+    index: Optional[uint32]
+
+
+@streamable
+@dataclass(frozen=True)
+class ExtendDerivationIndex(Streamable):
+    index: uint32
+
+
+@streamable
+@dataclass(frozen=True)
+class ExtendDerivationIndexResponse(Streamable):
+    index: Optional[uint32]
+
+
 @streamable
 @dataclass(frozen=True)
 class GetOffersCountResponse(Streamable):
@@ -894,7 +1101,7 @@ def json_serialize_for_transport(
         return {
             **tx_config.to_json_dict(),
             **timelock_info.to_json_dict(),
-            "extra_conditions": [condition.to_json_dict() for condition in extra_conditions],
+            "extra_conditions": conditions_to_json_dicts(extra_conditions),
             **self.to_json_dict(_avoid_ban=True),
         }

@@ -906,6 +1113,53 @@ class TransactionEndpointResponse(Streamable):
     transactions: list[TransactionRecord]


+# utility for SendTransaction
+@streamable
+@dataclass(frozen=True)
+class ClawbackPuzzleDecoratorOverride(Streamable):
+    decorator: str
+    clawback_timelock: uint64
+
+    def __post_init__(self) -> None:
+        if self.decorator != PuzzleDecoratorType.CLAWBACK.name:
+            raise ValueError("Invalid clawback puzzle decorator override specified")
+        super().__post_init__()
+
+
+@streamable
+@dataclass(frozen=True)
+class SendTransaction(TransactionEndpointRequest):
+    wallet_id: uint32 = field(default_factory=default_raise)
+    amount: uint64 = field(default_factory=default_raise)
+    address: str = field(default_factory=default_raise)
+    memos: list[str] = field(default_factory=list)
+    # Technically this value was meant to support many types here
+    # However, only one is supported right now and there are no plans to extend
+    # So, as a slight hack, we'll specify that only Clawback is supported
+    puzzle_decorator: Optional[list[ClawbackPuzzleDecoratorOverride]] = None
+
+
+@streamable
+@dataclass(frozen=True)
+class SendTransactionResponse(TransactionEndpointResponse):
+    transaction: TransactionRecord
+    transaction_id: bytes32
+
+
+@streamable
+@dataclass(frozen=True)
+class SpendClawbackCoins(TransactionEndpointRequest):
+    coin_ids: list[bytes32] = field(default_factory=default_raise)
+    batch_size: Optional[uint16] = None
+    force: bool = False
+
+
+@streamable
+@dataclass(frozen=True)
+class SpendClawbackCoinsResponse(TransactionEndpointResponse):
+    transaction_ids: list[bytes32]
+
+
 @streamable
 @dataclass(frozen=True)
 class PushTransactions(TransactionEndpointRequest):
@@ -920,10 +1174,7 @@ def from_json_dict(cls, json_dict: dict[str, Any]) -> PushTransactions:
             if isinstance(transaction_hexstr_or_json, str):
                 tx = TransactionRecord.from_bytes(hexstr_to_bytes(transaction_hexstr_or_json))
             else:
-                try:
-                    tx = TransactionRecord.from_json_dict_convenience(transaction_hexstr_or_json)
-                except AttributeError:
-                    tx = TransactionRecord.from_json_dict(transaction_hexstr_or_json)
+                tx = TransactionRecord.from_json_dict(transaction_hexstr_or_json)
             transactions.append(tx)

         json_dict["transactions"] = [tx.to_json_dict() for tx in transactions]
@@ -1344,11 +1595,6 @@ class VCRevokeResponse(TransactionEndpointResponse):
 # TODO: The section below needs corresponding request types
 # TODO: The section below should be added to the API (currently only for client)

-@streamable
-@dataclass(frozen=True)
-class SendTransactionResponse(TransactionEndpointResponse):
-    transaction: TransactionRecord
-    transaction_id: bytes32


 @streamable
diff --git a/chia/wallet/wallet_rpc_api.py b/chia/wallet/wallet_rpc_api.py
index b764922c2989..db33a641a332 100644
--- a/chia/wallet/wallet_rpc_api.py
+++ b/chia/wallet/wallet_rpc_api.py
@@ -90,7 +90,7 @@ from chia.wallet.util.compute_hints import compute_spend_hints_and_additions
 from chia.wallet.util.compute_memos import compute_memos
 from chia.wallet.util.curry_and_treehash import NIL_TREEHASH
-from chia.wallet.util.query_filter import FilterMode, HashFilter, TransactionTypeFilter
+from chia.wallet.util.query_filter import FilterMode, HashFilter
 from chia.wallet.util.transaction_type import CLAWBACK_INCOMING_TRANSACTION_TYPES, TransactionType
 from chia.wallet.util.tx_config import DEFAULT_TX_CONFIG, TXConfig, TXConfigLoader
 from chia.wallet.util.wallet_sync_utils import fetch_coin_spend_for_coin_state
@@ -111,6 +111,7 @@
     AddKeyResponse,
     ApplySignatures,
     ApplySignaturesResponse,
+    BalanceResponse,
     CheckDeleteKey,
     CheckDeleteKeyResponse,
     CombineCoins,
@@ -118,6 +119,8 @@
     CreateNewDL,
     CreateNewDLResponse,
     DeleteKey,
+    DeleteNotifications,
+    DeleteUnconfirmedTransactions,
     DIDCreateBackupFile,
     DIDCreateBackupFileResponse,
     DIDFindLostDID,
@@ -164,11 +167,16 @@
     Empty,
     ExecuteSigningInstructions,
     ExecuteSigningInstructionsResponse,
+    ExtendDerivationIndex,
+    ExtendDerivationIndexResponse,
     GatherSigningInfo,
     GatherSigningInfoResponse,
     GenerateMnemonicResponse,
+    GetCurrentDerivationIndexResponse,
     GetHeightInfoResponse,
     GetLoggedInFingerprintResponse,
+    GetNextAddress,
+    GetNextAddressResponse,
     GetNotifications,
     GetNotificationsResponse,
     GetPrivateKey,
@@ -178,6 +186,18 @@
     GetSyncStatusResponse,
     GetTimestampForHeight,
     GetTimestampForHeightResponse,
+    GetTransaction,
+    GetTransactionCount,
+    GetTransactionCountResponse,
+    GetTransactionMemo,
+    GetTransactionMemoResponse,
+    GetTransactionResponse,
+    GetTransactions,
+    GetTransactionsResponse,
+    GetWalletBalance,
+    GetWalletBalanceResponse,
+    GetWalletBalances,
+    GetWalletBalancesResponse,
     GetWallets,
     GetWalletsResponse,
     LogIn,
@@ -222,11 +242,20 @@
     PWSelfPoolResponse,
     PWStatus,
     PWStatusResponse,
+    SendTransaction,
+    SendTransactionResponse,
     SetWalletResyncOnStartup,
+    SignMessageByAddress,
+    SignMessageByAddressResponse,
+    SignMessageByID,
+    SignMessageByIDResponse,
+    SpendClawbackCoins,
+    SpendClawbackCoinsResponse,
     SplitCoins,
     SplitCoinsResponse,
     SubmitTransactions,
     SubmitTransactionsResponse,
+    TransactionRecordWithMetadata,
     VCAddProofs,
     VCGet,
     VCGetList,
@@ -243,6 +272,8 @@
     VCRevokeResponse,
     VCSpend,
     VCSpendResponse,
+    VerifySignature,
+    VerifySignatureResponse,
     WalletInfoResponse,
 )
 from chia.wallet.wallet_spend_bundle import WalletSpendBundle
@@ -340,10 +371,7 @@ async def rpc_endpoint(
         else:
             response["unsigned_transactions"] = [tx.to_json_dict() for tx in unsigned_txs]

-        response["transactions"] = [
-            TransactionRecord.to_json_dict_convenience(tx, self.service.config)
-            for tx in action_scope.side_effects.transactions
-        ]
+        response["transactions"] = [tx.to_json_dict() for tx in action_scope.side_effects.transactions]

         # Some backwards compatibility code here because transaction information being returned was not uniform
         # until the "transactions" key was applied to all of them. Unfortunately, since .add_pending_transactions
@@ -447,6 +475,7 @@ async def rpc_endpoint(
             confirmed_at_height=uint32(0),
             created_at_time=uint64(0),
             to_puzzle_hash=bytes32.zeros,
+            to_address=encode_puzzle_hash(bytes32.zeros, "replace"),
             amount=uint64(0),
             fee_amount=uint64(0),
             confirmed=False,
@@ -1109,7 +1138,7 @@ async def create_new_wallet(
                 metadata = request["metadata"]

             async with self.service.wallet_state_manager.lock:
-                did_wallet_name: str = request.get("wallet_name", None)
+                did_wallet_name: Optional[str] = request.get("wallet_name", None)
                 if did_wallet_name is not None:
                     did_wallet_name = did_wallet_name.strip()
                 did_wallet: DIDWallet = await DIDWallet.create_new_did_wallet(
@@ -1265,7 +1294,7 @@ async def create_new_wallet(
     # Wallet
     ##########################################################################################

-    async def _get_wallet_balance(self, wallet_id: uint32) -> dict[str, Any]:
+    async def _get_wallet_balance(self, wallet_id: uint32) -> BalanceResponse:
         wallet = self.service.wallet_state_manager.wallets[wallet_id]
         balance = await self.service.get_balance(wallet_id)
         wallet_balance = balance.to_json_dict()
@@ -1280,36 +1309,38 @@ async def _get_wallet_balance(self, wallet_id: uint32) -> dict[str, Any]:
             assert isinstance(wallet, CRCATWallet)
             wallet_balance["pending_approval_balance"] = await wallet.get_pending_approval_balance()

-        return wallet_balance
+        return BalanceResponse.from_json_dict(wallet_balance)

-    async def get_wallet_balance(self, request: dict[str, Any]) -> EndpointResult:
-        wallet_id = uint32(request["wallet_id"])
-        wallet_balance = await self._get_wallet_balance(wallet_id)
-        return {"wallet_balance": wallet_balance}
+    @marshal
+    async def get_wallet_balance(self, request: GetWalletBalance) -> GetWalletBalanceResponse:
+        return GetWalletBalanceResponse(await self._get_wallet_balance(request.wallet_id))

-    async def get_wallet_balances(self, request: dict[str, Any]) -> EndpointResult:
-        try:
-            wallet_ids: list[uint32] = [uint32(wallet_id) for wallet_id in request["wallet_ids"]]
-        except (TypeError, KeyError):
+    @marshal
+    async def get_wallet_balances(self, request: GetWalletBalances) -> GetWalletBalancesResponse:
+        if request.wallet_ids is not None:
+            wallet_ids = request.wallet_ids
+        else:
             wallet_ids = list(self.service.wallet_state_manager.wallets.keys())
-        wallet_balances: dict[uint32, dict[str, Any]] = {}
-        for wallet_id in wallet_ids:
-            wallet_balances[wallet_id] = await self._get_wallet_balance(wallet_id)
-        return {"wallet_balances": wallet_balances}
+        return GetWalletBalancesResponse(
+            {wallet_id: await self._get_wallet_balance(wallet_id) for wallet_id in wallet_ids}
+        )

-    async def get_transaction(self, request: dict[str, Any]) -> EndpointResult:
-        transaction_id: bytes32 = bytes32.from_hexstr(request["transaction_id"])
-        tr: Optional[TransactionRecord] = await self.service.wallet_state_manager.get_transaction(transaction_id)
+    @marshal
+    async def get_transaction(self, request: GetTransaction) -> GetTransactionResponse:
+        tr: Optional[TransactionRecord] = await self.service.wallet_state_manager.get_transaction(
+            request.transaction_id
+        )
         if tr is None:
-            raise ValueError(f"Transaction 0x{transaction_id.hex()} not found")
+            raise ValueError(f"Transaction 0x{request.transaction_id.hex()} not found")

-        return {
-            "transaction": (await self._convert_tx_puzzle_hash(tr)).to_json_dict_convenience(self.service.config),
-            "transaction_id": tr.name,
-        }
+        return GetTransactionResponse(
+            await self._convert_tx_puzzle_hash(tr),
+            tr.name,
+        )

-    async def get_transaction_memo(self, request: dict[str, Any]) -> EndpointResult:
-        transaction_id: bytes32 = bytes32.from_hexstr(request["transaction_id"])
+    @marshal
+    async def get_transaction_memo(self, request: GetTransactionMemo) -> GetTransactionMemoResponse:
+        transaction_id: bytes32 = request.transaction_id
         tr: Optional[TransactionRecord] = await self.service.wallet_state_manager.get_transaction(transaction_id)
         if tr is None:
             raise ValueError(f"Transaction 0x{transaction_id.hex()} not found")
@@ -1327,12 +1358,7 @@ async def get_transaction_memo(self, request: dict[str, Any]) -> EndpointResult:
         else:
             raise ValueError(f"Transaction 0x{transaction_id.hex()} doesn't have any coin spend.")
         assert tr.spend_bundle is not None
-        memos: dict[bytes32, list[bytes]] = compute_memos(tr.spend_bundle)
-        response = {}
-        # Convert to hex string
-        for coin_id, memo_list in memos.items():
-            response[coin_id.hex()] = [memo.hex() for memo in memo_list]
-        return {transaction_id.hex(): response}
+        return GetTransactionMemoResponse({transaction_id: compute_memos(tr.spend_bundle)})

     @tx_endpoint(push=False)
     @marshal
@@ -1489,36 +1515,26 @@ async def combine_coins(
         return CombineCoinsResponse([], [])  # tx_endpoint will take care to fill this out

-    async def get_transactions(self, request: dict[str, Any]) -> EndpointResult:
-        wallet_id = int(request["wallet_id"])
-
-        start = request.get("start", 0)
-        end = request.get("end", 50)
-        sort_key = request.get("sort_key", None)
-        reverse = request.get("reverse", False)
-
-        to_address = request.get("to_address", None)
+    @marshal
+    async def get_transactions(self, request: GetTransactions) -> GetTransactionsResponse:
         to_puzzle_hash: Optional[bytes32] = None
-        if to_address is not None:
-            to_puzzle_hash = decode_puzzle_hash(to_address)
-        type_filter = None
-        if "type_filter" in request:
-            type_filter = TransactionTypeFilter.from_json_dict(request["type_filter"])
+        if request.to_address is not None:
+            to_puzzle_hash = decode_puzzle_hash(request.to_address)

         transactions = await self.service.wallet_state_manager.tx_store.get_transactions_between(
-            wallet_id,
-            start,
-            end,
-            sort_key=sort_key,
-            reverse=reverse,
+            wallet_id=request.wallet_id,
+            start=uint16(0) if request.start is None else request.start,
+            end=uint16(50) if request.end is None else request.end,
+            sort_key=request.sort_key,
+            reverse=request.reverse,
             to_puzzle_hash=to_puzzle_hash,
-            type_filter=type_filter,
-            confirmed=request.get("confirmed", None),
+            type_filter=request.type_filter,
+            confirmed=request.confirmed,
         )
         tx_list = []
         # Format for clawback transactions
         for tr in transactions:
-            tx = (await self._convert_tx_puzzle_hash(tr)).to_json_dict_convenience(self.service.config)
+            tx = (await self._convert_tx_puzzle_hash(tr)).to_json_dict()
             tx_list.append(tx)
             if tx["type"] not in CLAWBACK_INCOMING_TRANSACTION_TYPES:
                 continue
@@ -1536,98 +1552,79 @@ async def get_transactions(self, request: dict[str, Any]) -> EndpointResult:
                 continue
             tx["metadata"]["coin_id"] = coin.name().hex()
             tx["metadata"]["spent"] = record.spent
-        return {
-            "transactions": tx_list,
-            "wallet_id": wallet_id,
-        }
+        return GetTransactionsResponse(
+            transactions=[TransactionRecordWithMetadata.from_json_dict(tx) for tx in tx_list],
+            wallet_id=request.wallet_id,
+        )

-    async def get_transaction_count(self, request: dict[str, Any]) -> EndpointResult:
-        wallet_id = int(request["wallet_id"])
-        type_filter = None
-        if "type_filter" in request:
-            type_filter = TransactionTypeFilter.from_json_dict(request["type_filter"])
+    @marshal
+    async def get_transaction_count(self, request: GetTransactionCount) -> GetTransactionCountResponse:
         count = await self.service.wallet_state_manager.tx_store.get_transaction_count_for_wallet(
-            wallet_id, confirmed=request.get("confirmed", None), type_filter=type_filter
+            request.wallet_id, confirmed=request.confirmed, type_filter=request.type_filter
+        )
+        return GetTransactionCountResponse(
+            request.wallet_id,
+            uint16(count),
         )
-        return {
-            "count": count,
-            "wallet_id": wallet_id,
-        }

-    async def get_next_address(self, request: dict[str, Any]) -> EndpointResult:
+    @marshal
+    async def get_next_address(self, request: GetNextAddress) -> GetNextAddressResponse:
         """
         Returns a new address
         """
-        if request["new_address"] is True:
-            create_new = True
-        else:
-            create_new = False
-        wallet_id = uint32(request["wallet_id"])
-        wallet = self.service.wallet_state_manager.wallets[wallet_id]
+        wallet = self.service.wallet_state_manager.wallets[request.wallet_id]
         selected = self.service.config["selected_network"]
         prefix = self.service.config["network_overrides"]["config"][selected]["address_prefix"]
         if wallet.type() in {WalletType.STANDARD_WALLET, WalletType.CAT, WalletType.CRCAT, WalletType.RCAT}:
             async with self.service.wallet_state_manager.new_action_scope(
-                DEFAULT_TX_CONFIG, push=request.get("save_derivations", True)
+                DEFAULT_TX_CONFIG, push=request.save_derivations
             ) as action_scope:
                 raw_puzzle_hash = await action_scope.get_puzzle_hash(
-                    self.service.wallet_state_manager, override_reuse_puzhash_with=not create_new
+                    self.service.wallet_state_manager, override_reuse_puzhash_with=not request.new_address
                 )
             address = encode_puzzle_hash(raw_puzzle_hash, prefix)
         else:
             raise ValueError(f"Wallet type {wallet.type()} cannot create puzzle hashes")
-        return {
-            "wallet_id": wallet_id,
-            "address": address,
-        }
+        return GetNextAddressResponse(
+            request.wallet_id,
+            address,
+        )

     @tx_endpoint(push=True)
+    @marshal
     async def send_transaction(
         self,
-        request: dict[str, Any],
+        request: SendTransaction,
         action_scope: WalletActionScope,
         extra_conditions: tuple[Condition, ...] = tuple(),
-    ) -> EndpointResult:
+    ) -> SendTransactionResponse:
         if await self.service.wallet_state_manager.synced() is False:
             raise ValueError("Wallet needs to be fully synced before sending transactions")

-        wallet_id = uint32(request["wallet_id"])
-        wallet = self.service.wallet_state_manager.get_wallet(id=wallet_id, required_type=Wallet)
+        wallet = self.service.wallet_state_manager.get_wallet(id=request.wallet_id, required_type=Wallet)

         # TODO: Add support for multiple puzhash/amount/memo sets
-        if not isinstance(request["amount"], int) or not isinstance(request["fee"], int):
-            raise ValueError("An integer amount or fee is required (too many decimals)")
-        amount: uint64 = uint64(request["amount"])
-        address = request["address"]
         selected_network = self.service.config["selected_network"]
         expected_prefix = self.service.config["network_overrides"]["config"][selected_network]["address_prefix"]
-        if address[0 : len(expected_prefix)] != expected_prefix:
+        if request.address[0 : len(expected_prefix)] != expected_prefix:
             raise ValueError("Unexpected Address Prefix")
-        puzzle_hash: bytes32 = decode_puzzle_hash(address)
-
-        memos: list[bytes] = []
-        if "memos" in request:
-            memos = [mem.encode("utf-8") for mem in request["memos"]]
-
-        fee: uint64 = uint64(request.get("fee", 0))

         await wallet.generate_signed_transaction(
-            [amount],
-            [puzzle_hash],
+            [request.amount],
+            [decode_puzzle_hash(request.address)],
             action_scope,
-            fee,
-            memos=[memos],
-            puzzle_decorator_override=request.get("puzzle_decorator", None),
+            request.fee,
+            memos=[[mem.encode("utf-8") for mem in request.memos]],
+            puzzle_decorator_override=[request.puzzle_decorator[0].to_json_dict()]
+            if request.puzzle_decorator is not None
+            else None,
             extra_conditions=extra_conditions,
         )

         # Transaction may not have been included in the mempool yet. Use get_transaction to check.
-        return {
-            "transaction": None,  # tx_endpoint wrapper will take care of this
-            "transactions": None,  # tx_endpoint wrapper will take care of this
-            "transaction_id": None,  # tx_endpoint wrapper will take care of this
-        }
+        # tx_endpoint will take care of the default values here
+        return SendTransactionResponse([], [], transaction=REPLACEABLE_TRANSACTION_RECORD, transaction_id=bytes32.zeros)

     async def send_transaction_multi(self, request: dict[str, Any]) -> EndpointResult:
         if await self.service.wallet_state_manager.synced() is False:
@@ -1654,18 +1651,19 @@ async def send_transaction_multi(self, request: dict[str, Any]) -> EndpointResul
         # Transaction may not have been included in the mempool yet. Use get_transaction to check.
         return {
             "transaction": transaction,
-            "transaction_id": TransactionRecord.from_json_dict_convenience(transaction).name,
+            "transaction_id": TransactionRecord.from_json_dict(transaction).name,
             "transactions": transactions,
             "unsigned_transactions": response["unsigned_transactions"],
         }

     @tx_endpoint(push=True, merge_spends=False)
+    @marshal
     async def spend_clawback_coins(
         self,
-        request: dict[str, Any],
+        request: SpendClawbackCoins,
         action_scope: WalletActionScope,
         extra_conditions: tuple[Condition, ...] = tuple(),
-    ) -> EndpointResult:
+    ) -> SpendClawbackCoinsResponse:
         """Spend clawback coins that were sent (to claw them back) or received (to claim them).
         :param coin_ids: list of coin ids to be spent
@@ -1673,21 +1671,19 @@ async def spend_clawback_coins(
         :param fee: transaction fee in mojos
         :return:
         """
-        if "coin_ids" not in request:
-            raise ValueError("Coin IDs are required.")
-        coin_ids: list[bytes32] = [bytes32.from_hexstr(coin) for coin in request["coin_ids"]]
-        tx_fee: uint64 = uint64(request.get("fee", 0))
         # Get inner puzzle
         coin_records = await self.service.wallet_state_manager.coin_store.get_coin_records(
-            coin_id_filter=HashFilter.include(coin_ids),
+            coin_id_filter=HashFilter.include(request.coin_ids),
             coin_type=CoinType.CLAWBACK,
             wallet_type=WalletType.STANDARD_WALLET,
             spent_range=UInt32Range(stop=uint32(0)),
         )

         coins: dict[Coin, ClawbackMetadata] = {}
-        batch_size = request.get(
-            "batch_size", self.service.wallet_state_manager.config.get("auto_claim", {}).get("batch_size", 50)
+        batch_size = (
+            request.batch_size
+            if request.batch_size is not None
+            else self.service.wallet_state_manager.config.get("auto_claim", {}).get("batch_size", 50)
         )
         for coin_id, coin_record in coin_records.coin_id_to_record.items():
             try:
@@ -1697,9 +1693,9 @@ async def spend_clawback_coins(
                 if len(coins) >= batch_size:
                     await self.service.wallet_state_manager.spend_clawback_coins(
                         coins,
-                        tx_fee,
+                        request.fee,
                         action_scope,
-                        request.get("force", False),
+                        request.force,
                         extra_conditions=extra_conditions,
                     )
                     coins = {}
@@ -1708,32 +1704,29 @@ async def spend_clawback_coins(
         if len(coins) > 0:
             await self.service.wallet_state_manager.spend_clawback_coins(
                 coins,
-                tx_fee,
+                request.fee,
                 action_scope,
-                request.get("force", False),
+                request.force,
                 extra_conditions=extra_conditions,
             )
-        return {
-            "success": True,
-            "transaction_ids": None,  # tx_endpoint wrapper will take care of this
-            "transactions": None,  # tx_endpoint wrapper will take care of this
-        }
+        # tx_endpoint will fill in the default values here
+        return SpendClawbackCoinsResponse([], [], transaction_ids=[])

-    async def delete_unconfirmed_transactions(self, request: dict[str, Any]) -> EndpointResult:
-        wallet_id = uint32(request["wallet_id"])
-        if wallet_id not in self.service.wallet_state_manager.wallets:
-            raise ValueError(f"Wallet id {wallet_id} does not exist")
+    @marshal
+    async def delete_unconfirmed_transactions(self, request: DeleteUnconfirmedTransactions) -> Empty:
+        if request.wallet_id not in self.service.wallet_state_manager.wallets:
+            raise ValueError(f"Wallet id {request.wallet_id} does not exist")
         if await self.service.wallet_state_manager.synced() is False:
             raise ValueError("Wallet needs to be fully synced.")

         async with self.service.wallet_state_manager.db_wrapper.writer():
-            await self.service.wallet_state_manager.tx_store.delete_unconfirmed_transactions(wallet_id)
-            wallet = self.service.wallet_state_manager.wallets[wallet_id]
+            await self.service.wallet_state_manager.tx_store.delete_unconfirmed_transactions(request.wallet_id)
+            wallet = self.service.wallet_state_manager.wallets[request.wallet_id]
             if wallet.type() == WalletType.POOLING_WALLET.value:
                 assert isinstance(wallet, PoolWallet)
                 wallet.target_state = None
-        return {}
+        return Empty()

     async def select_coins(
         self,
@@ -1875,26 +1868,23 @@ async def get_coin_records_by_names(self, request: dict[str, Any]) -> EndpointRe
         return {"coin_records": [cr.to_json_dict() for cr in coin_records]}

-    async def get_current_derivation_index(self, request: dict[str, Any]) -> dict[str, Any]:
+    @marshal
+    async def get_current_derivation_index(self, request: Empty) -> GetCurrentDerivationIndexResponse:
         assert self.service.wallet_state_manager is not None

         index: Optional[uint32] = await self.service.wallet_state_manager.puzzle_store.get_last_derivation_path()
-        return {"success": True, "index": index}
+        return GetCurrentDerivationIndexResponse(index)

-    async def extend_derivation_index(self, request: dict[str, Any]) -> dict[str, Any]:
+    @marshal
+    async def extend_derivation_index(self, request: ExtendDerivationIndex) -> ExtendDerivationIndexResponse:
         assert self.service.wallet_state_manager is not None

-        # Require a new max derivation index
-        if "index" not in request:
-            raise ValueError("Derivation index is required")
-
         # Require that the wallet is fully synced
         synced = await self.service.wallet_state_manager.synced()
         if synced is False:
             raise ValueError("Wallet needs to be fully synced before extending derivation index")

-        index = uint32(request["index"])
         current: Optional[uint32] = await self.service.wallet_state_manager.puzzle_store.get_last_derivation_path()

         # Additional sanity check that the wallet is synced
@@ -1902,10 +1892,10 @@ async def extend_derivation_index(self, request: dict[str, Any]) -> dict[str, An
             raise ValueError("No current derivation record found, unable to extend index")

         # Require that the new index is greater than the current index
-        if index <= current:
+        if request.index <= current:
             raise ValueError(f"New derivation index must be greater than current index: {current}")

-        if index - current > MAX_DERIVATION_INDEX_DELTA:
+        if request.index - current > MAX_DERIVATION_INDEX_DELTA:
             raise ValueError(
                 "Too many derivations requested. "
                 f"Use a derivation index less than {current + MAX_DERIVATION_INDEX_DELTA + 1}"
@@ -1915,14 +1905,13 @@ async def extend_derivation_index(self, request: dict[str, Any]) -> dict[str, An
         # to preserve the current last used index, so we call create_more_puzzle_hashes with
         # mark_existing_as_used=False
        result = await self.service.wallet_state_manager.create_more_puzzle_hashes(
-            from_zero=False, mark_existing_as_used=False, up_to_index=index, num_additional_phs=0
+            from_zero=False, mark_existing_as_used=False, up_to_index=request.index, num_additional_phs=0
        )
        await result.commit(self.service.wallet_state_manager)

-        updated: Optional[uint32] = await self.service.wallet_state_manager.puzzle_store.get_last_derivation_path()
-        updated_index = updated if updated is not None else None
+        updated_index = await self.service.wallet_state_manager.puzzle_store.get_last_derivation_path()

-        return {"success": True, "index": updated_index}
+        return ExtendDerivationIndexResponse(updated_index)

     @marshal
     async def get_notifications(self, request: GetNotifications) -> GetNotificationsResponse:
@@ -1941,16 +1930,16 @@ async def get_notifications(self, request: GetNotifications) -> GetNotifications

         return GetNotificationsResponse(notifications)

-    async def delete_notifications(self, request: dict[str, Any]) -> EndpointResult:
-        ids: Optional[list[str]] = request.get("ids", None)
-        if ids is None:
+    @marshal
+    async def delete_notifications(self, request: DeleteNotifications) -> Empty:
+        if request.ids is None:
             await self.service.wallet_state_manager.notification_manager.notification_store.delete_all_notifications()
         else:
             await self.service.wallet_state_manager.notification_manager.notification_store.delete_notifications(
-                [bytes32.from_hexstr(id) for id in ids]
+                request.ids
             )

-        return {}
+        return Empty()

     @tx_endpoint(push=True)
     async def send_notification(
@@ -1970,116 +1959,101 @@ async def send_notification(
         return {"tx": None, "transactions": None}  # tx_endpoint wrapper will take care of this

-    async def verify_signature(self, request: dict[str, Any]) -> EndpointResult:
+    @marshal
+    async def verify_signature(self, request: VerifySignature) -> VerifySignatureResponse:
         """
         Given a public key, message and signature, verify if it is valid.
         :param request:
         :return:
         """
-        input_message: str = request["message"]
-        signing_mode_str: Optional[str] = request.get("signing_mode")
         # Default to BLS_MESSAGE_AUGMENTATION_HEX_INPUT as this RPC was originally designed to verify
         # signatures made by `chia keys sign`, which uses BLS_MESSAGE_AUGMENTATION_HEX_INPUT
-        if signing_mode_str is None:
+        if request.signing_mode is None:
             signing_mode = SigningMode.BLS_MESSAGE_AUGMENTATION_HEX_INPUT
         else:
             try:
-                signing_mode = SigningMode(signing_mode_str)
+                signing_mode = SigningMode(request.signing_mode)
             except ValueError:
-                raise ValueError(f"Invalid signing mode: {signing_mode_str!r}")
+                raise ValueError(f"Invalid signing mode: {request.signing_mode!r}")

         if signing_mode in {SigningMode.CHIP_0002, SigningMode.CHIP_0002_P2_DELEGATED_CONDITIONS}:
             # CHIP-0002 message signatures are made over the tree hash of:
             #   ("Chia Signed Message", message)
-            message_to_verify: bytes = Program.to((CHIP_0002_SIGN_MESSAGE_PREFIX, input_message)).get_tree_hash()
+            message_to_verify: bytes = Program.to((CHIP_0002_SIGN_MESSAGE_PREFIX, request.message)).get_tree_hash()
         elif signing_mode == SigningMode.BLS_MESSAGE_AUGMENTATION_HEX_INPUT:
             # Message is expected to be a hex string
-            message_to_verify = hexstr_to_bytes(input_message)
+            message_to_verify = hexstr_to_bytes(request.message)
         elif signing_mode == SigningMode.BLS_MESSAGE_AUGMENTATION_UTF8_INPUT:
             # Message is expected to be a UTF-8 string
-            message_to_verify = bytes(input_message, "utf-8")
+            message_to_verify = bytes(request.message, "utf-8")
         else:
-            raise ValueError(f"Unsupported signing mode: {signing_mode_str!r}")
+            raise ValueError(f"Unsupported signing mode: {request.signing_mode!r}")

         # Verify using the BLS message augmentation scheme
         is_valid = AugSchemeMPL.verify(
-            G1Element.from_bytes(hexstr_to_bytes(request["pubkey"])),
+            request.pubkey,
             message_to_verify,
-            G2Element.from_bytes(hexstr_to_bytes(request["signature"])),
+            request.signature,
         )
-        address = request.get("address")
-        if address is not None:
+        if request.address is not None:
             # For signatures made by the sign_message_by_address/sign_message_by_id
             # endpoints, the "address" field should contain the p2_address of the NFT/DID
             # that was used to sign the message.
-            puzzle_hash: bytes32 = decode_puzzle_hash(address)
+            puzzle_hash: bytes32 = decode_puzzle_hash(request.address)
             expected_puzzle_hash: Optional[bytes32] = None
             if signing_mode == SigningMode.CHIP_0002_P2_DELEGATED_CONDITIONS:
-                puzzle = p2_delegated_conditions.puzzle_for_pk(Program.to(hexstr_to_bytes(request["pubkey"])))
+                puzzle = p2_delegated_conditions.puzzle_for_pk(Program.to(request.pubkey))
                 expected_puzzle_hash = bytes32(puzzle.get_tree_hash())
             else:
-                expected_puzzle_hash = puzzle_hash_for_synthetic_public_key(
-                    G1Element.from_bytes(hexstr_to_bytes(request["pubkey"]))
-                )
+                expected_puzzle_hash = puzzle_hash_for_synthetic_public_key(request.pubkey)
             if puzzle_hash != expected_puzzle_hash:
-                return {"isValid": False, "error": "Public key doesn't match the address"}
+                return VerifySignatureResponse(isValid=False, error="Public key doesn't match the address")
         if is_valid:
-            return {"isValid": is_valid}
+            return VerifySignatureResponse(isValid=is_valid)
         else:
-            return {"isValid": False, "error": "Signature is invalid."}
+            return VerifySignatureResponse(isValid=False, error="Signature is invalid.")

-    async def sign_message_by_address(self, request: dict[str, Any]) -> EndpointResult:
+    @marshal
+    async def sign_message_by_address(self, request: SignMessageByAddress) -> SignMessageByAddressResponse:
         """
         Given a derived P2 address, sign the message by its private key.
         :param request:
         :return:
         """
-        puzzle_hash: bytes32 = decode_puzzle_hash(request["address"])
-        is_hex: bool = request.get("is_hex", False)
-        if isinstance(is_hex, str):
-            is_hex = True if is_hex.lower() == "true" else False
-        safe_mode: bool = request.get("safe_mode", True)
-        if isinstance(safe_mode, str):
-            safe_mode = True if safe_mode.lower() == "true" else False
+        puzzle_hash: bytes32 = decode_puzzle_hash(request.address)
         mode: SigningMode = SigningMode.CHIP_0002
-        if is_hex and safe_mode:
+        if request.is_hex and request.safe_mode:
             mode = SigningMode.CHIP_0002_HEX_INPUT
-        elif not is_hex and not safe_mode:
+        elif not request.is_hex and not request.safe_mode:
             mode = SigningMode.BLS_MESSAGE_AUGMENTATION_UTF8_INPUT
-        elif is_hex and not safe_mode:
+        elif request.is_hex and not request.safe_mode:
             mode = SigningMode.BLS_MESSAGE_AUGMENTATION_HEX_INPUT
         pubkey, signature = await self.service.wallet_state_manager.main_wallet.sign_message(
-            request["message"], puzzle_hash, mode
+            request.message, puzzle_hash, mode
+        )
+        return SignMessageByAddressResponse(
+            pubkey=pubkey,
+            signature=signature,
+            signing_mode=mode.value,
         )
-        return {
-            "success": True,
-            "pubkey": str(pubkey),
-            "signature": str(signature),
-            "signing_mode": mode.value,
-        }

-    async def sign_message_by_id(self, request: dict[str, Any]) -> EndpointResult:
+    @marshal
+    async def sign_message_by_id(self, request: SignMessageByID) -> SignMessageByIDResponse:
         """
         Given a NFT/DID ID, sign the message by the P2 private key.
         :param request:
         :return:
         """
-        entity_id: bytes32 = decode_puzzle_hash(request["id"])
+        entity_id: bytes32 = decode_puzzle_hash(request.id)
         selected_wallet: Optional[WalletProtocol[Any]] = None
-        is_hex: bool = request.get("is_hex", False)
-        if isinstance(is_hex, str):
-            is_hex = True if is_hex.lower() == "true" else False
-        safe_mode: bool = request.get("safe_mode", True)
-        if isinstance(safe_mode, str):
-            safe_mode = True if safe_mode.lower() == "true" else False
         mode: SigningMode = SigningMode.CHIP_0002
-        if is_hex and safe_mode:
+        if request.is_hex and request.safe_mode:
             mode = SigningMode.CHIP_0002_HEX_INPUT
-        elif not is_hex and not safe_mode:
+        elif not request.is_hex and not request.safe_mode:
             mode = SigningMode.BLS_MESSAGE_AUGMENTATION_UTF8_INPUT
-        elif is_hex and not safe_mode:
+        elif request.is_hex and not request.safe_mode:
             mode = SigningMode.BLS_MESSAGE_AUGMENTATION_HEX_INPUT
-        if is_valid_address(request["id"], {AddressType.DID}, self.service.config):
+        if is_valid_address(request.id, {AddressType.DID}, self.service.config):
             for wallet in self.service.wallet_state_manager.wallets.values():
                 if wallet.type() == WalletType.DECENTRALIZED_ID.value:
                     assert isinstance(wallet, DIDWallet)
@@ -2088,11 +2062,11 @@ async def sign_message_by_id(self, request: dict[str, Any]) -> EndpointResult:
                         selected_wallet = wallet
                         break
             if selected_wallet is None:
-                return {"success": False, "error": f"DID for {entity_id.hex()} doesn't exist."}
+                raise ValueError(f"DID for {entity_id.hex()} doesn't exist.")
             assert isinstance(selected_wallet, DIDWallet)
-            pubkey, signature = await selected_wallet.sign_message(request["message"], mode)
+            pubkey, signature = await selected_wallet.sign_message(request.message, mode)
             latest_coin_id = (await selected_wallet.get_coin()).name()
-        elif is_valid_address(request["id"], {AddressType.NFT}, self.service.config):
+        elif is_valid_address(request.id, {AddressType.NFT}, self.service.config):
             target_nft: Optional[NFTCoinInfo] = None
             for wallet in self.service.wallet_state_manager.wallets.values():
                 if wallet.type() == WalletType.NFT.value:
@@ -2103,21 +2077,20 @@ async def sign_message_by_id(self, request: dict[str, Any]) -> EndpointResult:
                         target_nft = nft
                         break
             if selected_wallet is None or target_nft is None:
-                return {"success": False, "error": f"NFT for {entity_id.hex()} doesn't exist."}
+                raise ValueError(f"NFT for {entity_id.hex()} doesn't exist.")
             assert isinstance(selected_wallet, NFTWallet)
-            pubkey, signature = await selected_wallet.sign_message(request["message"], target_nft, mode)
+            pubkey, signature = await selected_wallet.sign_message(request.message, target_nft, mode)
             latest_coin_id = target_nft.coin.name()
         else:
-            return {"success": False, "error": f"Unknown ID type, {request['id']}"}
+            raise ValueError(f"Unknown ID type, {request.id}")

-        return {
-            "success": True,
-            "pubkey": str(pubkey),
-            "signature": str(signature),
-            "latest_coin_id": latest_coin_id.hex() if latest_coin_id is not None else None,
-            "signing_mode": mode.value,
-        }
+        return SignMessageByIDResponse(
+            pubkey=pubkey,
+            signature=signature,
+            latest_coin_id=latest_coin_id,
+            signing_mode=mode.value,
+        )

     ##########################################################################################
     # CATs and Trading
     ##########################################################################################
@@ -3520,12 +3493,18 @@ async def get_farmed_amount(self, request: dict[str, Any]) -> EndpointResult:
         fee_amount = 0
         blocks_won = 0
         last_height_farmed = uint32(0)
+
+        include_pool_rewards = request.get("include_pool_rewards", False)
+
         for record in tx_records:
             if record.wallet_id not in self.service.wallet_state_manager.wallets:
                 continue
             if record.type == TransactionType.COINBASE_REWARD.value:
-                if self.service.wallet_state_manager.wallets[record.wallet_id].type() == WalletType.POOLING_WALLET:
-                    # Don't add pool rewards for pool wallets.
+                if (
+                    not include_pool_rewards
+                    and self.service.wallet_state_manager.wallets[record.wallet_id].type() == WalletType.POOLING_WALLET
+                ):
+                    # Don't add pool rewards for pool wallets unless explicitly requested
                     continue
                 pool_reward_amount += record.amount
                 height = record.height_farmed(self.service.constants.GENESIS_CHALLENGE)
diff --git a/chia/wallet/wallet_rpc_client.py b/chia/wallet/wallet_rpc_client.py
index 2744a75363f6..16ecf7d584e0 100644
--- a/chia/wallet/wallet_rpc_client.py
+++ b/chia/wallet/wallet_rpc_client.py
@@ -1,6 +1,5 @@
 from __future__ import annotations

-from collections.abc import Sequence
 from typing import Any, Optional, Union, cast

 from chia_rs.sized_bytes import bytes32
@@ -16,9 +15,7 @@
 from chia.wallet.trade_record import TradeRecord
 from chia.wallet.trading.offer import Offer
 from chia.wallet.transaction_record import TransactionRecord
-from chia.wallet.transaction_sorting import SortKey
 from chia.wallet.util.clvm_streamable import json_deserialize_with_clvm_streamable
-from chia.wallet.util.query_filter import TransactionTypeFilter
 from chia.wallet.util.tx_config import CoinSelectionConfig, TXConfig
 from chia.wallet.wallet_coin_store import GetCoinRecords
 from chia.wallet.wallet_request_types import (
@@ -38,6 +35,8 @@
     CreateOfferForIDsResponse,
     CreateSignedTransactionsResponse,
     DeleteKey,
+    DeleteNotifications,
+    DeleteUnconfirmedTransactions,
     DIDCreateBackupFile,
     DIDCreateBackupFileResponse,
     DIDFindLostDID,
@@ -83,12 +82,17 @@
     DLUpdateRootResponse,
     ExecuteSigningInstructions,
     ExecuteSigningInstructionsResponse,
+    ExtendDerivationIndex,
+    ExtendDerivationIndexResponse,
     GatherSigningInfo,
     GatherSigningInfoResponse,
     GenerateMnemonicResponse,
     GetCATListResponse,
+    GetCurrentDerivationIndexResponse,
     GetHeightInfoResponse,
     GetLoggedInFingerprintResponse,
+    GetNextAddress,
+    GetNextAddressResponse,
     GetNotifications,
     GetNotificationsResponse,
     GetOffersCountResponse,
@@ -98,8 +102,18 @@
     GetSyncStatusResponse,
     GetTimestampForHeight,
     GetTimestampForHeightResponse,
+    GetTransaction,
+    GetTransactionCount,
+    GetTransactionCountResponse,
     GetTransactionMemo,
     GetTransactionMemoResponse,
+    GetTransactionResponse,
+    GetTransactions,
+    GetTransactionsResponse,
+    GetWalletBalance,
+    GetWalletBalanceResponse,
+    GetWalletBalances,
+    GetWalletBalancesResponse,
     GetWallets,
     GetWalletsResponse,
     LogIn,
@@ -143,9 +157,16 @@
     PWSelfPoolResponse,
     PWStatus,
     PWStatusResponse,
+    SendTransaction,
     SendTransactionMultiResponse,
     SendTransactionResponse,
     SetWalletResyncOnStartup,
+    SignMessageByAddress,
+    SignMessageByAddressResponse,
+    SignMessageByID,
+    SignMessageByIDResponse,
+    SpendClawbackCoins,
+    SpendClawbackCoinsResponse,
     SplitCoins,
     SplitCoinsResponse,
     SubmitTransactions,
@@ -171,7 +192,7 @@ def parse_result_transactions(result: dict[str, Any]) -> dict[str, Any]:
     result["transaction"] = TransactionRecord.from_json_dict(result["transaction"])
-    result["transactions"] = [TransactionRecord.from_json_dict_convenience(tx) for tx in result["transactions"]]
+    result["transactions"] = [TransactionRecord.from_json_dict(tx) for tx in result["transactions"]]
     if result["fee_transaction"]:
         result["fee_transaction"] = TransactionRecord.from_json_dict(result["fee_transaction"])
     return result
@@ -256,102 +277,39 @@ async def get_wallets(self, request: GetWallets) -> GetWalletsResponse:
         return GetWalletsResponse.from_json_dict(await self.fetch("get_wallets", request.to_json_dict()))

     # Wallet APIs
-    async def get_wallet_balance(self, wallet_id: int) -> dict[str, Any]:
-        request = {"wallet_id": wallet_id}
-        response = await self.fetch("get_wallet_balance", request)
-        # TODO: casting due to lack of type checked deserialization
-        return cast(dict[str, Any], response["wallet_balance"])
+    async def get_wallet_balance(self, request: GetWalletBalance) -> GetWalletBalanceResponse:
+        return GetWalletBalanceResponse.from_json_dict(await self.fetch("get_wallet_balance", request.to_json_dict()))

-    async def get_wallet_balances(self, wallet_ids: Optional[list[int]] = None) -> dict[str, dict[str, Any]]:
-        request = {"wallet_ids": wallet_ids}
-        response = await self.fetch("get_wallet_balances", request)
-        # TODO: casting due to lack of type checked deserialization
-        return cast(dict[str, dict[str, Any]], response["wallet_balances"])
+    async def get_wallet_balances(self, request: GetWalletBalances) -> GetWalletBalancesResponse:
+        return GetWalletBalancesResponse.from_json_dict(await self.fetch("get_wallet_balances", request.to_json_dict()))

-    async def get_transaction(self, transaction_id: bytes32) -> TransactionRecord:
-        request = {"transaction_id": transaction_id.hex()}
-        response = await self.fetch("get_transaction", request)
-        return TransactionRecord.from_json_dict_convenience(response["transaction"])
+    async def get_transaction(self, request: GetTransaction) -> GetTransactionResponse:
+        return GetTransactionResponse.from_json_dict(await self.fetch("get_transaction", request.to_json_dict()))

-    async def get_transactions(
-        self,
-        wallet_id: int,
-        start: Optional[int] = None,
-        end: Optional[int] = None,
-        sort_key: Optional[SortKey] = None,
-        reverse: bool = False,
-        to_address: Optional[str] = None,
-        type_filter: Optional[TransactionTypeFilter] = None,
-        confirmed: Optional[bool] = None,
-    ) -> list[TransactionRecord]:
-        request: dict[str, Any] = {"wallet_id": wallet_id}
-
-        if start is not None:
-            request["start"] = start
-        if end is not None:
-            request["end"] = end
-        if sort_key is not None:
-            request["sort_key"] = sort_key.name
-        request["reverse"] = reverse
-
-        if to_address is not None:
-            request["to_address"] = to_address
-
-        if type_filter is not None:
-            request["type_filter"] = type_filter.to_json_dict()
-
-        if confirmed is not None:
-            request["confirmed"] = confirmed
-
-        res = await self.fetch("get_transactions", request)
-        return [TransactionRecord.from_json_dict_convenience(tx) for tx in res["transactions"]]
-
-    async def get_transaction_count(
-        self, wallet_id: int, confirmed: Optional[bool] = None, type_filter: Optional[TransactionTypeFilter] = None
-    ) -> int:
-        request: dict[str, Any] = {"wallet_id": wallet_id}
-        if type_filter is not None:
-            request["type_filter"] = type_filter.to_json_dict()
-        if confirmed is not None:
-            request["confirmed"] = confirmed
-        res = await self.fetch("get_transaction_count", request)
-        # TODO: casting due to lack of type checked deserialization
-        return cast(int, res["count"])
+    async def get_transactions(self, request: GetTransactions) -> GetTransactionsResponse:
+        return GetTransactionsResponse.from_json_dict(await self.fetch("get_transactions", request.to_json_dict()))

-    async def get_next_address(self, wallet_id: int, new_address: bool) -> str:
-        request = {"wallet_id": wallet_id, "new_address": new_address}
-        response = await self.fetch("get_next_address", request)
-        # TODO: casting due to lack of type checked deserialization
-        return cast(str, response["address"])
+    async def get_transaction_count(self, request: GetTransactionCount) -> GetTransactionCountResponse:
+        return GetTransactionCountResponse.from_json_dict(
+            await self.fetch("get_transaction_count", request.to_json_dict())
+        )
+
+    async def get_next_address(self, request: GetNextAddress) -> GetNextAddressResponse:
+        return GetNextAddressResponse.from_json_dict(await self.fetch("get_next_address", request.to_json_dict()))

     async def send_transaction(
         self,
-        wallet_id: int,
-        amount: uint64,
-        address: str,
+        request: SendTransaction,
         tx_config: TXConfig,
-        fee: uint64 = uint64(0),
-        memos: Optional[list[str]] = None,
-        puzzle_decorator_override: Optional[list[dict[str, Union[str, int, bool]]]] = None,
         extra_conditions: tuple[Condition, ...] = tuple(),
         timelock_info: ConditionValidTimes = ConditionValidTimes(),
-        push: bool = True,
     ) -> SendTransactionResponse:
-        request = {
-            "wallet_id": wallet_id,
-            "amount": amount,
-            "address": address,
-            "fee": fee,
-            "puzzle_decorator": puzzle_decorator_override,
-            "extra_conditions": conditions_to_json_dicts(extra_conditions),
-            "push": push,
-            **tx_config.to_json_dict(),
-            **timelock_info.to_json_dict(),
-        }
-        if memos is not None:
-            request["memos"] = memos
-        response = await self.fetch("send_transaction", request)
-        return json_deserialize_with_clvm_streamable(response, SendTransactionResponse)
+        return SendTransactionResponse.from_json_dict(
+            await self.fetch(
+                "send_transaction",
+                request.json_serialize_for_transport(tx_config, extra_conditions, timelock_info),
+            )
+        )

     async def send_transaction_multi(
         self,
@@ -385,39 +343,30 @@ async def send_transaction_multi(

     async def spend_clawback_coins(
         self,
-        coin_ids: list[bytes32],
-        fee: int = 0,
-        force: bool = False,
-        push: bool = True,
+        request: SpendClawbackCoins,
+        tx_config: TXConfig,
         extra_conditions: tuple[Condition, ...] = tuple(),
         timelock_info: ConditionValidTimes = ConditionValidTimes(),
-    ) -> dict[str, Any]:
-        request = {
-            "coin_ids": [cid.hex() for cid in coin_ids],
-            "fee": fee,
-            "force": force,
-            "extra_conditions": conditions_to_json_dicts(extra_conditions),
-            "push": push,
-            **timelock_info.to_json_dict(),
-        }
-        response = await self.fetch("spend_clawback_coins", request)
-        return response
+    ) -> SpendClawbackCoinsResponse:
+        return SpendClawbackCoinsResponse.from_json_dict(
+            await self.fetch(
+                "spend_clawback_coins", request.json_serialize_for_transport(tx_config, extra_conditions, timelock_info)
+            )
+        )

-    async def delete_unconfirmed_transactions(self, wallet_id: int) -> None:
-        await self.fetch("delete_unconfirmed_transactions", {"wallet_id": wallet_id})
+    async def delete_unconfirmed_transactions(self, request: DeleteUnconfirmedTransactions) -> None:
+        await self.fetch("delete_unconfirmed_transactions", request.to_json_dict())

-    async def get_current_derivation_index(self) -> str:
-        response = await self.fetch("get_current_derivation_index", {})
-        index = response["index"]
-        return str(index)
+    async def get_current_derivation_index(self) -> GetCurrentDerivationIndexResponse:
+        return GetCurrentDerivationIndexResponse.from_json_dict(await self.fetch("get_current_derivation_index", {}))

-    async def extend_derivation_index(self, index: int) -> str:
-        response = await self.fetch("extend_derivation_index", {"index": index})
-        updated_index = response["index"]
-        return str(updated_index)
+    async def extend_derivation_index(self, request: ExtendDerivationIndex) -> ExtendDerivationIndexResponse:
+        return ExtendDerivationIndexResponse.from_json_dict(
+            await self.fetch("extend_derivation_index", request.to_json_dict())
+        )

-    async def get_farmed_amount(self) -> dict[str, Any]:
-        return await self.fetch("get_farmed_amount", {})
+    async def get_farmed_amount(self, include_pool_rewards: bool = False) -> dict[str, Any]:
+        return await self.fetch("get_farmed_amount", {"include_pool_rewards": include_pool_rewards})

     async def create_signed_transactions(
         self,
@@ -1153,14 +1102,8 @@ async def get_notifications(self, request: GetNotifications) -> GetNotifications
         response = await self.fetch("get_notifications", request.to_json_dict())
         return json_deserialize_with_clvm_streamable(response, GetNotificationsResponse)

-    async def delete_notifications(self, ids: Optional[Sequence[bytes32]] = None) -> bool:
-        request = {}
-        if ids is not None:
-            request["ids"] = [id.hex() for id in ids]
-        response = await self.fetch("delete_notifications", request)
-        # TODO: casting due to lack of type checked deserialization
-        result = cast(bool, response["success"])
-        return result
+    async def delete_notifications(self, request: DeleteNotifications) -> None:
+        await self.fetch("delete_notifications", request.to_json_dict())

     async def send_notification(
         self,
@@ -1184,19 +1127,15 @@ async def send_notification(
                 **timelock_info.to_json_dict(),
             },
         )
-        return TransactionRecord.from_json_dict_convenience(response["tx"])
+        return TransactionRecord.from_json_dict(response["tx"])

-    async def sign_message_by_address(self, address: str, message: str) -> tuple[str, str, str]:
-        response = await self.fetch("sign_message_by_address", {"address": address, "message": message})
-        return response["pubkey"], response["signature"], response["signing_mode"]
-
-    async def sign_message_by_id(
-        self, id: str, message: str, is_hex: bool = False, safe_mode: bool = True
-    ) -> tuple[str, str, str]:
-        response = await self.fetch(
-            "sign_message_by_id", {"id": id, "message": message, "is_hex": is_hex, "safe_mode": safe_mode}
+    async def sign_message_by_address(self, request: SignMessageByAddress) -> SignMessageByAddressResponse:
+        return SignMessageByAddressResponse.from_json_dict(
+            await self.fetch("sign_message_by_address", request.to_json_dict())
         )
-        return response["pubkey"], response["signature"], response["signing_mode"]
+
+    async def sign_message_by_id(self, request: SignMessageByID) -> SignMessageByIDResponse:
+        return SignMessageByIDResponse.from_json_dict(await self.fetch("sign_message_by_id", request.to_json_dict()))

     async def verify_signature(self, request: VerifySignature) -> VerifySignatureResponse:
         return VerifySignatureResponse.from_json_dict(await self.fetch("verify_signature", {**request.to_json_dict()}))
@@ -1279,7 +1218,7 @@ async def crcat_approve_pending(
                 **timelock_info.to_json_dict(),
             },
         )
-        return [TransactionRecord.from_json_dict_convenience(tx) for tx in response["transactions"]]
+        return [TransactionRecord.from_json_dict(tx) for tx in response["transactions"]]

     async def gather_signing_info(
         self,
diff --git a/chia/wallet/wallet_service.py b/chia/wallet/wallet_service.py
new file mode 100644
index 000000000000..8851b60ddae3
--- /dev/null
+++ b/chia/wallet/wallet_service.py
@@ -0,0 +1,8 @@
+from __future__ import annotations
+
+from chia.server.start_service import Service
+from chia.wallet.wallet_node import WalletNode
+from chia.wallet.wallet_node_api import WalletNodeAPI
+from chia.wallet.wallet_rpc_api import WalletRpcApi
+
+WalletService = Service[WalletNode, WalletNodeAPI, WalletRpcApi]
diff --git a/chia/wallet/wallet_state_manager.py b/chia/wallet/wallet_state_manager.py
index fca7e4ac61f6..ba09ea83a206 100644
--- a/chia/wallet/wallet_state_manager.py
+++ b/chia/wallet/wallet_state_manager.py
@@ -232,7 +232,7 @@ async def create(
         self.initial_num_public_keys = min_num_public_keys

         self.coin_store = await WalletCoinStore.create(self.db_wrapper)
-        self.tx_store = await WalletTransactionStore.create(self.db_wrapper)
+        self.tx_store = await WalletTransactionStore.create(self.db_wrapper, self.config)
         self.puzzle_store = await WalletPuzzleStore.create(self.db_wrapper)
         self.user_store = await WalletUserStore.create(self.db_wrapper)
         self.nft_store = await WalletNftStore.create(self.db_wrapper)
@@ -1053,6 +1053,7 @@ async def spend_clawback_coins(
             confirmed_at_height=uint32(0),
             created_at_time=now,
             to_puzzle_hash=derivation_record.puzzle_hash,
+            to_address=self.encode_puzzle_hash(derivation_record.puzzle_hash),
             amount=amount,
             fee_amount=uint64(fee),
             confirmed=False,
@@ -1427,14 +1428,16 @@ async def handle_nft(
             nft_data.parent_coin_spend.solution,
         )
         if uncurried_nft.supports_did:
-            _new_did_id = get_new_owner_did(uncurried_nft, Program.from_serialized(nft_data.parent_coin_spend.solution))
+            parsed_did_id = get_new_owner_did(
+                uncurried_nft, Program.from_serialized(nft_data.parent_coin_spend.solution)
+            )
             old_did_id = uncurried_nft.owner_did
-            if _new_did_id is None:
+            if parsed_did_id is None:
                 new_did_id = old_did_id
-            elif _new_did_id == b"":
+            elif parsed_did_id == b"":
                 new_did_id = None
             else:
-                new_did_id = _new_did_id
+                new_did_id = parsed_did_id
         self.log.debug(
             "Handling NFT: %s, old DID:%s, new DID:%s, old P2:%s, new P2:%s",
             nft_data.parent_coin_spend,
@@ -1549,14 +1552,16 @@ async def handle_clawback(
             clawback_coin_spend: CoinSpend = await fetch_coin_spend_for_coin_state(coin_state, peer)
             clawback_spend_bundle = WalletSpendBundle([clawback_coin_spend], G2Element())
             if await self.puzzle_store.puzzle_hash_exists(clawback_spend_bundle.additions()[0].puzzle_hash):
+                to_ph = (
+                    metadata.sender_puzzle_hash
+                    if clawback_spend_bundle.additions()[0].puzzle_hash == metadata.sender_puzzle_hash
+                    else metadata.recipient_puzzle_hash
+                )
                 tx_record = TransactionRecord(
                     confirmed_at_height=uint32(coin_state.spent_height),
                     created_at_time=created_timestamp,
-                    to_puzzle_hash=(
-                        metadata.sender_puzzle_hash
-                        if clawback_spend_bundle.additions()[0].puzzle_hash == metadata.sender_puzzle_hash
-                        else metadata.recipient_puzzle_hash
-                    ),
+                    to_puzzle_hash=to_ph,
+                    to_address=self.encode_puzzle_hash(to_ph),
                     amount=uint64(coin_state.coin.amount),
                     fee_amount=uint64(0),
                     confirmed=True,
@@ -1594,6 +1599,7 @@ async def handle_clawback(
                 confirmed_at_height=uint32(coin_state.created_height),
                 created_at_time=uint64(created_timestamp),
                 to_puzzle_hash=metadata.recipient_puzzle_hash,
+                to_address=self.encode_puzzle_hash(metadata.recipient_puzzle_hash),
                 amount=uint64(coin_state.coin.amount),
                 fee_amount=uint64(0),
                 confirmed=spent_height != 0,
@@ -1790,14 +1796,14 @@ async def _add_coin_states(
                                 created_timestamp = await self.wallet_node.get_timestamp_for_height(
                                     uint32(coin_state.created_height)
                                 )
+                                to_ph = await self.convert_puzzle_hash(
+                                    wallet_identifier.id, coin_state.coin.puzzle_hash
+                                )
                                 tx_record = TransactionRecord(
                                     confirmed_at_height=uint32(coin_state.created_height),
                                     created_at_time=uint64(created_timestamp),
-                                    to_puzzle_hash=(
-                                        await self.convert_puzzle_hash(
-                                            wallet_identifier.id, coin_state.coin.puzzle_hash
-                                        )
-                                    ),
+                                    to_puzzle_hash=to_ph,
+                                    to_address=self.encode_puzzle_hash(to_ph),
                                     amount=uint64(coin_state.coin.amount),
                                     fee_amount=uint64(0),
                                     confirmed=True,
@@ -1872,12 +1878,12 @@ async def _add_coin_states(
                                     for added_coin in additions:
                                         tx_name += bytes(added_coin.name())
                                     tx_name = std_hash(tx_name)
+                                    to_ph = await self.convert_puzzle_hash(wallet_identifier.id, to_puzzle_hash)
                                     tx_record = TransactionRecord(
                                         confirmed_at_height=uint32(coin_state.spent_height),
                                         created_at_time=uint64(spent_timestamp),
-                                        to_puzzle_hash=(
-                                            await self.convert_puzzle_hash(wallet_identifier.id, to_puzzle_hash)
-                                        ),
+                                        to_puzzle_hash=to_ph,
+                                        to_address=self.encode_puzzle_hash(to_ph),
                                         amount=uint64(amount),
                                         fee_amount=uint64(fee),
                                         confirmed=True,
@@ -1941,8 +1947,8 @@ async def _add_coin_states(
                                 # No more singleton (maybe destroyed?)
break - coin_name = new_singleton_coin.name() - existing = await self.coin_store.get_coin_record(coin_name) + new_singleton_name = new_singleton_coin.name() + existing = await self.coin_store.get_coin_record(new_singleton_name) if existing is None: await self.coin_added( new_singleton_coin, @@ -1951,7 +1957,7 @@ async def _add_coin_states( uint32(record.wallet_id), record.wallet_type, peer, - coin_name, + new_singleton_name, coin_data, ) await self.coin_store.set_spent( @@ -1959,7 +1965,7 @@ async def _add_coin_states( ) await self.add_interested_coin_ids([new_singleton_coin.name()]) new_coin_state: list[CoinState] = await self.wallet_node.get_coin_state( - [coin_name], peer=peer, fork_height=fork_height + [new_singleton_name], peer=peer, fork_height=fork_height ) assert len(new_coin_state) == 1 curr_coin_state = new_coin_state[0] @@ -2035,8 +2041,8 @@ async def _add_coin_states( launcher_spend_additions = compute_additions(launcher_spend) assert len(launcher_spend_additions) == 1 coin_added = launcher_spend_additions[0] - coin_name = coin_added.name() - existing = await self.coin_store.get_coin_record(coin_name) + coin_added_name = coin_added.name() + existing = await self.coin_store.get_coin_record(coin_added_name) if existing is None: await self.coin_added( coin_added, @@ -2045,10 +2051,10 @@ async def _add_coin_states( pool_wallet.id(), pool_wallet.type(), peer, - coin_name, + coin_added_name, coin_data, ) - await self.add_interested_coin_ids([coin_name]) + await self.add_interested_coin_ids([coin_added_name]) else: raise RuntimeError("All cases already handled") # Logic error, all cases handled @@ -2199,10 +2205,12 @@ async def coin_added( clawback = parent_coin_record is not None and parent_coin_record.coin_type == CoinType.CLAWBACK if coinbase or clawback or (not coin_confirmed_transaction and not change): + to_ph = await self.convert_puzzle_hash(wallet_id, coin.puzzle_hash) tx_record = TransactionRecord( confirmed_at_height=uint32(height), created_at_time=await self.wallet_node.get_timestamp_for_height(height), - to_puzzle_hash=await self.convert_puzzle_hash(wallet_id, coin.puzzle_hash), + to_puzzle_hash=to_ph, + to_address=self.encode_puzzle_hash(to_ph), amount=uint64(coin.amount), fee_amount=uint64(0), confirmed=True, @@ -2605,20 +2613,20 @@ async def key_hints_for_pubkeys(self, pks: list[bytes]) -> KeyHints: [path_hint for pk in pks for path_hint in (await self.path_hint_for_pubkey(pk),) if path_hint is not None], ) - async def gather_signing_info(self, coin_spends: list[Spend]) -> SigningInstructions: + async def gather_signing_info(self, spends: list[Spend]) -> SigningInstructions: pks: list[bytes] = [] signing_targets: list[SigningTarget] = [] - for coin_spend in coin_spends: - _coin_spend = coin_spend.as_coin_spend() + for spend in spends: + coin_spend = spend.as_coin_spend() # Get AGG_SIG conditions conditions_dict = conditions_dict_for_solution( - Program.from_serialized(_coin_spend.puzzle_reveal), - Program.from_serialized(_coin_spend.solution), + Program.from_serialized(coin_spend.puzzle_reveal), + Program.from_serialized(coin_spend.solution), self.constants.MAX_BLOCK_COST_CLVM, ) # Create signature for pk, msg in pkm_pairs_for_conditions_dict( - conditions_dict, _coin_spend.coin, self.constants.AGG_SIG_ME_ADDITIONAL_DATA + conditions_dict, coin_spend.coin, self.constants.AGG_SIG_ME_ADDITIONAL_DATA ): pk_bytes = bytes(pk) pks.append(pk_bytes) @@ -2769,3 +2777,6 @@ async def new_action_scope( async def delete_wallet(self, wallet_id: uint32) -> None: await 
self.user_store.delete_wallet(wallet_id) await self.puzzle_store.delete_wallet(wallet_id) + + def encode_puzzle_hash(self, puzzle_hash: bytes32) -> str: + return encode_puzzle_hash(puzzle_hash, AddressType.XCH.hrp(self.config)) diff --git a/chia/wallet/wallet_transaction_store.py b/chia/wallet/wallet_transaction_store.py index 040c42340c6a..30c0e8c13749 100644 --- a/chia/wallet/wallet_transaction_store.py +++ b/chia/wallet/wallet_transaction_store.py @@ -8,8 +8,10 @@ import aiosqlite from chia_rs.sized_bytes import bytes32 from chia_rs.sized_ints import uint8, uint32 +from typing_extensions import Any from chia.types.mempool_inclusion_status import MempoolInclusionStatus +from chia.util.bech32m import encode_puzzle_hash from chia.util.db_wrapper import DBWrapper2 from chia.util.errors import Err from chia.wallet.conditions import ConditionValidTimes @@ -20,6 +22,7 @@ minimum_send_attempts, ) from chia.wallet.transaction_sorting import SortKey +from chia.wallet.util.address_type import AddressType from chia.wallet.util.query_filter import FilterMode, TransactionTypeFilter from chia.wallet.util.transaction_type import TransactionType @@ -44,11 +47,13 @@ class WalletTransactionStore: tx_submitted: dict[bytes32, tuple[int, int]] # tx_id: [time submitted: count] unconfirmed_txs: list[LightTransactionRecord] # tx_id: [time submitted: count] last_wallet_tx_resend_time: int # Epoch time in seconds + config: dict[str, Any] @classmethod - async def create(cls, db_wrapper: DBWrapper2): + async def create(cls, db_wrapper: DBWrapper2, config: dict[str, Any]): self = cls() + self.config = config self.db_wrapper = db_wrapper async with self.db_wrapper.writer_maybe_transaction() as conn: await conn.execute( @@ -257,10 +262,9 @@ async def get_not_sent(self, *, include_accepted_txs=False) -> list[TransactionR if time_submitted < current_time - (60 * 10): records.append(record) self.tx_submitted[record.name] = current_time, 1 - else: - if count < minimum_send_attempts: - records.append(record) - self.tx_submitted[record.name] = time_submitted, (count + 1) + elif count < minimum_send_attempts: + records.append(record) + self.tx_submitted[record.name] = time_submitted, (count + 1) else: records.append(record) self.tx_submitted[record.name] = current_time, 1 @@ -485,6 +489,10 @@ async def _get_new_tx_records_from_old(self, old_records: list[TransactionRecord valid_times=( tx_id_to_valid_times[record.name] if record.name in tx_id_to_valid_times else empty_valid_times ), + to_address=encode_puzzle_hash( + record.to_puzzle_hash, + AddressType.XCH.hrp(self.config), + ), ) for record in old_records ] diff --git a/install-timelord.sh b/install-timelord.sh index 1cf12deba1f5..35583f06c447 100755 --- a/install-timelord.sh +++ b/install-timelord.sh @@ -113,8 +113,11 @@ else symlink_vdf_bench "$PYTHON_VERSION" elif [ -e venv/bin/python ] && test "$MACOS"; then echo "Installing chiavdf dependencies for MacOS." + if ! cmake --version >/dev/null 2>&1; then + brew install --formula --quiet cmake + fi # The most recent boost version causes compile errors. - brew install --formula --quiet boost@1.85 cmake gmp + brew install --formula --quiet boost@1.85 gmp # boost@1.85 is keg-only, which means it was not symlinked into /usr/local, # because this is an alternate version of another formula. 
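The wallet hunks above thread a human-readable to_address through every TransactionRecord the state manager creates, derived from the same puzzle hash that already lands in to_puzzle_hash. A minimal sketch of what the new encode_puzzle_hash helper computes, assuming the mainnet prefix "xch" (the wallet resolves the prefix from config via AddressType.XCH.hrp, and the puzzle hash below is made up purely for illustration):

    from chia_rs.sized_bytes import bytes32

    from chia.util.bech32m import decode_puzzle_hash, encode_puzzle_hash

    # A made-up 32-byte puzzle hash, purely for illustration.
    puzzle_hash = bytes32(b"\x11" * 32)

    # bech32m-encode the raw hash with the network prefix; mainnet uses "xch",
    # testnets use "txch".
    address = encode_puzzle_hash(puzzle_hash, "xch")

    # The encoding is reversible, so the address is purely presentational and
    # nothing extra has to stay in sync with to_puzzle_hash.
    assert decode_puzzle_hash(address) == puzzle_hash

The same conversion is applied during migration in _get_new_tx_records_from_old, so records loaded from older stores pick up an address as well.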
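The get_not_sent change in wallet_transaction_store.py is a pure control-flow flattening: the nested else/if collapses into elif with identical behavior. Sketched standalone, the resend policy it encodes reads as follows (the function name and the constant's value here are assumptions for illustration; the real minimum_send_attempts is imported at the top of the module):

    MINIMUM_SEND_ATTEMPTS = 6  # assumed value, for the sketch only


    def resend_decision(
        current_time: int, time_submitted: int, count: int
    ) -> tuple[bool, tuple[int, int]]:
        """Mirror the branches for a tx already tracked in tx_submitted.

        Returns (resend now?, updated (time_submitted, count) bookkeeping).
        """
        if time_submitted < current_time - 60 * 10:
            # The last attempt is over ten minutes old: resend and reset the counter.
            return True, (current_time, 1)
        if count < MINIMUM_SEND_ATTEMPTS:
            # Recent attempt, but still under the attempt budget: resend.
            return True, (time_submitted, count + 1)
        # Recent attempt and budget exhausted: hold this record back for now.
        return False, (time_submitted, count)

Transactions not yet tracked in tx_submitted always go out immediately; that is the trailing else branch the diff leaves untouched.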
export LDFLAGS="-L/usr/local/opt/boost@1.85/lib" diff --git a/mypy-exclusions.txt b/mypy-exclusions.txt index 16fe00a12e5b..3274b0145430 100644 --- a/mypy-exclusions.txt +++ b/mypy-exclusions.txt @@ -58,7 +58,6 @@ chia._tests.core.full_node.test_address_manager chia._tests.core.full_node.test_node_load chia._tests.core.full_node.test_performance chia._tests.core.full_node.test_transactions -chia._tests.core.server.test_rate_limits chia._tests.core.ssl.test_ssl chia._tests.core.test_crawler_rpc chia._tests.core.test_daemon_rpc diff --git a/poetry.lock b/poetry.lock index 2fb8e4bf4fe8..d8106e536089 100644 --- a/poetry.lock +++ b/poetry.lock @@ -26,98 +26,98 @@ files = [ [[package]] name = "aiohttp" -version = "3.12.14" +version = "3.12.15" description = "Async http client/server framework (asyncio)" optional = false python-versions = ">=3.9" groups = ["main"] files = [ - {file = "aiohttp-3.12.14-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:906d5075b5ba0dd1c66fcaaf60eb09926a9fef3ca92d912d2a0bbdbecf8b1248"}, - {file = "aiohttp-3.12.14-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c875bf6fc2fd1a572aba0e02ef4e7a63694778c5646cdbda346ee24e630d30fb"}, - {file = "aiohttp-3.12.14-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fbb284d15c6a45fab030740049d03c0ecd60edad9cd23b211d7e11d3be8d56fd"}, - {file = "aiohttp-3.12.14-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38e360381e02e1a05d36b223ecab7bc4a6e7b5ab15760022dc92589ee1d4238c"}, - {file = "aiohttp-3.12.14-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:aaf90137b5e5d84a53632ad95ebee5c9e3e7468f0aab92ba3f608adcb914fa95"}, - {file = "aiohttp-3.12.14-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e532a25e4a0a2685fa295a31acf65e027fbe2bea7a4b02cdfbbba8a064577663"}, - {file = "aiohttp-3.12.14-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eab9762c4d1b08ae04a6c77474e6136da722e34fdc0e6d6eab5ee93ac29f35d1"}, - {file = "aiohttp-3.12.14-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:abe53c3812b2899889a7fca763cdfaeee725f5be68ea89905e4275476ffd7e61"}, - {file = "aiohttp-3.12.14-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5760909b7080aa2ec1d320baee90d03b21745573780a072b66ce633eb77a8656"}, - {file = "aiohttp-3.12.14-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:02fcd3f69051467bbaa7f84d7ec3267478c7df18d68b2e28279116e29d18d4f3"}, - {file = "aiohttp-3.12.14-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:4dcd1172cd6794884c33e504d3da3c35648b8be9bfa946942d353b939d5f1288"}, - {file = "aiohttp-3.12.14-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:224d0da41355b942b43ad08101b1b41ce633a654128ee07e36d75133443adcda"}, - {file = "aiohttp-3.12.14-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:e387668724f4d734e865c1776d841ed75b300ee61059aca0b05bce67061dcacc"}, - {file = "aiohttp-3.12.14-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:dec9cde5b5a24171e0b0a4ca064b1414950904053fb77c707efd876a2da525d8"}, - {file = "aiohttp-3.12.14-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:bbad68a2af4877cc103cd94af9160e45676fc6f0c14abb88e6e092b945c2c8e3"}, - {file = "aiohttp-3.12.14-cp310-cp310-win32.whl", hash = "sha256:ee580cb7c00bd857b3039ebca03c4448e84700dc1322f860cf7a500a6f62630c"}, - {file = "aiohttp-3.12.14-cp310-cp310-win_amd64.whl", hash = 
"sha256:cf4f05b8cea571e2ccc3ca744e35ead24992d90a72ca2cf7ab7a2efbac6716db"}, - {file = "aiohttp-3.12.14-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f4552ff7b18bcec18b60a90c6982049cdb9dac1dba48cf00b97934a06ce2e597"}, - {file = "aiohttp-3.12.14-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8283f42181ff6ccbcf25acaae4e8ab2ff7e92b3ca4a4ced73b2c12d8cd971393"}, - {file = "aiohttp-3.12.14-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:040afa180ea514495aaff7ad34ec3d27826eaa5d19812730fe9e529b04bb2179"}, - {file = "aiohttp-3.12.14-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b413c12f14c1149f0ffd890f4141a7471ba4b41234fe4fd4a0ff82b1dc299dbb"}, - {file = "aiohttp-3.12.14-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:1d6f607ce2e1a93315414e3d448b831238f1874b9968e1195b06efaa5c87e245"}, - {file = "aiohttp-3.12.14-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:565e70d03e924333004ed101599902bba09ebb14843c8ea39d657f037115201b"}, - {file = "aiohttp-3.12.14-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4699979560728b168d5ab63c668a093c9570af2c7a78ea24ca5212c6cdc2b641"}, - {file = "aiohttp-3.12.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad5fdf6af93ec6c99bf800eba3af9a43d8bfd66dce920ac905c817ef4a712afe"}, - {file = "aiohttp-3.12.14-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4ac76627c0b7ee0e80e871bde0d376a057916cb008a8f3ffc889570a838f5cc7"}, - {file = "aiohttp-3.12.14-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:798204af1180885651b77bf03adc903743a86a39c7392c472891649610844635"}, - {file = "aiohttp-3.12.14-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:4f1205f97de92c37dd71cf2d5bcfb65fdaed3c255d246172cce729a8d849b4da"}, - {file = "aiohttp-3.12.14-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:76ae6f1dd041f85065d9df77c6bc9c9703da9b5c018479d20262acc3df97d419"}, - {file = "aiohttp-3.12.14-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:a194ace7bc43ce765338ca2dfb5661489317db216ea7ea700b0332878b392cab"}, - {file = "aiohttp-3.12.14-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:16260e8e03744a6fe3fcb05259eeab8e08342c4c33decf96a9dad9f1187275d0"}, - {file = "aiohttp-3.12.14-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:8c779e5ebbf0e2e15334ea404fcce54009dc069210164a244d2eac8352a44b28"}, - {file = "aiohttp-3.12.14-cp311-cp311-win32.whl", hash = "sha256:a289f50bf1bd5be227376c067927f78079a7bdeccf8daa6a9e65c38bae14324b"}, - {file = "aiohttp-3.12.14-cp311-cp311-win_amd64.whl", hash = "sha256:0b8a69acaf06b17e9c54151a6c956339cf46db4ff72b3ac28516d0f7068f4ced"}, - {file = "aiohttp-3.12.14-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:a0ecbb32fc3e69bc25efcda7d28d38e987d007096cbbeed04f14a6662d0eee22"}, - {file = "aiohttp-3.12.14-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:0400f0ca9bb3e0b02f6466421f253797f6384e9845820c8b05e976398ac1d81a"}, - {file = "aiohttp-3.12.14-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a56809fed4c8a830b5cae18454b7464e1529dbf66f71c4772e3cfa9cbec0a1ff"}, - {file = "aiohttp-3.12.14-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27f2e373276e4755691a963e5d11756d093e346119f0627c2d6518208483fb6d"}, - {file = "aiohttp-3.12.14-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = 
"sha256:ca39e433630e9a16281125ef57ece6817afd1d54c9f1bf32e901f38f16035869"}, - {file = "aiohttp-3.12.14-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9c748b3f8b14c77720132b2510a7d9907a03c20ba80f469e58d5dfd90c079a1c"}, - {file = "aiohttp-3.12.14-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f0a568abe1b15ce69d4cc37e23020720423f0728e3cb1f9bcd3f53420ec3bfe7"}, - {file = "aiohttp-3.12.14-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9888e60c2c54eaf56704b17feb558c7ed6b7439bca1e07d4818ab878f2083660"}, - {file = "aiohttp-3.12.14-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3006a1dc579b9156de01e7916d38c63dc1ea0679b14627a37edf6151bc530088"}, - {file = "aiohttp-3.12.14-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:aa8ec5c15ab80e5501a26719eb48a55f3c567da45c6ea5bb78c52c036b2655c7"}, - {file = "aiohttp-3.12.14-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:39b94e50959aa07844c7fe2206b9f75d63cc3ad1c648aaa755aa257f6f2498a9"}, - {file = "aiohttp-3.12.14-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:04c11907492f416dad9885d503fbfc5dcb6768d90cad8639a771922d584609d3"}, - {file = "aiohttp-3.12.14-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:88167bd9ab69bb46cee91bd9761db6dfd45b6e76a0438c7e884c3f8160ff21eb"}, - {file = "aiohttp-3.12.14-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:791504763f25e8f9f251e4688195e8b455f8820274320204f7eafc467e609425"}, - {file = "aiohttp-3.12.14-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2785b112346e435dd3a1a67f67713a3fe692d288542f1347ad255683f066d8e0"}, - {file = "aiohttp-3.12.14-cp312-cp312-win32.whl", hash = "sha256:15f5f4792c9c999a31d8decf444e79fcfd98497bf98e94284bf390a7bb8c1729"}, - {file = "aiohttp-3.12.14-cp312-cp312-win_amd64.whl", hash = "sha256:3b66e1a182879f579b105a80d5c4bd448b91a57e8933564bf41665064796a338"}, - {file = "aiohttp-3.12.14-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:3143a7893d94dc82bc409f7308bc10d60285a3cd831a68faf1aa0836c5c3c767"}, - {file = "aiohttp-3.12.14-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:3d62ac3d506cef54b355bd34c2a7c230eb693880001dfcda0bf88b38f5d7af7e"}, - {file = "aiohttp-3.12.14-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:48e43e075c6a438937c4de48ec30fa8ad8e6dfef122a038847456bfe7b947b63"}, - {file = "aiohttp-3.12.14-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:077b4488411a9724cecc436cbc8c133e0d61e694995b8de51aaf351c7578949d"}, - {file = "aiohttp-3.12.14-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d8c35632575653f297dcbc9546305b2c1133391089ab925a6a3706dfa775ccab"}, - {file = "aiohttp-3.12.14-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6b8ce87963f0035c6834b28f061df90cf525ff7c9b6283a8ac23acee6502afd4"}, - {file = "aiohttp-3.12.14-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f0a2cf66e32a2563bb0766eb24eae7e9a269ac0dc48db0aae90b575dc9583026"}, - {file = "aiohttp-3.12.14-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdea089caf6d5cde975084a884c72d901e36ef9c2fd972c9f51efbbc64e96fbd"}, - {file = "aiohttp-3.12.14-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8a7865f27db67d49e81d463da64a59365ebd6b826e0e4847aa111056dcb9dc88"}, - {file = "aiohttp-3.12.14-cp313-cp313-musllinux_1_2_aarch64.whl", hash 
= "sha256:0ab5b38a6a39781d77713ad930cb5e7feea6f253de656a5f9f281a8f5931b086"}, - {file = "aiohttp-3.12.14-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:9b3b15acee5c17e8848d90a4ebc27853f37077ba6aec4d8cb4dbbea56d156933"}, - {file = "aiohttp-3.12.14-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:e4c972b0bdaac167c1e53e16a16101b17c6d0ed7eac178e653a07b9f7fad7151"}, - {file = "aiohttp-3.12.14-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:7442488b0039257a3bdbc55f7209587911f143fca11df9869578db6c26feeeb8"}, - {file = "aiohttp-3.12.14-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:f68d3067eecb64c5e9bab4a26aa11bd676f4c70eea9ef6536b0a4e490639add3"}, - {file = "aiohttp-3.12.14-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f88d3704c8b3d598a08ad17d06006cb1ca52a1182291f04979e305c8be6c9758"}, - {file = "aiohttp-3.12.14-cp313-cp313-win32.whl", hash = "sha256:a3c99ab19c7bf375c4ae3debd91ca5d394b98b6089a03231d4c580ef3c2ae4c5"}, - {file = "aiohttp-3.12.14-cp313-cp313-win_amd64.whl", hash = "sha256:3f8aad695e12edc9d571f878c62bedc91adf30c760c8632f09663e5f564f4baa"}, - {file = "aiohttp-3.12.14-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b8cc6b05e94d837bcd71c6531e2344e1ff0fb87abe4ad78a9261d67ef5d83eae"}, - {file = "aiohttp-3.12.14-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d1dcb015ac6a3b8facd3677597edd5ff39d11d937456702f0bb2b762e390a21b"}, - {file = "aiohttp-3.12.14-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3779ed96105cd70ee5e85ca4f457adbce3d9ff33ec3d0ebcdf6c5727f26b21b3"}, - {file = "aiohttp-3.12.14-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:717a0680729b4ebd7569c1dcd718c46b09b360745fd8eb12317abc74b14d14d0"}, - {file = "aiohttp-3.12.14-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:b5dd3a2ef7c7e968dbbac8f5574ebeac4d2b813b247e8cec28174a2ba3627170"}, - {file = "aiohttp-3.12.14-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4710f77598c0092239bc12c1fcc278a444e16c7032d91babf5abbf7166463f7b"}, - {file = "aiohttp-3.12.14-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f3e9f75ae842a6c22a195d4a127263dbf87cbab729829e0bd7857fb1672400b2"}, - {file = "aiohttp-3.12.14-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f9c8d55d6802086edd188e3a7d85a77787e50d56ce3eb4757a3205fa4657922"}, - {file = "aiohttp-3.12.14-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:79b29053ff3ad307880d94562cca80693c62062a098a5776ea8ef5ef4b28d140"}, - {file = "aiohttp-3.12.14-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:23e1332fff36bebd3183db0c7a547a1da9d3b4091509f6d818e098855f2f27d3"}, - {file = "aiohttp-3.12.14-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:a564188ce831fd110ea76bcc97085dd6c625b427db3f1dbb14ca4baa1447dcbc"}, - {file = "aiohttp-3.12.14-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:a7a1b4302f70bb3ec40ca86de82def532c97a80db49cac6a6700af0de41af5ee"}, - {file = "aiohttp-3.12.14-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:1b07ccef62950a2519f9bfc1e5b294de5dd84329f444ca0b329605ea787a3de5"}, - {file = "aiohttp-3.12.14-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:938bd3ca6259e7e48b38d84f753d548bd863e0c222ed6ee6ace3fd6752768a84"}, - {file = "aiohttp-3.12.14-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:8bc784302b6b9f163b54c4e93d7a6f09563bd01ff2b841b29ed3ac126e5040bf"}, - {file = "aiohttp-3.12.14-cp39-cp39-win32.whl", hash = 
"sha256:a3416f95961dd7d5393ecff99e3f41dc990fb72eda86c11f2a60308ac6dcd7a0"}, - {file = "aiohttp-3.12.14-cp39-cp39-win_amd64.whl", hash = "sha256:196858b8820d7f60578f8b47e5669b3195c21d8ab261e39b1d705346458f445f"}, - {file = "aiohttp-3.12.14.tar.gz", hash = "sha256:6e06e120e34d93100de448fd941522e11dafa78ef1a893c179901b7d66aa29f2"}, + {file = "aiohttp-3.12.15-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b6fc902bff74d9b1879ad55f5404153e2b33a82e72a95c89cec5eb6cc9e92fbc"}, + {file = "aiohttp-3.12.15-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:098e92835b8119b54c693f2f88a1dec690e20798ca5f5fe5f0520245253ee0af"}, + {file = "aiohttp-3.12.15-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:40b3fee496a47c3b4a39a731954c06f0bd9bd3e8258c059a4beb76ac23f8e421"}, + {file = "aiohttp-3.12.15-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ce13fcfb0bb2f259fb42106cdc63fa5515fb85b7e87177267d89a771a660b79"}, + {file = "aiohttp-3.12.15-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3beb14f053222b391bf9cf92ae82e0171067cc9c8f52453a0f1ec7c37df12a77"}, + {file = "aiohttp-3.12.15-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c39e87afe48aa3e814cac5f535bc6199180a53e38d3f51c5e2530f5aa4ec58c"}, + {file = "aiohttp-3.12.15-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5f1b4ce5bc528a6ee38dbf5f39bbf11dd127048726323b72b8e85769319ffc4"}, + {file = "aiohttp-3.12.15-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1004e67962efabbaf3f03b11b4c43b834081c9e3f9b32b16a7d97d4708a9abe6"}, + {file = "aiohttp-3.12.15-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8faa08fcc2e411f7ab91d1541d9d597d3a90e9004180edb2072238c085eac8c2"}, + {file = "aiohttp-3.12.15-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:fe086edf38b2222328cdf89af0dde2439ee173b8ad7cb659b4e4c6f385b2be3d"}, + {file = "aiohttp-3.12.15-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:79b26fe467219add81d5e47b4a4ba0f2394e8b7c7c3198ed36609f9ba161aecb"}, + {file = "aiohttp-3.12.15-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b761bac1192ef24e16706d761aefcb581438b34b13a2f069a6d343ec8fb693a5"}, + {file = "aiohttp-3.12.15-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:e153e8adacfe2af562861b72f8bc47f8a5c08e010ac94eebbe33dc21d677cd5b"}, + {file = "aiohttp-3.12.15-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:fc49c4de44977aa8601a00edbf157e9a421f227aa7eb477d9e3df48343311065"}, + {file = "aiohttp-3.12.15-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:2776c7ec89c54a47029940177e75c8c07c29c66f73464784971d6a81904ce9d1"}, + {file = "aiohttp-3.12.15-cp310-cp310-win32.whl", hash = "sha256:2c7d81a277fa78b2203ab626ced1487420e8c11a8e373707ab72d189fcdad20a"}, + {file = "aiohttp-3.12.15-cp310-cp310-win_amd64.whl", hash = "sha256:83603f881e11f0f710f8e2327817c82e79431ec976448839f3cd05d7afe8f830"}, + {file = "aiohttp-3.12.15-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d3ce17ce0220383a0f9ea07175eeaa6aa13ae5a41f30bc61d84df17f0e9b1117"}, + {file = "aiohttp-3.12.15-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:010cc9bbd06db80fe234d9003f67e97a10fe003bfbedb40da7d71c1008eda0fe"}, + {file = "aiohttp-3.12.15-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3f9d7c55b41ed687b9d7165b17672340187f87a773c98236c987f08c858145a9"}, + {file = "aiohttp-3.12.15-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", 
hash = "sha256:bc4fbc61bb3548d3b482f9ac7ddd0f18c67e4225aaa4e8552b9f1ac7e6bda9e5"}, + {file = "aiohttp-3.12.15-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7fbc8a7c410bb3ad5d595bb7118147dfbb6449d862cc1125cf8867cb337e8728"}, + {file = "aiohttp-3.12.15-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:74dad41b3458dbb0511e760fb355bb0b6689e0630de8a22b1b62a98777136e16"}, + {file = "aiohttp-3.12.15-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b6f0af863cf17e6222b1735a756d664159e58855da99cfe965134a3ff63b0b0"}, + {file = "aiohttp-3.12.15-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b5b7fe4972d48a4da367043b8e023fb70a04d1490aa7d68800e465d1b97e493b"}, + {file = "aiohttp-3.12.15-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6443cca89553b7a5485331bc9bedb2342b08d073fa10b8c7d1c60579c4a7b9bd"}, + {file = "aiohttp-3.12.15-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6c5f40ec615e5264f44b4282ee27628cea221fcad52f27405b80abb346d9f3f8"}, + {file = "aiohttp-3.12.15-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:2abbb216a1d3a2fe86dbd2edce20cdc5e9ad0be6378455b05ec7f77361b3ab50"}, + {file = "aiohttp-3.12.15-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:db71ce547012a5420a39c1b744d485cfb823564d01d5d20805977f5ea1345676"}, + {file = "aiohttp-3.12.15-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:ced339d7c9b5030abad5854aa5413a77565e5b6e6248ff927d3e174baf3badf7"}, + {file = "aiohttp-3.12.15-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:7c7dd29c7b5bda137464dc9bfc738d7ceea46ff70309859ffde8c022e9b08ba7"}, + {file = "aiohttp-3.12.15-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:421da6fd326460517873274875c6c5a18ff225b40da2616083c5a34a7570b685"}, + {file = "aiohttp-3.12.15-cp311-cp311-win32.whl", hash = "sha256:4420cf9d179ec8dfe4be10e7d0fe47d6d606485512ea2265b0d8c5113372771b"}, + {file = "aiohttp-3.12.15-cp311-cp311-win_amd64.whl", hash = "sha256:edd533a07da85baa4b423ee8839e3e91681c7bfa19b04260a469ee94b778bf6d"}, + {file = "aiohttp-3.12.15-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:802d3868f5776e28f7bf69d349c26fc0efadb81676d0afa88ed00d98a26340b7"}, + {file = "aiohttp-3.12.15-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f2800614cd560287be05e33a679638e586a2d7401f4ddf99e304d98878c29444"}, + {file = "aiohttp-3.12.15-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8466151554b593909d30a0a125d638b4e5f3836e5aecde85b66b80ded1cb5b0d"}, + {file = "aiohttp-3.12.15-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e5a495cb1be69dae4b08f35a6c4579c539e9b5706f606632102c0f855bcba7c"}, + {file = "aiohttp-3.12.15-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:6404dfc8cdde35c69aaa489bb3542fb86ef215fc70277c892be8af540e5e21c0"}, + {file = "aiohttp-3.12.15-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3ead1c00f8521a5c9070fcb88f02967b1d8a0544e6d85c253f6968b785e1a2ab"}, + {file = "aiohttp-3.12.15-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6990ef617f14450bc6b34941dba4f12d5613cbf4e33805932f853fbd1cf18bfb"}, + {file = "aiohttp-3.12.15-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd736ed420f4db2b8148b52b46b88ed038d0354255f9a73196b7bbce3ea97545"}, + {file = 
"aiohttp-3.12.15-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c5092ce14361a73086b90c6efb3948ffa5be2f5b6fbcf52e8d8c8b8848bb97c"}, + {file = "aiohttp-3.12.15-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:aaa2234bb60c4dbf82893e934d8ee8dea30446f0647e024074237a56a08c01bd"}, + {file = "aiohttp-3.12.15-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:6d86a2fbdd14192e2f234a92d3b494dd4457e683ba07e5905a0b3ee25389ac9f"}, + {file = "aiohttp-3.12.15-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a041e7e2612041a6ddf1c6a33b883be6a421247c7afd47e885969ee4cc58bd8d"}, + {file = "aiohttp-3.12.15-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5015082477abeafad7203757ae44299a610e89ee82a1503e3d4184e6bafdd519"}, + {file = "aiohttp-3.12.15-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:56822ff5ddfd1b745534e658faba944012346184fbfe732e0d6134b744516eea"}, + {file = "aiohttp-3.12.15-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b2acbbfff69019d9014508c4ba0401822e8bae5a5fdc3b6814285b71231b60f3"}, + {file = "aiohttp-3.12.15-cp312-cp312-win32.whl", hash = "sha256:d849b0901b50f2185874b9a232f38e26b9b3d4810095a7572eacea939132d4e1"}, + {file = "aiohttp-3.12.15-cp312-cp312-win_amd64.whl", hash = "sha256:b390ef5f62bb508a9d67cb3bba9b8356e23b3996da7062f1a57ce1a79d2b3d34"}, + {file = "aiohttp-3.12.15-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:9f922ffd05034d439dde1c77a20461cf4a1b0831e6caa26151fe7aa8aaebc315"}, + {file = "aiohttp-3.12.15-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:2ee8a8ac39ce45f3e55663891d4b1d15598c157b4d494a4613e704c8b43112cd"}, + {file = "aiohttp-3.12.15-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3eae49032c29d356b94eee45a3f39fdf4b0814b397638c2f718e96cfadf4c4e4"}, + {file = "aiohttp-3.12.15-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b97752ff12cc12f46a9b20327104448042fce5c33a624f88c18f66f9368091c7"}, + {file = "aiohttp-3.12.15-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:894261472691d6fe76ebb7fcf2e5870a2ac284c7406ddc95823c8598a1390f0d"}, + {file = "aiohttp-3.12.15-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5fa5d9eb82ce98959fc1031c28198b431b4d9396894f385cb63f1e2f3f20ca6b"}, + {file = "aiohttp-3.12.15-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f0fa751efb11a541f57db59c1dd821bec09031e01452b2b6217319b3a1f34f3d"}, + {file = "aiohttp-3.12.15-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5346b93e62ab51ee2a9d68e8f73c7cf96ffb73568a23e683f931e52450e4148d"}, + {file = "aiohttp-3.12.15-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:049ec0360f939cd164ecbfd2873eaa432613d5e77d6b04535e3d1fbae5a9e645"}, + {file = "aiohttp-3.12.15-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b52dcf013b57464b6d1e51b627adfd69a8053e84b7103a7cd49c030f9ca44461"}, + {file = "aiohttp-3.12.15-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:9b2af240143dd2765e0fb661fd0361a1b469cab235039ea57663cda087250ea9"}, + {file = "aiohttp-3.12.15-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ac77f709a2cde2cc71257ab2d8c74dd157c67a0558a0d2799d5d571b4c63d44d"}, + {file = "aiohttp-3.12.15-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:47f6b962246f0a774fbd3b6b7be25d59b06fdb2f164cf2513097998fc6a29693"}, + {file = "aiohttp-3.12.15-cp313-cp313-musllinux_1_2_s390x.whl", hash = 
"sha256:760fb7db442f284996e39cf9915a94492e1896baac44f06ae551974907922b64"}, + {file = "aiohttp-3.12.15-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ad702e57dc385cae679c39d318def49aef754455f237499d5b99bea4ef582e51"}, + {file = "aiohttp-3.12.15-cp313-cp313-win32.whl", hash = "sha256:f813c3e9032331024de2eb2e32a88d86afb69291fbc37a3a3ae81cc9917fb3d0"}, + {file = "aiohttp-3.12.15-cp313-cp313-win_amd64.whl", hash = "sha256:1a649001580bdb37c6fdb1bebbd7e3bc688e8ec2b5c6f52edbb664662b17dc84"}, + {file = "aiohttp-3.12.15-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:691d203c2bdf4f4637792efbbcdcd157ae11e55eaeb5e9c360c1206fb03d4d98"}, + {file = "aiohttp-3.12.15-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8e995e1abc4ed2a454c731385bf4082be06f875822adc4c6d9eaadf96e20d406"}, + {file = "aiohttp-3.12.15-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:bd44d5936ab3193c617bfd6c9a7d8d1085a8dc8c3f44d5f1dcf554d17d04cf7d"}, + {file = "aiohttp-3.12.15-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:46749be6e89cd78d6068cdf7da51dbcfa4321147ab8e4116ee6678d9a056a0cf"}, + {file = "aiohttp-3.12.15-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:0c643f4d75adea39e92c0f01b3fb83d57abdec8c9279b3078b68a3a52b3933b6"}, + {file = "aiohttp-3.12.15-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0a23918fedc05806966a2438489dcffccbdf83e921a1170773b6178d04ade142"}, + {file = "aiohttp-3.12.15-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:74bdd8c864b36c3673741023343565d95bfbd778ffe1eb4d412c135a28a8dc89"}, + {file = "aiohttp-3.12.15-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0a146708808c9b7a988a4af3821379e379e0f0e5e466ca31a73dbdd0325b0263"}, + {file = "aiohttp-3.12.15-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b7011a70b56facde58d6d26da4fec3280cc8e2a78c714c96b7a01a87930a9530"}, + {file = "aiohttp-3.12.15-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:3bdd6e17e16e1dbd3db74d7f989e8af29c4d2e025f9828e6ef45fbdee158ec75"}, + {file = "aiohttp-3.12.15-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:57d16590a351dfc914670bd72530fd78344b885a00b250e992faea565b7fdc05"}, + {file = "aiohttp-3.12.15-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:bc9a0f6569ff990e0bbd75506c8d8fe7214c8f6579cca32f0546e54372a3bb54"}, + {file = "aiohttp-3.12.15-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:536ad7234747a37e50e7b6794ea868833d5220b49c92806ae2d7e8a9d6b5de02"}, + {file = "aiohttp-3.12.15-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:f0adb4177fa748072546fb650d9bd7398caaf0e15b370ed3317280b13f4083b0"}, + {file = "aiohttp-3.12.15-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:14954a2988feae3987f1eb49c706bff39947605f4b6fa4027c1d75743723eb09"}, + {file = "aiohttp-3.12.15-cp39-cp39-win32.whl", hash = "sha256:b784d6ed757f27574dca1c336f968f4e81130b27595e458e69457e6878251f5d"}, + {file = "aiohttp-3.12.15-cp39-cp39-win_amd64.whl", hash = "sha256:86ceded4e78a992f835209e236617bffae649371c4a50d5e5a3987f237db84b8"}, + {file = "aiohttp-3.12.15.tar.gz", hash = "sha256:4fc61385e9c98d72fcdf47e6dd81833f47b2f77c114c29cd64a361be57a763a2"}, ] [package.dependencies] @@ -198,14 +198,14 @@ files = [ [[package]] name = "anyio" -version = "4.9.0" -description = "High level compatibility layer for multiple asynchronous event loop implementations" +version = "4.10.0" +description = "High-level concurrency and networking 
framework on top of asyncio or Trio" optional = false python-versions = ">=3.9" groups = ["main"] files = [ - {file = "anyio-4.9.0-py3-none-any.whl", hash = "sha256:9f76d541cad6e36af7beb62e978876f3b41e3e04f2c1fbf0884604c0a9c4d93c"}, - {file = "anyio-4.9.0.tar.gz", hash = "sha256:673c0c244e15788651a4ff38710fea9675823028a6f08a5eda409e0c9840a028"}, + {file = "anyio-4.10.0-py3-none-any.whl", hash = "sha256:60e474ac86736bbfd6f210f7a61218939c318f43f9972497381f1c5e930ed3d1"}, + {file = "anyio-4.10.0.tar.gz", hash = "sha256:3f3fae35c96039744587aa5b8371e7e8e603c0702999535961dd336026973ba6"}, ] [package.dependencies] @@ -215,8 +215,6 @@ sniffio = ">=1.1" typing_extensions = {version = ">=4.5", markers = "python_version < \"3.13\""} [package.extras] -doc = ["Sphinx (>=8.2,<9.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx_rtd_theme"] -test = ["anyio[trio]", "blockbuster (>=1.5.23)", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "trustme", "truststore (>=0.9.1) ; python_version >= \"3.10\"", "uvloop (>=0.21) ; platform_python_implementation == \"CPython\" and platform_system != \"Windows\" and python_version < \"3.14\""] trio = ["trio (>=0.26.1)"] [[package]] @@ -503,18 +501,18 @@ bitarray = ">=3.0.0,<4.0" [[package]] name = "boto3" -version = "1.39.11" +version = "1.40.22" description = "The AWS SDK for Python" optional = false python-versions = ">=3.9" groups = ["main"] files = [ - {file = "boto3-1.39.11-py3-none-any.whl", hash = "sha256:af8f1dad35eceff7658fab43b39b0f55892b6e3dd12308733521cc24dd2c9a02"}, - {file = "boto3-1.39.11.tar.gz", hash = "sha256:3027edf20642fe1d5f9dc50a420d0fe2733073ed6a9f0f047b60fe08c3682132"}, + {file = "boto3-1.40.22-py3-none-any.whl", hash = "sha256:ecc468266a018f77869fd9cc3564500c3c1b658eb6d8e20351ec88cc06258dbf"}, + {file = "boto3-1.40.22.tar.gz", hash = "sha256:9972752b50fd376576a6e04a7d6afc69762a368f29b85314598edb62c1894663"}, ] [package.dependencies] -botocore = ">=1.39.11,<1.40.0" +botocore = ">=1.40.22,<1.41.0" jmespath = ">=0.7.1,<2.0.0" s3transfer = ">=0.13.0,<0.14.0" @@ -523,14 +521,14 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.39.11" +version = "1.40.22" description = "Low-level, data-driven core of boto 3." 
optional = false python-versions = ">=3.9" groups = ["main"] files = [ - {file = "botocore-1.39.11-py3-none-any.whl", hash = "sha256:1545352931a8a186f3e977b1e1a4542d7d434796e274c3c62efd0210b5ea76dc"}, - {file = "botocore-1.39.11.tar.gz", hash = "sha256:953b12909d6799350e346ab038e55b6efe622c616f80aef74d7a6683ffdd972c"}, + {file = "botocore-1.40.22-py3-none-any.whl", hash = "sha256:df50788fc71250dd884a4e2b60931103416bfba5baa85d2e150b8434ded7e61e"}, + {file = "botocore-1.40.22.tar.gz", hash = "sha256:eb800ece2cd67777ebb09a67a0d1628db3aea4f2ccbf1d8bf7dbf8504d1f3b71"}, ] [package.dependencies] @@ -542,18 +540,18 @@ urllib3 = [ ] [package.extras] -crt = ["awscrt (==0.23.8)"] +crt = ["awscrt (==0.27.6)"] [[package]] name = "build" -version = "1.2.2.post1" +version = "1.3.0" description = "A simple, correct Python build frontend" optional = true -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["main"] files = [ - {file = "build-1.2.2.post1-py3-none-any.whl", hash = "sha256:1d61c0887fa860c01971625baae8bdd338e517b836a2f70dd1f7aa3a6b2fc5b5"}, - {file = "build-1.2.2.post1.tar.gz", hash = "sha256:b36993e92ca9375a219c99e606a122ff365a760a2d4bba0caa09bd5278b608b7"}, + {file = "build-1.3.0-py3-none-any.whl", hash = "sha256:7145f0b5061ba90a1500d60bd1b13ca0a8a4cebdd0cc16ed8adf1c0e739f43b4"}, + {file = "build-1.3.0.tar.gz", hash = "sha256:698edd0ea270bde950f53aed21f3a0135672206f3911e0176261a31e0e07b397"}, ] [package.dependencies] @@ -564,11 +562,8 @@ pyproject_hooks = "*" tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} [package.extras] -docs = ["furo (>=2023.08.17)", "sphinx (>=7.0,<8.0)", "sphinx-argparse-cli (>=1.5)", "sphinx-autodoc-typehints (>=1.10)", "sphinx-issues (>=3.0.0)"] -test = ["build[uv,virtualenv]", "filelock (>=3)", "pytest (>=6.2.4)", "pytest-cov (>=2.12)", "pytest-mock (>=2)", "pytest-rerunfailures (>=9.1)", "pytest-xdist (>=1.34)", "setuptools (>=42.0.0) ; python_version < \"3.10\"", "setuptools (>=56.0.0) ; python_version == \"3.10\"", "setuptools (>=56.0.0) ; python_version == \"3.11\"", "setuptools (>=67.8.0) ; python_version >= \"3.12\"", "wheel (>=0.36.0)"] -typing = ["build[uv]", "importlib-metadata (>=5.1)", "mypy (>=1.9.0,<1.10.0)", "tomli", "typing-extensions (>=3.7.4.3)"] uv = ["uv (>=0.1.18)"] -virtualenv = ["virtualenv (>=20.0.35)"] +virtualenv = ["virtualenv (>=20.11) ; python_version < \"3.10\"", "virtualenv (>=20.17) ; python_version >= \"3.10\" and python_version < \"3.14\"", "virtualenv (>=20.31) ; python_version >= \"3.14\""] [[package]] name = "certifi" @@ -808,14 +803,14 @@ dev = ["black (>=23.1.0)", "pytest (>=7.2.1)", "ruff (>=0.0.252)"] [[package]] name = "chia-puzzles-py" -version = "0.20.1" +version = "0.20.2" description = "A collection of the currently deployed ChiaLisp puzzles." 
optional = false python-versions = "<4.0,>=3.8" groups = ["main"] files = [ - {file = "chia_puzzles_py-0.20.1-py3-none-any.whl", hash = "sha256:05feda5d148d116c0f8d6c72e9c53d3965c8141e70fedd4f802ba01777ab9da0"}, - {file = "chia_puzzles_py-0.20.1.tar.gz", hash = "sha256:d1c171042fde1269230004089c7b506d51bc86ddc5b147efd3b47a91abc37507"}, + {file = "chia_puzzles_py-0.20.2-py3-none-any.whl", hash = "sha256:17e8edb910009a2657809725e68aa4ba4a992e914d6ce98329c263abd448231b"}, + {file = "chia_puzzles_py-0.20.2.tar.gz", hash = "sha256:12343f39ee5485d266634124521193a0fa2529000d664d4beff2ae9b42f0760d"}, ] [package.dependencies] @@ -824,25 +819,44 @@ clvm-tools-rs = ">=0.1.45,<0.2.0" pytest = ">=8.3.3,<9.0.0" [[package]] -name = "chia_rs" -version = "0.28.1" +name = "chia-rs" +version = "0.30.0" description = "" optional = false python-versions = "*" groups = ["main"] -files = [] -develop = false +files = [ + {file = "chia_rs-0.30.0-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:a2ca9669e0ffad97cc3e39e49839fef568013dcb4a645cfa802c68f5acb321c6"}, + {file = "chia_rs-0.30.0-cp310-cp310-macosx_13_0_x86_64.whl", hash = "sha256:ef12c719f4c6fb0d427b110f331e026b4bcb6fa95856a0095f0d885ebf50d0a9"}, + {file = "chia_rs-0.30.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:7d140a0de8ab59fa75421590550491ea042a8a6e17f1b789b92b7f267561c568"}, + {file = "chia_rs-0.30.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:b9dbd5b2265e8743c1038b7e8cfff6131ef8a56a68fc8675b11ba18d4a3d9f97"}, + {file = "chia_rs-0.30.0-cp310-cp310-win_amd64.whl", hash = "sha256:67e5b5a5f9acb42ff364dcf72e5fc7fab7c6e2daad26a99edd9b24a7d4c932e8"}, + {file = "chia_rs-0.30.0-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:2b995c05429343c8b91bdfa1cdca97e23b72393b1b54f0ab6703e63e74016f1d"}, + {file = "chia_rs-0.30.0-cp311-cp311-macosx_13_0_x86_64.whl", hash = "sha256:2cc43223e3e092621523e35a38320a87941886315d3eb29b69e0470c7546e1c2"}, + {file = "chia_rs-0.30.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:b30d3868b004dba2fbc59c004cb3345b598f9f5b500ce426d23ccb3aaa1f608b"}, + {file = "chia_rs-0.30.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:45284e3d4655c1b8fca2024c050777bf6919de429949e26c5ff83ffe553fbd87"}, + {file = "chia_rs-0.30.0-cp311-cp311-win_amd64.whl", hash = "sha256:d892e63730ec94ab16fbb21cb25e07889d8136973487252c7b65d2611d759346"}, + {file = "chia_rs-0.30.0-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:004259970cba586f4bc70b6759ce9d1fc8310fdd96bb25d9e68e79a497a55bc9"}, + {file = "chia_rs-0.30.0-cp312-cp312-macosx_13_0_x86_64.whl", hash = "sha256:5c9ffcd52640edc31860e52ce482e91e2090b46373e70c07fe5828b99be7b10b"}, + {file = "chia_rs-0.30.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:b1850d32ee34cc92e3d912a445a528dc2cfde08e5eed2304fb050209f4637fa5"}, + {file = "chia_rs-0.30.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:3ce97f9baa4adc6233935b1138193446b81a0742993d7d49f186dbc61a8a2f91"}, + {file = "chia_rs-0.30.0-cp312-cp312-win_amd64.whl", hash = "sha256:9edd53c61d1462aae5a91265b9499afee19fea40a69c36f79d8ce7e519b3ad54"}, + {file = "chia_rs-0.30.0-cp313-cp313-macosx_13_0_arm64.whl", hash = "sha256:828477104cb13e57a1d82f286cb2a9961f711748da6b6459fbc522e14a6eeaa3"}, + {file = "chia_rs-0.30.0-cp313-cp313-macosx_13_0_x86_64.whl", hash = "sha256:707035a2596a48ada8a16b1daae7454360cfefc41074860ccf55dd6db0de1bcd"}, + {file = "chia_rs-0.30.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:3e1437b5af745b7f080882204c96b82ca50df394cca71ca4e977d86c0df5881a"}, + {file = 
"chia_rs-0.30.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:e3aa7eb08d34641d9059a2cd4d27ccf03e86c1b371b1ae465bbb5b1d84e1d3eb"}, + {file = "chia_rs-0.30.0-cp313-cp313-win_amd64.whl", hash = "sha256:6961a0c946217a6c25abcd5964e7e6d484f54456f70f226348b3a2a60b2c7840"}, + {file = "chia_rs-0.30.0-cp39-cp39-macosx_13_0_arm64.whl", hash = "sha256:5209a94bf27e3209292b5a18cd582f6a185180b42a1b70d0bf69463c41ac440e"}, + {file = "chia_rs-0.30.0-cp39-cp39-macosx_13_0_x86_64.whl", hash = "sha256:203d52fd1622b96e970ad9c047d516132b19bc136ecf62e526c4e8ca9f23de1e"}, + {file = "chia_rs-0.30.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:ef930b975b4f74b5c7089d263f9183826c5afdf076ae6fc5e765bafcad36b983"}, + {file = "chia_rs-0.30.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:68374a5b3e9e7d27a81d0d96b06053f24ace0238d60795ab7188ba1dee556112"}, + {file = "chia_rs-0.30.0-cp39-cp39-win_amd64.whl", hash = "sha256:de9da557ef7afd56dbc28b46d53747c39ce9cb919b78fb50be22ed335b22293d"}, + {file = "chia_rs-0.30.0.tar.gz", hash = "sha256:87937b76a0dab8cb304c641fb9af77a59c8270d59686b00d1a81766c4c8acfed"}, +] [package.dependencies] typing-extensions = "*" -[package.source] -type = "git" -url = "https://github.com/chia-network/chia_rs" -reference = "long_lived/initial_datalayer" -resolved_reference = "ca58fd4617fb06684a90f669221290623293d3a2" -subdirectory = "wheel/" - [[package]] name = "chiabip158" version = "1.5.3" @@ -1119,79 +1133,100 @@ dev = ["black", "coverage", "hatch", "pytest", "pytest-cov", "pytest-mock", "pyt [[package]] name = "coverage" -version = "7.9.2" +version = "7.10.5" description = "Code coverage measurement for Python" optional = true python-versions = ">=3.9" groups = ["main"] files = [ - {file = "coverage-7.9.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:66283a192a14a3854b2e7f3418d7db05cdf411012ab7ff5db98ff3b181e1f912"}, - {file = "coverage-7.9.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4e01d138540ef34fcf35c1aa24d06c3de2a4cffa349e29a10056544f35cca15f"}, - {file = "coverage-7.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f22627c1fe2745ee98d3ab87679ca73a97e75ca75eb5faee48660d060875465f"}, - {file = "coverage-7.9.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4b1c2d8363247b46bd51f393f86c94096e64a1cf6906803fa8d5a9d03784bdbf"}, - {file = "coverage-7.9.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c10c882b114faf82dbd33e876d0cbd5e1d1ebc0d2a74ceef642c6152f3f4d547"}, - {file = "coverage-7.9.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:de3c0378bdf7066c3988d66cd5232d161e933b87103b014ab1b0b4676098fa45"}, - {file = "coverage-7.9.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:1e2f097eae0e5991e7623958a24ced3282676c93c013dde41399ff63e230fcf2"}, - {file = "coverage-7.9.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:28dc1f67e83a14e7079b6cea4d314bc8b24d1aed42d3582ff89c0295f09b181e"}, - {file = "coverage-7.9.2-cp310-cp310-win32.whl", hash = "sha256:bf7d773da6af9e10dbddacbf4e5cab13d06d0ed93561d44dae0188a42c65be7e"}, - {file = "coverage-7.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:0c0378ba787681ab1897f7c89b415bd56b0b2d9a47e5a3d8dc0ea55aac118d6c"}, - {file = "coverage-7.9.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a7a56a2964a9687b6aba5b5ced6971af308ef6f79a91043c05dd4ee3ebc3e9ba"}, - {file = "coverage-7.9.2-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:123d589f32c11d9be7fe2e66d823a236fe759b0096f5db3fb1b75b2fa414a4fa"}, - {file = "coverage-7.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:333b2e0ca576a7dbd66e85ab402e35c03b0b22f525eed82681c4b866e2e2653a"}, - {file = "coverage-7.9.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:326802760da234baf9f2f85a39e4a4b5861b94f6c8d95251f699e4f73b1835dc"}, - {file = "coverage-7.9.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:19e7be4cfec248df38ce40968c95d3952fbffd57b400d4b9bb580f28179556d2"}, - {file = "coverage-7.9.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0b4a4cb73b9f2b891c1788711408ef9707666501ba23684387277ededab1097c"}, - {file = "coverage-7.9.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:2c8937fa16c8c9fbbd9f118588756e7bcdc7e16a470766a9aef912dd3f117dbd"}, - {file = "coverage-7.9.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:42da2280c4d30c57a9b578bafd1d4494fa6c056d4c419d9689e66d775539be74"}, - {file = "coverage-7.9.2-cp311-cp311-win32.whl", hash = "sha256:14fa8d3da147f5fdf9d298cacc18791818f3f1a9f542c8958b80c228320e90c6"}, - {file = "coverage-7.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:549cab4892fc82004f9739963163fd3aac7a7b0df430669b75b86d293d2df2a7"}, - {file = "coverage-7.9.2-cp311-cp311-win_arm64.whl", hash = "sha256:c2667a2b913e307f06aa4e5677f01a9746cd08e4b35e14ebcde6420a9ebb4c62"}, - {file = "coverage-7.9.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:ae9eb07f1cfacd9cfe8eaee6f4ff4b8a289a668c39c165cd0c8548484920ffc0"}, - {file = "coverage-7.9.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9ce85551f9a1119f02adc46d3014b5ee3f765deac166acf20dbb851ceb79b6f3"}, - {file = "coverage-7.9.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8f6389ac977c5fb322e0e38885fbbf901743f79d47f50db706e7644dcdcb6e1"}, - {file = "coverage-7.9.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff0d9eae8cdfcd58fe7893b88993723583a6ce4dfbfd9f29e001922544f95615"}, - {file = "coverage-7.9.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fae939811e14e53ed8a9818dad51d434a41ee09df9305663735f2e2d2d7d959b"}, - {file = "coverage-7.9.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:31991156251ec202c798501e0a42bbdf2169dcb0f137b1f5c0f4267f3fc68ef9"}, - {file = "coverage-7.9.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:d0d67963f9cbfc7c7f96d4ac74ed60ecbebd2ea6eeb51887af0f8dce205e545f"}, - {file = "coverage-7.9.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:49b752a2858b10580969ec6af6f090a9a440a64a301ac1528d7ca5f7ed497f4d"}, - {file = "coverage-7.9.2-cp312-cp312-win32.whl", hash = "sha256:88d7598b8ee130f32f8a43198ee02edd16d7f77692fa056cb779616bbea1b355"}, - {file = "coverage-7.9.2-cp312-cp312-win_amd64.whl", hash = "sha256:9dfb070f830739ee49d7c83e4941cc767e503e4394fdecb3b54bfdac1d7662c0"}, - {file = "coverage-7.9.2-cp312-cp312-win_arm64.whl", hash = "sha256:4e2c058aef613e79df00e86b6d42a641c877211384ce5bd07585ed7ba71ab31b"}, - {file = "coverage-7.9.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:985abe7f242e0d7bba228ab01070fde1d6c8fa12f142e43debe9ed1dde686038"}, - {file = "coverage-7.9.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:82c3939264a76d44fde7f213924021ed31f55ef28111a19649fec90c0f109e6d"}, - {file = 
"coverage-7.9.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ae5d563e970dbe04382f736ec214ef48103d1b875967c89d83c6e3f21706d5b3"}, - {file = "coverage-7.9.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bdd612e59baed2a93c8843c9a7cb902260f181370f1d772f4842987535071d14"}, - {file = "coverage-7.9.2-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:256ea87cb2a1ed992bcdfc349d8042dcea1b80436f4ddf6e246d6bee4b5d73b6"}, - {file = "coverage-7.9.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f44ae036b63c8ea432f610534a2668b0c3aee810e7037ab9d8ff6883de480f5b"}, - {file = "coverage-7.9.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:82d76ad87c932935417a19b10cfe7abb15fd3f923cfe47dbdaa74ef4e503752d"}, - {file = "coverage-7.9.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:619317bb86de4193debc712b9e59d5cffd91dc1d178627ab2a77b9870deb2868"}, - {file = "coverage-7.9.2-cp313-cp313-win32.whl", hash = "sha256:0a07757de9feb1dfafd16ab651e0f628fd7ce551604d1bf23e47e1ddca93f08a"}, - {file = "coverage-7.9.2-cp313-cp313-win_amd64.whl", hash = "sha256:115db3d1f4d3f35f5bb021e270edd85011934ff97c8797216b62f461dd69374b"}, - {file = "coverage-7.9.2-cp313-cp313-win_arm64.whl", hash = "sha256:48f82f889c80af8b2a7bb6e158d95a3fbec6a3453a1004d04e4f3b5945a02694"}, - {file = "coverage-7.9.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:55a28954545f9d2f96870b40f6c3386a59ba8ed50caf2d949676dac3ecab99f5"}, - {file = "coverage-7.9.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:cdef6504637731a63c133bb2e6f0f0214e2748495ec15fe42d1e219d1b133f0b"}, - {file = "coverage-7.9.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bcd5ebe66c7a97273d5d2ddd4ad0ed2e706b39630ed4b53e713d360626c3dbb3"}, - {file = "coverage-7.9.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9303aed20872d7a3c9cb39c5d2b9bdbe44e3a9a1aecb52920f7e7495410dfab8"}, - {file = "coverage-7.9.2-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc18ea9e417a04d1920a9a76fe9ebd2f43ca505b81994598482f938d5c315f46"}, - {file = "coverage-7.9.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6406cff19880aaaadc932152242523e892faff224da29e241ce2fca329866584"}, - {file = "coverage-7.9.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:2d0d4f6ecdf37fcc19c88fec3e2277d5dee740fb51ffdd69b9579b8c31e4232e"}, - {file = "coverage-7.9.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:c33624f50cf8de418ab2b4d6ca9eda96dc45b2c4231336bac91454520e8d1fac"}, - {file = "coverage-7.9.2-cp313-cp313t-win32.whl", hash = "sha256:1df6b76e737c6a92210eebcb2390af59a141f9e9430210595251fbaf02d46926"}, - {file = "coverage-7.9.2-cp313-cp313t-win_amd64.whl", hash = "sha256:f5fd54310b92741ebe00d9c0d1d7b2b27463952c022da6d47c175d246a98d1bd"}, - {file = "coverage-7.9.2-cp313-cp313t-win_arm64.whl", hash = "sha256:c48c2375287108c887ee87d13b4070a381c6537d30e8487b24ec721bf2a781cb"}, - {file = "coverage-7.9.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ddc39510ac922a5c4c27849b739f875d3e1d9e590d1e7b64c98dadf037a16cce"}, - {file = "coverage-7.9.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a535c0c7364acd55229749c2b3e5eebf141865de3a8f697076a3291985f02d30"}, - {file = "coverage-7.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:df0f9ef28e0f20c767ccdccfc5ae5f83a6f4a2fbdfbcbcc8487a8a78771168c8"}, - {file = "coverage-7.9.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2f3da12e0ccbcb348969221d29441ac714bbddc4d74e13923d3d5a7a0bebef7a"}, - {file = "coverage-7.9.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0a17eaf46f56ae0f870f14a3cbc2e4632fe3771eab7f687eda1ee59b73d09fe4"}, - {file = "coverage-7.9.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:669135a9d25df55d1ed56a11bf555f37c922cf08d80799d4f65d77d7d6123fcf"}, - {file = "coverage-7.9.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:9d3a700304d01a627df9db4322dc082a0ce1e8fc74ac238e2af39ced4c083193"}, - {file = "coverage-7.9.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:71ae8b53855644a0b1579d4041304ddc9995c7b21c8a1f16753c4d8903b4dfed"}, - {file = "coverage-7.9.2-cp39-cp39-win32.whl", hash = "sha256:dd7a57b33b5cf27acb491e890720af45db05589a80c1ffc798462a765be6d4d7"}, - {file = "coverage-7.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:f65bb452e579d5540c8b37ec105dd54d8b9307b07bcaa186818c104ffda22441"}, - {file = "coverage-7.9.2-pp39.pp310.pp311-none-any.whl", hash = "sha256:8a1166db2fb62473285bcb092f586e081e92656c7dfa8e9f62b4d39d7e6b5050"}, - {file = "coverage-7.9.2-py3-none-any.whl", hash = "sha256:e425cd5b00f6fc0ed7cdbd766c70be8baab4b7839e4d4fe5fac48581dd968ea4"}, - {file = "coverage-7.9.2.tar.gz", hash = "sha256:997024fa51e3290264ffd7492ec97d0690293ccd2b45a6cd7d82d945a4a80c8b"}, + {file = "coverage-7.10.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c6a5c3414bfc7451b879141ce772c546985163cf553f08e0f135f0699a911801"}, + {file = "coverage-7.10.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:bc8e4d99ce82f1710cc3c125adc30fd1487d3cf6c2cd4994d78d68a47b16989a"}, + {file = "coverage-7.10.5-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:02252dc1216e512a9311f596b3169fad54abcb13827a8d76d5630c798a50a754"}, + {file = "coverage-7.10.5-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:73269df37883e02d460bee0cc16be90509faea1e3bd105d77360b512d5bb9c33"}, + {file = "coverage-7.10.5-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1f8a81b0614642f91c9effd53eec284f965577591f51f547a1cbeb32035b4c2f"}, + {file = "coverage-7.10.5-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:6a29f8e0adb7f8c2b95fa2d4566a1d6e6722e0a637634c6563cb1ab844427dd9"}, + {file = "coverage-7.10.5-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:fcf6ab569436b4a647d4e91accba12509ad9f2554bc93d3aee23cc596e7f99c3"}, + {file = "coverage-7.10.5-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:90dc3d6fb222b194a5de60af8d190bedeeddcbc7add317e4a3cd333ee6b7c879"}, + {file = "coverage-7.10.5-cp310-cp310-win32.whl", hash = "sha256:414a568cd545f9dc75f0686a0049393de8098414b58ea071e03395505b73d7a8"}, + {file = "coverage-7.10.5-cp310-cp310-win_amd64.whl", hash = "sha256:e551f9d03347196271935fd3c0c165f0e8c049220280c1120de0084d65e9c7ff"}, + {file = "coverage-7.10.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c177e6ffe2ebc7c410785307758ee21258aa8e8092b44d09a2da767834f075f2"}, + {file = "coverage-7.10.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:14d6071c51ad0f703d6440827eaa46386169b5fdced42631d5a5ac419616046f"}, + {file = "coverage-7.10.5-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = 
"sha256:61f78c7c3bc272a410c5ae3fde7792b4ffb4acc03d35a7df73ca8978826bb7ab"}, + {file = "coverage-7.10.5-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:f39071caa126f69d63f99b324fb08c7b1da2ec28cbb1fe7b5b1799926492f65c"}, + {file = "coverage-7.10.5-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:343a023193f04d46edc46b2616cdbee68c94dd10208ecd3adc56fcc54ef2baa1"}, + {file = "coverage-7.10.5-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:585ffe93ae5894d1ebdee69fc0b0d4b7c75d8007983692fb300ac98eed146f78"}, + {file = "coverage-7.10.5-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:b0ef4e66f006ed181df29b59921bd8fc7ed7cd6a9289295cd8b2824b49b570df"}, + {file = "coverage-7.10.5-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:eb7b0bbf7cc1d0453b843eca7b5fa017874735bef9bfdfa4121373d2cc885ed6"}, + {file = "coverage-7.10.5-cp311-cp311-win32.whl", hash = "sha256:1d043a8a06987cc0c98516e57c4d3fc2c1591364831e9deb59c9e1b4937e8caf"}, + {file = "coverage-7.10.5-cp311-cp311-win_amd64.whl", hash = "sha256:fefafcca09c3ac56372ef64a40f5fe17c5592fab906e0fdffd09543f3012ba50"}, + {file = "coverage-7.10.5-cp311-cp311-win_arm64.whl", hash = "sha256:7e78b767da8b5fc5b2faa69bb001edafcd6f3995b42a331c53ef9572c55ceb82"}, + {file = "coverage-7.10.5-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c2d05c7e73c60a4cecc7d9b60dbfd603b4ebc0adafaef371445b47d0f805c8a9"}, + {file = "coverage-7.10.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:32ddaa3b2c509778ed5373b177eb2bf5662405493baeff52278a0b4f9415188b"}, + {file = "coverage-7.10.5-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:dd382410039fe062097aa0292ab6335a3f1e7af7bba2ef8d27dcda484918f20c"}, + {file = "coverage-7.10.5-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:7fa22800f3908df31cea6fb230f20ac49e343515d968cc3a42b30d5c3ebf9b5a"}, + {file = "coverage-7.10.5-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f366a57ac81f5e12797136552f5b7502fa053c861a009b91b80ed51f2ce651c6"}, + {file = "coverage-7.10.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5f1dc8f1980a272ad4a6c84cba7981792344dad33bf5869361576b7aef42733a"}, + {file = "coverage-7.10.5-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:2285c04ee8676f7938b02b4936d9b9b672064daab3187c20f73a55f3d70e6b4a"}, + {file = "coverage-7.10.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c2492e4dd9daab63f5f56286f8a04c51323d237631eb98505d87e4c4ff19ec34"}, + {file = "coverage-7.10.5-cp312-cp312-win32.whl", hash = "sha256:38a9109c4ee8135d5df5505384fc2f20287a47ccbe0b3f04c53c9a1989c2bbaf"}, + {file = "coverage-7.10.5-cp312-cp312-win_amd64.whl", hash = "sha256:6b87f1ad60b30bc3c43c66afa7db6b22a3109902e28c5094957626a0143a001f"}, + {file = "coverage-7.10.5-cp312-cp312-win_arm64.whl", hash = "sha256:672a6c1da5aea6c629819a0e1461e89d244f78d7b60c424ecf4f1f2556c041d8"}, + {file = "coverage-7.10.5-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ef3b83594d933020f54cf65ea1f4405d1f4e41a009c46df629dd964fcb6e907c"}, + {file = "coverage-7.10.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2b96bfdf7c0ea9faebce088a3ecb2382819da4fbc05c7b80040dbc428df6af44"}, + {file = "coverage-7.10.5-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:63df1fdaffa42d914d5c4d293e838937638bf75c794cf20bee12978fc8c4e3bc"}, + {file = 
"coverage-7.10.5-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:8002dc6a049aac0e81ecec97abfb08c01ef0c1fbf962d0c98da3950ace89b869"}, + {file = "coverage-7.10.5-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:63d4bb2966d6f5f705a6b0c6784c8969c468dbc4bcf9d9ded8bff1c7e092451f"}, + {file = "coverage-7.10.5-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1f672efc0731a6846b157389b6e6d5d5e9e59d1d1a23a5c66a99fd58339914d5"}, + {file = "coverage-7.10.5-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:3f39cef43d08049e8afc1fde4a5da8510fc6be843f8dea350ee46e2a26b2f54c"}, + {file = "coverage-7.10.5-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:2968647e3ed5a6c019a419264386b013979ff1fb67dd11f5c9886c43d6a31fc2"}, + {file = "coverage-7.10.5-cp313-cp313-win32.whl", hash = "sha256:0d511dda38595b2b6934c2b730a1fd57a3635c6aa2a04cb74714cdfdd53846f4"}, + {file = "coverage-7.10.5-cp313-cp313-win_amd64.whl", hash = "sha256:9a86281794a393513cf117177fd39c796b3f8e3759bb2764259a2abba5cce54b"}, + {file = "coverage-7.10.5-cp313-cp313-win_arm64.whl", hash = "sha256:cebd8e906eb98bb09c10d1feed16096700b1198d482267f8bf0474e63a7b8d84"}, + {file = "coverage-7.10.5-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0520dff502da5e09d0d20781df74d8189ab334a1e40d5bafe2efaa4158e2d9e7"}, + {file = "coverage-7.10.5-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:d9cd64aca68f503ed3f1f18c7c9174cbb797baba02ca8ab5112f9d1c0328cd4b"}, + {file = "coverage-7.10.5-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:0913dd1613a33b13c4f84aa6e3f4198c1a21ee28ccb4f674985c1f22109f0aae"}, + {file = "coverage-7.10.5-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:1b7181c0feeb06ed8a02da02792f42f829a7b29990fef52eff257fef0885d760"}, + {file = "coverage-7.10.5-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:36d42b7396b605f774d4372dd9c49bed71cbabce4ae1ccd074d155709dd8f235"}, + {file = "coverage-7.10.5-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:b4fdc777e05c4940b297bf47bf7eedd56a39a61dc23ba798e4b830d585486ca5"}, + {file = "coverage-7.10.5-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:42144e8e346de44a6f1dbd0a56575dd8ab8dfa7e9007da02ea5b1c30ab33a7db"}, + {file = "coverage-7.10.5-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:66c644cbd7aed8fe266d5917e2c9f65458a51cfe5eeff9c05f15b335f697066e"}, + {file = "coverage-7.10.5-cp313-cp313t-win32.whl", hash = "sha256:2d1b73023854068c44b0c554578a4e1ef1b050ed07cf8b431549e624a29a66ee"}, + {file = "coverage-7.10.5-cp313-cp313t-win_amd64.whl", hash = "sha256:54a1532c8a642d8cc0bd5a9a51f5a9dcc440294fd06e9dda55e743c5ec1a8f14"}, + {file = "coverage-7.10.5-cp313-cp313t-win_arm64.whl", hash = "sha256:74d5b63fe3f5f5d372253a4ef92492c11a4305f3550631beaa432fc9df16fcff"}, + {file = "coverage-7.10.5-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:68c5e0bc5f44f68053369fa0d94459c84548a77660a5f2561c5e5f1e3bed7031"}, + {file = "coverage-7.10.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:cf33134ffae93865e32e1e37df043bef15a5e857d8caebc0099d225c579b0fa3"}, + {file = "coverage-7.10.5-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:ad8fa9d5193bafcf668231294241302b5e683a0518bf1e33a9a0dfb142ec3031"}, + {file = "coverage-7.10.5-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = 
"sha256:146fa1531973d38ab4b689bc764592fe6c2f913e7e80a39e7eeafd11f0ef6db2"}, + {file = "coverage-7.10.5-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6013a37b8a4854c478d3219ee8bc2392dea51602dd0803a12d6f6182a0061762"}, + {file = "coverage-7.10.5-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:eb90fe20db9c3d930fa2ad7a308207ab5b86bf6a76f54ab6a40be4012d88fcae"}, + {file = "coverage-7.10.5-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:384b34482272e960c438703cafe63316dfbea124ac62006a455c8410bf2a2262"}, + {file = "coverage-7.10.5-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:467dc74bd0a1a7de2bedf8deaf6811f43602cb532bd34d81ffd6038d6d8abe99"}, + {file = "coverage-7.10.5-cp314-cp314-win32.whl", hash = "sha256:556d23d4e6393ca898b2e63a5bca91e9ac2d5fb13299ec286cd69a09a7187fde"}, + {file = "coverage-7.10.5-cp314-cp314-win_amd64.whl", hash = "sha256:f4446a9547681533c8fa3e3c6cf62121eeee616e6a92bd9201c6edd91beffe13"}, + {file = "coverage-7.10.5-cp314-cp314-win_arm64.whl", hash = "sha256:5e78bd9cf65da4c303bf663de0d73bf69f81e878bf72a94e9af67137c69b9fe9"}, + {file = "coverage-7.10.5-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:5661bf987d91ec756a47c7e5df4fbcb949f39e32f9334ccd3f43233bbb65e508"}, + {file = "coverage-7.10.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:a46473129244db42a720439a26984f8c6f834762fc4573616c1f37f13994b357"}, + {file = "coverage-7.10.5-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:1f64b8d3415d60f24b058b58d859e9512624bdfa57a2d1f8aff93c1ec45c429b"}, + {file = "coverage-7.10.5-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:44d43de99a9d90b20e0163f9770542357f58860a26e24dc1d924643bd6aa7cb4"}, + {file = "coverage-7.10.5-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a931a87e5ddb6b6404e65443b742cb1c14959622777f2a4efd81fba84f5d91ba"}, + {file = "coverage-7.10.5-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:f9559b906a100029274448f4c8b8b0a127daa4dade5661dfd821b8c188058842"}, + {file = "coverage-7.10.5-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:b08801e25e3b4526ef9ced1aa29344131a8f5213c60c03c18fe4c6170ffa2874"}, + {file = "coverage-7.10.5-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:ed9749bb8eda35f8b636fb7632f1c62f735a236a5d4edadd8bbcc5ea0542e732"}, + {file = "coverage-7.10.5-cp314-cp314t-win32.whl", hash = "sha256:609b60d123fc2cc63ccee6d17e4676699075db72d14ac3c107cc4976d516f2df"}, + {file = "coverage-7.10.5-cp314-cp314t-win_amd64.whl", hash = "sha256:0666cf3d2c1626b5a3463fd5b05f5e21f99e6aec40a3192eee4d07a15970b07f"}, + {file = "coverage-7.10.5-cp314-cp314t-win_arm64.whl", hash = "sha256:bc85eb2d35e760120540afddd3044a5bf69118a91a296a8b3940dfc4fdcfe1e2"}, + {file = "coverage-7.10.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:62835c1b00c4a4ace24c1a88561a5a59b612fbb83a525d1c70ff5720c97c0610"}, + {file = "coverage-7.10.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5255b3bbcc1d32a4069d6403820ac8e6dbcc1d68cb28a60a1ebf17e47028e898"}, + {file = "coverage-7.10.5-cp39-cp39-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3876385722e335d6e991c430302c24251ef9c2a9701b2b390f5473199b1b8ebf"}, + {file = "coverage-7.10.5-cp39-cp39-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:8048ce4b149c93447a55d279078c8ae98b08a6951a3c4d2d7e87f4efc7bfe100"}, + {file = 
"coverage-7.10.5-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4028e7558e268dd8bcf4d9484aad393cafa654c24b4885f6f9474bf53183a82a"}, + {file = "coverage-7.10.5-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:03f47dc870eec0367fcdd603ca6a01517d2504e83dc18dbfafae37faec66129a"}, + {file = "coverage-7.10.5-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:2d488d7d42b6ded7ea0704884f89dcabd2619505457de8fc9a6011c62106f6e5"}, + {file = "coverage-7.10.5-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:b3dcf2ead47fa8be14224ee817dfc1df98043af568fe120a22f81c0eb3c34ad2"}, + {file = "coverage-7.10.5-cp39-cp39-win32.whl", hash = "sha256:02650a11324b80057b8c9c29487020073d5e98a498f1857f37e3f9b6ea1b2426"}, + {file = "coverage-7.10.5-cp39-cp39-win_amd64.whl", hash = "sha256:b45264dd450a10f9e03237b41a9a24e85cbb1e278e5a32adb1a303f58f0017f3"}, + {file = "coverage-7.10.5-py3-none-any.whl", hash = "sha256:0be24d35e4db1d23d0db5c0f6a74a962e2ec83c426b5cac09f4234aadef38e4a"}, + {file = "coverage-7.10.5.tar.gz", hash = "sha256:f2e57716a78bc3ae80b2207be0709a3b2b63b9f2dcf9740ee6ac03588a2015b6"}, ] [package.dependencies] @@ -1202,49 +1237,49 @@ toml = ["tomli ; python_full_version <= \"3.11.0a6\""] [[package]] name = "cryptography" -version = "45.0.5" +version = "45.0.7" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." optional = false python-versions = "!=3.9.0,!=3.9.1,>=3.7" groups = ["main"] files = [ - {file = "cryptography-45.0.5-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:101ee65078f6dd3e5a028d4f19c07ffa4dd22cce6a20eaa160f8b5219911e7d8"}, - {file = "cryptography-45.0.5-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:3a264aae5f7fbb089dbc01e0242d3b67dffe3e6292e1f5182122bdf58e65215d"}, - {file = "cryptography-45.0.5-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:e74d30ec9c7cb2f404af331d5b4099a9b322a8a6b25c4632755c8757345baac5"}, - {file = "cryptography-45.0.5-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:3af26738f2db354aafe492fb3869e955b12b2ef2e16908c8b9cb928128d42c57"}, - {file = "cryptography-45.0.5-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e6c00130ed423201c5bc5544c23359141660b07999ad82e34e7bb8f882bb78e0"}, - {file = "cryptography-45.0.5-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:dd420e577921c8c2d31289536c386aaa30140b473835e97f83bc71ea9d2baf2d"}, - {file = "cryptography-45.0.5-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:d05a38884db2ba215218745f0781775806bde4f32e07b135348355fe8e4991d9"}, - {file = "cryptography-45.0.5-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:ad0caded895a00261a5b4aa9af828baede54638754b51955a0ac75576b831b27"}, - {file = "cryptography-45.0.5-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9024beb59aca9d31d36fcdc1604dd9bbeed0a55bface9f1908df19178e2f116e"}, - {file = "cryptography-45.0.5-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:91098f02ca81579c85f66df8a588c78f331ca19089763d733e34ad359f474174"}, - {file = "cryptography-45.0.5-cp311-abi3-win32.whl", hash = "sha256:926c3ea71a6043921050eaa639137e13dbe7b4ab25800932a8498364fc1abec9"}, - {file = "cryptography-45.0.5-cp311-abi3-win_amd64.whl", hash = "sha256:b85980d1e345fe769cfc57c57db2b59cff5464ee0c045d52c0df087e926fbe63"}, - {file = "cryptography-45.0.5-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:f3562c2f23c612f2e4a6964a61d942f891d29ee320edb62ff48ffb99f3de9ae8"}, - 
{file = "cryptography-45.0.5-cp37-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:3fcfbefc4a7f332dece7272a88e410f611e79458fab97b5efe14e54fe476f4fd"}, - {file = "cryptography-45.0.5-cp37-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:460f8c39ba66af7db0545a8c6f2eabcbc5a5528fc1cf6c3fa9a1e44cec33385e"}, - {file = "cryptography-45.0.5-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:9b4cf6318915dccfe218e69bbec417fdd7c7185aa7aab139a2c0beb7468c89f0"}, - {file = "cryptography-45.0.5-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:2089cc8f70a6e454601525e5bf2779e665d7865af002a5dec8d14e561002e135"}, - {file = "cryptography-45.0.5-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:0027d566d65a38497bc37e0dd7c2f8ceda73597d2ac9ba93810204f56f52ebc7"}, - {file = "cryptography-45.0.5-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:be97d3a19c16a9be00edf79dca949c8fa7eff621763666a145f9f9535a5d7f42"}, - {file = "cryptography-45.0.5-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:7760c1c2e1a7084153a0f68fab76e754083b126a47d0117c9ed15e69e2103492"}, - {file = "cryptography-45.0.5-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:6ff8728d8d890b3dda5765276d1bc6fb099252915a2cd3aff960c4c195745dd0"}, - {file = "cryptography-45.0.5-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:7259038202a47fdecee7e62e0fd0b0738b6daa335354396c6ddebdbe1206af2a"}, - {file = "cryptography-45.0.5-cp37-abi3-win32.whl", hash = "sha256:1e1da5accc0c750056c556a93c3e9cb828970206c68867712ca5805e46dc806f"}, - {file = "cryptography-45.0.5-cp37-abi3-win_amd64.whl", hash = "sha256:90cb0a7bb35959f37e23303b7eed0a32280510030daba3f7fdfbb65defde6a97"}, - {file = "cryptography-45.0.5-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:206210d03c1193f4e1ff681d22885181d47efa1ab3018766a7b32a7b3d6e6afd"}, - {file = "cryptography-45.0.5-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c648025b6840fe62e57107e0a25f604db740e728bd67da4f6f060f03017d5097"}, - {file = "cryptography-45.0.5-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b8fa8b0a35a9982a3c60ec79905ba5bb090fc0b9addcfd3dc2dd04267e45f25e"}, - {file = "cryptography-45.0.5-pp310-pypy310_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:14d96584701a887763384f3c47f0ca7c1cce322aa1c31172680eb596b890ec30"}, - {file = "cryptography-45.0.5-pp310-pypy310_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:57c816dfbd1659a367831baca4b775b2a5b43c003daf52e9d57e1d30bc2e1b0e"}, - {file = "cryptography-45.0.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:b9e38e0a83cd51e07f5a48ff9691cae95a79bea28fe4ded168a8e5c6c77e819d"}, - {file = "cryptography-45.0.5-pp311-pypy311_pp73-macosx_10_9_x86_64.whl", hash = "sha256:8c4a6ff8a30e9e3d38ac0539e9a9e02540ab3f827a3394f8852432f6b0ea152e"}, - {file = "cryptography-45.0.5-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:bd4c45986472694e5121084c6ebbd112aa919a25e783b87eb95953c9573906d6"}, - {file = "cryptography-45.0.5-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:982518cd64c54fcada9d7e5cf28eabd3ee76bd03ab18e08a48cad7e8b6f31b18"}, - {file = "cryptography-45.0.5-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:12e55281d993a793b0e883066f590c1ae1e802e3acb67f8b442e721e475e6463"}, - {file = "cryptography-45.0.5-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:5aa1e32983d4443e310f726ee4b071ab7569f58eedfdd65e9675484a4eb67bd1"}, - {file = "cryptography-45.0.5-pp311-pypy311_pp73-win_amd64.whl", hash = 
"sha256:e357286c1b76403dd384d938f93c46b2b058ed4dfcdce64a770f0537ed3feb6f"}, - {file = "cryptography-45.0.5.tar.gz", hash = "sha256:72e76caa004ab63accdf26023fccd1d087f6d90ec6048ff33ad0445abf7f605a"}, + {file = "cryptography-45.0.7-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:3be4f21c6245930688bd9e162829480de027f8bf962ede33d4f8ba7d67a00cee"}, + {file = "cryptography-45.0.7-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:67285f8a611b0ebc0857ced2081e30302909f571a46bfa7a3cc0ad303fe015c6"}, + {file = "cryptography-45.0.7-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:577470e39e60a6cd7780793202e63536026d9b8641de011ed9d8174da9ca5339"}, + {file = "cryptography-45.0.7-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:4bd3e5c4b9682bc112d634f2c6ccc6736ed3635fc3319ac2bb11d768cc5a00d8"}, + {file = "cryptography-45.0.7-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:465ccac9d70115cd4de7186e60cfe989de73f7bb23e8a7aa45af18f7412e75bf"}, + {file = "cryptography-45.0.7-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:16ede8a4f7929b4b7ff3642eba2bf79aa1d71f24ab6ee443935c0d269b6bc513"}, + {file = "cryptography-45.0.7-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:8978132287a9d3ad6b54fcd1e08548033cc09dc6aacacb6c004c73c3eb5d3ac3"}, + {file = "cryptography-45.0.7-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:b6a0e535baec27b528cb07a119f321ac024592388c5681a5ced167ae98e9fff3"}, + {file = "cryptography-45.0.7-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:a24ee598d10befaec178efdff6054bc4d7e883f615bfbcd08126a0f4931c83a6"}, + {file = "cryptography-45.0.7-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:fa26fa54c0a9384c27fcdc905a2fb7d60ac6e47d14bc2692145f2b3b1e2cfdbd"}, + {file = "cryptography-45.0.7-cp311-abi3-win32.whl", hash = "sha256:bef32a5e327bd8e5af915d3416ffefdbe65ed975b646b3805be81b23580b57b8"}, + {file = "cryptography-45.0.7-cp311-abi3-win_amd64.whl", hash = "sha256:3808e6b2e5f0b46d981c24d79648e5c25c35e59902ea4391a0dcb3e667bf7443"}, + {file = "cryptography-45.0.7-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:bfb4c801f65dd61cedfc61a83732327fafbac55a47282e6f26f073ca7a41c3b2"}, + {file = "cryptography-45.0.7-cp37-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:81823935e2f8d476707e85a78a405953a03ef7b7b4f55f93f7c2d9680e5e0691"}, + {file = "cryptography-45.0.7-cp37-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3994c809c17fc570c2af12c9b840d7cea85a9fd3e5c0e0491f4fa3c029216d59"}, + {file = "cryptography-45.0.7-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:dad43797959a74103cb59c5dac71409f9c27d34c8a05921341fb64ea8ccb1dd4"}, + {file = "cryptography-45.0.7-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:ce7a453385e4c4693985b4a4a3533e041558851eae061a58a5405363b098fcd3"}, + {file = "cryptography-45.0.7-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:b04f85ac3a90c227b6e5890acb0edbaf3140938dbecf07bff618bf3638578cf1"}, + {file = "cryptography-45.0.7-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:48c41a44ef8b8c2e80ca4527ee81daa4c527df3ecbc9423c41a420a9559d0e27"}, + {file = "cryptography-45.0.7-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:f3df7b3d0f91b88b2106031fd995802a2e9ae13e02c36c1fc075b43f420f3a17"}, + {file = "cryptography-45.0.7-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:dd342f085542f6eb894ca00ef70236ea46070c8a13824c6bde0dfdcd36065b9b"}, + {file = 
"cryptography-45.0.7-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:1993a1bb7e4eccfb922b6cd414f072e08ff5816702a0bdb8941c247a6b1b287c"}, + {file = "cryptography-45.0.7-cp37-abi3-win32.whl", hash = "sha256:18fcf70f243fe07252dcb1b268a687f2358025ce32f9f88028ca5c364b123ef5"}, + {file = "cryptography-45.0.7-cp37-abi3-win_amd64.whl", hash = "sha256:7285a89df4900ed3bfaad5679b1e668cb4b38a8de1ccbfc84b05f34512da0a90"}, + {file = "cryptography-45.0.7-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:de58755d723e86175756f463f2f0bddd45cc36fbd62601228a3f8761c9f58252"}, + {file = "cryptography-45.0.7-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:a20e442e917889d1a6b3c570c9e3fa2fdc398c20868abcea268ea33c024c4083"}, + {file = "cryptography-45.0.7-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:258e0dff86d1d891169b5af222d362468a9570e2532923088658aa866eb11130"}, + {file = "cryptography-45.0.7-pp310-pypy310_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:d97cf502abe2ab9eff8bd5e4aca274da8d06dd3ef08b759a8d6143f4ad65d4b4"}, + {file = "cryptography-45.0.7-pp310-pypy310_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:c987dad82e8c65ebc985f5dae5e74a3beda9d0a2a4daf8a1115f3772b59e5141"}, + {file = "cryptography-45.0.7-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:c13b1e3afd29a5b3b2656257f14669ca8fa8d7956d509926f0b130b600b50ab7"}, + {file = "cryptography-45.0.7-pp311-pypy311_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4a862753b36620af6fc54209264f92c716367f2f0ff4624952276a6bbd18cbde"}, + {file = "cryptography-45.0.7-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:06ce84dc14df0bf6ea84666f958e6080cdb6fe1231be2a51f3fc1267d9f3fb34"}, + {file = "cryptography-45.0.7-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:d0c5c6bac22b177bf8da7435d9d27a6834ee130309749d162b26c3105c0795a9"}, + {file = "cryptography-45.0.7-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:2f641b64acc00811da98df63df7d59fd4706c0df449da71cb7ac39a0732b40ae"}, + {file = "cryptography-45.0.7-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:f5414a788ecc6ee6bc58560e85ca624258a55ca434884445440a810796ea0e0b"}, + {file = "cryptography-45.0.7-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:1f3d56f73595376f4244646dd5c5870c14c196949807be39e79e7bd9bac3da63"}, + {file = "cryptography-45.0.7.tar.gz", hash = "sha256:4b1654dfc64ea479c242508eb8c724044f1e964a47d1d1cacc5132292d851971"}, ] [package.dependencies] @@ -1257,7 +1292,7 @@ nox = ["nox (>=2024.4.15)", "nox[uv] (>=2024.3.2) ; python_full_version >= \"3.8 pep8test = ["check-sdist ; python_full_version >= \"3.8.0\"", "click (>=8.0.1)", "mypy (>=1.4)", "ruff (>=0.3.6)"] sdist = ["build (>=1.0.0)"] ssh = ["bcrypt (>=3.1.5)"] -test = ["certifi (>=2024)", "cryptography-vectors (==45.0.5)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"] +test = ["certifi (>=2024)", "cryptography-vectors (==45.0.7)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"] test-randomorder = ["pytest-randomly"] [[package]] @@ -1359,21 +1394,16 @@ testing = ["hatch", "pre-commit", "pytest", "tox"] [[package]] name = "filelock" -version = "3.18.0" +version = "3.19.1" description = "A platform independent file lock." 
optional = false python-versions = ">=3.9" groups = ["main"] files = [ - {file = "filelock-3.18.0-py3-none-any.whl", hash = "sha256:c401f4f8377c4464e6db25fff06205fd89bdd83b65eb0488ed1b160f780e21de"}, - {file = "filelock-3.18.0.tar.gz", hash = "sha256:adbc88eabb99d2fec8c9c1b229b171f18afa655400173ddc653d5d01501fb9f2"}, + {file = "filelock-3.19.1-py3-none-any.whl", hash = "sha256:d38e30481def20772f5baf097c122c3babc4fcdb7e14e57049eb9d88c6dc017d"}, + {file = "filelock-3.19.1.tar.gz", hash = "sha256:66eda1888b0171c998b35be2bcc0f6d75c388a7ce20c3f3f37aa8e96c2dddf58"}, ] -[package.extras] -docs = ["furo (>=2024.8.6)", "sphinx (>=8.1.3)", "sphinx-autodoc-typehints (>=3)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.6.10)", "diff-cover (>=9.2.1)", "pytest (>=8.3.4)", "pytest-asyncio (>=0.25.2)", "pytest-cov (>=6)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.28.1)"] -typing = ["typing-extensions (>=4.12.2) ; python_version < \"3.11\""] - [[package]] name = "frozenlist" version = "1.4.1" @@ -1750,106 +1780,128 @@ pycryptodome = "*" [[package]] name = "lxml" -version = "6.0.0" +version = "6.0.1" description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." optional = true python-versions = ">=3.8" groups = ["main"] files = [ - {file = "lxml-6.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:35bc626eec405f745199200ccb5c6b36f202675d204aa29bb52e27ba2b71dea8"}, - {file = "lxml-6.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:246b40f8a4aec341cbbf52617cad8ab7c888d944bfe12a6abd2b1f6cfb6f6082"}, - {file = "lxml-6.0.0-cp310-cp310-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:2793a627e95d119e9f1e19720730472f5543a6d84c50ea33313ce328d870f2dd"}, - {file = "lxml-6.0.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:46b9ed911f36bfeb6338e0b482e7fe7c27d362c52fde29f221fddbc9ee2227e7"}, - {file = "lxml-6.0.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2b4790b558bee331a933e08883c423f65bbcd07e278f91b2272489e31ab1e2b4"}, - {file = "lxml-6.0.0-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e2030956cf4886b10be9a0285c6802e078ec2391e1dd7ff3eb509c2c95a69b76"}, - {file = "lxml-6.0.0-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4d23854ecf381ab1facc8f353dcd9adeddef3652268ee75297c1164c987c11dc"}, - {file = "lxml-6.0.0-cp310-cp310-manylinux_2_31_armv7l.whl", hash = "sha256:43fe5af2d590bf4691531b1d9a2495d7aab2090547eaacd224a3afec95706d76"}, - {file = "lxml-6.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:74e748012f8c19b47f7d6321ac929a9a94ee92ef12bc4298c47e8b7219b26541"}, - {file = "lxml-6.0.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:43cfbb7db02b30ad3926e8fceaef260ba2fb7df787e38fa2df890c1ca7966c3b"}, - {file = "lxml-6.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:34190a1ec4f1e84af256495436b2d196529c3f2094f0af80202947567fdbf2e7"}, - {file = "lxml-6.0.0-cp310-cp310-win32.whl", hash = "sha256:5967fe415b1920a3877a4195e9a2b779249630ee49ece22021c690320ff07452"}, - {file = "lxml-6.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:f3389924581d9a770c6caa4df4e74b606180869043b9073e2cec324bad6e306e"}, - {file = "lxml-6.0.0-cp310-cp310-win_arm64.whl", hash = "sha256:522fe7abb41309e9543b0d9b8b434f2b630c5fdaf6482bee642b34c8c70079c8"}, - {file = "lxml-6.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = 
"sha256:4ee56288d0df919e4aac43b539dd0e34bb55d6a12a6562038e8d6f3ed07f9e36"}, - {file = "lxml-6.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b8dd6dd0e9c1992613ccda2bcb74fc9d49159dbe0f0ca4753f37527749885c25"}, - {file = "lxml-6.0.0-cp311-cp311-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:d7ae472f74afcc47320238b5dbfd363aba111a525943c8a34a1b657c6be934c3"}, - {file = "lxml-6.0.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5592401cdf3dc682194727c1ddaa8aa0f3ddc57ca64fd03226a430b955eab6f6"}, - {file = "lxml-6.0.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:58ffd35bd5425c3c3b9692d078bf7ab851441434531a7e517c4984d5634cd65b"}, - {file = "lxml-6.0.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f720a14aa102a38907c6d5030e3d66b3b680c3e6f6bc95473931ea3c00c59967"}, - {file = "lxml-6.0.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c2a5e8d207311a0170aca0eb6b160af91adc29ec121832e4ac151a57743a1e1e"}, - {file = "lxml-6.0.0-cp311-cp311-manylinux_2_31_armv7l.whl", hash = "sha256:2dd1cc3ea7e60bfb31ff32cafe07e24839df573a5e7c2d33304082a5019bcd58"}, - {file = "lxml-6.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2cfcf84f1defed7e5798ef4f88aa25fcc52d279be731ce904789aa7ccfb7e8d2"}, - {file = "lxml-6.0.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:a52a4704811e2623b0324a18d41ad4b9fabf43ce5ff99b14e40a520e2190c851"}, - {file = "lxml-6.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c16304bba98f48a28ae10e32a8e75c349dd742c45156f297e16eeb1ba9287a1f"}, - {file = "lxml-6.0.0-cp311-cp311-win32.whl", hash = "sha256:f8d19565ae3eb956d84da3ef367aa7def14a2735d05bd275cd54c0301f0d0d6c"}, - {file = "lxml-6.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:b2d71cdefda9424adff9a3607ba5bbfc60ee972d73c21c7e3c19e71037574816"}, - {file = "lxml-6.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:8a2e76efbf8772add72d002d67a4c3d0958638696f541734304c7f28217a9cab"}, - {file = "lxml-6.0.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:78718d8454a6e928470d511bf8ac93f469283a45c354995f7d19e77292f26108"}, - {file = "lxml-6.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:84ef591495ffd3f9dcabffd6391db7bb70d7230b5c35ef5148354a134f56f2be"}, - {file = "lxml-6.0.0-cp312-cp312-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:2930aa001a3776c3e2601cb8e0a15d21b8270528d89cc308be4843ade546b9ab"}, - {file = "lxml-6.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:219e0431ea8006e15005767f0351e3f7f9143e793e58519dc97fe9e07fae5563"}, - {file = "lxml-6.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:bd5913b4972681ffc9718bc2d4c53cde39ef81415e1671ff93e9aa30b46595e7"}, - {file = "lxml-6.0.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:390240baeb9f415a82eefc2e13285016f9c8b5ad71ec80574ae8fa9605093cd7"}, - {file = "lxml-6.0.0-cp312-cp312-manylinux_2_27_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:d6e200909a119626744dd81bae409fc44134389e03fbf1d68ed2a55a2fb10991"}, - {file = "lxml-6.0.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ca50bd612438258a91b5b3788c6621c1f05c8c478e7951899f492be42defc0da"}, - {file = "lxml-6.0.0-cp312-cp312-manylinux_2_31_armv7l.whl", hash = "sha256:c24b8efd9c0f62bad0439283c2c795ef916c5a6b75f03c17799775c7ae3c0c9e"}, - {file = 
"lxml-6.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:afd27d8629ae94c5d863e32ab0e1d5590371d296b87dae0a751fb22bf3685741"}, - {file = "lxml-6.0.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:54c4855eabd9fc29707d30141be99e5cd1102e7d2258d2892314cf4c110726c3"}, - {file = "lxml-6.0.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c907516d49f77f6cd8ead1322198bdfd902003c3c330c77a1c5f3cc32a0e4d16"}, - {file = "lxml-6.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:36531f81c8214e293097cd2b7873f178997dae33d3667caaae8bdfb9666b76c0"}, - {file = "lxml-6.0.0-cp312-cp312-win32.whl", hash = "sha256:690b20e3388a7ec98e899fd54c924e50ba6693874aa65ef9cb53de7f7de9d64a"}, - {file = "lxml-6.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:310b719b695b3dd442cdfbbe64936b2f2e231bb91d998e99e6f0daf991a3eba3"}, - {file = "lxml-6.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:8cb26f51c82d77483cdcd2b4a53cda55bbee29b3c2f3ddeb47182a2a9064e4eb"}, - {file = "lxml-6.0.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:6da7cd4f405fd7db56e51e96bff0865b9853ae70df0e6720624049da76bde2da"}, - {file = "lxml-6.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b34339898bb556a2351a1830f88f751679f343eabf9cf05841c95b165152c9e7"}, - {file = "lxml-6.0.0-cp313-cp313-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:51a5e4c61a4541bd1cd3ba74766d0c9b6c12d6a1a4964ef60026832aac8e79b3"}, - {file = "lxml-6.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d18a25b19ca7307045581b18b3ec9ead2b1db5ccd8719c291f0cd0a5cec6cb81"}, - {file = "lxml-6.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d4f0c66df4386b75d2ab1e20a489f30dc7fd9a06a896d64980541506086be1f1"}, - {file = "lxml-6.0.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9f4b481b6cc3a897adb4279216695150bbe7a44c03daba3c894f49d2037e0a24"}, - {file = "lxml-6.0.0-cp313-cp313-manylinux_2_27_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8a78d6c9168f5bcb20971bf3329c2b83078611fbe1f807baadc64afc70523b3a"}, - {file = "lxml-6.0.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2ae06fbab4f1bb7db4f7c8ca9897dc8db4447d1a2b9bee78474ad403437bcc29"}, - {file = "lxml-6.0.0-cp313-cp313-manylinux_2_31_armv7l.whl", hash = "sha256:1fa377b827ca2023244a06554c6e7dc6828a10aaf74ca41965c5d8a4925aebb4"}, - {file = "lxml-6.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1676b56d48048a62ef77a250428d1f31f610763636e0784ba67a9740823988ca"}, - {file = "lxml-6.0.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:0e32698462aacc5c1cf6bdfebc9c781821b7e74c79f13e5ffc8bfe27c42b1abf"}, - {file = "lxml-6.0.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4d6036c3a296707357efb375cfc24bb64cd955b9ec731abf11ebb1e40063949f"}, - {file = "lxml-6.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7488a43033c958637b1a08cddc9188eb06d3ad36582cebc7d4815980b47e27ef"}, - {file = "lxml-6.0.0-cp313-cp313-win32.whl", hash = "sha256:5fcd7d3b1d8ecb91445bd71b9c88bdbeae528fefee4f379895becfc72298d181"}, - {file = "lxml-6.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:2f34687222b78fff795feeb799a7d44eca2477c3d9d3a46ce17d51a4f383e32e"}, - {file = "lxml-6.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:21db1ec5525780fd07251636eb5f7acb84003e9382c72c18c542a87c416ade03"}, - {file = "lxml-6.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4eb114a0754fd00075c12648d991ec7a4357f9cb873042cc9a77bf3a7e30c9db"}, - 
{file = "lxml-6.0.0-cp38-cp38-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:7da298e1659e45d151b4028ad5c7974917e108afb48731f4ed785d02b6818994"}, - {file = "lxml-6.0.0-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7bf61bc4345c1895221357af8f3e89f8c103d93156ef326532d35c707e2fb19d"}, - {file = "lxml-6.0.0-cp38-cp38-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:63b634facdfbad421d4b61c90735688465d4ab3a8853ac22c76ccac2baf98d97"}, - {file = "lxml-6.0.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:e380e85b93f148ad28ac15f8117e2fd8e5437aa7732d65e260134f83ce67911b"}, - {file = "lxml-6.0.0-cp38-cp38-win32.whl", hash = "sha256:185efc2fed89cdd97552585c624d3c908f0464090f4b91f7d92f8ed2f3b18f54"}, - {file = "lxml-6.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:f97487996a39cb18278ca33f7be98198f278d0bc3c5d0fd4d7b3d63646ca3c8a"}, - {file = "lxml-6.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:85b14a4689d5cff426c12eefe750738648706ea2753b20c2f973b2a000d3d261"}, - {file = "lxml-6.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f64ccf593916e93b8d36ed55401bb7fe9c7d5de3180ce2e10b08f82a8f397316"}, - {file = "lxml-6.0.0-cp39-cp39-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:b372d10d17a701b0945f67be58fae4664fd056b85e0ff0fbc1e6c951cdbc0512"}, - {file = "lxml-6.0.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:a674c0948789e9136d69065cc28009c1b1874c6ea340253db58be7622ce6398f"}, - {file = "lxml-6.0.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:edf6e4c8fe14dfe316939711e3ece3f9a20760aabf686051b537a7562f4da91a"}, - {file = "lxml-6.0.0-cp39-cp39-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:048a930eb4572829604982e39a0c7289ab5dc8abc7fc9f5aabd6fbc08c154e93"}, - {file = "lxml-6.0.0-cp39-cp39-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c0b5fa5eda84057a4f1bbb4bb77a8c28ff20ae7ce211588d698ae453e13c6281"}, - {file = "lxml-6.0.0-cp39-cp39-manylinux_2_31_armv7l.whl", hash = "sha256:c352fc8f36f7e9727db17adbf93f82499457b3d7e5511368569b4c5bd155a922"}, - {file = "lxml-6.0.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:8db5dc617cb937ae17ff3403c3a70a7de9df4852a046f93e71edaec678f721d0"}, - {file = "lxml-6.0.0-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:2181e4b1d07dde53986023482673c0f1fba5178ef800f9ab95ad791e8bdded6a"}, - {file = "lxml-6.0.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:b3c98d5b24c6095e89e03d65d5c574705be3d49c0d8ca10c17a8a4b5201b72f5"}, - {file = "lxml-6.0.0-cp39-cp39-win32.whl", hash = "sha256:04d67ceee6db4bcb92987ccb16e53bef6b42ced872509f333c04fb58a3315256"}, - {file = "lxml-6.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:e0b1520ef900e9ef62e392dd3d7ae4f5fa224d1dd62897a792cf353eb20b6cae"}, - {file = "lxml-6.0.0-cp39-cp39-win_arm64.whl", hash = "sha256:e35e8aaaf3981489f42884b59726693de32dabfc438ac10ef4eb3409961fd402"}, - {file = "lxml-6.0.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:dbdd7679a6f4f08152818043dbb39491d1af3332128b3752c3ec5cebc0011a72"}, - {file = "lxml-6.0.0-pp310-pypy310_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:40442e2a4456e9910875ac12951476d36c0870dcb38a68719f8c4686609897c4"}, - {file = "lxml-6.0.0-pp310-pypy310_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:db0efd6bae1c4730b9c863fc4f5f3c0fa3e8f05cae2c44ae141cb9dfc7d091dc"}, - {file = 
"lxml-6.0.0-pp310-pypy310_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9ab542c91f5a47aaa58abdd8ea84b498e8e49fe4b883d67800017757a3eb78e8"}, - {file = "lxml-6.0.0-pp310-pypy310_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:013090383863b72c62a702d07678b658fa2567aa58d373d963cca245b017e065"}, - {file = "lxml-6.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:c86df1c9af35d903d2b52d22ea3e66db8058d21dc0f59842ca5deb0595921141"}, - {file = "lxml-6.0.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:4337e4aec93b7c011f7ee2e357b0d30562edd1955620fdd4aeab6aacd90d43c5"}, - {file = "lxml-6.0.0-pp39-pypy39_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ae74f7c762270196d2dda56f8dd7309411f08a4084ff2dfcc0b095a218df2e06"}, - {file = "lxml-6.0.0-pp39-pypy39_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:059c4cbf3973a621b62ea3132934ae737da2c132a788e6cfb9b08d63a0ef73f9"}, - {file = "lxml-6.0.0-pp39-pypy39_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:17f090a9bc0ce8da51a5632092f98a7e7f84bca26f33d161a98b57f7fb0004ca"}, - {file = "lxml-6.0.0-pp39-pypy39_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9da022c14baeec36edfcc8daf0e281e2f55b950249a455776f0d1adeeada4734"}, - {file = "lxml-6.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a55da151d0b0c6ab176b4e761670ac0e2667817a1e0dadd04a01d0561a219349"}, - {file = "lxml-6.0.0.tar.gz", hash = "sha256:032e65120339d44cdc3efc326c9f660f5f7205f3a535c1fdbf898b29ea01fb72"}, + {file = "lxml-6.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3b38e20c578149fdbba1fd3f36cb1928a3aaca4b011dfd41ba09d11fb396e1b9"}, + {file = "lxml-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:11a052cbd013b7140bbbb38a14e2329b6192478344c99097e378c691b7119551"}, + {file = "lxml-6.0.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:21344d29c82ca8547ea23023bb8e7538fa5d4615a1773b991edf8176a870c1ea"}, + {file = "lxml-6.0.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:aa8f130f4b2dc94baa909c17bb7994f0268a2a72b9941c872e8e558fd6709050"}, + {file = "lxml-6.0.1-cp310-cp310-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4588806a721552692310ebe9f90c17ac6c7c5dac438cd93e3d74dd60531c3211"}, + {file = "lxml-6.0.1-cp310-cp310-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:8466faa66b0353802fb7c054a400ac17ce2cf416e3ad8516eadeff9cba85b741"}, + {file = "lxml-6.0.1-cp310-cp310-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:50b5e54f6a9461b1e9c08b4a3420415b538d4773bd9df996b9abcbfe95f4f1fd"}, + {file = "lxml-6.0.1-cp310-cp310-manylinux_2_31_armv7l.whl", hash = "sha256:6f393e10685b37f15b1daef8aa0d734ec61860bb679ec447afa0001a31e7253f"}, + {file = "lxml-6.0.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:07038c62fd0fe2743e2f5326f54d464715373c791035d7dda377b3c9a5d0ad77"}, + {file = "lxml-6.0.1-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:7a44a5fb1edd11b3a65c12c23e1049c8ae49d90a24253ff18efbcb6aa042d012"}, + {file = "lxml-6.0.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:a57d9eb9aadf311c9e8785230eec83c6abb9aef2adac4c0587912caf8f3010b8"}, + {file = "lxml-6.0.1-cp310-cp310-win32.whl", hash = "sha256:d877874a31590b72d1fa40054b50dc33084021bfc15d01b3a661d85a302af821"}, + {file = "lxml-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:c43460f4aac016ee0e156bfa14a9de9b3e06249b12c228e27654ac3996a46d5b"}, + {file = 
"lxml-6.0.1-cp310-cp310-win_arm64.whl", hash = "sha256:615bb6c73fed7929e3a477a3297a797892846b253d59c84a62c98bdce3849a0a"}, + {file = "lxml-6.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c6acde83f7a3d6399e6d83c1892a06ac9b14ea48332a5fbd55d60b9897b9570a"}, + {file = "lxml-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0d21c9cacb6a889cbb8eeb46c77ef2c1dd529cde10443fdeb1de847b3193c541"}, + {file = "lxml-6.0.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:847458b7cd0d04004895f1fb2cca8e7c0f8ec923c49c06b7a72ec2d48ea6aca2"}, + {file = "lxml-6.0.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1dc13405bf315d008fe02b1472d2a9d65ee1c73c0a06de5f5a45e6e404d9a1c0"}, + {file = "lxml-6.0.1-cp311-cp311-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:70f540c229a8c0a770dcaf6d5af56a5295e0fc314fc7ef4399d543328054bcea"}, + {file = "lxml-6.0.1-cp311-cp311-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:d2f73aef768c70e8deb8c4742fca4fd729b132fda68458518851c7735b55297e"}, + {file = "lxml-6.0.1-cp311-cp311-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e7f4066b85a4fa25ad31b75444bd578c3ebe6b8ed47237896341308e2ce923c3"}, + {file = "lxml-6.0.1-cp311-cp311-manylinux_2_31_armv7l.whl", hash = "sha256:0cce65db0cd8c750a378639900d56f89f7d6af11cd5eda72fde054d27c54b8ce"}, + {file = "lxml-6.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c372d42f3eee5844b69dcab7b8d18b2f449efd54b46ac76970d6e06b8e8d9a66"}, + {file = "lxml-6.0.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:2e2b0e042e1408bbb1c5f3cfcb0f571ff4ac98d8e73f4bf37c5dd179276beedd"}, + {file = "lxml-6.0.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:cc73bb8640eadd66d25c5a03175de6801f63c535f0f3cf50cac2f06a8211f420"}, + {file = "lxml-6.0.1-cp311-cp311-win32.whl", hash = "sha256:7c23fd8c839708d368e406282d7953cee5134f4592ef4900026d84566d2b4c88"}, + {file = "lxml-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:2516acc6947ecd3c41a4a4564242a87c6786376989307284ddb115f6a99d927f"}, + {file = "lxml-6.0.1-cp311-cp311-win_arm64.whl", hash = "sha256:cb46f8cfa1b0334b074f40c0ff94ce4d9a6755d492e6c116adb5f4a57fb6ad96"}, + {file = "lxml-6.0.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:c03ac546adaabbe0b8e4a15d9ad815a281afc8d36249c246aecf1aaad7d6f200"}, + {file = "lxml-6.0.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:33b862c7e3bbeb4ba2c96f3a039f925c640eeba9087a4dc7a572ec0f19d89392"}, + {file = "lxml-6.0.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7a3ec1373f7d3f519de595032d4dcafae396c29407cfd5073f42d267ba32440d"}, + {file = "lxml-6.0.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:03b12214fb1608f4cffa181ec3d046c72f7e77c345d06222144744c122ded870"}, + {file = "lxml-6.0.1-cp312-cp312-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:207ae0d5f0f03b30f95e649a6fa22aa73f5825667fee9c7ec6854d30e19f2ed8"}, + {file = "lxml-6.0.1-cp312-cp312-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:32297b09ed4b17f7b3f448de87a92fb31bb8747496623483788e9f27c98c0f00"}, + {file = "lxml-6.0.1-cp312-cp312-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:7e18224ea241b657a157c85e9cac82c2b113ec90876e01e1f127312006233756"}, + {file = "lxml-6.0.1-cp312-cp312-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a07a994d3c46cd4020c1ea566345cf6815af205b1e948213a4f0f1d392182072"}, + {file = 
"lxml-6.0.1-cp312-cp312-manylinux_2_31_armv7l.whl", hash = "sha256:2287fadaa12418a813b05095485c286c47ea58155930cfbd98c590d25770e225"}, + {file = "lxml-6.0.1-cp312-cp312-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:b4e597efca032ed99f418bd21314745522ab9fa95af33370dcee5533f7f70136"}, + {file = "lxml-6.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9696d491f156226decdd95d9651c6786d43701e49f32bf23715c975539aa2b3b"}, + {file = "lxml-6.0.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:e4e3cd3585f3c6f87cdea44cda68e692cc42a012f0131d25957ba4ce755241a7"}, + {file = "lxml-6.0.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:45cbc92f9d22c28cd3b97f8d07fcefa42e569fbd587dfdac76852b16a4924277"}, + {file = "lxml-6.0.1-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:f8c9bcfd2e12299a442fba94459adf0b0d001dbc68f1594439bfa10ad1ecb74b"}, + {file = "lxml-6.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1e9dc2b9f1586e7cd77753eae81f8d76220eed9b768f337dc83a3f675f2f0cf9"}, + {file = "lxml-6.0.1-cp312-cp312-win32.whl", hash = "sha256:987ad5c3941c64031f59c226167f55a04d1272e76b241bfafc968bdb778e07fb"}, + {file = "lxml-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:abb05a45394fd76bf4a60c1b7bec0e6d4e8dfc569fc0e0b1f634cd983a006ddc"}, + {file = "lxml-6.0.1-cp312-cp312-win_arm64.whl", hash = "sha256:c4be29bce35020d8579d60aa0a4e95effd66fcfce31c46ffddf7e5422f73a299"}, + {file = "lxml-6.0.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:485eda5d81bb7358db96a83546949c5fe7474bec6c68ef3fa1fb61a584b00eea"}, + {file = "lxml-6.0.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d12160adea318ce3d118f0b4fbdff7d1225c75fb7749429541b4d217b85c3f76"}, + {file = "lxml-6.0.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:48c8d335d8ab72f9265e7ba598ae5105a8272437403f4032107dbcb96d3f0b29"}, + {file = "lxml-6.0.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:405e7cf9dbdbb52722c231e0f1257214202dfa192327fab3de45fd62e0554082"}, + {file = "lxml-6.0.1-cp313-cp313-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:299a790d403335a6a057ade46f92612ebab87b223e4e8c5308059f2dc36f45ed"}, + {file = "lxml-6.0.1-cp313-cp313-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:48da704672f6f9c461e9a73250440c647638cc6ff9567ead4c3b1f189a604ee8"}, + {file = "lxml-6.0.1-cp313-cp313-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:21e364e1bb731489e3f4d51db416f991a5d5da5d88184728d80ecfb0904b1d68"}, + {file = "lxml-6.0.1-cp313-cp313-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1bce45a2c32032afddbd84ed8ab092130649acb935536ef7a9559636ce7ffd4a"}, + {file = "lxml-6.0.1-cp313-cp313-manylinux_2_31_armv7l.whl", hash = "sha256:fa164387ff20ab0e575fa909b11b92ff1481e6876835014e70280769920c4433"}, + {file = "lxml-6.0.1-cp313-cp313-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:7587ac5e000e1594e62278422c5783b34a82b22f27688b1074d71376424b73e8"}, + {file = "lxml-6.0.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:57478424ac4c9170eabf540237125e8d30fad1940648924c058e7bc9fb9cf6dd"}, + {file = "lxml-6.0.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:09c74afc7786c10dd6afaa0be2e4805866beadc18f1d843cf517a7851151b499"}, + {file = "lxml-6.0.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:7fd70681aeed83b196482d42a9b0dc5b13bab55668d09ad75ed26dff3be5a2f5"}, + {file = "lxml-6.0.1-cp313-cp313-musllinux_1_2_riscv64.whl", hash = 
"sha256:10a72e456319b030b3dd900df6b1f19d89adf06ebb688821636dc406788cf6ac"}, + {file = "lxml-6.0.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b0fa45fb5f55111ce75b56c703843b36baaf65908f8b8d2fbbc0e249dbc127ed"}, + {file = "lxml-6.0.1-cp313-cp313-win32.whl", hash = "sha256:01dab65641201e00c69338c9c2b8a0f2f484b6b3a22d10779bb417599fae32b5"}, + {file = "lxml-6.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:bdf8f7c8502552d7bff9e4c98971910a0a59f60f88b5048f608d0a1a75e94d1c"}, + {file = "lxml-6.0.1-cp313-cp313-win_arm64.whl", hash = "sha256:a6aeca75959426b9fd8d4782c28723ba224fe07cfa9f26a141004210528dcbe2"}, + {file = "lxml-6.0.1-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:29b0e849ec7030e3ecb6112564c9f7ad6881e3b2375dd4a0c486c5c1f3a33859"}, + {file = "lxml-6.0.1-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:02a0f7e629f73cc0be598c8b0611bf28ec3b948c549578a26111b01307fd4051"}, + {file = "lxml-6.0.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:beab5e54de016e730875f612ba51e54c331e2fa6dc78ecf9a5415fc90d619348"}, + {file = "lxml-6.0.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:92a08aefecd19ecc4ebf053c27789dd92c87821df2583a4337131cf181a1dffa"}, + {file = "lxml-6.0.1-cp314-cp314-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:36c8fa7e177649470bc3dcf7eae6bee1e4984aaee496b9ccbf30e97ac4127fa2"}, + {file = "lxml-6.0.1-cp314-cp314-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:5d08e0f1af6916267bb7eff21c09fa105620f07712424aaae09e8cb5dd4164d1"}, + {file = "lxml-6.0.1-cp314-cp314-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:9705cdfc05142f8c38c97a61bd3a29581ceceb973a014e302ee4a73cc6632476"}, + {file = "lxml-6.0.1-cp314-cp314-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:74555e2da7c1636e30bff4e6e38d862a634cf020ffa591f1f63da96bf8b34772"}, + {file = "lxml-6.0.1-cp314-cp314-manylinux_2_31_armv7l.whl", hash = "sha256:e38b5f94c5a2a5dadaddd50084098dfd005e5a2a56cd200aaf5e0a20e8941782"}, + {file = "lxml-6.0.1-cp314-cp314-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a5ec101a92ddacb4791977acfc86c1afd624c032974bfb6a21269d1083c9bc49"}, + {file = "lxml-6.0.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:5c17e70c82fd777df586c12114bbe56e4e6f823a971814fd40dec9c0de518772"}, + {file = "lxml-6.0.1-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:45fdd0415a0c3d91640b5d7a650a8f37410966a2e9afebb35979d06166fd010e"}, + {file = "lxml-6.0.1-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:d417eba28981e720a14fcb98f95e44e7a772fe25982e584db38e5d3b6ee02e79"}, + {file = "lxml-6.0.1-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:8e5d116b9e59be7934febb12c41cce2038491ec8fdb743aeacaaf36d6e7597e4"}, + {file = "lxml-6.0.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:c238f0d0d40fdcb695c439fe5787fa69d40f45789326b3bb6ef0d61c4b588d6e"}, + {file = "lxml-6.0.1-cp314-cp314-win32.whl", hash = "sha256:537b6cf1c5ab88cfd159195d412edb3e434fee880f206cbe68dff9c40e17a68a"}, + {file = "lxml-6.0.1-cp314-cp314-win_amd64.whl", hash = "sha256:911d0a2bb3ef3df55b3d97ab325a9ca7e438d5112c102b8495321105d25a441b"}, + {file = "lxml-6.0.1-cp314-cp314-win_arm64.whl", hash = "sha256:2834377b0145a471a654d699bdb3a2155312de492142ef5a1d426af2c60a0a31"}, + {file = "lxml-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9283997edb661ebba05314da1b9329e628354be310bbf947b0faa18263c5df1b"}, + {file = 
"lxml-6.0.1-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1beca37c6e7a4ddd1ca24829e2c6cb60b5aad0d6936283b5b9909a7496bd97af"}, + {file = "lxml-6.0.1-cp38-cp38-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:42897fe8cb097274087fafc8251a39b4cf8d64a7396d49479bdc00b3587331cb"}, + {file = "lxml-6.0.1-cp38-cp38-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0ef8cd44a080bfb92776047d11ab64875faf76e0d8be20ea3ff0c1e67b3fc9cb"}, + {file = "lxml-6.0.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:433ab647dad6a9fb31418ccd3075dcb4405ece75dced998789fe14a8e1e3785c"}, + {file = "lxml-6.0.1-cp38-cp38-win32.whl", hash = "sha256:bfa30ef319462242333ef8f0c7631fb8b8b8eae7dca83c1f235d2ea2b7f8ff2b"}, + {file = "lxml-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:7f36e4a2439d134b8e70f92ff27ada6fb685966de385668e21c708021733ead1"}, + {file = "lxml-6.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:edb975280633a68d0988b11940834ce2b0fece9f5278297fc50b044cb713f0e1"}, + {file = "lxml-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d4c5acb9bc22f2026bbd0ecbfdb890e9b3e5b311b992609d35034706ad111b5d"}, + {file = "lxml-6.0.1-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:47ab1aff82a95a07d96c1eff4eaebec84f823e0dfb4d9501b1fbf9621270c1d3"}, + {file = "lxml-6.0.1-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:faa7233bdb7a4365e2411a665d034c370ac82798a926e65f76c26fbbf0fd14b7"}, + {file = "lxml-6.0.1-cp39-cp39-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c71a0ce0e08c7e11e64895c720dc7752bf064bfecd3eb2c17adcd7bfa8ffb22c"}, + {file = "lxml-6.0.1-cp39-cp39-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:57744270a512a93416a149f8b6ea1dbbbee127f5edcbcd5adf28e44b6ff02f33"}, + {file = "lxml-6.0.1-cp39-cp39-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e89d977220f7b1f0c725ac76f5c65904193bd4c264577a3af9017de17560ea7e"}, + {file = "lxml-6.0.1-cp39-cp39-manylinux_2_31_armv7l.whl", hash = "sha256:0c8f7905f1971c2c408badf49ae0ef377cc54759552bcf08ae7a0a8ed18999c2"}, + {file = "lxml-6.0.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:ea27626739e82f2be18cbb1aff7ad59301c723dc0922d9a00bc4c27023f16ab7"}, + {file = "lxml-6.0.1-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:21300d8c1bbcc38925aabd4b3c2d6a8b09878daf9e8f2035f09b5b002bcddd66"}, + {file = "lxml-6.0.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:021497a94907c5901cd49d24b5b0fdd18d198a06611f5ce26feeb67c901b92f2"}, + {file = "lxml-6.0.1-cp39-cp39-win32.whl", hash = "sha256:620869f2a3ec1475d000b608024f63259af8d200684de380ccb9650fbc14d1bb"}, + {file = "lxml-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:afae3a15889942426723839a3cf56dab5e466f7d873640a7a3c53abc671e2387"}, + {file = "lxml-6.0.1-cp39-cp39-win_arm64.whl", hash = "sha256:2719e42acda8f3444a0d88204fd90665116dda7331934da4d479dd9296c33ce2"}, + {file = "lxml-6.0.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:0abfbaf4ebbd7fd33356217d317b6e4e2ef1648be6a9476a52b57ffc6d8d1780"}, + {file = "lxml-6.0.1-pp310-pypy310_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1ebbf2d9775be149235abebdecae88fe3b3dd06b1797cd0f6dffe6948e85309d"}, + {file = "lxml-6.0.1-pp310-pypy310_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:a389e9f11c010bd30531325805bbe97bdf7f728a73d0ec475adef57ffec60547"}, + {file = "lxml-6.0.1-pp310-pypy310_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:8f5cf2addfbbe745251132c955ad62d8519bb4b2c28b0aa060eca4541798d86e"}, + {file = "lxml-6.0.1-pp310-pypy310_pp73-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f1b60a3287bf33a2a54805d76b82055bcc076e445fd539ee9ae1fe85ed373691"}, + {file = "lxml-6.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:f7bbfb0751551a8786915fc6b615ee56344dacc1b1033697625b553aefdd9837"}, + {file = "lxml-6.0.1-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:b556aaa6ef393e989dac694b9c95761e32e058d5c4c11ddeef33f790518f7a5e"}, + {file = "lxml-6.0.1-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:64fac7a05ebb3737b79fd89fe5a5b6c5546aac35cfcfd9208eb6e5d13215771c"}, + {file = "lxml-6.0.1-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:038d3c08babcfce9dc89aaf498e6da205efad5b7106c3b11830a488d4eadf56b"}, + {file = "lxml-6.0.1-pp311-pypy311_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:445f2cee71c404ab4259bc21e20339a859f75383ba2d7fb97dfe7c163994287b"}, + {file = "lxml-6.0.1-pp311-pypy311_pp73-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e352d8578e83822d70bea88f3d08b9912528e4c338f04ab707207ab12f4b7aac"}, + {file = "lxml-6.0.1-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:51bd5d1a9796ca253db6045ab45ca882c09c071deafffc22e06975b7ace36300"}, + {file = "lxml-6.0.1.tar.gz", hash = "sha256:2b3a882ebf27dd026df3801a87cf49ff791336e0f94b0fad195db77e01240690"}, ] [package.extras] @@ -2152,48 +2204,55 @@ files = [ [[package]] name = "mypy" -version = "1.15.0" +version = "1.17.1" description = "Optional static typing for Python" optional = true python-versions = ">=3.9" groups = ["main"] files = [ - {file = "mypy-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:979e4e1a006511dacf628e36fadfecbcc0160a8af6ca7dad2f5025529e082c13"}, - {file = "mypy-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c4bb0e1bd29f7d34efcccd71cf733580191e9a264a2202b0239da95984c5b559"}, - {file = "mypy-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:be68172e9fd9ad8fb876c6389f16d1c1b5f100ffa779f77b1fb2176fcc9ab95b"}, - {file = "mypy-1.15.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c7be1e46525adfa0d97681432ee9fcd61a3964c2446795714699a998d193f1a3"}, - {file = "mypy-1.15.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:2e2c2e6d3593f6451b18588848e66260ff62ccca522dd231cd4dd59b0160668b"}, - {file = "mypy-1.15.0-cp310-cp310-win_amd64.whl", hash = "sha256:6983aae8b2f653e098edb77f893f7b6aca69f6cffb19b2cc7443f23cce5f4828"}, - {file = "mypy-1.15.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2922d42e16d6de288022e5ca321cd0618b238cfc5570e0263e5ba0a77dbef56f"}, - {file = "mypy-1.15.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2ee2d57e01a7c35de00f4634ba1bbf015185b219e4dc5909e281016df43f5ee5"}, - {file = "mypy-1.15.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:973500e0774b85d9689715feeffcc980193086551110fd678ebe1f4342fb7c5e"}, - {file = "mypy-1.15.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5a95fb17c13e29d2d5195869262f8125dfdb5c134dc8d9a9d0aecf7525b10c2c"}, - {file = "mypy-1.15.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1905f494bfd7d85a23a88c5d97840888a7bd516545fc5aaedff0267e0bb54e2f"}, - {file = "mypy-1.15.0-cp311-cp311-win_amd64.whl", hash = 
"sha256:c9817fa23833ff189db061e6d2eff49b2f3b6ed9856b4a0a73046e41932d744f"}, - {file = "mypy-1.15.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:aea39e0583d05124836ea645f412e88a5c7d0fd77a6d694b60d9b6b2d9f184fd"}, - {file = "mypy-1.15.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2f2147ab812b75e5b5499b01ade1f4a81489a147c01585cda36019102538615f"}, - {file = "mypy-1.15.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ce436f4c6d218a070048ed6a44c0bbb10cd2cc5e272b29e7845f6a2f57ee4464"}, - {file = "mypy-1.15.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8023ff13985661b50a5928fc7a5ca15f3d1affb41e5f0a9952cb68ef090b31ee"}, - {file = "mypy-1.15.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1124a18bc11a6a62887e3e137f37f53fbae476dc36c185d549d4f837a2a6a14e"}, - {file = "mypy-1.15.0-cp312-cp312-win_amd64.whl", hash = "sha256:171a9ca9a40cd1843abeca0e405bc1940cd9b305eaeea2dda769ba096932bb22"}, - {file = "mypy-1.15.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:93faf3fdb04768d44bf28693293f3904bbb555d076b781ad2530214ee53e3445"}, - {file = "mypy-1.15.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:811aeccadfb730024c5d3e326b2fbe9249bb7413553f15499a4050f7c30e801d"}, - {file = "mypy-1.15.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:98b7b9b9aedb65fe628c62a6dc57f6d5088ef2dfca37903a7d9ee374d03acca5"}, - {file = "mypy-1.15.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c43a7682e24b4f576d93072216bf56eeff70d9140241f9edec0c104d0c515036"}, - {file = "mypy-1.15.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:baefc32840a9f00babd83251560e0ae1573e2f9d1b067719479bfb0e987c6357"}, - {file = "mypy-1.15.0-cp313-cp313-win_amd64.whl", hash = "sha256:b9378e2c00146c44793c98b8d5a61039a048e31f429fb0eb546d93f4b000bedf"}, - {file = "mypy-1.15.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e601a7fa172c2131bff456bb3ee08a88360760d0d2f8cbd7a75a65497e2df078"}, - {file = "mypy-1.15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:712e962a6357634fef20412699a3655c610110e01cdaa6180acec7fc9f8513ba"}, - {file = "mypy-1.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f95579473af29ab73a10bada2f9722856792a36ec5af5399b653aa28360290a5"}, - {file = "mypy-1.15.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8f8722560a14cde92fdb1e31597760dc35f9f5524cce17836c0d22841830fd5b"}, - {file = "mypy-1.15.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1fbb8da62dc352133d7d7ca90ed2fb0e9d42bb1a32724c287d3c76c58cbaa9c2"}, - {file = "mypy-1.15.0-cp39-cp39-win_amd64.whl", hash = "sha256:d10d994b41fb3497719bbf866f227b3489048ea4bbbb5015357db306249f7980"}, - {file = "mypy-1.15.0-py3-none-any.whl", hash = "sha256:5469affef548bd1895d86d3bf10ce2b44e33d86923c29e4d675b3e323437ea3e"}, - {file = "mypy-1.15.0.tar.gz", hash = "sha256:404534629d51d3efea5c800ee7c42b72a6554d6c400e6a79eafe15d11341fd43"}, + {file = "mypy-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3fbe6d5555bf608c47203baa3e72dbc6ec9965b3d7c318aa9a4ca76f465bd972"}, + {file = "mypy-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:80ef5c058b7bce08c83cac668158cb7edea692e458d21098c7d3bce35a5d43e7"}, + {file = "mypy-1.17.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:c4a580f8a70c69e4a75587bd925d298434057fe2a428faaf927ffe6e4b9a98df"}, + {file = "mypy-1.17.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dd86bb649299f09d987a2eebb4d52d10603224500792e1bee18303bbcc1ce390"}, + {file = "mypy-1.17.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:a76906f26bd8d51ea9504966a9c25419f2e668f012e0bdf3da4ea1526c534d94"}, + {file = "mypy-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:e79311f2d904ccb59787477b7bd5d26f3347789c06fcd7656fa500875290264b"}, + {file = "mypy-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ad37544be07c5d7fba814eb370e006df58fed8ad1ef33ed1649cb1889ba6ff58"}, + {file = "mypy-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:064e2ff508e5464b4bd807a7c1625bc5047c5022b85c70f030680e18f37273a5"}, + {file = "mypy-1.17.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:70401bbabd2fa1aa7c43bb358f54037baf0586f41e83b0ae67dd0534fc64edfd"}, + {file = "mypy-1.17.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e92bdc656b7757c438660f775f872a669b8ff374edc4d18277d86b63edba6b8b"}, + {file = "mypy-1.17.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c1fdf4abb29ed1cb091cf432979e162c208a5ac676ce35010373ff29247bcad5"}, + {file = "mypy-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:ff2933428516ab63f961644bc49bc4cbe42bbffb2cd3b71cc7277c07d16b1a8b"}, + {file = "mypy-1.17.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:69e83ea6553a3ba79c08c6e15dbd9bfa912ec1e493bf75489ef93beb65209aeb"}, + {file = "mypy-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1b16708a66d38abb1e6b5702f5c2c87e133289da36f6a1d15f6a5221085c6403"}, + {file = "mypy-1.17.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:89e972c0035e9e05823907ad5398c5a73b9f47a002b22359b177d40bdaee7056"}, + {file = "mypy-1.17.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:03b6d0ed2b188e35ee6d5c36b5580cffd6da23319991c49ab5556c023ccf1341"}, + {file = "mypy-1.17.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c837b896b37cd103570d776bda106eabb8737aa6dd4f248451aecf53030cdbeb"}, + {file = "mypy-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:665afab0963a4b39dff7c1fa563cc8b11ecff7910206db4b2e64dd1ba25aed19"}, + {file = "mypy-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:93378d3203a5c0800c6b6d850ad2f19f7a3cdf1a3701d3416dbf128805c6a6a7"}, + {file = "mypy-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:15d54056f7fe7a826d897789f53dd6377ec2ea8ba6f776dc83c2902b899fee81"}, + {file = "mypy-1.17.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:209a58fed9987eccc20f2ca94afe7257a8f46eb5df1fb69958650973230f91e6"}, + {file = "mypy-1.17.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:099b9a5da47de9e2cb5165e581f158e854d9e19d2e96b6698c0d64de911dd849"}, + {file = "mypy-1.17.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa6ffadfbe6994d724c5a1bb6123a7d27dd68fc9c059561cd33b664a79578e14"}, + {file = "mypy-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:9a2b7d9180aed171f033c9f2fc6c204c1245cf60b0cb61cf2e7acc24eea78e0a"}, + {file = "mypy-1.17.1-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:15a83369400454c41ed3a118e0cc58bd8123921a602f385cb6d6ea5df050c733"}, + {file = 
"mypy-1.17.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:55b918670f692fc9fba55c3298d8a3beae295c5cded0a55dccdc5bbead814acd"}, + {file = "mypy-1.17.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:62761474061feef6f720149d7ba876122007ddc64adff5ba6f374fda35a018a0"}, + {file = "mypy-1.17.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c49562d3d908fd49ed0938e5423daed8d407774a479b595b143a3d7f87cdae6a"}, + {file = "mypy-1.17.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:397fba5d7616a5bc60b45c7ed204717eaddc38f826e3645402c426057ead9a91"}, + {file = "mypy-1.17.1-cp314-cp314-win_amd64.whl", hash = "sha256:9d6b20b97d373f41617bd0708fd46aa656059af57f2ef72aa8c7d6a2b73b74ed"}, + {file = "mypy-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5d1092694f166a7e56c805caaf794e0585cabdbf1df36911c414e4e9abb62ae9"}, + {file = "mypy-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:79d44f9bfb004941ebb0abe8eff6504223a9c1ac51ef967d1263c6572bbebc99"}, + {file = "mypy-1.17.1-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b01586eed696ec905e61bd2568f48740f7ac4a45b3a468e6423a03d3788a51a8"}, + {file = "mypy-1.17.1-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:43808d9476c36b927fbcd0b0255ce75efe1b68a080154a38ae68a7e62de8f0f8"}, + {file = "mypy-1.17.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:feb8cc32d319edd5859da2cc084493b3e2ce5e49a946377663cc90f6c15fb259"}, + {file = "mypy-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d7598cf74c3e16539d4e2f0b8d8c318e00041553d83d4861f87c7a72e95ac24d"}, + {file = "mypy-1.17.1-py3-none-any.whl", hash = "sha256:a9f52c0351c21fe24c21d8c0eb1f62967b262d6729393397b6f443c3b773c3b9"}, + {file = "mypy-1.17.1.tar.gz", hash = "sha256:25e01ec741ab5bb3eec8ba9cdb0f769230368a22c959c4937360efb89b7e9f01"}, ] [package.dependencies] mypy_extensions = ">=1.0.0" +pathspec = ">=0.9.0" tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} typing_extensions = ">=4.6.0" @@ -2281,19 +2340,16 @@ test-extras = ["pytest-mpl", "pytest-randomly"] [[package]] name = "nodeenv" -version = "1.8.0" +version = "1.9.1" description = "Node.js virtual environment builder" optional = true -python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" groups = ["main"] files = [ - {file = "nodeenv-1.8.0-py2.py3-none-any.whl", hash = "sha256:df865724bb3c3adc86b3876fa209771517b0cfe596beff01a92700e0e8be4cec"}, - {file = "nodeenv-1.8.0.tar.gz", hash = "sha256:d51e0c37e64fbf47d017feac3145cdbb58836d7eee8c6f6d3b6880c5456227d2"}, + {file = "nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9"}, + {file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"}, ] -[package.dependencies] -setuptools = "*" - [[package]] name = "packaging" version = "25.0" @@ -2306,6 +2362,18 @@ files = [ {file = "packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"}, ] +[[package]] +name = "pathspec" +version = "0.12.1" +description = "Utility library for gitignore style pattern matching of file paths." 
+optional = true
+python-versions = ">=3.8"
+groups = ["main"]
+files = [
+    {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"},
+    {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"},
+]
+
 [[package]]
 name = "pefile"
 version = "2023.2.7"
@@ -2321,14 +2389,14 @@ files = [

 [[package]]
 name = "pip"
-version = "25.1.1"
+version = "25.2"
 description = "The PyPA recommended tool for installing Python packages."
 optional = false
 python-versions = ">=3.9"
 groups = ["main"]
 files = [
-    {file = "pip-25.1.1-py3-none-any.whl", hash = "sha256:2913a38a2abf4ea6b64ab507bd9e967f3b53dc1ede74b01b0931e1ce548751af"},
-    {file = "pip-25.1.1.tar.gz", hash = "sha256:3de45d411d308d5054c2168185d8da7f9a2cd753dbac8acbfa88a8909ecd9077"},
+    {file = "pip-25.2-py3-none-any.whl", hash = "sha256:6d67a2b4e7f14d8b31b8b52648866fa717f45a1eb70e83002f4331d07e953717"},
+    {file = "pip-25.2.tar.gz", hash = "sha256:578283f006390f85bb6282dffb876454593d637f5d1be494b5202ce4877e71f2"},
 ]

 [[package]]
@@ -2385,14 +2453,14 @@ tests = ["pytest (>=5.4.1)", "pytest-cov (>=2.8.1)", "pytest-mypy (>=0.8.0)", "p

 [[package]]
 name = "pre-commit"
-version = "4.2.0"
+version = "4.3.0"
 description = "A framework for managing and maintaining multi-language pre-commit hooks."
 optional = true
 python-versions = ">=3.9"
 groups = ["main"]
 files = [
-    {file = "pre_commit-4.2.0-py2.py3-none-any.whl", hash = "sha256:a009ca7205f1eb497d10b845e52c838a98b6cdd2102a6c8e4540e94ee75c58bd"},
-    {file = "pre_commit-4.2.0.tar.gz", hash = "sha256:601283b9757afd87d40c4c4a9b2b5de9637a8ea02eaff7adc2d0fb4e04841146"},
+    {file = "pre_commit-4.3.0-py2.py3-none-any.whl", hash = "sha256:2b0747ad7e6e967169136edffee14c16e148a778a54e4f967921aa1ebf2308d8"},
+    {file = "pre_commit-4.3.0.tar.gz", hash = "sha256:499fe450cc9d42e9d58e606262795ecb64dd05438943c62b66f6a8673da30b16"},
 ]

 [package.dependencies]
@@ -2639,25 +2707,25 @@ windows-terminal = ["colorama (>=0.4.6)"]

 [[package]]
 name = "pyinstaller"
-version = "6.14.2"
+version = "6.15.0"
 description = "PyInstaller bundles a Python application and all its dependencies into a single package."
 optional = true
-python-versions = "<3.14,>=3.8"
+python-versions = "<3.15,>=3.8"
 groups = ["main"]
 markers = "extra == \"dev\" and python_version <= \"3.12\""
 files = [
-    {file = "pyinstaller-6.14.2-py3-none-macosx_10_13_universal2.whl", hash = "sha256:d77d18bf5343a1afef2772393d7a489d4ec2282dee5bca549803fc0d74b78330"},
-    {file = "pyinstaller-6.14.2-py3-none-manylinux2014_aarch64.whl", hash = "sha256:3fa0c391e1300a9fd7752eb1ffe2950112b88fba9d2743eee2ef218a15f4705f"},
-    {file = "pyinstaller-6.14.2-py3-none-manylinux2014_i686.whl", hash = "sha256:077efb2d01d16d9c8fdda3ad52788f0fead2791c5cec9ed6ce058af7e26eb74b"},
-    {file = "pyinstaller-6.14.2-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:fdd2bd020a18736806a6bd5d3c4352f1209b427a96ad6c459d88aec1d90c4f21"},
-    {file = "pyinstaller-6.14.2-py3-none-manylinux2014_s390x.whl", hash = "sha256:03862c6b3cf7b16843d24b529f89cd4077cbe467883cd54ce7a81940d6da09d3"},
-    {file = "pyinstaller-6.14.2-py3-none-manylinux2014_x86_64.whl", hash = "sha256:78827a21ada2a848e98671852d20d74b2955b6e2aaf2359ed13a462e1a603d84"},
-    {file = "pyinstaller-6.14.2-py3-none-musllinux_1_1_aarch64.whl", hash = "sha256:185710ab1503dfdfa14c43237d394d96ac183422d588294be42531480dfa6c38"},
-    {file = "pyinstaller-6.14.2-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:6c673a7e761bd4a2560cfd5dbe1ccdcfe2dff304b774e6e5242fc5afed953661"},
-    {file = "pyinstaller-6.14.2-py3-none-win32.whl", hash = "sha256:1697601aa788e3a52f0b5e620b4741a34b82e6f222ec6e1318b3a1349f566bb2"},
-    {file = "pyinstaller-6.14.2-py3-none-win_amd64.whl", hash = "sha256:e10e0e67288d6dcb5898a917dd1d4272aa0ff33f197ad49a0e39618009d63ed9"},
-    {file = "pyinstaller-6.14.2-py3-none-win_arm64.whl", hash = "sha256:69fd11ca57e572387826afaa4a1b3d4cb74927d76f231f0308c0bd7872ca5ac1"},
-    {file = "pyinstaller-6.14.2.tar.gz", hash = "sha256:142cce0719e79315f0cc26400c2e5c45d9b6b17e7e0491fee444a9f8f16f4917"},
+    {file = "pyinstaller-6.15.0-py3-none-macosx_10_13_universal2.whl", hash = "sha256:9f00c71c40148cd1e61695b2c6f1e086693d3bcf9bfa22ab513aa4254c3b966f"},
+    {file = "pyinstaller-6.15.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:cbcc8eb77320c60722030ac875883b564e00768fe3ff1721c7ba3ad0e0a277e9"},
+    {file = "pyinstaller-6.15.0-py3-none-manylinux2014_i686.whl", hash = "sha256:c33e6302bc53db2df1104ed5566bd980b3e0ee7f18416a6e3caa908c12a54542"},
+    {file = "pyinstaller-6.15.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:eb902d0fed3bb1f8b7190dc4df5c11f3b59505767e0d56d1ed782b853938bbf3"},
+    {file = "pyinstaller-6.15.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:b4df862adae7cf1f08eff53c43ace283822447f7f528f72e4f94749062712f15"},
+    {file = "pyinstaller-6.15.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:b9ebf16ed0f99016ae8ae5746dee4cb244848a12941539e62ce2eea1df5a3f95"},
+    {file = "pyinstaller-6.15.0-py3-none-musllinux_1_1_aarch64.whl", hash = "sha256:22193489e6a22435417103f61e7950363bba600ef36ec3ab1487303668c81092"},
+    {file = "pyinstaller-6.15.0-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:18f743069849dbaee3e10900385f35795a5743eabab55e99dcc42f204e40a0db"},
+    {file = "pyinstaller-6.15.0-py3-none-win32.whl", hash = "sha256:60da8f1b5071766b45c0f607d8bc3d7e59ba2c3b262d08f2e4066ba65f3544a2"},
+    {file = "pyinstaller-6.15.0-py3-none-win_amd64.whl", hash = "sha256:cbea297e16eeda30b41c300d6ec2fd2abea4dbd8d8a32650eeec36431c94fcd9"},
+    {file = "pyinstaller-6.15.0-py3-none-win_arm64.whl", hash = "sha256:f43c035621742cf2d19b84308c60e4e44e72c94786d176b8f6adcde351b5bd98"},
+    {file = "pyinstaller-6.15.0.tar.gz", hash = "sha256:a48fc4644ee4aa2aa2a35e7b51f496f8fbd7eecf6a2150646bbf1613ad07bc2d"},
 ]

 [package.dependencies]
@@ -2666,7 +2734,7 @@ importlib_metadata = {version = ">=4.6", markers = "python_version < \"3.10\""}
 macholib = {version = ">=1.8", markers = "sys_platform == \"darwin\""}
 packaging = ">=22.0"
 pefile = {version = ">=2022.5.30,<2024.8.26 || >2024.8.26", markers = "sys_platform == \"win32\""}
-pyinstaller-hooks-contrib = ">=2025.5"
+pyinstaller-hooks-contrib = ">=2025.8"
 pywin32-ctypes = {version = ">=0.2.1", markers = "sys_platform == \"win32\""}
 setuptools = ">=42.0.0"
@@ -2676,15 +2744,15 @@ hook-testing = ["execnet (>=1.5.0)", "psutil", "pytest (>=2.7.3)"]

 [[package]]
 name = "pyinstaller-hooks-contrib"
-version = "2025.5"
+version = "2025.8"
 description = "Community maintained hooks for PyInstaller"
 optional = true
 python-versions = ">=3.8"
 groups = ["main"]
 markers = "extra == \"dev\" and python_version <= \"3.12\""
 files = [
-    {file = "pyinstaller_hooks_contrib-2025.5-py3-none-any.whl", hash = "sha256:ebfae1ba341cb0002fb2770fad0edf2b3e913c2728d92df7ad562260988ca373"},
-    {file = "pyinstaller_hooks_contrib-2025.5.tar.gz", hash = "sha256:707386770b8fe066c04aad18a71bc483c7b25e18b4750a756999f7da2ab31982"},
+    {file = "pyinstaller_hooks_contrib-2025.8-py3-none-any.whl", hash = "sha256:8d0b8cfa0cb689a619294ae200497374234bd4e3994b3ace2a4442274c899064"},
+    {file = "pyinstaller_hooks_contrib-2025.8.tar.gz", hash = "sha256:3402ad41dfe9b5110af134422e37fc5d421ba342c6cb980bd67cb30b7415641c"},
 ]

 [package.dependencies]
@@ -2842,27 +2910,33 @@ six = ">=1.5"

 [[package]]
 name = "pywin32"
-version = "306"
+version = "311"
 description = "Python for Window Extensions"
 optional = false
 python-versions = "*"
 groups = ["main"]
 markers = "platform_system == \"Windows\""
 files = [
-    {file = "pywin32-306-cp310-cp310-win32.whl", hash = "sha256:06d3420a5155ba65f0b72f2699b5bacf3109f36acbe8923765c22938a69dfc8d"},
-    {file = "pywin32-306-cp310-cp310-win_amd64.whl", hash = "sha256:84f4471dbca1887ea3803d8848a1616429ac94a4a8d05f4bc9c5dcfd42ca99c8"},
-    {file = "pywin32-306-cp311-cp311-win32.whl", hash = "sha256:e65028133d15b64d2ed8f06dd9fbc268352478d4f9289e69c190ecd6818b6407"},
-    {file = "pywin32-306-cp311-cp311-win_amd64.whl", hash = "sha256:a7639f51c184c0272e93f244eb24dafca9b1855707d94c192d4a0b4c01e1100e"},
-    {file = "pywin32-306-cp311-cp311-win_arm64.whl", hash = "sha256:70dba0c913d19f942a2db25217d9a1b726c278f483a919f1abfed79c9cf64d3a"},
-    {file = "pywin32-306-cp312-cp312-win32.whl", hash = "sha256:383229d515657f4e3ed1343da8be101000562bf514591ff383ae940cad65458b"},
-    {file = "pywin32-306-cp312-cp312-win_amd64.whl", hash = "sha256:37257794c1ad39ee9be652da0462dc2e394c8159dfd913a8a4e8eb6fd346da0e"},
-    {file = "pywin32-306-cp312-cp312-win_arm64.whl", hash = "sha256:5821ec52f6d321aa59e2db7e0a35b997de60c201943557d108af9d4ae1ec7040"},
-    {file = "pywin32-306-cp37-cp37m-win32.whl", hash = "sha256:1c73ea9a0d2283d889001998059f5eaaba3b6238f767c9cf2833b13e6a685f65"},
-    {file = "pywin32-306-cp37-cp37m-win_amd64.whl", hash = "sha256:72c5f621542d7bdd4fdb716227be0dd3f8565c11b280be6315b06ace35487d36"},
-    {file = "pywin32-306-cp38-cp38-win32.whl", hash = "sha256:e4c092e2589b5cf0d365849e73e02c391c1349958c5ac3e9d5ccb9a28e017b3a"},
-    {file = "pywin32-306-cp38-cp38-win_amd64.whl", hash = "sha256:e8ac1ae3601bee6ca9f7cb4b5363bf1c0badb935ef243c4733ff9a393b1690c0"},
-    {file = "pywin32-306-cp39-cp39-win32.whl", hash = "sha256:e25fd5b485b55ac9c057f67d94bc203f3f6595078d1fb3b458c9c28b7153a802"},
-    {file = "pywin32-306-cp39-cp39-win_amd64.whl", hash = "sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4"},
+    {file = "pywin32-311-cp310-cp310-win32.whl", hash = "sha256:d03ff496d2a0cd4a5893504789d4a15399133fe82517455e78bad62efbb7f0a3"},
+    {file = "pywin32-311-cp310-cp310-win_amd64.whl", hash = "sha256:797c2772017851984b97180b0bebe4b620bb86328e8a884bb626156295a63b3b"},
+    {file = "pywin32-311-cp310-cp310-win_arm64.whl", hash = "sha256:0502d1facf1fed4839a9a51ccbcc63d952cf318f78ffc00a7e78528ac27d7a2b"},
+    {file = "pywin32-311-cp311-cp311-win32.whl", hash = "sha256:184eb5e436dea364dcd3d2316d577d625c0351bf237c4e9a5fabbcfa5a58b151"},
+    {file = "pywin32-311-cp311-cp311-win_amd64.whl", hash = "sha256:3ce80b34b22b17ccbd937a6e78e7225d80c52f5ab9940fe0506a1a16f3dab503"},
+    {file = "pywin32-311-cp311-cp311-win_arm64.whl", hash = "sha256:a733f1388e1a842abb67ffa8e7aad0e70ac519e09b0f6a784e65a136ec7cefd2"},
+    {file = "pywin32-311-cp312-cp312-win32.whl", hash = "sha256:750ec6e621af2b948540032557b10a2d43b0cee2ae9758c54154d711cc852d31"},
+    {file = "pywin32-311-cp312-cp312-win_amd64.whl", hash = "sha256:b8c095edad5c211ff31c05223658e71bf7116daa0ecf3ad85f3201ea3190d067"},
+    {file = "pywin32-311-cp312-cp312-win_arm64.whl", hash = "sha256:e286f46a9a39c4a18b319c28f59b61de793654af2f395c102b4f819e584b5852"},
+    {file = "pywin32-311-cp313-cp313-win32.whl", hash = "sha256:f95ba5a847cba10dd8c4d8fefa9f2a6cf283b8b88ed6178fa8a6c1ab16054d0d"},
+    {file = "pywin32-311-cp313-cp313-win_amd64.whl", hash = "sha256:718a38f7e5b058e76aee1c56ddd06908116d35147e133427e59a3983f703a20d"},
+    {file = "pywin32-311-cp313-cp313-win_arm64.whl", hash = "sha256:7b4075d959648406202d92a2310cb990fea19b535c7f4a78d3f5e10b926eeb8a"},
+    {file = "pywin32-311-cp314-cp314-win32.whl", hash = "sha256:b7a2c10b93f8986666d0c803ee19b5990885872a7de910fc460f9b0c2fbf92ee"},
+    {file = "pywin32-311-cp314-cp314-win_amd64.whl", hash = "sha256:3aca44c046bd2ed8c90de9cb8427f581c479e594e99b5c0bb19b29c10fd6cb87"},
+    {file = "pywin32-311-cp314-cp314-win_arm64.whl", hash = "sha256:a508e2d9025764a8270f93111a970e1d0fbfc33f4153b388bb649b7eec4f9b42"},
+    {file = "pywin32-311-cp38-cp38-win32.whl", hash = "sha256:6c6f2969607b5023b0d9ce2541f8d2cbb01c4f46bc87456017cf63b73f1e2d8c"},
+    {file = "pywin32-311-cp38-cp38-win_amd64.whl", hash = "sha256:c8015b09fb9a5e188f83b7b04de91ddca4658cee2ae6f3bc483f0b21a77ef6cd"},
+    {file = "pywin32-311-cp39-cp39-win32.whl", hash = "sha256:aba8f82d551a942cb20d4a83413ccbac30790b50efb89a75e4f586ac0bb8056b"},
+    {file = "pywin32-311-cp39-cp39-win_amd64.whl", hash = "sha256:e0c4cfb0621281fe40387df582097fd796e80430597cb9944f0ae70447bacd91"},
+    {file = "pywin32-311-cp39-cp39-win_arm64.whl", hash = "sha256:62ea666235135fee79bb154e695f3ff67370afefd71bd7fea7512fc70ef31e3d"},
 ]

 [[package]]
@@ -2986,30 +3060,31 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"]

 [[package]]
 name = "ruff"
-version = "0.12.4"
+version = "0.12.11"
 description = "An extremely fast Python linter and code formatter, written in Rust."
 optional = true
 python-versions = ">=3.7"
 groups = ["main"]
 files = [
-    {file = "ruff-0.12.4-py3-none-linux_armv6l.whl", hash = "sha256:cb0d261dac457ab939aeb247e804125a5d521b21adf27e721895b0d3f83a0d0a"},
-    {file = "ruff-0.12.4-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:55c0f4ca9769408d9b9bac530c30d3e66490bd2beb2d3dae3e4128a1f05c7442"},
-    {file = "ruff-0.12.4-py3-none-macosx_11_0_arm64.whl", hash = "sha256:a8224cc3722c9ad9044da7f89c4c1ec452aef2cfe3904365025dd2f51daeae0e"},
-    {file = "ruff-0.12.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e9949d01d64fa3672449a51ddb5d7548b33e130240ad418884ee6efa7a229586"},
-    {file = "ruff-0.12.4-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:be0593c69df9ad1465e8a2d10e3defd111fdb62dcd5be23ae2c06da77e8fcffb"},
-    {file = "ruff-0.12.4-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7dea966bcb55d4ecc4cc3270bccb6f87a337326c9dcd3c07d5b97000dbff41c"},
-    {file = "ruff-0.12.4-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:afcfa3ab5ab5dd0e1c39bf286d829e042a15e966b3726eea79528e2e24d8371a"},
-    {file = "ruff-0.12.4-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c057ce464b1413c926cdb203a0f858cd52f3e73dcb3270a3318d1630f6395bb3"},
-    {file = "ruff-0.12.4-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e64b90d1122dc2713330350626b10d60818930819623abbb56535c6466cce045"},
-    {file = "ruff-0.12.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2abc48f3d9667fdc74022380b5c745873499ff827393a636f7a59da1515e7c57"},
-    {file = "ruff-0.12.4-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:2b2449dc0c138d877d629bea151bee8c0ae3b8e9c43f5fcaafcd0c0d0726b184"},
-    {file = "ruff-0.12.4-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:56e45bb11f625db55f9b70477062e6a1a04d53628eda7784dce6e0f55fd549eb"},
-    {file = "ruff-0.12.4-py3-none-musllinux_1_2_i686.whl", hash = "sha256:478fccdb82ca148a98a9ff43658944f7ab5ec41c3c49d77cd99d44da019371a1"},
-    {file = "ruff-0.12.4-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:0fc426bec2e4e5f4c4f182b9d2ce6a75c85ba9bcdbe5c6f2a74fcb8df437df4b"},
-    {file = "ruff-0.12.4-py3-none-win32.whl", hash = "sha256:4de27977827893cdfb1211d42d84bc180fceb7b72471104671c59be37041cf93"},
-    {file = "ruff-0.12.4-py3-none-win_amd64.whl", hash = "sha256:fe0b9e9eb23736b453143d72d2ceca5db323963330d5b7859d60d101147d461a"},
-    {file = "ruff-0.12.4-py3-none-win_arm64.whl", hash = "sha256:0618ec4442a83ab545e5b71202a5c0ed7791e8471435b94e655b570a5031a98e"},
-    {file = "ruff-0.12.4.tar.gz", hash = "sha256:13efa16df6c6eeb7d0f091abae50f58e9522f3843edb40d56ad52a5a4a4b6873"},
+    {file = "ruff-0.12.11-py3-none-linux_armv6l.whl", hash = "sha256:93fce71e1cac3a8bf9200e63a38ac5c078f3b6baebffb74ba5274fb2ab276065"},
+    {file = "ruff-0.12.11-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:b8e33ac7b28c772440afa80cebb972ffd823621ded90404f29e5ab6d1e2d4b93"},
+    {file = "ruff-0.12.11-py3-none-macosx_11_0_arm64.whl", hash = "sha256:d69fb9d4937aa19adb2e9f058bc4fbfe986c2040acb1a4a9747734834eaa0bfd"},
+    {file = "ruff-0.12.11-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:411954eca8464595077a93e580e2918d0a01a19317af0a72132283e28ae21bee"},
+    {file = "ruff-0.12.11-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6a2c0a2e1a450f387bf2c6237c727dd22191ae8c00e448e0672d624b2bbd7fb0"},
+    {file = "ruff-0.12.11-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ca4c3a7f937725fd2413c0e884b5248a19369ab9bdd850b5781348ba283f644"},
+    {file = "ruff-0.12.11-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:4d1df0098124006f6a66ecf3581a7f7e754c4df7644b2e6704cd7ca80ff95211"},
+    {file = "ruff-0.12.11-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5a8dd5f230efc99a24ace3b77e3555d3fbc0343aeed3fc84c8d89e75ab2ff793"},
+    {file = "ruff-0.12.11-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4dc75533039d0ed04cd33fb8ca9ac9620b99672fe7ff1533b6402206901c34ee"},
+    {file = "ruff-0.12.11-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4fc58f9266d62c6eccc75261a665f26b4ef64840887fc6cbc552ce5b29f96cc8"},
+    {file = "ruff-0.12.11-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:5a0113bd6eafd545146440225fe60b4e9489f59eb5f5f107acd715ba5f0b3d2f"},
+    {file = "ruff-0.12.11-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:0d737b4059d66295c3ea5720e6efc152623bb83fde5444209b69cd33a53e2000"},
+    {file = "ruff-0.12.11-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:916fc5defee32dbc1fc1650b576a8fed68f5e8256e2180d4d9855aea43d6aab2"},
+    {file = "ruff-0.12.11-py3-none-musllinux_1_2_i686.whl", hash = "sha256:c984f07d7adb42d3ded5be894fb4007f30f82c87559438b4879fe7aa08c62b39"},
+    {file = "ruff-0.12.11-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:e07fbb89f2e9249f219d88331c833860489b49cdf4b032b8e4432e9b13e8a4b9"},
+    {file = "ruff-0.12.11-py3-none-win32.whl", hash = "sha256:c792e8f597c9c756e9bcd4d87cf407a00b60af77078c96f7b6366ea2ce9ba9d3"},
+    {file = "ruff-0.12.11-py3-none-win_amd64.whl", hash = "sha256:a3283325960307915b6deb3576b96919ee89432ebd9c48771ca12ee8afe4a0fd"},
+    {file = "ruff-0.12.11-py3-none-win_arm64.whl", hash = "sha256:bae4d6e6a2676f8fb0f98b74594a048bae1b944aab17e9f5d504062303c6dbea"},
+    {file = "ruff-0.12.11.tar.gz", hash = "sha256:c6b09ae8426a65bbee5425b9d0b82796dbb07cb1af045743c79bfb163001165d"},
 ]

 [[package]]
@@ -3319,14 +3394,14 @@ files = [

 [[package]]
 name = "types-aiofiles"
-version = "24.1.0.20250708"
+version = "24.1.0.20250822"
 description = "Typing stubs for aiofiles"
 optional = true
 python-versions = ">=3.9"
 groups = ["main"]
 files = [
-    {file = "types_aiofiles-24.1.0.20250708-py3-none-any.whl", hash = "sha256:07f8f06465fd415d9293467d1c66cd074b2c3b62b679e26e353e560a8cf63720"},
-    {file = "types_aiofiles-24.1.0.20250708.tar.gz", hash = "sha256:c8207ed7385491ce5ba94da02658164ebd66b69a44e892288c9f20cbbf5284ff"},
+    {file = "types_aiofiles-24.1.0.20250822-py3-none-any.whl", hash = "sha256:0ec8f8909e1a85a5a79aed0573af7901f53120dd2a29771dd0b3ef48e12328b0"},
+    {file = "types_aiofiles-24.1.0.20250822.tar.gz", hash = "sha256:9ab90d8e0c307fe97a7cf09338301e3f01a163e39f3b529ace82466355c84a7b"},
 ]

 [[package]]
@@ -3343,38 +3418,38 @@ files = [

 [[package]]
 name = "types-pyyaml"
-version = "6.0.12.20250516"
+version = "6.0.12.20250822"
 description = "Typing stubs for PyYAML"
 optional = true
 python-versions = ">=3.9"
 groups = ["main"]
 files = [
-    {file = "types_pyyaml-6.0.12.20250516-py3-none-any.whl", hash = "sha256:8478208feaeb53a34cb5d970c56a7cd76b72659442e733e268a94dc72b2d0530"},
-    {file = "types_pyyaml-6.0.12.20250516.tar.gz", hash = "sha256:9f21a70216fc0fa1b216a8176db5f9e0af6eb35d2f2932acb87689d03a5bf6ba"},
+    {file = "types_pyyaml-6.0.12.20250822-py3-none-any.whl", hash = "sha256:1fe1a5e146aa315483592d292b72a172b65b946a6d98aa6ddd8e4aa838ab7098"},
+    {file = "types_pyyaml-6.0.12.20250822.tar.gz", hash = "sha256:259f1d93079d335730a9db7cff2bcaf65d7e04b4a56b5927d49a612199b59413"},
 ]

 [[package]]
 name = "types-setuptools"
-version = "80.9.0.20250529"
+version = "80.9.0.20250822"
 description = "Typing stubs for setuptools"
 optional = true
 python-versions = ">=3.9"
 groups = ["main"]
 files = [
-    {file = "types_setuptools-80.9.0.20250529-py3-none-any.whl", hash = "sha256:00dfcedd73e333a430e10db096e4d46af93faf9314f832f13b6bbe3d6757e95f"},
-    {file = "types_setuptools-80.9.0.20250529.tar.gz", hash = "sha256:79e088ba0cba2186c8d6499cbd3e143abb142d28a44b042c28d3148b1e353c91"},
+    {file = "types_setuptools-80.9.0.20250822-py3-none-any.whl", hash = "sha256:53bf881cb9d7e46ed12c76ef76c0aaf28cfe6211d3fab12e0b83620b1a8642c3"},
+    {file = "types_setuptools-80.9.0.20250822.tar.gz", hash = "sha256:070ea7716968ec67a84c7f7768d9952ff24d28b65b6594797a464f1b3066f965"},
 ]

 [[package]]
 name = "typing-extensions"
-version = "4.14.1"
+version = "4.15.0"
 description = "Backported and Experimental Type Hints for Python 3.9+"
 optional = false
 python-versions = ">=3.9"
 groups = ["main"]
 files = [
-    {file = "typing_extensions-4.14.1-py3-none-any.whl", hash = "sha256:d1e1e3b58374dc93031d6eda2420a48ea44a36c2b4766a4fdeb3710755731d76"},
-    {file = "typing_extensions-4.14.1.tar.gz", hash = "sha256:38b39f4aeeab64884ce9f74c94263ef78f3c22467c8724005483154c26648d36"},
+    {file = "typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548"},
+    {file = "typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466"},
 ]

 [[package]]
@@ -3787,4 +3862,4 @@ upnp = ["miniupnpc"]
 [metadata]
 lock-version = "2.1"
 python-versions = ">=3.9, <4, !=3.9.0, !=3.9.1"
-content-hash = "b1eb04b2b07d3bca51903d8ad3928cdf5480436954045d161ff2d35a6a4a7559"
+content-hash = "650f0c98ff03cf897ee793ebb1e26bee8ae7b3c99bab5295fee7736bc19e346f"
diff --git a/pyproject.toml b/pyproject.toml
index 8ae0928326c4..bd0d1f297330 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -20,19 +20,20 @@ version = "0.0.0"

 [project.scripts]
 chia = "chia.cmds.chia:main"
 chia_daemon = "chia.daemon.server:main"
-chia_wallet = "chia.server.start_wallet:main"
-chia_full_node = "chia.server.start_full_node:main"
-chia_harvester = "chia.server.start_harvester:main"
-chia_farmer = "chia.server.start_farmer:main"
-chia_introducer = "chia.server.start_introducer:main"
+chia_wallet = "chia.wallet.start_wallet:main"
+chia_full_node = "chia.full_node.start_full_node:main"
+chia_harvester = "chia.harvester.start_harvester:main"
+chia_farmer = "chia.farmer.start_farmer:main"
+chia_introducer = "chia.introducer.start_introducer:main"
 chia_crawler = "chia.seeder.start_crawler:main"
 chia_seeder = "chia.seeder.dns_server:main"
-chia_timelord = "chia.server.start_timelord:main"
+chia_timelord = "chia.timelord.start_timelord:main"
 chia_timelord_launcher = "chia.timelord.timelord_launcher:main"
 chia_full_node_simulator = "chia.simulator.start_simulator:main"
-chia_data_layer = "chia.server.start_data_layer:main"
+chia_data_layer = "chia.data_layer.start_data_layer:main"
 chia_data_layer_http = "chia.data_layer.data_layer_server:main"
 chia_data_layer_s3_plugin = "chia.data_layer.s3_plugin_service:run_server"
+chia_solver = "chia.solver.start_solver:main"

 [[tool.poetry.source]]
 name = "chia"
@@ -53,7 +54,7 @@ boto3 = ">=1.35.43"  # AWS S3 for Data Layer S3 plugin
 chiabip158 = ">=1.5.2"  # bip158-style wallet filters
 chiapos = ">=2.0.10"  # proof of space
 chia-puzzles-py = ">=0.20.1"
-chia_rs = { git = "https://github.com/chia-network/chia_rs", rev = "long_lived/initial_datalayer", subdirectory = "wheel/" }
+chia_rs = ">=0.30, <0.31"
 chiavdf = ">=1.1.10"  # timelord and vdf verification
 click = ">=8.1.7"  # For the CLI
 clvm = ">=0.9.14"
diff --git a/ruff.toml b/ruff.toml
index d05f462a1bd4..2b29977f9794 100644
--- a/ruff.toml
+++ b/ruff.toml
@@ -61,13 +61,10 @@ ignore = [
     # Should probably fix these
     "PLR6301",  # no-self-use
     "PLR2004",  # magic-value-comparison
-    "PLR1704",  # redefined-argument-from-local
-    "PLR5501",  # collapsible-else-if

     # Pylint warning
     "PLW1641",  # eq-without-hash
     # Should probably fix these
-    "PLW2901",  # redefined-loop-name
     "PLW1514",  # unspecified-encoding
     "PLW0603",  # global-statement
@@ -80,10 +77,6 @@ ignore = [
     # flake8-implicit-str-concat
     "ISC003",  # explicit-string-concatenation

-    # pyupgrade
-    # Should probably fix these
-    "UP006",  # non-pep585-annotation
-
     # Ruff Specific
     # This code is problematic because using instantiated types as defaults is so common across the codebase.
@@ -93,9 +86,6 @@ ignore = [
     "RUF056",  # falsy-dict-get-fallback
     # Should probably fix this
     "RUF029",  # unused-async
-    "RUF043",  # pytest-raises-ambiguous-pattern
-    "RUF046",  # unnecessary-cast-to-int
-    "RUF052",  # used-dummy-variable

     # Security linter
     "S603",  # subprocess-without-shell-equals-true
diff --git a/tach.toml b/tach.toml
index 3bcd7dd9d269..e05edea91695 100644
--- a/tach.toml
+++ b/tach.toml
@@ -32,21 +32,8 @@ depends_on = [
     "chia.types",
     "chia.util",
     { path = "chia.ssl", deprecated = false },
-    # these ones can be removed by getting rid of `chia.server.aliases`
-    # and/or the `start_*.py` scripts, which can go in their respective modules
     { path = "chia.rpc", deprecated = false },
-    { path = "chia.full_node", deprecated = false },
-    { path = "chia.data_layer", deprecated = false },
-    { path = "chia.farmer", deprecated = false },
-    { path = "chia.simulator", deprecated = false },
     { path = "chia.daemon", deprecated = false },
-    { path = "chia.harvester", deprecated = false },
-    { path = "chia.timelord", deprecated = false },
-    { path = "chia.introducer", deprecated = false },
-    { path = "chia.consensus", deprecated = false },
-    { path = "chia.wallet", deprecated = false },
-    { path = "chia.apis", deprecated = false },
-    { path = "chia.seeder", deprecated = false },
 ]

 [[modules]]
@@ -57,13 +44,16 @@ depends_on = [
     { path = "chia.server", deprecated = false },
     { path = "chia.plotting", deprecated = false },
     { path = "chia.full_node", deprecated = false },
+    { path = "chia.introducer", deprecated = false },
     { path = "chia.wallet", deprecated = false },
+    { path = "chia.farmer", deprecated = false },
     { path = "chia.seeder", deprecated = false },
     { path = "chia.consensus", deprecated = false },
     { path = "chia.apis", deprecated = false },
     { path = "chia.cmds", deprecated = false },
     { path = "chia.protocols", deprecated = false },
     { path = "chia.timelord", deprecated = false },
+    { path = "chia.harvester", deprecated = false },
     { path = "chia.daemon", deprecated = false },
     { path = "chia.ssl", deprecated = false },
     { path = "chia.rpc", deprecated = false },
@@ -72,6 +62,7 @@ depends_on = [
 [[modules]]
 path = "chia.full_node"
 depends_on = [
+    "chia.apis",
     "chia.types",
     "chia.util",
     "chia.consensus",
@@ -83,6 +74,8 @@ depends_on = [
 [[modules]]
 path = "chia.data_layer"
 depends_on = [
+    "chia.apis",
+    "chia.ssl",
     "chia.types",
     "chia.util",
     "chia.protocols",
@@ -108,6 +101,8 @@ depends_on = [
 [[modules]]
 path = "chia.introducer"
 depends_on = [
+    "chia.apis",
+    "chia.full_node",
"chia.types", "chia.util", { path = "chia.rpc", deprecated = false }, @@ -128,6 +123,7 @@ depends_on = [ [[modules]] path = "chia.farmer" depends_on = [ + "chia.apis", "chia.types", "chia.util", { path = "chia.protocols", deprecated = false }, @@ -156,6 +152,7 @@ depends_on = ["chia.util"] [[modules]] path = "chia.timelord" depends_on = [ + "chia.apis", "chia.types", "chia.util", { path = "chia.server", deprecated = false }, @@ -183,6 +180,7 @@ depends_on = [ [[modules]] path = "chia.harvester" depends_on = [ + "chia.apis", "chia.types", "chia.util", { path = "chia.protocols", deprecated = false }, @@ -230,6 +228,8 @@ depends_on = [ [[modules]] path = "chia.wallet" depends_on = [ + "chia.apis", + "chia.simulator", "chia.types", "chia.util", { path = "chia.server", deprecated = false }, diff --git a/tools/cpu_utilization.py b/tools/cpu_utilization.py index 41333f014c68..960e11c2807b 100644 --- a/tools/cpu_utilization.py +++ b/tools/cpu_utilization.py @@ -83,7 +83,6 @@ def main(pid: int, output: str, threads: bool) -> None: out.write(f" {(c[row].system_time - c[row - 1].system_time) * 100 / time_delta:6.2f}% ") else: out.write(" 0.00% 0.00% ") - row += 1 out.write("\n") with open("plot-cpu.gnuplot", "w+") as out: diff --git a/tools/validate_rpcs.py b/tools/validate_rpcs.py index aa77347f458d..d93ef190b46d 100755 --- a/tools/validate_rpcs.py +++ b/tools/validate_rpcs.py @@ -3,7 +3,9 @@ from __future__ import annotations import asyncio +import logging import time +from itertools import chain from pathlib import Path from typing import Any, Optional @@ -18,6 +20,8 @@ from chia.util.task_referencer import create_referenced_task DEFAULT_PIPELINE_DEPTH: int = 10 +log = logging.getLogger("validate_rpcs") +logging.basicConfig(level=logging.INFO, format="%(asctime)s %(levelname)-8s %(message)s") def get_height_to_hash_filename(root_path: Path, config: dict[str, Any]) -> Path: @@ -40,7 +44,7 @@ async def get_height_to_hash_bytes(root_path: Path, config: dict[str, Any]) -> b return await f.read() -def get_block_hash_from_height(height: int, height_to_hash: bytes) -> bytes32: +def get_block_hash_for_height(height: int, height_to_hash: bytes) -> bytes32: """ Get the block header hash from the height-to-hash database. 
""" @@ -154,9 +158,11 @@ async def node_spends_with_conditions( height: int, ) -> None: try: - await node_client.get_block_spends_with_conditions(block_hash) + res = await node_client.get_block_spends_with_conditions(block_hash) + for c in res: + c.coin_spend.get_hash() # Ensure CoinSpend is valid except Exception as e: - print(f"ERROR: [{height}] get_block_spends_with_conditions returned invalid result") + log.error(f"ERROR: [{height}] get_block_spends_with_conditions returned invalid result") raise e @@ -166,9 +172,11 @@ async def node_block_spends( height: int, ) -> None: try: - await node_client.get_block_spends(block_hash) + res = await node_client.get_block_spends(block_hash) + for c in res: + c.get_hash() # Ensure CoinSpend is valid except Exception as e: - print(f"ERROR: [{height}] get_block_spends returned invalid result") + log.error(f"ERROR: [{height}] get_block_spends returned invalid result") raise e @@ -178,9 +186,12 @@ async def node_additions_removals( height: int, ) -> None: try: - await node_client.get_additions_and_removals(block_hash) + add, rem = await node_client.get_additions_and_removals(block_hash) + for coin in chain(add, rem): + coin.get_hash() + except Exception as e: - print(f"ERROR: [{height}] get_additions_and_removals returned invalid result") + log.error(f"ERROR: [{height}] get_additions_and_removals returned invalid result") raise e @@ -212,7 +223,7 @@ async def cli_async( height_to_hash_bytes: bytes = await get_height_to_hash_bytes(root_path=root_path, config=config) - print("block header hashes loaded from height-to-hash file.") + log.info("block header hashes loaded from height-to-hash file.") # Set initial values for the loop @@ -224,7 +235,7 @@ async def cli_async( start_time: float = cycle_start def add_tasks_for_height(height: int) -> None: - block_header_hash = get_block_hash_from_height(height, height_to_hash_bytes) + block_header_hash = get_block_hash_for_height(height, height_to_hash_bytes) # Create tasks for each RPC call based on the flags if spends_with_conditions: pipeline.add( @@ -238,13 +249,13 @@ def add_tasks_for_height(height: int) -> None: for i in range(start_height, end_height + 1): add_tasks_for_height(height=i) # Make Status Updates. - if len(pipeline) >= pipeline_depth: + while len(pipeline) >= pipeline_depth: done, pipeline = await asyncio.wait(pipeline, return_when=asyncio.FIRST_COMPLETED) completed_requests += len(done) now = time.monotonic() if cycle_start + 5 < now: time_taken = now - cycle_start - print( + log.info( f"Processed {completed_requests} RPCs in {time_taken:.2f}s, " f"{time_taken / completed_requests:.4f}s per RPC " f"({i - start_height} Blocks completed out of {end_height - start_height})" @@ -253,7 +264,7 @@ def add_tasks_for_height(height: int) -> None: cycle_start = now # Wait for any remaining tasks to complete - print(f"Waiting for {len(pipeline)} remaining tasks to complete...") + log.info(f"Waiting for {len(pipeline)} remaining tasks to complete...") if pipeline: await asyncio.gather(*pipeline)