diff --git a/.github/workflows/check-btcli-tests.yml b/.github/workflows/check-btcli-tests.yml
new file mode 100644
index 0000000000..1307774742
--- /dev/null
+++ b/.github/workflows/check-btcli-tests.yml
@@ -0,0 +1,191 @@
+name: Bittensor BTCLI Test
+
+permissions:
+ pull-requests: write
+ contents: read
+
+concurrency:
+ group: e2e-cli-${{ github.ref }}
+ cancel-in-progress: true
+
+on:
+ pull_request:
+ branches:
+ - devnet
+ - devnet-ready
+ - testnet
+ - testnet-ready
+ - main
+ types: [opened, synchronize, reopened, labeled, unlabeled]
+
+env:
+ CARGO_TERM_COLOR: always
+ VERBOSE: ${{ github.event.inputs.verbose }}
+
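+# Job flow: apply-label-to-new-pr tags newly opened/reopened PRs with
+# run-bittensor-cli-tests, check-labels gates on that label, find-e2e-tests
+# enumerates the btcli e2e test files, pull-docker-image caches the localnet
+# image, and run-e2e-tests fans out one matrix job per test file.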
+jobs:
+ apply-label-to-new-pr:
+ runs-on: ubuntu-latest
+ if: ${{ github.event.pull_request.draft == false }}
+ outputs:
+ should_continue_cli: ${{ steps.check.outputs.should_continue_cli }}
+ steps:
+ - name: Check
+ id: check
+ run: |
+ ACTION="${{ github.event.action }}"
+ if [[ "$ACTION" == "opened" || "$ACTION" == "reopened" ]]; then
+ echo "should_continue_cli=true" >> $GITHUB_OUTPUT
+ else
+ echo "should_continue_cli=false" >> $GITHUB_OUTPUT
+ fi
+ shell: bash
+
+ - name: Add label
+ if: steps.check.outputs.should_continue_cli == 'true'
+ uses: actions-ecosystem/action-add-labels@v1
+ with:
+ github_token: ${{ secrets.GITHUB_TOKEN }}
+ labels: run-bittensor-cli-tests
+
+ check-labels:
+ needs: apply-label-to-new-pr
+ runs-on: ubuntu-latest
+ if: always()
+ outputs:
+ run-cli-tests: ${{ steps.get-labels.outputs.run-cli-tests }}
+ steps:
+ - name: Check out repository
+ uses: actions/checkout@v4
+
+ - name: Get labels from PR
+ id: get-labels
+ run: |
+ LABELS=$(gh pr view ${{ github.event.pull_request.number }} --json labels --jq '.labels[].name')
+ echo "Current labels: $LABELS"
+ if echo "$LABELS" | grep -q "run-bittensor-cli-tests"; then
+ echo "run-cli-tests=true" >> $GITHUB_ENV
+ echo "::set-output name=run-cli-tests::true"
+ else
+ echo "run-cli-tests=false" >> $GITHUB_ENV
+ echo "::set-output name=run-cli-tests::false"
+ fi
+ env:
+ GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+
+ find-e2e-tests:
+ needs: check-labels
+ if: always() && needs.check-labels.outputs.run-cli-tests == 'true'
+ runs-on: ubuntu-latest
+ outputs:
+ test-files: ${{ steps.get-tests.outputs.test-files }}
+ steps:
+ - name: Clone btcli repo
+ working-directory: ${{ github.workspace }}
+ run: git clone https://github.com/opentensor/btcli.git
+
+ - name: Checkout
+ working-directory: ${{ github.workspace }}/btcli
+ run: git checkout staging
+
+ - name: Install dependencies
+ run: sudo apt-get install -y jq
+
+ - name: Find e2e test files
+ id: get-tests
+ run: |
+ test_files=$(find ${{ github.workspace }}/btcli/tests/e2e_tests -name "test*.py" | jq -R -s -c 'split("\n") | map(select(. != ""))')
+ echo "::set-output name=test-files::$test_files"
+ shell: bash
+
+ pull-docker-image:
+ needs: check-labels
+ runs-on: ubuntu-latest
+ if: always() && needs.check-labels.outputs.run-cli-tests == 'true'
+ steps:
+ - name: Log in to GitHub Container Registry
+ run: echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u $GITHUB_ACTOR --password-stdin
+
+ - name: Pull Docker Image
+ run: docker pull ghcr.io/opentensor/subtensor-localnet:latest
+
+ - name: Save Docker Image to Cache
+ run: docker save -o subtensor-localnet.tar ghcr.io/opentensor/subtensor-localnet:latest
+
+ - name: Upload Docker Image as Artifact
+ uses: actions/upload-artifact@v4
+ with:
+ name: subtensor-localnet
+ path: subtensor-localnet.tar
+
+ # main job
+ run-e2e-tests:
+ needs:
+ - check-labels
+ - find-e2e-tests
+ - pull-docker-image
+
+ if: always() && needs.check-labels.outputs.run-cli-tests == 'true'
+ runs-on: ubuntu-latest
+ strategy:
+ fail-fast: false
+ max-parallel: 16
+ matrix:
+ rust-branch:
+ - stable
+ rust-target:
+ - x86_64-unknown-linux-gnu
+ os:
+ - ubuntu-latest
+ test-file: ${{ fromJson(needs.find-e2e-tests.outputs.test-files) }}
+
+ env:
+ RELEASE_NAME: development
+ RUSTV: ${{ matrix.rust-branch }}
+ RUST_BACKTRACE: full
+ RUST_BIN_DIR: target/${{ matrix.rust-target }}
+ TARGET: ${{ matrix.rust-target }}
+
+ timeout-minutes: 60
+ name: "cli: ${{ matrix.test-file }}"
+ steps:
+ - name: Check-out repository
+ uses: actions/checkout@v4
+
+ - name: Install uv
+ uses: astral-sh/setup-uv@v5
+
+ - name: Create Python virtual environment
+ working-directory: ${{ github.workspace }}
+ run: uv venv ${{ github.workspace }}/venv
+
+ - name: Clone Bittensor CLI repo
+ working-directory: ${{ github.workspace }}
+ run: git clone https://github.com/opentensor/btcli.git
+
+ - name: Setup Bittensor-cli from cloned repo
+ working-directory: ${{ github.workspace }}/btcli
+ run: |
+ source ${{ github.workspace }}/venv/bin/activate
+ git checkout staging
+ git fetch origin staging
+ uv run --active pip install --upgrade pip
+ uv run --active pip install '.[dev]'
+ uv run --active pip install pytest
+
+ - name: Install uv dependencies
+ working-directory: ${{ github.workspace }}/btcli
+ run: uv sync --all-extras --dev
+
+ - name: Download Cached Docker Image
+ uses: actions/download-artifact@v4
+ with:
+ name: subtensor-localnet
+
+ - name: Load Docker Image
+ run: docker load -i subtensor-localnet.tar
+
+ - name: Run tests
+ working-directory: ${{ github.workspace }}/btcli
+ run: |
+ source ${{ github.workspace }}/venv/bin/activate
+ uv run pytest ${{ matrix.test-file }} -s
diff --git a/.github/workflows/check-docker-localnet.yml b/.github/workflows/check-docker-localnet.yml
new file mode 100644
index 0000000000..126b718d8c
--- /dev/null
+++ b/.github/workflows/check-docker-localnet.yml
@@ -0,0 +1,21 @@
+name: Build Localnet Docker Image
+
+on:
+ pull_request:
+
+jobs:
+ build:
+ runs-on: SubtensorCI
+
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+
+ - name: Set up QEMU
+ uses: docker/setup-qemu-action@v2
+
+ - name: Set up Docker Buildx
+ uses: docker/setup-buildx-action@v2
+
+ - name: Build Docker Image
+ run: docker build -f Dockerfile-localnet -t localnet .
diff --git a/.github/workflows/check-sdk-tests.yml b/.github/workflows/check-sdk-tests.yml
new file mode 100644
index 0000000000..d54308c17b
--- /dev/null
+++ b/.github/workflows/check-sdk-tests.yml
@@ -0,0 +1,190 @@
+name: Bittensor SDK Test
+
+permissions:
+ pull-requests: write
+ contents: read
+
+concurrency:
+ group: e2e-sdk-${{ github.ref }}
+ cancel-in-progress: true
+
+on:
+ pull_request:
+ branches:
+ - devnet
+ - devnet-ready
+ - testnet
+ - testnet-ready
+ - main
+ types: [opened, synchronize, reopened, labeled, unlabeled]
+
+env:
+ CARGO_TERM_COLOR: always
+ VERBOSE: ${{ github.event.inputs.verbose }}
+
+jobs:
+ apply-label-to-new-pr:
+ runs-on: ubuntu-latest
+ if: ${{ github.event.pull_request.draft == false }}
+ outputs:
+ should_continue_sdk: ${{ steps.check.outputs.should_continue_sdk }}
+ steps:
+ - name: Check
+ id: check
+ run: |
+ ACTION="${{ github.event.action }}"
+ if [[ "$ACTION" == "opened" || "$ACTION" == "reopened" ]]; then
+ echo "should_continue_sdk=true" >> $GITHUB_OUTPUT
+ else
+ echo "should_continue_sdk=false" >> $GITHUB_OUTPUT
+ fi
+ shell: bash
+
+ - name: Add label
+ if: steps.check.outputs.should_continue_sdk == 'true'
+ uses: actions-ecosystem/action-add-labels@v1
+ with:
+ github_token: ${{ secrets.GITHUB_TOKEN }}
+ labels: run-bittensor-sdk-tests
+
+ check-labels:
+ needs: apply-label-to-new-pr
+ runs-on: ubuntu-latest
+ if: always()
+ outputs:
+ run-sdk-tests: ${{ steps.get-labels.outputs.run-sdk-tests }}
+ steps:
+ - name: Check out repository
+ uses: actions/checkout@v4
+
+ - name: Get labels from PR
+ id: get-labels
+ run: |
+ sleep 5
+ LABELS=$(gh api repos/${{ github.repository }}/issues/${{ github.event.pull_request.number }}/labels --jq '.[].name')
+ echo "Current labels: $LABELS"
+ if echo "$LABELS" | grep -q "run-bittensor-sdk-tests"; then
+ echo "run-sdk-tests=true" >> $GITHUB_OUTPUT
+ else
+ echo "run-sdk-tests=false" >> $GITHUB_OUTPUT
+ fi
+ env:
+ GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+
+ find-e2e-tests:
+ needs: check-labels
+ if: always() && needs.check-labels.outputs.run-sdk-tests == 'true'
+ runs-on: ubuntu-latest
+ outputs:
+ test-files: ${{ steps.get-tests.outputs.test-files }}
+ steps:
+ - name: Clone bittensor repo
+ working-directory: ${{ github.workspace }}
+ run: git clone https://github.com/opentensor/bittensor.git
+
+ - name: Checkout
+ working-directory: ${{ github.workspace }}/bittensor
+ run: git checkout staging
+
+ - name: Install dependencies
+ run: sudo apt-get install -y jq
+
+ - name: Find e2e test files
+ id: get-tests
+ run: |
+ test_files=$(find ${{ github.workspace }}/bittensor/tests/e2e_tests -name "test*.py" | jq -R -s -c 'split("\n") | map(select(. != ""))')
+ echo "::set-output name=test-files::$test_files"
+ shell: bash
+
+ pull-docker-image:
+ needs: check-labels
+ runs-on: ubuntu-latest
+ if: always() && needs.check-labels.outputs.run-sdk-tests == 'true'
+ steps:
+ - name: Log in to GitHub Container Registry
+ run: echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u $GITHUB_ACTOR --password-stdin
+
+ - name: Pull Docker Image
+ run: docker pull ghcr.io/opentensor/subtensor-localnet:devnet-ready
+
+ - name: Save Docker Image to Cache
+ run: docker save -o subtensor-localnet.tar ghcr.io/opentensor/subtensor-localnet:devnet-ready
+
+ - name: Upload Docker Image as Artifact
+ uses: actions/upload-artifact@v4
+ with:
+ name: subtensor-localnet
+ path: subtensor-localnet.tar
+
+ # main job
+ run-e2e-tests:
+ needs:
+ - check-labels
+ - find-e2e-tests
+ - pull-docker-image
+
+ if: always() && needs.check-labels.outputs.run-sdk-tests == 'true'
+ runs-on: ubuntu-latest
+ strategy:
+ fail-fast: false
+ max-parallel: 16
+ matrix:
+ rust-branch:
+ - stable
+ rust-target:
+ - x86_64-unknown-linux-gnu
+ os:
+ - ubuntu-latest
+ test-file: ${{ fromJson(needs.find-e2e-tests.outputs.test-files) }}
+
+ env:
+ RELEASE_NAME: development
+ RUSTV: ${{ matrix.rust-branch }}
+ RUST_BACKTRACE: full
+ RUST_BIN_DIR: target/${{ matrix.rust-target }}
+ TARGET: ${{ matrix.rust-target }}
+
+ timeout-minutes: 60
+ name: "sdk: ${{ matrix.test-file }}"
+ steps:
+ - name: Check-out repository
+ uses: actions/checkout@v4
+
+ - name: Install uv
+ uses: astral-sh/setup-uv@v5
+
+ - name: Create Python virtual environment
+ working-directory: ${{ github.workspace }}
+ run: uv venv ${{ github.workspace }}/venv
+
+ - name: Clone Bittensor SDK repo
+ working-directory: ${{ github.workspace }}
+ run: git clone https://github.com/opentensor/bittensor.git
+
+ - name: Setup Bittensor SDK from cloned repo
+ working-directory: ${{ github.workspace }}/bittensor
+ run: |
+ source ${{ github.workspace }}/venv/bin/activate
+ git checkout staging
+ git fetch origin staging
+ uv run --active pip install --upgrade pip
+ uv run --active pip install '.[dev]'
+ uv run --active pip install pytest
+
+ - name: Install uv dependencies
+ working-directory: ${{ github.workspace }}/bittensor
+ run: uv sync --all-extras --dev
+
+ - name: Download Cached Docker Image
+ uses: actions/download-artifact@v4
+ with:
+ name: subtensor-localnet
+
+ - name: Load Docker Image
+ run: docker load -i subtensor-localnet.tar
+
+ - name: Run tests
+ working-directory: ${{ github.workspace }}/bittensor
+ run: |
+ source ${{ github.workspace }}/venv/bin/activate
+ uv run pytest ${{ matrix.test-file }} -s
diff --git a/.github/workflows/docker-localnet.yml b/.github/workflows/docker-localnet.yml
new file mode 100644
index 0000000000..cd6460bfe4
--- /dev/null
+++ b/.github/workflows/docker-localnet.yml
@@ -0,0 +1,69 @@
+name: Publish Localnet Docker Image
+
+on:
+ release:
+ types: [published]
+ workflow_dispatch:
+ inputs:
+ branch-or-tag:
+ description: "Branch or tag to use for the Docker image tag and ref to checkout (optional)"
+ required: false
+ default: ""
+ push:
+ branches:
+ - devnet-ready
+
+permissions:
+ contents: read
+ packages: write
+ actions: read
+ security-events: write
+
+jobs:
+ publish:
+ runs-on: SubtensorCI
+
+ steps:
+ - name: Determine Docker tag and ref
+ id: tag
+ run: |
+ branch_or_tag="${{ github.event.inputs.branch-or-tag || github.ref_name }}"
+ echo "Determined branch or tag: $branch_or_tag"
+ echo "tag=$branch_or_tag" >> $GITHUB_ENV
+ echo "ref=$branch_or_tag" >> $GITHUB_ENV
+
+ # Tag the image as latest unless building from the devnet-ready branch
+ if [[ "$branch_or_tag" != "devnet-ready" ]]; then
+ echo "latest_tag=true" >> $GITHUB_ENV
+ else
+ echo "latest_tag=false" >> $GITHUB_ENV
+ fi
+
+ - name: Checkout code
+ uses: actions/checkout@v4
+ with:
+ ref: ${{ env.ref }}
+
+ - name: Set up QEMU
+ uses: docker/setup-qemu-action@v2
+
+ - name: Set up Docker Buildx
+ uses: docker/setup-buildx-action@v2
+
+ - name: Login to GHCR
+ uses: docker/login-action@v2
+ with:
+ registry: ghcr.io
+ username: ${{ github.actor }}
+ password: ${{ secrets.GITHUB_TOKEN }}
+
+ - name: Build and push Docker image
+ uses: docker/build-push-action@v4
+ with:
+ context: .
+ file: Dockerfile-localnet
+ push: true
+ platforms: linux/amd64,linux/arm64
+ tags: |
+ ghcr.io/${{ github.repository }}-localnet:${{ env.tag }}
+ ${{ env.latest_tag == 'true' && format('ghcr.io/{0}-localnet:latest', github.repository) || '' }}
diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml
index 530c7cd8df..2b36e37282 100644
--- a/.github/workflows/docker.yml
+++ b/.github/workflows/docker.yml
@@ -9,7 +9,6 @@ on:
description: "Branch or tag to use for the Docker image tag and ref to checkout (optional)"
required: false
default: ""
-
push:
branches:
- devnet-ready
@@ -23,7 +22,7 @@ permissions:
security-events: write
jobs:
- publish:
+ publish-x86:
runs-on: SubtensorCI
steps:
@@ -35,6 +34,59 @@ jobs:
echo "tag=$branch_or_tag" >> $GITHUB_ENV
echo "ref=$branch_or_tag" >> $GITHUB_ENV
+ # Check if this is a tagged release (not devnet-ready/devnet/testnet)
+ if [[ "${{ github.event_name }}" == "release" && "$branch_or_tag" != "devnet-ready" && "$branch_or_tag" != "devnet" && "$branch_or_tag" != "testnet" ]]; then
+ echo "latest_tag=true" >> $GITHUB_ENV
+ else
+ echo "latest_tag=false" >> $GITHUB_ENV
+ fi
+
+ - name: Checkout code
+ uses: actions/checkout@v4
+ with:
+ ref: ${{ env.ref }}
+
+ - name: Set up QEMU
+ uses: docker/setup-qemu-action@v2
+
+ - name: Set up Docker Buildx
+ uses: docker/setup-buildx-action@v2
+
+ - name: Login to GHCR
+ uses: docker/login-action@v2
+ with:
+ registry: ghcr.io
+ username: ${{ github.actor }}
+ password: ${{ secrets.GITHUB_TOKEN }}
+
+ - name: Build and push Docker image
+ uses: docker/build-push-action@v4
+ with:
+ context: .
+ push: true
+ platforms: linux/amd64
+ tags: |
+ ghcr.io/${{ github.repository }}:${{ env.tag }}
+ ${{ env.latest_tag == 'true' && format('ghcr.io/{0}:latest', github.repository) || '' }}
+ publish-arm:
+ runs-on: SubtensorCI
+
+ steps:
+ - name: Determine Docker tag and ref
+ id: tag
+ run: |
+ branch_or_tag="${{ github.event.inputs.branch-or-tag || github.ref_name }}"
+ echo "Determined branch or tag: $branch_or_tag"
+ echo "tag=$branch_or_tag" >> $GITHUB_ENV
+ echo "ref=$branch_or_tag" >> $GITHUB_ENV
+
+ # Check if this is a tagged release (not devnet-ready/devnet/testnet)
+ if [[ "${{ github.event_name }}" == "release" && "$branch_or_tag" != "devnet-ready" && "$branch_or_tag" != "devnet" && "$branch_or_tag" != "testnet" ]]; then
+ echo "latest_tag=true" >> $GITHUB_ENV
+ else
+ echo "latest_tag=false" >> $GITHUB_ENV
+ fi
+
- name: Checkout code
uses: actions/checkout@v4
with:
@@ -58,5 +110,7 @@ jobs:
with:
context: .
push: true
+ platforms: linux/arm64
tags: |
ghcr.io/${{ github.repository }}:${{ env.tag }}
+ ${{ env.latest_tag == 'true' && format('ghcr.io/{0}:latest', github.repository) || '' }}
diff --git a/.github/workflows/e2e-bittensor-tests.yml b/.github/workflows/e2e-bittensor-tests.yml
deleted file mode 100644
index 5be78c2ec2..0000000000
--- a/.github/workflows/e2e-bittensor-tests.yml
+++ /dev/null
@@ -1,56 +0,0 @@
-name: E2E Bittensor Tests
-
-concurrency:
- group: e2e-bittensor-${{ github.ref }}
- cancel-in-progress: true
-
-on:
- pull_request:
-
- ## Allow running workflow manually from the Actions tab
- workflow_dispatch:
- inputs:
- verbose:
- description: "Output more information when triggered manually"
- required: false
- default: ""
-
-env:
- CARGO_TERM_COLOR: always
- VERBOSE: ${{ github.events.input.verbose }}
-
-jobs:
- run:
- runs-on: SubtensorCI
- env:
- RUST_BACKTRACE: full
- steps:
- - name: Check-out repository under $GITHUB_WORKSPACE
- uses: actions/checkout@v4
-
- - name: Utilize Shared Rust Cache
- uses: Swatinem/rust-cache@v2
-
- - name: Install dependencies
- run: |
- sudo apt-get update &&
- sudo apt-get install -y clang curl libssl-dev llvm libudev-dev protobuf-compiler
-
- - name: Clone bittensor repo
- run: git clone https://github.com/opentensor/bittensor.git
-
- - name: Setup bittensor repo
- working-directory: ${{ github.workspace }}/bittensor
- run: |
- git checkout staging
- python3 -m pip install -e .
- python3 -m pip install torch
- python3 -m pip install pytest
- python3 -m pip install -r requirements/dev.txt
-
- - name: Run tests
- working-directory: ${{ github.workspace }}/bittensor
- run: |
- pwd
- ls
- LOCALNET_SH_PATH="${{ github.workspace }}/scripts/localnet.sh" pytest tests/e2e_tests/ -s
diff --git a/.github/workflows/try-runtime.yml b/.github/workflows/try-runtime.yml
index c3b54a3514..1241ca94d0 100644
--- a/.github/workflows/try-runtime.yml
+++ b/.github/workflows/try-runtime.yml
@@ -50,7 +50,7 @@ jobs:
check-finney:
name: check finney
- if: github.base_ref == 'testnet' || github.base_ref == 'devnet' || github.base_ref == 'main'
+ # if: github.base_ref == 'testnet' || github.base_ref == 'devnet' || github.base_ref == 'main'
runs-on: SubtensorCI
steps:
- name: Checkout sources
diff --git a/Cargo.lock b/Cargo.lock
index 70dcd3d377..385175fcff 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1075,9 +1075,9 @@ checksum = "fd6c0e7b807d60291f42f33f58480c0bfafe28ed08286446f45e463728cf9c1c"
[[package]]
name = "cc"
-version = "1.1.24"
+version = "1.2.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "812acba72f0a070b003d3697490d2b55b837230ae7c6c6497f05cc2ddbb8d938"
+checksum = "be714c154be609ec7f5dad223a33bf1482fff90472de28f7362806e6d4832b8c"
dependencies = [
"jobserver",
"libc",
@@ -4247,7 +4247,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4979f22fdb869068da03c9f7528f8297c6fd2606bc3a4affe42e6a823fdb8da4"
dependencies = [
"cfg-if",
- "windows-targets 0.52.6",
+ "windows-targets 0.48.5",
]
[[package]]
@@ -5597,6 +5597,7 @@ dependencies = [
"substrate-prometheus-endpoint",
"subtensor-custom-rpc",
"subtensor-custom-rpc-runtime-api",
+ "subtensor-runtime-common",
"thiserror",
]
@@ -5605,7 +5606,6 @@ name = "node-subtensor-runtime"
version = "4.0.0-dev"
dependencies = [
"ark-serialize",
- "ed25519-dalek",
"fp-account",
"fp-evm",
"fp-rpc",
@@ -5641,7 +5641,7 @@ dependencies = [
"pallet-membership",
"pallet-multisig",
"pallet-preimage",
- "pallet-proxy",
+ "pallet-proxy 38.0.0",
"pallet-registry",
"pallet-safe-mode",
"pallet-scheduler",
@@ -5650,7 +5650,7 @@ dependencies = [
"pallet-timestamp",
"pallet-transaction-payment",
"pallet-transaction-payment-rpc-runtime-api",
- "pallet-utility",
+ "pallet-utility 38.0.0",
"parity-scale-codec",
"precompile-utils",
"rand_chacha",
@@ -5676,6 +5676,8 @@ dependencies = [
"substrate-wasm-builder",
"subtensor-custom-rpc-runtime-api",
"subtensor-macros",
+ "subtensor-precompiles",
+ "subtensor-runtime-common",
"tle",
"w3f-bls",
]
@@ -6315,6 +6317,23 @@ dependencies = [
"sp-runtime",
]
+[[package]]
+name = "pallet-proxy"
+version = "38.0.0"
+dependencies = [
+ "frame-benchmarking",
+ "frame-support",
+ "frame-system",
+ "pallet-balances",
+ "pallet-utility 38.0.0",
+ "parity-scale-codec",
+ "scale-info",
+ "sp-core",
+ "sp-io",
+ "sp-runtime",
+ "subtensor-macros",
+]
+
[[package]]
name = "pallet-proxy"
version = "38.0.0"
@@ -6346,6 +6365,20 @@ dependencies = [
"subtensor-macros",
]
+[[package]]
+name = "pallet-root-testing"
+version = "14.0.0"
+source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=polkadot-stable2409#87971b3e92721bdf10bf40b410eaae779d494ca0"
+dependencies = [
+ "frame-support",
+ "frame-system",
+ "parity-scale-codec",
+ "scale-info",
+ "sp-core",
+ "sp-io",
+ "sp-runtime",
+]
+
[[package]]
name = "pallet-safe-mode"
version = "19.0.0"
@@ -6356,8 +6389,8 @@ dependencies = [
"frame-support",
"frame-system",
"pallet-balances",
- "pallet-proxy",
- "pallet-utility",
+ "pallet-proxy 38.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=polkadot-stable2409)",
+ "pallet-utility 38.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=polkadot-stable2409)",
"parity-scale-codec",
"scale-info",
"sp-arithmetic",
@@ -6424,7 +6457,7 @@ dependencies = [
"pallet-preimage",
"pallet-scheduler",
"pallet-transaction-payment",
- "pallet-utility",
+ "pallet-utility 38.0.0",
"parity-scale-codec",
"parity-util-mem",
"rand",
@@ -6527,6 +6560,25 @@ dependencies = [
"sp-weights",
]
+[[package]]
+name = "pallet-utility"
+version = "38.0.0"
+dependencies = [
+ "frame-benchmarking",
+ "frame-support",
+ "frame-system",
+ "pallet-balances",
+ "pallet-collective",
+ "pallet-root-testing",
+ "pallet-timestamp",
+ "parity-scale-codec",
+ "scale-info",
+ "sp-core",
+ "sp-io",
+ "sp-runtime",
+ "subtensor-macros",
+]
+
[[package]]
name = "pallet-utility"
version = "38.0.0"
@@ -7755,15 +7807,14 @@ dependencies = [
[[package]]
name = "ring"
-version = "0.17.8"
+version = "0.17.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c17fa4cb658e3583423e915b9f3acc01cceaee1860e33d59ebae66adc3a2dc0d"
+checksum = "70ac5d832aa16abd7d1def883a8545280c20a60f523a370aa3a9617c2b8550ee"
dependencies = [
"cc",
"cfg-if",
"getrandom",
"libc",
- "spin 0.9.8",
"untrusted 0.9.0",
"windows-sys 0.52.0",
]
@@ -7947,7 +7998,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3f56a14d1f48b391359b22f731fd4bd7e43c97f3c50eee276f3aa09c94784d3e"
dependencies = [
"log",
- "ring 0.17.8",
+ "ring 0.17.13",
"rustls-webpki",
"sct",
]
@@ -7979,7 +8030,7 @@ version = "0.101.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8b6275d1ee7a1cd780b64aca7726599a1dbc893b1e64144529e55c3c2f745765"
dependencies = [
- "ring 0.17.8",
+ "ring 0.17.13",
"untrusted 0.9.0",
]
@@ -9230,7 +9281,7 @@ version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "da046153aa2352493d6cb7da4b6e5c0c057d8a1d0a9aa8560baffdd945acd414"
dependencies = [
- "ring 0.17.8",
+ "ring 0.17.13",
"untrusted 0.9.0",
]
@@ -9653,7 +9704,7 @@ dependencies = [
"chacha20poly1305",
"curve25519-dalek",
"rand_core",
- "ring 0.17.8",
+ "ring 0.17.13",
"rustc_version 0.4.1",
"sha2 0.10.8",
"subtle 2.6.1",
@@ -10963,6 +11014,41 @@ dependencies = [
"syn 2.0.90",
]
+[[package]]
+name = "subtensor-precompiles"
+version = "0.1.0"
+dependencies = [
+ "ed25519-dalek",
+ "fp-evm",
+ "frame-support",
+ "frame-system",
+ "log",
+ "pallet-admin-utils",
+ "pallet-balances",
+ "pallet-evm",
+ "pallet-evm-precompile-modexp",
+ "pallet-evm-precompile-sha3fips",
+ "pallet-evm-precompile-simple",
+ "pallet-proxy 38.0.0",
+ "pallet-subtensor",
+ "precompile-utils",
+ "sp-core",
+ "sp-runtime",
+ "sp-std 14.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=polkadot-stable2409)",
+ "subtensor-runtime-common",
+]
+
+[[package]]
+name = "subtensor-runtime-common"
+version = "0.1.0"
+dependencies = [
+ "frame-support",
+ "parity-scale-codec",
+ "scale-info",
+ "sp-core",
+ "sp-runtime",
+]
+
[[package]]
name = "subtensor-tools"
version = "0.1.0"
@@ -12189,7 +12275,7 @@ version = "0.22.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ed63aea5ce73d0ff405984102c42de94fc55a6b75765d621c65262469b3c9b53"
dependencies = [
- "ring 0.17.8",
+ "ring 0.17.13",
"untrusted 0.9.0",
]
diff --git a/Cargo.toml b/Cargo.toml
index ef5d64b0d6..15dd760a57 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -23,21 +23,14 @@ rayon = "1.10"
[workspace]
members = [
- "node",
- "pallets/commitments",
- "pallets/subtensor",
- "pallets/admin-utils",
- "pallets/collective",
- "pallets/registry",
- "primitives/*",
- "runtime",
- "support/tools",
- "support/macros",
- "support/linting",
- "support/procedural-fork",
- "pallets/drand",
+ "common",
+ "node",
+ "pallets/*",
+ "precompiles",
+ "primitives/*",
+ "runtime",
+ "support/*",
]
-exclude = ["support/procedural-fork"]
resolver = "2"
[workspace.lints.clippy]
@@ -46,14 +39,29 @@ arithmetic-side-effects = "deny"
type_complexity = "allow"
unwrap-used = "deny"
manual_inspect = "allow"
-useless_conversion = "allow" # until polkadot is patched
+useless_conversion = "allow" # until polkadot is patched
[workspace.dependencies]
+pallet-admin-utils = { default-features = false, path = "pallets/admin-utils" }
+pallet-collective = { default-features = false, path = "pallets/collective" }
+pallet-commitments = { default-features = false, path = "pallets/commitments" }
+pallet-registry = { default-features = false, path = "pallets/registry" }
+pallet-subtensor = { default-features = false, path = "pallets/subtensor" }
+subtensor-custom-rpc = { default-features = false, path = "pallets/subtensor/rpc" }
+subtensor-custom-rpc-runtime-api = { default-features = false, path = "pallets/subtensor/runtime-api" }
+subtensor-precompiles = { default-features = false, path = "precompiles" }
+subtensor-runtime-common = { default-features = false, path = "common" }
+node-subtensor-runtime = { default-features = false, path = "runtime" }
+
async-trait = "0.1"
cargo-husky = { version = "1", default-features = false }
clap = "4.5.4"
-codec = { package = "parity-scale-codec", version = "3.6.1", default-features = false, features = ["derive"] }
-ed25519-dalek = { version = "2.1.0", default-features = false, features = ["alloc"] }
+codec = { package = "parity-scale-codec", version = "3.6.1", default-features = false, features = [
+ "derive",
+] }
+ed25519-dalek = { version = "2.1.0", default-features = false, features = [
+ "alloc",
+] }
enumflags2 = "0.7.9"
futures = "0.3.30"
hex = { version = "0.4", default-features = false }
@@ -64,7 +72,9 @@ memmap2 = "0.9.4"
ndarray = { version = "0.15.6", default-features = false }
parity-util-mem = "0.12.0"
rand = "0.8.5"
-scale-codec = { package = "parity-scale-codec", version = "3.6.12", default-features = false, features = ["derive"] }
+scale-codec = { package = "parity-scale-codec", version = "3.6.12", default-features = false, features = [
+ "derive",
+] }
scale-info = { version = "2.11.2", default-features = false }
serde = { version = "1.0.214", default-features = false }
serde-tuple-vec-map = { version = "1.0.1", default-features = false }
@@ -74,11 +84,11 @@ serde_with = { version = "=2.0.0", default-features = false }
smallvec = "1.13.2"
litep2p = { git = "https://github.com/paritytech/litep2p", tag = "v0.7.0" }
syn = { version = "2.0.87", features = [
- "full",
- "visit-mut",
- "visit",
- "extra-traits",
- "parsing",
+ "full",
+ "visit-mut",
+ "visit",
+ "extra-traits",
+ "parsing",
] }
quote = "1"
proc-macro2 = { version = "1", features = ["span-locations"] }
@@ -105,7 +115,7 @@ pallet-insecure-randomness-collective-flip = { git = "https://github.com/parityt
pallet-membership = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "polkadot-stable2409", default-features = false }
pallet-multisig = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "polkadot-stable2409", default-features = false }
pallet-preimage = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "polkadot-stable2409", default-features = false }
-pallet-proxy = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "polkadot-stable2409", default-features = false }
+pallet-proxy = { path = "pallets/proxy", default-features = false }
pallet-safe-mode = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "polkadot-stable2409", default-features = false }
pallet-scheduler = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "polkadot-stable2409", default-features = false }
pallet-sudo = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "polkadot-stable2409", default-features = false }
@@ -113,7 +123,8 @@ pallet-timestamp = { git = "https://github.com/paritytech/polkadot-sdk.git", tag
pallet-transaction-payment = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "polkadot-stable2409", default-features = false }
pallet-transaction-payment-rpc = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "polkadot-stable2409" }
pallet-transaction-payment-rpc-runtime-api = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "polkadot-stable2409", default-features = false }
-pallet-utility = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "polkadot-stable2409", default-features = false }
+pallet-utility = { path = "pallets/utility", default-features = false }
+pallet-root-testing = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "polkadot-stable2409", default-features = false }
sc-basic-authorship = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "polkadot-stable2409" }
sc-cli = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "polkadot-stable2409" }
@@ -181,7 +192,9 @@ fc-consensus = { git = "https://github.com/opentensor/frontier", rev = "635bdac8
fp-consensus = { git = "https://github.com/opentensor/frontier", rev = "635bdac882", default-features = false }
fp-dynamic-fee = { git = "https://github.com/opentensor/frontier", rev = "635bdac882", default-features = false }
fc-api = { git = "https://github.com/opentensor/frontier", rev = "635bdac882", default-features = false }
-fc-rpc = { git = "https://github.com/opentensor/frontier", rev = "635bdac882", default-features = false, features = ["rpc-binary-search-estimate"]}
+fc-rpc = { git = "https://github.com/opentensor/frontier", rev = "635bdac882", default-features = false, features = [
+ "rpc-binary-search-estimate",
+] }
fc-rpc-core = { git = "https://github.com/opentensor/frontier", rev = "635bdac882", default-features = false }
fc-mapping-sync = { git = "https://github.com/opentensor/frontier", rev = "635bdac882", default-features = false }
precompile-utils = { git = "https://github.com/opentensor/frontier", rev = "635bdac882", default-features = false }
@@ -199,21 +212,34 @@ pallet-hotfix-sufficients = { git = "https://github.com/opentensor/frontier", re
#DRAND
pallet-drand = { path = "pallets/drand", default-features = false }
-sp-crypto-ec-utils = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "polkadot-stable2409", features = ["bls12-381"] }
-getrandom = { version = "0.2.15", features = ["custom"], default-features = false }
-sp-keystore = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "polkadot-stable2409", default-features = false }
+sp-crypto-ec-utils = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "polkadot-stable2409", features = [
+ "bls12-381",
+] }
+getrandom = { version = "0.2.15", features = [
+ "custom",
+], default-features = false }
+sp-keystore = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "polkadot-stable2409", default-features = false }
w3f-bls = { version = "=0.1.3", default-features = false }
-ark-crypto-primitives = { version = "0.4.0", default-features = false, features = [ "r1cs", "snark" ] }
-ark-scale = { version = "0.0.11", default-features = false, features = ["hazmat"] }
+ark-crypto-primitives = { version = "0.4.0", default-features = false, features = [
+ "r1cs",
+ "snark",
+] }
+ark-scale = { version = "0.0.11", default-features = false, features = [
+ "hazmat",
+] }
sp-ark-bls12-381 = { git = "https://github.com/paritytech/substrate-curves", default-features = false }
-ark-bls12-381 = { version = "0.4.0", features = ["curve"], default-features = false }
-ark-serialize = { version = "0.4.0", features = [ "derive" ], default-features = false }
+ark-bls12-381 = { version = "0.4.0", features = [
+ "curve",
+], default-features = false }
+ark-serialize = { version = "0.4.0", features = [
+ "derive",
+], default-features = false }
ark-ff = { version = "0.4.0", default-features = false }
ark-ec = { version = "0.4.0", default-features = false }
ark-std = { version = "0.4.0", default-features = false }
anyhow = "1.0.81"
sha2 = { version = "0.10.8", default-features = false }
-rand_chacha = { version = "0.3.1", default-features = false }
+rand_chacha = { version = "0.3.1", default-features = false }
tle = { git = "https://github.com/ideal-lab5/timelock", rev = "5416406cfd32799e31e1795393d4916894de4468", default-features = false }
frame-metadata = "16"
@@ -232,11 +258,11 @@ codegen-units = 1
[features]
default = []
try-runtime = [
- "node-subtensor/try-runtime",
- "node-subtensor-runtime/try-runtime",
+ "node-subtensor/try-runtime",
+ "node-subtensor-runtime/try-runtime",
]
runtime-benchmarks = [
- "node-subtensor/runtime-benchmarks",
- "node-subtensor-runtime/runtime-benchmarks",
+ "node-subtensor/runtime-benchmarks",
+ "node-subtensor-runtime/runtime-benchmarks",
]
metadata-hash = ["node-subtensor-runtime/metadata-hash"]
diff --git a/Dockerfile b/Dockerfile
index edceab0e8c..2439d5b2a2 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,4 +1,4 @@
-ARG BASE_IMAGE=ubuntu:24.04
+ARG BASE_IMAGE=ubuntu:latest
FROM $BASE_IMAGE AS builder
SHELL ["/bin/bash", "-c"]
@@ -18,18 +18,22 @@ RUN apt-get update && \
apt-get install -y curl build-essential protobuf-compiler clang git pkg-config libssl-dev && \
rm -rf /var/lib/apt/lists/*
-RUN set -o pipefail && curl https://sh.rustup.rs -sSf | sh -s -- -y
-ENV PATH="/root/.cargo/bin:${PATH}"
-RUN rustup update stable
-RUN rustup target add wasm32-unknown-unknown --toolchain stable
-
# Copy entire repository
COPY . /build
WORKDIR /build
+# Install Rust
+RUN set -o pipefail && curl https://sh.rustup.rs -sSf | sh -s -- -y
+ENV PATH="/root/.cargo/bin:${PATH}"
+RUN rustup toolchain install
+RUN rustup target add wasm32-unknown-unknown
+
# Build the project
RUN cargo build -p node-subtensor --profile production --features="metadata-hash" --locked
+# Slim down image
+RUN rm -rf /root/.cargo
+
# Verify the binary was produced
RUN test -e /build/target/production/node-subtensor
diff --git a/Dockerfile-localnet b/Dockerfile-localnet
new file mode 100644
index 0000000000..0efaf5b47a
--- /dev/null
+++ b/Dockerfile-localnet
@@ -0,0 +1,65 @@
+ARG BASE_IMAGE=ubuntu:latest
+
+FROM $BASE_IMAGE AS builder
+SHELL ["/bin/bash", "-c"]
+
+# Set noninteractive mode for apt-get
+ARG DEBIAN_FRONTEND=noninteractive
+
+LABEL ai.opentensor.image.authors="operations@opentensor.ai" \
+ ai.opentensor.image.vendor="Opentensor Foundation" \
+ ai.opentensor.image.title="opentensor/subtensor-localnet" \
+ ai.opentensor.image.description="Opentensor Subtensor Blockchain" \
+ ai.opentensor.image.documentation="https://docs.bittensor.com"
+
+# Set up Rust environment
+ENV RUST_BACKTRACE=1
+
+RUN apt-get update
+RUN apt-get install -y curl build-essential protobuf-compiler clang git pkg-config libssl-dev llvm libudev-dev
+
+# Copy entire repository
+COPY . /build
+WORKDIR /build
+
+# Install Rust
+RUN set -o pipefail && curl https://sh.rustup.rs -sSf | sh -s -- -y
+ENV PATH="/root/.cargo/bin:${PATH}"
+RUN rustup toolchain install
+RUN rustup target add wasm32-unknown-unknown
+
+# Build fast-blocks node
+RUN ./scripts/localnet.sh --build-only
+# Build non-fast-blocks node
+RUN ./scripts/localnet.sh False --build-only
+
+# Verify the binaries were produced
+RUN test -e /build/target/fast-blocks/release/node-subtensor
+RUN test -e /build/target/non-fast-blocks/release/node-subtensor
+
+FROM $BASE_IMAGE AS subtensor-localnet
+
+# Copy binaries
+COPY --from=builder /build/target/fast-blocks/release/node-subtensor target/fast-blocks/release/node-subtensor
+RUN chmod +x target/fast-blocks/release/node-subtensor
+
+COPY --from=builder /build/target/non-fast-blocks/release/node-subtensor target/non-fast-blocks/release/node-subtensor
+RUN chmod +x target/non-fast-blocks/release/node-subtensor
+
+COPY --from=builder /build/snapshot.json /snapshot.json
+
+COPY --from=builder /build/scripts/localnet.sh scripts/localnet.sh
+RUN chmod +x /scripts/localnet.sh
+
+## Update certificates
+RUN apt-get update && apt-get install -y ca-certificates
+
+# Do not build (just run)
+ENV BUILD_BINARY=0
+# Switch to local run with IP 0.0.0.0 within docker image
+ENV RUN_IN_DOCKER=1
+# Expose ports
+EXPOSE 30334 30335 9944 9945
+
+ENTRYPOINT ["/scripts/localnet.sh"]
+CMD ["True"]
diff --git a/build.rs b/build.rs
index 1abd7456b4..854778873e 100644
--- a/build.rs
+++ b/build.rs
@@ -31,6 +31,7 @@ fn main() {
// Parse each rust file with syn and run the linting suite on it in parallel
rust_files.par_iter().for_each_with(tx.clone(), |tx, file| {
+ let is_test = file.display().to_string().contains("test");
let Ok(content) = fs::read_to_string(file) else {
return;
};
@@ -63,6 +64,10 @@ fn main() {
track_lint(ForbidKeysRemoveCall::lint(&parsed_file));
track_lint(RequireFreezeStruct::lint(&parsed_file));
track_lint(RequireExplicitPalletIndex::lint(&parsed_file));
+
+ if is_test {
+ track_lint(ForbidSaturatingMath::lint(&parsed_file));
+ }
});
// Collect and print all errors after the parallel processing is done
diff --git a/common/Cargo.toml b/common/Cargo.toml
new file mode 100644
index 0000000000..d0b43cdc1b
--- /dev/null
+++ b/common/Cargo.toml
@@ -0,0 +1,32 @@
+[package]
+name = "subtensor-runtime-common"
+version = "0.1.0"
+edition = "2024"
+authors = ["Opentensor Foundation "]
+homepage = "https://opentensor.ai/"
+publish = false
+repository = "https://github.com/opentensor/subtensor/"
+
+[package.metadata.docs.rs]
+targets = ["x86_64-unknown-linux-gnu"]
+
+[dependencies]
+codec = { workspace = true }
+frame-support = { workspace = true }
+scale-info = { workspace = true }
+sp-runtime = { workspace = true }
+sp-core = { workspace = true }
+
+[lints]
+workspace = true
+
+[features]
+default = ["std"]
+fast-blocks = []
+std = [
+ "codec/std",
+ "frame-support/std",
+ "scale-info/std",
+ "sp-core/std",
+ "sp-runtime/std",
+]
diff --git a/common/src/lib.rs b/common/src/lib.rs
new file mode 100644
index 0000000000..75b18e3b14
--- /dev/null
+++ b/common/src/lib.rs
@@ -0,0 +1,86 @@
+#![cfg_attr(not(feature = "std"), no_std)]
+
+use codec::{Decode, Encode, MaxEncodedLen};
+use scale_info::TypeInfo;
+use sp_runtime::{
+ MultiSignature,
+ traits::{IdentifyAccount, Verify},
+};
+
+/// Balance of an account.
+pub type Balance = u64;
+
+/// An index to a block.
+pub type BlockNumber = u32;
+
+/// Alias to 512-bit hash when used in the context of a transaction signature on the chain.
+pub type Signature = MultiSignature;
+
+/// Some way of identifying an account on the chain. We intentionally make it equivalent to the
+/// public key of our transaction signing scheme.
+pub type AccountId = <<Signature as Verify>::Signer as IdentifyAccount>::AccountId;
+
+/// Index of a transaction in the chain.
+pub type Index = u32;
+
+/// A hash of some data used by the chain.
+pub type Hash = sp_core::H256;
+
+pub type Nonce = u32;
+
+/// Transfers below SMALL_TRANSFER_LIMIT are considered small transfers
+pub const SMALL_TRANSFER_LIMIT: Balance = 500_000_000; // 0.5 TAO
+
+#[derive(
+ Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Encode, Decode, Debug, MaxEncodedLen, TypeInfo,
+)]
+pub enum ProxyType {
+ Any,
+ Owner, // Subnet owner Calls
+ NonCritical,
+ NonTransfer,
+ Senate,
+ NonFungibile, // Nothing involving moving TAO
+ Triumvirate,
+ Governance, // Both above governance
+ Staking,
+ Registration,
+ Transfer,
+ SmallTransfer,
+ RootWeights,
+ ChildKeys,
+ SudoUncheckedSetCode,
+}
+
+impl Default for ProxyType {
+ // allow all Calls; required to be most permissive
+ fn default() -> Self {
+ Self::Any
+ }
+}
+
+pub mod time {
+ use super::*;
+
+ /// This determines the average expected block time that we are targeting. Blocks will be
+ /// produced at a minimum duration defined by `SLOT_DURATION`. `SLOT_DURATION` is picked up by
+ /// `pallet_timestamp` which is in turn picked up by `pallet_aura` to implement `fn
+ /// slot_duration()`.
+ ///
+ /// Change this to adjust the block time.
+ #[cfg(not(feature = "fast-blocks"))]
+ pub const MILLISECS_PER_BLOCK: u64 = 12000;
+
+ /// Fast blocks for development
+ #[cfg(feature = "fast-blocks")]
+ pub const MILLISECS_PER_BLOCK: u64 = 250;
+
+ // NOTE: Currently it is not possible to change the slot duration after the chain has started.
+ // Attempting to do so will brick block production.
+ pub const SLOT_DURATION: u64 = MILLISECS_PER_BLOCK;
+
+ // Time is measured by number of blocks.
+ pub const MINUTES: BlockNumber = 60_000 / (MILLISECS_PER_BLOCK as BlockNumber);
+ pub const HOURS: BlockNumber = MINUTES * 60;
+ pub const DAYS: BlockNumber = HOURS * 24;
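+
+ // Worked example: with the default 12_000 ms block time these evaluate to
+ // MINUTES = 5, HOURS = 300 and DAYS = 7_200 blocks; with the fast-blocks
+ // feature (250 ms) they become 240, 14_400 and 345_600 blocks respectively.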
+}
diff --git a/evm-tests/.gitignore b/evm-tests/.gitignore
new file mode 100644
index 0000000000..661f94a6e0
--- /dev/null
+++ b/evm-tests/.gitignore
@@ -0,0 +1,3 @@
+node_modules
+.papi
+.env
diff --git a/evm-tests/README.md b/evm-tests/README.md
new file mode 100644
index 0000000000..7d01034bd8
--- /dev/null
+++ b/evm-tests/README.md
@@ -0,0 +1,27 @@
+# type-test
+
+End-to-end EVM tests for subtensor, written in TypeScript.
+
+## polkadot api
+
+```bash
+npx papi add devnet -w ws://10.0.0.11:9944
+```
+
+## get the new metadata
+
+```bash
+sh get-metadata.sh
+```
+
+## run all tests
+
+```bash
+yarn run test
+```
+
+## Run a particular test case by passing its name (or part of it) with `-g`. For example:
+
+```bash
+yarn run test -- -g "Can set subnet parameter"
+```
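+
+## use the generated descriptors
+
+A minimal sketch of how the generated `devnet` descriptors are consumed from a
+mocha test (the `getDevnetApi` helper is assumed to come from `src/substrate.ts`
+in this repo):
+
+```ts
+import { devnet } from "@polkadot-api/descriptors"
+import { TypedApi } from "polkadot-api"
+import { getDevnetApi } from "../src/substrate"
+
+describe("descriptor smoke test", () => {
+    let api: TypedApi<typeof devnet>
+
+    before(async () => {
+        // connect to the local node started via scripts/localnet.sh
+        api = await getDevnetApi()
+    })
+
+    it("reads the current block number", async () => {
+        const blockNumber = await api.query.System.Number.getValue()
+        console.log("current block:", blockNumber)
+    })
+})
+```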
diff --git a/evm-tests/get-metadata.sh b/evm-tests/get-metadata.sh
new file mode 100644
index 0000000000..6d7727009d
--- /dev/null
+++ b/evm-tests/get-metadata.sh
@@ -0,0 +1,3 @@
+rm -rf .papi
+npx papi add devnet -w ws://localhost:9944
+
diff --git a/evm-tests/local.test.ts b/evm-tests/local.test.ts
new file mode 100644
index 0000000000..9eb24d4327
--- /dev/null
+++ b/evm-tests/local.test.ts
@@ -0,0 +1,53 @@
+import * as assert from "assert";
+import { getAliceSigner, getClient, getDevnetApi, getRandomSubstrateKeypair } from "../src/substrate"
+import { SUB_LOCAL_URL, } from "../src/config";
+import { devnet } from "@polkadot-api/descriptors"
+import { PolkadotSigner, TypedApi } from "polkadot-api";
+import { convertPublicKeyToSs58, convertH160ToSS58 } from "../src/address-utils"
+import { ethers } from "ethers"
+import { INEURON_ADDRESS, INeuronABI } from "../src/contracts/neuron"
+import { generateRandomEthersWallet } from "../src/utils"
+import { forceSetBalanceToEthAddress, forceSetBalanceToSs58Address, addNewSubnetwork, burnedRegister } from "../src/subtensor"
+
+describe("Test neuron precompile Serve Axon Prometheus", () => {
+ // init eth part
+ // const wallet1 = generateRandomEthersWallet();
+ // const wallet2 = generateRandomEthersWallet();
+ // const wallet3 = generateRandomEthersWallet();
+
+ // init substrate part
+
+ // const coldkey = getRandomSubstrateKeypair();
+
+ let api: TypedApi<typeof devnet>
+
+ // sudo account alice as signer
+ let alice: PolkadotSigner;
+ before(async () => {
+ // init variables got from await and async
+ const subClient = await getClient(SUB_LOCAL_URL)
+ api = await getDevnetApi()
+ // alice = await getAliceSigner();
+
+ // await forceSetBalanceToSs58Address(api, convertPublicKeyToSs58(coldkey.publicKey))
+ // await forceSetBalanceToEthAddress(api, wallet1.address)
+ // await forceSetBalanceToEthAddress(api, wallet2.address)
+ // await forceSetBalanceToEthAddress(api, wallet3.address)
+
+
+ let index = 0;
+ while (index < 30) {
+ const hotkey = getRandomSubstrateKeypair();
+ const coldkey = getRandomSubstrateKeypair();
+ await forceSetBalanceToSs58Address(api, convertPublicKeyToSs58(hotkey.publicKey))
+ await forceSetBalanceToSs58Address(api, convertPublicKeyToSs58(coldkey.publicKey))
+ let netuid = await addNewSubnetwork(api, hotkey, coldkey)
+ index++;
+ }
+
+
+ })
+
+ it("Serve Axon", async () => {
+
+ });
+});
\ No newline at end of file
diff --git a/evm-tests/package.json b/evm-tests/package.json
new file mode 100644
index 0000000000..a96a2c4a0c
--- /dev/null
+++ b/evm-tests/package.json
@@ -0,0 +1,31 @@
+{
+ "scripts": {
+ "test": "mocha --timeout 999999 --require ts-node/register test/*test.ts"
+ },
+ "keywords": [],
+ "author": "",
+ "license": "ISC",
+ "dependencies": {
+ "@polkadot-api/descriptors": "file:.papi/descriptors",
+ "@polkadot-labs/hdkd": "^0.0.10",
+ "@polkadot-labs/hdkd-helpers": "^0.0.11",
+ "@polkadot/api": "15.1.1",
+ "crypto": "^1.0.1",
+ "dotenv": "16.4.7",
+ "ethers": "^6.13.5",
+ "polkadot-api": "^1.9.5",
+ "viem": "2.23.4"
+ },
+ "devDependencies": {
+ "@types/bun": "^1.1.13",
+ "@types/chai": "^5.0.1",
+ "@types/mocha": "^10.0.10",
+ "assert": "^2.1.0",
+ "chai": "^5.2.0",
+ "mocha": "^11.1.0",
+ "prettier": "^3.3.3",
+ "ts-node": "^10.9.2",
+ "typescript": "^5.7.2",
+ "vite": "^5.4.8"
+ }
+}
diff --git a/evm-tests/src/address-utils.ts b/evm-tests/src/address-utils.ts
new file mode 100644
index 0000000000..ed3abc5008
--- /dev/null
+++ b/evm-tests/src/address-utils.ts
@@ -0,0 +1,77 @@
+import { Address } from "viem"
+import { encodeAddress } from "@polkadot/util-crypto";
+import { ss58Address } from "@polkadot-labs/hdkd-helpers";
+import { hexToU8a } from "@polkadot/util";
+import { blake2AsU8a, decodeAddress } from "@polkadot/util-crypto";
+import { Binary } from "polkadot-api";
+import { SS58_PREFIX } from "./config"
+
+export function toViemAddress(address: string): Address {
+ let addressNoPrefix = address.replace("0x", "")
+ return `0x${addressNoPrefix}`
+}
+
+export function convertH160ToSS58(ethAddress: string) {
+ // get the public key
+ const hash = convertH160ToPublicKey(ethAddress);
+
+ // Convert the hash to SS58 format
+ const ss58Address = encodeAddress(hash, SS58_PREFIX);
+ return ss58Address;
+}
+
+export function convertPublicKeyToSs58(publickey: Uint8Array) {
+ return ss58Address(publickey, SS58_PREFIX);
+}
+
+export function convertH160ToPublicKey(ethAddress: string) {
+ const prefix = "evm:";
+ const prefixBytes = new TextEncoder().encode(prefix);
+ const addressBytes = hexToU8a(
+ ethAddress.startsWith("0x") ? ethAddress : `0x${ethAddress}`
+ );
+ const combined = new Uint8Array(prefixBytes.length + addressBytes.length);
+
+ // Concatenate prefix and Ethereum address
+ combined.set(prefixBytes);
+ combined.set(addressBytes, prefixBytes.length);
+
+ // Hash the combined data (the public key)
+ const hash = blake2AsU8a(combined);
+ return hash;
+}
+
+export function ss58ToEthAddress(ss58Address: string) {
+ // Decode the SS58 address to a Uint8Array public key
+ const publicKey = decodeAddress(ss58Address);
+
+ // Take the first 20 bytes of the public key for the Ethereum address
+ const ethereumAddressBytes = publicKey.slice(0, 20);
+
+ // Convert the 20 bytes into an Ethereum H160 address format (Hex string)
+ const ethereumAddress = '0x' + Buffer.from(ethereumAddressBytes).toString('hex');
+
+ return ethereumAddress;
+}
+
+export function ss58ToH160(ss58Address: string): Binary {
+ // Decode the SS58 address to a Uint8Array public key
+ const publicKey = decodeAddress(ss58Address);
+
+ // Take the first 20 bytes of the public key for the H160 address
+ const ethereumAddressBytes = publicKey.slice(0, 20);
+
+ return new Binary(ethereumAddressBytes);
+}
+
+export function ethAddressToH160(ethAddress: string): Binary {
+ // Convert the hex Ethereum address into its raw 20-byte representation
+ const addressBytes = hexToU8a(ethAddress);
+
+ return new Binary(addressBytes);
+}
\ No newline at end of file
diff --git a/evm-tests/src/balance-math.ts b/evm-tests/src/balance-math.ts
new file mode 100644
index 0000000000..8d6e86bd5a
--- /dev/null
+++ b/evm-tests/src/balance-math.ts
@@ -0,0 +1,26 @@
+import assert from "assert"
+
+export const TAO = BigInt(1000000000) // 10^9
+export const ETH_PER_RAO = BigInt(1000000000) // 10^9
+export const GWEI = BigInt(1000000000) // 10^9
+export const MAX_TX_FEE = BigInt(21000000) * GWEI // 100 times EVM to EVM transfer fee
+
+export function bigintToRao(value: bigint) {
+ return TAO * value
+}
+
+export function tao(value: number) {
+ return TAO * BigInt(value)
+}
+
+export function raoToEth(value: bigint) {
+ return ETH_PER_RAO * value
+}
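+
+// Note: raoToEth scales a native balance (9 decimals, RAO) up to the
+// 18-decimal representation used on the EVM side, hence the 10^9 factor.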
+
+export function compareEthBalanceWithTxFee(balance1: bigint, balance2: bigint) {
+ if (balance1 > balance2) {
+ assert((balance1 - balance2) < MAX_TX_FEE)
+ } else {
+ assert((balance2 - balance1) < MAX_TX_FEE)
+ }
+}
diff --git a/evm-tests/src/config.ts b/evm-tests/src/config.ts
new file mode 100644
index 0000000000..601c89c8c1
--- /dev/null
+++ b/evm-tests/src/config.ts
@@ -0,0 +1,38 @@
+export const ETH_LOCAL_URL = 'http://localhost:9944'
+export const SUB_LOCAL_URL = 'ws://localhost:9944'
+export const SS58_PREFIX = 42;
+// Set the tx timeout to 2 seconds when the fast-blocks feature is enabled.
+export const TX_TIMEOUT = 2000;
+
+export const IED25519VERIFY_ADDRESS = "0x0000000000000000000000000000000000000402";
+export const IEd25519VerifyABI = [
+ {
+ inputs: [
+ { internalType: "bytes32", name: "message", type: "bytes32" },
+ { internalType: "bytes32", name: "publicKey", type: "bytes32" },
+ { internalType: "bytes32", name: "r", type: "bytes32" },
+ { internalType: "bytes32", name: "s", type: "bytes32" },
+ ],
+ name: "verify",
+ outputs: [{ internalType: "bool", name: "", type: "bool" }],
+ stateMutability: "pure",
+ type: "function",
+ },
+];
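+
+// Usage sketch: the ed25519 precompile is called like any other contract, e.g.
+//   const ed25519 = new ethers.Contract(IED25519VERIFY_ADDRESS, IEd25519VerifyABI, wallet)
+//   const ok: boolean = await ed25519.verify(message, publicKey, r, s)
+// where `wallet` is an ethers Signer connected to ETH_LOCAL_URL.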
+
+export const IBALANCETRANSFER_ADDRESS = "0x0000000000000000000000000000000000000800";
+export const IBalanceTransferABI = [
+ {
+ inputs: [
+ {
+ internalType: "bytes32",
+ name: "data",
+ type: "bytes32",
+ },
+ ],
+ name: "transfer",
+ outputs: [],
+ stateMutability: "payable",
+ type: "function",
+ },
+];
\ No newline at end of file
diff --git a/evm-tests/src/contracts/bridgeToken.ts b/evm-tests/src/contracts/bridgeToken.ts
new file mode 100644
index 0000000000..f8b3ea4d03
--- /dev/null
+++ b/evm-tests/src/contracts/bridgeToken.ts
@@ -0,0 +1,631 @@
+export const BRIDGE_TOKEN_CONTRACT_ABI = [
+ {
+ "inputs": [
+ {
+ "internalType": "string",
+ "name": "name_",
+ "type": "string"
+ },
+ {
+ "internalType": "string",
+ "name": "symbol_",
+ "type": "string"
+ },
+ {
+ "internalType": "address",
+ "name": "admin",
+ "type": "address"
+ }
+ ],
+ "stateMutability": "nonpayable",
+ "type": "constructor"
+ },
+ {
+ "inputs": [],
+ "name": "AccessControlBadConfirmation",
+ "type": "error"
+ },
+ {
+ "inputs": [
+ {
+ "internalType": "address",
+ "name": "account",
+ "type": "address"
+ },
+ {
+ "internalType": "bytes32",
+ "name": "neededRole",
+ "type": "bytes32"
+ }
+ ],
+ "name": "AccessControlUnauthorizedAccount",
+ "type": "error"
+ },
+ {
+ "inputs": [
+ {
+ "internalType": "address",
+ "name": "spender",
+ "type": "address"
+ },
+ {
+ "internalType": "uint256",
+ "name": "allowance",
+ "type": "uint256"
+ },
+ {
+ "internalType": "uint256",
+ "name": "needed",
+ "type": "uint256"
+ }
+ ],
+ "name": "ERC20InsufficientAllowance",
+ "type": "error"
+ },
+ {
+ "inputs": [
+ {
+ "internalType": "address",
+ "name": "sender",
+ "type": "address"
+ },
+ {
+ "internalType": "uint256",
+ "name": "balance",
+ "type": "uint256"
+ },
+ {
+ "internalType": "uint256",
+ "name": "needed",
+ "type": "uint256"
+ }
+ ],
+ "name": "ERC20InsufficientBalance",
+ "type": "error"
+ },
+ {
+ "inputs": [
+ {
+ "internalType": "address",
+ "name": "approver",
+ "type": "address"
+ }
+ ],
+ "name": "ERC20InvalidApprover",
+ "type": "error"
+ },
+ {
+ "inputs": [
+ {
+ "internalType": "address",
+ "name": "receiver",
+ "type": "address"
+ }
+ ],
+ "name": "ERC20InvalidReceiver",
+ "type": "error"
+ },
+ {
+ "inputs": [
+ {
+ "internalType": "address",
+ "name": "sender",
+ "type": "address"
+ }
+ ],
+ "name": "ERC20InvalidSender",
+ "type": "error"
+ },
+ {
+ "inputs": [
+ {
+ "internalType": "address",
+ "name": "spender",
+ "type": "address"
+ }
+ ],
+ "name": "ERC20InvalidSpender",
+ "type": "error"
+ },
+ {
+ "inputs": [],
+ "name": "UnauthorizedHandler",
+ "type": "error"
+ },
+ {
+ "anonymous": false,
+ "inputs": [
+ {
+ "indexed": true,
+ "internalType": "address",
+ "name": "owner",
+ "type": "address"
+ },
+ {
+ "indexed": true,
+ "internalType": "address",
+ "name": "spender",
+ "type": "address"
+ },
+ {
+ "indexed": false,
+ "internalType": "uint256",
+ "name": "value",
+ "type": "uint256"
+ }
+ ],
+ "name": "Approval",
+ "type": "event"
+ },
+ {
+ "anonymous": false,
+ "inputs": [
+ {
+ "indexed": true,
+ "internalType": "bytes32",
+ "name": "role",
+ "type": "bytes32"
+ },
+ {
+ "indexed": true,
+ "internalType": "bytes32",
+ "name": "previousAdminRole",
+ "type": "bytes32"
+ },
+ {
+ "indexed": true,
+ "internalType": "bytes32",
+ "name": "newAdminRole",
+ "type": "bytes32"
+ }
+ ],
+ "name": "RoleAdminChanged",
+ "type": "event"
+ },
+ {
+ "anonymous": false,
+ "inputs": [
+ {
+ "indexed": true,
+ "internalType": "bytes32",
+ "name": "role",
+ "type": "bytes32"
+ },
+ {
+ "indexed": true,
+ "internalType": "address",
+ "name": "account",
+ "type": "address"
+ },
+ {
+ "indexed": true,
+ "internalType": "address",
+ "name": "sender",
+ "type": "address"
+ }
+ ],
+ "name": "RoleGranted",
+ "type": "event"
+ },
+ {
+ "anonymous": false,
+ "inputs": [
+ {
+ "indexed": true,
+ "internalType": "bytes32",
+ "name": "role",
+ "type": "bytes32"
+ },
+ {
+ "indexed": true,
+ "internalType": "address",
+ "name": "account",
+ "type": "address"
+ },
+ {
+ "indexed": true,
+ "internalType": "address",
+ "name": "sender",
+ "type": "address"
+ }
+ ],
+ "name": "RoleRevoked",
+ "type": "event"
+ },
+ {
+ "anonymous": false,
+ "inputs": [
+ {
+ "indexed": true,
+ "internalType": "address",
+ "name": "from",
+ "type": "address"
+ },
+ {
+ "indexed": true,
+ "internalType": "address",
+ "name": "to",
+ "type": "address"
+ },
+ {
+ "indexed": false,
+ "internalType": "uint256",
+ "name": "value",
+ "type": "uint256"
+ }
+ ],
+ "name": "Transfer",
+ "type": "event"
+ },
+ {
+ "inputs": [],
+ "name": "DEFAULT_ADMIN_ROLE",
+ "outputs": [
+ {
+ "internalType": "bytes32",
+ "name": "",
+ "type": "bytes32"
+ }
+ ],
+ "stateMutability": "view",
+ "type": "function"
+ },
+ {
+ "inputs": [
+ {
+ "internalType": "address",
+ "name": "owner",
+ "type": "address"
+ },
+ {
+ "internalType": "address",
+ "name": "spender",
+ "type": "address"
+ }
+ ],
+ "name": "allowance",
+ "outputs": [
+ {
+ "internalType": "uint256",
+ "name": "",
+ "type": "uint256"
+ }
+ ],
+ "stateMutability": "view",
+ "type": "function"
+ },
+ {
+ "inputs": [
+ {
+ "internalType": "address",
+ "name": "spender",
+ "type": "address"
+ },
+ {
+ "internalType": "uint256",
+ "name": "value",
+ "type": "uint256"
+ }
+ ],
+ "name": "approve",
+ "outputs": [
+ {
+ "internalType": "bool",
+ "name": "",
+ "type": "bool"
+ }
+ ],
+ "stateMutability": "nonpayable",
+ "type": "function"
+ },
+ {
+ "inputs": [
+ {
+ "internalType": "address",
+ "name": "account",
+ "type": "address"
+ }
+ ],
+ "name": "balanceOf",
+ "outputs": [
+ {
+ "internalType": "uint256",
+ "name": "",
+ "type": "uint256"
+ }
+ ],
+ "stateMutability": "view",
+ "type": "function"
+ },
+ {
+ "inputs": [
+ {
+ "internalType": "uint256",
+ "name": "value",
+ "type": "uint256"
+ }
+ ],
+ "name": "burn",
+ "outputs": [],
+ "stateMutability": "nonpayable",
+ "type": "function"
+ },
+ {
+ "inputs": [
+ {
+ "internalType": "address",
+ "name": "from",
+ "type": "address"
+ },
+ {
+ "internalType": "uint256",
+ "name": "amount",
+ "type": "uint256"
+ }
+ ],
+ "name": "burnFrom",
+ "outputs": [],
+ "stateMutability": "nonpayable",
+ "type": "function"
+ },
+ {
+ "inputs": [],
+ "name": "decimals",
+ "outputs": [
+ {
+ "internalType": "uint8",
+ "name": "",
+ "type": "uint8"
+ }
+ ],
+ "stateMutability": "view",
+ "type": "function"
+ },
+ {
+ "inputs": [
+ {
+ "internalType": "bytes32",
+ "name": "role",
+ "type": "bytes32"
+ }
+ ],
+ "name": "getRoleAdmin",
+ "outputs": [
+ {
+ "internalType": "bytes32",
+ "name": "",
+ "type": "bytes32"
+ }
+ ],
+ "stateMutability": "view",
+ "type": "function"
+ },
+ {
+ "inputs": [
+ {
+ "internalType": "bytes32",
+ "name": "role",
+ "type": "bytes32"
+ },
+ {
+ "internalType": "address",
+ "name": "account",
+ "type": "address"
+ }
+ ],
+ "name": "grantRole",
+ "outputs": [],
+ "stateMutability": "nonpayable",
+ "type": "function"
+ },
+ {
+ "inputs": [
+ {
+ "internalType": "bytes32",
+ "name": "role",
+ "type": "bytes32"
+ },
+ {
+ "internalType": "address",
+ "name": "account",
+ "type": "address"
+ }
+ ],
+ "name": "hasRole",
+ "outputs": [
+ {
+ "internalType": "bool",
+ "name": "",
+ "type": "bool"
+ }
+ ],
+ "stateMutability": "view",
+ "type": "function"
+ },
+ {
+ "inputs": [
+ {
+ "internalType": "address",
+ "name": "account",
+ "type": "address"
+ }
+ ],
+ "name": "isAdmin",
+ "outputs": [
+ {
+ "internalType": "bool",
+ "name": "",
+ "type": "bool"
+ }
+ ],
+ "stateMutability": "view",
+ "type": "function"
+ },
+ {
+ "inputs": [
+ {
+ "internalType": "address",
+ "name": "to",
+ "type": "address"
+ },
+ {
+ "internalType": "uint256",
+ "name": "amount",
+ "type": "uint256"
+ }
+ ],
+ "name": "mint",
+ "outputs": [],
+ "stateMutability": "nonpayable",
+ "type": "function"
+ },
+ {
+ "inputs": [],
+ "name": "name",
+ "outputs": [
+ {
+ "internalType": "string",
+ "name": "",
+ "type": "string"
+ }
+ ],
+ "stateMutability": "view",
+ "type": "function"
+ },
+ {
+ "inputs": [
+ {
+ "internalType": "bytes32",
+ "name": "role",
+ "type": "bytes32"
+ },
+ {
+ "internalType": "address",
+ "name": "callerConfirmation",
+ "type": "address"
+ }
+ ],
+ "name": "renounceRole",
+ "outputs": [],
+ "stateMutability": "nonpayable",
+ "type": "function"
+ },
+ {
+ "inputs": [
+ {
+ "internalType": "bytes32",
+ "name": "role",
+ "type": "bytes32"
+ },
+ {
+ "internalType": "address",
+ "name": "account",
+ "type": "address"
+ }
+ ],
+ "name": "revokeRole",
+ "outputs": [],
+ "stateMutability": "nonpayable",
+ "type": "function"
+ },
+ {
+ "inputs": [
+ {
+ "internalType": "bytes4",
+ "name": "interfaceId",
+ "type": "bytes4"
+ }
+ ],
+ "name": "supportsInterface",
+ "outputs": [
+ {
+ "internalType": "bool",
+ "name": "",
+ "type": "bool"
+ }
+ ],
+ "stateMutability": "view",
+ "type": "function"
+ },
+ {
+ "inputs": [],
+ "name": "symbol",
+ "outputs": [
+ {
+ "internalType": "string",
+ "name": "",
+ "type": "string"
+ }
+ ],
+ "stateMutability": "view",
+ "type": "function"
+ },
+ {
+ "inputs": [],
+ "name": "totalSupply",
+ "outputs": [
+ {
+ "internalType": "uint256",
+ "name": "",
+ "type": "uint256"
+ }
+ ],
+ "stateMutability": "view",
+ "type": "function"
+ },
+ {
+ "inputs": [
+ {
+ "internalType": "address",
+ "name": "to",
+ "type": "address"
+ },
+ {
+ "internalType": "uint256",
+ "name": "value",
+ "type": "uint256"
+ }
+ ],
+ "name": "transfer",
+ "outputs": [
+ {
+ "internalType": "bool",
+ "name": "",
+ "type": "bool"
+ }
+ ],
+ "stateMutability": "nonpayable",
+ "type": "function"
+ },
+ {
+ "inputs": [
+ {
+ "internalType": "address",
+ "name": "from",
+ "type": "address"
+ },
+ {
+ "internalType": "address",
+ "name": "to",
+ "type": "address"
+ },
+ {
+ "internalType": "uint256",
+ "name": "value",
+ "type": "uint256"
+ }
+ ],
+ "name": "transferFrom",
+ "outputs": [
+ {
+ "internalType": "bool",
+ "name": "",
+ "type": "bool"
+ }
+ ],
+ "stateMutability": "nonpayable",
+ "type": "function"
+ }
+];
+
+export const BRIDGE_TOKEN_CONTRACT_BYTECODE = "0x60806040523480156200001157600080fd5b5060405162000fac38038062000fac8339810160408190526200003491620001ea565b8282600362000044838262000308565b50600462000053828262000308565b5062000065915060009050826200006f565b50505050620003d4565b60008281526005602090815260408083206001600160a01b038516845290915281205460ff16620001185760008381526005602090815260408083206001600160a01b03861684529091529020805460ff19166001179055620000cf3390565b6001600160a01b0316826001600160a01b0316847f2f8788117e7eff1d82e926ec794901d17c78024a50270940304540a733656f0d60405160405180910390a45060016200011c565b5060005b92915050565b634e487b7160e01b600052604160045260246000fd5b600082601f8301126200014a57600080fd5b81516001600160401b038082111562000167576200016762000122565b604051601f8301601f19908116603f0116810190828211818310171562000192576200019262000122565b8160405283815260209250866020858801011115620001b057600080fd5b600091505b83821015620001d45785820183015181830184015290820190620001b5565b6000602085830101528094505050505092915050565b6000806000606084860312156200020057600080fd5b83516001600160401b03808211156200021857600080fd5b620002268783880162000138565b945060208601519150808211156200023d57600080fd5b506200024c8682870162000138565b604086015190935090506001600160a01b03811681146200026c57600080fd5b809150509250925092565b600181811c908216806200028c57607f821691505b602082108103620002ad57634e487b7160e01b600052602260045260246000fd5b50919050565b601f82111562000303576000816000526020600020601f850160051c81016020861015620002de5750805b601f850160051c820191505b81811015620002ff57828155600101620002ea565b5050505b505050565b81516001600160401b0381111562000324576200032462000122565b6200033c8162000335845462000277565b84620002b3565b602080601f8311600181146200037457600084156200035b5750858301515b600019600386901b1c1916600185901b178555620002ff565b600085815260208120601f198616915b82811015620003a55788860151825594840194600190910190840162000384565b5085821015620003c45787850151600019600388901b60f8161c191681555b5050505050600190811b01905550565b610bc880620003e46000396000f3fe608060405234801561001057600080fd5b506004361061012c5760003560e01c806340c10f19116100ad57806395d89b411161007157806395d89b4114610288578063a217fddf14610290578063a9059cbb14610298578063d547741f146102ab578063dd62ed3e146102be57600080fd5b806340c10f191461021357806342966c681461022657806370a082311461023957806379cc67901461026257806391d148541461027557600080fd5b8063248a9ca3116100f4578063248a9ca3146101a657806324d7806c146101c95780632f2ff15d146101dc578063313ce567146101f157806336568abe1461020057600080fd5b806301ffc9a71461013157806306fdde0314610159578063095ea7b31461016e57806318160ddd1461018157806323b872dd14610193575b600080fd5b61014461013f3660046109ab565b6102f7565b60405190151581526020015b60405180910390f35b61016161032e565b60405161015091906109dc565b61014461017c366004610a47565b6103c0565b6002545b604051908152602001610150565b6101446101a1366004610a71565b6103d8565b6101856101b4366004610aad565b60009081526005602052604090206001015490565b6101446101d7366004610ac6565b6103fc565b6101ef6101ea366004610ae1565b610408565b005b60405160128152602001610150565b6101ef61020e366004610ae1565b610433565b6101ef610221366004610a47565b61046b565b6101ef610234366004610aad565b610480565b610185610247366004610ac6565b6001600160a01b031660009081526020819052604090205490565b6101ef610270366004610a47565b61048d565b610144610283366004610ae1565b6104a2565b6101616104cd565b610185600081565b6101446102a6366004610a47565b6104dc565b6101ef6102b9366004610ae1565b6104ea565b6101856102cc366004610b0d565b6001600160a01b03918216600090815260016020908152604080832093909416825291909152205
490565b60006001600160e01b03198216637965db0b60e01b148061032857506301ffc9a760e01b6001600160e01b03198316145b92915050565b60606003805461033d90610b37565b80601f016020809104026020016040519081016040528092919081815260200182805461036990610b37565b80156103b65780601f1061038b576101008083540402835291602001916103b6565b820191906000526020600020905b81548152906001019060200180831161039957829003601f168201915b5050505050905090565b6000336103ce81858561050f565b5060019392505050565b6000336103e685828561051c565b6103f1858585610599565b506001949350505050565b600061032881836104a2565b600082815260056020526040902060010154610423816105f8565b61042d8383610602565b50505050565b6001600160a01b038116331461045c5760405163334bd91960e11b815260040160405180910390fd5b6104668282610696565b505050565b6000610476816105f8565b6104668383610703565b61048a338261073d565b50565b6000610498816105f8565b610466838361073d565b60009182526005602090815260408084206001600160a01b0393909316845291905290205460ff1690565b60606004805461033d90610b37565b6000336103ce818585610599565b600082815260056020526040902060010154610505816105f8565b61042d8383610696565b6104668383836001610773565b6001600160a01b03838116600090815260016020908152604080832093861683529290522054600019811461042d578181101561058a57604051637dc7a0d960e11b81526001600160a01b038416600482015260248101829052604481018390526064015b60405180910390fd5b61042d84848484036000610773565b6001600160a01b0383166105c357604051634b637e8f60e11b815260006004820152602401610581565b6001600160a01b0382166105ed5760405163ec442f0560e01b815260006004820152602401610581565b610466838383610848565b61048a8133610972565b600061060e83836104a2565b61068e5760008381526005602090815260408083206001600160a01b03861684529091529020805460ff191660011790556106463390565b6001600160a01b0316826001600160a01b0316847f2f8788117e7eff1d82e926ec794901d17c78024a50270940304540a733656f0d60405160405180910390a4506001610328565b506000610328565b60006106a283836104a2565b1561068e5760008381526005602090815260408083206001600160a01b0386168085529252808320805460ff1916905551339286917ff6391f5c32d9c69d2a47ea670b442974b53935d1edc7fd64eb21e047a839171b9190a4506001610328565b6001600160a01b03821661072d5760405163ec442f0560e01b815260006004820152602401610581565b61073960008383610848565b5050565b6001600160a01b03821661076757604051634b637e8f60e11b815260006004820152602401610581565b61073982600083610848565b6001600160a01b03841661079d5760405163e602df0560e01b815260006004820152602401610581565b6001600160a01b0383166107c757604051634a1406b160e11b815260006004820152602401610581565b6001600160a01b038085166000908152600160209081526040808320938716835292905220829055801561042d57826001600160a01b0316846001600160a01b03167f8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e5b200ac8c7c3b9258460405161083a91815260200190565b60405180910390a350505050565b6001600160a01b0383166108735780600260008282546108689190610b71565b909155506108e59050565b6001600160a01b038316600090815260208190526040902054818110156108c65760405163391434e360e21b81526001600160a01b03851660048201526024810182905260448101839052606401610581565b6001600160a01b03841660009081526020819052604090209082900390555b6001600160a01b03821661090157600280548290039055610920565b6001600160a01b03821660009081526020819052604090208054820190555b816001600160a01b0316836001600160a01b03167fddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef8360405161096591815260200190565b60405180910390a3505050565b61097c82826104a2565b6107395760405163e2517d3f60e01b81526001600160a01b038216600482015260248101839052604401610581565b6000602082840312156109bd57600080fd5b81356001600160e01b0319811681146109d557600080fd5b9392505050565b600060208083528351
80602085015260005b81811015610a0a578581018301518582016040015282016109ee565b506000604082860101526040601f19601f8301168501019250505092915050565b80356001600160a01b0381168114610a4257600080fd5b919050565b60008060408385031215610a5a57600080fd5b610a6383610a2b565b946020939093013593505050565b600080600060608486031215610a8657600080fd5b610a8f84610a2b565b9250610a9d60208501610a2b565b9150604084013590509250925092565b600060208284031215610abf57600080fd5b5035919050565b600060208284031215610ad857600080fd5b6109d582610a2b565b60008060408385031215610af457600080fd5b82359150610b0460208401610a2b565b90509250929050565b60008060408385031215610b2057600080fd5b610b2983610a2b565b9150610b0460208401610a2b565b600181811c90821680610b4b57607f821691505b602082108103610b6b57634e487b7160e01b600052602260045260246000fd5b50919050565b8082018082111561032857634e487b7160e01b600052601160045260246000fdfea2646970667358221220e179fc58c926e64cb6e87416f8ca64c117044e3195b184afe45038857606c15364736f6c63430008160033"
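+
+// Illustrative usage sketch (not part of the tests): the ABI above can be attached to an
+// already-deployed instance with ethers v6. `tokenAddress` and `wallet` are placeholders, and
+// BRIDGE_TOKEN_CONTRACT_ABI is assumed to be the exported name of the ABI array in this file.
+//
+//   const token = new ethers.Contract(tokenAddress, BRIDGE_TOKEN_CONTRACT_ABI, wallet);
+//   console.log(await token.name(), await token.symbol(), await token.decimals());
+//   const adminRole = await token.DEFAULT_ADMIN_ROLE();
+//   console.log(await token.hasRole(adminRole, wallet.address));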
diff --git a/evm-tests/src/contracts/incremental.sol b/evm-tests/src/contracts/incremental.sol
new file mode 100644
index 0000000000..2b3bc2fd49
--- /dev/null
+++ b/evm-tests/src/contracts/incremental.sol
@@ -0,0 +1,22 @@
+// SPDX-License-Identifier: GPL-3.0
+pragma solidity >=0.8.2 <0.9.0;
+
+contract Storage {
+ uint256 number;
+
+ /**
+ * @dev Store value in variable
+ * @param num value to store
+ */
+ function store(uint256 num) public {
+ number = num;
+ }
+
+ /**
+ * @dev Return value
+ * @return value of 'number'
+ */
+ function retrieve() public view returns (uint256) {
+ return number;
+ }
+}
diff --git a/evm-tests/src/contracts/incremental.ts b/evm-tests/src/contracts/incremental.ts
new file mode 100644
index 0000000000..b19909e491
--- /dev/null
+++ b/evm-tests/src/contracts/incremental.ts
@@ -0,0 +1,39 @@
+export const INCREMENTAL_CONTRACT_ABI = [
+ {
+ "inputs": [],
+ "name": "retrieve",
+ "outputs": [
+ {
+ "internalType": "uint256",
+ "name": "",
+ "type": "uint256"
+ }
+ ],
+ "stateMutability": "view",
+ "type": "function"
+ },
+ {
+ "inputs": [
+ {
+ "internalType": "uint256",
+ "name": "num",
+ "type": "uint256"
+ }
+ ],
+ "name": "store",
+ "outputs": [],
+ "stateMutability": "nonpayable",
+ "type": "function"
+ }
+];
+
+/*
+"compiler": {
+ "version": "0.8.26+commit.8a97fa7a"
+ },
+*/
+
+export const INCREMENTAL_CONTRACT_BYTECODE = "6080604052348015600e575f80fd5b506101438061001c5f395ff3fe608060405234801561000f575f80fd5b5060043610610034575f3560e01c80632e64cec1146100385780636057361d14610056575b5f80fd5b610040610072565b60405161004d919061009b565b60405180910390f35b610070600480360381019061006b91906100e2565b61007a565b005b5f8054905090565b805f8190555050565b5f819050919050565b61009581610083565b82525050565b5f6020820190506100ae5f83018461008c565b92915050565b5f80fd5b6100c181610083565b81146100cb575f80fd5b50565b5f813590506100dc816100b8565b92915050565b5f602082840312156100f7576100f66100b4565b5b5f610104848285016100ce565b9150509291505056fea26469706673582212209a0dd35336aff1eb3eeb11db76aa60a1427a12c1b92f945ea8c8d1dfa337cf2264736f6c634300081a0033"
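+
+// Illustrative deployment sketch (assumes an ethers v6 Wallet `wallet` connected to the local node):
+//
+//   const factory = new ethers.ContractFactory(INCREMENTAL_CONTRACT_ABI, INCREMENTAL_CONTRACT_BYTECODE, wallet);
+//   const contract = await factory.deploy();
+//   await contract.waitForDeployment();
+//   await (await contract.store(42)).wait();
+//   console.log(await contract.retrieve()); // 42n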
+
+
+
diff --git a/evm-tests/src/contracts/metagraph.ts b/evm-tests/src/contracts/metagraph.ts
new file mode 100644
index 0000000000..d0c3bf5154
--- /dev/null
+++ b/evm-tests/src/contracts/metagraph.ts
@@ -0,0 +1,391 @@
+export const IMETAGRAPH_ADDRESS = "0x0000000000000000000000000000000000000802";
+
+export const IMetagraphABI = [
+ {
+ inputs: [
+ {
+ internalType: "uint16",
+ name: "netuid",
+ type: "uint16",
+ },
+ {
+ internalType: "uint16",
+ name: "uid",
+ type: "uint16",
+ },
+ ],
+ name: "getAxon",
+ outputs: [
+ {
+ components: [
+ {
+ internalType: "uint64",
+ name: "block",
+ type: "uint64",
+ },
+ {
+ internalType: "uint32",
+ name: "version",
+ type: "uint32",
+ },
+ {
+ internalType: "uint128",
+ name: "ip",
+ type: "uint128",
+ },
+ {
+ internalType: "uint16",
+ name: "port",
+ type: "uint16",
+ },
+ {
+ internalType: "uint8",
+ name: "ip_type",
+ type: "uint8",
+ },
+ {
+ internalType: "uint8",
+ name: "protocol",
+ type: "uint8",
+ },
+ ],
+ internalType: "struct AxonInfo",
+ name: "",
+ type: "tuple",
+ },
+ ],
+ stateMutability: "view",
+ type: "function",
+ },
+ {
+ inputs: [
+ {
+ internalType: "uint16",
+ name: "netuid",
+ type: "uint16",
+ },
+ {
+ internalType: "uint16",
+ name: "uid",
+ type: "uint16",
+ },
+ ],
+ name: "getColdkey",
+ outputs: [
+ {
+ internalType: "bytes32",
+ name: "",
+ type: "bytes32",
+ },
+ ],
+ stateMutability: "view",
+ type: "function",
+ },
+ {
+ inputs: [
+ {
+ internalType: "uint16",
+ name: "netuid",
+ type: "uint16",
+ },
+ {
+ internalType: "uint16",
+ name: "uid",
+ type: "uint16",
+ },
+ ],
+ name: "getConsensus",
+ outputs: [
+ {
+ internalType: "uint16",
+ name: "",
+ type: "uint16",
+ },
+ ],
+ stateMutability: "view",
+ type: "function",
+ },
+ {
+ inputs: [
+ {
+ internalType: "uint16",
+ name: "netuid",
+ type: "uint16",
+ },
+ {
+ internalType: "uint16",
+ name: "uid",
+ type: "uint16",
+ },
+ ],
+ name: "getDividends",
+ outputs: [
+ {
+ internalType: "uint16",
+ name: "",
+ type: "uint16",
+ },
+ ],
+ stateMutability: "view",
+ type: "function",
+ },
+ {
+ inputs: [
+ {
+ internalType: "uint16",
+ name: "netuid",
+ type: "uint16",
+ },
+ {
+ internalType: "uint16",
+ name: "uid",
+ type: "uint16",
+ },
+ ],
+ name: "getEmission",
+ outputs: [
+ {
+ internalType: "uint64",
+ name: "",
+ type: "uint64",
+ },
+ ],
+ stateMutability: "view",
+ type: "function",
+ },
+ {
+ inputs: [
+ {
+ internalType: "uint16",
+ name: "netuid",
+ type: "uint16",
+ },
+ {
+ internalType: "uint16",
+ name: "uid",
+ type: "uint16",
+ },
+ ],
+ name: "getHotkey",
+ outputs: [
+ {
+ internalType: "bytes32",
+ name: "",
+ type: "bytes32",
+ },
+ ],
+ stateMutability: "view",
+ type: "function",
+ },
+ {
+ inputs: [
+ {
+ internalType: "uint16",
+ name: "netuid",
+ type: "uint16",
+ },
+ {
+ internalType: "uint16",
+ name: "uid",
+ type: "uint16",
+ },
+ ],
+ name: "getIncentive",
+ outputs: [
+ {
+ internalType: "uint16",
+ name: "",
+ type: "uint16",
+ },
+ ],
+ stateMutability: "view",
+ type: "function",
+ },
+ {
+ inputs: [
+ {
+ internalType: "uint16",
+ name: "netuid",
+ type: "uint16",
+ },
+ {
+ internalType: "uint16",
+ name: "uid",
+ type: "uint16",
+ },
+ ],
+ name: "getIsActive",
+ outputs: [
+ {
+ internalType: "bool",
+ name: "",
+ type: "bool",
+ },
+ ],
+ stateMutability: "view",
+ type: "function",
+ },
+ {
+ inputs: [
+ {
+ internalType: "uint16",
+ name: "netuid",
+ type: "uint16",
+ },
+ {
+ internalType: "uint16",
+ name: "uid",
+ type: "uint16",
+ },
+ ],
+ name: "getLastUpdate",
+ outputs: [
+ {
+ internalType: "uint64",
+ name: "",
+ type: "uint64",
+ },
+ ],
+ stateMutability: "view",
+ type: "function",
+ },
+ {
+ inputs: [
+ {
+ internalType: "uint16",
+ name: "netuid",
+ type: "uint16",
+ },
+ {
+ internalType: "uint16",
+ name: "uid",
+ type: "uint16",
+ },
+ ],
+ name: "getRank",
+ outputs: [
+ {
+ internalType: "uint16",
+ name: "",
+ type: "uint16",
+ },
+ ],
+ stateMutability: "view",
+ type: "function",
+ },
+ {
+ inputs: [
+ {
+ internalType: "uint16",
+ name: "netuid",
+ type: "uint16",
+ },
+ {
+ internalType: "uint16",
+ name: "uid",
+ type: "uint16",
+ },
+ ],
+ name: "getStake",
+ outputs: [
+ {
+ internalType: "uint64",
+ name: "",
+ type: "uint64",
+ },
+ ],
+ stateMutability: "view",
+ type: "function",
+ },
+ {
+ inputs: [
+ {
+ internalType: "uint16",
+ name: "netuid",
+ type: "uint16",
+ },
+ {
+ internalType: "uint16",
+ name: "uid",
+ type: "uint16",
+ },
+ ],
+ name: "getTrust",
+ outputs: [
+ {
+ internalType: "uint16",
+ name: "",
+ type: "uint16",
+ },
+ ],
+ stateMutability: "view",
+ type: "function",
+ },
+ {
+ inputs: [
+ {
+ internalType: "uint16",
+ name: "netuid",
+ type: "uint16",
+ },
+ ],
+ name: "getUidCount",
+ outputs: [
+ {
+ internalType: "uint16",
+ name: "",
+ type: "uint16",
+ },
+ ],
+ stateMutability: "view",
+ type: "function",
+ },
+ {
+ inputs: [
+ {
+ internalType: "uint16",
+ name: "netuid",
+ type: "uint16",
+ },
+ {
+ internalType: "uint16",
+ name: "uid",
+ type: "uint16",
+ },
+ ],
+ name: "getValidatorStatus",
+ outputs: [
+ {
+ internalType: "bool",
+ name: "",
+ type: "bool",
+ },
+ ],
+ stateMutability: "view",
+ type: "function",
+ },
+ {
+ inputs: [
+ {
+ internalType: "uint16",
+ name: "netuid",
+ type: "uint16",
+ },
+ {
+ internalType: "uint16",
+ name: "uid",
+ type: "uint16",
+ },
+ ],
+ name: "getVtrust",
+ outputs: [
+ {
+ internalType: "uint16",
+ name: "",
+ type: "uint16",
+ },
+ ],
+ stateMutability: "view",
+ type: "function",
+ },
+];
\ No newline at end of file
diff --git a/evm-tests/src/contracts/neuron.ts b/evm-tests/src/contracts/neuron.ts
new file mode 100644
index 0000000000..4a8fb47e4c
--- /dev/null
+++ b/evm-tests/src/contracts/neuron.ts
@@ -0,0 +1,235 @@
+export const INEURON_ADDRESS = "0x0000000000000000000000000000000000000804";
+
+export const INeuronABI = [
+ {
+ inputs: [
+ {
+ internalType: "uint16",
+ name: "netuid",
+ type: "uint16",
+ },
+ {
+ internalType: "bytes32",
+ name: "commitHash",
+ type: "bytes32",
+ },
+ ],
+ name: "commitWeights",
+ outputs: [],
+ stateMutability: "payable",
+ type: "function",
+ },
+ {
+ inputs: [
+ {
+ internalType: "uint16",
+ name: "netuid",
+ type: "uint16",
+ },
+ {
+ internalType: "uint16[]",
+ name: "uids",
+ type: "uint16[]",
+ },
+ {
+ internalType: "uint16[]",
+ name: "values",
+ type: "uint16[]",
+ },
+ {
+ internalType: "uint16[]",
+ name: "salt",
+ type: "uint16[]",
+ },
+ {
+ internalType: "uint64",
+ name: "versionKey",
+ type: "uint64",
+ },
+ ],
+ name: "revealWeights",
+ outputs: [],
+ stateMutability: "payable",
+ type: "function",
+ },
+ {
+ inputs: [
+ {
+ internalType: "uint16",
+ name: "netuid",
+ type: "uint16",
+ },
+ {
+ internalType: "uint16[]",
+ name: "dests",
+ type: "uint16[]",
+ },
+ {
+ internalType: "uint16[]",
+ name: "weights",
+ type: "uint16[]",
+ },
+ {
+ internalType: "uint64",
+ name: "versionKey",
+ type: "uint64",
+ },
+ ],
+ name: "setWeights",
+ outputs: [],
+ stateMutability: "payable",
+ type: "function",
+ },
+ {
+ inputs: [
+ {
+ internalType: "uint16",
+ name: "netuid",
+ type: "uint16",
+ },
+ {
+ internalType: "uint32",
+ name: "version",
+ type: "uint32",
+ },
+ {
+ internalType: "uint128",
+ name: "ip",
+ type: "uint128",
+ },
+ {
+ internalType: "uint16",
+ name: "port",
+ type: "uint16",
+ },
+ {
+ internalType: "uint8",
+ name: "ipType",
+ type: "uint8",
+ },
+ {
+ internalType: "uint8",
+ name: "protocol",
+ type: "uint8",
+ },
+ {
+ internalType: "uint8",
+ name: "placeholder1",
+ type: "uint8",
+ },
+ {
+ internalType: "uint8",
+ name: "placeholder2",
+ type: "uint8",
+ },
+ ],
+ name: "serveAxon",
+ outputs: [],
+ stateMutability: "payable",
+ type: "function",
+ },
+ {
+ inputs: [
+ {
+ internalType: "uint16",
+ name: "netuid",
+ type: "uint16",
+ },
+ {
+ internalType: "uint32",
+ name: "version",
+ type: "uint32",
+ },
+ {
+ internalType: "uint128",
+ name: "ip",
+ type: "uint128",
+ },
+ {
+ internalType: "uint16",
+ name: "port",
+ type: "uint16",
+ },
+ {
+ internalType: "uint8",
+ name: "ipType",
+ type: "uint8",
+ },
+ {
+ internalType: "uint8",
+ name: "protocol",
+ type: "uint8",
+ },
+ {
+ internalType: "uint8",
+ name: "placeholder1",
+ type: "uint8",
+ },
+ {
+ internalType: "uint8",
+ name: "placeholder2",
+ type: "uint8",
+ },
+ {
+ internalType: "bytes",
+ name: "certificate",
+ type: "bytes",
+ },
+ ],
+ name: "serveAxonTls",
+ outputs: [],
+ stateMutability: "payable",
+ type: "function",
+ },
+ {
+ inputs: [
+ {
+ internalType: "uint16",
+ name: "netuid",
+ type: "uint16",
+ },
+ {
+ internalType: "uint32",
+ name: "version",
+ type: "uint32",
+ },
+ {
+ internalType: "uint128",
+ name: "ip",
+ type: "uint128",
+ },
+ {
+ internalType: "uint16",
+ name: "port",
+ type: "uint16",
+ },
+ {
+ internalType: "uint8",
+ name: "ipType",
+ type: "uint8",
+ },
+ ],
+ name: "servePrometheus",
+ outputs: [],
+ stateMutability: "payable",
+ type: "function",
+ },
+ {
+ inputs: [
+ {
+ internalType: "uint16",
+ name: "netuid",
+ type: "uint16",
+ },
+ {
+ internalType: "bytes32",
+ name: "hotkey",
+ type: "bytes32",
+ },
+ ],
+ name: "burnedRegister",
+ outputs: [],
+ stateMutability: "payable",
+ type: "function",
+ },
+];
\ No newline at end of file
diff --git a/evm-tests/src/contracts/staking.ts b/evm-tests/src/contracts/staking.ts
new file mode 100644
index 0000000000..9a30d307ba
--- /dev/null
+++ b/evm-tests/src/contracts/staking.ts
@@ -0,0 +1,243 @@
+export const ISTAKING_ADDRESS = "0x0000000000000000000000000000000000000801";
+export const ISTAKING_V2_ADDRESS = "0x0000000000000000000000000000000000000805";
+
+export const IStakingABI = [
+ {
+ inputs: [
+ {
+ internalType: "bytes32",
+ name: "delegate",
+ type: "bytes32",
+ },
+ ],
+ name: "addProxy",
+ outputs: [],
+ stateMutability: "nonpayable",
+ type: "function",
+ },
+ {
+ inputs: [
+ {
+ internalType: "bytes32",
+ name: "hotkey",
+ type: "bytes32",
+ },
+ {
+ internalType: "uint256",
+ name: "netuid",
+ type: "uint256",
+ },
+ ],
+ name: "addStake",
+ outputs: [],
+ stateMutability: "payable",
+ type: "function",
+ },
+ {
+ inputs: [
+ {
+ internalType: "bytes32",
+ name: "delegate",
+ type: "bytes32",
+ },
+ ],
+ name: "removeProxy",
+ outputs: [],
+ stateMutability: "nonpayable",
+ type: "function",
+ },
+ {
+ inputs: [
+ {
+ internalType: "bytes32",
+ name: "hotkey",
+ type: "bytes32",
+ },
+ {
+ internalType: "bytes32",
+ name: "coldkey",
+ type: "bytes32",
+ },
+ {
+ internalType: "uint256",
+ name: "netuid",
+ type: "uint256",
+ },
+ ],
+ name: "getStake",
+ outputs: [
+ {
+ internalType: "uint256",
+ name: "",
+ type: "uint256",
+ },
+ ],
+ stateMutability: "view",
+ type: "function",
+ },
+ {
+ inputs: [
+ {
+ internalType: "bytes32",
+ name: "hotkey",
+ type: "bytes32",
+ },
+ {
+ internalType: "uint256",
+ name: "amount",
+ type: "uint256",
+ },
+ {
+ internalType: "uint256",
+ name: "netuid",
+ type: "uint256",
+ },
+ ],
+ name: "removeStake",
+ outputs: [],
+ stateMutability: "nonpayable",
+ type: "function",
+ },
+];
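+
+// Illustrative sketch: the staking precompile can be reached through the getContract helper in
+// src/eth.ts. `wallet`, `hotkeyPublicKey` and `coldkeyPublicKey` (32-byte hex strings), `netuid`
+// and `amount` are placeholders.
+//
+//   const staking = getContract(ISTAKING_ADDRESS, IStakingABI, wallet);
+//   await (await staking.addStake(hotkeyPublicKey, netuid, { value: amount })).wait();
+//   const stake = await staking.getStake(hotkeyPublicKey, coldkeyPublicKey, netuid);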
+
+export const IStakingV2ABI = [
+ {
+ "inputs": [
+ {
+ "internalType": "bytes32",
+ "name": "delegate",
+ "type": "bytes32"
+ }
+ ],
+ "name": "addProxy",
+ "outputs": [],
+ "stateMutability": "nonpayable",
+ "type": "function"
+ },
+ {
+ "inputs": [
+ {
+ "internalType": "bytes32",
+ "name": "hotkey",
+ "type": "bytes32"
+ },
+ {
+ "internalType": "uint256",
+ "name": "amount",
+ "type": "uint256"
+ },
+ {
+ "internalType": "uint256",
+ "name": "netuid",
+ "type": "uint256"
+ }
+ ],
+ "name": "addStake",
+ "outputs": [],
+ "stateMutability": "payable",
+ "type": "function"
+ },
+ {
+ "inputs": [
+ {
+ "internalType": "bytes32",
+ "name": "hotkey",
+ "type": "bytes32"
+ },
+ {
+ "internalType": "bytes32",
+ "name": "coldkey",
+ "type": "bytes32"
+ },
+ {
+ "internalType": "uint256",
+ "name": "netuid",
+ "type": "uint256"
+ }
+ ],
+ "name": "getStake",
+ "outputs": [
+ {
+ "internalType": "uint256",
+ "name": "",
+ "type": "uint256"
+ }
+ ],
+ "stateMutability": "view",
+ "type": "function"
+ },
+ {
+ "inputs": [
+ {
+ "internalType": "bytes32",
+ "name": "coldkey",
+ "type": "bytes32"
+ }
+ ],
+ "name": "getTotalColdkeyStake",
+ "outputs": [
+ {
+ "internalType": "uint256",
+ "name": "",
+ "type": "uint256"
+ }
+ ],
+ "stateMutability": "view",
+ "type": "function"
+ },
+ {
+ "inputs": [
+ {
+ "internalType": "bytes32",
+ "name": "hotkey",
+ "type": "bytes32"
+ }
+ ],
+ "name": "getTotalHotkeyStake",
+ "outputs": [
+ {
+ "internalType": "uint256",
+ "name": "",
+ "type": "uint256"
+ }
+ ],
+ "stateMutability": "view",
+ "type": "function"
+ },
+ {
+ "inputs": [
+ {
+ "internalType": "bytes32",
+ "name": "delegate",
+ "type": "bytes32"
+ }
+ ],
+ "name": "removeProxy",
+ "outputs": [],
+ "stateMutability": "nonpayable",
+ "type": "function"
+ },
+ {
+ "inputs": [
+ {
+ "internalType": "bytes32",
+ "name": "hotkey",
+ "type": "bytes32"
+ },
+ {
+ "internalType": "uint256",
+ "name": "amount",
+ "type": "uint256"
+ },
+ {
+ "internalType": "uint256",
+ "name": "netuid",
+ "type": "uint256"
+ }
+ ],
+ "name": "removeStake",
+ "outputs": [],
+ "stateMutability": "nonpayable",
+ "type": "function"
+ }
+];
\ No newline at end of file
diff --git a/evm-tests/src/contracts/subnet.ts b/evm-tests/src/contracts/subnet.ts
new file mode 100644
index 0000000000..9b6fe00596
--- /dev/null
+++ b/evm-tests/src/contracts/subnet.ts
@@ -0,0 +1,889 @@
+export const ISUBNET_ADDRESS = "0x0000000000000000000000000000000000000803";
+
+export const ISubnetABI = [
+ {
+ inputs: [
+ {
+ internalType: "uint16",
+ name: "netuid",
+ type: "uint16",
+ },
+ ],
+ name: "getAdjustmentAlpha",
+ outputs: [
+ {
+ internalType: "uint64",
+ name: "",
+ type: "uint64",
+ },
+ ],
+ stateMutability: "view",
+ type: "function",
+ },
+ {
+ inputs: [
+ {
+ internalType: "uint16",
+ name: "netuid",
+ type: "uint16",
+ },
+ ],
+ name: "getAlphaValues",
+ outputs: [
+ {
+ internalType: "uint16",
+ name: "",
+ type: "uint16",
+ },
+ {
+ internalType: "uint16",
+ name: "",
+ type: "uint16",
+ },
+ ],
+ stateMutability: "view",
+ type: "function",
+ },
+ {
+ inputs: [
+ {
+ internalType: "uint16",
+ name: "netuid",
+ type: "uint16",
+ },
+ ],
+ name: "getBondsMovingAverage",
+ outputs: [
+ {
+ internalType: "uint64",
+ name: "",
+ type: "uint64",
+ },
+ ],
+ stateMutability: "view",
+ type: "function",
+ },
+ {
+ inputs: [
+ {
+ internalType: "uint16",
+ name: "netuid",
+ type: "uint16",
+ },
+ ],
+ name: "getCommitRevealWeightsEnabled",
+ outputs: [
+ {
+ internalType: "bool",
+ name: "",
+ type: "bool",
+ },
+ ],
+ stateMutability: "view",
+ type: "function",
+ },
+ {
+ inputs: [
+ {
+ internalType: "uint16",
+ name: "netuid",
+ type: "uint16",
+ },
+ ],
+ name: "getDifficulty",
+ outputs: [
+ {
+ internalType: "uint64",
+ name: "",
+ type: "uint64",
+ },
+ ],
+ stateMutability: "view",
+ type: "function",
+ },
+ {
+ inputs: [
+ {
+ internalType: "uint16",
+ name: "",
+ type: "uint16",
+ },
+ ],
+ name: "getImmunityPeriod",
+ outputs: [
+ {
+ internalType: "uint16",
+ name: "",
+ type: "uint16",
+ },
+ ],
+ stateMutability: "view",
+ type: "function",
+ },
+ {
+ inputs: [
+ {
+ internalType: "uint16",
+ name: "",
+ type: "uint16",
+ },
+ ],
+ name: "getKappa",
+ outputs: [
+ {
+ internalType: "uint16",
+ name: "",
+ type: "uint16",
+ },
+ ],
+ stateMutability: "view",
+ type: "function",
+ },
+ {
+ inputs: [
+ {
+ internalType: "uint16",
+ name: "netuid",
+ type: "uint16",
+ },
+ ],
+ name: "getMaxBurn",
+ outputs: [
+ {
+ internalType: "uint64",
+ name: "",
+ type: "uint64",
+ },
+ ],
+ stateMutability: "view",
+ type: "function",
+ },
+ {
+ inputs: [
+ {
+ internalType: "uint16",
+ name: "netuid",
+ type: "uint16",
+ },
+ ],
+ name: "getMaxDifficulty",
+ outputs: [
+ {
+ internalType: "uint64",
+ name: "",
+ type: "uint64",
+ },
+ ],
+ stateMutability: "view",
+ type: "function",
+ },
+ {
+ inputs: [
+ {
+ internalType: "uint16",
+ name: "netuid",
+ type: "uint16",
+ },
+ ],
+ name: "getMaxWeightLimit",
+ outputs: [
+ {
+ internalType: "uint16",
+ name: "",
+ type: "uint16",
+ },
+ ],
+ stateMutability: "view",
+ type: "function",
+ },
+ {
+ inputs: [
+ {
+ internalType: "uint16",
+ name: "netuid",
+ type: "uint16",
+ },
+ ],
+ name: "getMinAllowedWeights",
+ outputs: [
+ {
+ internalType: "uint16",
+ name: "",
+ type: "uint16",
+ },
+ ],
+ stateMutability: "view",
+ type: "function",
+ },
+ {
+ inputs: [
+ {
+ internalType: "uint16",
+ name: "netuid",
+ type: "uint16",
+ },
+ ],
+ name: "getMinBurn",
+ outputs: [
+ {
+ internalType: "uint64",
+ name: "",
+ type: "uint64",
+ },
+ ],
+ stateMutability: "view",
+ type: "function",
+ },
+ {
+ inputs: [
+ {
+ internalType: "uint16",
+ name: "netuid",
+ type: "uint16",
+ },
+ ],
+ name: "getMinDifficulty",
+ outputs: [
+ {
+ internalType: "uint64",
+ name: "",
+ type: "uint64",
+ },
+ ],
+ stateMutability: "view",
+ type: "function",
+ },
+ {
+ inputs: [
+ {
+ internalType: "uint16",
+ name: "netuid",
+ type: "uint16",
+ },
+ ],
+ name: "getNetworkRegistrationAllowed",
+ outputs: [
+ {
+ internalType: "bool",
+ name: "",
+ type: "bool",
+ },
+ ],
+ stateMutability: "view",
+ type: "function",
+ },
+ {
+ inputs: [
+ {
+ internalType: "uint16",
+ name: "",
+ type: "uint16",
+ },
+ ],
+ name: "getRho",
+ outputs: [
+ {
+ internalType: "uint16",
+ name: "",
+ type: "uint16",
+ },
+ ],
+ stateMutability: "view",
+ type: "function",
+ },
+ {
+ inputs: [
+ {
+ internalType: "uint16",
+ name: "netuid",
+ type: "uint16",
+ },
+ ],
+ name: "getServingRateLimit",
+ outputs: [
+ {
+ internalType: "uint64",
+ name: "",
+ type: "uint64",
+ },
+ ],
+ stateMutability: "view",
+ type: "function",
+ },
+ {
+ inputs: [
+ {
+ internalType: "uint16",
+ name: "netuid",
+ type: "uint16",
+ },
+ ],
+ name: "getWeightsSetRateLimit",
+ outputs: [
+ {
+ internalType: "uint64",
+ name: "",
+ type: "uint64",
+ },
+ ],
+ stateMutability: "view",
+ type: "function",
+ },
+ {
+ inputs: [
+ {
+ internalType: "uint16",
+ name: "netuid",
+ type: "uint16",
+ },
+ ],
+ name: "getWeightsVersionKey",
+ outputs: [
+ {
+ internalType: "uint64",
+ name: "",
+ type: "uint64",
+ },
+ ],
+ stateMutability: "view",
+ type: "function",
+ },
+ {
+ inputs: [
+ {
+ internalType: "uint16",
+ name: "netuid",
+ type: "uint16",
+ },
+ {
+ internalType: "uint16",
+ name: "activityCutoff",
+ type: "uint16",
+ },
+ ],
+ name: "setActivityCutoff",
+ outputs: [],
+ stateMutability: "payable",
+ type: "function",
+ },
+ {
+ inputs: [
+ {
+ internalType: "uint16",
+ name: "netuid",
+ type: "uint16",
+ },
+ ],
+ name: "getActivityCutoff",
+ outputs: [
+ {
+ internalType: "uint16",
+ name: "",
+ type: "uint16",
+ },
+ ],
+ stateMutability: "view",
+ type: "function",
+ },
+ {
+ inputs: [
+ {
+ internalType: "uint16",
+ name: "netuid",
+ type: "uint16",
+ },
+ {
+ internalType: "uint64",
+ name: "adjustmentAlpha",
+ type: "uint64",
+ },
+ ],
+ name: "setAdjustmentAlpha",
+ outputs: [],
+ stateMutability: "payable",
+ type: "function",
+ },
+ {
+ inputs: [
+ {
+ internalType: "uint16",
+ name: "netuid",
+ type: "uint16",
+ },
+ {
+ internalType: "uint16",
+ name: "alphaLow",
+ type: "uint16",
+ },
+ {
+ internalType: "uint16",
+ name: "alphaHigh",
+ type: "uint16",
+ },
+ ],
+ name: "setAlphaValues",
+ outputs: [],
+ stateMutability: "payable",
+ type: "function",
+ },
+ {
+ inputs: [
+ {
+ internalType: "uint16",
+ name: "netuid",
+ type: "uint16",
+ },
+ {
+ internalType: "uint64",
+ name: "bondsMovingAverage",
+ type: "uint64",
+ },
+ ],
+ name: "setBondsMovingAverage",
+ outputs: [],
+ stateMutability: "payable",
+ type: "function",
+ },
+ {
+ inputs: [
+ {
+ internalType: "uint16",
+ name: "netuid",
+ type: "uint16",
+ },
+ {
+ internalType: "bool",
+ name: "commitRevealWeightsEnabled",
+ type: "bool",
+ },
+ ],
+ name: "setCommitRevealWeightsEnabled",
+ outputs: [],
+ stateMutability: "payable",
+ type: "function",
+ },
+ {
+ inputs: [
+ {
+ internalType: "uint16",
+ name: "netuid",
+ type: "uint16",
+ },
+ ],
+ name: "getCommitRevealWeightsInterval",
+ outputs: [
+ {
+ internalType: "uint64",
+ name: "",
+ type: "uint64",
+ },
+ ],
+ stateMutability: "view",
+ type: "function",
+ },
+ {
+ inputs: [
+ {
+ internalType: "uint16",
+ name: "netuid",
+ type: "uint16",
+ },
+ {
+ internalType: "uint64",
+ name: "commitRevealWeightsInterval",
+ type: "uint64",
+ },
+ ],
+ name: "setCommitRevealWeightsInterval",
+ outputs: [],
+ stateMutability: "payable",
+ type: "function",
+ },
+ {
+ inputs: [
+ {
+ internalType: "uint16",
+ name: "netuid",
+ type: "uint16",
+ },
+ {
+ internalType: "uint64",
+ name: "difficulty",
+ type: "uint64",
+ },
+ ],
+ name: "setDifficulty",
+ outputs: [],
+ stateMutability: "payable",
+ type: "function",
+ },
+ {
+ inputs: [
+ {
+ internalType: "uint16",
+ name: "netuid",
+ type: "uint16",
+ },
+ {
+ internalType: "uint16",
+ name: "immunityPeriod",
+ type: "uint16",
+ },
+ ],
+ name: "setImmunityPeriod",
+ outputs: [],
+ stateMutability: "payable",
+ type: "function",
+ },
+ {
+ inputs: [
+ {
+ internalType: "uint16",
+ name: "netuid",
+ type: "uint16",
+ },
+ {
+ internalType: "uint16",
+ name: "kappa",
+ type: "uint16",
+ },
+ ],
+ name: "setKappa",
+ outputs: [],
+ stateMutability: "payable",
+ type: "function",
+ },
+ {
+ inputs: [
+ {
+ internalType: "uint16",
+ name: "netuid",
+ type: "uint16",
+ },
+ ],
+ name: "getLiquidAlphaEnabled",
+ outputs: [
+ {
+ internalType: "bool",
+ name: "",
+ type: "bool",
+ },
+ ],
+ stateMutability: "view",
+ type: "function",
+ },
+ {
+ inputs: [
+ {
+ internalType: "uint16",
+ name: "netuid",
+ type: "uint16",
+ },
+ {
+ internalType: "bool",
+ name: "liquidAlphaEnabled",
+ type: "bool",
+ },
+ ],
+ name: "setLiquidAlphaEnabled",
+ outputs: [],
+ stateMutability: "payable",
+ type: "function",
+ },
+ {
+ inputs: [
+ {
+ internalType: "uint16",
+ name: "netuid",
+ type: "uint16",
+ },
+ {
+ internalType: "uint64",
+ name: "maxBurn",
+ type: "uint64",
+ },
+ ],
+ name: "setMaxBurn",
+ outputs: [],
+ stateMutability: "payable",
+ type: "function",
+ },
+ {
+ inputs: [
+ {
+ internalType: "uint16",
+ name: "netuid",
+ type: "uint16",
+ },
+ {
+ internalType: "uint64",
+ name: "maxDifficulty",
+ type: "uint64",
+ },
+ ],
+ name: "setMaxDifficulty",
+ outputs: [],
+ stateMutability: "payable",
+ type: "function",
+ },
+ {
+ inputs: [
+ {
+ internalType: "uint16",
+ name: "netuid",
+ type: "uint16",
+ },
+ {
+ internalType: "uint16",
+ name: "maxWeightLimit",
+ type: "uint16",
+ },
+ ],
+ name: "setMaxWeightLimit",
+ outputs: [],
+ stateMutability: "payable",
+ type: "function",
+ },
+ {
+ inputs: [
+ {
+ internalType: "uint16",
+ name: "netuid",
+ type: "uint16",
+ },
+ {
+ internalType: "uint16",
+ name: "minAllowedWeights",
+ type: "uint16",
+ },
+ ],
+ name: "setMinAllowedWeights",
+ outputs: [],
+ stateMutability: "payable",
+ type: "function",
+ },
+ {
+ inputs: [
+ {
+ internalType: "uint16",
+ name: "netuid",
+ type: "uint16",
+ },
+ {
+ internalType: "uint64",
+ name: "minBurn",
+ type: "uint64",
+ },
+ ],
+ name: "setMinBurn",
+ outputs: [],
+ stateMutability: "payable",
+ type: "function",
+ },
+ {
+ inputs: [
+ {
+ internalType: "uint16",
+ name: "netuid",
+ type: "uint16",
+ },
+ {
+ internalType: "uint64",
+ name: "minDifficulty",
+ type: "uint64",
+ },
+ ],
+ name: "setMinDifficulty",
+ outputs: [],
+ stateMutability: "payable",
+ type: "function",
+ },
+ {
+ inputs: [
+ {
+ internalType: "uint16",
+ name: "netuid",
+ type: "uint16",
+ },
+ ],
+ name: "getNetworkPowRegistrationAllowed",
+ outputs: [
+ {
+ internalType: "bool",
+ name: "",
+ type: "bool",
+ },
+ ],
+ stateMutability: "view",
+ type: "function",
+ },
+ {
+ inputs: [
+ {
+ internalType: "uint16",
+ name: "netuid",
+ type: "uint16",
+ },
+ {
+ internalType: "bool",
+ name: "networkPowRegistrationAllowed",
+ type: "bool",
+ },
+ ],
+ name: "setNetworkPowRegistrationAllowed",
+ outputs: [],
+ stateMutability: "payable",
+ type: "function",
+ },
+ {
+ inputs: [
+ {
+ internalType: "uint16",
+ name: "netuid",
+ type: "uint16",
+ },
+ {
+ internalType: "bool",
+ name: "networkRegistrationAllowed",
+ type: "bool",
+ },
+ ],
+ name: "setNetworkRegistrationAllowed",
+ outputs: [],
+ stateMutability: "payable",
+ type: "function",
+ },
+ {
+ inputs: [
+ {
+ internalType: "uint16",
+ name: "netuid",
+ type: "uint16",
+ },
+ {
+ internalType: "uint16",
+ name: "rho",
+ type: "uint16",
+ },
+ ],
+ name: "setRho",
+ outputs: [],
+ stateMutability: "payable",
+ type: "function",
+ },
+ {
+ inputs: [
+ {
+ internalType: "uint16",
+ name: "netuid",
+ type: "uint16",
+ },
+ {
+ internalType: "uint64",
+ name: "servingRateLimit",
+ type: "uint64",
+ },
+ ],
+ name: "setServingRateLimit",
+ outputs: [],
+ stateMutability: "payable",
+ type: "function",
+ },
+ {
+ inputs: [
+ {
+ internalType: "uint16",
+ name: "netuid",
+ type: "uint16",
+ },
+ {
+ internalType: "uint64",
+ name: "weightsSetRateLimit",
+ type: "uint64",
+ },
+ ],
+ name: "setWeightsSetRateLimit",
+ outputs: [],
+ stateMutability: "payable",
+ type: "function",
+ },
+ {
+ inputs: [
+ {
+ internalType: "uint16",
+ name: "netuid",
+ type: "uint16",
+ },
+ {
+ internalType: "uint64",
+ name: "weightsVersionKey",
+ type: "uint64",
+ },
+ ],
+ name: "setWeightsVersionKey",
+ outputs: [],
+ stateMutability: "payable",
+ type: "function",
+ },
+ {
+ inputs: [
+ {
+ internalType: "bytes32",
+ name: "hotkey",
+ type: "bytes32",
+ },
+ ],
+ name: "registerNetwork",
+ outputs: [],
+ stateMutability: "payable",
+ type: "function",
+ },
+ {
+ inputs: [
+ {
+ internalType: "bytes32",
+ name: "hotkey",
+ type: "bytes32"
+ },
+ {
+ internalType: "string",
+ name: "subnetName",
+ type: "string"
+ },
+ {
+ internalType: "string",
+ name: "githubRepo",
+ type: "string"
+ },
+ {
+ internalType: "string",
+ name: "subnetContact",
+ type: "string"
+ },
+ {
+ internalType: "string",
+ name: "subnetUrl",
+ type: "string"
+ },
+ {
+ internalType: "string",
+ name: "discord",
+ type: "string"
+ },
+ {
+ internalType: "string",
+ name: "description",
+ type: "string"
+ },
+ {
+ internalType: "string",
+ name: "additional",
+ type: "string"
+ }
+ ],
+ name: "registerNetwork",
+ outputs: [],
+ stateMutability: "payable",
+ type: "function"
+ },
+];
\ No newline at end of file
diff --git a/evm-tests/src/contracts/withdraw.sol b/evm-tests/src/contracts/withdraw.sol
new file mode 100644
index 0000000000..3945661e09
--- /dev/null
+++ b/evm-tests/src/contracts/withdraw.sol
@@ -0,0 +1,13 @@
+// SPDX-License-Identifier: GPL-3.0
+
+pragma solidity >=0.7.0 <0.9.0;
+
+contract Withdraw {
+ constructor() {}
+
+ function withdraw(uint256 value) public payable {
+ payable(msg.sender).transfer(value);
+ }
+
+ receive() external payable {}
+}
diff --git a/evm-tests/src/contracts/withdraw.ts b/evm-tests/src/contracts/withdraw.ts
new file mode 100644
index 0000000000..46fe66bf24
--- /dev/null
+++ b/evm-tests/src/contracts/withdraw.ts
@@ -0,0 +1,31 @@
+export const WITHDRAW_CONTRACT_ABI = [
+ {
+ "inputs": [],
+ "stateMutability": "nonpayable",
+ "type": "constructor"
+ },
+ {
+ "inputs": [
+ {
+ "internalType": "uint256",
+ "name": "value",
+ "type": "uint256"
+ }
+ ],
+ "name": "withdraw",
+ "outputs": [],
+ "stateMutability": "payable",
+ "type": "function"
+ },
+ {
+ "stateMutability": "payable",
+ "type": "receive"
+ }
+];
+
+// "compiler": {
+// "version": "0.8.26+commit.8a97fa7a"
+// },
+
+export const WITHDRAW_CONTRACT_BYTECODE = "6080604052348015600e575f80fd5b506101148061001c5f395ff3fe608060405260043610601e575f3560e01c80632e1a7d4d146028576024565b36602457005b5f80fd5b603e6004803603810190603a919060b8565b6040565b005b3373ffffffffffffffffffffffffffffffffffffffff166108fc8290811502906040515f60405180830381858888f193505050501580156082573d5f803e3d5ffd5b5050565b5f80fd5b5f819050919050565b609a81608a565b811460a3575f80fd5b50565b5f8135905060b2816093565b92915050565b5f6020828403121560ca5760c96086565b5b5f60d58482850160a6565b9150509291505056fea2646970667358221220f43400858bfe4fcc0bf3c1e2e06d3a9e6ced86454a00bd7e4866b3d4d64e46bb64736f6c634300081a0033"
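+
+// Illustrative sketch (assumes an ethers v6 Wallet `wallet` with funds on the local node):
+//
+//   const factory = new ethers.ContractFactory(WITHDRAW_CONTRACT_ABI, WITHDRAW_CONTRACT_BYTECODE, wallet);
+//   const contract = await factory.deploy();
+//   await contract.waitForDeployment();
+//   // fund the contract via its receive() function, then pull the balance back out
+//   await (await wallet.sendTransaction({ to: contract.target, value: 1_000_000_000n })).wait();
+//   await (await contract.withdraw(1_000_000_000n)).wait();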
+
diff --git a/evm-tests/src/eth.ts b/evm-tests/src/eth.ts
new file mode 100644
index 0000000000..ea3ebb9976
--- /dev/null
+++ b/evm-tests/src/eth.ts
@@ -0,0 +1,17 @@
+
+import { ethers, Provider, TransactionRequest, Wallet } from "ethers";
+export async function estimateTransactionCost(provider: Provider, tx: TransactionRequest) {
+ const feeData = await provider.getFeeData();
+ const estimatedGas = BigInt(await provider.estimateGas(tx));
+ const gasPrice = feeData.gasPrice || feeData.maxFeePerGas;
+ if (gasPrice === null)
+ return estimatedGas
+ else
+ return estimatedGas * BigInt(gasPrice);
+}
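+
+// Example (illustrative): estimate the total fee of a plain transfer before sending it.
+// `provider` and `recipient` are placeholders.
+//
+//   const tx: TransactionRequest = { to: recipient, value: 1_000_000_000n };
+//   const fee = await estimateTransactionCost(provider, tx);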
+
+export function getContract(contractAddress: string, abi: {}[], wallet: Wallet) {
+    const contract = new ethers.Contract(contractAddress, abi, wallet);
+    return contract;
+}
\ No newline at end of file
diff --git a/evm-tests/src/substrate.ts b/evm-tests/src/substrate.ts
new file mode 100644
index 0000000000..ddfdfb626d
--- /dev/null
+++ b/evm-tests/src/substrate.ts
@@ -0,0 +1,274 @@
+import * as assert from "assert";
+import { devnet, MultiAddress } from '@polkadot-api/descriptors';
+import { createClient, TypedApi, Transaction, PolkadotSigner, Binary } from 'polkadot-api';
+import { getWsProvider } from 'polkadot-api/ws-provider/web';
+import { sr25519CreateDerive } from "@polkadot-labs/hdkd"
+import { convertPublicKeyToSs58 } from "../src/address-utils"
+import { DEV_PHRASE, entropyToMiniSecret, mnemonicToEntropy, KeyPair } from "@polkadot-labs/hdkd-helpers"
+import { getPolkadotSigner } from "polkadot-api/signer"
+import { randomBytes } from 'crypto';
+import { Keyring } from '@polkadot/keyring';
+import { SS58_PREFIX, TX_TIMEOUT } from "./config";
+
+let api: TypedApi<typeof devnet> | undefined = undefined
+
+// define the url string as a type so it can be extended in the future
+// export type ClientUrlType = 'ws://localhost:9944' | 'wss://test.finney.opentensor.ai:443' | 'wss://dev.chain.opentensor.ai:443' | 'wss://archive.chain.opentensor.ai';
+export type ClientUrlType = 'ws://localhost:9944'
+
+export async function getClient(url: ClientUrlType) {
+ const provider = getWsProvider(url);
+ const client = createClient(provider);
+ return client
+}
+
+export async function getDevnetApi() {
+ if (api === undefined) {
+ let client = await getClient('ws://localhost:9944')
+ api = client.getTypedApi(devnet)
+ }
+ return api
+}
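+
+// Example (illustrative): the api handle is cached, so repeated calls reuse the same client.
+//
+//   const api = await getDevnetApi()
+//   const blockNumber = await api.query.System.Number.getValue()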
+
+export function getAlice() {
+ const entropy = mnemonicToEntropy(DEV_PHRASE)
+ const miniSecret = entropyToMiniSecret(entropy)
+ const derive = sr25519CreateDerive(miniSecret)
+ const hdkdKeyPair = derive("//Alice")
+
+ return hdkdKeyPair
+}
+
+export function getAliceSigner() {
+ const alice = getAlice()
+ const polkadotSigner = getPolkadotSigner(
+ alice.publicKey,
+ "Sr25519",
+ alice.sign,
+ )
+
+ return polkadotSigner
+}
+
+export function getRandomSubstrateSigner() {
+ const keypair = getRandomSubstrateKeypair();
+ return getSignerFromKeypair(keypair)
+}
+
+export function getSignerFromKeypair(keypair: KeyPair) {
+ const polkadotSigner = getPolkadotSigner(
+ keypair.publicKey,
+ "Sr25519",
+ keypair.sign,
+ )
+ return polkadotSigner
+}
+
+export function getRandomSubstrateKeypair() {
+ const seed = randomBytes(32);
+ const miniSecret = entropyToMiniSecret(seed)
+ const derive = sr25519CreateDerive(miniSecret)
+ const hdkdKeyPair = derive("")
+
+ return hdkdKeyPair
+}
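+
+// Example (illustrative): create a throwaway account and derive its signer and SS58 address.
+//
+//   const keypair = getRandomSubstrateKeypair()
+//   const signer = getSignerFromKeypair(keypair)
+//   const ss58Address = convertPublicKeyToSs58(keypair.publicKey)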
+
+export async function getBalance(api: TypedApi<typeof devnet>) {
+ const value = await api.query.Balances.Account.getValue("")
+ return value
+}
+
+export async function getNonce(api: TypedApi<typeof devnet>, ss58Address: string): Promise<number> {
+ const value = await api.query.System.Account.getValue(ss58Address);
+ return value.nonce
+}
+
+export async function getNonceChangePromise(api: TypedApi<typeof devnet>, ss58Address: string) {
+    const initValue = await api.query.System.Account.getValue(ss58Address);
+    return new Promise<void>((resolve, reject) => {
+ const subscription = api.query.System.Account.watchValue(ss58Address).subscribe({
+ next(value) {
+ if (value.nonce > initValue.nonce) {
+ subscription.unsubscribe();
+                    // Resolve once the nonce has increased
+ resolve();
+ }
+ },
+
+ error(err: Error) {
+ console.error("Transaction failed:", err);
+ subscription.unsubscribe();
+ // Reject the promise in case of an error
+ reject(err);
+ },
+ complete() {
+ console.log("Subscription complete");
+ }
+ })
+
+ setTimeout(() => {
+ subscription.unsubscribe();
+ console.log('unsubscribed!');
+ resolve()
+ }, TX_TIMEOUT);
+
+ })
+}
+
+export function convertPublicKeyToMultiAddress(publicKey: Uint8Array, ss58Format: number = SS58_PREFIX): MultiAddress {
+ // Create a keyring instance
+ const keyring = new Keyring({ type: 'sr25519', ss58Format });
+
+    // Encode the public key as an SS58 address
+    const address = keyring.encodeAddress(publicKey);
+
+ return MultiAddress.Id(address);
+}
+
+
+export async function waitForTransactionCompletion(api: TypedApi<typeof devnet>, tx: Transaction<{}, string, string, void>, signer: PolkadotSigner) {
+ const transactionPromise = await getTransactionWatchPromise(tx, signer)
+ return transactionPromise
+
+    // If the finalized event turns out to be unreliable, a nonce subscription could be added as extra evidence that the tx is finalized.
+    // Not needed based on current testing.
+ // const ss58Address = convertPublicKeyToSs58(signer.publicKey)
+ // const noncePromise = await getNonceChangePromise(api, ss58Address)
+
+ // return new Promise((resolve, reject) => {
+ // Promise.race([transactionPromise, noncePromise])
+ // .then(resolve)
+ // .catch(reject);
+ // })
+}
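+
+// Example (illustrative): submit a System.remark and wait until it is finalized.
+// `api` is assumed to be the typed devnet api returned by getDevnetApi().
+//
+//   const tx = api.tx.System.remark({ remark: Binary.fromText("hello") })
+//   await waitForTransactionCompletion(api, tx, getAliceSigner())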
+
+export async function getTransactionWatchPromise(tx: Transaction<{}, string, string, void>, signer: PolkadotSigner) {
+    return new Promise<void>((resolve, reject) => {
+        // store the txHash and use it in the timeout handler, so it is easy to tell which tx was not finalized in time
+ let txHash = ""
+ const subscription = tx.signSubmitAndWatch(signer).subscribe({
+ next(value) {
+ console.log("Event:", value);
+ txHash = value.txHash
+
+                // TODO: investigate why the finalized event is not emitted for every extrinsic
+ if (value.type === "finalized") {
+ console.log("Transaction is finalized in block:", value.txHash);
+ subscription.unsubscribe();
+ // Resolve the promise when the transaction is finalized
+ resolve();
+
+ }
+ },
+ error(err) {
+ console.error("Transaction failed:", err);
+ subscription.unsubscribe();
+ // Reject the promise in case of an error
+ reject(err);
+
+ },
+ complete() {
+ console.log("Subscription complete");
+ }
+ });
+
+ setTimeout(() => {
+ subscription.unsubscribe();
+            console.log("unsubscribed because of timeout for tx", txHash);
+            reject(new Error(`transaction ${txHash} was not finalized before TX_TIMEOUT`))
+ }, TX_TIMEOUT);
+ });
+}
+
+export async function waitForFinalizedBlock(api: TypedApi<typeof devnet>) {
+    const currentBlockNumber = await api.query.System.Number.getValue()
+    return new Promise<void>((resolve, reject) => {
+
+ const subscription = api.query.System.Number.watchValue().subscribe({
+            // TODO: check why the block number event is only received once
+ next(value: number) {
+ console.log("Event block number is :", value);
+
+ if (value > currentBlockNumber + 6) {
+                console.log("Enough blocks have passed, current block:", value);
+ subscription.unsubscribe();
+
+ resolve();
+
+ }
+
+ },
+ error(err: Error) {
+ console.error("Transaction failed:", err);
+ subscription.unsubscribe();
+ // Reject the promise in case of an error
+ reject(err);
+
+ },
+ complete() {
+ console.log("Subscription complete");
+ }
+ });
+
+ setTimeout(() => {
+ subscription.unsubscribe();
+ console.log('unsubscribed!');
+ resolve()
+ }, 2000);
+ });
+}
+
+// Alternative way to wait for transaction finalization: pass the raw call data to avoid the complex transaction type definition
+export async function waitForTransactionCompletion2(api: TypedApi<typeof devnet>, raw: Binary, signer: PolkadotSigner) {
+    const tx = await api.txFromCallData(raw);
+    return new Promise<void>((resolve, reject) => {
+ const subscription = tx.signSubmitAndWatch(signer).subscribe({
+ next(value) {
+ console.log("Event:", value);
+
+                if (value.type === "txBestBlocksState") {
+                    console.log("Transaction is included in a best block:", value.txHash);
+                    subscription.unsubscribe();
+                    // Resolve the promise once the transaction reaches a best block
+                    resolve();
+
+ }
+ },
+ error(err: Error) {
+ console.error("Transaction failed:", err);
+ subscription.unsubscribe();
+ // Reject the promise in case of an error
+ reject(err);
+
+ },
+ complete() {
+ console.log("Subscription complete");
+ }
+ });
+ });
+}
+
+export async function waitForNonceChange(api: TypedApi<typeof devnet>, ss58Address: string) {
+ const initNonce = await getNonce(api, ss58Address)
+ while (true) {
+ const currentNonce = await getNonce(api, ss58Address)
+ if (currentNonce > initNonce) {
+ break
+ }
+
+ await new Promise(resolve => setTimeout(resolve, 200));
+ }
+}
+
+
+// other approach to convert public key to ss58
+// export function convertPublicKeyToSs58(publicKey: Uint8Array, ss58Format: number = 42): string {
+// // Create a keyring instance
+// const keyring = new Keyring({ type: 'sr25519', ss58Format });
+
+// // Add the public key to the keyring
+// const address = keyring.encodeAddress(publicKey);
+
+// return address
+// }
\ No newline at end of file
diff --git a/evm-tests/src/subtensor.ts b/evm-tests/src/subtensor.ts
new file mode 100644
index 0000000000..48dc5c83c7
--- /dev/null
+++ b/evm-tests/src/subtensor.ts
@@ -0,0 +1,345 @@
+import * as assert from "assert";
+import { devnet, MultiAddress } from '@polkadot-api/descriptors';
+import { TypedApi, TxCallData } from 'polkadot-api';
+import { KeyPair } from "@polkadot-labs/hdkd-helpers"
+import { getAliceSigner, waitForTransactionCompletion, getSignerFromKeypair } from './substrate'
+import { convertH160ToSS58, convertPublicKeyToSs58 } from './address-utils'
+import { tao } from './balance-math'
+
+// create a new subnet and return its netuid
+export async function addNewSubnetwork(api: TypedApi<typeof devnet>, hotkey: KeyPair, coldkey: KeyPair) {
+ const alice = getAliceSigner()
+ const totalNetworks = await api.query.SubtensorModule.TotalNetworks.getValue()
+
+ const rateLimit = await api.query.SubtensorModule.NetworkRateLimit.getValue()
+ if (rateLimit !== BigInt(0)) {
+ const internalCall = api.tx.AdminUtils.sudo_set_network_rate_limit({ rate_limit: BigInt(0) })
+ const tx = api.tx.Sudo.sudo({ call: internalCall.decodedCall })
+ await waitForTransactionCompletion(api, tx, alice)
+ .then(() => { })
+ .catch((error) => { console.log(`transaction error ${error}`) });
+ }
+
+ const signer = getSignerFromKeypair(coldkey)
+ const registerNetworkTx = api.tx.SubtensorModule.register_network({ hotkey: convertPublicKeyToSs58(hotkey.publicKey) })
+ await waitForTransactionCompletion(api, registerNetworkTx, signer)
+ .then(() => { })
+ .catch((error) => { console.log(`transaction error ${error}`) });
+
+ assert.equal(totalNetworks + 1, await api.query.SubtensorModule.TotalNetworks.getValue())
+ return totalNetworks
+}
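+
+// Example (illustrative) test setup: fund a fresh coldkey, create a subnet, then register a hotkey on it.
+// `api` is assumed to be the typed devnet api; getRandomSubstrateKeypair would need to be imported from './substrate'.
+//
+//   const hotkey = getRandomSubstrateKeypair()
+//   const coldkey = getRandomSubstrateKeypair()
+//   await forceSetBalanceToSs58Address(api, convertPublicKeyToSs58(coldkey.publicKey))
+//   const netuid = await addNewSubnetwork(api, hotkey, coldkey)
+//   await burnedRegister(api, netuid, convertPublicKeyToSs58(hotkey.publicKey), coldkey)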
+
+// force set the balance of an ss58 address
+export async function forceSetBalanceToSs58Address(api: TypedApi<typeof devnet>, ss58Address: string) {
+ const alice = getAliceSigner()
+ const balance = tao(1e8)
+ const internalCall = api.tx.Balances.force_set_balance({ who: MultiAddress.Id(ss58Address), new_free: balance })
+ const tx = api.tx.Sudo.sudo({ call: internalCall.decodedCall })
+
+ await waitForTransactionCompletion(api, tx, alice)
+ .then(() => { })
+ .catch((error) => { console.log(`transaction error ${error}`) });
+
+ const balanceOnChain = (await api.query.System.Account.getValue(ss58Address)).data.free
+    // check the balance except for the sudo account because of the tx fee
+ if (ss58Address !== convertPublicKeyToSs58(alice.publicKey)) {
+ assert.equal(balance, balanceOnChain)
+ }
+}
+
+// set balance for an eth address
+export async function forceSetBalanceToEthAddress(api: TypedApi<typeof devnet>, ethAddress: string) {
+ const ss58Address = convertH160ToSS58(ethAddress)
+ await forceSetBalanceToSs58Address(api, ss58Address)
+}
+
+export async function setCommitRevealWeightsEnabled(api: TypedApi<typeof devnet>, netuid: number, enabled: boolean) {
+ const value = await api.query.SubtensorModule.CommitRevealWeightsEnabled.getValue(netuid)
+ if (value === enabled) {
+ return;
+ }
+
+ const alice = getAliceSigner()
+ const internalCall = api.tx.AdminUtils.sudo_set_commit_reveal_weights_enabled({ netuid: netuid, enabled: enabled })
+ const tx = api.tx.Sudo.sudo({ call: internalCall.decodedCall })
+
+ await waitForTransactionCompletion(api, tx, alice)
+ .then(() => { })
+ .catch((error) => { console.log(`transaction error ${error}`) });
+ assert.equal(enabled, await api.query.SubtensorModule.CommitRevealWeightsEnabled.getValue(netuid))
+}
+
+export async function setWeightsSetRateLimit(api: TypedApi<typeof devnet>, netuid: number, rateLimit: bigint) {
+ const value = await api.query.SubtensorModule.WeightsSetRateLimit.getValue(netuid)
+ if (value === rateLimit) {
+ return;
+ }
+
+ const alice = getAliceSigner()
+ const internalCall = api.tx.AdminUtils.sudo_set_weights_set_rate_limit({ netuid: netuid, weights_set_rate_limit: rateLimit })
+ const tx = api.tx.Sudo.sudo({ call: internalCall.decodedCall })
+
+ await waitForTransactionCompletion(api, tx, alice)
+ .then(() => { })
+ .catch((error) => { console.log(`transaction error ${error}`) });
+ assert.equal(rateLimit, await api.query.SubtensorModule.WeightsSetRateLimit.getValue(netuid))
+}
+
+// tempo is u16 in Rust, but just a number in JS, so the value should be less than u16::MAX
+export async function setTempo(api: TypedApi<typeof devnet>, netuid: number, tempo: number) {
+ const value = await api.query.SubtensorModule.Tempo.getValue(netuid)
+    console.log("init value is", value)
+ if (value === tempo) {
+ return;
+ }
+
+ const alice = getAliceSigner()
+ const internalCall = api.tx.AdminUtils.sudo_set_tempo({ netuid: netuid, tempo: tempo })
+ const tx = api.tx.Sudo.sudo({ call: internalCall.decodedCall })
+
+ await waitForTransactionCompletion(api, tx, alice)
+ .then(() => { })
+ .catch((error) => { console.log(`transaction error ${error}`) });
+ assert.equal(tempo, await api.query.SubtensorModule.Tempo.getValue(netuid))
+}
+
+export async function setCommitRevealWeightsInterval(api: TypedApi<typeof devnet>, netuid: number, interval: bigint) {
+ const value = await api.query.SubtensorModule.RevealPeriodEpochs.getValue(netuid)
+ if (value === interval) {
+ return;
+ }
+
+ const alice = getAliceSigner()
+ const internalCall = api.tx.AdminUtils.sudo_set_commit_reveal_weights_interval({ netuid: netuid, interval: interval })
+ const tx = api.tx.Sudo.sudo({ call: internalCall.decodedCall })
+
+ await waitForTransactionCompletion(api, tx, alice)
+ .then(() => { })
+ .catch((error) => { console.log(`transaction error ${error}`) });
+ assert.equal(interval, await api.query.SubtensorModule.RevealPeriodEpochs.getValue(netuid))
+}
+
+
+export async function forceSetChainID(api: TypedApi<typeof devnet>, chainId: bigint) {
+ const value = await api.query.EVMChainId.ChainId.getValue()
+ if (value === chainId) {
+ return;
+ }
+
+ const alice = getAliceSigner()
+ const internalCall = api.tx.AdminUtils.sudo_set_evm_chain_id({ chain_id: chainId })
+ const tx = api.tx.Sudo.sudo({ call: internalCall.decodedCall })
+
+ await waitForTransactionCompletion(api, tx, alice)
+ .then(() => { })
+ .catch((error) => { console.log(`transaction error ${error}`) });
+ assert.equal(chainId, await api.query.EVMChainId.ChainId.getValue())
+}
+
+export async function disableWhiteListCheck(api: TypedApi<typeof devnet>, disabled: boolean) {
+ const value = await api.query.EVM.DisableWhitelistCheck.getValue()
+ if (value === disabled) {
+ return;
+ }
+
+ const alice = getAliceSigner()
+ const internalCall = api.tx.EVM.disable_whitelist({ disabled: disabled })
+ const tx = api.tx.Sudo.sudo({ call: internalCall.decodedCall })
+
+ await waitForTransactionCompletion(api, tx, alice)
+ .then(() => { })
+ .catch((error) => { console.log(`transaction error ${error}`) });
+ assert.equal(disabled, await api.query.EVM.DisableWhitelistCheck.getValue())
+}
+
+export async function burnedRegister(api: TypedApi<typeof devnet>, netuid: number, ss58Address: string, keypair: KeyPair) {
+ const uids = await api.query.SubtensorModule.SubnetworkN.getValue(netuid)
+ const signer = getSignerFromKeypair(keypair)
+ const tx = api.tx.SubtensorModule.burned_register({ hotkey: ss58Address, netuid: netuid })
+ await waitForTransactionCompletion(api, tx, signer)
+ .then(() => { })
+ .catch((error) => { console.log(`transaction error ${error}`) });
+ assert.equal(uids + 1, await api.query.SubtensorModule.SubnetworkN.getValue(netuid))
+}
+
+
+export async function sendProxyCall(api: TypedApi, calldata: TxCallData, ss58Address: string, keypair: KeyPair) {
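+    // dispatch the given call data on behalf of the `real` account via pallet_proxy;
+    // the signing keypair is assumed to already be registered as a proxy of that account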
+ const signer = getSignerFromKeypair(keypair)
+ const tx = api.tx.Proxy.proxy({
+ call: calldata,
+ real: MultiAddress.Id(ss58Address),
+ force_proxy_type: undefined
+ });
+ await waitForTransactionCompletion(api, tx, signer)
+ .then(() => { })
+ .catch((error) => { console.log(`transaction error ${error}`) });
+}
+
+
+export async function setTxRateLimit(api: TypedApi, txRateLimit: bigint) {
+ const value = await api.query.SubtensorModule.TxRateLimit.getValue()
+ if (value === txRateLimit) {
+ return;
+ }
+ const alice = getAliceSigner()
+
+ const internalCall = api.tx.AdminUtils.sudo_set_tx_rate_limit({ tx_rate_limit: txRateLimit })
+ const tx = api.tx.Sudo.sudo({ call: internalCall.decodedCall })
+
+
+ await waitForTransactionCompletion(api, tx, alice)
+ .then(() => { })
+ .catch((error) => { console.log(`transaction error ${error}`) });
+ assert.equal(txRateLimit, await api.query.SubtensorModule.TxRateLimit.getValue())
+}
+
+export async function setMaxAllowedValidators(api: TypedApi, netuid: number, maxAllowedValidators: number) {
+ const value = await api.query.SubtensorModule.MaxAllowedValidators.getValue(netuid)
+ if (value === maxAllowedValidators) {
+ return;
+ }
+
+ const alice = getAliceSigner()
+
+ const internalCall = api.tx.AdminUtils.sudo_set_max_allowed_validators({
+ netuid: netuid,
+ max_allowed_validators: maxAllowedValidators
+ })
+ const tx = api.tx.Sudo.sudo({ call: internalCall.decodedCall })
+
+ await waitForTransactionCompletion(api, tx, alice)
+ .then(() => { })
+ .catch((error) => { console.log(`transaction error ${error}`) });
+ assert.equal(maxAllowedValidators, await api.query.SubtensorModule.MaxAllowedValidators.getValue(netuid))
+}
+
+export async function setSubnetOwnerCut(api: TypedApi, subnetOwnerCut: number) {
+ const value = await api.query.SubtensorModule.SubnetOwnerCut.getValue()
+ if (value === subnetOwnerCut) {
+ return;
+ }
+
+ const alice = getAliceSigner()
+
+ const internalCall = api.tx.AdminUtils.sudo_set_subnet_owner_cut({
+ subnet_owner_cut: subnetOwnerCut
+ })
+ const tx = api.tx.Sudo.sudo({ call: internalCall.decodedCall })
+
+ await waitForTransactionCompletion(api, tx, alice)
+ .then(() => { })
+ .catch((error) => { console.log(`transaction error ${error}`) });
+ assert.equal(subnetOwnerCut, await api.query.SubtensorModule.SubnetOwnerCut.getValue())
+}
+
+export async function setActivityCutoff(api: TypedApi, netuid: number, activityCutoff: number) {
+ const value = await api.query.SubtensorModule.ActivityCutoff.getValue(netuid)
+ if (value === activityCutoff) {
+ return;
+ }
+
+ const alice = getAliceSigner()
+
+ const internalCall = api.tx.AdminUtils.sudo_set_activity_cutoff({
+ netuid: netuid,
+ activity_cutoff: activityCutoff
+ })
+ const tx = api.tx.Sudo.sudo({ call: internalCall.decodedCall })
+
+ await waitForTransactionCompletion(api, tx, alice)
+ .then(() => { })
+ .catch((error) => { console.log(`transaction error ${error}`) });
+ assert.equal(activityCutoff, await api.query.SubtensorModule.ActivityCutoff.getValue(netuid))
+}
+
+export async function setMaxAllowedUids(api: TypedApi, netuid: number, maxAllowedUids: number) {
+ const value = await api.query.SubtensorModule.MaxAllowedUids.getValue(netuid)
+ if (value === maxAllowedUids) {
+ return;
+ }
+
+ const alice = getAliceSigner()
+
+ const internalCall = api.tx.AdminUtils.sudo_set_max_allowed_uids({
+ netuid: netuid,
+ max_allowed_uids: maxAllowedUids
+ })
+ const tx = api.tx.Sudo.sudo({ call: internalCall.decodedCall })
+
+ await waitForTransactionCompletion(api, tx, alice)
+ .then(() => { })
+ .catch((error) => { console.log(`transaction error ${error}`) });
+ assert.equal(maxAllowedUids, await api.query.SubtensorModule.MaxAllowedUids.getValue(netuid))
+}
+
+export async function setMinDelegateTake(api: TypedApi, minDelegateTake: number) {
+ const value = await api.query.SubtensorModule.MinDelegateTake.getValue()
+ if (value === minDelegateTake) {
+ return;
+ }
+
+ const alice = getAliceSigner()
+
+ const internalCall = api.tx.AdminUtils.sudo_set_min_delegate_take({
+ take: minDelegateTake
+ })
+ const tx = api.tx.Sudo.sudo({ call: internalCall.decodedCall })
+
+ await waitForTransactionCompletion(api, tx, alice)
+ .then(() => { })
+ .catch((error) => { console.log(`transaction error ${error}`) });
+ assert.equal(minDelegateTake, await api.query.SubtensorModule.MinDelegateTake.getValue())
+}
+
+export async function becomeDelegate(api: TypedApi, ss58Address: string, keypair: KeyPair) {
+    const signer = getSignerFromKeypair(keypair)
+
+    const tx = api.tx.SubtensorModule.become_delegate({
+        hotkey: ss58Address
+    })
+    await waitForTransactionCompletion(api, tx, signer)
+ .then(() => { })
+ .catch((error) => { console.log(`transaction error ${error}`) });
+}
+
+export async function addStake(api: TypedApi, netuid: number, ss58Address: string, amount_staked: bigint, keypair: KeyPair) {
+    const signer = getSignerFromKeypair(keypair)
+    const tx = api.tx.SubtensorModule.add_stake({
+        netuid: netuid,
+        hotkey: ss58Address,
+        amount_staked: amount_staked
+    })
+
+    await waitForTransactionCompletion(api, tx, signer)
+ .then(() => { })
+ .catch((error) => { console.log(`transaction error ${error}`) });
+
+}
+
+export async function setWeight(api: TypedApi, netuid: number, dests: number[], weights: number[], version_key: bigint, keypair: KeyPair) {
+    const signer = getSignerFromKeypair(keypair)
+    const tx = api.tx.SubtensorModule.set_weights({
+        netuid: netuid,
+        dests: dests,
+        weights: weights,
+        version_key: version_key
+    })
+
+    await waitForTransactionCompletion(api, tx, signer)
+ .then(() => { })
+ .catch((error) => { console.log(`transaction error ${error}`) });
+
+}
+
+export async function rootRegister(api: TypedApi, ss58Address: string, keypair: KeyPair) {
+    const signer = getSignerFromKeypair(keypair)
+    const tx = api.tx.SubtensorModule.root_register({
+        hotkey: ss58Address
+    })
+
+    await waitForTransactionCompletion(api, tx, signer)
+ .then(() => { })
+ .catch((error) => { console.log(`transaction error ${error}`) });
+
+}
\ No newline at end of file
diff --git a/evm-tests/src/utils.ts b/evm-tests/src/utils.ts
new file mode 100644
index 0000000000..36e922b49e
--- /dev/null
+++ b/evm-tests/src/utils.ts
@@ -0,0 +1,55 @@
+import { defineChain, http, publicActions, createPublicClient } from "viem"
+import { privateKeyToAccount, generatePrivateKey } from 'viem/accounts'
+import { ethers } from "ethers"
+import { ETH_LOCAL_URL } from "./config"
+
+export type ClientUrlType = 'http://localhost:9944';
+
+export const chain = (id: number, url: string) => defineChain({
+ id: id,
+ name: 'bittensor',
+ network: 'bittensor',
+ nativeCurrency: {
+ name: 'tao',
+ symbol: 'TAO',
+ decimals: 9,
+ },
+ rpcUrls: {
+ default: {
+ http: [url],
+ },
+ },
+ testnet: true,
+})
+
+
+export async function getPublicClient(url: ClientUrlType) {
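+    // the localnet EVM chain id is expected to be 42 (see eth.chain-id.test.ts)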
+ const wallet = createPublicClient({
+ chain: chain(42, url),
+ transport: http(),
+
+ })
+
+ return wallet.extend(publicActions)
+}
+
+/**
+ * Generates a random Ethereum wallet
+ * @returns a viem account backed by a freshly generated private key
+ */
+export function generateRandomEthWallet() {
+    const account = privateKeyToAccount(generatePrivateKey())
+ return account
+}
+
+
+export function generateRandomEthersWallet() {
+ const account = ethers.Wallet.createRandom();
+ const provider = new ethers.JsonRpcProvider(ETH_LOCAL_URL);
+
+ const wallet = new ethers.Wallet(account.privateKey, provider);
+ return wallet;
+}
\ No newline at end of file
diff --git a/evm-tests/test/ed25519.precompile.verify.test.ts b/evm-tests/test/ed25519.precompile.verify.test.ts
new file mode 100644
index 0000000000..fcd79ec9d7
--- /dev/null
+++ b/evm-tests/test/ed25519.precompile.verify.test.ts
@@ -0,0 +1,122 @@
+import { IED25519VERIFY_ADDRESS, IEd25519VerifyABI, ETH_LOCAL_URL } from '../src/config'
+import { getPublicClient } from "../src/utils";
+import { toHex, toBytes, keccak256, PublicClient } from 'viem'
+import { Keyring } from "@polkadot/keyring";
+import * as assert from "assert";
+
+describe("Verfication of ed25519 signature", () => {
+ // init eth part
+ let ethClient: PublicClient;
+
+ before(async () => {
+ ethClient = await getPublicClient(ETH_LOCAL_URL);
+ });
+
+ it("Verification of ed25519 works", async () => {
+ const keyring = new Keyring({ type: "ed25519" });
+ const alice = keyring.addFromUri("//Alice");
+
+ // Use this example: https://github.com/gztensor/evm-demo/blob/main/docs/ed25519verify-precompile.md
+
+ //////////////////////////////////////////////////////////////////////
+ // Generate a signature
+
+ // Your message to sign
+ const message = "Sign this message";
+ const messageU8a = new TextEncoder().encode(message);
+ const messageHex = toHex(messageU8a); // Convert message to hex string
+ const messageHash = keccak256(messageHex); // Hash the message to fit into bytes32
+ console.log(`messageHash = ${messageHash}`);
+ const hashedMessageBytes = toBytes(messageHash);
+ console.log(`hashedMessageBytes = ${hashedMessageBytes}`);
+
+ // Sign the message
+ const signature = await alice.sign(hashedMessageBytes);
+ console.log(`Signature: ${toHex(signature)}`);
+
+ // Verify the signature locally
+ const isValid = alice.verify(
+ hashedMessageBytes,
+ signature,
+ alice.publicKey
+ );
+ console.log(`Is the signature valid? ${isValid}`);
+
+ //////////////////////////////////////////////////////////////////////
+ // Verify the signature using the precompile contract
+
+ const publicKeyBytes = toHex(alice.publicKey);
+ console.log(`publicKeyBytes = ${publicKeyBytes}`);
+
+        // Split the signature into commitment (R) and response (s)
+ let r = signature.slice(0, 32); // Commitment, a.k.a. "r" - first 32 bytes
+ let s = signature.slice(32, 64); // Response, a.k.a. "s" - second 32 bytes
+ let rBytes = toHex(r);
+ let sBytes = toHex(s);
+
+ const isPrecompileValid = await ethClient.readContract({
+ address: IED25519VERIFY_ADDRESS,
+ abi: IEd25519VerifyABI,
+ functionName: "verify",
+ args: [messageHash,
+ publicKeyBytes,
+ rBytes,
+ sBytes]
+
+ });
+
+ console.log(
+ `Is the signature valid according to the smart contract? ${isPrecompileValid}`
+ );
+ assert.equal(isPrecompileValid, true)
+
+ //////////////////////////////////////////////////////////////////////
+ // Verify the signature for bad data using the precompile contract
+
+        const brokenHashedMessageBytes = hashedMessageBytes.slice(); // copy so the original hashed bytes stay intact
+ brokenHashedMessageBytes[0] = (brokenHashedMessageBytes[0] + 1) % 0xff;
+ const brokenMessageHash = toHex(brokenHashedMessageBytes);
+ console.log(`brokenMessageHash = ${brokenMessageHash}`);
+
+ const isPrecompileValidBadData = await ethClient.readContract({
+ address: IED25519VERIFY_ADDRESS,
+ abi: IEd25519VerifyABI,
+ functionName: "verify",
+ args: [brokenMessageHash,
+ publicKeyBytes,
+ rBytes,
+ sBytes]
+
+ });
+
+ console.log(
+ `Is the signature valid according to the smart contract for broken data? ${isPrecompileValidBadData}`
+ );
+ assert.equal(isPrecompileValidBadData, false)
+
+ //////////////////////////////////////////////////////////////////////
+ // Verify the bad signature for good data using the precompile contract
+
+        const brokenR = r.slice(); // copy so the original commitment bytes stay intact
+        brokenR[0] = (brokenR[0] + 1) % 0xff;
+        rBytes = toHex(brokenR);
+ const isPrecompileValidBadSignature = await ethClient.readContract({
+ address: IED25519VERIFY_ADDRESS,
+ abi: IEd25519VerifyABI,
+ functionName: "verify",
+ args: [messageHash,
+ publicKeyBytes,
+ rBytes,
+ sBytes]
+
+ });
+
+ console.log(
+ `Is the signature valid according to the smart contract for broken signature? ${isPrecompileValidBadSignature}`
+ );
+ assert.equal(isPrecompileValidBadSignature, false)
+
+ });
+});
\ No newline at end of file
diff --git a/evm-tests/test/eth.bridgeToken.deploy.test.ts b/evm-tests/test/eth.bridgeToken.deploy.test.ts
new file mode 100644
index 0000000000..94ebcd1260
--- /dev/null
+++ b/evm-tests/test/eth.bridgeToken.deploy.test.ts
@@ -0,0 +1,69 @@
+import * as assert from "assert";
+import * as chai from "chai";
+
+import { getDevnetApi } from "../src/substrate"
+import { generateRandomEthersWallet, getPublicClient } from "../src/utils";
+import { ETH_LOCAL_URL } from "../src/config";
+import { devnet } from "@polkadot-api/descriptors"
+import { PublicClient } from "viem";
+import { TypedApi } from "polkadot-api";
+import { BRIDGE_TOKEN_CONTRACT_ABI, BRIDGE_TOKEN_CONTRACT_BYTECODE } from "../src/contracts/bridgeToken";
+import { toViemAddress } from "../src/address-utils";
+import { forceSetBalanceToEthAddress, disableWhiteListCheck } from "../src/subtensor";
+import { ethers } from "ethers"
+describe("bridge token contract deployment", () => {
+ // init eth part
+ const wallet = generateRandomEthersWallet();
+ let publicClient: PublicClient;
+
+ // init substrate part
+ let api: TypedApi
+
+ before(async () => {
+ // init variables got from await and async
+ publicClient = await getPublicClient(ETH_LOCAL_URL)
+ api = await getDevnetApi()
+
+ await forceSetBalanceToEthAddress(api, wallet.address)
+ await disableWhiteListCheck(api, true)
+ });
+
+ it("Can deploy bridge token smart contract", async () => {
+ const contractFactory = new ethers.ContractFactory(BRIDGE_TOKEN_CONTRACT_ABI, BRIDGE_TOKEN_CONTRACT_BYTECODE, wallet)
+ const contract = await contractFactory.deploy("name",
+ "symbol", wallet.address)
+ await contract.waitForDeployment()
+ assert.notEqual(contract.target, undefined)
+
+ const contractAddress = contract.target.toString()
+
+ const code = await publicClient.getCode({ address: toViemAddress(contractAddress) })
+ if (code === undefined) {
+ throw new Error("code not available")
+ }
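+        // 0x6080604052 is the standard Solidity prelude (PUSH1 0x80 PUSH1 0x40 MSTORE),
+        // so finding it in the stored code confirms real contract bytecode was deployed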
+ assert.ok(code.length > 100)
+ assert.ok(code.includes("0x60806040523480156"))
+ });
+
+ it("Can deploy bridge token contract with gas limit", async () => {
+ const contractFactory = new ethers.ContractFactory(BRIDGE_TOKEN_CONTRACT_ABI, BRIDGE_TOKEN_CONTRACT_BYTECODE, wallet)
+ const successful_gas_limit = "12345678";
+ const contract = await contractFactory.deploy("name",
+ "symbol", wallet.address,
+ {
+ gasLimit: successful_gas_limit,
+ }
+ )
+ await contract.waitForDeployment()
+ assert.notEqual(contract.target, undefined)
+
+ const contractAddress = contract.target.toString()
+
+ const code = await publicClient.getCode({ address: toViemAddress(contractAddress) })
+ if (code === undefined) {
+ throw new Error("code not available")
+ }
+ assert.ok(code.length > 100)
+ assert.ok(code.includes("0x60806040523480156"))
+ });
+});
\ No newline at end of file
diff --git a/evm-tests/test/eth.chain-id.test.ts b/evm-tests/test/eth.chain-id.test.ts
new file mode 100644
index 0000000000..09174c1212
--- /dev/null
+++ b/evm-tests/test/eth.chain-id.test.ts
@@ -0,0 +1,76 @@
+
+import * as assert from "assert";
+import * as chai from "chai";
+
+import { getDevnetApi, waitForTransactionCompletion, getRandomSubstrateKeypair } from "../src/substrate"
+import { generateRandomEthWallet, getPublicClient } from "../src/utils";
+import { convertPublicKeyToSs58 } from "../src/address-utils"
+import { ETH_LOCAL_URL } from "../src/config";
+import { devnet } from "@polkadot-api/descriptors"
+import { getPolkadotSigner } from "polkadot-api/signer";
+import { PublicClient } from "viem";
+import { TypedApi } from "polkadot-api";
+import { forceSetBalanceToSs58Address, forceSetChainID } from "../src/subtensor";
+
+describe("Test the EVM chain ID", () => {
+ // init eth part
+ const wallet = generateRandomEthWallet();
+ let ethClient: PublicClient;
+
+ // init substrate part
+ const keyPair = getRandomSubstrateKeypair();
+ let api: TypedApi;
+
+ // init other variable
+ const initChainId = 42;
+
+ before(async () => {
+ // init variables got from await and async
+ ethClient = await getPublicClient(ETH_LOCAL_URL);
+ api = await getDevnetApi()
+ await forceSetBalanceToSs58Address(api, convertPublicKeyToSs58(keyPair.publicKey))
+
+ });
+
+ it("EVM chain id update is ok", async () => {
+ let chainId = await ethClient.getChainId();
+ // init chain id should be 42
+ assert.equal(chainId, initChainId);
+
+ const newChainId = BigInt(100)
+ await forceSetChainID(api, newChainId)
+
+ chainId = await ethClient.getChainId();
+ assert.equal(chainId, newChainId);
+
+ await forceSetChainID(api, BigInt(initChainId))
+
+ chainId = await ethClient.getChainId();
+        // restore the original value so other tests still pass and this test can be re-run
+ assert.equal(chainId, initChainId);
+
+ });
+
+ it("EVM chain id is the same, only sudo can change it.", async () => {
+ let chainId = await ethClient.getChainId();
+ // init chain id should be 42
+ assert.equal(chainId, initChainId);
+
+        // invalid signer for setting the chain ID
+ let signer = getPolkadotSigner(
+ keyPair.publicKey,
+ "Sr25519",
+ keyPair.sign,
+ )
+
+ let tx = api.tx.AdminUtils.sudo_set_evm_chain_id({ chain_id: BigInt(100) })
+ await waitForTransactionCompletion(api, tx, signer)
+ .then(() => { })
+ .catch((error) => { console.log(`transaction error ${error}`) });
+
+        // the extrinsic should fail and the chain ID should stay unchanged
+ chainId = await ethClient.getChainId();
+ assert.equal(chainId, 42);
+
+ });
+});
\ No newline at end of file
diff --git a/evm-tests/test/eth.incremental.deploy.test.ts b/evm-tests/test/eth.incremental.deploy.test.ts
new file mode 100644
index 0000000000..c22187538d
--- /dev/null
+++ b/evm-tests/test/eth.incremental.deploy.test.ts
@@ -0,0 +1,61 @@
+
+
+import * as assert from "assert";
+import * as chai from "chai";
+
+import { getDevnetApi } from "../src/substrate"
+import { generateRandomEthersWallet, getPublicClient } from "../src/utils";
+import { ETH_LOCAL_URL } from "../src/config";
+import { devnet } from "@polkadot-api/descriptors"
+import { PublicClient } from "viem";
+import { TypedApi } from "polkadot-api";
+import { INCREMENTAL_CONTRACT_ABI, INCREMENTAL_CONTRACT_BYTECODE } from "../src/contracts/incremental";
+import { toViemAddress } from "../src/address-utils";
+import { ethers } from "ethers"
+import { disableWhiteListCheck, forceSetBalanceToEthAddress } from "../src/subtensor";
+
+describe("bridge token contract deployment", () => {
+ // init eth part
+ const wallet = generateRandomEthersWallet();
+ let publicClient: PublicClient;
+
+ // init substrate part
+ let api: TypedApi
+
+ before(async () => {
+ publicClient = await getPublicClient(ETH_LOCAL_URL)
+ api = await getDevnetApi()
+
+ await forceSetBalanceToEthAddress(api, wallet.address)
+ await disableWhiteListCheck(api, true)
+ });
+
+ it("Can deploy incremental smart contract", async () => {
+ const contractFactory = new ethers.ContractFactory(INCREMENTAL_CONTRACT_ABI, INCREMENTAL_CONTRACT_BYTECODE, wallet)
+ const contract = await contractFactory.deploy()
+ await contract.waitForDeployment()
+
+ const value = await publicClient.readContract({
+ abi: INCREMENTAL_CONTRACT_ABI,
+ address: toViemAddress(contract.target.toString()),
+ functionName: "retrieve",
+ args: []
+ })
+ assert.equal(value, 0)
+
+ const newValue = 1234
+
+ const deployContract = new ethers.Contract(contract.target.toString(), INCREMENTAL_CONTRACT_ABI, wallet)
+ const storeTx = await deployContract.store(newValue)
+ await storeTx.wait()
+
+ const newValueAfterStore = await publicClient.readContract({
+ abi: INCREMENTAL_CONTRACT_ABI,
+ address: toViemAddress(contract.target.toString()),
+ functionName: "retrieve",
+ args: []
+ })
+
+ assert.equal(newValue, newValueAfterStore)
+ });
+});
diff --git a/evm-tests/test/eth.substrate-transfer.test.ts b/evm-tests/test/eth.substrate-transfer.test.ts
new file mode 100644
index 0000000000..9e3a2b2050
--- /dev/null
+++ b/evm-tests/test/eth.substrate-transfer.test.ts
@@ -0,0 +1,412 @@
+import * as assert from "assert";
+
+import { getDevnetApi, waitForTransactionCompletion, getRandomSubstrateSigner, } from "../src/substrate"
+import { getPublicClient } from "../src/utils";
+import { ETH_LOCAL_URL, IBALANCETRANSFER_ADDRESS, IBalanceTransferABI } from "../src/config";
+import { devnet, MultiAddress } from "@polkadot-api/descriptors"
+import { PublicClient } from "viem";
+import { TypedApi, Binary, FixedSizeBinary } from "polkadot-api";
+import { generateRandomEthersWallet } from "../src/utils";
+import { tao, raoToEth, bigintToRao, compareEthBalanceWithTxFee } from "../src/balance-math";
+import { toViemAddress, convertPublicKeyToSs58, convertH160ToSS58, ss58ToH160, ss58ToEthAddress, ethAddressToH160 } from "../src/address-utils"
+import { ethers } from "ethers"
+import { estimateTransactionCost, getContract } from "../src/eth"
+
+import { WITHDRAW_CONTRACT_ABI, WITHDRAW_CONTRACT_BYTECODE } from "../src/contracts/withdraw"
+
+import { forceSetBalanceToEthAddress, forceSetBalanceToSs58Address, disableWhiteListCheck } from "../src/subtensor";
+
+describe("Balance transfers between substrate and EVM", () => {
+ const gwei = BigInt("1000000000");
+ // init eth part
+ const wallet = generateRandomEthersWallet();
+ const wallet2 = generateRandomEthersWallet();
+ let publicClient: PublicClient;
+ const provider = new ethers.JsonRpcProvider(ETH_LOCAL_URL);
+ // init substrate part
+ const signer = getRandomSubstrateSigner();
+ let api: TypedApi
+
+ before(async () => {
+
+ publicClient = await getPublicClient(ETH_LOCAL_URL)
+ api = await getDevnetApi()
+
+ await forceSetBalanceToEthAddress(api, wallet.address)
+ await forceSetBalanceToEthAddress(api, wallet2.address)
+ await forceSetBalanceToSs58Address(api, convertPublicKeyToSs58(signer.publicKey))
+ await disableWhiteListCheck(api, true)
+ });
+
+ it("Can transfer token from EVM to EVM", async () => {
+ const senderBalance = await publicClient.getBalance({ address: toViemAddress(wallet.address) })
+ const receiverBalance = await publicClient.getBalance({ address: toViemAddress(wallet2.address) })
+ const transferBalance = raoToEth(tao(1))
+ const tx = {
+ to: wallet2.address,
+ value: transferBalance.toString()
+ }
+ const txFee = await estimateTransactionCost(provider, tx)
+
+ const txResponse = await wallet.sendTransaction(tx)
+ await txResponse.wait();
+
+
+ const senderBalanceAfterTransfer = await publicClient.getBalance({ address: toViemAddress(wallet.address) })
+        const receiverBalanceAfterTransfer = await publicClient.getBalance({ address: toViemAddress(wallet2.address) })
+
+        assert.equal(senderBalanceAfterTransfer, senderBalance - transferBalance - txFee)
+        assert.equal(receiverBalance, receiverBalanceAfterTransfer - transferBalance)
+ });
+
+ it("Can transfer token from Substrate to EVM", async () => {
+ const ss58Address = convertH160ToSS58(wallet.address)
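+        // tokens sent to this ss58 mirror of the EVM address show up as EVM balance of wallet.address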
+ const senderBalance = (await api.query.System.Account.getValue(ss58Address)).data.free
+ const receiverBalance = await publicClient.getBalance({ address: toViemAddress(wallet.address) })
+ const transferBalance = tao(1)
+
+ const tx = api.tx.Balances.transfer_keep_alive({ value: transferBalance, dest: MultiAddress.Id(ss58Address) })
+ await waitForTransactionCompletion(api, tx, signer)
+ .then(() => { })
+ .catch((error) => { console.log(`transaction error ${error}`) });
+
+
+ const senderBalanceAfterTransfer = (await api.query.System.Account.getValue(ss58Address)).data.free
+        const receiverBalanceAfterTransfer = await publicClient.getBalance({ address: toViemAddress(wallet.address) })
+
+        assert.equal(senderBalanceAfterTransfer, senderBalance + transferBalance)
+        assert.equal(receiverBalance, receiverBalanceAfterTransfer - raoToEth(transferBalance))
+ });
+
+ it("Can transfer token from EVM to Substrate", async () => {
+ const contract = getContract(IBALANCETRANSFER_ADDRESS, IBalanceTransferABI, wallet)
+ const senderBalance = await publicClient.getBalance({ address: toViemAddress(wallet.address) })
+ const receiverBalance = (await api.query.System.Account.getValue(convertPublicKeyToSs58(signer.publicKey))).data.free
+ const transferBalance = raoToEth(tao(1))
+
+ const tx = await contract.transfer(signer.publicKey, { value: transferBalance.toString() })
+ await tx.wait()
+
+
+ const senderBalanceAfterTransfer = await publicClient.getBalance({ address: toViemAddress(wallet.address) })
+        const receiverBalanceAfterTransfer = (await api.query.System.Account.getValue(convertPublicKeyToSs58(signer.publicKey))).data.free
+
+        compareEthBalanceWithTxFee(senderBalanceAfterTransfer, senderBalance - transferBalance)
+        assert.equal(receiverBalance, receiverBalanceAfterTransfer - tao(1))
+ });
+
+ it("Transfer from EVM to substrate using evm::withdraw", async () => {
+ const ss58Address = convertPublicKeyToSs58(signer.publicKey)
+ const senderBalance = (await api.query.System.Account.getValue(ss58Address)).data.free
+        const ethAddress = ss58ToH160(ss58Address);
+
+ // transfer token to mirror eth address
+ const ethTransfer = {
+ to: ss58ToEthAddress(ss58Address),
+ value: raoToEth(tao(2)).toString()
+ }
+
+ const txResponse = await wallet.sendTransaction(ethTransfer)
+ await txResponse.wait();
+
+        const tx = api.tx.EVM.withdraw({ address: ethAddress, value: tao(1) })
+ const txFee = (await tx.getPaymentInfo(ss58Address)).partial_fee
+
+ await waitForTransactionCompletion(api, tx, signer)
+ .then(() => { })
+ .catch((error) => { console.log(`transaction error ${error}`) });
+
+ const senderBalanceAfterWithdraw = (await api.query.System.Account.getValue(ss58Address)).data.free
+
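+        // the coldkey should end up with 1 TAO more from the withdraw, minus the extrinsic fee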
+ assert.equal(senderBalance, senderBalanceAfterWithdraw - tao(1) + txFee)
+ });
+
+ it("Transfer from EVM to substrate using evm::call", async () => {
+ const ss58Address = convertPublicKeyToSs58(signer.publicKey)
+        const ethAddress = ss58ToH160(ss58Address);
+
+ // transfer token to mirror eth address
+ const ethTransfer = {
+ to: ss58ToEthAddress(ss58Address),
+ value: raoToEth(tao(2)).toString()
+ }
+
+ const txResponse = await wallet.sendTransaction(ethTransfer)
+ await txResponse.wait();
+
+        const source: FixedSizeBinary<20> = ethAddress;
+ const target = ethAddressToH160(wallet.address)
+ const receiverBalance = await publicClient.getBalance({ address: toViemAddress(wallet.address) })
+
+        // all of these parameter values are sensitive; changing any of them could make the call fail
+ const tx = api.tx.EVM.call({
+ source: source,
+ target: target,
+ // it is U256 in the extrinsic.
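+            // (U256 is passed as four u64 limbs, least-significant limb first, so only the first limb is set here)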
+ value: [raoToEth(tao(1)), tao(0), tao(0), tao(0)],
+ gas_limit: BigInt(1000000),
+ // it is U256 in the extrinsic.
+ max_fee_per_gas: [BigInt(10e9), BigInt(0), BigInt(0), BigInt(0)],
+ max_priority_fee_per_gas: undefined,
+ input: Binary.fromText(""),
+ nonce: undefined,
+ access_list: []
+ })
+        // txFee is only an estimate, not the exact fee charged
+ const txFee = (await tx.getPaymentInfo(ss58Address)).partial_fee
+
+ await waitForTransactionCompletion(api, tx, signer)
+ .then(() => { })
+ .catch((error) => { console.log(`transaction error ${error}`) });
+
+
+ const receiverBalanceAfterCall = await publicClient.getBalance({ address: toViemAddress(wallet.address) })
+ assert.equal(receiverBalanceAfterCall, receiverBalance + raoToEth(tao(1)))
+ });
+
+ it("Forward value in smart contract", async () => {
+
+
+ const contractFactory = new ethers.ContractFactory(WITHDRAW_CONTRACT_ABI, WITHDRAW_CONTRACT_BYTECODE, wallet)
+ const contract = await contractFactory.deploy()
+ await contract.waitForDeployment()
+
+ const code = await publicClient.getCode({ address: toViemAddress(contract.target.toString()) })
+ if (code === undefined) {
+ throw new Error("code length is wrong for deployed contract")
+ }
+ assert.ok(code.length > 100)
+
+ // transfer 2 TAO to contract
+ const ethTransfer = {
+ to: contract.target.toString(),
+ value: raoToEth(tao(2)).toString()
+ }
+
+ const txResponse = await wallet.sendTransaction(ethTransfer)
+ await txResponse.wait();
+
+ const contractBalance = await publicClient.getBalance({ address: toViemAddress(contract.target.toString()) })
+ const callerBalance = await publicClient.getBalance({ address: toViemAddress(wallet.address) })
+
+ const contractForCall = new ethers.Contract(contract.target.toString(), WITHDRAW_CONTRACT_ABI, wallet)
+
+ const withdrawTx = await contractForCall.withdraw(
+ raoToEth(tao(1)).toString()
+ );
+
+ await withdrawTx.wait();
+
+ const contractBalanceAfterWithdraw = await publicClient.getBalance({ address: toViemAddress(contract.target.toString()) })
+ const callerBalanceAfterWithdraw = await publicClient.getBalance({ address: toViemAddress(wallet.address) })
+
+ compareEthBalanceWithTxFee(callerBalanceAfterWithdraw, callerBalance + raoToEth(tao(1)))
+ assert.equal(contractBalance, contractBalanceAfterWithdraw + raoToEth(tao(1)))
+ });
+
+ it("Transfer full balance", async () => {
+ const ethBalance = await publicClient.getBalance({ address: toViemAddress(wallet.address) })
+ const receiverBalance = await publicClient.getBalance({ address: toViemAddress(wallet2.address) })
+ const tx = {
+ to: wallet2.address,
+ value: ethBalance.toString(),
+ };
+ const txPrice = await estimateTransactionCost(provider, tx);
+ const finalTx = {
+ to: wallet2.address,
+ value: (ethBalance - txPrice).toString(),
+ };
+ try {
+            // the transfer should fail since Substrate requires an existential deposit to keep the account alive
+ const txResponse = await wallet.sendTransaction(finalTx)
+ await txResponse.wait();
+ } catch (error) {
+ if (error instanceof Error) {
+ assert.equal((error as any).code, "INSUFFICIENT_FUNDS")
+ assert.equal(error.toString().includes("insufficient funds"), true)
+ }
+ }
+
+ const receiverBalanceAfterTransfer = await publicClient.getBalance({ address: toViemAddress(wallet2.address) })
+ assert.equal(receiverBalance, receiverBalanceAfterTransfer)
+ })
+
+ it("Transfer more than owned balance should fail", async () => {
+ const ethBalance = await publicClient.getBalance({ address: toViemAddress(wallet.address) })
+ const receiverBalance = await publicClient.getBalance({ address: toViemAddress(wallet2.address) })
+ const tx = {
+ to: wallet2.address,
+ value: (ethBalance + raoToEth(tao(1))).toString(),
+ };
+
+ try {
+            // the transfer should fail since Substrate requires an existential deposit to keep the account alive
+ const txResponse = await wallet.sendTransaction(tx)
+ await txResponse.wait();
+ } catch (error) {
+ if (error instanceof Error) {
+ assert.equal((error as any).code, "INSUFFICIENT_FUNDS")
+ assert.equal(error.toString().includes("insufficient funds"), true)
+ }
+ }
+
+ const receiverBalanceAfterTransfer = await publicClient.getBalance({ address: toViemAddress(wallet2.address) })
+ assert.equal(receiverBalance, receiverBalanceAfterTransfer)
+ });
+
+ it("Transfer more than u64::max in substrate equivalent should receive error response", async () => {
+ const receiverBalance = await publicClient.getBalance({ address: toViemAddress(wallet2.address) })
+ try {
+ const tx = {
+ to: wallet2.address,
+ value: raoToEth(BigInt(2) ** BigInt(64)).toString(),
+ };
+            // the transfer should fail since the amount exceeds what the substrate balance type (u64) can represent
+ const txResponse = await wallet.sendTransaction(tx)
+ await txResponse.wait();
+ } catch (error) {
+ if (error instanceof Error) {
+ assert.equal((error as any).code, "INSUFFICIENT_FUNDS")
+ assert.equal(error.toString().includes("insufficient funds"), true)
+ }
+ }
+
+ const contract = getContract(IBALANCETRANSFER_ADDRESS, IBalanceTransferABI, wallet)
+ try {
+ const tx = await contract.transfer(signer.publicKey, { value: raoToEth(BigInt(2) ** BigInt(64)).toString() })
+            await tx.wait()
+ } catch (error) {
+ if (error instanceof Error) {
+ console.log(error.toString())
+ assert.equal(error.toString().includes("revert data"), true)
+ }
+ }
+
+ try {
+ const dest = convertH160ToSS58(wallet2.address)
+ const tx = api.tx.Balances.transfer_keep_alive({ value: bigintToRao(BigInt(2) ** BigInt(64)), dest: MultiAddress.Id(dest) })
+ await waitForTransactionCompletion(api, tx, signer)
+ .then(() => { })
+ .catch((error) => { console.log(`transaction error ${error}`) });
+ } catch (error) {
+ if (error instanceof Error) {
+ console.log(error.toString())
+ assert.equal(error.toString().includes("Cannot convert"), true)
+ }
+ }
+
+ try {
+ const dest = ethAddressToH160(wallet2.address)
+ const tx = api.tx.EVM.withdraw({ value: bigintToRao(BigInt(2) ** BigInt(64)), address: dest })
+ await waitForTransactionCompletion(api, tx, signer)
+ .then(() => { })
+ .catch((error) => { console.log(`transaction error ${error}`) });
+ } catch (error) {
+ if (error instanceof Error) {
+ assert.equal(error.toString().includes("Cannot convert"), true)
+ }
+ }
+
+ try {
+ const source = ethAddressToH160(wallet.address)
+ const target = ethAddressToH160(wallet2.address)
+ const tx = api.tx.EVM.call({
+ source: source,
+ target: target,
+ // it is U256 in the extrinsic, the value is more than u64::MAX
+ value: [raoToEth(tao(1)), tao(0), tao(0), tao(1)],
+ gas_limit: BigInt(1000000),
+ // it is U256 in the extrinsic.
+ max_fee_per_gas: [BigInt(10e9), BigInt(0), BigInt(0), BigInt(0)],
+ max_priority_fee_per_gas: undefined,
+ input: Binary.fromText(""),
+ nonce: undefined,
+ access_list: []
+ })
+ await waitForTransactionCompletion(api, tx, signer)
+ .then(() => { })
+ .catch((error) => { console.log(`transaction error ${error}`) });
+ } catch (error) {
+ if (error instanceof Error) {
+ console.log(error.toString())
+ assert.equal((error as any).code, "INSUFFICIENT_FUNDS")
+ assert.equal(error.toString().includes("insufficient funds"), true)
+ }
+ }
+
+ const receiverBalanceAfterTransfer = await publicClient.getBalance({ address: toViemAddress(wallet2.address) })
+ assert.equal(receiverBalance, receiverBalanceAfterTransfer)
+ });
+
+ it("Gas price should be 10 GWei", async () => {
+ const feeData = await provider.getFeeData();
+ assert.equal(feeData.gasPrice, BigInt(10000000000));
+ });
+
+
+ it("max_fee_per_gas and max_priority_fee_per_gas affect transaction fee properly", async () => {
+
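+        // rows: [maxFeePerGas (gwei), maxPriorityFeePerGas (gwei), expected fee in wei];
+        // with the 10 gwei base fee, the effective gas price is min(maxFeePerGas, baseFee + maxPriorityFeePerGas)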
+ const testCases = [
+ [10, 0, 21000 * 10 * 1e9],
+ [10, 10, 21000 * 10 * 1e9],
+ [11, 0, 21000 * 10 * 1e9],
+ [11, 1, (21000 * 10 + 21000) * 1e9],
+ [11, 2, (21000 * 10 + 21000) * 1e9],
+ ];
+
+        for (const tc of testCases) {
+ const actualFee = await transferAndGetFee(
+ wallet, wallet2, publicClient,
+ gwei * BigInt(tc[0]),
+ gwei * BigInt(tc[1])
+ );
+ assert.equal(actualFee, BigInt(tc[2]))
+ }
+ });
+
+ it("Low max_fee_per_gas gets transaction rejected", async () => {
+ try {
+ await transferAndGetFee(wallet, wallet2, publicClient, gwei * BigInt(9), BigInt(0))
+ } catch (error) {
+ if (error instanceof Error) {
+ console.log(error.toString())
+ assert.equal(error.toString().includes("gas price less than block base fee"), true)
+ }
+ }
+ });
+
+ it("max_fee_per_gas lower than max_priority_fee_per_gas gets transaction rejected", async () => {
+ try {
+ await transferAndGetFee(wallet, wallet2, publicClient, gwei * BigInt(10), gwei * BigInt(11))
+ } catch (error) {
+ if (error instanceof Error) {
+ assert.equal(error.toString().includes("priorityFee cannot be more than maxFee"), true)
+ }
+ }
+ });
+});
+
+async function transferAndGetFee(wallet: ethers.Wallet, wallet2: ethers.Wallet, client: PublicClient, max_fee_per_gas: bigint, max_priority_fee_per_gas: bigint) {
+
+ const ethBalanceBefore = await client.getBalance({ address: toViemAddress(wallet.address) })
+ // Send TAO
+ const tx = {
+ to: wallet2.address,
+ value: raoToEth(tao(1)).toString(),
+ // EIP-1559 transaction parameters
+ maxPriorityFeePerGas: max_priority_fee_per_gas.toString(),
+ maxFeePerGas: max_fee_per_gas.toString(),
+ gasLimit: 21000,
+ };
+
+ // Send the transaction
+ const txResponse = await wallet.sendTransaction(tx);
+ await txResponse.wait()
+
+ // Check balances
+ const ethBalanceAfter = await client.getBalance({ address: toViemAddress(wallet.address) })
+ const fee = ethBalanceBefore - ethBalanceAfter - raoToEth(tao(1))
+
+ return fee;
+}
\ No newline at end of file
diff --git a/evm-tests/test/metagraph.precompile.test.ts b/evm-tests/test/metagraph.precompile.test.ts
new file mode 100644
index 0000000000..94c0df8861
--- /dev/null
+++ b/evm-tests/test/metagraph.precompile.test.ts
@@ -0,0 +1,147 @@
+import * as assert from "assert";
+
+import { getAliceSigner, getClient, getDevnetApi, waitForTransactionCompletion, convertPublicKeyToMultiAddress, getRandomSubstrateKeypair, getSignerFromKeypair } from "../src/substrate"
+import { getPublicClient, } from "../src/utils";
+import { ETH_LOCAL_URL, SUB_LOCAL_URL, } from "../src/config";
+import { devnet } from "@polkadot-api/descriptors"
+import { PublicClient } from "viem";
+import { PolkadotSigner, TypedApi } from "polkadot-api";
+import { toViemAddress, convertPublicKeyToSs58 } from "../src/address-utils"
+import { IMetagraphABI, IMETAGRAPH_ADDRESS } from "../src/contracts/metagraph"
+
+describe("Test the EVM chain ID", () => {
+ // init substrate part
+ const hotkey = getRandomSubstrateKeypair();
+ const coldkey = getRandomSubstrateKeypair();
+ let publicClient: PublicClient;
+
+ let api: TypedApi
+
+ // sudo account alice as signer
+ let alice: PolkadotSigner;
+
+ // init other variable
+ let subnetId = 0;
+
+ before(async () => {
+ // init variables got from await and async
+ publicClient = await getPublicClient(ETH_LOCAL_URL)
+ const subClient = await getClient(SUB_LOCAL_URL)
+ api = await getDevnetApi()
+ alice = await getAliceSigner();
+
+ {
+ const multiAddress = convertPublicKeyToMultiAddress(hotkey.publicKey)
+ const internalCall = api.tx.Balances.force_set_balance({ who: multiAddress, new_free: BigInt(1e12) })
+ const tx = api.tx.Sudo.sudo({ call: internalCall.decodedCall })
+
+ await waitForTransactionCompletion(api, tx, alice)
+ .then(() => { })
+ .catch((error) => { console.log(`transaction error ${error}`) });
+ }
+
+ {
+ const multiAddress = convertPublicKeyToMultiAddress(coldkey.publicKey)
+ const internalCall = api.tx.Balances.force_set_balance({ who: multiAddress, new_free: BigInt(1e12) })
+ const tx = api.tx.Sudo.sudo({ call: internalCall.decodedCall })
+
+ await waitForTransactionCompletion(api, tx, alice)
+ .then(() => { })
+ .catch((error) => { console.log(`transaction error ${error}`) });
+ }
+
+ const signer = getSignerFromKeypair(coldkey)
+ const registerNetworkTx = api.tx.SubtensorModule.register_network({ hotkey: convertPublicKeyToSs58(hotkey.publicKey) })
+ await waitForTransactionCompletion(api, registerNetworkTx, signer)
+ .then(() => { })
+ .catch((error) => { console.log(`transaction error ${error}`) });
+
+ let totalNetworks = await api.query.SubtensorModule.TotalNetworks.getValue()
+ assert.ok(totalNetworks > 1)
+ subnetId = totalNetworks - 1
+
+ let uid_count =
+ await api.query.SubtensorModule.SubnetworkN.getValue(subnetId)
+ if (uid_count === 0) {
+ const tx = api.tx.SubtensorModule.burned_register({ hotkey: convertPublicKeyToSs58(hotkey.publicKey), netuid: subnetId })
+ await waitForTransactionCompletion(api, tx, signer)
+ .then(() => { })
+ .catch((error) => { console.log(`transaction error ${error}`) });
+ }
+ })
+
+ it("Metagraph data access via precompile contract is ok", async () => {
+ const uid = 0
+ const uid_count = await publicClient.readContract({
+ abi: IMetagraphABI,
+ address: toViemAddress(IMETAGRAPH_ADDRESS),
+ functionName: "getUidCount",
+ args: [subnetId]
+ })
+        // the precompile should return a defined uid count for the subnet
+ assert.ok(uid_count != undefined);
+
+
+ const axon = await publicClient.readContract({
+ abi: IMetagraphABI,
+ address: toViemAddress(IMETAGRAPH_ADDRESS),
+ functionName: "getAxon",
+ args: [subnetId, uid]
+ })
+
+ assert.ok(axon != undefined);
+ if (axon instanceof Object) {
+ assert.ok(axon != undefined);
+ if ("block" in axon) {
+ assert.ok(axon.block != undefined);
+ } else {
+ throw new Error("block not included in axon")
+ }
+
+ if ("version" in axon) {
+ assert.ok(axon.version != undefined);
+ } else {
+ throw new Error("version not included in axon")
+ }
+
+ if ("ip" in axon) {
+ assert.ok(axon.ip != undefined);
+ } else {
+ throw new Error("ip not included in axon")
+ }
+
+ if ("port" in axon) {
+ assert.ok(axon.port != undefined);
+ } else {
+ throw new Error("port not included in axon")
+ }
+
+ if ("ip_type" in axon) {
+ assert.ok(axon.ip_type != undefined);
+ } else {
+ throw new Error("ip_type not included in axon")
+ }
+
+ if ("protocol" in axon) {
+ assert.ok(axon.protocol != undefined);
+ } else {
+ throw new Error("protocol not included in axon")
+ }
+ }
+
+ const methodList = ["getEmission", "getVtrust", "getValidatorStatus", "getLastUpdate", "getIsActive",
+ "getHotkey", "getColdkey"
+ ]
+ for (const method of methodList) {
+ const value = await publicClient.readContract({
+ abi: IMetagraphABI,
+ address: toViemAddress(IMETAGRAPH_ADDRESS),
+ functionName: method,
+ args: [subnetId, uid]
+ })
+
+ assert.ok(value != undefined);
+ }
+ });
+});
\ No newline at end of file
diff --git a/evm-tests/test/neuron.precompile.emission-check.test.ts b/evm-tests/test/neuron.precompile.emission-check.test.ts
new file mode 100644
index 0000000000..ac609c1e27
--- /dev/null
+++ b/evm-tests/test/neuron.precompile.emission-check.test.ts
@@ -0,0 +1,72 @@
+import * as assert from "assert";
+
+import { getAliceSigner, getClient, getDevnetApi, getRandomSubstrateKeypair } from "../src/substrate"
+import { getPublicClient, } from "../src/utils";
+import { ETH_LOCAL_URL, SUB_LOCAL_URL, } from "../src/config";
+import { devnet } from "@polkadot-api/descriptors"
+import { PublicClient } from "viem";
+import { PolkadotSigner, TypedApi } from "polkadot-api";
+import { convertPublicKeyToSs58, } from "../src/address-utils"
+import { ethers } from "ethers"
+import { INEURON_ADDRESS, INeuronABI } from "../src/contracts/neuron"
+import { generateRandomEthersWallet } from "../src/utils"
+import { forceSetBalanceToSs58Address, forceSetBalanceToEthAddress, addNewSubnetwork } from "../src/subtensor"
+
+describe("Test the EVM chain ID", () => {
+ // init eth part
+ const wallet = generateRandomEthersWallet();
+
+ // init substrate part
+ const hotkey = getRandomSubstrateKeypair();
+ const hotkey2 = getRandomSubstrateKeypair();
+ const coldkey = getRandomSubstrateKeypair();
+ let publicClient: PublicClient;
+
+ let api: TypedApi
+
+ // sudo account alice as signer
+ let alice: PolkadotSigner;
+
+ before(async () => {
+ // init variables got from await and async
+ publicClient = await getPublicClient(ETH_LOCAL_URL)
+ const subClient = await getClient(SUB_LOCAL_URL)
+ api = await getDevnetApi()
+ alice = await getAliceSigner();
+ await forceSetBalanceToSs58Address(api, convertPublicKeyToSs58(hotkey.publicKey))
+ await forceSetBalanceToSs58Address(api, convertPublicKeyToSs58(hotkey2.publicKey))
+
+ await forceSetBalanceToSs58Address(api, convertPublicKeyToSs58(coldkey.publicKey))
+ await forceSetBalanceToEthAddress(api, wallet.address)
+
+ const netuid = await addNewSubnetwork(api, hotkey2, coldkey)
+ console.log("test on subnet ", netuid)
+ })
+
+ it("Burned register and check emission", async () => {
+ let netuid = (await api.query.SubtensorModule.TotalNetworks.getValue()) - 1
+ const uid = await api.query.SubtensorModule.SubnetworkN.getValue(netuid)
+ const contract = new ethers.Contract(INEURON_ADDRESS, INeuronABI, wallet);
+
+ const tx = await contract.burnedRegister(
+ netuid,
+ hotkey.publicKey
+ );
+ await tx.wait();
+
+ const uidAfterNew = await api.query.SubtensorModule.SubnetworkN.getValue(netuid)
+ assert.equal(uid + 1, uidAfterNew)
+
+ const key = await api.query.SubtensorModule.Keys.getValue(netuid, uid)
+ assert.equal(key, convertPublicKeyToSs58(hotkey.publicKey))
+
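+        // poll PendingEmission for ~20 seconds to observe emission accruing on the new subnet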
+ let i = 0;
+ while (i < 10) {
+ const emission = await api.query.SubtensorModule.PendingEmission.getValue(netuid)
+
+ console.log("emission is ", emission);
+ await new Promise((resolve) => setTimeout(resolve, 2000));
+ i += 1;
+ }
+ })
+});
\ No newline at end of file
diff --git a/evm-tests/test/neuron.precompile.reveal-weights.test.ts b/evm-tests/test/neuron.precompile.reveal-weights.test.ts
new file mode 100644
index 0000000000..85125f0956
--- /dev/null
+++ b/evm-tests/test/neuron.precompile.reveal-weights.test.ts
@@ -0,0 +1,142 @@
+import * as assert from "assert";
+import { getAliceSigner, getDevnetApi, getRandomSubstrateKeypair } from "../src/substrate"
+import { devnet } from "@polkadot-api/descriptors"
+import { PolkadotSigner, TypedApi } from "polkadot-api";
+import { convertPublicKeyToSs58, convertH160ToSS58 } from "../src/address-utils"
+import { Vec, Tuple, VecFixed, u16, u8, u64 } from "@polkadot/types-codec";
+import { TypeRegistry } from "@polkadot/types";
+import { ethers } from "ethers"
+import { INEURON_ADDRESS, INeuronABI } from "../src/contracts/neuron"
+import { generateRandomEthersWallet } from "../src/utils"
+import { convertH160ToPublicKey } from "../src/address-utils"
+import { blake2AsU8a } from "@polkadot/util-crypto"
+import {
+ forceSetBalanceToEthAddress, forceSetBalanceToSs58Address, addNewSubnetwork, setCommitRevealWeightsEnabled, setWeightsSetRateLimit, burnedRegister,
+ setTempo, setCommitRevealWeightsInterval
+} from "../src/subtensor"
+
+// hardcode some values for reveal hash
+const uids = [1];
+const values = [5];
+const salt = [9];
+const version_key = 0;
+
+function getCommitHash(netuid: number, address: string) {
+ const registry = new TypeRegistry();
+ let publicKey = convertH160ToPublicKey(address);
+
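+    // the commit hash is the blake2_256 hash of the SCALE-encoded tuple
+    // (account, netuid, uids, values, salt, version_key), which is what the runtime is expected to check on reveal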
+ const tupleData = new Tuple(
+ registry,
+ [
+ VecFixed.with(u8, 32),
+ u16,
+ Vec.with(u16),
+ Vec.with(u16),
+ Vec.with(u16),
+ u64,
+ ],
+ [publicKey, netuid, uids, values, salt, version_key]
+ );
+
+ const hash = blake2AsU8a(tupleData.toU8a());
+ return hash;
+}
+
+describe("Test neuron precompile reveal weights", () => {
+ // init eth part
+ const wallet = generateRandomEthersWallet();
+
+ // init substrate part
+ const hotkey = getRandomSubstrateKeypair();
+ const coldkey = getRandomSubstrateKeypair();
+
+ let api: TypedApi
+
+ // sudo account alice as signer
+ let alice: PolkadotSigner;
+ before(async () => {
+ // init variables got from await and async
+ api = await getDevnetApi()
+ alice = await getAliceSigner();
+
+ await forceSetBalanceToSs58Address(api, convertPublicKeyToSs58(alice.publicKey))
+ await forceSetBalanceToSs58Address(api, convertPublicKeyToSs58(hotkey.publicKey))
+ await forceSetBalanceToSs58Address(api, convertPublicKeyToSs58(coldkey.publicKey))
+ await forceSetBalanceToEthAddress(api, wallet.address)
+ let netuid = await addNewSubnetwork(api, hotkey, coldkey)
+
+ console.log("test the case on subnet ", netuid)
+
+ // enable commit reveal feature
+ await setCommitRevealWeightsEnabled(api, netuid, true)
+        // set the rate limit to 0 so weights can be set at any time
+ await setWeightsSetRateLimit(api, netuid, BigInt(0))
+
+ const ss58Address = convertH160ToSS58(wallet.address)
+ await burnedRegister(api, netuid, ss58Address, coldkey)
+
+ const uid = await api.query.SubtensorModule.Uids.getValue(
+ netuid,
+ ss58Address
+ )
+ // eth wallet account should be the first neuron in the subnet
+ assert.equal(uid, uids[0])
+ })
+
+ it("EVM neuron commit weights via call precompile", async () => {
+ let totalNetworks = await api.query.SubtensorModule.TotalNetworks.getValue()
+ const subnetId = totalNetworks - 1
+ const commitHash = getCommitHash(subnetId, wallet.address)
+ const contract = new ethers.Contract(INEURON_ADDRESS, INeuronABI, wallet);
+ const tx = await contract.commitWeights(subnetId, commitHash)
+ await tx.wait()
+
+ const ss58Address = convertH160ToSS58(wallet.address)
+
+ const weightsCommit = await api.query.SubtensorModule.WeightCommits.getValue(subnetId, ss58Address)
+ if (weightsCommit === undefined) {
+ throw new Error("submit weights failed")
+ }
+ assert.ok(weightsCommit.length > 0)
+ })
+
+ it("EVM neuron reveal weights via call precompile", async () => {
+ let totalNetworks = await api.query.SubtensorModule.TotalNetworks.getValue()
+ const netuid = totalNetworks - 1
+ const contract = new ethers.Contract(INEURON_ADDRESS, INeuronABI, wallet);
+        // set a large tempo (epoch length) so there is enough time to reveal the weights
+ await setTempo(api, netuid, 60000)
+        // set the reveal interval to 0 epochs so we can reveal in the same epoch as the commit
+ await setCommitRevealWeightsInterval(api, netuid, BigInt(0))
+
+ const tx = await contract.revealWeights(
+ netuid,
+ uids,
+ values,
+ salt,
+ version_key
+ );
+ await tx.wait()
+ const ss58Address = convertH160ToSS58(wallet.address)
+
+        // check the weight commit is removed after a successful reveal
+ const weightsCommit = await api.query.SubtensorModule.WeightCommits.getValue(netuid, ss58Address)
+ assert.equal(weightsCommit, undefined)
+
+ // check the weight is set after reveal with correct uid
+ const neuron_uid = await api.query.SubtensorModule.Uids.getValue(
+ netuid,
+ ss58Address
+ )
+
+ const weights = await api.query.SubtensorModule.Weights.getValue(netuid, neuron_uid)
+
+ if (weights === undefined) {
+ throw new Error("weights not available onchain")
+ }
+ for (const weight of weights) {
+ assert.equal(weight[0], neuron_uid)
+ assert.ok(weight[1] !== undefined)
+ }
+ })
+});
\ No newline at end of file
diff --git a/evm-tests/test/neuron.precompile.serve.axon-prometheus.test.ts b/evm-tests/test/neuron.precompile.serve.axon-prometheus.test.ts
new file mode 100644
index 0000000000..aee84f130c
--- /dev/null
+++ b/evm-tests/test/neuron.precompile.serve.axon-prometheus.test.ts
@@ -0,0 +1,162 @@
+import * as assert from "assert";
+import { getAliceSigner, getClient, getDevnetApi, getRandomSubstrateKeypair } from "../src/substrate"
+import { SUB_LOCAL_URL, } from "../src/config";
+import { devnet } from "@polkadot-api/descriptors"
+import { PolkadotSigner, TypedApi } from "polkadot-api";
+import { convertPublicKeyToSs58, convertH160ToSS58 } from "../src/address-utils"
+import { ethers } from "ethers"
+import { INEURON_ADDRESS, INeuronABI } from "../src/contracts/neuron"
+import { generateRandomEthersWallet } from "../src/utils"
+import { forceSetBalanceToEthAddress, forceSetBalanceToSs58Address, addNewSubnetwork, burnedRegister } from "../src/subtensor"
+
+describe("Test neuron precompile Serve Axon Prometheus", () => {
+ // init eth part
+ const wallet1 = generateRandomEthersWallet();
+ const wallet2 = generateRandomEthersWallet();
+ const wallet3 = generateRandomEthersWallet();
+
+ // init substrate part
+ const hotkey = getRandomSubstrateKeypair();
+ const coldkey = getRandomSubstrateKeypair();
+
+ let api: TypedApi
+
+ // sudo account alice as signer
+ let alice: PolkadotSigner;
+ before(async () => {
+ // init variables got from await and async
+ const subClient = await getClient(SUB_LOCAL_URL)
+ api = await getDevnetApi()
+ alice = await getAliceSigner();
+
+ await forceSetBalanceToSs58Address(api, convertPublicKeyToSs58(alice.publicKey))
+ await forceSetBalanceToSs58Address(api, convertPublicKeyToSs58(hotkey.publicKey))
+ await forceSetBalanceToSs58Address(api, convertPublicKeyToSs58(coldkey.publicKey))
+ await forceSetBalanceToEthAddress(api, wallet1.address)
+ await forceSetBalanceToEthAddress(api, wallet2.address)
+ await forceSetBalanceToEthAddress(api, wallet3.address)
+ let netuid = await addNewSubnetwork(api, hotkey, coldkey)
+
+ console.log("test the case on subnet ", netuid)
+
+ await burnedRegister(api, netuid, convertH160ToSS58(wallet1.address), coldkey)
+ await burnedRegister(api, netuid, convertH160ToSS58(wallet2.address), coldkey)
+ await burnedRegister(api, netuid, convertH160ToSS58(wallet3.address), coldkey)
+ })
+
+ it("Serve Axon", async () => {
+ let netuid = (await api.query.SubtensorModule.TotalNetworks.getValue()) - 1
+ const version = 0;
+ const ip = 1;
+ const port = 2;
+ const ipType = 4;
+ const protocol = 0;
+ const placeholder1 = 8;
+ const placeholder2 = 9;
+
+ const contract = new ethers.Contract(INEURON_ADDRESS, INeuronABI, wallet1);
+
+ const tx = await contract.serveAxon(
+ netuid,
+ version,
+ ip,
+ port,
+ ipType,
+ protocol,
+ placeholder1,
+ placeholder2
+ );
+ await tx.wait();
+
+ const axon = await api.query.SubtensorModule.Axons.getValue(
+ netuid,
+ convertH160ToSS58(wallet1.address)
+ )
+ assert.notEqual(axon?.block, undefined)
+ assert.equal(axon?.version, version)
+ assert.equal(axon?.ip, ip)
+ assert.equal(axon?.port, port)
+ assert.equal(axon?.ip_type, ipType)
+ assert.equal(axon?.protocol, protocol)
+ assert.equal(axon?.placeholder1, placeholder1)
+ assert.equal(axon?.placeholder2, placeholder2)
+ });
+
+ it("Serve Axon TLS", async () => {
+ let netuid = (await api.query.SubtensorModule.TotalNetworks.getValue()) - 1
+ const version = 0;
+ const ip = 1;
+ const port = 2;
+ const ipType = 4;
+ const protocol = 0;
+ const placeholder1 = 8;
+ const placeholder2 = 9;
+ // certificate length is 65
+ const certificate = new Uint8Array([
+ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20,
+ 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38,
+ 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56,
+ 57, 58, 59, 60, 61, 62, 63, 64, 65,
+ ]);
+
+ const contract = new ethers.Contract(INEURON_ADDRESS, INeuronABI, wallet2);
+
+ const tx = await contract.serveAxonTls(
+ netuid,
+ version,
+ ip,
+ port,
+ ipType,
+ protocol,
+ placeholder1,
+ placeholder2,
+ certificate
+ );
+ await tx.wait();
+
+ const axon = await api.query.SubtensorModule.Axons.getValue(
+ netuid,
+ convertH160ToSS58(wallet2.address))
+
+ assert.notEqual(axon?.block, undefined)
+ assert.equal(axon?.version, version)
+ assert.equal(axon?.ip, ip)
+ assert.equal(axon?.port, port)
+ assert.equal(axon?.ip_type, ipType)
+ assert.equal(axon?.protocol, protocol)
+ assert.equal(axon?.placeholder1, placeholder1)
+ assert.equal(axon?.placeholder2, placeholder2)
+ });
+
+ it("Serve Prometheus", async () => {
+ let netuid = (await api.query.SubtensorModule.TotalNetworks.getValue()) - 1
+ const version = 0;
+ const ip = 1;
+ const port = 2;
+ const ipType = 4;
+
+ const contract = new ethers.Contract(INEURON_ADDRESS, INeuronABI, wallet3);
+
+ const tx = await contract.servePrometheus(
+ netuid,
+ version,
+ ip,
+ port,
+ ipType
+ );
+ await tx.wait();
+
+ const prometheus = (
+ await api.query.SubtensorModule.Prometheus.getValue(
+ netuid,
+ convertH160ToSS58(wallet3.address)
+ )
+ )
+
+ assert.notEqual(prometheus?.block, undefined)
+ assert.equal(prometheus?.version, version)
+ assert.equal(prometheus?.ip, ip)
+ assert.equal(prometheus?.port, port)
+ assert.equal(prometheus?.ip_type, ipType)
+ });
+});
\ No newline at end of file
diff --git a/evm-tests/test/neuron.precompile.set-weights.test.ts b/evm-tests/test/neuron.precompile.set-weights.test.ts
new file mode 100644
index 0000000000..393c2b97b8
--- /dev/null
+++ b/evm-tests/test/neuron.precompile.set-weights.test.ts
@@ -0,0 +1,65 @@
+import * as assert from "assert";
+
+import { getDevnetApi, getRandomSubstrateKeypair } from "../src/substrate"
+import { devnet } from "@polkadot-api/descriptors"
+import { TypedApi } from "polkadot-api";
+import { convertH160ToSS58, convertPublicKeyToSs58, } from "../src/address-utils"
+import { ethers } from "ethers"
+import { INEURON_ADDRESS, INeuronABI } from "../src/contracts/neuron"
+import { generateRandomEthersWallet } from "../src/utils"
+import {
+ forceSetBalanceToSs58Address, forceSetBalanceToEthAddress, addNewSubnetwork, burnedRegister, setCommitRevealWeightsEnabled,
+ setWeightsSetRateLimit
+} from "../src/subtensor"
+
+describe("Test neuron precompile contract, set weights function", () => {
+ // init eth part
+ const wallet = generateRandomEthersWallet();
+
+ // init substrate part
+ const hotkey = getRandomSubstrateKeypair();
+ const coldkey = getRandomSubstrateKeypair();
+
+ let api: TypedApi
+
+ before(async () => {
+ api = await getDevnetApi()
+
+ await forceSetBalanceToSs58Address(api, convertPublicKeyToSs58(hotkey.publicKey))
+
+ await forceSetBalanceToSs58Address(api, convertPublicKeyToSs58(coldkey.publicKey))
+ await forceSetBalanceToEthAddress(api, wallet.address)
+
+ const netuid = await addNewSubnetwork(api, hotkey, coldkey)
+ console.log("test on subnet ", netuid)
+
+ await burnedRegister(api, netuid, convertH160ToSS58(wallet.address), coldkey)
+ const uid = await api.query.SubtensorModule.Uids.getValue(netuid, convertH160ToSS58(wallet.address))
+ assert.notEqual(uid, undefined)
+ // disable reveal and enable direct set weights
+ await setCommitRevealWeightsEnabled(api, netuid, false)
+ await setWeightsSetRateLimit(api, netuid, BigInt(0))
+ })
+
+ it("Set weights is ok", async () => {
+ let netuid = (await api.query.SubtensorModule.TotalNetworks.getValue()) - 1
+ const uid = await api.query.SubtensorModule.Uids.getValue(netuid, convertH160ToSS58(wallet.address))
+
+ const contract = new ethers.Contract(INEURON_ADDRESS, INeuronABI, wallet);
+ const dests = [1];
+ const weights = [2];
+ const version_key = 0;
+
+ const tx = await contract.setWeights(netuid, dests, weights, version_key);
+
+ await tx.wait();
+ const weightsOnChain = await api.query.SubtensorModule.Weights.getValue(netuid, uid)
+
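+ // Weights are normalized on-chain before being stored, so assert that the stored
+ // value for our uid is positive rather than equal to the raw input of 2.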
+ weightsOnChain.forEach((weight, _) => {
+ const uidInWeight = weight[0];
+ const value = weight[1];
+ assert.equal(uidInWeight, uid)
+ assert.ok(value > 0)
+ });
+ })
+});
\ No newline at end of file
diff --git a/evm-tests/test/staking.precompile.add-remove.test.ts b/evm-tests/test/staking.precompile.add-remove.test.ts
new file mode 100644
index 0000000000..5387e62428
--- /dev/null
+++ b/evm-tests/test/staking.precompile.add-remove.test.ts
@@ -0,0 +1,326 @@
+import * as assert from "assert";
+import { getDevnetApi, getRandomSubstrateKeypair } from "../src/substrate"
+import { devnet } from "@polkadot-api/descriptors"
+import { PolkadotSigner, TypedApi } from "polkadot-api";
+import { convertPublicKeyToSs58, convertH160ToSS58 } from "../src/address-utils"
+import { raoToEth, tao } from "../src/balance-math"
+import { ethers } from "ethers"
+import { generateRandomEthersWallet, getPublicClient } from "../src/utils"
+import { convertH160ToPublicKey } from "../src/address-utils"
+import {
+ forceSetBalanceToEthAddress, forceSetBalanceToSs58Address, addNewSubnetwork, burnedRegister,
+ sendProxyCall,
+} from "../src/subtensor"
+import { ETH_LOCAL_URL } from "../src/config";
+import { ISTAKING_ADDRESS, ISTAKING_V2_ADDRESS, IStakingABI, IStakingV2ABI } from "../src/contracts/staking"
+import { PublicClient } from "viem";
+
+describe("Test neuron precompile reveal weights", () => {
+ // init eth part
+ const wallet1 = generateRandomEthersWallet();
+ const wallet2 = generateRandomEthersWallet();
+ let publicClient: PublicClient;
+ // init substrate part
+ const hotkey = getRandomSubstrateKeypair();
+ const coldkey = getRandomSubstrateKeypair();
+ const proxy = getRandomSubstrateKeypair();
+
+ let api: TypedApi<typeof devnet>
+
+ // sudo account alice as signer
+ let alice: PolkadotSigner;
+ before(async () => {
+ publicClient = await getPublicClient(ETH_LOCAL_URL)
+ // initialize variables that require await
+ api = await getDevnetApi()
+
+ // await forceSetBalanceToSs58Address(api, convertPublicKeyToSs58(alice.publicKey))
+ await forceSetBalanceToSs58Address(api, convertPublicKeyToSs58(hotkey.publicKey))
+ await forceSetBalanceToSs58Address(api, convertPublicKeyToSs58(coldkey.publicKey))
+ await forceSetBalanceToSs58Address(api, convertPublicKeyToSs58(proxy.publicKey))
+ await forceSetBalanceToEthAddress(api, wallet1.address)
+ await forceSetBalanceToEthAddress(api, wallet2.address)
+ let netuid = await addNewSubnetwork(api, hotkey, coldkey)
+
+ console.log("test the case on subnet ", netuid)
+
+ await burnedRegister(api, netuid, convertH160ToSS58(wallet1.address), coldkey)
+ await burnedRegister(api, netuid, convertH160ToSS58(wallet2.address), coldkey)
+ })
+
+ it("Can add stake", async () => {
+ let netuid = (await api.query.SubtensorModule.TotalNetworks.getValue()) - 1
+ // ETH unit
+ let stakeBalance = raoToEth(tao(20))
+ const stakeBefore = await api.query.SubtensorModule.Alpha.getValue(convertPublicKeyToSs58(hotkey.publicKey), convertH160ToSS58(wallet1.address), netuid)
+ const contract = new ethers.Contract(ISTAKING_ADDRESS, IStakingABI, wallet1);
+ const tx = await contract.addStake(hotkey.publicKey, netuid, { value: stakeBalance.toString() })
+ await tx.wait()
+
+ const stakeFromContract = BigInt(
+ await contract.getStake(hotkey.publicKey, convertH160ToPublicKey(wallet1.address), netuid)
+ );
+
+ assert.ok(stakeFromContract > stakeBefore)
+ const stakeAfter = await api.query.SubtensorModule.Alpha.getValue(convertPublicKeyToSs58(hotkey.publicKey), convertH160ToSS58(wallet1.address), netuid)
+ assert.ok(stakeAfter > stakeBefore)
+ })
+
+ it("Can add stake V2", async () => {
+ let netuid = (await api.query.SubtensorModule.TotalNetworks.getValue()) - 1
+ // the unit in V2 is RAO, not ETH
+ let stakeBalance = tao(20)
+ const stakeBefore = await api.query.SubtensorModule.Alpha.getValue(convertPublicKeyToSs58(hotkey.publicKey), convertH160ToSS58(wallet2.address), netuid)
+ const contract = new ethers.Contract(ISTAKING_V2_ADDRESS, IStakingV2ABI, wallet2);
+ const tx = await contract.addStake(hotkey.publicKey, stakeBalance.toString(), netuid)
+ await tx.wait()
+
+ const stakeFromContract = BigInt(
+ await contract.getStake(hotkey.publicKey, convertH160ToPublicKey(wallet2.address), netuid)
+ );
+
+ assert.ok(stakeFromContract > stakeBefore)
+ const stakeAfter = await api.query.SubtensorModule.Alpha.getValue(convertPublicKeyToSs58(hotkey.publicKey), convertH160ToSS58(wallet2.address), netuid)
+ assert.ok(stakeAfter > stakeBefore)
+ })
+
+ it("Can not add stake if subnet doesn't exist", async () => {
+ // wrong netuid
+ let netuid = 12345;
+ let stakeBalance = raoToEth(tao(20))
+ const stakeBefore = await api.query.SubtensorModule.Alpha.getValue(convertPublicKeyToSs58(hotkey.publicKey), convertH160ToSS58(wallet1.address), netuid)
+ const contract = new ethers.Contract(ISTAKING_ADDRESS, IStakingABI, wallet1);
+ try {
+ const tx = await contract.addStake(hotkey.publicKey, netuid, { value: stakeBalance.toString() })
+ await tx.wait()
+ assert.fail("Transaction should have failed");
+ } catch (error) {
+ // Transaction failed as expected
+ }
+
+ const stakeFromContract = BigInt(
+ await contract.getStake(hotkey.publicKey, convertH160ToPublicKey(wallet1.address), netuid)
+ );
+ assert.equal(stakeFromContract, stakeBefore)
+ const stakeAfter = await api.query.SubtensorModule.Alpha.getValue(convertPublicKeyToSs58(hotkey.publicKey), convertH160ToSS58(wallet1.address), netuid)
+ assert.equal(stakeAfter, stakeBefore)
+ });
+
+ it("Can not add stake V2 if subnet doesn't exist", async () => {
+ // wrong netuid
+ let netuid = 12345;
+ // the unit in V2 is RAO, not ETH
+ let stakeBalance = tao(20)
+ const stakeBefore = await api.query.SubtensorModule.Alpha.getValue(convertPublicKeyToSs58(hotkey.publicKey), convertH160ToSS58(wallet2.address), netuid)
+ const contract = new ethers.Contract(ISTAKING_V2_ADDRESS, IStakingV2ABI, wallet2);
+
+ try {
+ const tx = await contract.addStake(hotkey.publicKey, stakeBalance.toString(), netuid);
+ await tx.wait();
+ assert.fail("Transaction should have failed");
+ } catch (error) {
+ // Transaction failed as expected
+ }
+
+ const stakeFromContract = BigInt(
+ await contract.getStake(hotkey.publicKey, convertH160ToPublicKey(wallet2.address), netuid)
+ );
+ assert.equal(stakeFromContract, stakeBefore)
+ const stakeAfter = await api.query.SubtensorModule.Alpha.getValue(convertPublicKeyToSs58(hotkey.publicKey), convertH160ToSS58(wallet2.address), netuid)
+ assert.equal(stakeAfter, stakeBefore)
+ })
+
+ it("Can get stake via contract read method", async () => {
+ let netuid = (await api.query.SubtensorModule.TotalNetworks.getValue()) - 1
+
+ // TODO: check how to pass bytes32 as a parameter to readContract
+ // const value = await publicClient.readContract({
+ // address: ISTAKING_ADDRESS,
+ // abi: IStakingABI,
+ // functionName: "getStake",
+ // args: [hotkey.publicKey, // Convert to bytes32 format
+ // convertH160ToPublicKey(wallet1.address),
+ // netuid]
+ // })
+ // if (value === undefined || value === null) {
+ // throw new Error("value of getStake from contract is undefined")
+ // }
+ // const intValue = BigInt(value.toString())
+
+ const contractV1 = new ethers.Contract(ISTAKING_ADDRESS, IStakingABI, wallet1);
+ const stakeFromContractV1 = BigInt(
+ await contractV1.getStake(hotkey.publicKey, convertH160ToPublicKey(wallet1.address), netuid)
+ );
+
+ const contractV2 = new ethers.Contract(ISTAKING_V2_ADDRESS, IStakingV2ABI, wallet1);
+ // unit from contract V2 is RAO, not ETH
+ const stakeFromContractV2 = Number(
+ await contractV2.getStake(hotkey.publicKey, convertH160ToPublicKey(wallet1.address), netuid)
+ );
+
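+ // V1 getStake returns the amount in 18-decimal (wei-style) units while V2 returns RAO
+ // (9 decimals); tao() appears to rescale by 1e9 here, making the two readings comparable.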
+ assert.equal(stakeFromContractV1, tao(stakeFromContractV2))
+
+ })
+
+ it("Can remove stake", async () => {
+ let netuid = (await api.query.SubtensorModule.TotalNetworks.getValue()) - 1
+ const contract = new ethers.Contract(
+ ISTAKING_ADDRESS,
+ IStakingABI,
+ wallet1
+ );
+
+ const stakeBeforeRemove = BigInt(
+ await contract.getStake(hotkey.publicKey, convertH160ToPublicKey(wallet1.address), netuid)
+ );
+
+ let stakeBalance = raoToEth(tao(10))
+ const tx = await contract.removeStake(hotkey.publicKey, stakeBalance, netuid)
+ await tx.wait()
+
+ const stakeAfterRemove = BigInt(
+ await contract.getStake(hotkey.publicKey, convertH160ToPublicKey(wallet1.address), netuid)
+ );
+ assert.ok(stakeAfterRemove < stakeBeforeRemove)
+
+ })
+
+ it("Can remove stake V2", async () => {
+ let netuid = (await api.query.SubtensorModule.TotalNetworks.getValue()) - 1
+ const contract = new ethers.Contract(
+ ISTAKING_V2_ADDRESS,
+ IStakingV2ABI,
+ wallet2
+ );
+
+ const stakeBeforeRemove = BigInt(
+ await contract.getStake(hotkey.publicKey, convertH160ToPublicKey(wallet2.address), netuid)
+ );
+
+ let stakeBalance = tao(10)
+ const tx = await contract.removeStake(hotkey.publicKey, stakeBalance, netuid)
+ await tx.wait()
+
+ const stakeAfterRemove = BigInt(
+ await contract.getStake(hotkey.publicKey, convertH160ToPublicKey(wallet2.address), netuid)
+ );
+
+ assert.ok(stakeAfterRemove < stakeBeforeRemove)
+ })
+
+ it("Can add/remove proxy", async () => {
+ let netuid = (await api.query.SubtensorModule.TotalNetworks.getValue()) - 1
+ // add and remove are covered in a single test case because we can't reuse the same
+ // private/public key between substrate and EVM; to test the remove part, the proxy must
+ // be registered first. This makes `remove` depend on `add`: `addProxy` from the contract
+ // prepares the proxy for the `removeProxy` test, since the proxy is bound to the
+ // caller/origin.
+
+ // first, check we don't have proxies
+ const ss58Address = convertH160ToSS58(wallet1.address);
+ // the result is a two-item array: the first item is the delegate info, the second is the reserved balance
+ const initProxies = await api.query.Proxy.Proxies.getValue(ss58Address);
+ assert.equal(initProxies[0].length, 0);
+
+ // initialize the contract
+ const contract = new ethers.Contract(
+ ISTAKING_ADDRESS,
+ IStakingABI,
+ wallet1
+ );
+
+ // test "add"
+ let tx = await contract.addProxy(proxy.publicKey);
+ await tx.wait();
+
+ const proxiesAfterAdd = await api.query.Proxy.Proxies.getValue(ss58Address);
+
+ assert.equal(proxiesAfterAdd[0][0].delegate, convertPublicKeyToSs58(proxy.publicKey))
+
+ let stakeBefore = await api.query.SubtensorModule.Alpha.getValue(
+ convertPublicKeyToSs58(hotkey.publicKey),
+ ss58Address,
+ netuid
+ )
+
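+ // Dispatch add_stake through the Proxy pallet on behalf of the EVM wallet's SS58 account;
+ // the newly added proxy keypair signs, so a stake increase confirms the proxy was registered.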
+ const call = api.tx.SubtensorModule.add_stake({
+ hotkey: convertPublicKeyToSs58(hotkey.publicKey),
+ netuid: netuid,
+ amount_staked: tao(1)
+ })
+ await sendProxyCall(api, call.decodedCall, ss58Address, proxy)
+
+ let stakeAfter = await api.query.SubtensorModule.Alpha.getValue(
+ convertPublicKeyToSs58(hotkey.publicKey),
+ ss58Address,
+ netuid
+ )
+
+ assert.ok(stakeAfter > stakeBefore)
+ // test "remove"
+ tx = await contract.removeProxy(proxy.publicKey);
+ await tx.wait();
+
+ const proxiesAfterRemove = await api.query.Proxy.Proxies.getValue(ss58Address);
+ assert.equal(proxiesAfterRemove[0].length, 0)
+ });
+
+ it("Can add/remove proxy V2", async () => {
+ let netuid = (await api.query.SubtensorModule.TotalNetworks.getValue()) - 1
+ // add and remove are covered in a single test case because we can't reuse the same
+ // private/public key between substrate and EVM; to test the remove part, the proxy must
+ // be registered first. This makes `remove` depend on `add`: `addProxy` from the contract
+ // prepares the proxy for the `removeProxy` test, since the proxy is bound to the
+ // caller/origin.
+
+ // first, check we don't have proxies
+ const ss58Address = convertH160ToSS58(wallet1.address);
+ // the result is a two-item array: the first item is the delegate info, the second is the reserved balance
+ const initProxies = await api.query.Proxy.Proxies.getValue(ss58Address);
+ assert.equal(initProxies[0].length, 0);
+
+ // initialize the contract
+ // const signer = new ethers.Wallet(fundedEthWallet.privateKey, provider);
+ const contract = new ethers.Contract(
+ ISTAKING_V2_ADDRESS,
+ IStakingV2ABI,
+ wallet1
+ );
+
+ // test "add"
+ let tx = await contract.addProxy(proxy.publicKey);
+ await tx.wait();
+
+ const proxiesAfterAdd = await api.query.Proxy.Proxies.getValue(ss58Address);
+
+ assert.equal(proxiesAfterAdd[0][0].delegate, convertPublicKeyToSs58(proxy.publicKey))
+
+ let stakeBefore = await api.query.SubtensorModule.Alpha.getValue(
+ convertPublicKeyToSs58(hotkey.publicKey),
+ ss58Address,
+ netuid
+ )
+
+ const call = api.tx.SubtensorModule.add_stake({
+ hotkey: convertPublicKeyToSs58(hotkey.publicKey),
+ netuid: netuid,
+ amount_staked: tao(1)
+ })
+
+ await sendProxyCall(api, call.decodedCall, ss58Address, proxy)
+
+ let stakeAfter = await api.query.SubtensorModule.Alpha.getValue(
+ convertPublicKeyToSs58(hotkey.publicKey),
+ ss58Address,
+ netuid
+ )
+
+ assert.ok(stakeAfter > stakeBefore)
+ // test "remove"
+ tx = await contract.removeProxy(proxy.publicKey);
+ await tx.wait();
+
+ const proxiesAfterRemove = await api.query.Proxy.Proxies.getValue(ss58Address);
+ assert.equal(proxiesAfterRemove[0].length, 0)
+ });
+});
diff --git a/evm-tests/test/staking.precompile.reward.test.ts b/evm-tests/test/staking.precompile.reward.test.ts
new file mode 100644
index 0000000000..3600a6d08d
--- /dev/null
+++ b/evm-tests/test/staking.precompile.reward.test.ts
@@ -0,0 +1,105 @@
+import * as assert from "assert";
+import { getDevnetApi, getRandomSubstrateKeypair } from "../src/substrate"
+import { devnet } from "@polkadot-api/descriptors"
+import { TypedApi } from "polkadot-api";
+import { convertPublicKeyToSs58 } from "../src/address-utils"
+import { tao } from "../src/balance-math"
+import {
+ forceSetBalanceToSs58Address, addNewSubnetwork, burnedRegister,
+ setTxRateLimit, setTempo, setWeightsSetRateLimit, setSubnetOwnerCut, setMaxAllowedUids,
+ setMinDelegateTake, becomeDelegate, setActivityCutoff, addStake, setWeight, rootRegister
+} from "../src/subtensor"
+
+describe("Test neuron precompile reveal weights", () => {
+ const hotkey = getRandomSubstrateKeypair();
+ const coldkey = getRandomSubstrateKeypair();
+
+ const validator = getRandomSubstrateKeypair();
+ const miner = getRandomSubstrateKeypair();
+ const nominator = getRandomSubstrateKeypair();
+
+ let api: TypedApi<typeof devnet>
+
+ before(async () => {
+ const root_netuid = 0;
+ const root_tempo = 1; // need the root epoch to happen before the subnet tempo
+ const subnet_tempo = 1;
+ api = await getDevnetApi()
+
+ // await forceSetBalanceToSs58Address(api, convertPublicKeyToSs58(alice.publicKey))
+ await forceSetBalanceToSs58Address(api, convertPublicKeyToSs58(hotkey.publicKey))
+ await forceSetBalanceToSs58Address(api, convertPublicKeyToSs58(coldkey.publicKey))
+ await forceSetBalanceToSs58Address(api, convertPublicKeyToSs58(validator.publicKey))
+ await forceSetBalanceToSs58Address(api, convertPublicKeyToSs58(miner.publicKey))
+ await forceSetBalanceToSs58Address(api, convertPublicKeyToSs58(nominator.publicKey))
+ // await forceSetBalanceToEthAddress(api, wallet1.address)
+ // await forceSetBalanceToEthAddress(api, wallet2.address)
+ let netuid = await addNewSubnetwork(api, hotkey, coldkey)
+
+ console.log("test the case on subnet ", netuid)
+
+ await setTxRateLimit(api, BigInt(0))
+ await setTempo(api, root_netuid, root_tempo)
+ await setTempo(api, netuid, subnet_tempo)
+ await setWeightsSetRateLimit(api, netuid, BigInt(0))
+
+ await burnedRegister(api, netuid, convertPublicKeyToSs58(validator.publicKey), coldkey)
+ await burnedRegister(api, netuid, convertPublicKeyToSs58(miner.publicKey), coldkey)
+ await burnedRegister(api, netuid, convertPublicKeyToSs58(nominator.publicKey), coldkey)
+ await setSubnetOwnerCut(api, 0)
+ await setActivityCutoff(api, netuid, 65535)
+ await setMaxAllowedUids(api, netuid, 65535)
+ await setMinDelegateTake(api, 0)
+ await becomeDelegate(api, convertPublicKeyToSs58(validator.publicKey), coldkey)
+ await becomeDelegate(api, convertPublicKeyToSs58(miner.publicKey), coldkey)
+ })
+
+ it("Staker receives rewards", async () => {
+ let netuid = (await api.query.SubtensorModule.TotalNetworks.getValue()) - 1
+
+ await addStake(api, netuid, convertPublicKeyToSs58(miner.publicKey), tao(1), coldkey)
+ await addStake(api, netuid, convertPublicKeyToSs58(nominator.publicKey), tao(1), coldkey)
+
+ await addStake(api, netuid, convertPublicKeyToSs58(validator.publicKey), tao(100), coldkey)
+
+ const miner_alpha_before_emission = await api.query.SubtensorModule.Alpha.getValue(
+ convertPublicKeyToSs58(miner.publicKey),
+ convertPublicKeyToSs58(coldkey.publicKey),
+ netuid
+ )
+
+ await setWeight(api, netuid, [0, 1], [0xffff, 0xffff], BigInt(0), validator)
+ await rootRegister(api, convertPublicKeyToSs58(validator.publicKey), coldkey)
+
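+ // Poll for up to ~60 seconds until pending emission accumulates on the subnet
+ // (the subnet tempo is set to 1 block above).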
+ let index = 0;
+ while (index < 60) {
+ const pending = await api.query.SubtensorModule.PendingEmission.getValue(netuid);
+ if (pending > 0) {
+ console.log("pending amount is ", pending);
+ break;
+ }
+
+ await new Promise((resolve) => setTimeout(resolve, 1000));
+ console.log("wait for the pendingEmission update");
+ index += 1;
+ }
+
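+ // Then poll until the miner's alpha stake exceeds its pre-emission value,
+ // indicating the emission was distributed to stakers.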
+ index = 0;
+ while (index < 60) {
+ let miner_current_alpha = await api.query.SubtensorModule.Alpha.getValue(
+ convertPublicKeyToSs58(miner.publicKey),
+ convertPublicKeyToSs58(coldkey.publicKey),
+ netuid
+ )
+
+ if (miner_current_alpha > miner_alpha_before_emission) {
+ console.log("miner got reward");
+ break;
+ }
+
+ await new Promise((resolve) => setTimeout(resolve, 1000));
+ console.log(" waiting for emission");
+ index += 1;
+ }
+ })
+})
diff --git a/evm-tests/test/subnet.precompile.hyperparameter.test.ts b/evm-tests/test/subnet.precompile.hyperparameter.test.ts
new file mode 100644
index 0000000000..1805b85ce9
--- /dev/null
+++ b/evm-tests/test/subnet.precompile.hyperparameter.test.ts
@@ -0,0 +1,442 @@
+import * as assert from "assert";
+
+import { getDevnetApi, getRandomSubstrateKeypair } from "../src/substrate"
+import { devnet } from "@polkadot-api/descriptors"
+import { TypedApi } from "polkadot-api";
+import { convertPublicKeyToSs58 } from "../src/address-utils"
+import { generateRandomEthersWallet } from "../src/utils";
+import { ISubnetABI, ISUBNET_ADDRESS } from "../src/contracts/subnet"
+import { ethers } from "ethers"
+import { forceSetBalanceToEthAddress, forceSetBalanceToSs58Address } from "../src/subtensor"
+
+describe("Test the Subnet precompile contract", () => {
+ // init eth part
+ const wallet = generateRandomEthersWallet();
+ // init substrate part
+
+ const hotkey1 = getRandomSubstrateKeypair();
+ const hotkey2 = getRandomSubstrateKeypair();
+ let api: TypedApi<typeof devnet>
+
+ before(async () => {
+ // initialize variables that require await
+ api = await getDevnetApi()
+
+ await forceSetBalanceToSs58Address(api, convertPublicKeyToSs58(hotkey1.publicKey))
+ await forceSetBalanceToSs58Address(api, convertPublicKeyToSs58(hotkey2.publicKey))
+ await forceSetBalanceToEthAddress(api, wallet.address)
+ })
+
+ it("Can register network without identity info", async () => {
+ const totalNetwork = await api.query.SubtensorModule.TotalNetworks.getValue()
+
+ const contract = new ethers.Contract(ISUBNET_ADDRESS, ISubnetABI, wallet);
+ const tx = await contract.registerNetwork(hotkey1.publicKey);
+ await tx.wait();
+
+ const totalNetworkAfterAdd = await api.query.SubtensorModule.TotalNetworks.getValue()
+ assert.ok(totalNetwork + 1 === totalNetworkAfterAdd)
+ });
+
+ it("Can register network with identity info", async () => {
+ const totalNetwork = await api.query.SubtensorModule.TotalNetworks.getValue()
+
+ const contract = new ethers.Contract(ISUBNET_ADDRESS, ISubnetABI, wallet);
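+ // registerNetwork is overloaded in the precompile ABI; this variant also sets the
+ // subnet identity fields (name, repo, contact, url, discord, description, additional).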
+ const tx = await contract.registerNetwork(hotkey2.publicKey,
+ "name",
+ "repo",
+ "contact",
+ "subnetUrl",
+ "discord",
+ "description",
+ "additional"
+ );
+ await tx.wait();
+
+ const totalNetworkAfterAdd = await api.query.SubtensorModule.TotalNetworks.getValue()
+ assert.ok(totalNetwork + 1 === totalNetworkAfterAdd)
+ });
+
+ it("Can set subnet parameter", async () => {
+
+ const totalNetwork = await api.query.SubtensorModule.TotalNetworks.getValue()
+ const contract = new ethers.Contract(ISUBNET_ADDRESS, ISubnetABI, wallet);
+ const netuid = totalNetwork - 1;
+
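+ // Each block below sets one hyperparameter through the precompile, then cross-checks
+ // the contract getter against the corresponding on-chain storage value.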
+ // servingRateLimit hyperparameter
+ {
+ const newValue = 100;
+ const tx = await contract.setServingRateLimit(netuid, newValue);
+ await tx.wait();
+
+ let onchainValue = await api.query.SubtensorModule.ServingRateLimit.getValue(netuid)
+
+
+ let valueFromContract = Number(
+ await contract.getServingRateLimit(netuid)
+ );
+
+ assert.equal(valueFromContract, newValue)
+ assert.equal(valueFromContract, onchainValue);
+ }
+
+ // minDifficulty hyperparameter
+ //
+ // disabled: only by sudo
+ //
+ // newValue = 101;
+ // tx = await contract.setMinDifficulty(netuid, newValue);
+ // await tx.wait();
+
+ // await usingApi(async (api) => {
+ // onchainValue = Number(
+ // await api.query.subtensorModule.minDifficulty(netuid)
+ // );
+ // });
+
+ // valueFromContract = Number(await contract.getMinDifficulty(netuid));
+
+ // expect(valueFromContract).to.eq(newValue);
+ // expect(valueFromContract).to.eq(onchainValue);
+
+ // maxDifficulty hyperparameter
+
+ {
+ const newValue = 102;
+ const tx = await contract.setMaxDifficulty(netuid, newValue);
+ await tx.wait();
+
+ let onchainValue = await api.query.SubtensorModule.MaxDifficulty.getValue(netuid)
+
+
+ let valueFromContract = Number(
+ await contract.getMaxDifficulty(netuid)
+ );
+
+ assert.equal(valueFromContract, newValue)
+ assert.equal(valueFromContract, onchainValue);
+ }
+
+ // weightsVersionKey hyperparameter
+ {
+ const newValue = 103;
+ const tx = await contract.setWeightsVersionKey(netuid, newValue);
+ await tx.wait();
+
+ let onchainValue = await api.query.SubtensorModule.WeightsVersionKey.getValue(netuid)
+
+
+ let valueFromContract = Number(
+ await contract.getWeightsVersionKey(netuid)
+ );
+
+ assert.equal(valueFromContract, newValue)
+ assert.equal(valueFromContract, onchainValue);
+ }
+ // weightsSetRateLimit hyperparameter
+ {
+ const newValue = 104;
+ const tx = await contract.setWeightsSetRateLimit(netuid, newValue);
+ await tx.wait();
+
+ let onchainValue = await api.query.SubtensorModule.WeightsSetRateLimit.getValue(netuid)
+
+
+ let valueFromContract = Number(
+ await contract.getWeightsSetRateLimit(netuid)
+ );
+
+ assert.equal(valueFromContract, newValue)
+ assert.equal(valueFromContract, onchainValue);
+ }
+
+ // adjustmentAlpha hyperparameter
+ {
+ const newValue = 105;
+ const tx = await contract.setAdjustmentAlpha(netuid, newValue);
+ await tx.wait();
+
+ let onchainValue = await api.query.SubtensorModule.AdjustmentAlpha.getValue(netuid)
+
+
+ let valueFromContract = Number(
+ await contract.getAdjustmentAlpha(netuid)
+ );
+
+ assert.equal(valueFromContract, newValue)
+ assert.equal(valueFromContract, onchainValue);
+ }
+
+ // maxWeightLimit hyperparameter
+ {
+ const newValue = 106;
+ const tx = await contract.setMaxWeightLimit(netuid, newValue);
+ await tx.wait();
+
+ let onchainValue = await api.query.SubtensorModule.MaxWeightsLimit.getValue(netuid)
+
+
+ let valueFromContract = Number(
+ await contract.getMaxWeightLimit(netuid)
+ );
+
+ assert.equal(valueFromContract, newValue)
+ assert.equal(valueFromContract, onchainValue);
+ }
+ // immunityPeriod hyperparameter
+ {
+ const newValue = 107;
+ const tx = await contract.setImmunityPeriod(netuid, newValue);
+ await tx.wait();
+
+ let onchainValue = await api.query.SubtensorModule.ImmunityPeriod.getValue(netuid)
+
+
+ let valueFromContract = Number(
+ await contract.getImmunityPeriod(netuid)
+ );
+
+ assert.equal(valueFromContract, newValue)
+ assert.equal(valueFromContract, onchainValue);
+ }
+
+ // minAllowedWeights hyperparameter
+ {
+ const newValue = 108;
+ const tx = await contract.setMinAllowedWeights(netuid, newValue);
+ await tx.wait();
+
+ let onchainValue = await api.query.SubtensorModule.MinAllowedWeights.getValue(netuid)
+
+
+ let valueFromContract = Number(
+ await contract.getMinAllowedWeights(netuid)
+ );
+
+ assert.equal(valueFromContract, newValue)
+ assert.equal(valueFromContract, onchainValue);
+ }
+
+ // kappa hyperparameter
+ {
+ const newValue = 109;
+ const tx = await contract.setKappa(netuid, newValue);
+ await tx.wait();
+
+ let onchainValue = await api.query.SubtensorModule.Kappa.getValue(netuid)
+
+
+ let valueFromContract = Number(
+ await contract.getKappa(netuid)
+ );
+
+ assert.equal(valueFromContract, newValue)
+ assert.equal(valueFromContract, onchainValue);
+ }
+
+ // rho hyperparameter
+ {
+ const newValue = 110;
+ const tx = await contract.setRho(netuid, newValue);
+ await tx.wait();
+
+ let onchainValue = await api.query.SubtensorModule.Rho.getValue(netuid)
+
+
+ let valueFromContract = Number(
+ await contract.getRho(netuid)
+ );
+
+ assert.equal(valueFromContract, newValue)
+ assert.equal(valueFromContract, onchainValue);
+ }
+
+ // activityCutoff hyperparameter
+ {
+ const newValue = 111;
+ const tx = await contract.setActivityCutoff(netuid, newValue);
+ await tx.wait();
+
+ let onchainValue = await api.query.SubtensorModule.ActivityCutoff.getValue(netuid)
+
+
+ let valueFromContract = Number(
+ await contract.getActivityCutoff(netuid)
+ );
+
+ assert.equal(valueFromContract, newValue)
+ assert.equal(valueFromContract, onchainValue);
+ }
+
+ // networkRegistrationAllowed hyperparameter
+ {
+ const newValue = true;
+ const tx = await contract.setNetworkRegistrationAllowed(netuid, newValue);
+ await tx.wait();
+
+ let onchainValue = await api.query.SubtensorModule.NetworkRegistrationAllowed.getValue(netuid)
+
+
+ let valueFromContract = Boolean(
+ await contract.getNetworkRegistrationAllowed(netuid)
+ );
+
+ assert.equal(valueFromContract, newValue)
+ assert.equal(valueFromContract, onchainValue);
+ }
+
+ // networkPowRegistrationAllowed hyperparameter
+ {
+ const newValue = true;
+ const tx = await contract.setNetworkPowRegistrationAllowed(netuid, newValue);
+ await tx.wait();
+
+ let onchainValue = await api.query.SubtensorModule.NetworkPowRegistrationAllowed.getValue(netuid)
+
+
+ let valueFromContract = Boolean(
+ await contract.getNetworkPowRegistrationAllowed(netuid)
+ );
+
+ assert.equal(valueFromContract, newValue)
+ assert.equal(valueFromContract, onchainValue);
+ }
+
+ // minBurn hyperparameter. only sudo can set it now
+ // newValue = 112;
+
+ // tx = await contract.setMinBurn(netuid, newValue);
+ // await tx.wait();
+
+ // await usingApi(async (api) => {
+ // onchainValue = Number(
+ // await api.query.subtensorModule.minBurn(netuid)
+ // );
+ // });
+
+ // valueFromContract = Number(await contract.getMinBurn(netuid));
+
+ // expect(valueFromContract).to.eq(newValue);
+ // expect(valueFromContract).to.eq(onchainValue);
+
+ // maxBurn hyperparameter
+ {
+ const newValue = 113;
+ const tx = await contract.setMaxBurn(netuid, newValue);
+ await tx.wait();
+
+ let onchainValue = await api.query.SubtensorModule.MaxBurn.getValue(netuid)
+
+
+ let valueFromContract = Number(
+ await contract.getMaxBurn(netuid)
+ );
+
+ assert.equal(valueFromContract, newValue)
+ assert.equal(valueFromContract, onchainValue);
+ }
+
+
+ // difficulty hyperparameter (disabled: sudo only)
+ // newValue = 114;
+
+ // tx = await contract.setDifficulty(netuid, newValue);
+ // await tx.wait();
+
+ // await usingApi(async (api) => {
+ // onchainValue = Number(
+ // await api.query.subtensorModule.difficulty(netuid)
+ // );
+ // });
+
+ // valueFromContract = Number(await contract.getDifficulty(netuid));
+
+ // expect(valueFromContract).to.eq(newValue);
+ // expect(valueFromContract).to.eq(onchainValue);
+
+ // bondsMovingAverage hyperparameter
+ {
+ const newValue = 115;
+ const tx = await contract.setBondsMovingAverage(netuid, newValue);
+ await tx.wait();
+
+ let onchainValue = await api.query.SubtensorModule.BondsMovingAverage.getValue(netuid)
+
+
+ let valueFromContract = Number(
+ await contract.getBondsMovingAverage(netuid)
+ );
+
+ assert.equal(valueFromContract, newValue)
+ assert.equal(valueFromContract, onchainValue);
+ }
+
+
+ // commitRevealWeightsEnabled hyperparameter
+ {
+ const newValue = true;
+ const tx = await contract.setCommitRevealWeightsEnabled(netuid, newValue);
+ await tx.wait();
+
+ let onchainValue = await api.query.SubtensorModule.CommitRevealWeightsEnabled.getValue(netuid)
+
+
+ let valueFromContract = Boolean(
+ await contract.getCommitRevealWeightsEnabled(netuid)
+ );
+
+ assert.equal(valueFromContract, newValue)
+ assert.equal(valueFromContract, onchainValue);
+ }
+
+ // liquidAlphaEnabled hyperparameter
+ {
+ const newValue = true;
+ const tx = await contract.setLiquidAlphaEnabled(netuid, newValue);
+ await tx.wait();
+
+ let onchainValue = await api.query.SubtensorModule.LiquidAlphaOn.getValue(netuid)
+
+
+ let valueFromContract = Boolean(
+ await contract.getLiquidAlphaEnabled(netuid)
+ );
+
+ assert.equal(valueFromContract, newValue)
+ assert.equal(valueFromContract, onchainValue);
+ }
+
+ // alphaValues hyperparameter
+ {
+ const newValue = [118, 52429];
+ const tx = await contract.setAlphaValues(netuid, newValue[0], newValue[1]);
+ await tx.wait();
+
+ let onchainValue = await api.query.SubtensorModule.AlphaValues.getValue(netuid)
+
+ let value = await contract.getAlphaValues(netuid)
+ let valueFromContract = [Number(value[0]), Number(value[1])]
+
+ assert.equal(valueFromContract[0], newValue[0])
+ assert.equal(valueFromContract[1], newValue[1])
+ assert.equal(valueFromContract[0], onchainValue[0]);
+ assert.equal(valueFromContract[1], onchainValue[1]);
+ }
+
+ // commitRevealWeightsInterval hyperparameter
+ {
+ const newValue = 119;
+ const tx = await contract.setCommitRevealWeightsInterval(netuid, newValue);
+ await tx.wait();
+
+ let onchainValue = await api.query.SubtensorModule.RevealPeriodEpochs.getValue(netuid)
+
+ let valueFromContract = Number(
+ await contract.getCommitRevealWeightsInterval(netuid)
+ );
+
+ assert.equal(valueFromContract, newValue)
+ assert.equal(valueFromContract, onchainValue);
+ }
+ })
+});
\ No newline at end of file
diff --git a/evm-tests/tsconfig.json b/evm-tests/tsconfig.json
new file mode 100644
index 0000000000..c9c555d96f
--- /dev/null
+++ b/evm-tests/tsconfig.json
@@ -0,0 +1,111 @@
+{
+ "compilerOptions": {
+ /* Visit https://aka.ms/tsconfig to read more about this file */
+
+ /* Projects */
+ // "incremental": true, /* Save .tsbuildinfo files to allow for incremental compilation of projects. */
+ // "composite": true, /* Enable constraints that allow a TypeScript project to be used with project references. */
+ // "tsBuildInfoFile": "./.tsbuildinfo", /* Specify the path to .tsbuildinfo incremental compilation file. */
+ // "disableSourceOfProjectReferenceRedirect": true, /* Disable preferring source files instead of declaration files when referencing composite projects. */
+ // "disableSolutionSearching": true, /* Opt a project out of multi-project reference checking when editing. */
+ // "disableReferencedProjectLoad": true, /* Reduce the number of projects loaded automatically by TypeScript. */
+
+ /* Language and Environment */
+ "target": "es2016", /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */
+ // "lib": [], /* Specify a set of bundled library declaration files that describe the target runtime environment. */
+ // "jsx": "preserve", /* Specify what JSX code is generated. */
+ // "experimentalDecorators": true, /* Enable experimental support for legacy experimental decorators. */
+ // "emitDecoratorMetadata": true, /* Emit design-type metadata for decorated declarations in source files. */
+ // "jsxFactory": "", /* Specify the JSX factory function used when targeting React JSX emit, e.g. 'React.createElement' or 'h'. */
+ // "jsxFragmentFactory": "", /* Specify the JSX Fragment reference used for fragments when targeting React JSX emit e.g. 'React.Fragment' or 'Fragment'. */
+ // "jsxImportSource": "", /* Specify module specifier used to import the JSX factory functions when using 'jsx: react-jsx*'. */
+ // "reactNamespace": "", /* Specify the object invoked for 'createElement'. This only applies when targeting 'react' JSX emit. */
+ // "noLib": true, /* Disable including any library files, including the default lib.d.ts. */
+ // "useDefineForClassFields": true, /* Emit ECMAScript-standard-compliant class fields. */
+ // "moduleDetection": "auto", /* Control what method is used to detect module-format JS files. */
+
+ /* Modules */
+ "module": "commonjs", /* Specify what module code is generated. */
+ // "rootDir": "./", /* Specify the root folder within your source files. */
+ // "moduleResolution": "node10", /* Specify how TypeScript looks up a file from a given module specifier. */
+ // "baseUrl": "./", /* Specify the base directory to resolve non-relative module names. */
+ // "paths": {}, /* Specify a set of entries that re-map imports to additional lookup locations. */
+ // "rootDirs": [], /* Allow multiple folders to be treated as one when resolving modules. */
+ // "typeRoots": [], /* Specify multiple folders that act like './node_modules/@types'. */
+ // "types": [], /* Specify type package names to be included without being referenced in a source file. */
+ // "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */
+ // "moduleSuffixes": [], /* List of file name suffixes to search when resolving a module. */
+ // "allowImportingTsExtensions": true, /* Allow imports to include TypeScript file extensions. Requires '--moduleResolution bundler' and either '--noEmit' or '--emitDeclarationOnly' to be set. */
+ // "rewriteRelativeImportExtensions": true, /* Rewrite '.ts', '.tsx', '.mts', and '.cts' file extensions in relative import paths to their JavaScript equivalent in output files. */
+ // "resolvePackageJsonExports": true, /* Use the package.json 'exports' field when resolving package imports. */
+ // "resolvePackageJsonImports": true, /* Use the package.json 'imports' field when resolving imports. */
+ // "customConditions": [], /* Conditions to set in addition to the resolver-specific defaults when resolving imports. */
+ // "noUncheckedSideEffectImports": true, /* Check side effect imports. */
+ // "resolveJsonModule": true, /* Enable importing .json files. */
+ // "allowArbitraryExtensions": true, /* Enable importing files with any extension, provided a declaration file is present. */
+ // "noResolve": true, /* Disallow 'import's, 'require's or ''s from expanding the number of files TypeScript should add to a project. */
+
+ /* JavaScript Support */
+ // "allowJs": true, /* Allow JavaScript files to be a part of your program. Use the 'checkJS' option to get errors from these files. */
+ // "checkJs": true, /* Enable error reporting in type-checked JavaScript files. */
+ // "maxNodeModuleJsDepth": 1, /* Specify the maximum folder depth used for checking JavaScript files from 'node_modules'. Only applicable with 'allowJs'. */
+
+ /* Emit */
+ // "declaration": true, /* Generate .d.ts files from TypeScript and JavaScript files in your project. */
+ // "declarationMap": true, /* Create sourcemaps for d.ts files. */
+ // "emitDeclarationOnly": true, /* Only output d.ts files and not JavaScript files. */
+ // "sourceMap": true, /* Create source map files for emitted JavaScript files. */
+ // "inlineSourceMap": true, /* Include sourcemap files inside the emitted JavaScript. */
+ // "noEmit": true, /* Disable emitting files from a compilation. */
+ // "outFile": "./", /* Specify a file that bundles all outputs into one JavaScript file. If 'declaration' is true, also designates a file that bundles all .d.ts output. */
+ // "outDir": "./", /* Specify an output folder for all emitted files. */
+ // "removeComments": true, /* Disable emitting comments. */
+ // "importHelpers": true, /* Allow importing helper functions from tslib once per project, instead of including them per-file. */
+ // "downlevelIteration": true, /* Emit more compliant, but verbose and less performant JavaScript for iteration. */
+ // "sourceRoot": "", /* Specify the root path for debuggers to find the reference source code. */
+ // "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */
+ // "inlineSources": true, /* Include source code in the sourcemaps inside the emitted JavaScript. */
+ // "emitBOM": true, /* Emit a UTF-8 Byte Order Mark (BOM) in the beginning of output files. */
+ // "newLine": "crlf", /* Set the newline character for emitting files. */
+ // "stripInternal": true, /* Disable emitting declarations that have '@internal' in their JSDoc comments. */
+ // "noEmitHelpers": true, /* Disable generating custom helper functions like '__extends' in compiled output. */
+ // "noEmitOnError": true, /* Disable emitting files if any type checking errors are reported. */
+ // "preserveConstEnums": true, /* Disable erasing 'const enum' declarations in generated code. */
+ // "declarationDir": "./", /* Specify the output directory for generated declaration files. */
+
+ /* Interop Constraints */
+ // "isolatedModules": true, /* Ensure that each file can be safely transpiled without relying on other imports. */
+ // "verbatimModuleSyntax": true, /* Do not transform or elide any imports or exports not marked as type-only, ensuring they are written in the output file's format based on the 'module' setting. */
+ // "isolatedDeclarations": true, /* Require sufficient annotation on exports so other tools can trivially generate declaration files. */
+ // "allowSyntheticDefaultImports": true, /* Allow 'import x from y' when a module doesn't have a default export. */
+ "esModuleInterop": true, /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables 'allowSyntheticDefaultImports' for type compatibility. */
+ // "preserveSymlinks": true, /* Disable resolving symlinks to their realpath. This correlates to the same flag in node. */
+ "forceConsistentCasingInFileNames": true, /* Ensure that casing is correct in imports. */
+
+ /* Type Checking */
+ "strict": true, /* Enable all strict type-checking options. */
+ // "noImplicitAny": true, /* Enable error reporting for expressions and declarations with an implied 'any' type. */
+ // "strictNullChecks": true, /* When type checking, take into account 'null' and 'undefined'. */
+ // "strictFunctionTypes": true, /* When assigning functions, check to ensure parameters and the return values are subtype-compatible. */
+ // "strictBindCallApply": true, /* Check that the arguments for 'bind', 'call', and 'apply' methods match the original function. */
+ // "strictPropertyInitialization": true, /* Check for class properties that are declared but not set in the constructor. */
+ // "strictBuiltinIteratorReturn": true, /* Built-in iterators are instantiated with a 'TReturn' type of 'undefined' instead of 'any'. */
+ // "noImplicitThis": true, /* Enable error reporting when 'this' is given the type 'any'. */
+ // "useUnknownInCatchVariables": true, /* Default catch clause variables as 'unknown' instead of 'any'. */
+ // "alwaysStrict": true, /* Ensure 'use strict' is always emitted. */
+ // "noUnusedLocals": true, /* Enable error reporting when local variables aren't read. */
+ // "noUnusedParameters": true, /* Raise an error when a function parameter isn't read. */
+ // "exactOptionalPropertyTypes": true, /* Interpret optional property types as written, rather than adding 'undefined'. */
+ // "noImplicitReturns": true, /* Enable error reporting for codepaths that do not explicitly return in a function. */
+ // "noFallthroughCasesInSwitch": true, /* Enable error reporting for fallthrough cases in switch statements. */
+ // "noUncheckedIndexedAccess": true, /* Add 'undefined' to a type when accessed using an index. */
+ // "noImplicitOverride": true, /* Ensure overriding members in derived classes are marked with an override modifier. */
+ // "noPropertyAccessFromIndexSignature": true, /* Enforces using indexed accessors for keys declared using an indexed type. */
+ // "allowUnusedLabels": true, /* Disable error reporting for unused labels. */
+ // "allowUnreachableCode": true, /* Disable error reporting for unreachable code. */
+
+ /* Completeness */
+ // "skipDefaultLibCheck": true, /* Skip type checking .d.ts files that are included with TypeScript. */
+ "skipLibCheck": true /* Skip type checking all .d.ts files. */
+ }
+}
diff --git a/node/Cargo.toml b/node/Cargo.toml
index f97425ba9a..6cea8f6950 100644
--- a/node/Cargo.toml
+++ b/node/Cargo.toml
@@ -66,7 +66,7 @@ pallet-transaction-payment = { workspace = true }
pallet-commitments = { path = "../pallets/commitments" }
pallet-drand = { workspace = true }
sp-crypto-ec-utils = { workspace = true }
-sp-keystore = { workspace = true, default-features = false }
+sp-keystore = { workspace = true, default-features = false }
# These dependencies are used for the subtensor's RPCs
@@ -105,9 +105,10 @@ thiserror = { workspace = true }
num-traits = { version = "0.2", features = ["std"] }
# Local Dependencies
-node-subtensor-runtime = { path = "../runtime" }
-subtensor-custom-rpc = { path = "../pallets/subtensor/rpc" }
-subtensor-custom-rpc-runtime-api = { path = "../pallets/subtensor/runtime-api" }
+node-subtensor-runtime = { workspace = true, features = ["std"] }
+subtensor-runtime-common = { workspace = true, features = ["std"] }
+subtensor-custom-rpc = { workspace = true, features = ["std"] }
+subtensor-custom-rpc-runtime-api = { workspace = true, features = ["std"] }
[build-dependencies]
substrate-build-script-utils = { workspace = true }
@@ -133,7 +134,7 @@ runtime-benchmarks = [
"sc-service/runtime-benchmarks",
"sp-runtime/runtime-benchmarks",
"pallet-commitments/runtime-benchmarks",
- "pallet-drand/runtime-benchmarks"
+ "pallet-drand/runtime-benchmarks",
]
pow-faucet = []
@@ -145,7 +146,7 @@ try-runtime = [
"pallet-transaction-payment/try-runtime",
"sp-runtime/try-runtime",
"pallet-commitments/try-runtime",
- "pallet-drand/try-runtime"
+ "pallet-drand/try-runtime",
]
metadata-hash = ["node-subtensor-runtime/metadata-hash"]
diff --git a/node/src/benchmarking.rs b/node/src/benchmarking.rs
index cee1cb4ac8..40031ac1aa 100644
--- a/node/src/benchmarking.rs
+++ b/node/src/benchmarking.rs
@@ -7,13 +7,14 @@ use crate::client::FullClient;
use node_subtensor_runtime as runtime;
use node_subtensor_runtime::check_nonce;
use node_subtensor_runtime::pallet_subtensor;
-use runtime::{AccountId, Balance, BalancesCall, SystemCall};
+use runtime::{BalancesCall, SystemCall};
use sc_cli::Result;
use sc_client_api::BlockBackend;
use sp_core::{Encode, Pair};
use sp_inherents::{InherentData, InherentDataProvider};
use sp_keyring::Sr25519Keyring;
use sp_runtime::{OpaqueExtrinsic, SaturatedConversion};
+use subtensor_runtime_common::{AccountId, Balance, Signature};
use std::{sync::Arc, time::Duration};
@@ -161,7 +162,7 @@ pub fn create_benchmark_extrinsic(
runtime::UncheckedExtrinsic::new_signed(
call.clone(),
sp_runtime::AccountId32::from(sender.public()).into(),
- runtime::Signature::Sr25519(signature),
+ Signature::Sr25519(signature),
extra.clone(),
)
}
diff --git a/node/src/chain_spec/mod.rs b/node/src/chain_spec/mod.rs
index 25942658d5..e8efbb1647 100644
--- a/node/src/chain_spec/mod.rs
+++ b/node/src/chain_spec/mod.rs
@@ -6,7 +6,7 @@ pub mod finney;
pub mod localnet;
pub mod testnet;
-use node_subtensor_runtime::{AccountId, Block, Signature, WASM_BINARY};
+use node_subtensor_runtime::{Block, WASM_BINARY};
use sc_chain_spec_derive::ChainSpecExtension;
use sc_service::ChainType;
use sp_consensus_aura::sr25519::AuthorityId as AuraId;
@@ -18,6 +18,7 @@ use sp_runtime::traits::{IdentifyAccount, Verify};
use std::collections::HashSet;
use std::env;
use std::str::FromStr;
+use subtensor_runtime_common::{AccountId, Signature};
// The URL for the telemetry server.
// const STAGING_TELEMETRY_URL: &str = "wss://telemetry.polkadot.io/submit/";
diff --git a/node/src/rpc.rs b/node/src/rpc.rs
index 39a63c602e..0d4cd355de 100644
--- a/node/src/rpc.rs
+++ b/node/src/rpc.rs
@@ -13,7 +13,6 @@ pub use fc_rpc::EthBlockDataCacheTask;
pub use fc_rpc_core::types::{FeeHistoryCache, FeeHistoryCacheLimit, FilterPool};
use fc_storage::StorageOverride;
use jsonrpsee::RpcModule;
-use node_subtensor_runtime::Hash;
use node_subtensor_runtime::opaque::Block;
use sc_consensus_manual_seal::EngineCommand;
use sc_network::service::traits::NetworkService;
@@ -24,6 +23,7 @@ use sc_transaction_pool_api::TransactionPool;
use sp_core::H256;
use sp_inherents::CreateInherentDataProviders;
use sp_runtime::traits::Block as BlockT;
+use subtensor_runtime_common::Hash;
use crate::{
client::{FullBackend, FullClient},
diff --git a/node/src/service.rs b/node/src/service.rs
index a0b7cb1c6a..4ce041b0a4 100644
--- a/node/src/service.rs
+++ b/node/src/service.rs
@@ -796,7 +796,7 @@ fn run_manual_seal_authorship(
) -> Result<(), sp_inherents::Error> {
TIMESTAMP.with(|x| {
let mut x_ref = x.borrow_mut();
- *x_ref = x_ref.saturating_add(node_subtensor_runtime::SLOT_DURATION);
+ *x_ref = x_ref.saturating_add(subtensor_runtime_common::time::SLOT_DURATION);
inherent_data.put_data(sp_timestamp::INHERENT_IDENTIFIER, &*x.borrow())
})
}
diff --git a/pallets/admin-utils/src/benchmarking.rs b/pallets/admin-utils/src/benchmarking.rs
index af9b68051f..c5794e0279 100644
--- a/pallets/admin-utils/src/benchmarking.rs
+++ b/pallets/admin-utils/src/benchmarking.rs
@@ -272,13 +272,5 @@ mod benchmarks {
_(RawOrigin::Root, 1u16/*netuid*/, true/*enabled*/)/*set_commit_reveal_weights_enabled*/;
}
- #[benchmark]
- fn sudo_set_network_max_stake() {
- pallet_subtensor::Pallet::<T>::init_new_network(1u16 /*netuid*/, 1u16 /*tempo*/);
-
- #[extrinsic_call]
- _(RawOrigin::Root, 1u16/*netuid*/, 1_000_000_000_000_000u64/*max_stake*/)/*sudo_set_network_max_stake*/;
- }
-
//impl_benchmark_test_suite!(AdminUtils, crate::mock::new_test_ext(), crate::mock::Test);
}
diff --git a/pallets/admin-utils/src/lib.rs b/pallets/admin-utils/src/lib.rs
index e6653df478..19bbbee73b 100644
--- a/pallets/admin-utils/src/lib.rs
+++ b/pallets/admin-utils/src/lib.rs
@@ -25,9 +25,13 @@ pub mod pallet {
use super::*;
use frame_support::pallet_prelude::*;
use frame_support::traits::tokens::Balance;
- use frame_support::{dispatch::DispatchResult, pallet_prelude::StorageMap};
+ use frame_support::{
+ dispatch::{DispatchResult, RawOrigin},
+ pallet_prelude::StorageMap,
+ };
use frame_system::pallet_prelude::*;
use pallet_evm_chain_id::{self, ChainId};
+ use pallet_subtensor::utils::rate_limiting::TransactionType;
use sp_runtime::BoundedVec;
use substrate_fixed::types::I96F32;
@@ -249,12 +253,35 @@ pub mod pallet {
netuid: u16,
weights_version_key: u64,
) -> DispatchResult {
- pallet_subtensor::Pallet::<T>::ensure_subnet_owner_or_root(origin, netuid)?;
+ pallet_subtensor::Pallet::<T>::ensure_subnet_owner_or_root(origin.clone(), netuid)?;
ensure!(
pallet_subtensor::Pallet::<T>::if_subnet_exist(netuid),
Error::<T>::SubnetDoesNotExist
);
+
+ if let Ok(RawOrigin::Signed(who)) = origin.into() {
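+ // Rate limiting only applies to signed (subnet owner) origins; root calls skip this branch.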
+ // SN Owner
+ // Ensure the origin passes the rate limit.
+ ensure!(
+ pallet_subtensor::Pallet::<T>::passes_rate_limit_on_subnet(
+ &TransactionType::SetWeightsVersionKey,
+ &who,
+ netuid,
+ ),
+ pallet_subtensor::Error::<T>::TxRateLimitExceeded
+ );
+
+ // Set last transaction block
+ let current_block = pallet_subtensor::Pallet::<T>::get_current_block_as_u64();
+ pallet_subtensor::Pallet::<T>::set_last_transaction_block_on_subnet(
+ &who,
+ netuid,
+ &TransactionType::SetWeightsVersionKey,
+ current_block,
+ );
+ }
+
+ pallet_subtensor::Pallet::<T>::set_weights_version_key(netuid, weights_version_key);
log::debug!(
"WeightsVersionKeySet( netuid: {:?} weights_version_key: {:?} ) ",
@@ -916,12 +943,8 @@ pub mod pallet {
DispatchClass::Operational,
Pays::No
))]
- pub fn sudo_set_subnet_limit(origin: OriginFor<T>, max_subnets: u16) -> DispatchResult {
+ pub fn sudo_set_subnet_limit(origin: OriginFor<T>, _max_subnets: u16) -> DispatchResult {
ensure_root(origin)?;
- pallet_subtensor::Pallet::<T>::set_max_subnets(max_subnets);
-
- log::debug!("SubnetLimit( max_subnets: {:?} ) ", max_subnets);
-
Ok(())
}
@@ -1161,22 +1184,11 @@ pub mod pallet {
#[pallet::weight((0, DispatchClass::Operational, Pays::No))]
pub fn sudo_set_network_max_stake(
origin: OriginFor<T>,
- netuid: u16,
- max_stake: u64,
+ _netuid: u16,
+ _max_stake: u64,
) -> DispatchResult {
// Ensure the call is made by the root account
ensure_root(origin)?;
-
- // Set the new maximum stake for the specified network
- pallet_subtensor::Pallet::<T>::set_network_max_stake(netuid, max_stake);
-
- // Log the change
- log::trace!(
- "NetworkMaxStakeSet( netuid: {:?}, max_stake: {:?} )",
- netuid,
- max_stake
- );
-
Ok(())
}
@@ -1405,6 +1417,65 @@ pub mod pallet {
log::debug!("SubnetMovingAlphaSet( alpha: {:?} )", alpha);
Ok(())
}
+
+ /// Change the SubnetOwnerHotkey for a given subnet.
+ ///
+ /// # Arguments
+ /// * `origin` - The origin of the call, which must be the subnet owner.
+ /// * `netuid` - The unique identifier for the subnet.
+ /// * `hotkey` - The new hotkey for the subnet owner.
+ ///
+ /// # Errors
+ /// * `BadOrigin` - If the caller is not the subnet owner or root account.
+ ///
+ /// # Weight
+ /// Weight is handled by the `#[pallet::weight]` attribute.
+ #[pallet::call_index(64)]
+ #[pallet::weight((0, DispatchClass::Operational, Pays::No))]
+ pub fn sudo_set_subnet_owner_hotkey(
+ origin: OriginFor<T>,
+ netuid: u16,
+ hotkey: T::AccountId,
+ ) -> DispatchResult {
+ pallet_subtensor::Pallet::<T>::ensure_subnet_owner(origin.clone(), netuid)?;
+ pallet_subtensor::Pallet::<T>::set_subnet_owner_hotkey(netuid, &hotkey);
+
+ log::debug!(
+ "SubnetOwnerHotkeySet( netuid: {:?}, hotkey: {:?} )",
+ netuid,
+ hotkey
+ );
+ Ok(())
+ }
+
+ /// Sets the EMA price halving period for a subnet.
+ ///
+ /// # Arguments
+ /// * `origin` - The origin of the call, which must be the root account.
+ /// * `netuid` - The unique identifier for the subnet.
+ /// * `ema_halving` - Number of blocks over which the EMA price halves.
+ ///
+ /// # Errors
+ /// * `BadOrigin` - If the caller is not the root account.
+ ///
+ /// # Weight
+ /// Weight is handled by the `#[pallet::weight]` attribute.
+ #[pallet::call_index(65)]
+ #[pallet::weight((0, DispatchClass::Operational, Pays::No))]
+ pub fn sudo_set_ema_price_halving_period(
+ origin: OriginFor<T>,
+ netuid: u16,
+ ema_halving: u64,
+ ) -> DispatchResult {
+ ensure_root(origin)?;
+ pallet_subtensor::EMAPriceHalvingBlocks::<T>::set(netuid, ema_halving);
+
+ log::debug!(
+ "EMAPriceHalvingBlocks( netuid: {:?}, ema_halving: {:?} )",
+ netuid,
+ ema_halving
+ );
+ Ok(())
+ }
}
}
diff --git a/pallets/admin-utils/src/tests/mock.rs b/pallets/admin-utils/src/tests/mock.rs
index 40e29e54dd..fc0d016198 100644
--- a/pallets/admin-utils/src/tests/mock.rs
+++ b/pallets/admin-utils/src/tests/mock.rs
@@ -123,17 +123,18 @@ parameter_types! {
pub const InitialNetworkMinLockCost: u64 = 100_000_000_000;
pub const InitialSubnetOwnerCut: u16 = 0; // 0%. 100% of rewards go to validators + miners.
pub const InitialNetworkLockReductionInterval: u64 = 2; // 2 blocks.
- pub const InitialSubnetLimit: u16 = 10; // Max 10 subnets.
+ // pub const InitialSubnetLimit: u16 = 10; // (DEPRECATED)
pub const InitialNetworkRateLimit: u64 = 0;
pub const InitialKeySwapCost: u64 = 1_000_000_000;
pub const InitialAlphaHigh: u16 = 58982; // Represents 0.9 as per the production default
pub const InitialAlphaLow: u16 = 45875; // Represents 0.7 as per the production default
pub const InitialLiquidAlphaOn: bool = false; // Default value for LiquidAlphaOn
// pub const InitialHotkeyEmissionTempo: u64 = 1; // (DEPRECATED)
- pub const InitialNetworkMaxStake: u64 = u64::MAX; // Maximum possible value for u64, this make the make stake infinity
+ // pub const InitialNetworkMaxStake: u64 = u64::MAX; // (DEPRECATED)
pub const InitialColdkeySwapScheduleDuration: u64 = 5 * 24 * 60 * 60 / 12; // 5 days
pub const InitialDissolveNetworkScheduleDuration: u64 = 5 * 24 * 60 * 60 / 12; // 5 days
pub const InitialTaoWeight: u64 = u64::MAX/10; // 10% global weight.
+ pub const InitialEmaPriceHalvingPeriod: u64 = 201_600_u64; // 4 weeks
}
impl pallet_subtensor::Config for Test {
@@ -188,18 +189,16 @@ impl pallet_subtensor::Config for Test {
type InitialNetworkMinLockCost = InitialNetworkMinLockCost;
type InitialSubnetOwnerCut = InitialSubnetOwnerCut;
type InitialNetworkLockReductionInterval = InitialNetworkLockReductionInterval;
- type InitialSubnetLimit = InitialSubnetLimit;
type InitialNetworkRateLimit = InitialNetworkRateLimit;
type KeySwapCost = InitialKeySwapCost;
type AlphaHigh = InitialAlphaHigh;
type AlphaLow = InitialAlphaLow;
type LiquidAlphaOn = InitialLiquidAlphaOn;
- // type InitialHotkeyEmissionTempo = InitialHotkeyEmissionTempo; // (DEPRECATED)
- type InitialNetworkMaxStake = InitialNetworkMaxStake;
type Preimages = ();
type InitialColdkeySwapScheduleDuration = InitialColdkeySwapScheduleDuration;
type InitialDissolveNetworkScheduleDuration = InitialDissolveNetworkScheduleDuration;
type InitialTaoWeight = InitialTaoWeight;
+ type InitialEmaPriceHalvingPeriod = InitialEmaPriceHalvingPeriod;
}
#[derive_impl(frame_system::config_preludes::TestDefaultConfig)]
diff --git a/pallets/admin-utils/src/tests/mod.rs b/pallets/admin-utils/src/tests/mod.rs
index 6c879635e3..2f4c3f2b51 100644
--- a/pallets/admin-utils/src/tests/mod.rs
+++ b/pallets/admin-utils/src/tests/mod.rs
@@ -5,7 +5,7 @@ use frame_support::{
traits::Hooks,
};
use frame_system::Config;
-use pallet_subtensor::Error as SubtensorError;
+use pallet_subtensor::{Error as SubtensorError, SubnetOwner, Tempo, WeightsVersionKeyRateLimit};
// use pallet_subtensor::{migrations, Event};
use pallet_subtensor::Event;
use sp_consensus_grandpa::AuthorityId as GrandpaId;
@@ -162,6 +162,107 @@ fn test_sudo_set_weights_version_key() {
});
}
+#[test]
+fn test_sudo_set_weights_version_key_rate_limit() {
+ new_test_ext().execute_with(|| {
+ let netuid: u16 = 1;
+ let to_be_set: u64 = 10;
+
+ let sn_owner = U256::from(1);
+ add_network(netuid, 10);
+ // Set the Subnet Owner
+ SubnetOwner::<Test>::insert(netuid, sn_owner);
+
+ let rate_limit = WeightsVersionKeyRateLimit::<Test>::get();
+ let tempo: u16 = Tempo::<Test>::get(netuid);
+
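+ // Convert the rate limit (expressed in tempos) into blocks so run_to_block can wait it out.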
+ let rate_limit_period = rate_limit * (tempo as u64);
+
+ assert_ok!(AdminUtils::sudo_set_weights_version_key(
+ <<Test as Config>::RuntimeOrigin>::signed(sn_owner),
+ netuid,
+ to_be_set
+ ));
+ assert_eq!(SubtensorModule::get_weights_version_key(netuid), to_be_set);
+
+ // Try to set again before the rate limit period has elapsed
+ // Assert rate limit not passed
+ assert!(!SubtensorModule::passes_rate_limit_on_subnet(
+ &pallet_subtensor::utils::rate_limiting::TransactionType::SetWeightsVersionKey,
+ &sn_owner,
+ netuid
+ ));
+
+ // Try transaction
+ assert_noop!(
+ AdminUtils::sudo_set_weights_version_key(
+ <<Test as Config>::RuntimeOrigin>::signed(sn_owner),
+ netuid,
+ to_be_set + 1
+ ),
+ pallet_subtensor::Error::<Test>::TxRateLimitExceeded
+ );
+
+ // Wait for rate limit to pass
+ run_to_block(rate_limit_period + 2);
+ assert!(SubtensorModule::passes_rate_limit_on_subnet(
+ &pallet_subtensor::utils::rate_limiting::TransactionType::SetWeightsVersionKey,
+ &sn_owner,
+ netuid
+ ));
+
+ // Try transaction
+ assert_ok!(AdminUtils::sudo_set_weights_version_key(
+ <<Test as Config>::RuntimeOrigin>::signed(sn_owner),
+ netuid,
+ to_be_set + 1
+ ));
+ assert_eq!(
+ SubtensorModule::get_weights_version_key(netuid),
+ to_be_set + 1
+ );
+ });
+}
+
+#[test]
+fn test_sudo_set_weights_version_key_rate_limit_root() {
+ // root should not be affected by the rate limit
+ new_test_ext().execute_with(|| {
+ let netuid: u16 = 1;
+ let to_be_set: u64 = 10;
+
+ let sn_owner = U256::from(1);
+ add_network(netuid, 10);
+ // Set the Subnet Owner
+ SubnetOwner::<Test>::insert(netuid, sn_owner);
+
+ let rate_limit = WeightsVersionKeyRateLimit::<Test>::get();
+ let tempo: u16 = Tempo::<Test>::get(netuid);
+
+ let rate_limit_period = rate_limit * (tempo as u64);
+ // Verify the rate limit is more than 0 blocks
+ assert!(rate_limit_period > 0);
+
+ assert_ok!(AdminUtils::sudo_set_weights_version_key(
+ <<Test as Config>::RuntimeOrigin>::root(),
+ netuid,
+ to_be_set
+ ));
+ assert_eq!(SubtensorModule::get_weights_version_key(netuid), to_be_set);
+
+ // Try transaction
+ assert_ok!(AdminUtils::sudo_set_weights_version_key(
+ <<Test as Config>::RuntimeOrigin>::signed(sn_owner),
+ netuid,
+ to_be_set + 1
+ ));
+ assert_eq!(
+ SubtensorModule::get_weights_version_key(netuid),
+ to_be_set + 1
+ );
+ });
+}
+
#[test]
fn test_sudo_set_weights_set_rate_limit() {
new_test_ext().execute_with(|| {
@@ -546,7 +647,7 @@ fn test_sudo_set_rho() {
fn test_sudo_set_activity_cutoff() {
new_test_ext().execute_with(|| {
let netuid: u16 = 1;
- let to_be_set: u16 = 10;
+ let to_be_set: u16 = pallet_subtensor::MinActivityCutoff::<Test>::get();
add_network(netuid, 10);
let init_value: u16 = SubtensorModule::get_activity_cutoff(netuid);
assert_eq!(
@@ -850,30 +951,6 @@ fn test_sudo_set_rao_recycled() {
});
}
-#[test]
-fn test_sudo_set_subnet_limit() {
- new_test_ext().execute_with(|| {
- let netuid: u16 = 1;
- let to_be_set: u16 = 10;
- add_network(netuid, 10);
-
- let init_value: u16 = SubtensorModule::get_max_subnets();
- assert_eq!(
- AdminUtils::sudo_set_subnet_limit(
- <<Test as Config>::RuntimeOrigin>::signed(U256::from(1)),
- to_be_set
- ),
- Err(DispatchError::BadOrigin)
- );
- assert_eq!(SubtensorModule::get_max_subnets(), init_value);
- assert_ok!(AdminUtils::sudo_set_subnet_limit(
- <<Test as Config>::RuntimeOrigin>::root(),
- to_be_set
- ));
- assert_eq!(SubtensorModule::get_max_subnets(), to_be_set);
- });
-}
-
#[test]
fn test_sudo_set_network_lock_reduction_interval() {
new_test_ext().execute_with(|| {
@@ -1466,3 +1543,171 @@ fn test_sudo_root_sets_subnet_moving_alpha() {
assert_eq!(pallet_subtensor::SubnetMovingAlpha::<Test>::get(), alpha);
});
}
+
+#[test]
+fn test_sets_a_lower_value_clears_small_nominations() {
+ new_test_ext().execute_with(|| {
+ let hotkey: U256 = U256::from(3);
+ let owner_coldkey: U256 = U256::from(1);
+ let staker_coldkey: U256 = U256::from(2);
+
+ let initial_nominator_min_required_stake = 10u64;
+ let nominator_min_required_stake_0 = 5u64;
+ let nominator_min_required_stake_1 = 20u64;
+
+ assert!(nominator_min_required_stake_0 < nominator_min_required_stake_1);
+ assert!(nominator_min_required_stake_0 < initial_nominator_min_required_stake);
+
+ let to_stake = initial_nominator_min_required_stake + 1;
+
+ assert!(to_stake > initial_nominator_min_required_stake);
+ assert!(to_stake > nominator_min_required_stake_0); // Should stay when set
+ assert!(to_stake < nominator_min_required_stake_1); // Should be removed when set
+
+ // Create network
+ let netuid = 2;
+ add_network(netuid, 10);
+
+ // Register a neuron
+ register_ok_neuron(netuid, hotkey, owner_coldkey, 0);
+
+ assert_ok!(AdminUtils::sudo_set_nominator_min_required_stake(
+ RuntimeOrigin::root(),
+ initial_nominator_min_required_stake
+ ));
+ assert_eq!(
+ SubtensorModule::get_nominator_min_required_stake(),
+ initial_nominator_min_required_stake
+ );
+
+ // Stake to the hotkey as staker_coldkey
+ SubtensorModule::increase_stake_for_hotkey_and_coldkey_on_subnet(
+ &hotkey,
+ &staker_coldkey,
+ netuid,
+ to_stake,
+ );
+
+ assert_ok!(AdminUtils::sudo_set_nominator_min_required_stake(
+ RuntimeOrigin::root(),
+ nominator_min_required_stake_0
+ ));
+ assert_eq!(
+ SubtensorModule::get_nominator_min_required_stake(),
+ nominator_min_required_stake_0
+ );
+
+ // Check this nomination is not cleared
+ assert!(
+ SubtensorModule::get_stake_for_hotkey_and_coldkey_on_subnet(
+ &hotkey,
+ &staker_coldkey,
+ netuid
+ ) > 0
+ );
+
+ assert_ok!(AdminUtils::sudo_set_nominator_min_required_stake(
+ RuntimeOrigin::root(),
+ nominator_min_required_stake_1
+ ));
+ assert_eq!(
+ SubtensorModule::get_nominator_min_required_stake(),
+ nominator_min_required_stake_1
+ );
+
+ // Check this nomination is cleared
+ assert_eq!(
+ SubtensorModule::get_stake_for_hotkey_and_coldkey_on_subnet(
+ &hotkey,
+ &staker_coldkey,
+ netuid
+ ),
+ 0
+ );
+ });
+}
+
+#[test]
+fn test_sudo_set_subnet_owner_hotkey() {
+ new_test_ext().execute_with(|| {
+ let netuid: u16 = 1;
+
+ let coldkey: U256 = U256::from(1);
+ let hotkey: U256 = U256::from(2);
+ let new_hotkey: U256 = U256::from(3);
+
+ let coldkey_origin = <<Test as Config>::RuntimeOrigin>::signed(coldkey);
+ let root = RuntimeOrigin::root();
+ let random_account = RuntimeOrigin::signed(U256::from(123456));
+
+ pallet_subtensor::SubnetOwner::<Test>::insert(netuid, coldkey);
+ pallet_subtensor::SubnetOwnerHotkey::<Test>::insert(netuid, hotkey);
+ assert_eq!(
+ pallet_subtensor::SubnetOwnerHotkey::<Test>::get(netuid),
+ hotkey
+ );
+
+ assert_ok!(AdminUtils::sudo_set_subnet_owner_hotkey(
+ coldkey_origin,
+ netuid,
+ new_hotkey
+ ));
+
+ assert_eq!(
+ pallet_subtensor::SubnetOwnerHotkey::<Test>::get(netuid),
+ new_hotkey
+ );
+
+ assert_noop!(
+ AdminUtils::sudo_set_subnet_owner_hotkey(random_account, netuid, new_hotkey),
+ DispatchError::BadOrigin
+ );
+
+ assert_noop!(
+ AdminUtils::sudo_set_subnet_owner_hotkey(root, netuid, new_hotkey),
+ DispatchError::BadOrigin
+ );
+ });
+}
+
+// cargo test --package pallet-admin-utils --lib -- tests::test_sudo_set_ema_halving --exact --show-output
+#[test]
+fn test_sudo_set_ema_halving() {
+ new_test_ext().execute_with(|| {
+ let netuid: u16 = 1;
+ let to_be_set: u64 = 10;
+ add_network(netuid, 10);
+
+ let value_before: u64 = pallet_subtensor::EMAPriceHalvingBlocks::<Test>::get(netuid);
+ assert_eq!(
+ AdminUtils::sudo_set_ema_price_halving_period(
+ <<Test as Config>::RuntimeOrigin>::signed(U256::from(1)),
+ netuid,
+ to_be_set
+ ),
+ Err(DispatchError::BadOrigin)
+ );
+ let value_after_0: u64 = pallet_subtensor::EMAPriceHalvingBlocks::<Test>::get(netuid);
+ assert_eq!(value_after_0, value_before);
+
+ let owner = U256::from(10);
+ pallet_subtensor::SubnetOwner::<Test>::insert(netuid, owner);
+ assert_eq!(
+ AdminUtils::sudo_set_ema_price_halving_period(
+ <<Test as Config>::RuntimeOrigin>::signed(owner),
+ netuid,
+ to_be_set
+ ),
+ Err(DispatchError::BadOrigin)
+ );
+ let value_after_1: u64 = pallet_subtensor::EMAPriceHalvingBlocks::<Test>::get(netuid);
+ assert_eq!(value_after_1, value_before);
+ assert_ok!(AdminUtils::sudo_set_ema_price_halving_period(
+ <<Test as Config>::RuntimeOrigin>::root(),
+ netuid,
+ to_be_set
+ ));
+ let value_after_2: u64 = pallet_subtensor::EMAPriceHalvingBlocks::<Test>::get(netuid);
+ assert_eq!(value_after_2, to_be_set);
+ });
+}
diff --git a/pallets/proxy/Cargo.toml b/pallets/proxy/Cargo.toml
new file mode 100644
index 0000000000..f3a97dfedf
--- /dev/null
+++ b/pallets/proxy/Cargo.toml
@@ -0,0 +1,57 @@
+[package]
+name = "pallet-proxy"
+version = "38.0.0"
+authors = ["Bittensor Nucleus Team"]
+edition = "2021"
+license = "Apache-2.0"
+homepage = "https://bittensor.com"
+description = "FRAME proxying pallet"
+readme = "README.md"
+
+[lints]
+workspace = true
+
+[package.metadata.docs.rs]
+targets = ["x86_64-unknown-linux-gnu"]
+
+[dependencies]
+codec = { features = ["max-encoded-len"], workspace = true }
+scale-info = { features = ["derive"], workspace = true }
+frame-benchmarking = { optional = true, workspace = true }
+frame-support.workspace = true
+frame-system.workspace = true
+sp-io.workspace = true
+sp-runtime.workspace = true
+subtensor-macros.workspace = true
+
+[dev-dependencies]
+pallet-balances = { default-features = true, workspace = true }
+pallet-utility = { default-features = true, workspace = true }
+sp-core = { default-features = true, workspace = true }
+
+[features]
+default = ["std"]
+std = [
+ "codec/std",
+ "frame-benchmarking?/std",
+ "frame-support/std",
+ "frame-system/std",
+ "scale-info/std",
+ "sp-io/std",
+ "sp-runtime/std",
+]
+runtime-benchmarks = [
+ "frame-benchmarking/runtime-benchmarks",
+ "frame-support/runtime-benchmarks",
+ "frame-system/runtime-benchmarks",
+ "sp-runtime/runtime-benchmarks",
+ "pallet-balances/runtime-benchmarks",
+ "pallet-utility/runtime-benchmarks"
+]
+try-runtime = [
+ "frame-support/try-runtime",
+ "frame-system/try-runtime",
+ "sp-runtime/try-runtime",
+ "pallet-balances/try-runtime",
+ "pallet-utility/try-runtime"
+]
diff --git a/pallets/proxy/README.md b/pallets/proxy/README.md
new file mode 100644
index 0000000000..290c49c050
--- /dev/null
+++ b/pallets/proxy/README.md
@@ -0,0 +1,26 @@
+# Proxy Module
+A module allowing accounts to give permission to other accounts to dispatch types of calls from
+their signed origin.
+
+The accounts to which permission is delegated may be required to announce the action that they
+wish to execute some duration before it happens. In this case, the target account may
+reject the announcement and, in doing so, veto the execution.
+
+- [`Config`](https://docs.rs/pallet-proxy/latest/pallet_proxy/pallet/trait.Config.html)
+- [`Call`](https://docs.rs/pallet-proxy/latest/pallet_proxy/pallet/enum.Call.html)
+
+## Overview
+
+## Interface
+
+### Dispatchable Functions
+
+[`Call`]: ./enum.Call.html
+[`Config`]: ./trait.Config.html
+
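+### Example
+
+An illustrative sketch of the delegate/announce/veto flow described above. It assumes a
+test runtime that exposes this pallet as `Proxy`, uses plain account IDs as its lookup
+source, and defines a permissive `ProxyType::Any` variant; `Runtime`, `RuntimeOrigin`,
+`RuntimeCall`, `alice`, and `bob` are placeholders, not names fixed by this pallet.
+
+```rust
+// Inside a test externalities environment, with `alice` and `bob` as account IDs:
+// Alice delegates to Bob with a 10-block announcement delay.
+assert_ok!(Proxy::add_proxy(RuntimeOrigin::signed(alice), bob, ProxyType::Any, 10));
+
+// Bob announces the hash of the call he intends to dispatch on Alice's behalf
+// (`hash_of` comes from `sp_runtime::traits::Hash`).
+let call: RuntimeCall = frame_system::Call::<Runtime>::remark { remark: vec![] }.into();
+let call_hash = <Runtime as pallet_proxy::Config>::CallHasher::hash_of(&call);
+assert_ok!(Proxy::announce(RuntimeOrigin::signed(bob), alice, call_hash));
+
+// Alice may veto the announced call before the delay elapses; otherwise Bob can
+// execute it via `proxy_announced` once the delay has passed.
+assert_ok!(Proxy::reject_announcement(RuntimeOrigin::signed(alice), bob, call_hash));
+```
+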
+License: Apache-2.0
+
+
+## Release
+
+Polkadot SDK stable2409
diff --git a/pallets/proxy/src/benchmarking.rs b/pallets/proxy/src/benchmarking.rs
new file mode 100644
index 0000000000..f519c0f0c3
--- /dev/null
+++ b/pallets/proxy/src/benchmarking.rs
@@ -0,0 +1,261 @@
+// This file is part of Substrate.
+//
+// Copyright (C) Parity Technologies (UK) Ltd.
+// SPDX-License-Identifier: Apache-2.0
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0/
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Benchmarks for Proxy Pallet
+
+#![cfg(feature = "runtime-benchmarks")]
+
+use super::*;
+use crate::Pallet as Proxy;
+use alloc::{boxed::Box, vec};
+use frame_benchmarking::v1::{account, benchmarks, whitelisted_caller};
+use frame_system::{pallet_prelude::BlockNumberFor, RawOrigin};
+use sp_runtime::traits::{Bounded, CheckedDiv};
+
+const SEED: u32 = 0;
+
+fn assert_last_event<T: Config>(generic_event: <T as Config>::RuntimeEvent) {
+ frame_system::Pallet::<T>::assert_last_event(generic_event.into());
+}
+
+fn half_max_balance<T: Config>() -> BalanceOf<T> {
+ BalanceOf::<T>::max_value()
+ .checked_div(&BalanceOf::<T>::from(2_u32))
+ .unwrap_or_else(BalanceOf::<T>::max_value)
+}
+
+fn add_proxies<T: Config>(n: u32, maybe_who: Option<T::AccountId>) -> Result<(), &'static str> {
+ let caller = maybe_who.unwrap_or_else(whitelisted_caller);
+ T::Currency::make_free_balance_be(&caller, half_max_balance::<T>());
+ for i in 0..n {
+ let real = T::Lookup::unlookup(account("target", i, SEED));
+
+ Proxy::<T>::add_proxy(
+ RawOrigin::Signed(caller.clone()).into(),
+ real,
+ T::ProxyType::default(),
+ BlockNumberFor::<T>::zero(),
+ )?;
+ }
+ Ok(())
+}
+
+fn add_announcements<T: Config>(
+ n: u32,
+ maybe_who: Option<T::AccountId>,
+ maybe_real: Option<T::AccountId>,
+) -> Result<(), &'static str> {
+ let caller = maybe_who.unwrap_or_else(|| account("caller", 0, SEED));
+ let caller_lookup = T::Lookup::unlookup(caller.clone());
+ T::Currency::make_free_balance_be(&caller, half_max_balance::<T>());
+ let real = if let Some(real) = maybe_real {
+ real
+ } else {
+ let real = account("real", 0, SEED);
+ T::Currency::make_free_balance_be(&real, half_max_balance::<T>());
+ Proxy::<T>::add_proxy(
+ RawOrigin::Signed(real.clone()).into(),
+ caller_lookup,
+ T::ProxyType::default(),
+ BlockNumberFor::<T>::zero(),
+ )?;
+ real
+ };
+ let real_lookup = T::Lookup::unlookup(real);
+ for _ in 0..n {
+ Proxy::<T>::announce(
+ RawOrigin::Signed(caller.clone()).into(),
+ real_lookup.clone(),
+ T::CallHasher::hash_of(&("add_announcement", n)),
+ )?;
+ }
+ Ok(())
+}
+
+benchmarks! {
+ proxy {
+ let p in 1 .. (T::MaxProxies::get().saturating_sub(1)) => add_proxies::<T>(p, None)?;
+ let caller: T::AccountId = account("target", p.saturating_sub(1), SEED);
+ T::Currency::make_free_balance_be(&caller, half_max_balance::<T>());
+ let real: T::AccountId = whitelisted_caller();
+ let real_lookup = T::Lookup::unlookup(real);
+ let call: <T as Config>::RuntimeCall = frame_system::Call::<T>::remark { remark: vec![] }.into();
+ }: _(RawOrigin::Signed(caller), real_lookup, Some(T::ProxyType::default()), Box::new(call))
+ verify {
+ assert_last_event::<T>(Event::ProxyExecuted { result: Ok(()) }.into())
+ }
+
+ proxy_announced {
+ let a in 0 .. T::MaxPending::get().saturating_sub(1);
+ let p in 1 .. (T::MaxProxies::get().saturating_sub(1)) => add_proxies::<T>(p, None)?;
+ let caller: T::AccountId = account("pure", 0, SEED);
+ let delegate: T::AccountId = account("target", p.saturating_sub(1), SEED);
+ let delegate_lookup = T::Lookup::unlookup(delegate.clone());
+ T::Currency::make_free_balance_be(&delegate, half_max_balance::<T>());
+ let real: T::AccountId = whitelisted_caller();
+ let real_lookup = T::Lookup::unlookup(real);
+ let call: <T as Config>::RuntimeCall = frame_system::Call::<T>::remark { remark: vec![] }.into();
+ Proxy::<T>::announce(
+ RawOrigin::Signed(delegate.clone()).into(),
+ real_lookup.clone(),
+ T::CallHasher::hash_of(&call),
+ )?;
+ add_announcements::<T>(a, Some(delegate.clone()), None)?;
+ }: _(RawOrigin::Signed(caller), delegate_lookup, real_lookup, Some(T::ProxyType::default()), Box::new(call))
+ verify {
+ assert_last_event::<T>(Event::ProxyExecuted { result: Ok(()) }.into())
+ }
+
+ remove_announcement {
+ let a in 0 .. T::MaxPending::get().saturating_sub(1);
+ let p in 1 .. (T::MaxProxies::get().saturating_sub(1)) => add_proxies::<T>(p, None)?;
+ let caller: T::AccountId = account("target", p.saturating_sub(1), SEED);
+ T::Currency::make_free_balance_be(&caller, half_max_balance::<T>());
+ let real: T::AccountId = whitelisted_caller();
+ let real_lookup = T::Lookup::unlookup(real);
+ let call: <T as Config>::RuntimeCall = frame_system::Call::<T>::remark { remark: vec![] }.into();
+ Proxy::<T>::announce(
+ RawOrigin::Signed(caller.clone()).into(),
+ real_lookup.clone(),
+ T::CallHasher::hash_of(&call),
+ )?;
+ add_announcements::<T>(a, Some(caller.clone()), None)?;
+ }: _(RawOrigin::Signed(caller.clone()), real_lookup, T::CallHasher::hash_of(&call))
+ verify {
+ let (announcements, _) = Announcements::<T>::get(&caller);
+ assert_eq!(announcements.len() as u32, a);
+ }
+
+ reject_announcement {
+ let a in 0 .. T::MaxPending::get().saturating_sub(1);
+ let p in 1 .. (T::MaxProxies::get().saturating_sub(1)) => add_proxies::<T>(p, None)?;
+ let caller: T::AccountId = account("target", p.saturating_sub(1), SEED);
+ let caller_lookup = T::Lookup::unlookup(caller.clone());
+ T::Currency::make_free_balance_be(&caller, half_max_balance::<T>());
+ let real: T::AccountId = whitelisted_caller();
+ let real_lookup = T::Lookup::unlookup(real.clone());
+ let call: <T as Config>::RuntimeCall = frame_system::Call::<T>::remark { remark: vec![] }.into();
+ Proxy::<T>::announce(
+ RawOrigin::Signed(caller.clone()).into(),
+ real_lookup,
+ T::CallHasher::hash_of(&call),
+ )?;
+ add_announcements::<T>(a, Some(caller.clone()), None)?;
+ }: _(RawOrigin::Signed(real), caller_lookup, T::CallHasher::hash_of(&call))
+ verify {
+ let (announcements, _) = Announcements::<T>::get(&caller);
+ assert_eq!(announcements.len() as u32, a);
+ }
+
+ announce {
+ let a in 0 .. T::MaxPending::get().saturating_sub(1);
+ let p in 1 .. (T::MaxProxies::get().saturating_sub(1)) => add_proxies::<T>(p, None)?;
+ let caller: T::AccountId = account("target", p.saturating_sub(1), SEED);
+ T::Currency::make_free_balance_be(&caller, half_max_balance::<T>());
+ let real: T::AccountId = whitelisted_caller();
+ let real_lookup = T::Lookup::unlookup(real.clone());
+ add_announcements::<T>(a, Some(caller.clone()), None)?;
+ let call: <T as Config>::RuntimeCall = frame_system::Call::<T>::remark { remark: vec![] }.into();
+ let call_hash = T::CallHasher::hash_of(&call);
+ }: _(RawOrigin::Signed(caller.clone()), real_lookup, call_hash)
+ verify {
+ assert_last_event::<T>(Event::Announced { real, proxy: caller, call_hash }.into());
+ }
+
+ add_proxy {
+ let p in 1 .. (T::MaxProxies::get().saturating_sub(1)) => add_proxies::<T>(p, None)?;
+ let caller: T::AccountId = whitelisted_caller();
+ let real = T::Lookup::unlookup(account("target", T::MaxProxies::get(), SEED));
+ }: _(
+ RawOrigin::Signed(caller.clone()),
+ real,
+ T::ProxyType::default(),
+ BlockNumberFor::<T>::zero()
+ )
+ verify {
+ let (proxies, _) = Proxies::<T>::get(caller);
+ assert_eq!(proxies.len() as u32, p.saturating_add(1));
+ }
+
+ remove_proxy {
+ let p in 1 .. (T::MaxProxies::get().saturating_sub(1)) => add_proxies::<T>(p, None)?;
+ let caller: T::AccountId = whitelisted_caller();
+ let delegate = T::Lookup::unlookup(account("target", 0, SEED));
+ }: _(
+ RawOrigin::Signed(caller.clone()),
+ delegate,
+ T::ProxyType::default(),
+ BlockNumberFor::<T>::zero()
+ )
+ verify {
+ let (proxies, _) = Proxies::<T>::get(caller);
+ assert_eq!(proxies.len() as u32, p.saturating_sub(1));
+ }
+
+ remove_proxies {
+ let p in 1 .. (T::MaxProxies::get().saturating_sub(1)) => add_proxies::<T>(p, None)?;
+ let caller: T::AccountId = whitelisted_caller();
+ }: _(RawOrigin::Signed(caller.clone()))
+ verify {
+ let (proxies, _) = Proxies::<T>::get(caller);
+ assert_eq!(proxies.len() as u32, 0);
+ }
+
+ create_pure {
+ let p in 1 .. (T::MaxProxies::get().saturating_sub(1)) => add_proxies::<T>(p, None)?;
+ let caller: T::AccountId = whitelisted_caller();
+ }: _(
+ RawOrigin::Signed(caller.clone()),
+ T::ProxyType::default(),
+ BlockNumberFor::<T>::zero(),
+ 0
+ )
+ verify {
+ let pure_account = Pallet::<T>::pure_account(&caller, &T::ProxyType::default(), 0, None);
+ assert_last_event::<T>(Event::PureCreated {
+ pure: pure_account,
+ who: caller,
+ proxy_type: T::ProxyType::default(),
+ disambiguation_index: 0,
+ }.into());
+ }
+
+ kill_pure {
+ let p in 0 .. (T::MaxProxies::get().saturating_sub(2));
+
+ let caller: T::AccountId = whitelisted_caller();
+ let caller_lookup = T::Lookup::unlookup(caller.clone());
+ T::Currency::make_free_balance_be(&caller, BalanceOf::<T>::max_value());
+ Pallet::<T>::create_pure(
+ RawOrigin::Signed(whitelisted_caller()).into(),
+ T::ProxyType::default(),
+ BlockNumberFor::<T>::zero(),
+ 0
+ )?;
+ let height = system::Pallet::<T>::block_number();
+ let ext_index = system::Pallet::<T>::extrinsic_index().unwrap_or(0);
+ let pure_account = Pallet::<T>::pure_account(&caller, &T::ProxyType::default(), 0, None);
+
+ add_proxies::<T>(p, Some(pure_account.clone()))?;
+ ensure!(Proxies::<T>::contains_key(&pure_account), "pure proxy not created");
+ }: _(RawOrigin::Signed(pure_account.clone()), caller_lookup, T::ProxyType::default(), 0, height, ext_index)
+ verify {
+ assert!(!Proxies::<T>::contains_key(&pure_account));
+ }
+
+ impl_benchmark_test_suite!(Proxy, crate::tests::new_test_ext(), crate::tests::Test);
+}
diff --git a/pallets/proxy/src/lib.rs b/pallets/proxy/src/lib.rs
new file mode 100644
index 0000000000..3f45951190
--- /dev/null
+++ b/pallets/proxy/src/lib.rs
@@ -0,0 +1,891 @@
+// This file is part of Substrate.
+//
+// Copyright (C) Parity Technologies (UK) Ltd.
+// SPDX-License-Identifier: Apache-2.0
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0/
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+//! # Proxy Pallet
+//! A pallet allowing accounts to give permission to other accounts to dispatch types of calls from
+//! their signed origin.
+//!
+//! The accounts to which permission is delegated may be required to announce the action that they
+//! wish to execute some duration before it happens. In this case, the target account may
+//! reject the announcement and, in doing so, veto the execution.
+//!
+//! - [`Config`]
+//! - [`Call`]
+
+// Ensure we're `no_std` when compiling for Wasm.
+#![cfg_attr(not(feature = "std"), no_std)]
+
+mod benchmarking;
+mod tests;
+pub mod weights;
+
+extern crate alloc;
+
+use alloc::{boxed::Box, vec};
+use codec::{Decode, Encode, MaxEncodedLen};
+use frame_support::pallet_prelude::{Pays, Weight};
+use frame_support::{
+ dispatch::GetDispatchInfo,
+ ensure,
+ traits::{Currency, Get, InstanceFilter, IsSubType, IsType, OriginTrait, ReservableCurrency},
+ BoundedVec,
+};
+use frame_system::{self as system, ensure_signed, pallet_prelude::BlockNumberFor};
+pub use pallet::*;
+use scale_info::{prelude::cmp::Ordering, TypeInfo};
+use sp_io::hashing::blake2_256;
+use sp_runtime::{
+ traits::{Dispatchable, Hash, Saturating, StaticLookup, TrailingZeroInput, Zero},
+ DispatchError, DispatchResult, RuntimeDebug,
+};
+use subtensor_macros::freeze_struct;
+pub use weights::WeightInfo;
+
+type CallHashOf<T> = <<T as Config>::CallHasher as Hash>::Output;
+
+type BalanceOf<T> =
+ <<T as Config>::Currency as Currency<<T as frame_system::Config>::AccountId>>::Balance;
+
+type AccountIdLookupOf<T> = <<T as frame_system::Config>::Lookup as StaticLookup>::Source;
+
+/// The parameters under which a particular account has a proxy relationship with some other
+/// account.
+#[derive(
+ Encode,
+ Decode,
+ Clone,
+ Copy,
+ Eq,
+ PartialEq,
+ Ord,
+ PartialOrd,
+ RuntimeDebug,
+ MaxEncodedLen,
+ TypeInfo,
+)]
+#[freeze_struct("a37bb67fe5520678")]
+pub struct ProxyDefinition<AccountId, ProxyType, BlockNumber> {
+ /// The account which may act on behalf of another.
+ pub delegate: AccountId,
+ /// A value defining the subset of calls that it is allowed to make.
+ pub proxy_type: ProxyType,
+ /// The number of blocks that an announcement must be in place for before the corresponding
+ /// call may be dispatched. If zero, then no announcement is needed.
+ pub delay: BlockNumber,
+}
+
+/// Details surrounding a specific instance of an announcement to make a call.
+#[derive(Encode, Decode, Clone, Copy, Eq, PartialEq, RuntimeDebug, MaxEncodedLen, TypeInfo)]
+#[freeze_struct("4c1b5c8c3bc489ad")]
+pub struct Announcement<AccountId, Hash, BlockNumber> {
+ /// The account which made the announcement.
+ real: AccountId,
+ /// The hash of the call to be made.
+ call_hash: Hash,
+ /// The height at which the announcement was made.
+ height: BlockNumber,
+}
+
+#[frame_support::pallet]
+pub mod pallet {
+ use super::{DispatchResult, *};
+ use frame_support::pallet_prelude::*;
+ use frame_system::pallet_prelude::*;
+
+ #[pallet::pallet]
+ pub struct Pallet<T>(_);
+
+ /// Configuration trait.
+ #[pallet::config]
+ pub trait Config: frame_system::Config {
+ /// The overarching event type.
+ type RuntimeEvent: From<Event<Self>> + IsType<<Self as frame_system::Config>::RuntimeEvent>;
+
+ /// The overarching call type.
+ type RuntimeCall: Parameter
+ + Dispatchable<RuntimeOrigin = Self::RuntimeOrigin>
+ + GetDispatchInfo
+ + From<frame_system::Call<Self>>
+ + IsSubType<Call<Self>>
+ + IsType<<Self as frame_system::Config>::RuntimeCall>;
+
+ /// The currency mechanism.
+ type Currency: ReservableCurrency<Self::AccountId>;
+
+ /// A kind of proxy; specified with the proxy and passed in to the `IsProxyable` filter.
+ /// The instance filter determines whether a given call may be proxied under this type.
+ ///
+ /// IMPORTANT: `Default` must be provided and MUST BE the *most permissive* value.
+ type ProxyType: Parameter
+ + Member
+ + Ord
+ + PartialOrd
+ + InstanceFilter<<Self as Config>::RuntimeCall>
+ + Default
+ + MaxEncodedLen;
+
+ /// The base amount of currency needed to reserve for creating a proxy.
+ ///
+ /// This is held for an additional storage item whose value size is
+ /// `sizeof(Balance)` bytes and whose key size is `sizeof(AccountId)` bytes.
+ #[pallet::constant]
+ type ProxyDepositBase: Get<BalanceOf<Self>>;
+
+ /// The amount of currency needed per proxy added.
+ ///
+ /// This is held for adding 32 bytes plus an instance of `ProxyType` more into a
+ /// pre-existing storage value. Thus, when configuring `ProxyDepositFactor` one should take
+ /// into account `32 + proxy_type.encode().len()` bytes of data.
+ #[pallet::constant]
+ type ProxyDepositFactor: Get<BalanceOf<Self>>;
+
+ /// The maximum amount of proxies allowed for a single account.
+ #[pallet::constant]
+ type MaxProxies: Get<u32>;
+
+ /// Weight information for extrinsics in this pallet.
+ type WeightInfo: WeightInfo;
+
+ /// The maximum amount of time-delayed announcements that are allowed to be pending.
+ #[pallet::constant]
+ type MaxPending: Get<u32>;
+
+ /// The type of hash used for hashing the call.
+ type CallHasher: Hash;
+
+ /// The base amount of currency needed to reserve for creating an announcement.
+ ///
+ /// This is held when a new storage item holding a `Balance` is created (typically 16
+ /// bytes).
+ #[pallet::constant]
+ type AnnouncementDepositBase: Get<BalanceOf<Self>>;
+
+ /// The amount of currency needed per announcement made.
+ ///
+ /// This is held for adding an `AccountId`, `Hash` and `BlockNumber` (typically 68 bytes)
+ /// into a pre-existing storage value.
+ #[pallet::constant]
+ type AnnouncementDepositFactor: Get<BalanceOf<Self>>;
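+
+ // Deposit sizing, as a rough sketch (assuming the usual `base + factor * n`
+ // schedule applied by `rejig_deposit`): an account with `n` proxies keeps
+ // `ProxyDepositBase + n * ProxyDepositFactor` reserved, and a delegate with `m`
+ // pending announcements keeps
+ // `AnnouncementDepositBase + m * AnnouncementDepositFactor`.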
+ }
+
+ #[pallet::call]
+ impl<T: Config> Pallet<T> {
+ /// Dispatch the given `call` from an account that the sender is authorised for through
+ /// `add_proxy`.
+ ///
+ /// The dispatch origin for this call must be _Signed_.
+ ///
+ /// Parameters:
+ /// - `real`: The account that the proxy will make a call on behalf of.
+ /// - `force_proxy_type`: Specify the exact proxy type to be used and checked for this call.
+ /// - `call`: The call to be made by the `real` account.
+ #[pallet::call_index(0)]
+ #[pallet::weight({
+ let di = call.get_dispatch_info();
+ let inner_call_weight = match di.pays_fee {
+ Pays::Yes => di.weight,
+ Pays::No => Weight::zero(),
+ };
+ let base_weight = T::WeightInfo::proxy(T::MaxProxies::get())
+ .saturating_add(T::DbWeight::get().reads_writes(1, 1));
+ (base_weight.saturating_add(inner_call_weight), di.class)
+ })]
+ pub fn proxy(
+ origin: OriginFor<T>,
+ real: AccountIdLookupOf<T>,
+ force_proxy_type: Option<T::ProxyType>,
+ call: Box<<T as Config>::RuntimeCall>,
+ ) -> DispatchResult {
+ let who = ensure_signed(origin)?;
+ let real = T::Lookup::lookup(real)?;
+ let def = Self::find_proxy(&real, &who, force_proxy_type)?;
+ ensure!(def.delay.is_zero(), Error::<T>::Unannounced);
+
+ Self::do_proxy(def, real, *call);
+
+ Ok(())
+ }
+
+ /// Register a proxy account for the sender that is able to make calls on its behalf.
+ ///
+ /// The dispatch origin for this call must be _Signed_.
+ ///
+ /// Parameters:
+ /// - `proxy`: The account that the `caller` would like to make a proxy.
+ /// - `proxy_type`: The permissions allowed for this proxy account.
+ /// - `delay`: The announcement period required of the initial proxy. Will generally be
+ /// zero.
+ #[pallet::call_index(1)]
+ #[pallet::weight(T::WeightInfo::add_proxy(T::MaxProxies::get()))]
+ pub fn add_proxy(
+ origin: OriginFor<T>,
+ delegate: AccountIdLookupOf<T>,
+ proxy_type: T::ProxyType,
+ delay: BlockNumberFor<T>,
+ ) -> DispatchResult {
+ let who = ensure_signed(origin)?;
+ let delegate = T::Lookup::lookup(delegate)?;
+ Self::add_proxy_delegate(&who, delegate, proxy_type, delay)
+ }
+
+ /// Unregister a proxy account for the sender.
+ ///
+ /// The dispatch origin for this call must be _Signed_.
+ ///
+ /// Parameters:
+ /// - `proxy`: The account that the `caller` would like to remove as a proxy.
+ /// - `proxy_type`: The permissions currently enabled for the removed proxy account.
+ #[pallet::call_index(2)]
+ #[pallet::weight(T::WeightInfo::remove_proxy(T::MaxProxies::get()))]
+ pub fn remove_proxy(
+ origin: OriginFor<T>,
+ delegate: AccountIdLookupOf<T>,
+ proxy_type: T::ProxyType,
+ delay: BlockNumberFor<T>,
+ ) -> DispatchResult {
+ let who = ensure_signed(origin)?;
+ let delegate = T::Lookup::lookup(delegate)?;
+ Self::remove_proxy_delegate(&who, delegate, proxy_type, delay)
+ }
+
+ /// Unregister all proxy accounts for the sender.
+ ///
+ /// The dispatch origin for this call must be _Signed_.
+ ///
+ /// WARNING: This may be called on accounts created by `pure`, however if done, then
+ /// the unreserved fees will be inaccessible. **All access to this account will be lost.**
+ #[pallet::call_index(3)]
+ #[pallet::weight(T::WeightInfo::remove_proxies(T::MaxProxies::get()))]
+ pub fn remove_proxies(origin: OriginFor<T>) -> DispatchResult {
+ let who = ensure_signed(origin)?;
+ Self::remove_all_proxy_delegates(&who);
+ Ok(())
+ }
+
+ /// Spawn a fresh new account that is guaranteed to be otherwise inaccessible, and
+ /// initialize it with a proxy of `proxy_type` for `origin` sender.
+ ///
+ /// Requires a `Signed` origin.
+ ///
+ /// - `proxy_type`: The type of the proxy that the sender will be registered as over the
+ /// new account. This will almost always be the most permissive `ProxyType` possible to
+ /// allow for maximum flexibility.
+ /// - `index`: A disambiguation index, in case this is called multiple times in the same
+ /// transaction (e.g. with `utility::batch`). Unless you're using `batch` you probably just
+ /// want to use `0`.
+ /// - `delay`: The announcement period required of the initial proxy. Will generally be
+ /// zero.
+ ///
+ /// Fails with `Duplicate` if this has already been called in this transaction, from the
+ /// same sender, with the same parameters.
+ ///
+ /// Fails if there are insufficient funds to pay for deposit.
+ #[pallet::call_index(4)]
+ #[pallet::weight(T::WeightInfo::create_pure(T::MaxProxies::get()))]
+ pub fn create_pure(
+ origin: OriginFor<T>,
+ proxy_type: T::ProxyType,
+ delay: BlockNumberFor<T>,
+ index: u16,
+ ) -> DispatchResult {
+ let who = ensure_signed(origin)?;
+
+ let pure = Self::pure_account(&who, &proxy_type, index, None);
+ ensure!(!Proxies::<T>::contains_key(&pure), Error::<T>::Duplicate);
+
+ let proxy_def = ProxyDefinition {
+ delegate: who.clone(),
+ proxy_type: proxy_type.clone(),
+ delay,
+ };
+ let bounded_proxies: BoundedVec<_, T::MaxProxies> = vec![proxy_def]
+ .try_into()
+ .map_err(|_| Error::<T>::TooMany)?;
+
+ let deposit = T::ProxyDepositBase::get().saturating_add(T::ProxyDepositFactor::get());
+ T::Currency::reserve(&who, deposit)?;
+
+ Proxies::<T>::insert(&pure, (bounded_proxies, deposit));
+ Self::deposit_event(Event::PureCreated {
+ pure,
+ who,
+ proxy_type,
+ disambiguation_index: index,
+ });
+
+ Ok(())
+ }
+
+ /// Removes a previously spawned pure proxy.
+ ///
+ /// WARNING: **All access to this account will be lost.** Any funds held in it will be
+ /// inaccessible.
+ ///
+ /// Requires a `Signed` origin, and the sender account must have been created by a call to
+ /// `pure` with corresponding parameters.
+ ///
+ /// - `spawner`: The account that originally called `pure` to create this account.
+ /// - `index`: The disambiguation index originally passed to `pure`. Probably `0`.
+ /// - `proxy_type`: The proxy type originally passed to `pure`.
+ /// - `height`: The height of the chain when the call to `pure` was processed.
+ /// - `ext_index`: The extrinsic index in which the call to `pure` was processed.
+ ///
+ /// Fails with `NoPermission` in case the caller is not a previously created pure
+ /// account whose `pure` call has corresponding parameters.
+ #[pallet::call_index(5)]
+ #[pallet::weight(T::WeightInfo::kill_pure(T::MaxProxies::get()))]
+ pub fn kill_pure(
+ origin: OriginFor<T>,
+ spawner: AccountIdLookupOf<T>,
+ proxy_type: T::ProxyType,
+ index: u16,
+ #[pallet::compact] height: BlockNumberFor<T>,
+ #[pallet::compact] ext_index: u32,
+ ) -> DispatchResult {
+ let who = ensure_signed(origin)?;
+ let spawner = T::Lookup::lookup(spawner)?;
+
+ let when = (height, ext_index);
+ let proxy = Self::pure_account(&spawner, &proxy_type, index, Some(when));
+ ensure!(proxy == who, Error::<T>::NoPermission);
+
+ let (_, deposit) = Proxies::<T>::take(&who);
+ T::Currency::unreserve(&spawner, deposit);
+
+ Ok(())
+ }
+
+ /// Publish the hash of a proxy-call that will be made in the future.
+ ///
+ /// This must be called some number of blocks before the corresponding `proxy` is attempted
+ /// if the delay associated with the proxy relationship is greater than zero.
+ ///
+ /// No more than `MaxPending` announcements may be made at any one time.
+ ///
+ /// This will take a deposit of `AnnouncementDepositFactor` as well as
+ /// `AnnouncementDepositBase` if there are no other pending announcements.
+ ///
+ /// The dispatch origin for this call must be _Signed_ and a proxy of `real`.
+ ///
+ /// Parameters:
+ /// - `real`: The account that the proxy will make a call on behalf of.
+ /// - `call_hash`: The hash of the call to be made by the `real` account.
+ #[pallet::call_index(6)]
+ #[pallet::weight(T::WeightInfo::announce(T::MaxPending::get(), T::MaxProxies::get()))]
+ pub fn announce(
+ origin: OriginFor<T>,
+ real: AccountIdLookupOf<T>,
+ call_hash: CallHashOf<T>,
+ ) -> DispatchResult {
+ let who = ensure_signed(origin)?;
+ let real = T::Lookup::lookup(real)?;
+ Proxies::<T>::get(&real)
+ .0
+ .into_iter()
+ .find(|x| x.delegate == who)
+ .ok_or(Error::<T>::NotProxy)?;
+
+ let announcement = Announcement {
+ real: real.clone(),
+ call_hash,
+ height: system::Pallet::<T>::block_number(),
+ };
+
+ Announcements::<T>::try_mutate(&who, |(ref mut pending, ref mut deposit)| {
+ pending
+ .try_push(announcement)
+ .map_err(|_| Error::<T>::TooMany)?;
+ Self::rejig_deposit(
+ &who,
+ *deposit,
+ T::AnnouncementDepositBase::get(),
+ T::AnnouncementDepositFactor::get(),
+ pending.len(),
+ )
+ .map(|d| {
+ d.expect("Just pushed; pending.len() > 0; rejig_deposit returns Some; qed")
+ })
+ .map(|d| *deposit = d)
+ })?;
+ Self::deposit_event(Event::Announced {
+ real,
+ proxy: who,
+ call_hash,
+ });
+
+ Ok(())
+ }
+
+ /// Remove a given announcement.
+ ///
+ /// May be called by a proxy account to remove a call they previously announced and return
+ /// the deposit.
+ ///
+ /// The dispatch origin for this call must be _Signed_.
+ ///
+ /// Parameters:
+ /// - `real`: The account that the proxy will make a call on behalf of.
+ /// - `call_hash`: The hash of the call to be made by the `real` account.
+ #[pallet::call_index(7)]
+ #[pallet::weight(T::WeightInfo::remove_announcement(
+ T::MaxPending::get(),
+ T::MaxProxies::get()
+ ))]
+ pub fn remove_announcement(
+ origin: OriginFor<T>,
+ real: AccountIdLookupOf<T>,
+ call_hash: CallHashOf<T>,
+ ) -> DispatchResult {
+ let who = ensure_signed(origin)?;
+ let real = T::Lookup::lookup(real)?;
+ Self::edit_announcements(&who, |ann| ann.real != real || ann.call_hash != call_hash)?;
+
+ Ok(())
+ }
+
+ /// Remove the given announcement of a delegate.
+ ///
+ /// May be called by a target (proxied) account to remove a call that one of their delegates
+ /// (`delegate`) has announced they want to execute. The deposit is returned.
+ ///
+ /// The dispatch origin for this call must be _Signed_.
+ ///
+ /// Parameters:
+ /// - `delegate`: The account that previously announced the call.
+ /// - `call_hash`: The hash of the call to be made.
+ #[pallet::call_index(8)]
+ #[pallet::weight(T::WeightInfo::reject_announcement(
+ T::MaxPending::get(),
+ T::MaxProxies::get()
+ ))]
+ pub fn reject_announcement(
+ origin: OriginFor<T>,
+ delegate: AccountIdLookupOf<T>,
+ call_hash: CallHashOf<T>,
+ ) -> DispatchResult {
+ let who = ensure_signed(origin)?;
+ let delegate = T::Lookup::lookup(delegate)?;
+ Self::edit_announcements(&delegate, |ann| {
+ ann.real != who || ann.call_hash != call_hash
+ })?;
+
+ Ok(())
+ }
+
+ /// Dispatch the given `call` from an account that the sender is authorized for through
+ /// `add_proxy`.
+ ///
+ /// Removes any corresponding announcement(s).
+ ///
+ /// The dispatch origin for this call must be _Signed_.
+ ///
+ /// Parameters:
+ /// - `real`: The account that the proxy will make a call on behalf of.
+ /// - `force_proxy_type`: Specify the exact proxy type to be used and checked for this call.
+ /// - `call`: The call to be made by the `real` account.
+ #[pallet::call_index(9)]
+ #[pallet::weight({
+ let di = call.get_dispatch_info();
+ (T::WeightInfo::proxy_announced(T::MaxPending::get(), T::MaxProxies::get())
+ // AccountData for inner call origin accountdata.
+ .saturating_add(T::DbWeight::get().reads_writes(1, 1))
+ .saturating_add(di.weight),
+ di.class)
+ })]
+ pub fn proxy_announced(
+ origin: OriginFor<T>,
+ delegate: AccountIdLookupOf<T>,
+ real: AccountIdLookupOf<T>,
+ force_proxy_type: Option<T::ProxyType>,
+ call: Box<<T as Config>::RuntimeCall>,
+ ) -> DispatchResult {
+ ensure_signed(origin)?;
+ let delegate = T::Lookup::lookup(delegate)?;
+ let real = T::Lookup::lookup(real)?;
+ let def = Self::find_proxy(&real, &delegate, force_proxy_type)?;
+
+ let call_hash = T::CallHasher::hash_of(&call);
+ let now = system::Pallet::<T>::block_number();
+ Self::edit_announcements(&delegate, |ann| {
+ ann.real != real
+ || ann.call_hash != call_hash
+ || now.saturating_sub(ann.height) < def.delay
+ })
+ .map_err(|_| Error::<T>::Unannounced)?;
+
+ Self::do_proxy(def, real, *call);
+
+ Ok(())
+ }
+ }
+
+ #[pallet::event]
+ #[pallet::generate_deposit(pub(super) fn deposit_event)]
+ pub enum Event<T: Config> {
+ /// A proxy was executed correctly, with the given result.
+ ProxyExecuted { result: DispatchResult },
+ /// A pure account has been created by a new proxy with the given
+ /// disambiguation index and proxy type.
+ PureCreated {
+ pure: T::AccountId,
+ who: T::AccountId,
+ proxy_type: T::ProxyType,
+ disambiguation_index: u16,
+ },
+ /// An announcement was placed to make a call in the future.
+ Announced {
+ real: T::AccountId,
+ proxy: T::AccountId,
+ call_hash: CallHashOf<T>,
+ },
+ /// A proxy was added.
+ ProxyAdded {
+ delegator: T::AccountId,
+ delegatee: T::AccountId,
+ proxy_type: T::ProxyType,
+ delay: BlockNumberFor<T>,
+ },
+ /// A proxy was removed.
+ ProxyRemoved {
+ delegator: T::AccountId,
+ delegatee: T::AccountId,
+ proxy_type: T::ProxyType,
+ delay: BlockNumberFor<T>,
+ },
+ }
+
+ #[pallet::error]
+ pub enum Error<T> {
+ /// There are too many proxies registered or too many announcements pending.
+ TooMany,
+ /// Proxy registration not found.
+ NotFound,
+ /// Sender is not a proxy of the account to be proxied.
+ NotProxy,
+ /// A call which is incompatible with the proxy type's filter was attempted.
+ Unproxyable,
+ /// Account is already a proxy.
+ Duplicate,
+ /// Call may not be made by proxy because it may escalate its privileges.
+ NoPermission,
+ /// Announcement, if made at all, was made too recently.
+ Unannounced,
+ /// Cannot add self as proxy.
+ NoSelfProxy,
+ }
+
+ /// The set of account proxies. Maps the account which has delegated to the accounts
+ /// which are being delegated to, together with the amount held on deposit.
+ #[pallet::storage]
+ pub type Proxies<T: Config> = StorageMap<
+ _,
+ Twox64Concat,
+ T::AccountId,
+ (
+ BoundedVec<
+ ProxyDefinition<T::AccountId, T::ProxyType, BlockNumberFor<T>>,
+ T::MaxProxies,
+ >,
+ BalanceOf<T>,
+ ),
+ ValueQuery,
+ >;
+
+ /// The announcements made by the proxy (key).
+ #[pallet::storage]
+ pub type Announcements<T: Config> = StorageMap<
+ _,
+ Twox64Concat,
+ T::AccountId,
+ (
+ BoundedVec<Announcement<T::AccountId, CallHashOf<T>, BlockNumberFor<T>>, T::MaxPending>,
+ BalanceOf<T>