diff --git a/Cargo.lock b/Cargo.lock index 06933b9232..6fe43451ed 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5849,7 +5849,7 @@ dependencies = [ "pallet-membership", "pallet-multisig", "pallet-preimage", - "pallet-proxy", + "pallet-proxy 38.0.0", "pallet-registry", "pallet-safe-mode", "pallet-scheduler", @@ -5858,7 +5858,7 @@ dependencies = [ "pallet-timestamp", "pallet-transaction-payment", "pallet-transaction-payment-rpc-runtime-api", - "pallet-utility", + "pallet-utility 38.0.0", "parity-scale-codec", "precompile-utils", "rand_chacha", @@ -6522,6 +6522,23 @@ dependencies = [ "sp-runtime", ] +[[package]] +name = "pallet-proxy" +version = "38.0.0" +dependencies = [ + "frame-benchmarking", + "frame-support", + "frame-system", + "pallet-balances", + "pallet-utility 38.0.0", + "parity-scale-codec", + "scale-info", + "sp-core", + "sp-io", + "sp-runtime", + "subtensor-macros", +] + [[package]] name = "pallet-proxy" version = "38.0.0" @@ -6553,6 +6570,20 @@ dependencies = [ "subtensor-macros", ] +[[package]] +name = "pallet-root-testing" +version = "14.0.0" +source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=polkadot-stable2409#87971b3e92721bdf10bf40b410eaae779d494ca0" +dependencies = [ + "frame-support", + "frame-system", + "parity-scale-codec", + "scale-info", + "sp-core", + "sp-io", + "sp-runtime", +] + [[package]] name = "pallet-safe-mode" version = "19.0.0" @@ -6563,8 +6594,8 @@ dependencies = [ "frame-support", "frame-system", "pallet-balances", - "pallet-proxy", - "pallet-utility", + "pallet-proxy 38.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=polkadot-stable2409)", + "pallet-utility 38.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=polkadot-stable2409)", "parity-scale-codec", "scale-info", "sp-arithmetic", @@ -6631,7 +6662,7 @@ dependencies = [ "pallet-preimage", "pallet-scheduler", "pallet-transaction-payment", - "pallet-utility", + "pallet-utility 38.0.0", "parity-scale-codec", "parity-util-mem", "rand", @@ -6734,6 +6765,25 @@ dependencies = [ "sp-weights", ] +[[package]] +name = "pallet-utility" +version = "38.0.0" +dependencies = [ + "frame-benchmarking", + "frame-support", + "frame-system", + "pallet-balances", + "pallet-collective", + "pallet-root-testing", + "pallet-timestamp", + "parity-scale-codec", + "scale-info", + "sp-core", + "sp-io", + "sp-runtime", + "subtensor-macros", +] + [[package]] name = "pallet-utility" version = "38.0.0" @@ -8011,9 +8061,9 @@ dependencies = [ [[package]] name = "ring" -version = "0.17.12" +version = "0.17.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed9b823fa29b721a59671b41d6b06e66b29e0628e207e8b1c3ceeda701ec928d" +checksum = "70ac5d832aa16abd7d1def883a8545280c20a60f523a370aa3a9617c2b8550ee" dependencies = [ "cc", "cfg-if", @@ -8205,7 +8255,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f56a14d1f48b391359b22f731fd4bd7e43c97f3c50eee276f3aa09c94784d3e" dependencies = [ "log", - "ring 0.17.12", + "ring 0.17.13", "rustls-webpki", "sct", ] @@ -8237,7 +8287,7 @@ version = "0.101.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8b6275d1ee7a1cd780b64aca7726599a1dbc893b1e64144529e55c3c2f745765" dependencies = [ - "ring 0.17.12", + "ring 0.17.13", "untrusted 0.9.0", ] @@ -9488,7 +9538,7 @@ version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "da046153aa2352493d6cb7da4b6e5c0c057d8a1d0a9aa8560baffdd945acd414" dependencies = [ - "ring 0.17.12", + "ring 0.17.13", "untrusted 
0.9.0", ] @@ -9911,7 +9961,7 @@ dependencies = [ "chacha20poly1305", "curve25519-dalek", "rand_core", - "ring 0.17.12", + "ring 0.17.13", "rustc_version 0.4.1", "sha2 0.10.8", "subtle 2.6.1", @@ -11220,7 +11270,7 @@ dependencies = [ "pallet-evm-precompile-modexp", "pallet-evm-precompile-sha3fips", "pallet-evm-precompile-simple", - "pallet-proxy", + "pallet-proxy 38.0.0", "pallet-subtensor", "precompile-utils", "sp-core", @@ -12534,7 +12584,7 @@ version = "0.22.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ed63aea5ce73d0ff405984102c42de94fc55a6b75765d621c65262469b3c9b53" dependencies = [ - "ring 0.17.12", + "ring 0.17.13", "untrusted 0.9.0", ] diff --git a/Cargo.toml b/Cargo.toml index 781fe6dfe9..15dd760a57 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -115,7 +115,7 @@ pallet-insecure-randomness-collective-flip = { git = "https://github.com/parityt pallet-membership = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "polkadot-stable2409", default-features = false } pallet-multisig = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "polkadot-stable2409", default-features = false } pallet-preimage = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "polkadot-stable2409", default-features = false } -pallet-proxy = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "polkadot-stable2409", default-features = false } +pallet-proxy = { path = "pallets/proxy", default-features = false } pallet-safe-mode = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "polkadot-stable2409", default-features = false } pallet-scheduler = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "polkadot-stable2409", default-features = false } pallet-sudo = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "polkadot-stable2409", default-features = false } @@ -123,7 +123,8 @@ pallet-timestamp = { git = "https://github.com/paritytech/polkadot-sdk.git", tag pallet-transaction-payment = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "polkadot-stable2409", default-features = false } pallet-transaction-payment-rpc = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "polkadot-stable2409" } pallet-transaction-payment-rpc-runtime-api = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "polkadot-stable2409", default-features = false } -pallet-utility = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "polkadot-stable2409", default-features = false } +pallet-utility = { path = "pallets/utility", default-features = false } +pallet-root-testing = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "polkadot-stable2409", default-features = false } sc-basic-authorship = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "polkadot-stable2409" } sc-cli = { git = "https://github.com/paritytech/polkadot-sdk.git", tag = "polkadot-stable2409" } diff --git a/evm-tests/.gitignore b/evm-tests/.gitignore new file mode 100644 index 0000000000..661f94a6e0 --- /dev/null +++ b/evm-tests/.gitignore @@ -0,0 +1,3 @@ +node_modules +.papi +.env diff --git a/evm-tests/README.md b/evm-tests/README.md new file mode 100644 index 0000000000..7d01034bd8 --- /dev/null +++ b/evm-tests/README.md @@ -0,0 +1,27 @@ +# type-test + +test with ts + +## polkadot api + +```bash +npx papi add devnet -w ws://10.0.0.11:9944 +``` + +## get the new metadata + +```bash +sh get-metadata.sh +``` + +## run all tests + +```bash +yarn run test +``` + +## To run a 
particular test case, pass an argument with the test name or part of it. For example: + +```bash +yarn run test -- -g "Can set subnet parameter" +``` diff --git a/evm-tests/get-metadata.sh b/evm-tests/get-metadata.sh new file mode 100644 index 0000000000..6d7727009d --- /dev/null +++ b/evm-tests/get-metadata.sh @@ -0,0 +1,3 @@ +rm -rf .papi +npx papi add devnet -w ws://localhost:9944 + diff --git a/evm-tests/local.test.ts b/evm-tests/local.test.ts new file mode 100644 index 0000000000..9eb24d4327 --- /dev/null +++ b/evm-tests/local.test.ts @@ -0,0 +1,53 @@ +import * as assert from "assert"; +import { getAliceSigner, getClient, getDevnetApi, getRandomSubstrateKeypair } from "../src/substrate" +import { SUB_LOCAL_URL, } from "../src/config"; +import { devnet } from "@polkadot-api/descriptors" +import { PolkadotSigner, TypedApi } from "polkadot-api"; +import { convertPublicKeyToSs58, convertH160ToSS58 } from "../src/address-utils" +import { ethers } from "ethers" +import { INEURON_ADDRESS, INeuronABI } from "../src/contracts/neuron" +import { generateRandomEthersWallet } from "../src/utils" +import { forceSetBalanceToEthAddress, forceSetBalanceToSs58Address, addNewSubnetwork, burnedRegister } from "../src/subtensor" + +describe("Test neuron precompile Serve Axon Prometheus", () => { + // init eth part + // const wallet1 = generateRandomEthersWallet(); + // const wallet2 = generateRandomEthersWallet(); + // const wallet3 = generateRandomEthersWallet(); + + // init substrate part + + // const coldkey = getRandomSubstrateKeypair(); + + let api: TypedApi<typeof devnet> + + // sudo account alice as signer + let alice: PolkadotSigner; + before(async () => { + // init variables got from await and async + const subClient = await getClient(SUB_LOCAL_URL) + api = await getDevnetApi() + // alice = await getAliceSigner(); + + // await forceSetBalanceToSs58Address(api, convertPublicKeyToSs58(coldkey.publicKey)) + // await forceSetBalanceToEthAddress(api, wallet1.address) + // await forceSetBalanceToEthAddress(api, wallet2.address) + // await forceSetBalanceToEthAddress(api, wallet3.address) + + + let index = 0; + while (index < 30) { + const hotkey = getRandomSubstrateKeypair(); + const coldkey = getRandomSubstrateKeypair(); + await forceSetBalanceToSs58Address(api, convertPublicKeyToSs58(hotkey.publicKey)) + await forceSetBalanceToSs58Address(api, convertPublicKeyToSs58(coldkey.publicKey)) + let netuid = await addNewSubnetwork(api, hotkey, coldkey) + index++; + } + + + }) + + it("Serve Axon", async () => { + + }); +}); \ No newline at end of file diff --git a/evm-tests/package.json b/evm-tests/package.json new file mode 100644 index 0000000000..a96a2c4a0c --- /dev/null +++ b/evm-tests/package.json @@ -0,0 +1,31 @@ +{ + "scripts": { + "test": "mocha --timeout 999999 --require ts-node/register test/*test.ts" + }, + "keywords": [], + "author": "", + "license": "ISC", + "dependencies": { + "@polkadot-api/descriptors": "file:.papi/descriptors", + "@polkadot-labs/hdkd": "^0.0.10", + "@polkadot-labs/hdkd-helpers": "^0.0.11", + "@polkadot/api": "15.1.1", + "crypto": "^1.0.1", + "dotenv": "16.4.7", + "ethers": "^6.13.5", + "polkadot-api": "^1.9.5", + "viem": "2.23.4" + }, + "devDependencies": { + "@types/bun": "^1.1.13", + "@types/chai": "^5.0.1", + "@types/mocha": "^10.0.10", + "assert": "^2.1.0", + "chai": "^5.2.0", + "mocha": "^11.1.0", + "prettier": "^3.3.3", + "ts-node": "^10.9.2", + "typescript": "^5.7.2", + "vite": "^5.4.8" + } +} diff --git a/evm-tests/src/address-utils.ts b/evm-tests/src/address-utils.ts new file mode 
100644 index 0000000000..ed3abc5008 --- /dev/null +++ b/evm-tests/src/address-utils.ts @@ -0,0 +1,77 @@ +import { Address } from "viem" +import { encodeAddress } from "@polkadot/util-crypto"; +import { ss58Address } from "@polkadot-labs/hdkd-helpers"; +import { hexToU8a } from "@polkadot/util"; +import { blake2AsU8a, decodeAddress } from "@polkadot/util-crypto"; +import { Binary } from "polkadot-api"; +import { SS58_PREFIX } from "./config" + +export function toViemAddress(address: string): Address { + let addressNoPrefix = address.replace("0x", "") + return `0x${addressNoPrefix}` +} + +export function convertH160ToSS58(ethAddress: string) { + // get the public key + const hash = convertH160ToPublicKey(ethAddress); + + // Convert the hash to SS58 format + const ss58Address = encodeAddress(hash, SS58_PREFIX); + return ss58Address; +} + +export function convertPublicKeyToSs58(publickey: Uint8Array) { + return ss58Address(publickey, SS58_PREFIX); +} + +export function convertH160ToPublicKey(ethAddress: string) { + const prefix = "evm:"; + const prefixBytes = new TextEncoder().encode(prefix); + const addressBytes = hexToU8a( + ethAddress.startsWith("0x") ? ethAddress : `0x${ethAddress}` + ); + const combined = new Uint8Array(prefixBytes.length + addressBytes.length); + + // Concatenate prefix and Ethereum address + combined.set(prefixBytes); + combined.set(addressBytes, prefixBytes.length); + + // Hash the combined data (the public key) + const hash = blake2AsU8a(combined); + return hash; +} + +export function ss58ToEthAddress(ss58Address: string) { + // Decode the SS58 address to a Uint8Array public key + const publicKey = decodeAddress(ss58Address); + + // Take the first 20 bytes of the public key for the Ethereum address + const ethereumAddressBytes = publicKey.slice(0, 20); + + // Convert the 20 bytes into an Ethereum H160 address format (Hex string) + const ethereumAddress = '0x' + Buffer.from(ethereumAddressBytes).toString('hex'); + + return ethereumAddress; +} + +export function ss58ToH160(ss58Address: string): Binary { + // Decode the SS58 address to a Uint8Array public key + const publicKey = decodeAddress(ss58Address); + + // Take the first 20 bytes of the public key for the Ethereum address + const ethereumAddressBytes = publicKey.slice(0, 20); + + + return new Binary(ethereumAddressBytes); +} + +export function ethAddressToH160(ethAddress: string): Binary { + // Convert the hex Ethereum address to its raw 20-byte form + const addressBytes = hexToU8a(ethAddress); + + + return new Binary(addressBytes); +} \ No newline at end of file diff --git a/evm-tests/src/balance-math.ts b/evm-tests/src/balance-math.ts new file mode 100644 index 0000000000..8d6e86bd5a --- /dev/null +++ b/evm-tests/src/balance-math.ts @@ -0,0 +1,26 @@ +import assert from "assert" + +export const TAO = BigInt(1000000000) // 10^9 +export const ETH_PER_RAO = BigInt(1000000000) // 10^9 +export const GWEI = BigInt(1000000000) // 10^9 +export const MAX_TX_FEE = BigInt(21000000) * GWEI // 100 times EVM to EVM transfer fee + +export function bigintToRao(value: bigint) { + return TAO * value +} + +export function tao(value: number) { + return TAO * BigInt(value) +} + +export function raoToEth(value: bigint) { + return ETH_PER_RAO * value +} + +export function compareEthBalanceWithTxFee(balance1: bigint, balance2: bigint) { + if (balance1 > balance2) { + assert((balance1 - balance2) < 
MAX_TX_FEE) + } else { + assert((balance2 - balance1) < MAX_TX_FEE) + } +} diff --git a/evm-tests/src/config.ts b/evm-tests/src/config.ts new file mode 100644 index 0000000000..601c89c8c1 --- /dev/null +++ b/evm-tests/src/config.ts @@ -0,0 +1,38 @@ +export const ETH_LOCAL_URL = 'http://localhost:9944' +export const SUB_LOCAL_URL = 'ws://localhost:9944' +export const SS58_PREFIX = 42; +// set the tx timeout to 2 seconds when the fast-blocks feature is enabled. +export const TX_TIMEOUT = 2000; + +export const IED25519VERIFY_ADDRESS = "0x0000000000000000000000000000000000000402"; +export const IEd25519VerifyABI = [ + { + inputs: [ + { internalType: "bytes32", name: "message", type: "bytes32" }, + { internalType: "bytes32", name: "publicKey", type: "bytes32" }, + { internalType: "bytes32", name: "r", type: "bytes32" }, + { internalType: "bytes32", name: "s", type: "bytes32" }, + ], + name: "verify", + outputs: [{ internalType: "bool", name: "", type: "bool" }], + stateMutability: "pure", + type: "function", + }, +]; + +export const IBALANCETRANSFER_ADDRESS = "0x0000000000000000000000000000000000000800"; +export const IBalanceTransferABI = [ + { + inputs: [ + { + internalType: "bytes32", + name: "data", + type: "bytes32", + }, + ], + name: "transfer", + outputs: [], + stateMutability: "payable", + type: "function", + }, +]; \ No newline at end of file diff --git a/evm-tests/src/contracts/bridgeToken.ts b/evm-tests/src/contracts/bridgeToken.ts new file mode 100644 index 0000000000..f8b3ea4d03 --- /dev/null +++ b/evm-tests/src/contracts/bridgeToken.ts @@ -0,0 +1,631 @@ +export const BRIDGE_TOKEN_CONTRACT_ABI = [ + { + "inputs": [ + { + "internalType": "string", + "name": "name_", + "type": "string" + }, + { + "internalType": "string", + "name": "symbol_", + "type": "string" + }, + { + "internalType": "address", + "name": "admin", + "type": "address" + } + ], + "stateMutability": "nonpayable", + "type": "constructor" + }, + { + "inputs": [], + "name": "AccessControlBadConfirmation", + "type": "error" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "account", + "type": "address" + }, + { + "internalType": "bytes32", + "name": "neededRole", + "type": "bytes32" + } + ], + "name": "AccessControlUnauthorizedAccount", + "type": "error" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "spender", + "type": "address" + }, + { + "internalType": "uint256", + "name": "allowance", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "needed", + "type": "uint256" + } + ], + "name": "ERC20InsufficientAllowance", + "type": "error" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "sender", + "type": "address" + }, + { + "internalType": "uint256", + "name": "balance", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "needed", + "type": "uint256" + } + ], + "name": "ERC20InsufficientBalance", + "type": "error" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "approver", + "type": "address" + } + ], + "name": "ERC20InvalidApprover", + "type": "error" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "receiver", + "type": "address" + } + ], + "name": "ERC20InvalidReceiver", + "type": "error" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "sender", + "type": "address" + } + ], + "name": "ERC20InvalidSender", + "type": "error" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "spender", + "type": "address" + } + ], + "name": "ERC20InvalidSpender", + "type": "error" + }, + { 
+ "inputs": [], + "name": "UnauthorizedHandler", + "type": "error" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "address", + "name": "owner", + "type": "address" + }, + { + "indexed": true, + "internalType": "address", + "name": "spender", + "type": "address" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "value", + "type": "uint256" + } + ], + "name": "Approval", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "bytes32", + "name": "role", + "type": "bytes32" + }, + { + "indexed": true, + "internalType": "bytes32", + "name": "previousAdminRole", + "type": "bytes32" + }, + { + "indexed": true, + "internalType": "bytes32", + "name": "newAdminRole", + "type": "bytes32" + } + ], + "name": "RoleAdminChanged", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "bytes32", + "name": "role", + "type": "bytes32" + }, + { + "indexed": true, + "internalType": "address", + "name": "account", + "type": "address" + }, + { + "indexed": true, + "internalType": "address", + "name": "sender", + "type": "address" + } + ], + "name": "RoleGranted", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "bytes32", + "name": "role", + "type": "bytes32" + }, + { + "indexed": true, + "internalType": "address", + "name": "account", + "type": "address" + }, + { + "indexed": true, + "internalType": "address", + "name": "sender", + "type": "address" + } + ], + "name": "RoleRevoked", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "address", + "name": "from", + "type": "address" + }, + { + "indexed": true, + "internalType": "address", + "name": "to", + "type": "address" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "value", + "type": "uint256" + } + ], + "name": "Transfer", + "type": "event" + }, + { + "inputs": [], + "name": "DEFAULT_ADMIN_ROLE", + "outputs": [ + { + "internalType": "bytes32", + "name": "", + "type": "bytes32" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "owner", + "type": "address" + }, + { + "internalType": "address", + "name": "spender", + "type": "address" + } + ], + "name": "allowance", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "spender", + "type": "address" + }, + { + "internalType": "uint256", + "name": "value", + "type": "uint256" + } + ], + "name": "approve", + "outputs": [ + { + "internalType": "bool", + "name": "", + "type": "bool" + } + ], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "account", + "type": "address" + } + ], + "name": "balanceOf", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "value", + "type": "uint256" + } + ], + "name": "burn", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "from", + "type": "address" + }, + { + "internalType": "uint256", + "name": "amount", + "type": "uint256" + } + ], + "name": "burnFrom", + 
"outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [], + "name": "decimals", + "outputs": [ + { + "internalType": "uint8", + "name": "", + "type": "uint8" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "bytes32", + "name": "role", + "type": "bytes32" + } + ], + "name": "getRoleAdmin", + "outputs": [ + { + "internalType": "bytes32", + "name": "", + "type": "bytes32" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "bytes32", + "name": "role", + "type": "bytes32" + }, + { + "internalType": "address", + "name": "account", + "type": "address" + } + ], + "name": "grantRole", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "bytes32", + "name": "role", + "type": "bytes32" + }, + { + "internalType": "address", + "name": "account", + "type": "address" + } + ], + "name": "hasRole", + "outputs": [ + { + "internalType": "bool", + "name": "", + "type": "bool" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "account", + "type": "address" + } + ], + "name": "isAdmin", + "outputs": [ + { + "internalType": "bool", + "name": "", + "type": "bool" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "to", + "type": "address" + }, + { + "internalType": "uint256", + "name": "amount", + "type": "uint256" + } + ], + "name": "mint", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [], + "name": "name", + "outputs": [ + { + "internalType": "string", + "name": "", + "type": "string" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "bytes32", + "name": "role", + "type": "bytes32" + }, + { + "internalType": "address", + "name": "callerConfirmation", + "type": "address" + } + ], + "name": "renounceRole", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "bytes32", + "name": "role", + "type": "bytes32" + }, + { + "internalType": "address", + "name": "account", + "type": "address" + } + ], + "name": "revokeRole", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "bytes4", + "name": "interfaceId", + "type": "bytes4" + } + ], + "name": "supportsInterface", + "outputs": [ + { + "internalType": "bool", + "name": "", + "type": "bool" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "symbol", + "outputs": [ + { + "internalType": "string", + "name": "", + "type": "string" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "totalSupply", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "to", + "type": "address" + }, + { + "internalType": "uint256", + "name": "value", + "type": "uint256" + } + ], + "name": "transfer", + "outputs": [ + { + "internalType": "bool", + "name": "", + "type": "bool" + } + ], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "from", + "type": "address" + }, + { + "internalType": "address", + 
"name": "to", + "type": "address" + }, + { + "internalType": "uint256", + "name": "value", + "type": "uint256" + } + ], + "name": "transferFrom", + "outputs": [ + { + "internalType": "bool", + "name": "", + "type": "bool" + } + ], + "stateMutability": "nonpayable", + "type": "function" + } +]; + +export const BRIDGE_TOKEN_CONTRACT_BYTECODE = "0x60806040523480156200001157600080fd5b5060405162000fac38038062000fac8339810160408190526200003491620001ea565b8282600362000044838262000308565b50600462000053828262000308565b5062000065915060009050826200006f565b50505050620003d4565b60008281526005602090815260408083206001600160a01b038516845290915281205460ff16620001185760008381526005602090815260408083206001600160a01b03861684529091529020805460ff19166001179055620000cf3390565b6001600160a01b0316826001600160a01b0316847f2f8788117e7eff1d82e926ec794901d17c78024a50270940304540a733656f0d60405160405180910390a45060016200011c565b5060005b92915050565b634e487b7160e01b600052604160045260246000fd5b600082601f8301126200014a57600080fd5b81516001600160401b038082111562000167576200016762000122565b604051601f8301601f19908116603f0116810190828211818310171562000192576200019262000122565b8160405283815260209250866020858801011115620001b057600080fd5b600091505b83821015620001d45785820183015181830184015290820190620001b5565b6000602085830101528094505050505092915050565b6000806000606084860312156200020057600080fd5b83516001600160401b03808211156200021857600080fd5b620002268783880162000138565b945060208601519150808211156200023d57600080fd5b506200024c8682870162000138565b604086015190935090506001600160a01b03811681146200026c57600080fd5b809150509250925092565b600181811c908216806200028c57607f821691505b602082108103620002ad57634e487b7160e01b600052602260045260246000fd5b50919050565b601f82111562000303576000816000526020600020601f850160051c81016020861015620002de5750805b601f850160051c820191505b81811015620002ff57828155600101620002ea565b5050505b505050565b81516001600160401b0381111562000324576200032462000122565b6200033c8162000335845462000277565b84620002b3565b602080601f8311600181146200037457600084156200035b5750858301515b600019600386901b1c1916600185901b178555620002ff565b600085815260208120601f198616915b82811015620003a55788860151825594840194600190910190840162000384565b5085821015620003c45787850151600019600388901b60f8161c191681555b5050505050600190811b01905550565b610bc880620003e46000396000f3fe608060405234801561001057600080fd5b506004361061012c5760003560e01c806340c10f19116100ad57806395d89b411161007157806395d89b4114610288578063a217fddf14610290578063a9059cbb14610298578063d547741f146102ab578063dd62ed3e146102be57600080fd5b806340c10f191461021357806342966c681461022657806370a082311461023957806379cc67901461026257806391d148541461027557600080fd5b8063248a9ca3116100f4578063248a9ca3146101a657806324d7806c146101c95780632f2ff15d146101dc578063313ce567146101f157806336568abe1461020057600080fd5b806301ffc9a71461013157806306fdde0314610159578063095ea7b31461016e57806318160ddd1461018157806323b872dd14610193575b600080fd5b61014461013f3660046109ab565b6102f7565b60405190151581526020015b60405180910390f35b61016161032e565b60405161015091906109dc565b61014461017c366004610a47565b6103c0565b6002545b604051908152602001610150565b6101446101a1366004610a71565b6103d8565b6101856101b4366004610aad565b60009081526005602052604090206001015490565b6101446101d7366004610ac6565b6103fc565b6101ef6101ea366004610ae1565b610408565b005b60405160128152602001610150565b6101ef61020e366004610ae1565b610433565b6101ef610221366004610a47565b61046b565b6101ef610234366004610aad565b610480565b610185610247366004610ac6565b6001600160a01b0316600090815260208190526040902054
90565b6101ef610270366004610a47565b61048d565b610144610283366004610ae1565b6104a2565b6101616104cd565b610185600081565b6101446102a6366004610a47565b6104dc565b6101ef6102b9366004610ae1565b6104ea565b6101856102cc366004610b0d565b6001600160a01b03918216600090815260016020908152604080832093909416825291909152205490565b60006001600160e01b03198216637965db0b60e01b148061032857506301ffc9a760e01b6001600160e01b03198316145b92915050565b60606003805461033d90610b37565b80601f016020809104026020016040519081016040528092919081815260200182805461036990610b37565b80156103b65780601f1061038b576101008083540402835291602001916103b6565b820191906000526020600020905b81548152906001019060200180831161039957829003601f168201915b5050505050905090565b6000336103ce81858561050f565b5060019392505050565b6000336103e685828561051c565b6103f1858585610599565b506001949350505050565b600061032881836104a2565b600082815260056020526040902060010154610423816105f8565b61042d8383610602565b50505050565b6001600160a01b038116331461045c5760405163334bd91960e11b815260040160405180910390fd5b6104668282610696565b505050565b6000610476816105f8565b6104668383610703565b61048a338261073d565b50565b6000610498816105f8565b610466838361073d565b60009182526005602090815260408084206001600160a01b0393909316845291905290205460ff1690565b60606004805461033d90610b37565b6000336103ce818585610599565b600082815260056020526040902060010154610505816105f8565b61042d8383610696565b6104668383836001610773565b6001600160a01b03838116600090815260016020908152604080832093861683529290522054600019811461042d578181101561058a57604051637dc7a0d960e11b81526001600160a01b038416600482015260248101829052604481018390526064015b60405180910390fd5b61042d84848484036000610773565b6001600160a01b0383166105c357604051634b637e8f60e11b815260006004820152602401610581565b6001600160a01b0382166105ed5760405163ec442f0560e01b815260006004820152602401610581565b610466838383610848565b61048a8133610972565b600061060e83836104a2565b61068e5760008381526005602090815260408083206001600160a01b03861684529091529020805460ff191660011790556106463390565b6001600160a01b0316826001600160a01b0316847f2f8788117e7eff1d82e926ec794901d17c78024a50270940304540a733656f0d60405160405180910390a4506001610328565b506000610328565b60006106a283836104a2565b1561068e5760008381526005602090815260408083206001600160a01b0386168085529252808320805460ff1916905551339286917ff6391f5c32d9c69d2a47ea670b442974b53935d1edc7fd64eb21e047a839171b9190a4506001610328565b6001600160a01b03821661072d5760405163ec442f0560e01b815260006004820152602401610581565b61073960008383610848565b5050565b6001600160a01b03821661076757604051634b637e8f60e11b815260006004820152602401610581565b61073982600083610848565b6001600160a01b03841661079d5760405163e602df0560e01b815260006004820152602401610581565b6001600160a01b0383166107c757604051634a1406b160e11b815260006004820152602401610581565b6001600160a01b038085166000908152600160209081526040808320938716835292905220829055801561042d57826001600160a01b0316846001600160a01b03167f8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e5b200ac8c7c3b9258460405161083a91815260200190565b60405180910390a350505050565b6001600160a01b0383166108735780600260008282546108689190610b71565b909155506108e59050565b6001600160a01b038316600090815260208190526040902054818110156108c65760405163391434e360e21b81526001600160a01b03851660048201526024810182905260448101839052606401610581565b6001600160a01b03841660009081526020819052604090209082900390555b6001600160a01b03821661090157600280548290039055610920565b6001600160a01b03821660009081526020819052604090208054820190555b816001600160a01b0316836001600160a01b03167fddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4
df523b3ef8360405161096591815260200190565b60405180910390a3505050565b61097c82826104a2565b6107395760405163e2517d3f60e01b81526001600160a01b038216600482015260248101839052604401610581565b6000602082840312156109bd57600080fd5b81356001600160e01b0319811681146109d557600080fd5b9392505050565b60006020808352835180602085015260005b81811015610a0a578581018301518582016040015282016109ee565b506000604082860101526040601f19601f8301168501019250505092915050565b80356001600160a01b0381168114610a4257600080fd5b919050565b60008060408385031215610a5a57600080fd5b610a6383610a2b565b946020939093013593505050565b600080600060608486031215610a8657600080fd5b610a8f84610a2b565b9250610a9d60208501610a2b565b9150604084013590509250925092565b600060208284031215610abf57600080fd5b5035919050565b600060208284031215610ad857600080fd5b6109d582610a2b565b60008060408385031215610af457600080fd5b82359150610b0460208401610a2b565b90509250929050565b60008060408385031215610b2057600080fd5b610b2983610a2b565b9150610b0460208401610a2b565b600181811c90821680610b4b57607f821691505b602082108103610b6b57634e487b7160e01b600052602260045260246000fd5b50919050565b8082018082111561032857634e487b7160e01b600052601160045260246000fdfea2646970667358221220e179fc58c926e64cb6e87416f8ca64c117044e3195b184afe45038857606c15364736f6c63430008160033" diff --git a/evm-tests/src/contracts/incremental.sol b/evm-tests/src/contracts/incremental.sol new file mode 100644 index 0000000000..2b3bc2fd49 --- /dev/null +++ b/evm-tests/src/contracts/incremental.sol @@ -0,0 +1,22 @@ +// SPDX-License-Identifier: GPL-3.0 +pragma solidity >=0.8.2 <0.9.0; + +contract Storage { + uint256 number; + + /** + * @dev Store value in variable + * @param num value to store + */ + function store(uint256 num) public { + number = num; + } + + /** + * @dev Return value + * @return value of 'number' + */ + function retrieve() public view returns (uint256) { + return number; + } +} diff --git a/evm-tests/src/contracts/incremental.ts b/evm-tests/src/contracts/incremental.ts new file mode 100644 index 0000000000..b19909e491 --- /dev/null +++ b/evm-tests/src/contracts/incremental.ts @@ -0,0 +1,39 @@ +export const INCREMENTAL_CONTRACT_ABI = [ + { + "inputs": [], + "name": "retrieve", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "num", + "type": "uint256" + } + ], + "name": "store", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + } +]; + +/* +"compiler": { + "version": "0.8.26+commit.8a97fa7a" + }, +*/ + +export const INCREMENTAL_CONTRACT_BYTECODE = "6080604052348015600e575f80fd5b506101438061001c5f395ff3fe608060405234801561000f575f80fd5b5060043610610034575f3560e01c80632e64cec1146100385780636057361d14610056575b5f80fd5b610040610072565b60405161004d919061009b565b60405180910390f35b610070600480360381019061006b91906100e2565b61007a565b005b5f8054905090565b805f8190555050565b5f819050919050565b61009581610083565b82525050565b5f6020820190506100ae5f83018461008c565b92915050565b5f80fd5b6100c181610083565b81146100cb575f80fd5b50565b5f813590506100dc816100b8565b92915050565b5f602082840312156100f7576100f66100b4565b5b5f610104848285016100ce565b9150509291505056fea26469706673582212209a0dd35336aff1eb3eeb11db76aa60a1427a12c1b92f945ea8c8d1dfa337cf2264736f6c634300081a0033" + + + diff --git a/evm-tests/src/contracts/metagraph.ts b/evm-tests/src/contracts/metagraph.ts new file mode 100644 index 0000000000..d0c3bf5154 --- /dev/null +++ b/evm-tests/src/contracts/metagraph.ts @@ -0,0 
+1,391 @@ +export const IMETAGRAPH_ADDRESS = "0x0000000000000000000000000000000000000802"; + +export const IMetagraphABI = [ + { + inputs: [ + { + internalType: "uint16", + name: "netuid", + type: "uint16", + }, + { + internalType: "uint16", + name: "uid", + type: "uint16", + }, + ], + name: "getAxon", + outputs: [ + { + components: [ + { + internalType: "uint64", + name: "block", + type: "uint64", + }, + { + internalType: "uint32", + name: "version", + type: "uint32", + }, + { + internalType: "uint128", + name: "ip", + type: "uint128", + }, + { + internalType: "uint16", + name: "port", + type: "uint16", + }, + { + internalType: "uint8", + name: "ip_type", + type: "uint8", + }, + { + internalType: "uint8", + name: "protocol", + type: "uint8", + }, + ], + internalType: "struct AxonInfo", + name: "", + type: "tuple", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [ + { + internalType: "uint16", + name: "netuid", + type: "uint16", + }, + { + internalType: "uint16", + name: "uid", + type: "uint16", + }, + ], + name: "getColdkey", + outputs: [ + { + internalType: "bytes32", + name: "", + type: "bytes32", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [ + { + internalType: "uint16", + name: "netuid", + type: "uint16", + }, + { + internalType: "uint16", + name: "uid", + type: "uint16", + }, + ], + name: "getConsensus", + outputs: [ + { + internalType: "uint16", + name: "", + type: "uint16", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [ + { + internalType: "uint16", + name: "netuid", + type: "uint16", + }, + { + internalType: "uint16", + name: "uid", + type: "uint16", + }, + ], + name: "getDividends", + outputs: [ + { + internalType: "uint16", + name: "", + type: "uint16", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [ + { + internalType: "uint16", + name: "netuid", + type: "uint16", + }, + { + internalType: "uint16", + name: "uid", + type: "uint16", + }, + ], + name: "getEmission", + outputs: [ + { + internalType: "uint64", + name: "", + type: "uint64", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [ + { + internalType: "uint16", + name: "netuid", + type: "uint16", + }, + { + internalType: "uint16", + name: "uid", + type: "uint16", + }, + ], + name: "getHotkey", + outputs: [ + { + internalType: "bytes32", + name: "", + type: "bytes32", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [ + { + internalType: "uint16", + name: "netuid", + type: "uint16", + }, + { + internalType: "uint16", + name: "uid", + type: "uint16", + }, + ], + name: "getIncentive", + outputs: [ + { + internalType: "uint16", + name: "", + type: "uint16", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [ + { + internalType: "uint16", + name: "netuid", + type: "uint16", + }, + { + internalType: "uint16", + name: "uid", + type: "uint16", + }, + ], + name: "getIsActive", + outputs: [ + { + internalType: "bool", + name: "", + type: "bool", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [ + { + internalType: "uint16", + name: "netuid", + type: "uint16", + }, + { + internalType: "uint16", + name: "uid", + type: "uint16", + }, + ], + name: "getLastUpdate", + outputs: [ + { + internalType: "uint64", + name: "", + type: "uint64", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [ + { + internalType: "uint16", + name: "netuid", + type: "uint16", + }, + { + internalType: "uint16", + 
name: "uid", + type: "uint16", + }, + ], + name: "getRank", + outputs: [ + { + internalType: "uint16", + name: "", + type: "uint16", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [ + { + internalType: "uint16", + name: "netuid", + type: "uint16", + }, + { + internalType: "uint16", + name: "uid", + type: "uint16", + }, + ], + name: "getStake", + outputs: [ + { + internalType: "uint64", + name: "", + type: "uint64", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [ + { + internalType: "uint16", + name: "netuid", + type: "uint16", + }, + { + internalType: "uint16", + name: "uid", + type: "uint16", + }, + ], + name: "getTrust", + outputs: [ + { + internalType: "uint16", + name: "", + type: "uint16", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [ + { + internalType: "uint16", + name: "netuid", + type: "uint16", + }, + ], + name: "getUidCount", + outputs: [ + { + internalType: "uint16", + name: "", + type: "uint16", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [ + { + internalType: "uint16", + name: "netuid", + type: "uint16", + }, + { + internalType: "uint16", + name: "uid", + type: "uint16", + }, + ], + name: "getValidatorStatus", + outputs: [ + { + internalType: "bool", + name: "", + type: "bool", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [ + { + internalType: "uint16", + name: "netuid", + type: "uint16", + }, + { + internalType: "uint16", + name: "uid", + type: "uint16", + }, + ], + name: "getVtrust", + outputs: [ + { + internalType: "uint16", + name: "", + type: "uint16", + }, + ], + stateMutability: "view", + type: "function", + }, +]; \ No newline at end of file diff --git a/evm-tests/src/contracts/neuron.ts b/evm-tests/src/contracts/neuron.ts new file mode 100644 index 0000000000..4a8fb47e4c --- /dev/null +++ b/evm-tests/src/contracts/neuron.ts @@ -0,0 +1,235 @@ +export const INEURON_ADDRESS = "0x0000000000000000000000000000000000000804"; + +export const INeuronABI = [ + { + inputs: [ + { + internalType: "uint16", + name: "netuid", + type: "uint16", + }, + { + internalType: "bytes32", + name: "commitHash", + type: "bytes32", + }, + ], + name: "commitWeights", + outputs: [], + stateMutability: "payable", + type: "function", + }, + { + inputs: [ + { + internalType: "uint16", + name: "netuid", + type: "uint16", + }, + { + internalType: "uint16[]", + name: "uids", + type: "uint16[]", + }, + { + internalType: "uint16[]", + name: "values", + type: "uint16[]", + }, + { + internalType: "uint16[]", + name: "salt", + type: "uint16[]", + }, + { + internalType: "uint64", + name: "versionKey", + type: "uint64", + }, + ], + name: "revealWeights", + outputs: [], + stateMutability: "payable", + type: "function", + }, + { + inputs: [ + { + internalType: "uint16", + name: "netuid", + type: "uint16", + }, + { + internalType: "uint16[]", + name: "dests", + type: "uint16[]", + }, + { + internalType: "uint16[]", + name: "weights", + type: "uint16[]", + }, + { + internalType: "uint64", + name: "versionKey", + type: "uint64", + }, + ], + name: "setWeights", + outputs: [], + stateMutability: "payable", + type: "function", + }, + { + inputs: [ + { + internalType: "uint16", + name: "netuid", + type: "uint16", + }, + { + internalType: "uint32", + name: "version", + type: "uint32", + }, + { + internalType: "uint128", + name: "ip", + type: "uint128", + }, + { + internalType: "uint16", + name: "port", + type: "uint16", + }, + { + internalType: "uint8", + name: 
"ipType", + type: "uint8", + }, + { + internalType: "uint8", + name: "protocol", + type: "uint8", + }, + { + internalType: "uint8", + name: "placeholder1", + type: "uint8", + }, + { + internalType: "uint8", + name: "placeholder2", + type: "uint8", + }, + ], + name: "serveAxon", + outputs: [], + stateMutability: "payable", + type: "function", + }, + { + inputs: [ + { + internalType: "uint16", + name: "netuid", + type: "uint16", + }, + { + internalType: "uint32", + name: "version", + type: "uint32", + }, + { + internalType: "uint128", + name: "ip", + type: "uint128", + }, + { + internalType: "uint16", + name: "port", + type: "uint16", + }, + { + internalType: "uint8", + name: "ipType", + type: "uint8", + }, + { + internalType: "uint8", + name: "protocol", + type: "uint8", + }, + { + internalType: "uint8", + name: "placeholder1", + type: "uint8", + }, + { + internalType: "uint8", + name: "placeholder2", + type: "uint8", + }, + { + internalType: "bytes", + name: "certificate", + type: "bytes", + }, + ], + name: "serveAxonTls", + outputs: [], + stateMutability: "payable", + type: "function", + }, + { + inputs: [ + { + internalType: "uint16", + name: "netuid", + type: "uint16", + }, + { + internalType: "uint32", + name: "version", + type: "uint32", + }, + { + internalType: "uint128", + name: "ip", + type: "uint128", + }, + { + internalType: "uint16", + name: "port", + type: "uint16", + }, + { + internalType: "uint8", + name: "ipType", + type: "uint8", + }, + ], + name: "servePrometheus", + outputs: [], + stateMutability: "payable", + type: "function", + }, + { + inputs: [ + { + internalType: "uint16", + name: "netuid", + type: "uint16", + }, + { + internalType: "bytes32", + name: "hotkey", + type: "bytes32", + }, + ], + name: "burnedRegister", + outputs: [], + stateMutability: "payable", + type: "function", + }, +]; \ No newline at end of file diff --git a/evm-tests/src/contracts/staking.ts b/evm-tests/src/contracts/staking.ts new file mode 100644 index 0000000000..9a30d307ba --- /dev/null +++ b/evm-tests/src/contracts/staking.ts @@ -0,0 +1,243 @@ +export const ISTAKING_ADDRESS = "0x0000000000000000000000000000000000000801"; +export const ISTAKING_V2_ADDRESS = "0x0000000000000000000000000000000000000805"; + +export const IStakingABI = [ + { + inputs: [ + { + internalType: "bytes32", + name: "delegate", + type: "bytes32", + }, + ], + name: "addProxy", + outputs: [], + stateMutability: "nonpayable", + type: "function", + }, + { + inputs: [ + { + internalType: "bytes32", + name: "hotkey", + type: "bytes32", + }, + { + internalType: "uint256", + name: "netuid", + type: "uint256", + }, + ], + name: "addStake", + outputs: [], + stateMutability: "payable", + type: "function", + }, + { + inputs: [ + { + internalType: "bytes32", + name: "delegate", + type: "bytes32", + }, + ], + name: "removeProxy", + outputs: [], + stateMutability: "nonpayable", + type: "function", + }, + { + inputs: [ + { + internalType: "bytes32", + name: "hotkey", + type: "bytes32", + }, + { + internalType: "bytes32", + name: "coldkey", + type: "bytes32", + }, + { + internalType: "uint256", + name: "netuid", + type: "uint256", + }, + ], + name: "getStake", + outputs: [ + { + internalType: "uint256", + name: "", + type: "uint256", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [ + { + internalType: "bytes32", + name: "hotkey", + type: "bytes32", + }, + { + internalType: "uint256", + name: "amount", + type: "uint256", + }, + { + internalType: "uint256", + name: "netuid", + type: "uint256", + }, + ], + name: 
"removeStake", + outputs: [], + stateMutability: "nonpayable", + type: "function", + }, +]; + +export const IStakingV2ABI = [ + { + "inputs": [ + { + "internalType": "bytes32", + "name": "delegate", + "type": "bytes32" + } + ], + "name": "addProxy", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "bytes32", + "name": "hotkey", + "type": "bytes32" + }, + { + "internalType": "uint256", + "name": "amount", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "netuid", + "type": "uint256" + } + ], + "name": "addStake", + "outputs": [], + "stateMutability": "payable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "bytes32", + "name": "hotkey", + "type": "bytes32" + }, + { + "internalType": "bytes32", + "name": "coldkey", + "type": "bytes32" + }, + { + "internalType": "uint256", + "name": "netuid", + "type": "uint256" + } + ], + "name": "getStake", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "bytes32", + "name": "coldkey", + "type": "bytes32" + } + ], + "name": "getTotalColdkeyStake", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "bytes32", + "name": "hotkey", + "type": "bytes32" + } + ], + "name": "getTotalHotkeyStake", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "bytes32", + "name": "delegate", + "type": "bytes32" + } + ], + "name": "removeProxy", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "bytes32", + "name": "hotkey", + "type": "bytes32" + }, + { + "internalType": "uint256", + "name": "amount", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "netuid", + "type": "uint256" + } + ], + "name": "removeStake", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + } +]; \ No newline at end of file diff --git a/evm-tests/src/contracts/subnet.ts b/evm-tests/src/contracts/subnet.ts new file mode 100644 index 0000000000..9b6fe00596 --- /dev/null +++ b/evm-tests/src/contracts/subnet.ts @@ -0,0 +1,889 @@ +export const ISUBNET_ADDRESS = "0x0000000000000000000000000000000000000803"; + +export const ISubnetABI = [ + { + inputs: [ + { + internalType: "uint16", + name: "netuid", + type: "uint16", + }, + ], + name: "getAdjustmentAlpha", + outputs: [ + { + internalType: "uint64", + name: "", + type: "uint64", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [ + { + internalType: "uint16", + name: "netuid", + type: "uint16", + }, + ], + name: "getAlphaValues", + outputs: [ + { + internalType: "uint16", + name: "", + type: "uint16", + }, + { + internalType: "uint16", + name: "", + type: "uint16", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [ + { + internalType: "uint16", + name: "netuid", + type: "uint16", + }, + ], + name: "getBondsMovingAverage", + outputs: [ + { + internalType: "uint64", + name: "", + type: "uint64", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [ + { + internalType: "uint16", + name: "netuid", + type: "uint16", + }, + ], + name: "getCommitRevealWeightsEnabled", + outputs: 
[ + { + internalType: "bool", + name: "", + type: "bool", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [ + { + internalType: "uint16", + name: "netuid", + type: "uint16", + }, + ], + name: "getDifficulty", + outputs: [ + { + internalType: "uint64", + name: "", + type: "uint64", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [ + { + internalType: "uint16", + name: "", + type: "uint16", + }, + ], + name: "getImmunityPeriod", + outputs: [ + { + internalType: "uint16", + name: "", + type: "uint16", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [ + { + internalType: "uint16", + name: "", + type: "uint16", + }, + ], + name: "getKappa", + outputs: [ + { + internalType: "uint16", + name: "", + type: "uint16", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [ + { + internalType: "uint16", + name: "netuid", + type: "uint16", + }, + ], + name: "getMaxBurn", + outputs: [ + { + internalType: "uint64", + name: "", + type: "uint64", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [ + { + internalType: "uint16", + name: "netuid", + type: "uint16", + }, + ], + name: "getMaxDifficulty", + outputs: [ + { + internalType: "uint64", + name: "", + type: "uint64", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [ + { + internalType: "uint16", + name: "netuid", + type: "uint16", + }, + ], + name: "getMaxWeightLimit", + outputs: [ + { + internalType: "uint16", + name: "", + type: "uint16", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [ + { + internalType: "uint16", + name: "netuid", + type: "uint16", + }, + ], + name: "getMinAllowedWeights", + outputs: [ + { + internalType: "uint16", + name: "", + type: "uint16", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [ + { + internalType: "uint16", + name: "netuid", + type: "uint16", + }, + ], + name: "getMinBurn", + outputs: [ + { + internalType: "uint64", + name: "", + type: "uint64", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [ + { + internalType: "uint16", + name: "netuid", + type: "uint16", + }, + ], + name: "getMinDifficulty", + outputs: [ + { + internalType: "uint64", + name: "", + type: "uint64", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [ + { + internalType: "uint16", + name: "netuid", + type: "uint16", + }, + ], + name: "getNetworkRegistrationAllowed", + outputs: [ + { + internalType: "bool", + name: "", + type: "bool", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [ + { + internalType: "uint16", + name: "", + type: "uint16", + }, + ], + name: "getRho", + outputs: [ + { + internalType: "uint16", + name: "", + type: "uint16", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [ + { + internalType: "uint16", + name: "netuid", + type: "uint16", + }, + ], + name: "getServingRateLimit", + outputs: [ + { + internalType: "uint64", + name: "", + type: "uint64", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [ + { + internalType: "uint16", + name: "netuid", + type: "uint16", + }, + ], + name: "getWeightsSetRateLimit", + outputs: [ + { + internalType: "uint64", + name: "", + type: "uint64", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [ + { + internalType: "uint16", + name: "netuid", + type: "uint16", + }, + ], + name: "getWeightsVersionKey", + outputs: [ + { + 
internalType: "uint64", + name: "", + type: "uint64", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [ + { + internalType: "uint16", + name: "netuid", + type: "uint16", + }, + { + internalType: "uint16", + name: "activityCutoff", + type: "uint16", + }, + ], + name: "setActivityCutoff", + outputs: [], + stateMutability: "payable", + type: "function", + }, + { + inputs: [ + { + internalType: "uint16", + name: "netuid", + type: "uint16", + }, + ], + name: "getActivityCutoff", + outputs: [ + { + internalType: "uint16", + name: "", + type: "uint16", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [ + { + internalType: "uint16", + name: "netuid", + type: "uint16", + }, + { + internalType: "uint64", + name: "adjustmentAlpha", + type: "uint64", + }, + ], + name: "setAdjustmentAlpha", + outputs: [], + stateMutability: "payable", + type: "function", + }, + { + inputs: [ + { + internalType: "uint16", + name: "netuid", + type: "uint16", + }, + { + internalType: "uint16", + name: "alphaLow", + type: "uint16", + }, + { + internalType: "uint16", + name: "alphaHigh", + type: "uint16", + }, + ], + name: "setAlphaValues", + outputs: [], + stateMutability: "payable", + type: "function", + }, + { + inputs: [ + { + internalType: "uint16", + name: "netuid", + type: "uint16", + }, + { + internalType: "uint64", + name: "bondsMovingAverage", + type: "uint64", + }, + ], + name: "setBondsMovingAverage", + outputs: [], + stateMutability: "payable", + type: "function", + }, + { + inputs: [ + { + internalType: "uint16", + name: "netuid", + type: "uint16", + }, + { + internalType: "bool", + name: "commitRevealWeightsEnabled", + type: "bool", + }, + ], + name: "setCommitRevealWeightsEnabled", + outputs: [], + stateMutability: "payable", + type: "function", + }, + { + inputs: [ + { + internalType: "uint16", + name: "netuid", + type: "uint16", + }, + ], + name: "getCommitRevealWeightsInterval", + outputs: [ + { + internalType: "uint64", + name: "", + type: "uint64", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [ + { + internalType: "uint16", + name: "netuid", + type: "uint16", + }, + { + internalType: "uint64", + name: "commitRevealWeightsInterval", + type: "uint64", + }, + ], + name: "setCommitRevealWeightsInterval", + outputs: [], + stateMutability: "payable", + type: "function", + }, + { + inputs: [ + { + internalType: "uint16", + name: "netuid", + type: "uint16", + }, + { + internalType: "uint64", + name: "difficulty", + type: "uint64", + }, + ], + name: "setDifficulty", + outputs: [], + stateMutability: "payable", + type: "function", + }, + { + inputs: [ + { + internalType: "uint16", + name: "netuid", + type: "uint16", + }, + { + internalType: "uint16", + name: "immunityPeriod", + type: "uint16", + }, + ], + name: "setImmunityPeriod", + outputs: [], + stateMutability: "payable", + type: "function", + }, + { + inputs: [ + { + internalType: "uint16", + name: "netuid", + type: "uint16", + }, + { + internalType: "uint16", + name: "kappa", + type: "uint16", + }, + ], + name: "setKappa", + outputs: [], + stateMutability: "payable", + type: "function", + }, + { + inputs: [ + { + internalType: "uint16", + name: "netuid", + type: "uint16", + }, + ], + name: "getLiquidAlphaEnabled", + outputs: [ + { + internalType: "bool", + name: "", + type: "bool", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [ + { + internalType: "uint16", + name: "netuid", + type: "uint16", + }, + { + internalType: "bool", + name: 
"liquidAlphaEnabled", + type: "bool", + }, + ], + name: "setLiquidAlphaEnabled", + outputs: [], + stateMutability: "payable", + type: "function", + }, + { + inputs: [ + { + internalType: "uint16", + name: "netuid", + type: "uint16", + }, + { + internalType: "uint64", + name: "maxBurn", + type: "uint64", + }, + ], + name: "setMaxBurn", + outputs: [], + stateMutability: "payable", + type: "function", + }, + { + inputs: [ + { + internalType: "uint16", + name: "netuid", + type: "uint16", + }, + { + internalType: "uint64", + name: "maxDifficulty", + type: "uint64", + }, + ], + name: "setMaxDifficulty", + outputs: [], + stateMutability: "payable", + type: "function", + }, + { + inputs: [ + { + internalType: "uint16", + name: "netuid", + type: "uint16", + }, + { + internalType: "uint16", + name: "maxWeightLimit", + type: "uint16", + }, + ], + name: "setMaxWeightLimit", + outputs: [], + stateMutability: "payable", + type: "function", + }, + { + inputs: [ + { + internalType: "uint16", + name: "netuid", + type: "uint16", + }, + { + internalType: "uint16", + name: "minAllowedWeights", + type: "uint16", + }, + ], + name: "setMinAllowedWeights", + outputs: [], + stateMutability: "payable", + type: "function", + }, + { + inputs: [ + { + internalType: "uint16", + name: "netuid", + type: "uint16", + }, + { + internalType: "uint64", + name: "minBurn", + type: "uint64", + }, + ], + name: "setMinBurn", + outputs: [], + stateMutability: "payable", + type: "function", + }, + { + inputs: [ + { + internalType: "uint16", + name: "netuid", + type: "uint16", + }, + { + internalType: "uint64", + name: "minDifficulty", + type: "uint64", + }, + ], + name: "setMinDifficulty", + outputs: [], + stateMutability: "payable", + type: "function", + }, + { + inputs: [ + { + internalType: "uint16", + name: "netuid", + type: "uint16", + }, + ], + name: "getNetworkPowRegistrationAllowed", + outputs: [ + { + internalType: "bool", + name: "", + type: "bool", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [ + { + internalType: "uint16", + name: "netuid", + type: "uint16", + }, + { + internalType: "bool", + name: "networkPowRegistrationAllowed", + type: "bool", + }, + ], + name: "setNetworkPowRegistrationAllowed", + outputs: [], + stateMutability: "payable", + type: "function", + }, + { + inputs: [ + { + internalType: "uint16", + name: "netuid", + type: "uint16", + }, + { + internalType: "bool", + name: "networkRegistrationAllowed", + type: "bool", + }, + ], + name: "setNetworkRegistrationAllowed", + outputs: [], + stateMutability: "payable", + type: "function", + }, + { + inputs: [ + { + internalType: "uint16", + name: "netuid", + type: "uint16", + }, + { + internalType: "uint16", + name: "rho", + type: "uint16", + }, + ], + name: "setRho", + outputs: [], + stateMutability: "payable", + type: "function", + }, + { + inputs: [ + { + internalType: "uint16", + name: "netuid", + type: "uint16", + }, + { + internalType: "uint64", + name: "servingRateLimit", + type: "uint64", + }, + ], + name: "setServingRateLimit", + outputs: [], + stateMutability: "payable", + type: "function", + }, + { + inputs: [ + { + internalType: "uint16", + name: "netuid", + type: "uint16", + }, + { + internalType: "uint64", + name: "weightsSetRateLimit", + type: "uint64", + }, + ], + name: "setWeightsSetRateLimit", + outputs: [], + stateMutability: "payable", + type: "function", + }, + { + inputs: [ + { + internalType: "uint16", + name: "netuid", + type: "uint16", + }, + { + internalType: "uint64", + name: "weightsVersionKey", + type: 
"uint64", + }, + ], + name: "setWeightsVersionKey", + outputs: [], + stateMutability: "payable", + type: "function", + }, + { + inputs: [ + { + internalType: "bytes32", + name: "hotkey", + type: "bytes32", + }, + ], + name: "registerNetwork", + outputs: [], + stateMutability: "payable", + type: "function", + }, + { + inputs: [ + { + internalType: "bytes32", + name: "hotkey", + type: "bytes32" + }, + { + internalType: "string", + name: "subnetName", + type: "string" + }, + { + internalType: "string", + name: "githubRepo", + type: "string" + }, + { + internalType: "string", + name: "subnetContact", + type: "string" + }, + { + internalType: "string", + name: "subnetUrl", + type: "string" + }, + { + internalType: "string", + name: "discord", + type: "string" + }, + { + internalType: "string", + name: "description", + type: "string" + }, + { + internalType: "string", + name: "additional", + type: "string" + } + ], + name: "registerNetwork", + outputs: [], + stateMutability: "payable", + type: "function" + }, +]; \ No newline at end of file diff --git a/evm-tests/src/contracts/withdraw.sol b/evm-tests/src/contracts/withdraw.sol new file mode 100644 index 0000000000..3945661e09 --- /dev/null +++ b/evm-tests/src/contracts/withdraw.sol @@ -0,0 +1,13 @@ +// SPDX-License-Identifier: GPL-3.0 + +pragma solidity >=0.7.0 <0.9.0; + +contract Withdraw { + constructor() {} + + function withdraw(uint256 value) public payable { + payable(msg.sender).transfer(value); + } + + receive() external payable {} +} diff --git a/evm-tests/src/contracts/withdraw.ts b/evm-tests/src/contracts/withdraw.ts new file mode 100644 index 0000000000..46fe66bf24 --- /dev/null +++ b/evm-tests/src/contracts/withdraw.ts @@ -0,0 +1,31 @@ +export const WITHDRAW_CONTRACT_ABI = [ + { + "inputs": [], + "stateMutability": "nonpayable", + "type": "constructor" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "value", + "type": "uint256" + } + ], + "name": "withdraw", + "outputs": [], + "stateMutability": "payable", + "type": "function" + }, + { + "stateMutability": "payable", + "type": "receive" + } +]; + +// "compiler": { +// "version": "0.8.26+commit.8a97fa7a" +// }, + +export const WITHDRAW_CONTRACT_BYTECODE = "6080604052348015600e575f80fd5b506101148061001c5f395ff3fe608060405260043610601e575f3560e01c80632e1a7d4d146028576024565b36602457005b5f80fd5b603e6004803603810190603a919060b8565b6040565b005b3373ffffffffffffffffffffffffffffffffffffffff166108fc8290811502906040515f60405180830381858888f193505050501580156082573d5f803e3d5ffd5b5050565b5f80fd5b5f819050919050565b609a81608a565b811460a3575f80fd5b50565b5f8135905060b2816093565b92915050565b5f6020828403121560ca5760c96086565b5b5f60d58482850160a6565b9150509291505056fea2646970667358221220f43400858bfe4fcc0bf3c1e2e06d3a9e6ced86454a00bd7e4866b3d4d64e46bb64736f6c634300081a0033" + diff --git a/evm-tests/src/eth.ts b/evm-tests/src/eth.ts new file mode 100644 index 0000000000..ea3ebb9976 --- /dev/null +++ b/evm-tests/src/eth.ts @@ -0,0 +1,17 @@ + +import { ethers, Provider, TransactionRequest, Wallet } from "ethers"; +export async function estimateTransactionCost(provider: Provider, tx: TransactionRequest) { + const feeData = await provider.getFeeData(); + const estimatedGas = BigInt(await provider.estimateGas(tx)); + const gasPrice = feeData.gasPrice || feeData.maxFeePerGas; + if (gasPrice === null) + return estimatedGas + else + return estimatedGas * BigInt(gasPrice); +} + +export function getContract(contractAddress: string, abi: {}[], wallet: Wallet) { + const contract = new 
ethers.Contract(contractAddress, abi, wallet); + return contract + +} \ No newline at end of file diff --git a/evm-tests/src/substrate.ts b/evm-tests/src/substrate.ts new file mode 100644 index 0000000000..ddfdfb626d --- /dev/null +++ b/evm-tests/src/substrate.ts @@ -0,0 +1,274 @@ +import * as assert from "assert"; +import { devnet, MultiAddress } from '@polkadot-api/descriptors'; +import { createClient, TypedApi, Transaction, PolkadotSigner, Binary } from 'polkadot-api'; +import { getWsProvider } from 'polkadot-api/ws-provider/web'; +import { sr25519CreateDerive } from "@polkadot-labs/hdkd" +import { convertPublicKeyToSs58 } from "../src/address-utils" +import { DEV_PHRASE, entropyToMiniSecret, mnemonicToEntropy, KeyPair } from "@polkadot-labs/hdkd-helpers" +import { getPolkadotSigner } from "polkadot-api/signer" +import { randomBytes } from 'crypto'; +import { Keyring } from '@polkadot/keyring'; +import { SS58_PREFIX, TX_TIMEOUT } from "./config"; + +let api: TypedApi | undefined = undefined + +// define url string as type to extend in the future +// export type ClientUrlType = 'ws://localhost:9944' | 'wss://test.finney.opentensor.ai:443' | 'wss://dev.chain.opentensor.ai:443' | 'wss://archive.chain.opentensor.ai'; +export type ClientUrlType = 'ws://localhost:9944' + +export async function getClient(url: ClientUrlType) { + const provider = getWsProvider(url); + const client = createClient(provider); + return client +} + +export async function getDevnetApi() { + if (api === undefined) { + let client = await getClient('ws://localhost:9944') + api = client.getTypedApi(devnet) + } + return api +} + +export function getAlice() { + const entropy = mnemonicToEntropy(DEV_PHRASE) + const miniSecret = entropyToMiniSecret(entropy) + const derive = sr25519CreateDerive(miniSecret) + const hdkdKeyPair = derive("//Alice") + + return hdkdKeyPair +} + +export function getAliceSigner() { + const alice = getAlice() + const polkadotSigner = getPolkadotSigner( + alice.publicKey, + "Sr25519", + alice.sign, + ) + + return polkadotSigner +} + +export function getRandomSubstrateSigner() { + const keypair = getRandomSubstrateKeypair(); + return getSignerFromKeypair(keypair) +} + +export function getSignerFromKeypair(keypair: KeyPair) { + const polkadotSigner = getPolkadotSigner( + keypair.publicKey, + "Sr25519", + keypair.sign, + ) + return polkadotSigner +} + +export function getRandomSubstrateKeypair() { + const seed = randomBytes(32); + const miniSecret = entropyToMiniSecret(seed) + const derive = sr25519CreateDerive(miniSecret) + const hdkdKeyPair = derive("") + + return hdkdKeyPair +} + +export async function getBalance(api: TypedApi) { + const value = await api.query.Balances.Account.getValue("") + return value +} + +export async function getNonce(api: TypedApi, ss58Address: string): Promise { + const value = await api.query.System.Account.getValue(ss58Address); + return value.nonce +} + +export async function getNonceChangePromise(api: TypedApi, ss58Address: string) { + // api.query.System.Account.getValue() + const initValue = await api.query.System.Account.getValue(ss58Address); + return new Promise((resolve, reject) => { + const subscription = api.query.System.Account.watchValue(ss58Address).subscribe({ + next(value) { + if (value.nonce > initValue.nonce) { + subscription.unsubscribe(); + // Resolve the promise when the transaction is finalized + resolve(); + } + }, + + error(err: Error) { + console.error("Transaction failed:", err); + subscription.unsubscribe(); + // Reject the promise in case of an 
error + reject(err); + }, + complete() { + console.log("Subscription complete"); + } + }) + + setTimeout(() => { + subscription.unsubscribe(); + console.log('unsubscribed!'); + resolve() + }, TX_TIMEOUT); + + }) +} + +export function convertPublicKeyToMultiAddress(publicKey: Uint8Array, ss58Format: number = SS58_PREFIX): MultiAddress { + // Create a keyring instance + const keyring = new Keyring({ type: 'sr25519', ss58Format }); + + // Add the public key to the keyring + const address = keyring.encodeAddress(publicKey); + + return MultiAddress.Id(address); +} + + +export async function waitForTransactionCompletion(api: TypedApi, tx: Transaction<{}, string, string, void>, signer: PolkadotSigner,) { + const transactionPromise = await getTransactionWatchPromise(tx, signer) + return transactionPromise + + // If we can't always get the finalized event, then add nonce subscribe as other evidence for tx is finalized. + // Don't need it based on current testing. + // const ss58Address = convertPublicKeyToSs58(signer.publicKey) + // const noncePromise = await getNonceChangePromise(api, ss58Address) + + // return new Promise((resolve, reject) => { + // Promise.race([transactionPromise, noncePromise]) + // .then(resolve) + // .catch(reject); + // }) +} + +export async function getTransactionWatchPromise(tx: Transaction<{}, string, string, void>, signer: PolkadotSigner,) { + return new Promise((resolve, reject) => { + // store the txHash, then use it in timeout. easier to know which tx is not finalized in time + let txHash = "" + const subscription = tx.signSubmitAndWatch(signer).subscribe({ + next(value) { + console.log("Event:", value); + txHash = value.txHash + + // TODO investigate why finalized not for each extrinsic + if (value.type === "finalized") { + console.log("Transaction is finalized in block:", value.txHash); + subscription.unsubscribe(); + // Resolve the promise when the transaction is finalized + resolve(); + + } + }, + error(err) { + console.error("Transaction failed:", err); + subscription.unsubscribe(); + // Reject the promise in case of an error + reject(err); + + }, + complete() { + console.log("Subscription complete"); + } + }); + + setTimeout(() => { + subscription.unsubscribe(); + console.log('unsubscribed because of timeout for tx {}', txHash); + reject() + }, TX_TIMEOUT); + }); +} + +export async function waitForFinalizedBlock(api: TypedApi) { + const currentBlockNumber = await api.query.System.Number.getValue() + return new Promise((resolve, reject) => { + + const subscription = api.query.System.Number.watchValue().subscribe({ + // TODO check why the block number event just get once + next(value: number) { + console.log("Event block number is :", value); + + if (value > currentBlockNumber + 6) { + console.log("Transaction is finalized in block:", value); + subscription.unsubscribe(); + + resolve(); + + } + + }, + error(err: Error) { + console.error("Transaction failed:", err); + subscription.unsubscribe(); + // Reject the promise in case of an error + reject(err); + + }, + complete() { + console.log("Subscription complete"); + } + }); + + setTimeout(() => { + subscription.unsubscribe(); + console.log('unsubscribed!'); + resolve() + }, 2000); + }); +} + +// second solution to wait for transaction finalization. 
pass the raw data to avoid the complex transaction type definition +export async function waitForTransactionCompletion2(api: TypedApi, raw: Binary, signer: PolkadotSigner,) { + const tx = await api.txFromCallData(raw); + return new Promise((resolve, reject) => { + const subscription = tx.signSubmitAndWatch(signer).subscribe({ + next(value) { + console.log("Event:", value); + + if (value.type === "txBestBlocksState") { + console.log("Transaction is finalized in block:", value.txHash); + subscription.unsubscribe(); + // Resolve the promise when the transaction is finalized + resolve(); + + } + }, + error(err: Error) { + console.error("Transaction failed:", err); + subscription.unsubscribe(); + // Reject the promise in case of an error + reject(err); + + }, + complete() { + console.log("Subscription complete"); + } + }); + }); +} + +export async function waitForNonceChange(api: TypedApi, ss58Address: string) { + const initNonce = await getNonce(api, ss58Address) + while (true) { + const currentNonce = await getNonce(api, ss58Address) + if (currentNonce > initNonce) { + break + } + + await new Promise(resolve => setTimeout(resolve, 200)); + } +} + + +// other approach to convert public key to ss58 +// export function convertPublicKeyToSs58(publicKey: Uint8Array, ss58Format: number = 42): string { +// // Create a keyring instance +// const keyring = new Keyring({ type: 'sr25519', ss58Format }); + +// // Add the public key to the keyring +// const address = keyring.encodeAddress(publicKey); + +// return address +// } \ No newline at end of file diff --git a/evm-tests/src/subtensor.ts b/evm-tests/src/subtensor.ts new file mode 100644 index 0000000000..48dc5c83c7 --- /dev/null +++ b/evm-tests/src/subtensor.ts @@ -0,0 +1,345 @@ +import * as assert from "assert"; +import { devnet, MultiAddress } from '@polkadot-api/descriptors'; +import { TypedApi, TxCallData } from 'polkadot-api'; +import { KeyPair } from "@polkadot-labs/hdkd-helpers" +import { getAliceSigner, waitForTransactionCompletion, getSignerFromKeypair } from './substrate' +import { convertH160ToSS58, convertPublicKeyToSs58 } from './address-utils' +import { tao } from './balance-math' + +// create a new subnet and return netuid +export async function addNewSubnetwork(api: TypedApi, hotkey: KeyPair, coldkey: KeyPair) { + const alice = getAliceSigner() + const totalNetworks = await api.query.SubtensorModule.TotalNetworks.getValue() + + const rateLimit = await api.query.SubtensorModule.NetworkRateLimit.getValue() + if (rateLimit !== BigInt(0)) { + const internalCall = api.tx.AdminUtils.sudo_set_network_rate_limit({ rate_limit: BigInt(0) }) + const tx = api.tx.Sudo.sudo({ call: internalCall.decodedCall }) + await waitForTransactionCompletion(api, tx, alice) + .then(() => { }) + .catch((error) => { console.log(`transaction error ${error}`) }); + } + + const signer = getSignerFromKeypair(coldkey) + const registerNetworkTx = api.tx.SubtensorModule.register_network({ hotkey: convertPublicKeyToSs58(hotkey.publicKey) }) + await waitForTransactionCompletion(api, registerNetworkTx, signer) + .then(() => { }) + .catch((error) => { console.log(`transaction error ${error}`) }); + + assert.equal(totalNetworks + 1, await api.query.SubtensorModule.TotalNetworks.getValue()) + return totalNetworks +} + +// force set balance for a ss58 address +export async function forceSetBalanceToSs58Address(api: TypedApi, ss58Address: string) { + const alice = getAliceSigner() + const balance = tao(1e8) + const internalCall = api.tx.Balances.force_set_balance({ who: 
MultiAddress.Id(ss58Address), new_free: balance })
+    const tx = api.tx.Sudo.sudo({ call: internalCall.decodedCall })
+
+    await waitForTransactionCompletion(api, tx, alice)
+        .then(() => { })
+        .catch((error) => { console.log(`transaction error ${error}`) });
+
+    const balanceOnChain = (await api.query.System.Account.getValue(ss58Address)).data.free
+    // check the balance, except for the sudo account, because of the tx fee
+    if (ss58Address !== convertPublicKeyToSs58(alice.publicKey)) {
+        assert.equal(balance, balanceOnChain)
+    }
+}
+
+// set balance for an eth address
+export async function forceSetBalanceToEthAddress(api: TypedApi, ethAddress: string) {
+    const ss58Address = convertH160ToSS58(ethAddress)
+    await forceSetBalanceToSs58Address(api, ss58Address)
+}
+
+export async function setCommitRevealWeightsEnabled(api: TypedApi, netuid: number, enabled: boolean) {
+    const value = await api.query.SubtensorModule.CommitRevealWeightsEnabled.getValue(netuid)
+    if (value === enabled) {
+        return;
+    }
+
+    const alice = getAliceSigner()
+    const internalCall = api.tx.AdminUtils.sudo_set_commit_reveal_weights_enabled({ netuid: netuid, enabled: enabled })
+    const tx = api.tx.Sudo.sudo({ call: internalCall.decodedCall })
+
+    await waitForTransactionCompletion(api, tx, alice)
+        .then(() => { })
+        .catch((error) => { console.log(`transaction error ${error}`) });
+    assert.equal(enabled, await api.query.SubtensorModule.CommitRevealWeightsEnabled.getValue(netuid))
+}
+
+export async function setWeightsSetRateLimit(api: TypedApi, netuid: number, rateLimit: bigint) {
+    const value = await api.query.SubtensorModule.WeightsSetRateLimit.getValue(netuid)
+    if (value === rateLimit) {
+        return;
+    }
+
+    const alice = getAliceSigner()
+    const internalCall = api.tx.AdminUtils.sudo_set_weights_set_rate_limit({ netuid: netuid, weights_set_rate_limit: rateLimit })
+    const tx = api.tx.Sudo.sudo({ call: internalCall.decodedCall })
+
+    await waitForTransactionCompletion(api, tx, alice)
+        .then(() => { })
+        .catch((error) => { console.log(`transaction error ${error}`) });
+    assert.equal(rateLimit, await api.query.SubtensorModule.WeightsSetRateLimit.getValue(netuid))
+}
+
+// tempo is u16 in Rust, but just a number in JS, so the value should be less than u16::MAX
+export async function setTempo(api: TypedApi, netuid: number, tempo: number) {
+    const value = await api.query.SubtensorModule.Tempo.getValue(netuid)
+    console.log("init value is ", value)
+    if (value === tempo) {
+        return;
+    }
+
+    const alice = getAliceSigner()
+    const internalCall = api.tx.AdminUtils.sudo_set_tempo({ netuid: netuid, tempo: tempo })
+    const tx = api.tx.Sudo.sudo({ call: internalCall.decodedCall })
+
+    await waitForTransactionCompletion(api, tx, alice)
+        .then(() => { })
+        .catch((error) => { console.log(`transaction error ${error}`) });
+    assert.equal(tempo, await api.query.SubtensorModule.Tempo.getValue(netuid))
+}
+
+export async function setCommitRevealWeightsInterval(api: TypedApi, netuid: number, interval: bigint) {
+    const value = await api.query.SubtensorModule.RevealPeriodEpochs.getValue(netuid)
+    if (value === interval) {
+        return;
+    }
+
+    const alice = getAliceSigner()
+    const internalCall = api.tx.AdminUtils.sudo_set_commit_reveal_weights_interval({ netuid: netuid, interval: interval })
+    const tx = api.tx.Sudo.sudo({ call: internalCall.decodedCall })
+
+    await waitForTransactionCompletion(api, tx, alice)
+        .then(() => { })
+        .catch((error) => { console.log(`transaction error ${error}`) });
+    assert.equal(interval, await api.query.SubtensorModule.RevealPeriodEpochs.getValue(netuid))
+}
+
+
+export async function forceSetChainID(api: TypedApi, chainId: bigint) {
+    const value = await api.query.EVMChainId.ChainId.getValue()
+    if (value === chainId) {
+        return;
+    }
+
+    const alice = getAliceSigner()
+    const internalCall = api.tx.AdminUtils.sudo_set_evm_chain_id({ chain_id: chainId })
+    const tx = api.tx.Sudo.sudo({ call: internalCall.decodedCall })
+
+    await waitForTransactionCompletion(api, tx, alice)
+        .then(() => { })
+        .catch((error) => { console.log(`transaction error ${error}`) });
+    assert.equal(chainId, await api.query.EVMChainId.ChainId.getValue())
+}
+
+export async function disableWhiteListCheck(api: TypedApi, disabled: boolean) {
+    const value = await api.query.EVM.DisableWhitelistCheck.getValue()
+    if (value === disabled) {
+        return;
+    }
+
+    const alice = getAliceSigner()
+    const internalCall = api.tx.EVM.disable_whitelist({ disabled: disabled })
+    const tx = api.tx.Sudo.sudo({ call: internalCall.decodedCall })
+
+    await waitForTransactionCompletion(api, tx, alice)
+        .then(() => { })
+        .catch((error) => { console.log(`transaction error ${error}`) });
+    assert.equal(disabled, await api.query.EVM.DisableWhitelistCheck.getValue())
+}
+
+export async function burnedRegister(api: TypedApi, netuid: number, ss58Address: string, keypair: KeyPair) {
+    const uids = await api.query.SubtensorModule.SubnetworkN.getValue(netuid)
+    const signer = getSignerFromKeypair(keypair)
+    const tx = api.tx.SubtensorModule.burned_register({ hotkey: ss58Address, netuid: netuid })
+    await waitForTransactionCompletion(api, tx, signer)
+        .then(() => { })
+        .catch((error) => { console.log(`transaction error ${error}`) });
+    assert.equal(uids + 1, await api.query.SubtensorModule.SubnetworkN.getValue(netuid))
+}
+
+
+export async function sendProxyCall(api: TypedApi, calldata: TxCallData, ss58Address: string, keypair: KeyPair) {
+    const signer = getSignerFromKeypair(keypair)
+    const tx = api.tx.Proxy.proxy({
+        call: calldata,
+        real: MultiAddress.Id(ss58Address),
+        force_proxy_type: undefined
+    });
+    await waitForTransactionCompletion(api, tx, signer)
+        .then(() => { })
+        .catch((error) => { console.log(`transaction error ${error}`) });
+}
+
+
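// A minimal sketch of the pattern the admin setters in this file repeat: build the
// AdminUtils call, wrap it in Sudo.sudo signed by Alice, wait for finalization, then
// re-read storage to assert the new value. The helper name `sudoCall` is illustrative
// only and is not defined anywhere in this changeset; TypedApi, TxCallData, devnet,
// getAliceSigner and waitForTransactionCompletion are the imports and helpers this
// file already uses.
export async function sudoCall(api: TypedApi<typeof devnet>, calldata: TxCallData) {
    const alice = getAliceSigner()
    // wrap the inner call data in a Sudo.sudo extrinsic, signed by the sudo key (Alice)
    const tx = api.tx.Sudo.sudo({ call: calldata })
    await waitForTransactionCompletion(api, tx, alice)
        .then(() => { })
        .catch((error) => { console.log(`transaction error ${error}`) });
}

// Example usage, equivalent to the sudo portion of setTempo above:
// await sudoCall(api, api.tx.AdminUtils.sudo_set_tempo({ netuid: netuid, tempo: tempo }).decodedCall)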
+export async function setTxRateLimit(api: TypedApi, txRateLimit: bigint) { + const value = await api.query.SubtensorModule.TxRateLimit.getValue() + if (value === txRateLimit) { + return; + } + const alice = getAliceSigner() + + const internalCall = api.tx.AdminUtils.sudo_set_tx_rate_limit({ tx_rate_limit: txRateLimit }) + const tx = api.tx.Sudo.sudo({ call: internalCall.decodedCall }) + + + await waitForTransactionCompletion(api, tx, alice) + .then(() => { }) + .catch((error) => { console.log(`transaction error ${error}`) }); + assert.equal(txRateLimit, await api.query.SubtensorModule.TxRateLimit.getValue()) +} + +export async function setMaxAllowedValidators(api: TypedApi, netuid: number, maxAllowedValidators: number) { + const value = await api.query.SubtensorModule.MaxAllowedValidators.getValue(netuid) + if (value === maxAllowedValidators) { + return; + } + + const alice = getAliceSigner() + + const internalCall = api.tx.AdminUtils.sudo_set_max_allowed_validators({ + netuid: netuid, + max_allowed_validators: maxAllowedValidators + }) + const tx = api.tx.Sudo.sudo({ call: internalCall.decodedCall }) + + await waitForTransactionCompletion(api, tx, alice) + .then(() => { }) + .catch((error) => { console.log(`transaction error ${error}`) }); + assert.equal(maxAllowedValidators, await api.query.SubtensorModule.MaxAllowedValidators.getValue(netuid)) +} + +export async function setSubnetOwnerCut(api: TypedApi, subnetOwnerCut: number) { + const value = await api.query.SubtensorModule.SubnetOwnerCut.getValue() + if (value === subnetOwnerCut) { + return; + } + + const alice = getAliceSigner() + + const internalCall = api.tx.AdminUtils.sudo_set_subnet_owner_cut({ + subnet_owner_cut: subnetOwnerCut + }) + const tx = api.tx.Sudo.sudo({ call: internalCall.decodedCall }) + + await waitForTransactionCompletion(api, tx, alice) + .then(() => { }) + .catch((error) => { console.log(`transaction error ${error}`) }); + assert.equal(subnetOwnerCut, await api.query.SubtensorModule.SubnetOwnerCut.getValue()) +} + +export async function setActivityCutoff(api: TypedApi, netuid: number, activityCutoff: number) { + const value = await api.query.SubtensorModule.ActivityCutoff.getValue(netuid) + if (value === activityCutoff) { + return; + } + + const alice = getAliceSigner() + + const internalCall = api.tx.AdminUtils.sudo_set_activity_cutoff({ + netuid: netuid, + activity_cutoff: activityCutoff + }) + const tx = api.tx.Sudo.sudo({ call: internalCall.decodedCall }) + + await waitForTransactionCompletion(api, tx, alice) + .then(() => { }) + .catch((error) => { console.log(`transaction error ${error}`) }); + assert.equal(activityCutoff, await api.query.SubtensorModule.ActivityCutoff.getValue(netuid)) +} + +export async function setMaxAllowedUids(api: TypedApi, netuid: number, maxAllowedUids: number) { + const value = await api.query.SubtensorModule.MaxAllowedUids.getValue(netuid) + if (value === maxAllowedUids) { + return; + } + + const alice = getAliceSigner() + + const internalCall = api.tx.AdminUtils.sudo_set_max_allowed_uids({ + netuid: netuid, + max_allowed_uids: maxAllowedUids + }) + const tx = api.tx.Sudo.sudo({ call: internalCall.decodedCall }) + + await waitForTransactionCompletion(api, tx, alice) + .then(() => { }) + .catch((error) => { console.log(`transaction error ${error}`) }); + assert.equal(maxAllowedUids, await api.query.SubtensorModule.MaxAllowedUids.getValue(netuid)) +} + +export async function setMinDelegateTake(api: TypedApi, minDelegateTake: number) { + const value = await 
api.query.SubtensorModule.MinDelegateTake.getValue() + if (value === minDelegateTake) { + return; + } + + const alice = getAliceSigner() + + const internalCall = api.tx.AdminUtils.sudo_set_min_delegate_take({ + take: minDelegateTake + }) + const tx = api.tx.Sudo.sudo({ call: internalCall.decodedCall }) + + await waitForTransactionCompletion(api, tx, alice) + .then(() => { }) + .catch((error) => { console.log(`transaction error ${error}`) }); + assert.equal(minDelegateTake, await api.query.SubtensorModule.MinDelegateTake.getValue()) +} + +export async function becomeDelegate(api: TypedApi, ss58Address: string, keypair: KeyPair) { + const singer = getSignerFromKeypair(keypair) + + const tx = api.tx.SubtensorModule.become_delegate({ + hotkey: ss58Address + }) + await waitForTransactionCompletion(api, tx, singer) + .then(() => { }) + .catch((error) => { console.log(`transaction error ${error}`) }); +} + +export async function addStake(api: TypedApi, netuid: number, ss58Address: string, amount_staked: bigint, keypair: KeyPair) { + const singer = getSignerFromKeypair(keypair) + let tx = api.tx.SubtensorModule.add_stake({ + netuid: netuid, + hotkey: ss58Address, + amount_staked: amount_staked + }) + + await waitForTransactionCompletion(api, tx, singer) + .then(() => { }) + .catch((error) => { console.log(`transaction error ${error}`) }); + +} + +export async function setWeight(api: TypedApi, netuid: number, dests: number[], weights: number[], version_key: bigint, keypair: KeyPair) { + const singer = getSignerFromKeypair(keypair) + let tx = api.tx.SubtensorModule.set_weights({ + netuid: netuid, + dests: dests, + weights: weights, + version_key: version_key + }) + + await waitForTransactionCompletion(api, tx, singer) + .then(() => { }) + .catch((error) => { console.log(`transaction error ${error}`) }); + +} + +export async function rootRegister(api: TypedApi, ss58Address: string, keypair: KeyPair) { + const singer = getSignerFromKeypair(keypair) + let tx = api.tx.SubtensorModule.root_register({ + hotkey: ss58Address + }) + + await waitForTransactionCompletion(api, tx, singer) + .then(() => { }) + .catch((error) => { console.log(`transaction error ${error}`) }); + +} \ No newline at end of file diff --git a/evm-tests/src/utils.ts b/evm-tests/src/utils.ts new file mode 100644 index 0000000000..36e922b49e --- /dev/null +++ b/evm-tests/src/utils.ts @@ -0,0 +1,55 @@ +import { defineChain, http, publicActions, createPublicClient } from "viem" +import { privateKeyToAccount, generatePrivateKey } from 'viem/accounts' +import { ethers } from "ethers" +import { ETH_LOCAL_URL } from "./config" + +export type ClientUrlType = 'http://localhost:9944'; + +export const chain = (id: number, url: string) => defineChain({ + id: id, + name: 'bittensor', + network: 'bittensor', + nativeCurrency: { + name: 'tao', + symbol: 'TAO', + decimals: 9, + }, + rpcUrls: { + default: { + http: [url], + }, + }, + testnet: true, +}) + + +export async function getPublicClient(url: ClientUrlType) { + const wallet = createPublicClient({ + chain: chain(42, url), + transport: http(), + + }) + + return wallet.extend(publicActions) +} + +/** + * Generates a random Ethereum wallet + * @returns wallet keyring + */ +export function generateRandomEthWallet() { + let privateKey = generatePrivateKey().toString(); + privateKey = privateKey.replace('0x', ''); + + const account = privateKeyToAccount(`0x${privateKey}`) + return account +} + + +export function generateRandomEthersWallet() { + const account = ethers.Wallet.createRandom(); + const 
provider = new ethers.JsonRpcProvider(ETH_LOCAL_URL);
+
+    const wallet = new ethers.Wallet(account.privateKey, provider);
+    return wallet;
+}
\ No newline at end of file
diff --git a/evm-tests/test/ed25519.precompile.verify.test.ts b/evm-tests/test/ed25519.precompile.verify.test.ts
new file mode 100644
index 0000000000..fcd79ec9d7
--- /dev/null
+++ b/evm-tests/test/ed25519.precompile.verify.test.ts
@@ -0,0 +1,122 @@
+import { IED25519VERIFY_ADDRESS, IEd25519VerifyABI, ETH_LOCAL_URL } from '../src/config'
+import { getPublicClient } from "../src/utils";
+import { toHex, toBytes, keccak256, PublicClient } from 'viem'
+import { Keyring } from "@polkadot/keyring";
+import * as assert from "assert";
+
+describe("Verification of ed25519 signature", () => {
+    // init eth part
+    let ethClient: PublicClient;
+
+    before(async () => {
+        ethClient = await getPublicClient(ETH_LOCAL_URL);
+    });
+
+    it("Verification of ed25519 works", async () => {
+        const keyring = new Keyring({ type: "ed25519" });
+        const alice = keyring.addFromUri("//Alice");
+
+        // Use this example: https://github.com/gztensor/evm-demo/blob/main/docs/ed25519verify-precompile.md
+        // const keyring = new Keyring({ type: "ed25519" });
+        // const myAccount = keyring.addFromUri("//Alice");
+
+        //////////////////////////////////////////////////////////////////////
+        // Generate a signature
+
+        // Your message to sign
+        const message = "Sign this message";
+        const messageU8a = new TextEncoder().encode(message);
+        const messageHex = toHex(messageU8a); // Convert message to hex string
+        const messageHash = keccak256(messageHex); // Hash the message to fit into bytes32
+        console.log(`messageHash = ${messageHash}`);
+        const hashedMessageBytes = toBytes(messageHash);
+        console.log(`hashedMessageBytes = ${hashedMessageBytes}`);
+
+        // Sign the message
+        const signature = await alice.sign(hashedMessageBytes);
+        console.log(`Signature: ${toHex(signature)}`);
+
+        // Verify the signature locally
+        const isValid = alice.verify(
+            hashedMessageBytes,
+            signature,
+            alice.publicKey
+        );
+        console.log(`Is the signature valid? ${isValid}`);
+
+        //////////////////////////////////////////////////////////////////////
+        // Verify the signature using the precompile contract
+
+        const publicKeyBytes = toHex(alice.publicKey);
+        console.log(`publicKeyBytes = ${publicKeyBytes}`);
+
+        // Split signature into commitment (R) and response (s)
+        let r = signature.slice(0, 32); // Commitment, a.k.a. "r" - first 32 bytes
+        let s = signature.slice(32, 64); // Response, a.k.a. "s" - second 32 bytes
+        let rBytes = toHex(r);
+        let sBytes = toHex(s);
+
+        const isPrecompileValid = await ethClient.readContract({
+            address: IED25519VERIFY_ADDRESS,
+            abi: IEd25519VerifyABI,
+            functionName: "verify",
+            args: [messageHash,
+                publicKeyBytes,
+                rBytes,
+                sBytes]
+
+        });
+
+        console.log(
+            `Is the signature valid according to the smart contract? 
${isPrecompileValid}` + ); + assert.equal(isPrecompileValid, true) + + ////////////////////////////////////////////////////////////////////// + // Verify the signature for bad data using the precompile contract + + let brokenHashedMessageBytes = hashedMessageBytes; + brokenHashedMessageBytes[0] = (brokenHashedMessageBytes[0] + 1) % 0xff; + const brokenMessageHash = toHex(brokenHashedMessageBytes); + console.log(`brokenMessageHash = ${brokenMessageHash}`); + + const isPrecompileValidBadData = await ethClient.readContract({ + address: IED25519VERIFY_ADDRESS, + abi: IEd25519VerifyABI, + functionName: "verify", + args: [brokenMessageHash, + publicKeyBytes, + rBytes, + sBytes] + + }); + + console.log( + `Is the signature valid according to the smart contract for broken data? ${isPrecompileValidBadData}` + ); + assert.equal(isPrecompileValidBadData, false) + + ////////////////////////////////////////////////////////////////////// + // Verify the bad signature for good data using the precompile contract + + let brokenR = r; + brokenR[0] = (brokenR[0] + 1) % 0xff; + rBytes = toHex(r); + const isPrecompileValidBadSignature = await ethClient.readContract({ + address: IED25519VERIFY_ADDRESS, + abi: IEd25519VerifyABI, + functionName: "verify", + args: [messageHash, + publicKeyBytes, + rBytes, + sBytes] + + }); + + console.log( + `Is the signature valid according to the smart contract for broken signature? ${isPrecompileValidBadSignature}` + ); + assert.equal(isPrecompileValidBadSignature, false) + + }); +}); \ No newline at end of file diff --git a/evm-tests/test/eth.bridgeToken.deploy.test.ts b/evm-tests/test/eth.bridgeToken.deploy.test.ts new file mode 100644 index 0000000000..94ebcd1260 --- /dev/null +++ b/evm-tests/test/eth.bridgeToken.deploy.test.ts @@ -0,0 +1,69 @@ +import * as assert from "assert"; +import * as chai from "chai"; + +import { getDevnetApi } from "../src/substrate" +import { generateRandomEthersWallet, getPublicClient } from "../src/utils"; +import { ETH_LOCAL_URL } from "../src/config"; +import { devnet } from "@polkadot-api/descriptors" +import { PublicClient } from "viem"; +import { TypedApi } from "polkadot-api"; +import { BRIDGE_TOKEN_CONTRACT_ABI, BRIDGE_TOKEN_CONTRACT_BYTECODE } from "../src/contracts/bridgeToken"; +import { toViemAddress } from "../src/address-utils"; +import { forceSetBalanceToEthAddress, disableWhiteListCheck } from "../src/subtensor"; +import { ethers } from "ethers" +describe("bridge token contract deployment", () => { + // init eth part + const wallet = generateRandomEthersWallet(); + let publicClient: PublicClient; + + // init substrate part + let api: TypedApi + + before(async () => { + // init variables got from await and async + publicClient = await getPublicClient(ETH_LOCAL_URL) + api = await getDevnetApi() + + await forceSetBalanceToEthAddress(api, wallet.address) + await disableWhiteListCheck(api, true) + }); + + it("Can deploy bridge token smart contract", async () => { + const contractFactory = new ethers.ContractFactory(BRIDGE_TOKEN_CONTRACT_ABI, BRIDGE_TOKEN_CONTRACT_BYTECODE, wallet) + const contract = await contractFactory.deploy("name", + "symbol", wallet.address) + await contract.waitForDeployment() + assert.notEqual(contract.target, undefined) + + const contractAddress = contract.target.toString() + + const code = await publicClient.getCode({ address: toViemAddress(contractAddress) }) + if (code === undefined) { + throw new Error("code not available") + } + assert.ok(code.length > 100) + assert.ok(code.includes("0x60806040523480156")) 
+ }); + + it("Can deploy bridge token contract with gas limit", async () => { + const contractFactory = new ethers.ContractFactory(BRIDGE_TOKEN_CONTRACT_ABI, BRIDGE_TOKEN_CONTRACT_BYTECODE, wallet) + const successful_gas_limit = "12345678"; + const contract = await contractFactory.deploy("name", + "symbol", wallet.address, + { + gasLimit: successful_gas_limit, + } + ) + await contract.waitForDeployment() + assert.notEqual(contract.target, undefined) + + const contractAddress = contract.target.toString() + + const code = await publicClient.getCode({ address: toViemAddress(contractAddress) }) + if (code === undefined) { + throw new Error("code not available") + } + assert.ok(code.length > 100) + assert.ok(code.includes("0x60806040523480156")) + }); +}); \ No newline at end of file diff --git a/evm-tests/test/eth.chain-id.test.ts b/evm-tests/test/eth.chain-id.test.ts new file mode 100644 index 0000000000..09174c1212 --- /dev/null +++ b/evm-tests/test/eth.chain-id.test.ts @@ -0,0 +1,76 @@ + +import * as assert from "assert"; +import * as chai from "chai"; + +import { getDevnetApi, waitForTransactionCompletion, getRandomSubstrateKeypair } from "../src/substrate" +import { generateRandomEthWallet, getPublicClient } from "../src/utils"; +import { convertPublicKeyToSs58 } from "../src/address-utils" +import { ETH_LOCAL_URL } from "../src/config"; +import { devnet } from "@polkadot-api/descriptors" +import { getPolkadotSigner } from "polkadot-api/signer"; +import { PublicClient } from "viem"; +import { TypedApi } from "polkadot-api"; +import { forceSetBalanceToSs58Address, forceSetChainID } from "../src/subtensor"; + +describe("Test the EVM chain ID", () => { + // init eth part + const wallet = generateRandomEthWallet(); + let ethClient: PublicClient; + + // init substrate part + const keyPair = getRandomSubstrateKeypair(); + let api: TypedApi; + + // init other variable + const initChainId = 42; + + before(async () => { + // init variables got from await and async + ethClient = await getPublicClient(ETH_LOCAL_URL); + api = await getDevnetApi() + await forceSetBalanceToSs58Address(api, convertPublicKeyToSs58(keyPair.publicKey)) + + }); + + it("EVM chain id update is ok", async () => { + let chainId = await ethClient.getChainId(); + // init chain id should be 42 + assert.equal(chainId, initChainId); + + const newChainId = BigInt(100) + await forceSetChainID(api, newChainId) + + chainId = await ethClient.getChainId(); + assert.equal(chainId, newChainId); + + await forceSetChainID(api, BigInt(initChainId)) + + chainId = await ethClient.getChainId(); + // back to original value for other tests. and we can run it repeatedly + assert.equal(chainId, initChainId); + + }); + + it("EVM chain id is the same, only sudo can change it.", async () => { + let chainId = await ethClient.getChainId(); + // init chain id should be 42 + assert.equal(chainId, initChainId); + + // invalide signer for set chain ID + let signer = getPolkadotSigner( + keyPair.publicKey, + "Sr25519", + keyPair.sign, + ) + + let tx = api.tx.AdminUtils.sudo_set_evm_chain_id({ chain_id: BigInt(100) }) + await waitForTransactionCompletion(api, tx, signer) + .then(() => { }) + .catch((error) => { console.log(`transaction error ${error}`) }); + + // extrinsic should be failed and chain ID not updated. 
+ chainId = await ethClient.getChainId(); + assert.equal(chainId, 42); + + }); +}); \ No newline at end of file diff --git a/evm-tests/test/eth.incremental.deploy.test.ts b/evm-tests/test/eth.incremental.deploy.test.ts new file mode 100644 index 0000000000..c22187538d --- /dev/null +++ b/evm-tests/test/eth.incremental.deploy.test.ts @@ -0,0 +1,61 @@ + + +import * as assert from "assert"; +import * as chai from "chai"; + +import { getDevnetApi } from "../src/substrate" +import { generateRandomEthersWallet, getPublicClient } from "../src/utils"; +import { ETH_LOCAL_URL } from "../src/config"; +import { devnet } from "@polkadot-api/descriptors" +import { PublicClient } from "viem"; +import { TypedApi } from "polkadot-api"; +import { INCREMENTAL_CONTRACT_ABI, INCREMENTAL_CONTRACT_BYTECODE } from "../src/contracts/incremental"; +import { toViemAddress } from "../src/address-utils"; +import { ethers } from "ethers" +import { disableWhiteListCheck, forceSetBalanceToEthAddress } from "../src/subtensor"; + +describe("bridge token contract deployment", () => { + // init eth part + const wallet = generateRandomEthersWallet(); + let publicClient: PublicClient; + + // init substrate part + let api: TypedApi + + before(async () => { + publicClient = await getPublicClient(ETH_LOCAL_URL) + api = await getDevnetApi() + + await forceSetBalanceToEthAddress(api, wallet.address) + await disableWhiteListCheck(api, true) + }); + + it("Can deploy incremental smart contract", async () => { + const contractFactory = new ethers.ContractFactory(INCREMENTAL_CONTRACT_ABI, INCREMENTAL_CONTRACT_BYTECODE, wallet) + const contract = await contractFactory.deploy() + await contract.waitForDeployment() + + const value = await publicClient.readContract({ + abi: INCREMENTAL_CONTRACT_ABI, + address: toViemAddress(contract.target.toString()), + functionName: "retrieve", + args: [] + }) + assert.equal(value, 0) + + const newValue = 1234 + + const deployContract = new ethers.Contract(contract.target.toString(), INCREMENTAL_CONTRACT_ABI, wallet) + const storeTx = await deployContract.store(newValue) + await storeTx.wait() + + const newValueAfterStore = await publicClient.readContract({ + abi: INCREMENTAL_CONTRACT_ABI, + address: toViemAddress(contract.target.toString()), + functionName: "retrieve", + args: [] + }) + + assert.equal(newValue, newValueAfterStore) + }); +}); diff --git a/evm-tests/test/eth.substrate-transfer.test.ts b/evm-tests/test/eth.substrate-transfer.test.ts new file mode 100644 index 0000000000..9e3a2b2050 --- /dev/null +++ b/evm-tests/test/eth.substrate-transfer.test.ts @@ -0,0 +1,412 @@ +import * as assert from "assert"; + +import { getDevnetApi, waitForTransactionCompletion, getRandomSubstrateSigner, } from "../src/substrate" +import { getPublicClient } from "../src/utils"; +import { ETH_LOCAL_URL, IBALANCETRANSFER_ADDRESS, IBalanceTransferABI } from "../src/config"; +import { devnet, MultiAddress } from "@polkadot-api/descriptors" +import { PublicClient } from "viem"; +import { TypedApi, Binary, FixedSizeBinary } from "polkadot-api"; +import { generateRandomEthersWallet } from "../src/utils"; +import { tao, raoToEth, bigintToRao, compareEthBalanceWithTxFee } from "../src/balance-math"; +import { toViemAddress, convertPublicKeyToSs58, convertH160ToSS58, ss58ToH160, ss58ToEthAddress, ethAddressToH160 } from "../src/address-utils" +import { ethers } from "ethers" +import { estimateTransactionCost, getContract } from "../src/eth" + +import { WITHDRAW_CONTRACT_ABI, WITHDRAW_CONTRACT_BYTECODE } from 
"../src/contracts/withdraw" + +import { forceSetBalanceToEthAddress, forceSetBalanceToSs58Address, disableWhiteListCheck } from "../src/subtensor"; + +describe("Balance transfers between substrate and EVM", () => { + const gwei = BigInt("1000000000"); + // init eth part + const wallet = generateRandomEthersWallet(); + const wallet2 = generateRandomEthersWallet(); + let publicClient: PublicClient; + const provider = new ethers.JsonRpcProvider(ETH_LOCAL_URL); + // init substrate part + const signer = getRandomSubstrateSigner(); + let api: TypedApi + + before(async () => { + + publicClient = await getPublicClient(ETH_LOCAL_URL) + api = await getDevnetApi() + + await forceSetBalanceToEthAddress(api, wallet.address) + await forceSetBalanceToEthAddress(api, wallet2.address) + await forceSetBalanceToSs58Address(api, convertPublicKeyToSs58(signer.publicKey)) + await disableWhiteListCheck(api, true) + }); + + it("Can transfer token from EVM to EVM", async () => { + const senderBalance = await publicClient.getBalance({ address: toViemAddress(wallet.address) }) + const receiverBalance = await publicClient.getBalance({ address: toViemAddress(wallet2.address) }) + const transferBalance = raoToEth(tao(1)) + const tx = { + to: wallet2.address, + value: transferBalance.toString() + } + const txFee = await estimateTransactionCost(provider, tx) + + const txResponse = await wallet.sendTransaction(tx) + await txResponse.wait(); + + + const senderBalanceAfterTransfer = await publicClient.getBalance({ address: toViemAddress(wallet.address) }) + const receiverBalanceAfterTranser = await publicClient.getBalance({ address: toViemAddress(wallet2.address) }) + + assert.equal(senderBalanceAfterTransfer, senderBalance - transferBalance - txFee) + assert.equal(receiverBalance, receiverBalanceAfterTranser - transferBalance) + }); + + it("Can transfer token from Substrate to EVM", async () => { + const ss58Address = convertH160ToSS58(wallet.address) + const senderBalance = (await api.query.System.Account.getValue(ss58Address)).data.free + const receiverBalance = await publicClient.getBalance({ address: toViemAddress(wallet.address) }) + const transferBalance = tao(1) + + const tx = api.tx.Balances.transfer_keep_alive({ value: transferBalance, dest: MultiAddress.Id(ss58Address) }) + await waitForTransactionCompletion(api, tx, signer) + .then(() => { }) + .catch((error) => { console.log(`transaction error ${error}`) }); + + + const senderBalanceAfterTransfer = (await api.query.System.Account.getValue(ss58Address)).data.free + const receiverBalanceAfterTranser = await publicClient.getBalance({ address: toViemAddress(wallet.address) }) + + assert.equal(senderBalanceAfterTransfer, senderBalance + transferBalance) + assert.equal(receiverBalance, receiverBalanceAfterTranser - raoToEth(transferBalance)) + }); + + it("Can transfer token from EVM to Substrate", async () => { + const contract = getContract(IBALANCETRANSFER_ADDRESS, IBalanceTransferABI, wallet) + const senderBalance = await publicClient.getBalance({ address: toViemAddress(wallet.address) }) + const receiverBalance = (await api.query.System.Account.getValue(convertPublicKeyToSs58(signer.publicKey))).data.free + const transferBalance = raoToEth(tao(1)) + + const tx = await contract.transfer(signer.publicKey, { value: transferBalance.toString() }) + await tx.wait() + + + const senderBalanceAfterTransfer = await publicClient.getBalance({ address: toViemAddress(wallet.address) }) + const receiverBalanceAfterTranser = (await 
api.query.System.Account.getValue(convertPublicKeyToSs58(signer.publicKey))).data.free + + compareEthBalanceWithTxFee(senderBalanceAfterTransfer, senderBalance - transferBalance) + assert.equal(receiverBalance, receiverBalanceAfterTranser - tao(1)) + }); + + it("Transfer from EVM to substrate using evm::withdraw", async () => { + const ss58Address = convertPublicKeyToSs58(signer.publicKey) + const senderBalance = (await api.query.System.Account.getValue(ss58Address)).data.free + const ethAddresss = ss58ToH160(ss58Address); + + // transfer token to mirror eth address + const ethTransfer = { + to: ss58ToEthAddress(ss58Address), + value: raoToEth(tao(2)).toString() + } + + const txResponse = await wallet.sendTransaction(ethTransfer) + await txResponse.wait(); + + const tx = api.tx.EVM.withdraw({ address: ethAddresss, value: tao(1) }) + const txFee = (await tx.getPaymentInfo(ss58Address)).partial_fee + + await waitForTransactionCompletion(api, tx, signer) + .then(() => { }) + .catch((error) => { console.log(`transaction error ${error}`) }); + + const senderBalanceAfterWithdraw = (await api.query.System.Account.getValue(ss58Address)).data.free + + assert.equal(senderBalance, senderBalanceAfterWithdraw - tao(1) + txFee) + }); + + it("Transfer from EVM to substrate using evm::call", async () => { + const ss58Address = convertPublicKeyToSs58(signer.publicKey) + const ethAddresss = ss58ToH160(ss58Address); + + // transfer token to mirror eth address + const ethTransfer = { + to: ss58ToEthAddress(ss58Address), + value: raoToEth(tao(2)).toString() + } + + const txResponse = await wallet.sendTransaction(ethTransfer) + await txResponse.wait(); + + const source: FixedSizeBinary<20> = ethAddresss; + const target = ethAddressToH160(wallet.address) + const receiverBalance = await publicClient.getBalance({ address: toViemAddress(wallet.address) }) + + // all these parameter value are tricky, any change could make the call failed + const tx = api.tx.EVM.call({ + source: source, + target: target, + // it is U256 in the extrinsic. + value: [raoToEth(tao(1)), tao(0), tao(0), tao(0)], + gas_limit: BigInt(1000000), + // it is U256 in the extrinsic. 
+ max_fee_per_gas: [BigInt(10e9), BigInt(0), BigInt(0), BigInt(0)], + max_priority_fee_per_gas: undefined, + input: Binary.fromText(""), + nonce: undefined, + access_list: [] + }) + // txFee not accurate + const txFee = (await tx.getPaymentInfo(ss58Address)).partial_fee + + await waitForTransactionCompletion(api, tx, signer) + .then(() => { }) + .catch((error) => { console.log(`transaction error ${error}`) }); + + + const receiverBalanceAfterCall = await publicClient.getBalance({ address: toViemAddress(wallet.address) }) + assert.equal(receiverBalanceAfterCall, receiverBalance + raoToEth(tao(1))) + }); + + it("Forward value in smart contract", async () => { + + + const contractFactory = new ethers.ContractFactory(WITHDRAW_CONTRACT_ABI, WITHDRAW_CONTRACT_BYTECODE, wallet) + const contract = await contractFactory.deploy() + await contract.waitForDeployment() + + const code = await publicClient.getCode({ address: toViemAddress(contract.target.toString()) }) + if (code === undefined) { + throw new Error("code length is wrong for deployed contract") + } + assert.ok(code.length > 100) + + // transfer 2 TAO to contract + const ethTransfer = { + to: contract.target.toString(), + value: raoToEth(tao(2)).toString() + } + + const txResponse = await wallet.sendTransaction(ethTransfer) + await txResponse.wait(); + + const contractBalance = await publicClient.getBalance({ address: toViemAddress(contract.target.toString()) }) + const callerBalance = await publicClient.getBalance({ address: toViemAddress(wallet.address) }) + + const contractForCall = new ethers.Contract(contract.target.toString(), WITHDRAW_CONTRACT_ABI, wallet) + + const withdrawTx = await contractForCall.withdraw( + raoToEth(tao(1)).toString() + ); + + await withdrawTx.wait(); + + const contractBalanceAfterWithdraw = await publicClient.getBalance({ address: toViemAddress(contract.target.toString()) }) + const callerBalanceAfterWithdraw = await publicClient.getBalance({ address: toViemAddress(wallet.address) }) + + compareEthBalanceWithTxFee(callerBalanceAfterWithdraw, callerBalance + raoToEth(tao(1))) + assert.equal(contractBalance, contractBalanceAfterWithdraw + raoToEth(tao(1))) + }); + + it("Transfer full balance", async () => { + const ethBalance = await publicClient.getBalance({ address: toViemAddress(wallet.address) }) + const receiverBalance = await publicClient.getBalance({ address: toViemAddress(wallet2.address) }) + const tx = { + to: wallet2.address, + value: ethBalance.toString(), + }; + const txPrice = await estimateTransactionCost(provider, tx); + const finalTx = { + to: wallet2.address, + value: (ethBalance - txPrice).toString(), + }; + try { + // transfer should be failed since substrate requires existial balance to keep account + const txResponse = await wallet.sendTransaction(finalTx) + await txResponse.wait(); + } catch (error) { + if (error instanceof Error) { + assert.equal((error as any).code, "INSUFFICIENT_FUNDS") + assert.equal(error.toString().includes("insufficient funds"), true) + } + } + + const receiverBalanceAfterTransfer = await publicClient.getBalance({ address: toViemAddress(wallet2.address) }) + assert.equal(receiverBalance, receiverBalanceAfterTransfer) + }) + + it("Transfer more than owned balance should fail", async () => { + const ethBalance = await publicClient.getBalance({ address: toViemAddress(wallet.address) }) + const receiverBalance = await publicClient.getBalance({ address: toViemAddress(wallet2.address) }) + const tx = { + to: wallet2.address, + value: (ethBalance + 
raoToEth(tao(1))).toString(), + }; + + try { + // transfer should be failed since substrate requires existial balance to keep account + const txResponse = await wallet.sendTransaction(tx) + await txResponse.wait(); + } catch (error) { + if (error instanceof Error) { + assert.equal((error as any).code, "INSUFFICIENT_FUNDS") + assert.equal(error.toString().includes("insufficient funds"), true) + } + } + + const receiverBalanceAfterTransfer = await publicClient.getBalance({ address: toViemAddress(wallet2.address) }) + assert.equal(receiverBalance, receiverBalanceAfterTransfer) + }); + + it("Transfer more than u64::max in substrate equivalent should receive error response", async () => { + const receiverBalance = await publicClient.getBalance({ address: toViemAddress(wallet2.address) }) + try { + const tx = { + to: wallet2.address, + value: raoToEth(BigInt(2) ** BigInt(64)).toString(), + }; + // transfer should be failed since substrate requires existial balance to keep account + const txResponse = await wallet.sendTransaction(tx) + await txResponse.wait(); + } catch (error) { + if (error instanceof Error) { + assert.equal((error as any).code, "INSUFFICIENT_FUNDS") + assert.equal(error.toString().includes("insufficient funds"), true) + } + } + + const contract = getContract(IBALANCETRANSFER_ADDRESS, IBalanceTransferABI, wallet) + try { + const tx = await contract.transfer(signer.publicKey, { value: raoToEth(BigInt(2) ** BigInt(64)).toString() }) + await tx.await() + } catch (error) { + if (error instanceof Error) { + console.log(error.toString()) + assert.equal(error.toString().includes("revert data"), true) + } + } + + try { + const dest = convertH160ToSS58(wallet2.address) + const tx = api.tx.Balances.transfer_keep_alive({ value: bigintToRao(BigInt(2) ** BigInt(64)), dest: MultiAddress.Id(dest) }) + await waitForTransactionCompletion(api, tx, signer) + .then(() => { }) + .catch((error) => { console.log(`transaction error ${error}`) }); + } catch (error) { + if (error instanceof Error) { + console.log(error.toString()) + assert.equal(error.toString().includes("Cannot convert"), true) + } + } + + try { + const dest = ethAddressToH160(wallet2.address) + const tx = api.tx.EVM.withdraw({ value: bigintToRao(BigInt(2) ** BigInt(64)), address: dest }) + await waitForTransactionCompletion(api, tx, signer) + .then(() => { }) + .catch((error) => { console.log(`transaction error ${error}`) }); + } catch (error) { + if (error instanceof Error) { + assert.equal(error.toString().includes("Cannot convert"), true) + } + } + + try { + const source = ethAddressToH160(wallet.address) + const target = ethAddressToH160(wallet2.address) + const tx = api.tx.EVM.call({ + source: source, + target: target, + // it is U256 in the extrinsic, the value is more than u64::MAX + value: [raoToEth(tao(1)), tao(0), tao(0), tao(1)], + gas_limit: BigInt(1000000), + // it is U256 in the extrinsic. 
+ max_fee_per_gas: [BigInt(10e9), BigInt(0), BigInt(0), BigInt(0)], + max_priority_fee_per_gas: undefined, + input: Binary.fromText(""), + nonce: undefined, + access_list: [] + }) + await waitForTransactionCompletion(api, tx, signer) + .then(() => { }) + .catch((error) => { console.log(`transaction error ${error}`) }); + } catch (error) { + if (error instanceof Error) { + console.log(error.toString()) + assert.equal((error as any).code, "INSUFFICIENT_FUNDS") + assert.equal(error.toString().includes("insufficient funds"), true) + } + } + + const receiverBalanceAfterTransfer = await publicClient.getBalance({ address: toViemAddress(wallet2.address) }) + assert.equal(receiverBalance, receiverBalanceAfterTransfer) + }); + + it("Gas price should be 10 GWei", async () => { + const feeData = await provider.getFeeData(); + assert.equal(feeData.gasPrice, BigInt(10000000000)); + }); + + + it("max_fee_per_gas and max_priority_fee_per_gas affect transaction fee properly", async () => { + + const testCases = [ + [10, 0, 21000 * 10 * 1e9], + [10, 10, 21000 * 10 * 1e9], + [11, 0, 21000 * 10 * 1e9], + [11, 1, (21000 * 10 + 21000) * 1e9], + [11, 2, (21000 * 10 + 21000) * 1e9], + ]; + + for (let i in testCases) { + const tc = testCases[i]; + const actualFee = await transferAndGetFee( + wallet, wallet2, publicClient, + gwei * BigInt(tc[0]), + gwei * BigInt(tc[1]) + ); + assert.equal(actualFee, BigInt(tc[2])) + } + }); + + it("Low max_fee_per_gas gets transaction rejected", async () => { + try { + await transferAndGetFee(wallet, wallet2, publicClient, gwei * BigInt(9), BigInt(0)) + } catch (error) { + if (error instanceof Error) { + console.log(error.toString()) + assert.equal(error.toString().includes("gas price less than block base fee"), true) + } + } + }); + + it("max_fee_per_gas lower than max_priority_fee_per_gas gets transaction rejected", async () => { + try { + await transferAndGetFee(wallet, wallet2, publicClient, gwei * BigInt(10), gwei * BigInt(11)) + } catch (error) { + if (error instanceof Error) { + assert.equal(error.toString().includes("priorityFee cannot be more than maxFee"), true) + } + } + }); +}); + +async function transferAndGetFee(wallet: ethers.Wallet, wallet2: ethers.Wallet, client: PublicClient, max_fee_per_gas: BigInt, max_priority_fee_per_gas: BigInt) { + + const ethBalanceBefore = await client.getBalance({ address: toViemAddress(wallet.address) }) + // Send TAO + const tx = { + to: wallet2.address, + value: raoToEth(tao(1)).toString(), + // EIP-1559 transaction parameters + maxPriorityFeePerGas: max_priority_fee_per_gas.toString(), + maxFeePerGas: max_fee_per_gas.toString(), + gasLimit: 21000, + }; + + // Send the transaction + const txResponse = await wallet.sendTransaction(tx); + await txResponse.wait() + + // Check balances + const ethBalanceAfter = await client.getBalance({ address: toViemAddress(wallet.address) }) + const fee = ethBalanceBefore - ethBalanceAfter - raoToEth(tao(1)) + + return fee; +} \ No newline at end of file diff --git a/evm-tests/test/metagraph.precompile.test.ts b/evm-tests/test/metagraph.precompile.test.ts new file mode 100644 index 0000000000..94c0df8861 --- /dev/null +++ b/evm-tests/test/metagraph.precompile.test.ts @@ -0,0 +1,147 @@ +import * as assert from "assert"; + +import { getAliceSigner, getClient, getDevnetApi, waitForTransactionCompletion, convertPublicKeyToMultiAddress, getRandomSubstrateKeypair, getSignerFromKeypair } from "../src/substrate" +import { getPublicClient, } from "../src/utils"; +import { ETH_LOCAL_URL, SUB_LOCAL_URL, } from 
"../src/config"; +import { devnet } from "@polkadot-api/descriptors" +import { PublicClient } from "viem"; +import { PolkadotSigner, TypedApi } from "polkadot-api"; +import { toViemAddress, convertPublicKeyToSs58 } from "../src/address-utils" +import { IMetagraphABI, IMETAGRAPH_ADDRESS } from "../src/contracts/metagraph" + +describe("Test the EVM chain ID", () => { + // init substrate part + const hotkey = getRandomSubstrateKeypair(); + const coldkey = getRandomSubstrateKeypair(); + let publicClient: PublicClient; + + let api: TypedApi + + // sudo account alice as signer + let alice: PolkadotSigner; + + // init other variable + let subnetId = 0; + + before(async () => { + // init variables got from await and async + publicClient = await getPublicClient(ETH_LOCAL_URL) + const subClient = await getClient(SUB_LOCAL_URL) + api = await getDevnetApi() + alice = await getAliceSigner(); + + { + const multiAddress = convertPublicKeyToMultiAddress(hotkey.publicKey) + const internalCall = api.tx.Balances.force_set_balance({ who: multiAddress, new_free: BigInt(1e12) }) + const tx = api.tx.Sudo.sudo({ call: internalCall.decodedCall }) + + await waitForTransactionCompletion(api, tx, alice) + .then(() => { }) + .catch((error) => { console.log(`transaction error ${error}`) }); + } + + { + const multiAddress = convertPublicKeyToMultiAddress(coldkey.publicKey) + const internalCall = api.tx.Balances.force_set_balance({ who: multiAddress, new_free: BigInt(1e12) }) + const tx = api.tx.Sudo.sudo({ call: internalCall.decodedCall }) + + await waitForTransactionCompletion(api, tx, alice) + .then(() => { }) + .catch((error) => { console.log(`transaction error ${error}`) }); + } + + const signer = getSignerFromKeypair(coldkey) + const registerNetworkTx = api.tx.SubtensorModule.register_network({ hotkey: convertPublicKeyToSs58(hotkey.publicKey) }) + await waitForTransactionCompletion(api, registerNetworkTx, signer) + .then(() => { }) + .catch((error) => { console.log(`transaction error ${error}`) }); + + let totalNetworks = await api.query.SubtensorModule.TotalNetworks.getValue() + assert.ok(totalNetworks > 1) + subnetId = totalNetworks - 1 + + let uid_count = + await api.query.SubtensorModule.SubnetworkN.getValue(subnetId) + if (uid_count === 0) { + const tx = api.tx.SubtensorModule.burned_register({ hotkey: convertPublicKeyToSs58(hotkey.publicKey), netuid: subnetId }) + await waitForTransactionCompletion(api, tx, signer) + .then(() => { }) + .catch((error) => { console.log(`transaction error ${error}`) }); + } + }) + + it("Metagraph data access via precompile contract is ok", async () => { + const uid = 0 + const uid_count = await publicClient.readContract({ + abi: IMetagraphABI, + address: toViemAddress(IMETAGRAPH_ADDRESS), + functionName: "getUidCount", + args: [subnetId] + }) + // back to original value for other tests. 
and we can run it repeatedly + assert.ok(uid_count != undefined); + + // const axon = api.query.SubtensorModule.Axons.getValue() + + const axon = await publicClient.readContract({ + abi: IMetagraphABI, + address: toViemAddress(IMETAGRAPH_ADDRESS), + functionName: "getAxon", + args: [subnetId, uid] + }) + + assert.ok(axon != undefined); + if (axon instanceof Object) { + assert.ok(axon != undefined); + if ("block" in axon) { + assert.ok(axon.block != undefined); + } else { + throw new Error("block not included in axon") + } + + if ("version" in axon) { + assert.ok(axon.version != undefined); + } else { + throw new Error("version not included in axon") + } + + if ("ip" in axon) { + assert.ok(axon.ip != undefined); + } else { + throw new Error("ip not included in axon") + } + + if ("port" in axon) { + assert.ok(axon.port != undefined); + } else { + throw new Error("port not included in axon") + } + + if ("ip_type" in axon) { + assert.ok(axon.ip_type != undefined); + } else { + throw new Error("ip_type not included in axon") + } + + if ("protocol" in axon) { + assert.ok(axon.protocol != undefined); + } else { + throw new Error("protocol not included in axon") + } + } + + const methodList = ["getEmission", "getVtrust", "getValidatorStatus", "getLastUpdate", "getIsActive", + "getHotkey", "getColdkey" + ] + for (const method of methodList) { + const value = await publicClient.readContract({ + abi: IMetagraphABI, + address: toViemAddress(IMETAGRAPH_ADDRESS), + functionName: method, + args: [subnetId, uid] + }) + + assert.ok(value != undefined); + } + }); +}); \ No newline at end of file diff --git a/evm-tests/test/neuron.precompile.emission-check.test.ts b/evm-tests/test/neuron.precompile.emission-check.test.ts new file mode 100644 index 0000000000..ac609c1e27 --- /dev/null +++ b/evm-tests/test/neuron.precompile.emission-check.test.ts @@ -0,0 +1,72 @@ +import * as assert from "assert"; + +import { getAliceSigner, getClient, getDevnetApi, getRandomSubstrateKeypair } from "../src/substrate" +import { getPublicClient, } from "../src/utils"; +import { ETH_LOCAL_URL, SUB_LOCAL_URL, } from "../src/config"; +import { devnet } from "@polkadot-api/descriptors" +import { PublicClient } from "viem"; +import { PolkadotSigner, TypedApi } from "polkadot-api"; +import { convertPublicKeyToSs58, } from "../src/address-utils" +import { ethers } from "ethers" +import { INEURON_ADDRESS, INeuronABI } from "../src/contracts/neuron" +import { generateRandomEthersWallet } from "../src/utils" +import { forceSetBalanceToSs58Address, forceSetBalanceToEthAddress, addNewSubnetwork } from "../src/subtensor" + +describe("Test the EVM chain ID", () => { + // init eth part + const wallet = generateRandomEthersWallet(); + + // init substrate part + const hotkey = getRandomSubstrateKeypair(); + const hotkey2 = getRandomSubstrateKeypair(); + const coldkey = getRandomSubstrateKeypair(); + let publicClient: PublicClient; + + let api: TypedApi + + // sudo account alice as signer + let alice: PolkadotSigner; + + before(async () => { + // init variables got from await and async + publicClient = await getPublicClient(ETH_LOCAL_URL) + const subClient = await getClient(SUB_LOCAL_URL) + api = await getDevnetApi() + alice = await getAliceSigner(); + await forceSetBalanceToSs58Address(api, convertPublicKeyToSs58(hotkey.publicKey)) + await forceSetBalanceToSs58Address(api, convertPublicKeyToSs58(hotkey2.publicKey)) + + await forceSetBalanceToSs58Address(api, convertPublicKeyToSs58(coldkey.publicKey)) + await forceSetBalanceToEthAddress(api, 
wallet.address) + + const netuid = await addNewSubnetwork(api, hotkey2, coldkey) + console.log("test on subnet ", netuid) + }) + + it("Burned register and check emission", async () => { + let netuid = (await api.query.SubtensorModule.TotalNetworks.getValue()) - 1 + const uid = await api.query.SubtensorModule.SubnetworkN.getValue(netuid) + const contract = new ethers.Contract(INEURON_ADDRESS, INeuronABI, wallet); + + const tx = await contract.burnedRegister( + netuid, + hotkey.publicKey + ); + await tx.wait(); + + const uidAfterNew = await api.query.SubtensorModule.SubnetworkN.getValue(netuid) + assert.equal(uid + 1, uidAfterNew) + + const key = await api.query.SubtensorModule.Keys.getValue(netuid, uid) + assert.equal(key, convertPublicKeyToSs58(hotkey.publicKey)) + + let i = 0; + while (i < 10) { + const emission = await api.query.SubtensorModule.PendingEmission.getValue(netuid) + + console.log("emission is ", emission); + await new Promise((resolve) => setTimeout(resolve, 2000)); + i += 1; + } + }) +}); \ No newline at end of file diff --git a/evm-tests/test/neuron.precompile.reveal-weights.test.ts b/evm-tests/test/neuron.precompile.reveal-weights.test.ts new file mode 100644 index 0000000000..85125f0956 --- /dev/null +++ b/evm-tests/test/neuron.precompile.reveal-weights.test.ts @@ -0,0 +1,142 @@ +import * as assert from "assert"; +import { getAliceSigner, getDevnetApi, getRandomSubstrateKeypair } from "../src/substrate" +import { devnet } from "@polkadot-api/descriptors" +import { PolkadotSigner, TypedApi } from "polkadot-api"; +import { convertPublicKeyToSs58, convertH160ToSS58 } from "../src/address-utils" +import { Vec, Tuple, VecFixed, u16, u8, u64 } from "@polkadot/types-codec"; +import { TypeRegistry } from "@polkadot/types"; +import { ethers } from "ethers" +import { INEURON_ADDRESS, INeuronABI } from "../src/contracts/neuron" +import { generateRandomEthersWallet } from "../src/utils" +import { convertH160ToPublicKey } from "../src/address-utils" +import { blake2AsU8a } from "@polkadot/util-crypto" +import { + forceSetBalanceToEthAddress, forceSetBalanceToSs58Address, addNewSubnetwork, setCommitRevealWeightsEnabled, setWeightsSetRateLimit, burnedRegister, + setTempo, setCommitRevealWeightsInterval +} from "../src/subtensor" + +// hardcode some values for reveal hash +const uids = [1]; +const values = [5]; +const salt = [9]; +const version_key = 0; + +function getCommitHash(netuid: number, address: string) { + const registry = new TypeRegistry(); + let publicKey = convertH160ToPublicKey(address); + + const tupleData = new Tuple( + registry, + [ + VecFixed.with(u8, 32), + u16, + Vec.with(u16), + Vec.with(u16), + Vec.with(u16), + u64, + ], + [publicKey, netuid, uids, values, salt, version_key] + ); + + const hash = blake2AsU8a(tupleData.toU8a()); + return hash; +} + +describe("Test neuron precompile reveal weights", () => { + // init eth part + const wallet = generateRandomEthersWallet(); + + // init substrate part + const hotkey = getRandomSubstrateKeypair(); + const coldkey = getRandomSubstrateKeypair(); + + let api: TypedApi + + // sudo account alice as signer + let alice: PolkadotSigner; + before(async () => { + // init variables got from await and async + api = await getDevnetApi() + alice = await getAliceSigner(); + + await forceSetBalanceToSs58Address(api, convertPublicKeyToSs58(alice.publicKey)) + await forceSetBalanceToSs58Address(api, convertPublicKeyToSs58(hotkey.publicKey)) + await forceSetBalanceToSs58Address(api, convertPublicKeyToSs58(coldkey.publicKey)) + await 
forceSetBalanceToEthAddress(api, wallet.address) + let netuid = await addNewSubnetwork(api, hotkey, coldkey) + + console.log("test the case on subnet ", netuid) + + // enable commit reveal feature + await setCommitRevealWeightsEnabled(api, netuid, true) + // set it as 0, we can set the weight anytime + await setWeightsSetRateLimit(api, netuid, BigInt(0)) + + const ss58Address = convertH160ToSS58(wallet.address) + await burnedRegister(api, netuid, ss58Address, coldkey) + + const uid = await api.query.SubtensorModule.Uids.getValue( + netuid, + ss58Address + ) + // eth wallet account should be the first neuron in the subnet + assert.equal(uid, uids[0]) + }) + + it("EVM neuron commit weights via call precompile", async () => { + let totalNetworks = await api.query.SubtensorModule.TotalNetworks.getValue() + const subnetId = totalNetworks - 1 + const commitHash = getCommitHash(subnetId, wallet.address) + const contract = new ethers.Contract(INEURON_ADDRESS, INeuronABI, wallet); + const tx = await contract.commitWeights(subnetId, commitHash) + await tx.wait() + + const ss58Address = convertH160ToSS58(wallet.address) + + const weightsCommit = await api.query.SubtensorModule.WeightCommits.getValue(subnetId, ss58Address) + if (weightsCommit === undefined) { + throw new Error("submit weights failed") + } + assert.ok(weightsCommit.length > 0) + }) + + it("EVM neuron reveal weights via call precompile", async () => { + let totalNetworks = await api.query.SubtensorModule.TotalNetworks.getValue() + const netuid = totalNetworks - 1 + const contract = new ethers.Contract(INEURON_ADDRESS, INeuronABI, wallet); + // set tempo or epoch large, then enough time to reveal weight + await setTempo(api, netuid, 60000) + // set interval epoch as 0, we can reveal at the same epoch + await setCommitRevealWeightsInterval(api, netuid, BigInt(0)) + + const tx = await contract.revealWeights( + netuid, + uids, + values, + salt, + version_key + ); + await tx.wait() + const ss58Address = convertH160ToSS58(wallet.address) + + // check the weight commit is removed after reveal successfully + const weightsCommit = await api.query.SubtensorModule.WeightCommits.getValue(netuid, ss58Address) + assert.equal(weightsCommit, undefined) + + // check the weight is set after reveal with correct uid + const neuron_uid = await api.query.SubtensorModule.Uids.getValue( + netuid, + ss58Address + ) + + const weights = await api.query.SubtensorModule.Weights.getValue(netuid, neuron_uid) + + if (weights === undefined) { + throw new Error("weights not available onchain") + } + for (const weight of weights) { + assert.equal(weight[0], neuron_uid) + assert.ok(weight[1] !== undefined) + } + }) +}); \ No newline at end of file diff --git a/evm-tests/test/neuron.precompile.serve.axon-prometheus.test.ts b/evm-tests/test/neuron.precompile.serve.axon-prometheus.test.ts new file mode 100644 index 0000000000..aee84f130c --- /dev/null +++ b/evm-tests/test/neuron.precompile.serve.axon-prometheus.test.ts @@ -0,0 +1,162 @@ +import * as assert from "assert"; +import { getAliceSigner, getClient, getDevnetApi, getRandomSubstrateKeypair } from "../src/substrate" +import { SUB_LOCAL_URL, } from "../src/config"; +import { devnet } from "@polkadot-api/descriptors" +import { PolkadotSigner, TypedApi } from "polkadot-api"; +import { convertPublicKeyToSs58, convertH160ToSS58 } from "../src/address-utils" +import { ethers } from "ethers" +import { INEURON_ADDRESS, INeuronABI } from "../src/contracts/neuron" +import { generateRandomEthersWallet } from "../src/utils" 
+import { forceSetBalanceToEthAddress, forceSetBalanceToSs58Address, addNewSubnetwork, burnedRegister } from "../src/subtensor" + +describe("Test neuron precompile Serve Axon Prometheus", () => { + // init eth part + const wallet1 = generateRandomEthersWallet(); + const wallet2 = generateRandomEthersWallet(); + const wallet3 = generateRandomEthersWallet(); + + // init substrate part + const hotkey = getRandomSubstrateKeypair(); + const coldkey = getRandomSubstrateKeypair(); + + let api: TypedApi + + // sudo account alice as signer + let alice: PolkadotSigner; + before(async () => { + // init variables got from await and async + const subClient = await getClient(SUB_LOCAL_URL) + api = await getDevnetApi() + alice = await getAliceSigner(); + + await forceSetBalanceToSs58Address(api, convertPublicKeyToSs58(alice.publicKey)) + await forceSetBalanceToSs58Address(api, convertPublicKeyToSs58(hotkey.publicKey)) + await forceSetBalanceToSs58Address(api, convertPublicKeyToSs58(coldkey.publicKey)) + await forceSetBalanceToEthAddress(api, wallet1.address) + await forceSetBalanceToEthAddress(api, wallet2.address) + await forceSetBalanceToEthAddress(api, wallet3.address) + let netuid = await addNewSubnetwork(api, hotkey, coldkey) + + console.log("test the case on subnet ", netuid) + + await burnedRegister(api, netuid, convertH160ToSS58(wallet1.address), coldkey) + await burnedRegister(api, netuid, convertH160ToSS58(wallet2.address), coldkey) + await burnedRegister(api, netuid, convertH160ToSS58(wallet3.address), coldkey) + }) + + it("Serve Axon", async () => { + let netuid = (await api.query.SubtensorModule.TotalNetworks.getValue()) - 1 + const version = 0; + const ip = 1; + const port = 2; + const ipType = 4; + const protocol = 0; + const placeholder1 = 8; + const placeholder2 = 9; + + const contract = new ethers.Contract(INEURON_ADDRESS, INeuronABI, wallet1); + + const tx = await contract.serveAxon( + netuid, + version, + ip, + port, + ipType, + protocol, + placeholder1, + placeholder2 + ); + await tx.wait(); + + const axon = await api.query.SubtensorModule.Axons.getValue( + netuid, + convertH160ToSS58(wallet1.address) + ) + assert.notEqual(axon?.block, undefined) + assert.equal(axon?.version, version) + assert.equal(axon?.ip, ip) + assert.equal(axon?.port, port) + assert.equal(axon?.ip_type, ipType) + assert.equal(axon?.protocol, protocol) + assert.equal(axon?.placeholder1, placeholder1) + assert.equal(axon?.placeholder2, placeholder2) + }); + + it("Serve Axon TLS", async () => { + let netuid = (await api.query.SubtensorModule.TotalNetworks.getValue()) - 1 + const version = 0; + const ip = 1; + const port = 2; + const ipType = 4; + const protocol = 0; + const placeholder1 = 8; + const placeholder2 = 9; + // certificate length is 65 + const certificate = new Uint8Array([ + 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, + 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, + 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, + 57, 58, 59, 60, 61, 62, 63, 64, 65, + ]); + + const contract = new ethers.Contract(INEURON_ADDRESS, INeuronABI, wallet2); + + const tx = await contract.serveAxonTls( + netuid, + version, + ip, + port, + ipType, + protocol, + placeholder1, + placeholder2, + certificate + ); + await tx.wait(); + + const axon = await api.query.SubtensorModule.Axons.getValue( + netuid, + convertH160ToSS58(wallet2.address)) + + assert.notEqual(axon?.block, undefined) + assert.equal(axon?.version, version) + assert.equal(axon?.ip, ip) + 
assert.equal(axon?.port, port) + assert.equal(axon?.ip_type, ipType) + assert.equal(axon?.protocol, protocol) + assert.equal(axon?.placeholder1, placeholder1) + assert.equal(axon?.placeholder2, placeholder2) + }); + + it("Serve Prometheus", async () => { + let netuid = (await api.query.SubtensorModule.TotalNetworks.getValue()) - 1 + const version = 0; + const ip = 1; + const port = 2; + const ipType = 4; + + const contract = new ethers.Contract(INEURON_ADDRESS, INeuronABI, wallet3); + + const tx = await contract.servePrometheus( + netuid, + version, + ip, + port, + ipType + ); + await tx.wait(); + + const prometheus = ( + await api.query.SubtensorModule.Prometheus.getValue( + netuid, + convertH160ToSS58(wallet3.address) + ) + ) + + assert.notEqual(prometheus?.block, undefined) + assert.equal(prometheus?.version, version) + assert.equal(prometheus?.ip, ip) + assert.equal(prometheus?.port, port) + assert.equal(prometheus?.ip_type, ipType) + }); +}); \ No newline at end of file diff --git a/evm-tests/test/neuron.precompile.set-weights.test.ts b/evm-tests/test/neuron.precompile.set-weights.test.ts new file mode 100644 index 0000000000..393c2b97b8 --- /dev/null +++ b/evm-tests/test/neuron.precompile.set-weights.test.ts @@ -0,0 +1,65 @@ +import * as assert from "assert"; + +import { getDevnetApi, getRandomSubstrateKeypair } from "../src/substrate" +import { devnet } from "@polkadot-api/descriptors" +import { TypedApi } from "polkadot-api"; +import { convertH160ToSS58, convertPublicKeyToSs58, } from "../src/address-utils" +import { ethers } from "ethers" +import { INEURON_ADDRESS, INeuronABI } from "../src/contracts/neuron" +import { generateRandomEthersWallet } from "../src/utils" +import { + forceSetBalanceToSs58Address, forceSetBalanceToEthAddress, addNewSubnetwork, burnedRegister, setCommitRevealWeightsEnabled, + setWeightsSetRateLimit +} from "../src/subtensor" + +describe("Test neuron precompile contract, set weights function", () => { + // init eth part + const wallet = generateRandomEthersWallet(); + + // init substrate part + const hotkey = getRandomSubstrateKeypair(); + const coldkey = getRandomSubstrateKeypair(); + + let api: TypedApi + + before(async () => { + api = await getDevnetApi() + + await forceSetBalanceToSs58Address(api, convertPublicKeyToSs58(hotkey.publicKey)) + + await forceSetBalanceToSs58Address(api, convertPublicKeyToSs58(coldkey.publicKey)) + await forceSetBalanceToEthAddress(api, wallet.address) + + const netuid = await addNewSubnetwork(api, hotkey, coldkey) + console.log("test on subnet ", netuid) + + await burnedRegister(api, netuid, convertH160ToSS58(wallet.address), coldkey) + const uid = await api.query.SubtensorModule.Uids.getValue(netuid, convertH160ToSS58(wallet.address)) + assert.notEqual(uid, undefined) + // disable reveal and enable direct set weights + await setCommitRevealWeightsEnabled(api, netuid, false) + await setWeightsSetRateLimit(api, netuid, BigInt(0)) + }) + + it("Set weights is ok", async () => { + let netuid = (await api.query.SubtensorModule.TotalNetworks.getValue()) - 1 + const uid = await api.query.SubtensorModule.Uids.getValue(netuid, convertH160ToSS58(wallet.address)) + + const contract = new ethers.Contract(INEURON_ADDRESS, INeuronABI, wallet); + const dests = [1]; + const weights = [2]; + const version_key = 0; + + const tx = await contract.setWeights(netuid, dests, weights, version_key); + + await tx.wait(); + const weightsOnChain = await api.query.SubtensorModule.Weights.getValue(netuid, uid) + + weightsOnChain.forEach((weight, _) => { 
+ const uidInWeight = weight[0]; + const value = weight[1]; + assert.equal(uidInWeight, uid) + assert.ok(value > 0) + }); + }) +}); \ No newline at end of file diff --git a/evm-tests/test/staking.precompile.add-remove.test.ts b/evm-tests/test/staking.precompile.add-remove.test.ts new file mode 100644 index 0000000000..5387e62428 --- /dev/null +++ b/evm-tests/test/staking.precompile.add-remove.test.ts @@ -0,0 +1,326 @@ +import * as assert from "assert"; +import { getDevnetApi, getRandomSubstrateKeypair } from "../src/substrate" +import { devnet } from "@polkadot-api/descriptors" +import { PolkadotSigner, TypedApi } from "polkadot-api"; +import { convertPublicKeyToSs58, convertH160ToSS58 } from "../src/address-utils" +import { raoToEth, tao } from "../src/balance-math" +import { ethers } from "ethers" +import { generateRandomEthersWallet, getPublicClient } from "../src/utils" +import { convertH160ToPublicKey } from "../src/address-utils" +import { + forceSetBalanceToEthAddress, forceSetBalanceToSs58Address, addNewSubnetwork, burnedRegister, + sendProxyCall, +} from "../src/subtensor" +import { ETH_LOCAL_URL } from "../src/config"; +import { ISTAKING_ADDRESS, ISTAKING_V2_ADDRESS, IStakingABI, IStakingV2ABI } from "../src/contracts/staking" +import { PublicClient } from "viem"; + +describe("Test neuron precompile reveal weights", () => { + // init eth part + const wallet1 = generateRandomEthersWallet(); + const wallet2 = generateRandomEthersWallet(); + let publicClient: PublicClient; + // init substrate part + const hotkey = getRandomSubstrateKeypair(); + const coldkey = getRandomSubstrateKeypair(); + const proxy = getRandomSubstrateKeypair(); + + let api: TypedApi + + // sudo account alice as signer + let alice: PolkadotSigner; + before(async () => { + publicClient = await getPublicClient(ETH_LOCAL_URL) + // init variables got from await and async + api = await getDevnetApi() + + // await forceSetBalanceToSs58Address(api, convertPublicKeyToSs58(alice.publicKey)) + await forceSetBalanceToSs58Address(api, convertPublicKeyToSs58(hotkey.publicKey)) + await forceSetBalanceToSs58Address(api, convertPublicKeyToSs58(coldkey.publicKey)) + await forceSetBalanceToSs58Address(api, convertPublicKeyToSs58(proxy.publicKey)) + await forceSetBalanceToEthAddress(api, wallet1.address) + await forceSetBalanceToEthAddress(api, wallet2.address) + let netuid = await addNewSubnetwork(api, hotkey, coldkey) + + console.log("test the case on subnet ", netuid) + + await burnedRegister(api, netuid, convertH160ToSS58(wallet1.address), coldkey) + await burnedRegister(api, netuid, convertH160ToSS58(wallet2.address), coldkey) + }) + + it("Can add stake", async () => { + let netuid = (await api.query.SubtensorModule.TotalNetworks.getValue()) - 1 + // ETH unit + let stakeBalance = raoToEth(tao(20)) + const stakeBefore = await api.query.SubtensorModule.Alpha.getValue(convertPublicKeyToSs58(hotkey.publicKey), convertH160ToSS58(wallet1.address), netuid) + const contract = new ethers.Contract(ISTAKING_ADDRESS, IStakingABI, wallet1); + const tx = await contract.addStake(hotkey.publicKey, netuid, { value: stakeBalance.toString() }) + await tx.wait() + + const stakeFromContract = BigInt( + await contract.getStake(hotkey.publicKey, convertH160ToPublicKey(wallet1.address), netuid) + ); + + assert.ok(stakeFromContract > stakeBefore) + const stakeAfter = await api.query.SubtensorModule.Alpha.getValue(convertPublicKeyToSs58(hotkey.publicKey), convertH160ToSS58(wallet1.address), netuid) + assert.ok(stakeAfter > stakeBefore) + }) + + 
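// Note: the staking precompile above passes the stake amount as msg.value denominated in
+        // ETH-wei (via raoToEth), while the V2 precompile in the next test passes the amount as an
+        // explicit RAO parameter; both variants are verified against the on-chain SubtensorModule.Alpha map.
+
+    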
it("Can add stake V2", async () => { + let netuid = (await api.query.SubtensorModule.TotalNetworks.getValue()) - 1 + // the unit in V2 is RAO, not ETH + let stakeBalance = tao(20) + const stakeBefore = await api.query.SubtensorModule.Alpha.getValue(convertPublicKeyToSs58(hotkey.publicKey), convertH160ToSS58(wallet2.address), netuid) + const contract = new ethers.Contract(ISTAKING_V2_ADDRESS, IStakingV2ABI, wallet2); + const tx = await contract.addStake(hotkey.publicKey, stakeBalance.toString(), netuid) + await tx.wait() + + const stakeFromContract = BigInt( + await contract.getStake(hotkey.publicKey, convertH160ToPublicKey(wallet2.address), netuid) + ); + + assert.ok(stakeFromContract > stakeBefore) + const stakeAfter = await api.query.SubtensorModule.Alpha.getValue(convertPublicKeyToSs58(hotkey.publicKey), convertH160ToSS58(wallet2.address), netuid) + assert.ok(stakeAfter > stakeBefore) + }) + + it("Can not add stake if subnet doesn't exist", async () => { + // wrong netuid + let netuid = 12345; + let stakeBalance = raoToEth(tao(20)) + const stakeBefore = await api.query.SubtensorModule.Alpha.getValue(convertPublicKeyToSs58(hotkey.publicKey), convertH160ToSS58(wallet1.address), netuid) + const contract = new ethers.Contract(ISTAKING_ADDRESS, IStakingABI, wallet1); + try { + const tx = await contract.addStake(hotkey.publicKey, netuid, { value: stakeBalance.toString() }) + await tx.wait() + assert.fail("Transaction should have failed"); + } catch (error) { + // Transaction failed as expected + } + + const stakeFromContract = BigInt( + await contract.getStake(hotkey.publicKey, convertH160ToPublicKey(wallet1.address), netuid) + ); + assert.equal(stakeFromContract, stakeBefore) + const stakeAfter = await api.query.SubtensorModule.Alpha.getValue(convertPublicKeyToSs58(hotkey.publicKey), convertH160ToSS58(wallet1.address), netuid) + assert.equal(stakeAfter, stakeBefore) + }); + + it("Can not add stake V2 if subnet doesn't exist", async () => { + // wrong netuid + let netuid = 12345; + // the unit in V2 is RAO, not ETH + let stakeBalance = tao(20) + const stakeBefore = await api.query.SubtensorModule.Alpha.getValue(convertPublicKeyToSs58(hotkey.publicKey), convertH160ToSS58(wallet2.address), netuid) + const contract = new ethers.Contract(ISTAKING_V2_ADDRESS, IStakingV2ABI, wallet2); + + try { + const tx = await contract.addStake(hotkey.publicKey, stakeBalance.toString(), netuid); + await tx.wait(); + assert.fail("Transaction should have failed"); + } catch (error) { + // Transaction failed as expected + } + + const stakeFromContract = BigInt( + await contract.getStake(hotkey.publicKey, convertH160ToPublicKey(wallet2.address), netuid) + ); + assert.equal(stakeFromContract, stakeBefore) + const stakeAfter = await api.query.SubtensorModule.Alpha.getValue(convertPublicKeyToSs58(hotkey.publicKey), convertH160ToSS58(wallet2.address), netuid) + assert.equal(stakeAfter, stakeBefore) + }) + + it("Can get stake via contract read method", async () => { + let netuid = (await api.query.SubtensorModule.TotalNetworks.getValue()) - 1 + + // TODO need check how to pass bytes32 as parameter of readContract + // const value = await publicClient.readContract({ + // address: ISTAKING_ADDRESS, + // abi: IStakingABI, + // functionName: "getStake", + // args: [hotkey.publicKey, // Convert to bytes32 format + // convertH160ToPublicKey(wallet1.address), + // netuid] + // }) + // if (value === undefined || value === null) { + // throw new Error("value of getStake from contract is undefined") + // } + // const intValue = 
BigInt(value.toString()) + + const contractV1 = new ethers.Contract(ISTAKING_ADDRESS, IStakingABI, wallet1); + const stakeFromContractV1 = BigInt( + await contractV1.getStake(hotkey.publicKey, convertH160ToPublicKey(wallet1.address), netuid) + ); + + const contractV2 = new ethers.Contract(ISTAKING_V2_ADDRESS, IStakingV2ABI, wallet1); + // unit from contract V2 is RAO, not ETH + const stakeFromContractV2 = Number( + await contractV2.getStake(hotkey.publicKey, convertH160ToPublicKey(wallet1.address), netuid) + ); + + assert.equal(stakeFromContractV1, tao(stakeFromContractV2)) + + }) + + it("Can remove stake", async () => { + let netuid = (await api.query.SubtensorModule.TotalNetworks.getValue()) - 1 + const contract = new ethers.Contract( + ISTAKING_ADDRESS, + IStakingABI, + wallet1 + ); + + const stakeBeforeRemove = BigInt( + await contract.getStake(hotkey.publicKey, convertH160ToPublicKey(wallet1.address), netuid) + ); + + let stakeBalance = raoToEth(tao(10)) + const tx = await contract.removeStake(hotkey.publicKey, stakeBalance, netuid) + await tx.wait() + + const stakeAfterRemove = BigInt( + await contract.getStake(hotkey.publicKey, convertH160ToPublicKey(wallet1.address), netuid) + ); + assert.ok(stakeAfterRemove < stakeBeforeRemove) + + }) + + it("Can remove stake V2", async () => { + let netuid = (await api.query.SubtensorModule.TotalNetworks.getValue()) - 1 + const contract = new ethers.Contract( + ISTAKING_V2_ADDRESS, + IStakingV2ABI, + wallet2 + ); + + const stakeBeforeRemove = BigInt( + await contract.getStake(hotkey.publicKey, convertH160ToPublicKey(wallet2.address), netuid) + ); + + let stakeBalance = tao(10) + const tx = await contract.removeStake(hotkey.publicKey, stakeBalance, netuid) + await tx.wait() + + const stakeAfterRemove = BigInt( + await contract.getStake(hotkey.publicKey, convertH160ToPublicKey(wallet2.address), netuid) + ); + + assert.ok(stakeAfterRemove < stakeBeforeRemove) + }) + + it("Can add/remove proxy", async () => { + let netuid = (await api.query.SubtensorModule.TotalNetworks.getValue()) - 1 + // add/remove are done in a single test case, because we can't use the same private/public key + // between substrate and EVM, but to test the remove part, we must predefine the proxy first. + // it makes `remove` being dependent on `add`, because we should use `addProxy` from contract + // to prepare the proxy for `removeProxy` testing - the proxy is specified for the + // caller/origin. 
+ + // first, check we don't have proxies + const ss58Address = convertH160ToSS58(wallet1.address); + // the result include two items array, first one is delegate info, second one is balance + const initProxies = await api.query.Proxy.Proxies.getValue(ss58Address); + assert.equal(initProxies[0].length, 0); + + // intialize the contract + const contract = new ethers.Contract( + ISTAKING_ADDRESS, + IStakingABI, + wallet1 + ); + + // test "add" + let tx = await contract.addProxy(proxy.publicKey); + await tx.wait(); + + const proxiesAfterAdd = await api.query.Proxy.Proxies.getValue(ss58Address); + + assert.equal(proxiesAfterAdd[0][0].delegate, convertPublicKeyToSs58(proxy.publicKey)) + + let stakeBefore = await api.query.SubtensorModule.Alpha.getValue( + convertPublicKeyToSs58(hotkey.publicKey), + ss58Address, + netuid + ) + + const call = api.tx.SubtensorModule.add_stake({ + hotkey: convertPublicKeyToSs58(hotkey.publicKey), + netuid: netuid, + amount_staked: tao(1) + }) + await sendProxyCall(api, call.decodedCall, ss58Address, proxy) + + let stakeAfter = await api.query.SubtensorModule.Alpha.getValue( + convertPublicKeyToSs58(hotkey.publicKey), + ss58Address, + netuid + ) + + assert.ok(stakeAfter > stakeBefore) + // test "remove" + tx = await contract.removeProxy(proxy.publicKey); + await tx.wait(); + + const proxiesAfterRemove = await api.query.Proxy.Proxies.getValue(ss58Address); + assert.equal(proxiesAfterRemove[0].length, 0) + }); + + it("Can add/remove proxy V2", async () => { + let netuid = (await api.query.SubtensorModule.TotalNetworks.getValue()) - 1 + // add/remove are done in a single test case, because we can't use the same private/public key + // between substrate and EVM, but to test the remove part, we must predefine the proxy first. + // it makes `remove` being dependent on `add`, because we should use `addProxy` from contract + // to prepare the proxy for `removeProxy` testing - the proxy is specified for the + // caller/origin. 
+
+        // first, check we don't have proxies
+        const ss58Address = convertH160ToSS58(wallet1.address);
+        // the result is a two-item array: the first item is the list of delegates, the second is the balance
+        const initProxies = await api.query.Proxy.Proxies.getValue(ss58Address);
+        assert.equal(initProxies[0].length, 0);
+
+        // initialize the contract
+        // const signer = new ethers.Wallet(fundedEthWallet.privateKey, provider);
+        const contract = new ethers.Contract(
+            ISTAKING_V2_ADDRESS,
+            IStakingV2ABI,
+            wallet1
+        );
+
+        // test "add"
+        let tx = await contract.addProxy(proxy.publicKey);
+        await tx.wait();
+
+        const proxiesAfterAdd = await api.query.Proxy.Proxies.getValue(ss58Address);
+
+        assert.equal(proxiesAfterAdd[0][0].delegate, convertPublicKeyToSs58(proxy.publicKey))
+
+        let stakeBefore = await api.query.SubtensorModule.Alpha.getValue(
+            convertPublicKeyToSs58(hotkey.publicKey),
+            ss58Address,
+            netuid
+        )
+
+        const call = api.tx.SubtensorModule.add_stake({
+            hotkey: convertPublicKeyToSs58(hotkey.publicKey),
+            netuid: netuid,
+            amount_staked: tao(1)
+        })
+
+        await sendProxyCall(api, call.decodedCall, ss58Address, proxy)
+
+        let stakeAfter = await api.query.SubtensorModule.Alpha.getValue(
+            convertPublicKeyToSs58(hotkey.publicKey),
+            ss58Address,
+            netuid
+        )
+
+        assert.ok(stakeAfter > stakeBefore)
+        // test "remove"
+        tx = await contract.removeProxy(proxy.publicKey);
+        await tx.wait();
+
+        const proxiesAfterRemove = await api.query.Proxy.Proxies.getValue(ss58Address);
+        assert.equal(proxiesAfterRemove[0].length, 0)
+    });
+});
diff --git a/evm-tests/test/staking.precompile.reward.test.ts b/evm-tests/test/staking.precompile.reward.test.ts
new file mode 100644
index 0000000000..3600a6d08d
--- /dev/null
+++ b/evm-tests/test/staking.precompile.reward.test.ts
@@ -0,0 +1,105 @@
+import * as assert from "assert";
+import { getDevnetApi, getRandomSubstrateKeypair } from "../src/substrate"
+import { devnet } from "@polkadot-api/descriptors"
+import { TypedApi } from "polkadot-api";
+import { convertPublicKeyToSs58 } from "../src/address-utils"
+import { tao } from "../src/balance-math"
+import {
+    forceSetBalanceToSs58Address, addNewSubnetwork, burnedRegister,
+    setTxRateLimit, setTempo, setWeightsSetRateLimit, setSubnetOwnerCut, setMaxAllowedUids,
+    setMinDelegateTake, becomeDelegate, setActivityCutoff, addStake, setWeight, rootRegister
+} from "../src/subtensor"
+
+describe("Test staking precompile rewards", () => {
+    const hotkey = getRandomSubstrateKeypair();
+    const coldkey = getRandomSubstrateKeypair();
+
+    const validator = getRandomSubstrateKeypair();
+    const miner = getRandomSubstrateKeypair();
+    const nominator = getRandomSubstrateKeypair();
+
+    let api: TypedApi<typeof devnet>
+
+    before(async () => {
+        const root_netuid = 0;
+        const root_tempo = 1; // need the root epoch to happen before the subnet tempo
+        const subnet_tempo = 1;
+        api = await getDevnetApi()
+
+        // await forceSetBalanceToSs58Address(api, convertPublicKeyToSs58(alice.publicKey))
+        await forceSetBalanceToSs58Address(api, convertPublicKeyToSs58(hotkey.publicKey))
+        await forceSetBalanceToSs58Address(api, convertPublicKeyToSs58(coldkey.publicKey))
+        await forceSetBalanceToSs58Address(api, convertPublicKeyToSs58(validator.publicKey))
+        await forceSetBalanceToSs58Address(api, convertPublicKeyToSs58(miner.publicKey))
+        await forceSetBalanceToSs58Address(api, convertPublicKeyToSs58(nominator.publicKey))
+        // await forceSetBalanceToEthAddress(api, wallet1.address)
+        // await forceSetBalanceToEthAddress(api, wallet2.address)
+        let netuid = await 
addNewSubnetwork(api, hotkey, coldkey) + + console.log("test the case on subnet ", netuid) + + await setTxRateLimit(api, BigInt(0)) + await setTempo(api, root_netuid, root_tempo) + await setTempo(api, netuid, subnet_tempo) + await setWeightsSetRateLimit(api, netuid, BigInt(0)) + + await burnedRegister(api, netuid, convertPublicKeyToSs58(validator.publicKey), coldkey) + await burnedRegister(api, netuid, convertPublicKeyToSs58(miner.publicKey), coldkey) + await burnedRegister(api, netuid, convertPublicKeyToSs58(nominator.publicKey), coldkey) + await setSubnetOwnerCut(api, 0) + await setActivityCutoff(api, netuid, 65535) + await setMaxAllowedUids(api, netuid, 65535) + await setMinDelegateTake(api, 0) + await becomeDelegate(api, convertPublicKeyToSs58(validator.publicKey), coldkey) + await becomeDelegate(api, convertPublicKeyToSs58(miner.publicKey), coldkey) + }) + + it("Staker receives rewards", async () => { + let netuid = (await api.query.SubtensorModule.TotalNetworks.getValue()) - 1 + + await addStake(api, netuid, convertPublicKeyToSs58(miner.publicKey), tao(1), coldkey) + await addStake(api, netuid, convertPublicKeyToSs58(nominator.publicKey), tao(1), coldkey) + + await addStake(api, netuid, convertPublicKeyToSs58(validator.publicKey), tao(100), coldkey) + + const miner_alpha_before_emission = await api.query.SubtensorModule.Alpha.getValue( + convertPublicKeyToSs58(miner.publicKey), + convertPublicKeyToSs58(coldkey.publicKey), + netuid + ) + + await setWeight(api, netuid, [0, 1], [0xffff, 0xffff], BigInt(0), validator) + await rootRegister(api, convertPublicKeyToSs58(validator.publicKey), coldkey) + + let index = 0; + while (index < 60) { + const pending = await api.query.SubtensorModule.PendingEmission.getValue(netuid); + if (pending > 0) { + console.log("pending amount is ", pending); + break; + } + + await new Promise((resolve) => setTimeout(resolve, 1000)); + console.log("wait for the pendingEmission update"); + index += 1; + } + + index = 0; + while (index < 60) { + let miner_current_alpha = await api.query.SubtensorModule.Alpha.getValue( + convertPublicKeyToSs58(miner.publicKey), + convertPublicKeyToSs58(coldkey.publicKey), + netuid + ) + + if (miner_current_alpha > miner_alpha_before_emission) { + console.log("miner got reward"); + break; + } + + await new Promise((resolve) => setTimeout(resolve, 1000)); + console.log(" waiting for emission"); + index += 1; + } + }) +}) diff --git a/evm-tests/test/subnet.precompile.hyperparameter.test.ts b/evm-tests/test/subnet.precompile.hyperparameter.test.ts new file mode 100644 index 0000000000..1805b85ce9 --- /dev/null +++ b/evm-tests/test/subnet.precompile.hyperparameter.test.ts @@ -0,0 +1,442 @@ +import * as assert from "assert"; + +import { getDevnetApi, getRandomSubstrateKeypair } from "../src/substrate" +import { devnet } from "@polkadot-api/descriptors" +import { TypedApi } from "polkadot-api"; +import { convertPublicKeyToSs58 } from "../src/address-utils" +import { generateRandomEthersWallet } from "../src/utils"; +import { ISubnetABI, ISUBNET_ADDRESS } from "../src/contracts/subnet" +import { ethers } from "ethers" +import { forceSetBalanceToEthAddress, forceSetBalanceToSs58Address } from "../src/subtensor" + +describe("Test the Subnet precompile contract", () => { + // init eth part + const wallet = generateRandomEthersWallet(); + // init substrate part + + const hotkey1 = getRandomSubstrateKeypair(); + const hotkey2 = getRandomSubstrateKeypair(); + let api: TypedApi + + before(async () => { + // init variables got from await and 
async + api = await getDevnetApi() + + await forceSetBalanceToSs58Address(api, convertPublicKeyToSs58(hotkey1.publicKey)) + await forceSetBalanceToSs58Address(api, convertPublicKeyToSs58(hotkey2.publicKey)) + await forceSetBalanceToEthAddress(api, wallet.address) + }) + + it("Can register network without identity info", async () => { + const totalNetwork = await api.query.SubtensorModule.TotalNetworks.getValue() + + const contract = new ethers.Contract(ISUBNET_ADDRESS, ISubnetABI, wallet); + const tx = await contract.registerNetwork(hotkey1.publicKey); + await tx.wait(); + + const totalNetworkAfterAdd = await api.query.SubtensorModule.TotalNetworks.getValue() + assert.ok(totalNetwork + 1 === totalNetworkAfterAdd) + }); + + it("Can register network with identity info", async () => { + const totalNetwork = await api.query.SubtensorModule.TotalNetworks.getValue() + + const contract = new ethers.Contract(ISUBNET_ADDRESS, ISubnetABI, wallet); + const tx = await contract.registerNetwork(hotkey2.publicKey, + "name", + "repo", + "contact", + "subnetUrl", + "discord", + "description", + "additional" + ); + await tx.wait(); + + const totalNetworkAfterAdd = await api.query.SubtensorModule.TotalNetworks.getValue() + assert.ok(totalNetwork + 1 === totalNetworkAfterAdd) + }); + + it("Can set subnet parameter", async () => { + + const totalNetwork = await api.query.SubtensorModule.TotalNetworks.getValue() + const contract = new ethers.Contract(ISUBNET_ADDRESS, ISubnetABI, wallet); + const netuid = totalNetwork - 1; + + // servingRateLimit hyperparameter + { + const newValue = 100; + const tx = await contract.setServingRateLimit(netuid, newValue); + await tx.wait(); + + let onchainValue = await api.query.SubtensorModule.ServingRateLimit.getValue(netuid) + + + let valueFromContract = Number( + await contract.getServingRateLimit(netuid) + ); + + assert.equal(valueFromContract, newValue) + assert.equal(valueFromContract, onchainValue); + } + + // minDifficulty hyperparameter + // + // disabled: only by sudo + // + // newValue = 101; + // tx = await contract.setMinDifficulty(netuid, newValue); + // await tx.wait(); + + // await usingApi(async (api) => { + // onchainValue = Number( + // await api.query.subtensorModule.minDifficulty(netuid) + // ); + // }); + + // valueFromContract = Number(await contract.getMinDifficulty(netuid)); + + // expect(valueFromContract).to.eq(newValue); + // expect(valueFromContract).to.eq(onchainValue); + + // maxDifficulty hyperparameter + + { + const newValue = 102; + const tx = await contract.setMaxDifficulty(netuid, newValue); + await tx.wait(); + + let onchainValue = await api.query.SubtensorModule.MaxDifficulty.getValue(netuid) + + + let valueFromContract = Number( + await contract.getMaxDifficulty(netuid) + ); + + assert.equal(valueFromContract, newValue) + assert.equal(valueFromContract, onchainValue); + } + + // weightsVersionKey hyperparameter + { + const newValue = 103; + const tx = await contract.setWeightsVersionKey(netuid, newValue); + await tx.wait(); + + let onchainValue = await api.query.SubtensorModule.WeightsVersionKey.getValue(netuid) + + + let valueFromContract = Number( + await contract.getWeightsVersionKey(netuid) + ); + + assert.equal(valueFromContract, newValue) + assert.equal(valueFromContract, onchainValue); + } + // weightsSetRateLimit hyperparameter + { + const newValue = 104; + const tx = await contract.setWeightsSetRateLimit(netuid, newValue); + await tx.wait(); + + let onchainValue = await api.query.SubtensorModule.WeightsSetRateLimit.getValue(netuid) 
+ + + let valueFromContract = Number( + await contract.getWeightsSetRateLimit(netuid) + ); + + assert.equal(valueFromContract, newValue) + assert.equal(valueFromContract, onchainValue); + } + + // adjustmentAlpha hyperparameter + { + const newValue = 105; + const tx = await contract.setAdjustmentAlpha(netuid, newValue); + await tx.wait(); + + let onchainValue = await api.query.SubtensorModule.AdjustmentAlpha.getValue(netuid) + + + let valueFromContract = Number( + await contract.getAdjustmentAlpha(netuid) + ); + + assert.equal(valueFromContract, newValue) + assert.equal(valueFromContract, onchainValue); + } + + // maxWeightLimit hyperparameter + { + const newValue = 106; + const tx = await contract.setMaxWeightLimit(netuid, newValue); + await tx.wait(); + + let onchainValue = await api.query.SubtensorModule.MaxWeightsLimit.getValue(netuid) + + + let valueFromContract = Number( + await contract.getMaxWeightLimit(netuid) + ); + + assert.equal(valueFromContract, newValue) + assert.equal(valueFromContract, onchainValue); + } + // immunityPeriod hyperparameter + { + const newValue = 107; + const tx = await contract.setImmunityPeriod(netuid, newValue); + await tx.wait(); + + let onchainValue = await api.query.SubtensorModule.ImmunityPeriod.getValue(netuid) + + + let valueFromContract = Number( + await contract.getImmunityPeriod(netuid) + ); + + assert.equal(valueFromContract, newValue) + assert.equal(valueFromContract, onchainValue); + } + + // minAllowedWeights hyperparameter + { + const newValue = 108; + const tx = await contract.setMinAllowedWeights(netuid, newValue); + await tx.wait(); + + let onchainValue = await api.query.SubtensorModule.MinAllowedWeights.getValue(netuid) + + + let valueFromContract = Number( + await contract.getMinAllowedWeights(netuid) + ); + + assert.equal(valueFromContract, newValue) + assert.equal(valueFromContract, onchainValue); + } + + // kappa hyperparameter + { + const newValue = 109; + const tx = await contract.setKappa(netuid, newValue); + await tx.wait(); + + let onchainValue = await api.query.SubtensorModule.Kappa.getValue(netuid) + + + let valueFromContract = Number( + await contract.getKappa(netuid) + ); + + assert.equal(valueFromContract, newValue) + assert.equal(valueFromContract, onchainValue); + } + + // rho hyperparameter + { + const newValue = 110; + const tx = await contract.setRho(netuid, newValue); + await tx.wait(); + + let onchainValue = await api.query.SubtensorModule.Rho.getValue(netuid) + + + let valueFromContract = Number( + await contract.getRho(netuid) + ); + + assert.equal(valueFromContract, newValue) + assert.equal(valueFromContract, onchainValue); + } + + // activityCutoff hyperparameter + { + const newValue = 111; + const tx = await contract.setActivityCutoff(netuid, newValue); + await tx.wait(); + + let onchainValue = await api.query.SubtensorModule.ActivityCutoff.getValue(netuid) + + + let valueFromContract = Number( + await contract.getActivityCutoff(netuid) + ); + + assert.equal(valueFromContract, newValue) + assert.equal(valueFromContract, onchainValue); + } + + // networkRegistrationAllowed hyperparameter + { + const newValue = true; + const tx = await contract.setNetworkRegistrationAllowed(netuid, newValue); + await tx.wait(); + + let onchainValue = await api.query.SubtensorModule.NetworkRegistrationAllowed.getValue(netuid) + + + let valueFromContract = Boolean( + await contract.getNetworkRegistrationAllowed(netuid) + ); + + assert.equal(valueFromContract, newValue) + assert.equal(valueFromContract, onchainValue); + } + + // 
networkPowRegistrationAllowed hyperparameter + { + const newValue = true; + const tx = await contract.setNetworkPowRegistrationAllowed(netuid, newValue); + await tx.wait(); + + let onchainValue = await api.query.SubtensorModule.NetworkPowRegistrationAllowed.getValue(netuid) + + + let valueFromContract = Boolean( + await contract.getNetworkPowRegistrationAllowed(netuid) + ); + + assert.equal(valueFromContract, newValue) + assert.equal(valueFromContract, onchainValue); + } + + // minBurn hyperparameter. only sudo can set it now + // newValue = 112; + + // tx = await contract.setMinBurn(netuid, newValue); + // await tx.wait(); + + // await usingApi(async (api) => { + // onchainValue = Number( + // await api.query.subtensorModule.minBurn(netuid) + // ); + // }); + + // valueFromContract = Number(await contract.getMinBurn(netuid)); + + // expect(valueFromContract).to.eq(newValue); + // expect(valueFromContract).to.eq(onchainValue); + + // maxBurn hyperparameter + { + const newValue = 113; + const tx = await contract.setMaxBurn(netuid, newValue); + await tx.wait(); + + let onchainValue = await api.query.SubtensorModule.MaxBurn.getValue(netuid) + + + let valueFromContract = Number( + await contract.getMaxBurn(netuid) + ); + + assert.equal(valueFromContract, newValue) + assert.equal(valueFromContract, onchainValue); + } + + + // difficulty hyperparameter (disabled: sudo only) + // newValue = 114; + + // tx = await contract.setDifficulty(netuid, newValue); + // await tx.wait(); + + // await usingApi(async (api) => { + // onchainValue = Number( + // await api.query.subtensorModule.difficulty(netuid) + // ); + // }); + + // valueFromContract = Number(await contract.getDifficulty(netuid)); + + // expect(valueFromContract).to.eq(newValue); + // expect(valueFromContract).to.eq(onchainValue); + + // bondsMovingAverage hyperparameter + { + const newValue = 115; + const tx = await contract.setBondsMovingAverage(netuid, newValue); + await tx.wait(); + + let onchainValue = await api.query.SubtensorModule.BondsMovingAverage.getValue(netuid) + + + let valueFromContract = Number( + await contract.getBondsMovingAverage(netuid) + ); + + assert.equal(valueFromContract, newValue) + assert.equal(valueFromContract, onchainValue); + } + + + // commitRevealWeightsEnabled hyperparameter + { + const newValue = true; + const tx = await contract.setCommitRevealWeightsEnabled(netuid, newValue); + await tx.wait(); + + let onchainValue = await api.query.SubtensorModule.CommitRevealWeightsEnabled.getValue(netuid) + + + let valueFromContract = Boolean( + await contract.getCommitRevealWeightsEnabled(netuid) + ); + + assert.equal(valueFromContract, newValue) + assert.equal(valueFromContract, onchainValue); + } + + // liquidAlphaEnabled hyperparameter + { + const newValue = true; + const tx = await contract.setLiquidAlphaEnabled(netuid, newValue); + await tx.wait(); + + let onchainValue = await api.query.SubtensorModule.LiquidAlphaOn.getValue(netuid) + + + let valueFromContract = Boolean( + await contract.getLiquidAlphaEnabled(netuid) + ); + + assert.equal(valueFromContract, newValue) + assert.equal(valueFromContract, onchainValue); + } + + // alphaValues hyperparameter + { + const newValue = [118, 52429]; + const tx = await contract.setAlphaValues(netuid, newValue[0], newValue[1]); + await tx.wait(); + + let onchainValue = await api.query.SubtensorModule.AlphaValues.getValue(netuid) + + let value = await contract.getAlphaValues(netuid) + let valueFromContract = [Number(value[0]), Number(value[1])] + + 
assert.equal(valueFromContract[0], newValue[0]) + assert.equal(valueFromContract[1], newValue[1]) + assert.equal(valueFromContract[0], onchainValue[0]); + assert.equal(valueFromContract[1], onchainValue[1]); + } + + // commitRevealWeightsInterval hyperparameter + { + const newValue = 119; + const tx = await contract.setCommitRevealWeightsInterval(netuid, newValue); + await tx.wait(); + + let onchainValue = await api.query.SubtensorModule.RevealPeriodEpochs.getValue(netuid) + + let valueFromContract = Number( + await contract.getCommitRevealWeightsInterval(netuid) + ); + + assert.equal(valueFromContract, newValue) + assert.equal(valueFromContract, onchainValue); + } + }) +}); \ No newline at end of file diff --git a/evm-tests/tsconfig.json b/evm-tests/tsconfig.json new file mode 100644 index 0000000000..c9c555d96f --- /dev/null +++ b/evm-tests/tsconfig.json @@ -0,0 +1,111 @@ +{ + "compilerOptions": { + /* Visit https://aka.ms/tsconfig to read more about this file */ + + /* Projects */ + // "incremental": true, /* Save .tsbuildinfo files to allow for incremental compilation of projects. */ + // "composite": true, /* Enable constraints that allow a TypeScript project to be used with project references. */ + // "tsBuildInfoFile": "./.tsbuildinfo", /* Specify the path to .tsbuildinfo incremental compilation file. */ + // "disableSourceOfProjectReferenceRedirect": true, /* Disable preferring source files instead of declaration files when referencing composite projects. */ + // "disableSolutionSearching": true, /* Opt a project out of multi-project reference checking when editing. */ + // "disableReferencedProjectLoad": true, /* Reduce the number of projects loaded automatically by TypeScript. */ + + /* Language and Environment */ + "target": "es2016", /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */ + // "lib": [], /* Specify a set of bundled library declaration files that describe the target runtime environment. */ + // "jsx": "preserve", /* Specify what JSX code is generated. */ + // "experimentalDecorators": true, /* Enable experimental support for legacy experimental decorators. */ + // "emitDecoratorMetadata": true, /* Emit design-type metadata for decorated declarations in source files. */ + // "jsxFactory": "", /* Specify the JSX factory function used when targeting React JSX emit, e.g. 'React.createElement' or 'h'. */ + // "jsxFragmentFactory": "", /* Specify the JSX Fragment reference used for fragments when targeting React JSX emit e.g. 'React.Fragment' or 'Fragment'. */ + // "jsxImportSource": "", /* Specify module specifier used to import the JSX factory functions when using 'jsx: react-jsx*'. */ + // "reactNamespace": "", /* Specify the object invoked for 'createElement'. This only applies when targeting 'react' JSX emit. */ + // "noLib": true, /* Disable including any library files, including the default lib.d.ts. */ + // "useDefineForClassFields": true, /* Emit ECMAScript-standard-compliant class fields. */ + // "moduleDetection": "auto", /* Control what method is used to detect module-format JS files. */ + + /* Modules */ + "module": "commonjs", /* Specify what module code is generated. */ + // "rootDir": "./", /* Specify the root folder within your source files. */ + // "moduleResolution": "node10", /* Specify how TypeScript looks up a file from a given module specifier. */ + // "baseUrl": "./", /* Specify the base directory to resolve non-relative module names. 
*/ + // "paths": {}, /* Specify a set of entries that re-map imports to additional lookup locations. */ + // "rootDirs": [], /* Allow multiple folders to be treated as one when resolving modules. */ + // "typeRoots": [], /* Specify multiple folders that act like './node_modules/@types'. */ + // "types": [], /* Specify type package names to be included without being referenced in a source file. */ + // "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */ + // "moduleSuffixes": [], /* List of file name suffixes to search when resolving a module. */ + // "allowImportingTsExtensions": true, /* Allow imports to include TypeScript file extensions. Requires '--moduleResolution bundler' and either '--noEmit' or '--emitDeclarationOnly' to be set. */ + // "rewriteRelativeImportExtensions": true, /* Rewrite '.ts', '.tsx', '.mts', and '.cts' file extensions in relative import paths to their JavaScript equivalent in output files. */ + // "resolvePackageJsonExports": true, /* Use the package.json 'exports' field when resolving package imports. */ + // "resolvePackageJsonImports": true, /* Use the package.json 'imports' field when resolving imports. */ + // "customConditions": [], /* Conditions to set in addition to the resolver-specific defaults when resolving imports. */ + // "noUncheckedSideEffectImports": true, /* Check side effect imports. */ + // "resolveJsonModule": true, /* Enable importing .json files. */ + // "allowArbitraryExtensions": true, /* Enable importing files with any extension, provided a declaration file is present. */ + // "noResolve": true, /* Disallow 'import's, 'require's or ''s from expanding the number of files TypeScript should add to a project. */ + + /* JavaScript Support */ + // "allowJs": true, /* Allow JavaScript files to be a part of your program. Use the 'checkJS' option to get errors from these files. */ + // "checkJs": true, /* Enable error reporting in type-checked JavaScript files. */ + // "maxNodeModuleJsDepth": 1, /* Specify the maximum folder depth used for checking JavaScript files from 'node_modules'. Only applicable with 'allowJs'. */ + + /* Emit */ + // "declaration": true, /* Generate .d.ts files from TypeScript and JavaScript files in your project. */ + // "declarationMap": true, /* Create sourcemaps for d.ts files. */ + // "emitDeclarationOnly": true, /* Only output d.ts files and not JavaScript files. */ + // "sourceMap": true, /* Create source map files for emitted JavaScript files. */ + // "inlineSourceMap": true, /* Include sourcemap files inside the emitted JavaScript. */ + // "noEmit": true, /* Disable emitting files from a compilation. */ + // "outFile": "./", /* Specify a file that bundles all outputs into one JavaScript file. If 'declaration' is true, also designates a file that bundles all .d.ts output. */ + // "outDir": "./", /* Specify an output folder for all emitted files. */ + // "removeComments": true, /* Disable emitting comments. */ + // "importHelpers": true, /* Allow importing helper functions from tslib once per project, instead of including them per-file. */ + // "downlevelIteration": true, /* Emit more compliant, but verbose and less performant JavaScript for iteration. */ + // "sourceRoot": "", /* Specify the root path for debuggers to find the reference source code. */ + // "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */ + // "inlineSources": true, /* Include source code in the sourcemaps inside the emitted JavaScript. 
*/ + // "emitBOM": true, /* Emit a UTF-8 Byte Order Mark (BOM) in the beginning of output files. */ + // "newLine": "crlf", /* Set the newline character for emitting files. */ + // "stripInternal": true, /* Disable emitting declarations that have '@internal' in their JSDoc comments. */ + // "noEmitHelpers": true, /* Disable generating custom helper functions like '__extends' in compiled output. */ + // "noEmitOnError": true, /* Disable emitting files if any type checking errors are reported. */ + // "preserveConstEnums": true, /* Disable erasing 'const enum' declarations in generated code. */ + // "declarationDir": "./", /* Specify the output directory for generated declaration files. */ + + /* Interop Constraints */ + // "isolatedModules": true, /* Ensure that each file can be safely transpiled without relying on other imports. */ + // "verbatimModuleSyntax": true, /* Do not transform or elide any imports or exports not marked as type-only, ensuring they are written in the output file's format based on the 'module' setting. */ + // "isolatedDeclarations": true, /* Require sufficient annotation on exports so other tools can trivially generate declaration files. */ + // "allowSyntheticDefaultImports": true, /* Allow 'import x from y' when a module doesn't have a default export. */ + "esModuleInterop": true, /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables 'allowSyntheticDefaultImports' for type compatibility. */ + // "preserveSymlinks": true, /* Disable resolving symlinks to their realpath. This correlates to the same flag in node. */ + "forceConsistentCasingInFileNames": true, /* Ensure that casing is correct in imports. */ + + /* Type Checking */ + "strict": true, /* Enable all strict type-checking options. */ + // "noImplicitAny": true, /* Enable error reporting for expressions and declarations with an implied 'any' type. */ + // "strictNullChecks": true, /* When type checking, take into account 'null' and 'undefined'. */ + // "strictFunctionTypes": true, /* When assigning functions, check to ensure parameters and the return values are subtype-compatible. */ + // "strictBindCallApply": true, /* Check that the arguments for 'bind', 'call', and 'apply' methods match the original function. */ + // "strictPropertyInitialization": true, /* Check for class properties that are declared but not set in the constructor. */ + // "strictBuiltinIteratorReturn": true, /* Built-in iterators are instantiated with a 'TReturn' type of 'undefined' instead of 'any'. */ + // "noImplicitThis": true, /* Enable error reporting when 'this' is given the type 'any'. */ + // "useUnknownInCatchVariables": true, /* Default catch clause variables as 'unknown' instead of 'any'. */ + // "alwaysStrict": true, /* Ensure 'use strict' is always emitted. */ + // "noUnusedLocals": true, /* Enable error reporting when local variables aren't read. */ + // "noUnusedParameters": true, /* Raise an error when a function parameter isn't read. */ + // "exactOptionalPropertyTypes": true, /* Interpret optional property types as written, rather than adding 'undefined'. */ + // "noImplicitReturns": true, /* Enable error reporting for codepaths that do not explicitly return in a function. */ + // "noFallthroughCasesInSwitch": true, /* Enable error reporting for fallthrough cases in switch statements. */ + // "noUncheckedIndexedAccess": true, /* Add 'undefined' to a type when accessed using an index. 
*/ + // "noImplicitOverride": true, /* Ensure overriding members in derived classes are marked with an override modifier. */ + // "noPropertyAccessFromIndexSignature": true, /* Enforces using indexed accessors for keys declared using an indexed type. */ + // "allowUnusedLabels": true, /* Disable error reporting for unused labels. */ + // "allowUnreachableCode": true, /* Disable error reporting for unreachable code. */ + + /* Completeness */ + // "skipDefaultLibCheck": true, /* Skip type checking .d.ts files that are included with TypeScript. */ + "skipLibCheck": true /* Skip type checking all .d.ts files. */ + } +} diff --git a/pallets/admin-utils/src/lib.rs b/pallets/admin-utils/src/lib.rs index 70e4cdd753..0ca6fee2c8 100644 --- a/pallets/admin-utils/src/lib.rs +++ b/pallets/admin-utils/src/lib.rs @@ -1441,6 +1441,35 @@ pub mod pallet { ); Ok(()) } + + /// + /// + /// # Arguments + /// * `origin` - The origin of the call, which must be the root account. + /// * `ema_alpha_period` - Number of blocks for EMA price to halve + /// + /// # Errors + /// * `BadOrigin` - If the caller is not the root account. + /// + /// # Weight + /// Weight is handled by the `#[pallet::weight]` attribute. + #[pallet::call_index(65)] + #[pallet::weight((0, DispatchClass::Operational, Pays::No))] + pub fn sudo_set_ema_price_halving_period( + origin: OriginFor, + netuid: u16, + ema_halving: u64, + ) -> DispatchResult { + ensure_root(origin)?; + pallet_subtensor::EMAPriceHalvingBlocks::::set(netuid, ema_halving); + + log::debug!( + "EMAPriceHalvingBlocks( netuid: {:?}, ema_halving: {:?} )", + netuid, + ema_halving + ); + Ok(()) + } } } diff --git a/pallets/admin-utils/src/tests/mock.rs b/pallets/admin-utils/src/tests/mock.rs index 0c443255c4..fc0d016198 100644 --- a/pallets/admin-utils/src/tests/mock.rs +++ b/pallets/admin-utils/src/tests/mock.rs @@ -134,6 +134,7 @@ parameter_types! { pub const InitialColdkeySwapScheduleDuration: u64 = 5 * 24 * 60 * 60 / 12; // 5 days pub const InitialDissolveNetworkScheduleDuration: u64 = 5 * 24 * 60 * 60 / 12; // 5 days pub const InitialTaoWeight: u64 = u64::MAX/10; // 10% global weight. 
+ pub const InitialEmaPriceHalvingPeriod: u64 = 201_600_u64; // 4 weeks } impl pallet_subtensor::Config for Test { @@ -197,6 +198,7 @@ impl pallet_subtensor::Config for Test { type InitialColdkeySwapScheduleDuration = InitialColdkeySwapScheduleDuration; type InitialDissolveNetworkScheduleDuration = InitialDissolveNetworkScheduleDuration; type InitialTaoWeight = InitialTaoWeight; + type InitialEmaPriceHalvingPeriod = InitialEmaPriceHalvingPeriod; } #[derive_impl(frame_system::config_preludes::TestDefaultConfig)] diff --git a/pallets/admin-utils/src/tests/mod.rs b/pallets/admin-utils/src/tests/mod.rs index 259961e656..ed7ddcbcb2 100644 --- a/pallets/admin-utils/src/tests/mod.rs +++ b/pallets/admin-utils/src/tests/mod.rs @@ -1669,3 +1669,45 @@ fn test_sudo_set_subnet_owner_hotkey() { ); }); } + +// cargo test --package pallet-admin-utils --lib -- tests::test_sudo_set_ema_halving --exact --show-output +#[test] +fn test_sudo_set_ema_halving() { + new_test_ext().execute_with(|| { + let netuid: u16 = 1; + let to_be_set: u64 = 10; + add_network(netuid, 10); + + let value_before: u64 = pallet_subtensor::EMAPriceHalvingBlocks::::get(netuid); + assert_eq!( + AdminUtils::sudo_set_ema_price_halving_period( + <::RuntimeOrigin>::signed(U256::from(1)), + netuid, + to_be_set + ), + Err(DispatchError::BadOrigin) + ); + let value_after_0: u64 = pallet_subtensor::EMAPriceHalvingBlocks::::get(netuid); + assert_eq!(value_after_0, value_before); + + let owner = U256::from(10); + pallet_subtensor::SubnetOwner::::insert(netuid, owner); + assert_eq!( + AdminUtils::sudo_set_ema_price_halving_period( + <::RuntimeOrigin>::signed(owner), + netuid, + to_be_set + ), + Err(DispatchError::BadOrigin) + ); + let value_after_1: u64 = pallet_subtensor::EMAPriceHalvingBlocks::::get(netuid); + assert_eq!(value_after_1, value_before); + assert_ok!(AdminUtils::sudo_set_ema_price_halving_period( + <::RuntimeOrigin>::root(), + netuid, + to_be_set + )); + let value_after_2: u64 = pallet_subtensor::EMAPriceHalvingBlocks::::get(netuid); + assert_eq!(value_after_2, to_be_set); + }); +} diff --git a/pallets/proxy/Cargo.toml b/pallets/proxy/Cargo.toml new file mode 100644 index 0000000000..f3a97dfedf --- /dev/null +++ b/pallets/proxy/Cargo.toml @@ -0,0 +1,57 @@ +[package] +name = "pallet-proxy" +version = "38.0.0" +authors = ["Bittensor Nucleus Team"] +edition = "2021" +license = "Apache-2.0" +homepage = "https://bittensor.com" +description = "FRAME proxying pallet" +readme = "README.md" + +[lints] +workspace = true + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu"] + +[dependencies] +codec = { features = ["max-encoded-len"], workspace = true } +scale-info = { features = ["derive"], workspace = true } +frame-benchmarking = { optional = true, workspace = true } +frame-support.workspace = true +frame-system.workspace = true +sp-io.workspace = true +sp-runtime.workspace = true +subtensor-macros.workspace = true + +[dev-dependencies] +pallet-balances = { default-features = true, workspace = true } +pallet-utility = { default-features = true, workspace = true } +sp-core = { default-features = true, workspace = true } + +[features] +default = ["std"] +std = [ + "codec/std", + "frame-benchmarking?/std", + "frame-support/std", + "frame-system/std", + "scale-info/std", + "sp-io/std", + "sp-runtime/std", +] +runtime-benchmarks = [ + "frame-benchmarking/runtime-benchmarks", + "frame-support/runtime-benchmarks", + "frame-system/runtime-benchmarks", + "sp-runtime/runtime-benchmarks", + "pallet-balances/runtime-benchmarks", + 
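The admin-utils changes above seed `InitialEmaPriceHalvingPeriod` with 201_600 blocks, annotated as four weeks; at a 12-second block time that figure checks out, and the new root-only `sudo_set_ema_price_halving_period` extrinsic (exercised by `test_sudo_set_ema_halving`) can override it per netuid. A standalone sanity check of the arithmetic, not part of the patch:

```rust
// Check that 201_600 blocks is four weeks of 12-second blocks, matching the
// `InitialEmaPriceHalvingPeriod` constant in the mock above.
const SECS_PER_WEEK: u64 = 7 * 24 * 60 * 60; // 604_800
const BLOCK_TIME_SECS: u64 = 12;

fn ema_halving_blocks(weeks: u64) -> u64 {
    weeks * SECS_PER_WEEK / BLOCK_TIME_SECS
}

fn main() {
    assert_eq!(ema_halving_blocks(4), 201_600);
}
```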
"pallet-utility/runtime-benchmarks" +] +try-runtime = [ + "frame-support/try-runtime", + "frame-system/try-runtime", + "sp-runtime/try-runtime", + "pallet-balances/try-runtime", + "pallet-utility/try-runtime" +] diff --git a/pallets/proxy/README.md b/pallets/proxy/README.md new file mode 100644 index 0000000000..290c49c050 --- /dev/null +++ b/pallets/proxy/README.md @@ -0,0 +1,26 @@ +# Proxy Module +A module allowing accounts to give permission to other accounts to dispatch types of calls from +their signed origin. + +The accounts to which permission is delegated may be required to announce the action that they +wish to execute some duration prior to execution happens. In this case, the target account may +reject the announcement and in doing so, veto the execution. + +- [`Config`](https://docs.rs/pallet-proxy/latest/pallet_proxy/pallet/trait.Config.html) +- [`Call`](https://docs.rs/pallet-proxy/latest/pallet_proxy/pallet/enum.Call.html) + +## Overview + +## Interface + +### Dispatchable Functions + +[`Call`]: ./enum.Call.html +[`Config`]: ./trait.Config.html + +License: Apache-2.0 + + +## Release + +Polkadot SDK stable2409 diff --git a/pallets/proxy/src/benchmarking.rs b/pallets/proxy/src/benchmarking.rs new file mode 100644 index 0000000000..f519c0f0c3 --- /dev/null +++ b/pallets/proxy/src/benchmarking.rs @@ -0,0 +1,261 @@ +// This file is part of Substrate. +// +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0/ +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +// Benchmarks for Proxy Pallet + +#![cfg(feature = "runtime-benchmarks")] + +use super::*; +use crate::Pallet as Proxy; +use alloc::{boxed::Box, vec}; +use frame_benchmarking::v1::{account, benchmarks, whitelisted_caller}; +use frame_system::{pallet_prelude::BlockNumberFor, RawOrigin}; +use sp_runtime::traits::{Bounded, CheckedDiv}; + +const SEED: u32 = 0; + +fn assert_last_event(generic_event: ::RuntimeEvent) { + frame_system::Pallet::::assert_last_event(generic_event.into()); +} + +fn half_max_balance() -> BalanceOf { + BalanceOf::::max_value() + .checked_div(&BalanceOf::::from(2_u32)) + .unwrap_or_else(BalanceOf::::max_value) +} + +fn add_proxies(n: u32, maybe_who: Option) -> Result<(), &'static str> { + let caller = maybe_who.unwrap_or_else(whitelisted_caller); + T::Currency::make_free_balance_be(&caller, half_max_balance::()); + for i in 0..n { + let real = T::Lookup::unlookup(account("target", i, SEED)); + + Proxy::::add_proxy( + RawOrigin::Signed(caller.clone()).into(), + real, + T::ProxyType::default(), + BlockNumberFor::::zero(), + )?; + } + Ok(()) +} + +fn add_announcements( + n: u32, + maybe_who: Option, + maybe_real: Option, +) -> Result<(), &'static str> { + let caller = maybe_who.unwrap_or_else(|| account("caller", 0, SEED)); + let caller_lookup = T::Lookup::unlookup(caller.clone()); + T::Currency::make_free_balance_be(&caller, half_max_balance::()); + let real = if let Some(real) = maybe_real { + real + } else { + let real = account("real", 0, SEED); + T::Currency::make_free_balance_be(&real, half_max_balance::()); + Proxy::::add_proxy( + RawOrigin::Signed(real.clone()).into(), + caller_lookup, + T::ProxyType::default(), + BlockNumberFor::::zero(), + )?; + real + }; + let real_lookup = T::Lookup::unlookup(real); + for _ in 0..n { + Proxy::::announce( + RawOrigin::Signed(caller.clone()).into(), + real_lookup.clone(), + T::CallHasher::hash_of(&("add_announcement", n)), + )?; + } + Ok(()) +} + +benchmarks! { + proxy { + let p in 1 .. (T::MaxProxies::get().saturating_sub(1)) => add_proxies::(p, None)?; + let caller: T::AccountId = account("target", p.saturating_sub(1), SEED); + T::Currency::make_free_balance_be(&caller, half_max_balance::()); + let real: T::AccountId = whitelisted_caller(); + let real_lookup = T::Lookup::unlookup(real); + let call: ::RuntimeCall = frame_system::Call::::remark { remark: vec![] }.into(); + }: _(RawOrigin::Signed(caller), real_lookup, Some(T::ProxyType::default()), Box::new(call)) + verify { + assert_last_event::(Event::ProxyExecuted { result: Ok(()) }.into()) + } + + proxy_announced { + let a in 0 .. T::MaxPending::get().saturating_sub(1); + let p in 1 .. 
(T::MaxProxies::get().saturating_sub(1)) => add_proxies::(p, None)?; + let caller: T::AccountId = account("pure", 0, SEED); + let delegate: T::AccountId = account("target", p.saturating_sub(1), SEED); + let delegate_lookup = T::Lookup::unlookup(delegate.clone()); + T::Currency::make_free_balance_be(&delegate, half_max_balance::()); + let real: T::AccountId = whitelisted_caller(); + let real_lookup = T::Lookup::unlookup(real); + let call: ::RuntimeCall = frame_system::Call::::remark { remark: vec![] }.into(); + Proxy::::announce( + RawOrigin::Signed(delegate.clone()).into(), + real_lookup.clone(), + T::CallHasher::hash_of(&call), + )?; + add_announcements::(a, Some(delegate.clone()), None)?; + }: _(RawOrigin::Signed(caller), delegate_lookup, real_lookup, Some(T::ProxyType::default()), Box::new(call)) + verify { + assert_last_event::(Event::ProxyExecuted { result: Ok(()) }.into()) + } + + remove_announcement { + let a in 0 .. T::MaxPending::get().saturating_sub(1); + let p in 1 .. (T::MaxProxies::get().saturating_sub(1)) => add_proxies::(p, None)?; + let caller: T::AccountId = account("target", p.saturating_sub(1), SEED); + T::Currency::make_free_balance_be(&caller, half_max_balance::()); + let real: T::AccountId = whitelisted_caller(); + let real_lookup = T::Lookup::unlookup(real); + let call: ::RuntimeCall = frame_system::Call::::remark { remark: vec![] }.into(); + Proxy::::announce( + RawOrigin::Signed(caller.clone()).into(), + real_lookup.clone(), + T::CallHasher::hash_of(&call), + )?; + add_announcements::(a, Some(caller.clone()), None)?; + }: _(RawOrigin::Signed(caller.clone()), real_lookup, T::CallHasher::hash_of(&call)) + verify { + let (announcements, _) = Announcements::::get(&caller); + assert_eq!(announcements.len() as u32, a); + } + + reject_announcement { + let a in 0 .. T::MaxPending::get().saturating_sub(1); + let p in 1 .. (T::MaxProxies::get().saturating_sub(1)) => add_proxies::(p, None)?; + let caller: T::AccountId = account("target", p.saturating_sub(1), SEED); + let caller_lookup = T::Lookup::unlookup(caller.clone()); + T::Currency::make_free_balance_be(&caller, half_max_balance::()); + let real: T::AccountId = whitelisted_caller(); + let real_lookup = T::Lookup::unlookup(real.clone()); + let call: ::RuntimeCall = frame_system::Call::::remark { remark: vec![] }.into(); + Proxy::::announce( + RawOrigin::Signed(caller.clone()).into(), + real_lookup, + T::CallHasher::hash_of(&call), + )?; + add_announcements::(a, Some(caller.clone()), None)?; + }: _(RawOrigin::Signed(real), caller_lookup, T::CallHasher::hash_of(&call)) + verify { + let (announcements, _) = Announcements::::get(&caller); + assert_eq!(announcements.len() as u32, a); + } + + announce { + let a in 0 .. T::MaxPending::get().saturating_sub(1); + let p in 1 .. (T::MaxProxies::get().saturating_sub(1)) => add_proxies::(p, None)?; + let caller: T::AccountId = account("target", p.saturating_sub(1), SEED); + T::Currency::make_free_balance_be(&caller, half_max_balance::()); + let real: T::AccountId = whitelisted_caller(); + let real_lookup = T::Lookup::unlookup(real.clone()); + add_announcements::(a, Some(caller.clone()), None)?; + let call: ::RuntimeCall = frame_system::Call::::remark { remark: vec![] }.into(); + let call_hash = T::CallHasher::hash_of(&call); + }: _(RawOrigin::Signed(caller.clone()), real_lookup, call_hash) + verify { + assert_last_event::(Event::Announced { real, proxy: caller, call_hash }.into()); + } + + add_proxy { + let p in 1 .. 
(T::MaxProxies::get().saturating_sub(1)) => add_proxies::(p, None)?; + let caller: T::AccountId = whitelisted_caller(); + let real = T::Lookup::unlookup(account("target", T::MaxProxies::get(), SEED)); + }: _( + RawOrigin::Signed(caller.clone()), + real, + T::ProxyType::default(), + BlockNumberFor::::zero() + ) + verify { + let (proxies, _) = Proxies::::get(caller); + assert_eq!(proxies.len() as u32, p.saturating_add(1)); + } + + remove_proxy { + let p in 1 .. (T::MaxProxies::get().saturating_sub(1)) => add_proxies::(p, None)?; + let caller: T::AccountId = whitelisted_caller(); + let delegate = T::Lookup::unlookup(account("target", 0, SEED)); + }: _( + RawOrigin::Signed(caller.clone()), + delegate, + T::ProxyType::default(), + BlockNumberFor::::zero() + ) + verify { + let (proxies, _) = Proxies::::get(caller); + assert_eq!(proxies.len() as u32, p.saturating_sub(1)); + } + + remove_proxies { + let p in 1 .. (T::MaxProxies::get().saturating_sub(1)) => add_proxies::(p, None)?; + let caller: T::AccountId = whitelisted_caller(); + }: _(RawOrigin::Signed(caller.clone())) + verify { + let (proxies, _) = Proxies::::get(caller); + assert_eq!(proxies.len() as u32, 0); + } + + create_pure { + let p in 1 .. (T::MaxProxies::get().saturating_sub(1)) => add_proxies::(p, None)?; + let caller: T::AccountId = whitelisted_caller(); + }: _( + RawOrigin::Signed(caller.clone()), + T::ProxyType::default(), + BlockNumberFor::::zero(), + 0 + ) + verify { + let pure_account = Pallet::::pure_account(&caller, &T::ProxyType::default(), 0, None); + assert_last_event::(Event::PureCreated { + pure: pure_account, + who: caller, + proxy_type: T::ProxyType::default(), + disambiguation_index: 0, + }.into()); + } + + kill_pure { + let p in 0 .. (T::MaxProxies::get().saturating_sub(2)); + + let caller: T::AccountId = whitelisted_caller(); + let caller_lookup = T::Lookup::unlookup(caller.clone()); + T::Currency::make_free_balance_be(&caller, BalanceOf::::max_value()); + Pallet::::create_pure( + RawOrigin::Signed(whitelisted_caller()).into(), + T::ProxyType::default(), + BlockNumberFor::::zero(), + 0 + )?; + let height = system::Pallet::::block_number(); + let ext_index = system::Pallet::::extrinsic_index().unwrap_or(0); + let pure_account = Pallet::::pure_account(&caller, &T::ProxyType::default(), 0, None); + + add_proxies::(p, Some(pure_account.clone()))?; + ensure!(Proxies::::contains_key(&pure_account), "pure proxy not created"); + }: _(RawOrigin::Signed(pure_account.clone()), caller_lookup, T::ProxyType::default(), 0, height, ext_index) + verify { + assert!(!Proxies::::contains_key(&pure_account)); + } + + impl_benchmark_test_suite!(Proxy, crate::tests::new_test_ext(), crate::tests::Test); +} diff --git a/pallets/proxy/src/lib.rs b/pallets/proxy/src/lib.rs new file mode 100644 index 0000000000..3f45951190 --- /dev/null +++ b/pallets/proxy/src/lib.rs @@ -0,0 +1,891 @@ +// This file is part of Substrate. +// +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0/ +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +//! # Proxy Pallet +//! A pallet allowing accounts to give permission to other accounts to dispatch types of calls from +//! their signed origin. +//! +//! The accounts to which permission is delegated may be required to announce the action that they +//! wish to execute some duration prior to execution happens. In this case, the target account may +//! reject the announcement and in doing so, veto the execution. +//! +//! - [`Config`] +//! - [`Call`] + +// Ensure we're `no_std` when compiling for Wasm. +#![cfg_attr(not(feature = "std"), no_std)] + +mod benchmarking; +mod tests; +pub mod weights; + +extern crate alloc; + +use alloc::{boxed::Box, vec}; +use codec::{Decode, Encode, MaxEncodedLen}; +use frame_support::pallet_prelude::{Pays, Weight}; +use frame_support::{ + dispatch::GetDispatchInfo, + ensure, + traits::{Currency, Get, InstanceFilter, IsSubType, IsType, OriginTrait, ReservableCurrency}, + BoundedVec, +}; +use frame_system::{self as system, ensure_signed, pallet_prelude::BlockNumberFor}; +pub use pallet::*; +use scale_info::{prelude::cmp::Ordering, TypeInfo}; +use sp_io::hashing::blake2_256; +use sp_runtime::{ + traits::{Dispatchable, Hash, Saturating, StaticLookup, TrailingZeroInput, Zero}, + DispatchError, DispatchResult, RuntimeDebug, +}; +use subtensor_macros::freeze_struct; +pub use weights::WeightInfo; + +type CallHashOf = <::CallHasher as Hash>::Output; + +type BalanceOf = + <::Currency as Currency<::AccountId>>::Balance; + +type AccountIdLookupOf = <::Lookup as StaticLookup>::Source; + +/// The parameters under which a particular account has a proxy relationship with some other +/// account. +#[derive( + Encode, + Decode, + Clone, + Copy, + Eq, + PartialEq, + Ord, + PartialOrd, + RuntimeDebug, + MaxEncodedLen, + TypeInfo, +)] +#[freeze_struct("a37bb67fe5520678")] +pub struct ProxyDefinition { + /// The account which may act on behalf of another. + pub delegate: AccountId, + /// A value defining the subset of calls that it is allowed to make. + pub proxy_type: ProxyType, + /// The number of blocks that an announcement must be in place for before the corresponding + /// call may be dispatched. If zero, then no announcement is needed. + pub delay: BlockNumber, +} + +/// Details surrounding a specific instance of an announcement to make a call. +#[derive(Encode, Decode, Clone, Copy, Eq, PartialEq, RuntimeDebug, MaxEncodedLen, TypeInfo)] +#[freeze_struct("4c1b5c8c3bc489ad")] +pub struct Announcement { + /// The account which made the announcement. + real: AccountId, + /// The hash of the call to be made. + call_hash: Hash, + /// The height at which the announcement was made. + height: BlockNumber, +} + +#[frame_support::pallet] +pub mod pallet { + use super::{DispatchResult, *}; + use frame_support::pallet_prelude::*; + use frame_system::pallet_prelude::*; + + #[pallet::pallet] + pub struct Pallet(_); + + /// Configuration trait. + #[pallet::config] + pub trait Config: frame_system::Config { + /// The overarching event type. + type RuntimeEvent: From> + IsType<::RuntimeEvent>; + + /// The overarching call type. + type RuntimeCall: Parameter + + Dispatchable + + GetDispatchInfo + + From> + + IsSubType> + + IsType<::RuntimeCall>; + + /// The currency mechanism. + type Currency: ReservableCurrency; + + /// A kind of proxy; specified with the proxy and passed in to the `IsProxyable` filter. 
+ /// The instance filter determines whether a given call may be proxied under this type. + /// + /// IMPORTANT: `Default` must be provided and MUST BE the the *most permissive* value. + type ProxyType: Parameter + + Member + + Ord + + PartialOrd + + InstanceFilter<::RuntimeCall> + + Default + + MaxEncodedLen; + + /// The base amount of currency needed to reserve for creating a proxy. + /// + /// This is held for an additional storage item whose value size is + /// `sizeof(Balance)` bytes and whose key size is `sizeof(AccountId)` bytes. + #[pallet::constant] + type ProxyDepositBase: Get>; + + /// The amount of currency needed per proxy added. + /// + /// This is held for adding 32 bytes plus an instance of `ProxyType` more into a + /// pre-existing storage value. Thus, when configuring `ProxyDepositFactor` one should take + /// into account `32 + proxy_type.encode().len()` bytes of data. + #[pallet::constant] + type ProxyDepositFactor: Get>; + + /// The maximum amount of proxies allowed for a single account. + #[pallet::constant] + type MaxProxies: Get; + + /// Weight information for extrinsics in this pallet. + type WeightInfo: WeightInfo; + + /// The maximum amount of time-delayed announcements that are allowed to be pending. + #[pallet::constant] + type MaxPending: Get; + + /// The type of hash used for hashing the call. + type CallHasher: Hash; + + /// The base amount of currency needed to reserve for creating an announcement. + /// + /// This is held when a new storage item holding a `Balance` is created (typically 16 + /// bytes). + #[pallet::constant] + type AnnouncementDepositBase: Get>; + + /// The amount of currency needed per announcement made. + /// + /// This is held for adding an `AccountId`, `Hash` and `BlockNumber` (typically 68 bytes) + /// into a pre-existing storage value. + #[pallet::constant] + type AnnouncementDepositFactor: Get>; + } + + #[pallet::call] + impl Pallet { + /// Dispatch the given `call` from an account that the sender is authorised for through + /// `add_proxy`. + /// + /// The dispatch origin for this call must be _Signed_. + /// + /// Parameters: + /// - `real`: The account that the proxy will make a call on behalf of. + /// - `force_proxy_type`: Specify the exact proxy type to be used and checked for this call. + /// - `call`: The call to be made by the `real` account. + #[pallet::call_index(0)] + #[pallet::weight({ + let di = call.get_dispatch_info(); + let inner_call_weight = match di.pays_fee { + Pays::Yes => di.weight, + Pays::No => Weight::zero(), + }; + let base_weight = T::WeightInfo::proxy(T::MaxProxies::get()) + .saturating_add(T::DbWeight::get().reads_writes(1, 1)); + (base_weight.saturating_add(inner_call_weight), di.class) + })] + pub fn proxy( + origin: OriginFor, + real: AccountIdLookupOf, + force_proxy_type: Option, + call: Box<::RuntimeCall>, + ) -> DispatchResult { + let who = ensure_signed(origin)?; + let real = T::Lookup::lookup(real)?; + let def = Self::find_proxy(&real, &who, force_proxy_type)?; + ensure!(def.delay.is_zero(), Error::::Unannounced); + + Self::do_proxy(def, real, *call); + + Ok(()) + } + + /// Register a proxy account for the sender that is able to make calls on its behalf. + /// + /// The dispatch origin for this call must be _Signed_. + /// + /// Parameters: + /// - `proxy`: The account that the `caller` would like to make a proxy. + /// - `proxy_type`: The permissions allowed for this proxy account. + /// - `delay`: The announcement period required of the initial proxy. Will generally be + /// zero. 
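The deposit constants documented above translate into a simple schedule (implemented by `Pallet::deposit` further down): zero when an account has no proxies, otherwise `ProxyDepositBase + ProxyDepositFactor * n`. With the test mock's values of 1 and 1 this is exactly what produces the reserved balances of 2, 3, 4 and 5 asserted in `add_remove_proxies_works`. A standalone sketch of the schedule, not part of the patch:

```rust
// Mirrors the schedule in `Pallet::deposit` below; the constants here follow
// the test mock (`ProxyDepositBase = 1`, `ProxyDepositFactor = 1`).
fn proxy_deposit(base: u64, factor: u64, num_proxies: u64) -> u64 {
    if num_proxies == 0 {
        0
    } else {
        base + factor * num_proxies
    }
}

fn main() {
    // One proxy reserves 2, four proxies reserve 5, as in `add_remove_proxies_works`.
    assert_eq!(proxy_deposit(1, 1, 0), 0);
    assert_eq!(proxy_deposit(1, 1, 1), 2);
    assert_eq!(proxy_deposit(1, 1, 4), 5);
}
```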
+ #[pallet::call_index(1)] + #[pallet::weight(T::WeightInfo::add_proxy(T::MaxProxies::get()))] + pub fn add_proxy( + origin: OriginFor, + delegate: AccountIdLookupOf, + proxy_type: T::ProxyType, + delay: BlockNumberFor, + ) -> DispatchResult { + let who = ensure_signed(origin)?; + let delegate = T::Lookup::lookup(delegate)?; + Self::add_proxy_delegate(&who, delegate, proxy_type, delay) + } + + /// Unregister a proxy account for the sender. + /// + /// The dispatch origin for this call must be _Signed_. + /// + /// Parameters: + /// - `proxy`: The account that the `caller` would like to remove as a proxy. + /// - `proxy_type`: The permissions currently enabled for the removed proxy account. + #[pallet::call_index(2)] + #[pallet::weight(T::WeightInfo::remove_proxy(T::MaxProxies::get()))] + pub fn remove_proxy( + origin: OriginFor, + delegate: AccountIdLookupOf, + proxy_type: T::ProxyType, + delay: BlockNumberFor, + ) -> DispatchResult { + let who = ensure_signed(origin)?; + let delegate = T::Lookup::lookup(delegate)?; + Self::remove_proxy_delegate(&who, delegate, proxy_type, delay) + } + + /// Unregister all proxy accounts for the sender. + /// + /// The dispatch origin for this call must be _Signed_. + /// + /// WARNING: This may be called on accounts created by `pure`, however if done, then + /// the unreserved fees will be inaccessible. **All access to this account will be lost.** + #[pallet::call_index(3)] + #[pallet::weight(T::WeightInfo::remove_proxies(T::MaxProxies::get()))] + pub fn remove_proxies(origin: OriginFor) -> DispatchResult { + let who = ensure_signed(origin)?; + Self::remove_all_proxy_delegates(&who); + Ok(()) + } + + /// Spawn a fresh new account that is guaranteed to be otherwise inaccessible, and + /// initialize it with a proxy of `proxy_type` for `origin` sender. + /// + /// Requires a `Signed` origin. + /// + /// - `proxy_type`: The type of the proxy that the sender will be registered as over the + /// new account. This will almost always be the most permissive `ProxyType` possible to + /// allow for maximum flexibility. + /// - `index`: A disambiguation index, in case this is called multiple times in the same + /// transaction (e.g. with `utility::batch`). Unless you're using `batch` you probably just + /// want to use `0`. + /// - `delay`: The announcement period required of the initial proxy. Will generally be + /// zero. + /// + /// Fails with `Duplicate` if this has already been called in this transaction, from the + /// same sender, with the same parameters. + /// + /// Fails if there are insufficient funds to pay for deposit. 
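Taken together, `add_proxy` and `proxy` give the basic delegation flow: the real account registers a delegate with some `ProxyType`, and the delegate then dispatches calls on its behalf. A minimal sketch in the style of `proxying_works` from `tests.rs` later in this patch (it assumes that file's mock runtime and helpers; not part of the patch):

```rust
// Assumes the mock runtime from pallets/proxy/src/tests.rs.
#[test]
fn delegate_can_transfer_on_behalf_of_real() {
    new_test_ext().execute_with(|| {
        // Account 1 lets account 2 proxy balance transfers, with no announcement delay.
        assert_ok!(Proxy::add_proxy(
            RuntimeOrigin::signed(1),
            2,
            ProxyType::JustTransfer,
            0
        ));
        // Account 2 dispatches a transfer of 1 from account 1 to account 6.
        let call = Box::new(call_transfer(6, 1));
        assert_ok!(Proxy::proxy(RuntimeOrigin::signed(2), 1, None, call));
        assert_eq!(Balances::free_balance(6), 1);
    });
}
```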
+ #[pallet::call_index(4)] + #[pallet::weight(T::WeightInfo::create_pure(T::MaxProxies::get()))] + pub fn create_pure( + origin: OriginFor, + proxy_type: T::ProxyType, + delay: BlockNumberFor, + index: u16, + ) -> DispatchResult { + let who = ensure_signed(origin)?; + + let pure = Self::pure_account(&who, &proxy_type, index, None); + ensure!(!Proxies::::contains_key(&pure), Error::::Duplicate); + + let proxy_def = ProxyDefinition { + delegate: who.clone(), + proxy_type: proxy_type.clone(), + delay, + }; + let bounded_proxies: BoundedVec<_, T::MaxProxies> = vec![proxy_def] + .try_into() + .map_err(|_| Error::::TooMany)?; + + let deposit = T::ProxyDepositBase::get().saturating_add(T::ProxyDepositFactor::get()); + T::Currency::reserve(&who, deposit)?; + + Proxies::::insert(&pure, (bounded_proxies, deposit)); + Self::deposit_event(Event::PureCreated { + pure, + who, + proxy_type, + disambiguation_index: index, + }); + + Ok(()) + } + + /// Removes a previously spawned pure proxy. + /// + /// WARNING: **All access to this account will be lost.** Any funds held in it will be + /// inaccessible. + /// + /// Requires a `Signed` origin, and the sender account must have been created by a call to + /// `pure` with corresponding parameters. + /// + /// - `spawner`: The account that originally called `pure` to create this account. + /// - `index`: The disambiguation index originally passed to `pure`. Probably `0`. + /// - `proxy_type`: The proxy type originally passed to `pure`. + /// - `height`: The height of the chain when the call to `pure` was processed. + /// - `ext_index`: The extrinsic index in which the call to `pure` was processed. + /// + /// Fails with `NoPermission` in case the caller is not a previously created pure + /// account whose `pure` call has corresponding parameters. + #[pallet::call_index(5)] + #[pallet::weight(T::WeightInfo::kill_pure(T::MaxProxies::get()))] + pub fn kill_pure( + origin: OriginFor, + spawner: AccountIdLookupOf, + proxy_type: T::ProxyType, + index: u16, + #[pallet::compact] height: BlockNumberFor, + #[pallet::compact] ext_index: u32, + ) -> DispatchResult { + let who = ensure_signed(origin)?; + let spawner = T::Lookup::lookup(spawner)?; + + let when = (height, ext_index); + let proxy = Self::pure_account(&spawner, &proxy_type, index, Some(when)); + ensure!(proxy == who, Error::::NoPermission); + + let (_, deposit) = Proxies::::take(&who); + T::Currency::unreserve(&spawner, deposit); + + Ok(()) + } + + /// Publish the hash of a proxy-call that will be made in the future. + /// + /// This must be called some number of blocks before the corresponding `proxy` is attempted + /// if the delay associated with the proxy relationship is greater than zero. + /// + /// No more than `MaxPending` announcements may be made at any one time. + /// + /// This will take a deposit of `AnnouncementDepositFactor` as well as + /// `AnnouncementDepositBase` if there are no other pending announcements. + /// + /// The dispatch origin for this call must be _Signed_ and a proxy of `real`. + /// + /// Parameters: + /// - `real`: The account that the proxy will make a call on behalf of. + /// - `call_hash`: The hash of the call to be made by the `real` account. 
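For delays greater than zero, `announce` is the required first step: the delegate publishes the call hash, waits out the delay, and only then executes through `proxy_announced`; a plain `proxy` call fails with `Unannounced`. A sketch modelled on `delayed_requires_pre_announcement` in `tests.rs` later in this patch (it assumes that file's mock runtime and helpers; not part of the patch):

```rust
// Assumes the mock runtime from pallets/proxy/src/tests.rs.
#[test]
fn delayed_proxy_requires_announcement() {
    new_test_ext().execute_with(|| {
        // Delegate 2 for account 1 with a one-block announcement delay.
        assert_ok!(Proxy::add_proxy(RuntimeOrigin::signed(1), 2, ProxyType::Any, 1));
        let call = Box::new(call_transfer(6, 1));
        // Executing immediately fails: nothing was announced.
        assert_noop!(
            Proxy::proxy(RuntimeOrigin::signed(2), 1, None, call.clone()),
            Error::<Test>::Unannounced
        );
        // Announce the call hash, wait out the delay, then execute.
        assert_ok!(Proxy::announce(
            RuntimeOrigin::signed(2),
            1,
            BlakeTwo256::hash_of(&call)
        ));
        System::set_block_number(2);
        assert_ok!(Proxy::proxy_announced(
            RuntimeOrigin::signed(0),
            2,
            1,
            None,
            call
        ));
    });
}
```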
+ #[pallet::call_index(6)] + #[pallet::weight(T::WeightInfo::announce(T::MaxPending::get(), T::MaxProxies::get()))] + pub fn announce( + origin: OriginFor, + real: AccountIdLookupOf, + call_hash: CallHashOf, + ) -> DispatchResult { + let who = ensure_signed(origin)?; + let real = T::Lookup::lookup(real)?; + Proxies::::get(&real) + .0 + .into_iter() + .find(|x| x.delegate == who) + .ok_or(Error::::NotProxy)?; + + let announcement = Announcement { + real: real.clone(), + call_hash, + height: system::Pallet::::block_number(), + }; + + Announcements::::try_mutate(&who, |(ref mut pending, ref mut deposit)| { + pending + .try_push(announcement) + .map_err(|_| Error::::TooMany)?; + Self::rejig_deposit( + &who, + *deposit, + T::AnnouncementDepositBase::get(), + T::AnnouncementDepositFactor::get(), + pending.len(), + ) + .map(|d| { + d.expect("Just pushed; pending.len() > 0; rejig_deposit returns Some; qed") + }) + .map(|d| *deposit = d) + })?; + Self::deposit_event(Event::Announced { + real, + proxy: who, + call_hash, + }); + + Ok(()) + } + + /// Remove a given announcement. + /// + /// May be called by a proxy account to remove a call they previously announced and return + /// the deposit. + /// + /// The dispatch origin for this call must be _Signed_. + /// + /// Parameters: + /// - `real`: The account that the proxy will make a call on behalf of. + /// - `call_hash`: The hash of the call to be made by the `real` account. + #[pallet::call_index(7)] + #[pallet::weight(T::WeightInfo::remove_announcement( + T::MaxPending::get(), + T::MaxProxies::get() + ))] + pub fn remove_announcement( + origin: OriginFor, + real: AccountIdLookupOf, + call_hash: CallHashOf, + ) -> DispatchResult { + let who = ensure_signed(origin)?; + let real = T::Lookup::lookup(real)?; + Self::edit_announcements(&who, |ann| ann.real != real || ann.call_hash != call_hash)?; + + Ok(()) + } + + /// Remove the given announcement of a delegate. + /// + /// May be called by a target (proxied) account to remove a call that one of their delegates + /// (`delegate`) has announced they want to execute. The deposit is returned. + /// + /// The dispatch origin for this call must be _Signed_. + /// + /// Parameters: + /// - `delegate`: The account that previously announced the call. + /// - `call_hash`: The hash of the call to be made. + #[pallet::call_index(8)] + #[pallet::weight(T::WeightInfo::reject_announcement( + T::MaxPending::get(), + T::MaxProxies::get() + ))] + pub fn reject_announcement( + origin: OriginFor, + delegate: AccountIdLookupOf, + call_hash: CallHashOf, + ) -> DispatchResult { + let who = ensure_signed(origin)?; + let delegate = T::Lookup::lookup(delegate)?; + Self::edit_announcements(&delegate, |ann| { + ann.real != who || ann.call_hash != call_hash + })?; + + Ok(()) + } + + /// Dispatch the given `call` from an account that the sender is authorized for through + /// `add_proxy`. + /// + /// Removes any corresponding announcement(s). + /// + /// The dispatch origin for this call must be _Signed_. + /// + /// Parameters: + /// - `real`: The account that the proxy will make a call on behalf of. + /// - `force_proxy_type`: Specify the exact proxy type to be used and checked for this call. + /// - `call`: The call to be made by the `real` account. + #[pallet::call_index(9)] + #[pallet::weight({ + let di = call.get_dispatch_info(); + (T::WeightInfo::proxy_announced(T::MaxPending::get(), T::MaxProxies::get()) + // AccountData for inner call origin accountdata. 
+ .saturating_add(T::DbWeight::get().reads_writes(1, 1)) + .saturating_add(di.weight), + di.class) + })] + pub fn proxy_announced( + origin: OriginFor, + delegate: AccountIdLookupOf, + real: AccountIdLookupOf, + force_proxy_type: Option, + call: Box<::RuntimeCall>, + ) -> DispatchResult { + ensure_signed(origin)?; + let delegate = T::Lookup::lookup(delegate)?; + let real = T::Lookup::lookup(real)?; + let def = Self::find_proxy(&real, &delegate, force_proxy_type)?; + + let call_hash = T::CallHasher::hash_of(&call); + let now = system::Pallet::::block_number(); + Self::edit_announcements(&delegate, |ann| { + ann.real != real + || ann.call_hash != call_hash + || now.saturating_sub(ann.height) < def.delay + }) + .map_err(|_| Error::::Unannounced)?; + + Self::do_proxy(def, real, *call); + + Ok(()) + } + } + + #[pallet::event] + #[pallet::generate_deposit(pub(super) fn deposit_event)] + pub enum Event { + /// A proxy was executed correctly, with the given. + ProxyExecuted { result: DispatchResult }, + /// A pure account has been created by new proxy with given + /// disambiguation index and proxy type. + PureCreated { + pure: T::AccountId, + who: T::AccountId, + proxy_type: T::ProxyType, + disambiguation_index: u16, + }, + /// An announcement was placed to make a call in the future. + Announced { + real: T::AccountId, + proxy: T::AccountId, + call_hash: CallHashOf, + }, + /// A proxy was added. + ProxyAdded { + delegator: T::AccountId, + delegatee: T::AccountId, + proxy_type: T::ProxyType, + delay: BlockNumberFor, + }, + /// A proxy was removed. + ProxyRemoved { + delegator: T::AccountId, + delegatee: T::AccountId, + proxy_type: T::ProxyType, + delay: BlockNumberFor, + }, + } + + #[pallet::error] + pub enum Error { + /// There are too many proxies registered or too many announcements pending. + TooMany, + /// Proxy registration not found. + NotFound, + /// Sender is not a proxy of the account to be proxied. + NotProxy, + /// A call which is incompatible with the proxy type's filter was attempted. + Unproxyable, + /// Account is already a proxy. + Duplicate, + /// Call may not be made by proxy because it may escalate its privileges. + NoPermission, + /// Announcement, if made at all, was made too recently. + Unannounced, + /// Cannot add self as proxy. + NoSelfProxy, + } + + /// The set of account proxies. Maps the account which has delegated to the accounts + /// which are being delegated to, together with the amount held on deposit. + #[pallet::storage] + pub type Proxies = StorageMap< + _, + Twox64Concat, + T::AccountId, + ( + BoundedVec< + ProxyDefinition>, + T::MaxProxies, + >, + BalanceOf, + ), + ValueQuery, + >; + + /// The announcements made by the proxy (key). + #[pallet::storage] + pub type Announcements = StorageMap< + _, + Twox64Concat, + T::AccountId, + ( + BoundedVec, BlockNumberFor>, T::MaxPending>, + BalanceOf, + ), + ValueQuery, + >; +} + +impl Pallet { + /// Public function to proxies storage. + pub fn proxies( + account: T::AccountId, + ) -> ( + BoundedVec>, T::MaxProxies>, + BalanceOf, + ) { + Proxies::::get(account) + } + + /// Public function to announcements storage. + pub fn announcements( + account: T::AccountId, + ) -> ( + BoundedVec, BlockNumberFor>, T::MaxPending>, + BalanceOf, + ) { + Announcements::::get(account) + } + + /// Calculate the address of an pure account. + /// + /// - `who`: The spawner account. + /// - `proxy_type`: The type of the proxy that the sender will be registered as over the + /// new account. 
This will almost always be the most permissive `ProxyType` possible to + /// allow for maximum flexibility. + /// - `index`: A disambiguation index, in case this is called multiple times in the same + /// transaction (e.g. with `utility::batch`). Unless you're using `batch` you probably just + /// want to use `0`. + /// - `maybe_when`: The block height and extrinsic index of when the pure account was + /// created. None to use current block height and extrinsic index. + pub fn pure_account( + who: &T::AccountId, + proxy_type: &T::ProxyType, + index: u16, + maybe_when: Option<(BlockNumberFor, u32)>, + ) -> T::AccountId { + let (height, ext_index) = maybe_when.unwrap_or_else(|| { + ( + system::Pallet::::block_number(), + system::Pallet::::extrinsic_index().unwrap_or_default(), + ) + }); + let entropy = ( + b"modlpy/proxy____", + who, + height, + ext_index, + proxy_type, + index, + ) + .using_encoded(blake2_256); + Decode::decode(&mut TrailingZeroInput::new(entropy.as_ref())) + .expect("infinite length input; no invalid inputs for type; qed") + } + + /// Register a proxy account for the delegator that is able to make calls on its behalf. + /// + /// Parameters: + /// - `delegator`: The delegator account. + /// - `delegatee`: The account that the `delegator` would like to make a proxy. + /// - `proxy_type`: The permissions allowed for this proxy account. + /// - `delay`: The announcement period required of the initial proxy. Will generally be + /// zero. + pub fn add_proxy_delegate( + delegator: &T::AccountId, + delegatee: T::AccountId, + proxy_type: T::ProxyType, + delay: BlockNumberFor, + ) -> DispatchResult { + ensure!(delegator != &delegatee, Error::::NoSelfProxy); + Proxies::::try_mutate(delegator, |(ref mut proxies, ref mut deposit)| { + let proxy_def = ProxyDefinition { + delegate: delegatee.clone(), + proxy_type: proxy_type.clone(), + delay, + }; + let i = proxies + .binary_search(&proxy_def) + .err() + .ok_or(Error::::Duplicate)?; + proxies + .try_insert(i, proxy_def) + .map_err(|_| Error::::TooMany)?; + let new_deposit = Self::deposit(proxies.len() as u32); + match new_deposit.cmp(deposit) { + Ordering::Greater => { + T::Currency::reserve(delegator, new_deposit.saturating_sub(*deposit))?; + } + Ordering::Less => { + T::Currency::unreserve(delegator, deposit.saturating_sub(new_deposit)); + } + Ordering::Equal => (), + } + *deposit = new_deposit; + Self::deposit_event(Event::::ProxyAdded { + delegator: delegator.clone(), + delegatee, + proxy_type, + delay, + }); + Ok(()) + }) + } + + /// Unregister a proxy account for the delegator. + /// + /// Parameters: + /// - `delegator`: The delegator account. + /// - `delegatee`: The account that the `delegator` would like to make a proxy. + /// - `proxy_type`: The permissions allowed for this proxy account. + /// - `delay`: The announcement period required of the initial proxy. Will generally be + /// zero. 
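Because `pure_account` derives the address purely from `(spawner, proxy_type, index, height, ext_index)`, pure proxies are deterministic: repeating exactly the same parameters within the same extrinsic collides and yields `Duplicate`, while changing any component (such as the disambiguation index) derives a fresh account. A sketch following `pure_works` in `tests.rs` later in this patch (not part of the patch):

```rust
// Assumes the mock runtime from pallets/proxy/src/tests.rs.
#[test]
fn pure_account_is_deterministic() {
    new_test_ext().execute_with(|| {
        // Extra balance so the spawner can cover the proxy deposits plus the ED.
        Balances::make_free_balance_be(&1, 11);
        assert_ok!(Proxy::create_pure(RuntimeOrigin::signed(1), ProxyType::Any, 0, 0));
        // Same spawner, type, delay, index, block and extrinsic index: the derived
        // address collides, so the call is rejected.
        assert_noop!(
            Proxy::create_pure(RuntimeOrigin::signed(1), ProxyType::Any, 0, 0),
            Error::<Test>::Duplicate
        );
        // A different disambiguation index derives a different account.
        assert_ok!(Proxy::create_pure(RuntimeOrigin::signed(1), ProxyType::Any, 0, 1));
    });
}
```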
+ pub fn remove_proxy_delegate( + delegator: &T::AccountId, + delegatee: T::AccountId, + proxy_type: T::ProxyType, + delay: BlockNumberFor, + ) -> DispatchResult { + Proxies::::try_mutate_exists(delegator, |x| { + let (mut proxies, old_deposit) = x.take().ok_or(Error::::NotFound)?; + let proxy_def = ProxyDefinition { + delegate: delegatee.clone(), + proxy_type: proxy_type.clone(), + delay, + }; + let i = proxies + .binary_search(&proxy_def) + .ok() + .ok_or(Error::::NotFound)?; + proxies.remove(i); + let new_deposit = Self::deposit(proxies.len() as u32); + match new_deposit.cmp(&old_deposit) { + Ordering::Greater => { + T::Currency::reserve(delegator, new_deposit.saturating_sub(old_deposit))?; + } + Ordering::Less => { + T::Currency::unreserve(delegator, old_deposit.saturating_sub(new_deposit)); + } + Ordering::Equal => (), + } + if !proxies.is_empty() { + *x = Some((proxies, new_deposit)) + } + Self::deposit_event(Event::::ProxyRemoved { + delegator: delegator.clone(), + delegatee, + proxy_type, + delay, + }); + Ok(()) + }) + } + + pub fn deposit(num_proxies: u32) -> BalanceOf { + if num_proxies == 0 { + Zero::zero() + } else { + T::ProxyDepositBase::get() + .saturating_add(T::ProxyDepositFactor::get().saturating_mul(num_proxies.into())) + } + } + + fn rejig_deposit( + who: &T::AccountId, + old_deposit: BalanceOf, + base: BalanceOf, + factor: BalanceOf, + len: usize, + ) -> Result>, DispatchError> { + let new_deposit = if len == 0 { + BalanceOf::::zero() + } else { + base.saturating_add(factor.saturating_mul((len as u32).into())) + }; + match new_deposit.cmp(&old_deposit) { + Ordering::Greater => { + T::Currency::reserve(who, new_deposit.saturating_sub(old_deposit))?; + } + Ordering::Less => { + T::Currency::unreserve(who, old_deposit.saturating_sub(new_deposit)); + } + Ordering::Equal => (), + } + Ok(if len == 0 { None } else { Some(new_deposit) }) + } + + fn edit_announcements< + F: FnMut(&Announcement, BlockNumberFor>) -> bool, + >( + delegate: &T::AccountId, + mut f: F, + ) -> DispatchResult { + Announcements::::try_mutate_exists(delegate, |x| { + let (mut pending, old_deposit) = x.take().ok_or(Error::::NotFound)?; + let orig_pending_len = pending.len(); + pending.retain(&mut f); + ensure!(orig_pending_len > pending.len(), Error::::NotFound); + *x = Self::rejig_deposit( + delegate, + old_deposit, + T::AnnouncementDepositBase::get(), + T::AnnouncementDepositFactor::get(), + pending.len(), + )? + .map(|deposit| (pending, deposit)); + Ok(()) + }) + } + + pub fn find_proxy( + real: &T::AccountId, + delegate: &T::AccountId, + force_proxy_type: Option, + ) -> Result>, DispatchError> { + let f = |x: &ProxyDefinition>| -> bool { + &x.delegate == delegate && force_proxy_type.as_ref().is_none_or(|y| &x.proxy_type == y) + }; + Ok(Proxies::::get(real) + .0 + .into_iter() + .find(f) + .ok_or(Error::::NotProxy)?) + } + + fn do_proxy( + def: ProxyDefinition>, + real: T::AccountId, + call: ::RuntimeCall, + ) { + // This is a freshly authenticated new account, the origin restrictions doesn't apply. + let mut origin: T::RuntimeOrigin = frame_system::RawOrigin::Signed(real).into(); + origin.add_filter(move |c: &::RuntimeCall| { + let c = ::RuntimeCall::from_ref(c); + // We make sure the proxy call does access this pallet to change modify proxies. + match c.is_sub_type() { + // Proxy call cannot add or remove a proxy with more permissions than it already + // has. + Some(Call::add_proxy { ref proxy_type, .. }) + | Some(Call::remove_proxy { ref proxy_type, .. 
}) + if !def.proxy_type.is_superset(proxy_type) => + { + false + } + // Proxy call cannot remove all proxies or kill pure proxies unless it has full + // permissions. + Some(Call::remove_proxies { .. }) | Some(Call::kill_pure { .. }) + if def.proxy_type != T::ProxyType::default() => + { + false + } + _ => def.proxy_type.filter(c), + } + }); + let e = call.dispatch(origin); + Self::deposit_event(Event::ProxyExecuted { + result: e.map(|_| ()).map_err(|e| e.error), + }); + } + + /// Removes all proxy delegates for a given delegator. + /// + /// Parameters: + /// - `delegator`: The delegator account. + pub fn remove_all_proxy_delegates(delegator: &T::AccountId) { + let (_, old_deposit) = Proxies::::take(delegator); + T::Currency::unreserve(delegator, old_deposit); + } +} diff --git a/pallets/proxy/src/tests.rs b/pallets/proxy/src/tests.rs new file mode 100644 index 0000000000..04bd0bf566 --- /dev/null +++ b/pallets/proxy/src/tests.rs @@ -0,0 +1,965 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Tests for Proxy Pallet + +#![cfg(test)] + +use super::*; + +use crate as proxy; +use alloc::{vec, vec::Vec}; +use codec::{Decode, Encode}; +use frame_support::{ + assert_noop, assert_ok, derive_impl, + traits::{ConstU32, ConstU64, Contains}, +}; +use sp_core::H256; +use sp_runtime::{traits::BlakeTwo256, BuildStorage, DispatchError, RuntimeDebug}; + +type Block = frame_system::mocking::MockBlock; + +frame_support::construct_runtime!( + pub enum Test + { + System: frame_system = 1, + Balances: pallet_balances = 2, + Proxy: proxy = 3, + Utility: pallet_utility = 4, + } +); + +#[derive_impl(frame_system::config_preludes::TestDefaultConfig)] +impl frame_system::Config for Test { + type Block = Block; + type BaseCallFilter = BaseFilter; + type AccountData = pallet_balances::AccountData; +} + +#[derive_impl(pallet_balances::config_preludes::TestDefaultConfig)] +impl pallet_balances::Config for Test { + type ReserveIdentifier = [u8; 8]; + type AccountStore = System; +} + +impl pallet_utility::Config for Test { + type RuntimeEvent = RuntimeEvent; + type RuntimeCall = RuntimeCall; + type PalletsOrigin = OriginCaller; + type WeightInfo = (); +} + +#[derive( + Copy, + Clone, + Eq, + PartialEq, + Ord, + PartialOrd, + Encode, + Decode, + RuntimeDebug, + MaxEncodedLen, + scale_info::TypeInfo, +)] +pub enum ProxyType { + Any, + JustTransfer, + JustUtility, +} +impl Default for ProxyType { + fn default() -> Self { + Self::Any + } +} +impl InstanceFilter for ProxyType { + fn filter(&self, c: &RuntimeCall) -> bool { + match self { + ProxyType::Any => true, + ProxyType::JustTransfer => { + matches!( + c, + RuntimeCall::Balances(pallet_balances::Call::transfer_allow_death { .. }) + ) + } + ProxyType::JustUtility => matches!(c, RuntimeCall::Utility { .. 
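The filter installed by `do_proxy` above is what stops a delegate from escalating its own permissions: a proxy whose type is not the fully permissive default cannot `remove_proxies` or `kill_pure`, and cannot add or remove proxies with a broader type than its own. A sketch following `filtering_works` in this `tests.rs` (it assumes this file's mock runtime; not part of the patch):

```rust
// Assumes the mock runtime defined in this tests.rs.
#[test]
fn limited_proxy_cannot_remove_all_proxies() {
    new_test_ext().execute_with(|| {
        assert_ok!(Proxy::add_proxy(RuntimeOrigin::signed(1), 2, ProxyType::Any, 0));
        assert_ok!(Proxy::add_proxy(
            RuntimeOrigin::signed(1),
            3,
            ProxyType::JustTransfer,
            0
        ));
        let call = Box::new(RuntimeCall::Proxy(ProxyCall::remove_proxies {}));
        // The restricted proxy's inner call is filtered out by `do_proxy`.
        assert_ok!(Proxy::proxy(RuntimeOrigin::signed(3), 1, None, call.clone()));
        System::assert_last_event(
            ProxyEvent::ProxyExecuted {
                result: Err(SystemError::CallFiltered.into()),
            }
            .into(),
        );
        // The fully permissive proxy may strip all of the real account's proxies.
        assert_ok!(Proxy::proxy(RuntimeOrigin::signed(2), 1, None, call));
        System::assert_last_event(ProxyEvent::ProxyExecuted { result: Ok(()) }.into());
    });
}
```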
}), + } + } + fn is_superset(&self, o: &Self) -> bool { + self == &ProxyType::Any || self == o + } +} +pub struct BaseFilter; +impl Contains for BaseFilter { + fn contains(c: &RuntimeCall) -> bool { + match *c { + // Remark is used as a no-op call in the benchmarking + RuntimeCall::System(SystemCall::remark { .. }) => true, + RuntimeCall::System(_) => false, + _ => true, + } + } +} +impl Config for Test { + type RuntimeEvent = RuntimeEvent; + type RuntimeCall = RuntimeCall; + type Currency = Balances; + type ProxyType = ProxyType; + type ProxyDepositBase = ConstU64<1>; + type ProxyDepositFactor = ConstU64<1>; + type MaxProxies = ConstU32<4>; + type WeightInfo = (); + type CallHasher = BlakeTwo256; + type MaxPending = ConstU32<2>; + type AnnouncementDepositBase = ConstU64<1>; + type AnnouncementDepositFactor = ConstU64<1>; +} + +use super::{Call as ProxyCall, Event as ProxyEvent}; +use frame_system::Call as SystemCall; +use pallet_balances::{Call as BalancesCall, Event as BalancesEvent}; +use pallet_utility::{Call as UtilityCall, Event as UtilityEvent}; + +type SystemError = frame_system::Error; + +pub fn new_test_ext() -> sp_io::TestExternalities { + let mut t = frame_system::GenesisConfig::::default() + .build_storage() + .expect("Expected to not panic"); + pallet_balances::GenesisConfig:: { + balances: vec![(1, 10), (2, 10), (3, 10), (4, 10), (5, 3)], + } + .assimilate_storage(&mut t) + .expect("Expected to not panic"); + let mut ext = sp_io::TestExternalities::new(t); + ext.execute_with(|| System::set_block_number(1)); + ext +} + +fn last_events(n: usize) -> Vec { + system::Pallet::::events() + .into_iter() + .rev() + .take(n) + .rev() + .map(|e| e.event) + .collect() +} + +fn expect_events(e: Vec) { + assert_eq!(last_events(e.len()), e); +} + +fn call_transfer(dest: u64, value: u64) -> RuntimeCall { + RuntimeCall::Balances(BalancesCall::transfer_allow_death { dest, value }) +} + +#[test] +fn announcement_works() { + new_test_ext().execute_with(|| { + assert_ok!(Proxy::add_proxy( + RuntimeOrigin::signed(1), + 3, + ProxyType::Any, + 1 + )); + System::assert_last_event( + ProxyEvent::ProxyAdded { + delegator: 1, + delegatee: 3, + proxy_type: ProxyType::Any, + delay: 1, + } + .into(), + ); + assert_ok!(Proxy::add_proxy( + RuntimeOrigin::signed(2), + 3, + ProxyType::Any, + 1 + )); + assert_eq!(Balances::reserved_balance(3), 0); + + assert_ok!(Proxy::announce(RuntimeOrigin::signed(3), 1, [1; 32].into())); + let announcements = Announcements::::get(3); + assert_eq!( + announcements.0, + vec![Announcement { + real: 1, + call_hash: [1; 32].into(), + height: 1 + }] + ); + assert_eq!(Balances::reserved_balance(3), announcements.1); + + assert_ok!(Proxy::announce(RuntimeOrigin::signed(3), 2, [2; 32].into())); + let announcements = Announcements::::get(3); + assert_eq!( + announcements.0, + vec![ + Announcement { + real: 1, + call_hash: [1; 32].into(), + height: 1 + }, + Announcement { + real: 2, + call_hash: [2; 32].into(), + height: 1 + }, + ] + ); + assert_eq!(Balances::reserved_balance(3), announcements.1); + + assert_noop!( + Proxy::announce(RuntimeOrigin::signed(3), 2, [3; 32].into()), + Error::::TooMany + ); + }); +} + +#[test] +fn remove_announcement_works() { + new_test_ext().execute_with(|| { + assert_ok!(Proxy::add_proxy( + RuntimeOrigin::signed(1), + 3, + ProxyType::Any, + 1 + )); + assert_ok!(Proxy::add_proxy( + RuntimeOrigin::signed(2), + 3, + ProxyType::Any, + 1 + )); + assert_ok!(Proxy::announce(RuntimeOrigin::signed(3), 1, [1; 32].into())); + 
assert_ok!(Proxy::announce(RuntimeOrigin::signed(3), 2, [2; 32].into())); + let e = Error::::NotFound; + assert_noop!( + Proxy::remove_announcement(RuntimeOrigin::signed(3), 1, [0; 32].into()), + e + ); + assert_ok!(Proxy::remove_announcement( + RuntimeOrigin::signed(3), + 1, + [1; 32].into() + )); + let announcements = Announcements::::get(3); + assert_eq!( + announcements.0, + vec![Announcement { + real: 2, + call_hash: [2; 32].into(), + height: 1 + }] + ); + assert_eq!(Balances::reserved_balance(3), announcements.1); + }); +} + +#[test] +fn reject_announcement_works() { + new_test_ext().execute_with(|| { + assert_ok!(Proxy::add_proxy( + RuntimeOrigin::signed(1), + 3, + ProxyType::Any, + 1 + )); + assert_ok!(Proxy::add_proxy( + RuntimeOrigin::signed(2), + 3, + ProxyType::Any, + 1 + )); + assert_ok!(Proxy::announce(RuntimeOrigin::signed(3), 1, [1; 32].into())); + assert_ok!(Proxy::announce(RuntimeOrigin::signed(3), 2, [2; 32].into())); + let e = Error::::NotFound; + assert_noop!( + Proxy::reject_announcement(RuntimeOrigin::signed(1), 3, [0; 32].into()), + e + ); + let e = Error::::NotFound; + assert_noop!( + Proxy::reject_announcement(RuntimeOrigin::signed(4), 3, [1; 32].into()), + e + ); + assert_ok!(Proxy::reject_announcement( + RuntimeOrigin::signed(1), + 3, + [1; 32].into() + )); + let announcements = Announcements::::get(3); + assert_eq!( + announcements.0, + vec![Announcement { + real: 2, + call_hash: [2; 32].into(), + height: 1 + }] + ); + assert_eq!(Balances::reserved_balance(3), announcements.1); + }); +} + +#[test] +fn announcer_must_be_proxy() { + new_test_ext().execute_with(|| { + assert_noop!( + Proxy::announce(RuntimeOrigin::signed(2), 1, H256::zero()), + Error::::NotProxy + ); + }); +} + +#[test] +fn calling_proxy_doesnt_remove_announcement() { + new_test_ext().execute_with(|| { + assert_ok!(Proxy::add_proxy( + RuntimeOrigin::signed(1), + 2, + ProxyType::Any, + 0 + )); + + let call = Box::new(call_transfer(6, 1)); + let call_hash = BlakeTwo256::hash_of(&call); + + assert_ok!(Proxy::announce(RuntimeOrigin::signed(2), 1, call_hash)); + assert_ok!(Proxy::proxy(RuntimeOrigin::signed(2), 1, None, call)); + + // The announcement is not removed by calling proxy. 
+ let announcements = Announcements::::get(2); + assert_eq!( + announcements.0, + vec![Announcement { + real: 1, + call_hash, + height: 1 + }] + ); + }); +} + +#[test] +fn delayed_requires_pre_announcement() { + new_test_ext().execute_with(|| { + assert_ok!(Proxy::add_proxy( + RuntimeOrigin::signed(1), + 2, + ProxyType::Any, + 1 + )); + let call = Box::new(call_transfer(6, 1)); + let e = Error::::Unannounced; + assert_noop!( + Proxy::proxy(RuntimeOrigin::signed(2), 1, None, call.clone()), + e + ); + let e = Error::::Unannounced; + assert_noop!( + Proxy::proxy_announced(RuntimeOrigin::signed(0), 2, 1, None, call.clone()), + e + ); + let call_hash = BlakeTwo256::hash_of(&call); + assert_ok!(Proxy::announce(RuntimeOrigin::signed(2), 1, call_hash)); + system::Pallet::::set_block_number(2); + assert_ok!(Proxy::proxy_announced( + RuntimeOrigin::signed(0), + 2, + 1, + None, + call.clone() + )); + }); +} + +#[test] +fn proxy_announced_removes_announcement_and_returns_deposit() { + new_test_ext().execute_with(|| { + assert_ok!(Proxy::add_proxy( + RuntimeOrigin::signed(1), + 3, + ProxyType::Any, + 1 + )); + assert_ok!(Proxy::add_proxy( + RuntimeOrigin::signed(2), + 3, + ProxyType::Any, + 1 + )); + let call = Box::new(call_transfer(6, 1)); + let call_hash = BlakeTwo256::hash_of(&call); + assert_ok!(Proxy::announce(RuntimeOrigin::signed(3), 1, call_hash)); + assert_ok!(Proxy::announce(RuntimeOrigin::signed(3), 2, call_hash)); + // Too early to execute announced call + let e = Error::::Unannounced; + assert_noop!( + Proxy::proxy_announced(RuntimeOrigin::signed(0), 3, 1, None, call.clone()), + e + ); + + system::Pallet::::set_block_number(2); + assert_ok!(Proxy::proxy_announced( + RuntimeOrigin::signed(0), + 3, + 1, + None, + call.clone() + )); + let announcements = Announcements::::get(3); + assert_eq!( + announcements.0, + vec![Announcement { + real: 2, + call_hash, + height: 1 + }] + ); + assert_eq!(Balances::reserved_balance(3), announcements.1); + }); +} + +#[test] +fn filtering_works() { + new_test_ext().execute_with(|| { + Balances::make_free_balance_be(&1, 1000); + assert_ok!(Proxy::add_proxy( + RuntimeOrigin::signed(1), + 2, + ProxyType::Any, + 0 + )); + assert_ok!(Proxy::add_proxy( + RuntimeOrigin::signed(1), + 3, + ProxyType::JustTransfer, + 0 + )); + assert_ok!(Proxy::add_proxy( + RuntimeOrigin::signed(1), + 4, + ProxyType::JustUtility, + 0 + )); + + let call = Box::new(call_transfer(6, 1)); + assert_ok!(Proxy::proxy( + RuntimeOrigin::signed(2), + 1, + None, + call.clone() + )); + System::assert_last_event(ProxyEvent::ProxyExecuted { result: Ok(()) }.into()); + assert_ok!(Proxy::proxy( + RuntimeOrigin::signed(3), + 1, + None, + call.clone() + )); + System::assert_last_event(ProxyEvent::ProxyExecuted { result: Ok(()) }.into()); + assert_ok!(Proxy::proxy( + RuntimeOrigin::signed(4), + 1, + None, + call.clone() + )); + System::assert_last_event( + ProxyEvent::ProxyExecuted { + result: Err(SystemError::CallFiltered.into()), + } + .into(), + ); + + let derivative_id = Utility::derivative_account_id(1, 0); + Balances::make_free_balance_be(&derivative_id, 1000); + let inner = Box::new(call_transfer(6, 1)); + + let call = Box::new(RuntimeCall::Utility(UtilityCall::as_derivative { + index: 0, + call: inner.clone(), + })); + assert_ok!(Proxy::proxy( + RuntimeOrigin::signed(2), + 1, + None, + call.clone() + )); + System::assert_last_event(ProxyEvent::ProxyExecuted { result: Ok(()) }.into()); + assert_ok!(Proxy::proxy( + RuntimeOrigin::signed(3), + 1, + None, + call.clone() + )); + 
System::assert_last_event( + ProxyEvent::ProxyExecuted { + result: Err(SystemError::CallFiltered.into()), + } + .into(), + ); + assert_ok!(Proxy::proxy( + RuntimeOrigin::signed(4), + 1, + None, + call.clone() + )); + System::assert_last_event( + ProxyEvent::ProxyExecuted { + result: Err(SystemError::CallFiltered.into()), + } + .into(), + ); + + let call = Box::new(RuntimeCall::Utility(UtilityCall::batch { + calls: vec![*inner], + })); + assert_ok!(Proxy::proxy( + RuntimeOrigin::signed(2), + 1, + None, + call.clone() + )); + expect_events(vec![ + UtilityEvent::BatchCompleted.into(), + ProxyEvent::ProxyExecuted { result: Ok(()) }.into(), + ]); + assert_ok!(Proxy::proxy( + RuntimeOrigin::signed(3), + 1, + None, + call.clone() + )); + System::assert_last_event( + ProxyEvent::ProxyExecuted { + result: Err(SystemError::CallFiltered.into()), + } + .into(), + ); + assert_ok!(Proxy::proxy( + RuntimeOrigin::signed(4), + 1, + None, + call.clone() + )); + expect_events(vec![ + UtilityEvent::BatchInterrupted { + index: 0, + error: SystemError::CallFiltered.into(), + } + .into(), + ProxyEvent::ProxyExecuted { result: Ok(()) }.into(), + ]); + + let inner = Box::new(RuntimeCall::Proxy(ProxyCall::new_call_variant_add_proxy( + 5, + ProxyType::Any, + 0, + ))); + let call = Box::new(RuntimeCall::Utility(UtilityCall::batch { + calls: vec![*inner], + })); + assert_ok!(Proxy::proxy( + RuntimeOrigin::signed(2), + 1, + None, + call.clone() + )); + expect_events(vec![ + UtilityEvent::BatchCompleted.into(), + ProxyEvent::ProxyExecuted { result: Ok(()) }.into(), + ]); + assert_ok!(Proxy::proxy( + RuntimeOrigin::signed(3), + 1, + None, + call.clone() + )); + System::assert_last_event( + ProxyEvent::ProxyExecuted { + result: Err(SystemError::CallFiltered.into()), + } + .into(), + ); + assert_ok!(Proxy::proxy( + RuntimeOrigin::signed(4), + 1, + None, + call.clone() + )); + expect_events(vec![ + UtilityEvent::BatchInterrupted { + index: 0, + error: SystemError::CallFiltered.into(), + } + .into(), + ProxyEvent::ProxyExecuted { result: Ok(()) }.into(), + ]); + + let call = Box::new(RuntimeCall::Proxy(ProxyCall::remove_proxies {})); + assert_ok!(Proxy::proxy( + RuntimeOrigin::signed(3), + 1, + None, + call.clone() + )); + System::assert_last_event( + ProxyEvent::ProxyExecuted { + result: Err(SystemError::CallFiltered.into()), + } + .into(), + ); + assert_ok!(Proxy::proxy( + RuntimeOrigin::signed(4), + 1, + None, + call.clone() + )); + System::assert_last_event( + ProxyEvent::ProxyExecuted { + result: Err(SystemError::CallFiltered.into()), + } + .into(), + ); + assert_ok!(Proxy::proxy( + RuntimeOrigin::signed(2), + 1, + None, + call.clone() + )); + expect_events(vec![ + BalancesEvent::::Unreserved { who: 1, amount: 5 }.into(), + ProxyEvent::ProxyExecuted { result: Ok(()) }.into(), + ]); + }); +} + +#[test] +fn add_remove_proxies_works() { + new_test_ext().execute_with(|| { + assert_ok!(Proxy::add_proxy( + RuntimeOrigin::signed(1), + 2, + ProxyType::Any, + 0 + )); + assert_noop!( + Proxy::add_proxy(RuntimeOrigin::signed(1), 2, ProxyType::Any, 0), + Error::::Duplicate + ); + assert_eq!(Balances::reserved_balance(1), 2); + assert_ok!(Proxy::add_proxy( + RuntimeOrigin::signed(1), + 2, + ProxyType::JustTransfer, + 0 + )); + assert_eq!(Balances::reserved_balance(1), 3); + assert_ok!(Proxy::add_proxy( + RuntimeOrigin::signed(1), + 3, + ProxyType::Any, + 0 + )); + assert_eq!(Balances::reserved_balance(1), 4); + assert_ok!(Proxy::add_proxy( + RuntimeOrigin::signed(1), + 4, + ProxyType::JustUtility, + 0 + )); + 
assert_eq!(Balances::reserved_balance(1), 5); + assert_noop!( + Proxy::add_proxy(RuntimeOrigin::signed(1), 4, ProxyType::Any, 0), + Error::::TooMany + ); + assert_noop!( + Proxy::remove_proxy(RuntimeOrigin::signed(1), 3, ProxyType::JustTransfer, 0), + Error::::NotFound + ); + assert_ok!(Proxy::remove_proxy( + RuntimeOrigin::signed(1), + 4, + ProxyType::JustUtility, + 0 + )); + System::assert_last_event( + ProxyEvent::ProxyRemoved { + delegator: 1, + delegatee: 4, + proxy_type: ProxyType::JustUtility, + delay: 0, + } + .into(), + ); + assert_eq!(Balances::reserved_balance(1), 4); + assert_ok!(Proxy::remove_proxy( + RuntimeOrigin::signed(1), + 3, + ProxyType::Any, + 0 + )); + assert_eq!(Balances::reserved_balance(1), 3); + System::assert_last_event( + ProxyEvent::ProxyRemoved { + delegator: 1, + delegatee: 3, + proxy_type: ProxyType::Any, + delay: 0, + } + .into(), + ); + assert_ok!(Proxy::remove_proxy( + RuntimeOrigin::signed(1), + 2, + ProxyType::Any, + 0 + )); + assert_eq!(Balances::reserved_balance(1), 2); + System::assert_last_event( + ProxyEvent::ProxyRemoved { + delegator: 1, + delegatee: 2, + proxy_type: ProxyType::Any, + delay: 0, + } + .into(), + ); + assert_ok!(Proxy::remove_proxy( + RuntimeOrigin::signed(1), + 2, + ProxyType::JustTransfer, + 0 + )); + assert_eq!(Balances::reserved_balance(1), 0); + System::assert_last_event( + ProxyEvent::ProxyRemoved { + delegator: 1, + delegatee: 2, + proxy_type: ProxyType::JustTransfer, + delay: 0, + } + .into(), + ); + assert_noop!( + Proxy::add_proxy(RuntimeOrigin::signed(1), 1, ProxyType::Any, 0), + Error::::NoSelfProxy + ); + }); +} + +#[test] +fn cannot_add_proxy_without_balance() { + new_test_ext().execute_with(|| { + assert_ok!(Proxy::add_proxy( + RuntimeOrigin::signed(5), + 3, + ProxyType::Any, + 0 + )); + assert_eq!(Balances::reserved_balance(5), 2); + assert_noop!( + Proxy::add_proxy(RuntimeOrigin::signed(5), 4, ProxyType::Any, 0), + DispatchError::ConsumerRemaining, + ); + }); +} + +#[test] +fn proxying_works() { + new_test_ext().execute_with(|| { + assert_ok!(Proxy::add_proxy( + RuntimeOrigin::signed(1), + 2, + ProxyType::JustTransfer, + 0 + )); + assert_ok!(Proxy::add_proxy( + RuntimeOrigin::signed(1), + 3, + ProxyType::Any, + 0 + )); + + let call = Box::new(call_transfer(6, 1)); + assert_noop!( + Proxy::proxy(RuntimeOrigin::signed(4), 1, None, call.clone()), + Error::::NotProxy + ); + assert_noop!( + Proxy::proxy( + RuntimeOrigin::signed(2), + 1, + Some(ProxyType::Any), + call.clone() + ), + Error::::NotProxy + ); + assert_ok!(Proxy::proxy( + RuntimeOrigin::signed(2), + 1, + None, + call.clone() + )); + System::assert_last_event(ProxyEvent::ProxyExecuted { result: Ok(()) }.into()); + assert_eq!(Balances::free_balance(6), 1); + + let call = Box::new(RuntimeCall::System(SystemCall::set_code { code: vec![] })); + assert_ok!(Proxy::proxy( + RuntimeOrigin::signed(3), + 1, + None, + call.clone() + )); + System::assert_last_event( + ProxyEvent::ProxyExecuted { + result: Err(SystemError::CallFiltered.into()), + } + .into(), + ); + + let call = Box::new(RuntimeCall::Balances(BalancesCall::transfer_keep_alive { + dest: 6, + value: 1, + })); + assert_ok!( + RuntimeCall::Proxy(super::Call::new_call_variant_proxy(1, None, call.clone())) + .dispatch(RuntimeOrigin::signed(2)) + ); + System::assert_last_event( + ProxyEvent::ProxyExecuted { + result: Err(SystemError::CallFiltered.into()), + } + .into(), + ); + assert_ok!(Proxy::proxy( + RuntimeOrigin::signed(3), + 1, + None, + call.clone() + )); + 
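+        // Account 3 was added with `ProxyType::Any`, so the same `transfer_keep_alive`
+        // that was just filtered for the `JustTransfer` proxy (account 2) now succeeds
+        // and credits account 6.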
System::assert_last_event(ProxyEvent::ProxyExecuted { result: Ok(()) }.into()); + assert_eq!(Balances::free_balance(6), 2); + }); +} + +#[test] +fn pure_works() { + new_test_ext().execute_with(|| { + Balances::make_free_balance_be(&1, 11); // An extra one for the ED. + assert_ok!(Proxy::create_pure( + RuntimeOrigin::signed(1), + ProxyType::Any, + 0, + 0 + )); + let anon = Proxy::pure_account(&1, &ProxyType::Any, 0, None); + System::assert_last_event( + ProxyEvent::PureCreated { + pure: anon, + who: 1, + proxy_type: ProxyType::Any, + disambiguation_index: 0, + } + .into(), + ); + + // other calls to pure allowed as long as they're not exactly the same. + assert_ok!(Proxy::create_pure( + RuntimeOrigin::signed(1), + ProxyType::JustTransfer, + 0, + 0 + )); + assert_ok!(Proxy::create_pure( + RuntimeOrigin::signed(1), + ProxyType::Any, + 0, + 1 + )); + let anon2 = Proxy::pure_account(&2, &ProxyType::Any, 0, None); + assert_ok!(Proxy::create_pure( + RuntimeOrigin::signed(2), + ProxyType::Any, + 0, + 0 + )); + assert_noop!( + Proxy::create_pure(RuntimeOrigin::signed(1), ProxyType::Any, 0, 0), + Error::::Duplicate + ); + System::set_extrinsic_index(1); + assert_ok!(Proxy::create_pure( + RuntimeOrigin::signed(1), + ProxyType::Any, + 0, + 0 + )); + System::set_extrinsic_index(0); + System::set_block_number(2); + assert_ok!(Proxy::create_pure( + RuntimeOrigin::signed(1), + ProxyType::Any, + 0, + 0 + )); + + let call = Box::new(call_transfer(6, 1)); + assert_ok!(Balances::transfer_allow_death( + RuntimeOrigin::signed(3), + anon, + 5 + )); + assert_ok!(Proxy::proxy(RuntimeOrigin::signed(1), anon, None, call)); + System::assert_last_event(ProxyEvent::ProxyExecuted { result: Ok(()) }.into()); + assert_eq!(Balances::free_balance(6), 1); + + let call = Box::new(RuntimeCall::Proxy(ProxyCall::new_call_variant_kill_pure( + 1, + ProxyType::Any, + 0, + 1, + 0, + ))); + assert_ok!(Proxy::proxy( + RuntimeOrigin::signed(2), + anon2, + None, + call.clone() + )); + let de = DispatchError::from(Error::::NoPermission).stripped(); + System::assert_last_event(ProxyEvent::ProxyExecuted { result: Err(de) }.into()); + assert_noop!( + Proxy::kill_pure(RuntimeOrigin::signed(1), 1, ProxyType::Any, 0, 1, 0), + Error::::NoPermission + ); + assert_eq!(Balances::free_balance(1), 1); + assert_ok!(Proxy::proxy( + RuntimeOrigin::signed(1), + anon, + None, + call.clone() + )); + assert_eq!(Balances::free_balance(1), 3); + assert_noop!( + Proxy::proxy(RuntimeOrigin::signed(1), anon, None, call.clone()), + Error::::NotProxy + ); + }); +} diff --git a/pallets/proxy/src/weights.rs b/pallets/proxy/src/weights.rs new file mode 100644 index 0000000000..3093298e3e --- /dev/null +++ b/pallets/proxy/src/weights.rs @@ -0,0 +1,415 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//! Autogenerated weights for `pallet_proxy` +//! +//! 
THIS FILE WAS AUTO-GENERATED USING THE SUBSTRATE BENCHMARK CLI VERSION 32.0.0 +//! DATE: 2024-04-09, STEPS: `50`, REPEAT: `20`, LOW RANGE: `[]`, HIGH RANGE: `[]` +//! WORST CASE MAP SIZE: `1000000` +//! HOSTNAME: `runner-anb7yjbi-project-674-concurrent-0`, CPU: `Intel(R) Xeon(R) CPU @ 2.60GHz` +//! WASM-EXECUTION: `Compiled`, CHAIN: `Some("dev")`, DB CACHE: `1024` + +// Executed Command: +// ./target/production/substrate-node +// benchmark +// pallet +// --chain=dev +// --steps=50 +// --repeat=20 +// --pallet=pallet_proxy +// --no-storage-info +// --no-median-slopes +// --no-min-squares +// --extrinsic=* +// --wasm-execution=compiled +// --heap-pages=4096 +// --output=./substrate/frame/proxy/src/weights.rs +// --header=./substrate/HEADER-APACHE2 +// --template=./substrate/.maintain/frame-weight-template.hbs + +#![cfg_attr(rustfmt, rustfmt_skip)] +#![allow(unused_parens)] +#![allow(unused_imports)] +#![allow(missing_docs)] + +use frame_support::{traits::Get, weights::{Weight, constants::RocksDbWeight}}; +use core::marker::PhantomData; + +/// Weight functions needed for `pallet_proxy`. +pub trait WeightInfo { + fn proxy(p: u32, ) -> Weight; + fn proxy_announced(a: u32, p: u32, ) -> Weight; + fn remove_announcement(a: u32, p: u32, ) -> Weight; + fn reject_announcement(a: u32, p: u32, ) -> Weight; + fn announce(a: u32, p: u32, ) -> Weight; + fn add_proxy(p: u32, ) -> Weight; + fn remove_proxy(p: u32, ) -> Weight; + fn remove_proxies(p: u32, ) -> Weight; + fn create_pure(p: u32, ) -> Weight; + fn kill_pure(p: u32, ) -> Weight; +} + +/// Weights for `pallet_proxy` using the Substrate node and recommended hardware. +pub struct SubstrateWeight(PhantomData); +impl WeightInfo for SubstrateWeight { + /// Storage: `Proxy::Proxies` (r:1 w:0) + /// Proof: `Proxy::Proxies` (`max_values`: None, `max_size`: Some(1241), added: 3716, mode: `MaxEncodedLen`) + /// Storage: `SafeMode::EnteredUntil` (r:1 w:0) + /// Proof: `SafeMode::EnteredUntil` (`max_values`: Some(1), `max_size`: Some(4), added: 499, mode: `MaxEncodedLen`) + /// Storage: `TxPause::PausedCalls` (r:1 w:0) + /// Proof: `TxPause::PausedCalls` (`max_values`: None, `max_size`: Some(532), added: 3007, mode: `MaxEncodedLen`) + /// The range of component `p` is `[1, 31]`. + fn proxy(p: u32, ) -> Weight { + // Proof Size summary in bytes: + // Measured: `306 + p * (37 ±0)` + // Estimated: `4706` + // Minimum execution time: 18_280_000 picoseconds. + Weight::from_parts(19_655_145, 4706) + // Standard Error: 2_345 + .saturating_add(Weight::from_parts(36_306, 0).saturating_mul(p.into())) + .saturating_add(T::DbWeight::get().reads(3_u64)) + } + /// Storage: `Proxy::Proxies` (r:1 w:0) + /// Proof: `Proxy::Proxies` (`max_values`: None, `max_size`: Some(1241), added: 3716, mode: `MaxEncodedLen`) + /// Storage: `Proxy::Announcements` (r:1 w:1) + /// Proof: `Proxy::Announcements` (`max_values`: None, `max_size`: Some(2233), added: 4708, mode: `MaxEncodedLen`) + /// Storage: `System::Account` (r:1 w:1) + /// Proof: `System::Account` (`max_values`: None, `max_size`: Some(128), added: 2603, mode: `MaxEncodedLen`) + /// Storage: `SafeMode::EnteredUntil` (r:1 w:0) + /// Proof: `SafeMode::EnteredUntil` (`max_values`: Some(1), `max_size`: Some(4), added: 499, mode: `MaxEncodedLen`) + /// Storage: `TxPause::PausedCalls` (r:1 w:0) + /// Proof: `TxPause::PausedCalls` (`max_values`: None, `max_size`: Some(532), added: 3007, mode: `MaxEncodedLen`) + /// The range of component `a` is `[0, 31]`. + /// The range of component `p` is `[1, 31]`. 
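+    /// The fitted weight below is linear in both components: one term per pending
+    /// announcement (`a`) and one per stored proxy (`p`) on top of the base weight.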
+ fn proxy_announced(a: u32, p: u32, ) -> Weight { + // Proof Size summary in bytes: + // Measured: `633 + a * (68 ±0) + p * (37 ±0)` + // Estimated: `5698` + // Minimum execution time: 41_789_000 picoseconds. + Weight::from_parts(41_812_078, 5698) + // Standard Error: 3_694 + .saturating_add(Weight::from_parts(163_029, 0).saturating_mul(a.into())) + // Standard Error: 3_817 + .saturating_add(Weight::from_parts(79_539, 0).saturating_mul(p.into())) + .saturating_add(T::DbWeight::get().reads(5_u64)) + .saturating_add(T::DbWeight::get().writes(2_u64)) + } + /// Storage: `Proxy::Announcements` (r:1 w:1) + /// Proof: `Proxy::Announcements` (`max_values`: None, `max_size`: Some(2233), added: 4708, mode: `MaxEncodedLen`) + /// Storage: `System::Account` (r:1 w:1) + /// Proof: `System::Account` (`max_values`: None, `max_size`: Some(128), added: 2603, mode: `MaxEncodedLen`) + /// The range of component `a` is `[0, 31]`. + /// The range of component `p` is `[1, 31]`. + fn remove_announcement(a: u32, p: u32, ) -> Weight { + // Proof Size summary in bytes: + // Measured: `403 + a * (68 ±0)` + // Estimated: `5698` + // Minimum execution time: 22_475_000 picoseconds. + Weight::from_parts(22_666_821, 5698) + // Standard Error: 1_797 + .saturating_add(Weight::from_parts(170_629, 0).saturating_mul(a.into())) + // Standard Error: 1_857 + .saturating_add(Weight::from_parts(18_799, 0).saturating_mul(p.into())) + .saturating_add(T::DbWeight::get().reads(2_u64)) + .saturating_add(T::DbWeight::get().writes(2_u64)) + } + /// Storage: `Proxy::Announcements` (r:1 w:1) + /// Proof: `Proxy::Announcements` (`max_values`: None, `max_size`: Some(2233), added: 4708, mode: `MaxEncodedLen`) + /// Storage: `System::Account` (r:1 w:1) + /// Proof: `System::Account` (`max_values`: None, `max_size`: Some(128), added: 2603, mode: `MaxEncodedLen`) + /// The range of component `a` is `[0, 31]`. + /// The range of component `p` is `[1, 31]`. + fn reject_announcement(a: u32, p: u32, ) -> Weight { + // Proof Size summary in bytes: + // Measured: `403 + a * (68 ±0)` + // Estimated: `5698` + // Minimum execution time: 22_326_000 picoseconds. + Weight::from_parts(22_654_227, 5698) + // Standard Error: 1_859 + .saturating_add(Weight::from_parts(168_822, 0).saturating_mul(a.into())) + // Standard Error: 1_921 + .saturating_add(Weight::from_parts(21_839, 0).saturating_mul(p.into())) + .saturating_add(T::DbWeight::get().reads(2_u64)) + .saturating_add(T::DbWeight::get().writes(2_u64)) + } + /// Storage: `Proxy::Proxies` (r:1 w:0) + /// Proof: `Proxy::Proxies` (`max_values`: None, `max_size`: Some(1241), added: 3716, mode: `MaxEncodedLen`) + /// Storage: `Proxy::Announcements` (r:1 w:1) + /// Proof: `Proxy::Announcements` (`max_values`: None, `max_size`: Some(2233), added: 4708, mode: `MaxEncodedLen`) + /// Storage: `System::Account` (r:1 w:1) + /// Proof: `System::Account` (`max_values`: None, `max_size`: Some(128), added: 2603, mode: `MaxEncodedLen`) + /// The range of component `a` is `[0, 31]`. + /// The range of component `p` is `[1, 31]`. + fn announce(a: u32, p: u32, ) -> Weight { + // Proof Size summary in bytes: + // Measured: `420 + a * (68 ±0) + p * (37 ±0)` + // Estimated: `5698` + // Minimum execution time: 31_551_000 picoseconds. 
+ Weight::from_parts(32_205_445, 5698) + // Standard Error: 4_089 + .saturating_add(Weight::from_parts(167_596, 0).saturating_mul(a.into())) + // Standard Error: 4_225 + .saturating_add(Weight::from_parts(67_833, 0).saturating_mul(p.into())) + .saturating_add(T::DbWeight::get().reads(3_u64)) + .saturating_add(T::DbWeight::get().writes(2_u64)) + } + /// Storage: `Proxy::Proxies` (r:1 w:1) + /// Proof: `Proxy::Proxies` (`max_values`: None, `max_size`: Some(1241), added: 3716, mode: `MaxEncodedLen`) + /// The range of component `p` is `[1, 31]`. + fn add_proxy(p: u32, ) -> Weight { + // Proof Size summary in bytes: + // Measured: `161 + p * (37 ±0)` + // Estimated: `4706` + // Minimum execution time: 21_495_000 picoseconds. + Weight::from_parts(22_358_457, 4706) + // Standard Error: 1_606 + .saturating_add(Weight::from_parts(64_322, 0).saturating_mul(p.into())) + .saturating_add(T::DbWeight::get().reads(1_u64)) + .saturating_add(T::DbWeight::get().writes(1_u64)) + } + /// Storage: `Proxy::Proxies` (r:1 w:1) + /// Proof: `Proxy::Proxies` (`max_values`: None, `max_size`: Some(1241), added: 3716, mode: `MaxEncodedLen`) + /// The range of component `p` is `[1, 31]`. + fn remove_proxy(p: u32, ) -> Weight { + // Proof Size summary in bytes: + // Measured: `161 + p * (37 ±0)` + // Estimated: `4706` + // Minimum execution time: 21_495_000 picoseconds. + Weight::from_parts(22_579_308, 4706) + // Standard Error: 2_571 + .saturating_add(Weight::from_parts(62_404, 0).saturating_mul(p.into())) + .saturating_add(T::DbWeight::get().reads(1_u64)) + .saturating_add(T::DbWeight::get().writes(1_u64)) + } + /// Storage: `Proxy::Proxies` (r:1 w:1) + /// Proof: `Proxy::Proxies` (`max_values`: None, `max_size`: Some(1241), added: 3716, mode: `MaxEncodedLen`) + /// The range of component `p` is `[1, 31]`. + fn remove_proxies(p: u32, ) -> Weight { + // Proof Size summary in bytes: + // Measured: `161 + p * (37 ±0)` + // Estimated: `4706` + // Minimum execution time: 20_541_000 picoseconds. + Weight::from_parts(21_456_750, 4706) + // Standard Error: 1_697 + .saturating_add(Weight::from_parts(45_387, 0).saturating_mul(p.into())) + .saturating_add(T::DbWeight::get().reads(1_u64)) + .saturating_add(T::DbWeight::get().writes(1_u64)) + } + /// Storage: `Proxy::Proxies` (r:1 w:1) + /// Proof: `Proxy::Proxies` (`max_values`: None, `max_size`: Some(1241), added: 3716, mode: `MaxEncodedLen`) + /// The range of component `p` is `[1, 31]`. + fn create_pure(p: u32, ) -> Weight { + // Proof Size summary in bytes: + // Measured: `173` + // Estimated: `4706` + // Minimum execution time: 22_809_000 picoseconds. + Weight::from_parts(23_878_644, 4706) + // Standard Error: 1_600 + .saturating_add(Weight::from_parts(10_149, 0).saturating_mul(p.into())) + .saturating_add(T::DbWeight::get().reads(1_u64)) + .saturating_add(T::DbWeight::get().writes(1_u64)) + } + /// Storage: `Proxy::Proxies` (r:1 w:1) + /// Proof: `Proxy::Proxies` (`max_values`: None, `max_size`: Some(1241), added: 3716, mode: `MaxEncodedLen`) + /// The range of component `p` is `[0, 30]`. + fn kill_pure(p: u32, ) -> Weight { + // Proof Size summary in bytes: + // Measured: `198 + p * (37 ±0)` + // Estimated: `4706` + // Minimum execution time: 20_993_000 picoseconds. + Weight::from_parts(22_067_418, 4706) + // Standard Error: 1_673 + .saturating_add(Weight::from_parts(52_703, 0).saturating_mul(p.into())) + .saturating_add(T::DbWeight::get().reads(1_u64)) + .saturating_add(T::DbWeight::get().writes(1_u64)) + } +} + +// For backwards compatibility and tests. 
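+// The `()` impl mirrors `SubstrateWeight` but prices storage access with `RocksDbWeight`,
+// which is what mock/test runtimes typically plug in as `type WeightInfo = ()`.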
+impl WeightInfo for () { + /// Storage: `Proxy::Proxies` (r:1 w:0) + /// Proof: `Proxy::Proxies` (`max_values`: None, `max_size`: Some(1241), added: 3716, mode: `MaxEncodedLen`) + /// Storage: `SafeMode::EnteredUntil` (r:1 w:0) + /// Proof: `SafeMode::EnteredUntil` (`max_values`: Some(1), `max_size`: Some(4), added: 499, mode: `MaxEncodedLen`) + /// Storage: `TxPause::PausedCalls` (r:1 w:0) + /// Proof: `TxPause::PausedCalls` (`max_values`: None, `max_size`: Some(532), added: 3007, mode: `MaxEncodedLen`) + /// The range of component `p` is `[1, 31]`. + fn proxy(p: u32, ) -> Weight { + // Proof Size summary in bytes: + // Measured: `306 + p * (37 ±0)` + // Estimated: `4706` + // Minimum execution time: 18_280_000 picoseconds. + Weight::from_parts(19_655_145, 4706) + // Standard Error: 2_345 + .saturating_add(Weight::from_parts(36_306, 0).saturating_mul(p.into())) + .saturating_add(RocksDbWeight::get().reads(3_u64)) + } + /// Storage: `Proxy::Proxies` (r:1 w:0) + /// Proof: `Proxy::Proxies` (`max_values`: None, `max_size`: Some(1241), added: 3716, mode: `MaxEncodedLen`) + /// Storage: `Proxy::Announcements` (r:1 w:1) + /// Proof: `Proxy::Announcements` (`max_values`: None, `max_size`: Some(2233), added: 4708, mode: `MaxEncodedLen`) + /// Storage: `System::Account` (r:1 w:1) + /// Proof: `System::Account` (`max_values`: None, `max_size`: Some(128), added: 2603, mode: `MaxEncodedLen`) + /// Storage: `SafeMode::EnteredUntil` (r:1 w:0) + /// Proof: `SafeMode::EnteredUntil` (`max_values`: Some(1), `max_size`: Some(4), added: 499, mode: `MaxEncodedLen`) + /// Storage: `TxPause::PausedCalls` (r:1 w:0) + /// Proof: `TxPause::PausedCalls` (`max_values`: None, `max_size`: Some(532), added: 3007, mode: `MaxEncodedLen`) + /// The range of component `a` is `[0, 31]`. + /// The range of component `p` is `[1, 31]`. + fn proxy_announced(a: u32, p: u32, ) -> Weight { + // Proof Size summary in bytes: + // Measured: `633 + a * (68 ±0) + p * (37 ±0)` + // Estimated: `5698` + // Minimum execution time: 41_789_000 picoseconds. + Weight::from_parts(41_812_078, 5698) + // Standard Error: 3_694 + .saturating_add(Weight::from_parts(163_029, 0).saturating_mul(a.into())) + // Standard Error: 3_817 + .saturating_add(Weight::from_parts(79_539, 0).saturating_mul(p.into())) + .saturating_add(RocksDbWeight::get().reads(5_u64)) + .saturating_add(RocksDbWeight::get().writes(2_u64)) + } + /// Storage: `Proxy::Announcements` (r:1 w:1) + /// Proof: `Proxy::Announcements` (`max_values`: None, `max_size`: Some(2233), added: 4708, mode: `MaxEncodedLen`) + /// Storage: `System::Account` (r:1 w:1) + /// Proof: `System::Account` (`max_values`: None, `max_size`: Some(128), added: 2603, mode: `MaxEncodedLen`) + /// The range of component `a` is `[0, 31]`. + /// The range of component `p` is `[1, 31]`. + fn remove_announcement(a: u32, p: u32, ) -> Weight { + // Proof Size summary in bytes: + // Measured: `403 + a * (68 ±0)` + // Estimated: `5698` + // Minimum execution time: 22_475_000 picoseconds. 
+ Weight::from_parts(22_666_821, 5698) + // Standard Error: 1_797 + .saturating_add(Weight::from_parts(170_629, 0).saturating_mul(a.into())) + // Standard Error: 1_857 + .saturating_add(Weight::from_parts(18_799, 0).saturating_mul(p.into())) + .saturating_add(RocksDbWeight::get().reads(2_u64)) + .saturating_add(RocksDbWeight::get().writes(2_u64)) + } + /// Storage: `Proxy::Announcements` (r:1 w:1) + /// Proof: `Proxy::Announcements` (`max_values`: None, `max_size`: Some(2233), added: 4708, mode: `MaxEncodedLen`) + /// Storage: `System::Account` (r:1 w:1) + /// Proof: `System::Account` (`max_values`: None, `max_size`: Some(128), added: 2603, mode: `MaxEncodedLen`) + /// The range of component `a` is `[0, 31]`. + /// The range of component `p` is `[1, 31]`. + fn reject_announcement(a: u32, p: u32, ) -> Weight { + // Proof Size summary in bytes: + // Measured: `403 + a * (68 ±0)` + // Estimated: `5698` + // Minimum execution time: 22_326_000 picoseconds. + Weight::from_parts(22_654_227, 5698) + // Standard Error: 1_859 + .saturating_add(Weight::from_parts(168_822, 0).saturating_mul(a.into())) + // Standard Error: 1_921 + .saturating_add(Weight::from_parts(21_839, 0).saturating_mul(p.into())) + .saturating_add(RocksDbWeight::get().reads(2_u64)) + .saturating_add(RocksDbWeight::get().writes(2_u64)) + } + /// Storage: `Proxy::Proxies` (r:1 w:0) + /// Proof: `Proxy::Proxies` (`max_values`: None, `max_size`: Some(1241), added: 3716, mode: `MaxEncodedLen`) + /// Storage: `Proxy::Announcements` (r:1 w:1) + /// Proof: `Proxy::Announcements` (`max_values`: None, `max_size`: Some(2233), added: 4708, mode: `MaxEncodedLen`) + /// Storage: `System::Account` (r:1 w:1) + /// Proof: `System::Account` (`max_values`: None, `max_size`: Some(128), added: 2603, mode: `MaxEncodedLen`) + /// The range of component `a` is `[0, 31]`. + /// The range of component `p` is `[1, 31]`. + fn announce(a: u32, p: u32, ) -> Weight { + // Proof Size summary in bytes: + // Measured: `420 + a * (68 ±0) + p * (37 ±0)` + // Estimated: `5698` + // Minimum execution time: 31_551_000 picoseconds. + Weight::from_parts(32_205_445, 5698) + // Standard Error: 4_089 + .saturating_add(Weight::from_parts(167_596, 0).saturating_mul(a.into())) + // Standard Error: 4_225 + .saturating_add(Weight::from_parts(67_833, 0).saturating_mul(p.into())) + .saturating_add(RocksDbWeight::get().reads(3_u64)) + .saturating_add(RocksDbWeight::get().writes(2_u64)) + } + /// Storage: `Proxy::Proxies` (r:1 w:1) + /// Proof: `Proxy::Proxies` (`max_values`: None, `max_size`: Some(1241), added: 3716, mode: `MaxEncodedLen`) + /// The range of component `p` is `[1, 31]`. + fn add_proxy(p: u32, ) -> Weight { + // Proof Size summary in bytes: + // Measured: `161 + p * (37 ±0)` + // Estimated: `4706` + // Minimum execution time: 21_495_000 picoseconds. + Weight::from_parts(22_358_457, 4706) + // Standard Error: 1_606 + .saturating_add(Weight::from_parts(64_322, 0).saturating_mul(p.into())) + .saturating_add(RocksDbWeight::get().reads(1_u64)) + .saturating_add(RocksDbWeight::get().writes(1_u64)) + } + /// Storage: `Proxy::Proxies` (r:1 w:1) + /// Proof: `Proxy::Proxies` (`max_values`: None, `max_size`: Some(1241), added: 3716, mode: `MaxEncodedLen`) + /// The range of component `p` is `[1, 31]`. + fn remove_proxy(p: u32, ) -> Weight { + // Proof Size summary in bytes: + // Measured: `161 + p * (37 ±0)` + // Estimated: `4706` + // Minimum execution time: 21_495_000 picoseconds. 
+ Weight::from_parts(22_579_308, 4706) + // Standard Error: 2_571 + .saturating_add(Weight::from_parts(62_404, 0).saturating_mul(p.into())) + .saturating_add(RocksDbWeight::get().reads(1_u64)) + .saturating_add(RocksDbWeight::get().writes(1_u64)) + } + /// Storage: `Proxy::Proxies` (r:1 w:1) + /// Proof: `Proxy::Proxies` (`max_values`: None, `max_size`: Some(1241), added: 3716, mode: `MaxEncodedLen`) + /// The range of component `p` is `[1, 31]`. + fn remove_proxies(p: u32, ) -> Weight { + // Proof Size summary in bytes: + // Measured: `161 + p * (37 ±0)` + // Estimated: `4706` + // Minimum execution time: 20_541_000 picoseconds. + Weight::from_parts(21_456_750, 4706) + // Standard Error: 1_697 + .saturating_add(Weight::from_parts(45_387, 0).saturating_mul(p.into())) + .saturating_add(RocksDbWeight::get().reads(1_u64)) + .saturating_add(RocksDbWeight::get().writes(1_u64)) + } + /// Storage: `Proxy::Proxies` (r:1 w:1) + /// Proof: `Proxy::Proxies` (`max_values`: None, `max_size`: Some(1241), added: 3716, mode: `MaxEncodedLen`) + /// The range of component `p` is `[1, 31]`. + fn create_pure(p: u32, ) -> Weight { + // Proof Size summary in bytes: + // Measured: `173` + // Estimated: `4706` + // Minimum execution time: 22_809_000 picoseconds. + Weight::from_parts(23_878_644, 4706) + // Standard Error: 1_600 + .saturating_add(Weight::from_parts(10_149, 0).saturating_mul(p.into())) + .saturating_add(RocksDbWeight::get().reads(1_u64)) + .saturating_add(RocksDbWeight::get().writes(1_u64)) + } + /// Storage: `Proxy::Proxies` (r:1 w:1) + /// Proof: `Proxy::Proxies` (`max_values`: None, `max_size`: Some(1241), added: 3716, mode: `MaxEncodedLen`) + /// The range of component `p` is `[0, 30]`. + fn kill_pure(p: u32, ) -> Weight { + // Proof Size summary in bytes: + // Measured: `198 + p * (37 ±0)` + // Estimated: `4706` + // Minimum execution time: 20_993_000 picoseconds. + Weight::from_parts(22_067_418, 4706) + // Standard Error: 1_673 + .saturating_add(Weight::from_parts(52_703, 0).saturating_mul(p.into())) + .saturating_add(RocksDbWeight::get().reads(1_u64)) + .saturating_add(RocksDbWeight::get().writes(1_u64)) + } +} diff --git a/pallets/subtensor/src/lib.rs b/pallets/subtensor/src/lib.rs index f3fccbfcef..23272a0993 100644 --- a/pallets/subtensor/src/lib.rs +++ b/pallets/subtensor/src/lib.rs @@ -396,6 +396,11 @@ pub mod pallet { 0 } #[pallet::type_value] + /// Default EMA price halving blocks + pub fn DefaultEMAPriceMovingBlocks() -> u64 { + T::InitialEmaPriceHalvingPeriod::get() + } + #[pallet::type_value] /// Default registrations this block. pub fn DefaultBurn() -> u64 { T::InitialBurn::get() @@ -1294,6 +1299,10 @@ pub mod pallet { pub type RegistrationsThisBlock = StorageMap<_, Identity, u16, u16, ValueQuery, DefaultRegistrationsThisBlock>; #[pallet::storage] + /// --- MAP ( netuid ) --> Halving time of average moving price. + pub type EMAPriceHalvingBlocks = + StorageMap<_, Identity, u16, u64, ValueQuery, DefaultEMAPriceMovingBlocks>; + #[pallet::storage] /// --- MAP ( netuid ) --> global_RAO_recycled_for_registration pub type RAORecycledForRegistration = StorageMap<_, Identity, u16, u64, ValueQuery, DefaultRAORecycledForRegistration>; @@ -1566,15 +1575,21 @@ pub mod pallet { } /// Returns the transaction priority for stake operations. 
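+    /// With this change the priority is `(current_block - last_stake_block) + u32::MAX + stake_amount`
+    /// when the pair has staked before, and just `stake_amount` for a first-time stake.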
- pub fn get_priority_staking(coldkey: &T::AccountId, hotkey: &T::AccountId) -> u64 { + pub fn get_priority_staking( + coldkey: &T::AccountId, + hotkey: &T::AccountId, + stake_amount: u64, + ) -> u64 { match LastColdkeyHotkeyStakeBlock::::get(coldkey, hotkey) { Some(last_stake_block) => { let current_block_number = Self::get_current_block_as_u64(); let default_priority = current_block_number.saturating_sub(last_stake_block); - default_priority.saturating_add(u32::MAX as u64) + default_priority + .saturating_add(u32::MAX as u64) + .saturating_add(stake_amount) } - None => 0, + None => stake_amount, } } @@ -1703,8 +1718,12 @@ where Pallet::::get_priority_set_weights(who, netuid) } - pub fn get_priority_staking(coldkey: &T::AccountId, hotkey: &T::AccountId) -> u64 { - Pallet::::get_priority_staking(coldkey, hotkey) + pub fn get_priority_staking( + coldkey: &T::AccountId, + hotkey: &T::AccountId, + stake_amount: u64, + ) -> u64 { + Pallet::::get_priority_staking(coldkey, hotkey, stake_amount) } pub fn check_weights_min_stake(who: &T::AccountId, netuid: u16) -> bool { @@ -1919,7 +1938,7 @@ where *amount_staked, false, ), - Self::get_priority_staking(who, hotkey), + Self::get_priority_staking(who, hotkey, *amount_staked), ) } Some(Call::add_stake_limit { @@ -1949,7 +1968,7 @@ where max_amount, *allow_partial, ), - Self::get_priority_staking(who, hotkey), + Self::get_priority_staking(who, hotkey, *amount_staked), ) } Some(Call::remove_stake { @@ -1967,7 +1986,7 @@ where *amount_unstaked, false, ), - Self::get_priority_staking(who, hotkey), + Self::get_priority_staking(who, hotkey, *amount_unstaked), ) } Some(Call::remove_stake_limit { @@ -1990,7 +2009,7 @@ where max_amount, *allow_partial, ), - Self::get_priority_staking(who, hotkey), + Self::get_priority_staking(who, hotkey, *amount_unstaked), ) } Some(Call::move_stake { @@ -2021,7 +2040,7 @@ where None, false, ), - Self::get_priority_staking(who, origin_hotkey), + Self::get_priority_staking(who, origin_hotkey, *alpha_amount), ) } Some(Call::transfer_stake { @@ -2052,7 +2071,7 @@ where None, true, ), - Self::get_priority_staking(who, hotkey), + Self::get_priority_staking(who, hotkey, *alpha_amount), ) } Some(Call::swap_stake { @@ -2082,7 +2101,7 @@ where None, false, ), - Self::get_priority_staking(who, hotkey), + Self::get_priority_staking(who, hotkey, *alpha_amount), ) } Some(Call::swap_stake_limit { @@ -2121,7 +2140,7 @@ where Some(*allow_partial), false, ), - Self::get_priority_staking(who, hotkey), + Self::get_priority_staking(who, hotkey, *alpha_amount), ) } Some(Call::register { netuid, .. } | Call::burned_register { netuid, .. }) => { diff --git a/pallets/subtensor/src/macros/config.rs b/pallets/subtensor/src/macros/config.rs index 60140a8ab2..af448c8771 100644 --- a/pallets/subtensor/src/macros/config.rs +++ b/pallets/subtensor/src/macros/config.rs @@ -207,5 +207,8 @@ mod config { /// Initial TAO weight. 
#[pallet::constant] type InitialTaoWeight: Get; + /// Initial EMA price halving period. + #[pallet::constant] + type InitialEmaPriceHalvingPeriod: Get; } }
diff --git a/pallets/subtensor/src/staking/stake_utils.rs b/pallets/subtensor/src/staking/stake_utils.rs index be9a3744f2..8be22c32f9 100644 --- a/pallets/subtensor/src/staking/stake_utils.rs +++ b/pallets/subtensor/src/staking/stake_utils.rs @@ -60,10 +60,15 @@ impl Pallet { let blocks_since_registration = I96F32::saturating_from_num( Self::get_current_block_as_u64().saturating_sub(NetworkRegisteredAt::::get(netuid)), ); - // 7200 * 14 = 100_800 is the halving time + + // Use the halving-time hyperparameter. Its meaning is easiest to see for a constant price with + // SubnetMovingAlpha == 0.5: it is the number of blocks it takes for the gap between the current + // EMA of the price and the current price to shrink by half. + let halving_time = EMAPriceHalvingBlocks::::get(netuid); let alpha: I96F32 = SubnetMovingAlpha::::get().saturating_mul(blocks_since_registration.safe_div( - blocks_since_registration.saturating_add(I96F32::saturating_from_num(100_800)), + blocks_since_registration.saturating_add(I96F32::saturating_from_num(halving_time)), )); let minus_alpha: I96F32 = I96F32::saturating_from_num(1.0).saturating_sub(alpha); let current_price: I96F32 = alpha
diff --git a/pallets/subtensor/src/tests/coinbase.rs b/pallets/subtensor/src/tests/coinbase.rs index 4ab5d349fd..9f59fe338e 100644 --- a/pallets/subtensor/src/tests/coinbase.rs +++ b/pallets/subtensor/src/tests/coinbase.rs @@ -218,8 +218,8 @@ fn test_coinbase_moving_prices() { SubnetMovingPrice::::insert(netuid, I96F32::from_num(0)); SubnetMovingAlpha::::set(I96F32::from_num(0.1)); - // EMA price 14 days after registration - System::set_block_number(7_200 * 14); + // EMA price 28 days after registration + System::set_block_number(7_200 * 28); // Run moving 14 times. for _ in 0..14 { @@ -274,7 +274,7 @@ fn test_update_moving_price_after_time() { SubnetMovingPrice::::insert(netuid, I96F32::from_num(0)); // Registered long time ago - System::set_block_number(72_000_500); + System::set_block_number(144_000_500); NetworkRegisteredAt::::insert(netuid, 500); SubtensorModule::update_moving_price(netuid);
diff --git a/pallets/subtensor/src/tests/mock.rs b/pallets/subtensor/src/tests/mock.rs index 79da754815..0d979a6126 100644 --- a/pallets/subtensor/src/tests/mock.rs +++ b/pallets/subtensor/src/tests/mock.rs @@ -184,6 +184,7 @@ parameter_types! { pub const InitialColdkeySwapScheduleDuration: u64 = 5 * 24 * 60 * 60 / 12; // Default as 5 days pub const InitialDissolveNetworkScheduleDuration: u64 = 5 * 24 * 60 * 60 / 12; // Default as 5 days pub const InitialTaoWeight: u64 = 0; // 100% global weight.
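+    // 201_600 = 7_200 blocks per day * 28 days, i.e. the four-week halving period exercised in the coinbase tests above.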
+ pub const InitialEmaPriceHalvingPeriod: u64 = 201_600_u64; // 4 weeks } // Configure collective pallet for council @@ -406,6 +407,7 @@ impl crate::Config for Test { type InitialColdkeySwapScheduleDuration = InitialColdkeySwapScheduleDuration; type InitialDissolveNetworkScheduleDuration = InitialDissolveNetworkScheduleDuration; type InitialTaoWeight = InitialTaoWeight; + type InitialEmaPriceHalvingPeriod = InitialEmaPriceHalvingPeriod; } pub struct OriginPrivilegeCmp; diff --git a/pallets/utility/Cargo.toml b/pallets/utility/Cargo.toml new file mode 100644 index 0000000000..6d217ebd4b --- /dev/null +++ b/pallets/utility/Cargo.toml @@ -0,0 +1,64 @@ +[package] +name = "pallet-utility" +version = "38.0.0" +edition = "2021" +license = "Apache-2.0" +description = "FRAME utilities pallet" +readme = "README.md" + +[lints] +workspace = true + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu"] + +[dependencies] +codec = { workspace = true } +scale-info = { features = ["derive"], workspace = true } +frame-benchmarking = { workspace = true, default-features = false, optional = true } +frame-support = { workspace = true, default-features = false } +frame-system = { workspace = true, default-features = false } +sp-core = { workspace = true, default-features = false } +sp-io = { workspace = true, default-features = false} +sp-runtime = { workspace = true, default-features = false} +subtensor-macros = { workspace = true } + +[dev-dependencies] +pallet-balances = { default-features = true, workspace = true } +pallet-collective = { default-features = false, path = "../collective" } +pallet-timestamp = { default-features = true, workspace = true } +sp-core = { default-features = true, workspace = true } +pallet-root-testing = { workspace = true, default-features = false } + +[features] +default = ["std"] +std = [ + "codec/std", + "frame-benchmarking?/std", + "frame-support/std", + "frame-system/std", + "scale-info/std", + "sp-core/std", + "sp-io/std", + "sp-runtime/std", + "pallet-collective/std", + "pallet-root-testing/std" +] +runtime-benchmarks = [ + "frame-benchmarking/runtime-benchmarks", + "frame-support/runtime-benchmarks", + "frame-system/runtime-benchmarks", + "sp-runtime/runtime-benchmarks", + "pallet-balances/runtime-benchmarks", + "pallet-collective/runtime-benchmarks", + "pallet-timestamp/runtime-benchmarks" +] +try-runtime = [ + "frame-support/try-runtime", + "frame-system/try-runtime", + "sp-runtime/try-runtime", + "pallet-balances/try-runtime", + "pallet-collective/try-runtime", + "pallet-root-testing/try-runtime", + "pallet-timestamp/try-runtime" +] diff --git a/pallets/utility/README.md b/pallets/utility/README.md new file mode 100644 index 0000000000..5366951a89 --- /dev/null +++ b/pallets/utility/README.md @@ -0,0 +1,43 @@ +# Utility Module +A stateless module with helpers for dispatch management which does no re-authentication. + +- [`utility::Config`](https://docs.rs/pallet-utility/latest/pallet_utility/pallet/trait.Config.html) +- [`Call`](https://docs.rs/pallet-utility/latest/pallet_utility/pallet/enum.Call.html) + +## Overview + +This module contains two basic pieces of functionality: +- Batch dispatch: A stateless operation, allowing any origin to execute multiple calls in a + single dispatch. This can be useful to amalgamate proposals, combining `set_code` with + corresponding `set_storage`s, for efficient multiple payouts with just a single signature + verify, or in combination with one of the other two dispatch functionality. 
+- Pseudonymal dispatch: A stateless operation, allowing a signed origin to execute a call from + an alternative signed origin. Each account has 2 * 2**16 possible "pseudonyms" (alternative + account IDs) and these can be stacked. This can be useful as a key management tool, where you + need multiple distinct accounts (e.g. as controllers for many staking accounts), but where + it's perfectly fine to have each of them controlled by the same underlying keypair. + Derivative accounts are, for the purposes of proxy filtering considered exactly the same as + the origin and are thus hampered with the origin's filters. + +Since proxy filters are respected in all dispatches of this module, it should never need to be +filtered by any proxy. + +## Interface + +### Dispatchable Functions + +#### For batch dispatch +- `batch` - Dispatch multiple calls from the sender's origin. + +#### For pseudonymal dispatch +- `as_derivative` - Dispatch a call from a derivative signed origin. + +[`Call`]: ./enum.Call.html +[`Config`]: ./trait.Config.html + +License: Apache-2.0 + + +## Release + +Polkadot SDK stable2409 diff --git a/pallets/utility/src/benchmarking.rs b/pallets/utility/src/benchmarking.rs new file mode 100644 index 0000000000..6980552c36 --- /dev/null +++ b/pallets/utility/src/benchmarking.rs @@ -0,0 +1,91 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Benchmarks for Utility Pallet + +#![cfg(feature = "runtime-benchmarks")] + +use super::*; +use alloc::{vec, vec::Vec}; +use frame_benchmarking::v1::{account, benchmarks, whitelisted_caller}; +use frame_system::RawOrigin; + +const SEED: u32 = 0; + +fn assert_last_event(generic_event: ::RuntimeEvent) { + frame_system::Pallet::::assert_last_event(generic_event.into()); +} + +benchmarks! { + where_clause { where ::PalletsOrigin: Clone } + batch { + let c in 0 .. 1000; + let mut calls: Vec<::RuntimeCall> = Vec::new(); + for i in 0 .. c { + let call = frame_system::Call::remark { remark: vec![] }.into(); + calls.push(call); + } + let caller = whitelisted_caller(); + }: _(RawOrigin::Signed(caller), calls) + verify { + assert_last_event::(Event::BatchCompleted.into()) + } + + as_derivative { + let caller = account("caller", SEED, SEED); + let call = Box::new(frame_system::Call::remark { remark: vec![] }.into()); + // Whitelist caller account from further DB operations. + let caller_key = frame_system::Account::::hashed_key_for(&caller); + frame_benchmarking::benchmarking::add_to_whitelist(caller_key.into()); + }: _(RawOrigin::Signed(caller), SEED as u16, call) + + batch_all { + let c in 0 .. 1000; + let mut calls: Vec<::RuntimeCall> = Vec::new(); + for i in 0 .. 
c { + let call = frame_system::Call::remark { remark: vec![] }.into(); + calls.push(call); + } + let caller = whitelisted_caller(); + }: _(RawOrigin::Signed(caller), calls) + verify { + assert_last_event::(Event::BatchCompleted.into()) + } + + dispatch_as { + let caller = account("caller", SEED, SEED); + let call = Box::new(frame_system::Call::remark { remark: vec![] }.into()); + let origin: T::RuntimeOrigin = RawOrigin::Signed(caller).into(); + let pallets_origin: ::PalletsOrigin = origin.caller().clone(); + let pallets_origin = Into::::into(pallets_origin); + }: _(RawOrigin::Root, Box::new(pallets_origin), call) + + force_batch { + let c in 0 .. 1000; + let mut calls: Vec<::RuntimeCall> = Vec::new(); + for i in 0 .. c { + let call = frame_system::Call::remark { remark: vec![] }.into(); + calls.push(call); + } + let caller = whitelisted_caller(); + }: _(RawOrigin::Signed(caller), calls) + verify { + assert_last_event::(Event::BatchCompleted.into()) + } + + impl_benchmark_test_suite!(Pallet, crate::tests::new_test_ext(), crate::tests::Test); +} diff --git a/pallets/utility/src/lib.rs b/pallets/utility/src/lib.rs new file mode 100644 index 0000000000..2677f744b6 --- /dev/null +++ b/pallets/utility/src/lib.rs @@ -0,0 +1,521 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//! # Utility Pallet +//! A stateless pallet with helpers for dispatch management which does no re-authentication. +//! +//! - [`Config`] +//! - [`Call`] +//! +//! ## Overview +//! +//! This pallet contains two basic pieces of functionality: +//! - Batch dispatch: A stateless operation, allowing any origin to execute multiple calls in a +//! single dispatch. This can be useful to amalgamate proposals, combining `set_code` with +//! corresponding `set_storage`s, for efficient multiple payouts with just a single signature +//! verify, or in combination with one of the other two dispatch functionality. +//! - Pseudonymal dispatch: A stateless operation, allowing a signed origin to execute a call from +//! an alternative signed origin. Each account has 2 * 2**16 possible "pseudonyms" (alternative +//! account IDs) and these can be stacked. This can be useful as a key management tool, where you +//! need multiple distinct accounts (e.g. as controllers for many staking accounts), but where +//! it's perfectly fine to have each of them controlled by the same underlying keypair. Derivative +//! accounts are, for the purposes of proxy filtering considered exactly the same as the origin +//! and are thus hampered with the origin's filters. +//! +//! Since proxy filters are respected in all dispatches of this pallet, it should never need to be +//! filtered by any proxy. +//! +//! ## Interface +//! +//! ### Dispatchable Functions +//! +//! #### For batch dispatch +//! * `batch` - Dispatch multiple calls from the sender's origin. +//! +//! #### For pseudonymal dispatch +//! 
* `as_derivative` - Dispatch a call from a derivative signed origin. + +// Ensure we're `no_std` when compiling for Wasm. +#![cfg_attr(not(feature = "std"), no_std)] + +mod benchmarking; +mod tests; +pub mod weights; + +extern crate alloc; + +use alloc::{boxed::Box, vec::Vec}; +use codec::{Decode, Encode}; +use frame_support::{ + dispatch::{extract_actual_weight, GetDispatchInfo, PostDispatchInfo}, + traits::{IsSubType, OriginTrait, UnfilteredDispatchable}, +}; +use sp_core::TypeId; +use sp_io::hashing::blake2_256; +use sp_runtime::traits::{BadOrigin, Dispatchable, TrailingZeroInput}; +pub use weights::WeightInfo; + +use subtensor_macros::freeze_struct; + +pub use pallet::*; + +#[frame_support::pallet] +pub mod pallet { + use super::*; + use frame_support::{dispatch::DispatchClass, pallet_prelude::*}; + use frame_system::pallet_prelude::*; + + #[pallet::pallet] + pub struct Pallet(_); + + /// Configuration trait. + #[pallet::config] + pub trait Config: frame_system::Config { + /// The overarching event type. + type RuntimeEvent: From + IsType<::RuntimeEvent>; + + /// The overarching call type. + type RuntimeCall: Parameter + + Dispatchable + + GetDispatchInfo + + From> + + UnfilteredDispatchable + + IsSubType> + + IsType<::RuntimeCall>; + + /// The caller origin, overarching type of all pallets origins. + type PalletsOrigin: Parameter + + Into<::RuntimeOrigin> + + IsType<<::RuntimeOrigin as frame_support::traits::OriginTrait>::PalletsOrigin>; + + /// Weight information for extrinsics in this pallet. + type WeightInfo: WeightInfo; + } + + #[pallet::event] + #[pallet::generate_deposit(pub(super) fn deposit_event)] + pub enum Event { + /// Batch of dispatches did not complete fully. Index of first failing dispatch given, as + /// well as the error. + BatchInterrupted { index: u32, error: DispatchError }, + /// Batch of dispatches completed fully with no error. + BatchCompleted, + /// Batch of dispatches completed but has errors. + BatchCompletedWithErrors, + /// A single item within a Batch of dispatches has completed with no error. + ItemCompleted, + /// A single item within a Batch of dispatches has completed with error. + ItemFailed { error: DispatchError }, + /// A call was dispatched. + DispatchedAs { result: DispatchResult }, + } + + // Align the call size to 1KB. As we are currently compiling the runtime for native/wasm + // the `size_of` of the `Call` can be different. To ensure that this don't leads to + // mismatches between native/wasm or to different metadata for the same runtime, we + // algin the call size. The value is chosen big enough to hopefully never reach it. + const CALL_ALIGN: u32 = 1024; + + #[pallet::extra_constants] + impl Pallet { + /// The limit on the number of batched calls. + fn batched_calls_limit() -> u32 { + let allocator_limit = sp_core::MAX_POSSIBLE_ALLOCATION; + let size = core::mem::size_of::<::RuntimeCall>() as u32; + + let align_up = size.saturating_add(CALL_ALIGN.saturating_sub(1)); + let call_size = align_up + .checked_div(CALL_ALIGN) + .unwrap_or(0) + .saturating_mul(CALL_ALIGN); + + let margin_factor: u32 = 3; + + let after_margin = allocator_limit.checked_div(margin_factor).unwrap_or(0); + + after_margin.checked_div(call_size).unwrap_or(0) + } + } + + #[pallet::hooks] + impl Hooks> for Pallet { + fn integrity_test() { + // If you hit this error, you need to try to `Box` big dispatchable parameters. 
+ assert!( + core::mem::size_of::<::RuntimeCall>() as u32 <= CALL_ALIGN, + "Call enum size should be smaller than {} bytes.", + CALL_ALIGN, + ); + } + } + + #[pallet::error] + pub enum Error { + /// Too many calls batched. + TooManyCalls, + } + + #[pallet::call] + impl Pallet { + /// Send a batch of dispatch calls. + /// + /// May be called from any origin except `None`. + /// + /// - `calls`: The calls to be dispatched from the same origin. The number of call must not + /// exceed the constant: `batched_calls_limit` (available in constant metadata). + /// + /// If origin is root then the calls are dispatched without checking origin filter. (This + /// includes bypassing `frame_system::Config::BaseCallFilter`). + /// + /// ## Complexity + /// - O(C) where C is the number of calls to be batched. + /// + /// This will return `Ok` in all circumstances. To determine the success of the batch, an + /// event is deposited. If a call failed and the batch was interrupted, then the + /// `BatchInterrupted` event is deposited, along with the number of successful calls made + /// and the error of the failed call. If all were successful, then the `BatchCompleted` + /// event is deposited. + #[pallet::call_index(0)] + #[pallet::weight({ + let (dispatch_weight, dispatch_class) = Pallet::::weight_and_dispatch_class(calls); + let dispatch_weight = dispatch_weight.saturating_add(T::WeightInfo::batch(calls.len() as u32)); + (dispatch_weight, dispatch_class) + })] + pub fn batch( + origin: OriginFor, + calls: Vec<::RuntimeCall>, + ) -> DispatchResultWithPostInfo { + // Do not allow the `None` origin. + if ensure_none(origin.clone()).is_ok() { + return Err(BadOrigin.into()); + } + + let is_root = ensure_root(origin.clone()).is_ok(); + let calls_len = calls.len(); + ensure!( + calls_len <= Self::batched_calls_limit() as usize, + Error::::TooManyCalls + ); + + // Track the actual weight of each of the batch calls. + let mut weight = Weight::zero(); + for (index, call) in calls.into_iter().enumerate() { + let info = call.get_dispatch_info(); + // If origin is root, don't apply any dispatch filters; root can call anything. + let result = if is_root { + call.dispatch_bypass_filter(origin.clone()) + } else { + call.dispatch(origin.clone()) + }; + // Add the weight of this call. + weight = weight.saturating_add(extract_actual_weight(&result, &info)); + if let Err(e) = result { + Self::deposit_event(Event::BatchInterrupted { + index: index as u32, + error: e.error, + }); + // Take the weight of this function itself into account. + let base_weight = T::WeightInfo::batch(index.saturating_add(1) as u32); + // Return the actual used weight + base_weight of this call. + return Ok(Some(base_weight.saturating_add(weight)).into()); + } + Self::deposit_event(Event::ItemCompleted); + } + Self::deposit_event(Event::BatchCompleted); + let base_weight = T::WeightInfo::batch(calls_len as u32); + Ok(Some(base_weight.saturating_add(weight)).into()) + } + + /// Send a call through an indexed pseudonym of the sender. + /// + /// Filter from origin are passed along. The call will be dispatched with an origin which + /// use the same filter as the origin of this call. + /// + /// NOTE: If you need to ensure that any account-based filtering is not honored (i.e. + /// because you expect `proxy` to have been used prior in the call stack and you do not want + /// the call restrictions to apply to any sub-accounts), then use `as_multi_threshold_1` + /// in the Multisig pallet instead. 
+ /// + /// NOTE: Prior to version *12, this was called `as_limited_sub`. + /// + /// The dispatch origin for this call must be _Signed_. + #[pallet::call_index(1)] + #[pallet::weight({ + let dispatch_info = call.get_dispatch_info(); + ( + T::WeightInfo::as_derivative() + // AccountData for inner call origin accountdata. + .saturating_add(T::DbWeight::get().reads_writes(1, 1)) + .saturating_add(dispatch_info.weight), + dispatch_info.class, + ) + })] + pub fn as_derivative( + origin: OriginFor, + index: u16, + call: Box<::RuntimeCall>, + ) -> DispatchResultWithPostInfo { + let mut origin = origin; + let who = ensure_signed(origin.clone())?; + let pseudonym = Self::derivative_account_id(who, index); + origin.set_caller_from(frame_system::RawOrigin::Signed(pseudonym)); + let info = call.get_dispatch_info(); + let result = call.dispatch(origin); + // Always take into account the base weight of this call. + let mut weight = T::WeightInfo::as_derivative() + .saturating_add(T::DbWeight::get().reads_writes(1, 1)); + // Add the real weight of the dispatch. + weight = weight.saturating_add(extract_actual_weight(&result, &info)); + result + .map_err(|mut err| { + err.post_info = Some(weight).into(); + err + }) + .map(|_| Some(weight).into()) + } + + /// Send a batch of dispatch calls and atomically execute them. + /// The whole transaction will rollback and fail if any of the calls failed. + /// + /// May be called from any origin except `None`. + /// + /// - `calls`: The calls to be dispatched from the same origin. The number of call must not + /// exceed the constant: `batched_calls_limit` (available in constant metadata). + /// + /// If origin is root then the calls are dispatched without checking origin filter. (This + /// includes bypassing `frame_system::Config::BaseCallFilter`). + /// + /// ## Complexity + /// - O(C) where C is the number of calls to be batched. + #[pallet::call_index(2)] + #[pallet::weight({ + let (dispatch_weight, dispatch_class) = Pallet::::weight_and_dispatch_class(calls); + let dispatch_weight = dispatch_weight.saturating_add(T::WeightInfo::batch_all(calls.len() as u32)); + (dispatch_weight, dispatch_class) + })] + pub fn batch_all( + origin: OriginFor, + calls: Vec<::RuntimeCall>, + ) -> DispatchResultWithPostInfo { + // Do not allow the `None` origin. + if ensure_none(origin.clone()).is_ok() { + return Err(BadOrigin.into()); + } + + let is_root = ensure_root(origin.clone()).is_ok(); + let calls_len = calls.len(); + ensure!( + calls_len <= Self::batched_calls_limit() as usize, + Error::::TooManyCalls + ); + + // Track the actual weight of each of the batch calls. + let mut weight = Weight::zero(); + for (index, call) in calls.into_iter().enumerate() { + let info = call.get_dispatch_info(); + // If origin is root, bypass any dispatch filter; root can call anything. + let result = if is_root { + call.dispatch_bypass_filter(origin.clone()) + } else { + let mut filtered_origin = origin.clone(); + // Don't allow users to nest `batch_all` calls. + filtered_origin.add_filter( + move |c: &::RuntimeCall| { + let c = ::RuntimeCall::from_ref(c); + !matches!(c.is_sub_type(), Some(Call::batch_all { .. })) + }, + ); + call.dispatch(filtered_origin) + }; + // Add the weight of this call. + weight = weight.saturating_add(extract_actual_weight(&result, &info)); + result.map_err(|mut err| { + // Take the weight of this function itself into account. + let base_weight = T::WeightInfo::batch_all(index.saturating_add(1) as u32); + // Return the actual used weight + base_weight of this call. 
+ err.post_info = Some(base_weight.saturating_add(weight)).into(); + err + })?; + Self::deposit_event(Event::ItemCompleted); + } + Self::deposit_event(Event::BatchCompleted); + let base_weight = T::WeightInfo::batch_all(calls_len as u32); + Ok(Some(base_weight.saturating_add(weight)).into()) + } + + /// Dispatches a function call with a provided origin. + /// + /// The dispatch origin for this call must be _Root_. + /// + /// ## Complexity + /// - O(1). + #[pallet::call_index(3)] + #[pallet::weight({ + let dispatch_info = call.get_dispatch_info(); + ( + T::WeightInfo::dispatch_as() + .saturating_add(dispatch_info.weight), + dispatch_info.class, + ) + })] + pub fn dispatch_as( + origin: OriginFor, + as_origin: Box, + call: Box<::RuntimeCall>, + ) -> DispatchResult { + ensure_root(origin)?; + + let res = call.dispatch_bypass_filter((*as_origin).into()); + + Self::deposit_event(Event::DispatchedAs { + result: res.map(|_| ()).map_err(|e| e.error), + }); + Ok(()) + } + + /// Send a batch of dispatch calls. + /// Unlike `batch`, it allows errors and won't interrupt. + /// + /// May be called from any origin except `None`. + /// + /// - `calls`: The calls to be dispatched from the same origin. The number of call must not + /// exceed the constant: `batched_calls_limit` (available in constant metadata). + /// + /// If origin is root then the calls are dispatch without checking origin filter. (This + /// includes bypassing `frame_system::Config::BaseCallFilter`). + /// + /// ## Complexity + /// - O(C) where C is the number of calls to be batched. + #[pallet::call_index(4)] + #[pallet::weight({ + let (dispatch_weight, dispatch_class) = Pallet::::weight_and_dispatch_class(calls); + let dispatch_weight = dispatch_weight.saturating_add(T::WeightInfo::force_batch(calls.len() as u32)); + (dispatch_weight, dispatch_class) + })] + pub fn force_batch( + origin: OriginFor, + calls: Vec<::RuntimeCall>, + ) -> DispatchResultWithPostInfo { + // Do not allow the `None` origin. + if ensure_none(origin.clone()).is_ok() { + return Err(BadOrigin.into()); + } + + let is_root = ensure_root(origin.clone()).is_ok(); + let calls_len = calls.len(); + ensure!( + calls_len <= Self::batched_calls_limit() as usize, + Error::::TooManyCalls + ); + + // Track the actual weight of each of the batch calls. + let mut weight = Weight::zero(); + // Track failed dispatch occur. + let mut has_error: bool = false; + for call in calls.into_iter() { + let info = call.get_dispatch_info(); + // If origin is root, don't apply any dispatch filters; root can call anything. + let result = if is_root { + call.dispatch_bypass_filter(origin.clone()) + } else { + call.dispatch(origin.clone()) + }; + // Add the weight of this call. + weight = weight.saturating_add(extract_actual_weight(&result, &info)); + if let Err(e) = result { + has_error = true; + Self::deposit_event(Event::ItemFailed { error: e.error }); + } else { + Self::deposit_event(Event::ItemCompleted); + } + } + if has_error { + Self::deposit_event(Event::BatchCompletedWithErrors); + } else { + Self::deposit_event(Event::BatchCompleted); + } + let base_weight = T::WeightInfo::batch(calls_len as u32); + Ok(Some(base_weight.saturating_add(weight)).into()) + } + + /// Dispatch a function call with a specified weight. + /// + /// This function does not check the weight of the call, and instead allows the + /// Root origin to specify the weight of the call. + /// + /// The dispatch origin for this call must be _Root_. 
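+        /// For example, a root (governance) origin could dispatch an otherwise
+        /// un-benchmarked call roughly as
+        /// `Utility::with_weight(RuntimeOrigin::root(), Box::new(call), Weight::from_parts(1_000_000, 0))`
+        /// (weight value illustrative only).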
+        #[pallet::call_index(5)]
+        #[pallet::weight((*weight, call.get_dispatch_info().class))]
+        pub fn with_weight(
+            origin: OriginFor<T>,
+            call: Box<<T as Config>::RuntimeCall>,
+            weight: Weight,
+        ) -> DispatchResult {
+            ensure_root(origin)?;
+            let _ = weight; // Explicitly don't check the weight witness.
+
+            let res = call.dispatch_bypass_filter(frame_system::RawOrigin::Root.into());
+            res.map(|_| ()).map_err(|e| e.error)
+        }
+    }
+
+    impl<T: Config> Pallet<T> {
+        /// Get the accumulated `weight` and the dispatch class for the given `calls`.
+        fn weight_and_dispatch_class(
+            calls: &[<T as Config>::RuntimeCall],
+        ) -> (Weight, DispatchClass) {
+            let dispatch_infos = calls.iter().map(|call| call.get_dispatch_info());
+            let (dispatch_weight, dispatch_class) = dispatch_infos.fold(
+                (Weight::zero(), DispatchClass::Operational),
+                |(total_weight, dispatch_class), di| {
+                    (
+                        if di.pays_fee == Pays::Yes {
+                            total_weight.saturating_add(di.weight)
+                        } else {
+                            total_weight
+                        },
+                        if di.class == DispatchClass::Normal {
+                            di.class
+                        } else {
+                            dispatch_class
+                        },
+                    )
+                },
+            );
+
+            (dispatch_weight, dispatch_class)
+        }
+    }
+}
+
+/// A pallet identifier. These are per pallet and should be stored in a registry somewhere.
+#[freeze_struct("7e600c53ace0630a")]
+#[derive(Clone, Copy, Eq, PartialEq, Encode, Decode)]
+struct IndexedUtilityPalletId(u16);
+
+impl TypeId for IndexedUtilityPalletId {
+    const TYPE_ID: [u8; 4] = *b"suba";
+}
+
+impl<T: Config> Pallet<T> {
+    /// Derive a derivative account ID from the owner account and the sub-account index.
+    pub fn derivative_account_id(who: T::AccountId, index: u16) -> T::AccountId {
+        let entropy = (b"modlpy/utilisuba", who, index).using_encoded(blake2_256);
+        Decode::decode(&mut TrailingZeroInput::new(entropy.as_ref()))
+            .expect("infinite length input; no invalid inputs for type; qed")
+    }
+}
diff --git a/pallets/utility/src/tests.rs b/pallets/utility/src/tests.rs
new file mode 100644
index 0000000000..16ae0a3a51
--- /dev/null
+++ b/pallets/utility/src/tests.rs
@@ -0,0 +1,1001 @@
+// This file is part of Substrate.
+
+// Copyright (C) Parity Technologies (UK) Ltd.
+// SPDX-License-Identifier: Apache-2.0
+
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Tests for Utility Pallet
+
+#![cfg(test)]
+
+use super::*;
+
+use crate as utility;
+use frame_support::{
+    assert_err_ignore_postinfo, assert_noop, assert_ok, derive_impl,
+    dispatch::{DispatchErrorWithPostInfo, Pays},
+    parameter_types, storage,
+    traits::{ConstU64, Contains},
+    weights::Weight,
+};
+use pallet_collective::{EnsureProportionAtLeast, Instance1};
+use sp_runtime::{
+    traits::{BadOrigin, BlakeTwo256, Dispatchable, Hash},
+    BuildStorage, DispatchError, TokenError,
+};
+
+type BlockNumber = u64;
+
+// example module to test behaviors.
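+// Its `foobar` call can either succeed or fail and can report a different post-dispatch weight,
+// which the weight-refund tests below rely on.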
+#[frame_support::pallet(dev_mode)] +#[allow(clippy::large_enum_variant)] +pub mod example { + use frame_support::{dispatch::WithPostDispatchInfo, pallet_prelude::*}; + use frame_system::pallet_prelude::*; + + #[pallet::pallet] + pub struct Pallet(_); + + #[pallet::config] + pub trait Config: frame_system::Config {} + + #[pallet::call] + impl Pallet { + #[pallet::call_index(0)] + #[pallet::weight(*_weight)] + pub fn noop(_origin: OriginFor, _weight: Weight) -> DispatchResult { + Ok(()) + } + + #[pallet::call_index(1)] + #[pallet::weight(*_start_weight)] + pub fn foobar( + origin: OriginFor, + err: bool, + _start_weight: Weight, + end_weight: Option, + ) -> DispatchResultWithPostInfo { + let _ = ensure_signed(origin)?; + if err { + let error: DispatchError = "The cake is a lie.".into(); + if let Some(weight) = end_weight { + Err(error.with_weight(weight)) + } else { + Err(error)? + } + } else { + Ok(end_weight.into()) + } + } + + #[pallet::call_index(2)] + #[pallet::weight(0)] + pub fn big_variant(_origin: OriginFor, _arg: [u8; 400]) -> DispatchResult { + Ok(()) + } + } +} + +mod mock_democracy { + pub use pallet::*; + #[frame_support::pallet(dev_mode)] + pub mod pallet { + use frame_support::pallet_prelude::*; + use frame_system::pallet_prelude::*; + + #[pallet::pallet] + pub struct Pallet(_); + + #[pallet::config] + pub trait Config: frame_system::Config + Sized { + type RuntimeEvent: From> + + IsType<::RuntimeEvent>; + type ExternalMajorityOrigin: EnsureOrigin; + } + + #[pallet::call] + impl Pallet { + #[pallet::call_index(3)] + #[pallet::weight(0)] + pub fn external_propose_majority(origin: OriginFor) -> DispatchResult { + T::ExternalMajorityOrigin::ensure_origin(origin)?; + Self::deposit_event(Event::::ExternalProposed); + Ok(()) + } + } + + #[pallet::event] + #[pallet::generate_deposit(pub(super) fn deposit_event)] + pub enum Event { + ExternalProposed, + } + } +} + +type Block = frame_system::mocking::MockBlock; + +frame_support::construct_runtime!( + pub enum Test + { + System: frame_system = 1, + Timestamp: pallet_timestamp = 2, + Balances: pallet_balances = 3, + RootTesting: pallet_root_testing = 4, + Council: pallet_collective:: = 5, + Utility: utility = 6, + Example: example = 7, + Democracy: mock_democracy = 8, + } +); + +parameter_types! { + pub BlockWeights: frame_system::limits::BlockWeights = + frame_system::limits::BlockWeights::simple_max(Weight::MAX); +} +#[derive_impl(frame_system::config_preludes::TestDefaultConfig)] +impl frame_system::Config for Test { + type BaseCallFilter = TestBaseCallFilter; + type BlockWeights = BlockWeights; + type Block = Block; + type AccountData = pallet_balances::AccountData; +} + +#[derive_impl(pallet_balances::config_preludes::TestDefaultConfig)] +impl pallet_balances::Config for Test { + type AccountStore = System; +} + +impl pallet_root_testing::Config for Test { + type RuntimeEvent = RuntimeEvent; +} + +impl pallet_timestamp::Config for Test { + type Moment = u64; + type OnTimestampSet = (); + type MinimumPeriod = ConstU64<3>; + type WeightInfo = (); +} + +const MOTION_DURATION_IN_BLOCKS: BlockNumber = 3; +parameter_types! 
{ + pub const MultisigDepositBase: u64 = 1; + pub const MultisigDepositFactor: u64 = 1; + pub const MaxSignatories: u32 = 3; + pub const MotionDuration: BlockNumber = MOTION_DURATION_IN_BLOCKS; + pub const MaxProposals: u32 = 100; + pub const MaxMembers: u32 = 100; + pub MaxProposalWeight: Weight = BlockWeights::get().max_block.saturating_div(2); +} + +pub struct MemberProposals; +impl pallet_collective::CanPropose for MemberProposals { + fn can_propose(who: &u64) -> bool { + [1, 2, 3].contains(who) + } +} + +pub struct MemberVotes; +impl pallet_collective::CanVote for MemberVotes { + fn can_vote(who: &u64) -> bool { + [1, 2, 3].contains(who) + } +} + +pub struct StoredVotingMembers; +impl pallet_collective::GetVotingMembers for StoredVotingMembers { + fn get_count() -> u32 { + 3 + } +} + +type CouncilCollective = pallet_collective::Instance1; +impl pallet_collective::Config for Test { + type RuntimeOrigin = RuntimeOrigin; + type Proposal = RuntimeCall; + type RuntimeEvent = RuntimeEvent; + type MotionDuration = MotionDuration; + type MaxProposals = MaxProposals; + type MaxMembers = MaxMembers; + type DefaultVote = pallet_collective::PrimeDefaultVote; + type WeightInfo = (); + type SetMembersOrigin = frame_system::EnsureRoot; + type CanPropose = MemberProposals; + type CanVote = MemberVotes; + type GetVotingMembers = StoredVotingMembers; +} + +impl example::Config for Test {} + +pub struct TestBaseCallFilter; +impl Contains for TestBaseCallFilter { + fn contains(c: &RuntimeCall) -> bool { + match *c { + // Transfer works. Use `transfer_keep_alive` for a call that doesn't pass the filter. + RuntimeCall::Balances(pallet_balances::Call::transfer_allow_death { .. }) => true, + RuntimeCall::Utility(_) => true, + // For benchmarking, this acts as a noop call + RuntimeCall::System(frame_system::Call::remark { .. }) => true, + // For tests + RuntimeCall::Example(_) => true, + // For council origin tests. 
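+ // (`external_propose_majority` is dispatched through a collective origin in the council tests below)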
+ RuntimeCall::Democracy(_) => true, + _ => false, + } + } +} +impl mock_democracy::Config for Test { + type RuntimeEvent = RuntimeEvent; + type ExternalMajorityOrigin = EnsureProportionAtLeast; +} +impl Config for Test { + type RuntimeEvent = RuntimeEvent; + type RuntimeCall = RuntimeCall; + type PalletsOrigin = OriginCaller; + type WeightInfo = (); +} + +type ExampleCall = example::Call; +type UtilityCall = crate::Call; + +use frame_system::Call as SystemCall; +use pallet_balances::Call as BalancesCall; +use pallet_root_testing::Call as RootTestingCall; +use pallet_timestamp::Call as TimestampCall; + +pub fn new_test_ext() -> sp_io::TestExternalities { + let mut t = frame_system::GenesisConfig::::default() + .build_storage() + .expect("Failed to build storage for test"); + pallet_balances::GenesisConfig:: { + balances: vec![(1, 10), (2, 10), (3, 10), (4, 10), (5, 2)], + } + .assimilate_storage(&mut t) + .expect("Failed to build storage for test"); + + pallet_collective::GenesisConfig:: { + members: vec![1, 2, 3], + phantom: Default::default(), + } + .assimilate_storage(&mut t) + .expect("Failed to build storage for test"); + + let mut ext = sp_io::TestExternalities::new(t); + ext.execute_with(|| System::set_block_number(1)); + ext +} + +fn call_transfer(dest: u64, value: u64) -> RuntimeCall { + RuntimeCall::Balances(BalancesCall::transfer_allow_death { dest, value }) +} + +fn call_foobar(err: bool, start_weight: Weight, end_weight: Option) -> RuntimeCall { + RuntimeCall::Example(ExampleCall::foobar { + err, + start_weight, + end_weight, + }) +} + +#[test] +fn as_derivative_works() { + new_test_ext().execute_with(|| { + let sub_1_0 = Utility::derivative_account_id(1, 0); + assert_ok!(Balances::transfer_allow_death( + RuntimeOrigin::signed(1), + sub_1_0, + 5 + )); + assert_err_ignore_postinfo!( + Utility::as_derivative(RuntimeOrigin::signed(1), 1, Box::new(call_transfer(6, 3)),), + TokenError::FundsUnavailable, + ); + assert_ok!(Utility::as_derivative( + RuntimeOrigin::signed(1), + 0, + Box::new(call_transfer(2, 3)), + )); + assert_eq!(Balances::free_balance(sub_1_0), 2); + assert_eq!(Balances::free_balance(2), 13); + }); +} + +#[test] +fn as_derivative_handles_weight_refund() { + new_test_ext().execute_with(|| { + let start_weight = Weight::from_parts(100, 0); + let end_weight = Weight::from_parts(75, 0); + let diff = start_weight - end_weight; + + // Full weight when ok + let inner_call = call_foobar(false, start_weight, None); + let call = RuntimeCall::Utility(UtilityCall::as_derivative { + index: 0, + call: Box::new(inner_call), + }); + let info = call.get_dispatch_info(); + let result = call.dispatch(RuntimeOrigin::signed(1)); + assert_ok!(result); + assert_eq!(extract_actual_weight(&result, &info), info.weight); + + // Refund weight when ok + let inner_call = call_foobar(false, start_weight, Some(end_weight)); + let call = RuntimeCall::Utility(UtilityCall::as_derivative { + index: 0, + call: Box::new(inner_call), + }); + let info = call.get_dispatch_info(); + let result = call.dispatch(RuntimeOrigin::signed(1)); + assert_ok!(result); + // Diff is refunded + assert_eq!(extract_actual_weight(&result, &info), info.weight - diff); + + // Full weight when err + let inner_call = call_foobar(true, start_weight, None); + let call = RuntimeCall::Utility(UtilityCall::as_derivative { + index: 0, + call: Box::new(inner_call), + }); + let info = call.get_dispatch_info(); + let result = call.dispatch(RuntimeOrigin::signed(1)); + assert_noop!( + result, + DispatchErrorWithPostInfo { + post_info: 
PostDispatchInfo { + // No weight is refunded + actual_weight: Some(info.weight), + pays_fee: Pays::Yes, + }, + error: DispatchError::Other("The cake is a lie."), + } + ); + + // Refund weight when err + let inner_call = call_foobar(true, start_weight, Some(end_weight)); + let call = RuntimeCall::Utility(UtilityCall::as_derivative { + index: 0, + call: Box::new(inner_call), + }); + let info = call.get_dispatch_info(); + let result = call.dispatch(RuntimeOrigin::signed(1)); + assert_noop!( + result, + DispatchErrorWithPostInfo { + post_info: PostDispatchInfo { + // Diff is refunded + actual_weight: Some(info.weight - diff), + pays_fee: Pays::Yes, + }, + error: DispatchError::Other("The cake is a lie."), + } + ); + }); +} + +#[test] +fn as_derivative_filters() { + new_test_ext().execute_with(|| { + assert_err_ignore_postinfo!( + Utility::as_derivative( + RuntimeOrigin::signed(1), + 1, + Box::new(RuntimeCall::Balances( + pallet_balances::Call::transfer_keep_alive { dest: 2, value: 1 } + )), + ), + DispatchError::from(frame_system::Error::::CallFiltered), + ); + }); +} + +#[test] +fn batch_with_root_works() { + new_test_ext().execute_with(|| { + let k = b"a".to_vec(); + let call = RuntimeCall::System(frame_system::Call::set_storage { + items: vec![(k.clone(), k.clone())], + }); + assert!(!TestBaseCallFilter::contains(&call)); + assert_eq!(Balances::free_balance(1), 10); + assert_eq!(Balances::free_balance(2), 10); + assert_ok!(Utility::batch( + RuntimeOrigin::root(), + vec![ + RuntimeCall::Balances(BalancesCall::force_transfer { + source: 1, + dest: 2, + value: 5 + }), + RuntimeCall::Balances(BalancesCall::force_transfer { + source: 1, + dest: 2, + value: 5 + }), + call, // Check filters are correctly bypassed + ] + )); + assert_eq!(Balances::free_balance(1), 0); + assert_eq!(Balances::free_balance(2), 20); + assert_eq!(storage::unhashed::get_raw(&k), Some(k)); + }); +} + +#[test] +fn batch_with_signed_works() { + new_test_ext().execute_with(|| { + assert_eq!(Balances::free_balance(1), 10); + assert_eq!(Balances::free_balance(2), 10); + assert_ok!(Utility::batch( + RuntimeOrigin::signed(1), + vec![call_transfer(2, 5), call_transfer(2, 5)] + ),); + assert_eq!(Balances::free_balance(1), 0); + assert_eq!(Balances::free_balance(2), 20); + }); +} + +#[test] +fn batch_with_signed_filters() { + new_test_ext().execute_with(|| { + assert_ok!(Utility::batch( + RuntimeOrigin::signed(1), + vec![RuntimeCall::Balances( + pallet_balances::Call::transfer_keep_alive { dest: 2, value: 1 } + )] + ),); + System::assert_last_event( + utility::Event::BatchInterrupted { + index: 0, + error: frame_system::Error::::CallFiltered.into(), + } + .into(), + ); + }); +} + +#[test] +fn batch_early_exit_works() { + new_test_ext().execute_with(|| { + assert_eq!(Balances::free_balance(1), 10); + assert_eq!(Balances::free_balance(2), 10); + assert_ok!(Utility::batch( + RuntimeOrigin::signed(1), + vec![ + call_transfer(2, 5), + call_transfer(2, 10), + call_transfer(2, 5), + ] + ),); + assert_eq!(Balances::free_balance(1), 5); + assert_eq!(Balances::free_balance(2), 15); + }); +} + +#[test] +fn batch_weight_calculation_doesnt_overflow() { + use sp_runtime::Perbill; + new_test_ext().execute_with(|| { + let big_call = RuntimeCall::RootTesting(RootTestingCall::fill_block { + ratio: Perbill::from_percent(50), + }); + assert_eq!(big_call.get_dispatch_info().weight, Weight::MAX / 2); + + // 3 * 50% saturates to 100% + let batch_call = RuntimeCall::Utility(crate::Call::batch { + calls: vec![big_call.clone(), big_call.clone(), 
big_call.clone()], + }); + + assert_eq!(batch_call.get_dispatch_info().weight, Weight::MAX); + }); +} + +#[test] +fn batch_handles_weight_refund() { + new_test_ext().execute_with(|| { + let start_weight = Weight::from_parts(100, 0); + let end_weight = Weight::from_parts(75, 0); + let diff = start_weight - end_weight; + let batch_len = 4; + + // Full weight when ok + let inner_call = call_foobar(false, start_weight, None); + let batch_calls = vec![inner_call; batch_len as usize]; + let call = RuntimeCall::Utility(UtilityCall::batch { calls: batch_calls }); + let info = call.get_dispatch_info(); + let result = call.dispatch(RuntimeOrigin::signed(1)); + assert_ok!(result); + assert_eq!(extract_actual_weight(&result, &info), info.weight); + + // Refund weight when ok + let inner_call = call_foobar(false, start_weight, Some(end_weight)); + let batch_calls = vec![inner_call; batch_len as usize]; + let call = RuntimeCall::Utility(UtilityCall::batch { calls: batch_calls }); + let info = call.get_dispatch_info(); + let result = call.dispatch(RuntimeOrigin::signed(1)); + assert_ok!(result); + // Diff is refunded + assert_eq!( + extract_actual_weight(&result, &info), + info.weight - diff * batch_len + ); + + // Full weight when err + let good_call = call_foobar(false, start_weight, None); + let bad_call = call_foobar(true, start_weight, None); + let batch_calls = vec![good_call, bad_call]; + let call = RuntimeCall::Utility(UtilityCall::batch { calls: batch_calls }); + let info = call.get_dispatch_info(); + let result = call.dispatch(RuntimeOrigin::signed(1)); + assert_ok!(result); + System::assert_last_event( + utility::Event::BatchInterrupted { + index: 1, + error: DispatchError::Other(""), + } + .into(), + ); + // No weight is refunded + assert_eq!(extract_actual_weight(&result, &info), info.weight); + + // Refund weight when err + let good_call = call_foobar(false, start_weight, Some(end_weight)); + let bad_call = call_foobar(true, start_weight, Some(end_weight)); + let batch_calls = vec![good_call, bad_call]; + let batch_len = batch_calls.len() as u64; + let call = RuntimeCall::Utility(UtilityCall::batch { calls: batch_calls }); + let info = call.get_dispatch_info(); + let result = call.dispatch(RuntimeOrigin::signed(1)); + assert_ok!(result); + System::assert_last_event( + utility::Event::BatchInterrupted { + index: 1, + error: DispatchError::Other(""), + } + .into(), + ); + assert_eq!( + extract_actual_weight(&result, &info), + info.weight - diff * batch_len + ); + + // Partial batch completion + let good_call = call_foobar(false, start_weight, Some(end_weight)); + let bad_call = call_foobar(true, start_weight, Some(end_weight)); + let batch_calls = vec![good_call, bad_call.clone(), bad_call]; + let call = RuntimeCall::Utility(UtilityCall::batch { calls: batch_calls }); + let info = call.get_dispatch_info(); + let result = call.dispatch(RuntimeOrigin::signed(1)); + assert_ok!(result); + System::assert_last_event( + utility::Event::BatchInterrupted { + index: 1, + error: DispatchError::Other(""), + } + .into(), + ); + assert_eq!( + extract_actual_weight(&result, &info), + // Real weight is 2 calls at end_weight + ::WeightInfo::batch(2) + end_weight * 2, + ); + }); +} + +#[test] +fn batch_all_works() { + new_test_ext().execute_with(|| { + assert_eq!(Balances::free_balance(1), 10); + assert_eq!(Balances::free_balance(2), 10); + assert_ok!(Utility::batch_all( + RuntimeOrigin::signed(1), + vec![call_transfer(2, 5), call_transfer(2, 5)] + ),); + assert_eq!(Balances::free_balance(1), 0); + 
assert_eq!(Balances::free_balance(2), 20); + }); +} + +#[test] +fn batch_all_revert() { + new_test_ext().execute_with(|| { + let call = call_transfer(2, 5); + let info = call.get_dispatch_info(); + + assert_eq!(Balances::free_balance(1), 10); + assert_eq!(Balances::free_balance(2), 10); + let batch_all_calls = RuntimeCall::Utility(crate::Call::::batch_all { + calls: vec![ + call_transfer(2, 5), + call_transfer(2, 10), + call_transfer(2, 5), + ], + }); + assert_noop!( + batch_all_calls.dispatch(RuntimeOrigin::signed(1)), + DispatchErrorWithPostInfo { + post_info: PostDispatchInfo { + actual_weight: Some( + ::WeightInfo::batch_all(2) + info.weight * 2 + ), + pays_fee: Pays::Yes + }, + error: TokenError::FundsUnavailable.into(), + } + ); + assert_eq!(Balances::free_balance(1), 10); + assert_eq!(Balances::free_balance(2), 10); + }); +} + +#[test] +fn batch_all_handles_weight_refund() { + new_test_ext().execute_with(|| { + let start_weight = Weight::from_parts(100, 0); + let end_weight = Weight::from_parts(75, 0); + let diff = start_weight - end_weight; + let batch_len = 4; + + // Full weight when ok + let inner_call = call_foobar(false, start_weight, None); + let batch_calls = vec![inner_call; batch_len as usize]; + let call = RuntimeCall::Utility(UtilityCall::batch_all { calls: batch_calls }); + let info = call.get_dispatch_info(); + let result = call.dispatch(RuntimeOrigin::signed(1)); + assert_ok!(result); + assert_eq!(extract_actual_weight(&result, &info), info.weight); + + // Refund weight when ok + let inner_call = call_foobar(false, start_weight, Some(end_weight)); + let batch_calls = vec![inner_call; batch_len as usize]; + let call = RuntimeCall::Utility(UtilityCall::batch_all { calls: batch_calls }); + let info = call.get_dispatch_info(); + let result = call.dispatch(RuntimeOrigin::signed(1)); + assert_ok!(result); + // Diff is refunded + assert_eq!( + extract_actual_weight(&result, &info), + info.weight - diff * batch_len + ); + + // Full weight when err + let good_call = call_foobar(false, start_weight, None); + let bad_call = call_foobar(true, start_weight, None); + let batch_calls = vec![good_call, bad_call]; + let call = RuntimeCall::Utility(UtilityCall::batch_all { calls: batch_calls }); + let info = call.get_dispatch_info(); + let result = call.dispatch(RuntimeOrigin::signed(1)); + assert_err_ignore_postinfo!(result, "The cake is a lie."); + // No weight is refunded + assert_eq!(extract_actual_weight(&result, &info), info.weight); + + // Refund weight when err + let good_call = call_foobar(false, start_weight, Some(end_weight)); + let bad_call = call_foobar(true, start_weight, Some(end_weight)); + let batch_calls = vec![good_call, bad_call]; + let batch_len = batch_calls.len() as u64; + let call = RuntimeCall::Utility(UtilityCall::batch_all { calls: batch_calls }); + let info = call.get_dispatch_info(); + let result = call.dispatch(RuntimeOrigin::signed(1)); + assert_err_ignore_postinfo!(result, "The cake is a lie."); + assert_eq!( + extract_actual_weight(&result, &info), + info.weight.saturating_sub(diff.saturating_mul(batch_len)) + ); + + // Partial batch completion + let good_call = call_foobar(false, start_weight, Some(end_weight)); + let bad_call = call_foobar(true, start_weight, Some(end_weight)); + let batch_calls = vec![good_call, bad_call.clone(), bad_call]; + let call = RuntimeCall::Utility(UtilityCall::batch_all { calls: batch_calls }); + let info = call.get_dispatch_info(); + let result = call.dispatch(RuntimeOrigin::signed(1)); + assert_err_ignore_postinfo!(result, 
"The cake is a lie."); + assert_eq!( + extract_actual_weight(&result, &info), + // Real weight is 2 calls at end_weight + ::WeightInfo::batch_all(2).saturating_add(end_weight.saturating_mul(2)), + ); + }); +} + +#[test] +fn batch_all_does_not_nest() { + new_test_ext().execute_with(|| { + let batch_all = RuntimeCall::Utility(UtilityCall::batch_all { + calls: vec![ + call_transfer(2, 1), + call_transfer(2, 1), + call_transfer(2, 1), + ], + }); + + let info = batch_all.get_dispatch_info(); + + assert_eq!(Balances::free_balance(1), 10); + assert_eq!(Balances::free_balance(2), 10); + // A nested batch_all call will not pass the filter, and fail with `BadOrigin`. + assert_noop!( + Utility::batch_all(RuntimeOrigin::signed(1), vec![batch_all.clone()]), + DispatchErrorWithPostInfo { + post_info: PostDispatchInfo { + actual_weight: Some(::WeightInfo::batch_all(1) + info.weight), + pays_fee: Pays::Yes + }, + error: frame_system::Error::::CallFiltered.into(), + } + ); + + // And for those who want to get a little fancy, we check that the filter persists across + // other kinds of dispatch wrapping functions... in this case + // `batch_all(batch(batch_all(..)))` + let batch_nested = RuntimeCall::Utility(UtilityCall::batch { + calls: vec![batch_all], + }); + // Batch will end with `Ok`, but does not actually execute as we can see from the event + // and balances. + assert_ok!(Utility::batch_all( + RuntimeOrigin::signed(1), + vec![batch_nested] + )); + System::assert_has_event( + utility::Event::BatchInterrupted { + index: 0, + error: frame_system::Error::::CallFiltered.into(), + } + .into(), + ); + assert_eq!(Balances::free_balance(1), 10); + assert_eq!(Balances::free_balance(2), 10); + }); +} + +#[test] +fn batch_limit() { + new_test_ext().execute_with(|| { + let calls = vec![RuntimeCall::System(SystemCall::remark { remark: vec![] }); 40_000]; + assert_noop!( + Utility::batch(RuntimeOrigin::signed(1), calls.clone()), + Error::::TooManyCalls + ); + assert_noop!( + Utility::batch_all(RuntimeOrigin::signed(1), calls), + Error::::TooManyCalls + ); + }); +} + +#[test] +fn force_batch_works() { + new_test_ext().execute_with(|| { + assert_eq!(Balances::free_balance(1), 10); + assert_eq!(Balances::free_balance(2), 10); + assert_ok!(Utility::force_batch( + RuntimeOrigin::signed(1), + vec![ + call_transfer(2, 5), + call_foobar(true, Weight::from_parts(75, 0), None), + call_transfer(2, 10), + call_transfer(2, 5), + ] + )); + System::assert_last_event(utility::Event::BatchCompletedWithErrors.into()); + System::assert_has_event( + utility::Event::ItemFailed { + error: DispatchError::Other(""), + } + .into(), + ); + assert_eq!(Balances::free_balance(1), 0); + assert_eq!(Balances::free_balance(2), 20); + + assert_ok!(Utility::force_batch( + RuntimeOrigin::signed(2), + vec![call_transfer(1, 5), call_transfer(1, 5),] + )); + System::assert_last_event(utility::Event::BatchCompleted.into()); + + assert_ok!(Utility::force_batch( + RuntimeOrigin::signed(1), + vec![call_transfer(2, 50),] + ),); + System::assert_last_event(utility::Event::BatchCompletedWithErrors.into()); + }); +} + +#[test] +fn none_origin_does_not_work() { + new_test_ext().execute_with(|| { + assert_noop!( + Utility::force_batch(RuntimeOrigin::none(), vec![]), + BadOrigin + ); + assert_noop!(Utility::batch(RuntimeOrigin::none(), vec![]), BadOrigin); + assert_noop!(Utility::batch_all(RuntimeOrigin::none(), vec![]), BadOrigin); + }) +} + +#[test] +fn batch_doesnt_work_with_inherents() { + new_test_ext().execute_with(|| { + // fails because inherents expect 
the origin to be none. + assert_ok!(Utility::batch( + RuntimeOrigin::signed(1), + vec![RuntimeCall::Timestamp(TimestampCall::set { now: 42 }),] + )); + System::assert_last_event( + utility::Event::BatchInterrupted { + index: 0, + error: frame_system::Error::::CallFiltered.into(), + } + .into(), + ); + }) +} + +#[test] +fn force_batch_doesnt_work_with_inherents() { + new_test_ext().execute_with(|| { + // fails because inherents expect the origin to be none. + assert_ok!(Utility::force_batch( + RuntimeOrigin::root(), + vec![RuntimeCall::Timestamp(TimestampCall::set { now: 42 }),] + )); + System::assert_last_event(utility::Event::BatchCompletedWithErrors.into()); + }) +} + +#[test] +fn batch_all_doesnt_work_with_inherents() { + new_test_ext().execute_with(|| { + let batch_all = RuntimeCall::Utility(UtilityCall::batch_all { + calls: vec![RuntimeCall::Timestamp(TimestampCall::set { now: 42 })], + }); + let info = batch_all.get_dispatch_info(); + + // fails because inherents expect the origin to be none. + assert_noop!( + batch_all.dispatch(RuntimeOrigin::signed(1)), + DispatchErrorWithPostInfo { + post_info: PostDispatchInfo { + actual_weight: Some(info.weight), + pays_fee: Pays::Yes + }, + error: frame_system::Error::::CallFiltered.into(), + } + ); + }) +} + +#[test] +fn batch_works_with_council_origin() { + new_test_ext().execute_with(|| { + let proposal = RuntimeCall::Utility(UtilityCall::batch { + calls: vec![RuntimeCall::Democracy( + mock_democracy::Call::external_propose_majority {}, + )], + }); + let proposal_len: u32 = proposal.using_encoded(|p| p.len() as u32); + let proposal_weight = proposal.get_dispatch_info().weight; + let hash = BlakeTwo256::hash_of(&proposal); + + assert_ok!(Council::propose( + RuntimeOrigin::signed(1), + Box::new(proposal.clone()), + proposal_len, + 3, + )); + + assert_ok!(Council::vote(RuntimeOrigin::signed(1), hash, 0, true)); + assert_ok!(Council::vote(RuntimeOrigin::signed(2), hash, 0, true)); + assert_ok!(Council::vote(RuntimeOrigin::signed(3), hash, 0, true)); + + System::set_block_number(4); + + assert_ok!(Council::close( + RuntimeOrigin::root(), + hash, + 0, + proposal_weight, + proposal_len + )); + + System::assert_last_event(RuntimeEvent::Council(pallet_collective::Event::Executed { + proposal_hash: hash, + result: Ok(()), + })); + }) +} + +#[test] +fn force_batch_works_with_council_origin() { + new_test_ext().execute_with(|| { + let proposal = RuntimeCall::Utility(UtilityCall::force_batch { + calls: vec![RuntimeCall::Democracy( + mock_democracy::Call::external_propose_majority {}, + )], + }); + let proposal_len: u32 = proposal.using_encoded(|p| p.len() as u32); + let proposal_weight = proposal.get_dispatch_info().weight; + let hash = BlakeTwo256::hash_of(&proposal); + + assert_ok!(Council::propose( + RuntimeOrigin::signed(1), + Box::new(proposal.clone()), + proposal_len, + 3, + )); + + assert_ok!(Council::vote(RuntimeOrigin::signed(1), hash, 0, true)); + assert_ok!(Council::vote(RuntimeOrigin::signed(2), hash, 0, true)); + assert_ok!(Council::vote(RuntimeOrigin::signed(3), hash, 0, true)); + + System::set_block_number(4); + assert_ok!(Council::close( + RuntimeOrigin::root(), + hash, + 0, + proposal_weight, + proposal_len + )); + + System::assert_last_event(RuntimeEvent::Council(pallet_collective::Event::Executed { + proposal_hash: hash, + result: Ok(()), + })); + }) +} + +#[test] +fn batch_all_works_with_council_origin() { + new_test_ext().execute_with(|| { + assert_ok!(Utility::batch_all( + RuntimeOrigin::from(pallet_collective::RawOrigin::Members(3, 
3)), + vec![RuntimeCall::Democracy( + mock_democracy::Call::external_propose_majority {} + )] + )); + }) +} + +#[test] +fn with_weight_works() { + new_test_ext().execute_with(|| { + use frame_system::WeightInfo; + let upgrade_code_call = Box::new(RuntimeCall::System( + frame_system::Call::set_code_without_checks { code: vec![] }, + )); + // Weight before is max. + assert_eq!( + upgrade_code_call.get_dispatch_info().weight, + ::SystemWeightInfo::set_code() + ); + assert_eq!( + upgrade_code_call.get_dispatch_info().class, + frame_support::dispatch::DispatchClass::Operational + ); + + let with_weight_call = Call::::with_weight { + call: upgrade_code_call, + weight: Weight::from_parts(123, 456), + }; + // Weight after is set by Root. + assert_eq!( + with_weight_call.get_dispatch_info().weight, + Weight::from_parts(123, 456) + ); + assert_eq!( + with_weight_call.get_dispatch_info().class, + frame_support::dispatch::DispatchClass::Operational + ); + }) +} diff --git a/pallets/utility/src/weights.rs b/pallets/utility/src/weights.rs new file mode 100644 index 0000000000..502f85a3f1 --- /dev/null +++ b/pallets/utility/src/weights.rs @@ -0,0 +1,196 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//! Autogenerated weights for `pallet_utility` +//! +//! THIS FILE WAS AUTO-GENERATED USING THE SUBSTRATE BENCHMARK CLI VERSION 32.0.0 +//! DATE: 2024-04-09, STEPS: `50`, REPEAT: `20`, LOW RANGE: `[]`, HIGH RANGE: `[]` +//! WORST CASE MAP SIZE: `1000000` +//! HOSTNAME: `runner-anb7yjbi-project-674-concurrent-0`, CPU: `Intel(R) Xeon(R) CPU @ 2.60GHz` +//! WASM-EXECUTION: `Compiled`, CHAIN: `Some("dev")`, DB CACHE: `1024` + +// Executed Command: +// ./target/production/substrate-node +// benchmark +// pallet +// --chain=dev +// --steps=50 +// --repeat=20 +// --pallet=pallet_utility +// --no-storage-info +// --no-median-slopes +// --no-min-squares +// --extrinsic=* +// --wasm-execution=compiled +// --heap-pages=4096 +// --output=./substrate/frame/utility/src/weights.rs +// --header=./substrate/HEADER-APACHE2 +// --template=./substrate/.maintain/frame-weight-template.hbs + +#![cfg_attr(rustfmt, rustfmt_skip)] +#![allow(unused_parens)] +#![allow(unused_imports)] +#![allow(missing_docs)] + +use frame_support::{traits::Get, weights::{Weight, constants::RocksDbWeight}}; +use core::marker::PhantomData; + +/// Weight functions needed for `pallet_utility`. +pub trait WeightInfo { + fn batch(c: u32, ) -> Weight; + fn as_derivative() -> Weight; + fn batch_all(c: u32, ) -> Weight; + fn dispatch_as() -> Weight; + fn force_batch(c: u32, ) -> Weight; +} + +/// Weights for `pallet_utility` using the Substrate node and recommended hardware. 
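+/// The figures below include the `SafeMode::EnteredUntil` and `TxPause::PausedCalls` storage reads
+/// noted on each call.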
+pub struct SubstrateWeight(PhantomData); +impl WeightInfo for SubstrateWeight { + /// Storage: `SafeMode::EnteredUntil` (r:1 w:0) + /// Proof: `SafeMode::EnteredUntil` (`max_values`: Some(1), `max_size`: Some(4), added: 499, mode: `MaxEncodedLen`) + /// Storage: `TxPause::PausedCalls` (r:1 w:0) + /// Proof: `TxPause::PausedCalls` (`max_values`: None, `max_size`: Some(532), added: 3007, mode: `MaxEncodedLen`) + /// The range of component `c` is `[0, 1000]`. + fn batch(c: u32, ) -> Weight { + // Proof Size summary in bytes: + // Measured: `145` + // Estimated: `3997` + // Minimum execution time: 5_312_000 picoseconds. + Weight::from_parts(2_694_370, 3997) + // Standard Error: 5_055 + .saturating_add(Weight::from_parts(5_005_941, 0).saturating_mul(c.into())) + .saturating_add(T::DbWeight::get().reads(2_u64)) + } + /// Storage: `SafeMode::EnteredUntil` (r:1 w:0) + /// Proof: `SafeMode::EnteredUntil` (`max_values`: Some(1), `max_size`: Some(4), added: 499, mode: `MaxEncodedLen`) + /// Storage: `TxPause::PausedCalls` (r:1 w:0) + /// Proof: `TxPause::PausedCalls` (`max_values`: None, `max_size`: Some(532), added: 3007, mode: `MaxEncodedLen`) + fn as_derivative() -> Weight { + // Proof Size summary in bytes: + // Measured: `145` + // Estimated: `3997` + // Minimum execution time: 9_263_000 picoseconds. + Weight::from_parts(9_639_000, 3997) + .saturating_add(T::DbWeight::get().reads(2_u64)) + } + /// Storage: `SafeMode::EnteredUntil` (r:1 w:0) + /// Proof: `SafeMode::EnteredUntil` (`max_values`: Some(1), `max_size`: Some(4), added: 499, mode: `MaxEncodedLen`) + /// Storage: `TxPause::PausedCalls` (r:1 w:0) + /// Proof: `TxPause::PausedCalls` (`max_values`: None, `max_size`: Some(532), added: 3007, mode: `MaxEncodedLen`) + /// The range of component `c` is `[0, 1000]`. + fn batch_all(c: u32, ) -> Weight { + // Proof Size summary in bytes: + // Measured: `145` + // Estimated: `3997` + // Minimum execution time: 5_120_000 picoseconds. + Weight::from_parts(12_948_874, 3997) + // Standard Error: 4_643 + .saturating_add(Weight::from_parts(5_162_821, 0).saturating_mul(c.into())) + .saturating_add(T::DbWeight::get().reads(2_u64)) + } + fn dispatch_as() -> Weight { + // Proof Size summary in bytes: + // Measured: `0` + // Estimated: `0` + // Minimum execution time: 7_126_000 picoseconds. + Weight::from_parts(7_452_000, 0) + } + /// Storage: `SafeMode::EnteredUntil` (r:1 w:0) + /// Proof: `SafeMode::EnteredUntil` (`max_values`: Some(1), `max_size`: Some(4), added: 499, mode: `MaxEncodedLen`) + /// Storage: `TxPause::PausedCalls` (r:1 w:0) + /// Proof: `TxPause::PausedCalls` (`max_values`: None, `max_size`: Some(532), added: 3007, mode: `MaxEncodedLen`) + /// The range of component `c` is `[0, 1000]`. + fn force_batch(c: u32, ) -> Weight { + // Proof Size summary in bytes: + // Measured: `145` + // Estimated: `3997` + // Minimum execution time: 5_254_000 picoseconds. + Weight::from_parts(4_879_712, 3997) + // Standard Error: 4_988 + .saturating_add(Weight::from_parts(4_955_816, 0).saturating_mul(c.into())) + .saturating_add(T::DbWeight::get().reads(2_u64)) + } +} + +// For backwards compatibility and tests. +impl WeightInfo for () { + /// Storage: `SafeMode::EnteredUntil` (r:1 w:0) + /// Proof: `SafeMode::EnteredUntil` (`max_values`: Some(1), `max_size`: Some(4), added: 499, mode: `MaxEncodedLen`) + /// Storage: `TxPause::PausedCalls` (r:1 w:0) + /// Proof: `TxPause::PausedCalls` (`max_values`: None, `max_size`: Some(532), added: 3007, mode: `MaxEncodedLen`) + /// The range of component `c` is `[0, 1000]`. 
+ fn batch(c: u32, ) -> Weight { + // Proof Size summary in bytes: + // Measured: `145` + // Estimated: `3997` + // Minimum execution time: 5_312_000 picoseconds. + Weight::from_parts(2_694_370, 3997) + // Standard Error: 5_055 + .saturating_add(Weight::from_parts(5_005_941, 0).saturating_mul(c.into())) + .saturating_add(RocksDbWeight::get().reads(2_u64)) + } + /// Storage: `SafeMode::EnteredUntil` (r:1 w:0) + /// Proof: `SafeMode::EnteredUntil` (`max_values`: Some(1), `max_size`: Some(4), added: 499, mode: `MaxEncodedLen`) + /// Storage: `TxPause::PausedCalls` (r:1 w:0) + /// Proof: `TxPause::PausedCalls` (`max_values`: None, `max_size`: Some(532), added: 3007, mode: `MaxEncodedLen`) + fn as_derivative() -> Weight { + // Proof Size summary in bytes: + // Measured: `145` + // Estimated: `3997` + // Minimum execution time: 9_263_000 picoseconds. + Weight::from_parts(9_639_000, 3997) + .saturating_add(RocksDbWeight::get().reads(2_u64)) + } + /// Storage: `SafeMode::EnteredUntil` (r:1 w:0) + /// Proof: `SafeMode::EnteredUntil` (`max_values`: Some(1), `max_size`: Some(4), added: 499, mode: `MaxEncodedLen`) + /// Storage: `TxPause::PausedCalls` (r:1 w:0) + /// Proof: `TxPause::PausedCalls` (`max_values`: None, `max_size`: Some(532), added: 3007, mode: `MaxEncodedLen`) + /// The range of component `c` is `[0, 1000]`. + fn batch_all(c: u32, ) -> Weight { + // Proof Size summary in bytes: + // Measured: `145` + // Estimated: `3997` + // Minimum execution time: 5_120_000 picoseconds. + Weight::from_parts(12_948_874, 3997) + // Standard Error: 4_643 + .saturating_add(Weight::from_parts(5_162_821, 0).saturating_mul(c.into())) + .saturating_add(RocksDbWeight::get().reads(2_u64)) + } + fn dispatch_as() -> Weight { + // Proof Size summary in bytes: + // Measured: `0` + // Estimated: `0` + // Minimum execution time: 7_126_000 picoseconds. + Weight::from_parts(7_452_000, 0) + } + /// Storage: `SafeMode::EnteredUntil` (r:1 w:0) + /// Proof: `SafeMode::EnteredUntil` (`max_values`: Some(1), `max_size`: Some(4), added: 499, mode: `MaxEncodedLen`) + /// Storage: `TxPause::PausedCalls` (r:1 w:0) + /// Proof: `TxPause::PausedCalls` (`max_values`: None, `max_size`: Some(532), added: 3007, mode: `MaxEncodedLen`) + /// The range of component `c` is `[0, 1000]`. + fn force_batch(c: u32, ) -> Weight { + // Proof Size summary in bytes: + // Measured: `145` + // Estimated: `3997` + // Minimum execution time: 5_254_000 picoseconds. 
+ Weight::from_parts(4_879_712, 3997) + // Standard Error: 4_988 + .saturating_add(Weight::from_parts(4_955_816, 0).saturating_mul(c.into())) + .saturating_add(RocksDbWeight::get().reads(2_u64)) + } +} diff --git a/precompiles/src/staking.rs b/precompiles/src/staking.rs index 9022f45a36..192e55f57c 100644 --- a/precompiles/src/staking.rs +++ b/precompiles/src/staking.rs @@ -120,6 +120,7 @@ where } #[precompile::public("getTotalColdkeyStake(bytes32)")] + #[precompile::view] fn get_total_coldkey_stake( _handle: &mut impl PrecompileHandle, coldkey: H256, @@ -131,6 +132,7 @@ where } #[precompile::public("getTotalHotkeyStake(bytes32)")] + #[precompile::view] fn get_total_hotkey_stake( _handle: &mut impl PrecompileHandle, hotkey: H256, @@ -275,6 +277,7 @@ where } #[precompile::public("getTotalColdkeyStake(bytes32)")] + #[precompile::view] fn get_total_coldkey_stake( _handle: &mut impl PrecompileHandle, coldkey: H256, @@ -292,6 +295,7 @@ where } #[precompile::public("getTotalHotkeyStake(bytes32)")] + #[precompile::view] fn get_total_hotkey_stake( _handle: &mut impl PrecompileHandle, hotkey: H256, diff --git a/runtime/src/lib.rs b/runtime/src/lib.rs index 909207e142..a938615df2 100644 --- a/runtime/src/lib.rs +++ b/runtime/src/lib.rs @@ -205,7 +205,7 @@ pub const VERSION: RuntimeVersion = RuntimeVersion { // `spec_version`, and `authoring_version` are the same between Wasm and native. // This value is set to 100 to notify Polkadot-JS App (https://polkadot.js.org/apps) to use // the compatible custom types. - spec_version: 246, + spec_version: 247, impl_version: 1, apis: RUNTIME_API_VERSIONS, transaction_version: 1, @@ -1017,6 +1017,7 @@ parameter_types! { pub const InitialColdkeySwapScheduleDuration: BlockNumber = 5 * 24 * 60 * 60 / 12; // 5 days pub const InitialDissolveNetworkScheduleDuration: BlockNumber = 5 * 24 * 60 * 60 / 12; // 5 days pub const SubtensorInitialTaoWeight: u64 = 971_718_665_099_567_868; // 0.05267697438728329% tao weight. + pub const InitialEmaPriceHalvingPeriod: u64 = 201_600_u64; // 4 weeks } impl pallet_subtensor::Config for Runtime { @@ -1080,6 +1081,7 @@ impl pallet_subtensor::Config for Runtime { type Preimages = Preimage; type InitialColdkeySwapScheduleDuration = InitialColdkeySwapScheduleDuration; type InitialDissolveNetworkScheduleDuration = InitialDissolveNetworkScheduleDuration; + type InitialEmaPriceHalvingPeriod = InitialEmaPriceHalvingPeriod; } use sp_runtime::BoundedVec; diff --git a/scripts/localnet.sh b/scripts/localnet.sh index b82b5f9f59..cecce2d814 100755 --- a/scripts/localnet.sh +++ b/scripts/localnet.sh @@ -2,10 +2,15 @@ # Check if `--no-purge` passed as a parameter NO_PURGE=0 + +# Check if `--build-only` passed as parameter +BUILD_ONLY=0 + for arg in "$@"; do if [ "$arg" = "--no-purge" ]; then NO_PURGE=1 - break + elif [ "$arg" = "--build-only" ]; then + BUILD_ONLY=1 fi done @@ -15,94 +20,98 @@ SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)" # The base directory of the subtensor project BASE_DIR="$SCRIPT_DIR/.." 
-# get parameters # Get the value of fast_blocks from the first argument fast_blocks=${1:-"True"} -# Check the value of fast_blocks +# Define the target directory for compilation if [ "$fast_blocks" == "False" ]; then # Block of code to execute if fast_blocks is False echo "fast_blocks is Off" : "${CHAIN:=local}" : "${BUILD_BINARY:=1}" : "${FEATURES:="pow-faucet"}" + BUILD_DIR="$BASE_DIR/target/non-fast-blocks" else # Block of code to execute if fast_blocks is not False echo "fast_blocks is On" : "${CHAIN:=local}" : "${BUILD_BINARY:=1}" : "${FEATURES:="pow-faucet fast-blocks"}" + BUILD_DIR="$BASE_DIR/target/fast-blocks" fi +# Ensure the build directory exists +mkdir -p "$BUILD_DIR" + SPEC_PATH="${SCRIPT_DIR}/specs/" FULL_PATH="$SPEC_PATH$CHAIN.json" -# Kill any existing nodes which may have not exited correctly after a previous -# run. +# Kill any existing nodes which may have not exited correctly after a previous run. pkill -9 'node-subtensor' if [ ! -d "$SPEC_PATH" ]; then echo "*** Creating directory ${SPEC_PATH}..." - mkdir $SPEC_PATH + mkdir -p "$SPEC_PATH" fi if [[ $BUILD_BINARY == "1" ]]; then echo "*** Building substrate binary..." - cargo build --workspace --profile=release --features "$FEATURES" --manifest-path "$BASE_DIR/Cargo.toml" + CARGO_TARGET_DIR="$BUILD_DIR" cargo build --workspace --profile=release --features "$FEATURES" --manifest-path "$BASE_DIR/Cargo.toml" echo "*** Binary compiled" fi echo "*** Building chainspec..." -"$BASE_DIR/target/release/node-subtensor" build-spec --disable-default-bootnode --raw --chain $CHAIN >$FULL_PATH +"$BUILD_DIR/release/node-subtensor" build-spec --disable-default-bootnode --raw --chain $CHAIN >$FULL_PATH echo "*** Chainspec built and output to file" -# generate node keys -$BASE_DIR/target/release/node-subtensor key generate-node-key --chain="$FULL_PATH" --base-path /tmp/alice -$BASE_DIR/target/release/node-subtensor key generate-node-key --chain="$FULL_PATH" --base-path /tmp/bob +# Generate node keys +"$BUILD_DIR/release/node-subtensor" key generate-node-key --chain="$FULL_PATH" --base-path /tmp/alice +"$BUILD_DIR/release/node-subtensor" key generate-node-key --chain="$FULL_PATH" --base-path /tmp/bob if [ $NO_PURGE -eq 1 ]; then echo "*** Purging previous state skipped..." else echo "*** Purging previous state..." - "$BASE_DIR/target/release/node-subtensor" purge-chain -y --base-path /tmp/bob --chain="$FULL_PATH" >/dev/null 2>&1 - "$BASE_DIR/target/release/node-subtensor" purge-chain -y --base-path /tmp/alice --chain="$FULL_PATH" >/dev/null 2>&1 + "$BUILD_DIR/release/node-subtensor" purge-chain -y --base-path /tmp/bob --chain="$FULL_PATH" >/dev/null 2>&1 + "$BUILD_DIR/release/node-subtensor" purge-chain -y --base-path /tmp/alice --chain="$FULL_PATH" >/dev/null 2>&1 echo "*** Previous chainstate purged" fi -echo "*** Starting localnet nodes..." 
-alice_start=( - "$BASE_DIR/target/release/node-subtensor" - --base-path /tmp/alice - --chain="$FULL_PATH" - --alice - --port 30334 - --rpc-port 9944 - --validator - --rpc-cors=all - --allow-private-ipv4 - --discover-local - --unsafe-force-node-key-generation -) - -bob_start=( - "$BASE_DIR"/target/release/node-subtensor - --base-path /tmp/bob - --chain="$FULL_PATH" - --bob - --port 30335 - --rpc-port 9945 - --validator - --rpc-cors=all - --allow-private-ipv4 - --discover-local - --unsafe-force-node-key-generation -# --offchain-worker=Never -) - -trap 'pkill -P $$' EXIT SIGINT SIGTERM - -( - ("${alice_start[@]}" 2>&1) & - ("${bob_start[@]}" 2>&1) - wait -) +if [ $BUILD_ONLY -eq 0 ]; then + echo "*** Starting localnet nodes..." + alice_start=( + "$BUILD_DIR/release/node-subtensor" + --base-path /tmp/alice + --chain="$FULL_PATH" + --alice + --port 30334 + --rpc-port 9944 + --validator + --rpc-cors=all + --allow-private-ipv4 + --discover-local + --unsafe-force-node-key-generation + ) + + bob_start=( + "$BUILD_DIR/release/node-subtensor" + --base-path /tmp/bob + --chain="$FULL_PATH" + --bob + --port 30335 + --rpc-port 9945 + --validator + --rpc-cors=all + --allow-private-ipv4 + --discover-local + --unsafe-force-node-key-generation + ) + + trap 'pkill -P $$' EXIT SIGINT SIGTERM + + ( + ("${alice_start[@]}" 2>&1) & + ("${bob_start[@]}" 2>&1) + wait + ) +fi \ No newline at end of file
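A minimal invocation sketch for the updated localnet script, assuming it is run from the repository root; the `--build-only` and `--no-purge` flags and the `fast_blocks` positional argument are handled as defined in the script above.

```bash
# Compile the fast-blocks binary and chainspec without launching nodes
./scripts/localnet.sh --build-only

# Build and run a non-fast-blocks localnet, keeping any previous chain state
./scripts/localnet.sh False --no-purge
```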