diff --git a/.dockerignore b/.dockerignore index 0fa9ce9d18..fec7622b6c 100644 --- a/.dockerignore +++ b/.dockerignore @@ -6,3 +6,11 @@ docker-compose.yml cli/bin cli/tests !cli/bin/snark-worker + +# Heartbeats processor +tools/heartbeats-processor/.env +tools/heartbeats-processor/data/ +tools/heartbeats-processor/credentials/ +tools/heartbeats-processor/*.db +# Ensure .sqlx files are included +!tools/heartbeats-processor/.sqlx/ diff --git a/.drone.yml b/.drone.yml index d66e3de853..c2bd1da3ab 100644 --- a/.drone.yml +++ b/.drone.yml @@ -53,10 +53,10 @@ steps: - cp /usr/local/bin/mina cli/bin/ - name: build - image: rust:1.83-bullseye + image: rust:1.84-bullseye commands: - apt-get update && apt-get install -y libssl-dev libjemalloc-dev jq protobuf-compiler - - rustup update 1.83 && rustup default 1.83 + - rustup update 1.84 && rustup default 1.84 - rustup component add rustfmt # just to be sure it builds without errors - cargo build diff --git a/.github/workflows/archive.yaml b/.github/workflows/archive.yaml new file mode 100644 index 0000000000..67acaf5b95 --- /dev/null +++ b/.github/workflows/archive.yaml @@ -0,0 +1,162 @@ +# Note: Disabling this workflow for now, have to figure out how to run the complicated setup on github actions + +name: Archive Comparison + +# TODO: Add proper triggers +on: + workflow_dispatch: + +env: + PG_PORT: 5432 + PG_DB: archive + # TODO: Add proper secrets + # POSTGRES_PASSWORD: ${{ secrets.POSTGRES_PASSWORD }} + POSTGRES_PASSWORD: mina-testnet + ARCHIVE_OUTPUT_DIR: ./archive-outputs + ARCHIVE_PORT: 3086 + P2P_PORT: 8302 + CLIENT_PORT: 8301 + RPC_PORT: 5000 + PEER_LIST_URL: https://bootnodes.minaprotocol.com/networks/devnet.txt + +jobs: + compare-archives: + runs-on: ubuntu-latest + + services: + postgres-ocaml: + image: postgres + env: + POSTGRES_PASSWORD: ${{ env.POSTGRES_PASSWORD }} + options: >- + --health-cmd pg_isready + --health-interval 5s + --health-timeout 10s + --health-retries 10 + + postgres-openmina: + image: postgres + 
env: + POSTGRES_PASSWORD: ${{ env.POSTGRES_PASSWORD }} + options: >- + --health-cmd pg_isready + --health-interval 5s + --health-timeout 10s + --health-retries 10 + + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Create output directories + run: | + mkdir -p ${{ github.workspace }}/archive-outputs/ocaml + mkdir -p ${{ github.workspace }}/archive-outputs/openmina + + - name: Initialize Databases + run: | + sudo apt-get update + sudo apt-get install -y postgresql-client + + # Initialize OCaml database + psql postgres://postgres:${{ env.POSTGRES_PASSWORD }}@postgres-ocaml:${{ env.PG_PORT }}/${{ env.PG_DB }} -c "CREATE DATABASE ${{ env.PG_DB }};" + psql postgres://postgres:${{ env.POSTGRES_PASSWORD }}@postgres-ocaml:${{ env.PG_PORT }}/${{ env.PG_DB }} -c " + ALTER SYSTEM SET max_connections = 500; + ALTER SYSTEM SET max_locks_per_transaction = 100; + ALTER SYSTEM SET max_pred_locks_per_relation = 100; + ALTER SYSTEM SET max_pred_locks_per_transaction = 5000; + " + psql postgres://postgres:${{ env.POSTGRES_PASSWORD }}@postgres-ocaml:${{ env.PG_PORT }}/${{ env.PG_DB }} \ + -f producer-dashboard/src/archive/sql/archive_schema.sql + + # Initialize OpenMina database + psql postgres://postgres:${{ env.POSTGRES_PASSWORD }}@postgres-openmina:${{ env.PG_PORT }}/${{ env.PG_DB }} -c "CREATE DATABASE ${{ env.PG_DB }};" + psql postgres://postgres:${{ env.POSTGRES_PASSWORD }}@postgres-openmina:${{ env.PG_PORT }}/${{ env.PG_DB }} -c " + ALTER SYSTEM SET max_connections = 500; + ALTER SYSTEM SET max_locks_per_transaction = 100; + ALTER SYSTEM SET max_pred_locks_per_relation = 100; + ALTER SYSTEM SET max_pred_locks_per_transaction = 5000; + " + psql postgres://postgres:${{ env.POSTGRES_PASSWORD }}@postgres-openmina:${{ env.PG_PORT }}/${{ env.PG_DB }} \ + -f producer-dashboard/src/archive/sql/archive_schema.sql + + - name: Start OCaml Archive + uses: docker://adrnagy/mina-archive + with: + args: > + mina-archive run + --postgres-uri postgres://postgres:${{ 
env.POSTGRES_PASSWORD }}@postgres-ocaml:${{ env.PG_PORT }}/${{ env.PG_DB }} + --server-port ${{ env.ARCHIVE_PORT }} + --output-dir /data + options: >- + --name archive-ocaml + --network ${{ job.container.network }} + -v ${{ github.workspace }}/archive-outputs/ocaml:/data + -d + + - name: Start OpenMina Archive + uses: docker://adrnagy/mina-archive + with: + args: > + mina-archive run + --postgres-uri postgres://postgres:${{ env.POSTGRES_PASSWORD }}@postgres-openmina:${{ env.PG_PORT }}/${{ env.PG_DB }} + --server-port ${{ env.ARCHIVE_PORT }} + --output-dir /data + options: >- + --name archive-openmina + --network ${{ job.container.network }} + -v ${{ github.workspace }}/archive-outputs/openmina:/data + -d + + - name: Wait for Archive processes + run: | + sleep 10 # Replace with proper health check + + - name: Start OCaml Node + uses: docker://gcr.io/o1labs-192920/mina-daemon:3.0.0-dc6bf78-bullseye-devnet + with: + args: > + daemon + --archive-address archive-ocaml:${{ env.ARCHIVE_PORT }} + --insecure-rest-server + --log-level Info + options: >- + --name node-ocaml + --network ${{ job.container.network }} + -e MINA_CLIENT_TRUSTLIST="10.0.0.0/8,172.16.0.0/12,192.168.0.0/16" + -d + + - name: Start OpenMina Node + uses: docker://adrnagy/openmina:archive-test + with: + args: > + node + --archive-address archive-openmina:${{ env.ARCHIVE_PORT }} + options: >- + --name node-openmina + --network ${{ job.container.network }} + -d + + - name: Wait for nodes to be ready + run: | + # Add health check for nodes + sleep 10 # Replace with proper health check + + - name: Build comparison tool + run: | + cargo build --release -p archive-breadcrumb-compare + + - name: Run comparison + env: + OCAML_NODE_GRAPHQL: http://node-ocaml:3085/graphql + OPENMINA_NODE_GRAPHQL: http://node-openmina:3085/graphql + OCAML_NODE_DIR: ${{ github.workspace }}/archive-outputs/ocaml + OPENMINA_NODE_DIR: ${{ github.workspace }}/archive-outputs/openmina + run: | + ./target/release/archive-breadcrumb-compare 
+ + - name: Upload results + uses: actions/upload-artifact@v4 + with: + name: comparison-results + path: ${{ github.workspace }}/archive-outputs diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 68b954677c..0201a4ce89 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -113,6 +113,7 @@ jobs: run: | rustup install nightly rustup override set nightly + rustup component add --toolchain nightly-x86_64-unknown-linux-gnu rustfmt - name: Check for compilation errors in transaction fuzzer run: | cd tools/fuzzing @@ -131,7 +132,7 @@ jobs: - name: Setup Rust run: | - rustup default 1.83 + rustup default 1.84 rustup component add rustfmt - name: Setup Rust Cache @@ -157,7 +158,7 @@ jobs: - name: Setup Rust run: | - rustup default 1.83 + rustup default 1.84 rustup component add rustfmt - name: Setup Rust Cache @@ -217,7 +218,7 @@ jobs: - name: Setup Rust run: | - rustup default 1.83 + rustup default 1.84 rustup component add rustfmt - name: Setup Rust Cache @@ -253,7 +254,7 @@ jobs: - name: Setup Rust run: | - rustup default 1.83 + rustup default 1.84 rustup component add rustfmt - name: Setup Rust Cache diff --git a/.github/workflows/docker-heartbeats-processor.yaml b/.github/workflows/docker-heartbeats-processor.yaml new file mode 100644 index 0000000000..94e22d83bd --- /dev/null +++ b/.github/workflows/docker-heartbeats-processor.yaml @@ -0,0 +1,105 @@ +name: Heartbeats Processor Docker Build +on: + workflow_dispatch: + inputs: + version: + description: 'Version tag for the image' + required: true + type: string + +env: + REGISTRY_IMAGE: openmina/heartbeats-processor + +jobs: + build-heartbeat-processor-image: + strategy: + matrix: + arch: + - platform: linux/amd64 + runs-on: ubuntu-latest + - platform: linux/arm64 + runs-on: ubuntu-arm64 + runs-on: ${{ matrix.arch.runs-on }} + steps: + - name: Prepare + run: | + platform=${{ matrix.arch.platform }} + echo "PLATFORM_PAIR=${platform//\//-}" >> $GITHUB_ENV + + - name: Git checkout 
+ uses: actions/checkout@v3 + + - name: Login to Docker Hub + uses: docker/login-action@v3 + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_PASSWORD }} + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Build and push by digest + id: build + uses: docker/build-push-action@v6 + with: + context: . + file: ./tools/heartbeats-processor/Dockerfile + platforms: ${{ matrix.arch.platform }} + cache-from: type=gha + cache-to: type=gha,mode=max + outputs: type=image,name=${{ env.REGISTRY_IMAGE }},push-by-digest=true,name-canonical=true,push=true + + - name: Export digest + run: | + mkdir -p /tmp/digests + digest="${{ steps.build.outputs.digest }}" + touch "/tmp/digests/${digest#sha256:}" + + - name: Upload digest + uses: actions/upload-artifact@v4 + with: + name: heartbeat-processor-digests-${{ env.PLATFORM_PAIR }} + path: /tmp/digests/* + if-no-files-found: error + retention-days: 1 + + merge-heartbeat-processor-image: + runs-on: ubuntu-latest + needs: + - build-heartbeat-processor-image + steps: + - name: Download digests + uses: actions/download-artifact@v4 + with: + path: /tmp/digests + pattern: heartbeat-processor-digests-* + merge-multiple: true + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Docker meta + id: meta + uses: docker/metadata-action@v5 + with: + images: ${{ env.REGISTRY_IMAGE }} + tags: | + type=sha,format=short + type=raw,value=${{ inputs.version }},enable=${{ inputs.version != '' }} + type=raw,value=v${{ inputs.version }},enable=${{ inputs.version != '' }} + + - name: Login to Docker Hub + uses: docker/login-action@v3 + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_PASSWORD }} + + - name: Create manifest list and push + working-directory: /tmp/digests + run: | + docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) 
| join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \ + $(printf '${{ env.REGISTRY_IMAGE }}@sha256:%s ' *) + + - name: Inspect image + run: | + docker buildx imagetools inspect ${{ env.REGISTRY_IMAGE }}:${{ steps.meta.outputs.version }} diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index a45a0d70d4..58ae89d868 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -17,7 +17,7 @@ jobs: sudo apt install -y protobuf-compiler - uses: actions-rs/toolchain@v1 with: - toolchain: 1.83 + toolchain: 1.84 components: rustfmt, clippy default: true - uses: actions-rs/cargo@v1 @@ -36,4 +36,4 @@ jobs: name: clippy with: token: ${{ secrets.GITHUB_TOKEN }} - args: --all-targets -- -D warnings --allow clippy::mutable_key_type --allow clippy::result_unit_err + args: --all-targets -- -D warnings --allow clippy::mutable_key_type diff --git a/CHANGELOG.md b/CHANGELOG.md index 395b5634f9..76ab5914e3 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,24 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [Unreleased] +## [0.14.0] - 2025-01-31 + +### Changed + +- **Rust Toolchain**: Updated the minimum required Rust toolchain to version 1.84. +- **Proofs**: Optimizations (MSM, field inversion) that speed up proof production. + +### Fixed + +- **P2P**: Correct handling of yamux windows limits +- **P2P**: Wait until full validation is complete before broadcasting blocks. +- **WebRTC/P2P**: Handle propagation of messages received from the WebRTC network to libp2p's gossip network (blocks, snarks and transactions). +- **Transaction pool**: Fixed checks for deep account updates when pre-validating transactions. + +### Added + +- **Archive mode**: Added support for archive mode, which allows the node to connect to an archiver process and store node data in a database. + ## [0.13.0] - 2025-01-06 ### Fixed @@ -348,7 +366,8 @@ First public release. 
- Alpha version of the node which can connect and syncup to the berkeleynet network, and keep applying new blocks to maintain consensus state and ledger up to date. - Web-based frontend for the node. -[Unreleased]: https://github.com/openmina/openmina/compare/v0.13.0...develop +[Unreleased]: https://github.com/openmina/openmina/compare/v0.14.0...develop +[0.14.0]: https://github.com/openmina/openmina/compare/v0.13.0...v0.14.0 [0.13.0]: https://github.com/openmina/openmina/compare/v0.12.0...v0.13.0 [0.12.0]: https://github.com/openmina/openmina/compare/v0.11.0...v0.12.0 [0.11.0]: https://github.com/openmina/openmina/compare/v0.10.3...v0.11.0 diff --git a/Cargo.lock b/Cargo.lock index b1037c08b2..849639cdcc 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -105,7 +105,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "42cd52102d3df161c77a887b608d7a4897d7cc112886a9537b738a887a03aaff" dependencies = [ "cfg-if", - "getrandom", "once_cell", "version_check", "zerocopy", @@ -131,7 +130,7 @@ dependencies = [ "num", "serde", "serde_json", - "thiserror", + "thiserror 1.0.60", "toml", "wasm-bindgen", "wasm-bindgen-test", @@ -242,7 +241,7 @@ dependencies = [ [[package]] name = "ark-ec" version = "0.3.0" -source = "git+https://github.com/openmina/algebra?rev=d0343f5#d0343f56c517675d2c340d066727d470ce22d552" +source = "git+https://github.com/openmina/algebra?rev=aea157a#aea157a8e81ddc9f16bae5123b5dda169e3842c9" dependencies = [ "ark-ff", "ark-serialize 0.3.0", @@ -256,7 +255,7 @@ dependencies = [ [[package]] name = "ark-ff" version = "0.3.0" -source = "git+https://github.com/openmina/algebra?rev=d0343f5#d0343f56c517675d2c340d066727d470ce22d552" +source = "git+https://github.com/openmina/algebra?rev=aea157a#aea157a8e81ddc9f16bae5123b5dda169e3842c9" dependencies = [ "ark-ff-asm", "ark-ff-macros", @@ -274,27 +273,27 @@ dependencies = [ [[package]] name = "ark-ff-asm" version = "0.3.0" -source = 
"git+https://github.com/openmina/algebra?rev=d0343f5#d0343f56c517675d2c340d066727d470ce22d552" +source = "git+https://github.com/openmina/algebra?rev=aea157a#aea157a8e81ddc9f16bae5123b5dda169e3842c9" dependencies = [ - "quote", + "quote 1.0.35", "syn 1.0.109", ] [[package]] name = "ark-ff-macros" version = "0.3.0" -source = "git+https://github.com/openmina/algebra?rev=d0343f5#d0343f56c517675d2c340d066727d470ce22d552" +source = "git+https://github.com/openmina/algebra?rev=aea157a#aea157a8e81ddc9f16bae5123b5dda169e3842c9" dependencies = [ "num-bigint", "num-traits", - "quote", + "quote 1.0.35", "syn 1.0.109", ] [[package]] name = "ark-poly" version = "0.3.0" -source = "git+https://github.com/openmina/algebra?rev=d0343f5#d0343f56c517675d2c340d066727d470ce22d552" +source = "git+https://github.com/openmina/algebra?rev=aea157a#aea157a8e81ddc9f16bae5123b5dda169e3842c9" dependencies = [ "ark-ff", "ark-serialize 0.3.0", @@ -307,7 +306,7 @@ dependencies = [ [[package]] name = "ark-serialize" version = "0.3.0" -source = "git+https://github.com/openmina/algebra?rev=d0343f5#d0343f56c517675d2c340d066727d470ce22d552" +source = "git+https://github.com/openmina/algebra?rev=aea157a#aea157a8e81ddc9f16bae5123b5dda169e3842c9" dependencies = [ "ark-serialize-derive", "ark-std 0.3.0", @@ -328,10 +327,10 @@ dependencies = [ [[package]] name = "ark-serialize-derive" version = "0.3.0" -source = "git+https://github.com/openmina/algebra?rev=d0343f5#d0343f56c517675d2c340d066727d470ce22d552" +source = "git+https://github.com/openmina/algebra?rev=aea157a#aea157a8e81ddc9f16bae5123b5dda169e3842c9" dependencies = [ - "proc-macro2", - "quote", + "proc-macro2 1.0.93", + "quote 1.0.35", "syn 1.0.109", ] @@ -386,7 +385,7 @@ dependencies = [ "nom", "num-traits", "rusticata-macros", - "thiserror", + "thiserror 1.0.60", "time", ] @@ -402,7 +401,7 @@ dependencies = [ "nom", "num-traits", "rusticata-macros", - "thiserror", + "thiserror 1.0.60", "time", ] @@ -412,8 +411,8 @@ version = "0.4.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "726535892e8eae7e70657b4c8ea93d26b8553afb1ce617caee529ef96d7dee6c" dependencies = [ - "proc-macro2", - "quote", + "proc-macro2 1.0.93", + "quote 1.0.35", "syn 1.0.109", "synstructure 0.12.6", ] @@ -424,9 +423,9 @@ version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "965c2d33e53cb6b267e148a4cb0760bc01f4904c1cd4bb4002a085bb016d1490" dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.58", + "proc-macro2 1.0.93", + "quote 1.0.35", + "syn 2.0.96", "synstructure 0.13.1", ] @@ -436,8 +435,8 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2777730b2039ac0f95f093556e61b6d26cebed5393ca6f152717777cec3a42ed" dependencies = [ - "proc-macro2", - "quote", + "proc-macro2 1.0.93", + "quote 1.0.35", "syn 1.0.109", ] @@ -447,9 +446,22 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7b18050c2cd6fe86c3a76584ef5e0baf286d038cda203eb6223df2cc413565f7" dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.58", + "proc-macro2 1.0.93", + "quote 1.0.35", + "syn 2.0.96", +] + +[[package]] +name = "async-compression" +version = "0.4.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df895a515f70646414f4b45c0b79082783b80552b373a68283012928df56f522" +dependencies = [ + "flate2", + "futures-core", + "memchr", + "pin-project-lite", + "tokio", ] [[package]] @@ -478,7 +490,29 @@ version = "2.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "287272293e9d8c41773cec55e365490fe034813a2f172f502d6ddcf75b2f582b" dependencies = [ - "event-listener", + "event-listener 2.5.3", +] + +[[package]] +name = "async-stream" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b5a71a6f37880a80d1d7f19efd781e4b5de42c88f0722cc13bcb6cc2cfe8476" +dependencies = [ + "async-stream-impl", + "futures-core", 
+ "pin-project-lite", +] + +[[package]] +name = "async-stream-impl" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d" +dependencies = [ + "proc-macro2 1.0.93", + "quote 1.0.35", + "syn 2.0.96", ] [[package]] @@ -487,9 +521,9 @@ version = "0.1.74" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a66537f1bb974b254c98ed142ff995236e81b9d0fe4db0575f46612cb15eb0f9" dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.58", + "proc-macro2 1.0.93", + "quote 1.0.35", + "syn 2.0.96", ] [[package]] @@ -558,9 +592,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "459b77b7e855f875fd15f101064825cd79eb83185a961d66e6298560126facfb" dependencies = [ "derive_utils", - "proc-macro2", - "quote", - "syn 2.0.58", + "proc-macro2 1.0.93", + "quote 1.0.35", + "syn 2.0.96", ] [[package]] @@ -597,7 +631,7 @@ dependencies = [ "serde_urlencoded", "sync_wrapper 1.0.1", "tokio", - "tower", + "tower 0.4.13", "tower-layer", "tower-service", "tracing", @@ -624,6 +658,20 @@ dependencies = [ "tracing", ] +[[package]] +name = "backoff" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b62ddb9cb1ec0a098ad4bbf9344d0713fa193ae1a80af55febcff2627b6a00c1" +dependencies = [ + "futures-core", + "getrandom", + "instant", + "pin-project-lite", + "rand", + "tokio", +] + [[package]] name = "backtrace" version = "0.3.69" @@ -682,7 +730,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "85b6598a2f5d564fb7855dc6b06fd1c38cff5a72bd8b863a4d021938497b440a" dependencies = [ "serde", - "thiserror", + "thiserror 1.0.60", ] [[package]] @@ -700,7 +748,7 @@ version = "0.69.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "271383c67ccabffb7381723dea0672a673f292304fcb45c01cc648c7a8d58088" dependencies = [ - "bitflags 2.4.1", + "bitflags 2.8.0", 
"cexpr", "clang-sys", "itertools 0.12.0", @@ -708,12 +756,12 @@ dependencies = [ "lazycell", "log", "prettyplease", - "proc-macro2", - "quote", + "proc-macro2 1.0.93", + "quote 1.0.35", "regex", - "rustc-hash", + "rustc-hash 1.1.0", "shlex", - "syn 2.0.58", + "syn 2.0.96", "which", ] @@ -732,7 +780,7 @@ name = "binprot_derive" version = "0.1.7" source = "git+https://github.com/openmina/binprot-rs?rev=400b52c#400b52c204d7a820ba60202dbd4cf8c3dfb7903b" dependencies = [ - "quote", + "quote 1.0.35", "syn 1.0.109", ] @@ -750,9 +798,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bitflags" -version = "2.4.1" +version = "2.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "327762f6e5a765692301e5bb513e0d9fef63be86bbc14528052b1cd3e6f03e07" +checksum = "8f68f53c83ab957f72c32642f3868eec03eb974d1fb82e453128456482613d36" dependencies = [ "serde", ] @@ -882,9 +930,9 @@ checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" [[package]] name = "bytes" -version = "1.5.0" +version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a2bd12c1caf447e69cd4528f47f94d203fd2582878ecb9e9465484c4148a8223" +checksum = "325918d6fe32f23b19878fe4b34794ae41fc19ddbe53b10571a4874d44ffd39b" dependencies = [ "serde", ] @@ -900,12 +948,13 @@ dependencies = [ [[package]] name = "cc" -version = "1.0.83" +version = "1.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f1174fb0b6ec23863f8b971027804a42614e347eafb0a95bf0b12cdae21fc4d0" +checksum = "c31a0499c1dc64f458ad13872de75c0eb7e3fdb0e67964610c914b034fc5956e" dependencies = [ "jobserver", "libc", + "shlex", ] [[package]] @@ -935,6 +984,12 @@ version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" +[[package]] +name = "cfg_aliases" +version = "0.2.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" + [[package]] name = "chacha20" version = "0.8.2" @@ -992,9 +1047,11 @@ checksum = "8eaf5903dcbc0a39312feb77df2ff4c76387d591b9fc7b04a238dcf8bb62639a" dependencies = [ "android-tzdata", "iana-time-zone", + "js-sys", "num-traits", "serde", - "windows-targets 0.52.0", + "wasm-bindgen", + "windows-targets 0.52.6", ] [[package]] @@ -1072,9 +1129,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4ac6a0c7b1a9e9a5186361f67dfa1b88213572f427fb9ab038efb2bd8c582dab" dependencies = [ "heck 0.5.0", - "proc-macro2", - "quote", - "syn 2.0.58", + "proc-macro2 1.0.93", + "quote 1.0.35", + "syn 2.0.96", ] [[package]] @@ -1085,7 +1142,7 @@ checksum = "98cc8fbded0c607b7ba9dd60cd98df59af97e84d24e49c8557331cfc26d301ce" [[package]] name = "cli" -version = "0.13.0" +version = "0.14.0" dependencies = [ "anyhow", "bytes", @@ -1105,7 +1162,7 @@ dependencies = [ "rand", "rayon", "redux", - "reqwest", + "reqwest 0.11.24", "serde", "serde_json", "sha2 0.10.8", @@ -1154,9 +1211,9 @@ dependencies = [ [[package]] name = "concurrent-queue" -version = "2.3.0" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f057a694a54f12365049b0958a1685bb52d567f5593b355fbf685838e873d400" +checksum = "4ca0197aee26d1ae37445ee532fefce43251d24cc7c166799f4d46817f1d3973" dependencies = [ "crossbeam-utils", ] @@ -1202,6 +1259,15 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e" +[[package]] +name = "convert_case" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec182b0ca2f35d8fc196cf3404988fd8b8c739a4d270ff118a398feb0cbec1ca" +dependencies = [ + "unicode-segmentation", +] + [[package]] name = "core-foundation" version = "0.9.3" @@ -1212,11 +1278,21 @@ 
dependencies = [ "libc", ] +[[package]] +name = "core-foundation" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b55271e5c8c478ad3f38ad24ef34923091e0548492a266d19b3c0b4d82574c63" +dependencies = [ + "core-foundation-sys", + "libc", +] + [[package]] name = "core-foundation-sys" -version = "0.8.4" +version = "0.8.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e496a50fda8aacccc86d7529e2c1e0892dbd0f898a6b5645b5561b89c3210efa" +checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" [[package]] name = "core2" @@ -1236,10 +1312,10 @@ dependencies = [ "cc", "cpp_common", "lazy_static", - "proc-macro2", + "proc-macro2 1.0.93", "regex", - "syn 2.0.58", - "unicode-xid", + "syn 2.0.96", + "unicode-xid 0.2.4", ] [[package]] @@ -1249,8 +1325,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "25fcfea2ee05889597d35e986c2ad0169694320ae5cc8f6d2640a4bb8a884560" dependencies = [ "lazy_static", - "proc-macro2", - "syn 2.0.58", + "proc-macro2 1.0.93", + "syn 2.0.96", ] [[package]] @@ -1443,9 +1519,9 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "83fdaf97f4804dcebfa5862639bc9ce4121e82140bec2a987ac5140294865b5b" dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.58", + "proc-macro2 1.0.93", + "quote 1.0.35", + "syn 2.0.96", ] [[package]] @@ -1486,8 +1562,8 @@ checksum = "859d65a907b6852c9361e3185c862aae7fafd2887876799fa55f5f99dc40d610" dependencies = [ "fnv", "ident_case", - "proc-macro2", - "quote", + "proc-macro2 1.0.93", + "quote 1.0.35", "strsim 0.10.0", "syn 1.0.109", ] @@ -1500,8 +1576,8 @@ checksum = "109c1ca6e6b7f82cc233a97004ea8ed7ca123a9af07a8230878fcfda9b158bf0" dependencies = [ "fnv", "ident_case", - "proc-macro2", - "quote", + "proc-macro2 1.0.93", + "quote 1.0.35", "strsim 0.10.0", "syn 1.0.109", ] @@ -1514,10 +1590,10 @@ checksum = 
"33043dcd19068b8192064c704b3f83eb464f91f1ff527b44a4e2b08d9cdb8855" dependencies = [ "fnv", "ident_case", - "proc-macro2", - "quote", + "proc-macro2 1.0.93", + "quote 1.0.35", "strsim 0.10.0", - "syn 2.0.58", + "syn 2.0.96", ] [[package]] @@ -1527,7 +1603,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9c972679f83bdf9c42bd905396b6c3588a843a17f0f16dfcfa3e2c5d57441835" dependencies = [ "darling_core 0.13.4", - "quote", + "quote 1.0.35", "syn 1.0.109", ] @@ -1538,7 +1614,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a4aab4dbc9f7611d8b55048a3a16d2d010c2c8334e46304b40ac1cc14bf3b48e" dependencies = [ "darling_core 0.14.4", - "quote", + "quote 1.0.35", "syn 1.0.109", ] @@ -1549,8 +1625,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c5a91391accf613803c2a9bf9abccdbaa07c54b4244a5b64883f9c3c137c86be" dependencies = [ "darling_core 0.20.6", - "quote", - "syn 2.0.58", + "quote 1.0.35", + "syn 2.0.96", ] [[package]] @@ -1659,8 +1735,8 @@ version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b" dependencies = [ - "proc-macro2", - "quote", + "proc-macro2 1.0.93", + "quote 1.0.35", "syn 1.0.109", ] @@ -1680,8 +1756,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1f91d4cfa921f1c05904dc3c57b4a32c38aed3340cce209f3a6fd1478babafc4" dependencies = [ "darling 0.14.4", - "proc-macro2", - "quote", + "proc-macro2 1.0.93", + "quote 1.0.35", "syn 1.0.109", ] @@ -1701,9 +1777,9 @@ version = "0.99.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321" dependencies = [ - "convert_case", - "proc-macro2", - "quote", + "convert_case 0.4.0", + "proc-macro2 1.0.93", + "quote 1.0.35", "rustc_version 0.4.0", "syn 1.0.109", ] @@ -1714,9 +1790,9 @@ version = "0.14.2" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "65f152f4b8559c4da5d574bafc7af85454d706b4c5fe8b530d508cacbb6807ea" dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.58", + "proc-macro2 1.0.93", + "quote 1.0.35", + "syn 2.0.96", ] [[package]] @@ -1785,9 +1861,9 @@ version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "487585f4d0c6655fe74905e2504d8ad6908e4db67f744eb140876906c2f3175d" dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.58", + "proc-macro2 1.0.93", + "quote 1.0.35", + "syn 2.0.96", ] [[package]] @@ -1805,10 +1881,16 @@ version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f87ab5c4c35c1d2fc7928e96d1cee3786a9bc9571ebaf4bf8f4207e6271165fa" dependencies = [ - "quote", - "syn 2.0.58", + "quote 1.0.35", + "syn 2.0.96", ] +[[package]] +name = "dotenv" +version = "0.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77c90badedccf4105eca100756a0b1289e191f6fcbdadd3cee1d2f614f97da8f" + [[package]] name = "dotenvy" version = "0.15.7" @@ -1925,9 +2007,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5ffccbb6966c05b32ef8fbac435df276c4ae4d3dc55a8cd0eb9745e6c12f546a" dependencies = [ "heck 0.4.1", - "proc-macro2", - "quote", - "syn 2.0.58", + "proc-macro2 1.0.93", + "quote 1.0.35", + "syn 2.0.96", ] [[package]] @@ -1937,9 +2019,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8f33313078bb8d4d05a2733a94ac4c2d8a0df9a2b84424ebf4f33bfc224a890e" dependencies = [ "once_cell", - "proc-macro2", - "quote", - "syn 2.0.58", + "proc-macro2 1.0.93", + "quote 1.0.35", + "syn 2.0.96", ] [[package]] @@ -1973,12 +2055,12 @@ checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" [[package]] name = "errno" -version = "0.3.5" +version = "0.3.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"ac3e13f66a2f95e32a39eaa81f6b95d42878ca0e1db0c7543723dfe12557e860" +checksum = "33d852cb9b869c2a9b3df2f71a3074817f01e1844f839a144f5fcef059a4eb5d" dependencies = [ "libc", - "windows-sys 0.48.0", + "windows-sys 0.52.0", ] [[package]] @@ -1998,6 +2080,17 @@ version = "2.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0" +[[package]] +name = "event-listener" +version = "5.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6032be9bd27023a771701cc49f9f053c751055f71efb2e0ae5c15809093675ba" +dependencies = [ + "concurrent-queue", + "parking", + "pin-project-lite", +] + [[package]] name = "faster-stun" version = "1.0.1" @@ -2024,9 +2117,9 @@ dependencies = [ [[package]] name = "fastrand" -version = "2.0.1" +version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "25cbce373ec4653f1a01a31e8a5e5ec0c622dc27ff9c4e6606eefef5cbbed4a5" +checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" [[package]] name = "ff" @@ -2050,6 +2143,29 @@ version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8fcfdc7a0362c9f4444381a9e697c79d435fe65b52a37466fc2c1184cee9edc6" +[[package]] +name = "firestore" +version = "0.44.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bdbba2c9a18b47ce16358ffe33bb442256d0cc9f166883aba856746382a8bd3e" +dependencies = [ + "async-trait", + "backoff", + "chrono", + "futures", + "gcloud-sdk", + "hex", + "hyper 1.5.0", + "rand", + "rsb_derive", + "rvstruct", + "serde", + "struct-path", + "tokio", + "tokio-stream", + "tracing", +] + [[package]] name = "fixedbitset" version = "0.4.2" @@ -2101,9 +2217,9 @@ checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" [[package]] name = "form_urlencoded" -version = "1.2.0" +version = "1.2.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "a62bc1cf6f830c2ec14a513a9fb124d0a213a629668a4186f329db21fe045652" +checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456" dependencies = [ "percent-encoding", ] @@ -2225,9 +2341,9 @@ version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac" dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.58", + "proc-macro2 1.0.93", + "quote 1.0.35", + "syn 2.0.96", ] [[package]] @@ -2326,8 +2442,8 @@ version = "0.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "30ce01e8bbb3e7e0758dcf907fe799f5998a54368963f766ae94b84624ba60c8" dependencies = [ - "proc-macro2", - "quote", + "proc-macro2 1.0.93", + "quote 1.0.35", "syn 1.0.109", ] @@ -2340,6 +2456,34 @@ dependencies = [ "byteorder", ] +[[package]] +name = "gcloud-sdk" +version = "0.26.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a771db7ee43ad84638d0e6131cb514e402af6a5a96051f4425d66dc0ee527d8" +dependencies = [ + "async-trait", + "bytes", + "chrono", + "futures", + "hyper 1.5.0", + "jsonwebtoken", + "once_cell", + "prost 0.13.4", + "prost-types 0.13.4", + "reqwest 0.12.12", + "secret-vault-value", + "serde", + "serde_json", + "tokio", + "tonic", + "tower 0.5.2", + "tower-layer", + "tower-util", + "tracing", + "url", +] + [[package]] name = "generic-array" version = "0.14.7" @@ -2430,6 +2574,22 @@ dependencies = [ "web-sys", ] +[[package]] +name = "graphannis-malloc_size_of" +version = "2.0.0" +source = "git+https://github.com/openmina/graphannis-malloc_size_of.git?rev=f7da9f6#f7da9f679a0d5c339fa674e1ffebb00275370141" + +[[package]] +name = "graphannis-malloc_size_of_derive" +version = "2.0.1-alpha.0" +source = "git+https://github.com/openmina/graphannis-malloc_size_of_derive.git#30c74e817f9f74434aa6fffe707be44d0a73c47b" +dependencies = [ + "proc-macro2 0.4.30", + 
"quote 0.6.13", + "syn 0.15.44", + "synstructure 0.11.0", +] + [[package]] name = "graphql-introspection-query" version = "0.2.0" @@ -2446,7 +2606,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d2ebc8013b4426d5b81a4364c419a95ed0b404af2b82e2457de52d9348f0e474" dependencies = [ "combine", - "thiserror", + "thiserror 1.0.60", ] [[package]] @@ -2456,7 +2616,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a50cfdc7f34b7f01909d55c2dcb71d4c13cbcbb4a1605d6c8bd760d654c1144b" dependencies = [ "graphql_query_derive", - "reqwest", + "reqwest 0.11.24", "serde", "serde_json", ] @@ -2471,8 +2631,8 @@ dependencies = [ "graphql-parser", "heck 0.4.1", "lazy_static", - "proc-macro2", - "quote", + "proc-macro2 1.0.93", + "quote 1.0.35", "serde", "serde_json", "syn 1.0.109", @@ -2485,7 +2645,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "83febfa838f898cfa73dfaa7a8eb69ff3409021ac06ee94cfb3d622f6eeb1a97" dependencies = [ "graphql_client_codegen", - "proc-macro2", + "proc-macro2 1.0.93", "syn 1.0.109", ] @@ -2503,7 +2663,7 @@ dependencies = [ [[package]] name = "groupmap" version = "0.1.0" -source = "git+https://github.com/openmina/proof-systems?rev=c478b19#c478b197ddb7fcefee87c4cfdc097a217a855086" +source = "git+https://github.com/openmina/proof-systems?rev=dec49a9#dec49a95fd483d8a90b7b602d13bfa8ea9485491" dependencies = [ "ark-ec", "ark-ff", @@ -2529,9 +2689,28 @@ dependencies = [ "tracing", ] +[[package]] +name = "h2" +version = "0.4.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ccae279728d634d083c00f6099cb58f01cc99c145b84b8be2f6c74618d79922e" +dependencies = [ + "atomic-waker", + "bytes", + "fnv", + "futures-core", + "futures-sink", + "http 1.1.0", + "indexmap 2.0.2", + "slab", + "tokio", + "tokio-util", + "tracing", +] + [[package]] name = "hash-tool" -version = "0.13.0" +version = "0.14.0" dependencies = [ "bs58 0.5.0", "hex", @@ -2564,9 +2743,9 
@@ checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" [[package]] name = "hashbrown" -version = "0.14.2" +version = "0.14.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f93e7192158dbcda357bdec5fb5788eebf8bbac027f3f33e719d29135ae84156" +checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" dependencies = [ "ahash 0.8.8", "allocator-api2", @@ -2574,11 +2753,11 @@ dependencies = [ [[package]] name = "hashlink" -version = "0.8.4" +version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8094feaf31ff591f651a2664fb9cfd92bba7a60ce3197265e9482ebe753c8f7" +checksum = "6ba4ff7128dee98c7dc9794b6a411377e1404dba1c97deb8d1a55297bd25d8af" dependencies = [ - "hashbrown 0.14.2", + "hashbrown 0.14.5", ] [[package]] @@ -2619,6 +2798,25 @@ dependencies = [ "stable_deref_trait", ] +[[package]] +name = "heartbeats-processor" +version = "0.14.0" +dependencies = [ + "anyhow", + "base64 0.22.0", + "chrono", + "clap 4.5.20", + "dotenv", + "firestore", + "gcloud-sdk", + "mina-p2p-messages", + "openmina-core", + "serde", + "serde_json", + "sqlx", + "tokio", +] + [[package]] name = "heck" version = "0.3.3" @@ -2633,9 +2831,6 @@ name = "heck" version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" -dependencies = [ - "unicode-segmentation", -] [[package]] name = "heck" @@ -2801,7 +2996,7 @@ dependencies = [ "futures-channel", "futures-core", "futures-util", - "h2", + "h2 0.3.24", "http 0.2.9", "http-body 0.4.5", "httparse", @@ -2824,14 +3019,47 @@ dependencies = [ "bytes", "futures-channel", "futures-util", + "h2 0.4.7", "http 1.1.0", "http-body 1.0.1", "httparse", "httpdate", "itoa", "pin-project-lite", - "smallvec", + "smallvec 1.13.2", + "tokio", + "want", +] + +[[package]] +name = "hyper-rustls" +version = "0.27.5" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "2d191583f3da1305256f22463b9bb0471acad48a4e534a5218b9963e9c1f59b2" +dependencies = [ + "futures-util", + "http 1.1.0", + "hyper 1.5.0", + "hyper-util", + "rustls 0.23.21", + "rustls-native-certs", + "rustls-pki-types", + "tokio", + "tokio-rustls", + "tower-service", +] + +[[package]] +name = "hyper-timeout" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b90d566bffbce6a75bd8b09a05aa8c2cb1fabb6cb348f8840c9e4c90a0d83b0" +dependencies = [ + "hyper 1.5.0", + "hyper-util", + "pin-project-lite", "tokio", + "tower-service", ] [[package]] @@ -2847,19 +3075,39 @@ dependencies = [ "tokio-native-tls", ] +[[package]] +name = "hyper-tls" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70206fc6890eaca9fde8a0bf71caa2ddfc9fe045ac9e5c70df101a7dbde866e0" +dependencies = [ + "bytes", + "http-body-util", + "hyper 1.5.0", + "hyper-util", + "native-tls", + "tokio", + "tokio-native-tls", + "tower-service", +] + [[package]] name = "hyper-util" -version = "0.1.7" +version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cde7055719c54e36e95e8719f95883f22072a48ede39db7fc17a4e1d5281e9b9" +checksum = "df2dcfbe0677734ab2f3ffa7fa7bfd4706bfdc1ef393f2ee30184aed67e631b4" dependencies = [ "bytes", + "futures-channel", "futures-util", "http 1.1.0", "http-body 1.0.1", "hyper 1.5.0", "pin-project-lite", + "socket2 0.5.5", "tokio", + "tower-service", + "tracing", ] [[package]] @@ -2886,56 +3134,195 @@ dependencies = [ ] [[package]] -name = "ident_case" -version = "1.0.1" +name = "icu_collections" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" +checksum = "db2fa452206ebee18c4b5c2274dbf1de17008e874b4dc4f0aea9d01ca79e4526" +dependencies = [ + "displaydoc", + "yoke", + "zerofrom", + 
"zerovec", +] [[package]] -name = "idna" -version = "0.4.0" +name = "icu_locid" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7d20d6b07bfbc108882d88ed8e37d39636dcc260e15e30c45e6ba089610b917c" +checksum = "13acbb8371917fc971be86fc8057c41a64b521c184808a698c02acc242dbf637" dependencies = [ - "unicode-bidi", - "unicode-normalization", + "displaydoc", + "litemap", + "tinystr", + "writeable", + "zerovec", ] [[package]] -name = "if-addrs" -version = "0.7.0" +name = "icu_locid_transform" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cbc0fa01ffc752e9dbc72818cdb072cd028b86be5e09dd04c5a643704fe101a9" +checksum = "01d11ac35de8e40fdeda00d9e1e9d92525f3f9d887cdd7aa81d727596788b54e" dependencies = [ - "libc", - "winapi 0.3.9", + "displaydoc", + "icu_locid", + "icu_locid_transform_data", + "icu_provider", + "tinystr", + "zerovec", ] [[package]] -name = "if-watch" -version = "3.1.0" +name = "icu_locid_transform_data" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbb892e5777fe09e16f3d44de7802f4daa7267ecbe8c466f19d94e25bb0c303e" +checksum = "fdc8ff3388f852bede6b579ad4e978ab004f139284d7b28715f773507b946f6e" + +[[package]] +name = "icu_normalizer" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19ce3e0da2ec68599d193c93d088142efd7f9c5d6fc9b803774855747dc6a84f" dependencies = [ - "async-io", - "core-foundation", - "fnv", - "futures", - "if-addrs", - "ipnet", - "log", - "rtnetlink", - "system-configuration", - "tokio", - "windows", + "displaydoc", + "icu_collections", + "icu_normalizer_data", + "icu_properties", + "icu_provider", + "smallvec 1.13.2", + "utf16_iter", + "utf8_iter", + "write16", + "zerovec", ] [[package]] -name = "igd-next" -version = "0.14.2" +name = "icu_normalizer_data" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"57e065e90a518ab5fedf79aa1e4b784e10f8e484a834f6bda85c42633a2cb7af" -dependencies = [ +checksum = "f8cafbf7aa791e9b22bec55a167906f9e1215fd475cd22adfcf660e03e989516" + +[[package]] +name = "icu_properties" +version = "1.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93d6020766cfc6302c15dbbc9c8778c37e62c14427cb7f6e601d849e092aeef5" +dependencies = [ + "displaydoc", + "icu_collections", + "icu_locid_transform", + "icu_properties_data", + "icu_provider", + "tinystr", + "zerovec", +] + +[[package]] +name = "icu_properties_data" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67a8effbc3dd3e4ba1afa8ad918d5684b8868b3b26500753effea8d2eed19569" + +[[package]] +name = "icu_provider" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ed421c8a8ef78d3e2dbc98a973be2f3770cb42b606e3ab18d6237c4dfde68d9" +dependencies = [ + "displaydoc", + "icu_locid", + "icu_provider_macros", + "stable_deref_trait", + "tinystr", + "writeable", + "yoke", + "zerofrom", + "zerovec", +] + +[[package]] +name = "icu_provider_macros" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6" +dependencies = [ + "proc-macro2 1.0.93", + "quote 1.0.35", + "syn 2.0.96", +] + +[[package]] +name = "ident_case" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" + +[[package]] +name = "idna" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d20d6b07bfbc108882d88ed8e37d39636dcc260e15e30c45e6ba089610b917c" +dependencies = [ + "unicode-bidi", + "unicode-normalization", +] + +[[package]] +name = "idna" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"686f825264d630750a544639377bae737628043f20d38bbc029e8f29ea968a7e" +dependencies = [ + "idna_adapter", + "smallvec 1.13.2", + "utf8_iter", +] + +[[package]] +name = "idna_adapter" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "daca1df1c957320b2cf139ac61e7bd64fed304c5040df000a745aa1de3b4ef71" +dependencies = [ + "icu_normalizer", + "icu_properties", +] + +[[package]] +name = "if-addrs" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cbc0fa01ffc752e9dbc72818cdb072cd028b86be5e09dd04c5a643704fe101a9" +dependencies = [ + "libc", + "winapi 0.3.9", +] + +[[package]] +name = "if-watch" +version = "3.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbb892e5777fe09e16f3d44de7802f4daa7267ecbe8c466f19d94e25bb0c303e" +dependencies = [ + "async-io", + "core-foundation 0.9.3", + "fnv", + "futures", + "if-addrs", + "ipnet", + "log", + "rtnetlink", + "system-configuration 0.5.1", + "tokio", + "windows", +] + +[[package]] +name = "igd-next" +version = "0.14.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57e065e90a518ab5fedf79aa1e4b784e10f8e484a834f6bda85c42633a2cb7af" +dependencies = [ "async-trait", "attohttpc", "bytes", @@ -2967,7 +3354,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8adf3ddd720272c6ea8bf59463c04e0f93d0bbf7c5439b691bca2987e0270897" dependencies = [ "equivalent", - "hashbrown 0.14.2", + "hashbrown 0.14.5", "serde", ] @@ -3002,7 +3389,7 @@ dependencies = [ "rand", "rtcp", "rtp", - "thiserror", + "thiserror 1.0.60", "tokio", "waitgroup", "webrtc-srtp", @@ -3012,7 +3399,7 @@ dependencies = [ [[package]] name = "internal-tracing" version = "0.1.0" -source = "git+https://github.com/openmina/proof-systems?rev=c478b19#c478b197ddb7fcefee87c4cfdc097a217a855086" +source = "git+https://github.com/openmina/proof-systems?rev=dec49a9#dec49a95fd483d8a90b7b602d13bfa8ea9485491" 
[[package]] name = "io-lifetimes" @@ -3078,9 +3465,9 @@ checksum = "af150ab688ff2122fcef229be89cb50dd66af9e01a4ff320cc137eecc9bacc38" [[package]] name = "jobserver" -version = "0.1.27" +version = "0.1.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c37f63953c4c63420ed5fd3d6d398c719489b9f872b9fa683262f8edd363c7d" +checksum = "48d1dbcbbeb6a7fec7e059840aa538bd62aaccf972c7346c4d9d2059312853d0" dependencies = [ "libc", ] @@ -3107,7 +3494,22 @@ dependencies = [ "pest_derive", "regex", "serde_json", - "thiserror", + "thiserror 1.0.60", +] + +[[package]] +name = "jsonwebtoken" +version = "9.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9ae10193d25051e74945f1ea2d0b42e03cc3b890f7e4cc5faa44997d808193f" +dependencies = [ + "base64 0.21.7", + "js-sys", + "pem 3.0.4", + "ring 0.17.8", + "serde", + "serde_json", + "simple_asn1", ] [[package]] @@ -3133,9 +3535,9 @@ version = "0.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "760dbe46660494d469023d661e8d268f413b2cb68c999975dcc237407096a693" dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.58", + "proc-macro2 1.0.93", + "quote 1.0.35", + "syn 2.0.96", "url", ] @@ -3149,7 +3551,7 @@ dependencies = [ "juniper", "serde", "serde_json", - "thiserror", + "thiserror 1.0.60", "tokio", "warp", ] @@ -3176,7 +3578,7 @@ dependencies = [ [[package]] name = "kimchi" version = "0.1.0" -source = "git+https://github.com/openmina/proof-systems?rev=c478b19#c478b197ddb7fcefee87c4cfdc097a217a855086" +source = "git+https://github.com/openmina/proof-systems?rev=dec49a9#dec49a95fd483d8a90b7b602d13bfa8ea9485491" dependencies = [ "ark-ec", "ark-ff", @@ -3205,7 +3607,7 @@ dependencies = [ "serde_with 1.14.0", "strum 0.24.1", "strum_macros 0.24.3", - "thiserror", + "thiserror 1.0.60", "turshi", ] @@ -3232,7 +3634,7 @@ checksum = "884e2677b40cc8c339eaefcb701c32ef1fd2493d71118dc0ca4b6a736c93bd67" [[package]] name = "ledger-tool" -version = "0.13.0" 
+version = "0.14.0" dependencies = [ "anyhow", "mina-curves", @@ -3240,7 +3642,7 @@ dependencies = [ "mina-signer", "mina-tree", "node", - "reqwest", + "reqwest 0.11.24", "serde", "serde_json", "structopt", @@ -3248,9 +3650,9 @@ dependencies = [ [[package]] name = "libc" -version = "0.2.155" +version = "0.2.169" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97b3888a4aecf77e811145cadf6eef5901f4782c53886191b2f693f24761847c" +checksum = "b5aba8db14291edd000dfcc4d620c7ebfb122c613afb886ca8803fa4e128a20a" [[package]] name = "libloading" @@ -3259,7 +3661,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fc2f4eb4bc735547cfed7c0a4922cbd04a4655978c09b54f1f7b228750664c34" dependencies = [ "cfg-if", - "windows-targets 0.52.0", + "windows-targets 0.52.6", ] [[package]] @@ -3297,9 +3699,9 @@ dependencies = [ "libp2p-upnp", "libp2p-yamux", "multiaddr", - "pin-project", + "pin-project 1.1.5", "rw-stream-sink", - "thiserror", + "thiserror 1.0.60", ] [[package]] @@ -3341,13 +3743,13 @@ dependencies = [ "multistream-select", "once_cell", "parking_lot 0.12.1", - "pin-project", + "pin-project 1.1.5", "quick-protobuf", "rand", "rw-stream-sink", "serde", - "smallvec", - "thiserror", + "smallvec 1.13.2", + "thiserror 1.0.60", "unsigned-varint 0.7.2", "void", ] @@ -3363,7 +3765,7 @@ dependencies = [ "libp2p-identity", "log", "parking_lot 0.12.1", - "smallvec", + "smallvec 1.13.2", "trust-dns-resolver", ] @@ -3394,7 +3796,7 @@ dependencies = [ "regex", "serde", "sha2 0.10.8", - "smallvec", + "smallvec 1.13.2", "unsigned-varint 0.7.2", "void", ] @@ -3416,8 +3818,8 @@ dependencies = [ "lru", "quick-protobuf", "quick-protobuf-codec", - "smallvec", - "thiserror", + "smallvec 1.13.2", + "thiserror 1.0.60", "void", ] @@ -3436,7 +3838,7 @@ dependencies = [ "rand", "serde", "sha2 0.10.8", - "thiserror", + "thiserror 1.0.60", "zeroize", ] @@ -3462,8 +3864,8 @@ dependencies = [ "rand", "serde", "sha2 0.10.8", - "smallvec", - "thiserror", 
+ "smallvec 1.13.2", + "thiserror 1.0.60", "uint", "unsigned-varint 0.7.2", "void", @@ -3482,7 +3884,7 @@ dependencies = [ "libp2p-swarm", "log", "rand", - "smallvec", + "smallvec 1.13.2", "socket2 0.5.5", "tokio", "trust-dns-proto", @@ -3525,7 +3927,7 @@ dependencies = [ "sha2 0.10.8", "snow", "static_assertions", - "thiserror", + "thiserror 1.0.60", "x25519-dalek", "zeroize", ] @@ -3537,7 +3939,7 @@ source = "git+https://github.com/openmina/rust-libp2p?rev=5c44c7d9#5c44c7d953f50 dependencies = [ "futures", "log", - "pin-project", + "pin-project 1.1.5", "rand", "salsa20", "sha3", @@ -3557,18 +3959,18 @@ dependencies = [ "libp2p-tls", "log", "parking_lot 0.12.1", - "quinn", + "quinn 0.10.2", "rand", "ring 0.16.20", - "rustls 0.23.12", + "rustls 0.23.21", "socket2 0.5.5", - "thiserror", + "thiserror 1.0.60", "tokio", ] [[package]] name = "libp2p-rpc-behaviour" -version = "0.13.0" +version = "0.14.0" dependencies = [ "libp2p", "log", @@ -3592,7 +3994,7 @@ dependencies = [ "multistream-select", "once_cell", "rand", - "smallvec", + "smallvec 1.13.2", "tokio", "void", ] @@ -3604,9 +4006,9 @@ source = "git+https://github.com/openmina/rust-libp2p?rev=5c44c7d9#5c44c7d953f50 dependencies = [ "heck 0.4.1", "proc-macro-warning", - "proc-macro2", - "quote", - "syn 2.0.58", + "proc-macro2 1.0.93", + "quote 1.0.35", + "syn 2.0.96", ] [[package]] @@ -3638,7 +4040,7 @@ dependencies = [ "ring 0.16.20", "rustls 0.21.12", "rustls-webpki 0.101.7", - "thiserror", + "thiserror 1.0.60", "x509-parser 0.15.1", "yasna", ] @@ -3666,15 +4068,15 @@ dependencies = [ "futures", "libp2p-core", "log", - "thiserror", + "thiserror 1.0.60", "yamux", ] [[package]] name = "libsqlite3-sys" -version = "0.27.0" +version = "0.30.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf4e226dcd58b4be396f7bd3c20da8fdee2911400705297ba7d2d7cc2c30f716" +checksum = "2e99fb7a497b1e3339bc746195567ed8d3e24945ecd636e3619d20b9de9e9149" dependencies = [ "cc", "pkg-config", @@ -3713,9 +4115,9 
@@ version = "0.3.25" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "adf157a4dc5a29b7b464aa8fe7edeff30076e07e13646a1c3874f58477dc99f8" dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.58", + "proc-macro2 1.0.93", + "quote 1.0.35", + "syn 2.0.96", ] [[package]] @@ -3726,9 +4128,15 @@ checksum = "ef53942eb7bf7ff43a617b3e2c1c4a5ecf5944a7c1bc12d7ee39bbb15e5c1519" [[package]] name = "linux-raw-sys" -version = "0.4.10" +version = "0.4.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78b3ae25bc7c8c38cec158d1f2757ee79e9b3740fbc7ccf0e59e4b08d793fa89" + +[[package]] +name = "litemap" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da2479e8c062e40bf0066ffa0bc823de0a9368974af99c9f6df941d2c231e03f" +checksum = "4ee93343901ab17bd981295f2cf0026d4ad018c7c31ba84549a4ddbb47a45104" [[package]] name = "local-ip-address" @@ -3738,7 +4146,7 @@ checksum = "136ef34e18462b17bf39a7826f8f3bbc223341f8e83822beb8b77db9a3d49696" dependencies = [ "libc", "neli", - "thiserror", + "thiserror 1.0.60", "windows-sys 0.48.0", ] @@ -3764,7 +4172,7 @@ version = "0.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1efa59af2ddfad1854ae27d75009d538d0998b4b2fd47083e743ac1a10e46c60" dependencies = [ - "hashbrown 0.14.2", + "hashbrown 0.14.5", ] [[package]] @@ -3856,7 +4264,7 @@ dependencies = [ [[package]] name = "mina-curves" version = "0.1.0" -source = "git+https://github.com/openmina/proof-systems?rev=c478b19#c478b197ddb7fcefee87c4cfdc097a217a855086" +source = "git+https://github.com/openmina/proof-systems?rev=dec49a9#dec49a95fd483d8a90b7b602d13bfa8ea9485491" dependencies = [ "ark-ec", "ark-ff", @@ -3865,7 +4273,7 @@ dependencies = [ [[package]] name = "mina-hasher" version = "0.1.0" -source = "git+https://github.com/openmina/proof-systems?rev=c478b19#c478b197ddb7fcefee87c4cfdc097a217a855086" +source = 
"git+https://github.com/openmina/proof-systems?rev=dec49a9#dec49a95fd483d8a90b7b602d13bfa8ea9485491" dependencies = [ "ark-ff", "bitvec", @@ -3890,6 +4298,8 @@ dependencies = [ "clap 4.5.20", "derive_more", "fuzzcheck", + "graphannis-malloc_size_of", + "graphannis-malloc_size_of_derive", "hex", "lazy_static", "mina-curves", @@ -3907,7 +4317,7 @@ dependencies = [ "sha2 0.10.8", "strum 0.26.2", "strum_macros 0.26.4", - "thiserror", + "thiserror 1.0.60", "time", "toml", "wasm-bindgen", @@ -3918,7 +4328,7 @@ dependencies = [ [[package]] name = "mina-poseidon" version = "0.1.0" -source = "git+https://github.com/openmina/proof-systems?rev=c478b19#c478b197ddb7fcefee87c4cfdc097a217a855086" +source = "git+https://github.com/openmina/proof-systems?rev=dec49a9#dec49a95fd483d8a90b7b602d13bfa8ea9485491" dependencies = [ "ark-ec", "ark-ff", @@ -3935,7 +4345,7 @@ dependencies = [ [[package]] name = "mina-signer" version = "0.1.0" -source = "git+https://github.com/openmina/proof-systems?rev=c478b19#c478b197ddb7fcefee87c4cfdc097a217a855086" +source = "git+https://github.com/openmina/proof-systems?rev=dec49a9#dec49a95fd483d8a90b7b602d13bfa8ea9485491" dependencies = [ "ark-ec", "ark-ff", @@ -3948,12 +4358,12 @@ dependencies = [ "o1-utils", "rand", "sha2 0.10.8", - "thiserror", + "thiserror 1.0.60", ] [[package]] name = "mina-transport" -version = "0.13.0" +version = "0.14.0" dependencies = [ "blake2", "hex", @@ -3964,7 +4374,7 @@ dependencies = [ [[package]] name = "mina-tree" -version = "0.13.0" +version = "0.14.0" dependencies = [ "anyhow", "ark-ec", @@ -4005,7 +4415,7 @@ dependencies = [ "rand_seeder", "rayon", "redux", - "reqwest", + "reqwest 0.11.24", "rsa", "serde", "serde_json", @@ -4013,7 +4423,7 @@ dependencies = [ "sha2 0.10.8", "strum 0.26.2", "strum_macros 0.26.4", - "thiserror", + "thiserror 1.0.60", "tuple-map", "uuid", "wasm-bindgen", @@ -4050,6 +4460,18 @@ dependencies = [ "windows-sys 0.48.0", ] +[[package]] +name = "mio" +version = "1.0.3" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "2886843bf800fba2e3377cff24abf6379b4c4d5c6681eaf9ea5b0d15090450bd" +dependencies = [ + "libc", + "log", + "wasi", + "windows-sys 0.52.0", +] + [[package]] name = "multer" version = "2.1.0" @@ -4134,8 +4556,8 @@ checksum = "fc076939022111618a5026d3be019fd8b366e76314538ff9a1b59ffbcbf98bcd" dependencies = [ "proc-macro-crate", "proc-macro-error", - "proc-macro2", - "quote", + "proc-macro2 1.0.93", + "quote 1.0.35", "syn 1.0.109", "synstructure 0.12.6", ] @@ -4154,8 +4576,8 @@ dependencies = [ "bytes", "futures", "log", - "pin-project", - "smallvec", + "pin-project 1.1.5", + "smallvec 1.13.2", "unsigned-varint 0.7.2", ] @@ -4172,7 +4594,7 @@ dependencies = [ "openssl-probe", "openssl-sys", "schannel", - "security-framework", + "security-framework 2.9.2", "security-framework-sys", "tempfile", ] @@ -4196,8 +4618,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c168194d373b1e134786274020dae7fc5513d565ea2ebb9bc9ff17ffb69106d4" dependencies = [ "either", - "proc-macro2", - "quote", + "proc-macro2 1.0.93", + "quote 1.0.35", "serde", "syn 1.0.109", ] @@ -4237,7 +4659,7 @@ dependencies = [ "anyhow", "byteorder", "paste", - "thiserror", + "thiserror 1.0.60", ] [[package]] @@ -4251,7 +4673,7 @@ dependencies = [ "log", "netlink-packet-core", "netlink-sys", - "thiserror", + "thiserror 1.0.60", "tokio", ] @@ -4298,18 +4720,23 @@ version = "0.27.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2eb04e9c688eff1c89d72b407f168cf79bb9e867a9d3323ed6c01519eb9cc053" dependencies = [ - "bitflags 2.4.1", + "bitflags 2.8.0", "cfg-if", "libc", ] [[package]] name = "node" -version = "0.13.0" +version = "0.14.0" dependencies = [ "anyhow", "ark-ff", + "base64 0.22.0", + "blake2", "derive_more", + "graphannis-malloc_size_of", + "graphannis-malloc_size_of_derive", + "hex", "lazy_static", "linkme", "mina-hasher", @@ -4334,7 +4761,7 @@ dependencies = [ 
"static_assertions", "strum 0.26.2", "strum_macros 0.26.4", - "thiserror", + "thiserror 1.0.60", "time", "tokio", "vergen", @@ -4417,7 +4844,7 @@ dependencies = [ "num-iter", "num-traits", "rand", - "smallvec", + "smallvec 1.13.2", "zeroize", ] @@ -4443,8 +4870,8 @@ version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "876a53fff98e03a936a674b29568b0e605f06b29372c2489ff4de23f1949743d" dependencies = [ - "proc-macro2", - "quote", + "proc-macro2 1.0.93", + "quote 1.0.35", "syn 1.0.109", ] @@ -4524,8 +4951,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dcbff9bc912032c62bf65ef1d5aea88983b420f4f839db1e9b0c281a25c9c799" dependencies = [ "proc-macro-crate", - "proc-macro2", - "quote", + "proc-macro2 1.0.93", + "quote 1.0.35", "syn 1.0.109", ] @@ -4536,9 +4963,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "96667db765a921f7b295ffee8b60472b686a51d4f21c2ee4ffdb94c7013b65a6" dependencies = [ "proc-macro-crate", - "proc-macro2", - "quote", - "syn 2.0.58", + "proc-macro2 1.0.93", + "quote 1.0.35", + "syn 2.0.96", ] [[package]] @@ -4553,7 +4980,7 @@ dependencies = [ [[package]] name = "o1-utils" version = "0.1.0" -source = "git+https://github.com/openmina/proof-systems?rev=c478b19#c478b197ddb7fcefee87c4cfdc097a217a855086" +source = "git+https://github.com/openmina/proof-systems?rev=dec49a9#dec49a95fd483d8a90b7b602d13bfa8ea9485491" dependencies = [ "ark-ec", "ark-ff", @@ -4570,7 +4997,7 @@ dependencies = [ "serde", "serde_with 1.14.0", "sha2 0.10.8", - "thiserror", + "thiserror 1.0.60", ] [[package]] @@ -4660,11 +5087,26 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5" +[[package]] +name = "openmina-archive-breadcrumb-compare" +version = "0.14.0" +dependencies = [ + "anyhow", + "binprot", + "clap 4.5.20", + "mina-p2p-messages", + "reqwest 0.11.24", + "serde", 
+ "serde_json", + "similar", + "tokio", +] + [[package]] name = "openmina-bootstrap-sandbox" -version = "0.13.0" +version = "0.14.0" dependencies = [ - "base64 0.21.7", + "base64 0.22.0", "binprot", "bs58 0.5.0", "env_logger", @@ -4681,13 +5123,13 @@ dependencies = [ "serde", "serde_json", "structopt", - "thiserror", + "thiserror 1.0.60", "tokio", ] [[package]] name = "openmina-core" -version = "0.13.0" +version = "0.14.0" dependencies = [ "argon2", "ark-ff", @@ -4696,6 +5138,8 @@ dependencies = [ "binprot_derive", "bs58 0.4.0", "crypto_secretbox", + "graphannis-malloc_size_of", + "graphannis-malloc_size_of_derive", "hex", "js-sys", "lazy_static", @@ -4713,7 +5157,7 @@ dependencies = [ "serde_json", "sha2 0.10.8", "slab", - "thiserror", + "thiserror 1.0.60", "time", "tokio", "tracing", @@ -4725,7 +5169,7 @@ dependencies = [ [[package]] name = "openmina-fuzzer" -version = "0.13.0" +version = "0.14.0" dependencies = [ "lazy_static", "rand", @@ -4736,7 +5180,7 @@ dependencies = [ [[package]] name = "openmina-gossipsub-sandbox" -version = "0.13.0" +version = "0.14.0" dependencies = [ "bs58 0.5.0", "env_logger", @@ -4750,20 +5194,20 @@ dependencies = [ [[package]] name = "openmina-macros" -version = "0.13.0" +version = "0.14.0" dependencies = [ "anyhow", "openmina-core", - "proc-macro2", - "quote", + "proc-macro2 1.0.93", + "quote 1.0.35", "rust-format", - "syn 2.0.58", + "syn 2.0.96", "tracing", ] [[package]] name = "openmina-node-account" -version = "0.13.0" +version = "0.14.0" dependencies = [ "anyhow", "bs58 0.4.0", @@ -4776,14 +5220,16 @@ dependencies = [ "rand", "serde", "serde_json", - "thiserror", + "thiserror 1.0.60", ] [[package]] name = "openmina-node-common" -version = "0.13.0" +version = "0.14.0" dependencies = [ "ark-ff", + "binprot", + "binprot_derive", "gloo-timers", "gloo-utils", "jsonpath-rust", @@ -4791,16 +5237,18 @@ dependencies = [ "mina-p2p-messages", "mina-signer", "mina-tree", + "mio 1.0.3", "node", "openmina-core", "rand", "rayon", "redux", + 
"reqwest 0.12.12", "rsa", "serde", "serde_json", "sha3", - "thiserror", + "thiserror 1.0.60", "tokio", "tracing", "tracing-appender", @@ -4813,7 +5261,7 @@ dependencies = [ [[package]] name = "openmina-node-invariants" -version = "0.13.0" +version = "0.14.0" dependencies = [ "documented", "lazy_static", @@ -4827,7 +5275,7 @@ dependencies = [ [[package]] name = "openmina-node-native" -version = "0.13.0" +version = "0.14.0" dependencies = [ "anyhow", "bs58 0.4.0", @@ -4845,16 +5293,17 @@ dependencies = [ "node", "openmina-core", "openmina-node-common", + "openmina-producer-dashboard", "rand", "rayon", "redux", - "reqwest", + "reqwest 0.11.24", "serde", "serde_json", "sha3", "strum 0.26.2", "strum_macros 0.26.4", - "thiserror", + "thiserror 1.0.60", "tokio", "tracing", "tracing-subscriber", @@ -4864,7 +5313,7 @@ dependencies = [ [[package]] name = "openmina-node-testing" -version = "0.13.0" +version = "0.14.0" dependencies = [ "anyhow", "axum", @@ -4893,13 +5342,13 @@ dependencies = [ "rand", "rayon", "redux", - "reqwest", + "reqwest 0.11.24", "serde", "serde_json", "strum 0.26.2", "strum_macros 0.26.4", "temp-dir", - "thiserror", + "thiserror 1.0.60", "time", "tokio", "tower-http", @@ -4910,7 +5359,7 @@ dependencies = [ [[package]] name = "openmina-node-web" -version = "0.13.0" +version = "0.14.0" dependencies = [ "anyhow", "bytes", @@ -4930,7 +5379,7 @@ dependencies = [ "redux", "serde", "serde_json", - "thiserror", + "thiserror 1.0.60", "vrf", "wasm-bindgen", "wasm-bindgen-futures", @@ -4938,7 +5387,7 @@ dependencies = [ [[package]] name = "openmina-producer-dashboard" -version = "0.13.0" +version = "0.14.0" dependencies = [ "bincode", "clap 4.5.20", @@ -4948,12 +5397,12 @@ dependencies = [ "num-bigint", "num-traits", "openmina-node-account", - "reqwest", + "reqwest 0.11.24", "serde", "serde_json", "sled", "sqlx", - "thiserror", + "thiserror 1.0.60", "time", "tokio", "vrf", @@ -4966,7 +5415,7 @@ version = "0.10.60" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "79a4c6c3a2b158f7f8f2a2fc5a969fa3a068df6fc9dbb4a43845436e3af7c800" dependencies = [ - "bitflags 2.4.1", + "bitflags 2.8.0", "cfg-if", "foreign-types", "libc", @@ -4981,9 +5430,9 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.58", + "proc-macro2 1.0.93", + "quote 1.0.35", + "syn 2.0.96", ] [[package]] @@ -5039,14 +5488,14 @@ dependencies = [ [[package]] name = "p2p" -version = "0.13.0" +version = "0.14.0" dependencies = [ "aes-gcm 0.10.3", "anyhow", "base64 0.22.0", "binprot", "binprot_derive", - "bitflags 2.4.1", + "bitflags 2.8.0", "blake2", "bs58 0.4.0", "bytes", @@ -5062,6 +5511,8 @@ dependencies = [ "getrandom", "gloo-timers", "gloo-utils", + "graphannis-malloc_size_of", + "graphannis-malloc_size_of_derive", "hex", "hkdf", "js-sys", @@ -5069,28 +5520,28 @@ dependencies = [ "libp2p-identity", "local-ip-address", "mina-p2p-messages", - "mio", + "mio 0.8.11", "multiaddr", "multihash 0.18.1", "openmina-core", "openmina-fuzzer", "openmina-macros", "p2p-testing", - "prost", + "prost 0.12.4", "prost-build", "quick-protobuf", "rand", "redux", - "reqwest", + "reqwest 0.11.24", "salsa-simple", "serde", "serde_json", "serde_with 3.7.0", "sha2 0.10.8", - "smallvec", + "smallvec 1.13.2", "strum 0.26.2", "strum_macros 0.26.4", - "thiserror", + "thiserror 1.0.60", "tokio", "unsigned-varint 0.8.0", "url", @@ -5105,7 +5556,7 @@ dependencies = [ [[package]] name = "p2p-testing" -version = "0.13.0" +version = "0.14.0" dependencies = [ "derive_more", "futures", @@ -5120,7 +5571,7 @@ dependencies = [ "rand", "redux", "serde_json", - "thiserror", + "thiserror 1.0.60", "tokio", "tracing", "tracing-log", @@ -5176,7 +5627,7 @@ dependencies = [ "instant", "libc", "redox_syscall 0.2.16", - "smallvec", + "smallvec 1.13.2", "winapi 0.3.9", ] @@ -5189,7 
+5640,7 @@ dependencies = [ "cfg-if", "libc", "redox_syscall 0.4.1", - "smallvec", + "smallvec 1.13.2", "windows-targets 0.48.5", ] @@ -5240,9 +5691,9 @@ dependencies = [ [[package]] name = "percent-encoding" -version = "2.3.0" +version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b2a4787296e9989611394c33f193f676704af1686e70b8f8033ab5ba9a35a94" +checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" [[package]] name = "pest" @@ -5251,7 +5702,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "56f8023d0fb78c8e03784ea1c7f3fa36e68a723138990b8d5a47d916b651e7a8" dependencies = [ "memchr", - "thiserror", + "thiserror 1.0.60", "ucd-trie", ] @@ -5273,9 +5724,9 @@ checksum = "fdc17e2a6c7d0a492f0158d7a4bd66cc17280308bbaff78d5bef566dca35ab80" dependencies = [ "pest", "pest_meta", - "proc-macro2", - "quote", - "syn 2.0.58", + "proc-macro2 1.0.93", + "quote 1.0.35", + "syn 2.0.96", ] [[package]] @@ -5327,9 +5778,9 @@ checksum = "3444646e286606587e49f3bcf1679b8cef1dc2c5ecc29ddacaffc305180d464b" dependencies = [ "phf_generator", "phf_shared", - "proc-macro2", - "quote", - "syn 2.0.58", + "proc-macro2 1.0.93", + "quote 1.0.35", + "syn 2.0.96", ] [[package]] @@ -5341,13 +5792,33 @@ dependencies = [ "siphasher", ] +[[package]] +name = "pin-project" +version = "0.4.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3ef0f924a5ee7ea9cbcea77529dba45f8a9ba9f622419fe3386ca581a3ae9d5a" +dependencies = [ + "pin-project-internal 0.4.30", +] + [[package]] name = "pin-project" version = "1.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b6bf43b791c5b9e34c3d182969b4abb522f9343702850a2e57f460d00d09b4b3" dependencies = [ - "pin-project-internal", + "pin-project-internal 1.1.5", +] + +[[package]] +name = "pin-project-internal" +version = "0.4.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"851c8d0ce9bebe43790dedfc86614c23494ac9f423dd618d3a61fc693eafe61e" +dependencies = [ + "proc-macro2 1.0.93", + "quote 1.0.35", + "syn 1.0.109", ] [[package]] @@ -5356,9 +5827,9 @@ version = "1.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2f38a4412a78282e09a2cf38d195ea5420d15ba0602cb375210efbc877243965" dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.58", + "proc-macro2 1.0.93", + "quote 1.0.35", + "syn 2.0.96", ] [[package]] @@ -5425,7 +5896,7 @@ dependencies = [ [[package]] name = "poly-commitment" version = "0.1.0" -source = "git+https://github.com/openmina/proof-systems?rev=c478b19#c478b197ddb7fcefee87c4cfdc097a217a855086" +source = "git+https://github.com/openmina/proof-systems?rev=dec49a9#dec49a95fd483d8a90b7b602d13bfa8ea9485491" dependencies = [ "ark-ec", "ark-ff", @@ -5444,7 +5915,8 @@ dependencies = [ "rmp-serde", "serde", "serde_with 1.14.0", - "thiserror", + "smallvec 2.0.0-alpha.9", + "thiserror 1.0.60", ] [[package]] @@ -5501,7 +5973,7 @@ checksum = "280dc24453071f1b63954171985a0b0d30058d287960968b9b2aca264c8d4ee6" [[package]] name = "poseidon" -version = "0.13.0" +version = "0.14.0" dependencies = [ "ark-ff", "mina-curves", @@ -5539,8 +6011,8 @@ version = "0.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8d3928fb5db768cb86f891ff014f0144589297e3c6a1aba6ed7cecfdace270c7" dependencies = [ - "proc-macro2", - "syn 2.0.58", + "proc-macro2 1.0.93", + "syn 2.0.96", ] [[package]] @@ -5569,8 +6041,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c" dependencies = [ "proc-macro-error-attr", - "proc-macro2", - "quote", + "proc-macro2 1.0.93", + "quote 1.0.35", "syn 1.0.109", "version_check", ] @@ -5581,8 +6053,8 @@ version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869" dependencies = [ 
- "proc-macro2", - "quote", + "proc-macro2 1.0.93", + "quote 1.0.35", "version_check", ] @@ -5592,16 +6064,25 @@ version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9b698b0b09d40e9b7c1a47b132d66a8b54bcd20583d9b6d06e4535e383b4405c" dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.58", + "proc-macro2 1.0.93", + "quote 1.0.35", + "syn 2.0.96", +] + +[[package]] +name = "proc-macro2" +version = "0.4.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cf3d2011ab5c909338f7887f4fc896d35932e29146c12c8d01da6b22a80ba759" +dependencies = [ + "unicode-xid 0.1.0", ] [[package]] name = "proc-macro2" -version = "1.0.79" +version = "1.0.93" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e835ff2298f5721608eb1a980ecaee1aef2c132bf95ecc026a11b7bf3c01c02e" +checksum = "60946a68e5f9d28b0dc1c21bb8a97ee7d018a8b322fa57838ba31cc878e22d99" dependencies = [ "unicode-ident", ] @@ -5624,9 +6105,9 @@ version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "440f724eba9f6996b75d63681b0a92b06947f1457076d503a4d2e2c8f56442b8" dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.58", + "proc-macro2 1.0.93", + "quote 1.0.35", + "syn 2.0.96", ] [[package]] @@ -5636,7 +6117,17 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d0f5d036824e4761737860779c906171497f6d55681139d8312388f8fe398922" dependencies = [ "bytes", - "prost-derive", + "prost-derive 0.12.5", +] + +[[package]] +name = "prost" +version = "0.13.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2c0fef6c4230e4ccf618a35c59d7ede15dea37de8427500f50aff708806e42ec" +dependencies = [ + "bytes", + "prost-derive 0.13.4", ] [[package]] @@ -5653,10 +6144,10 @@ dependencies = [ "once_cell", "petgraph", "prettyplease", - "prost", - "prost-types", + "prost 0.12.4", + "prost-types 0.12.4", "regex", - "syn 2.0.58", + "syn 2.0.96", "tempfile", ] 
@@ -5668,9 +6159,22 @@ checksum = "9554e3ab233f0a932403704f1a1d08c30d5ccd931adfdfa1e8b5a19b52c1d55a" dependencies = [ "anyhow", "itertools 0.12.0", - "proc-macro2", - "quote", - "syn 2.0.58", + "proc-macro2 1.0.93", + "quote 1.0.35", + "syn 2.0.96", +] + +[[package]] +name = "prost-derive" +version = "0.13.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "157c5a9d7ea5c2ed2d9fb8f495b64759f7816c7eaea54ba3978f0d63000162e3" +dependencies = [ + "anyhow", + "itertools 0.12.0", + "proc-macro2 1.0.93", + "quote 1.0.35", + "syn 2.0.96", ] [[package]] @@ -5679,7 +6183,16 @@ version = "0.12.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3235c33eb02c1f1e212abdbe34c78b264b038fb58ca612664343271e36e55ffe" dependencies = [ - "prost", + "prost 0.12.4", +] + +[[package]] +name = "prost-types" +version = "0.13.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc2f1e56baa61e93533aebc21af4d2134b70f66275e0fcdf3cbe43d77ff7e8fc" +dependencies = [ + "prost 0.13.4", ] [[package]] @@ -5705,7 +6218,7 @@ dependencies = [ "asynchronous-codec", "bytes", "quick-protobuf", - "thiserror", + "thiserror 1.0.60", "unsigned-varint 0.7.2", ] @@ -5718,11 +6231,29 @@ dependencies = [ "bytes", "futures-io", "pin-project-lite", - "quinn-proto", - "quinn-udp", - "rustc-hash", + "quinn-proto 0.10.5", + "quinn-udp 0.4.1", + "rustc-hash 1.1.0", "rustls 0.21.12", - "thiserror", + "thiserror 1.0.60", + "tokio", + "tracing", +] + +[[package]] +name = "quinn" +version = "0.11.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62e96808277ec6f97351a2380e6c25114bc9e67037775464979f3037c92d05ef" +dependencies = [ + "bytes", + "pin-project-lite", + "quinn-proto 0.11.9", + "quinn-udp 0.5.9", + "rustc-hash 2.1.0", + "rustls 0.23.21", + "socket2 0.5.5", + "thiserror 2.0.11", "tokio", "tracing", ] @@ -5736,12 +6267,32 @@ dependencies = [ "bytes", "rand", "ring 0.16.20", - "rustc-hash", + "rustc-hash 
1.1.0", "rustls 0.21.12", "slab", - "thiserror", + "thiserror 1.0.60", + "tinyvec", + "tracing", +] + +[[package]] +name = "quinn-proto" +version = "0.11.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a2fe5ef3495d7d2e377ff17b1a8ce2ee2ec2a18cde8b6ad6619d65d0701c135d" +dependencies = [ + "bytes", + "getrandom", + "rand", + "ring 0.17.8", + "rustc-hash 2.1.0", + "rustls 0.23.21", + "rustls-pki-types", + "slab", + "thiserror 2.0.11", "tinyvec", "tracing", + "web-time", ] [[package]] @@ -5758,16 +6309,39 @@ dependencies = [ ] [[package]] -name = "quote" -version = "1.0.35" +name = "quinn-udp" +version = "0.5.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "291ec9ab5efd934aaf503a6466c5d5251535d108ee747472c3977cc5acc868ef" +checksum = "1c40286217b4ba3a71d644d752e6a0b71f13f1b6a2c5311acfcbe0c2418ed904" dependencies = [ - "proc-macro2", + "cfg_aliases", + "libc", + "once_cell", + "socket2 0.5.5", + "tracing", + "windows-sys 0.52.0", ] [[package]] -name = "radium" +name = "quote" +version = "0.6.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ce23b6b870e8f94f81fb0a363d65d86675884b34a09043c81e5562f11c1f8e1" +dependencies = [ + "proc-macro2 0.4.30", +] + +[[package]] +name = "quote" +version = "1.0.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "291ec9ab5efd934aaf503a6466c5d5251535d108ee747472c3977cc5acc868ef" +dependencies = [ + "proc-macro2 1.0.93", +] + +[[package]] +name = "radium" version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dc33ff2d4973d518d823d61aa239014831e521c75da58e3df4840d3f47749d09" @@ -5885,15 +6459,6 @@ dependencies = [ "bitflags 1.3.2", ] -[[package]] -name = "redox_syscall" -version = "0.3.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "567664f262709473930a4bf9e51bf2ebf3348f2e748ccc50dea20646858f8f29" -dependencies = [ - "bitflags 1.3.2", -] - 
[[package]] name = "redox_syscall" version = "0.4.1" @@ -5911,7 +6476,7 @@ checksum = "b033d837a7cf162d7993aded9304e30a83213c648b6e389db233191f891e5c2b" dependencies = [ "getrandom", "redox_syscall 0.2.16", - "thiserror", + "thiserror 1.0.60", ] [[package]] @@ -5972,7 +6537,7 @@ checksum = "c08c74e62047bb2de4ff487b251e4a92e24f48745648451635cec7d591162d9f" [[package]] name = "replay_dynamic_effects" -version = "0.13.0" +version = "0.14.0" dependencies = [ "node", "openmina-node-invariants", @@ -5990,11 +6555,11 @@ dependencies = [ "encoding_rs", "futures-core", "futures-util", - "h2", + "h2 0.3.24", "http 0.2.9", "http-body 0.4.5", "hyper 0.14.27", - "hyper-tls", + "hyper-tls 0.5.0", "ipnet", "js-sys", "log", @@ -6003,12 +6568,12 @@ dependencies = [ "once_cell", "percent-encoding", "pin-project-lite", - "rustls-pemfile", + "rustls-pemfile 1.0.3", "serde", "serde_json", "serde_urlencoded", "sync_wrapper 0.1.2", - "system-configuration", + "system-configuration 0.5.1", "tokio", "tokio-native-tls", "tower-service", @@ -6019,6 +6584,60 @@ dependencies = [ "winreg", ] +[[package]] +name = "reqwest" +version = "0.12.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "43e734407157c3c2034e0258f5e4473ddb361b1e85f95a66690d67264d7cd1da" +dependencies = [ + "async-compression", + "base64 0.22.0", + "bytes", + "encoding_rs", + "futures-channel", + "futures-core", + "futures-util", + "h2 0.4.7", + "http 1.1.0", + "http-body 1.0.1", + "http-body-util", + "hyper 1.5.0", + "hyper-rustls", + "hyper-tls 0.6.0", + "hyper-util", + "ipnet", + "js-sys", + "log", + "mime", + "mime_guess", + "native-tls", + "once_cell", + "percent-encoding", + "pin-project-lite", + "quinn 0.11.6", + "rustls 0.23.21", + "rustls-native-certs", + "rustls-pemfile 2.2.0", + "rustls-pki-types", + "serde", + "serde_json", + "serde_urlencoded", + "sync_wrapper 1.0.1", + "system-configuration 0.6.1", + "tokio", + "tokio-native-tls", + "tokio-rustls", + "tokio-util", + "tower 0.5.2", + 
"tower-service", + "url", + "wasm-bindgen", + "wasm-bindgen-futures", + "wasm-streams", + "web-sys", + "windows-registry", +] + [[package]] name = "resolv-conf" version = "0.7.0" @@ -6117,6 +6736,17 @@ dependencies = [ "zeroize", ] +[[package]] +name = "rsb_derive" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b2c53e42fccdc5f1172e099785fe78f89bc0c1e657d0c2ef591efbfac427e9a4" +dependencies = [ + "proc-macro2 1.0.93", + "quote 1.0.35", + "syn 1.0.109", +] + [[package]] name = "rsexp" version = "0.2.3" @@ -6129,8 +6759,8 @@ version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e0726130f64c2f613129d9870fa803071173f317f92bfe924d2d21252c4b3403" dependencies = [ - "proc-macro2", - "quote", + "proc-macro2 1.0.93", + "quote 1.0.35", "syn 1.0.109", ] @@ -6153,7 +6783,7 @@ version = "0.11.0" source = "git+https://github.com/openmina/webrtc.git?rev=e8705db39af1b198b324a5db6ff57fb213ba75e9#e8705db39af1b198b324a5db6ff57fb213ba75e9" dependencies = [ "bytes", - "thiserror", + "thiserror 1.0.60", "webrtc-util", ] @@ -6168,7 +6798,7 @@ dependencies = [ "netlink-packet-route", "netlink-proto", "nix 0.24.3", - "thiserror", + "thiserror 1.0.60", "tokio", ] @@ -6181,7 +6811,7 @@ dependencies = [ "portable-atomic", "rand", "serde", - "thiserror", + "thiserror 1.0.60", "webrtc-util", ] @@ -6191,7 +6821,7 @@ version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "60e7c00b6c3bf5e38a880eec01d7e829d12ca682079f8238a464def3c4b31627" dependencies = [ - "proc-macro2", + "proc-macro2 1.0.93", ] [[package]] @@ -6206,6 +6836,12 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" +[[package]] +name = "rustc-hash" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"c7fb8039b3032c191086b10f11f319a6e99e1e82889c5cc6046f515c9db1d497" + [[package]] name = "rustc_version" version = "0.3.3" @@ -6249,15 +6885,15 @@ dependencies = [ [[package]] name = "rustix" -version = "0.38.20" +version = "0.38.42" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "67ce50cb2e16c2903e30d1cbccfd8387a74b9d4c938b6a4c5ec6cc7556f7a8a0" +checksum = "f93dc38ecbab2eb790ff964bb77fa94faf256fd3e73285fd7ba0903b76bedb85" dependencies = [ - "bitflags 2.4.1", + "bitflags 2.8.0", "errno", "libc", - "linux-raw-sys 0.4.10", - "windows-sys 0.48.0", + "linux-raw-sys 0.4.14", + "windows-sys 0.52.0", ] [[package]] @@ -6274,18 +6910,31 @@ dependencies = [ [[package]] name = "rustls" -version = "0.23.12" +version = "0.23.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c58f8c84392efc0a126acce10fa59ff7b3d2ac06ab451a33f2741989b806b044" +checksum = "8f287924602bf649d949c63dc8ac8b235fa5387d394020705b80c4eb597ce5b8" dependencies = [ + "log", "once_cell", "ring 0.17.8", "rustls-pki-types", - "rustls-webpki 0.102.6", + "rustls-webpki 0.102.8", "subtle", "zeroize", ] +[[package]] +name = "rustls-native-certs" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7fcff2dd52b58a8d98a70243663a0d234c4e2b79235637849d15913394a247d3" +dependencies = [ + "openssl-probe", + "rustls-pki-types", + "schannel", + "security-framework 3.2.0", +] + [[package]] name = "rustls-pemfile" version = "1.0.3" @@ -6295,11 +6944,23 @@ dependencies = [ "base64 0.21.7", ] +[[package]] +name = "rustls-pemfile" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dce314e5fee3f39953d46bb63bb8a46d40c2f8fb7cc5a3b6cab2bde9721d6e50" +dependencies = [ + "rustls-pki-types", +] + [[package]] name = "rustls-pki-types" -version = "1.8.0" +version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"fc0a2ce646f8655401bb81e7927b812614bd5d91dbc968696be50603510fcaf0" +checksum = "917ce264624a4b4db1c364dcc35bfca9ded014d0a958cd47ad3e960e988ea51c" +dependencies = [ + "web-time", +] [[package]] name = "rustls-webpki" @@ -6313,9 +6974,9 @@ dependencies = [ [[package]] name = "rustls-webpki" -version = "0.102.6" +version = "0.102.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e6b52d4fda176fd835fdc55a835d4a89b8499cad995885a21149d5ad62f852e" +checksum = "64ca1bc8749bd4cf37b5ce386cc146580777b4e8572c7b97baf22c83f444bee9" dependencies = [ "ring 0.17.8", "rustls-pki-types", @@ -6337,13 +6998,33 @@ dependencies = [ "twox-hash", ] +[[package]] +name = "rvs_derive" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e1fa12378eb54f3d4f2db8dcdbe33af610b7e7d001961c1055858282ecef2a5" +dependencies = [ + "proc-macro2 1.0.93", + "quote 1.0.35", + "syn 1.0.109", +] + +[[package]] +name = "rvstruct" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5107860ec34506b64cf3680458074eac5c2c564f7ccc140918bbcd1714fd8d5d" +dependencies = [ + "rvs_derive", +] + [[package]] name = "rw-stream-sink" version = "0.4.0" source = "git+https://github.com/openmina/rust-libp2p?rev=5c44c7d9#5c44c7d953f50134a00ded1f9924fa2edd5e09b9" dependencies = [ "futures", - "pin-project", + "pin-project 1.1.5", "static_assertions", ] @@ -6355,7 +7036,7 @@ checksum = "1ad4cc8da4ef723ed60bced201181d83791ad433213d8c24efffda1eec85d741" [[package]] name = "salsa-simple" -version = "0.13.0" +version = "0.14.0" dependencies = [ "generic-array", "hex", @@ -6414,7 +7095,7 @@ source = "git+https://github.com/openmina/webrtc.git?rev=e8705db39af1b198b324a5d dependencies = [ "rand", "substring", - "thiserror", + "thiserror 1.0.60", "url", ] @@ -6432,6 +7113,19 @@ dependencies = [ "zeroize", ] +[[package]] +name = "secret-vault-value" +version = "0.3.8" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "f5f8cfb86d2019f64a4cfb49e499f401f406fbec946c1ffeea9d0504284347de" +dependencies = [ + "prost 0.12.4", + "prost-types 0.12.4", + "serde", + "serde_json", + "zeroize", +] + [[package]] name = "security-framework" version = "2.9.2" @@ -6439,7 +7133,20 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "05b64fb303737d99b81884b2c63433e9ae28abebe5eb5045dcdd175dc2ecf4de" dependencies = [ "bitflags 1.3.2", - "core-foundation", + "core-foundation 0.9.3", + "core-foundation-sys", + "libc", + "security-framework-sys", +] + +[[package]] +name = "security-framework" +version = "3.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "271720403f46ca04f7ba6f55d438f8bd878d6b8ca0a1046e8228c4145bcbb316" +dependencies = [ + "bitflags 2.8.0", + "core-foundation 0.10.0", "core-foundation-sys", "libc", "security-framework-sys", @@ -6447,9 +7154,9 @@ dependencies = [ [[package]] name = "security-framework-sys" -version = "2.9.1" +version = "2.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e932934257d3b408ed8f30db49d85ea163bfe74961f017f405b025af298f0c7a" +checksum = "49db231d56a190491cb4aeda9527f1ad45345af50b0851622a7adb8c03b01c32" dependencies = [ "core-foundation-sys", "libc", @@ -6503,9 +7210,9 @@ version = "1.0.190" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "67c5609f394e5c2bd7fc51efda478004ea80ef42fee983d5c67a65e34f32c0e3" dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.58", + "proc-macro2 1.0.93", + "quote 1.0.35", + "syn 2.0.96", ] [[package]] @@ -6577,8 +7284,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e182d6ec6f05393cc0e5ed1bf81ad6db3a8feedf8ee515ecdd369809bcce8082" dependencies = [ "darling 0.13.4", - "proc-macro2", - "quote", + "proc-macro2 1.0.93", + "quote 1.0.35", "syn 1.0.109", ] @@ -6589,9 +7296,9 @@ source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "6561dc161a9224638a31d876ccdfefbc1df91d3f3a8342eddb35f055d48c7655" dependencies = [ "darling 0.20.6", - "proc-macro2", - "quote", - "syn 2.0.58", + "proc-macro2 1.0.93", + "quote 1.0.35", + "syn 2.0.96", ] [[package]] @@ -6709,6 +7416,24 @@ dependencies = [ "rand_core", ] +[[package]] +name = "similar" +version = "2.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbbb5d9659141646ae647b42fe094daf6c6192d1620870b449d9557f748b2daa" + +[[package]] +name = "simple_asn1" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "297f631f50729c8c99b84667867963997ec0b50f32b2a7dbcab828ef0541e8bb" +dependencies = [ + "num-bigint", + "num-traits", + "thiserror 2.0.11", + "time", +] + [[package]] name = "siphasher" version = "0.3.11" @@ -6746,6 +7471,14 @@ name = "smallvec" version = "1.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" +dependencies = [ + "serde", +] + +[[package]] +name = "smallvec" +version = "2.0.0-alpha.9" +source = "git+https://github.com/servo/rust-smallvec.git#a176a870987f61b04e001a7c4d0863fdeb427083" [[package]] name = "smartstring" @@ -6769,7 +7502,7 @@ dependencies = [ [[package]] name = "snark" -version = "0.13.0" +version = "0.14.0" dependencies = [ "ark-ec", "ark-ff", @@ -6797,7 +7530,7 @@ dependencies = [ "serde_json", "sha2 0.10.8", "strum_macros 0.26.4", - "thiserror", + "thiserror 1.0.60", "wasm-bindgen-test", ] @@ -6875,9 +7608,9 @@ dependencies = [ [[package]] name = "sqlx" -version = "0.7.4" +version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c9a2ccff1a000a5a59cd33da541d9f2fdcd9e6e8229cc200565942bff36d0aaa" +checksum = "93334716a037193fac19df402f8571269c84a00852f6a7066b5d2616dcd64d3e" dependencies = [ "sqlx-core", "sqlx-macros", @@ -6888,37 +7621,39 @@ 
dependencies = [ [[package]] name = "sqlx-core" -version = "0.7.4" +version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "24ba59a9342a3d9bab6c56c118be528b27c9b60e490080e9711a04dccac83ef6" +checksum = "d4d8060b456358185f7d50c55d9b5066ad956956fddec42ee2e8567134a8936e" dependencies = [ - "ahash 0.8.8", "atoi", "byteorder", "bytes", + "chrono", "crc", "crossbeam-queue", "either", - "event-listener", + "event-listener 5.3.1", "futures-channel", "futures-core", "futures-intrusive", "futures-io", "futures-util", + "hashbrown 0.14.5", "hashlink", "hex", "indexmap 2.0.2", "log", "memchr", + "native-tls", "once_cell", "paste", "percent-encoding", "serde", "serde_json", "sha2 0.10.8", - "smallvec", + "smallvec 1.13.2", "sqlformat", - "thiserror", + "thiserror 1.0.60", "tokio", "tokio-stream", "tracing", @@ -6927,30 +7662,30 @@ dependencies = [ [[package]] name = "sqlx-macros" -version = "0.7.4" +version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ea40e2345eb2faa9e1e5e326db8c34711317d2b5e08d0d5741619048a803127" +checksum = "cac0692bcc9de3b073e8d747391827297e075c7710ff6276d9f7a1f3d58c6657" dependencies = [ - "proc-macro2", - "quote", + "proc-macro2 1.0.93", + "quote 1.0.35", "sqlx-core", "sqlx-macros-core", - "syn 1.0.109", + "syn 2.0.96", ] [[package]] name = "sqlx-macros-core" -version = "0.7.4" +version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5833ef53aaa16d860e92123292f1f6a3d53c34ba8b1969f152ef1a7bb803f3c8" +checksum = "1804e8a7c7865599c9c79be146dc8a9fd8cc86935fa641d3ea58e5f0688abaa5" dependencies = [ "dotenvy", "either", - "heck 0.4.1", + "heck 0.5.0", "hex", "once_cell", - "proc-macro2", - "quote", + "proc-macro2 1.0.93", + "quote 1.0.35", "serde", "serde_json", "sha2 0.10.8", @@ -6958,7 +7693,7 @@ dependencies = [ "sqlx-mysql", "sqlx-postgres", "sqlx-sqlite", - "syn 1.0.109", + "syn 2.0.96", "tempfile", "tokio", "url", @@ -6966,15 +7701,16 
@@ dependencies = [ [[package]] name = "sqlx-mysql" -version = "0.7.4" +version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ed31390216d20e538e447a7a9b959e06ed9fc51c37b514b46eb758016ecd418" +checksum = "64bb4714269afa44aef2755150a0fc19d756fb580a67db8885608cf02f47d06a" dependencies = [ "atoi", - "base64 0.21.7", - "bitflags 2.4.1", + "base64 0.22.0", + "bitflags 2.8.0", "byteorder", "bytes", + "chrono", "crc", "digest 0.10.7", "dotenvy", @@ -6998,24 +7734,25 @@ dependencies = [ "serde", "sha1", "sha2 0.10.8", - "smallvec", + "smallvec 1.13.2", "sqlx-core", "stringprep", - "thiserror", + "thiserror 1.0.60", "tracing", "whoami", ] [[package]] name = "sqlx-postgres" -version = "0.7.4" +version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7c824eb80b894f926f89a0b9da0c7f435d27cdd35b8c655b114e58223918577e" +checksum = "6fa91a732d854c5d7726349bb4bb879bb9478993ceb764247660aee25f67c2f8" dependencies = [ "atoi", - "base64 0.21.7", - "bitflags 2.4.1", + "base64 0.22.0", + "bitflags 2.8.0", "byteorder", + "chrono", "crc", "dotenvy", "etcetera", @@ -7036,21 +7773,22 @@ dependencies = [ "serde", "serde_json", "sha2 0.10.8", - "smallvec", + "smallvec 1.13.2", "sqlx-core", "stringprep", - "thiserror", + "thiserror 1.0.60", "tracing", "whoami", ] [[package]] name = "sqlx-sqlite" -version = "0.7.4" +version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b244ef0a8414da0bed4bb1910426e890b19e5e9bccc27ada6b797d05c55ae0aa" +checksum = "d5b2cf34a45953bfd3daaf3db0f7a7878ab9b7a6b91b422d24a7a9e4c857b680" dependencies = [ "atoi", + "chrono", "flume", "futures-channel", "futures-core", @@ -7061,10 +7799,10 @@ dependencies = [ "log", "percent-encoding", "serde", + "serde_urlencoded", "sqlx-core", "tracing", "url", - "urlencoding", ] [[package]] @@ -7108,6 +7846,15 @@ version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"5ee073c9e4cd00e28217186dbe12796d692868f432bf2e97ee73bed0c56dfa01" +[[package]] +name = "struct-path" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "899edf28cf7320503eda593b4bbce1bc5e9533501a11d45537e2c5be90128fc7" +dependencies = [ + "convert_case 0.6.0", +] + [[package]] name = "structopt" version = "0.3.26" @@ -7127,8 +7874,8 @@ checksum = "dcb5ae327f9cc13b68763b5749770cb9e048a99bd9dfdfa58d0cf05d5f64afe0" dependencies = [ "heck 0.3.3", "proc-macro-error", - "proc-macro2", - "quote", + "proc-macro2 1.0.93", + "quote 1.0.35", "syn 1.0.109", ] @@ -7151,8 +7898,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e385be0d24f186b4ce2f9982191e7101bb737312ad61c1f2f984f34bcf85d59" dependencies = [ "heck 0.4.1", - "proc-macro2", - "quote", + "proc-macro2 1.0.93", + "quote 1.0.35", "rustversion", "syn 1.0.109", ] @@ -7164,10 +7911,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4c6bee85a5a24955dc440386795aa378cd9cf82acd5f764469152d2270e581be" dependencies = [ "heck 0.5.0", - "proc-macro2", - "quote", + "proc-macro2 1.0.93", + "quote 1.0.35", "rustversion", - "syn 2.0.58", + "syn 2.0.96", ] [[package]] @@ -7182,7 +7929,7 @@ dependencies = [ "rand", "ring 0.17.8", "subtle", - "thiserror", + "thiserror 1.0.60", "tokio", "url", "webrtc-util", @@ -7203,25 +7950,36 @@ version = "2.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" +[[package]] +name = "syn" +version = "0.15.44" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ca4b3b69a77cbe1ffc9e198781b7acb0c7365a883670e8f1c1bc66fba79a5c5" +dependencies = [ + "proc-macro2 0.4.30", + "quote 0.6.13", + "unicode-xid 0.1.0", +] + [[package]] name = "syn" version = "1.0.109" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" dependencies = [ - "proc-macro2", - "quote", + "proc-macro2 1.0.93", + "quote 1.0.35", "unicode-ident", ] [[package]] name = "syn" -version = "2.0.58" +version = "2.0.96" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "44cfb93f38070beee36b3fef7d4f5a16f27751d94b187b666a5cc5e9b0d30687" +checksum = "d5d0adab1ae378d7f53bdebc67a39f1f151407ef230f0ce2883572f5d8985c80" dependencies = [ - "proc-macro2", - "quote", + "proc-macro2 1.0.93", + "quote 1.0.35", "unicode-ident", ] @@ -7236,6 +7994,21 @@ name = "sync_wrapper" version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a7065abeca94b6a8a577f9bd45aa0867a2238b74e8eb67cf10d492bc39351394" +dependencies = [ + "futures-core", +] + +[[package]] +name = "synstructure" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a31c862c910b86af7938de3a4f176e89c4dfcb484d119593d456b86c3f0f8ea" +dependencies = [ + "proc-macro2 0.4.30", + "quote 0.6.13", + "syn 0.15.44", + "unicode-xid 0.1.0", +] [[package]] name = "synstructure" @@ -7243,10 +8016,10 @@ version = "0.12.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f36bdaa60a83aca3921b5259d5400cbf5e90fc51931376a9bd4a0eb79aa7210f" dependencies = [ - "proc-macro2", - "quote", + "proc-macro2 1.0.93", + "quote 1.0.35", "syn 1.0.109", - "unicode-xid", + "unicode-xid 0.2.4", ] [[package]] @@ -7255,9 +8028,9 @@ version = "0.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971" dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.58", + "proc-macro2 1.0.93", + "quote 1.0.35", + "syn 2.0.96", ] [[package]] @@ -7267,8 +8040,19 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ba3a3adc5c275d719af8cb4272ea1c4a6d668a777f37e115f6d11ddbc1c8e0e7" dependencies = [ 
"bitflags 1.3.2", - "core-foundation", - "system-configuration-sys", + "core-foundation 0.9.3", + "system-configuration-sys 0.5.0", +] + +[[package]] +name = "system-configuration" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c879d448e9d986b661742763247d3693ed13609438cf3d006f51f5368a5ba6b" +dependencies = [ + "bitflags 2.8.0", + "core-foundation 0.9.3", + "system-configuration-sys 0.6.0", ] [[package]] @@ -7281,6 +8065,16 @@ dependencies = [ "libc", ] +[[package]] +name = "system-configuration-sys" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e1d1b10ced5ca923a1fcb8d03e96b8d3268065d724548c0211415ff6ac6bac4" +dependencies = [ + "core-foundation-sys", + "libc", +] + [[package]] name = "tap" version = "1.0.1" @@ -7295,15 +8089,15 @@ checksum = "af547b166dd1ea4b472165569fc456cfb6818116f854690b0ff205e636523dab" [[package]] name = "tempfile" -version = "3.8.0" +version = "3.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb94d2f3cc536af71caac6b6fcebf65860b347e7ce0cc9ebe8f70d3e521054ef" +checksum = "28cce251fcbc87fac86a866eeb0d6c2d536fc16d06f184bb61aeae11aa4cee0c" dependencies = [ "cfg-if", - "fastrand 2.0.1", - "redox_syscall 0.3.5", - "rustix 0.38.20", - "windows-sys 0.48.0", + "fastrand 2.3.0", + "once_cell", + "rustix 0.38.42", + "windows-sys 0.52.0", ] [[package]] @@ -7341,7 +8135,16 @@ version = "1.0.60" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "579e9083ca58dd9dcf91a9923bb9054071b9ebbd800b342194c9feb0ee89fc18" dependencies = [ - "thiserror-impl", + "thiserror-impl 1.0.60", +] + +[[package]] +name = "thiserror" +version = "2.0.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d452f284b73e6d76dd36758a0c8684b1d5be31f92b89d07fd5822175732206fc" +dependencies = [ + "thiserror-impl 2.0.11", ] [[package]] @@ -7350,9 +8153,20 @@ version = "1.0.60" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "e2470041c06ec3ac1ab38d0356a6119054dedaea53e12fbefc0de730a1c08524" dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.58", + "proc-macro2 1.0.93", + "quote 1.0.35", + "syn 2.0.96", +] + +[[package]] +name = "thiserror-impl" +version = "2.0.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26afc1baea8a989337eeb52b6e72a039780ce45c3edfcc9c5b9d112feeb173c2" +dependencies = [ + "proc-macro2 1.0.93", + "quote 1.0.35", + "syn 2.0.96", ] [[package]] @@ -7418,11 +8232,21 @@ dependencies = [ "time-core", ] +[[package]] +name = "tinystr" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9117f5d4db391c1cf6927e7bea3db74b9a1c1add8f7eda9ffd5364f40f57b82f" +dependencies = [ + "displaydoc", + "zerovec", +] + [[package]] name = "tinyvec" -version = "1.6.0" +version = "1.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87cc5ceb3875bb20c2890005a4e226a4651264a5c75edb2421b52861a0a0cb50" +checksum = "022db8904dfa342efe721985167e9fcd16c29b226db4397ed752a761cfce81e8" dependencies = [ "tinyvec_macros", ] @@ -7442,7 +8266,7 @@ dependencies = [ "backtrace", "bytes", "libc", - "mio", + "mio 0.8.11", "num_cpus", "parking_lot 0.12.1", "pin-project-lite", @@ -7458,9 +8282,9 @@ version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5b8a1e28f2deaa14e508979454cb3a223b10b938b45af148bc0986de36f1923b" dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.58", + "proc-macro2 1.0.93", + "quote 1.0.35", + "syn 2.0.96", ] [[package]] @@ -7473,11 +8297,21 @@ dependencies = [ "tokio", ] +[[package]] +name = "tokio-rustls" +version = "0.26.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f6d0975eaace0cf0fcadee4e4aaa5da15b5c079146f2cffb67c113be122bf37" +dependencies = [ + "rustls 0.23.21", + "tokio", +] + [[package]] name = "tokio-stream" -version = 
"0.1.14" +version = "0.1.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "397c988d37662c7dda6d2208364a706264bf3d6138b11d436cbac0ad38832842" +checksum = "eca58d7bba4a75707817a2c44174253f9236b2d5fbd055602e9d5c07c139a047" dependencies = [ "futures-core", "pin-project-lite", @@ -7536,6 +8370,39 @@ dependencies = [ "winnow", ] +[[package]] +name = "tonic" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "877c5b330756d856ffcc4553ab34a5684481ade925ecc54bcd1bf02b1d0d4d52" +dependencies = [ + "async-stream", + "async-trait", + "axum", + "base64 0.22.0", + "bytes", + "h2 0.4.7", + "http 1.1.0", + "http-body 1.0.1", + "http-body-util", + "hyper 1.5.0", + "hyper-timeout", + "hyper-util", + "percent-encoding", + "pin-project 1.1.5", + "prost 0.13.4", + "rustls-native-certs", + "rustls-pemfile 2.2.0", + "socket2 0.5.5", + "tokio", + "tokio-rustls", + "tokio-stream", + "tower 0.4.13", + "tower-layer", + "tower-service", + "tracing", +] + [[package]] name = "tower" version = "0.4.13" @@ -7544,21 +8411,40 @@ checksum = "b8fa9be0de6cf49e536ce1851f987bd21a43b771b09473c3549a6c853db37c1c" dependencies = [ "futures-core", "futures-util", - "pin-project", + "indexmap 1.9.3", + "pin-project 1.1.5", "pin-project-lite", + "rand", + "slab", "tokio", + "tokio-util", "tower-layer", "tower-service", "tracing", ] +[[package]] +name = "tower" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d039ad9159c98b70ecfd540b2573b97f7f52c3e8d9f8ad57a24b916a536975f9" +dependencies = [ + "futures-core", + "futures-util", + "pin-project-lite", + "sync_wrapper 1.0.1", + "tokio", + "tower-layer", + "tower-service", +] + [[package]] name = "tower-http" version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8437150ab6bbc8c5f0f519e3d5ed4aa883a83dd4cdd3d1b21f9482936046cb97" dependencies = [ - "bitflags 2.4.1", + "bitflags 2.8.0", "bytes", 
"futures-util", "http 1.1.0", @@ -7585,9 +8471,21 @@ checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e" [[package]] name = "tower-service" -version = "0.3.2" +version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6bc1c9ce2b5135ac7f93c72918fc37feb872bdc6a5533a8b85eb4b86bfdae52" +checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" + +[[package]] +name = "tower-util" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d1093c19826d33807c72511e68f73b4a0469a3f22c2bd5f7d5212178b4b89674" +dependencies = [ + "futures-core", + "futures-util", + "pin-project 0.4.30", + "tower-service", +] [[package]] name = "tracing" @@ -7608,7 +8506,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3566e8ce28cc0a3fe42519fc80e6b4c943cc4c8cef275620eb8dac2d3d4e06cf" dependencies = [ "crossbeam-channel", - "thiserror", + "thiserror 1.0.60", "time", "tracing-subscriber", ] @@ -7619,9 +8517,9 @@ version = "0.1.27" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.58", + "proc-macro2 1.0.93", + "quote 1.0.35", + "syn 2.0.96", ] [[package]] @@ -7668,7 +8566,7 @@ dependencies = [ "serde", "serde_json", "sharded-slab", - "smallvec", + "smallvec 1.13.2", "thread_local", "tracing", "tracing-core", @@ -7689,7 +8587,7 @@ dependencies = [ [[package]] name = "transaction_fuzzer" -version = "0.13.0" +version = "0.14.0" dependencies = [ "ark-ec", "ark-ff", @@ -7708,6 +8606,7 @@ dependencies = [ "mina-p2p-messages", "mina-signer", "mina-tree", + "node", "num-bigint", "object 0.36.5", "once_cell", @@ -7735,13 +8634,13 @@ dependencies = [ "futures-channel", "futures-io", "futures-util", - "idna", + "idna 0.4.0", "ipnet", "once_cell", "rand", - "smallvec", + "smallvec 1.13.2", "socket2 0.5.5", - 
"thiserror", + "thiserror 1.0.60", "tinyvec", "tokio", "tracing", @@ -7762,8 +8661,8 @@ dependencies = [ "parking_lot 0.12.1", "rand", "resolv-conf", - "smallvec", - "thiserror", + "smallvec 1.13.2", + "thiserror 1.0.60", "tokio", "tracing", "trust-dns-proto", @@ -7789,7 +8688,7 @@ dependencies = [ "log", "rand", "sha1", - "thiserror", + "thiserror 1.0.60", "url", "utf-8", ] @@ -7814,7 +8713,7 @@ dependencies = [ "rand", "ring 0.17.8", "stun", - "thiserror", + "thiserror 1.0.60", "tokio", "tokio-util", "webrtc-util", @@ -7823,7 +8722,7 @@ dependencies = [ [[package]] name = "turshi" version = "0.1.0" -source = "git+https://github.com/openmina/proof-systems?rev=c478b19#c478b197ddb7fcefee87c4cfdc097a217a855086" +source = "git+https://github.com/openmina/proof-systems?rev=dec49a9#dec49a95fd483d8a90b7b602d13bfa8ea9485491" dependencies = [ "ark-ff", "hex", @@ -7906,6 +8805,12 @@ version = "0.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e51733f11c9c4f72aa0c160008246859e340b00807569a0da0e7a1079b27ba85" +[[package]] +name = "unicode-xid" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc72304796d0818e357ead4e000d19c9c174ab23dc11093ac919054d20a6a7fc" + [[package]] name = "unicode-xid" version = "0.2.4" @@ -7977,27 +8882,33 @@ checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" [[package]] name = "url" -version = "2.4.1" +version = "2.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "143b538f18257fac9cad154828a57c6bf5157e1aa604d4816b5995bf6de87ae5" +checksum = "32f8b686cadd1473f4bd0117a5d28d36b1ade384ea9b5069a1c40aefed7fda60" dependencies = [ "form_urlencoded", - "idna", + "idna 1.0.3", "percent-encoding", ] -[[package]] -name = "urlencoding" -version = "2.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "daf8dba3b7eb870caf1ddeed7bc9d2a049f3cfdfae7cb521b087cc33ae4c49da" - [[package]] name = 
"utf-8" version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9" +[[package]] +name = "utf16_iter" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8232dd3cdaed5356e0f716d285e4b40b932ac434100fe9b7e0e8e935b9e6246" + +[[package]] +name = "utf8_iter" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" + [[package]] name = "utf8parse" version = "0.2.1" @@ -8057,7 +8968,7 @@ checksum = "6a02e4885ed3bc0f2de90ea6dd45ebcbb66dacffe03547fadbb0eeae2770887d" [[package]] name = "vrf" -version = "0.13.0" +version = "0.14.0" dependencies = [ "anyhow", "ark-ec", @@ -8080,7 +8991,7 @@ dependencies = [ "serde", "serde_json", "sha2 0.10.8", - "thiserror", + "thiserror 1.0.60", ] [[package]] @@ -8124,8 +9035,8 @@ dependencies = [ "mime_guess", "multer", "percent-encoding", - "pin-project", - "rustls-pemfile", + "pin-project 1.1.5", + "rustls-pemfile 1.0.3", "scoped-tls", "serde", "serde_json", @@ -8169,9 +9080,9 @@ checksum = "5f89bb38646b4f81674e8f5c3fb81b562be1fd936d84320f3264486418519c79" dependencies = [ "bumpalo", "log", - "proc-macro2", - "quote", - "syn 2.0.58", + "proc-macro2 1.0.93", + "quote 1.0.35", + "syn 2.0.96", "wasm-bindgen-shared", ] @@ -8194,7 +9105,7 @@ version = "0.2.99" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2cc6181fd9a7492eef6fef1f33961e3695e4579b9872a6f7c83aee556666d4fe" dependencies = [ - "quote", + "quote 1.0.35", "wasm-bindgen-macro-support", ] @@ -8204,9 +9115,9 @@ version = "0.2.99" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "30d7a95b763d3c45903ed6c81f156801839e5ee968bb07e534c44df0fcd330c2" dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.58", + "proc-macro2 1.0.93", + "quote 1.0.35", + "syn 2.0.96", 
"wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -8237,8 +9148,21 @@ version = "0.3.37" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ecb993dd8c836930ed130e020e77d9b2e65dd0fbab1b67c790b0f5d80b11a575" dependencies = [ - "proc-macro2", - "quote", + "proc-macro2 1.0.93", + "quote 1.0.35", +] + +[[package]] +name = "wasm-streams" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4e072d4e72f700fb3443d8fe94a39315df013eef1104903cdb0a2abd322bbecd" +dependencies = [ + "futures-util", + "js-sys", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", ] [[package]] @@ -8277,6 +9201,16 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "web-time" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a6580f308b1fad9207618087a65c04e7a10bc77e02c8e84e9b00dd4b12fa0bb" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + [[package]] name = "webrtc" version = "0.11.0" @@ -8297,14 +9231,14 @@ dependencies = [ "ring 0.17.8", "rtcp", "rtp", - "rustls 0.23.12", + "rustls 0.23.21", "sdp", "serde", "serde_json", "sha2 0.10.8", "smol_str", "stun", - "thiserror", + "thiserror 1.0.60", "time", "tokio", "turn", @@ -8328,7 +9262,7 @@ dependencies = [ "bytes", "log", "portable-atomic", - "thiserror", + "thiserror 1.0.60", "tokio", "webrtc-sctp", "webrtc-util", @@ -8357,13 +9291,13 @@ dependencies = [ "rand_core", "rcgen 0.13.1", "ring 0.17.8", - "rustls 0.23.12", + "rustls 0.23.21", "sec1", "serde", "sha1", "sha2 0.10.8", "subtle", - "thiserror", + "thiserror 1.0.60", "tokio", "webrtc-util", "x25519-dalek", @@ -8384,7 +9318,7 @@ dependencies = [ "serde", "serde_json", "stun", - "thiserror", + "thiserror 1.0.60", "tokio", "turn", "url", @@ -8401,7 +9335,7 @@ source = "git+https://github.com/openmina/webrtc.git?rev=e8705db39af1b198b324a5d dependencies = [ "log", "socket2 0.5.5", - "thiserror", + "thiserror 1.0.60", "tokio", "webrtc-util", ] @@ -8415,7 +9349,7 @@ 
dependencies = [ "bytes", "rand", "rtp", - "thiserror", + "thiserror 1.0.60", ] [[package]] @@ -8430,7 +9364,7 @@ dependencies = [ "log", "portable-atomic", "rand", - "thiserror", + "thiserror 1.0.60", "tokio", "webrtc-util", ] @@ -8462,7 +9396,7 @@ dependencies = [ "rtp", "sha1", "subtle", - "thiserror", + "thiserror 1.0.60", "tokio", "webrtc-util", ] @@ -8482,7 +9416,7 @@ dependencies = [ "nix 0.26.4", "portable-atomic", "rand", - "thiserror", + "thiserror 1.0.60", "tokio", "winapi 0.3.9", ] @@ -8496,7 +9430,7 @@ dependencies = [ "either", "home", "once_cell", - "rustix 0.38.20", + "rustix 0.38.42", ] [[package]] @@ -8568,6 +9502,36 @@ dependencies = [ "windows-targets 0.48.5", ] +[[package]] +name = "windows-registry" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e400001bb720a623c1c69032f8e3e4cf09984deec740f007dd2b03ec864804b0" +dependencies = [ + "windows-result", + "windows-strings", + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-result" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d1043d8214f791817bab27572aaa8af63732e11bf84aa21a45a78d6c317ae0e" +dependencies = [ + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-strings" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4cd9b125c486025df0eabcb585e62173c6c9eddcec5d117d3b6e8c30e2ee4d10" +dependencies = [ + "windows-result", + "windows-targets 0.52.6", +] + [[package]] name = "windows-sys" version = "0.45.0" @@ -8592,7 +9556,7 @@ version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" dependencies = [ - "windows-targets 0.52.0", + "windows-targets 0.52.6", ] [[package]] @@ -8627,17 +9591,18 @@ dependencies = [ [[package]] name = "windows-targets" -version = "0.52.0" +version = "0.52.6" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a18201040b24831fbb9e4eb208f8892e1f50a37feb53cc7ff887feb8f50e7cd" +checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" dependencies = [ - "windows_aarch64_gnullvm 0.52.0", - "windows_aarch64_msvc 0.52.0", - "windows_i686_gnu 0.52.0", - "windows_i686_msvc 0.52.0", - "windows_x86_64_gnu 0.52.0", - "windows_x86_64_gnullvm 0.52.0", - "windows_x86_64_msvc 0.52.0", + "windows_aarch64_gnullvm 0.52.6", + "windows_aarch64_msvc 0.52.6", + "windows_i686_gnu 0.52.6", + "windows_i686_gnullvm", + "windows_i686_msvc 0.52.6", + "windows_x86_64_gnu 0.52.6", + "windows_x86_64_gnullvm 0.52.6", + "windows_x86_64_msvc 0.52.6", ] [[package]] @@ -8654,9 +9619,9 @@ checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" [[package]] name = "windows_aarch64_gnullvm" -version = "0.52.0" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb7764e35d4db8a7921e09562a0304bf2f93e0a51bfccee0bd0bb0b666b015ea" +checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" [[package]] name = "windows_aarch64_msvc" @@ -8672,9 +9637,9 @@ checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" [[package]] name = "windows_aarch64_msvc" -version = "0.52.0" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbaa0368d4f1d2aaefc55b6fcfee13f41544ddf36801e793edbbfd7d7df075ef" +checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" [[package]] name = "windows_i686_gnu" @@ -8690,9 +9655,15 @@ checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" [[package]] name = "windows_i686_gnu" -version = "0.52.0" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" + +[[package]] +name = "windows_i686_gnullvm" +version = 
"0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a28637cb1fa3560a16915793afb20081aba2c92ee8af57b4d5f28e4b3e7df313" +checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" [[package]] name = "windows_i686_msvc" @@ -8708,9 +9679,9 @@ checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" [[package]] name = "windows_i686_msvc" -version = "0.52.0" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ffe5e8e31046ce6230cc7215707b816e339ff4d4d67c65dffa206fd0f7aa7b9a" +checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" [[package]] name = "windows_x86_64_gnu" @@ -8726,9 +9697,9 @@ checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" [[package]] name = "windows_x86_64_gnu" -version = "0.52.0" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d6fa32db2bc4a2f5abeacf2b69f7992cd09dca97498da74a151a3132c26befd" +checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" [[package]] name = "windows_x86_64_gnullvm" @@ -8744,9 +9715,9 @@ checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" [[package]] name = "windows_x86_64_gnullvm" -version = "0.52.0" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a657e1e9d3f514745a572a6846d3c7aa7dbe1658c056ed9c3344c4109a6949e" +checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" [[package]] name = "windows_x86_64_msvc" @@ -8762,9 +9733,9 @@ checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" [[package]] name = "windows_x86_64_msvc" -version = "0.52.0" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dff9641d1cd4be8d1a070daf9e3773c5f67e78b4d9d42263020c057706765c04" +checksum = 
"589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" [[package]] name = "winnow" @@ -8785,6 +9756,18 @@ dependencies = [ "windows-sys 0.48.0", ] +[[package]] +name = "write16" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d1890f4022759daae28ed4fe62859b1236caebfc61ede2f63ed4e695f3f6d936" + +[[package]] +name = "writeable" +version = "0.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e9df38ee2d2c3c5948ea468a8406ff0db0b29ae1ffde1bcf20ef305bcc95c51" + [[package]] name = "wyz" version = "0.5.1" @@ -8819,7 +9802,7 @@ dependencies = [ "nom", "oid-registry 0.6.1", "rusticata-macros", - "thiserror", + "thiserror 1.0.60", "time", ] @@ -8837,7 +9820,7 @@ dependencies = [ "oid-registry 0.7.1", "ring 0.17.8", "rusticata-macros", - "thiserror", + "thiserror 1.0.60", "time", ] @@ -8866,7 +9849,7 @@ dependencies = [ "log", "nohash-hasher", "parking_lot 0.12.1", - "pin-project", + "pin-project 1.1.5", "rand", "static_assertions", ] @@ -8880,6 +9863,30 @@ dependencies = [ "time", ] +[[package]] +name = "yoke" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "120e6aef9aa629e3d4f52dc8cc43a015c7724194c97dfaf45180d2daf2b77f40" +dependencies = [ + "serde", + "stable_deref_trait", + "yoke-derive", + "zerofrom", +] + +[[package]] +name = "yoke-derive" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2380878cad4ac9aac1e2435f3eb4020e8374b5f13c296cb75b4620ff8e229154" +dependencies = [ + "proc-macro2 1.0.93", + "quote 1.0.35", + "syn 2.0.96", + "synstructure 0.13.1", +] + [[package]] name = "zerocopy" version = "0.7.32" @@ -8895,9 +9902,30 @@ version = "0.7.32" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9ce1b18ccd8e73a9321186f97e46f9f04b778851177567b1975109d26a08d2a6" dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.58", + "proc-macro2 1.0.93", + "quote 
1.0.35", + "syn 2.0.96", +] + +[[package]] +name = "zerofrom" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cff3ee08c995dee1859d998dea82f7374f2826091dd9cd47def953cae446cd2e" +dependencies = [ + "zerofrom-derive", +] + +[[package]] +name = "zerofrom-derive" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "595eed982f7d355beb85837f651fa22e90b3c044842dc7f2c2842c086f295808" +dependencies = [ + "proc-macro2 1.0.93", + "quote 1.0.35", + "syn 2.0.96", + "synstructure 0.13.1", ] [[package]] @@ -8915,9 +9943,31 @@ version = "1.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.58", + "proc-macro2 1.0.93", + "quote 1.0.35", + "syn 2.0.96", +] + +[[package]] +name = "zerovec" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa2b893d79df23bfb12d5461018d408ea19dfafe76c2c7ef6d4eba614f8ff079" +dependencies = [ + "yoke", + "zerofrom", + "zerovec-derive", +] + +[[package]] +name = "zerovec-derive" +version = "0.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6eafa6dfb17584ea3e2bd6e76e0cc15ad7af12b09abdd1ca55961bed9b1063c6" +dependencies = [ + "proc-macro2 1.0.93", + "quote 1.0.35", + "syn 2.0.96", ] [[package]] diff --git a/Cargo.toml b/Cargo.toml index 0d5184a1d7..889c49c481 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -29,6 +29,8 @@ members = [ "tools/ledger-tool", "tools/salsa-simple", "tools/fuzzing", + "tools/archive-breadcrumb-compare", + "tools/heartbeats-processor", "producer-dashboard", "fuzzer", @@ -46,19 +48,21 @@ mina-p2p-messages = { path = "mina-p2p-messages" } poseidon = { path = "poseidon" } ledger = { path = "ledger", package = "mina-tree" } -mina-hasher = { git = "https://github.com/openmina/proof-systems", rev = "c478b19" } 
-mina-signer = { git = "https://github.com/openmina/proof-systems", rev = "c478b19" } -mina-curves = { git = "https://github.com/openmina/proof-systems", rev = "c478b19" } -# UNCOMMENTED_IN_CI mina-curves = { git = "https://github.com/openmina/proof-systems", rev = "c478b19", features = [ "32x9" ] } -o1-utils = { git = "https://github.com/openmina/proof-systems", rev = "c478b19" } -kimchi = { git = "https://github.com/openmina/proof-systems", rev = "c478b19" } -mina-poseidon = {git = "https://github.com/openmina/proof-systems", rev = "c478b19" } -poly-commitment = {git = "https://github.com/openmina/proof-systems", rev = "c478b19" } +mina-hasher = { git = "https://github.com/openmina/proof-systems", rev = "dec49a9" } +mina-signer = { git = "https://github.com/openmina/proof-systems", rev = "dec49a9" } +mina-curves = { git = "https://github.com/openmina/proof-systems", rev = "dec49a9" } +# UNCOMMENTED_IN_CI mina-curves = { git = "https://github.com/openmina/proof-systems", rev = "dec49a9", features = [ "32x9" ] } +o1-utils = { git = "https://github.com/openmina/proof-systems", rev = "dec49a9" } +kimchi = { git = "https://github.com/openmina/proof-systems", rev = "dec49a9" } +mina-poseidon = {git = "https://github.com/openmina/proof-systems", rev = "dec49a9" } +poly-commitment = {git = "https://github.com/openmina/proof-systems", rev = "dec49a9" } libp2p = { git = "https://github.com/openmina/rust-libp2p", rev = "5c44c7d9", default-features = false } vrf = { path = "vrf" } openmina-node-account = { path = "node/account" } -redux = { git = "https://github.com/openmina/redux-rs.git", rev = "ab14890c", features = ["serde"] } +redux = { git = "https://github.com/openmina/redux-rs.git", rev = "ab14890c", features = [ + "serde", +] } serde = "1.0.190" serde_json = "1.0.107" serde_with = { version = "3.7.0", features = ["hex"] } @@ -66,15 +70,19 @@ linkme = "0.3.22" static_assertions = "1.1.0" juniper = { version = "0.16" } -ark-ff = { version = "0.3.0", features = [ 
"parallel", "asm", "std" ] } +ark-ff = { version = "0.3.0", features = ["parallel", "asm", "std"] } # UNCOMMENTED_IN_CI ark-ff = { version = "0.3.0", features = [ "parallel", "asm", "std", "32x9" ] } +graphannis-malloc_size_of = { git = "https://github.com/openmina/graphannis-malloc_size_of.git", rev = "f7da9f6" } +graphannis-malloc_size_of_derive = { git = "https://github.com/openmina/graphannis-malloc_size_of_derive.git" } + +openmina-producer-dashboard = { path = "producer-dashboard" } [profile.fuzz] inherits = "release" opt-level = 3 debug = 2 -debug-assertions = true # TODO: test and disable if too slow +debug-assertions = true # TODO: test and disable if too slow overflow-checks = true lto = false panic = "abort" @@ -82,12 +90,12 @@ incremental = false codegen-units = 1 [patch.crates-io] -ark-ff = { git = "https://github.com/openmina/algebra", rev = "d0343f5" } # branch: fix-openmina-webnode -ark-ec = { git = "https://github.com/openmina/algebra", rev = "d0343f5" } # branch: fix-openmina-webnode -ark-poly = { git = "https://github.com/openmina/algebra", rev = "d0343f5" } # branch: fix-openmina-webnode -ark-serialize = { git = "https://github.com/openmina/algebra", rev = "d0343f5" } # branch: fix-openmina-webnode +ark-ff = { git = "https://github.com/openmina/algebra", rev = "aea157a" } # branch: fix-openmina-webnode +ark-ec = { git = "https://github.com/openmina/algebra", rev = "aea157a" } # branch: fix-openmina-webnode +ark-poly = { git = "https://github.com/openmina/algebra", rev = "aea157a" } # branch: fix-openmina-webnode +ark-serialize = { git = "https://github.com/openmina/algebra", rev = "aea157a" } # branch: fix-openmina-webnode -num-bigint = { git = "https://github.com/openmina/num-bigint", rev = "8bb5ee4" } # branch: on-stack +num-bigint = { git = "https://github.com/openmina/num-bigint", rev = "8bb5ee4" } # branch: on-stack num-rational = { git = "https://github.com/openmina/num-rational", rev = "336f11d" } # branch: on-stack 
[profile.test.package."*"] diff --git a/Dockerfile b/Dockerfile index fb375a0697..80d7e72f1f 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,10 +1,18 @@ FROM rust:buster AS build RUN apt-get update && apt-get install -y protobuf-compiler && apt-get clean -RUN rustup default 1.83 && rustup component add rustfmt +RUN rustup default 1.84 && rustup component add rustfmt WORKDIR /openmina COPY . . -RUN cargo build --release --package=cli --bin=openmina -RUN cargo build --release --features scenario-generators --bin openmina-node-testing +# Build with cache mount +RUN --mount=type=cache,target=/usr/local/cargo/registry \ + --mount=type=cache,target=/openmina/target,id=rust-target \ + cargo build --release --package=cli --bin=openmina && \ + cp -r /openmina/target/release /openmina/release-bin/ + +RUN --mount=type=cache,target=/usr/local/cargo/registry \ + --mount=type=cache,target=/openmina/target,id=rust-target \ + cargo build --release --features scenario-generators --bin openmina-node-testing && \ + cp -r /openmina/target/release /openmina/testing-release-bin/ # necessary for proof generation when running a block producer. 
RUN git clone --depth 1 https://github.com/openmina/circuit-blobs.git \ @@ -12,9 +20,10 @@ RUN git clone --depth 1 https://github.com/openmina/circuit-blobs.git \ FROM debian:buster RUN apt-get update && apt-get install -y libjemalloc2 libssl1.1 libpq5 curl jq procps && apt-get clean -COPY --from=build /openmina/cli/bin/snark-worker /usr/local/bin/ -COPY --from=build /openmina/target/release/openmina /usr/local/bin/ -COPY --from=build /openmina/target/release/openmina-node-testing /usr/local/bin/ + +COPY --from=build /openmina/release-bin/openmina /usr/local/bin/ +COPY --from=build /openmina/testing-release-bin/openmina-node-testing /usr/local/bin/ + RUN mkdir -p /usr/local/lib/openmina/circuit-blobs COPY --from=build /openmina/circuit-blobs/ /usr/local/lib/openmina/circuit-blobs/ diff --git a/README.md b/README.md index 0dcc46dfbd..ab46516470 100644 --- a/README.md +++ b/README.md @@ -27,6 +27,7 @@ _Currently in **public beta**, join our [Discord community](https://discord.com/ - [Non-Block Producing Node](/docs/alpha-testing-guide.md) Connect to peers and sync a node on the devnet; no devnet stake needed. - [Block Producing Node](/docs/block-producer-guide.md) Produce blocks on the devnet; sufficient devnet stake needed. - [Local Block Production Demo](/docs/local-demo-guide.md) Produce blocks on a custom local chain without devnet stake. +- [Devnet Archive Node](/docs/archive-node-guide.md) Run an archive node on devnet. 
Block production Node UI diff --git a/cli/Cargo.toml b/cli/Cargo.toml index 1a1effc6e1..defd502f5d 100644 --- a/cli/Cargo.toml +++ b/cli/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "cli" -version = "0.13.0" +version = "0.14.0" edition = "2021" license = "Apache-2.0" diff --git a/cli/replay_dynamic_effects/Cargo.toml b/cli/replay_dynamic_effects/Cargo.toml index 5854332517..e226cf2399 100644 --- a/cli/replay_dynamic_effects/Cargo.toml +++ b/cli/replay_dynamic_effects/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "replay_dynamic_effects" -version = "0.13.0" +version = "0.14.0" edition = "2021" license = "Apache-2.0" diff --git a/cli/src/commands/node/mod.rs b/cli/src/commands/node/mod.rs index 3c6284a630..098316bbc1 100644 --- a/cli/src/commands/node/mod.rs +++ b/cli/src/commands/node/mod.rs @@ -137,6 +137,10 @@ pub struct Node { // TODO: make this argument required. #[arg(short = 'c', long, env)] pub config: Option, + + /// Enable archive mode (seding blocks to the archive process). + #[arg(long, env)] + pub archive_address: Option, } impl Node { @@ -268,6 +272,25 @@ impl Node { } } + if let Some(address) = self.archive_address { + openmina_core::IS_ARCHIVE + .set(true) + .expect("IS_ARCHIVE already set"); + node::core::info!( + summary = "Archive mode enabled", + address = address.to_string() + ); + // Convert URL to SocketAddr + let socket_addrs = address.socket_addrs(|| None).expect("Invalid URL"); + + let socket_addr = socket_addrs.first().expect("No socket address found"); + node_builder.archive(*socket_addr); + } else { + openmina_core::IS_ARCHIVE + .set(false) + .expect("IS_ARCHIVE already set"); + } + if let Some(sec_key) = self.run_snarker { node_builder.snarker(sec_key, self.snarker_fee, self.snarker_strategy); } diff --git a/cli/tests/bootstrap.rs b/cli/tests/bootstrap.rs index 64f8166ba5..fecedbce8e 100644 --- a/cli/tests/bootstrap.rs +++ b/cli/tests/bootstrap.rs @@ -117,12 +117,12 @@ const READY: Duration = Duration::from_secs(20 * 60); fn is_healthy() -> 
bool { reqwest::blocking::get(format!("http://localhost:{HTTP_PORT}/healthz")) - .map_or(false, |res| res.status().is_success()) + .is_ok_and(|res| res.status().is_success()) } fn is_ready() -> bool { let ready = reqwest::blocking::get(format!("http://localhost:{HTTP_PORT}/readyz")) - .map_or(false, |res| res.status().is_success()); + .is_ok_and(|res| res.status().is_success()); if let Err(err) = sync_stats() { println!("error getting stats: {err}"); diff --git a/core/Cargo.toml b/core/Cargo.toml index 9aff894310..2ba5c357db 100644 --- a/core/Cargo.toml +++ b/core/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "openmina-core" -version = "0.13.0" +version = "0.14.0" edition = "2021" license = "Apache-2.0" @@ -28,6 +28,9 @@ base64 = "0.22" bs58 = "0.4.0" thiserror = "1.0.37" +graphannis-malloc_size_of = { workspace = true } +graphannis-malloc_size_of_derive = { workspace = true } + mina-hasher = { workspace = true } mina-p2p-messages = { workspace = true } poseidon = { workspace = true } @@ -35,12 +38,12 @@ hex = "0.4.3" ark-ff = { workspace = true } [target.'cfg(not(target_family = "wasm"))'.dependencies] -redux = { workspace = true, features=["serializable_callbacks"] } +redux = { workspace = true, features = ["serializable_callbacks"] } [target.'cfg(target_family = "wasm")'.dependencies] wasm-bindgen = "0.2.99" wasm-bindgen-futures = "0.4.49" -wasm_thread = { version = "0.3", features = [ "es_modules" ] } +wasm_thread = { version = "0.3", features = ["es_modules"] } js-sys = "0.3" web-sys = { version = "0.3", features = ["Window", "Response"] } diff --git a/core/src/block/block_with_hash.rs b/core/src/block/block_with_hash.rs index ed09a5a57f..17b0a262fc 100644 --- a/core/src/block/block_with_hash.rs +++ b/core/src/block/block_with_hash.rs @@ -118,7 +118,7 @@ impl> BlockWithHash { || constraint_constants() .fork .as_ref() - .map_or(false, |fork| fork.blockchain_length + 1 == self.height()) + .is_some_and(|fork| fork.blockchain_length + 1 == self.height()) } pub fn 
root_block_height(&self) -> u32 { @@ -138,8 +138,10 @@ impl> BlockWithHash { self.body().commands_iter() } - pub fn coinbases_iter(&self) -> impl Iterator { - self.body().coinbases_iter() + pub fn coinbase_fee_transfers_iter( + &self, + ) -> impl Iterator { + self.body().coinbase_fee_transfers_iter() } pub fn completed_works_iter<'a>( diff --git a/core/src/distributed_pool.rs b/core/src/distributed_pool.rs index 7346caa20a..7cec46ef9c 100644 --- a/core/src/distributed_pool.rs +++ b/core/src/distributed_pool.rs @@ -37,7 +37,7 @@ where pub fn contains(&self, key: &Key) -> bool { self.by_key .get(key) - .map_or(false, |i| self.list.contains_key(i)) + .is_some_and(|i| self.list.contains_key(i)) } pub fn get(&self, key: &Key) -> Option<&State> { diff --git a/core/src/lib.rs b/core/src/lib.rs index 2ae805537e..967b6276ff 100644 --- a/core/src/lib.rs +++ b/core/src/lib.rs @@ -1,3 +1,6 @@ +extern crate graphannis_malloc_size_of as malloc_size_of; +extern crate graphannis_malloc_size_of_derive as malloc_size_of_derive; + pub mod distributed_pool; pub mod invariants; pub mod log; @@ -20,6 +23,8 @@ pub mod transaction; pub mod consensus; mod substate; +use std::sync::OnceLock; + pub use substate::{Substate, SubstateAccess, SubstateResult}; pub mod network; @@ -31,6 +36,9 @@ pub use chain_id::*; pub mod encrypted_key; pub use encrypted_key::*; +// FIXME(#1043): refactor +pub static IS_ARCHIVE: OnceLock = OnceLock::new(); + mod work_dir { use once_cell::sync::OnceCell; use std::path::PathBuf; diff --git a/core/src/requests/request_id.rs b/core/src/requests/request_id.rs index 8272dc56e9..255fc221b1 100644 --- a/core/src/requests/request_id.rs +++ b/core/src/requests/request_id.rs @@ -142,3 +142,15 @@ impl Clone for RequestId { } impl Copy for RequestId {} + +mod measurement { + use malloc_size_of::{MallocSizeOf, MallocSizeOfOps}; + + use super::RequestId; + + impl MallocSizeOf for RequestId { + fn size_of(&self, _ops: &mut MallocSizeOfOps) -> usize { + 0 + } + } +} diff --git 
a/core/src/snark/snark.rs b/core/src/snark/snark.rs index b7830f492d..51b61ebd3d 100644 --- a/core/src/snark/snark.rs +++ b/core/src/snark/snark.rs @@ -1,5 +1,6 @@ use std::sync::Arc; +use malloc_size_of::MallocSizeOf; use mina_p2p_messages::binprot::macros::{BinProtRead, BinProtWrite}; use mina_p2p_messages::v2::{ CurrencyFeeStableV1, MinaBaseFeeWithProverStableV1, @@ -97,3 +98,10 @@ impl From<&Snark> for NetworkPoolSnarkPoolDiffVersionedStableV2AddSolvedWork1 { } } } + +impl MallocSizeOf for Snark { + fn size_of(&self, ops: &mut malloc_size_of::MallocSizeOfOps) -> usize { + usize::from(!ops.have_seen_ptr(Arc::as_ptr(&self.proofs))) + * (size_of::() + self.proofs.size_of(ops)) + } +} diff --git a/docker-compose.archive.devnet.compare.yml b/docker-compose.archive.devnet.compare.yml new file mode 100644 index 0000000000..433df964ed --- /dev/null +++ b/docker-compose.archive.devnet.compare.yml @@ -0,0 +1,147 @@ +services: + postgres-ocaml: + image: postgres + container_name: postgres-ocaml + env_file: + - .env + healthcheck: + # test: ["CMD-SHELL", "psql -U postgres -d archive -tAc \"SELECT COUNT(*) FROM pg_database WHERE datname='archive';\" | grep -q '^1$'"] + test: ["CMD-SHELL", "pg_isready -U postgres"] + interval: 5s + timeout: 10s + retries: 10 + volumes: + - /tmp/archive-data-ocaml:/var/lib/postgresql/data + # ports: + # - "127.0.0.1:3333:${PG_PORT}" + + postgres-openmina: + image: postgres + container_name: postgres-openmina + env_file: + - .env + healthcheck: + # test: ["CMD-SHELL", "psql -U postgres -d archive -tAc \"SELECT COUNT(*) FROM pg_database WHERE datname='archive';\" | grep -q '^1$'"] + test: ["CMD-SHELL", "pg_isready -U postgres"] + interval: 5s + timeout: 10s + retries: 10 + volumes: + - /tmp/archive-data-openmina:/var/lib/postgresql/data + # ports: + # - "127.0.0.1:4444:${PG_PORT}" + + create-db: + image: postgres + container_name: create-dbs + env_file: + - .env + command: > + bash -c ' + psql 
postgres://postgres:${POSTGRES_PASSWORD}@postgres-ocaml:${PG_PORT} -c "CREATE DATABASE ${PG_DB};"; + psql postgres://postgres:${POSTGRES_PASSWORD}@postgres-openmina:${PG_PORT} -c "CREATE DATABASE ${PG_DB};"; + psql postgres://postgres:${POSTGRES_PASSWORD}@postgres-ocaml:${PG_PORT}/${PG_DB} -c " + ALTER SYSTEM SET max_connections = 500; + ALTER SYSTEM SET max_locks_per_transaction = 100; + ALTER SYSTEM SET max_pred_locks_per_relation = 100; + ALTER SYSTEM SET max_pred_locks_per_transaction = 5000; + "; + psql postgres://postgres:${POSTGRES_PASSWORD}@postgres-openmina:${PG_PORT}/${PG_DB} -c " + ALTER SYSTEM SET max_connections = 500; + ALTER SYSTEM SET max_locks_per_transaction = 100; + ALTER SYSTEM SET max_pred_locks_per_relation = 100; + ALTER SYSTEM SET max_pred_locks_per_transaction = 5000; + "; + psql postgres://postgres:${POSTGRES_PASSWORD}@postgres-ocaml:${PG_PORT}/${PG_DB} -f /sql/archive_schema.sql; + psql postgres://postgres:${POSTGRES_PASSWORD}@postgres-openmina:${PG_PORT}/${PG_DB} -f /sql/archive_schema.sql; + ' + volumes: + - ./producer-dashboard/src/archive/sql:/sql + depends_on: + postgres-ocaml: + condition: service_healthy + postgres-openmina: + condition: service_healthy + + archive-ocaml: + image: adrnagy/mina-archive + container_name: archive-ocaml + volumes: + - /tmp/archive-outputs/ocaml:/data + entrypoint: ["mina-archive"] + command: > + run + --postgres-uri postgres://postgres:${POSTGRES_PASSWORD}@postgres-ocaml:${PG_PORT}/${PG_DB} + --server-port 3086 + --output-dir /data + ports: + - 3086:3086 + depends_on: + postgres-ocaml: + condition: service_healthy + + archive-openmina: + image: adrnagy/mina-archive + container_name: archive-openmina + env_file: + - .env + volumes: + - /tmp/archive-outputs/openmina:/data + entrypoint: ["mina-archive"] + command: > + run + --postgres-uri postgres://postgres:${POSTGRES_PASSWORD}@postgres-openmina:${PG_PORT}/${PG_DB} + --server-port 3087 + --output-dir /data + ports: + - 3087:3087 + depends_on: + 
postgres-openmina: + condition: service_healthy + + node-ocaml: + image: gcr.io/o1labs-192920/mina-daemon:3.0.4-alpha1-889607b-bullseye-devnet + container_name: node-ocaml + environment: + MINA_CLIENT_TRUSTLIST: 10.0.0.0/8,172.16.0.0/12,192.168.0.0/16 + VERBOSE: true + LOG_LEVEL: Info + MINA_LIBP2P_PASS: v3rys3cr3t + command: > + daemon + --archive-address archive-ocaml:3086 + --insecure-rest-server + --rest-port 5000 + --enable-peer-exchange true + --peer /dns4/primary-tcp-proxy.hz.minaprotocol.network/tcp/40101/p2p/12D3KooWNGY3guz8pYHrVEqs8se4MSnnmpgguyQYDazMbVCyrMnS + --peer /dns4/primary-tcp-proxy.hz.minaprotocol.network/tcp/40102/p2p/12D3KooWSqZ4qtysb8Du4yVpcc5SYc3gsRuNqgMomggw6hekATWg + --peer /dns4/primary-tcp-proxy.hz.minaprotocol.network/tcp/40103/p2p/12D3KooWSHiGavQpamDPEc6rPaqT4PoS1Lr9aDfrfg5dKM2V6x3H + --peer /dns4/primary-tcp-proxy.hz.minaprotocol.network/tcp/40104/p2p/12D3KooWA3yPrTaLXsggVSCG4mr7c33YNdz5DSs87LszRUVt9vLT + --peer /dns4/primary-tcp-proxy.hz.minaprotocol.network/tcp/40105/p2p/12D3KooWCLcUWCdU4VstETztxE3feQyS57dVDdzBhmkj5tiCaha8 + --peer /dns4/primary-tcp-proxy.hz.minaprotocol.network/tcp/40106/p2p/12D3KooWNZWqEoCuhMrc9tTMxtEsfxmeFhjh2agUcmzJFNKxQnNA + --peer /dns4/primary-tcp-proxy.hz.minaprotocol.network/tcp/40107/p2p/12D3KooWAMSP94SM3icSeAXeBmPUuZ5JvwrZ5w87fpRHVeJkdboe + --peer /dns4/primary-tcp-proxy.hz.minaprotocol.network/tcp/40108/p2p/12D3KooWL5gPf5CrARVPhBi6KsDHmB1gsJKZ4vWrcLweWyMjpB5e + --peer /dns4/primary-tcp-proxy.hz.minaprotocol.network/tcp/40109/p2p/12D3KooWT1nNJLGE8jWcshPSq3FmSXmmNn2MzfmvJcWYZ1HrtHnZ + # volumes: + # # - /tmp/ocaml-node-keys:/keys + ports: + - 8302:8302 + - 8301:8301 + - 5000:5000 + depends_on: + # libp2p-keys-permissions: + # condition: service_completed_successfully + archive-ocaml: + condition: service_started + + node-openmina: + image: adrnagy/openmina:archive-test + container_name: node-openmina + command: > + node + --archive-address http://archive-openmina:3087 + ports: + - "3000:3000" + depends_on: 
+ archive-openmina: + condition: service_started diff --git a/docker-compose.archive.devnet.yml b/docker-compose.archive.devnet.yml new file mode 100644 index 0000000000..6175c5b65c --- /dev/null +++ b/docker-compose.archive.devnet.yml @@ -0,0 +1,73 @@ +services: + postgres-openmina: + image: postgres + container_name: postgres-openmina + env_file: + - .env + healthcheck: + # test: ["CMD-SHELL", "psql -U postgres -d archive -tAc \"SELECT COUNT(*) FROM pg_database WHERE datname='archive';\" | grep -q '^1$'"] + test: ["CMD-SHELL", "pg_isready -U postgres"] + interval: 5s + timeout: 10s + retries: 10 + volumes: + - /tmp/archive-data-openmina:/var/lib/postgresql/data + # ports: + # - "127.0.0.1:${PG_PORT}:${PG_PORT}" + + create-db: + image: postgres + container_name: create-dbs + env_file: + - .env + command: > + bash -c ' + sleep 10; + psql postgres://postgres:${POSTGRES_PASSWORD}@postgres-openmina:${PG_PORT} -c "CREATE DATABASE ${PG_DB};"; + psql postgres://postgres:${POSTGRES_PASSWORD}@postgres-openmina:${PG_PORT}/${PG_DB} -c " + ALTER SYSTEM SET max_connections = 500; + ALTER SYSTEM SET max_locks_per_transaction = 100; + ALTER SYSTEM SET max_pred_locks_per_relation = 100; + ALTER SYSTEM SET max_pred_locks_per_transaction = 5000; + "; + psql postgres://postgres:${POSTGRES_PASSWORD}@postgres-openmina:${PG_PORT}/${PG_DB} -f /sql/archive_schema.sql; + ' + volumes: + - ./producer-dashboard/src/archive/sql:/sql + depends_on: + postgres-openmina: + condition: service_healthy + + archive-openmina: + # Note: Modified with patch ./docker/producer-dashboard/output_binprot_breadcrumbs.patch to output binprot breadcrumbs + image: adrnagy/mina-archive + container_name: archive-openmina + env_file: + - .env + volumes: + - /tmp/archive-outputs/openmina:/data + entrypoint: ["mina-archive"] + command: > + run + --postgres-uri postgres://postgres:${POSTGRES_PASSWORD}@postgres-openmina:${PG_PORT}/${PG_DB} + --server-port 3086 + --log-level debug + --output-dir /data + depends_on: + 
postgres-openmina: + condition: service_healthy + + ports: + - "127.0.0.1:3086:3086" + + node-openmina: + image: openmina/openmina:latest + container_name: node-openmina + command: > + node + --archive-address http://archive-openmina:3086 + ports: + - "127.0.0.1:3000:3000" + depends_on: + archive-openmina: + condition: service_started diff --git a/docker-compose.local.producers.yml b/docker-compose.local.producers.yml index 223dc58022..9fff9d1a7f 100644 --- a/docker-compose.local.producers.yml +++ b/docker-compose.local.producers.yml @@ -1,7 +1,7 @@ services: local-producer-cluster: container_name: local-producer-cluster - image: openmina/openmina:0.13.0 + image: openmina/openmina:0.14.0 environment: - RUST_BACKTRACE=1 entrypoint: ["openmina-node-testing", "scenarios-generate", "--name", "simulation-small-forever-real-time"] @@ -12,7 +12,7 @@ services: frontend: container_name: frontend - image: openmina/frontend:0.13.0 + image: openmina/frontend:0.14.0 environment: OPENMINA_FRONTEND_ENVIRONMENT: block-producers ports: diff --git a/docker/producer-dashboard/Dockerfile b/docker/producer-dashboard/Dockerfile index c6ae4a58f0..0db06f8521 100644 --- a/docker/producer-dashboard/Dockerfile +++ b/docker/producer-dashboard/Dockerfile @@ -1,4 +1,4 @@ -FROM rust:1.83 AS app-builder +FROM rust:1.84 AS app-builder WORKDIR /usr/src/openmina-producer-dashboard @@ -6,7 +6,7 @@ COPY ../ . RUN cd producer-dashboard && SQLX_OFFLINE=true cargo install --path . -FROM ubuntu:latest AS mina-builder +FROM ubuntu:noble AS mina-builder RUN apt-get update && apt-get install -y openssl ca-certificates @@ -25,7 +25,6 @@ RUN apt-get update && \ liblmdb-dev \ liblmdb0 \ libpq-dev \ - libprocps-dev \ libsodium-dev \ libssl-dev \ build-essential \ @@ -47,21 +46,24 @@ RUN git clone https://github.com/MinaProtocol/mina.git ENV DUNE_PROFILE=devnet WORKDIR /go/mina -RUN git checkout 3.0.0devnet && \ +COPY ../docker/producer-dashboard/output_binprot_breadcrumbs.patch . 
+RUN git checkout 3.0.1 && \ git submodule update --init --recursive && \ git config --local --add submodule.recurse true +RUN git apply ./output_binprot_breadcrumbs.patch + # RUN make libp2p_helper RUN curl -s -L https://github.com/ocaml/opam/releases/download/2.1.2/opam-2.1.2-x86_64-linux -o /usr/local/bin/opam && chmod +x /usr/local/bin/opam RUN apt-get -y --no-install-recommends install m4 pkg-config -RUN opam init --disable-sandboxing \ - && opam switch create . \ - && eval $(opam config env) \ - && opam switch import -y opam.export \ - && ./scripts/pin-external-packages.sh +RUN opam init --disable-sandboxing +RUN opam switch create . +RUN eval $(opam config env) +RUN opam switch import -y opam.export +RUN ./scripts/pin-external-packages.sh RUN curl -L https://go.dev/dl/go1.19.linux-amd64.tar.gz -o go1.19.tar.gz \ && tar -C /usr/local -xzf go1.19.tar.gz \ @@ -74,7 +76,7 @@ RUN apt-get -y --no-install-recommends install zlib1g-dev RUN eval $(opam config env) && make build_all_sigs # RUN /bin/bash -c "source ~/.cargo/env && eval $(opam config env) && make build_all_sigs" -FROM ubuntu:latest +FROM ubuntu:noble RUN apt-get update && apt-get install -y libpq5 libjemalloc2 diff --git a/docker/producer-dashboard/output_binprot_breadcrumbs.patch b/docker/producer-dashboard/output_binprot_breadcrumbs.patch new file mode 100644 index 0000000000..0feee7619f --- /dev/null +++ b/docker/producer-dashboard/output_binprot_breadcrumbs.patch @@ -0,0 +1,120 @@ +diff --git a/src/app/archive/cli/archive_cli.ml b/src/app/archive/cli/archive_cli.ml +index 0868f63e37..7115e72668 100644 +--- a/src/app/archive/cli/archive_cli.ml ++++ b/src/app/archive/cli/archive_cli.ml +@@ -1,6 +1,7 @@ + open Core + open Async + open Cli_lib ++open Pipe_lib + + let command_run = + let open Command.Let_syntax in +@@ -37,6 +38,11 @@ let command_run = + ~doc: + "int Delete blocks that are more than n blocks lower than the \ + maximum seen block." 
++ and output_dir = ++ flag "--output-dir" ~aliases:[ "-output-dir" ] (optional string) ++ ~doc: ++ "PATH to the output directory for storing breadcrumbs in binprot \ ++ format" + in + let runtime_config_opt = + Option.map runtime_config_file ~f:(fun file -> +@@ -51,7 +57,8 @@ let command_run = + ~postgres_address:postgres.value + ~server_port: + (Option.value server_port.value ~default:server_port.default) +- ~delete_older_than ~runtime_config_opt ~missing_blocks_width ) ++ ~delete_older_than ~runtime_config_opt ~missing_blocks_width ++ ~output_dir ) + + let time_arg = + (* Same timezone as Genesis_constants.genesis_state_timestamp. *) +diff --git a/src/app/archive/lib/processor.ml b/src/app/archive/lib/processor.ml +index 2ae3823b25..5b2d1ac2a7 100644 +--- a/src/app/archive/lib/processor.ml ++++ b/src/app/archive/lib/processor.ml +@@ -4695,14 +4695,48 @@ let add_block_aux_extensional ~logger ?retries ~pool ~delete_older_than block = + ~accounts_created:block.Extensional.Block.accounts_created + ~tokens_used:block.Extensional.Block.tokens_used block + ++let write_binprot_to_file (diff : Diff.Transition_frontier.t) (filename : string) ++ = ++ let bin_writer = Diff.Transition_frontier.bin_writer_t in ++ let buffer = Bin_prot.Common.create_buf (10 * 1024 * 1024) in ++ let len = bin_writer.write buffer ~pos:0 diff in ++ let data = Bigstring.to_string ~pos:0 ~len buffer in ++ Out_channel.with_file filename ~f:(fun out_channel -> ++ Out_channel.output_string out_channel data ) ++ + (* receive blocks from a daemon, write them to the database *) +-let run pool reader ~constraint_constants ~logger ~delete_older_than : +- unit Deferred.t = ++let run pool reader ~constraint_constants ~logger ~delete_older_than ~output_dir ++ : unit Deferred.t = + Strict_pipe.Reader.iter reader ~f:(function + | Diff.Transition_frontier + (Breadcrumb_added +- { block; accounts_accessed; accounts_created; tokens_used; _ } ) -> ( ++ ( { block; accounts_accessed; accounts_created; tokens_used; _ 
} as ++ breadcrumb ) ) -> ( + let add_block = Block.add_if_doesn't_exist ~constraint_constants in ++ (* here *) ++ let state_hash = (With_hash.hash block).state_hash in ++ let%bind () = ++ match output_dir with ++ | None -> ++ Deferred.unit ++ | Some dir -> ++ let filename = ++ Filename.concat dir ++ (State_hash.to_base58_check state_hash ^ ".bin") ++ in ++ (* Create output directory if it doesn't exist *) ++ let%bind () = ++ match%bind Async.Sys.file_exists dir with ++ | `Yes -> ++ Deferred.unit ++ | `No -> ++ Async.Unix.mkdir ~p:() dir ++ | `Unknown -> ++ Deferred.unit ++ in ++ write_binprot_to_file (Breadcrumb_added breadcrumb) filename ; ++ Deferred.unit ++ in + let hash = State_hash.With_state_hashes.state_hash in + match%bind + add_block_aux ~logger ~pool ~delete_older_than ~hash ~add_block +@@ -4871,7 +4905,7 @@ let create_metrics_server ~logger ~metrics_server_port ~missing_blocks_width + (* for running the archive process *) + let setup_server ~metrics_server_port ~constraint_constants ~logger + ~postgres_address ~server_port ~delete_older_than ~runtime_config_opt +- ~missing_blocks_width = ++ ~missing_blocks_width ~output_dir = + let where_to_listen = + Async.Tcp.Where_to_listen.bind_to All_addresses (On_port server_port) + in +@@ -4904,6 +4938,7 @@ let setup_server ~metrics_server_port ~constraint_constants ~logger + ~metadata:[ ("commit", `String Mina_version.commit_id) ] ; + let%bind () = add_genesis_accounts pool ~logger ~runtime_config_opt in + run ~constraint_constants pool reader ~logger ~delete_older_than ++ ~output_dir + |> don't_wait_for ; + Strict_pipe.Reader.iter precomputed_block_reader + ~f:(fun precomputed_block -> +diff --git a/src/app/archive/lib/test.ml b/src/app/archive/lib/test.ml +index 7795a5817f..023f916def 100644 +--- a/src/app/archive/lib/test.ml ++++ b/src/app/archive/lib/test.ml +@@ -307,7 +307,7 @@ let%test_module "Archive node unit tests" = + let%bind () = + Processor.run + 
~constraint_constants:precomputed_values.constraint_constants pool +- reader ~logger ~delete_older_than:None ++ reader ~logger ~delete_older_than:None ~output_dir:None + in + match%map + Mina_caqti.deferred_result_list_fold breadcrumbs ~init:() diff --git a/docs/archive-node-guide.md b/docs/archive-node-guide.md new file mode 100644 index 0000000000..35f60820f0 --- /dev/null +++ b/docs/archive-node-guide.md @@ -0,0 +1,17 @@ +# Run Archive Node on Devnet + +This guide is intended for setting up archive nodes on **Mina Devnet** only. Do not use this guide for Mina Mainnet. + +## Prerequisites + +Ensure Docker and Docker Compose are installed on your system - [Docker Installation Guide](./docker-installation.md) + +## Docker compose setup + +The compose file sets up a PG database, the archiver process and the openmina node. The archiver process is responsible for storing the blocks in the database by receiving the blocks from the openmina node. We enable archive mode in openmina by setting the `--archive-address` flag to the address of the archiver process. See [docker-compose.archive.devnet.yml](../docker-compose.archive.devnet.yml) for more details. 
+ +## Starting the setup + +```bash +docker compose -f docker-compose.archive.devnet.yml up -d +``` diff --git a/docs/building-from-source-guide.md b/docs/building-from-source-guide.md index dd749d8417..231e77e8ec 100644 --- a/docs/building-from-source-guide.md +++ b/docs/building-from-source-guide.md @@ -39,8 +39,8 @@ Open up the command line and enter the following: And then: ```sh -# Install rustup and set the default Rust toolchain to 1.80 (newer versions work too) -curl https://sh.rustup.rs -sSf | sh -s -- -y --default-toolchain 1.80 +# Install rustup and set the default Rust toolchain to 1.84 (newer versions work too) +curl https://sh.rustup.rs -sSf | sh -s -- -y --default-toolchain 1.84 # Setup the current shell with rustup source "$HOME/.cargo/env" # Clone the openmina repository diff --git a/docs/testing/README.md b/docs/testing/README.md index 3fe8f6d31b..f1af329259 100644 --- a/docs/testing/README.md +++ b/docs/testing/README.md @@ -103,8 +103,8 @@ Test that node discovers peers another rust node and is able to bootstrap Tests related to pubsub layer. 
-* `P2pReceiveBlock` -Test that node receives block over meshsub from node +* `P2pReceiveMessage` +Test that node receives message over meshsub from node ### [P2P Incoming](../../node/testing/tests/p2p_basic_incoming.rs) diff --git a/frontend/.firebaserc b/frontend/.firebaserc new file mode 100644 index 0000000000..e69de29bb2 diff --git a/frontend/angular.json b/frontend/angular.json index 172d639578..a9fea5b127 100644 --- a/frontend/angular.json +++ b/frontend/angular.json @@ -48,7 +48,8 @@ ], "sourceMap": true, "styles": [ - "src/styles.scss" + "src/styles.scss", + "node_modules/aos/dist/aos.css" ], "stylePreprocessorOptions": { "includePaths": [ @@ -58,7 +59,9 @@ "src/assets/images" ] }, - "scripts": [] + "scripts": [ + "node_modules/aos/dist/aos.js" + ] }, "configurations": { "production": { diff --git a/frontend/firebase.json b/frontend/firebase.json index 314966c493..7776496dc4 100644 --- a/frontend/firebase.json +++ b/frontend/firebase.json @@ -54,5 +54,29 @@ ] } ] - } + }, + "emulators": { + "functions": { + "port": 5000 + }, + "firestore": { + "port": 8080 + }, + "ui": { + "enabled": true + } + }, + "functions": [ + { + "source": "functions", + "codebase": "default", + "ignore": [ + "node_modules", + ".git", + "firebase-debug.log", + "firebase-debug.*.log", + "*.local" + ] + } + ] } diff --git a/frontend/functions/.gitignore b/frontend/functions/.gitignore new file mode 100644 index 0000000000..5d00677939 --- /dev/null +++ b/frontend/functions/.gitignore @@ -0,0 +1,4 @@ +node_modules/ +*.local +coverage/ +lib/ diff --git a/frontend/functions/__tests__/signature.test.js b/frontend/functions/__tests__/signature.test.js new file mode 100644 index 0000000000..9099b8cef2 --- /dev/null +++ b/frontend/functions/__tests__/signature.test.js @@ -0,0 +1,51 @@ +const { validateSignature } = require('../src/index'); + +describe('validateSignature', () => { + let signedHeartbeat; + + beforeAll(() => { + signedHeartbeat = JSON.parse(`{ + "version": 1, + "payload": 
"eyJzdGF0dXMiOnsiY2hhaW5faWQiOm51bGwsInRyYW5zaXRpb25fZnJvbnRpZXIiOnsiYmVzdF90aXAiOm51bGwsInN5bmMiOnsidGltZSI6bnVsbCwic3RhdHVzIjoiU3luY2VkIiwicGhhc2UiOiJSdW5uaW5nIiwidGFyZ2V0IjpudWxsfX0sInBlZXJzIjpbXSwic25hcmtfcG9vbCI6eyJ0b3RhbF9qb2JzIjowLCJzbmFya3MiOjB9LCJ0cmFuc2FjdGlvbl9wb29sIjp7InRyYW5zYWN0aW9ucyI6MCwidHJhbnNhY3Rpb25zX2Zvcl9wcm9wYWdhdGlvbiI6MCwidHJhbnNhY3Rpb25fY2FuZGlkYXRlcyI6MH0sImN1cnJlbnRfYmxvY2tfcHJvZHVjdGlvbl9hdHRlbXB0IjpudWxsfSwibm9kZV90aW1lc3RhbXAiOjAsInBlZXJfaWQiOiIyYkVnQnJQVHpMOHdvdjJENEt6MzRXVkxDeFI0dUNhcnNCbUhZWFdLUUE1d3ZCUXpkOUgiLCJsYXN0X3Byb2R1Y2VkX2Jsb2NrIjpudWxsfQ==", + "submitter": "B62qnLjgW4LAnrxkcdLc7Snb49qx6aP5qsmPsp6ueZN4XPMC621cqGc", + "signature": { + "field": "25500978175045040705256298774101531557080530394536110798266178142513301557846", + "scalar": "27991123709623419396663280967637181749724990269901703962618583375785482061803" + } + }`); + }); + + test('should validate correct signature', () => { + const result = validateSignature( + signedHeartbeat.payload, + signedHeartbeat.signature, + signedHeartbeat.submitter + ); + expect(result).toBe(true); + }); + + test('should reject invalid signature length', () => { + const result = validateSignature( + signedHeartbeat.payload, + 'invalid-signature', + signedHeartbeat.submitter + ); + expect(result).toBe(false); + }); + + test('should reject tampered data', () => { + const tamperedPayload = signedHeartbeat.payload + 'tampered'; + const result = validateSignature( + tamperedPayload, + signedHeartbeat.signature, + signedHeartbeat.submitter + ); + expect(result).toBe(false); + }); + + test('should handle null values', () => { + expect(validateSignature(null, null, null)).toBe(false); + expect(validateSignature(signedHeartbeat.payload, null, signedHeartbeat.submitter)).toBe(false); + expect(validateSignature(signedHeartbeat.payload, signedHeartbeat.signature, null)).toBe(false); + }); +}); diff --git a/frontend/functions/build.js b/frontend/functions/build.js new file mode 100644 index 
0000000000..485733fd6b --- /dev/null +++ b/frontend/functions/build.js @@ -0,0 +1,25 @@ +const fs = require('fs'); +const path = require('path'); + +const keysFilePath = path.resolve(__dirname, 'allowed_keys.txt'); +let keys = []; + +if (fs.existsSync(keysFilePath)) { + keys = fs.readFileSync(keysFilePath, 'utf-8') + .split('\n') + .map(key => key.trim()) + .filter(key => key.length > 0); + + const validatorFilePath = path.resolve(__dirname, 'functions/submitterValidator.ts'); + let validatorFileContent = fs.readFileSync(validatorFilePath, 'utf-8'); + + const keysSetString = keys.map(key => `'${key}'`).join(',\n '); + validatorFileContent = validatorFileContent.replace( + '// ALLOWED_PUBLIC_KEYS_PLACEHOLDER', + keysSetString, + ); + + fs.writeFileSync(validatorFilePath, validatorFileContent); +} else { + console.warn('allowed_keys.txt not found. All submitters will be allowed.'); +} diff --git a/frontend/functions/jest.config.js b/frontend/functions/jest.config.js new file mode 100644 index 0000000000..7d5f854ffb --- /dev/null +++ b/frontend/functions/jest.config.js @@ -0,0 +1,8 @@ +module.exports = { + preset: "ts-jest", + testEnvironment: 'node', + testMatch: ['**/__tests__/**/*.test.js'], + collectCoverage: true, + coverageReporters: ['text', 'lcov'], + coverageDirectory: 'coverage' +}; diff --git a/frontend/functions/package-lock.json b/frontend/functions/package-lock.json new file mode 100644 index 0000000000..63bbe4fb94 --- /dev/null +++ b/frontend/functions/package-lock.json @@ -0,0 +1,6374 @@ +{ + "name": "functions", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "functions", + "dependencies": { + "blake2": "^5.0.0", + "bs58check": "^3.0.1", + "firebase-admin": "^12.1.0", + "firebase-functions": "^6.2.0", + "mina-signer": "^3.0.7" + }, + "devDependencies": { + "@types/blake2": "^4.0.1", + "@types/bs58check": "^2.1.0", + "@types/jest": "^29.5.14", + "firebase-functions-test": "^3.1.0", + "jest": "^29.0.0", + "ts-jest": 
"^29.2.5", + "typescript": "^4.9.5" + }, + "engines": { + "node": "22" + } + }, + "node_modules/@ampproject/remapping": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.3.0.tgz", + "integrity": "sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@jridgewell/gen-mapping": "^0.3.5", + "@jridgewell/trace-mapping": "^0.3.24" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@babel/code-frame": { + "version": "7.26.2", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.26.2.tgz", + "integrity": "sha512-RJlIHRueQgwWitWgF8OdFYGZX328Ax5BCemNGlqHfplnRT9ESi8JkFlvaVYbS+UubVY6dpv87Fs2u5M29iNFVQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-validator-identifier": "^7.25.9", + "js-tokens": "^4.0.0", + "picocolors": "^1.0.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/compat-data": { + "version": "7.26.3", + "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.26.3.tgz", + "integrity": "sha512-nHIxvKPniQXpmQLb0vhY3VaFb3S0YrTAwpOWJZh1wn3oJPjJk9Asva204PsBdmAE8vpzfHudT8DB0scYvy9q0g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/core": { + "version": "7.26.0", + "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.26.0.tgz", + "integrity": "sha512-i1SLeK+DzNnQ3LL/CswPCa/E5u4lh1k6IAEphON8F+cXt0t9euTshDru0q7/IqMa1PMPz5RnHuHscF8/ZJsStg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@ampproject/remapping": "^2.2.0", + "@babel/code-frame": "^7.26.0", + "@babel/generator": "^7.26.0", + "@babel/helper-compilation-targets": "^7.25.9", + "@babel/helper-module-transforms": "^7.26.0", + "@babel/helpers": "^7.26.0", + "@babel/parser": "^7.26.0", + "@babel/template": "^7.25.9", + "@babel/traverse": "^7.25.9", + "@babel/types": "^7.26.0", 
+ "convert-source-map": "^2.0.0", + "debug": "^4.1.0", + "gensync": "^1.0.0-beta.2", + "json5": "^2.2.3", + "semver": "^6.3.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/babel" + } + }, + "node_modules/@babel/core/node_modules/debug": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.0.tgz", + "integrity": "sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==", + "dev": true, + "license": "MIT", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/@babel/core/node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@babel/generator": { + "version": "7.26.3", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.26.3.tgz", + "integrity": "sha512-6FF/urZvD0sTeO7k6/B15pMLC4CHUv1426lzr3N01aHJTl046uCAh9LXW/fzeXXjPNCJ6iABW5XaWOsIZB93aQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.26.3", + "@babel/types": "^7.26.3", + "@jridgewell/gen-mapping": "^0.3.5", + "@jridgewell/trace-mapping": "^0.3.25", + "jsesc": "^3.0.2" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-compilation-targets": { + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.25.9.tgz", + "integrity": "sha512-j9Db8Suy6yV/VHa4qzrj9yZfZxhLWQdVnRlXxmKLYlhWUVB1sB2G5sxuWYXk/whHD9iW76PmNzxZ4UCnTQTVEQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/compat-data": "^7.25.9", + "@babel/helper-validator-option": "^7.25.9", + "browserslist": 
"^4.24.0", + "lru-cache": "^5.1.1", + "semver": "^6.3.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-module-imports": { + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.25.9.tgz", + "integrity": "sha512-tnUA4RsrmflIM6W6RFTLFSXITtl0wKjgpnLgXyowocVPrbYrLUXSBXDgTs8BlbmIzIdlBySRQjINYs2BAkiLtw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/traverse": "^7.25.9", + "@babel/types": "^7.25.9" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-module-transforms": { + "version": "7.26.0", + "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.26.0.tgz", + "integrity": "sha512-xO+xu6B5K2czEnQye6BHA7DolFFmS3LB7stHZFaOLb1pAwO1HWLS8fXA+eh0A2yIvltPVmx3eNNDBJA2SLHXFw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-module-imports": "^7.25.9", + "@babel/helper-validator-identifier": "^7.25.9", + "@babel/traverse": "^7.25.9" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/helper-plugin-utils": { + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.25.9.tgz", + "integrity": "sha512-kSMlyUVdWe25rEsRGviIgOWnoT/nfABVWlqt9N19/dIPWViAOW2s9wznP5tURbs/IDuNk4gPy3YdYRgH3uxhBw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-string-parser": { + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.25.9.tgz", + "integrity": "sha512-4A/SCr/2KLd5jrtOMFzaKjVtAei3+2r/NChoBNoZ3EyP/+GlhoaEGoWOZUmFmoITP7zOJyHIMm+DYRd8o3PvHA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-identifier": { + "version": "7.25.9", + "resolved": 
"https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.25.9.tgz", + "integrity": "sha512-Ed61U6XJc3CVRfkERJWDz4dJwKe7iLmmJsbOGu9wSloNSFttHV0I8g6UAgb7qnK5ly5bGLPd4oXZlxCdANBOWQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-option": { + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.25.9.tgz", + "integrity": "sha512-e/zv1co8pp55dNdEcCynfj9X7nyUKUXoUEwfXqaZt0omVOmDe9oOTdKStH4GmAw6zxMFs50ZayuMfHDKlO7Tfw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helpers": { + "version": "7.26.0", + "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.26.0.tgz", + "integrity": "sha512-tbhNuIxNcVb21pInl3ZSjksLCvgdZy9KwJ8brv993QtIVKJBBkYXz4q4ZbAv31GdnC+R90np23L5FbEBlthAEw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/template": "^7.25.9", + "@babel/types": "^7.26.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/parser": { + "version": "7.26.3", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.26.3.tgz", + "integrity": "sha512-WJ/CvmY8Mea8iDXo6a7RK2wbmJITT5fN3BEkRuFlxVyNx8jOKIIhmC4fSkTcPcf8JyavbBwIe6OpiCOBXt/IcA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.26.3" + }, + "bin": { + "parser": "bin/babel-parser.js" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@babel/plugin-syntax-async-generators": { + "version": "7.8.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.8.4.tgz", + "integrity": "sha512-tycmZxkGfZaxhMRbXlPXuVFpdWlXpir2W4AMhSJgRKzk/eDlIXOhb2LHWoLpDF7TEHylV5zNhykX6KAgHJmTNw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + 
} + }, + "node_modules/@babel/plugin-syntax-bigint": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-bigint/-/plugin-syntax-bigint-7.8.3.tgz", + "integrity": "sha512-wnTnFlG+YxQm3vDxpGE57Pj0srRU4sHE/mDkt1qv2YJJSeUAec2ma4WLUnUPeKjyrfntVwe/N6dCXpU+zL3Npg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-class-properties": { + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-class-properties/-/plugin-syntax-class-properties-7.12.13.tgz", + "integrity": "sha512-fm4idjKla0YahUNgFNLCB0qySdsoPiZP3iQE3rky0mBUtMZ23yDJ9SJdg6dXTSDnulOVqiF3Hgr9nbXvXTQZYA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.12.13" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-class-static-block": { + "version": "7.14.5", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-class-static-block/-/plugin-syntax-class-static-block-7.14.5.tgz", + "integrity": "sha512-b+YyPmr6ldyNnM6sqYeMWE+bgJcJpO6yS4QD7ymxgH34GBPNDM/THBh8iunyvKIZztiwLH4CJZ0RxTk9emgpjw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.14.5" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-import-attributes": { + "version": "7.26.0", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-import-attributes/-/plugin-syntax-import-attributes-7.26.0.tgz", + "integrity": "sha512-e2dttdsJ1ZTpi3B9UYGLw41hifAubg19AtCu/2I/F1QNVclOBr1dYpTdmdyZ84Xiz43BS/tCUkMAZNLv12Pi+A==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.25.9" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + 
}, + "node_modules/@babel/plugin-syntax-import-meta": { + "version": "7.10.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-import-meta/-/plugin-syntax-import-meta-7.10.4.tgz", + "integrity": "sha512-Yqfm+XDx0+Prh3VSeEQCPU81yC+JWZ2pDPFSS4ZdpfZhp4MkFMaDC1UqseovEKwSUpnIL7+vK+Clp7bfh0iD7g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.10.4" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-json-strings": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-json-strings/-/plugin-syntax-json-strings-7.8.3.tgz", + "integrity": "sha512-lY6kdGpWHvjoe2vk4WrAapEuBR69EMxZl+RoGRhrFGNYVK8mOPAW8VfbT/ZgrFbXlDNiiaxQnAtgVCZ6jv30EA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-jsx": { + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.25.9.tgz", + "integrity": "sha512-ld6oezHQMZsZfp6pWtbjaNDF2tiiCYYDqQszHt5VV437lewP9aSi2Of99CK0D0XB21k7FLgnLcmQKyKzynfeAA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.25.9" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-logical-assignment-operators": { + "version": "7.10.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-logical-assignment-operators/-/plugin-syntax-logical-assignment-operators-7.10.4.tgz", + "integrity": "sha512-d8waShlpFDinQ5MtvGU9xDAOzKH47+FFoney2baFIoMr952hKOLp1HR7VszoZvOsV/4+RRszNY7D17ba0te0ig==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.10.4" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + 
"node_modules/@babel/plugin-syntax-nullish-coalescing-operator": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-nullish-coalescing-operator/-/plugin-syntax-nullish-coalescing-operator-7.8.3.tgz", + "integrity": "sha512-aSff4zPII1u2QD7y+F8oDsz19ew4IGEJg9SVW+bqwpwtfFleiQDMdzA/R+UlWDzfnHFCxxleFT0PMIrR36XLNQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-numeric-separator": { + "version": "7.10.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-numeric-separator/-/plugin-syntax-numeric-separator-7.10.4.tgz", + "integrity": "sha512-9H6YdfkcK/uOnY/K7/aA2xpzaAgkQn37yzWUMRK7OaPOqOpGS1+n0H5hxT9AUw9EsSjPW8SVyMJwYRtWs3X3ug==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.10.4" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-object-rest-spread": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.8.3.tgz", + "integrity": "sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-optional-catch-binding": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.8.3.tgz", + "integrity": "sha512-6VPD0Pc1lpTqw0aKoeRTMiB+kWhAoT24PA+ksWSBrFtl5SIRVpZlwN3NNPQjehA2E/91FV3RjLWoVTglWcSV3Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + 
"node_modules/@babel/plugin-syntax-optional-chaining": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-optional-chaining/-/plugin-syntax-optional-chaining-7.8.3.tgz", + "integrity": "sha512-KoK9ErH1MBlCPxV0VANkXW2/dw4vlbGDrFgz8bmUsBGYkFRcbRwMh6cIJubdPrkxRwuGdtCk0v/wPTKbQgBjkg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-private-property-in-object": { + "version": "7.14.5", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-private-property-in-object/-/plugin-syntax-private-property-in-object-7.14.5.tgz", + "integrity": "sha512-0wVnp9dxJ72ZUJDV27ZfbSj6iHLoytYZmh3rFcxNnvsJF3ktkzLDZPy/mA17HGsaQT3/DQsWYX1f1QGWkCoVUg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.14.5" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-top-level-await": { + "version": "7.14.5", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.14.5.tgz", + "integrity": "sha512-hx++upLv5U1rgYfwe1xBQUhRmU41NEvpUvrp8jkrSCdvGSnM5/qdRMtylJ6PG5OFkBaHkbTAKTnd3/YyESRHFw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.14.5" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-typescript": { + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.25.9.tgz", + "integrity": "sha512-hjMgRy5hb8uJJjUcdWunWVcoi9bGpJp8p5Ol1229PoN6aytsLwNMgmdftO23wnCLMfVmTwZDWMPNq/D1SY60JQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.25.9" + }, + "engines": { + "node": ">=6.9.0" + 
}, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/template": { + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.25.9.tgz", + "integrity": "sha512-9DGttpmPvIxBb/2uwpVo3dqJ+O6RooAFOS+lB+xDqoE2PVCE8nfoHMdZLpfCQRLwvohzXISPZcgxt80xLfsuwg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.25.9", + "@babel/parser": "^7.25.9", + "@babel/types": "^7.25.9" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/traverse": { + "version": "7.26.4", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.26.4.tgz", + "integrity": "sha512-fH+b7Y4p3yqvApJALCPJcwb0/XaOSgtK4pzV6WVjPR5GLFQBRI7pfoX2V2iM48NXvX07NUxxm1Vw98YjqTcU5w==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.26.2", + "@babel/generator": "^7.26.3", + "@babel/parser": "^7.26.3", + "@babel/template": "^7.25.9", + "@babel/types": "^7.26.3", + "debug": "^4.3.1", + "globals": "^11.1.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/traverse/node_modules/debug": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.0.tgz", + "integrity": "sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==", + "dev": true, + "license": "MIT", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/@babel/traverse/node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@babel/types": { + "version": "7.26.3", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.26.3.tgz", + "integrity": 
"sha512-vN5p+1kl59GVKMvTHt55NzzmYVxprfJD+ql7U9NFIfKCBkYE55LYtS+WtPlaYOyzydrKI8Nezd+aZextrd+FMA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-string-parser": "^7.25.9", + "@babel/helper-validator-identifier": "^7.25.9" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@bcoe/v8-coverage": { + "version": "0.2.3", + "resolved": "https://registry.npmjs.org/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz", + "integrity": "sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==", + "dev": true, + "license": "MIT" + }, + "node_modules/@fastify/busboy": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/@fastify/busboy/-/busboy-3.1.1.tgz", + "integrity": "sha512-5DGmA8FTdB2XbDeEwc/5ZXBl6UbBAyBOOLlPuBnZ/N1SwdH9Ii+cOX3tBROlDgcTXxjOYnLMVoKk9+FXAw0CJw==", + "license": "MIT" + }, + "node_modules/@firebase/app-check-interop-types": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/@firebase/app-check-interop-types/-/app-check-interop-types-0.3.2.tgz", + "integrity": "sha512-LMs47Vinv2HBMZi49C09dJxp0QT5LwDzFaVGf/+ITHe3BlIhUiLNttkATSXplc89A2lAaeTqjgqVkiRfUGyQiQ==", + "license": "Apache-2.0" + }, + "node_modules/@firebase/app-types": { + "version": "0.9.2", + "resolved": "https://registry.npmjs.org/@firebase/app-types/-/app-types-0.9.2.tgz", + "integrity": "sha512-oMEZ1TDlBz479lmABwWsWjzHwheQKiAgnuKxE0pz0IXCVx7/rtlkx1fQ6GfgK24WCrxDKMplZrT50Kh04iMbXQ==", + "license": "Apache-2.0" + }, + "node_modules/@firebase/auth-interop-types": { + "version": "0.2.3", + "resolved": "https://registry.npmjs.org/@firebase/auth-interop-types/-/auth-interop-types-0.2.3.tgz", + "integrity": "sha512-Fc9wuJGgxoxQeavybiuwgyi+0rssr76b+nHpj+eGhXFYAdudMWyfBHvFL/I5fEHniUM/UQdFzi9VXJK2iZF7FQ==", + "license": "Apache-2.0" + }, + "node_modules/@firebase/component": { + "version": "0.6.9", + "resolved": "https://registry.npmjs.org/@firebase/component/-/component-0.6.9.tgz", + "integrity": 
"sha512-gm8EUEJE/fEac86AvHn8Z/QW8BvR56TBw3hMW0O838J/1mThYQXAIQBgUv75EqlCZfdawpWLrKt1uXvp9ciK3Q==", + "license": "Apache-2.0", + "dependencies": { + "@firebase/util": "1.10.0", + "tslib": "^2.1.0" + } + }, + "node_modules/@firebase/database": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/@firebase/database/-/database-1.0.8.tgz", + "integrity": "sha512-dzXALZeBI1U5TXt6619cv0+tgEhJiwlUtQ55WNZY7vGAjv7Q1QioV969iYwt1AQQ0ovHnEW0YW9TiBfefLvErg==", + "license": "Apache-2.0", + "dependencies": { + "@firebase/app-check-interop-types": "0.3.2", + "@firebase/auth-interop-types": "0.2.3", + "@firebase/component": "0.6.9", + "@firebase/logger": "0.4.2", + "@firebase/util": "1.10.0", + "faye-websocket": "0.11.4", + "tslib": "^2.1.0" + } + }, + "node_modules/@firebase/database-compat": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/@firebase/database-compat/-/database-compat-1.0.8.tgz", + "integrity": "sha512-OpeWZoPE3sGIRPBKYnW9wLad25RaWbGyk7fFQe4xnJQKRzlynWeFBSRRAoLE2Old01WXwskUiucNqUUVlFsceg==", + "license": "Apache-2.0", + "dependencies": { + "@firebase/component": "0.6.9", + "@firebase/database": "1.0.8", + "@firebase/database-types": "1.0.5", + "@firebase/logger": "0.4.2", + "@firebase/util": "1.10.0", + "tslib": "^2.1.0" + } + }, + "node_modules/@firebase/database-types": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/@firebase/database-types/-/database-types-1.0.5.tgz", + "integrity": "sha512-fTlqCNwFYyq/C6W7AJ5OCuq5CeZuBEsEwptnVxlNPkWCo5cTTyukzAHRSO/jaQcItz33FfYrrFk1SJofcu2AaQ==", + "license": "Apache-2.0", + "dependencies": { + "@firebase/app-types": "0.9.2", + "@firebase/util": "1.10.0" + } + }, + "node_modules/@firebase/logger": { + "version": "0.4.2", + "resolved": "https://registry.npmjs.org/@firebase/logger/-/logger-0.4.2.tgz", + "integrity": "sha512-Q1VuA5M1Gjqrwom6I6NUU4lQXdo9IAQieXlujeHZWvRt1b7qQ0KwBaNAjgxG27jgF9/mUwsNmO8ptBCGVYhB0A==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.1.0" + } 
+ }, + "node_modules/@firebase/util": { + "version": "1.10.0", + "resolved": "https://registry.npmjs.org/@firebase/util/-/util-1.10.0.tgz", + "integrity": "sha512-xKtx4A668icQqoANRxyDLBLz51TAbDP9KRfpbKGxiCAW346d0BeJe5vN6/hKxxmWwnZ0mautyv39JxviwwQMOQ==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.1.0" + } + }, + "node_modules/@google-cloud/firestore": { + "version": "7.11.0", + "resolved": "https://registry.npmjs.org/@google-cloud/firestore/-/firestore-7.11.0.tgz", + "integrity": "sha512-88uZ+jLsp1aVMj7gh3EKYH1aulTAMFAp8sH/v5a9w8q8iqSG27RiWLoxSAFr/XocZ9hGiWH1kEnBw+zl3xAgNA==", + "license": "Apache-2.0", + "optional": true, + "dependencies": { + "@opentelemetry/api": "^1.3.0", + "fast-deep-equal": "^3.1.1", + "functional-red-black-tree": "^1.0.1", + "google-gax": "^4.3.3", + "protobufjs": "^7.2.6" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@google-cloud/paginator": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/@google-cloud/paginator/-/paginator-5.0.2.tgz", + "integrity": "sha512-DJS3s0OVH4zFDB1PzjxAsHqJT6sKVbRwwML0ZBP9PbU7Yebtu/7SWMRzvO2J3nUi9pRNITCfu4LJeooM2w4pjg==", + "license": "Apache-2.0", + "optional": true, + "dependencies": { + "arrify": "^2.0.0", + "extend": "^3.0.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@google-cloud/projectify": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-4.0.0.tgz", + "integrity": "sha512-MmaX6HeSvyPbWGwFq7mXdo0uQZLGBYCwziiLIGq5JVX+/bdI3SAq6bP98trV5eTWfLuvsMcIC1YJOF2vfteLFA==", + "license": "Apache-2.0", + "optional": true, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@google-cloud/promisify": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-4.0.0.tgz", + "integrity": "sha512-Orxzlfb9c67A15cq2JQEyVc7wEsmFBmHjZWZYQMUyJ1qivXyMwdyNOs9odi79hze+2zqdTtu1E19IM/FtqZ10g==", + "license": "Apache-2.0", + "optional": true, + 
"engines": { + "node": ">=14" + } + }, + "node_modules/@google-cloud/storage": { + "version": "7.14.0", + "resolved": "https://registry.npmjs.org/@google-cloud/storage/-/storage-7.14.0.tgz", + "integrity": "sha512-H41bPL2cMfSi4EEnFzKvg7XSb7T67ocSXrmF7MPjfgFB0L6CKGzfIYJheAZi1iqXjz6XaCT1OBf6HCG5vDBTOQ==", + "license": "Apache-2.0", + "optional": true, + "dependencies": { + "@google-cloud/paginator": "^5.0.0", + "@google-cloud/projectify": "^4.0.0", + "@google-cloud/promisify": "^4.0.0", + "abort-controller": "^3.0.0", + "async-retry": "^1.3.3", + "duplexify": "^4.1.3", + "fast-xml-parser": "^4.4.1", + "gaxios": "^6.0.2", + "google-auth-library": "^9.6.3", + "html-entities": "^2.5.2", + "mime": "^3.0.0", + "p-limit": "^3.0.1", + "retry-request": "^7.0.0", + "teeny-request": "^9.0.0", + "uuid": "^8.0.0" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/@google-cloud/storage/node_modules/uuid": { + "version": "8.3.2", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", + "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==", + "license": "MIT", + "optional": true, + "bin": { + "uuid": "dist/bin/uuid" + } + }, + "node_modules/@grpc/grpc-js": { + "version": "1.12.5", + "resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.12.5.tgz", + "integrity": "sha512-d3iiHxdpg5+ZcJ6jnDSOT8Z0O0VMVGy34jAnYLUX8yd36b1qn8f1TwOA/Lc7TsOh03IkPJ38eGI5qD2EjNkoEA==", + "license": "Apache-2.0", + "optional": true, + "dependencies": { + "@grpc/proto-loader": "^0.7.13", + "@js-sdsl/ordered-map": "^4.4.2" + }, + "engines": { + "node": ">=12.10.0" + } + }, + "node_modules/@grpc/proto-loader": { + "version": "0.7.13", + "resolved": "https://registry.npmjs.org/@grpc/proto-loader/-/proto-loader-0.7.13.tgz", + "integrity": "sha512-AiXO/bfe9bmxBjxxtYxFAXGZvMaN5s8kO+jBHAJCON8rJoB5YS/D6X7ZNc6XQkuHNmyl4CYaMI1fJ/Gn27RGGw==", + "license": "Apache-2.0", + "optional": true, + "dependencies": { + "lodash.camelcase": 
"^4.3.0", + "long": "^5.0.0", + "protobufjs": "^7.2.5", + "yargs": "^17.7.2" + }, + "bin": { + "proto-loader-gen-types": "build/bin/proto-loader-gen-types.js" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/@istanbuljs/load-nyc-config": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@istanbuljs/load-nyc-config/-/load-nyc-config-1.1.0.tgz", + "integrity": "sha512-VjeHSlIzpv/NyD3N0YuHfXOPDIixcA1q2ZV98wsMqcYlPmv2n3Yb2lYP9XMElnaFVXg5A7YLTeLu6V84uQDjmQ==", + "dev": true, + "license": "ISC", + "dependencies": { + "camelcase": "^5.3.1", + "find-up": "^4.1.0", + "get-package-type": "^0.1.0", + "js-yaml": "^3.13.1", + "resolve-from": "^5.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@istanbuljs/schema": { + "version": "0.1.3", + "resolved": "https://registry.npmjs.org/@istanbuljs/schema/-/schema-0.1.3.tgz", + "integrity": "sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/@jest/console": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/console/-/console-29.7.0.tgz", + "integrity": "sha512-5Ni4CU7XHQi32IJ398EEP4RrB8eV09sXP2ROqD4bksHrnTree52PsxvX8tpL8LvTZ3pFzXyPbNQReSN41CAhOg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "^29.6.3", + "@types/node": "*", + "chalk": "^4.0.0", + "jest-message-util": "^29.7.0", + "jest-util": "^29.7.0", + "slash": "^3.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/core": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/core/-/core-29.7.0.tgz", + "integrity": "sha512-n7aeXWKMnGtDA48y8TLWJPJmLmmZ642Ceo78cYWEpiD7FzDgmNDV/GCVRorPABdXLJZ/9wzzgZAlHjXjxDHGsg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/console": "^29.7.0", + "@jest/reporters": "^29.7.0", + "@jest/test-result": "^29.7.0", + "@jest/transform": 
"^29.7.0", + "@jest/types": "^29.6.3", + "@types/node": "*", + "ansi-escapes": "^4.2.1", + "chalk": "^4.0.0", + "ci-info": "^3.2.0", + "exit": "^0.1.2", + "graceful-fs": "^4.2.9", + "jest-changed-files": "^29.7.0", + "jest-config": "^29.7.0", + "jest-haste-map": "^29.7.0", + "jest-message-util": "^29.7.0", + "jest-regex-util": "^29.6.3", + "jest-resolve": "^29.7.0", + "jest-resolve-dependencies": "^29.7.0", + "jest-runner": "^29.7.0", + "jest-runtime": "^29.7.0", + "jest-snapshot": "^29.7.0", + "jest-util": "^29.7.0", + "jest-validate": "^29.7.0", + "jest-watcher": "^29.7.0", + "micromatch": "^4.0.4", + "pretty-format": "^29.7.0", + "slash": "^3.0.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "peerDependencies": { + "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" + }, + "peerDependenciesMeta": { + "node-notifier": { + "optional": true + } + } + }, + "node_modules/@jest/environment": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/environment/-/environment-29.7.0.tgz", + "integrity": "sha512-aQIfHDq33ExsN4jP1NWGXhxgQ/wixs60gDiKO+XVMd8Mn0NWPWgc34ZQDTb2jKaUWQ7MuwoitXAsN2XVXNMpAw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/fake-timers": "^29.7.0", + "@jest/types": "^29.6.3", + "@types/node": "*", + "jest-mock": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/expect": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/expect/-/expect-29.7.0.tgz", + "integrity": "sha512-8uMeAMycttpva3P1lBHB8VciS9V0XAr3GymPpipdyQXbBcuhkLQOSe8E/p92RyAdToS6ZD1tFkX+CkhoECE0dQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "expect": "^29.7.0", + "jest-snapshot": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/expect-utils": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/expect-utils/-/expect-utils-29.7.0.tgz", + 
"integrity": "sha512-GlsNBWiFQFCVi9QVSx7f5AgMeLxe9YCCs5PuP2O2LdjDAA8Jh9eX7lA1Jq/xdXw3Wb3hyvlFNfZIfcRetSzYcA==", + "dev": true, + "license": "MIT", + "dependencies": { + "jest-get-type": "^29.6.3" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/fake-timers": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/fake-timers/-/fake-timers-29.7.0.tgz", + "integrity": "sha512-q4DH1Ha4TTFPdxLsqDXK1d3+ioSL7yL5oCMJZgDYm6i+6CygW5E5xVr/D1HdsGxjt1ZWSfUAs9OxSB/BNelWrQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "^29.6.3", + "@sinonjs/fake-timers": "^10.0.2", + "@types/node": "*", + "jest-message-util": "^29.7.0", + "jest-mock": "^29.7.0", + "jest-util": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/globals": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/globals/-/globals-29.7.0.tgz", + "integrity": "sha512-mpiz3dutLbkW2MNFubUGUEVLkTGiqW6yLVTA+JbP6fI6J5iL9Y0Nlg8k95pcF8ctKwCS7WVxteBs29hhfAotzQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/environment": "^29.7.0", + "@jest/expect": "^29.7.0", + "@jest/types": "^29.6.3", + "jest-mock": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/reporters": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/reporters/-/reporters-29.7.0.tgz", + "integrity": "sha512-DApq0KJbJOEzAFYjHADNNxAE3KbhxQB1y5Kplb5Waqw6zVbuWatSnMjE5gs8FUgEPmNsnZA3NCWl9NG0ia04Pg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@bcoe/v8-coverage": "^0.2.3", + "@jest/console": "^29.7.0", + "@jest/test-result": "^29.7.0", + "@jest/transform": "^29.7.0", + "@jest/types": "^29.6.3", + "@jridgewell/trace-mapping": "^0.3.18", + "@types/node": "*", + "chalk": "^4.0.0", + "collect-v8-coverage": "^1.0.0", + "exit": "^0.1.2", + "glob": "^7.1.3", + "graceful-fs": "^4.2.9", + 
"istanbul-lib-coverage": "^3.0.0", + "istanbul-lib-instrument": "^6.0.0", + "istanbul-lib-report": "^3.0.0", + "istanbul-lib-source-maps": "^4.0.0", + "istanbul-reports": "^3.1.3", + "jest-message-util": "^29.7.0", + "jest-util": "^29.7.0", + "jest-worker": "^29.7.0", + "slash": "^3.0.0", + "string-length": "^4.0.1", + "strip-ansi": "^6.0.0", + "v8-to-istanbul": "^9.0.1" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "peerDependencies": { + "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" + }, + "peerDependenciesMeta": { + "node-notifier": { + "optional": true + } + } + }, + "node_modules/@jest/schemas": { + "version": "29.6.3", + "resolved": "https://registry.npmjs.org/@jest/schemas/-/schemas-29.6.3.tgz", + "integrity": "sha512-mo5j5X+jIZmJQveBKeS/clAueipV7KgiX1vMgCxam1RNYiqE1w62n0/tJJnHtjW8ZHcQco5gY85jA3mi0L+nSA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@sinclair/typebox": "^0.27.8" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/source-map": { + "version": "29.6.3", + "resolved": "https://registry.npmjs.org/@jest/source-map/-/source-map-29.6.3.tgz", + "integrity": "sha512-MHjT95QuipcPrpLM+8JMSzFx6eHp5Bm+4XeFDJlwsvVBjmKNiIAvasGK2fxz2WbGRlnvqehFbh07MMa7n3YJnw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/trace-mapping": "^0.3.18", + "callsites": "^3.0.0", + "graceful-fs": "^4.2.9" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/test-result": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/test-result/-/test-result-29.7.0.tgz", + "integrity": "sha512-Fdx+tv6x1zlkJPcWXmMDAG2HBnaR9XPSd5aDWQVsfrZmLVT3lU1cwyxLgRmXR9yrq4NBoEm9BMsfgFzTQAbJYA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/console": "^29.7.0", + "@jest/types": "^29.6.3", + "@types/istanbul-lib-coverage": "^2.0.0", + "collect-v8-coverage": "^1.0.0" + }, + "engines": { + "node": "^14.15.0 || 
^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/test-sequencer": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/test-sequencer/-/test-sequencer-29.7.0.tgz", + "integrity": "sha512-GQwJ5WZVrKnOJuiYiAF52UNUJXgTZx1NHjFSEB0qEMmSZKAkdMoIzw/Cj6x6NF4AvV23AUqDpFzQkN/eYCYTxw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/test-result": "^29.7.0", + "graceful-fs": "^4.2.9", + "jest-haste-map": "^29.7.0", + "slash": "^3.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/transform": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/transform/-/transform-29.7.0.tgz", + "integrity": "sha512-ok/BTPFzFKVMwO5eOHRrvnBVHdRy9IrsrW1GpMaQ9MCnilNLXQKmAX8s1YXDFaai9xJpac2ySzV0YeRRECr2Vw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/core": "^7.11.6", + "@jest/types": "^29.6.3", + "@jridgewell/trace-mapping": "^0.3.18", + "babel-plugin-istanbul": "^6.1.1", + "chalk": "^4.0.0", + "convert-source-map": "^2.0.0", + "fast-json-stable-stringify": "^2.1.0", + "graceful-fs": "^4.2.9", + "jest-haste-map": "^29.7.0", + "jest-regex-util": "^29.6.3", + "jest-util": "^29.7.0", + "micromatch": "^4.0.4", + "pirates": "^4.0.4", + "slash": "^3.0.0", + "write-file-atomic": "^4.0.2" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/types": { + "version": "29.6.3", + "resolved": "https://registry.npmjs.org/@jest/types/-/types-29.6.3.tgz", + "integrity": "sha512-u3UPsIilWKOM3F9CXtrG8LEJmNxwoCQC/XVj4IKYXvvpx7QIi/Kg1LI5uDmDpKlac62NUtX7eLjRh+jVZcLOzw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/schemas": "^29.6.3", + "@types/istanbul-lib-coverage": "^2.0.0", + "@types/istanbul-reports": "^3.0.0", + "@types/node": "*", + "@types/yargs": "^17.0.8", + "chalk": "^4.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jridgewell/gen-mapping": { + "version": 
"0.3.8", + "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.8.tgz", + "integrity": "sha512-imAbBGkb+ebQyxKgzv5Hu2nmROxoDOXHh80evxdoXNOrvAnVx7zimzc1Oo5h9RlfV4vPXaE2iM5pOFbvOCClWA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/set-array": "^1.2.1", + "@jridgewell/sourcemap-codec": "^1.4.10", + "@jridgewell/trace-mapping": "^0.3.24" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/resolve-uri": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz", + "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/set-array": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.2.1.tgz", + "integrity": "sha512-R8gLRTZeyp03ymzP/6Lil/28tGeGEzhx1q2k703KGWRAI1VdvPIXdG70VJc2pAMw3NA6JKL5hhFu1sJX0Mnn/A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/sourcemap-codec": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.0.tgz", + "integrity": "sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/@jridgewell/trace-mapping": { + "version": "0.3.25", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.25.tgz", + "integrity": "sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/resolve-uri": "^3.1.0", + "@jridgewell/sourcemap-codec": "^1.4.14" + } + }, + "node_modules/@js-sdsl/ordered-map": { + "version": "4.4.2", + "resolved": 
"https://registry.npmjs.org/@js-sdsl/ordered-map/-/ordered-map-4.4.2.tgz", + "integrity": "sha512-iUKgm52T8HOE/makSxjqoWhe95ZJA1/G1sYsGev2JDKUSS14KAgg1LHb+Ba+IPow0xflbnSkOsZcO08C7w1gYw==", + "license": "MIT", + "optional": true, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/js-sdsl" + } + }, + "node_modules/@noble/hashes": { + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/@noble/hashes/-/hashes-1.7.0.tgz", + "integrity": "sha512-HXydb0DgzTpDPwbVeDGCG1gIu7X6+AuU6Zl6av/E/KG8LMsvPntvq+w17CHRpKBmN6Ybdrt1eP3k4cj8DJa78w==", + "license": "MIT", + "engines": { + "node": "^14.21.3 || >=16" + }, + "funding": { + "url": "https://paulmillr.com/funding/" + } + }, + "node_modules/@opentelemetry/api": { + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/api/-/api-1.9.0.tgz", + "integrity": "sha512-3giAOQvZiH5F9bMlMiv8+GSPMeqg0dbaeo58/0SlA9sxSqZhnUtxzX9/2FzyhS9sWQf5S0GJE0AKBrFqjpeYcg==", + "license": "Apache-2.0", + "optional": true, + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/@protobufjs/aspromise": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@protobufjs/aspromise/-/aspromise-1.1.2.tgz", + "integrity": "sha512-j+gKExEuLmKwvz3OgROXtrJ2UG2x8Ch2YZUxahh+s1F2HZ+wAceUNLkvy6zKCPVRkU++ZWQrdxsUeQXmcg4uoQ==", + "license": "BSD-3-Clause" + }, + "node_modules/@protobufjs/base64": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@protobufjs/base64/-/base64-1.1.2.tgz", + "integrity": "sha512-AZkcAA5vnN/v4PDqKyMR5lx7hZttPDgClv83E//FMNhR2TMcLUhfRUBHCmSl0oi9zMgDDqRUJkSxO3wm85+XLg==", + "license": "BSD-3-Clause" + }, + "node_modules/@protobufjs/codegen": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/@protobufjs/codegen/-/codegen-2.0.4.tgz", + "integrity": "sha512-YyFaikqM5sH0ziFZCN3xDC7zeGaB/d0IUb9CATugHWbd1FRFwWwt4ld4OYMPWu5a3Xe01mGAULCdqhMlPl29Jg==", + "license": "BSD-3-Clause" + }, + "node_modules/@protobufjs/eventemitter": { + "version": 
"1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/eventemitter/-/eventemitter-1.1.0.tgz", + "integrity": "sha512-j9ednRT81vYJ9OfVuXG6ERSTdEL1xVsNgqpkxMsbIabzSo3goCjDIveeGv5d03om39ML71RdmrGNjG5SReBP/Q==", + "license": "BSD-3-Clause" + }, + "node_modules/@protobufjs/fetch": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/fetch/-/fetch-1.1.0.tgz", + "integrity": "sha512-lljVXpqXebpsijW71PZaCYeIcE5on1w5DlQy5WH6GLbFryLUrBD4932W/E2BSpfRJWseIL4v/KPgBFxDOIdKpQ==", + "license": "BSD-3-Clause", + "dependencies": { + "@protobufjs/aspromise": "^1.1.1", + "@protobufjs/inquire": "^1.1.0" + } + }, + "node_modules/@protobufjs/float": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@protobufjs/float/-/float-1.0.2.tgz", + "integrity": "sha512-Ddb+kVXlXst9d+R9PfTIxh1EdNkgoRe5tOX6t01f1lYWOvJnSPDBlG241QLzcyPdoNTsblLUdujGSE4RzrTZGQ==", + "license": "BSD-3-Clause" + }, + "node_modules/@protobufjs/inquire": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/inquire/-/inquire-1.1.0.tgz", + "integrity": "sha512-kdSefcPdruJiFMVSbn801t4vFK7KB/5gd2fYvrxhuJYg8ILrmn9SKSX2tZdV6V+ksulWqS7aXjBcRXl3wHoD9Q==", + "license": "BSD-3-Clause" + }, + "node_modules/@protobufjs/path": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@protobufjs/path/-/path-1.1.2.tgz", + "integrity": "sha512-6JOcJ5Tm08dOHAbdR3GrvP+yUUfkjG5ePsHYczMFLq3ZmMkAD98cDgcT2iA1lJ9NVwFd4tH/iSSoe44YWkltEA==", + "license": "BSD-3-Clause" + }, + "node_modules/@protobufjs/pool": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/pool/-/pool-1.1.0.tgz", + "integrity": "sha512-0kELaGSIDBKvcgS4zkjz1PeddatrjYcmMWOlAuAPwAeccUrPHdUqo/J6LiymHHEiJT5NrF1UVwxY14f+fy4WQw==", + "license": "BSD-3-Clause" + }, + "node_modules/@protobufjs/utf8": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/utf8/-/utf8-1.1.0.tgz", + "integrity": 
"sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw==", + "license": "BSD-3-Clause" + }, + "node_modules/@sinclair/typebox": { + "version": "0.27.8", + "resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.27.8.tgz", + "integrity": "sha512-+Fj43pSMwJs4KRrH/938Uf+uAELIgVBmQzg/q1YG10djyfA3TnrU8N8XzqCh/okZdszqBQTZf96idMfE5lnwTA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@sinonjs/commons": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/@sinonjs/commons/-/commons-3.0.1.tgz", + "integrity": "sha512-K3mCHKQ9sVh8o1C9cxkwxaOmXoAMlDxC1mYyHrjqOWEcBjYr76t96zL2zlj5dUGZ3HSw240X1qgH3Mjf1yJWpQ==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "type-detect": "4.0.8" + } + }, + "node_modules/@sinonjs/fake-timers": { + "version": "10.3.0", + "resolved": "https://registry.npmjs.org/@sinonjs/fake-timers/-/fake-timers-10.3.0.tgz", + "integrity": "sha512-V4BG07kuYSUkTCSBHG8G8TNhM+F19jXFWnQtzj+we8DrkpSBCee9Z3Ms8yiGer/dlmhe35/Xdgyo3/0rQKg7YA==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "@sinonjs/commons": "^3.0.0" + } + }, + "node_modules/@tootallnate/once": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-2.0.0.tgz", + "integrity": "sha512-XCuKFP5PS55gnMVu3dty8KPatLqUoy/ZYzDzAGCQ8JNFCkLXzmI7vNHCR+XpbZaMWQK/vQubr7PkYq8g470J/A==", + "license": "MIT", + "optional": true, + "engines": { + "node": ">= 10" + } + }, + "node_modules/@types/babel__core": { + "version": "7.20.5", + "resolved": "https://registry.npmjs.org/@types/babel__core/-/babel__core-7.20.5.tgz", + "integrity": "sha512-qoQprZvz5wQFJwMDqeseRXWv3rqMvhgpbXFfVyWhbx9X47POIA6i/+dXefEmZKoAgOaTdaIgNSMqMIU61yRyzA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.20.7", + "@babel/types": "^7.20.7", + "@types/babel__generator": "*", + "@types/babel__template": "*", + "@types/babel__traverse": "*" + } + }, + 
"node_modules/@types/babel__generator": { + "version": "7.6.8", + "resolved": "https://registry.npmjs.org/@types/babel__generator/-/babel__generator-7.6.8.tgz", + "integrity": "sha512-ASsj+tpEDsEiFr1arWrlN6V3mdfjRMZt6LtK/Vp/kreFLnr5QH5+DhvD5nINYZXzwJvXeGq+05iUXcAzVrqWtw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.0.0" + } + }, + "node_modules/@types/babel__template": { + "version": "7.4.4", + "resolved": "https://registry.npmjs.org/@types/babel__template/-/babel__template-7.4.4.tgz", + "integrity": "sha512-h/NUaSyG5EyxBIp8YRxo4RMe2/qQgvyowRwVMzhYhBCONbW8PUsg4lkFMrhgZhUe5z3L3MiLDuvyJ/CaPa2A8A==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.1.0", + "@babel/types": "^7.0.0" + } + }, + "node_modules/@types/babel__traverse": { + "version": "7.20.6", + "resolved": "https://registry.npmjs.org/@types/babel__traverse/-/babel__traverse-7.20.6.tgz", + "integrity": "sha512-r1bzfrm0tomOI8g1SzvCaQHo6Lcv6zu0EA+W2kHrt8dyrHQxGzBBL4kdkzIS+jBMV+EYcMAEAqXqYaLJq5rOZg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.20.7" + } + }, + "node_modules/@types/blake2": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/@types/blake2/-/blake2-4.0.4.tgz", + "integrity": "sha512-r84TojGHMbBoH91XQjqoc1N89xy/LmcGb15k9OSdB2APb+xQfNcfbcFGMa9RbMmFsnIKCRDMpuKHBM04AwdgxQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/body-parser": { + "version": "1.19.5", + "resolved": "https://registry.npmjs.org/@types/body-parser/-/body-parser-1.19.5.tgz", + "integrity": "sha512-fB3Zu92ucau0iQ0JMCFQE7b/dv8Ot07NI3KaZIkIUNXq82k4eBAqUaneXfleGY9JWskeS9y+u0nXMyspcuQrCg==", + "license": "MIT", + "dependencies": { + "@types/connect": "*", + "@types/node": "*" + } + }, + "node_modules/@types/bs58check": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/@types/bs58check/-/bs58check-2.1.2.tgz", + "integrity": 
"sha512-xpXaQlOIY1KoXlA/ytHGHpEIU87PJt+g9SH7nC6HdCgaBwT2IEZIwBMHbjuX6BpnfbiUMlmwqurdLDwXpcdmSA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/caseless": { + "version": "0.12.5", + "resolved": "https://registry.npmjs.org/@types/caseless/-/caseless-0.12.5.tgz", + "integrity": "sha512-hWtVTC2q7hc7xZ/RLbxapMvDMgUnDvKvMOpKal4DrMyfGBUfB1oKaZlIRr6mJL+If3bAP6sV/QneGzF6tJjZDg==", + "license": "MIT", + "optional": true + }, + "node_modules/@types/connect": { + "version": "3.4.38", + "resolved": "https://registry.npmjs.org/@types/connect/-/connect-3.4.38.tgz", + "integrity": "sha512-K6uROf1LD88uDQqJCktA4yzL1YYAK6NgfsI0v/mTgyPKWsX1CnJ0XPSDhViejru1GcRkLWb8RlzFYJRqGUbaug==", + "license": "MIT", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/cors": { + "version": "2.8.17", + "resolved": "https://registry.npmjs.org/@types/cors/-/cors-2.8.17.tgz", + "integrity": "sha512-8CGDvrBj1zgo2qE+oS3pOCyYNqCPryMWY2bGfwA0dcfopWGgxs+78df0Rs3rc9THP4JkOhLsAa+15VdpAqkcUA==", + "license": "MIT", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/express": { + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/@types/express/-/express-4.17.21.tgz", + "integrity": "sha512-ejlPM315qwLpaQlQDTjPdsUFSc6ZsP4AN6AlWnogPjQ7CVi7PYF3YVz+CY3jE2pwYf7E/7HlDAN0rV2GxTG0HQ==", + "license": "MIT", + "dependencies": { + "@types/body-parser": "*", + "@types/express-serve-static-core": "^4.17.33", + "@types/qs": "*", + "@types/serve-static": "*" + } + }, + "node_modules/@types/express-serve-static-core": { + "version": "4.19.6", + "resolved": "https://registry.npmjs.org/@types/express-serve-static-core/-/express-serve-static-core-4.19.6.tgz", + "integrity": "sha512-N4LZ2xG7DatVqhCZzOGb1Yi5lMbXSZcmdLDe9EzSndPV2HpWYWzRbaerl2n27irrm94EPpprqa8KpskPT085+A==", + "license": "MIT", + "dependencies": { + "@types/node": "*", + "@types/qs": "*", + "@types/range-parser": "*", + "@types/send": "*" + } + }, + 
"node_modules/@types/graceful-fs": { + "version": "4.1.9", + "resolved": "https://registry.npmjs.org/@types/graceful-fs/-/graceful-fs-4.1.9.tgz", + "integrity": "sha512-olP3sd1qOEe5dXTSaFvQG+02VdRXcdytWLAZsAq1PecU8uqQAhkrnbli7DagjtXKW/Bl7YJbUsa8MPcuc8LHEQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/http-errors": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/@types/http-errors/-/http-errors-2.0.4.tgz", + "integrity": "sha512-D0CFMMtydbJAegzOyHjtiKPLlvnm3iTZyZRSZoLq2mRhDdmLfIWOCYPfQJ4cu2erKghU++QvjcUjp/5h7hESpA==", + "license": "MIT" + }, + "node_modules/@types/istanbul-lib-coverage": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.6.tgz", + "integrity": "sha512-2QF/t/auWm0lsy8XtKVPG19v3sSOQlJe/YHZgfjb/KBBHOGSV+J2q/S671rcq9uTBrLAXmZpqJiaQbMT+zNU1w==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/istanbul-lib-report": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@types/istanbul-lib-report/-/istanbul-lib-report-3.0.3.tgz", + "integrity": "sha512-NQn7AHQnk/RSLOxrBbGyJM/aVQ+pjj5HCgasFxc0K/KhoATfQ/47AyUl15I2yBUpihjmas+a+VJBOqecrFH+uA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/istanbul-lib-coverage": "*" + } + }, + "node_modules/@types/istanbul-reports": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/istanbul-reports/-/istanbul-reports-3.0.4.tgz", + "integrity": "sha512-pk2B1NWalF9toCRu6gjBzR69syFjP4Od8WRAX+0mmf9lAjCRicLOWc+ZrxZHx/0XRjotgkF9t6iaMJ+aXcOdZQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/istanbul-lib-report": "*" + } + }, + "node_modules/@types/jest": { + "version": "29.5.14", + "resolved": "https://registry.npmjs.org/@types/jest/-/jest-29.5.14.tgz", + "integrity": "sha512-ZN+4sdnLUbo8EVvVc2ao0GFW6oVrQRPn4K2lglySj7APvSrgzxHiNNK99us4WDMi57xxA2yggblIAMNhXOotLQ==", + "dev": true, + "license": 
"MIT", + "dependencies": { + "expect": "^29.0.0", + "pretty-format": "^29.0.0" + } + }, + "node_modules/@types/jsonwebtoken": { + "version": "9.0.7", + "resolved": "https://registry.npmjs.org/@types/jsonwebtoken/-/jsonwebtoken-9.0.7.tgz", + "integrity": "sha512-ugo316mmTYBl2g81zDFnZ7cfxlut3o+/EQdaP7J8QN2kY6lJ22hmQYCK5EHcJHbrW+dkCGSCPgbG8JtYj6qSrg==", + "license": "MIT", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/lodash": { + "version": "4.17.14", + "resolved": "https://registry.npmjs.org/@types/lodash/-/lodash-4.17.14.tgz", + "integrity": "sha512-jsxagdikDiDBeIRaPYtArcT8my4tN1og7MtMRquFT3XNA6axxyHDRUemqDz/taRDdOUn0GnGHRCuff4q48sW9A==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/long": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@types/long/-/long-4.0.2.tgz", + "integrity": "sha512-MqTGEo5bj5t157U6fA/BiDynNkn0YknVdh48CMPkTSpFTVmvao5UQmm7uEF6xBEo7qIMAlY/JSleYaE6VOdpaA==", + "license": "MIT", + "optional": true + }, + "node_modules/@types/mime": { + "version": "1.3.5", + "resolved": "https://registry.npmjs.org/@types/mime/-/mime-1.3.5.tgz", + "integrity": "sha512-/pyBZWSLD2n0dcHE3hq8s8ZvcETHtEuF+3E7XVt0Ig2nvsVQXdghHVcEkIWjy9A0wKfTn97a/PSDYohKIlnP/w==", + "license": "MIT" + }, + "node_modules/@types/node": { + "version": "22.10.5", + "resolved": "https://registry.npmjs.org/@types/node/-/node-22.10.5.tgz", + "integrity": "sha512-F8Q+SeGimwOo86fiovQh8qiXfFEh2/ocYv7tU5pJ3EXMSSxk1Joj5wefpFK2fHTf/N6HKGSxIDBT9f3gCxXPkQ==", + "license": "MIT", + "dependencies": { + "undici-types": "~6.20.0" + } + }, + "node_modules/@types/qs": { + "version": "6.9.17", + "resolved": "https://registry.npmjs.org/@types/qs/-/qs-6.9.17.tgz", + "integrity": "sha512-rX4/bPcfmvxHDv0XjfJELTTr+iB+tn032nPILqHm5wbthUUUuVtNGGqzhya9XUxjTP8Fpr0qYgSZZKxGY++svQ==", + "license": "MIT" + }, + "node_modules/@types/range-parser": { + "version": "1.2.7", + "resolved": "https://registry.npmjs.org/@types/range-parser/-/range-parser-1.2.7.tgz", + 
"integrity": "sha512-hKormJbkJqzQGhziax5PItDUTMAM9uE2XXQmM37dyd4hVM+5aVl7oVxMVUiVQn2oCQFN/LKCZdvSM0pFRqbSmQ==", + "license": "MIT" + }, + "node_modules/@types/request": { + "version": "2.48.12", + "resolved": "https://registry.npmjs.org/@types/request/-/request-2.48.12.tgz", + "integrity": "sha512-G3sY+NpsA9jnwm0ixhAFQSJ3Q9JkpLZpJbI3GMv0mIAT0y3mRabYeINzal5WOChIiaTEGQYlHOKgkaM9EisWHw==", + "license": "MIT", + "optional": true, + "dependencies": { + "@types/caseless": "*", + "@types/node": "*", + "@types/tough-cookie": "*", + "form-data": "^2.5.0" + } + }, + "node_modules/@types/send": { + "version": "0.17.4", + "resolved": "https://registry.npmjs.org/@types/send/-/send-0.17.4.tgz", + "integrity": "sha512-x2EM6TJOybec7c52BX0ZspPodMsQUd5L6PRwOunVyVUhXiBSKf3AezDL8Dgvgt5o0UfKNfuA0eMLr2wLT4AiBA==", + "license": "MIT", + "dependencies": { + "@types/mime": "^1", + "@types/node": "*" + } + }, + "node_modules/@types/serve-static": { + "version": "1.15.7", + "resolved": "https://registry.npmjs.org/@types/serve-static/-/serve-static-1.15.7.tgz", + "integrity": "sha512-W8Ym+h8nhuRwaKPaDw34QUkwsGi6Rc4yYqvKFo5rm2FUEhCFbzVWrxXUxuKK8TASjWsysJY0nsmNCGhCOIsrOw==", + "license": "MIT", + "dependencies": { + "@types/http-errors": "*", + "@types/node": "*", + "@types/send": "*" + } + }, + "node_modules/@types/stack-utils": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/@types/stack-utils/-/stack-utils-2.0.3.tgz", + "integrity": "sha512-9aEbYZ3TbYMznPdcdr3SmIrLXwC/AKZXQeCf9Pgao5CKb8CyHuEX5jzWPTkvregvhRJHcpRO6BFoGW9ycaOkYw==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/tough-cookie": { + "version": "4.0.5", + "resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-4.0.5.tgz", + "integrity": "sha512-/Ad8+nIOV7Rl++6f1BdKxFSMgmoqEoYbHRpPcx3JEfv8VRsQe9Z4mCXeJBzxs7mbHY/XOZZuXlRNfhpVPbs6ZA==", + "license": "MIT", + "optional": true + }, + "node_modules/@types/yargs": { + "version": "17.0.33", + "resolved": 
"https://registry.npmjs.org/@types/yargs/-/yargs-17.0.33.tgz", + "integrity": "sha512-WpxBCKWPLr4xSsHgz511rFJAM+wS28w2zEO1QDNY5zM/S8ok70NNfztH0xwhqKyaK0OHCbN98LDAZuy1ctxDkA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/yargs-parser": "*" + } + }, + "node_modules/@types/yargs-parser": { + "version": "21.0.3", + "resolved": "https://registry.npmjs.org/@types/yargs-parser/-/yargs-parser-21.0.3.tgz", + "integrity": "sha512-I4q9QU9MQv4oEOz4tAHJtNz1cwuLxn2F3xcc2iV5WdqLPpUnj30aUuxt1mAxYTG+oe8CZMV/+6rU4S4gRDzqtQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/abort-controller": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/abort-controller/-/abort-controller-3.0.0.tgz", + "integrity": "sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==", + "license": "MIT", + "optional": true, + "dependencies": { + "event-target-shim": "^5.0.0" + }, + "engines": { + "node": ">=6.5" + } + }, + "node_modules/accepts": { + "version": "1.3.8", + "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.8.tgz", + "integrity": "sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw==", + "license": "MIT", + "dependencies": { + "mime-types": "~2.1.34", + "negotiator": "0.6.3" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/agent-base": { + "version": "7.1.3", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.3.tgz", + "integrity": "sha512-jRR5wdylq8CkOe6hei19GGZnxM6rBGwFl3Bg0YItGDimvjGtAvdZk4Pu6Cl4u4Igsws4a1fd1Vq3ezrhn4KmFw==", + "license": "MIT", + "optional": true, + "engines": { + "node": ">= 14" + } + }, + "node_modules/ansi-escapes": { + "version": "4.3.2", + "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.2.tgz", + "integrity": "sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ==", + "dev": true, + "license": "MIT", + "dependencies": { + 
"type-fest": "^0.21.3" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "devOptional": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "devOptional": true, + "license": "MIT", + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/anymatch": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz", + "integrity": "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==", + "dev": true, + "license": "ISC", + "dependencies": { + "normalize-path": "^3.0.0", + "picomatch": "^2.0.4" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/argparse": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", + "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", + "dev": true, + "license": "MIT", + "dependencies": { + "sprintf-js": "~1.0.2" + } + }, + "node_modules/array-flatten": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz", + "integrity": "sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg==", + "license": "MIT" + }, + "node_modules/arrify": { + "version": "2.0.1", + "resolved": 
"https://registry.npmjs.org/arrify/-/arrify-2.0.1.tgz", + "integrity": "sha512-3duEwti880xqi4eAMN8AyR4a0ByT90zoYdLlevfrvU43vb0YZwZVfxOgxWrLXXXpyugL0hNZc9G6BiB5B3nUug==", + "license": "MIT", + "optional": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/async": { + "version": "3.2.6", + "resolved": "https://registry.npmjs.org/async/-/async-3.2.6.tgz", + "integrity": "sha512-htCUDlxyyCLMgaM3xXg0C0LW2xqfuQ6p05pCEIsXuyQ+a1koYKTuBMzRNwmybfLgvJDMd0r1LTn4+E0Ti6C2AA==", + "dev": true, + "license": "MIT" + }, + "node_modules/async-retry": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/async-retry/-/async-retry-1.3.3.tgz", + "integrity": "sha512-wfr/jstw9xNi/0teMHrRW7dsz3Lt5ARhYNZ2ewpadnhaIp5mbALhOAP+EAdsC7t4Z6wqsDVv9+W6gm1Dk9mEyw==", + "license": "MIT", + "optional": true, + "dependencies": { + "retry": "0.13.1" + } + }, + "node_modules/asynckit": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", + "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==", + "license": "MIT", + "optional": true + }, + "node_modules/babel-jest": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/babel-jest/-/babel-jest-29.7.0.tgz", + "integrity": "sha512-BrvGY3xZSwEcCzKvKsCi2GgHqDqsYkOP4/by5xCgIwGXQxIEh+8ew3gmrE1y7XRR6LHZIj6yLYnUi/mm2KXKBg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/transform": "^29.7.0", + "@types/babel__core": "^7.1.14", + "babel-plugin-istanbul": "^6.1.1", + "babel-preset-jest": "^29.6.3", + "chalk": "^4.0.0", + "graceful-fs": "^4.2.9", + "slash": "^3.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "peerDependencies": { + "@babel/core": "^7.8.0" + } + }, + "node_modules/babel-plugin-istanbul": { + "version": "6.1.1", + "resolved": "https://registry.npmjs.org/babel-plugin-istanbul/-/babel-plugin-istanbul-6.1.1.tgz", + "integrity": 
"sha512-Y1IQok9821cC9onCx5otgFfRm7Lm+I+wwxOx738M/WLPZ9Q42m4IG5W0FNX8WLL2gYMZo3JkuXIH2DOpWM+qwA==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "@babel/helper-plugin-utils": "^7.0.0", + "@istanbuljs/load-nyc-config": "^1.0.0", + "@istanbuljs/schema": "^0.1.2", + "istanbul-lib-instrument": "^5.0.4", + "test-exclude": "^6.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/babel-plugin-istanbul/node_modules/istanbul-lib-instrument": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/istanbul-lib-instrument/-/istanbul-lib-instrument-5.2.1.tgz", + "integrity": "sha512-pzqtp31nLv/XFOzXGuvhCb8qhjmTVo5vjVk19XE4CRlSWz0KoeJ3bw9XsA7nOp9YBf4qHjwBxkDzKcME/J29Yg==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "@babel/core": "^7.12.3", + "@babel/parser": "^7.14.7", + "@istanbuljs/schema": "^0.1.2", + "istanbul-lib-coverage": "^3.2.0", + "semver": "^6.3.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/babel-plugin-jest-hoist": { + "version": "29.6.3", + "resolved": "https://registry.npmjs.org/babel-plugin-jest-hoist/-/babel-plugin-jest-hoist-29.6.3.tgz", + "integrity": "sha512-ESAc/RJvGTFEzRwOTT4+lNDk/GNHMkKbNzsvT0qKRfDyyYTskxB5rnU2njIDYVxXCBHHEI1c0YwHob3WaYujOg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/template": "^7.3.3", + "@babel/types": "^7.3.3", + "@types/babel__core": "^7.1.14", + "@types/babel__traverse": "^7.0.6" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/babel-preset-current-node-syntax": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/babel-preset-current-node-syntax/-/babel-preset-current-node-syntax-1.1.0.tgz", + "integrity": "sha512-ldYss8SbBlWva1bs28q78Ju5Zq1F+8BrqBZZ0VFhLBvhh6lCpC2o3gDJi/5DRLs9FgYZCnmPYIVFU4lRXCkyUw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/plugin-syntax-async-generators": "^7.8.4", + "@babel/plugin-syntax-bigint": "^7.8.3", + 
"@babel/plugin-syntax-class-properties": "^7.12.13", + "@babel/plugin-syntax-class-static-block": "^7.14.5", + "@babel/plugin-syntax-import-attributes": "^7.24.7", + "@babel/plugin-syntax-import-meta": "^7.10.4", + "@babel/plugin-syntax-json-strings": "^7.8.3", + "@babel/plugin-syntax-logical-assignment-operators": "^7.10.4", + "@babel/plugin-syntax-nullish-coalescing-operator": "^7.8.3", + "@babel/plugin-syntax-numeric-separator": "^7.10.4", + "@babel/plugin-syntax-object-rest-spread": "^7.8.3", + "@babel/plugin-syntax-optional-catch-binding": "^7.8.3", + "@babel/plugin-syntax-optional-chaining": "^7.8.3", + "@babel/plugin-syntax-private-property-in-object": "^7.14.5", + "@babel/plugin-syntax-top-level-await": "^7.14.5" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/babel-preset-jest": { + "version": "29.6.3", + "resolved": "https://registry.npmjs.org/babel-preset-jest/-/babel-preset-jest-29.6.3.tgz", + "integrity": "sha512-0B3bhxR6snWXJZtR/RliHTDPRgn1sNHOR0yVtq/IiQFyuOVjFS+wuio/R4gSNkyYmKmJB4wGZv2NZanmKmTnNA==", + "dev": true, + "license": "MIT", + "dependencies": { + "babel-plugin-jest-hoist": "^29.6.3", + "babel-preset-current-node-syntax": "^1.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "dev": true, + "license": "MIT" + }, + "node_modules/base-x": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/base-x/-/base-x-4.0.0.tgz", + "integrity": "sha512-FuwxlW4H5kh37X/oW59pwTzzTKRzfrrQwhmyspRM7swOEZcHtDZSCt45U6oKgtuFE+WYPblePMVIPR4RZrh/hw==", + "license": "MIT" + }, + "node_modules/base64-js": { + "version": "1.5.1", + "resolved": 
"https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", + "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT", + "optional": true + }, + "node_modules/bignumber.js": { + "version": "9.1.2", + "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-9.1.2.tgz", + "integrity": "sha512-2/mKyZH9K85bzOEfhXDBFZTGd1CTs+5IHpeFQo9luiBG7hghdC851Pj2WAhb6E3R6b9tZj/XKhbg4fum+Kepug==", + "license": "MIT", + "optional": true, + "engines": { + "node": "*" + } + }, + "node_modules/blake2": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/blake2/-/blake2-5.0.0.tgz", + "integrity": "sha512-MLpq1DwBB9rC0IHuRc2gXLEAeNNTTYHEtvYCA5lK4RmoUPRmQLSLQrwgJvou62BvH9KP7whe8n+xxw45++fnYg==", + "hasInstallScript": true, + "license": "ISC", + "dependencies": { + "nan": "^2.17.0" + }, + "engines": { + "node": ">= 12.0.0" + } + }, + "node_modules/blakejs": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/blakejs/-/blakejs-1.2.1.tgz", + "integrity": "sha512-QXUSXI3QVc/gJME0dBpXrag1kbzOqCjCX8/b54ntNyW6sjtoqxqRk3LTmXzaJoh71zMsDCjM+47jS7XiwN/+fQ==", + "license": "MIT" + }, + "node_modules/body-parser": { + "version": "1.20.3", + "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.3.tgz", + "integrity": "sha512-7rAxByjUMqQ3/bHJy7D6OGXvx/MMc4IqBn/X0fcM1QUcAItpZrBEYhWGem+tzXH90c+G01ypMcYJBO9Y30203g==", + "license": "MIT", + "dependencies": { + "bytes": "3.1.2", + "content-type": "~1.0.5", + "debug": "2.6.9", + "depd": "2.0.0", + "destroy": "1.2.0", + "http-errors": "2.0.0", + "iconv-lite": "0.4.24", + "on-finished": "2.4.1", + "qs": "6.13.0", + "raw-body": "2.5.2", + "type-is": "~1.6.18", + "unpipe": "1.0.0" + }, + "engines": { 
+ "node": ">= 0.8", + "npm": "1.2.8000 || >= 1.4.16" + } + }, + "node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/braces": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", + "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", + "dev": true, + "license": "MIT", + "dependencies": { + "fill-range": "^7.1.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/browserslist": { + "version": "4.24.4", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.24.4.tgz", + "integrity": "sha512-KDi1Ny1gSePi1vm0q4oxSF8b4DR44GF4BbmS2YdhPLOEqd8pDviZOGH/GsmRwoWJ2+5Lr085X7naowMwKHDG1A==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "caniuse-lite": "^1.0.30001688", + "electron-to-chromium": "^1.5.73", + "node-releases": "^2.0.19", + "update-browserslist-db": "^1.1.1" + }, + "bin": { + "browserslist": "cli.js" + }, + "engines": { + "node": "^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7" + } + }, + "node_modules/bs-logger": { + "version": "0.2.6", + "resolved": "https://registry.npmjs.org/bs-logger/-/bs-logger-0.2.6.tgz", + "integrity": "sha512-pd8DCoxmbgc7hyPKOvxtqNcjYoOsABPQdcCUjGp3d42VR2CX1ORhk2A87oqqu5R1kk+76nsxZupkmyd+MVtCog==", + "dev": true, + "license": "MIT", + "dependencies": { + "fast-json-stable-stringify": "2.x" + }, + 
"engines": { + "node": ">= 6" + } + }, + "node_modules/bs58": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/bs58/-/bs58-5.0.0.tgz", + "integrity": "sha512-r+ihvQJvahgYT50JD05dyJNKlmmSlMoOGwn1lCcEzanPglg7TxYjioQUYehQ9mAR/+hOSd2jRc/Z2y5UxBymvQ==", + "license": "MIT", + "dependencies": { + "base-x": "^4.0.0" + } + }, + "node_modules/bs58check": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/bs58check/-/bs58check-3.0.1.tgz", + "integrity": "sha512-hjuuJvoWEybo7Hn/0xOrczQKKEKD63WguEjlhLExYs2wUBcebDC1jDNK17eEAD2lYfw82d5ASC1d7K3SWszjaQ==", + "license": "MIT", + "dependencies": { + "@noble/hashes": "^1.2.0", + "bs58": "^5.0.0" + } + }, + "node_modules/bser": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/bser/-/bser-2.1.1.tgz", + "integrity": "sha512-gQxTNE/GAfIIrmHLUE3oJyp5FO6HRBfhjnw4/wMmA63ZGDJnWBmgY/lyQBpnDUkGmAhbSe39tx2d/iTOAfglwQ==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "node-int64": "^0.4.0" + } + }, + "node_modules/buffer-equal-constant-time": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz", + "integrity": "sha512-zRpUiDwd/xk6ADqPMATG8vc9VPrkck7T07OIx0gnjmJAnHnTVXNQG3vfvWNuiZIkwu9KrKdA1iJKfsfTVxE6NA==", + "license": "BSD-3-Clause" + }, + "node_modules/buffer-from": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz", + "integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/bytes": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz", + "integrity": "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/call-bind-apply-helpers": { + "version": "1.0.1", + "resolved": 
"https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.1.tgz", + "integrity": "sha512-BhYE+WDaywFg2TBWYNXAE+8B1ATnThNBqXHP5nQu0jWJdVvY2hvkpyB3qOmtmDePiS5/BDQ8wASEWGMWRG148g==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/call-bound": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/call-bound/-/call-bound-1.0.3.tgz", + "integrity": "sha512-YTd+6wGlNlPxSuri7Y6X8tY2dmm12UMH66RpKMhiX6rsk5wXXnYgbUcOt8kiS31/AjfoTOvCsE+w8nZQLQnzHA==", + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.1", + "get-intrinsic": "^1.2.6" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/callsites": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", + "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/camelcase": { + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz", + "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/caniuse-lite": { + "version": "1.0.30001690", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001690.tgz", + "integrity": "sha512-5ExiE3qQN6oF8Clf8ifIDcMRCRE/dMGcETG/XGMD8/XiXm6HXQgQTh1yZYLXXpSOsEUlJm1Xr7kGULZTuGtP/w==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/caniuse-lite" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + 
"license": "CC-BY-4.0" + }, + "node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/char-regex": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/char-regex/-/char-regex-1.0.2.tgz", + "integrity": "sha512-kWWXztvZ5SBQV+eRgKFeh8q5sLuZY2+8WUIzlxWVTg+oGwY14qylx1KbKzHd8P6ZYkAg0xyIDU9JMHhyJMZ1jw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + } + }, + "node_modules/ci-info": { + "version": "3.9.0", + "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-3.9.0.tgz", + "integrity": "sha512-NIxF55hv4nSqQswkAeiOi1r83xy8JldOFDTWiug55KBu9Jnblncd2U6ViHmYgHf01TPZS77NJBhBMKdWj9HQMQ==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/sibiraj-s" + } + ], + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/cjs-module-lexer": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/cjs-module-lexer/-/cjs-module-lexer-1.4.1.tgz", + "integrity": "sha512-cuSVIHi9/9E/+821Qjdvngor+xpnlwnuwIyZOaLmHBVdXL+gP+I6QQB9VkO7RI77YIcTV+S1W9AreJ5eN63JBA==", + "dev": true, + "license": "MIT" + }, + "node_modules/cliui": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz", + "integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==", + "devOptional": true, + "license": "ISC", + "dependencies": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.1", + "wrap-ansi": "^7.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/co": { + "version": "4.6.0", + "resolved": 
"https://registry.npmjs.org/co/-/co-4.6.0.tgz", + "integrity": "sha512-QVb0dM5HvG+uaxitm8wONl7jltx8dqhfU33DcqtOZcLSVIKSDDLDi7+0LbAKiyI8hD9u42m2YxXSkMGWThaecQ==", + "dev": true, + "license": "MIT", + "engines": { + "iojs": ">= 1.0.0", + "node": ">= 0.12.0" + } + }, + "node_modules/collect-v8-coverage": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/collect-v8-coverage/-/collect-v8-coverage-1.0.2.tgz", + "integrity": "sha512-lHl4d5/ONEbLlJvaJNtsF/Lz+WvB07u2ycqTYbdrq7UypDXailES4valYb2eWiJFxZlVmpGekfqoxQhzyFdT4Q==", + "dev": true, + "license": "MIT" + }, + "node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "devOptional": true, + "license": "MIT", + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "devOptional": true, + "license": "MIT" + }, + "node_modules/combined-stream": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", + "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", + "license": "MIT", + "optional": true, + "dependencies": { + "delayed-stream": "~1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", + "dev": true, + "license": "MIT" + }, + "node_modules/content-disposition": { + "version": 
"0.5.4", + "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.4.tgz", + "integrity": "sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ==", + "license": "MIT", + "dependencies": { + "safe-buffer": "5.2.1" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/content-type": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.5.tgz", + "integrity": "sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/convert-source-map": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz", + "integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==", + "dev": true, + "license": "MIT" + }, + "node_modules/cookie": { + "version": "0.7.1", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.7.1.tgz", + "integrity": "sha512-6DnInpx7SJ2AK3+CTUE/ZM0vWTUboZCegxhC2xiIydHR9jNuTAASBrfEpHhiGOZw/nX51bHt6YQl8jsGo4y/0w==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/cookie-signature": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz", + "integrity": "sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ==", + "license": "MIT" + }, + "node_modules/cors": { + "version": "2.8.5", + "resolved": "https://registry.npmjs.org/cors/-/cors-2.8.5.tgz", + "integrity": "sha512-KIHbLJqu73RGr/hnbrO9uBeixNGuvSQjul/jdFvS/KFSIH1hWVd1ng7zOHx+YrEfInLG7q4n6GHQ9cDtxv/P6g==", + "license": "MIT", + "dependencies": { + "object-assign": "^4", + "vary": "^1" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/create-jest": { + "version": "29.7.0", + "resolved": 
"https://registry.npmjs.org/create-jest/-/create-jest-29.7.0.tgz", + "integrity": "sha512-Adz2bdH0Vq3F53KEMJOoftQFutWCukm6J24wbPWRO4k1kMY7gS7ds/uoJkNuV8wDCtWWnuwGcJwpWcih+zEW1Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "^29.6.3", + "chalk": "^4.0.0", + "exit": "^0.1.2", + "graceful-fs": "^4.2.9", + "jest-config": "^29.7.0", + "jest-util": "^29.7.0", + "prompts": "^2.0.1" + }, + "bin": { + "create-jest": "bin/create-jest.js" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/cross-spawn": { + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", + "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", + "dev": true, + "license": "MIT", + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "license": "MIT", + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/dedent": { + "version": "1.5.3", + "resolved": "https://registry.npmjs.org/dedent/-/dedent-1.5.3.tgz", + "integrity": "sha512-NHQtfOOW68WD8lgypbLA5oT+Bt0xXJhiYvoR6SmmNXZfpzOGXwdKWmcwG8N7PwVVWV3eF/68nmD9BaJSsTBhyQ==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "babel-plugin-macros": "^3.1.0" + }, + "peerDependenciesMeta": { + "babel-plugin-macros": { + "optional": true + } + } + }, + "node_modules/deepmerge": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/deepmerge/-/deepmerge-4.3.1.tgz", + "integrity": "sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + 
"node_modules/delayed-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==", + "license": "MIT", + "optional": true, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/depd": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", + "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/destroy": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/destroy/-/destroy-1.2.0.tgz", + "integrity": "sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg==", + "license": "MIT", + "engines": { + "node": ">= 0.8", + "npm": "1.2.8000 || >= 1.4.16" + } + }, + "node_modules/detect-newline": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/detect-newline/-/detect-newline-3.1.0.tgz", + "integrity": "sha512-TLz+x/vEXm/Y7P7wn1EJFNLxYpUD4TgMosxY6fAVJUnJMbupHBOncxyWUG9OpTaH9EBD7uFI5LfEgmMOc54DsA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/diff-sequences": { + "version": "29.6.3", + "resolved": "https://registry.npmjs.org/diff-sequences/-/diff-sequences-29.6.3.tgz", + "integrity": "sha512-EjePK1srD3P08o2j4f0ExnylqRs5B9tJjcp9t1krH2qRi8CCdsYfwe9JgSLurFBWwq4uOlipzfk5fHNvwFKr8Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/dunder-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", + "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==", + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.1", + 
"es-errors": "^1.3.0", + "gopd": "^1.2.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/duplexify": { + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.3.tgz", + "integrity": "sha512-M3BmBhwJRZsSx38lZyhE53Csddgzl5R7xGJNk7CVddZD6CcmwMCH8J+7AprIrQKH7TonKxaCjcv27Qmf+sQ+oA==", + "license": "MIT", + "optional": true, + "dependencies": { + "end-of-stream": "^1.4.1", + "inherits": "^2.0.3", + "readable-stream": "^3.1.1", + "stream-shift": "^1.0.2" + } + }, + "node_modules/ecdsa-sig-formatter": { + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/ecdsa-sig-formatter/-/ecdsa-sig-formatter-1.0.11.tgz", + "integrity": "sha512-nagl3RYrbNv6kQkeJIpt6NJZy8twLB/2vtz6yN9Z4vRKHN4/QZJIEbqohALSgwKdnksuY3k5Addp5lg8sVoVcQ==", + "license": "Apache-2.0", + "dependencies": { + "safe-buffer": "^5.0.1" + } + }, + "node_modules/ee-first": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", + "integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==", + "license": "MIT" + }, + "node_modules/ejs": { + "version": "3.1.10", + "resolved": "https://registry.npmjs.org/ejs/-/ejs-3.1.10.tgz", + "integrity": "sha512-UeJmFfOrAQS8OJWPZ4qtgHyWExa088/MtK5UEyoJGFH67cDEXkZSviOiKRCZ4Xij0zxI3JECgYs3oKx+AizQBA==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "jake": "^10.8.5" + }, + "bin": { + "ejs": "bin/cli.js" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/electron-to-chromium": { + "version": "1.5.79", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.79.tgz", + "integrity": "sha512-nYOxJNxQ9Om4EC88BE4pPoNI8xwSFf8pU/BAeOl4Hh/b/i6V4biTAzwV7pXi3ARKeoYO5JZKMIXTryXSVer5RA==", + "dev": true, + "license": "ISC" + }, + "node_modules/emittery": { + "version": "0.13.1", + "resolved": "https://registry.npmjs.org/emittery/-/emittery-0.13.1.tgz", + "integrity": 
"sha512-DeWwawk6r5yR9jFgnDKYt4sLS0LmHJJi3ZOnb5/JdbYwj3nW+FxQnHIjhBKz8YLC7oRNPVM9NQ47I3CVx34eqQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sindresorhus/emittery?sponsor=1" + } + }, + "node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "devOptional": true, + "license": "MIT" + }, + "node_modules/encodeurl": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-2.0.0.tgz", + "integrity": "sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/end-of-stream": { + "version": "1.4.4", + "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz", + "integrity": "sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==", + "license": "MIT", + "optional": true, + "dependencies": { + "once": "^1.4.0" + } + }, + "node_modules/error-ex": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz", + "integrity": "sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-arrayish": "^0.2.1" + } + }, + "node_modules/es-define-property": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz", + "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-errors": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", + 
"integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-object-atoms": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.0.0.tgz", + "integrity": "sha512-MZ4iQ6JwHOBQjahnjwaC1ZtIBH+2ohjamzAO3oaHcXYup7qxjF2fixyH+Q71voWHeOkI2q/TnJao/KfXYIZWbw==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/escalade": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz", + "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==", + "devOptional": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/escape-html": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", + "integrity": "sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==", + "license": "MIT" + }, + "node_modules/escape-string-regexp": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz", + "integrity": "sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/esprima": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", + "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==", + "dev": true, + "license": "BSD-2-Clause", + "bin": { + "esparse": "bin/esparse.js", + "esvalidate": "bin/esvalidate.js" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/etag": { + "version": "1.8.1", + "resolved": 
"https://registry.npmjs.org/etag/-/etag-1.8.1.tgz", + "integrity": "sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/event-target-shim": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz", + "integrity": "sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==", + "license": "MIT", + "optional": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/execa": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz", + "integrity": "sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==", + "dev": true, + "license": "MIT", + "dependencies": { + "cross-spawn": "^7.0.3", + "get-stream": "^6.0.0", + "human-signals": "^2.1.0", + "is-stream": "^2.0.0", + "merge-stream": "^2.0.0", + "npm-run-path": "^4.0.1", + "onetime": "^5.1.2", + "signal-exit": "^3.0.3", + "strip-final-newline": "^2.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sindresorhus/execa?sponsor=1" + } + }, + "node_modules/exit": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/exit/-/exit-0.1.2.tgz", + "integrity": "sha512-Zk/eNKV2zbjpKzrsQ+n1G6poVbErQxJ0LBOJXaKZ1EViLzH+hrLu9cdXI4zw9dBQJslwBEpbQ2P1oS7nDxs6jQ==", + "dev": true, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/expect": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/expect/-/expect-29.7.0.tgz", + "integrity": "sha512-2Zks0hf1VLFYI1kbh0I5jP3KHHyCHpkfyHBzsSXRFgl/Bg9mWYfMW8oD+PdMPlEwy5HNsR9JutYy6pMeOh61nw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/expect-utils": "^29.7.0", + "jest-get-type": "^29.6.3", + "jest-matcher-utils": "^29.7.0", + "jest-message-util": "^29.7.0", + "jest-util": "^29.7.0" + }, + "engines": { + "node": 
"^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/express": { + "version": "4.21.2", + "resolved": "https://registry.npmjs.org/express/-/express-4.21.2.tgz", + "integrity": "sha512-28HqgMZAmih1Czt9ny7qr6ek2qddF4FclbMzwhCREB6OFfH+rXAnuNCwo1/wFvrtbgsQDb4kSbX9de9lFbrXnA==", + "license": "MIT", + "dependencies": { + "accepts": "~1.3.8", + "array-flatten": "1.1.1", + "body-parser": "1.20.3", + "content-disposition": "0.5.4", + "content-type": "~1.0.4", + "cookie": "0.7.1", + "cookie-signature": "1.0.6", + "debug": "2.6.9", + "depd": "2.0.0", + "encodeurl": "~2.0.0", + "escape-html": "~1.0.3", + "etag": "~1.8.1", + "finalhandler": "1.3.1", + "fresh": "0.5.2", + "http-errors": "2.0.0", + "merge-descriptors": "1.0.3", + "methods": "~1.1.2", + "on-finished": "2.4.1", + "parseurl": "~1.3.3", + "path-to-regexp": "0.1.12", + "proxy-addr": "~2.0.7", + "qs": "6.13.0", + "range-parser": "~1.2.1", + "safe-buffer": "5.2.1", + "send": "0.19.0", + "serve-static": "1.16.2", + "setprototypeof": "1.2.0", + "statuses": "2.0.1", + "type-is": "~1.6.18", + "utils-merge": "1.0.1", + "vary": "~1.1.2" + }, + "engines": { + "node": ">= 0.10.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" + } + }, + "node_modules/extend": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", + "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==", + "license": "MIT", + "optional": true + }, + "node_modules/farmhash-modern": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/farmhash-modern/-/farmhash-modern-1.1.0.tgz", + "integrity": "sha512-6ypT4XfgqJk/F3Yuv4SX26I3doUjt0GTG4a+JgWxXQpxXzTBq8fPUeGHfcYMMDPHJHm3yPOSjaeBwBGAHWXCdA==", + "license": "MIT", + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/fast-deep-equal": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", + 
"integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", + "license": "MIT", + "optional": true + }, + "node_modules/fast-json-stable-stringify": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", + "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", + "dev": true, + "license": "MIT" + }, + "node_modules/fast-xml-parser": { + "version": "4.5.1", + "resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-4.5.1.tgz", + "integrity": "sha512-y655CeyUQ+jj7KBbYMc4FG01V8ZQqjN+gDYGJ50RtfsUB8iG9AmwmwoAgeKLJdmueKKMrH1RJ7yXHTSoczdv5w==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/NaturalIntelligence" + }, + { + "type": "paypal", + "url": "https://paypal.me/naturalintelligence" + } + ], + "license": "MIT", + "optional": true, + "dependencies": { + "strnum": "^1.0.5" + }, + "bin": { + "fxparser": "src/cli/cli.js" + } + }, + "node_modules/faye-websocket": { + "version": "0.11.4", + "resolved": "https://registry.npmjs.org/faye-websocket/-/faye-websocket-0.11.4.tgz", + "integrity": "sha512-CzbClwlXAuiRQAlUyfqPgvPoNKTckTPGfwZV4ZdAhVcP2lh9KUxJg2b5GkE7XbjKQ3YJnQ9z6D9ntLAlB+tP8g==", + "license": "Apache-2.0", + "dependencies": { + "websocket-driver": ">=0.5.1" + }, + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/fb-watchman": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/fb-watchman/-/fb-watchman-2.0.2.tgz", + "integrity": "sha512-p5161BqbuCaSnB8jIbzQHOlpgsPmK5rJVDfDKO91Axs5NC1uu3HRQm6wt9cd9/+GtQQIO53JdGXXoyDpTAsgYA==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "bser": "2.1.1" + } + }, + "node_modules/filelist": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/filelist/-/filelist-1.0.4.tgz", + "integrity": 
"sha512-w1cEuf3S+DrLCQL7ET6kz+gmlJdbq9J7yXCSjK/OZCPA+qEN1WyF4ZAf0YYJa4/shHJra2t/d/r8SV4Ji+x+8Q==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "minimatch": "^5.0.1" + } + }, + "node_modules/filelist/node_modules/brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/filelist/node_modules/minimatch": { + "version": "5.1.6", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.6.tgz", + "integrity": "sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/fill-range": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", + "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", + "dev": true, + "license": "MIT", + "dependencies": { + "to-regex-range": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/finalhandler": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.3.1.tgz", + "integrity": "sha512-6BN9trH7bp3qvnrRyzsBz+g3lZxTNZTbVO2EV1CS0WIcDbawYVdYvGflME/9QP0h0pYlCDBCTjYa9nZzMDpyxQ==", + "license": "MIT", + "dependencies": { + "debug": "2.6.9", + "encodeurl": "~2.0.0", + "escape-html": "~1.0.3", + "on-finished": "2.4.1", + "parseurl": "~1.3.3", + "statuses": "2.0.1", + "unpipe": "~1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/find-up": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", + "integrity": 
"sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", + "dev": true, + "license": "MIT", + "dependencies": { + "locate-path": "^5.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/firebase-admin": { + "version": "12.7.0", + "resolved": "https://registry.npmjs.org/firebase-admin/-/firebase-admin-12.7.0.tgz", + "integrity": "sha512-raFIrOyTqREbyXsNkSHyciQLfv8AUZazehPaQS1lZBSCDYW74FYXU0nQZa3qHI4K+hawohlDbywZ4+qce9YNxA==", + "license": "Apache-2.0", + "dependencies": { + "@fastify/busboy": "^3.0.0", + "@firebase/database-compat": "1.0.8", + "@firebase/database-types": "1.0.5", + "@types/node": "^22.0.1", + "farmhash-modern": "^1.1.0", + "jsonwebtoken": "^9.0.0", + "jwks-rsa": "^3.1.0", + "node-forge": "^1.3.1", + "uuid": "^10.0.0" + }, + "engines": { + "node": ">=14" + }, + "optionalDependencies": { + "@google-cloud/firestore": "^7.7.0", + "@google-cloud/storage": "^7.7.0" + } + }, + "node_modules/firebase-functions": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/firebase-functions/-/firebase-functions-6.2.0.tgz", + "integrity": "sha512-vfyyVHS8elxplzEQ9To+NaINRPFUsDasQrasTa2eFJBYSPzdhkw6rwLmvwyYw622+ze+g4sDIb14VZym+afqXQ==", + "license": "MIT", + "dependencies": { + "@types/cors": "^2.8.5", + "@types/express": "^4.17.21", + "cors": "^2.8.5", + "express": "^4.21.0", + "protobufjs": "^7.2.2" + }, + "bin": { + "firebase-functions": "lib/bin/firebase-functions.js" + }, + "engines": { + "node": ">=14.10.0" + }, + "peerDependencies": { + "firebase-admin": "^11.10.0 || ^12.0.0 || ^13.0.0" + } + }, + "node_modules/firebase-functions-test": { + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/firebase-functions-test/-/firebase-functions-test-3.4.0.tgz", + "integrity": "sha512-ignkiegIvGtCbDZFEKerLrzrKGonCHD/VJsuNhcfz3jDfhP9yN7mJeq7AHXz8cOJaAnBnJ0WxHj3xezem2dEbw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/lodash": "^4.14.104", + 
"lodash": "^4.17.5", + "ts-deepmerge": "^2.0.1" + }, + "engines": { + "node": ">=14.0.0" + }, + "peerDependencies": { + "firebase-admin": "^8.0.0 || ^9.0.0 || ^10.0.0 || ^11.0.0 || ^12.0.0 || ^13.0.0", + "firebase-functions": ">=4.9.0", + "jest": ">=28.0.0" + } + }, + "node_modules/form-data": { + "version": "2.5.2", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.5.2.tgz", + "integrity": "sha512-GgwY0PS7DbXqajuGf4OYlsrIu3zgxD6Vvql43IBhm6MahqA5SK/7mwhtNj2AdH2z35YR34ujJ7BN+3fFC3jP5Q==", + "license": "MIT", + "optional": true, + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.6", + "mime-types": "^2.1.12", + "safe-buffer": "^5.2.1" + }, + "engines": { + "node": ">= 0.12" + } + }, + "node_modules/forwarded": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz", + "integrity": "sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/fresh": { + "version": "0.5.2", + "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz", + "integrity": "sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", + "dev": true, + "license": "ISC" + }, + "node_modules/fsevents": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", + "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || 
^10.6.0 || >=11.0.0" + } + }, + "node_modules/function-bind": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", + "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/functional-red-black-tree": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz", + "integrity": "sha512-dsKNQNdj6xA3T+QlADDA7mOSlX0qiMINjn0cgr+eGHGsbSHzTabcIogz2+p/iqP1Xs6EP/sS2SbqH+brGTbq0g==", + "license": "MIT", + "optional": true + }, + "node_modules/gaxios": { + "version": "6.7.1", + "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-6.7.1.tgz", + "integrity": "sha512-LDODD4TMYx7XXdpwxAVRAIAuB0bzv0s+ywFonY46k126qzQHT9ygyoa9tncmOiQmmDrik65UYsEkv3lbfqQ3yQ==", + "license": "Apache-2.0", + "optional": true, + "dependencies": { + "extend": "^3.0.2", + "https-proxy-agent": "^7.0.1", + "is-stream": "^2.0.0", + "node-fetch": "^2.6.9", + "uuid": "^9.0.1" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/gaxios/node_modules/uuid": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-9.0.1.tgz", + "integrity": "sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==", + "funding": [ + "https://github.com/sponsors/broofa", + "https://github.com/sponsors/ctavan" + ], + "license": "MIT", + "optional": true, + "bin": { + "uuid": "dist/bin/uuid" + } + }, + "node_modules/gcp-metadata": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-6.1.0.tgz", + "integrity": "sha512-Jh/AIwwgaxan+7ZUUmRLCjtchyDiqh4KjBJ5tW3plBZb5iL/BPcso8A5DlzeD9qlw0duCamnNdpFjxwaT0KyKg==", + "license": "Apache-2.0", + "optional": true, + "dependencies": { + "gaxios": "^6.0.0", + "json-bigint": "^1.0.0" + }, + "engines": { + 
"node": ">=14" + } + }, + "node_modules/gensync": { + "version": "1.0.0-beta.2", + "resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz", + "integrity": "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/get-caller-file": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", + "devOptional": true, + "license": "ISC", + "engines": { + "node": "6.* || 8.* || >= 10.*" + } + }, + "node_modules/get-intrinsic": { + "version": "1.2.7", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.7.tgz", + "integrity": "sha512-VW6Pxhsrk0KAOqs3WEd0klDiF/+V7gQOpAvY1jVU/LHmaD/kQO4523aiJuikX/QAKYiW6x8Jh+RJej1almdtCA==", + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.1", + "es-define-property": "^1.0.1", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.0.0", + "function-bind": "^1.1.2", + "get-proto": "^1.0.0", + "gopd": "^1.2.0", + "has-symbols": "^1.1.0", + "hasown": "^2.0.2", + "math-intrinsics": "^1.1.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-package-type": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/get-package-type/-/get-package-type-0.1.0.tgz", + "integrity": "sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/get-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz", + "integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==", + 
"license": "MIT", + "dependencies": { + "dunder-proto": "^1.0.1", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/get-stream": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz", + "integrity": "sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "deprecated": "Glob versions prior to v9 are no longer supported", + "dev": true, + "license": "ISC", + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/globals": { + "version": "11.12.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", + "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/google-auth-library": { + "version": "9.15.0", + "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-9.15.0.tgz", + "integrity": "sha512-7ccSEJFDFO7exFbO6NRyC+xH8/mZ1GZGG2xxx9iHxZWcjUjJpjWxIMw3cofAKcueZ6DATiukmmprD7yavQHOyQ==", + "license": "Apache-2.0", + "optional": true, + "dependencies": { + "base64-js": "^1.3.0", + "ecdsa-sig-formatter": "^1.0.11", + "gaxios": "^6.1.1", + "gcp-metadata": "^6.1.0", + "gtoken": "^7.0.0", + "jws": "^4.0.0" + }, + "engines": { + "node": ">=14" + } + }, + 
"node_modules/google-gax": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/google-gax/-/google-gax-4.4.1.tgz", + "integrity": "sha512-Phyp9fMfA00J3sZbJxbbB4jC55b7DBjE3F6poyL3wKMEBVKA79q6BGuHcTiM28yOzVql0NDbRL8MLLh8Iwk9Dg==", + "license": "Apache-2.0", + "optional": true, + "dependencies": { + "@grpc/grpc-js": "^1.10.9", + "@grpc/proto-loader": "^0.7.13", + "@types/long": "^4.0.0", + "abort-controller": "^3.0.0", + "duplexify": "^4.0.0", + "google-auth-library": "^9.3.0", + "node-fetch": "^2.7.0", + "object-hash": "^3.0.0", + "proto3-json-serializer": "^2.0.2", + "protobufjs": "^7.3.2", + "retry-request": "^7.0.0", + "uuid": "^9.0.1" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/google-gax/node_modules/uuid": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-9.0.1.tgz", + "integrity": "sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==", + "funding": [ + "https://github.com/sponsors/broofa", + "https://github.com/sponsors/ctavan" + ], + "license": "MIT", + "optional": true, + "bin": { + "uuid": "dist/bin/uuid" + } + }, + "node_modules/gopd": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz", + "integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/graceful-fs": { + "version": "4.2.11", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", + "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==", + "dev": true, + "license": "ISC" + }, + "node_modules/gtoken": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-7.1.0.tgz", + "integrity": 
"sha512-pCcEwRi+TKpMlxAQObHDQ56KawURgyAf6jtIY046fJ5tIv3zDe/LEIubckAO8fj6JnAxLdmWkUfNyulQ2iKdEw==", + "license": "MIT", + "optional": true, + "dependencies": { + "gaxios": "^6.0.0", + "jws": "^4.0.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/has-symbols": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz", + "integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/hasown": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", + "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", + "license": "MIT", + "dependencies": { + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/html-entities": { + "version": "2.5.2", + "resolved": "https://registry.npmjs.org/html-entities/-/html-entities-2.5.2.tgz", + "integrity": "sha512-K//PSRMQk4FZ78Kyau+mZurHn3FH0Vwr+H36eE0rPbeYkRRi9YxceYPhuN60UwWorxyKHhqoAJl2OFKa4BVtaA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/mdevils" + }, + { + "type": "patreon", + "url": "https://patreon.com/mdevils" + } + ], + "license": "MIT", + "optional": true + }, + "node_modules/html-escaper": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/html-escaper/-/html-escaper-2.0.2.tgz", + "integrity": 
"sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg==", + "dev": true, + "license": "MIT" + }, + "node_modules/http-errors": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.0.tgz", + "integrity": "sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==", + "license": "MIT", + "dependencies": { + "depd": "2.0.0", + "inherits": "2.0.4", + "setprototypeof": "1.2.0", + "statuses": "2.0.1", + "toidentifier": "1.0.1" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/http-parser-js": { + "version": "0.5.8", + "resolved": "https://registry.npmjs.org/http-parser-js/-/http-parser-js-0.5.8.tgz", + "integrity": "sha512-SGeBX54F94Wgu5RH3X5jsDtf4eHyRogWX1XGT3b4HuW3tQPM4AaBzoUji/4AAJNXCEOWZ5O0DgZmJw1947gD5Q==", + "license": "MIT" + }, + "node_modules/http-proxy-agent": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-5.0.0.tgz", + "integrity": "sha512-n2hY8YdoRE1i7r6M0w9DIw5GgZN0G25P8zLCRQ8rjXtTU3vsNFBI/vWK/UIeE6g5MUUz6avwAPXmL6Fy9D/90w==", + "license": "MIT", + "optional": true, + "dependencies": { + "@tootallnate/once": "2", + "agent-base": "6", + "debug": "4" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/http-proxy-agent/node_modules/agent-base": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz", + "integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==", + "license": "MIT", + "optional": true, + "dependencies": { + "debug": "4" + }, + "engines": { + "node": ">= 6.0.0" + } + }, + "node_modules/http-proxy-agent/node_modules/debug": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.0.tgz", + "integrity": "sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==", + "license": "MIT", + 
"optional": true, + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/http-proxy-agent/node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "license": "MIT", + "optional": true + }, + "node_modules/https-proxy-agent": { + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.6.tgz", + "integrity": "sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw==", + "license": "MIT", + "optional": true, + "dependencies": { + "agent-base": "^7.1.2", + "debug": "4" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/https-proxy-agent/node_modules/debug": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.0.tgz", + "integrity": "sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==", + "license": "MIT", + "optional": true, + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/https-proxy-agent/node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "license": "MIT", + "optional": true + }, + "node_modules/human-signals": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-2.1.0.tgz", + "integrity": "sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=10.17.0" + } + }, + 
"node_modules/iconv-lite": { + "version": "0.4.24", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", + "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==", + "license": "MIT", + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/import-local": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/import-local/-/import-local-3.2.0.tgz", + "integrity": "sha512-2SPlun1JUPWoM6t3F0dw0FkCF/jWY8kttcY4f599GLTSjh2OCuuhdTkJQsEcZzBqbXZGKMK2OqW1oZsjtf/gQA==", + "dev": true, + "license": "MIT", + "dependencies": { + "pkg-dir": "^4.2.0", + "resolve-cwd": "^3.0.0" + }, + "bin": { + "import-local-fixture": "fixtures/cli.js" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/imurmurhash": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", + "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.8.19" + } + }, + "node_modules/inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", + "deprecated": "This module is not supported, and leaks memory. Do not use it. 
Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful.", + "dev": true, + "license": "ISC", + "dependencies": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "license": "ISC" + }, + "node_modules/ipaddr.js": { + "version": "1.9.1", + "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz", + "integrity": "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==", + "license": "MIT", + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/is-arrayish": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", + "integrity": "sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==", + "dev": true, + "license": "MIT" + }, + "node_modules/is-core-module": { + "version": "2.16.1", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.16.1.tgz", + "integrity": "sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w==", + "dev": true, + "license": "MIT", + "dependencies": { + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "devOptional": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/is-generator-fn": { + "version": "2.1.0", + "resolved": 
"https://registry.npmjs.org/is-generator-fn/-/is-generator-fn-2.1.0.tgz", + "integrity": "sha512-cTIB4yPYL/Grw0EaSzASzg6bBy9gqCofvWN8okThAYIxKJZC+udlRAmGbM0XLeniEJSs8uEgHPGuHSe1XsOLSQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/is-stream": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", + "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", + "devOptional": true, + "license": "MIT", + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", + "dev": true, + "license": "ISC" + }, + "node_modules/istanbul-lib-coverage": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.2.tgz", + "integrity": "sha512-O8dpsF+r0WV/8MNRKfnmrtCWhuKjxrq2w+jpzBL5UZKTi2LeVWnWOmWRxFlesJONmc+wLAGvKQZEOanko0LFTg==", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=8" + } + }, + "node_modules/istanbul-lib-instrument": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/istanbul-lib-instrument/-/istanbul-lib-instrument-6.0.3.tgz", + "integrity": "sha512-Vtgk7L/R2JHyyGW07spoFlB8/lpjiOLTjMdms6AFMraYt3BaJauod/NGrfnVG/y4Ix1JEuMRPDPEj2ua+zz1/Q==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "@babel/core": "^7.23.9", + 
"@babel/parser": "^7.23.9", + "@istanbuljs/schema": "^0.1.3", + "istanbul-lib-coverage": "^3.2.0", + "semver": "^7.5.4" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/istanbul-lib-instrument/node_modules/semver": { + "version": "7.6.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz", + "integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/istanbul-lib-report": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/istanbul-lib-report/-/istanbul-lib-report-3.0.1.tgz", + "integrity": "sha512-GCfE1mtsHGOELCU8e/Z7YWzpmybrx/+dSTfLrvY8qRmaY6zXTKWn6WQIjaAFw069icm6GVMNkgu0NzI4iPZUNw==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "istanbul-lib-coverage": "^3.0.0", + "make-dir": "^4.0.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/istanbul-lib-source-maps": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/istanbul-lib-source-maps/-/istanbul-lib-source-maps-4.0.1.tgz", + "integrity": "sha512-n3s8EwkdFIJCG3BPKBYvskgXGoy88ARzvegkitk60NxRdwltLOTaH7CUiMRXvwYorl0Q712iEjcWB+fK/MrWVw==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "debug": "^4.1.1", + "istanbul-lib-coverage": "^3.0.0", + "source-map": "^0.6.1" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/istanbul-lib-source-maps/node_modules/debug": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.0.tgz", + "integrity": "sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==", + "dev": true, + "license": "MIT", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + 
"node_modules/istanbul-lib-source-maps/node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "dev": true, + "license": "MIT" + }, + "node_modules/istanbul-reports": { + "version": "3.1.7", + "resolved": "https://registry.npmjs.org/istanbul-reports/-/istanbul-reports-3.1.7.tgz", + "integrity": "sha512-BewmUXImeuRk2YY0PVbxgKAysvhRPUQE0h5QRM++nVWyubKGV0l8qQ5op8+B2DOmwSe63Jivj0BjkPQVf8fP5g==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "html-escaper": "^2.0.0", + "istanbul-lib-report": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/jake": { + "version": "10.9.2", + "resolved": "https://registry.npmjs.org/jake/-/jake-10.9.2.tgz", + "integrity": "sha512-2P4SQ0HrLQ+fw6llpLnOaGAvN2Zu6778SJMrCUwns4fOoG9ayrTiZk3VV8sCPkVZF8ab0zksVpS8FDY5pRCNBA==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "async": "^3.2.3", + "chalk": "^4.0.2", + "filelist": "^1.0.4", + "minimatch": "^3.1.2" + }, + "bin": { + "jake": "bin/cli.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/jest": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest/-/jest-29.7.0.tgz", + "integrity": "sha512-NIy3oAFp9shda19hy4HK0HRTWKtPJmGdnvywu01nOqNC2vZg+Z+fvJDxpMQA88eb2I9EcafcdjYgsDthnYTvGw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/core": "^29.7.0", + "@jest/types": "^29.6.3", + "import-local": "^3.0.2", + "jest-cli": "^29.7.0" + }, + "bin": { + "jest": "bin/jest.js" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "peerDependencies": { + "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" + }, + "peerDependenciesMeta": { + "node-notifier": { + "optional": true + } + } + }, + "node_modules/jest-changed-files": { + "version": "29.7.0", + "resolved": 
"https://registry.npmjs.org/jest-changed-files/-/jest-changed-files-29.7.0.tgz", + "integrity": "sha512-fEArFiwf1BpQ+4bXSprcDc3/x4HSzL4al2tozwVpDFpsxALjLYdyiIK4e5Vz66GQJIbXJ82+35PtysofptNX2w==", + "dev": true, + "license": "MIT", + "dependencies": { + "execa": "^5.0.0", + "jest-util": "^29.7.0", + "p-limit": "^3.1.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-circus": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-circus/-/jest-circus-29.7.0.tgz", + "integrity": "sha512-3E1nCMgipcTkCocFwM90XXQab9bS+GMsjdpmPrlelaxwD93Ad8iVEjX/vvHPdLPnFf+L40u+5+iutRdA1N9myw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/environment": "^29.7.0", + "@jest/expect": "^29.7.0", + "@jest/test-result": "^29.7.0", + "@jest/types": "^29.6.3", + "@types/node": "*", + "chalk": "^4.0.0", + "co": "^4.6.0", + "dedent": "^1.0.0", + "is-generator-fn": "^2.0.0", + "jest-each": "^29.7.0", + "jest-matcher-utils": "^29.7.0", + "jest-message-util": "^29.7.0", + "jest-runtime": "^29.7.0", + "jest-snapshot": "^29.7.0", + "jest-util": "^29.7.0", + "p-limit": "^3.1.0", + "pretty-format": "^29.7.0", + "pure-rand": "^6.0.0", + "slash": "^3.0.0", + "stack-utils": "^2.0.3" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-cli": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-cli/-/jest-cli-29.7.0.tgz", + "integrity": "sha512-OVVobw2IubN/GSYsxETi+gOe7Ka59EFMR/twOU3Jb2GnKKeMGJB5SGUUrEz3SFVmJASUdZUzy83sLNNQ2gZslg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/core": "^29.7.0", + "@jest/test-result": "^29.7.0", + "@jest/types": "^29.6.3", + "chalk": "^4.0.0", + "create-jest": "^29.7.0", + "exit": "^0.1.2", + "import-local": "^3.0.2", + "jest-config": "^29.7.0", + "jest-util": "^29.7.0", + "jest-validate": "^29.7.0", + "yargs": "^17.3.1" + }, + "bin": { + "jest": "bin/jest.js" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || 
>=18.0.0" + }, + "peerDependencies": { + "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" + }, + "peerDependenciesMeta": { + "node-notifier": { + "optional": true + } + } + }, + "node_modules/jest-config": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-config/-/jest-config-29.7.0.tgz", + "integrity": "sha512-uXbpfeQ7R6TZBqI3/TxCU4q4ttk3u0PJeC+E0zbfSoSjq6bJ7buBPxzQPL0ifrkY4DNu4JUdk0ImlBUYi840eQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/core": "^7.11.6", + "@jest/test-sequencer": "^29.7.0", + "@jest/types": "^29.6.3", + "babel-jest": "^29.7.0", + "chalk": "^4.0.0", + "ci-info": "^3.2.0", + "deepmerge": "^4.2.2", + "glob": "^7.1.3", + "graceful-fs": "^4.2.9", + "jest-circus": "^29.7.0", + "jest-environment-node": "^29.7.0", + "jest-get-type": "^29.6.3", + "jest-regex-util": "^29.6.3", + "jest-resolve": "^29.7.0", + "jest-runner": "^29.7.0", + "jest-util": "^29.7.0", + "jest-validate": "^29.7.0", + "micromatch": "^4.0.4", + "parse-json": "^5.2.0", + "pretty-format": "^29.7.0", + "slash": "^3.0.0", + "strip-json-comments": "^3.1.1" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "peerDependencies": { + "@types/node": "*", + "ts-node": ">=9.0.0" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + }, + "ts-node": { + "optional": true + } + } + }, + "node_modules/jest-diff": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-diff/-/jest-diff-29.7.0.tgz", + "integrity": "sha512-LMIgiIrhigmPrs03JHpxUh2yISK3vLFPkAodPeo0+BuF7wA2FoQbkEg1u8gBYBThncu7e1oEDUfIXVuTqLRUjw==", + "dev": true, + "license": "MIT", + "dependencies": { + "chalk": "^4.0.0", + "diff-sequences": "^29.6.3", + "jest-get-type": "^29.6.3", + "pretty-format": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-docblock": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-docblock/-/jest-docblock-29.7.0.tgz", + "integrity": 
"sha512-q617Auw3A612guyaFgsbFeYpNP5t2aoUNLwBUbc/0kD1R4t9ixDbyFTHd1nok4epoVFpr7PmeWHrhvuV3XaJ4g==", + "dev": true, + "license": "MIT", + "dependencies": { + "detect-newline": "^3.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-each": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-each/-/jest-each-29.7.0.tgz", + "integrity": "sha512-gns+Er14+ZrEoC5fhOfYCY1LOHHr0TI+rQUHZS8Ttw2l7gl+80eHc/gFf2Ktkw0+SIACDTeWvpFcv3B04VembQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "^29.6.3", + "chalk": "^4.0.0", + "jest-get-type": "^29.6.3", + "jest-util": "^29.7.0", + "pretty-format": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-environment-node": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-environment-node/-/jest-environment-node-29.7.0.tgz", + "integrity": "sha512-DOSwCRqXirTOyheM+4d5YZOrWcdu0LNZ87ewUoywbcb2XR4wKgqiG8vNeYwhjFMbEkfju7wx2GYH0P2gevGvFw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/environment": "^29.7.0", + "@jest/fake-timers": "^29.7.0", + "@jest/types": "^29.6.3", + "@types/node": "*", + "jest-mock": "^29.7.0", + "jest-util": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-get-type": { + "version": "29.6.3", + "resolved": "https://registry.npmjs.org/jest-get-type/-/jest-get-type-29.6.3.tgz", + "integrity": "sha512-zrteXnqYxfQh7l5FHyL38jL39di8H8rHoecLH3JNxH3BwOrBsNeabdap5e0I23lD4HHI8W5VFBZqG4Eaq5LNcw==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-haste-map": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-haste-map/-/jest-haste-map-29.7.0.tgz", + "integrity": "sha512-fP8u2pyfqx0K1rGn1R9pyE0/KTn+G7PxktWidOBTqFPLYX0b9ksaMFkhK5vrS3DVun09pckLdlx90QthlW7AmA==", + "dev": true, + "license": "MIT", + 
"dependencies": { + "@jest/types": "^29.6.3", + "@types/graceful-fs": "^4.1.3", + "@types/node": "*", + "anymatch": "^3.0.3", + "fb-watchman": "^2.0.0", + "graceful-fs": "^4.2.9", + "jest-regex-util": "^29.6.3", + "jest-util": "^29.7.0", + "jest-worker": "^29.7.0", + "micromatch": "^4.0.4", + "walker": "^1.0.8" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "optionalDependencies": { + "fsevents": "^2.3.2" + } + }, + "node_modules/jest-leak-detector": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-leak-detector/-/jest-leak-detector-29.7.0.tgz", + "integrity": "sha512-kYA8IJcSYtST2BY9I+SMC32nDpBT3J2NvWJx8+JCuCdl/CR1I4EKUJROiP8XtCcxqgTTBGJNdbB1A8XRKbTetw==", + "dev": true, + "license": "MIT", + "dependencies": { + "jest-get-type": "^29.6.3", + "pretty-format": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-matcher-utils": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-matcher-utils/-/jest-matcher-utils-29.7.0.tgz", + "integrity": "sha512-sBkD+Xi9DtcChsI3L3u0+N0opgPYnCRPtGcQYrgXmR+hmt/fYfWAL0xRXYU8eWOdfuLgBe0YCW3AFtnRLagq/g==", + "dev": true, + "license": "MIT", + "dependencies": { + "chalk": "^4.0.0", + "jest-diff": "^29.7.0", + "jest-get-type": "^29.6.3", + "pretty-format": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-message-util": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-message-util/-/jest-message-util-29.7.0.tgz", + "integrity": "sha512-GBEV4GRADeP+qtB2+6u61stea8mGcOT4mCtrYISZwfu9/ISHFJ/5zOMXYbpBE9RsS5+Gb63DW4FgmnKJ79Kf6w==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.12.13", + "@jest/types": "^29.6.3", + "@types/stack-utils": "^2.0.0", + "chalk": "^4.0.0", + "graceful-fs": "^4.2.9", + "micromatch": "^4.0.4", + "pretty-format": "^29.7.0", + "slash": "^3.0.0", + "stack-utils": "^2.0.3" + }, + "engines": { 
+ "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-mock": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-mock/-/jest-mock-29.7.0.tgz", + "integrity": "sha512-ITOMZn+UkYS4ZFh83xYAOzWStloNzJFO2s8DWrE4lhtGD+AorgnbkiKERe4wQVBydIGPx059g6riW5Btp6Llnw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "^29.6.3", + "@types/node": "*", + "jest-util": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-pnp-resolver": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/jest-pnp-resolver/-/jest-pnp-resolver-1.2.3.tgz", + "integrity": "sha512-+3NpwQEnRoIBtx4fyhblQDPgJI0H1IEIkX7ShLUjPGA7TtUTvI1oiKi3SR4oBR0hQhQR80l4WAe5RrXBwWMA8w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + }, + "peerDependencies": { + "jest-resolve": "*" + }, + "peerDependenciesMeta": { + "jest-resolve": { + "optional": true + } + } + }, + "node_modules/jest-regex-util": { + "version": "29.6.3", + "resolved": "https://registry.npmjs.org/jest-regex-util/-/jest-regex-util-29.6.3.tgz", + "integrity": "sha512-KJJBsRCyyLNWCNBOvZyRDnAIfUiRJ8v+hOBQYGn8gDyF3UegwiP4gwRR3/SDa42g1YbVycTidUF3rKjyLFDWbg==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-resolve": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-resolve/-/jest-resolve-29.7.0.tgz", + "integrity": "sha512-IOVhZSrg+UvVAshDSDtHyFCCBUl/Q3AAJv8iZ6ZjnZ74xzvwuzLXid9IIIPgTnY62SJjfuupMKZsZQRsCvxEgA==", + "dev": true, + "license": "MIT", + "dependencies": { + "chalk": "^4.0.0", + "graceful-fs": "^4.2.9", + "jest-haste-map": "^29.7.0", + "jest-pnp-resolver": "^1.2.2", + "jest-util": "^29.7.0", + "jest-validate": "^29.7.0", + "resolve": "^1.20.0", + "resolve.exports": "^2.0.0", + "slash": "^3.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + 
"node_modules/jest-resolve-dependencies": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-resolve-dependencies/-/jest-resolve-dependencies-29.7.0.tgz", + "integrity": "sha512-un0zD/6qxJ+S0et7WxeI3H5XSe9lTBBR7bOHCHXkKR6luG5mwDDlIzVQ0V5cZCuoTgEdcdwzTghYkTWfubi+nA==", + "dev": true, + "license": "MIT", + "dependencies": { + "jest-regex-util": "^29.6.3", + "jest-snapshot": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-runner": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-runner/-/jest-runner-29.7.0.tgz", + "integrity": "sha512-fsc4N6cPCAahybGBfTRcq5wFR6fpLznMg47sY5aDpsoejOcVYFb07AHuSnR0liMcPTgBsA3ZJL6kFOjPdoNipQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/console": "^29.7.0", + "@jest/environment": "^29.7.0", + "@jest/test-result": "^29.7.0", + "@jest/transform": "^29.7.0", + "@jest/types": "^29.6.3", + "@types/node": "*", + "chalk": "^4.0.0", + "emittery": "^0.13.1", + "graceful-fs": "^4.2.9", + "jest-docblock": "^29.7.0", + "jest-environment-node": "^29.7.0", + "jest-haste-map": "^29.7.0", + "jest-leak-detector": "^29.7.0", + "jest-message-util": "^29.7.0", + "jest-resolve": "^29.7.0", + "jest-runtime": "^29.7.0", + "jest-util": "^29.7.0", + "jest-watcher": "^29.7.0", + "jest-worker": "^29.7.0", + "p-limit": "^3.1.0", + "source-map-support": "0.5.13" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-runtime": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-runtime/-/jest-runtime-29.7.0.tgz", + "integrity": "sha512-gUnLjgwdGqW7B4LvOIkbKs9WGbn+QLqRQQ9juC6HndeDiezIwhDP+mhMwHWCEcfQ5RUXa6OPnFF8BJh5xegwwQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/environment": "^29.7.0", + "@jest/fake-timers": "^29.7.0", + "@jest/globals": "^29.7.0", + "@jest/source-map": "^29.6.3", + "@jest/test-result": "^29.7.0", + "@jest/transform": "^29.7.0", + "@jest/types": 
"^29.6.3", + "@types/node": "*", + "chalk": "^4.0.0", + "cjs-module-lexer": "^1.0.0", + "collect-v8-coverage": "^1.0.0", + "glob": "^7.1.3", + "graceful-fs": "^4.2.9", + "jest-haste-map": "^29.7.0", + "jest-message-util": "^29.7.0", + "jest-mock": "^29.7.0", + "jest-regex-util": "^29.6.3", + "jest-resolve": "^29.7.0", + "jest-snapshot": "^29.7.0", + "jest-util": "^29.7.0", + "slash": "^3.0.0", + "strip-bom": "^4.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-snapshot": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-snapshot/-/jest-snapshot-29.7.0.tgz", + "integrity": "sha512-Rm0BMWtxBcioHr1/OX5YCP8Uov4riHvKPknOGs804Zg9JGZgmIBkbtlxJC/7Z4msKYVbIJtfU+tKb8xlYNfdkw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/core": "^7.11.6", + "@babel/generator": "^7.7.2", + "@babel/plugin-syntax-jsx": "^7.7.2", + "@babel/plugin-syntax-typescript": "^7.7.2", + "@babel/types": "^7.3.3", + "@jest/expect-utils": "^29.7.0", + "@jest/transform": "^29.7.0", + "@jest/types": "^29.6.3", + "babel-preset-current-node-syntax": "^1.0.0", + "chalk": "^4.0.0", + "expect": "^29.7.0", + "graceful-fs": "^4.2.9", + "jest-diff": "^29.7.0", + "jest-get-type": "^29.6.3", + "jest-matcher-utils": "^29.7.0", + "jest-message-util": "^29.7.0", + "jest-util": "^29.7.0", + "natural-compare": "^1.4.0", + "pretty-format": "^29.7.0", + "semver": "^7.5.3" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-snapshot/node_modules/semver": { + "version": "7.6.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz", + "integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/jest-util": { + "version": "29.7.0", + "resolved": 
"https://registry.npmjs.org/jest-util/-/jest-util-29.7.0.tgz", + "integrity": "sha512-z6EbKajIpqGKU56y5KBUgy1dt1ihhQJgWzUlZHArA/+X2ad7Cb5iF+AK1EWVL/Bo7Rz9uurpqw6SiBCefUbCGA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "^29.6.3", + "@types/node": "*", + "chalk": "^4.0.0", + "ci-info": "^3.2.0", + "graceful-fs": "^4.2.9", + "picomatch": "^2.2.3" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-validate": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-validate/-/jest-validate-29.7.0.tgz", + "integrity": "sha512-ZB7wHqaRGVw/9hST/OuFUReG7M8vKeq0/J2egIGLdvjHCmYqGARhzXmtgi+gVeZ5uXFF219aOc3Ls2yLg27tkw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "^29.6.3", + "camelcase": "^6.2.0", + "chalk": "^4.0.0", + "jest-get-type": "^29.6.3", + "leven": "^3.1.0", + "pretty-format": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-validate/node_modules/camelcase": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-6.3.0.tgz", + "integrity": "sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/jest-watcher": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-watcher/-/jest-watcher-29.7.0.tgz", + "integrity": "sha512-49Fg7WXkU3Vl2h6LbLtMQ/HyB6rXSIX7SqvBLQmssRBGN9I0PNvPmAmCWSOY6SOvrjhI/F7/bGAv9RtnsPA03g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/test-result": "^29.7.0", + "@jest/types": "^29.6.3", + "@types/node": "*", + "ansi-escapes": "^4.2.1", + "chalk": "^4.0.0", + "emittery": "^0.13.1", + "jest-util": "^29.7.0", + "string-length": "^4.0.1" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + 
}, + "node_modules/jest-worker": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-worker/-/jest-worker-29.7.0.tgz", + "integrity": "sha512-eIz2msL/EzL9UFTFFx7jBTkeZfku0yUAyZZZmJ93H2TYEiroIx2PQjEXcwYtYl8zXCxb+PAmA2hLIt/6ZEkPHw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*", + "jest-util": "^29.7.0", + "merge-stream": "^2.0.0", + "supports-color": "^8.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-worker/node_modules/supports-color": { + "version": "8.1.1", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", + "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/supports-color?sponsor=1" + } + }, + "node_modules/jose": { + "version": "4.15.9", + "resolved": "https://registry.npmjs.org/jose/-/jose-4.15.9.tgz", + "integrity": "sha512-1vUQX+IdDMVPj4k8kOxgUqlcK518yluMuGZwqlr44FS1ppZB/5GWh4rZG89erpOBOJjU/OBsnCVFfapsRz6nEA==", + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/panva" + } + }, + "node_modules/js-sha256": { + "version": "0.9.0", + "resolved": "https://registry.npmjs.org/js-sha256/-/js-sha256-0.9.0.tgz", + "integrity": "sha512-sga3MHh9sgQN2+pJ9VYZ+1LPwXOxuBJBA5nrR5/ofPfuiJBE2hnjsaN8se8JznOmGLN2p49Pe5U/ttafcs/apA==", + "license": "MIT" + }, + "node_modules/js-tokens": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/js-yaml": { + "version": "3.14.1", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz", + "integrity": 
"sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==", + "dev": true, + "license": "MIT", + "dependencies": { + "argparse": "^1.0.7", + "esprima": "^4.0.0" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/jsesc": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-3.1.0.tgz", + "integrity": "sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA==", + "dev": true, + "license": "MIT", + "bin": { + "jsesc": "bin/jsesc" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/json-bigint": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-bigint/-/json-bigint-1.0.0.tgz", + "integrity": "sha512-SiPv/8VpZuWbvLSMtTDU8hEfrZWg/mH/nV/b4o0CYbSxu1UIQPLdwKOCIyLQX+VIPO5vrLX3i8qtqFyhdPSUSQ==", + "license": "MIT", + "optional": true, + "dependencies": { + "bignumber.js": "^9.0.0" + } + }, + "node_modules/json-parse-even-better-errors": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz", + "integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==", + "dev": true, + "license": "MIT" + }, + "node_modules/json5": { + "version": "2.2.3", + "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz", + "integrity": "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==", + "dev": true, + "license": "MIT", + "bin": { + "json5": "lib/cli.js" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/jsonwebtoken": { + "version": "9.0.2", + "resolved": "https://registry.npmjs.org/jsonwebtoken/-/jsonwebtoken-9.0.2.tgz", + "integrity": "sha512-PRp66vJ865SSqOlgqS8hujT5U4AOgMfhrwYIuIhfKaoSCZcirrmASQr8CX7cUg+RMih+hgznrjp99o+W4pJLHQ==", + "license": "MIT", + "dependencies": { + "jws": "^3.2.2", + "lodash.includes": "^4.3.0", + "lodash.isboolean": 
"^3.0.3", + "lodash.isinteger": "^4.0.4", + "lodash.isnumber": "^3.0.3", + "lodash.isplainobject": "^4.0.6", + "lodash.isstring": "^4.0.1", + "lodash.once": "^4.0.0", + "ms": "^2.1.1", + "semver": "^7.5.4" + }, + "engines": { + "node": ">=12", + "npm": ">=6" + } + }, + "node_modules/jsonwebtoken/node_modules/jwa": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/jwa/-/jwa-1.4.1.tgz", + "integrity": "sha512-qiLX/xhEEFKUAJ6FiBMbes3w9ATzyk5W7Hvzpa/SLYdxNtng+gcurvrI7TbACjIXlsJyr05/S1oUhZrc63evQA==", + "license": "MIT", + "dependencies": { + "buffer-equal-constant-time": "1.0.1", + "ecdsa-sig-formatter": "1.0.11", + "safe-buffer": "^5.0.1" + } + }, + "node_modules/jsonwebtoken/node_modules/jws": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/jws/-/jws-3.2.2.tgz", + "integrity": "sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA==", + "license": "MIT", + "dependencies": { + "jwa": "^1.4.1", + "safe-buffer": "^5.0.1" + } + }, + "node_modules/jsonwebtoken/node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "license": "MIT" + }, + "node_modules/jsonwebtoken/node_modules/semver": { + "version": "7.6.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz", + "integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==", + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/jwa": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/jwa/-/jwa-2.0.0.tgz", + "integrity": "sha512-jrZ2Qx916EA+fq9cEAeCROWPTfCwi1IVHqT2tapuqLEVVDKFDENFw1oL+MwrTvH6msKxsd1YTDVw6uKEcsrLEA==", + "license": "MIT", + "optional": true, + "dependencies": { + "buffer-equal-constant-time": "1.0.1", + "ecdsa-sig-formatter": 
"1.0.11", + "safe-buffer": "^5.0.1" + } + }, + "node_modules/jwks-rsa": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/jwks-rsa/-/jwks-rsa-3.1.0.tgz", + "integrity": "sha512-v7nqlfezb9YfHHzYII3ef2a2j1XnGeSE/bK3WfumaYCqONAIstJbrEGapz4kadScZzEt7zYCN7bucj8C0Mv/Rg==", + "license": "MIT", + "dependencies": { + "@types/express": "^4.17.17", + "@types/jsonwebtoken": "^9.0.2", + "debug": "^4.3.4", + "jose": "^4.14.6", + "limiter": "^1.1.5", + "lru-memoizer": "^2.2.0" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/jwks-rsa/node_modules/debug": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.0.tgz", + "integrity": "sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==", + "license": "MIT", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/jwks-rsa/node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "license": "MIT" + }, + "node_modules/jws": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/jws/-/jws-4.0.0.tgz", + "integrity": "sha512-KDncfTmOZoOMTFG4mBlG0qUIOlc03fmzH+ru6RgYVZhPkyiy/92Owlt/8UEN+a4TXR1FQetfIpJE8ApdvdVxTg==", + "license": "MIT", + "optional": true, + "dependencies": { + "jwa": "^2.0.0", + "safe-buffer": "^5.0.1" + } + }, + "node_modules/kleur": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/kleur/-/kleur-3.0.3.tgz", + "integrity": "sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/leven": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/leven/-/leven-3.1.0.tgz", + 
"integrity": "sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/limiter": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/limiter/-/limiter-1.1.5.tgz", + "integrity": "sha512-FWWMIEOxz3GwUI4Ts/IvgVy6LPvoMPgjMdQ185nN6psJyBJ4yOpzqm695/h5umdLJg2vW3GR5iG11MAkR2AzJA==" + }, + "node_modules/lines-and-columns": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz", + "integrity": "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==", + "dev": true, + "license": "MIT" + }, + "node_modules/locate-path": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", + "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-locate": "^4.1.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/lodash": { + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", + "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", + "dev": true, + "license": "MIT" + }, + "node_modules/lodash.camelcase": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/lodash.camelcase/-/lodash.camelcase-4.3.0.tgz", + "integrity": "sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA==", + "license": "MIT", + "optional": true + }, + "node_modules/lodash.clonedeep": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/lodash.clonedeep/-/lodash.clonedeep-4.5.0.tgz", + "integrity": "sha512-H5ZhCF25riFd9uB5UCkVKo61m3S/xZk1x4wA6yp/L3RFP6Z/eHH1ymQcGLo7J3GMPfm0V/7m1tryHuGVxpqEBQ==", + "license": "MIT" + }, + 
"node_modules/lodash.includes": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/lodash.includes/-/lodash.includes-4.3.0.tgz", + "integrity": "sha512-W3Bx6mdkRTGtlJISOvVD/lbqjTlPPUDTMnlXZFnVwi9NKJ6tiAk6LVdlhZMm17VZisqhKcgzpO5Wz91PCt5b0w==", + "license": "MIT" + }, + "node_modules/lodash.isboolean": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/lodash.isboolean/-/lodash.isboolean-3.0.3.tgz", + "integrity": "sha512-Bz5mupy2SVbPHURB98VAcw+aHh4vRV5IPNhILUCsOzRmsTmSQ17jIuqopAentWoehktxGd9e/hbIXq980/1QJg==", + "license": "MIT" + }, + "node_modules/lodash.isinteger": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/lodash.isinteger/-/lodash.isinteger-4.0.4.tgz", + "integrity": "sha512-DBwtEWN2caHQ9/imiNeEA5ys1JoRtRfY3d7V9wkqtbycnAmTvRRmbHKDV4a0EYc678/dia0jrte4tjYwVBaZUA==", + "license": "MIT" + }, + "node_modules/lodash.isnumber": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/lodash.isnumber/-/lodash.isnumber-3.0.3.tgz", + "integrity": "sha512-QYqzpfwO3/CWf3XP+Z+tkQsfaLL/EnUlXWVkIk5FUPc4sBdTehEqZONuyRt2P67PXAk+NXmTBcc97zw9t1FQrw==", + "license": "MIT" + }, + "node_modules/lodash.isplainobject": { + "version": "4.0.6", + "resolved": "https://registry.npmjs.org/lodash.isplainobject/-/lodash.isplainobject-4.0.6.tgz", + "integrity": "sha512-oSXzaWypCMHkPC3NvBEaPHf0KsA5mvPrOPgQWDsbg8n7orZ290M0BmC/jgRZ4vcJ6DTAhjrsSYgdsW/F+MFOBA==", + "license": "MIT" + }, + "node_modules/lodash.isstring": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/lodash.isstring/-/lodash.isstring-4.0.1.tgz", + "integrity": "sha512-0wJxfxH1wgO3GrbuP+dTTk7op+6L41QCXbGINEmD+ny/G/eCqGzxyCsh7159S+mgDDcoarnBw6PC1PS5+wUGgw==", + "license": "MIT" + }, + "node_modules/lodash.memoize": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/lodash.memoize/-/lodash.memoize-4.1.2.tgz", + "integrity": "sha512-t7j+NzmgnQzTAYXcsHYLgimltOV1MXHtlOWf6GjL9Kj8GK5FInw5JotxvbOs+IvV1/Dzo04/fCGfLVs7aXb4Ag==", + "dev": true, + 
"license": "MIT" + }, + "node_modules/lodash.once": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/lodash.once/-/lodash.once-4.1.1.tgz", + "integrity": "sha512-Sb487aTOCr9drQVL8pIxOzVhafOjZN9UU54hiN8PU3uAiSV7lx1yYNpbNmex2PK6dSJoNTSJUUswT651yww3Mg==", + "license": "MIT" + }, + "node_modules/long": { + "version": "5.2.3", + "resolved": "https://registry.npmjs.org/long/-/long-5.2.3.tgz", + "integrity": "sha512-lcHwpNoggQTObv5apGNCTdJrO69eHOZMi4BNC+rTLER8iHAqGrUVeLh/irVIM7zTw2bOXA8T6uNPeujwOLg/2Q==", + "license": "Apache-2.0" + }, + "node_modules/lru-cache": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", + "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", + "dev": true, + "license": "ISC", + "dependencies": { + "yallist": "^3.0.2" + } + }, + "node_modules/lru-memoizer": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/lru-memoizer/-/lru-memoizer-2.3.0.tgz", + "integrity": "sha512-GXn7gyHAMhO13WSKrIiNfztwxodVsP8IoZ3XfrJV4yH2x0/OeTO/FIaAHTY5YekdGgW94njfuKmyyt1E0mR6Ug==", + "license": "MIT", + "dependencies": { + "lodash.clonedeep": "^4.5.0", + "lru-cache": "6.0.0" + } + }, + "node_modules/lru-memoizer/node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "license": "ISC", + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/lru-memoizer/node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "license": "ISC" + }, + "node_modules/make-dir": { + "version": "4.0.0", + "resolved": 
"https://registry.npmjs.org/make-dir/-/make-dir-4.0.0.tgz", + "integrity": "sha512-hXdUTZYIVOt1Ex//jAQi+wTZZpUpwBj/0QsOzqegb3rGMMeJiSEu5xLHnYfBrRV4RH2+OCSOO95Is/7x1WJ4bw==", + "dev": true, + "license": "MIT", + "dependencies": { + "semver": "^7.5.3" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/make-dir/node_modules/semver": { + "version": "7.6.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz", + "integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/make-error": { + "version": "1.3.6", + "resolved": "https://registry.npmjs.org/make-error/-/make-error-1.3.6.tgz", + "integrity": "sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==", + "dev": true, + "license": "ISC" + }, + "node_modules/makeerror": { + "version": "1.0.12", + "resolved": "https://registry.npmjs.org/makeerror/-/makeerror-1.0.12.tgz", + "integrity": "sha512-JmqCvUhmt43madlpFzG4BQzG2Z3m6tvQDNKdClZnO3VbIudJYmxsT0FNJMeiB2+JTSlTQTSbU8QdesVmwJcmLg==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "tmpl": "1.0.5" + } + }, + "node_modules/math-intrinsics": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", + "integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/media-typer": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", + "integrity": "sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + 
"node_modules/merge-descriptors": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.3.tgz", + "integrity": "sha512-gaNvAS7TZ897/rVaZ0nMtAyxNyi/pdbjbAwUpFQpN70GqnVfOiXpeUUMKRBmzXaSQ8DdTX4/0ms62r2K+hE6mQ==", + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/merge-stream": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", + "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==", + "dev": true, + "license": "MIT" + }, + "node_modules/methods": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz", + "integrity": "sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/micromatch": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", + "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", + "dev": true, + "license": "MIT", + "dependencies": { + "braces": "^3.0.3", + "picomatch": "^2.3.1" + }, + "engines": { + "node": ">=8.6" + } + }, + "node_modules/mime": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/mime/-/mime-3.0.0.tgz", + "integrity": "sha512-jSCU7/VB1loIWBZe14aEYHU/+1UMEHoaO7qxCOVJOw9GgH72VAWppxNcjU+x9a2k3GSIBXNKxXQFqRvvZ7vr3A==", + "license": "MIT", + "optional": true, + "bin": { + "mime": "cli.js" + }, + "engines": { + "node": ">=10.0.0" + } + }, + "node_modules/mime-db": { + "version": "1.52.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", + "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + 
"node_modules/mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "license": "MIT", + "dependencies": { + "mime-db": "1.52.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mimic-fn": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", + "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/mina-signer": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/mina-signer/-/mina-signer-3.0.7.tgz", + "integrity": "sha512-7eYp/6WWj2VzJjvfC8dNeGMud/brdBrzkUsCdysFFXnfV2/FVpVhAGCMfaS6hs0HJtS4+eplmiD2hXfshQS8CQ==", + "license": "Apache-2.0", + "dependencies": { + "blakejs": "^1.2.1", + "js-sha256": "^0.9.0" + } + }, + "node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", + "license": "MIT" + }, + "node_modules/nan": { + "version": "2.22.0", + "resolved": "https://registry.npmjs.org/nan/-/nan-2.22.0.tgz", + "integrity": "sha512-nbajikzWTMwsW+eSsNm3QwlOs7het9gGJU5dDZzRTQGk03vyBOauxgI4VakDzE0PtsGTmXPsXTbbjVhRwR5mpw==", + "license": "MIT" + }, + "node_modules/natural-compare": { + "version": "1.4.0", + "resolved": 
"https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", + "integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==", + "dev": true, + "license": "MIT" + }, + "node_modules/negotiator": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.3.tgz", + "integrity": "sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/node-fetch": { + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz", + "integrity": "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==", + "license": "MIT", + "optional": true, + "dependencies": { + "whatwg-url": "^5.0.0" + }, + "engines": { + "node": "4.x || >=6.0.0" + }, + "peerDependencies": { + "encoding": "^0.1.0" + }, + "peerDependenciesMeta": { + "encoding": { + "optional": true + } + } + }, + "node_modules/node-forge": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-1.3.1.tgz", + "integrity": "sha512-dPEtOeMvF9VMcYV/1Wb8CPoVAXtp6MKMlcbAt4ddqmGqUJ6fQZFXkNZNkNlfevtNkGtaSoXf/vNNNSvgrdXwtA==", + "license": "(BSD-3-Clause OR GPL-2.0)", + "engines": { + "node": ">= 6.13.0" + } + }, + "node_modules/node-int64": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/node-int64/-/node-int64-0.4.0.tgz", + "integrity": "sha512-O5lz91xSOeoXP6DulyHfllpq+Eg00MWitZIbtPfoSEvqIHdl5gfcY6hYzDWnj0qD5tz52PI08u9qUvSVeUBeHw==", + "dev": true, + "license": "MIT" + }, + "node_modules/node-releases": { + "version": "2.0.19", + "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.19.tgz", + "integrity": "sha512-xxOWJsBKtzAq7DY0J+DTzuz58K8e7sJbdgwkbMWQe8UYB6ekmsQ45q0M/tJDsGaZmbC+l7n57UV8Hl5tHxO9uw==", + "dev": true, + "license": "MIT" + }, + "node_modules/normalize-path": { + 
"version": "3.0.0", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", + "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/npm-run-path": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz", + "integrity": "sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==", + "dev": true, + "license": "MIT", + "dependencies": { + "path-key": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/object-assign": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", + "integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/object-hash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/object-hash/-/object-hash-3.0.0.tgz", + "integrity": "sha512-RSn9F68PjH9HqtltsSnqYC1XXoWe9Bju5+213R98cNGttag9q9yAOTzdbsqvIa7aNm5WffBZFpWYr2aWrklWAw==", + "license": "MIT", + "optional": true, + "engines": { + "node": ">= 6" + } + }, + "node_modules/object-inspect": { + "version": "1.13.3", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.3.tgz", + "integrity": "sha512-kDCGIbxkDSXE3euJZZXzc6to7fCrKHNI/hSRQnRuQ+BWjFNzZwiFF8fj/6o2t2G9/jTj8PSIYTfCLelLZEeRpA==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/on-finished": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.4.1.tgz", + "integrity": "sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==", + "license": "MIT", + "dependencies": { + 
"ee-first": "1.1.1" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "devOptional": true, + "license": "ISC", + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/onetime": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", + "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", + "dev": true, + "license": "MIT", + "dependencies": { + "mimic-fn": "^2.1.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-limit": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", + "devOptional": true, + "license": "MIT", + "dependencies": { + "yocto-queue": "^0.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-locate": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", + "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-limit": "^2.2.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/p-locate/node_modules/p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-try": "^2.0.0" + }, + "engines": { + "node": ">=6" + }, + 
"funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-try": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", + "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/parse-json": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz", + "integrity": "sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.0.0", + "error-ex": "^1.3.1", + "json-parse-even-better-errors": "^2.3.0", + "lines-and-columns": "^1.1.6" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/parseurl": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz", + "integrity": "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/path-key": { + "version": "3.1.1", + "resolved": 
"https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/path-parse": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", + "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", + "dev": true, + "license": "MIT" + }, + "node_modules/path-to-regexp": { + "version": "0.1.12", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.12.tgz", + "integrity": "sha512-RA1GjUVMnvYFxuqovrEqZoxxW5NUZqbwKtYz/Tt7nXerk0LbLblQmrsgdeOxV5SFHf0UDggjS/bSeOZwt1pmEQ==", + "license": "MIT" + }, + "node_modules/picocolors": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", + "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==", + "dev": true, + "license": "ISC" + }, + "node_modules/picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/pirates": { + "version": "4.0.6", + "resolved": "https://registry.npmjs.org/pirates/-/pirates-4.0.6.tgz", + "integrity": "sha512-saLsH7WeYYPiD25LDuLRRY/i+6HaPYr6G1OUlN39otzkSTxKnubR9RTxS3/Kk50s1g2JTgFwWQDQyplC5/SHZg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 6" + } + }, + "node_modules/pkg-dir": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-4.2.0.tgz", + "integrity": 
"sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "find-up": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/pretty-format": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-29.7.0.tgz", + "integrity": "sha512-Pdlw/oPxN+aXdmM9R00JVC9WVFoCLTKJvDVLgmJ+qAffBMxsV85l/Lu7sNx4zSzPyoL2euImuEwHhOXdEgNFZQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/schemas": "^29.6.3", + "ansi-styles": "^5.0.0", + "react-is": "^18.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/pretty-format/node_modules/ansi-styles": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-5.2.0.tgz", + "integrity": "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/prompts": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/prompts/-/prompts-2.4.2.tgz", + "integrity": "sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "kleur": "^3.0.3", + "sisteransi": "^1.0.5" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/proto3-json-serializer": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/proto3-json-serializer/-/proto3-json-serializer-2.0.2.tgz", + "integrity": "sha512-SAzp/O4Yh02jGdRc+uIrGoe87dkN/XtwxfZ4ZyafJHymd79ozp5VG5nyZ7ygqPM5+cpLDjjGnYFUkngonyDPOQ==", + "license": "Apache-2.0", + "optional": true, + "dependencies": { + "protobufjs": "^7.2.5" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/protobufjs": { + "version": "7.4.0", + "resolved": 
"https://registry.npmjs.org/protobufjs/-/protobufjs-7.4.0.tgz", + "integrity": "sha512-mRUWCc3KUU4w1jU8sGxICXH/gNS94DvI1gxqDvBzhj1JpcsimQkYiOJfwsPUykUI5ZaspFbSgmBLER8IrQ3tqw==", + "hasInstallScript": true, + "license": "BSD-3-Clause", + "dependencies": { + "@protobufjs/aspromise": "^1.1.2", + "@protobufjs/base64": "^1.1.2", + "@protobufjs/codegen": "^2.0.4", + "@protobufjs/eventemitter": "^1.1.0", + "@protobufjs/fetch": "^1.1.0", + "@protobufjs/float": "^1.0.2", + "@protobufjs/inquire": "^1.1.0", + "@protobufjs/path": "^1.1.2", + "@protobufjs/pool": "^1.1.0", + "@protobufjs/utf8": "^1.1.0", + "@types/node": ">=13.7.0", + "long": "^5.0.0" + }, + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/proxy-addr": { + "version": "2.0.7", + "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.7.tgz", + "integrity": "sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==", + "license": "MIT", + "dependencies": { + "forwarded": "0.2.0", + "ipaddr.js": "1.9.1" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/pure-rand": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/pure-rand/-/pure-rand-6.1.0.tgz", + "integrity": "sha512-bVWawvoZoBYpp6yIoQtQXHZjmz35RSVHnUOTefl8Vcjr8snTPY1wnpSPMWekcFwbxI6gtmT7rSYPFvz71ldiOA==", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://github.com/sponsors/dubzzz" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fast-check" + } + ], + "license": "MIT" + }, + "node_modules/qs": { + "version": "6.13.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.13.0.tgz", + "integrity": "sha512-+38qI9SOr8tfZ4QmJNplMUxqjbe7LKvvZgWdExBOmd+egZTtjLB67Gu0HRX3u/XOq7UU2Nx6nsjvS16Z9uwfpg==", + "license": "BSD-3-Clause", + "dependencies": { + "side-channel": "^1.0.6" + }, + "engines": { + "node": ">=0.6" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/range-parser": { + "version": 
"1.2.1", + "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz", + "integrity": "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/raw-body": { + "version": "2.5.2", + "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.2.tgz", + "integrity": "sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA==", + "license": "MIT", + "dependencies": { + "bytes": "3.1.2", + "http-errors": "2.0.0", + "iconv-lite": "0.4.24", + "unpipe": "1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/react-is": { + "version": "18.3.1", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-18.3.1.tgz", + "integrity": "sha512-/LLMVyas0ljjAtoYiPqYiL8VWXzUUdThrmU5+n20DZv+a+ClRoevUzw5JxU+Ieh5/c87ytoTBV9G1FiKfNJdmg==", + "dev": true, + "license": "MIT" + }, + "node_modules/readable-stream": { + "version": "3.6.2", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", + "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", + "license": "MIT", + "optional": true, + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/require-directory": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", + "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==", + "devOptional": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/resolve": { + "version": "1.22.10", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.10.tgz", + "integrity": 
"sha512-NPRy+/ncIMeDlTAsuqwKIiferiawhefFJtkNSW0qZJEqMEb+qBt/77B/jGeeek+F0uOeN05CDa6HXbbIgtVX4w==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-core-module": "^2.16.0", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + }, + "bin": { + "resolve": "bin/resolve" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/resolve-cwd": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/resolve-cwd/-/resolve-cwd-3.0.0.tgz", + "integrity": "sha512-OrZaX2Mb+rJCpH/6CpSqt9xFVpN++x01XnN2ie9g6P5/3xelLAkXWVADpdz1IHD/KFfEXyE6V0U01OQ3UO2rEg==", + "dev": true, + "license": "MIT", + "dependencies": { + "resolve-from": "^5.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/resolve-from": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-5.0.0.tgz", + "integrity": "sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/resolve.exports": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/resolve.exports/-/resolve.exports-2.0.3.tgz", + "integrity": "sha512-OcXjMsGdhL4XnbShKpAcSqPMzQoYkYyhbEaeSko47MjRP9NfEQMhZkXL1DoFlt9LWQn4YttrdnV6X2OiyzBi+A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + } + }, + "node_modules/retry": { + "version": "0.13.1", + "resolved": "https://registry.npmjs.org/retry/-/retry-0.13.1.tgz", + "integrity": "sha512-XQBQ3I8W1Cge0Seh+6gjj03LbmRFWuoszgK9ooCpwYIrhhoO80pfq4cUkU5DkknwfOfFteRwlZ56PYOGYyFWdg==", + "license": "MIT", + "optional": true, + "engines": { + "node": ">= 4" + } + }, + "node_modules/retry-request": { + "version": "7.0.2", + "resolved": "https://registry.npmjs.org/retry-request/-/retry-request-7.0.2.tgz", + "integrity": 
"sha512-dUOvLMJ0/JJYEn8NrpOaGNE7X3vpI5XlZS/u0ANjqtcZVKnIxP7IgCFwrKTxENw29emmwug53awKtaMm4i9g5w==", + "license": "MIT", + "optional": true, + "dependencies": { + "@types/request": "^2.48.8", + "extend": "^3.0.2", + "teeny-request": "^9.0.0" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, + "node_modules/safer-buffer": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", + "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", + "license": "MIT" + }, + "node_modules/semver": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/send": { + "version": "0.19.0", + "resolved": "https://registry.npmjs.org/send/-/send-0.19.0.tgz", + "integrity": "sha512-dW41u5VfLXu8SJh5bwRmyYUbAoSB3c9uQh6L8h/KtsFREPWpbX1lrljJo186Jc4nmci/sGUZ9a0a0J2zgfq2hw==", + "license": "MIT", + "dependencies": { + "debug": "2.6.9", + "depd": "2.0.0", + "destroy": "1.2.0", + "encodeurl": "~1.0.2", + "escape-html": "~1.0.3", + "etag": "~1.8.1", + "fresh": "0.5.2", + "http-errors": "2.0.0", + "mime": "1.6.0", + "ms": "2.1.3", + "on-finished": "2.4.1", + "range-parser": "~1.2.1", + "statuses": "2.0.1" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + 
"node_modules/send/node_modules/encodeurl": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz", + "integrity": "sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/send/node_modules/mime": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz", + "integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==", + "license": "MIT", + "bin": { + "mime": "cli.js" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/send/node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "license": "MIT" + }, + "node_modules/serve-static": { + "version": "1.16.2", + "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.16.2.tgz", + "integrity": "sha512-VqpjJZKadQB/PEbEwvFdO43Ax5dFBZ2UECszz8bQ7pi7wt//PWe1P6MN7eCnjsatYtBT6EuiClbjSWP2WrIoTw==", + "license": "MIT", + "dependencies": { + "encodeurl": "~2.0.0", + "escape-html": "~1.0.3", + "parseurl": "~1.3.3", + "send": "0.19.0" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/setprototypeof": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz", + "integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==", + "license": "ISC" + }, + "node_modules/shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "dev": true, + "license": "MIT", + "dependencies": { + "shebang-regex": "^3.0.0" + }, + 
"engines": { + "node": ">=8" + } + }, + "node_modules/shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/side-channel": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.1.0.tgz", + "integrity": "sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "object-inspect": "^1.13.3", + "side-channel-list": "^1.0.0", + "side-channel-map": "^1.0.1", + "side-channel-weakmap": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-list": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/side-channel-list/-/side-channel-list-1.0.0.tgz", + "integrity": "sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "object-inspect": "^1.13.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-map": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/side-channel-map/-/side-channel-map-1.0.1.tgz", + "integrity": "sha512-VCjCNfgMsby3tTdo02nbjtM/ewra6jPHmpThenkTYh8pG9ucZ/1P8So4u4FGBek/BjpOVsDCMoLA/iuBKIFXRA==", + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.2", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.5", + "object-inspect": "^1.13.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-weakmap": { + "version": "1.0.2", + 
"resolved": "https://registry.npmjs.org/side-channel-weakmap/-/side-channel-weakmap-1.0.2.tgz", + "integrity": "sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A==", + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.2", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.5", + "object-inspect": "^1.13.3", + "side-channel-map": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/signal-exit": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", + "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", + "dev": true, + "license": "ISC" + }, + "node_modules/sisteransi": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/sisteransi/-/sisteransi-1.0.5.tgz", + "integrity": "sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg==", + "dev": true, + "license": "MIT" + }, + "node_modules/slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/source-map-support": { + "version": "0.5.13", + "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.13.tgz", + "integrity": "sha512-SHSKFHadjVA5oR4PPqhtAVdcBWwRYVd6g6cAXnIbRiIwc2EhPrTuKUBdSLvlEKyIP3GCf89fltvcZiP9MMFA1w==", + "dev": 
true, + "license": "MIT", + "dependencies": { + "buffer-from": "^1.0.0", + "source-map": "^0.6.0" + } + }, + "node_modules/sprintf-js": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", + "integrity": "sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==", + "dev": true, + "license": "BSD-3-Clause" + }, + "node_modules/stack-utils": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/stack-utils/-/stack-utils-2.0.6.tgz", + "integrity": "sha512-XlkWvfIm6RmsWtNJx+uqtKLS8eqFbxUg0ZzLXqY0caEy9l7hruX8IpiDnjsLavoBgqCCR71TqWO8MaXYheJ3RQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "escape-string-regexp": "^2.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/statuses": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz", + "integrity": "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/stream-events": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/stream-events/-/stream-events-1.0.5.tgz", + "integrity": "sha512-E1GUzBSgvct8Jsb3v2X15pjzN1tYebtbLaMg+eBOUOAxgbLoSbT2NS91ckc5lJD1KfLjId+jXJRgo0qnV5Nerg==", + "license": "MIT", + "optional": true, + "dependencies": { + "stubs": "^3.0.0" + } + }, + "node_modules/stream-shift": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/stream-shift/-/stream-shift-1.0.3.tgz", + "integrity": "sha512-76ORR0DO1o1hlKwTbi/DM3EXWGf3ZJYO8cXX5RJwnul2DEg2oyoZyjLNoQM8WsvZiFKCRfC1O0J7iCvie3RZmQ==", + "license": "MIT", + "optional": true + }, + "node_modules/string_decoder": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", + "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", + "license": 
"MIT", + "optional": true, + "dependencies": { + "safe-buffer": "~5.2.0" + } + }, + "node_modules/string-length": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/string-length/-/string-length-4.0.2.tgz", + "integrity": "sha512-+l6rNN5fYHNhZZy41RXsYptCjA2Igmq4EG7kZAYFQI1E1VTXarr6ZPXBg6eq7Y6eK4FEhY6AJlyuFIb/v/S0VQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "char-regex": "^1.0.2", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "devOptional": true, + "license": "MIT", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "devOptional": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-bom": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-4.0.0.tgz", + "integrity": "sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-final-newline": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz", + "integrity": "sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + 
"node_modules/strip-json-comments": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", + "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/strnum": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/strnum/-/strnum-1.0.5.tgz", + "integrity": "sha512-J8bbNyKKXl5qYcR36TIO8W3mVGVHrmmxsd5PAItGkmyzwJvybiw2IVq5nqd0i4LSNSkB/sx9VHllbfFdr9k1JA==", + "license": "MIT", + "optional": true + }, + "node_modules/stubs": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/stubs/-/stubs-3.0.0.tgz", + "integrity": "sha512-PdHt7hHUJKxvTCgbKX9C1V/ftOcjJQgz8BZwNfV5c4B6dcGqlpelTbJ999jBGZ2jYiPAwcX5dP6oBwVlBlUbxw==", + "license": "MIT", + "optional": true + }, + "node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "license": "MIT", + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/supports-preserve-symlinks-flag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", + "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/teeny-request": { + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-9.0.0.tgz", + "integrity": 
"sha512-resvxdc6Mgb7YEThw6G6bExlXKkv6+YbuzGg9xuXxSgxJF7Ozs+o8Y9+2R3sArdWdW8nOokoQb1yrpFB0pQK2g==", + "license": "Apache-2.0", + "optional": true, + "dependencies": { + "http-proxy-agent": "^5.0.0", + "https-proxy-agent": "^5.0.0", + "node-fetch": "^2.6.9", + "stream-events": "^1.0.5", + "uuid": "^9.0.0" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/teeny-request/node_modules/agent-base": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz", + "integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==", + "license": "MIT", + "optional": true, + "dependencies": { + "debug": "4" + }, + "engines": { + "node": ">= 6.0.0" + } + }, + "node_modules/teeny-request/node_modules/debug": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.0.tgz", + "integrity": "sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==", + "license": "MIT", + "optional": true, + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/teeny-request/node_modules/https-proxy-agent": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz", + "integrity": "sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==", + "license": "MIT", + "optional": true, + "dependencies": { + "agent-base": "6", + "debug": "4" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/teeny-request/node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "license": "MIT", + "optional": true + }, + "node_modules/teeny-request/node_modules/uuid": { + "version": 
"9.0.1", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-9.0.1.tgz", + "integrity": "sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==", + "funding": [ + "https://github.com/sponsors/broofa", + "https://github.com/sponsors/ctavan" + ], + "license": "MIT", + "optional": true, + "bin": { + "uuid": "dist/bin/uuid" + } + }, + "node_modules/test-exclude": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/test-exclude/-/test-exclude-6.0.0.tgz", + "integrity": "sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w==", + "dev": true, + "license": "ISC", + "dependencies": { + "@istanbuljs/schema": "^0.1.2", + "glob": "^7.1.4", + "minimatch": "^3.0.4" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/tmpl": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/tmpl/-/tmpl-1.0.5.tgz", + "integrity": "sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw==", + "dev": true, + "license": "BSD-3-Clause" + }, + "node_modules/to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-number": "^7.0.0" + }, + "engines": { + "node": ">=8.0" + } + }, + "node_modules/toidentifier": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz", + "integrity": "sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==", + "license": "MIT", + "engines": { + "node": ">=0.6" + } + }, + "node_modules/tr46": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", + "integrity": 
"sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==", + "license": "MIT", + "optional": true + }, + "node_modules/ts-deepmerge": { + "version": "2.0.7", + "resolved": "https://registry.npmjs.org/ts-deepmerge/-/ts-deepmerge-2.0.7.tgz", + "integrity": "sha512-3phiGcxPSSR47RBubQxPoZ+pqXsEsozLo4G4AlSrsMKTFg9TA3l+3he5BqpUi9wiuDbaHWXH/amlzQ49uEdXtg==", + "dev": true, + "license": "ISC" + }, + "node_modules/ts-jest": { + "version": "29.2.5", + "resolved": "https://registry.npmjs.org/ts-jest/-/ts-jest-29.2.5.tgz", + "integrity": "sha512-KD8zB2aAZrcKIdGk4OwpJggeLcH1FgrICqDSROWqlnJXGCXK4Mn6FcdK2B6670Xr73lHMG1kHw8R87A0ecZ+vA==", + "dev": true, + "license": "MIT", + "dependencies": { + "bs-logger": "^0.2.6", + "ejs": "^3.1.10", + "fast-json-stable-stringify": "^2.1.0", + "jest-util": "^29.0.0", + "json5": "^2.2.3", + "lodash.memoize": "^4.1.2", + "make-error": "^1.3.6", + "semver": "^7.6.3", + "yargs-parser": "^21.1.1" + }, + "bin": { + "ts-jest": "cli.js" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || ^18.0.0 || >=20.0.0" + }, + "peerDependencies": { + "@babel/core": ">=7.0.0-beta.0 <8", + "@jest/transform": "^29.0.0", + "@jest/types": "^29.0.0", + "babel-jest": "^29.0.0", + "jest": "^29.0.0", + "typescript": ">=4.3 <6" + }, + "peerDependenciesMeta": { + "@babel/core": { + "optional": true + }, + "@jest/transform": { + "optional": true + }, + "@jest/types": { + "optional": true + }, + "babel-jest": { + "optional": true + }, + "esbuild": { + "optional": true + } + } + }, + "node_modules/ts-jest/node_modules/semver": { + "version": "7.6.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz", + "integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/tslib": { + "version": "2.8.1", + "resolved": 
"https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz", + "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==", + "license": "0BSD" + }, + "node_modules/type-detect": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz", + "integrity": "sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/type-fest": { + "version": "0.21.3", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.21.3.tgz", + "integrity": "sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==", + "dev": true, + "license": "(MIT OR CC0-1.0)", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/type-is": { + "version": "1.6.18", + "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz", + "integrity": "sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==", + "license": "MIT", + "dependencies": { + "media-typer": "0.3.0", + "mime-types": "~2.1.24" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/typescript": { + "version": "4.9.5", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.9.5.tgz", + "integrity": "sha512-1FXk9E2Hm+QzZQ7z+McJiHL4NW1F2EzMu9Nq9i3zAaGqibafqYwCVU6WyWAuyQRRzOlxou8xZSyXLEN8oKj24g==", + "dev": true, + "license": "Apache-2.0", + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=4.2.0" + } + }, + "node_modules/undici-types": { + "version": "6.20.0", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.20.0.tgz", + "integrity": "sha512-Ny6QZ2Nju20vw1SRHe3d9jVu6gJ+4e3+MMpqu7pqE5HT6WsTSlce++GQmK5UXS8mzV8DSYHrQH+Xrf2jVcuKNg==", + "license": "MIT" + }, + "node_modules/unpipe": { 
+ "version": "1.0.0", + "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", + "integrity": "sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/update-browserslist-db": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.1.2.tgz", + "integrity": "sha512-PPypAm5qvlD7XMZC3BujecnaOxwhrtoFR+Dqkk5Aa/6DssiH0ibKoketaj9w8LP7Bont1rYeoV5plxD7RTEPRg==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "escalade": "^3.2.0", + "picocolors": "^1.1.1" + }, + "bin": { + "update-browserslist-db": "cli.js" + }, + "peerDependencies": { + "browserslist": ">= 4.21.0" + } + }, + "node_modules/util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==", + "license": "MIT", + "optional": true + }, + "node_modules/utils-merge": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz", + "integrity": "sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA==", + "license": "MIT", + "engines": { + "node": ">= 0.4.0" + } + }, + "node_modules/uuid": { + "version": "10.0.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-10.0.0.tgz", + "integrity": "sha512-8XkAphELsDnEGrDxUOHB3RGvXz6TeuYSGEZBOjtTtPm2lwhGBjLgOzLHB63IUWfBpNucQjND6d3AOudO+H3RWQ==", + "funding": [ + "https://github.com/sponsors/broofa", + "https://github.com/sponsors/ctavan" + ], + 
"license": "MIT", + "bin": { + "uuid": "dist/bin/uuid" + } + }, + "node_modules/v8-to-istanbul": { + "version": "9.3.0", + "resolved": "https://registry.npmjs.org/v8-to-istanbul/-/v8-to-istanbul-9.3.0.tgz", + "integrity": "sha512-kiGUalWN+rgBJ/1OHZsBtU4rXZOfj/7rKQxULKlIzwzQSvMJUUNgPwJEEh7gU6xEVxC0ahoOBvN2YI8GH6FNgA==", + "dev": true, + "license": "ISC", + "dependencies": { + "@jridgewell/trace-mapping": "^0.3.12", + "@types/istanbul-lib-coverage": "^2.0.1", + "convert-source-map": "^2.0.0" + }, + "engines": { + "node": ">=10.12.0" + } + }, + "node_modules/vary": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz", + "integrity": "sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/walker": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/walker/-/walker-1.0.8.tgz", + "integrity": "sha512-ts/8E8l5b7kY0vlWLewOkDXMmPdLcVV4GmOQLyxuSswIJsweeFZtAsMF7k1Nszz+TYBQrlYRmzOnr398y1JemQ==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "makeerror": "1.0.12" + } + }, + "node_modules/webidl-conversions": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", + "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==", + "license": "BSD-2-Clause", + "optional": true + }, + "node_modules/websocket-driver": { + "version": "0.7.4", + "resolved": "https://registry.npmjs.org/websocket-driver/-/websocket-driver-0.7.4.tgz", + "integrity": "sha512-b17KeDIQVjvb0ssuSDF2cYXSg2iztliJ4B9WdsuB6J952qCPKmnVq4DyW5motImXHDC1cBT/1UezrJVsKw5zjg==", + "license": "Apache-2.0", + "dependencies": { + "http-parser-js": ">=0.5.1", + "safe-buffer": ">=5.1.0", + "websocket-extensions": ">=0.1.1" + }, + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/websocket-extensions": { + "version": 
"0.1.4", + "resolved": "https://registry.npmjs.org/websocket-extensions/-/websocket-extensions-0.1.4.tgz", + "integrity": "sha512-OqedPIGOfsDlo31UNwYbCFMSaO9m9G/0faIHj5/dZFDMFqPTcx6UwqyOy3COEaEOg/9VsGIpdqn62W5KhoKSpg==", + "license": "Apache-2.0", + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/whatwg-url": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", + "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", + "license": "MIT", + "optional": true, + "dependencies": { + "tr46": "~0.0.3", + "webidl-conversions": "^3.0.0" + } + }, + "node_modules/which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dev": true, + "license": "ISC", + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/wrap-ansi": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "devOptional": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", + "devOptional": true, + "license": "ISC" + }, + "node_modules/write-file-atomic": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-4.0.2.tgz", + 
"integrity": "sha512-7KxauUdBmSdWnmpaGFg+ppNjKF8uNLry8LyzjauQDOVONfFLNKrKvQOxZ/VuTIcS/gge/YNahf5RIIQWTSarlg==", + "dev": true, + "license": "ISC", + "dependencies": { + "imurmurhash": "^0.1.4", + "signal-exit": "^3.0.7" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16.0.0" + } + }, + "node_modules/y18n": { + "version": "5.0.8", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", + "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", + "devOptional": true, + "license": "ISC", + "engines": { + "node": ">=10" + } + }, + "node_modules/yallist": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", + "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==", + "dev": true, + "license": "ISC" + }, + "node_modules/yargs": { + "version": "17.7.2", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz", + "integrity": "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==", + "devOptional": true, + "license": "MIT", + "dependencies": { + "cliui": "^8.0.1", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", + "require-directory": "^2.1.1", + "string-width": "^4.2.3", + "y18n": "^5.0.5", + "yargs-parser": "^21.1.1" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/yargs-parser": { + "version": "21.1.1", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz", + "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==", + "devOptional": true, + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/yocto-queue": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", + "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", + 
"devOptional": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + } + } +} diff --git a/frontend/functions/package.json b/frontend/functions/package.json new file mode 100644 index 0000000000..8d76d54827 --- /dev/null +++ b/frontend/functions/package.json @@ -0,0 +1,37 @@ +{ + "name": "functions", + "description": "Cloud Functions for Firebase", + "scripts": { + "serve": "npm run build && firebase emulators:start --only functions,firestore", + "shell": "firebase functions:shell", + "start": "npm run build && node lib/index.js", + "deploy": "firebase deploy --only functions", + "logs": "firebase functions:log", + "test": "jest", + "test:watch": "jest --watch", + "build": "node build.js && tsc -p tsconfig.json", + "build:watch": "tsc --watch" + }, + "engines": { + "node": "22" + }, + "type": "commonjs", + "main": "lib/index.js", + "dependencies": { + "blake2": "^5.0.0", + "bs58check": "^3.0.1", + "firebase-admin": "^12.1.0", + "firebase-functions": "^6.2.0", + "mina-signer": "^3.0.7" + }, + "devDependencies": { + "@types/blake2": "^4.0.1", + "@types/bs58check": "^2.1.0", + "@types/jest": "^29.5.14", + "firebase-functions-test": "^3.1.0", + "jest": "^29.0.0", + "ts-jest": "^29.2.5", + "typescript": "^4.9.5" + }, + "private": true +} diff --git a/frontend/functions/src/index.ts b/frontend/functions/src/index.ts new file mode 100644 index 0000000000..bcf9751b12 --- /dev/null +++ b/frontend/functions/src/index.ts @@ -0,0 +1,134 @@ +import * as admin from 'firebase-admin'; +import * as functions from 'firebase-functions'; +import * as blake2 from 'blake2'; +import bs58check from 'bs58check'; +import Client from 'mina-signer'; +import { submitterAllowed } from './submitterValidator'; +import { CallableRequest, onCall } from 'firebase-functions/v2/https'; +import { getFirestore, FieldValue } from 'firebase-admin/firestore'; + +interface SignatureJson { + field: string; + scalar: string; +} 
+ +interface HeartbeatData { + version: number; + payload: string; + submitter: string; + signature: SignatureJson; +} + +const minaClient = new Client({ network: 'testnet' }); + +admin.initializeApp(); + +// Rate limit duration between heartbeats from the same submitter (15 seconds) +const HEARTBEAT_RATE_LIMIT_MS = 15000; + +function validateSignature( + data: string, + signature: SignatureJson, + publicKeyBase58: string, +): boolean { + try { + const h = blake2.createHash('blake2b', { digestLength: 32 }); + h.update(Buffer.from(data)); + const digest: string = h.digest().toString('hex'); + + try { + // TODO: remove this validation later, since the list is + // hardcoded and we check that the key is there, + // we know it is valid. + let publicKeyBytes: Uint8Array; + try { + publicKeyBytes = bs58check.decode(publicKeyBase58); + } catch (e) { + console.error('Failed to decode public key:', e); + return false; + } + + if (publicKeyBytes[0] !== 0xcb) { + console.error('Invalid public key prefix'); + return false; + } + + return minaClient.verifyMessage({ + data: digest, + signature, + publicKey: publicKeyBase58, + }); + } catch (e) { + console.error('Error parsing signature or verifying:', e); + return false; + } + } catch (e) { + console.error('Error in signature validation:', e); + return false; + } +} + +export const handleValidationAndStore = onCall( + { region: 'us-central1', enforceAppCheck: false }, + async (request: CallableRequest) => { + console.log('Received data:', request.data); + const data = request.data; + const { submitter, payload, signature } = data; + + if (!submitterAllowed(submitter)) { + throw new functions.https.HttpsError( + 'permission-denied', + 'Public key not authorized', + ); + } + + const db = getFirestore(); + + try { + if (!validateSignature(payload, signature, submitter)) { + throw new functions.https.HttpsError( + 'unauthenticated', + 'Signature validation failed', + ); + } + + const rateLimitRef = 
db.collection('publicKeyRateLimits').doc(submitter); + const newHeartbeatRef = db.collection('heartbeats').doc(); + + await db.runTransaction(async (transaction) => { + const doc = await transaction.get(rateLimitRef); + const now = Date.now(); + const cutoff = now - HEARTBEAT_RATE_LIMIT_MS; + + if (doc.exists) { + const lastCall = doc.data()?.['lastCall']; + if (lastCall > cutoff) { + throw new functions.https.HttpsError( + 'resource-exhausted', + 'Rate limit exceeded for this public key', + ); + } + } + + transaction.set(rateLimitRef, { lastCall: FieldValue.serverTimestamp() }, { merge: true }); + transaction.create(newHeartbeatRef, { + ...data, + createTime: FieldValue.serverTimestamp(), + }); + }); + + return { message: 'Data validated and stored successfully' }; + } catch (error) { + console.error('Error during data validation and storage:', error); + if (error instanceof functions.https.HttpsError) { + throw error; + } + throw new functions.https.HttpsError( + 'internal', + 'An error occurred during validation or storage', + ); + } + }, +); + +export { validateSignature }; diff --git a/frontend/functions/src/submitterValidator.ts b/frontend/functions/src/submitterValidator.ts new file mode 100644 index 0000000000..7e44314735 --- /dev/null +++ b/frontend/functions/src/submitterValidator.ts @@ -0,0 +1,11 @@ +// base58 encoded public keys that are allowed to submit data +const allowedPublicKeys: Set = new Set([ + // ALLOWED_PUBLIC_KEYS_PLACEHOLDER +]); + +export function submitterAllowed(publicKeyBase58: string): boolean { + if (allowedPublicKeys.size === 0) { + return true; + } + return allowedPublicKeys.has(publicKeyBase58); +} diff --git a/frontend/functions/tsconfig.json b/frontend/functions/tsconfig.json new file mode 100644 index 0000000000..37ac884f80 --- /dev/null +++ b/frontend/functions/tsconfig.json @@ -0,0 +1,27 @@ +{ + "compilerOptions": { + "target": "ES2019", + "module": "CommonJS", + "moduleResolution": "Node", + "noImplicitReturns": true, + 
"noUnusedLocals": true, + "outDir": "lib", + "sourceMap": true, + "strict": true, + "esModuleInterop": true, + "skipLibCheck": true, + "allowSyntheticDefaultImports": true + }, + "compileOnSave": true, + "include": [ + "src/**/*" + ], + "exclude": [ + "build.js", + "jest.config.js" + ], + "types": [ + "node", + "jest" + ] +} diff --git a/frontend/package-lock.json b/frontend/package-lock.json index 0b6dff0be7..4844afab1b 100644 --- a/frontend/package-lock.json +++ b/frontend/package-lock.json @@ -1,12 +1,12 @@ { "name": "frontend", - "version": "1.0.66", + "version": "1.0.95", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "frontend", - "version": "1.0.66", + "version": "1.0.95", "dependencies": { "@angular/animations": "^17.3.12", "@angular/cdk": "^17.3.10", @@ -31,6 +31,7 @@ "@sentry/angular": "^8.35.0", "@sentry/cli": "^2.38.2", "@sentry/tracing": "^7.114.0", + "aos": "^2.3.4", "base-x": "^5.0.0", "bs58check": "^4.0.0", "buffer": "^6.0.3", @@ -7913,6 +7914,17 @@ "node": ">= 8" } }, + "node_modules/aos": { + "version": "2.3.4", + "resolved": "https://registry.npmjs.org/aos/-/aos-2.3.4.tgz", + "integrity": "sha512-zh/ahtR2yME4I51z8IttIt4lC1Nw0ktsFtmeDzID1m9naJnWXhCoARaCgNOGXb5CLy3zm+wqmRAEgMYB5E2HUw==", + "license": "MIT", + "dependencies": { + "classlist-polyfill": "^1.0.3", + "lodash.debounce": "^4.0.6", + "lodash.throttle": "^4.0.1" + } + }, "node_modules/arch": { "version": "2.2.0", "dev": true, @@ -9081,6 +9093,12 @@ "node": ">=8" } }, + "node_modules/classlist-polyfill": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/classlist-polyfill/-/classlist-polyfill-1.2.0.tgz", + "integrity": "sha512-GzIjNdcEtH4ieA2S8NmrSxv7DfEV5fmixQeyTmqmRmRJPGpRBaSnA2a0VrCjyT8iW8JjEdMbKzDotAJf+ajgaQ==", + "license": "Unlicense" + }, "node_modules/clean-stack": { "version": "2.2.0", "dev": true, @@ -13537,8 +13555,7 @@ "node_modules/lodash.debounce": { "version": "4.0.8", "resolved": 
"https://registry.npmjs.org/lodash.debounce/-/lodash.debounce-4.0.8.tgz", - "integrity": "sha512-FT1yDzDYEoYWhnSGnpE/4Kj1fLZkDFyqRb7fNt6FdYOSxlUWAtp42Eh6Wb0rGIv/m9Bgo7x4GhQbm5Ys4SG5ow==", - "dev": true + "integrity": "sha512-FT1yDzDYEoYWhnSGnpE/4Kj1fLZkDFyqRb7fNt6FdYOSxlUWAtp42Eh6Wb0rGIv/m9Bgo7x4GhQbm5Ys4SG5ow==" }, "node_modules/lodash.isfinite": { "version": "3.3.2", @@ -13552,6 +13569,12 @@ "dev": true, "license": "MIT" }, + "node_modules/lodash.throttle": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/lodash.throttle/-/lodash.throttle-4.1.1.tgz", + "integrity": "sha512-wIkUCfVKpVsWo3JSZlc+8MB5it+2AN5W8J7YVMST30UrvcQNZ1Okbj+rbVniijTWE6FGYy4XJq/rHkas8qJMLQ==", + "license": "MIT" + }, "node_modules/log-symbols": { "version": "4.1.0", "license": "MIT", diff --git a/frontend/package.json b/frontend/package.json index 0bf5f52762..9003e5757a 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -1,6 +1,6 @@ { "name": "frontend", - "version": "1.0.70", + "version": "1.0.103", "scripts": { "install:deps": "npm install", "start": "npm install && ng serve --configuration local --open", @@ -20,7 +20,8 @@ "prerender": "ng run frontend:prerender", "sentry:sourcemaps": "sentry-cli sourcemaps inject --org openmina-uv --project openmina ./dist/frontend/browser && sentry-cli sourcemaps upload --org openmina-uv --project openmina ./dist/frontend/browser", "copy-env": "cp dist/frontend/browser/assets/environments/webnode.js dist/frontend/browser/assets/environments/env.js", - "deploy": "npm run prebuild && npm run build:prod && npm run copy-env && firebase deploy" + "deploy": "npm run prebuild && npm run build:prod && npm run copy-env && firebase deploy", + "deploy:leaderboard": "npm run prebuild && npm run build:prod && cp dist/frontend/browser/assets/environments/leaderboard.js dist/frontend/browser/assets/environments/env.js && firebase deploy" }, "private": true, "dependencies": { @@ -47,6 +48,7 @@ "@sentry/angular": "^8.35.0", "@sentry/cli": 
"^2.38.2", "@sentry/tracing": "^7.114.0", + "aos": "^2.3.4", "base-x": "^5.0.0", "bs58check": "^4.0.0", "buffer": "^6.0.3", diff --git a/frontend/src/app/app.component.html b/frontend/src/app/app.component.html index 79a5cf0237..3b43ad7624 100644 --- a/frontend/src/app/app.component.html +++ b/frontend/src/app/app.component.html @@ -1,8 +1,11 @@ @if (showLandingPage$ | async) { - + + + } @else if (showLoadingWebNodePage$ | async) { +} @else if (showLeaderboardPage$ | async) { + } @else if (loaded) { = this.select$(AppSelectors.menu); readonly showLandingPage$: Observable = this.select$(getMergedRoute).pipe(filter(Boolean), map((route: MergedRoute) => route.url === '/' || route.url.startsWith('/?'))); readonly showLoadingWebNodePage$: Observable = this.select$(getMergedRoute).pipe(filter(Boolean), map((route: MergedRoute) => route.url.startsWith(`/${Routes.LOADING_WEB_NODE}`))); + readonly showLeaderboardPage$: Observable = this.select$(getMergedRoute).pipe(filter(Boolean), map((route: MergedRoute) => route.url.startsWith(`/${Routes.LEADERBOARD}`))); subMenusLength: number = 0; hideToolbar: boolean = CONFIG.hideToolbar; loaded: boolean; @@ -33,6 +36,8 @@ export class AppComponent extends StoreDispatcher implements OnInit { constructor(private breakpointObserver: BreakpointObserver, private router: Router, private webNodeService: WebNodeService) { + AOS.init(); + super(); safelyExecuteInBrowser(() => { if (any(window).Cypress) { @@ -55,7 +60,7 @@ export class AppComponent extends StoreDispatcher implements OnInit { () => this.initAppFunctionalities(), filter(Boolean), take(1), - filter((route: MergedRoute) => route.url !== '/' && !route.url.startsWith('/?')), + filter((route: MergedRoute) => route.url !== '/' && !route.url.startsWith('/?') && !route.url.startsWith('/leaderboard')), ); this.select( getMergedRoute, diff --git a/frontend/src/app/app.module.ts b/frontend/src/app/app.module.ts index eb0917ec77..734d520c3d 100644 --- a/frontend/src/app/app.module.ts +++ 
b/frontend/src/app/app.module.ts @@ -41,6 +41,8 @@ import { getAnalytics, provideAnalytics, ScreenTrackingService } from '@angular/ import { getPerformance, providePerformance } from '@angular/fire/performance'; import { BlockProductionPillComponent } from '@app/layout/block-production-pill/block-production-pill.component'; import { MenuTabsComponent } from '@app/layout/menu-tabs/menu-tabs.component'; +import { getFirestore, provideFirestore } from '@angular/fire/firestore'; +import { LeaderboardModule } from '@leaderboard/leaderboard.module'; registerLocaleData(localeFr, 'fr'); registerLocaleData(localeEn, 'en'); @@ -163,6 +165,7 @@ export class AppGlobalErrorhandler implements ErrorHandler { WebNodeLandingPageComponent, BlockProductionPillComponent, MenuTabsComponent, + LeaderboardModule, ], providers: [ THEME_PROVIDER, @@ -178,15 +181,7 @@ export class AppGlobalErrorhandler implements ErrorHandler { }, provideClientHydration(), provideHttpClient(withFetch()), - provideFirebaseApp(() => initializeApp({ - 'projectId': 'openminawebnode', - 'appId': '1:120031499786:web:9af56c50ebce25c619f1f3', - 'storageBucket': 'openminawebnode.firebasestorage.app', - 'apiKey': 'AIzaSyBreMkb5-8ANb5zL6yWKgRAk9owbDS1g9s', - 'authDomain': 'openminawebnode.firebaseapp.com', - 'messagingSenderId': '120031499786', - 'measurementId': 'G-V0ZC81T9RQ', - })), + provideFirebaseApp(() => initializeApp(CONFIG.globalConfig.firebase)), provideAnalytics(() => getAnalytics()), ScreenTrackingService, // provideAppCheck(() => { @@ -195,6 +190,7 @@ export class AppGlobalErrorhandler implements ErrorHandler { // return initializeAppCheck(undefined, { provider, isTokenAutoRefreshEnabled: true }); // }), providePerformance(() => getPerformance()), + provideFirestore(() => getFirestore()), ], bootstrap: [AppComponent], exports: [ diff --git a/frontend/src/app/app.routing.ts b/frontend/src/app/app.routing.ts index 5aa69195e5..1a5d8eca6d 100644 --- a/frontend/src/app/app.routing.ts +++ 
b/frontend/src/app/app.routing.ts @@ -76,6 +76,10 @@ function generateRoutes(): Routes { loadChildren: () => import('@web-node/web-node.module').then(m => m.WebNodeModule), title: WEBNODE_TITLE, }, + { + path: '', + loadChildren: () => import('@leaderboard/leaderboard.module').then(m => m.LeaderboardModule), + }, ]; if (CONFIG.showWebNodeLandingPage) { routes.push({ diff --git a/frontend/src/app/app.service.ts b/frontend/src/app/app.service.ts index e6fc2bbaf7..007658b11e 100644 --- a/frontend/src/app/app.service.ts +++ b/frontend/src/app/app.service.ts @@ -1,20 +1,22 @@ import { Injectable } from '@angular/core'; -import { map, Observable, of } from 'rxjs'; +import { map, Observable, of, tap } from 'rxjs'; import { MinaNode } from '@shared/types/core/environment/mina-env.type'; import { CONFIG } from '@shared/constants/config'; import { RustService } from '@core/services/rust.service'; import { AppNodeDetails, AppNodeStatus } from '@shared/types/app/app-node-details.type'; import { getNetwork } from '@shared/helpers/mina.helper'; -import { getLocalStorage, ONE_MILLION } from '@openmina/shared'; +import { getLocalStorage, nanOrElse, ONE_MILLION } from '@openmina/shared'; import { BlockProductionWonSlotsStatus } from '@shared/types/block-production/won-slots/block-production-won-slots-slot.type'; import { AppEnvBuild } from '@shared/types/app/app-env-build.type'; +import { FirestoreService } from '@core/services/firestore.service'; @Injectable({ providedIn: 'root', }) export class AppService { - constructor(private rust: RustService) { } + constructor(private rust: RustService, + private firestoreService: FirestoreService) { } getActiveNode(nodes: MinaNode[]): Observable { const nodeName = new URL(location.href).searchParams.get('node'); @@ -48,10 +50,20 @@ export class AppService { transactions: data.transaction_pool.transactions, chainId: data.chain_id, network: getNetwork(data.chain_id), - producingBlockAt: 
data.current_block_production_attempt?.won_slot.slot_time / ONE_MILLION, + producingBlockAt: nanOrElse(data.current_block_production_attempt?.won_slot.slot_time / ONE_MILLION, null), producingBlockGlobalSlot: data.current_block_production_attempt?.won_slot.global_slot, producingBlockStatus: data.current_block_production_attempt?.status, } as AppNodeDetails)), + tap((details: any) => { + // undefined not allowed. Firestore does not accept undefined values + // foreach undefined value, we set it to null + Object.keys(details).forEach((key: string) => { + if (details[key] === undefined) { + details[key] = null; + } + }); + // this.firestoreService.addHeartbeat(details); + }), ); } diff --git a/frontend/src/app/app.setup.ts b/frontend/src/app/app.setup.ts index 8f2cf84222..252fba90f8 100644 --- a/frontend/src/app/app.setup.ts +++ b/frontend/src/app/app.setup.ts @@ -38,6 +38,8 @@ import { benchmarksReducer } from '@benchmarks/benchmarks.reducer'; import { fuzzingReducer } from '@fuzzing/fuzzing.reducer'; import { FuzzingState } from '@fuzzing/fuzzing.state'; import { FuzzingAction } from '@fuzzing/fuzzing.actions'; +import { LeaderboardState } from '@leaderboard/leaderboard.state'; +import { leaderboardReducer } from '@leaderboard/leaderboard.reducer'; export interface MinaState { [APP_KEY]: AppState; @@ -53,6 +55,7 @@ export interface MinaState { snarks: SnarksState; benchmarks: BenchmarksState; fuzzing: FuzzingState; + leaderboard: LeaderboardState; } type MinaAction = @@ -80,6 +83,7 @@ export const reducers: ActionReducerMap = { snarks: snarksReducer, benchmarks: benchmarksReducer, fuzzing: fuzzingReducer, + leaderboard: leaderboardReducer, }; export const metaReducers: MetaReducer[] = []; diff --git a/frontend/src/app/core/helpers/file-progress.helper.ts b/frontend/src/app/core/helpers/file-progress.helper.ts index ebb6a2a006..890310def7 100644 --- a/frontend/src/app/core/helpers/file-progress.helper.ts +++ b/frontend/src/app/core/helpers/file-progress.helper.ts @@ 
-1,7 +1,7 @@ import { BehaviorSubject } from 'rxjs'; import { safelyExecuteInBrowser } from '@openmina/shared'; -const WASM_FILE_SIZE = 30653596; +const WASM_FILE_SIZE = 31705944; class AssetMonitor { readonly downloads: Map = new Map(); diff --git a/frontend/src/app/core/services/firestore.service.ts b/frontend/src/app/core/services/firestore.service.ts new file mode 100644 index 0000000000..1073919e4e --- /dev/null +++ b/frontend/src/app/core/services/firestore.service.ts @@ -0,0 +1,42 @@ +import { Injectable } from '@angular/core'; +import { + Firestore, + CollectionReference, + collection, + addDoc, + doc, + setDoc, + updateDoc, + deleteDoc, + DocumentData, +} from '@angular/fire/firestore'; +import { HttpClient } from '@angular/common/http'; +import { Observable } from 'rxjs'; + +@Injectable({ + providedIn: 'root', +}) +export class FirestoreService { + private heartbeatCollection: CollectionReference; + private cloudFunctionUrl = 'https://us-central1-webnode-gtm-test.cloudfunctions.net/handleValidationAndStore'; + + constructor(private firestore: Firestore, + private http: HttpClient) { + this.heartbeatCollection = collection(this.firestore, 'heartbeat'); + } + + addHeartbeat(data: any): Observable { + console.log('Posting to cloud function:', data); + return this.http.post(this.cloudFunctionUrl, { data }); + } + + updateHeartbeat(id: string, data: any): Promise { + const docRef = doc(this.heartbeatCollection, id); + return updateDoc(docRef, data); + } + + deleteHeartbeat(id: string): Promise { + const docRef = doc(this.heartbeatCollection, id); + return deleteDoc(docRef); + } +} diff --git a/frontend/src/app/core/services/web-node.service.ts b/frontend/src/app/core/services/web-node.service.ts index 56f4f3c756..549afe48cb 100644 --- a/frontend/src/app/core/services/web-node.service.ts +++ b/frontend/src/app/core/services/web-node.service.ts @@ -1,5 +1,5 @@ import { Injectable } from '@angular/core'; -import { BehaviorSubject, catchError, EMPTY, filter, from, 
fromEvent, map, merge, Observable, of, switchMap, tap, throwError } from 'rxjs'; +import { BehaviorSubject, catchError, EMPTY, filter, from, fromEvent, map, merge, Observable, of, switchMap, tap, throwError, timer } from 'rxjs'; import base from 'base-x'; import { any, isBrowser, safelyExecuteInBrowser } from '@openmina/shared'; import { HttpClient } from '@angular/common/http'; @@ -7,6 +7,9 @@ import { sendSentryEvent } from '@shared/helpers/webnode.helper'; import { DashboardPeerStatus } from '@shared/types/dashboard/dashboard.peer'; import { FileProgressHelper } from '@core/helpers/file-progress.helper'; import { CONFIG } from '@shared/constants/config'; +import firebase from 'firebase/compat'; +import FirebaseStorageError = firebase.storage.FirebaseStorageError; +import { FirestoreService } from '@core/services/firestore.service'; export interface PrivateStake { publicKey: string; @@ -33,7 +36,8 @@ export class WebNodeService { privateStake: PrivateStake; noBlockProduction: boolean = false; - constructor(private http: HttpClient) { + constructor(private http: HttpClient, + private firestore: FirestoreService) { FileProgressHelper.initDownloadProgress(); const basex = base('123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz'); safelyExecuteInBrowser(() => { @@ -120,6 +124,7 @@ export class WebNodeService { } })(); console.log('webnode config:', !!this.webNodeKeyPair.privateKey, this.webNodeNetwork, urls); + console.log(this.privateStake); let privateKey = this.privateStake ? 
[this.privateStake.stake, this.privateStake.password] : this.webNodeKeyPair.privateKey; if (this.noBlockProduction) { privateKey = null; @@ -131,13 +136,15 @@ export class WebNodeService { any(window).webnode = webnode; this.webnode$.next(webnode); this.webnodeProgress$.next('Started'); - }), catchError((error) => { sendSentryEvent('WebNode failed to start: ' + error.message); return throwError(() => new Error(error.message)); }), switchMap(() => this.webnode$.asObservable()), + switchMap(() => timer(0, 60000)), + switchMap(() => this.heartBeat$), + switchMap(heartBeat => this.firestore.addHeartbeat(heartBeat)), ); } return EMPTY; @@ -228,10 +235,21 @@ export class WebNodeService { ); } - actions$(param: any): Observable { + get heartBeat$(): Observable { + return this.webnode$.asObservable().pipe( + filter(Boolean), + switchMap(webnode => from(webnode.make_heartbeat())), + ); + } + + actions$(path: string): Observable { + let slot: string | number = path.split('?id=')[1]; + if (!isNaN(Number(slot))) { + slot = Number(slot); + } return this.webnode$.asObservable().pipe( filter(Boolean), - switchMap(webnode => webnode.actions()), + switchMap(webnode => webnode.stats().actions(slot)), ); } } diff --git a/frontend/src/app/features/leaderboard/leaderboard-apply/leaderboard-apply.component.html b/frontend/src/app/features/leaderboard/leaderboard-apply/leaderboard-apply.component.html new file mode 100644 index 0000000000..e5e3da605c --- /dev/null +++ b/frontend/src/app/features/leaderboard/leaderboard-apply/leaderboard-apply.component.html @@ -0,0 +1,3 @@ + + Round 1 Applications Close in 5d 5h 12m - Apply arrow_right_alt + diff --git a/frontend/src/app/features/leaderboard/leaderboard-apply/leaderboard-apply.component.scss b/frontend/src/app/features/leaderboard/leaderboard-apply/leaderboard-apply.component.scss new file mode 100644 index 0000000000..447df2fdcb --- /dev/null +++ b/frontend/src/app/features/leaderboard/leaderboard-apply/leaderboard-apply.component.scss 
@@ -0,0 +1,16 @@ +@import 'leaderboard-variables'; + +.gradient { + height: 52px; + background: $mina-brand-gradient; + top: 0; + left: 0; + font-family: "IBM Plex Mono", sans-serif; + font-size: 16px; + font-weight: 400; + color: $mina-base-primary; + + @media (max-width: 767px) { + font-size: 3.1vw; + } +} diff --git a/frontend/src/app/features/leaderboard/leaderboard-apply/leaderboard-apply.component.ts b/frontend/src/app/features/leaderboard/leaderboard-apply/leaderboard-apply.component.ts new file mode 100644 index 0000000000..74c1035047 --- /dev/null +++ b/frontend/src/app/features/leaderboard/leaderboard-apply/leaderboard-apply.component.ts @@ -0,0 +1,11 @@ +import { ChangeDetectionStrategy, Component } from '@angular/core'; + +@Component({ + selector: 'mina-leaderboard-apply', + templateUrl: './leaderboard-apply.component.html', + styleUrl: './leaderboard-apply.component.scss', + changeDetection: ChangeDetectionStrategy.OnPush, +}) +export class LeaderboardApplyComponent { + +} diff --git a/frontend/src/app/features/leaderboard/leaderboard-details/leaderboard-details.component.html b/frontend/src/app/features/leaderboard/leaderboard-details/leaderboard-details.component.html new file mode 100644 index 0000000000..e62f0e1ccd --- /dev/null +++ b/frontend/src/app/features/leaderboard/leaderboard-details/leaderboard-details.component.html @@ -0,0 +1,11 @@ + + +
+ +
+

Mina Web Node Testing Program

+

Details

+

TBD

+
+ +
diff --git a/frontend/src/app/features/leaderboard/leaderboard-details/leaderboard-details.component.scss b/frontend/src/app/features/leaderboard/leaderboard-details/leaderboard-details.component.scss new file mode 100644 index 0000000000..b5c5a9483d --- /dev/null +++ b/frontend/src/app/features/leaderboard/leaderboard-details/leaderboard-details.component.scss @@ -0,0 +1,41 @@ +@import 'leaderboard-variables'; + +:host { + display: block; + height: 100%; + padding-top: 52px; + background-color: $mina-cta-primary; + color: $mina-base-primary; + font-family: "IBM Plex Sans", sans-serif; +} + +main, +mina-leaderboard-header, +mina-leaderboard-footer { + max-width: 1200px; + width: 100%; + padding: 0 48px; + margin: 0 auto; + + @media (max-width: 1023px) { + padding: 0; + } +} + +h2 { + margin-top: 72px; + margin-bottom: 16px; + color: $mina-base-secondary; + font-size: 20px; + font-weight: 500; + line-height: 28px; +} + +h1 { + margin-top: 0; + margin-bottom: 80px; + color: $mina-base-primary; + font-size: 80px; + font-weight: 400; + line-height: 80px; +} diff --git a/frontend/src/app/features/leaderboard/leaderboard-details/leaderboard-details.component.ts b/frontend/src/app/features/leaderboard/leaderboard-details/leaderboard-details.component.ts new file mode 100644 index 0000000000..850603a0a0 --- /dev/null +++ b/frontend/src/app/features/leaderboard/leaderboard-details/leaderboard-details.component.ts @@ -0,0 +1,11 @@ +import { ChangeDetectionStrategy, Component } from '@angular/core'; + +@Component({ + selector: 'mina-leaderboard-details', + templateUrl: './leaderboard-details.component.html', + styleUrl: './leaderboard-details.component.scss', + changeDetection: ChangeDetectionStrategy.OnPush, +}) +export class LeaderboardDetailsComponent { + +} diff --git a/frontend/src/app/features/leaderboard/leaderboard-filters/leaderboard-filters.component.html b/frontend/src/app/features/leaderboard/leaderboard-filters/leaderboard-filters.component.html new file mode 
100644 index 0000000000..0f9db55bdb --- /dev/null +++ b/frontend/src/app/features/leaderboard/leaderboard-filters/leaderboard-filters.component.html @@ -0,0 +1,26 @@ +
+
+
+
+
+ arrow_upward + Uptime +
+
+
+
+ arrow_upward + Block Production +
+
+
+ +
+
diff --git a/frontend/src/app/features/leaderboard/leaderboard-filters/leaderboard-filters.component.scss b/frontend/src/app/features/leaderboard/leaderboard-filters/leaderboard-filters.component.scss new file mode 100644 index 0000000000..2ac9d53612 --- /dev/null +++ b/frontend/src/app/features/leaderboard/leaderboard-filters/leaderboard-filters.component.scss @@ -0,0 +1,115 @@ +@import 'leaderboard-variables'; + +@import url('https://fonts.googleapis.com/css2?family=IBM+Plex+Sans:wght@100;200;300;400;500;600;700&display=swap'); +@import url('https://fonts.googleapis.com/css2?family=Nunito+Sans:wght@200..1000&display=swap'); + +:host { + font-family: "IBM Plex Sans", sans-serif; + + section { + width: 100%; + + div.filters { + padding: 16px 0; + border-bottom: 1px solid $black6; + + .sort { + box-shadow: 0 0 0 0 transparent; + padding: 0 24px; + border-radius: 44px; + font-size: 16px; + transition: all 0.15s ease; + background-color: $mina-base-divider; + color: $black; + cursor: pointer; + position: relative; + overflow: hidden; + + &:hover { + box-shadow: 0 2px 4px 0 $black3; + } + + .sort-content { + display: flex; + align-items: center; + justify-content: center; + width: 100%; + } + + .mina-icon { + opacity: 0; + transform: translateY(-100%); + transition: all 0.3s ease; + font-size: 20px; + width: 0; + + &.show { + width: 20px; + opacity: 1; + transform: translateY(0); + } + + &.flip { + transform: rotate(180deg); + } + } + + .text { + transition: all 0.3s ease; + } + + &.active { + background-color: $mina-base-primary; + color: $mina-cta-primary; + padding-left: 16px; + + .sort-content { + transform: translateX(0); + } + + .text { + margin-left: 8px; + } + } + } + + .search { + width: 150px; + height: 32px; + + .mina-icon { + color: $black; + width: 24px; + height: 24px; + } + + input { + padding-left: 32px; + border: none; + outline: none; + font-size: 16px; + + &::placeholder { + color: $mina-base-secondary; + font-size: 16px; + font-weight: 400; + } + } 
+ } + + @media (max-width: 480px) { + .sort { + padding: 0 12px; + font-size: 3.2vw; + } + + .search input, + .search input::placeholder { + font-size: 3.3vw; + + } + } + } + } +} + diff --git a/frontend/src/app/features/leaderboard/leaderboard-filters/leaderboard-filters.component.ts b/frontend/src/app/features/leaderboard/leaderboard-filters/leaderboard-filters.component.ts new file mode 100644 index 0000000000..dae3666c25 --- /dev/null +++ b/frontend/src/app/features/leaderboard/leaderboard-filters/leaderboard-filters.component.ts @@ -0,0 +1,54 @@ +import { AfterViewInit, ChangeDetectionStrategy, Component, DestroyRef, ElementRef, OnInit, ViewChild } from '@angular/core'; +import { StoreDispatcher } from '@shared/base-classes/store-dispatcher.class'; +import { SortDirection, TableSort } from '@openmina/shared'; +import { HeartbeatSummary } from '@shared/types/leaderboard/heartbeat-summary.type'; +import { LeaderboardSelectors } from '@leaderboard/leaderboard.state'; +import { LeaderboardActions } from '@leaderboard/leaderboard.actions'; +import { fromEvent } from 'rxjs'; +import { takeUntilDestroyed } from '@angular/core/rxjs-interop'; + +@Component({ + selector: 'mina-leaderboard-filters', + templateUrl: './leaderboard-filters.component.html', + styleUrl: './leaderboard-filters.component.scss', + changeDetection: ChangeDetectionStrategy.OnPush, + host: { class: 'flex-row flex-center w-100' }, +}) +export class LeaderboardFiltersComponent extends StoreDispatcher implements OnInit, AfterViewInit { + + protected readonly SortDirection = SortDirection; + + @ViewChild('inputElement') private inputElement: ElementRef; + + currentSort: TableSort; + + constructor(private destroyRef: DestroyRef) {super();} + + ngOnInit(): void { + this.listenToSort(); + } + + ngAfterViewInit(): void { + fromEvent(this.inputElement.nativeElement, 'keyup') + .pipe(takeUntilDestroyed(this.destroyRef)) + .subscribe(() => { + this.dispatch2(LeaderboardActions.changeFilters({ filters: { 
search: this.inputElement.nativeElement.value } })); + }); + } + + private listenToSort(): void { + this.select(LeaderboardSelectors.sortBy, (sort: TableSort) => { + this.currentSort = sort; + this.detect(); + }); + } + + sortBy(sortBy: string): void { + const sortDirection = sortBy !== this.currentSort.sortBy + ? this.currentSort.sortDirection + : this.currentSort.sortDirection === SortDirection.ASC ? SortDirection.DSC : SortDirection.ASC; + const sort = { sortBy: sortBy as keyof HeartbeatSummary, sortDirection }; + this.dispatch2(LeaderboardActions.sort({ sort })); + } + +} diff --git a/frontend/src/app/features/leaderboard/leaderboard-footer/leaderboard-footer.component.html b/frontend/src/app/features/leaderboard/leaderboard-footer/leaderboard-footer.component.html new file mode 100644 index 0000000000..cffccd6d15 --- /dev/null +++ b/frontend/src/app/features/leaderboard/leaderboard-footer/leaderboard-footer.component.html @@ -0,0 +1,8 @@ +
+
© 2025 Mina Foundation. All rights reserved.
+ +
diff --git a/frontend/src/app/features/leaderboard/leaderboard-footer/leaderboard-footer.component.scss b/frontend/src/app/features/leaderboard/leaderboard-footer/leaderboard-footer.component.scss new file mode 100644 index 0000000000..0515a8ec44 --- /dev/null +++ b/frontend/src/app/features/leaderboard/leaderboard-footer/leaderboard-footer.component.scss @@ -0,0 +1,35 @@ +@import 'leaderboard-variables'; + +:host { + font-family: "IBM Plex Sans", sans-serif; + font-size: 16px; + font-weight: 400; + line-height: 24px; + color: $mina-base-primary; + + > div { + min-height: 40px; + + @media (max-width: 1023px) { + padding: 16px 24px; + line-height: 32px; + } + } +} + +.right-side { + gap: 32px; + @media (max-width: 767px) { + gap: 0; + } + + a { + color: $mina-base-primary; + cursor: pointer; + transition: .15s ease; + + &:hover { + color: $mina-access-primary; + } + } +} diff --git a/frontend/src/app/features/leaderboard/leaderboard-footer/leaderboard-footer.component.ts b/frontend/src/app/features/leaderboard/leaderboard-footer/leaderboard-footer.component.ts new file mode 100644 index 0000000000..d41ce756c0 --- /dev/null +++ b/frontend/src/app/features/leaderboard/leaderboard-footer/leaderboard-footer.component.ts @@ -0,0 +1,11 @@ +import { ChangeDetectionStrategy, Component } from '@angular/core'; + +@Component({ + selector: 'mina-leaderboard-footer', + templateUrl: './leaderboard-footer.component.html', + styleUrl: './leaderboard-footer.component.scss', + changeDetection: ChangeDetectionStrategy.OnPush, +}) +export class LeaderboardFooterComponent { + +} diff --git a/frontend/src/app/features/leaderboard/leaderboard-header/leaderboard-header.component.html b/frontend/src/app/features/leaderboard/leaderboard-header/leaderboard-header.component.html new file mode 100644 index 0000000000..2e93e9e1d0 --- /dev/null +++ b/frontend/src/app/features/leaderboard/leaderboard-header/leaderboard-header.component.html @@ -0,0 +1,13 @@ + diff --git 
a/frontend/src/app/features/leaderboard/leaderboard-header/leaderboard-header.component.scss b/frontend/src/app/features/leaderboard/leaderboard-header/leaderboard-header.component.scss new file mode 100644 index 0000000000..0940b3d5e9 --- /dev/null +++ b/frontend/src/app/features/leaderboard/leaderboard-header/leaderboard-header.component.scss @@ -0,0 +1,62 @@ +@import 'leaderboard-variables'; + +.menu { + height: 58px; + font-family: "IBM Plex Mono", sans-serif; + font-size: 16px; + font-weight: 400; + color: $mina-base-primary; + + @media (max-width: 1023px) { + padding: 0 12px; + } + + .dropdown-menu { + gap: 40px; + + @media (max-width: 767px) { + width: 100%; + flex-direction: column; + position: absolute; + top: 108px; + right: 0; + background: $mina-cta-primary; + border: 1px solid $mina-base-divider; + box-shadow: 0 4px 8px 0 rgba(0, 0, 0, 0.1); + padding: 10px; + z-index: 1000; + line-height: 60px; + gap: 0; + + a { + width: 100%; + text-align: center; + } + + &.open { + display: flex; + } + } + } + + a { + color: $mina-base-primary; + + &.active { + color: $mina-access-primary; + } + + &:hover { + color: darken($mina-access-primary, 7%); + } + } + + .hamburger-trigger { + display: none; /* Hide trigger by default */ + cursor: pointer; + + @media (max-width: 767px) { + display: block; /* Show trigger on mobile */ + } + } +} diff --git a/frontend/src/app/features/leaderboard/leaderboard-header/leaderboard-header.component.ts b/frontend/src/app/features/leaderboard/leaderboard-header/leaderboard-header.component.ts new file mode 100644 index 0000000000..4411aacf6f --- /dev/null +++ b/frontend/src/app/features/leaderboard/leaderboard-header/leaderboard-header.component.ts @@ -0,0 +1,57 @@ +import { ChangeDetectionStrategy, Component, OnInit } from '@angular/core'; +import { StoreDispatcher } from '@shared/base-classes/store-dispatcher.class'; +import { getMergedRoute, isDesktop, MergedRoute } from '@openmina/shared'; +import { animate, state, style, 
transition, trigger } from '@angular/animations'; + +@Component({ + selector: 'mina-leaderboard-header', + templateUrl: './leaderboard-header.component.html', + styleUrl: './leaderboard-header.component.scss', + changeDetection: ChangeDetectionStrategy.OnPush, + host: { class: 'flex-column' }, + animations: [ + trigger('dropdownAnimation', [ + state('closed', style({ + height: '0', + opacity: '0', + overflow: 'hidden', + })), + state('open', style({ + height: '*', + opacity: '1', + })), + transition('closed => open', [ + animate('300ms ease-out'), + ]), + transition('open => closed', [ + animate('200ms ease-in'), + ]), + ]), + ], +}) +export class LeaderboardHeaderComponent extends StoreDispatcher implements OnInit { + + route: string; + isMenuOpen: boolean = isDesktop(); + + ngOnInit(): void { + this.select(getMergedRoute, (route: MergedRoute) => { + this.route = route.url; + this.detect(); + }); + } + + closeMenu(): void { + if (isDesktop()) { + return; + } + this.isMenuOpen = false; + } + + toggleMenu(): void { + if (isDesktop()) { + return; + } + this.isMenuOpen = !this.isMenuOpen; + } +} diff --git a/frontend/src/app/features/leaderboard/leaderboard-impressum/leaderboard-impressum.component.html b/frontend/src/app/features/leaderboard/leaderboard-impressum/leaderboard-impressum.component.html new file mode 100644 index 0000000000..d17e5c0faa --- /dev/null +++ b/frontend/src/app/features/leaderboard/leaderboard-impressum/leaderboard-impressum.component.html @@ -0,0 +1,11 @@ + + +
+ +
+

Mina Web Node Testing Program

+

Impressum

+

TBD

+
+ +
diff --git a/frontend/src/app/features/leaderboard/leaderboard-impressum/leaderboard-impressum.component.scss b/frontend/src/app/features/leaderboard/leaderboard-impressum/leaderboard-impressum.component.scss new file mode 100644 index 0000000000..b5c5a9483d --- /dev/null +++ b/frontend/src/app/features/leaderboard/leaderboard-impressum/leaderboard-impressum.component.scss @@ -0,0 +1,41 @@ +@import 'leaderboard-variables'; + +:host { + display: block; + height: 100%; + padding-top: 52px; + background-color: $mina-cta-primary; + color: $mina-base-primary; + font-family: "IBM Plex Sans", sans-serif; +} + +main, +mina-leaderboard-header, +mina-leaderboard-footer { + max-width: 1200px; + width: 100%; + padding: 0 48px; + margin: 0 auto; + + @media (max-width: 1023px) { + padding: 0; + } +} + +h2 { + margin-top: 72px; + margin-bottom: 16px; + color: $mina-base-secondary; + font-size: 20px; + font-weight: 500; + line-height: 28px; +} + +h1 { + margin-top: 0; + margin-bottom: 80px; + color: $mina-base-primary; + font-size: 80px; + font-weight: 400; + line-height: 80px; +} diff --git a/frontend/src/app/features/leaderboard/leaderboard-impressum/leaderboard-impressum.component.ts b/frontend/src/app/features/leaderboard/leaderboard-impressum/leaderboard-impressum.component.ts new file mode 100644 index 0000000000..afa316342a --- /dev/null +++ b/frontend/src/app/features/leaderboard/leaderboard-impressum/leaderboard-impressum.component.ts @@ -0,0 +1,11 @@ +import { ChangeDetectionStrategy, Component } from '@angular/core'; + +@Component({ + selector: 'mina-leaderboard-impressum', + templateUrl: './leaderboard-impressum.component.html', + styleUrl: './leaderboard-impressum.component.scss', + changeDetection: ChangeDetectionStrategy.OnPush, +}) +export class LeaderboardImpressumComponent { + +} diff --git a/frontend/src/app/features/leaderboard/leaderboard-landing-page/leaderboard-landing-page.component.html 
b/frontend/src/app/features/leaderboard/leaderboard-landing-page/leaderboard-landing-page.component.html new file mode 100644 index 0000000000..c5a364c04b --- /dev/null +++ b/frontend/src/app/features/leaderboard/leaderboard-landing-page/leaderboard-landing-page.component.html @@ -0,0 +1,70 @@ + +
+ +
+ +
+ +
+ + +
+
+

We(b) Node
Do You?

+
+
+ Apply to be a node runner +

Round 1 is limited to 100 seats

+
+
+
+ +
+
+
+ +

Don't Trust, Verify

+

Every piece of data comes from verified sources. Produce, check and confirm blocks right from your device.

+
+
+ +

Start in One Click

+

A tab in your everyday browser lets you operate and verify the blockchain. No complex setup needed.

+
+
+ +

Node-To-Earn

+

Keep a browser tab open to earn rewards while supporting a fairer blockchain system. Testing now live.

+
+
+
+

Run a web node on Testnet and enter a 1000 MINA lottery

+
+ Start Testing & Earn $500 USD +

Apply by DATE. Not Selected? You're first in line next time.

+
+
+
+ +
+
+ +
+ +
+

The Mina Web Node, part 1

+

Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore . +

+

Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore .

+
+ +
+
+
+
diff --git a/frontend/src/app/features/leaderboard/leaderboard-landing-page/leaderboard-landing-page.component.scss b/frontend/src/app/features/leaderboard/leaderboard-landing-page/leaderboard-landing-page.component.scss new file mode 100644 index 0000000000..0ee7917bcf --- /dev/null +++ b/frontend/src/app/features/leaderboard/leaderboard-landing-page/leaderboard-landing-page.component.scss @@ -0,0 +1,242 @@ +@import 'leaderboard-variables'; +@import url('https://fonts.googleapis.com/css2?family=IBM+Plex+Sans:wght@100;200;300;400;500;600;700&display=swap'); +@import url('https://fonts.googleapis.com/css2?family=Nunito+Sans:wght@200..1000&display=swap'); + + +:host { + padding-top: 52px; + background-color: $mina-cta-primary; + color: $mina-base-primary; + font-family: "IBM Plex Sans", sans-serif; +} + +main, +mina-leaderboard-header, +mina-leaderboard-footer { + max-width: 1200px; + width: 100%; + padding: 0 48px; + margin: 0 auto; + + @media (max-width: 1023px) { + padding: 0; + } +} + +.button-description { + a { + color: $mina-cta-primary; + padding: 0 36px; + height: 62px; + background-color: $mina-base-primary; + border-radius: 44px; + font-size: 20px; + line-height: 20px; + font-weight: 300; + transition: .15s ease; + + &:hover { + background-color: $black; + } + } + + p { + color: $black; + font-size: 16px; + font-weight: 300; + margin-bottom: 80px; + } +} + +.overflow-y-scroll { + background-color: $mina-cta-primary; + + &::-webkit-scrollbar-track { + background-color: transparent; + } + + &::-webkit-scrollbar-thumb { + background-color: $white4; + } + + &::-webkit-scrollbar-thumb:hover { + background-color: $mina-base-secondary; + } +} + +section { + width: 100%; + max-width: 1120px; +} + +.welcome-section { + margin: 64px 0 0; + + h1 { + font-size: 80px; + line-height: 110%; + font-weight: 300; + margin-bottom: 32px; + margin-top: 0; + letter-spacing: -3px; + @media (max-width: 1023px) { + text-align: center; + font-size: 54px; + } + } + + img { + @media 
(min-width: 1024px) { + max-width: 729px; + } + } +} + +.cards-section { + color: $mina-base-primary; + padding: 40px 0; + gap: 32px; + background-position: -48px -48px; + background-size: calc(100% + 48px) calc(100% + 48px); + @media (max-width: 1023px) { + max-width: calc(100% - 5vw); + margin: 0 auto; + } + + .card { + background: $mina-base-container; + border-radius: 20px; + border: 1px solid #b7cdd8; + box-shadow: 0 11.614px 24.121px -4.467px rgba(183, 205, 216, 0.20); + backdrop-filter: blur(20.777027130126953px); + padding: 40px; + + img { + width: 72px; + height: 72px; + } + + h3 { + margin-top: 24px; + font-size: 20px; + font-weight: 500; + line-height: 28px; + margin-bottom: 8px; + } + + p { + text-align: center; + font-size: 16px; + font-weight: 400; + line-height: 24px; + margin: 0; + } + } +} + +.run-web-node { + height: 574px; + color: $mina-base-primary; + background-position: -48px -48px; + background-size: calc(100% + 48px) calc(100% + 48px); + + h1 { + line-height: 118%; + font-size: 54px; + text-align: center; + margin-bottom: 64px; + font-weight: 300; + max-width: 760px; + + @media (max-width: 1023px) { + font-size: 10vw; + } + + span { + color: $black5; + } + } +} + +.mina { + padding: 80px 0; + background: $mina-brand-gradient2; + gap: 48px; + margin-top: 24px; + + > img { + width: 100%; + } + + .explanation { + padding: 48px; + min-height: 336px; + gap: 40px; + background: rgba(239, 244, 246, 0.30); + border-radius: 20px; + border: 1px solid #b7cdd8; + box-shadow: 0 11.614px 24.121px -4.467px rgba(183, 205, 216, 0.20); + backdrop-filter: blur(20.777027130126953px); + max-width: 90%; + margin: 48px auto 0; + + img { + max-height: 240.5px; + } + + @media (max-width: 1023px) { + width: 552.5px !important; + img { + max-height: unset; + max-width: 100%; + } + } + + .text { + font-family: "IBM Plex Sans", sans-serif; + + h4 { + margin-bottom: 14px; + font-size: 20px; + line-height: 28px; + margin-top: 0; + font-weight: 400; + } + + p { + 
font-size: 16px; + line-height: 24px; + font-weight: 300; + } + + .line { + margin-top: 8px; + margin-bottom: 14px; + } + + a { + font-weight: 300; + font-size: 20px; + gap: 4px; + color: $mina-base-primary; + position: relative; + text-decoration: none; + } + + a::after { + content: ''; + position: absolute; + width: 0; + height: 2px; + bottom: -2px; + left: 0; + background-color: $mina-base-primary; + transition: width 0.3s ease-in-out; + } + + a:hover::after { + width: 80%; + } + } + } +} diff --git a/frontend/src/app/features/leaderboard/leaderboard-landing-page/leaderboard-landing-page.component.ts b/frontend/src/app/features/leaderboard/leaderboard-landing-page/leaderboard-landing-page.component.ts new file mode 100644 index 0000000000..afce1668e7 --- /dev/null +++ b/frontend/src/app/features/leaderboard/leaderboard-landing-page/leaderboard-landing-page.component.ts @@ -0,0 +1,15 @@ +import { ChangeDetectionStrategy, Component, OnInit } from '@angular/core'; + +@Component({ + selector: 'mina-leaderboard-landing-page', + templateUrl: './leaderboard-landing-page.component.html', + styleUrl: './leaderboard-landing-page.component.scss', + changeDetection: ChangeDetectionStrategy.OnPush, + host: { class: 'flex-column h-100 align-center' }, +}) +export class LeaderboardLandingPageComponent implements OnInit { + + ngOnInit(): void { + } + +} diff --git a/frontend/src/app/features/leaderboard/leaderboard-page/leaderboard-page.component.html b/frontend/src/app/features/leaderboard/leaderboard-page/leaderboard-page.component.html new file mode 100644 index 0000000000..dcdc8794c2 --- /dev/null +++ b/frontend/src/app/features/leaderboard/leaderboard-page/leaderboard-page.component.html @@ -0,0 +1,11 @@ + + +
+ +
+ + + +
+ +
diff --git a/frontend/src/app/features/leaderboard/leaderboard-page/leaderboard-page.component.scss b/frontend/src/app/features/leaderboard/leaderboard-page/leaderboard-page.component.scss new file mode 100644 index 0000000000..557f63b4eb --- /dev/null +++ b/frontend/src/app/features/leaderboard/leaderboard-page/leaderboard-page.component.scss @@ -0,0 +1,43 @@ +@import 'leaderboard-variables'; + +:host { + padding-top: 52px; + background-color: $mina-cta-primary; +} + +main, +mina-leaderboard-header, +mina-leaderboard-footer { + max-width: 1200px; + width: 100%; + padding: 0 48px; + margin: 0 auto; + + @media (max-width: 1023px) { + padding: 0; + } +} + +main { + border-bottom: 1px solid $mina-base-divider; + + @media (max-width: 1023px) { + padding: 0 10px; + } +} + +.overflow-auto { + background-color: $mina-cta-primary; + + &::-webkit-scrollbar-track { + background-color: transparent; + } + + &::-webkit-scrollbar-thumb { + background-color: $white4; + } + + &::-webkit-scrollbar-thumb:hover { + background-color: $mina-base-secondary; + } +} diff --git a/frontend/src/app/features/leaderboard/leaderboard-page/leaderboard-page.component.ts b/frontend/src/app/features/leaderboard/leaderboard-page/leaderboard-page.component.ts new file mode 100644 index 0000000000..15e87823ce --- /dev/null +++ b/frontend/src/app/features/leaderboard/leaderboard-page/leaderboard-page.component.ts @@ -0,0 +1,24 @@ +import { ChangeDetectionStrategy, Component, OnInit } from '@angular/core'; +import { StoreDispatcher } from '@shared/base-classes/store-dispatcher.class'; +import { LeaderboardActions } from '@leaderboard/leaderboard.actions'; +import { timer } from 'rxjs'; +import { untilDestroyed } from '@ngneat/until-destroy'; + +@Component({ + selector: 'mina-leaderboard-page', + templateUrl: './leaderboard-page.component.html', + styleUrl: './leaderboard-page.component.scss', + changeDetection: ChangeDetectionStrategy.OnPush, + host: { class: 'flex-column h-100' }, +}) +export class 
LeaderboardPageComponent extends StoreDispatcher implements OnInit { + + ngOnInit(): void { + timer(0, 5000) + .pipe(untilDestroyed(this)) + .subscribe(() => { + this.dispatch2(LeaderboardActions.getHeartbeats()); + }); + } + +} diff --git a/frontend/src/app/features/leaderboard/leaderboard-privacy-policy/leaderboard-privacy-policy.component.html b/frontend/src/app/features/leaderboard/leaderboard-privacy-policy/leaderboard-privacy-policy.component.html new file mode 100644 index 0000000000..06a069d919 --- /dev/null +++ b/frontend/src/app/features/leaderboard/leaderboard-privacy-policy/leaderboard-privacy-policy.component.html @@ -0,0 +1,11 @@ + + +
+ +
+

Mina Web Node Testing Program

+

Privacy Policy

+

TBD

+
+ +
diff --git a/frontend/src/app/features/leaderboard/leaderboard-privacy-policy/leaderboard-privacy-policy.component.scss b/frontend/src/app/features/leaderboard/leaderboard-privacy-policy/leaderboard-privacy-policy.component.scss new file mode 100644 index 0000000000..b5c5a9483d --- /dev/null +++ b/frontend/src/app/features/leaderboard/leaderboard-privacy-policy/leaderboard-privacy-policy.component.scss @@ -0,0 +1,41 @@ +@import 'leaderboard-variables'; + +:host { + display: block; + height: 100%; + padding-top: 52px; + background-color: $mina-cta-primary; + color: $mina-base-primary; + font-family: "IBM Plex Sans", sans-serif; +} + +main, +mina-leaderboard-header, +mina-leaderboard-footer { + max-width: 1200px; + width: 100%; + padding: 0 48px; + margin: 0 auto; + + @media (max-width: 1023px) { + padding: 0; + } +} + +h2 { + margin-top: 72px; + margin-bottom: 16px; + color: $mina-base-secondary; + font-size: 20px; + font-weight: 500; + line-height: 28px; +} + +h1 { + margin-top: 0; + margin-bottom: 80px; + color: $mina-base-primary; + font-size: 80px; + font-weight: 400; + line-height: 80px; +} diff --git a/frontend/src/app/features/leaderboard/leaderboard-privacy-policy/leaderboard-privacy-policy.component.ts b/frontend/src/app/features/leaderboard/leaderboard-privacy-policy/leaderboard-privacy-policy.component.ts new file mode 100644 index 0000000000..54daff4012 --- /dev/null +++ b/frontend/src/app/features/leaderboard/leaderboard-privacy-policy/leaderboard-privacy-policy.component.ts @@ -0,0 +1,11 @@ +import { ChangeDetectionStrategy, Component } from '@angular/core'; + +@Component({ + selector: 'mina-leaderboard-privacy-policy', + templateUrl: './leaderboard-privacy-policy.component.html', + styleUrl: './leaderboard-privacy-policy.component.scss', + changeDetection: ChangeDetectionStrategy.OnPush, +}) +export class LeaderboardPrivacyPolicyComponent { + +} diff --git a/frontend/src/app/features/leaderboard/leaderboard-table/leaderboard-table.component.html 
b/frontend/src/app/features/leaderboard/leaderboard-table/leaderboard-table.component.html new file mode 100644 index 0000000000..6c9f2f3686 --- /dev/null +++ b/frontend/src/app/features/leaderboard/leaderboard-table/leaderboard-table.component.html @@ -0,0 +1,50 @@ +
+
+ Web Node Public Key + Uptime + Produced Blocks +
+
+
+ @if (!isLoading) { + @for (row of rows; track $index) { +
+
+ + circle + {{ row.publicKey | truncateMid: (desktop ? 15 : 6): 6 }} + + + {{ row.uptimePercentage }}% + @if (row.uptimePercentage > 33.33) { + bookmark_check + } + @if (row.uptimePrize) { + + } + + + {{ row.blocksProduced ?? 0 }} + @if (row.blocksPrize) { + + } + +
+
+ } + } @else { +
+ +
Loading
+
+ } +
+ + + + + + diff --git a/frontend/src/app/features/leaderboard/leaderboard-table/leaderboard-table.component.scss b/frontend/src/app/features/leaderboard/leaderboard-table/leaderboard-table.component.scss new file mode 100644 index 0000000000..a4868d0b6c --- /dev/null +++ b/frontend/src/app/features/leaderboard/leaderboard-table/leaderboard-table.component.scss @@ -0,0 +1,95 @@ +@import 'leaderboard-variables'; + +.mina-icon { + font-variation-settings: 'FILL' 1, 'wght' 400 !important; +} + +.row.head span { + color: $black; + font-size: 16px; + font-weight: 400; +} + +.row-wrap { + &.head { + max-width: unset; + height: 56px; + + .row { + margin: 0 auto; + font-family: "IBM Plex Sans", sans-serif; + } + } + + &.odd:not(.head) { + background-color: $mina-base-container; + } +} + +.row { + display: grid; + grid-template-columns: 40% 20% 1fr; + width: 100%; + height: 40px; + line-height: 40px; + font-family: "IBM Plex Mono", sans-serif; + font-size: 16px; + + @media (max-width: 480px) { + grid-template-columns: 48% 24% 1fr; + } + + span { + color: $black; + + &:not(.mina-icon) { + @media (max-width: 480px) { + font-size: 3vw; + } + } + } + + .circle { + color: $black4; + } + + .perc { + width: 37px; + @media (max-width: 480px) { + width: 26px; + } + } + + .circle.active { + color: $mina-brand-cyan; + } +} + +.fx-row-vert-cent { + .mina-icon { + width: 26px; + } + + mina-loading-spinner { + margin-left: -5px; + margin-right: 5px; + } + + .mina-icon { + padding-left: 2px; + } +} + +:host ::ng-deep mina-loading-spinner .loading { + border: 1px solid $mina-base-primary !important; + border-top-color: transparent !important; +} + +.p-absolute { + background-color: $mina-cta-primary; +} + +mina-loading-spinner + div { + font-family: "IBM Plex Sans", sans-serif; + color: $mina-base-primary; +} diff --git a/frontend/src/app/features/leaderboard/leaderboard-table/leaderboard-table.component.ts 
b/frontend/src/app/features/leaderboard/leaderboard-table/leaderboard-table.component.ts new file mode 100644 index 0000000000..a36b6fcc7a --- /dev/null +++ b/frontend/src/app/features/leaderboard/leaderboard-table/leaderboard-table.component.ts @@ -0,0 +1,50 @@ +import { ChangeDetectionStrategy, Component, OnInit } from '@angular/core'; +import { LeaderboardSelectors } from '@leaderboard/leaderboard.state'; +import { HeartbeatSummary } from '@shared/types/leaderboard/heartbeat-summary.type'; +import { StoreDispatcher } from '@shared/base-classes/store-dispatcher.class'; +import { isDesktop } from '@openmina/shared'; +import { animate, style, transition, trigger } from '@angular/animations'; + +@Component({ + selector: 'mina-leaderboard-table', + templateUrl: './leaderboard-table.component.html', + styleUrl: './leaderboard-table.component.scss', + changeDetection: ChangeDetectionStrategy.OnPush, + host: { class: 'flex-column w-100 h-100' }, + animations: [ + trigger('fadeInOut', [ + transition(':enter', [ + style({ opacity: 0 }), + animate('400ms linear', style({ opacity: 1 })), + ]), + transition(':leave', [ + animate('400ms linear', style({ opacity: 0 })), + ]), + ]), + ], +}) +export class LeaderboardTableComponent extends StoreDispatcher implements OnInit { + + isLoading: boolean = true; + rows: HeartbeatSummary[] = []; + desktop: boolean = isDesktop(); + + ngOnInit(): void { + this.listenToEmptyInDatabase(); + this.listenToHeartbeatsChanges(); + } + + private listenToEmptyInDatabase(): void { + this.select(LeaderboardSelectors.isLoading, (loading: boolean) => { + this.isLoading = loading; + this.detect(); + }); + } + + private listenToHeartbeatsChanges(): void { + this.select(LeaderboardSelectors.filteredHeartbeatSummaries, (rows: HeartbeatSummary[]) => { + this.rows = rows; + this.detect(); + }); + } +} diff --git a/frontend/src/app/features/leaderboard/leaderboard-terms-and-conditions/leaderboard-terms-and-conditions.component.html 
b/frontend/src/app/features/leaderboard/leaderboard-terms-and-conditions/leaderboard-terms-and-conditions.component.html new file mode 100644 index 0000000000..2c5e2dcd0c --- /dev/null +++ b/frontend/src/app/features/leaderboard/leaderboard-terms-and-conditions/leaderboard-terms-and-conditions.component.html @@ -0,0 +1,11 @@ + + +
+ +
+

Mina Web Node Testing Program

+

Terms and Conditions

+

TBD

+
+ +
diff --git a/frontend/src/app/features/leaderboard/leaderboard-terms-and-conditions/leaderboard-terms-and-conditions.component.scss b/frontend/src/app/features/leaderboard/leaderboard-terms-and-conditions/leaderboard-terms-and-conditions.component.scss new file mode 100644 index 0000000000..b5c5a9483d --- /dev/null +++ b/frontend/src/app/features/leaderboard/leaderboard-terms-and-conditions/leaderboard-terms-and-conditions.component.scss @@ -0,0 +1,41 @@ +@import 'leaderboard-variables'; + +:host { + display: block; + height: 100%; + padding-top: 52px; + background-color: $mina-cta-primary; + color: $mina-base-primary; + font-family: "IBM Plex Sans", sans-serif; +} + +main, +mina-leaderboard-header, +mina-leaderboard-footer { + max-width: 1200px; + width: 100%; + padding: 0 48px; + margin: 0 auto; + + @media (max-width: 1023px) { + padding: 0; + } +} + +h2 { + margin-top: 72px; + margin-bottom: 16px; + color: $mina-base-secondary; + font-size: 20px; + font-weight: 500; + line-height: 28px; +} + +h1 { + margin-top: 0; + margin-bottom: 80px; + color: $mina-base-primary; + font-size: 80px; + font-weight: 400; + line-height: 80px; +} diff --git a/frontend/src/app/features/leaderboard/leaderboard-terms-and-conditions/leaderboard-terms-and-conditions.component.ts b/frontend/src/app/features/leaderboard/leaderboard-terms-and-conditions/leaderboard-terms-and-conditions.component.ts new file mode 100644 index 0000000000..ebb2ba6d5c --- /dev/null +++ b/frontend/src/app/features/leaderboard/leaderboard-terms-and-conditions/leaderboard-terms-and-conditions.component.ts @@ -0,0 +1,11 @@ +import { ChangeDetectionStrategy, Component } from '@angular/core'; + +@Component({ + selector: 'mina-leaderboard-terms-and-conditions', + templateUrl: './leaderboard-terms-and-conditions.component.html', + styleUrl: './leaderboard-terms-and-conditions.component.scss', + changeDetection: ChangeDetectionStrategy.OnPush, +}) +export class LeaderboardTermsAndConditionsComponent { + +} diff --git 
a/frontend/src/app/features/leaderboard/leaderboard-title/leaderboard-title.component.html b/frontend/src/app/features/leaderboard/leaderboard-title/leaderboard-title.component.html new file mode 100644 index 0000000000..243f8138fe --- /dev/null +++ b/frontend/src/app/features/leaderboard/leaderboard-title/leaderboard-title.component.html @@ -0,0 +1 @@ +

Leaderboard

diff --git a/frontend/src/app/features/leaderboard/leaderboard-title/leaderboard-title.component.scss b/frontend/src/app/features/leaderboard/leaderboard-title/leaderboard-title.component.scss new file mode 100644 index 0000000000..e4872bf88f --- /dev/null +++ b/frontend/src/app/features/leaderboard/leaderboard-title/leaderboard-title.component.scss @@ -0,0 +1,19 @@ +@import 'leaderboard-variables'; + +:host { + max-width: 1200px; + width: 100%; + margin: 0 auto; +} + +h1 { + font-family: "IBM Plex Sans", sans-serif; + font-weight: 300; + font-size: 80px; + color: $black6; + margin: 80px 0; + + @media (max-width: 1023px) { + font-size: 10vw; + } +} diff --git a/frontend/src/app/features/leaderboard/leaderboard-title/leaderboard-title.component.ts b/frontend/src/app/features/leaderboard/leaderboard-title/leaderboard-title.component.ts new file mode 100644 index 0000000000..ab3ff4348d --- /dev/null +++ b/frontend/src/app/features/leaderboard/leaderboard-title/leaderboard-title.component.ts @@ -0,0 +1,27 @@ +import { ChangeDetectionStrategy, Component, OnInit } from '@angular/core'; +import { StoreDispatcher } from '@shared/base-classes/store-dispatcher.class'; +import { HeartbeatSummary } from '@shared/types/leaderboard/heartbeat-summary.type'; +import { LeaderboardSelectors } from '@leaderboard/leaderboard.state'; + +@Component({ + selector: 'mina-leaderboard-title', + templateUrl: './leaderboard-title.component.html', + styleUrl: './leaderboard-title.component.scss', + changeDetection: ChangeDetectionStrategy.OnPush, + host: { class: 'flex-column' }, +}) +export class LeaderboardTitleComponent extends StoreDispatcher implements OnInit { + + rows: HeartbeatSummary[] = []; + + ngOnInit(): void { + this.listenToHeartbeatsChanges(); + } + + private listenToHeartbeatsChanges(): void { + this.select(LeaderboardSelectors.filteredHeartbeatSummaries, (rows: HeartbeatSummary[]) => { + this.rows = rows; + this.detect(); + }); + } +} diff --git 
a/frontend/src/app/features/leaderboard/leaderboard.actions.ts b/frontend/src/app/features/leaderboard/leaderboard.actions.ts new file mode 100644 index 0000000000..e89de21b15 --- /dev/null +++ b/frontend/src/app/features/leaderboard/leaderboard.actions.ts @@ -0,0 +1,27 @@ +import { createType } from '@shared/constants/store-functions'; +import { createAction, props } from '@ngrx/store'; +import { HeartbeatSummary } from '@shared/types/leaderboard/heartbeat-summary.type'; +import { TableSort } from '@openmina/shared'; + +export const LEADERBOARD_KEY = 'leaderboard'; +export const LEADERBOARD_PREFIX = 'Leaderboard'; + +const type = (type: T) => createType(LEADERBOARD_PREFIX, null, type); + +const init = createAction(type('Init')); +const close = createAction(type('Close')); +const getHeartbeats = createAction(type('Get Heartbeats')); +const getHeartbeatsSuccess = createAction(type('Get Heartbeats Success'), props<{ + heartbeatSummaries: HeartbeatSummary[], +}>()); +const changeFilters = createAction(type('Change Filters'), props<{ filters: any }>()); +const sort = createAction(type('Sort'), props<{ sort: TableSort }>()); + +export const LeaderboardActions = { + init, + close, + getHeartbeats, + getHeartbeatsSuccess, + changeFilters, + sort, +}; diff --git a/frontend/src/app/features/leaderboard/leaderboard.effects.ts b/frontend/src/app/features/leaderboard/leaderboard.effects.ts new file mode 100644 index 0000000000..bc784019b9 --- /dev/null +++ b/frontend/src/app/features/leaderboard/leaderboard.effects.ts @@ -0,0 +1,33 @@ +import { Injectable } from '@angular/core'; +import { MinaState, selectMinaState } from '@app/app.setup'; +import { Actions, createEffect, ofType } from '@ngrx/effects'; +import { Effect } from '@openmina/shared'; +import { map, switchMap } from 'rxjs'; +import { catchErrorAndRepeat2 } from '@shared/constants/store-functions'; +import { MinaErrorType } from '@shared/types/error-preview/mina-error-type.enum'; +import { Store } from '@ngrx/store'; 
+import { BaseEffect } from '@shared/base-classes/mina-rust-base.effect'; +import { LeaderboardActions } from '@leaderboard/leaderboard.actions'; +import { LeaderboardService } from '@leaderboard/leaderboard.service'; + +@Injectable({ + providedIn: 'root', +}) +export class LeaderboardEffects extends BaseEffect { + + readonly getHeartbeats$: Effect; + + constructor(private actions$: Actions, + private leaderboardService: LeaderboardService, + store: Store) { + super(store, selectMinaState); + + this.getHeartbeats$ = createEffect(() => this.actions$.pipe( + ofType(LeaderboardActions.getHeartbeats), + this.latestActionState(), + switchMap(() => this.leaderboardService.getHeartbeatsSummaries()), + map(heartbeatSummaries => LeaderboardActions.getHeartbeatsSuccess({ heartbeatSummaries })), + catchErrorAndRepeat2(MinaErrorType.GENERIC, LeaderboardActions.getHeartbeatsSuccess({ heartbeatSummaries: [] })), + )); + } +} diff --git a/frontend/src/app/features/leaderboard/leaderboard.module.ts b/frontend/src/app/features/leaderboard/leaderboard.module.ts new file mode 100644 index 0000000000..44c318a2f0 --- /dev/null +++ b/frontend/src/app/features/leaderboard/leaderboard.module.ts @@ -0,0 +1,50 @@ +import { NgModule } from '@angular/core'; +import { CommonModule } from '@angular/common'; + +import { LeaderboardRouting } from './leaderboard.routing'; +import { LeaderboardFiltersComponent } from '@leaderboard/leaderboard-filters/leaderboard-filters.component'; +import { LeaderboardHeaderComponent } from '@leaderboard/leaderboard-header/leaderboard-header.component'; +import { LeaderboardPageComponent } from '@leaderboard/leaderboard-page/leaderboard-page.component'; +import { LeaderboardTableComponent } from '@leaderboard/leaderboard-table/leaderboard-table.component'; +import { LeaderboardTitleComponent } from '@leaderboard/leaderboard-title/leaderboard-title.component'; +import { CopyComponent, OpenminaSharedModule } from '@openmina/shared'; +import { LoadingSpinnerComponent 
} from '@shared/loading-spinner/loading-spinner.component'; +import { EffectsModule } from '@ngrx/effects'; +import { LeaderboardEffects } from '@leaderboard/leaderboard.effects'; +import { LeaderboardFooterComponent } from '@leaderboard/leaderboard-footer/leaderboard-footer.component'; +import { LeaderboardLandingPageComponent } from '@leaderboard/leaderboard-landing-page/leaderboard-landing-page.component'; +import { LeaderboardDetailsComponent } from '@leaderboard/leaderboard-details/leaderboard-details.component'; +import { LeaderboardTermsAndConditionsComponent } from '@leaderboard/leaderboard-terms-and-conditions/leaderboard-terms-and-conditions.component'; +import { LeaderboardImpressumComponent } from '@leaderboard/leaderboard-impressum/leaderboard-impressum.component'; +import { LeaderboardPrivacyPolicyComponent } from '@leaderboard/leaderboard-privacy-policy/leaderboard-privacy-policy.component'; +import { LeaderboardApplyComponent } from '@leaderboard/leaderboard-apply/leaderboard-apply.component'; + + +@NgModule({ + declarations: [ + LeaderboardPageComponent, + LeaderboardFiltersComponent, + LeaderboardHeaderComponent, + LeaderboardTableComponent, + LeaderboardTitleComponent, + LeaderboardFooterComponent, + LeaderboardLandingPageComponent, + LeaderboardDetailsComponent, + LeaderboardTermsAndConditionsComponent, + LeaderboardImpressumComponent, + LeaderboardPrivacyPolicyComponent, + LeaderboardApplyComponent, + ], + imports: [ + CommonModule, + LeaderboardRouting, + CopyComponent, + OpenminaSharedModule, + LoadingSpinnerComponent, + EffectsModule.forFeature(LeaderboardEffects), + ], + exports: [ + LeaderboardLandingPageComponent, + ], +}) +export class LeaderboardModule {} diff --git a/frontend/src/app/features/leaderboard/leaderboard.reducer.ts b/frontend/src/app/features/leaderboard/leaderboard.reducer.ts new file mode 100644 index 0000000000..770d162cb4 --- /dev/null +++ b/frontend/src/app/features/leaderboard/leaderboard.reducer.ts @@ -0,0 +1,56 @@ 
+import { createReducer, on } from '@ngrx/store'; +import { LeaderboardState } from '@leaderboard/leaderboard.state'; +import { LeaderboardActions } from '@leaderboard/leaderboard.actions'; +import { HeartbeatSummary } from '@shared/types/leaderboard/heartbeat-summary.type'; +import { sort, SortDirection, TableSort } from '@openmina/shared'; + + +const initialState: LeaderboardState = { + filteredHeartbeatSummaries: [], + heartbeatSummaries: [], + filters: { + search: '', + }, + sortBy: { + sortDirection: SortDirection.DSC, + sortBy: 'uptimePercentage', + }, + isLoading: true, +}; + +export const leaderboardReducer = createReducer( + initialState, + on(LeaderboardActions.getHeartbeatsSuccess, (state, { heartbeatSummaries }) => ({ + ...state, + isLoading: false, + heartbeatSummaries, + filteredHeartbeatSummaries: sortHeartbeats(filterHeartbeats(heartbeatSummaries, state.filters), state.sortBy), + })), + on(LeaderboardActions.changeFilters, (state, { filters }) => ({ + ...state, + filters, + filteredHeartbeatSummaries: sortHeartbeats(filterHeartbeats(state.heartbeatSummaries, filters), state.sortBy), + })), + on(LeaderboardActions.sort, (state, { sort }) => ({ + ...state, + sortBy: sort, + filteredHeartbeatSummaries: sortHeartbeats(state.filteredHeartbeatSummaries, sort), + })), +); + + +function sortHeartbeats(node: HeartbeatSummary[], tableSort: TableSort): HeartbeatSummary[] { + return sort(node, tableSort, []); +} + +function filterHeartbeats(summaries: HeartbeatSummary[], filters: any): HeartbeatSummary[] { + return summaries.filter(summary => { + if (filters.search?.length) { + const searchTerm = filters.search.toLowerCase(); + const searchMatch = summary.publicKey.toLowerCase().includes(searchTerm); + if (!searchMatch) return false; + } + + return true; + }); +} diff --git a/frontend/src/app/features/leaderboard/leaderboard.routing.ts b/frontend/src/app/features/leaderboard/leaderboard.routing.ts new file mode 100644 index 0000000000..69b819e42c --- /dev/null 
+++ b/frontend/src/app/features/leaderboard/leaderboard.routing.ts @@ -0,0 +1,40 @@ +import { NgModule } from '@angular/core'; +import { RouterModule, Routes } from '@angular/router'; +import { LeaderboardPageComponent } from '@leaderboard/leaderboard-page/leaderboard-page.component'; +import { LeaderboardDetailsComponent } from '@leaderboard/leaderboard-details/leaderboard-details.component'; +import { LeaderboardPrivacyPolicyComponent } from '@leaderboard/leaderboard-privacy-policy/leaderboard-privacy-policy.component'; +import { LeaderboardTermsAndConditionsComponent } from '@leaderboard/leaderboard-terms-and-conditions/leaderboard-terms-and-conditions.component'; +import { LeaderboardImpressumComponent } from '@leaderboard/leaderboard-impressum/leaderboard-impressum.component'; + +const routes: Routes = [ + { + path: 'leaderboard', + component: LeaderboardPageComponent, + }, + { + path: 'leaderboard/details', + component: LeaderboardDetailsComponent, + }, + { + path: 'leaderboard/impressum', + component: LeaderboardImpressumComponent, + }, + { + path: 'leaderboard/privacy-policy', + component: LeaderboardPrivacyPolicyComponent, + }, + { + path: 'leaderboard/terms-and-conditions', + component: LeaderboardTermsAndConditionsComponent, + }, + { + path: '**', + redirectTo: '', + }, +]; + +@NgModule({ + imports: [RouterModule.forChild(routes)], + exports: [RouterModule], +}) +export class LeaderboardRouting {} diff --git a/frontend/src/app/features/leaderboard/leaderboard.service.ts b/frontend/src/app/features/leaderboard/leaderboard.service.ts new file mode 100644 index 0000000000..6c19937356 --- /dev/null +++ b/frontend/src/app/features/leaderboard/leaderboard.service.ts @@ -0,0 +1,47 @@ +import { Injectable } from '@angular/core'; +import { combineLatest, map, Observable } from 'rxjs'; +import { HeartbeatSummary } from '@shared/types/leaderboard/heartbeat-summary.type'; +import { collection, collectionData, CollectionReference, Firestore } from 
'@angular/fire/firestore'; + +@Injectable({ + providedIn: 'root', +}) +export class LeaderboardService { + + private scoresCollection: CollectionReference; + private maxScoreCollection: CollectionReference; + + constructor(private firestore: Firestore) { + this.scoresCollection = collection(this.firestore, 'scores'); + this.maxScoreCollection = collection(this.firestore, 'maxScore'); + } + + getHeartbeatsSummaries(): Observable { + return combineLatest([ + collectionData(this.scoresCollection, { idField: 'id' }), + collectionData(this.maxScoreCollection, { idField: 'id' }), + ]).pipe( + map(([scores, maxScore]) => { + const maxScoreRightNow = maxScore.find(c => c.id === 'current')['value']; + + const items = scores.map(score => ({ + publicKey: score['publicKey'], + blocksProduced: score['blocksProduced'], + isActive: score['lastUpdated'] > Date.now() - 120000, + uptimePercentage: Math.floor((score['score'] / maxScoreRightNow) * 100), + uptimePrize: false, + blocksPrize: false, + } as HeartbeatSummary)); + + const sortedItemsByUptime = [...items].sort((a, b) => b.uptimePercentage - a.uptimePercentage); + const fifthPlacePercentageByUptime = sortedItemsByUptime[4]?.uptimePercentage ?? 
0; + const highestProducedBlocks = Math.max(...items.map(item => item.blocksProduced)); + return items.map(item => ({ + ...item, + uptimePrize: item.uptimePercentage >= fifthPlacePercentageByUptime, + blocksPrize: item.blocksProduced === highestProducedBlocks, + })); + }), + ); + } +} diff --git a/frontend/src/app/features/leaderboard/leaderboard.state.ts b/frontend/src/app/features/leaderboard/leaderboard.state.ts new file mode 100644 index 0000000000..32eb9257ff --- /dev/null +++ b/frontend/src/app/features/leaderboard/leaderboard.state.ts @@ -0,0 +1,29 @@ +import { createFeatureSelector, createSelector, MemoizedSelector } from '@ngrx/store'; +import { MinaState } from '@app/app.setup'; +import { HeartbeatSummary } from '@shared/types/leaderboard/heartbeat-summary.type'; +import { LEADERBOARD_KEY } from '@leaderboard/leaderboard.actions'; +import { TableSort } from '@openmina/shared'; + +export interface LeaderboardState { + filteredHeartbeatSummaries: HeartbeatSummary[]; + heartbeatSummaries: HeartbeatSummary[]; + filters: { search: string }; + sortBy: TableSort; + isLoading: boolean; +} + +const select = (selector: (state: LeaderboardState) => T): MemoizedSelector => createSelector( + createFeatureSelector(LEADERBOARD_KEY), + selector, +); +const filteredHeartbeatSummaries = select(state => state.filteredHeartbeatSummaries); +const filters = select(state => state.filters); +const sortBy = select(state => state.sortBy); +const isLoading = select(state => state.isLoading); + +export const LeaderboardSelectors = { + filteredHeartbeatSummaries, + filters, + sortBy, + isLoading, +}; diff --git a/frontend/src/app/features/web-node/web-node-initialization/web-node-initialization.component.ts b/frontend/src/app/features/web-node/web-node-initialization/web-node-initialization.component.ts index 07bf15f813..af2a5084c4 100644 --- a/frontend/src/app/features/web-node/web-node-initialization/web-node-initialization.component.ts +++ 
b/frontend/src/app/features/web-node/web-node-initialization/web-node-initialization.component.ts @@ -2,10 +2,10 @@ import { AfterViewInit, ChangeDetectionStrategy, Component, ElementRef, OnInit, import { untilDestroyed } from '@ngneat/until-destroy'; import { StoreDispatcher } from '@shared/base-classes/store-dispatcher.class'; import { WebNodeService } from '@core/services/web-node.service'; -import { any, GlobalErrorHandlerService, safelyExecuteInBrowser } from '@openmina/shared'; -import { NgClass, NgForOf, NgIf, NgOptimizedImage } from '@angular/common'; +import { GlobalErrorHandlerService, safelyExecuteInBrowser } from '@openmina/shared'; +import { NgClass, NgOptimizedImage } from '@angular/common'; import { Router } from '@angular/router'; -import { CONFIG, getFirstFeature } from '@shared/constants/config'; +import { getFirstFeature } from '@shared/constants/config'; import { animate, style, transition, trigger } from '@angular/animations'; import { filter, switchMap, timer } from 'rxjs'; import { LoadingSpinnerComponent } from '@shared/loading-spinner/loading-spinner.component'; @@ -33,8 +33,6 @@ export interface WebNodeLoadingStep { standalone: true, imports: [ NgClass, - NgIf, - NgForOf, NgOptimizedImage, LoadingSpinnerComponent, ], diff --git a/frontend/src/app/features/web-node/web-node.component.ts b/frontend/src/app/features/web-node/web-node.component.ts index ee262db3d8..81f919da09 100644 --- a/frontend/src/app/features/web-node/web-node.component.ts +++ b/frontend/src/app/features/web-node/web-node.component.ts @@ -31,6 +31,7 @@ export class WebNodeComponent extends StoreDispatcher implements OnInit { private webNodeService: WebNodeService) { super(); } ngOnInit(): void { + document.body.style.backgroundColor = 'var(--base-background)'; this.listenToFileUploadingEvents(); this.checkIfDeviceIsSupported(); this.listenToRoute(); @@ -42,7 +43,7 @@ export class WebNodeComponent extends StoreDispatcher implements OnInit { private listenToRoute(): void { 
this.select(getMergedRoute, (route: MergedRoute) => { - let initial = 174; + let initial = 176; if (route.queryParams['initial']) { initial = Number(route.queryParams['initial']); } diff --git a/frontend/src/app/layout/web-node-landing-page/web-node-landing-page.component.ts b/frontend/src/app/layout/web-node-landing-page/web-node-landing-page.component.ts index f8cd2143ec..f5d129cfb9 100644 --- a/frontend/src/app/layout/web-node-landing-page/web-node-landing-page.component.ts +++ b/frontend/src/app/layout/web-node-landing-page/web-node-landing-page.component.ts @@ -1,5 +1,4 @@ import { ChangeDetectionStrategy, Component, EventEmitter, OnInit, Output } from '@angular/core'; -import { NgOptimizedImage } from '@angular/common'; import { StoreDispatcher } from '@shared/base-classes/store-dispatcher.class'; import { AppSelectors } from '@app/app.state'; import { filter } from 'rxjs'; @@ -7,9 +6,6 @@ import { filter } from 'rxjs'; @Component({ selector: 'mina-web-node-landing-page', standalone: true, - imports: [ - NgOptimizedImage, - ], templateUrl: './web-node-landing-page.component.html', styleUrl: './web-node-landing-page.component.scss', changeDetection: ChangeDetectionStrategy.OnPush, diff --git a/frontend/src/app/shared/enums/routes.enum.ts b/frontend/src/app/shared/enums/routes.enum.ts index 0a30a63b14..bb7b68e769 100644 --- a/frontend/src/app/shared/enums/routes.enum.ts +++ b/frontend/src/app/shared/enums/routes.enum.ts @@ -41,4 +41,5 @@ export enum Routes { OCAML = 'ocaml', RUST = 'rust', FUZZING = 'fuzzing', + LEADERBOARD = 'leaderboard', } diff --git a/frontend/src/app/shared/types/core/environment/mina-env.type.ts b/frontend/src/app/shared/types/core/environment/mina-env.type.ts index 0edf5d8720..febcec9c4b 100644 --- a/frontend/src/app/shared/types/core/environment/mina-env.type.ts +++ b/frontend/src/app/shared/types/core/environment/mina-env.type.ts @@ -13,6 +13,7 @@ export interface MinaEnv { globalConfig?: { features?: FeaturesConfig; graphQL?: string; 
+ firebase?: any; }; } diff --git a/frontend/src/app/shared/types/leaderboard/heartbeat-summary.type.ts b/frontend/src/app/shared/types/leaderboard/heartbeat-summary.type.ts new file mode 100644 index 0000000000..a134981c2f --- /dev/null +++ b/frontend/src/app/shared/types/leaderboard/heartbeat-summary.type.ts @@ -0,0 +1,8 @@ +export interface HeartbeatSummary { + publicKey: string; + isActive: boolean; + uptimePercentage: number; + blocksProduced: number; + uptimePrize: boolean; + blocksPrize: boolean; +} diff --git a/frontend/src/app/shared/types/leaderboard/heartbeat.type.ts b/frontend/src/app/shared/types/leaderboard/heartbeat.type.ts new file mode 100644 index 0000000000..1e697df0e6 --- /dev/null +++ b/frontend/src/app/shared/types/leaderboard/heartbeat.type.ts @@ -0,0 +1,3 @@ +// export interface Heartbeat { +// +// } diff --git a/frontend/src/assets/environments/leaderboard.js b/frontend/src/assets/environments/leaderboard.js new file mode 100644 index 0000000000..904531dd35 --- /dev/null +++ b/frontend/src/assets/environments/leaderboard.js @@ -0,0 +1,37 @@ +/** + * This configuration is used for the staging-webnode environment. 
+ */ + +export default { + production: true, + canAddNodes: false, + showWebNodeLandingPage: true, + globalConfig: { + features: { + 'dashboard': [], + 'block-production': ['won-slots'], + 'mempool': [], + 'benchmarks': ['wallets'], + 'state': ['actions'], + }, + firebase: { + apiKey: 'AIzaSyBZzFsHjIbQVbBP0N-KkUsEvHRVU_wwd7g', + authDomain: 'webnode-gtm-test.firebaseapp.com', + projectId: 'webnode-gtm-test', + storageBucket: 'webnode-gtm-test.firebasestorage.app', + messagingSenderId: '1016673359357', + appId: '1:1016673359357:web:bbd2cbf3f031756aec7594', + measurementId: 'G-ENDBL923XT', + }, + }, + // sentry: { + // dsn: 'https://69aba72a6290383494290cf285ab13b3@o4508216158584832.ingest.de.sentry.io/4508216160616528', + // tracingOrigins: ['https://www.openmina.com', 'webnode-gtm-test.firebaseapp.com', 'webnode-gtm-test.firebasestorage.app'], + // }, + configs: [ + { + name: 'Web Node', + isWebNode: true, + }, + ], +}; diff --git a/frontend/src/assets/environments/webnode.js b/frontend/src/assets/environments/webnode.js index d6dd167f07..14ed05ebc1 100644 --- a/frontend/src/assets/environments/webnode.js +++ b/frontend/src/assets/environments/webnode.js @@ -12,6 +12,16 @@ export default { 'block-production': ['won-slots'], 'mempool': [], 'benchmarks': ['wallets'], + 'state': ['actions'], + }, + firebase: { + 'projectId': 'openminawebnode', + 'appId': '1:120031499786:web:9af56c50ebce25c619f1f3', + 'storageBucket': 'openminawebnode.firebasestorage.app', + 'apiKey': 'AIzaSyBreMkb5-8ANb5zL6yWKgRAk9owbDS1g9s', + 'authDomain': 'openminawebnode.firebaseapp.com', + 'messagingSenderId': '120031499786', + 'measurementId': 'G-V0ZC81T9RQ', }, }, sentry: { diff --git a/frontend/src/assets/images/landing-page/blog-featured-image.jpg b/frontend/src/assets/images/landing-page/blog-featured-image.jpg new file mode 100644 index 0000000000..5d79cbe252 Binary files /dev/null and b/frontend/src/assets/images/landing-page/blog-featured-image.jpg differ diff --git 
a/frontend/src/assets/images/landing-page/cta-section-bg.png b/frontend/src/assets/images/landing-page/cta-section-bg.png new file mode 100644 index 0000000000..37642a93a6 Binary files /dev/null and b/frontend/src/assets/images/landing-page/cta-section-bg.png differ diff --git a/frontend/src/assets/images/landing-page/hero-image.jpg b/frontend/src/assets/images/landing-page/hero-image.jpg new file mode 100644 index 0000000000..6398a10d62 Binary files /dev/null and b/frontend/src/assets/images/landing-page/hero-image.jpg differ diff --git a/frontend/src/assets/images/landing-page/icon-click.svg b/frontend/src/assets/images/landing-page/icon-click.svg new file mode 100644 index 0000000000..eaf794cc9b --- /dev/null +++ b/frontend/src/assets/images/landing-page/icon-click.svg @@ -0,0 +1,4 @@ + + + + diff --git a/frontend/src/assets/images/landing-page/icon-earn.svg b/frontend/src/assets/images/landing-page/icon-earn.svg new file mode 100644 index 0000000000..62b3202efe --- /dev/null +++ b/frontend/src/assets/images/landing-page/icon-earn.svg @@ -0,0 +1,5 @@ + + + + + diff --git a/frontend/src/assets/images/landing-page/icon-verify.svg b/frontend/src/assets/images/landing-page/icon-verify.svg new file mode 100644 index 0000000000..ddaa2c08da --- /dev/null +++ b/frontend/src/assets/images/landing-page/icon-verify.svg @@ -0,0 +1,11 @@ + + + + + + + + + + + diff --git a/frontend/src/assets/images/landing-page/mina-intro-image.jpg b/frontend/src/assets/images/landing-page/mina-intro-image.jpg new file mode 100644 index 0000000000..ab0777f3fd Binary files /dev/null and b/frontend/src/assets/images/landing-page/mina-intro-image.jpg differ diff --git a/frontend/src/assets/images/landing-page/mina-intro-lettering.png b/frontend/src/assets/images/landing-page/mina-intro-lettering.png new file mode 100644 index 0000000000..4ebebde268 Binary files /dev/null and b/frontend/src/assets/images/landing-page/mina-intro-lettering.png differ diff --git 
a/frontend/src/assets/images/landing-page/tinified/blog-featured-image.jpg b/frontend/src/assets/images/landing-page/tinified/blog-featured-image.jpg new file mode 100644 index 0000000000..45f77587ca Binary files /dev/null and b/frontend/src/assets/images/landing-page/tinified/blog-featured-image.jpg differ diff --git a/frontend/src/assets/images/landing-page/tinified/cta-section-bg.png b/frontend/src/assets/images/landing-page/tinified/cta-section-bg.png new file mode 100644 index 0000000000..b33c9713ab Binary files /dev/null and b/frontend/src/assets/images/landing-page/tinified/cta-section-bg.png differ diff --git a/frontend/src/assets/images/landing-page/tinified/hero-image.jpg b/frontend/src/assets/images/landing-page/tinified/hero-image.jpg new file mode 100644 index 0000000000..7d6f2be276 Binary files /dev/null and b/frontend/src/assets/images/landing-page/tinified/hero-image.jpg differ diff --git a/frontend/src/assets/images/landing-page/tinified/mina-intro-image.jpg b/frontend/src/assets/images/landing-page/tinified/mina-intro-image.jpg new file mode 100644 index 0000000000..de8b494b73 Binary files /dev/null and b/frontend/src/assets/images/landing-page/tinified/mina-intro-image.jpg differ diff --git a/frontend/src/assets/images/landing-page/tinified/mina-intro-lettering.png b/frontend/src/assets/images/landing-page/tinified/mina-intro-lettering.png new file mode 100644 index 0000000000..5dd070b151 Binary files /dev/null and b/frontend/src/assets/images/landing-page/tinified/mina-intro-lettering.png differ diff --git a/frontend/src/assets/images/landing-page/tinified/web-node-bits-bg.png b/frontend/src/assets/images/landing-page/tinified/web-node-bits-bg.png new file mode 100644 index 0000000000..95b81b8804 Binary files /dev/null and b/frontend/src/assets/images/landing-page/tinified/web-node-bits-bg.png differ diff --git a/frontend/src/assets/images/landing-page/web-node-bits-bg.png b/frontend/src/assets/images/landing-page/web-node-bits-bg.png new file 
mode 100644 index 0000000000..4c7ad227ca Binary files /dev/null and b/frontend/src/assets/images/landing-page/web-node-bits-bg.png differ diff --git a/frontend/src/assets/images/logo/logo-text.svg b/frontend/src/assets/images/logo/logo-text.svg index de0c2f13cf..6b6386e607 100644 --- a/frontend/src/assets/images/logo/logo-text.svg +++ b/frontend/src/assets/images/logo/logo-text.svg @@ -1 +1,24 @@ - \ No newline at end of file + + + + + + + + diff --git a/frontend/src/assets/styles/leaderboard-variables.scss b/frontend/src/assets/styles/leaderboard-variables.scss new file mode 100644 index 0000000000..63c4f54c84 --- /dev/null +++ b/frontend/src/assets/styles/leaderboard-variables.scss @@ -0,0 +1,25 @@ +$mina-cta-primary: white; +$mina-base-divider: rgba(45, 45, 45, 0.10); +$white4: rgba(45, 45, 45, 0.4); +$mina-base-secondary: rgba(45, 45, 45, 0.60); +$mina-base-surface: rgba(226, 235, 239, 0.50); +$mina-base-container: #eff4f6; + +$black: rgba(0, 0, 0, 1); +$mina-base-primary: #2d2d2d; +$mina-cta-surface: #2d2d2d; +$black3: rgba(0, 0, 0, 0.20); +$black4: rgba(0, 0, 0, 0.40); +$black5: rgba(96, 96, 96, 1); +$black6: rgba(45, 45, 45, 1); + +$mina-access-primary: #8971fd; +$mina-brand-aqua: #bbfdf8; +$mina-brand-cyan: #31cdea; +$mina-brand-lilac: #e2dfff; +$mina-brand-peony: #f1dceb; +$mina-brand-gray: #d9d9d9; + +$mina-brand-gradient: linear-gradient(272deg, #b6eeff 2.39%, #f7f5ff 25.39%, #d7c4fa 48.39%, #f4c0da 71.4%, #ffc4a4 94.4%); +$mina-brand-gradient-reversed: linear-gradient(272deg, #ffc4a4 2.39%, #f4c0da 25.39%, #d7c4fa 48.39%, #f7f5ff 71.4%, #b6eeff 94.4%); +$mina-brand-gradient2: linear-gradient(45deg, #57d7ff 8%, #fda2ff 60%, #ff833d 100%); diff --git a/frontend/src/environments/environment.ts b/frontend/src/environments/environment.ts index 3fff6b5fe6..27fb7cddd8 100644 --- a/frontend/src/environments/environment.ts +++ b/frontend/src/environments/environment.ts @@ -4,7 +4,7 @@ export const environment: Readonly = { production: false, identifier: 'Dev 
FE', canAddNodes: true, - showWebNodeLandingPage: false, + showWebNodeLandingPage: true, globalConfig: { features: { dashboard: [], @@ -18,6 +18,15 @@ export const environment: Readonly = { benchmarks: ['wallets'], fuzzing: [], }, + firebase: { + apiKey: 'AIzaSyBZzFsHjIbQVbBP0N-KkUsEvHRVU_wwd7g', + authDomain: 'webnode-gtm-test.firebaseapp.com', + projectId: 'webnode-gtm-test', + storageBucket: 'webnode-gtm-test.firebasestorage.app', + messagingSenderId: '1016673359357', + appId: '1:1016673359357:web:bbd2cbf3f031756aec7594', + measurementId: 'G-ENDBL923XT', + }, graphQL: 'https://adonagy.com/graphql', // graphQL: 'https://api.minascan.io/node/devnet/v1/graphql', // graphQL: 'http://65.109.105.40:5000/graphql', diff --git a/frontend/src/index.html b/frontend/src/index.html index 9b2ba7b22e..e627af2266 100644 --- a/frontend/src/index.html +++ b/frontend/src/index.html @@ -49,13 +49,18 @@ + diff --git a/frontend/src/styles.scss b/frontend/src/styles.scss index 1ab4659dc4..beff9d9483 100644 --- a/frontend/src/styles.scss +++ b/frontend/src/styles.scss @@ -1,6 +1,7 @@ @use '@angular/material' as mat; @import 'openmina'; +@import 'leaderboard-variables'; $custom-typography: mat.define-legacy-typography-config($font-family: '"Inter", sans-serif'); // TODO(v15): As of v15 mat.legacy-core no longer includes default typography styles. 
@@ -24,6 +25,8 @@ body { font-weight: 400; color: $base-primary; background-color: var(--base-background, #000000); + -webkit-tap-highlight-color: transparent; + -webkit-tap-highlight-color: rgba(0, 0, 0, 0); } .theme-transition { diff --git a/frontend/tsconfig.json b/frontend/tsconfig.json index 76e7db1fbb..ccc8161a14 100644 --- a/frontend/tsconfig.json +++ b/frontend/tsconfig.json @@ -80,6 +80,9 @@ ], "@fuzzing/*": [ "src/app/features/fuzzing/*" + ], + "@leaderboard/*": [ + "src/app/features/leaderboard/*" ] } }, diff --git a/fuzzer/Cargo.toml b/fuzzer/Cargo.toml index fbfdb531bc..f7ec98841d 100644 --- a/fuzzer/Cargo.toml +++ b/fuzzer/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "openmina-fuzzer" -version = "0.13.0" +version = "0.14.0" edition = "2021" license = "Apache-2.0" diff --git a/ledger/Cargo.toml b/ledger/Cargo.toml index 2b8cae1b01..b13cb156df 100644 --- a/ledger/Cargo.toml +++ b/ledger/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "mina-tree" -version = "0.13.0" +version = "0.14.0" edition = "2021" license = "Apache-2.0" diff --git a/ledger/src/account/account.rs b/ledger/src/account/account.rs index c5a17b4ec0..d566261a77 100644 --- a/ledger/src/account/account.rs +++ b/ledger/src/account/account.rs @@ -49,7 +49,9 @@ use super::common::*; pub const TXN_VERSION_CURRENT: TxnVersion = TxnVersion::from_u32(PROTOCOL_VERSION.transaction.as_u64() as u32); -#[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord, serde::Serialize, serde::Deserialize)] +#[serde(into = "v2::MinaBaseTokenIdStableV2")] +#[serde(try_from = "v2::MinaBaseTokenIdStableV2")] pub struct TokenId(pub Fp); impl std::fmt::Debug for TokenId { diff --git a/ledger/src/account/conv.rs b/ledger/src/account/conv.rs index 8e0a3fab14..b1a853b063 100644 --- a/ledger/src/account/conv.rs +++ b/ledger/src/account/conv.rs @@ -467,6 +467,12 @@ impl From<&TokenId> for mina_p2p_messages::v2::MinaBaseTokenIdStableV2 { } } +impl From for 
mina_p2p_messages::v2::MinaBaseTokenIdStableV2 { + fn from(token_id: TokenId) -> Self { + Self(MinaBaseAccountIdDigestStableV1(token_id.0.into())) + } +} + impl TryFrom<&mina_p2p_messages::v2::MinaBaseTokenIdStableV2> for TokenId { type Error = InvalidBigInt; @@ -477,6 +483,16 @@ impl TryFrom<&mina_p2p_messages::v2::MinaBaseTokenIdStableV2> for TokenId { } } +impl TryFrom for TokenId { + type Error = InvalidBigInt; + + fn try_from( + token_id: mina_p2p_messages::v2::MinaBaseTokenIdStableV2, + ) -> Result { + Ok(Self(token_id.to_field()?)) + } +} + impl binprot::BinProtRead for Account { fn binprot_read(r: &mut R) -> Result where diff --git a/ledger/src/base.rs b/ledger/src/base.rs index 0183b5db03..3df762bbd5 100644 --- a/ledger/src/base.rs +++ b/ledger/src/base.rs @@ -83,6 +83,9 @@ pub trait BaseLedger { /// set of account ids associated with accounts fn accounts(&self) -> HashSet; + /// Get the account id that owns a token. + fn token_owner(&self, token_id: TokenId) -> Option; + /// Get all of the tokens for which a public key has accounts. 
fn tokens(&self, public_key: CompressedPubKey) -> HashSet; diff --git a/ledger/src/database/database.rs b/ledger/src/database/database.rs index da1739bf7f..7759b85465 100644 --- a/ledger/src/database/database.rs +++ b/ledger/src/database/database.rs @@ -152,6 +152,10 @@ impl BaseLedger for Database { self.with(|this| this.accounts()) } + fn token_owner(&self, token_id: TokenId) -> Option { + self.with(|this| this.token_owner(token_id)) + } + fn tokens(&self, public_key: CompressedPubKey) -> HashSet { self.with(|this| this.tokens(public_key)) } @@ -472,7 +476,7 @@ export function performance_now() { console::time_with_label("generate random accounts"); - let mut db = Database::::create(20); + let mut db = Database::::create(20, false); console::log_1(&format!("{:?} accounts in nodejs", NACCOUNTS).into()); diff --git a/ledger/src/database/database_impl.rs b/ledger/src/database/database_impl.rs index 425a9c31df..369a8ce2c3 100644 --- a/ledger/src/database/database_impl.rs +++ b/ledger/src/database/database_impl.rs @@ -6,6 +6,7 @@ use std::{ use mina_hasher::Fp; use mina_signer::CompressedPubKey; +use openmina_core::IS_ARCHIVE; use crate::{ next_uuid, Account, AccountId, AccountIndex, AccountLegacy, Address, AddressIterator, @@ -20,6 +21,7 @@ pub struct DatabaseImpl { accounts: Vec>, pub hashes_matrix: HashesMatrix, id_to_addr: HashMap, + token_owners: Option>, depth: u8, last_location: Option
, naccounts: usize, @@ -33,7 +35,7 @@ impl std::fmt::Debug for DatabaseImpl { // .field("accounts", &self.accounts) .field("hashes_matrix", &self.hashes_matrix) // .field("id_to_addr", &self.id_to_addr) - // .field("token_to_account", &self.token_to_account) + // .field("token_owners", &self.token_owners) // .field("depth", &self.depth) // .field("last_location", &self.last_location) .field("naccounts", &self.naccounts) @@ -54,6 +56,7 @@ impl DatabaseImpl { // root: self.root.clone(), accounts: self.accounts.clone(), id_to_addr: self.id_to_addr.clone(), + token_owners: self.token_owners.clone(), depth: self.depth, last_location: self.last_location.clone(), naccounts: self.naccounts, @@ -88,6 +91,7 @@ impl DatabaseImpl { return Ok(GetOrCreated::Existed(addr)); } + let token_id = account.token_id.clone(); let location = match self.last_location.as_ref() { Some(last) => last.next().ok_or(DatabaseError::OutOfLeaves)?, None => Address::first(self.depth as usize), @@ -102,6 +106,11 @@ impl DatabaseImpl { self.last_location = Some(location.clone()); self.naccounts += 1; + if !token_id.is_default() { + if let Some(token_owners) = self.token_owners.as_mut() { + token_owners.insert(account_id.derive_token_id(), account_id.clone()); + } + } self.id_to_addr.insert(account_id, location.clone()); // self.root_hash.borrow_mut().take(); @@ -330,12 +339,19 @@ impl DatabaseImpl { // std::fs::create_dir_all(&path).ok(); + let token_owners = if IS_ARCHIVE.get().cloned().unwrap_or_default() { + Some(HashMap::with_capacity(NACCOUNTS)) + } else { + None + }; + Self { depth, accounts: Vec::with_capacity(NACCOUNTS), last_location: None, naccounts: 0, id_to_addr: HashMap::with_capacity(NACCOUNTS), + token_owners, uuid, directory: path, hashes_matrix: HashesMatrix::new(depth as usize), @@ -516,6 +532,12 @@ impl BaseLedger for DatabaseImpl { self.id_to_addr.keys().cloned().collect() } + fn token_owner(&self, token_id: TokenId) -> Option { + self.token_owners + .as_ref() + 
.and_then(|token_owners| token_owners.get(&token_id).cloned()) + } + fn tokens(&self, public_key: CompressedPubKey) -> HashSet { let mut set = HashSet::with_capacity(100); @@ -673,10 +695,20 @@ impl BaseLedger for DatabaseImpl { if let Some(account) = self.get(addr.clone()) { let id = account.id(); self.id_to_addr.remove(&id); + if !id.token_id.is_default() { + if let Some(token_owners) = self.token_owners.as_mut() { + token_owners.remove(&id.derive_token_id()); + } + } } else { self.naccounts += 1; } + if !account.token_id.is_default() { + if let Some(token_owners) = self.token_owners.as_mut() { + token_owners.insert(account.id().derive_token_id(), id.clone()); + } + } self.id_to_addr.insert(id, addr.clone()); self.accounts[index] = Some(*account); // root.add_account_on_path(account, addr.iter()); @@ -806,6 +838,11 @@ impl BaseLedger for DatabaseImpl { let id = account.id(); self.id_to_addr.remove(&id); + if !id.token_id.is_default() { + if let Some(token_owners) = self.token_owners.as_mut() { + token_owners.remove(&id.derive_token_id()); + } + } self.naccounts = self .naccounts diff --git a/ledger/src/mask/mask.rs b/ledger/src/mask/mask.rs index f407bfde2d..28aacb4b60 100644 --- a/ledger/src/mask/mask.rs +++ b/ledger/src/mask/mask.rs @@ -7,6 +7,8 @@ use std::{ use mina_hasher::Fp; use mina_signer::CompressedPubKey; +use openmina_core::IS_ARCHIVE; + use crate::{ account::{Account, AccountId, TokenId}, address::Address, @@ -90,16 +92,23 @@ impl Mask { childs: HashMap::with_capacity(2), })), }; - super::tests::add_mask(&uuid); + super::alive_add(&uuid); mask } pub fn new_unattached(depth: usize) -> Self { let uuid = next_uuid(); + let is_archive = IS_ARCHIVE.get().cloned().unwrap_or_default(); + let mask = Self { inner: Arc::new(Mutex::new(MaskImpl::Unattached { owning_account: Default::default(), + token_owners: if is_archive { + Some(Default::default()) + } else { + None + }, id_to_addr: Default::default(), last_location: None, depth: depth as u8, @@ -109,7 
+118,7 @@ impl Mask { })), }; - super::tests::add_mask(&uuid); + super::alive_add(&uuid); mask } @@ -343,6 +352,10 @@ impl BaseLedger for Mask { self.with(|this| this.accounts()) } + fn token_owner(&self, token_id: TokenId) -> Option { + self.with(|this| this.token_owner(token_id)) + } + fn tokens(&self, public_key: CompressedPubKey) -> HashSet { self.with(|this| this.tokens(public_key)) } @@ -532,16 +545,16 @@ mod tests { } // The 3 masks should still be alive - assert!(crate::mask::tests::is_mask_alive(&root_uuid)); - assert!(crate::mask::tests::is_mask_alive(&child1_uuid)); - assert!(crate::mask::tests::is_mask_alive(&child2_uuid)); + assert!(crate::mask::is_alive(&root_uuid)); + assert!(crate::mask::is_alive(&child1_uuid)); + assert!(crate::mask::is_alive(&child2_uuid)); std::mem::drop(child); // Now they are all drop/deallocated - assert!(!crate::mask::tests::is_mask_alive(&root_uuid)); - assert!(!crate::mask::tests::is_mask_alive(&child1_uuid)); - assert!(!crate::mask::tests::is_mask_alive(&child2_uuid)); + assert!(!crate::mask::is_alive(&root_uuid)); + assert!(!crate::mask::is_alive(&child1_uuid)); + assert!(!crate::mask::is_alive(&child2_uuid)); } #[test] diff --git a/ledger/src/mask/mask_impl.rs b/ledger/src/mask/mask_impl.rs index 4f8a20df75..b929df3ae4 100644 --- a/ledger/src/mask/mask_impl.rs +++ b/ledger/src/mask/mask_impl.rs @@ -27,6 +27,7 @@ pub enum MaskImpl { Attached { parent: Mask, owning_account: HashMap, + token_owners: Option>, id_to_addr: HashMap, last_location: Option
, depth: u8, @@ -38,6 +39,7 @@ pub enum MaskImpl { depth: u8, childs: HashMap, owning_account: HashMap, + token_owners: Option>, id_to_addr: HashMap, last_location: Option
, hashes: HashesMatrix, @@ -45,20 +47,18 @@ pub enum MaskImpl { }, } -/// Drop implementation used on tests only ! -#[cfg(test)] impl Drop for MaskImpl { fn drop(&mut self) { if self.uuid().starts_with("temporary") { return; } - super::tests::remove_mask(&self.get_uuid()); + super::alive_remove(&self.get_uuid()); } } impl Clone for MaskImpl { fn clone(&self) -> Self { - match self { + let copy = match self { Self::Root { database, childs } => Self::Root { database: database.clone_db(database.get_directory().unwrap()), childs: childs.clone(), @@ -66,6 +66,7 @@ impl Clone for MaskImpl { Self::Attached { parent, owning_account, + token_owners, id_to_addr, last_location, depth, @@ -75,6 +76,7 @@ impl Clone for MaskImpl { } => Self::Attached { parent: parent.clone(), owning_account: owning_account.clone(), + token_owners: token_owners.clone(), id_to_addr: id_to_addr.clone(), last_location: last_location.clone(), depth: *depth, @@ -86,6 +88,7 @@ impl Clone for MaskImpl { depth, childs, owning_account, + token_owners, id_to_addr, last_location, hashes, @@ -94,12 +97,15 @@ impl Clone for MaskImpl { depth: *depth, childs: childs.clone(), owning_account: owning_account.clone(), + token_owners: token_owners.clone(), id_to_addr: id_to_addr.clone(), last_location: last_location.clone(), hashes: hashes.clone(), uuid: next_uuid(), }, - } + }; + super::alive_add(©.uuid()); + copy } } @@ -115,6 +121,7 @@ impl std::fmt::Debug for MaskImpl { Self::Attached { parent, owning_account, + token_owners, id_to_addr, last_location, depth, @@ -126,6 +133,10 @@ impl std::fmt::Debug for MaskImpl { .field("uuid", uuid) .field("parent", &parent.get_uuid()) .field("owning_account", &owning_account.len()) + .field( + "token_owners", + &token_owners.as_ref().map(|to| to.len()).unwrap_or(0), + ) .field("id_to_addr", &id_to_addr.len()) .field("last_location", last_location) .field("depth", depth) @@ -137,6 +148,7 @@ impl std::fmt::Debug for MaskImpl { depth, childs, owning_account, + token_owners, 
id_to_addr, last_location, hashes, @@ -146,6 +158,10 @@ impl std::fmt::Debug for MaskImpl { .field("depth", depth) .field("childs", &childs.len()) .field("owning_account", &owning_account.len()) + .field( + "token_owners", + &token_owners.as_ref().map(|to| to.len()).unwrap_or(0), + ) .field("id_to_addr", &id_to_addr.len()) .field("last_location", last_location) .field("uuid", uuid) @@ -288,6 +304,7 @@ impl MaskImpl { childs, uuid, owning_account, + token_owners, id_to_addr, last_location, hashes, @@ -297,6 +314,7 @@ impl MaskImpl { *self = Attached { parent, owning_account: take(owning_account), + token_owners: take(token_owners), id_to_addr: take(id_to_addr), last_location: take(last_location), depth: *depth, @@ -366,6 +384,7 @@ impl MaskImpl { Attached { parent, owning_account, + token_owners, id_to_addr, hashes, .. @@ -373,6 +392,9 @@ impl MaskImpl { assert_ne!(parent.get_uuid(), self_uuid); let (accounts, hashes) = { + if let Some(to) = token_owners.as_mut() { + to.clear(); + } id_to_addr.clear(); (std::mem::take(owning_account), hashes.take()) }; @@ -471,6 +493,7 @@ impl MaskImpl { let Self::Attached { parent, owning_account, + token_owners, id_to_addr, last_location, depth, @@ -487,6 +510,7 @@ impl MaskImpl { let owning_account = std::mem::take(owning_account); let depth = std::mem::take(depth); let childs = std::mem::take(childs); + let token_owners = std::mem::take(token_owners); let id_to_addr = std::mem::take(id_to_addr); let last_location = std::mem::take(last_location); let hashes = std::mem::replace(hashes, HashesMatrix::new(depth as usize)); @@ -494,6 +518,7 @@ impl MaskImpl { *self = Self::Unattached { owning_account, + token_owners, id_to_addr, last_location, depth, @@ -773,6 +798,7 @@ impl MaskImpl { Root { .. 
} => todo!(), Unattached { owning_account, + token_owners, id_to_addr, last_location, hashes, @@ -780,6 +806,7 @@ impl MaskImpl { } | Attached { owning_account, + token_owners, id_to_addr, last_location, hashes, @@ -795,7 +822,11 @@ impl MaskImpl { let account_index = addr.to_index(); hashes.invalidate_hashes(account_index); - let _account = owning_account.remove(&account_index).unwrap(); + let account = owning_account.remove(&account_index).unwrap(); + token_owners + .as_mut() + .map(|to| to.remove(&account.id().derive_token_id())) + .unwrap_or_default(); if last_location .as_ref() @@ -832,12 +863,14 @@ impl MaskImpl { Root { database, .. } => database.set(addr, account), Unattached { owning_account, + token_owners, id_to_addr, last_location, .. } | Attached { owning_account, + token_owners, id_to_addr, last_location, .. @@ -846,6 +879,9 @@ impl MaskImpl { owning_account.insert(account_index, *account); id_to_addr.insert(account_id.clone(), addr.clone()); + token_owners + .as_mut() + .map(|to| to.insert(account_id.derive_token_id(), account_id)); if last_location .as_ref() @@ -1087,6 +1123,27 @@ impl BaseLedger for MaskImpl { set } + fn token_owner(&self, token_id: TokenId) -> Option { + let (parent, token_owners) = match self { + Root { database, .. } => return database.token_owner(token_id), + Attached { + parent, + token_owners, + .. + } => (Some(parent), token_owners), + Unattached { token_owners, .. } => (None, token_owners), + }; + + if let Some(account_id) = token_owners + .as_ref() + .and_then(|to| to.get(&token_id).cloned()) + { + return Some(account_id); + }; + + parent.as_ref()?.token_owner(token_id) + } + fn tokens(&self, public_key: CompressedPubKey) -> HashSet { let mut set = HashSet::with_capacity(1024); @@ -1143,6 +1200,7 @@ impl BaseLedger for MaskImpl { Root { database, .. 
} => database.get_or_create_account(account_id, account)?, Unattached { owning_account, + token_owners, id_to_addr, last_location, depth, @@ -1150,6 +1208,7 @@ impl BaseLedger for MaskImpl { } | Attached { owning_account, + token_owners, id_to_addr, last_location, depth, @@ -1164,6 +1223,9 @@ impl BaseLedger for MaskImpl { id_to_addr.insert(account_id.clone(), location.clone()); *last_location = Some(location.clone()); + token_owners + .as_mut() + .map(|to| to.insert(account_id.derive_token_id(), account_id)); owning_account.insert(account_index, account); self.invalidate_hashes(account_index); diff --git a/ledger/src/mask/mod.rs b/ledger/src/mask/mod.rs index 9243da3a59..7397d79d8a 100644 --- a/ledger/src/mask/mod.rs +++ b/ledger/src/mask/mod.rs @@ -5,35 +5,42 @@ mod mask_impl; pub use mask::*; -/// Used for tests, to make sure we don't leak masks -#[cfg(test)] -mod tests { - use once_cell::sync::Lazy; - use std::{collections::HashSet, sync::Mutex}; +use once_cell::sync::Lazy; +use std::{collections::HashSet, sync::Mutex}; - use crate::Uuid; +use crate::Uuid; - static MASK_ALIVE: Lazy>> = - Lazy::new(|| Mutex::new(HashSet::with_capacity(256))); +// block masks(k = 290) + staking/next epoch masks (2) + 2 root masks = 294. 
+static MASKS_ALIVE: Lazy>> = + Lazy::new(|| Mutex::new(HashSet::with_capacity(294))); - pub fn add_mask(uuid: &Uuid) { - MASK_ALIVE.lock().unwrap().insert(uuid.to_string()); - } +fn exec(f: F) -> R +where + F: FnOnce(&mut HashSet) -> R, +{ + f(&mut MASKS_ALIVE.lock().unwrap()) +} - pub fn remove_mask(uuid: &Uuid) { - MASK_ALIVE.lock().unwrap().remove(uuid); - } +pub(super) fn alive_add(uuid: &Uuid) { + exec(|list| { + list.insert(uuid.to_owned()); + }); +} - pub fn is_mask_alive(uuid: &Uuid) -> bool { - MASK_ALIVE.lock().unwrap().contains(uuid) - } +pub(super) fn alive_remove(uuid: &Uuid) { + exec(|list| { + list.remove(uuid); + }); } -#[cfg(not(test))] -mod tests { - use crate::Uuid; +pub fn is_alive(uuid: &Uuid) -> bool { + exec(|list| list.contains(uuid)) +} - pub fn add_mask(_: &Uuid) {} +pub fn alive_len() -> usize { + exec(|list| list.len()) +} - pub fn remove_mask(_: &Uuid) {} +pub fn alive_collect() -> Vec { + exec(|list| list.iter().cloned().collect()) } diff --git a/ledger/src/proofs/transaction.rs b/ledger/src/proofs/transaction.rs index 06172ac40a..9f8a4ba471 100644 --- a/ledger/src/proofs/transaction.rs +++ b/ledger/src/proofs/transaction.rs @@ -4755,7 +4755,12 @@ pub(super) mod tests { } let rsa_private_key = { - let Ok(string) = std::fs::read_to_string("~/.openmina/debug/rsa.priv") else { + let Ok(home) = std::env::var("HOME") else { + eprintln!("$HOME not set"); + return; + }; + let Ok(string) = std::fs::read_to_string(format!("{home}/.openmina/debug/rsa.priv")) + else { eprintln!("Missing private key"); return; }; diff --git a/ledger/src/scan_state/transaction_logic.rs b/ledger/src/scan_state/transaction_logic.rs index e5f1882899..0402a2752d 100644 --- a/ledger/src/scan_state/transaction_logic.rs +++ b/ledger/src/scan_state/transaction_logic.rs @@ -865,7 +865,7 @@ pub mod signed_command { FeeExcess::of_single((self.fee_token(), Signed::::of_unsigned(self.fee()))) } - /// 
https://github.com/MinaProtocol/mina/blob/2ff0292b637684ce0372e7b8e23ec85404dc5091/src/lib/mina_base/signed_command_payload.ml#L354 + /// https://github.com/MinaProtocol/mina/blob/802634fdda92f5cba106fd5f98bd0037c4ec14be/src/lib/mina_base/signed_command_payload.ml#L322 pub fn account_access_statuses( &self, status: &TransactionStatus, @@ -875,7 +875,9 @@ pub mod signed_command { match status { Applied => vec![(self.fee_payer(), Accessed), (self.receiver(), Accessed)], - Failed(_) => vec![(self.receiver(), NotAccessed)], + // Note: The fee payer is always accessed, even if the transaction fails + // https://github.com/MinaProtocol/mina/blob/802634fdda92f5cba106fd5f98bd0037c4ec14be/src/lib/mina_base/signed_command_payload.mli#L205 + Failed(_) => vec![(self.fee_payer(), Accessed), (self.receiver(), NotAccessed)], } } @@ -3301,6 +3303,13 @@ pub mod zkapp_command { self.fold_impl(init, &mut fun) } + pub fn exists<'a, F>(&'a self, mut fun: F) -> bool + where + F: FnMut(&'a AccUpdate) -> bool, + { + self.fold(false, |acc, x| acc || fun(x)) + } + fn map_to_impl( &self, fun: &F, @@ -3577,7 +3586,7 @@ pub mod zkapp_command { pub memo: Memo, } - #[derive(Debug, Clone, PartialEq, Hash, Eq)] + #[derive(Debug, Clone, PartialEq, Hash, Eq, Ord, PartialOrd)] pub enum AccessedOrNot { Accessed, NotAccessed, @@ -3627,16 +3636,15 @@ pub mod zkapp_command { pub fn has_zero_vesting_period(&self) -> bool { self.account_updates - .iter() - .any(|p| match &p.elt.account_update.body.update.timing { + .exists(|account_update| match &account_update.body.update.timing { SetOrKeep::Keep => false, SetOrKeep::Set(Timing { vesting_period, .. 
}) => vesting_period.is_zero(), }) } pub fn is_incompatible_version(&self) -> bool { - self.account_updates.iter().any(|p| { - match &p.elt.account_update.body.update.permissions { + self.account_updates.exists(|account_update| { + match &account_update.body.update.permissions { SetOrKeep::Keep => false, SetOrKeep::Set(Permissions { set_verification_key, diff --git a/ledger/src/staged_ledger/staged_ledger.rs b/ledger/src/staged_ledger/staged_ledger.rs index 8b85342f6e..ec547f4c51 100644 --- a/ledger/src/staged_ledger/staged_ledger.rs +++ b/ledger/src/staged_ledger/staged_ledger.rs @@ -1961,7 +1961,7 @@ impl StagedLedger { } /// https://github.com/MinaProtocol/mina/blob/05c2f73d0f6e4f1341286843814ce02dcb3919e0/src/lib/staged_ledger/staged_ledger.ml#L2024 - fn latest_block_accounts_created(&self, previous_block_state_hash: Fp) -> Vec { + pub fn latest_block_accounts_created(&self, previous_block_state_hash: Fp) -> Vec { use scan_state::transaction_logic::transaction_applied::signed_command_applied::Body; use scan_state::transaction_logic::transaction_applied::CommandApplied; use scan_state::transaction_logic::transaction_applied::Varying; diff --git a/ledger/src/transaction_pool.rs b/ledger/src/transaction_pool.rs index a5a3a85420..14a8ca340f 100644 --- a/ledger/src/transaction_pool.rs +++ b/ledger/src/transaction_pool.rs @@ -241,6 +241,7 @@ pub mod diff { } } + #[derive(Debug)] pub struct Diff { pub list: Vec, } @@ -1257,6 +1258,7 @@ impl IndexedPool { }; // TODO: Should `self.all_by_fee` be a `BTreeSet` instead ? 
+ #[allow(clippy::mutable_key_type)] let bset: BTreeSet<_> = set.iter().collect(); // TODO: Not sure if OCaml compare the same way than we do let min = bset.first().map(|min| (*min).clone()).unwrap(); @@ -1315,13 +1317,13 @@ impl IndexedPool { let current_balance = account .liquid_balance_at_slot(global_slot_since_genesis) .to_amount(); - let first_cmd = queue.front().unwrap(); + let first_cmd = queue.front().cloned().unwrap(); let first_nonce = first_cmd.data.forget_check().applicable_at_nonce(); if !(account.has_permission_to_send() && account.has_permission_to_increment_nonce()) || account.nonce < first_nonce { - let this_dropped = self.remove_with_dependents_exn(first_cmd)?; + let this_dropped = self.remove_with_dependents_exn(&first_cmd)?; dropped.extend(this_dropped); } else { // current_nonce >= first_nonce @@ -1330,13 +1332,13 @@ impl IndexedPool { nonce == account.nonce }); - let keep_queue = match first_applicable_nonce_index { + let retained_for_nonce = match first_applicable_nonce_index { Some(index) => queue.split_off(index), None => Default::default(), }; - let drop_queue = queue; + let dropped_for_nonce = queue; - for cmd in &drop_queue { + for cmd in &dropped_for_nonce { currency_reserved = currency_reserved .checked_sub(¤cy_consumed(&cmd.data.forget_check())?) 
.unwrap(); @@ -1344,28 +1346,41 @@ impl IndexedPool { let (keep_queue, currency_reserved, dropped_for_balance) = Self::drop_until_sufficient_balance( - keep_queue, + retained_for_nonce, currency_reserved, current_balance, )?; - let to_drop: Vec<_> = drop_queue.into_iter().chain(dropped_for_balance).collect(); - - let Some(head) = to_drop.first() else { - continue; - }; - - self.remove_applicable_exn(head); - self.update_remove_all_by_fee_and_hash_and_expiration(to_drop.clone()); + let keeping_prefix = dropped_for_nonce.is_empty(); + let keeping_suffix = dropped_for_balance.is_empty(); + let to_drop: Vec<_> = dropped_for_nonce + .into_iter() + .chain(dropped_for_balance) + .collect(); match keep_queue.front().cloned() { + _ if keeping_prefix && keeping_suffix => { + // Nothing dropped, nothing needs to be updated + } None => { + // We drop the entire queue, first element needs to be removed from + // applicable_by_fee + self.remove_applicable_exn(&first_cmd); self.all_by_sender.remove(&sender); } + Some(_) if keeping_prefix => { + // We drop only transactions from the end of queue, keeping + // the head untouched, no need to update applicable_by_fee + self.all_by_sender + .insert(sender, (keep_queue, currency_reserved)); + } Some(first_kept) => { + // We need to replace old queue head with the new queue head + // in applicable_by_fee let first_kept_unchecked = first_kept.data.forget_check(); self.all_by_sender .insert(sender, (keep_queue, currency_reserved)); + self.remove_applicable_exn(&first_cmd); Self::map_set_insert( &mut self.applicable_by_fee, first_kept_unchecked.fee_per_wu(), @@ -1373,7 +1388,7 @@ impl IndexedPool { ); } } - + self.update_remove_all_by_fee_and_hash_and_expiration(to_drop.clone()); dropped.extend(to_drop); } } @@ -1742,9 +1757,12 @@ impl TransactionPool { let mut new_commands = collect_hashed(new_commands); let mut removed_commands = collect_hashed(removed_commands); + #[allow(clippy::mutable_key_type)] let new_commands_set = 
new_commands.iter().collect::>(); + #[allow(clippy::mutable_key_type)] let removed_commands_set = removed_commands.iter().collect::>(); + #[allow(clippy::mutable_key_type)] let duplicates = new_commands_set .intersection(&removed_commands_set) .map(|cmd| (*cmd).clone()) diff --git a/macros/Cargo.toml b/macros/Cargo.toml index e5df02f18c..7ec7ab3a84 100644 --- a/macros/Cargo.toml +++ b/macros/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "openmina-macros" -version = "0.13.0" +version = "0.14.0" edition = "2021" license = "Apache-2.0" authors = [ "Alexander Koptelov " ] diff --git a/mina-p2p-messages/Cargo.toml b/mina-p2p-messages/Cargo.toml index 7c330f4e93..7fa5643e6c 100644 --- a/mina-p2p-messages/Cargo.toml +++ b/mina-p2p-messages/Cargo.toml @@ -37,6 +37,9 @@ ark-ff = { workspace = true } rsexp = "0.2.3" rsexp-derive = "0.2.3" +graphannis-malloc_size_of = { workspace = true } +graphannis-malloc_size_of_derive = { workspace = true } + [target.'cfg(fuzzing)'.dev-dependencies] fuzzcheck = "0.12.1" diff --git a/mina-p2p-messages/src/array.rs b/mina-p2p-messages/src/array.rs index 7340750145..1a70c818f9 100644 --- a/mina-p2p-messages/src/array.rs +++ b/mina-p2p-messages/src/array.rs @@ -1,6 +1,7 @@ use std::ops::Deref; use binprot::{BinProtRead, BinProtWrite, Nat0}; +use malloc_size_of_derive::MallocSizeOf; use rsexp::OfSexp; use serde::{Deserialize, Serialize}; @@ -17,6 +18,7 @@ use serde::{Deserialize, Serialize}; Deserialize, derive_more::From, derive_more::Into, + MallocSizeOf, )] pub struct ArrayN(Vec); diff --git a/mina-p2p-messages/src/b58.rs b/mina-p2p-messages/src/b58.rs index 6c1d303b42..1af06978eb 100644 --- a/mina-p2p-messages/src/b58.rs +++ b/mina-p2p-messages/src/b58.rs @@ -7,6 +7,7 @@ use std::str::FromStr; use binprot::{BinProtRead, BinProtWrite}; use binprot_derive::{BinProtRead, BinProtWrite}; use derive_more::From; +use malloc_size_of_derive::MallocSizeOf; use serde::{Deserialize, Serialize}; /// Before encoding, data is prepended with the version 
byte. @@ -84,7 +85,7 @@ where /// Wrapper that uses base58check of binprot serialization for the wrapped type /// for human readable serializer. -#[derive(PartialEq, Eq, PartialOrd, Ord)] +#[derive(PartialEq, Eq, PartialOrd, Ord, MallocSizeOf)] pub struct Base58CheckOfBinProt(T, PhantomData); impl Default for Base58CheckOfBinProt diff --git a/mina-p2p-messages/src/bigint.rs b/mina-p2p-messages/src/bigint.rs index 11a8958f44..6fc13c944e 100644 --- a/mina-p2p-messages/src/bigint.rs +++ b/mina-p2p-messages/src/bigint.rs @@ -1,4 +1,5 @@ use ark_ff::{fields::arithmetic::InvalidBigInt, BigInteger256}; +use malloc_size_of::MallocSizeOf; use rsexp::{OfSexp, SexpOf}; use serde::{Deserialize, Serialize}; @@ -13,6 +14,12 @@ impl std::fmt::Debug for BigInt { } } +impl MallocSizeOf for BigInt { + fn size_of(&self, _ops: &mut malloc_size_of::MallocSizeOfOps) -> usize { + 0 + } +} + #[derive(Debug, thiserror::Error)] #[error("Invalid decimal number")] pub struct InvalidDecimalNumber; diff --git a/mina-p2p-messages/src/lib.rs b/mina-p2p-messages/src/lib.rs index 3626fe1d05..32ef3f838b 100644 --- a/mina-p2p-messages/src/lib.rs +++ b/mina-p2p-messages/src/lib.rs @@ -2,6 +2,9 @@ //! //! This crate contains gossip network messages and RPCs. 
+extern crate graphannis_malloc_size_of as malloc_size_of; +extern crate graphannis_malloc_size_of_derive as malloc_size_of_derive; + pub mod array; pub mod bigint; pub mod char; diff --git a/mina-p2p-messages/src/list.rs b/mina-p2p-messages/src/list.rs index 19b0daefa6..81a4151dac 100644 --- a/mina-p2p-messages/src/list.rs +++ b/mina-p2p-messages/src/list.rs @@ -4,6 +4,7 @@ use std::{ }; use binprot::{BinProtRead, BinProtWrite, Nat0}; +use malloc_size_of_derive::MallocSizeOf; use rsexp::OfSexp; pub type Backend = LinkedList; @@ -21,6 +22,7 @@ pub type Backend = LinkedList; serde::Deserialize, derive_more::From, derive_more::Into, + MallocSizeOf, )] pub struct List(Backend); diff --git a/mina-p2p-messages/src/number.rs b/mina-p2p-messages/src/number.rs index 41e6244608..221589e144 100644 --- a/mina-p2p-messages/src/number.rs +++ b/mina-p2p-messages/src/number.rs @@ -1,5 +1,6 @@ use std::{fmt::Display, marker::PhantomData, str::FromStr}; +use malloc_size_of::MallocSizeOf; use serde::{de::Visitor, Deserialize, Serialize}; #[derive( @@ -14,6 +15,12 @@ impl std::fmt::Debug for Number { } } +impl MallocSizeOf for Number { + fn size_of(&self, _ops: &mut malloc_size_of::MallocSizeOfOps) -> usize { + 0 + } +} + pub type Int32 = Number; pub type UInt32 = Number; pub type Int64 = Number; diff --git a/mina-p2p-messages/src/pseq.rs b/mina-p2p-messages/src/pseq.rs index bcc65fdf79..173d90e726 100644 --- a/mina-p2p-messages/src/pseq.rs +++ b/mina-p2p-messages/src/pseq.rs @@ -1,9 +1,10 @@ use std::{array, fmt::Formatter, marker::PhantomData}; use binprot::{BinProtRead, BinProtWrite}; +use malloc_size_of_derive::MallocSizeOf; use rsexp::{OfSexp, SexpOf}; use serde::ser::SerializeTuple; -#[derive(Clone, Debug, PartialEq)] +#[derive(Clone, Debug, PartialEq, MallocSizeOf)] pub struct PaddedSeq(pub [T; N]); impl Default for PaddedSeq diff --git a/mina-p2p-messages/src/rpc.rs b/mina-p2p-messages/src/rpc.rs index 2f777f1315..53eddfe082 100644 --- a/mina-p2p-messages/src/rpc.rs +++ 
b/mina-p2p-messages/src/rpc.rs @@ -31,6 +31,14 @@ macro_rules! mina_rpc { }; } +mina_rpc!( + SendArchiveDiffUnversioned, + "Send_archive_diff", + 0, + v2::ArchiveRpc, + () +); + mina_rpc!( VersionedRpcMenuV1, "__Versioned_rpc.Menu", diff --git a/mina-p2p-messages/src/rpc_kernel.rs b/mina-p2p-messages/src/rpc_kernel.rs index bf0b035acc..c0791fa114 100644 --- a/mina-p2p-messages/src/rpc_kernel.rs +++ b/mina-p2p-messages/src/rpc_kernel.rs @@ -3,6 +3,7 @@ use std::io::Read; use binprot::{BinProtRead, BinProtWrite}; use binprot_derive::{BinProtRead, BinProtWrite}; +use malloc_size_of_derive::MallocSizeOf; use serde::{Deserialize, Serialize}; use crate::versioned::Ver; @@ -251,14 +252,18 @@ impl From> for Message { } } -#[derive(Clone, Debug, Serialize, Deserialize, BinProtRead, BinProtWrite, PartialEq, Eq)] +#[derive( + Clone, Debug, Serialize, Deserialize, BinProtRead, BinProtWrite, PartialEq, Eq, MallocSizeOf, +)] pub struct QueryHeader { pub tag: BinprotTag, pub version: Ver, pub id: QueryID, } -#[derive(Clone, Debug, Serialize, Deserialize, BinProtRead, BinProtWrite, PartialEq, Eq)] +#[derive( + Clone, Debug, Serialize, Deserialize, BinProtRead, BinProtWrite, PartialEq, Eq, MallocSizeOf, +)] pub struct ResponseHeader { pub id: QueryID, } diff --git a/mina-p2p-messages/src/string.rs b/mina-p2p-messages/src/string.rs index 06f97cca4d..544e70b453 100644 --- a/mina-p2p-messages/src/string.rs +++ b/mina-p2p-messages/src/string.rs @@ -1,6 +1,7 @@ use std::marker::PhantomData; use binprot::Nat0; +use malloc_size_of_derive::MallocSizeOf; use serde::{de::Visitor, Deserialize, Serialize}; use serde_bytes; @@ -19,7 +20,7 @@ pub type ZkAppUri = BoundedCharString; pub type TokenSymbol = BoundedCharString; /// String of bytes. 
-#[derive(Clone, Default, PartialEq, Eq, PartialOrd, Ord)] +#[derive(Clone, Default, PartialEq, Eq, PartialOrd, Ord, MallocSizeOf)] pub struct BoundedByteString(pub Vec, PhantomData<[u8; MAX_LENGTH]>); impl std::fmt::Debug for BoundedByteString { @@ -141,7 +142,7 @@ impl binprot::BinProtWrite for BoundedByteString(Vec, PhantomData<[u8; MAX_LENGTH]>); impl std::fmt::Debug for BoundedCharString { diff --git a/mina-p2p-messages/src/v2/generated.rs b/mina-p2p-messages/src/v2/generated.rs index b24dc00834..43797615b5 100644 --- a/mina-p2p-messages/src/v2/generated.rs +++ b/mina-p2p-messages/src/v2/generated.rs @@ -1,5 +1,7 @@ use binprot_derive::{BinProtRead, BinProtWrite}; use derive_more::Deref; +use malloc_size_of::MallocSizeOf; +use malloc_size_of_derive::MallocSizeOf; use openmina_macros::SerdeYojsonEnum; use rsexp_derive::{OfSexp, SexpOf}; use serde::{Deserialize, Serialize}; @@ -13,7 +15,9 @@ use super::manual::*; /// /// Gid: `1102` /// Location: [src/lib/mina_block/block.ml:8:4](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/mina_block/block.ml#L8) -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite)] +#[derive( + Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, MallocSizeOf, +)] pub struct MinaBlockBlockStableV2 { pub header: MinaBlockHeaderStableV2, pub body: StagedLedgerDiffBodyStableV1, @@ -140,7 +144,9 @@ pub struct MinaBasePendingCoinbaseStableV2 { /// Gid: `1000` /// Location: [src/lib/mina_state/protocol_state.ml:38:8](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/mina_state/protocol_state.ml#L38) /// Args: StateHash , MinaStateProtocolStateBodyValueStableV2 -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite)] +#[derive( + Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, MallocSizeOf, +)] pub struct MinaStateProtocolStateValueStableV2 { pub previous_state_hash: StateHash, pub body: 
MinaStateProtocolStateBodyValueStableV2, @@ -187,7 +193,9 @@ pub enum MinaLedgerSyncLedgerAnswerStableV2 { /// Gid: `983` /// Location: [src/lib/consensus/proof_of_stake.ml:1723:12](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/consensus/proof_of_stake.ml#L1723) /// Args: UnsignedExtendedUInt32StableV1 , ConsensusVrfOutputTruncatedStableV1 , CurrencyAmountStableV1 , ConsensusGlobalSlotStableV1 , MinaNumbersGlobalSlotSinceGenesisMStableV1 , ConsensusProofOfStakeDataEpochDataStakingValueVersionedValueStableV1 , ConsensusProofOfStakeDataEpochDataNextValueVersionedValueStableV1 , bool , NonZeroCurvePoint -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite)] +#[derive( + Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, MallocSizeOf, +)] pub struct ConsensusProofOfStakeDataConsensusStateValueStableV2 { pub blockchain_length: UnsignedExtendedUInt32StableV1, pub epoch_count: UnsignedExtendedUInt32StableV1, @@ -524,6 +532,12 @@ pub struct PicklesProofProofsVerified2ReprStableV2PrevEvals { pub ft_eval1: crate::bigint::BigInt, } +impl MallocSizeOf for PicklesProofProofsVerified2ReprStableV2PrevEvals { + fn size_of(&self, _ops: &mut malloc_size_of::MallocSizeOfOps) -> usize { + 0 + } +} + /// Derived name: `Pickles__Wrap_wire_proof.Stable.V1.bulletproof` /// /// Gid: `468` @@ -548,7 +562,9 @@ pub struct PicklesWrapWireProofStableV1Bulletproof { /// Gid: `476` /// Location: [src/lib/pickles_types/plonk_verification_key_evals.ml:7:4](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/pickles_types/plonk_verification_key_evals.ml#L7) /// Args: (crate :: bigint :: BigInt , crate :: bigint :: BigInt ,) -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite)] +#[derive( + Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, MallocSizeOf, +)] pub struct MinaBaseVerificationKeyWireStableV1WrapIndex { pub sigma_comm: PaddedSeq<(crate::bigint::BigInt, 
crate::bigint::BigInt), 7>, pub coefficients_comm: PaddedSeq<(crate::bigint::BigInt, crate::bigint::BigInt), 15>, @@ -566,7 +582,16 @@ pub struct MinaBaseVerificationKeyWireStableV1WrapIndex { /// Location: [src/lib/crypto/kimchi_backend/common/scalar_challenge.ml:6:4](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/crypto/kimchi_backend/common/scalar_challenge.ml#L6) /// Args: PaddedSeq < LimbVectorConstantHex64StableV1 , 2 > #[derive( - Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, SexpOf, OfSexp, + Clone, + Debug, + PartialEq, + Serialize, + Deserialize, + BinProtRead, + BinProtWrite, + SexpOf, + OfSexp, + MallocSizeOf, )] pub struct PicklesReducedMessagesForNextProofOverSameFieldWrapChallengesVectorStableV2AChallenge { pub inner: PaddedSeq, @@ -607,7 +632,7 @@ pub enum SnarkWorkerWorkerRpcsVersionedSubmitWorkV2TQueryMetrics { /// Gid: `508` /// Location: [src/lib/one_or_two/one_or_two.ml:7:4](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/one_or_two/one_or_two.ml#L7) /// Args: LedgerProofProdStableV2 -#[derive(Clone, Debug, PartialEq, SerdeYojsonEnum, BinProtRead, BinProtWrite)] +#[derive(Clone, Debug, PartialEq, SerdeYojsonEnum, BinProtRead, BinProtWrite, MallocSizeOf)] #[polymorphic_variant] pub enum TransactionSnarkWorkTStableV2Proofs { #[allow(non_camel_case_types)] @@ -639,7 +664,9 @@ pub enum SnarkWorkerWorkerRpcsVersionedGetWorkV2TResponseA0Instances { /// /// Gid: `514` /// Location: [src/lib/pickles_base/proofs_verified.ml:8:4](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/pickles_base/proofs_verified.ml#L8) -#[derive(Clone, Debug, PartialEq, SerdeYojsonEnum, BinProtRead, BinProtWrite, SexpOf)] +#[derive( + Clone, Debug, PartialEq, SerdeYojsonEnum, BinProtRead, BinProtWrite, SexpOf, MallocSizeOf, +)] pub enum PicklesBaseProofsVerifiedStableV1 { N0, N1, @@ -654,7 +681,9 @@ pub enum PicklesBaseProofsVerifiedStableV1 { /// /// Gid: `125` /// Location: 
[src/int64.ml:6:6](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/int64.ml#L6) -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, Deref)] +#[derive( + Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, Deref, MallocSizeOf, +)] pub struct LimbVectorConstantHex64StableV1(pub crate::number::UInt64); /// **OCaml name**: `Composition_types__Branch_data.Make_str.Domain_log2.Stable.V1` @@ -677,10 +706,20 @@ pub struct CompositionTypesBranchDataDomainLog2StableV1(pub crate::char::Char); /// Gid: `525` /// Location: [src/lib/pickles/composition_types/branch_data.ml:51:6](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/pickles/composition_types/branch_data.ml#L51) #[derive( - Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, SexpOf, OfSexp, + Clone, + Debug, + PartialEq, + Serialize, + Deserialize, + BinProtRead, + BinProtWrite, + SexpOf, + OfSexp, + MallocSizeOf, )] pub struct CompositionTypesBranchDataStableV1 { pub proofs_verified: PicklesBaseProofsVerifiedStableV1, + #[ignore_malloc_size_of = "primitive"] pub domain_log2: CompositionTypesBranchDataDomainLog2StableV1, } @@ -690,7 +729,16 @@ pub struct CompositionTypesBranchDataStableV1 { /// Location: [src/lib/pickles/composition_types/bulletproof_challenge.ml:4:4](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/pickles/composition_types/bulletproof_challenge.ml#L4) /// Args: PicklesReducedMessagesForNextProofOverSameFieldWrapChallengesVectorStableV2AChallenge #[derive( - Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, SexpOf, OfSexp, + Clone, + Debug, + PartialEq, + Serialize, + Deserialize, + BinProtRead, + BinProtWrite, + SexpOf, + OfSexp, + MallocSizeOf, )] pub struct PicklesReducedMessagesForNextProofOverSameFieldWrapChallengesVectorStableV2A { pub prechallenge: @@ -701,7 +749,9 @@ pub struct PicklesReducedMessagesForNextProofOverSameFieldWrapChallengesVectorSt /// /// 
Gid: `527` /// Location: [src/lib/pickles/composition_types/digest.ml:13:6](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/pickles/composition_types/digest.ml#L13) -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, Deref)] +#[derive( + Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, Deref, MallocSizeOf, +)] pub struct CompositionTypesDigestConstantStableV1( pub PaddedSeq, ); @@ -712,7 +762,16 @@ pub struct CompositionTypesDigestConstantStableV1( /// Location: [src/lib/pickles/composition_types/composition_types.ml:45:14](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/pickles/composition_types/composition_types.ml#L45) /// Args: PaddedSeq < LimbVectorConstantHex64StableV1 , 2 > , PicklesReducedMessagesForNextProofOverSameFieldWrapChallengesVectorStableV2AChallenge , bool #[derive( - Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, SexpOf, OfSexp, + Clone, + Debug, + PartialEq, + Serialize, + Deserialize, + BinProtRead, + BinProtWrite, + SexpOf, + OfSexp, + MallocSizeOf, )] pub struct PicklesProofProofsVerified2ReprStableV2StatementProofStateDeferredValuesPlonk { pub alpha: @@ -723,6 +782,7 @@ pub struct PicklesProofProofsVerified2ReprStableV2StatementProofStateDeferredVal pub joint_combiner: Option< PicklesReducedMessagesForNextProofOverSameFieldWrapChallengesVectorStableV2AChallenge, >, + #[ignore_malloc_size_of = "primitive"] pub feature_flags: PicklesProofProofsVerified2ReprStableV2StatementProofStateDeferredValuesPlonkFeatureFlags, } @@ -733,7 +793,16 @@ pub struct PicklesProofProofsVerified2ReprStableV2StatementProofStateDeferredVal /// Location: [src/lib/pickles/composition_types/composition_types.ml:275:12](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/pickles/composition_types/composition_types.ml#L275) /// Args: PaddedSeq < LimbVectorConstantHex64StableV1 , 2 > , 
PicklesReducedMessagesForNextProofOverSameFieldWrapChallengesVectorStableV2AChallenge , PicklesProofProofsVerified2ReprStableV2StatementFp , bool , PaddedSeq < PicklesReducedMessagesForNextProofOverSameFieldWrapChallengesVectorStableV2A , 16 > , CompositionTypesBranchDataStableV1 #[derive( - Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, SexpOf, OfSexp, + Clone, + Debug, + PartialEq, + Serialize, + Deserialize, + BinProtRead, + BinProtWrite, + SexpOf, + OfSexp, + MallocSizeOf, )] pub struct PicklesProofProofsVerified2ReprStableV2StatementProofStateDeferredValues { pub plonk: PicklesProofProofsVerified2ReprStableV2StatementProofStateDeferredValuesPlonk, @@ -748,7 +817,16 @@ pub struct PicklesProofProofsVerified2ReprStableV2StatementProofStateDeferredVal /// Location: [src/lib/pickles/composition_types/composition_types.ml:397:10](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/pickles/composition_types/composition_types.ml#L397) /// Args: (crate :: bigint :: BigInt , crate :: bigint :: BigInt ,) , PaddedSeq < PicklesReducedMessagesForNextProofOverSameFieldWrapChallengesVectorStableV2 , 2 > #[derive( - Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, SexpOf, OfSexp, + Clone, + Debug, + PartialEq, + Serialize, + Deserialize, + BinProtRead, + BinProtWrite, + SexpOf, + OfSexp, + MallocSizeOf, )] pub struct PicklesProofProofsVerified2ReprStableV2MessagesForNextWrapProof { pub challenge_polynomial_commitment: (crate::bigint::BigInt, crate::bigint::BigInt), @@ -762,7 +840,16 @@ pub struct PicklesProofProofsVerified2ReprStableV2MessagesForNextWrapProof { /// Location: [src/lib/pickles/composition_types/composition_types.ml:466:10](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/pickles/composition_types/composition_types.ml#L466) /// Args: PaddedSeq < LimbVectorConstantHex64StableV1 , 2 > , PicklesReducedMessagesForNextProofOverSameFieldWrapChallengesVectorStableV2AChallenge , 
PicklesProofProofsVerified2ReprStableV2StatementFp , bool , PicklesProofProofsVerified2ReprStableV2MessagesForNextWrapProof , CompositionTypesDigestConstantStableV1 , PaddedSeq < PicklesReducedMessagesForNextProofOverSameFieldWrapChallengesVectorStableV2A , 16 > , CompositionTypesBranchDataStableV1 #[derive( - Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, SexpOf, OfSexp, + Clone, + Debug, + PartialEq, + Serialize, + Deserialize, + BinProtRead, + BinProtWrite, + SexpOf, + OfSexp, + MallocSizeOf, )] pub struct PicklesProofProofsVerified2ReprStableV2StatementProofState { pub deferred_values: PicklesProofProofsVerified2ReprStableV2StatementProofStateDeferredValues, @@ -777,7 +864,16 @@ pub struct PicklesProofProofsVerified2ReprStableV2StatementProofState { /// Location: [src/lib/pickles/composition_types/composition_types.ml:714:10](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/pickles/composition_types/composition_types.ml#L714) /// Args: PaddedSeq < LimbVectorConstantHex64StableV1 , 2 > , PicklesReducedMessagesForNextProofOverSameFieldWrapChallengesVectorStableV2AChallenge , PicklesProofProofsVerified2ReprStableV2StatementFp , bool , PicklesProofProofsVerified2ReprStableV2MessagesForNextWrapProof , CompositionTypesDigestConstantStableV1 , PicklesProofProofsVerified2ReprStableV2MessagesForNextStepProof , PaddedSeq < PicklesReducedMessagesForNextProofOverSameFieldWrapChallengesVectorStableV2A , 16 > , CompositionTypesBranchDataStableV1 #[derive( - Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, SexpOf, OfSexp, + Clone, + Debug, + PartialEq, + Serialize, + Deserialize, + BinProtRead, + BinProtWrite, + SexpOf, + OfSexp, + MallocSizeOf, )] pub struct PicklesProofProofsVerified2ReprStableV2Statement { pub proof_state: PicklesProofProofsVerified2ReprStableV2StatementProofState, @@ -832,13 +928,28 @@ pub struct PicklesWrapWireProofStableV1 { pub bulletproof: PicklesWrapWireProofStableV1Bulletproof, } 
+impl MallocSizeOf for PicklesWrapWireProofStableV1 { + fn size_of(&self, _ops: &mut malloc_size_of::MallocSizeOfOps) -> usize { + 0 + } +} + /// Derived name: `Pickles__Proof.Proofs_verified_2.Repr.Stable.V2.messages_for_next_step_proof` /// /// Gid: `542` /// Location: [src/lib/pickles/reduced_messages_for_next_proof_over_same_field.ml:16:6](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/pickles/reduced_messages_for_next_proof_over_same_field.ml#L16) /// Args: () , List < (crate :: bigint :: BigInt , crate :: bigint :: BigInt ,) > , List < PaddedSeq < PicklesReducedMessagesForNextProofOverSameFieldWrapChallengesVectorStableV2A , 16 > > #[derive( - Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, SexpOf, OfSexp, + Clone, + Debug, + PartialEq, + Serialize, + Deserialize, + BinProtRead, + BinProtWrite, + SexpOf, + OfSexp, + MallocSizeOf, )] pub struct PicklesProofProofsVerified2ReprStableV2MessagesForNextStepProof { #[serde(deserialize_with = "always_unit")] @@ -858,7 +969,9 @@ pub struct PicklesProofProofsVerified2ReprStableV2MessagesForNextStepProof { /// Gid: `488` /// Location: [src/lib/crypto/kimchi_backend/pasta/basic/kimchi_pasta_basic.ml:32:8](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/crypto/kimchi_backend/pasta/basic/kimchi_pasta_basic.ml#L32) /// Args: PicklesReducedMessagesForNextProofOverSameFieldWrapChallengesVectorStableV2A -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, Deref)] +#[derive( + Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, Deref, MallocSizeOf, +)] pub struct PicklesReducedMessagesForNextProofOverSameFieldWrapChallengesVectorStableV2( pub PaddedSeq, ); @@ -872,7 +985,9 @@ pub struct PicklesReducedMessagesForNextProofOverSameFieldWrapChallengesVectorSt /// Gid: `519` /// Location: 
[src/lib/pickles_base/side_loaded_verification_key.ml:130:6](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/pickles_base/side_loaded_verification_key.ml#L130) /// Args: (crate :: bigint :: BigInt , crate :: bigint :: BigInt ,) -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite)] +#[derive( + Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, MallocSizeOf, +)] pub struct MinaBaseVerificationKeyWireStableV1 { pub max_proofs_verified: PicklesBaseProofsVerifiedStableV1, pub actual_wrap_domain_size: PicklesBaseProofsVerifiedStableV1, @@ -889,7 +1004,16 @@ pub struct MinaBaseVerificationKeyWireStableV1 { /// Location: [src/lib/pickles/proof.ml:47:8](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/pickles/proof.ml#L47) /// Args: PicklesProofProofsVerified2ReprStableV2MessagesForNextWrapProof , PicklesProofProofsVerified2ReprStableV2MessagesForNextStepProof #[derive( - Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, SexpOf, OfSexp, + Clone, + Debug, + PartialEq, + Serialize, + Deserialize, + BinProtRead, + BinProtWrite, + SexpOf, + OfSexp, + MallocSizeOf, )] pub struct PicklesProofProofsVerified2ReprStableV2 { pub statement: PicklesProofProofsVerified2ReprStableV2Statement, @@ -906,7 +1030,7 @@ pub struct PicklesProofProofsVerified2ReprStableV2 { /// Gid: `546` /// Location: [src/lib/pickles/proof.ml:47:8](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/pickles/proof.ml#L47) /// Args: PicklesProofProofsVerified2ReprStableV2MessagesForNextWrapProof , PicklesProofProofsVerified2ReprStableV2MessagesForNextStepProof -#[derive(Clone, Debug, PartialEq, BinProtRead, BinProtWrite, SexpOf, OfSexp)] +#[derive(Clone, Debug, PartialEq, BinProtRead, BinProtWrite, SexpOf, OfSexp, MallocSizeOf)] pub struct PicklesProofProofsVerifiedMaxStableV2 { pub statement: PicklesProofProofsVerified2ReprStableV2Statement, pub prev_evals: 
PicklesProofProofsVerified2ReprStableV2PrevEvals, @@ -923,7 +1047,17 @@ pub struct PicklesProofProofsVerifiedMaxStableV2 { /// Location: [src/lib/non_zero_curve_point/compressed_poly.ml:13:6](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/non_zero_curve_point/compressed_poly.ml#L13) /// Args: crate :: bigint :: BigInt , bool #[derive( - Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize, BinProtRead, BinProtWrite, + Clone, + Debug, + PartialEq, + Eq, + PartialOrd, + Ord, + Serialize, + Deserialize, + BinProtRead, + BinProtWrite, + MallocSizeOf, )] pub struct NonZeroCurvePointUncompressedStableV1 { pub x: crate::bigint::BigInt, @@ -957,7 +1091,16 @@ pub struct UnsignedExtendedUInt64Int64ForVersionTagsStableV1(pub crate::number:: /// Gid: `119` /// Location: [src/int32.ml:6:6](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/int32.ml#L6) #[derive( - Clone, Copy, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, Deref, Default, + Clone, + Copy, + PartialEq, + Serialize, + Deserialize, + BinProtRead, + BinProtWrite, + Deref, + Default, + MallocSizeOf, )] pub struct UnsignedExtendedUInt32StableV1(pub crate::number::UInt32); @@ -994,7 +1137,7 @@ pub enum MinaNumbersGlobalSlotSpanStableV1 { /// /// Gid: `614` /// Location: [src/lib/mina_numbers/global_slot_since_genesis.ml:27:8](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/mina_numbers/global_slot_since_genesis.ml#L27) -#[derive(Clone, Debug, PartialEq, Serialize, BinProtRead, BinProtWrite)] +#[derive(Clone, Debug, PartialEq, Serialize, BinProtRead, BinProtWrite, MallocSizeOf)] #[serde(untagged)] pub enum MinaNumbersGlobalSlotSinceGenesisMStableV1 { SinceGenesis(UnsignedExtendedUInt32StableV1), @@ -1004,7 +1147,7 @@ pub enum MinaNumbersGlobalSlotSinceGenesisMStableV1 { /// /// Gid: `620` /// Location: 
[src/lib/mina_numbers/global_slot_since_hard_fork.ml:27:8](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/mina_numbers/global_slot_since_hard_fork.ml#L27) -#[derive(Clone, Debug, PartialEq, Serialize, BinProtRead, BinProtWrite)] +#[derive(Clone, Debug, PartialEq, Serialize, BinProtRead, BinProtWrite, MallocSizeOf)] #[serde(untagged)] pub enum MinaNumbersGlobalSlotSinceHardForkMStableV1 { SinceHardFork(UnsignedExtendedUInt32StableV1), @@ -1014,7 +1157,7 @@ pub enum MinaNumbersGlobalSlotSinceHardForkMStableV1 { /// /// Gid: `636` /// Location: [src/lib/sgn/sgn.ml:9:4](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/sgn/sgn.ml#L9) -#[derive(Clone, Debug, PartialEq, BinProtRead, BinProtWrite)] +#[derive(Clone, Debug, PartialEq, BinProtRead, BinProtWrite, MallocSizeOf)] pub enum SgnStableV1 { Pos, Neg, @@ -1038,6 +1181,12 @@ pub struct MinaStateBlockchainStateValueStableV2SignedAmount { #[derive(Clone, Debug, PartialEq, BinProtRead, BinProtWrite, Deref)] pub struct CurrencyFeeStableV1(pub UnsignedExtendedUInt64Int64ForVersionTagsStableV1); +impl MallocSizeOf for CurrencyFeeStableV1 { + fn size_of(&self, _ops: &mut malloc_size_of::MallocSizeOfOps) -> usize { + 0 + } +} + /// **OCaml name**: `Currency.Make_str.Amount.Make_str.Stable.V1` /// /// Gid: `641` @@ -1045,11 +1194,19 @@ pub struct CurrencyFeeStableV1(pub UnsignedExtendedUInt64Int64ForVersionTagsStab #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, Deref)] pub struct CurrencyAmountStableV1(pub UnsignedExtendedUInt64Int64ForVersionTagsStableV1); +impl MallocSizeOf for CurrencyAmountStableV1 { + fn size_of(&self, _ops: &mut malloc_size_of::MallocSizeOfOps) -> usize { + 0 + } +} + /// **OCaml name**: `Currency.Make_str.Balance.Stable.V1` /// /// Gid: `644` /// Location: [src/lib/currency/currency.ml:1138:8](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/currency/currency.ml#L1138) -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, 
BinProtRead, BinProtWrite, Deref)] +#[derive( + Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, Deref, MallocSizeOf, +)] pub struct CurrencyBalanceStableV1(pub CurrencyAmountStableV1); /// Derived name: `Mina_base__Zkapp_command.Verifiable.Stable.V1.account_updates.data.a` @@ -1079,6 +1236,7 @@ pub struct MinaBaseZkappCommandVerifiableStableV1AccountUpdatesDataA { BinProtRead, BinProtWrite, Deref, + MallocSizeOf, )] pub struct DataHashLibStateHashStableV1(pub crate::bigint::BigInt); @@ -1127,7 +1285,20 @@ pub struct BlockTimeTimeStableV1(pub UnsignedExtendedUInt64Int64ForVersionTagsSt /// /// Gid: `664` /// Location: [src/lib/mina_base/account_id.ml:64:8](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/mina_base/account_id.ml#L64) -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, Deref)] +#[derive( + Clone, + Debug, + PartialEq, + Serialize, + Deserialize, + BinProtRead, + BinProtWrite, + Deref, + PartialOrd, + Ord, + Eq, + MallocSizeOf, +)] pub struct MinaBaseAccountIdDigestStableV1(pub crate::bigint::BigInt); /// **OCaml name**: `Mina_base__Account_id.Make_str.Stable.V2` @@ -1167,14 +1338,16 @@ pub enum MinaBaseAccountTimingStableV2 { /// Gid: `676` /// Location: [src/lib/mina_base/signature.ml:12:6](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/mina_base/signature.ml#L12) /// Args: crate :: bigint :: BigInt , crate :: bigint :: BigInt -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite)] +#[derive( + Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, MallocSizeOf, +)] pub struct MinaBaseSignatureStableV1(pub crate::bigint::BigInt, pub crate::bigint::BigInt); /// **OCaml name**: `Mina_base__Control.Stable.V2` /// /// Gid: `683` /// Location: [src/lib/mina_base/control.ml:11:4](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/mina_base/control.ml#L11) -#[derive(Clone, Debug, PartialEq, SerdeYojsonEnum, 
BinProtRead, BinProtWrite)] +#[derive(Clone, Debug, PartialEq, SerdeYojsonEnum, BinProtRead, BinProtWrite, MallocSizeOf)] pub enum MinaBaseControlStableV2 { Proof(Box), Signature(Signature), @@ -1185,7 +1358,19 @@ pub enum MinaBaseControlStableV2 { /// /// Gid: `687` /// Location: [src/lib/mina_base/token_id.ml:8:4](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/mina_base/token_id.ml#L8) -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, Deref)] +#[derive( + Clone, + Debug, + PartialEq, + Serialize, + Deserialize, + BinProtRead, + BinProtWrite, + Deref, + PartialOrd, + Ord, + Eq, +)] pub struct MinaBaseTokenIdStableV2(pub MinaBaseAccountIdDigestStableV1); /// **OCaml name**: `Mina_base__Payment_payload.Stable.V2` @@ -1197,7 +1382,9 @@ pub struct MinaBaseTokenIdStableV2(pub MinaBaseAccountIdDigestStableV1); /// Gid: `693` /// Location: [src/lib/mina_base/payment_payload.ml:14:6](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/mina_base/payment_payload.ml#L14) /// Args: NonZeroCurvePoint , CurrencyAmountStableV1 -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite)] +#[derive( + Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, MallocSizeOf, +)] pub struct MinaBasePaymentPayloadStableV2 { pub receiver_pk: NonZeroCurvePoint, pub amount: CurrencyAmountStableV1, @@ -1219,6 +1406,7 @@ pub struct MinaBasePaymentPayloadStableV2 { BinProtRead, BinProtWrite, Deref, + MallocSizeOf, )] pub struct MinaBaseLedgerHash0StableV1(pub crate::bigint::BigInt); @@ -1273,11 +1461,17 @@ pub struct MinaBasePermissionsStableV2 { pub set_timing: MinaBasePermissionsAuthRequiredStableV2, } +impl MallocSizeOf for MinaBasePermissionsStableV2 { + fn size_of(&self, _ops: &mut malloc_size_of::MallocSizeOfOps) -> usize { + 0 + } +} + /// **OCaml name**: `Mina_base__Stake_delegation.Stable.V2` /// /// Gid: `712` /// Location: 
[src/lib/mina_base/stake_delegation.ml:11:4](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/mina_base/stake_delegation.ml#L11) -#[derive(Clone, Debug, PartialEq, SerdeYojsonEnum, BinProtRead, BinProtWrite)] +#[derive(Clone, Debug, PartialEq, SerdeYojsonEnum, BinProtRead, BinProtWrite, MallocSizeOf)] pub enum MinaBaseStakeDelegationStableV2 { SetDelegate { new_delegate: NonZeroCurvePoint }, } @@ -1337,6 +1531,12 @@ pub enum MinaBaseTransactionStatusFailureStableV2 { Cancelled, } +impl MallocSizeOf for MinaBaseTransactionStatusFailureStableV2 { + fn size_of(&self, _ops: &mut malloc_size_of::MallocSizeOfOps) -> usize { + 0 + } +} + /// **OCaml name**: `Mina_base__Transaction_status.Failure.Collection.Stable.V1` /// /// Gid: `720` @@ -1351,7 +1551,9 @@ pub enum MinaBaseTransactionStatusFailureStableV2 { /// Gid: `50` /// Location: [src/list0.ml:6:0](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/list0.ml#L6) /// Args: List < MinaBaseTransactionStatusFailureStableV2 > -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, Deref)] +#[derive( + Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, Deref, MallocSizeOf, +)] pub struct MinaBaseTransactionStatusFailureCollectionStableV1( pub List>, ); @@ -1360,7 +1562,7 @@ pub struct MinaBaseTransactionStatusFailureCollectionStableV1( /// /// Gid: `721` /// Location: [src/lib/mina_base/transaction_status.ml:476:4](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/mina_base/transaction_status.ml#L476) -#[derive(Clone, Debug, PartialEq, SerdeYojsonEnum, BinProtRead, BinProtWrite)] +#[derive(Clone, Debug, PartialEq, SerdeYojsonEnum, BinProtRead, BinProtWrite, MallocSizeOf)] pub enum MinaBaseTransactionStatusStableV2 { Applied, Failed(MinaBaseTransactionStatusFailureCollectionStableV1), @@ -1375,11 +1577,14 @@ pub enum MinaBaseTransactionStatusStableV2 { /// Gid: `722` /// Location: 
[src/lib/mina_base/signed_command_payload.ml:41:8](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/mina_base/signed_command_payload.ml#L41) /// Args: CurrencyFeeStableV1 , NonZeroCurvePoint , UnsignedExtendedUInt32StableV1 , MinaNumbersGlobalSlotSinceGenesisMStableV1 , MinaBaseSignedCommandMemoStableV1 -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite)] +#[derive( + Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, MallocSizeOf, +)] pub struct MinaBaseSignedCommandPayloadCommonStableV2 { pub fee: CurrencyFeeStableV1, pub fee_payer_pk: NonZeroCurvePoint, pub nonce: UnsignedExtendedUInt32StableV1, + #[ignore_malloc_size_of = "primitive"] pub valid_until: MinaNumbersGlobalSlotSinceGenesisMStableV1, pub memo: MinaBaseSignedCommandMemoStableV1, } @@ -1388,7 +1593,7 @@ pub struct MinaBaseSignedCommandPayloadCommonStableV2 { /// /// Gid: `730` /// Location: [src/lib/mina_base/signed_command_payload.ml:189:6](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/mina_base/signed_command_payload.ml#L189) -#[derive(Clone, Debug, PartialEq, SerdeYojsonEnum, BinProtRead, BinProtWrite)] +#[derive(Clone, Debug, PartialEq, SerdeYojsonEnum, BinProtRead, BinProtWrite, MallocSizeOf)] pub enum MinaBaseSignedCommandPayloadBodyStableV2 { Payment(MinaBasePaymentPayloadStableV2), StakeDelegation(MinaBaseStakeDelegationStableV2), @@ -1403,7 +1608,9 @@ pub enum MinaBaseSignedCommandPayloadBodyStableV2 { /// Gid: `734` /// Location: [src/lib/mina_base/signed_command_payload.ml:249:6](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/mina_base/signed_command_payload.ml#L249) /// Args: MinaBaseSignedCommandPayloadCommonStableV2 , MinaBaseSignedCommandPayloadBodyStableV2 -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite)] +#[derive( + Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, MallocSizeOf, +)] pub struct 
MinaBaseSignedCommandPayloadStableV2 { pub common: MinaBaseSignedCommandPayloadCommonStableV2, pub body: MinaBaseSignedCommandPayloadBodyStableV2, @@ -1418,7 +1625,9 @@ pub struct MinaBaseSignedCommandPayloadStableV2 { /// Gid: `741` /// Location: [src/lib/mina_base/signed_command.ml:27:8](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/mina_base/signed_command.ml#L27) /// Args: MinaBaseSignedCommandPayloadStableV2 , NonZeroCurvePoint , Signature -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite)] +#[derive( + Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, MallocSizeOf, +)] pub struct MinaBaseSignedCommandStableV2 { pub payload: MinaBaseSignedCommandPayloadStableV2, pub signer: NonZeroCurvePoint, @@ -1429,14 +1638,18 @@ pub struct MinaBaseSignedCommandStableV2 { /// /// Gid: `755` /// Location: [src/lib/mina_base/receipt.ml:31:6](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/mina_base/receipt.ml#L31) -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, Deref)] +#[derive( + Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, Deref, MallocSizeOf, +)] pub struct MinaBaseReceiptChainHashStableV1(pub crate::bigint::BigInt); /// **OCaml name**: `Mina_base__State_body_hash.Stable.V1` /// /// Gid: `760` /// Location: [src/lib/mina_base/state_body_hash.ml:19:4](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/mina_base/state_body_hash.ml#L19) -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, Deref)] +#[derive( + Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, Deref, MallocSizeOf, +)] pub struct MinaBaseStateBodyHashStableV1(pub crate::bigint::BigInt); /// Derived name: `Mina_base__Account_update.Update.Stable.V1.timing` @@ -1444,7 +1657,7 @@ pub struct MinaBaseStateBodyHashStableV1(pub crate::bigint::BigInt); /// Gid: `766` /// Location: 
[src/lib/mina_base/zkapp_basic.ml:100:6](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/mina_base/zkapp_basic.ml#L100) /// Args: MinaBaseAccountUpdateUpdateTimingInfoStableV1 -#[derive(Clone, Debug, PartialEq, SerdeYojsonEnum, BinProtRead, BinProtWrite)] +#[derive(Clone, Debug, PartialEq, SerdeYojsonEnum, BinProtRead, BinProtWrite, MallocSizeOf)] pub enum MinaBaseAccountUpdateUpdateStableV1Timing { Set(Box), Keep, @@ -1455,7 +1668,7 @@ pub enum MinaBaseAccountUpdateUpdateStableV1Timing { /// Gid: `766` /// Location: [src/lib/mina_base/zkapp_basic.ml:100:6](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/mina_base/zkapp_basic.ml#L100) /// Args: MinaBasePermissionsStableV2 -#[derive(Clone, Debug, PartialEq, SerdeYojsonEnum, BinProtRead, BinProtWrite)] +#[derive(Clone, Debug, PartialEq, SerdeYojsonEnum, BinProtRead, BinProtWrite, MallocSizeOf)] pub enum MinaBaseAccountUpdateUpdateStableV1Permissions { Set(Box), Keep, @@ -1466,7 +1679,7 @@ pub enum MinaBaseAccountUpdateUpdateStableV1Permissions { /// Gid: `766` /// Location: [src/lib/mina_base/zkapp_basic.ml:100:6](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/mina_base/zkapp_basic.ml#L100) /// Args: MinaBaseVerificationKeyWireStableV1 -#[derive(Clone, Debug, PartialEq, SerdeYojsonEnum, BinProtRead, BinProtWrite)] +#[derive(Clone, Debug, PartialEq, SerdeYojsonEnum, BinProtRead, BinProtWrite, MallocSizeOf)] pub enum MinaBaseAccountUpdateUpdateStableV1VerificationKey { Set(Box), Keep, @@ -1477,7 +1690,7 @@ pub enum MinaBaseAccountUpdateUpdateStableV1VerificationKey { /// Gid: `766` /// Location: [src/lib/mina_base/zkapp_basic.ml:100:6](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/mina_base/zkapp_basic.ml#L100) /// Args: NonZeroCurvePoint -#[derive(Clone, Debug, PartialEq, SerdeYojsonEnum, BinProtRead, BinProtWrite)] +#[derive(Clone, Debug, PartialEq, SerdeYojsonEnum, BinProtRead, BinProtWrite, MallocSizeOf)] pub enum MinaBaseAccountUpdateUpdateStableV1Delegate { 
Set(NonZeroCurvePoint), Keep, @@ -1488,7 +1701,7 @@ pub enum MinaBaseAccountUpdateUpdateStableV1Delegate { /// Gid: `766` /// Location: [src/lib/mina_base/zkapp_basic.ml:100:6](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/mina_base/zkapp_basic.ml#L100) /// Args: StateHash -#[derive(Clone, Debug, PartialEq, SerdeYojsonEnum, BinProtRead, BinProtWrite)] +#[derive(Clone, Debug, PartialEq, SerdeYojsonEnum, BinProtRead, BinProtWrite, MallocSizeOf)] pub enum MinaBaseAccountUpdateUpdateStableV1VotingFor { Set(StateHash), Keep, @@ -1499,7 +1712,7 @@ pub enum MinaBaseAccountUpdateUpdateStableV1VotingFor { /// Gid: `766` /// Location: [src/lib/mina_base/zkapp_basic.ml:100:6](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/mina_base/zkapp_basic.ml#L100) /// Args: crate :: bigint :: BigInt -#[derive(Clone, Debug, PartialEq, SerdeYojsonEnum, BinProtRead, BinProtWrite)] +#[derive(Clone, Debug, PartialEq, SerdeYojsonEnum, BinProtRead, BinProtWrite, MallocSizeOf)] pub enum MinaBaseAccountUpdateUpdateStableV1AppStateA { Set(crate::bigint::BigInt), Keep, @@ -1510,7 +1723,7 @@ pub enum MinaBaseAccountUpdateUpdateStableV1AppStateA { /// Gid: `766` /// Location: [src/lib/mina_base/zkapp_basic.ml:100:6](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/mina_base/zkapp_basic.ml#L100) /// Args: crate :: string :: ZkAppUri -#[derive(Clone, Debug, PartialEq, SerdeYojsonEnum, BinProtRead, BinProtWrite)] +#[derive(Clone, Debug, PartialEq, SerdeYojsonEnum, BinProtRead, BinProtWrite, MallocSizeOf)] pub enum MinaBaseAccountUpdateUpdateStableV1ZkappUri { Set(crate::string::ZkAppUri), Keep, @@ -1521,7 +1734,9 @@ pub enum MinaBaseAccountUpdateUpdateStableV1ZkappUri { /// Gid: `766` /// Location: [src/lib/mina_base/zkapp_basic.ml:100:6](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/mina_base/zkapp_basic.ml#L100) /// Args: crate :: string :: TokenSymbol -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite)] 
+#[derive( + Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, MallocSizeOf, +)] pub enum MinaBaseAccountUpdateUpdateStableV1TokenSymbol { Set(crate::string::TokenSymbol), Keep, @@ -1532,7 +1747,7 @@ pub enum MinaBaseAccountUpdateUpdateStableV1TokenSymbol { /// Gid: `767` /// Location: [src/lib/mina_base/zkapp_basic.ml:232:6](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/mina_base/zkapp_basic.ml#L232) /// Args: EpochSeed -#[derive(Clone, Debug, PartialEq, SerdeYojsonEnum, BinProtRead, BinProtWrite)] +#[derive(Clone, Debug, PartialEq, SerdeYojsonEnum, BinProtRead, BinProtWrite, MallocSizeOf)] pub enum MinaBaseZkappPreconditionProtocolStateEpochDataStableV1EpochSeed { Check(EpochSeed), Ignore, @@ -1543,7 +1758,7 @@ pub enum MinaBaseZkappPreconditionProtocolStateEpochDataStableV1EpochSeed { /// Gid: `767` /// Location: [src/lib/mina_base/zkapp_basic.ml:232:6](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/mina_base/zkapp_basic.ml#L232) /// Args: LedgerHash -#[derive(Clone, Debug, PartialEq, SerdeYojsonEnum, BinProtRead, BinProtWrite)] +#[derive(Clone, Debug, PartialEq, SerdeYojsonEnum, BinProtRead, BinProtWrite, MallocSizeOf)] pub enum MinaBaseZkappPreconditionProtocolStateStableV1SnarkedLedgerHash { Check(LedgerHash), Ignore, @@ -1554,7 +1769,7 @@ pub enum MinaBaseZkappPreconditionProtocolStateStableV1SnarkedLedgerHash { /// Gid: `767` /// Location: [src/lib/mina_base/zkapp_basic.ml:232:6](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/mina_base/zkapp_basic.ml#L232) /// Args: MinaBaseReceiptChainHashStableV1 -#[derive(Clone, Debug, PartialEq, SerdeYojsonEnum, BinProtRead, BinProtWrite)] +#[derive(Clone, Debug, PartialEq, SerdeYojsonEnum, BinProtRead, BinProtWrite, MallocSizeOf)] pub enum MinaBaseZkappPreconditionAccountStableV2ReceiptChainHash { Check(MinaBaseReceiptChainHashStableV1), Ignore, @@ -1565,7 +1780,7 @@ pub enum MinaBaseZkappPreconditionAccountStableV2ReceiptChainHash { /// Gid: `767` 
/// Location: [src/lib/mina_base/zkapp_basic.ml:232:6](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/mina_base/zkapp_basic.ml#L232) /// Args: NonZeroCurvePoint -#[derive(Clone, Debug, PartialEq, SerdeYojsonEnum, BinProtRead, BinProtWrite)] +#[derive(Clone, Debug, PartialEq, SerdeYojsonEnum, BinProtRead, BinProtWrite, MallocSizeOf)] pub enum MinaBaseZkappPreconditionAccountStableV2Delegate { Check(NonZeroCurvePoint), Ignore, @@ -1576,7 +1791,7 @@ pub enum MinaBaseZkappPreconditionAccountStableV2Delegate { /// Gid: `767` /// Location: [src/lib/mina_base/zkapp_basic.ml:232:6](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/mina_base/zkapp_basic.ml#L232) /// Args: StateHash -#[derive(Clone, Debug, PartialEq, SerdeYojsonEnum, BinProtRead, BinProtWrite)] +#[derive(Clone, Debug, PartialEq, SerdeYojsonEnum, BinProtRead, BinProtWrite, MallocSizeOf)] pub enum MinaBaseZkappPreconditionProtocolStateEpochDataStableV1StartCheckpoint { Check(StateHash), Ignore, @@ -1587,7 +1802,7 @@ pub enum MinaBaseZkappPreconditionProtocolStateEpochDataStableV1StartCheckpoint /// Gid: `767` /// Location: [src/lib/mina_base/zkapp_basic.ml:232:6](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/mina_base/zkapp_basic.ml#L232) /// Args: bool -#[derive(Clone, Debug, PartialEq, SerdeYojsonEnum, BinProtRead, BinProtWrite)] +#[derive(Clone, Debug, PartialEq, SerdeYojsonEnum, BinProtRead, BinProtWrite, MallocSizeOf)] pub enum MinaBaseZkappPreconditionAccountStableV2ProvedState { Check(bool), Ignore, @@ -1598,7 +1813,7 @@ pub enum MinaBaseZkappPreconditionAccountStableV2ProvedState { /// Gid: `767` /// Location: [src/lib/mina_base/zkapp_basic.ml:232:6](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/mina_base/zkapp_basic.ml#L232) /// Args: crate :: bigint :: BigInt -#[derive(Clone, Debug, PartialEq, SerdeYojsonEnum, BinProtRead, BinProtWrite)] +#[derive(Clone, Debug, PartialEq, SerdeYojsonEnum, BinProtRead, BinProtWrite, MallocSizeOf)] pub enum 
MinaBaseZkappPreconditionAccountStableV2StateA { Check(crate::bigint::BigInt), Ignore, @@ -1658,7 +1873,9 @@ pub struct MinaBaseAccountIndexStableV1(pub crate::number::UInt64); /// Gid: `781` /// Location: [src/lib/mina_base/epoch_ledger.ml:9:6](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/mina_base/epoch_ledger.ml#L9) /// Args: MinaBaseZkappPreconditionProtocolStateStableV1SnarkedLedgerHash , MinaBaseZkappPreconditionProtocolStateStableV1Amount -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite)] +#[derive( + Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, MallocSizeOf, +)] pub struct MinaBaseZkappPreconditionProtocolStateEpochDataStableV1EpochLedger { pub hash: MinaBaseZkappPreconditionProtocolStateStableV1SnarkedLedgerHash, pub total_currency: MinaBaseZkappPreconditionProtocolStateStableV1Amount, @@ -1673,7 +1890,9 @@ pub struct MinaBaseZkappPreconditionProtocolStateEpochDataStableV1EpochLedger { /// Gid: `781` /// Location: [src/lib/mina_base/epoch_ledger.ml:9:6](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/mina_base/epoch_ledger.ml#L9) /// Args: LedgerHash , CurrencyAmountStableV1 -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite)] +#[derive( + Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, MallocSizeOf, +)] pub struct MinaBaseEpochLedgerValueStableV1 { pub hash: LedgerHash, pub total_currency: CurrencyAmountStableV1, @@ -1683,7 +1902,9 @@ pub struct MinaBaseEpochLedgerValueStableV1 { /// /// Gid: `785` /// Location: [src/lib/mina_base/epoch_seed.ml:14:4](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/mina_base/epoch_seed.ml#L14) -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, Deref)] +#[derive( + Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, Deref, MallocSizeOf, +)] pub struct MinaBaseEpochSeedStableV1(pub 
crate::bigint::BigInt); /// Derived name: `Mina_base__Zkapp_precondition.Protocol_state.Stable.V1.amount.a` @@ -1691,7 +1912,9 @@ pub struct MinaBaseEpochSeedStableV1(pub crate::bigint::BigInt); /// Gid: `790` /// Location: [src/lib/mina_base/zkapp_precondition.ml:23:6](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/mina_base/zkapp_precondition.ml#L23) /// Args: CurrencyAmountStableV1 -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite)] +#[derive( + Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, MallocSizeOf, +)] pub struct MinaBaseZkappPreconditionProtocolStateStableV1AmountA { pub lower: CurrencyAmountStableV1, pub upper: CurrencyAmountStableV1, @@ -1702,7 +1925,9 @@ pub struct MinaBaseZkappPreconditionProtocolStateStableV1AmountA { /// Gid: `790` /// Location: [src/lib/mina_base/zkapp_precondition.ml:23:6](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/mina_base/zkapp_precondition.ml#L23) /// Args: CurrencyBalanceStableV1 -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite)] +#[derive( + Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, MallocSizeOf, +)] pub struct MinaBaseZkappPreconditionAccountStableV2BalanceA { pub lower: CurrencyBalanceStableV1, pub upper: CurrencyBalanceStableV1, @@ -1713,7 +1938,9 @@ pub struct MinaBaseZkappPreconditionAccountStableV2BalanceA { /// Gid: `790` /// Location: [src/lib/mina_base/zkapp_precondition.ml:23:6](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/mina_base/zkapp_precondition.ml#L23) /// Args: MinaNumbersGlobalSlotSinceGenesisMStableV1 -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite)] +#[derive( + Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, MallocSizeOf, +)] pub struct MinaBaseZkappPreconditionProtocolStateStableV1GlobalSlotA { pub lower: MinaNumbersGlobalSlotSinceGenesisMStableV1, pub upper: 
MinaNumbersGlobalSlotSinceGenesisMStableV1, @@ -1724,7 +1951,9 @@ pub struct MinaBaseZkappPreconditionProtocolStateStableV1GlobalSlotA { /// Gid: `790` /// Location: [src/lib/mina_base/zkapp_precondition.ml:23:6](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/mina_base/zkapp_precondition.ml#L23) /// Args: UnsignedExtendedUInt32StableV1 -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite)] +#[derive( + Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, MallocSizeOf, +)] pub struct MinaBaseZkappPreconditionProtocolStateStableV1LengthA { pub lower: UnsignedExtendedUInt32StableV1, pub upper: UnsignedExtendedUInt32StableV1, @@ -1740,7 +1969,7 @@ pub struct MinaBaseZkappPreconditionProtocolStateStableV1LengthA { /// Gid: `767` /// Location: [src/lib/mina_base/zkapp_basic.ml:232:6](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/mina_base/zkapp_basic.ml#L232) /// Args: MinaBaseZkappPreconditionProtocolStateStableV1AmountA -#[derive(Clone, Debug, PartialEq, SerdeYojsonEnum, BinProtRead, BinProtWrite)] +#[derive(Clone, Debug, PartialEq, SerdeYojsonEnum, BinProtRead, BinProtWrite, MallocSizeOf)] pub enum MinaBaseZkappPreconditionProtocolStateStableV1Amount { Check(MinaBaseZkappPreconditionProtocolStateStableV1AmountA), Ignore, @@ -1756,7 +1985,7 @@ pub enum MinaBaseZkappPreconditionProtocolStateStableV1Amount { /// Gid: `767` /// Location: [src/lib/mina_base/zkapp_basic.ml:232:6](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/mina_base/zkapp_basic.ml#L232) /// Args: MinaBaseZkappPreconditionAccountStableV2BalanceA -#[derive(Clone, Debug, PartialEq, SerdeYojsonEnum, BinProtRead, BinProtWrite)] +#[derive(Clone, Debug, PartialEq, SerdeYojsonEnum, BinProtRead, BinProtWrite, MallocSizeOf)] pub enum MinaBaseZkappPreconditionAccountStableV2Balance { Check(MinaBaseZkappPreconditionAccountStableV2BalanceA), Ignore, @@ -1772,7 +2001,7 @@ pub enum 
MinaBaseZkappPreconditionAccountStableV2Balance { /// Gid: `767` /// Location: [src/lib/mina_base/zkapp_basic.ml:232:6](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/mina_base/zkapp_basic.ml#L232) /// Args: MinaBaseZkappPreconditionProtocolStateStableV1GlobalSlotA -#[derive(Clone, Debug, PartialEq, SerdeYojsonEnum, BinProtRead, BinProtWrite)] +#[derive(Clone, Debug, PartialEq, SerdeYojsonEnum, BinProtRead, BinProtWrite, MallocSizeOf)] pub enum MinaBaseZkappPreconditionProtocolStateStableV1GlobalSlot { Check(MinaBaseZkappPreconditionProtocolStateStableV1GlobalSlotA), Ignore, @@ -1788,7 +2017,7 @@ pub enum MinaBaseZkappPreconditionProtocolStateStableV1GlobalSlot { /// Gid: `767` /// Location: [src/lib/mina_base/zkapp_basic.ml:232:6](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/mina_base/zkapp_basic.ml#L232) /// Args: MinaBaseZkappPreconditionProtocolStateStableV1LengthA -#[derive(Clone, Debug, PartialEq, SerdeYojsonEnum, BinProtRead, BinProtWrite)] +#[derive(Clone, Debug, PartialEq, SerdeYojsonEnum, BinProtRead, BinProtWrite, MallocSizeOf)] pub enum MinaBaseZkappPreconditionProtocolStateStableV1Length { Check(MinaBaseZkappPreconditionProtocolStateStableV1LengthA), Ignore, @@ -1798,7 +2027,9 @@ pub enum MinaBaseZkappPreconditionProtocolStateStableV1Length { /// /// Gid: `792` /// Location: [src/lib/mina_base/zkapp_precondition.ml:465:6](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/mina_base/zkapp_precondition.ml#L465) -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite)] +#[derive( + Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, MallocSizeOf, +)] pub struct MinaBaseZkappPreconditionAccountStableV2 { pub balance: MinaBaseZkappPreconditionAccountStableV2Balance, pub nonce: MinaBaseZkappPreconditionProtocolStateStableV1Length, @@ -1819,7 +2050,9 @@ pub struct MinaBaseZkappPreconditionAccountStableV2 { /// Gid: `788` /// Location: 
[src/lib/mina_base/epoch_data.ml:8:6](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/mina_base/epoch_data.ml#L8) /// Args: MinaBaseZkappPreconditionProtocolStateEpochDataStableV1EpochLedger , MinaBaseZkappPreconditionProtocolStateEpochDataStableV1EpochSeed , MinaBaseZkappPreconditionProtocolStateEpochDataStableV1StartCheckpoint , MinaBaseZkappPreconditionProtocolStateEpochDataStableV1StartCheckpoint , MinaBaseZkappPreconditionProtocolStateStableV1Length -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite)] +#[derive( + Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, MallocSizeOf, +)] pub struct MinaBaseZkappPreconditionProtocolStateEpochDataStableV1 { pub ledger: MinaBaseZkappPreconditionProtocolStateEpochDataStableV1EpochLedger, pub seed: MinaBaseZkappPreconditionProtocolStateEpochDataStableV1EpochSeed, @@ -1837,7 +2070,9 @@ pub struct MinaBaseZkappPreconditionProtocolStateEpochDataStableV1 { /// Gid: `794` /// Location: [src/lib/mina_base/zkapp_precondition.ml:923:8](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/mina_base/zkapp_precondition.ml#L923) /// Args: MinaBaseZkappPreconditionProtocolStateStableV1SnarkedLedgerHash , MinaBaseZkappPreconditionProtocolStateStableV1Length , MinaBaseZkappPreconditionProtocolStateStableV1GlobalSlot , MinaBaseZkappPreconditionProtocolStateStableV1Amount , MinaBaseZkappPreconditionProtocolStateEpochDataStableV1 -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite)] +#[derive( + Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, MallocSizeOf, +)] pub struct MinaBaseZkappPreconditionProtocolStateStableV1 { pub snarked_ledger_hash: MinaBaseZkappPreconditionProtocolStateStableV1SnarkedLedgerHash, pub blockchain_length: MinaBaseZkappPreconditionProtocolStateStableV1Length, @@ -1883,11 +2118,19 @@ pub struct MinaBaseAccountUpdateUpdateTimingInfoStableV1 { pub vesting_increment: 
CurrencyAmountStableV1, } +impl MallocSizeOf for MinaBaseAccountUpdateUpdateTimingInfoStableV1 { + fn size_of(&self, _ops: &mut malloc_size_of::MallocSizeOfOps) -> usize { + 0 + } +} + /// **OCaml name**: `Mina_base__Account_update.Update.Stable.V1` /// /// Gid: `806` /// Location: [src/lib/mina_base/account_update.ml:692:6](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/mina_base/account_update.ml#L692) -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite)] +#[derive( + Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, MallocSizeOf, +)] pub struct MinaBaseAccountUpdateUpdateStableV1 { pub app_state: PaddedSeq, pub delegate: MinaBaseAccountUpdateUpdateStableV1Delegate, @@ -1903,7 +2146,9 @@ pub struct MinaBaseAccountUpdateUpdateStableV1 { /// /// Gid: `807` /// Location: [src/lib/mina_base/account_update.ml:958:6](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/mina_base/account_update.ml#L958) -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, Deref)] +#[derive( + Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, Deref, MallocSizeOf, +)] pub struct MinaBaseAccountUpdateAccountPreconditionStableV1( pub MinaBaseZkappPreconditionAccountStableV2, ); @@ -1912,7 +2157,9 @@ pub struct MinaBaseAccountUpdateAccountPreconditionStableV1( /// /// Gid: `808` /// Location: [src/lib/mina_base/account_update.ml:1029:6](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/mina_base/account_update.ml#L1029) -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite)] +#[derive( + Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, MallocSizeOf, +)] pub struct MinaBaseAccountUpdatePreconditionsStableV1 { pub network: MinaBaseZkappPreconditionProtocolStateStableV1, pub account: MinaBaseAccountUpdateAccountPreconditionStableV1, @@ -1933,18 +2180,23 @@ pub struct 
MinaBaseAccountUpdatePreconditionsStableV1 { /// Gid: `50` /// Location: [src/list0.ml:6:0](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/list0.ml#L6) /// Args: ArrayN16 < crate :: bigint :: BigInt > -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, Deref)] +#[derive( + Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, Deref, MallocSizeOf, +)] pub struct MinaBaseAccountUpdateBodyEventsStableV1(pub List>); /// **OCaml name**: `Mina_base__Account_update.Body.Stable.V1` /// /// Gid: `812` /// Location: [src/lib/mina_base/account_update.ml:1216:6](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/mina_base/account_update.ml#L1216) -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite)] +#[derive( + Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, MallocSizeOf, +)] pub struct MinaBaseAccountUpdateBodyStableV1 { pub public_key: NonZeroCurvePoint, pub token_id: TokenIdKeyHash, pub update: MinaBaseAccountUpdateUpdateStableV1, + #[ignore_malloc_size_of = "primitive"] pub balance_change: MinaStateBlockchainStateValueStableV2SignedAmount, pub increment_nonce: bool, pub events: MinaBaseAccountUpdateBodyEventsStableV1, @@ -1953,7 +2205,9 @@ pub struct MinaBaseAccountUpdateBodyStableV1 { pub preconditions: MinaBaseAccountUpdatePreconditionsStableV1, pub use_full_commitment: bool, pub implicit_account_creation_fee: bool, + #[ignore_malloc_size_of = "primitive"] pub may_use_token: MinaBaseAccountUpdateMayUseTokenStableV1, + #[ignore_malloc_size_of = "primitive"] pub authorization_kind: MinaBaseAccountUpdateAuthorizationKindStableV1, } @@ -1961,11 +2215,15 @@ pub struct MinaBaseAccountUpdateBodyStableV1 { /// /// Gid: `813` /// Location: [src/lib/mina_base/account_update.ml:1322:8](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/mina_base/account_update.ml#L1322) -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, 
BinProtRead, BinProtWrite)] +#[derive( + Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, MallocSizeOf, +)] pub struct MinaBaseAccountUpdateBodyFeePayerStableV1 { pub public_key: NonZeroCurvePoint, pub fee: CurrencyFeeStableV1, + #[ignore_malloc_size_of = "primitive"] pub valid_until: Option, + #[ignore_malloc_size_of = "primitive"] pub nonce: UnsignedExtendedUInt32StableV1, } @@ -1973,7 +2231,9 @@ pub struct MinaBaseAccountUpdateBodyFeePayerStableV1 { /// /// Gid: `816` /// Location: [src/lib/mina_base/account_update.ml:1694:6](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/mina_base/account_update.ml#L1694) -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite)] +#[derive( + Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, MallocSizeOf, +)] pub struct MinaBaseAccountUpdateTStableV1 { pub body: MinaBaseAccountUpdateBodyStableV1, pub authorization: MinaBaseControlStableV2, @@ -1983,7 +2243,9 @@ pub struct MinaBaseAccountUpdateTStableV1 { /// /// Gid: `817` /// Location: [src/lib/mina_base/account_update.ml:1738:6](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/mina_base/account_update.ml#L1738) -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite)] +#[derive( + Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, MallocSizeOf, +)] pub struct MinaBaseAccountUpdateFeePayerStableV1 { pub body: MinaBaseAccountUpdateBodyFeePayerStableV1, pub authorization: Signature, @@ -1994,7 +2256,9 @@ pub struct MinaBaseAccountUpdateFeePayerStableV1 { /// Gid: `818` /// Location: [src/lib/mina_base/with_stack_hash.ml:6:4](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/mina_base/with_stack_hash.ml#L6) /// Args: Box < MinaBaseZkappCommandTStableV1WireStableV1AccountUpdatesAA > , () -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite)] +#[derive( + Clone, Debug, PartialEq, 
Serialize, Deserialize, BinProtRead, BinProtWrite, MallocSizeOf, +)] pub struct MinaBaseZkappCommandTStableV1WireStableV1AccountUpdatesAACallsA { pub elt: Box, #[serde(deserialize_with = "always_unit")] @@ -2017,7 +2281,9 @@ pub struct MinaBaseZkappCommandVerifiableStableV1AccountUpdatesAACallsA { /// Gid: `818` /// Location: [src/lib/mina_base/with_stack_hash.ml:6:4](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/mina_base/with_stack_hash.ml#L6) /// Args: MinaBaseZkappCommandTStableV1WireStableV1AccountUpdatesAA , () -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite)] +#[derive( + Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, MallocSizeOf, +)] pub struct MinaBaseZkappCommandTStableV1WireStableV1AccountUpdatesA { pub elt: MinaBaseZkappCommandTStableV1WireStableV1AccountUpdatesAA, #[serde(deserialize_with = "always_unit")] @@ -2073,7 +2339,9 @@ pub struct MinaTransactionLogicTransactionAppliedSignedCommandAppliedCommonStabl /// Gid: `819` /// Location: [src/lib/mina_base/with_status.ml:6:4](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/mina_base/with_status.ml#L6) /// Args: MinaBaseUserCommandStableV2 -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite)] +#[derive( + Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, MallocSizeOf, +)] pub struct StagedLedgerDiffDiffPreDiffWithAtMostTwoCoinbaseStableV2B { pub data: MinaBaseUserCommandStableV2, pub status: MinaBaseTransactionStatusStableV2, @@ -2110,7 +2378,9 @@ pub struct MinaBaseZkappCommandVerifiableStableV1AccountUpdatesAA { /// Gid: `820` /// Location: [src/lib/mina_base/zkapp_command.ml:11:8](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/mina_base/zkapp_command.ml#L11) /// Args: MinaBaseAccountUpdateTStableV1 , () , () -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite)] +#[derive( + Clone, Debug, PartialEq, 
Serialize, Deserialize, BinProtRead, BinProtWrite, MallocSizeOf, +)] pub struct MinaBaseZkappCommandTStableV1WireStableV1AccountUpdatesAA { pub account_update: MinaBaseAccountUpdateTStableV1, #[serde(deserialize_with = "always_unit")] @@ -2138,7 +2408,9 @@ pub struct MinaBaseZkappCommandCallForestMakeDigestStrForestStableV1(pub crate:: /// /// Gid: `829` /// Location: [src/lib/mina_base/zkapp_command.ml:684:12](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/mina_base/zkapp_command.ml#L684) -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite)] +#[derive( + Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, MallocSizeOf, +)] pub struct MinaBaseZkappCommandTStableV1WireStableV1 { pub fee_payer: MinaBaseAccountUpdateFeePayerStableV1, pub account_updates: List, @@ -2166,7 +2438,14 @@ pub struct MinaBaseZkappCommandVerifiableStableV1 { /// Location: [src/lib/mina_base/user_command.ml:7:6](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/mina_base/user_command.ml#L7) /// Args: MinaBaseSignedCommandStableV2 , MinaBaseZkappCommandTStableV1WireStableV1 #[derive( - Clone, Debug, PartialEq, SerdeYojsonEnum, BinProtRead, BinProtWrite, derive_more::From, + Clone, + Debug, + PartialEq, + SerdeYojsonEnum, + BinProtRead, + BinProtWrite, + derive_more::From, + MallocSizeOf, )] pub enum MinaBaseUserCommandStableV2 { SignedCommand(MinaBaseSignedCommandStableV2), @@ -2247,14 +2526,18 @@ pub struct MinaBasePendingCoinbaseStackIdStableV1(pub crate::number::UInt64); /// /// Gid: `851` /// Location: [src/lib/mina_base/pending_coinbase.ml:159:8](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/mina_base/pending_coinbase.ml#L159) -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, Deref)] +#[derive( + Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, Deref, MallocSizeOf, +)] pub struct MinaBasePendingCoinbaseCoinbaseStackStableV1(pub 
crate::bigint::BigInt); /// **OCaml name**: `Mina_base__Pending_coinbase.Make_str.Stack_hash.Stable.V1` /// /// Gid: `856` /// Location: [src/lib/mina_base/pending_coinbase.ml:219:8](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/mina_base/pending_coinbase.ml#L219) -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, Deref)] +#[derive( + Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, Deref, MallocSizeOf, +)] pub struct MinaBasePendingCoinbaseStackHashStableV1(pub crate::bigint::BigInt); /// **OCaml name**: `Mina_base__Pending_coinbase.Make_str.State_stack.Stable.V1` @@ -2266,7 +2549,9 @@ pub struct MinaBasePendingCoinbaseStackHashStableV1(pub crate::bigint::BigInt); /// Gid: `859` /// Location: [src/lib/mina_base/pending_coinbase.ml:245:10](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/mina_base/pending_coinbase.ml#L245) /// Args: CoinbaseStackHash -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite)] +#[derive( + Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, MallocSizeOf, +)] pub struct MinaBasePendingCoinbaseStateStackStableV1 { pub init: CoinbaseStackHash, pub curr: CoinbaseStackHash, @@ -2327,7 +2612,9 @@ pub struct MinaBasePendingCoinbaseUpdateStableV1 { /// Gid: `869` /// Location: [src/lib/mina_base/pending_coinbase.ml:511:10](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/mina_base/pending_coinbase.ml#L511) /// Args: CoinbaseStackData , MinaBasePendingCoinbaseStateStackStableV1 -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite)] +#[derive( + Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, MallocSizeOf, +)] pub struct MinaBasePendingCoinbaseStackVersionedStableV1 { pub data: CoinbaseStackData, pub state: MinaBasePendingCoinbaseStateStackStableV1, @@ -2445,7 +2732,9 @@ pub struct MinaBaseStagedLedgerHashStableV1 { /// /// Gid: `881` 
/// Location: [src/lib/mina_base/stack_frame.ml:64:6](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/mina_base/stack_frame.ml#L64) -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, Deref)] +#[derive( + Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, Deref, MallocSizeOf, +)] pub struct MinaBaseStackFrameStableV1(pub crate::bigint::BigInt); /// **OCaml name**: `Mina_base__Sok_message.Make_str.Stable.V1` @@ -2481,7 +2770,9 @@ pub struct MinaBaseProtocolConstantsCheckedValueStableV1 { /// /// Gid: `885` /// Location: [src/lib/mina_base/proof.ml:12:4](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/mina_base/proof.ml#L12) -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, Deref)] +#[derive( + Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, Deref, MallocSizeOf, +)] pub struct MinaBaseProofStableV2(pub PicklesProofProofsVerified2ReprStableV2); /// **OCaml name**: `Mina_base__Pending_coinbase_witness.Stable.V2` @@ -2498,7 +2789,9 @@ pub struct MinaBasePendingCoinbaseWitnessStableV2 { /// /// Gid: `887` /// Location: [src/lib/mina_base/call_stack_digest.ml:12:6](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/mina_base/call_stack_digest.ml#L12) -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, Deref)] +#[derive( + Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, Deref, MallocSizeOf, +)] pub struct MinaBaseCallStackDigestStableV1(pub crate::bigint::BigInt); /// **OCaml name**: `Mina_base__Fee_with_prover.Stable.V1` @@ -2543,7 +2836,9 @@ pub enum MinaTransactionTransactionStableV2 { /// Gid: `905` /// Location: [src/lib/transaction_logic/zkapp_command_logic.ml:196:6](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/transaction_logic/zkapp_command_logic.ml#L196) /// Args: MinaBaseStackFrameStableV1 , MinaBaseCallStackDigestStableV1 , 
SignedAmount , LedgerHash , bool , crate :: bigint :: BigInt , UnsignedExtendedUInt32StableV1 , MinaBaseTransactionStatusFailureCollectionStableV1 -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite)] +#[derive( + Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, MallocSizeOf, +)] pub struct MinaTransactionLogicZkappCommandLogicLocalStateValueStableV1 { pub stack_frame: MinaBaseStackFrameStableV1, pub call_stack: MinaBaseCallStackDigestStableV1, @@ -2553,6 +2848,7 @@ pub struct MinaTransactionLogicZkappCommandLogicLocalStateValueStableV1 { pub supply_increase: SignedAmount, pub ledger: LedgerHash, pub success: bool, + #[ignore_malloc_size_of = "primitive"] pub account_update_index: UnsignedExtendedUInt32StableV1, pub failure_status_tbl: MinaBaseTransactionStatusFailureCollectionStableV1, pub will_succeed: bool, @@ -2684,7 +2980,7 @@ pub enum TrustSystemBannedStatusStableV1 { /// /// Gid: `941` /// Location: [src/lib/consensus/vrf/consensus_vrf.ml:168:8](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/consensus/vrf/consensus_vrf.ml#L168) -#[derive(Clone, Debug, PartialEq, BinProtRead, BinProtWrite, Deref)] +#[derive(Clone, Debug, PartialEq, BinProtRead, BinProtWrite, Deref, MallocSizeOf)] pub struct ConsensusVrfOutputTruncatedStableV1(pub crate::string::ByteString); /// **OCaml name**: `Consensus__Stake_proof.Stable.V2` @@ -2705,7 +3001,7 @@ pub struct ConsensusStakeProofStableV2 { /// /// Gid: `959` /// Location: [src/lib/consensus/body_reference.ml:17:4](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/consensus/body_reference.ml#L17) -#[derive(Clone, Debug, PartialEq, BinProtRead, BinProtWrite, Deref)] +#[derive(Clone, Debug, PartialEq, BinProtRead, BinProtWrite, Deref, MallocSizeOf)] pub struct ConsensusBodyReferenceStableV1(pub crate::string::ByteString); /// **OCaml name**: `Consensus__Global_slot.Make_str.Stable.V1` @@ -2717,7 +3013,9 @@ pub struct 
ConsensusBodyReferenceStableV1(pub crate::string::ByteString); /// Gid: `965` /// Location: [src/lib/consensus/global_slot.ml:22:8](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/consensus/global_slot.ml#L22) /// Args: MinaNumbersGlobalSlotSinceHardForkMStableV1 , UnsignedExtendedUInt32StableV1 -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite)] +#[derive( + Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, MallocSizeOf, +)] pub struct ConsensusGlobalSlotStableV1 { pub slot_number: MinaNumbersGlobalSlotSinceHardForkMStableV1, pub slots_per_epoch: UnsignedExtendedUInt32StableV1, @@ -2732,7 +3030,9 @@ pub struct ConsensusGlobalSlotStableV1 { /// Gid: `788` /// Location: [src/lib/mina_base/epoch_data.ml:8:6](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/mina_base/epoch_data.ml#L8) /// Args: MinaBaseEpochLedgerValueStableV1 , EpochSeed , StateHash , StateHash , UnsignedExtendedUInt32StableV1 -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite)] +#[derive( + Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, MallocSizeOf, +)] pub struct ConsensusProofOfStakeDataEpochDataStakingValueVersionedValueStableV1 { pub ledger: MinaBaseEpochLedgerValueStableV1, pub seed: EpochSeed, @@ -2750,7 +3050,9 @@ pub struct ConsensusProofOfStakeDataEpochDataStakingValueVersionedValueStableV1 /// Gid: `788` /// Location: [src/lib/mina_base/epoch_data.ml:8:6](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/mina_base/epoch_data.ml#L8) /// Args: MinaBaseEpochLedgerValueStableV1 , EpochSeed , StateHash , StateHash , UnsignedExtendedUInt32StableV1 -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite)] +#[derive( + Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, MallocSizeOf, +)] pub struct ConsensusProofOfStakeDataEpochDataNextValueVersionedValueStableV1 { pub ledger: 
MinaBaseEpochLedgerValueStableV1, pub seed: EpochSeed, @@ -2764,7 +3066,9 @@ pub struct ConsensusProofOfStakeDataEpochDataNextValueVersionedValueStableV1 { /// Gid: `985` /// Location: [src/lib/mina_state/registers.ml:8:4](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/mina_state/registers.ml#L8) /// Args: LedgerHash , MinaBasePendingCoinbaseStackVersionedStableV1 , MinaTransactionLogicZkappCommandLogicLocalStateValueStableV1 -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite)] +#[derive( + Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, MallocSizeOf, +)] pub struct MinaStateBlockchainStateValueStableV2LedgerProofStatementSource { pub first_pass_ledger: LedgerHash, pub second_pass_ledger: LedgerHash, @@ -2787,13 +3091,17 @@ pub enum MinaStateSnarkedLedgerStatePendingCoinbaseStackStateInitStackStableV1 { /// Gid: `991` /// Location: [src/lib/mina_state/snarked_ledger_state.ml:107:8](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/mina_state/snarked_ledger_state.ml#L107) /// Args: LedgerHash , MinaStateBlockchainStateValueStableV2SignedAmount , MinaBasePendingCoinbaseStackVersionedStableV1 , MinaBaseFeeExcessStableV1 , () , MinaTransactionLogicZkappCommandLogicLocalStateValueStableV1 -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite)] +#[derive( + Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, MallocSizeOf, +)] pub struct MinaStateBlockchainStateValueStableV2LedgerProofStatement { pub source: MinaStateBlockchainStateValueStableV2LedgerProofStatementSource, pub target: MinaStateBlockchainStateValueStableV2LedgerProofStatementSource, pub connecting_ledger_left: LedgerHash, pub connecting_ledger_right: LedgerHash, + #[ignore_malloc_size_of = "primitive"] pub supply_increase: MinaStateBlockchainStateValueStableV2SignedAmount, + #[ignore_malloc_size_of = "primitive"] pub fee_excess: MinaBaseFeeExcessStableV1, 
#[serde(deserialize_with = "always_unit")] pub sok_digest: (), @@ -2817,13 +3125,17 @@ pub struct MinaStateSnarkedLedgerStateStableV2( /// Gid: `991` /// Location: [src/lib/mina_state/snarked_ledger_state.ml:107:8](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/mina_state/snarked_ledger_state.ml#L107) /// Args: LedgerHash , MinaStateBlockchainStateValueStableV2SignedAmount , MinaBasePendingCoinbaseStackVersionedStableV1 , MinaBaseFeeExcessStableV1 , crate :: string :: ByteString , MinaTransactionLogicZkappCommandLogicLocalStateValueStableV1 -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite)] +#[derive( + Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, MallocSizeOf, +)] pub struct MinaStateSnarkedLedgerStateWithSokStableV2 { pub source: MinaStateBlockchainStateValueStableV2LedgerProofStatementSource, pub target: MinaStateBlockchainStateValueStableV2LedgerProofStatementSource, pub connecting_ledger_left: LedgerHash, pub connecting_ledger_right: LedgerHash, + #[ignore_malloc_size_of = "primitive"] pub supply_increase: MinaStateBlockchainStateValueStableV2SignedAmount, + #[ignore_malloc_size_of = "primitive"] pub fee_excess: MinaBaseFeeExcessStableV1, pub sok_digest: crate::string::ByteString, } @@ -2837,11 +3149,15 @@ pub struct MinaStateSnarkedLedgerStateWithSokStableV2 { /// Gid: `996` /// Location: [src/lib/mina_state/blockchain_state.ml:10:6](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/mina_state/blockchain_state.ml#L10) /// Args: MinaBaseStagedLedgerHashStableV1 , LedgerHash , MinaTransactionLogicZkappCommandLogicLocalStateValueStableV1 , BlockTimeTimeStableV1 , ConsensusBodyReferenceStableV1 , MinaStateBlockchainStateValueStableV2SignedAmount , MinaBasePendingCoinbaseStackVersionedStableV1 , MinaBaseFeeExcessStableV1 , () -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite)] +#[derive( + Clone, Debug, PartialEq, Serialize, Deserialize, 
BinProtRead, BinProtWrite, MallocSizeOf, +)] pub struct MinaStateBlockchainStateValueStableV2 { + #[ignore_malloc_size_of = "primitive"] pub staged_ledger_hash: MinaBaseStagedLedgerHashStableV1, pub genesis_ledger_hash: LedgerHash, pub ledger_proof_statement: MinaStateBlockchainStateValueStableV2LedgerProofStatement, + #[ignore_malloc_size_of = "primitive"] pub timestamp: BlockTimeTimeStableV1, pub body_reference: ConsensusBodyReferenceStableV1, } @@ -2871,11 +3187,14 @@ pub struct MinaStateSnarkTransitionValueStableV2 { /// Gid: `1001` /// Location: [src/lib/mina_state/protocol_state.ml:62:10](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/mina_state/protocol_state.ml#L62) /// Args: StateHash , MinaStateBlockchainStateValueStableV2 , ConsensusProofOfStakeDataConsensusStateValueStableV2 , MinaBaseProtocolConstantsCheckedValueStableV1 -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite)] +#[derive( + Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, MallocSizeOf, +)] pub struct MinaStateProtocolStateBodyValueStableV2 { pub genesis_state_hash: StateHash, pub blockchain_state: MinaStateBlockchainStateValueStableV2, pub consensus_state: ConsensusProofOfStakeDataConsensusStateValueStableV2, + #[ignore_malloc_size_of = "primitive"] pub constants: MinaBaseProtocolConstantsCheckedValueStableV1, } @@ -2883,14 +3202,16 @@ pub struct MinaStateProtocolStateBodyValueStableV2 { /// /// Gid: `1011` /// Location: [src/lib/transaction_snark/transaction_snark.ml:69:8](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/transaction_snark/transaction_snark.ml#L69) -#[derive(Clone, Debug, PartialEq, BinProtRead, BinProtWrite, Deref)] +#[derive(Clone, Debug, PartialEq, BinProtRead, BinProtWrite, Deref, MallocSizeOf)] pub struct TransactionSnarkProofStableV2(pub PicklesProofProofsVerified2ReprStableV2); /// **OCaml name**: `Transaction_snark.Make_str.Stable.V2` /// /// Gid: `1012` /// Location: 
[src/lib/transaction_snark/transaction_snark.ml:80:6](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/transaction_snark/transaction_snark.ml#L80) -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite)] +#[derive( + Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, MallocSizeOf, +)] pub struct TransactionSnarkStableV2 { pub statement: MinaStateSnarkedLedgerStateWithSokStableV2, pub proof: TransactionSnarkProofStableV2, @@ -2900,7 +3221,9 @@ pub struct TransactionSnarkStableV2 { /// /// Gid: `1014` /// Location: [src/lib/ledger_proof/ledger_proof.ml:10:6](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/ledger_proof/ledger_proof.ml#L10) -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, Deref)] +#[derive( + Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, Deref, MallocSizeOf, +)] pub struct LedgerProofProdStableV2(pub TransactionSnarkStableV2); /// **OCaml name**: `Transaction_snark_work.Statement.Stable.V2` @@ -2930,7 +3253,9 @@ pub enum TransactionSnarkWorkStatementStableV2 { /// /// Gid: `1024` /// Location: [src/lib/transaction_snark_work/transaction_snark_work.ml:83:6](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/transaction_snark_work/transaction_snark_work.ml#L83) -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite)] +#[derive( + Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, MallocSizeOf, +)] pub struct TransactionSnarkWorkTStableV2 { pub fee: CurrencyFeeStableV1, pub proofs: TransactionSnarkWorkTStableV2Proofs, @@ -2981,10 +3306,13 @@ pub struct StagedLedgerDiffDiffFtStableV1(pub MinaBaseCoinbaseFeeTransferStableV /// Gid: `1028` /// Location: [src/lib/staged_ledger_diff/diff.ml:104:8](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/staged_ledger_diff/diff.ml#L104) /// Args: TransactionSnarkWorkTStableV2 , 
StagedLedgerDiffDiffPreDiffWithAtMostTwoCoinbaseStableV2B -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite)] +#[derive( + Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, MallocSizeOf, +)] pub struct StagedLedgerDiffDiffPreDiffWithAtMostTwoCoinbaseStableV2 { pub completed_works: List, pub commands: List, + #[ignore_malloc_size_of = "primitive"] pub coinbase: StagedLedgerDiffDiffPreDiffWithAtMostTwoCoinbaseStableV2Coinbase, pub internal_command_statuses: List, } @@ -2998,10 +3326,13 @@ pub struct StagedLedgerDiffDiffPreDiffWithAtMostTwoCoinbaseStableV2 { /// Gid: `1029` /// Location: [src/lib/staged_ledger_diff/diff.ml:136:8](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/staged_ledger_diff/diff.ml#L136) /// Args: TransactionSnarkWorkTStableV2 , StagedLedgerDiffDiffPreDiffWithAtMostTwoCoinbaseStableV2B -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite)] +#[derive( + Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, MallocSizeOf, +)] pub struct StagedLedgerDiffDiffPreDiffWithAtMostOneCoinbaseStableV2 { pub completed_works: List, pub commands: List, + #[ignore_malloc_size_of = "primitive"] pub coinbase: StagedLedgerDiffDiffPreDiffWithAtMostOneCoinbaseStableV2Coinbase, pub internal_command_statuses: List, } @@ -3010,7 +3341,9 @@ pub struct StagedLedgerDiffDiffPreDiffWithAtMostOneCoinbaseStableV2 { /// /// Gid: `1032` /// Location: [src/lib/staged_ledger_diff/diff.ml:206:8](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/staged_ledger_diff/diff.ml#L206) -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite)] +#[derive( + Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, MallocSizeOf, +)] pub struct StagedLedgerDiffDiffDiffStableV2( pub StagedLedgerDiffDiffPreDiffWithAtMostTwoCoinbaseStableV2, pub Option, @@ -3020,7 +3353,9 @@ pub struct 
StagedLedgerDiffDiffDiffStableV2( /// /// Gid: `1033` /// Location: [src/lib/staged_ledger_diff/diff.ml:223:6](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/staged_ledger_diff/diff.ml#L223) -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite)] +#[derive( + Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, MallocSizeOf, +)] pub struct StagedLedgerDiffDiffStableV2 { pub diff: StagedLedgerDiffDiffDiffStableV2, } @@ -3029,7 +3364,9 @@ pub struct StagedLedgerDiffDiffStableV2 { /// /// Gid: `1034` /// Location: [src/lib/staged_ledger_diff/body.ml:18:6](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/staged_ledger_diff/body.ml#L18) -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite)] +#[derive( + Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, MallocSizeOf, +)] pub struct StagedLedgerDiffBodyStableV1 { pub staged_ledger_diff: StagedLedgerDiffDiffStableV2, } @@ -3206,6 +3543,15 @@ pub struct MinaBlockHeaderStableV2 { pub proposed_protocol_version_opt: Option, } +impl MallocSizeOf for MinaBlockHeaderStableV2 { + fn size_of(&self, ops: &mut malloc_size_of::MallocSizeOfOps) -> usize { + let factor = usize::from(!ops.have_seen_ptr(Arc::as_ptr(&self.protocol_state_proof))); + self.protocol_state_proof.size_of(ops) * factor + + self.protocol_state.size_of(ops) + + self.delta_block_chain_proof.1.size_of(ops) + } +} + /// Derived name: `Network_pool__Snark_pool.Diff_versioned.Stable.V2.Add_solved_work.1` /// /// Gid: `1121` diff --git a/mina-p2p-messages/src/v2/manual.rs b/mina-p2p-messages/src/v2/manual.rs index e3ef9601a0..d47ba006c7 100644 --- a/mina-p2p-messages/src/v2/manual.rs +++ b/mina-p2p-messages/src/v2/manual.rs @@ -4,6 +4,7 @@ use ark_ff::BigInteger256; use binprot::{BinProtRead, BinProtWrite}; use binprot_derive::{BinProtRead, BinProtWrite}; use derive_more::Deref; +use malloc_size_of_derive::MallocSizeOf; use 
poseidon::hash::params::NO_INPUT_COINBASE_STACK; use serde::{de::Visitor, ser::SerializeTuple, Deserialize, Serialize, Serializer}; use time::OffsetDateTime; @@ -12,6 +13,7 @@ use crate::{ b58::{self, Base58CheckOfBinProt, Base58CheckOfBytes}, b58version::USER_COMMAND_MEMO, bigint::BigInt, + list::List, number::Number, string::ByteString, versioned::Versioned, @@ -41,7 +43,7 @@ pub type TransactionSnarkScanStateStableV2TreesAMerge = ( /// /// Gid: `83` /// Location: [src/string.ml:44:6](https://github.com/MinaProtocol/mina/blob//bfd1009/src/string.ml#L44) -#[derive(Clone, Debug, PartialEq, BinProtRead, BinProtWrite, Deref)] +#[derive(Clone, Debug, PartialEq, BinProtRead, BinProtWrite, Deref, MallocSizeOf)] pub struct MinaBaseSignedCommandMemoStableV1(pub crate::string::CharString); impl MinaBaseSignedCommandMemoStableV1 { @@ -680,7 +682,17 @@ impl Default for TokenIdKeyHash { } #[derive( - Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize, BinProtRead, BinProtWrite, + Clone, + Debug, + PartialEq, + Eq, + PartialOrd, + Ord, + Serialize, + Deserialize, + BinProtRead, + BinProtWrite, + MallocSizeOf, )] pub struct NonZeroCurvePointWithVersions { x: Versioned, @@ -713,6 +725,26 @@ pub type NonZeroCurvePoint = Base58CheckOfBinProt< { crate::b58version::NON_ZERO_CURVE_POINT_COMPRESSED }, >; +#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite)] +pub enum ArchiveTransitionFronntierDiff { + BreadcrumbAdded { + block: (MinaBlockBlockStableV2, (Option, StateHash)), + accounts_accessed: List<(crate::number::UInt64, MinaBaseAccountBinableArgStableV2)>, + accounts_created: List<(MinaBaseAccountIdStableV2, CurrencyFeeStableV1)>, + tokens_used: List<(MinaBaseTokenIdStableV2, Option)>, + sender_receipt_chains_from_parent_ledger: + List<(MinaBaseAccountIdStableV2, MinaBaseReceiptChainHashStableV1)>, + }, + // TODO(adonagy): I think this is legacy stuff, doublecheck + RootTransitioned(()), + BoostrapOf(()), +} + +#[derive(Clone, 
Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite)] +pub enum ArchiveRpc { + SendDiff(ArchiveTransitionFronntierDiff), +} + #[cfg(test)] mod tests { use std::fmt::Debug; @@ -1396,7 +1428,9 @@ mod tests_sgn { /// Gid: `602` /// Location: [src/lib/currency/signed_poly.ml:6:4](https://github.com/Minaprotocol/mina/blob/b1facec/src/lib/currency/signed_poly.ml#L6) /// Args: CurrencyFeeStableV1 , SgnStableV1 -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite)] +#[derive( + Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite, MallocSizeOf, +)] pub struct SignedAmount { pub magnitude: CurrencyFeeStableV1, pub sgn: SgnStableV1, @@ -1565,9 +1599,21 @@ impl StagedLedgerDiffBodyStableV1 { self.commands_iter().map(|command| &command.data) } - // FIXME(tizoc): this is not correct, the coinbases are in the commands - // what this is returning is the coinbase fee transfers, which is not the same. - pub fn coinbases_iter(&self) -> impl Iterator { + pub fn tranasctions_with_status( + &self, + ) -> impl Iterator< + Item = ( + &MinaBaseUserCommandStableV2, + &MinaBaseTransactionStatusStableV2, + ), + > { + self.commands_iter() + .map(|command| (&command.data, &command.status)) + } + + pub fn coinbase_fee_transfers_iter( + &self, + ) -> impl Iterator { let diff = self.diff(); let mut coinbases = Vec::with_capacity(4); match &diff.0.coinbase { @@ -1616,10 +1662,22 @@ impl StagedLedgerDiffBodyStableV1 { .map_or(0, |d| d.completed_works.len()) } - pub fn coinbase_sum(&self) -> u64 { - // FIXME(#581): hardcoding 720 here, but this logic is not correct. 
- // This should be obtained from the `amount` in the coinbase transaction - 720000000000 // 720 mina in nanomina + pub fn has_coinbase(&self) -> bool { + let (first_pre_diff, second_pre_diff) = ( + self.diff().0.coinbase.clone(), + self.diff().1.as_ref().map_or( + StagedLedgerDiffDiffPreDiffWithAtMostOneCoinbaseStableV2Coinbase::Zero, + |v| v.coinbase.clone(), + ), + ); + + match (first_pre_diff, second_pre_diff) { + ( + StagedLedgerDiffDiffPreDiffWithAtMostTwoCoinbaseStableV2Coinbase::Zero, + StagedLedgerDiffDiffPreDiffWithAtMostOneCoinbaseStableV2Coinbase::Zero, + ) => false, + _ => true, + } } pub fn fees_sum(&self) -> u64 { @@ -1683,16 +1741,15 @@ impl std::str::FromStr for SgnStableV1 { mod test { use binprot::BinProtRead; - use crate::v2::{ - MinaBaseVerificationKeyWireStableV1, MinaBaseZkappCommandTStableV1WireStableV1, - }; + use crate::v2; #[test] fn test_zkapp_with_sig_auth_hash() { let expexcted = "AbliNXLg4Keq0ZJyxK/QNAx8SxrJeffYytk5lbcTF9s9Af0A4fUFAP2+oQMA48vntxcABLty3SXWjvuadrLtBjcsxT1oJ3C2hwS/LDh364LKUxrLe3uF/9lr8VlW/J+ctbiI+m9I61sb9BC/AAG5YjVy4OCnqtGScsSv0DQMfEsayXn32MrZOZW3ExfbPQEBAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEBAQEBAQEBAAEBAQEBAQH9AJQ1dwEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAEBAQEBAAAAAePL57cXAAS7ct0l1o77mnay7QY3LMU9aCdwtocEvyw4d+uCylMay3t7hf/Za/FZVvyfnLW4iPpvSOtbG/QQvwAAAcwXZjv4NJwWwlJhFZPh2AK+o0dKOpIy1a6CXlskW7gmAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQEBAQEBAQEAAQEBAQEBAf0AlDV3AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAQEBAQAAAAICAAAAACIBFlRlc3QgWktBcHAgdG8gUmVjZWl2ZXIAAAAAAAAAAAAA".to_string(); let bytes = include_bytes!("../../../tests/files/zkapps/with_sig_auth.bin"); let zkapp = - MinaBaseZkappCommandTStableV1WireStableV1::binprot_read(&mut bytes.as_slice()).unwrap(); + v2::MinaBaseZkappCommandTStableV1WireStableV1::binprot_read(&mut bytes.as_slice()) + .unwrap(); let zkapp_id = zkapp.to_base64().unwrap(); assert_eq!(expexcted, 
zkapp_id); @@ -1706,7 +1763,16 @@ mod test { let decoded = STANDARD.decode(verification_key_encoded).unwrap(); let verification_key = - MinaBaseVerificationKeyWireStableV1::binprot_read(&mut decoded.as_slice()); + v2::MinaBaseVerificationKeyWireStableV1::binprot_read(&mut decoded.as_slice()); assert!(verification_key.is_ok()); } + + #[test] + fn test_archive_breadcrumb_deserialization() { + let breadcrumb_bytes = include_bytes!("../../../tests/files/archive-breadcrumb/3NK56ZbCS31qb8SvCtCCYza4beRDtKgXA2JL6s3evKouG2KkKtiy.bin"); + let result = + v2::ArchiveTransitionFronntierDiff::binprot_read(&mut breadcrumb_bytes.as_slice()); + + assert!(result.is_ok()); + } } diff --git a/mina-p2p-messages/src/versioned.rs b/mina-p2p-messages/src/versioned.rs index 90b8dcdbc2..b811bde4d9 100644 --- a/mina-p2p-messages/src/versioned.rs +++ b/mina-p2p-messages/src/versioned.rs @@ -1,12 +1,13 @@ use std::{fmt::Debug, marker::PhantomData}; +use malloc_size_of_derive::MallocSizeOf; use serde::{ser::SerializeStruct, Deserialize, Serialize}; /// `Bin_prot` uses integer to represent type version. pub type Ver = u32; /// Wrapper for a type that adds explicit version information. 
-#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)] +#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, MallocSizeOf)] pub struct Versioned(T); impl Versioned { diff --git a/mina-p2p-messages/tests/utils.rs b/mina-p2p-messages/tests/utils.rs index 36cb6207b9..44aed8dd2b 100644 --- a/mina-p2p-messages/tests/utils.rs +++ b/mina-p2p-messages/tests/utils.rs @@ -34,7 +34,7 @@ where let dir = std::fs::read_dir(path)?; for file in dir { let path = file?.path(); - if path.extension().map_or(false, |ext| ext == "bin") { + if path.extension().is_some_and(|ext| ext == "bin") { println!("reading {path:?}..."); let contents = read_file(&path)?; f(path, &contents); @@ -52,7 +52,7 @@ where let dir = std::fs::read_dir(path)?; for file in dir { let path = file?.path(); - if path.extension().map_or(false, |ext| ext == "bin") { + if path.extension().is_some_and(|ext| ext == "bin") { println!("reading {path:?}..."); f(&read_file(&path)?, &path); } diff --git a/node/Cargo.toml b/node/Cargo.toml index 2f2851ee6d..07e3925bbb 100644 --- a/node/Cargo.toml +++ b/node/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "node" -version = "0.13.0" +version = "0.14.0" edition = "2021" license = "Apache-2.0" @@ -8,9 +8,15 @@ license = "Apache-2.0" workspace = true [dependencies] +base64 = "0.22" +blake2 = "0.10" +hex = "0.4" rand = "0.8.0" serde = "1.0.147" -serde_json = { version = "1.0.82", features = ["unbounded_depth", "arbitrary_precision"] } +serde_json = { version = "1.0.82", features = [ + "unbounded_depth", + "arbitrary_precision", +] } serde_with = { version = "3.6.1", features = ["time_0_3"] } strum = "0.26.2" strum_macros = "0.26.4" @@ -30,6 +36,9 @@ mina-p2p-messages = { workspace = true } vrf = { workspace = true } ark-ff = { workspace = true } +graphannis-malloc_size_of = { workspace = true } +graphannis-malloc_size_of_derive = { workspace = true } + openmina-core = { path = "../core" } snark = { path = "../snark" } p2p = { path = "../p2p" } @@ -41,16 +50,23 @@ 
static_assertions.workspace = true [build-dependencies] regex = "1" rust-format = "0.3" -vergen = { version = "8.2.4", features = ["build", "cargo", "git", "gitcl", "rustc"] } +vergen = { version = "8.2.4", features = [ + "build", + "cargo", + "git", + "gitcl", + "rustc", +] } [target.'cfg(not(target_family = "wasm"))'.dependencies] linkme = { workspace = true } -redux = { workspace = true, features=["serializable_callbacks"] } +redux = { workspace = true, features = ["serializable_callbacks"] } [target.'cfg(target_family = "wasm")'.dependencies] wasm-bindgen = "0.2" [features] +serializable_callbacks = [] replay = [] p2p-webrtc = ["p2p/p2p-webrtc"] p2p-libp2p = ["p2p/p2p-libp2p"] diff --git a/node/account/Cargo.toml b/node/account/Cargo.toml index 0786d28145..f135d8a0d3 100644 --- a/node/account/Cargo.toml +++ b/node/account/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "openmina-node-account" -version = "0.13.0" +version = "0.14.0" edition = "2021" license = "Apache-2.0" diff --git a/node/common/Cargo.toml b/node/common/Cargo.toml index 89087b327f..54a75e434a 100644 --- a/node/common/Cargo.toml +++ b/node/common/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "openmina-node-common" -version = "0.13.0" +version = "0.14.0" edition = "2021" license = "Apache-2.0" @@ -12,13 +12,15 @@ serde_json = "1.0.94" tracing = "0.1.37" rayon = "1.5" jsonpath-rust = "0.5.0" -tokio = { version = "1.26.0", features = ["time"] } +tokio = { version = "1.26.0", features = ["time", "macros"] } mina-p2p-messages = { workspace = true } mina-signer = { workspace = true } vrf = { workspace = true } ledger = { workspace = true } sha3 = "0.10.8" ark-ff = { workspace = true } +binprot = { git = "https://github.com/openmina/binprot-rs", rev = "400b52c" } +binprot_derive = { git = "https://github.com/openmina/binprot-rs", rev = "400b52c" } node = { path = "../../node", features = ["replay"] } openmina-core = { path = "../../core" } @@ -37,6 +39,8 @@ redux = { workspace = true, 
features=["serializable_callbacks"] } tracing-subscriber = { version = "0.3.17", features = ["json", "env-filter"] } tracing-appender = "0.2.3" libp2p-identity = { version = "=0.2.7", features = ["ed25519", "rand", "serde"] } +mio = { version = "1.0.2", features = ["os-poll", "net"] } +reqwest = { version = "0.12.8", features = ["blocking", "json"] } [features] p2p-webrtc = ["node/p2p-webrtc"] diff --git a/node/common/src/service/archive.rs b/node/common/src/service/archive.rs new file mode 100644 index 0000000000..8b10393304 --- /dev/null +++ b/node/common/src/service/archive.rs @@ -0,0 +1,512 @@ +use mina_p2p_messages::v2::{self, ArchiveTransitionFronntierDiff}; +use node::core::{channels::mpsc, thread}; +use std::net::SocketAddr; + +use super::NodeService; + +pub struct ArchiveService { + archive_sender: mpsc::UnboundedSender, +} + +const ARCHIVE_SEND_RETRIES: u8 = 5; +const MAX_EVENT_COUNT: u64 = 100; +const RETRY_INTERVAL_MS: u64 = 1000; + +impl ArchiveService { + fn new(archive_sender: mpsc::UnboundedSender) -> Self { + Self { archive_sender } + } + + #[cfg(not(target_arch = "wasm32"))] + fn run( + mut archive_receiver: mpsc::UnboundedReceiver, + address: SocketAddr, + ) { + while let Some(breadcrumb) = archive_receiver.blocking_recv() { + let mut retries = ARCHIVE_SEND_RETRIES; + while retries > 0 { + match rpc::send_diff(address, v2::ArchiveRpc::SendDiff(breadcrumb.clone())) { + Ok(result) => { + if result.should_retry() { + node::core::warn!( + summary = "Archive suddenly closed connection, retrying..." 
+ ); + retries -= 1; + std::thread::sleep(std::time::Duration::from_millis(RETRY_INTERVAL_MS)); + } else { + node::core::warn!(summary = "Successfully sent diff to archive"); + break; + } + } + Err(e) => { + node::core::warn!( + summary = "Failed sending diff to archive", + error = e.to_string(), + retries = retries + ); + retries -= 1; + std::thread::sleep(std::time::Duration::from_millis(RETRY_INTERVAL_MS)); + } + } + } + } + } + + // Note: Placeholder for the wasm implementation, if we decide to include an archive mode in the future + #[cfg(target_arch = "wasm32")] + fn run( + mut archive_receiver: mpsc::UnboundedReceiver, + address: SocketAddr, + ) { + unimplemented!() + } + + pub fn start(address: SocketAddr) -> Self { + let (archive_sender, archive_receiver) = + mpsc::unbounded_channel::(); + + thread::Builder::new() + .name("openmina_archive".to_owned()) + .spawn(move || { + Self::run(archive_receiver, address); + }) + .unwrap(); + + Self::new(archive_sender) + } +} + +impl node::transition_frontier::archive::archive_service::ArchiveService for NodeService { + fn send_to_archive(&mut self, data: ArchiveTransitionFronntierDiff) { + if let Some(archive) = self.archive.as_mut() { + if let Err(e) = archive.archive_sender.send(data.clone()) { + node::core::warn!( + summary = "Failed sending diff to archive service", + error = e.to_string() + ); + } + } + } +} + +// We need to replicate the ocaml node's RPC like interface +#[cfg(not(target_arch = "wasm32"))] +mod rpc { + use binprot::BinProtWrite; + use mina_p2p_messages::rpc_kernel::{Message, NeedsLength, Query, RpcMethod}; + use mina_p2p_messages::v2::{self, ArchiveRpc}; + use mio::event::Event; + use mio::net::TcpStream; + use mio::{Events, Interest, Poll, Registry, Token}; + use std::io::{self, Read, Write}; + use std::net::SocketAddr; + + const MAX_RECURSION_DEPTH: u8 = 25; + + // messages + const HEADER_MSG: [u8; 7] = [2, 253, 82, 80, 67, 0, 1]; + const OK_MSG: [u8; 5] = [2, 1, 0, 1, 0]; + // Note: this is 
the close message that the ocaml node receives + const CLOSE_MSG: [u8; 7] = [2, 254, 167, 7, 0, 1, 0]; + const HEARTBEAT_MSG: [u8; 1] = [0]; + + fn prepend_length(message: &[u8]) -> Vec { + let length = message.len() as u64; + let mut length_bytes = length.to_le_bytes().to_vec(); + length_bytes.append(&mut message.to_vec()); + length_bytes + } + pub enum HandleResult { + MessageSent, + ConnectionClosed, + ConnectionAlive, + MessageWouldBlock, + } + + impl HandleResult { + pub fn should_retry(&self) -> bool { + matches!(self, Self::ConnectionClosed) + } + } + + pub fn send_diff(address: SocketAddr, data: v2::ArchiveRpc) -> io::Result { + let rpc = encode_to_rpc(data)?; + process_rpc(address, &rpc) + } + + fn encode_to_rpc(data: ArchiveRpc) -> io::Result> { + type Method = mina_p2p_messages::rpc::SendArchiveDiffUnversioned; + let mut v = vec![0; 8]; + + if let Err(e) = Message::Query(Query { + tag: Method::NAME.into(), + version: Method::VERSION, + id: 1, + data: NeedsLength(data), + }) + .binprot_write(&mut v) + { + node::core::warn!( + summary = "Failed binprot serializastion", + error = e.to_string() + ); + return Err(e); + } + + let payload_length = (v.len() - 8) as u64; + v[..8].copy_from_slice(&payload_length.to_le_bytes()); + // Bake in the heartbeat message + v.splice(0..0, prepend_length(&HEARTBEAT_MSG).iter().cloned()); + // also add the heartbeat message to the end of the message + v.extend_from_slice(&prepend_length(&HEARTBEAT_MSG)); + + Ok(v) + } + + fn process_rpc(address: SocketAddr, data: &[u8]) -> io::Result { + let mut poll = Poll::new()?; + let mut events = Events::with_capacity(128); + let mut event_count = 0; + + // We still need a token even for one connection + const TOKEN: Token = Token(0); + + let mut stream = TcpStream::connect(address)?; + + let mut handshake_received = false; + let mut handshake_sent = false; + let mut message_sent = false; + let mut first_heartbeat_received = false; + poll.registry() + .register(&mut stream, TOKEN, 
Interest::WRITABLE)?; + + loop { + if let Err(e) = poll.poll(&mut events, None) { + if interrupted(&e) { + continue; + } + return Err(e); + } + + for event in events.iter() { + event_count += 1; + // Failsafe to prevent infinite loops + if event_count > super::MAX_EVENT_COUNT { + return Err(io::Error::new( + io::ErrorKind::Other, + format!("FAILSAFE triggered, event count: {}", event_count), + )); + } + match event.token() { + TOKEN => { + match handle_connection_event( + poll.registry(), + &mut stream, + event, + data, + &mut handshake_received, + &mut handshake_sent, + &mut message_sent, + &mut first_heartbeat_received, + )? { + HandleResult::MessageSent => return Ok(HandleResult::MessageSent), + HandleResult::ConnectionClosed => { + return Ok(HandleResult::ConnectionClosed) + } + HandleResult::MessageWouldBlock => { + // do nothing, wait for the next event + continue; + } + HandleResult::ConnectionAlive => { + // keep swapping between readable and writable until we successfully send the message, then keep in read mode. 
+ if message_sent { + poll.registry().reregister( + &mut stream, + TOKEN, + Interest::READABLE, + )?; + continue; + } + + if event.is_writable() { + poll.registry().reregister( + &mut stream, + TOKEN, + Interest::READABLE, + )?; + } else { + poll.registry().reregister( + &mut stream, + TOKEN, + Interest::WRITABLE, + )?; + } + continue; + } + } + } + _ => unreachable!(), + } + } + } + } + + fn _send_heartbeat(connection: &mut TcpStream) -> io::Result { + match connection.write_all(&HEARTBEAT_MSG) { + Ok(_) => { + connection.flush()?; + Ok(HandleResult::ConnectionAlive) + } + Err(ref err) if would_block(err) => Ok(HandleResult::MessageWouldBlock), + Err(ref err) if interrupted(err) => Ok(HandleResult::MessageWouldBlock), + Err(err) => Err(err), + } + } + + struct RecursionGuard { + count: u8, + max_depth: u8, + } + + impl RecursionGuard { + fn new(max_depth: u8) -> Self { + Self { + count: 0, + max_depth, + } + } + + fn increment(&mut self) -> io::Result<()> { + self.count += 1; + if self.count > self.max_depth { + Err(io::ErrorKind::WriteZero.into()) + } else { + Ok(()) + } + } + } + + fn send_data( + connection: &mut TcpStream, + data: &[u8], + recursion_guard: &mut RecursionGuard, + // closure that can be called when the data is sent + on_success: F, + ) -> io::Result + where + F: FnOnce() -> io::Result, + { + match connection.write(data) { + Ok(n) if n < data.len() => { + recursion_guard.increment()?; + let remaining_data = data[n..].to_vec(); + send_data(connection, &remaining_data, recursion_guard, on_success) + } + Ok(_) => { + connection.flush()?; + on_success() + } + Err(ref err) if would_block(err) => Ok(HandleResult::MessageWouldBlock), + Err(ref err) if interrupted(err) => { + recursion_guard + .increment() + .map_err(|_| io::ErrorKind::Interrupted)?; + send_data(connection, data, recursion_guard, on_success) + } + Err(err) => Err(err), + } + } + + #[allow(clippy::too_many_arguments)] + fn handle_connection_event( + registry: &Registry, + connection: &mut 
TcpStream, + event: &Event, + data: &[u8], + handshake_received: &mut bool, + handshake_sent: &mut bool, + message_sent: &mut bool, + first_heartbeat_received: &mut bool, + ) -> io::Result { + if event.is_writable() { + if !*handshake_sent { + let msg = prepend_length(&HEADER_MSG); + send_data( + connection, + &msg, + &mut RecursionGuard::new(MAX_RECURSION_DEPTH), + || { + *handshake_sent = true; + Ok(HandleResult::ConnectionAlive) + }, + )?; + return Ok(HandleResult::ConnectionAlive); + } + + if *handshake_received && *handshake_sent && !*message_sent && *first_heartbeat_received + { + send_data( + connection, + data, + &mut RecursionGuard::new(MAX_RECURSION_DEPTH), + || { + *message_sent = true; + Ok(HandleResult::ConnectionAlive) + }, + )?; + } + } + + if event.is_readable() { + let mut connection_closed = false; + let mut received_data = vec![0; 4096]; + let mut bytes_read = 0; + + loop { + match connection.read(&mut received_data[bytes_read..]) { + Ok(0) => { + connection_closed = true; + break; + } + Ok(n) => { + bytes_read += n; + if bytes_read == received_data.len() { + received_data.resize(received_data.len() + 1024, 0); + } + } + // Would block "errors" are the OS's way of saying that the + // connection is not actually ready to perform this I/O operation. + Err(ref err) if would_block(err) => break, + Err(ref err) if interrupted(err) => continue, + // Other errors we'll consider fatal. 
+ Err(err) => return Err(err), + } + } + + if connection_closed { + registry.deregister(connection)?; + connection.shutdown(std::net::Shutdown::Both)?; + return Ok(HandleResult::ConnectionClosed); + } + + if bytes_read < 8 { + // malformed message, at least the length should be present + return Ok(HandleResult::ConnectionAlive); + } + + let raw_message = RawMessage::from_bytes(&received_data[..bytes_read]); + let messages = raw_message.parse_raw()?; + + for message in messages { + match message { + ParsedMessage::Header => { + *handshake_received = true; + } + ParsedMessage::Ok | ParsedMessage::Close => { + connection.flush()?; + registry.deregister(connection)?; + connection.shutdown(std::net::Shutdown::Both)?; + return Ok(HandleResult::MessageSent); + } + ParsedMessage::Heartbeat => { + *first_heartbeat_received = true; + } + ParsedMessage::Unknown(msg) => { + registry.deregister(connection)?; + connection.shutdown(std::net::Shutdown::Both)?; + node::core::warn!( + summary = "Received unknown message", + msg = format!("{:?}", msg) + ); + return Ok(HandleResult::ConnectionClosed); + } + } + } + } + + Ok(HandleResult::ConnectionAlive) + } + + fn would_block(err: &io::Error) -> bool { + err.kind() == io::ErrorKind::WouldBlock + } + + fn interrupted(err: &io::Error) -> bool { + err.kind() == io::ErrorKind::Interrupted + } + + enum ParsedMessage { + Heartbeat, + Ok, + Close, + Header, + Unknown(Vec), + } + + struct RawMessage { + length: usize, + data: Vec, + } + + impl RawMessage { + fn from_bytes(bytes: &[u8]) -> Self { + Self { + length: bytes.len(), + data: bytes.to_vec(), + } + } + + fn parse_raw(&self) -> io::Result> { + let mut parsed_bytes: usize = 0; + + // more than one message can be sent in a single packet + let mut messages = Vec::new(); + + while parsed_bytes < self.length { + // first 8 bytes are the length in little endian + let length = u64::from_le_bytes( + self.data[parsed_bytes..parsed_bytes + 8] + .try_into() + .unwrap(), + ) as usize; + 
parsed_bytes += 8; + + if parsed_bytes + length > self.length { + return Err(io::Error::new( + io::ErrorKind::InvalidData, + "Message length exceeds raw message length", + )); + } + + if length == HEADER_MSG.len() + && self.data[parsed_bytes..parsed_bytes + length] == HEADER_MSG + { + messages.push(ParsedMessage::Header); + } else if length == OK_MSG.len() + && self.data[parsed_bytes..parsed_bytes + length] == OK_MSG + { + messages.push(ParsedMessage::Ok); + } else if length == HEARTBEAT_MSG.len() + && self.data[parsed_bytes..parsed_bytes + length] == HEARTBEAT_MSG + { + messages.push(ParsedMessage::Heartbeat); + } else if length == CLOSE_MSG.len() + && self.data[parsed_bytes..parsed_bytes + length] == CLOSE_MSG + { + messages.push(ParsedMessage::Close); + } else { + messages.push(ParsedMessage::Unknown( + self.data[parsed_bytes..parsed_bytes + length].to_vec(), + )); + } + + parsed_bytes += length; + } + Ok(messages) + } + } +} + +// Note: Placeholder for the wasm implementation, if we decide to include an archive mode in the future +#[cfg(target_arch = "wasm32")] +mod rpc {} diff --git a/node/common/src/service/block_producer/mod.rs b/node/common/src/service/block_producer/mod.rs index 4c51d5b7be..2212f1efd9 100644 --- a/node/common/src/service/block_producer/mod.rs +++ b/node/common/src/service/block_producer/mod.rs @@ -122,7 +122,7 @@ pub fn prove( || constraint_constants() .fork .as_ref() - .map_or(false, |fork| fork.blockchain_length + 1 == height); + .is_some_and(|fork| fork.blockchain_length + 1 == height); if !is_genesis { input.prover_state.producer_private_key = keypair.into(); } @@ -162,6 +162,10 @@ impl node::service::BlockProducerService for crate::NodeService { .prove_sender .send((provers, block_hash, input)); } + + fn with_producer_keypair(&self, f: impl FnOnce(&AccountSecretKey) -> T) -> Option { + Some(f(&self.block_producer.as_ref()?.keypair)) + } } fn dump_failed_block_proof_input( diff --git a/node/common/src/service/builder.rs 
b/node/common/src/service/builder.rs index 734f2c05b7..6c2015586a 100644 --- a/node/common/src/service/builder.rs +++ b/node/common/src/service/builder.rs @@ -1,3 +1,5 @@ +use std::net::SocketAddr; + use ledger::proofs::provers::BlockProver; use node::{ account::AccountSecretKey, @@ -23,7 +25,7 @@ use crate::{ EventReceiver, EventSender, NodeService, }; -use super::block_producer::BlockProducerService; +use super::{archive::ArchiveService, block_producer::BlockProducerService}; pub struct NodeServiceCommonBuilder { rng_seed: [u8; 32], @@ -34,6 +36,7 @@ pub struct NodeServiceCommonBuilder { event_receiver: EventReceiver, ledger_manager: Option, block_producer: Option, + archive: Option, p2p: Option, gather_stats: bool, rpc: RpcService, @@ -57,6 +60,7 @@ impl NodeServiceCommonBuilder { event_receiver: event_receiver.into(), ledger_manager: None, block_producer: None, + archive: None, p2p: None, rpc: RpcService::new(), gather_stats: false, @@ -74,6 +78,9 @@ impl NodeServiceCommonBuilder { pub fn ledger_init(&mut self) -> &mut Self { let mut ctx = LedgerCtx::default(); ctx.set_event_sender(self.event_sender.clone()); + if self.archive.is_some() { + ctx.set_archive_mode(); + }; self.ledger_manager = Some(LedgerManager::spawn(ctx)); self } @@ -91,6 +98,11 @@ impl NodeServiceCommonBuilder { self } + pub fn archive_init(&mut self, address: SocketAddr) -> &mut Self { + self.archive = Some(ArchiveService::start(address)); + self + } + pub fn p2p_init( &mut self, secret_key: P2pSecretKey, @@ -132,6 +144,7 @@ impl NodeServiceCommonBuilder { ), ledger_manager, block_producer: self.block_producer, + archive: self.archive, p2p, stats: self.gather_stats.then(Stats::new), rpc: self.rpc, diff --git a/node/common/src/service/mod.rs b/node/common/src/service/mod.rs index 6211bb841f..a5981adcab 100644 --- a/node/common/src/service/mod.rs +++ b/node/common/src/service/mod.rs @@ -1,6 +1,7 @@ mod event_receiver; pub use event_receiver::*; +pub mod archive; pub mod block_producer; pub mod 
p2p; pub mod record; diff --git a/node/common/src/service/p2p.rs b/node/common/src/service/p2p.rs index 50f0df676f..53e696872b 100644 --- a/node/common/src/service/p2p.rs +++ b/node/common/src/service/p2p.rs @@ -75,11 +75,7 @@ impl webrtc::P2pServiceWebrtc for NodeService { auth: ConnectionAuth, ) { let encrypted = auth.encrypt(&self.p2p.sec_key, other_pub_key, &mut self.rng); - if let Some(peer) = self.peers().get(&peer_id) { - let _ = peer - .cmd_sender - .send(webrtc::PeerCmd::ConnectionAuthorizationSend(encrypted)); - } + Self::auth_send(self, peer_id, other_pub_key, encrypted); } fn auth_decrypt( @@ -123,28 +119,23 @@ impl P2pCryptoService for NodeService { fn sign_key(&mut self, key: &[u8; 32]) -> Vec { // TODO: make deterministic let msg = [b"noise-libp2p-static-key:", key.as_slice()].concat(); - let sig = self - .p2p - .mio - .keypair() - .sign(&msg) - .expect("unable to create signature"); + let sig = self.p2p.sec_key.sign(&msg); + let libp2p_sec_key = libp2p_identity::Keypair::try_from(self.p2p.sec_key.clone()).unwrap(); let mut payload = vec![]; payload.extend_from_slice(b"\x0a\x24"); - payload.extend_from_slice(&self.p2p.mio.keypair().public().encode_protobuf()); + payload.extend_from_slice(&libp2p_sec_key.public().encode_protobuf()); payload.extend_from_slice(b"\x12\x40"); - payload.extend_from_slice(&sig); + payload.extend_from_slice(&sig.to_bytes()); payload } fn sign_publication(&mut self, publication: &[u8]) -> Vec { - let msg = [b"libp2p-pubsub:", publication].concat(); self.p2p - .mio - .keypair() - .sign(&msg) - .expect("unable to create signature") + .sec_key + .libp2p_pubsub_sign(publication) + .to_bytes() + .to_vec() } fn verify_publication( diff --git a/node/common/src/service/rpc/mod.rs b/node/common/src/service/rpc/mod.rs index 5063b76ced..8d0d906e44 100644 --- a/node/common/src/service/rpc/mod.rs +++ b/node/common/src/service/rpc/mod.rs @@ -10,10 +10,11 @@ pub mod transition_frontier; use node::rpc::{ RpcBestChainResponse, 
RpcBlockProducerStatsGetResponse, RpcConsensusConstantsGetResponse, RpcDiscoveryBoostrapStatsResponse, RpcDiscoveryRoutingTableResponse, RpcHealthCheckResponse, - RpcLedgerAccountsResponse, RpcLedgerSlimAccountsResponse, RpcMessageProgressResponse, - RpcPeersGetResponse, RpcReadinessCheckResponse, RpcRequest, RpcStateGetError, - RpcStatusGetResponse, RpcTransactionInjectResponse, RpcTransactionPoolResponse, - RpcTransactionStatusGetResponse, RpcTransitionFrontierUserCommandsResponse, + RpcHeartbeatGetResponse, RpcLedgerAccountsResponse, RpcLedgerSlimAccountsResponse, + RpcMessageProgressResponse, RpcPeersGetResponse, RpcReadinessCheckResponse, RpcRequest, + RpcStateGetError, RpcStatusGetResponse, RpcTransactionInjectResponse, + RpcTransactionPoolResponse, RpcTransactionStatusGetResponse, + RpcTransitionFrontierUserCommandsResponse, }; use serde::{Deserialize, Serialize}; @@ -167,7 +168,6 @@ fn optimize_filtered_state( config | p2p | snark - | consensus | transition_frontier | snark_pool | external_snark_worker @@ -216,6 +216,8 @@ impl node::rpc_effectful::RpcService for NodeService { } rpc_service_impl!(respond_status_get, RpcStatusGetResponse); + rpc_service_impl!(respond_heartbeat_get, RpcHeartbeatGetResponse); + rpc_service_impl!(respond_sync_stats_get, RpcSyncStatsGetResponse); rpc_service_impl!(respond_action_stats_get, RpcActionStatsGetResponse); rpc_service_impl!( diff --git a/node/common/src/service/rpc/sender.rs b/node/common/src/service/rpc/sender.rs index d0357338d6..14cccf912f 100644 --- a/node/common/src/service/rpc/sender.rs +++ b/node/common/src/service/rpc/sender.rs @@ -103,4 +103,12 @@ impl RpcSender { .flatten(); JsValue::from_serde(&res).unwrap_or_default() } + + pub async fn make_heartbeat(&self) -> JsValue { + let res = self + .oneshot_request::(RpcRequest::HeartbeatGet) + .await + .flatten(); + JsValue::from_serde(&res).unwrap_or_default() + } } diff --git a/node/common/src/service/service.rs b/node/common/src/service/service.rs index 
79265d0c4a..8cfd81cf75 100644 --- a/node/common/src/service/service.rs +++ b/node/common/src/service/service.rs @@ -18,6 +18,7 @@ use sha3::{ use crate::rpc::RpcReceiver; use super::{ + archive::ArchiveService, block_producer::BlockProducerService, p2p::webrtc_with_libp2p::P2pServiceCtx, replay::ReplayerState, @@ -41,6 +42,7 @@ pub struct NodeService { pub ledger_manager: LedgerManager, pub block_producer: Option, + pub archive: Option, pub p2p: P2pServiceCtx, pub stats: Option, @@ -79,6 +81,10 @@ impl NodeService { self.block_producer.as_ref() } + pub fn archive(&self) -> Option<&ArchiveService> { + self.archive.as_ref() + } + pub fn stats(&mut self) -> Option<&mut Stats> { self.stats.as_mut() } @@ -111,6 +117,7 @@ impl NodeService { snark_block_proof_verify: mpsc::unbounded_channel().0, ledger_manager: LedgerManager::spawn(Default::default()), block_producer: None, + archive: None, p2p: P2pServiceCtx::mocked(p2p_sec_key), stats: Some(Stats::new()), rpc: RpcService::new(), diff --git a/node/invariants/Cargo.toml b/node/invariants/Cargo.toml index 3b4160dbf2..0710b4df74 100644 --- a/node/invariants/Cargo.toml +++ b/node/invariants/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "openmina-node-invariants" -version = "0.13.0" +version = "0.14.0" edition = "2021" license = "Apache-2.0" diff --git a/node/native/Cargo.toml b/node/native/Cargo.toml index 16a41d6845..932584918e 100644 --- a/node/native/Cargo.toml +++ b/node/native/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "openmina-node-native" -version = "0.13.0" +version = "0.14.0" edition = "2021" license = "Apache-2.0" @@ -38,6 +38,9 @@ openmina-core = { path = "../../core" } openmina-node-common = { path = "../common" } node = { path = "../../node", features = ["replay"] } +[dev-dependencies] +openmina-producer-dashboard = { workspace = true } + [features] default = ["p2p-libp2p"] p2p-webrtc = ["openmina-node-common/p2p-webrtc"] diff --git a/node/native/src/http_server.rs b/node/native/src/http_server.rs index 
05dc4b675c..c2daf04323 100644 --- a/node/native/src/http_server.rs +++ b/node/native/src/http_server.rs @@ -166,6 +166,21 @@ pub async fn run(port: u16, rpc_sender: RpcSender) { } }); + let rpc_sender_clone = rpc_sender.clone(); + let make_heartbeat = warp::path!("make_heartbeat") + .and(warp::post()) + .then(move || { + let rpc_sender_clone = rpc_sender_clone.clone(); + async move { + let result: RpcHeartbeatGetResponse = rpc_sender_clone + .oneshot_request(RpcRequest::HeartbeatGet) + .await + .flatten(); + + with_json_reply(&result, StatusCode::OK) + } + }); + let rpc_sender_clone = rpc_sender.clone(); let peers_get = warp::path!("state" / "peers") .and(warp::get()) @@ -571,6 +586,7 @@ pub async fn run(port: u16, rpc_sender: RpcSender) { build_env_get, routes, status, + make_heartbeat, peers_get, message_progress_get, stats, diff --git a/node/native/src/node/builder.rs b/node/native/src/node/builder.rs index bdfa4d6a94..3715470d82 100644 --- a/node/native/src/node/builder.rs +++ b/node/native/src/node/builder.rs @@ -1,7 +1,7 @@ use std::{ fs::File, io::{BufRead, BufReader, Read}, - net::IpAddr, + net::{IpAddr, SocketAddr}, path::Path, sync::Arc, time::Duration, @@ -19,7 +19,7 @@ use node::{ }, service::Recorder, snark::{get_srs, BlockVerifier, TransactionVerifier, VerifierSRS}, - transition_frontier::genesis::GenesisConfig, + transition_frontier::{archive::archive_config::ArchiveConfig, genesis::GenesisConfig}, BlockProducerConfig, GlobalConfig, LedgerConfig, P2pConfig, SnarkConfig, SnarkerConfig, SnarkerStrategy, TransitionFrontierConfig, }; @@ -40,6 +40,7 @@ pub struct NodeBuilder { p2p_is_seed: bool, p2p_is_started: bool, block_producer: Option, + archive: Option, snarker: Option, service: NodeServiceBuilder, verifier_srs: Option>, @@ -86,6 +87,7 @@ impl NodeBuilder { p2p_is_seed: false, p2p_is_started: false, block_producer: None, + archive: None, snarker: None, service: NodeServiceBuilder::new(rng_seed), verifier_srs: None, @@ -181,7 +183,7 @@ impl 
NodeBuilder { &mut self, spawner: impl TaskSpawner, ) -> anyhow::Result<&mut Self> { - let sec_key = self.p2p_sec_key.clone().ok_or_else(|| anyhow::anyhow!("before calling `with_p2p_custom_task_spawner` method, p2p secret key needs to be set with `with_p2p_sec_key`."))?; + let sec_key: P2pSecretKey = self.p2p_sec_key.clone().ok_or_else(|| anyhow::anyhow!("before calling `with_p2p_custom_task_spawner` method, p2p secret key needs to be set with `with_p2p_sec_key`."))?; self.service .p2p_init_with_custom_task_spawner(sec_key, spawner); self.p2p_is_started = true; @@ -216,6 +218,12 @@ impl NodeBuilder { Ok(self.block_producer(key, provers)) } + pub fn archive(&mut self, address: SocketAddr) -> &mut Self { + self.archive = Some(ArchiveConfig::new(&address.to_string())); + self.service.archive_init(address); + self + } + /// Receive block producer's coinbase reward to another account. pub fn custom_coinbase_receiver( &mut self, @@ -348,6 +356,7 @@ impl NodeBuilder { }, transition_frontier: TransitionFrontierConfig::new(self.genesis_config), block_producer: self.block_producer, + archive: self.archive, tx_pool: ledger::transaction_pool::Config { trust_system: (), pool_max_size: self.daemon_conf.tx_pool_max_size(), diff --git a/node/native/src/service/builder.rs b/node/native/src/service/builder.rs index abb4d156ec..5f58d095a2 100644 --- a/node/native/src/service/builder.rs +++ b/node/native/src/service/builder.rs @@ -1,3 +1,5 @@ +use std::net::SocketAddr; + use ledger::proofs::provers::BlockProver; use node::{ account::AccountSecretKey, core::thread, p2p::identity::SecretKey as P2pSecretKey, @@ -53,6 +55,11 @@ impl NodeServiceBuilder { self } + pub fn archive_init(&mut self, address: SocketAddr) -> &mut Self { + self.common.archive_init(address); + self + } + pub fn p2p_init(&mut self, secret_key: P2pSecretKey) -> &mut Self { self.common.p2p_init(secret_key, P2pTaskSpawner {}); self diff --git a/node/src/action.rs b/node/src/action.rs index 3793d97058..fd85ea8138 100644 
--- a/node/src/action.rs +++ b/node/src/action.rs @@ -6,7 +6,6 @@ pub type ActionWithMetaRef<'a> = redux::ActionWithMeta<&'a Action>; pub use crate::block_producer::BlockProducerAction; pub use crate::block_producer_effectful::BlockProducerEffectfulAction; -pub use crate::consensus::ConsensusAction; pub use crate::event_source::EventSourceAction; pub use crate::external_snark_worker::ExternalSnarkWorkerAction; use crate::external_snark_worker_effectful::ExternalSnarkWorkerEffectfulAction; @@ -44,7 +43,6 @@ pub enum Action { Ledger(LedgerAction), LedgerEffects(LedgerEffectfulAction), Snark(SnarkAction), - Consensus(ConsensusAction), TransitionFrontier(TransitionFrontierAction), SnarkPool(SnarkPoolAction), SnarkPoolEffect(SnarkPoolEffectfulAction), @@ -84,16 +82,15 @@ impl redux::EnablingCondition for Action { other => state .p2p .ready() - .map_or(false, |p2p| other.is_enabled(p2p, time)), + .is_some_and(|p2p| other.is_enabled(p2p, time)), }, Action::P2pEffectful(a) => state .p2p .ready() - .map_or(false, |state| a.is_enabled(state, time)), + .is_some_and(|state| a.is_enabled(state, time)), Action::Ledger(a) => a.is_enabled(state, time), Action::LedgerEffects(a) => a.is_enabled(state, time), Action::Snark(a) => a.is_enabled(&state.snark, time), - Action::Consensus(a) => a.is_enabled(state, time), Action::TransitionFrontier(a) => a.is_enabled(state, time), Action::SnarkPool(a) => a.is_enabled(state, time), Action::SnarkPoolEffect(a) => a.is_enabled(state, time), diff --git a/node/src/action_kind.rs b/node/src/action_kind.rs index 347ea5078e..6a44f7bb37 100644 --- a/node/src/action_kind.rs +++ b/node/src/action_kind.rs @@ -19,7 +19,6 @@ use crate::block_producer::vrf_evaluator::BlockProducerVrfEvaluatorAction; use crate::block_producer::BlockProducerAction; use crate::block_producer_effectful::vrf_evaluator_effectful::BlockProducerVrfEvaluatorEffectfulAction; use crate::block_producer_effectful::BlockProducerEffectfulAction; -use crate::consensus::ConsensusAction; use 
crate::event_source::EventSourceAction; use crate::external_snark_worker::ExternalSnarkWorkerAction; use crate::external_snark_worker_effectful::ExternalSnarkWorkerEffectfulAction; @@ -81,6 +80,7 @@ use crate::snark_pool::candidate::SnarkPoolCandidateAction; use crate::snark_pool::{SnarkPoolAction, SnarkPoolEffectfulAction}; use crate::transaction_pool::candidate::TransactionPoolCandidateAction; use crate::transaction_pool::{TransactionPoolAction, TransactionPoolEffectfulAction}; +use crate::transition_frontier::candidate::TransitionFrontierCandidateAction; use crate::transition_frontier::genesis::TransitionFrontierGenesisAction; use crate::transition_frontier::genesis_effectful::TransitionFrontierGenesisEffectfulAction; use crate::transition_frontier::sync::ledger::snarked::TransitionFrontierSyncLedgerSnarkedAction; @@ -126,6 +126,7 @@ pub enum ActionKind { BlockProducerWonSlotTransactionsGet, BlockProducerWonSlotTransactionsSuccess, BlockProducerWonSlotWait, + BlockProducerEffectfulBlockProduced, BlockProducerEffectfulBlockProveInit, BlockProducerEffectfulBlockProveSuccess, BlockProducerEffectfulBlockUnprovenBuild, @@ -153,20 +154,6 @@ pub enum ActionKind { BlockProducerVrfEvaluatorEffectfulInitializeStats, BlockProducerVrfEvaluatorEffectfulSlotEvaluated, CheckTimeouts, - ConsensusBestTipUpdate, - ConsensusBlockChainProofUpdate, - ConsensusBlockPrevalidateError, - ConsensusBlockPrevalidateSuccess, - ConsensusBlockReceived, - ConsensusBlockSnarkVerifyError, - ConsensusBlockSnarkVerifyPending, - ConsensusBlockSnarkVerifySuccess, - ConsensusDetectForkRange, - ConsensusLongRangeForkResolve, - ConsensusP2pBestTipUpdate, - ConsensusPrune, - ConsensusShortRangeForkResolve, - ConsensusTransitionFrontierSyncTargetUpdate, EventSourceNewEvent, EventSourceProcessEvents, EventSourceWaitForEvents, @@ -206,6 +193,7 @@ pub enum ActionKind { P2pCallbacksP2pChannelsStreamingRpcResponseReceived, P2pCallbacksP2pChannelsStreamingRpcTimeout, P2pCallbacksP2pDisconnection, + 
P2pCallbacksP2pPubsubValidateMessage, P2pCallbacksRpcRespondBestTip, P2pChannelsBestTipInit, P2pChannelsBestTipPending, @@ -404,7 +392,10 @@ pub enum ActionKind { P2pNetworkPnetEffectfulSetupNonce, P2pNetworkPubsubBroadcast, P2pNetworkPubsubBroadcastSigned, + P2pNetworkPubsubBroadcastValidatedMessage, P2pNetworkPubsubGraft, + P2pNetworkPubsubHandleIncomingMessage, + P2pNetworkPubsubIgnoreMessage, P2pNetworkPubsubIncomingData, P2pNetworkPubsubIncomingMessage, P2pNetworkPubsubIncomingMessageCleanup, @@ -414,9 +405,13 @@ pub enum ActionKind { P2pNetworkPubsubOutgoingMessageClear, P2pNetworkPubsubOutgoingMessageError, P2pNetworkPubsubPrune, + P2pNetworkPubsubPruneMessages, + P2pNetworkPubsubRejectMessage, P2pNetworkPubsubSign, P2pNetworkPubsubSignError, + P2pNetworkPubsubValidateIncomingMessage, P2pNetworkPubsubValidateIncomingMessages, + P2pNetworkPubsubWebRtcRebroadcast, P2pNetworkPubsubEffectfulSign, P2pNetworkPubsubEffectfulValidateIncomingMessages, P2pNetworkRpcHeartbeatSend, @@ -482,6 +477,7 @@ pub enum ActionKind { RpcFinish, RpcGlobalStateGet, RpcHealthCheck, + RpcHeartbeatGet, RpcLedgerAccountsGetInit, RpcLedgerAccountsGetPending, RpcLedgerAccountsGetSuccess, @@ -526,6 +522,7 @@ pub enum ActionKind { RpcEffectfulDiscoveryRoutingTable, RpcEffectfulGlobalStateGet, RpcEffectfulHealthCheck, + RpcEffectfulHeartbeatGet, RpcEffectfulLedgerAccountsGetSuccess, RpcEffectfulMessageProgressGet, RpcEffectfulP2pConnectionIncomingError, @@ -620,6 +617,20 @@ pub enum ActionKind { TransitionFrontierGenesisProvenInject, TransitionFrontierSyncFailed, TransitionFrontierSynced, + TransitionFrontierCandidateBestTipUpdate, + TransitionFrontierCandidateBlockChainProofUpdate, + TransitionFrontierCandidateBlockPrevalidateError, + TransitionFrontierCandidateBlockPrevalidateSuccess, + TransitionFrontierCandidateBlockReceived, + TransitionFrontierCandidateBlockSnarkVerifyError, + TransitionFrontierCandidateBlockSnarkVerifyPending, + TransitionFrontierCandidateBlockSnarkVerifySuccess, + 
TransitionFrontierCandidateDetectForkRange, + TransitionFrontierCandidateLongRangeForkResolve, + TransitionFrontierCandidateP2pBestTipUpdate, + TransitionFrontierCandidatePrune, + TransitionFrontierCandidateShortRangeForkResolve, + TransitionFrontierCandidateTransitionFrontierSyncTargetUpdate, TransitionFrontierGenesisLedgerLoadInit, TransitionFrontierGenesisLedgerLoadPending, TransitionFrontierGenesisLedgerLoadSuccess, @@ -642,6 +653,7 @@ pub enum ActionKind { TransitionFrontierSyncBlocksPeerQuerySuccess, TransitionFrontierSyncBlocksPeersQuery, TransitionFrontierSyncBlocksPending, + TransitionFrontierSyncBlocksSendToArchive, TransitionFrontierSyncBlocksSuccess, TransitionFrontierSyncCommitInit, TransitionFrontierSyncCommitPending, @@ -707,7 +719,7 @@ pub enum ActionKind { } impl ActionKind { - pub const COUNT: u16 = 597; + pub const COUNT: u16 = 609; } impl std::fmt::Display for ActionKind { @@ -727,7 +739,6 @@ impl ActionKindGet for Action { Self::Ledger(a) => a.kind(), Self::LedgerEffects(a) => a.kind(), Self::Snark(a) => a.kind(), - Self::Consensus(a) => a.kind(), Self::TransitionFrontier(a) => a.kind(), Self::SnarkPool(a) => a.kind(), Self::SnarkPoolEffect(a) => a.kind(), @@ -809,6 +820,9 @@ impl ActionKindGet for P2pCallbacksAction { } Self::P2pDisconnection { .. } => ActionKind::P2pCallbacksP2pDisconnection, Self::RpcRespondBestTip { .. } => ActionKind::P2pCallbacksRpcRespondBestTip, + Self::P2pPubsubValidateMessage { .. } => { + ActionKind::P2pCallbacksP2pPubsubValidateMessage + } } } } @@ -844,34 +858,12 @@ impl ActionKindGet for SnarkAction { } } -impl ActionKindGet for ConsensusAction { - fn kind(&self) -> ActionKind { - match self { - Self::BlockReceived { .. } => ActionKind::ConsensusBlockReceived, - Self::BlockPrevalidateSuccess { .. } => ActionKind::ConsensusBlockPrevalidateSuccess, - Self::BlockPrevalidateError { .. } => ActionKind::ConsensusBlockPrevalidateError, - Self::BlockChainProofUpdate { .. 
} => ActionKind::ConsensusBlockChainProofUpdate, - Self::BlockSnarkVerifyPending { .. } => ActionKind::ConsensusBlockSnarkVerifyPending, - Self::BlockSnarkVerifySuccess { .. } => ActionKind::ConsensusBlockSnarkVerifySuccess, - Self::BlockSnarkVerifyError { .. } => ActionKind::ConsensusBlockSnarkVerifyError, - Self::DetectForkRange { .. } => ActionKind::ConsensusDetectForkRange, - Self::ShortRangeForkResolve { .. } => ActionKind::ConsensusShortRangeForkResolve, - Self::LongRangeForkResolve { .. } => ActionKind::ConsensusLongRangeForkResolve, - Self::BestTipUpdate { .. } => ActionKind::ConsensusBestTipUpdate, - Self::TransitionFrontierSyncTargetUpdate => { - ActionKind::ConsensusTransitionFrontierSyncTargetUpdate - } - Self::P2pBestTipUpdate { .. } => ActionKind::ConsensusP2pBestTipUpdate, - Self::Prune => ActionKind::ConsensusPrune, - } - } -} - impl ActionKindGet for TransitionFrontierAction { fn kind(&self) -> ActionKind { match self { Self::Genesis(a) => a.kind(), Self::GenesisEffect(a) => a.kind(), + Self::Candidate(a) => a.kind(), Self::Sync(a) => a.kind(), Self::GenesisInject => ActionKind::TransitionFrontierGenesisInject, Self::GenesisProvenInject => ActionKind::TransitionFrontierGenesisProvenInject, @@ -1027,6 +1019,7 @@ impl ActionKindGet for BlockProducerEffectfulAction { Self::BlockUnprovenBuild => ActionKind::BlockProducerEffectfulBlockUnprovenBuild, Self::BlockProveInit => ActionKind::BlockProducerEffectfulBlockProveInit, Self::BlockProveSuccess => ActionKind::BlockProducerEffectfulBlockProveSuccess, + Self::BlockProduced { .. } => ActionKind::BlockProducerEffectfulBlockProduced, } } } @@ -1036,6 +1029,7 @@ impl ActionKindGet for RpcAction { match self { Self::GlobalStateGet { .. } => ActionKind::RpcGlobalStateGet, Self::StatusGet { .. } => ActionKind::RpcStatusGet, + Self::HeartbeatGet { .. } => ActionKind::RpcHeartbeatGet, Self::ActionStatsGet { .. } => ActionKind::RpcActionStatsGet, Self::SyncStatsGet { .. 
} => ActionKind::RpcSyncStatsGet, Self::BlockProducerStatsGet { .. } => ActionKind::RpcBlockProducerStatsGet, @@ -1104,6 +1098,7 @@ impl ActionKindGet for RpcEffectfulAction { match self { Self::GlobalStateGet { .. } => ActionKind::RpcEffectfulGlobalStateGet, Self::StatusGet { .. } => ActionKind::RpcEffectfulStatusGet, + Self::HeartbeatGet { .. } => ActionKind::RpcEffectfulHeartbeatGet, Self::ActionStatsGet { .. } => ActionKind::RpcEffectfulActionStatsGet, Self::SyncStatsGet { .. } => ActionKind::RpcEffectfulSyncStatsGet, Self::BlockProducerStatsGet { .. } => ActionKind::RpcEffectfulBlockProducerStatsGet, @@ -1435,6 +1430,47 @@ impl ActionKindGet for TransitionFrontierGenesisEffectfulAction { } } +impl ActionKindGet for TransitionFrontierCandidateAction { + fn kind(&self) -> ActionKind { + match self { + Self::BlockReceived { .. } => ActionKind::TransitionFrontierCandidateBlockReceived, + Self::BlockPrevalidateSuccess { .. } => { + ActionKind::TransitionFrontierCandidateBlockPrevalidateSuccess + } + Self::BlockPrevalidateError { .. } => { + ActionKind::TransitionFrontierCandidateBlockPrevalidateError + } + Self::BlockChainProofUpdate { .. } => { + ActionKind::TransitionFrontierCandidateBlockChainProofUpdate + } + Self::BlockSnarkVerifyPending { .. } => { + ActionKind::TransitionFrontierCandidateBlockSnarkVerifyPending + } + Self::BlockSnarkVerifySuccess { .. } => { + ActionKind::TransitionFrontierCandidateBlockSnarkVerifySuccess + } + Self::BlockSnarkVerifyError { .. } => { + ActionKind::TransitionFrontierCandidateBlockSnarkVerifyError + } + Self::DetectForkRange { .. } => ActionKind::TransitionFrontierCandidateDetectForkRange, + Self::ShortRangeForkResolve { .. } => { + ActionKind::TransitionFrontierCandidateShortRangeForkResolve + } + Self::LongRangeForkResolve { .. } => { + ActionKind::TransitionFrontierCandidateLongRangeForkResolve + } + Self::BestTipUpdate { .. 
} => ActionKind::TransitionFrontierCandidateBestTipUpdate, + Self::TransitionFrontierSyncTargetUpdate => { + ActionKind::TransitionFrontierCandidateTransitionFrontierSyncTargetUpdate + } + Self::P2pBestTipUpdate { .. } => { + ActionKind::TransitionFrontierCandidateP2pBestTipUpdate + } + Self::Prune => ActionKind::TransitionFrontierCandidatePrune, + } + } +} + impl ActionKindGet for TransitionFrontierSyncAction { fn kind(&self) -> ActionKind { match self { @@ -1479,6 +1515,9 @@ impl ActionKindGet for TransitionFrontierSyncAction { Self::BlocksNextApplySuccess { .. } => { ActionKind::TransitionFrontierSyncBlocksNextApplySuccess } + Self::BlocksSendToArchive { .. } => { + ActionKind::TransitionFrontierSyncBlocksSendToArchive + } Self::BlocksSuccess => ActionKind::TransitionFrontierSyncBlocksSuccess, Self::CommitInit => ActionKind::TransitionFrontierSyncCommitInit, Self::CommitPending => ActionKind::TransitionFrontierSyncCommitPending, @@ -1945,6 +1984,7 @@ impl ActionKindGet for P2pNetworkPubsubAction { } Self::Graft { .. } => ActionKind::P2pNetworkPubsubGraft, Self::Prune { .. } => ActionKind::P2pNetworkPubsubPrune, + Self::WebRtcRebroadcast { .. } => ActionKind::P2pNetworkPubsubWebRtcRebroadcast, Self::Broadcast { .. } => ActionKind::P2pNetworkPubsubBroadcast, Self::Sign { .. } => ActionKind::P2pNetworkPubsubSign, Self::SignError { .. } => ActionKind::P2pNetworkPubsubSignError, @@ -1953,6 +1993,16 @@ impl ActionKindGet for P2pNetworkPubsubAction { Self::OutgoingMessageClear { .. } => ActionKind::P2pNetworkPubsubOutgoingMessageClear, Self::OutgoingMessageError { .. } => ActionKind::P2pNetworkPubsubOutgoingMessageError, Self::OutgoingData { .. } => ActionKind::P2pNetworkPubsubOutgoingData, + Self::HandleIncomingMessage { .. } => ActionKind::P2pNetworkPubsubHandleIncomingMessage, + Self::ValidateIncomingMessage { .. } => { + ActionKind::P2pNetworkPubsubValidateIncomingMessage + } + Self::PruneMessages { .. 
} => ActionKind::P2pNetworkPubsubPruneMessages, + Self::RejectMessage { .. } => ActionKind::P2pNetworkPubsubRejectMessage, + Self::IgnoreMessage { .. } => ActionKind::P2pNetworkPubsubIgnoreMessage, + Self::BroadcastValidatedMessage { .. } => { + ActionKind::P2pNetworkPubsubBroadcastValidatedMessage + } } } } diff --git a/node/src/block_producer/block_producer_actions.rs b/node/src/block_producer/block_producer_actions.rs index f98cc90ff2..5dd35f00d0 100644 --- a/node/src/block_producer/block_producer_actions.rs +++ b/node/src/block_producer/block_producer_actions.rs @@ -105,10 +105,10 @@ impl redux::EnablingCondition for BlockProducerAction { BlockProducerAction::WonSlotProduceInit { .. } => { state.block_producer.with(false, |this| { let has_genesis_proven_if_needed = || { - state.transition_frontier.best_tip().map_or(false, |tip| { + state.transition_frontier.best_tip().is_some_and(|tip| { let proven_block = state.transition_frontier.genesis.proven_block(); !tip.is_genesis() - || proven_block.map_or(false, |b| Arc::ptr_eq(&b.block, &tip.block)) + || proven_block.is_some_and(|b| Arc::ptr_eq(&b.block, &tip.block)) }) }; this.current.won_slot_should_produce(time) && has_genesis_proven_if_needed() @@ -220,5 +220,5 @@ fn is_syncing_to_produced_block(state: &crate::State) -> bool { .transition_frontier .sync .best_tip() - .map_or(false, |tip| state.block_producer.is_me(tip.producer())) + .is_some_and(|tip| state.block_producer.is_me(tip.producer())) } diff --git a/node/src/block_producer/block_producer_reducer.rs b/node/src/block_producer/block_producer_reducer.rs index fb47032ba5..a00ccb6d1e 100644 --- a/node/src/block_producer/block_producer_reducer.rs +++ b/node/src/block_producer/block_producer_reducer.rs @@ -305,7 +305,18 @@ impl BlockProducerEnabled { bug_condition!("Invalid state for `BlockProducerAction::BlockProduced` expected: `BlockProducerCurrentState::BlockProveSuccess`, found: {:?}", current_state); } - let dispatcher = state_context.into_dispatcher(); + 
let (dispatcher, global_state) = state_context.into_dispatcher_and_state(); + + // Store the produced block in stats, used by heartbeats + let block = global_state + .block_producer + .as_ref() + .and_then(|bp| bp.current.produced_block()) + .cloned(); + if let Some(block) = block { + dispatcher.push(BlockProducerEffectfulAction::BlockProduced { block }); + } + dispatcher.push(BlockProducerAction::BlockInject); } BlockProducerAction::BlockInject => { diff --git a/node/src/block_producer/block_producer_state.rs b/node/src/block_producer/block_producer_state.rs index c370247eef..c7d24078c5 100644 --- a/node/src/block_producer/block_producer_state.rs +++ b/node/src/block_producer/block_producer_state.rs @@ -331,7 +331,7 @@ impl BlockProducerCurrentState { } if won_slot < best_tip - || self.produced_block().map_or(false, |block| { + || self.produced_block().is_some_and(|block| { !consensus_take( best_tip.consensus_state(), block.consensus_state(), diff --git a/node/src/block_producer/mod.rs b/node/src/block_producer/mod.rs index d57984e46c..80ba62dd11 100644 --- a/node/src/block_producer/mod.rs +++ b/node/src/block_producer/mod.rs @@ -131,7 +131,7 @@ impl PartialOrd for BlockProducerWonSlot { impl PartialEq for BlockProducerWonSlot { fn eq(&self, other: &ArcBlockWithHash) -> bool { - self.partial_cmp(other).map_or(false, |ord| ord.is_eq()) + self.partial_cmp(other).is_some_and(|ord| ord.is_eq()) } } diff --git a/node/src/block_producer_effectful/block_producer_effectful_actions.rs b/node/src/block_producer_effectful/block_producer_effectful_actions.rs index a81b4702b9..0089e0e6cd 100644 --- a/node/src/block_producer_effectful/block_producer_effectful_actions.rs +++ b/node/src/block_producer_effectful/block_producer_effectful_actions.rs @@ -1,6 +1,6 @@ use super::vrf_evaluator_effectful::BlockProducerVrfEvaluatorEffectfulAction; use crate::block_producer::{BlockProducerWonSlot, BlockProducerWonSlotDiscardReason}; -use openmina_core::ActionEvent; +use 
openmina_core::{block::ArcBlockWithHash, ActionEvent}; use serde::{Deserialize, Serialize}; #[derive(Serialize, Deserialize, Debug, Clone, ActionEvent)] @@ -17,6 +17,9 @@ pub enum BlockProducerEffectfulAction { BlockUnprovenBuild, BlockProveInit, BlockProveSuccess, + BlockProduced { + block: ArcBlockWithHash, + }, } impl redux::EnablingCondition for BlockProducerEffectfulAction { diff --git a/node/src/block_producer_effectful/block_producer_effectful_effects.rs b/node/src/block_producer_effectful/block_producer_effectful_effects.rs index ab7a5cdd3f..fa36cb02c3 100644 --- a/node/src/block_producer_effectful/block_producer_effectful_effects.rs +++ b/node/src/block_producer_effectful/block_producer_effectful_effects.rs @@ -221,5 +221,10 @@ pub fn block_producer_effects( } store.dispatch(BlockProducerAction::WonSlotSearch); } + BlockProducerEffectfulAction::BlockProduced { block } => { + if let Some(stats) = store.service.stats() { + stats.block_producer().last_produced_block = Some(block.clone()); + } + } } } diff --git a/node/src/block_producer_effectful/block_producer_effectful_service.rs b/node/src/block_producer_effectful/block_producer_effectful_service.rs index 145f1e249c..41ea20b4fe 100644 --- a/node/src/block_producer_effectful/block_producer_effectful_service.rs +++ b/node/src/block_producer_effectful/block_producer_effectful_service.rs @@ -7,6 +7,7 @@ use mina_p2p_messages::v2::{ MinaBaseStagedLedgerHashStableV1, ProverExtendBlockchainInputStableV2, StagedLedgerDiffDiffStableV2, StateHash, }; +use openmina_node_account::AccountSecretKey; use serde::{Deserialize, Serialize}; #[derive(Serialize, Deserialize, Debug, Clone)] @@ -24,4 +25,5 @@ pub struct StagedLedgerDiffCreateOutput { pub trait BlockProducerService { fn provers(&self) -> BlockProver; fn prove(&mut self, block_hash: StateHash, input: Box); + fn with_producer_keypair(&self, f: impl FnOnce(&AccountSecretKey) -> T) -> Option; } diff --git a/node/src/config.rs b/node/src/config.rs index 
991ccfd882..e2223dbb74 100644 --- a/node/src/config.rs +++ b/node/src/config.rs @@ -11,6 +11,7 @@ pub use crate::ledger::LedgerConfig; pub use crate::p2p::P2pConfig; pub use crate::snark::SnarkConfig; pub use crate::snark_pool::SnarkPoolConfig; +use crate::transition_frontier::archive::archive_config::ArchiveConfig; use crate::transition_frontier::genesis::GenesisConfig; pub use crate::transition_frontier::TransitionFrontierConfig; pub use mina_p2p_messages::v2::MinaBaseProtocolConstantsCheckedValueStableV1 as ProtocolConstants; @@ -23,6 +24,7 @@ pub struct Config { pub snark: SnarkConfig, pub p2p: P2pConfig, pub transition_frontier: TransitionFrontierConfig, + pub archive: Option, pub block_producer: Option, pub global: GlobalConfig, pub tx_pool: ledger::transaction_pool::Config, diff --git a/node/src/consensus/mod.rs b/node/src/consensus/mod.rs deleted file mode 100644 index da66985907..0000000000 --- a/node/src/consensus/mod.rs +++ /dev/null @@ -1,7 +0,0 @@ -mod consensus_state; -pub use consensus_state::*; - -mod consensus_actions; -pub use consensus_actions::*; - -mod consensus_reducer; diff --git a/node/src/effects.rs b/node/src/effects.rs index f3755e2feb..586f5f85c5 100644 --- a/node/src/effects.rs +++ b/node/src/effects.rs @@ -100,7 +100,6 @@ pub fn effects(store: &mut Store, action: ActionWithMeta) { | Action::SnarkPool(_) | Action::ExternalSnarkWorker(_) | Action::TransactionPool(_) - | Action::Consensus(_) | Action::Ledger(_) | Action::Rpc(_) | Action::WatchedAccounts(_) @@ -114,14 +113,18 @@ pub fn effects(store: &mut Store, action: ActionWithMeta) { fn p2p_request_best_tip_if_needed(store: &mut Store) { // TODO(binier): refactor let state = store.state(); - let consensus_best_tip_hash = state.consensus.best_tip.as_ref(); + let consensus_best_tip_hash = state.transition_frontier.candidates.best_tip.as_ref(); let best_tip_hash = state.transition_frontier.best_tip().map(|v| &v.hash); let syncing_best_tip_hash = 
state.transition_frontier.sync.best_tip().map(|v| &v.hash); if consensus_best_tip_hash.is_some() && consensus_best_tip_hash != best_tip_hash && consensus_best_tip_hash != syncing_best_tip_hash - && state.consensus.best_tip_chain_proof.is_none() + && state + .transition_frontier + .candidates + .best_tip_chain_proof + .is_none() { request_best_tip(store, consensus_best_tip_hash.cloned()); } diff --git a/node/src/event_source/event.rs b/node/src/event_source/event.rs index 66bdfa78d9..edd22bd477 100644 --- a/node/src/event_source/event.rs +++ b/node/src/event_source/event.rs @@ -32,6 +32,7 @@ impl std::fmt::Display for Event { match req.as_ref() { RpcRequest::StateGet(filter) => write!(f, "StateGet, {filter:?}"), RpcRequest::StatusGet => write!(f, "StatusGet"), + RpcRequest::HeartbeatGet => write!(f, "HeartbeatGet"), RpcRequest::ActionStatsGet(query) => write!(f, "ActionStatsGet, {query:?}"), RpcRequest::SyncStatsGet(query) => write!(f, "SyncStatsGet, {query:?}"), RpcRequest::BlockProducerStatsGet => write!(f, "BlockProducerStatsGet"), diff --git a/node/src/event_source/event_source_effects.rs b/node/src/event_source/event_source_effects.rs index 3ac099f44f..622e67990e 100644 --- a/node/src/event_source/event_source_effects.rs +++ b/node/src/event_source/event_source_effects.rs @@ -307,6 +307,9 @@ pub fn event_source_effects(store: &mut Store, action: EventSourc RpcRequest::StatusGet => { store.dispatch(RpcAction::StatusGet { rpc_id }); } + RpcRequest::HeartbeatGet => { + store.dispatch(RpcAction::HeartbeatGet { rpc_id }); + } RpcRequest::ActionStatsGet(query) => { store.dispatch(RpcAction::ActionStatsGet { rpc_id, query }); } @@ -430,7 +433,7 @@ pub fn event_source_effects(store: &mut Store, action: EventSourc .transition_frontier .genesis .prove_pending_block_hash() - .map_or(false, |hash| hash == block_hash) + .is_some_and(|hash| hash == block_hash) { // TODO(refactor): before this is dispatched, genesis inject must be dispatched 
store.dispatch(TransitionFrontierGenesisAction::ProveSuccess { proof }); diff --git a/node/src/external_snark_worker/external_snark_worker_actions.rs b/node/src/external_snark_worker/external_snark_worker_actions.rs index 2599906985..43f2183ae5 100644 --- a/node/src/external_snark_worker/external_snark_worker_actions.rs +++ b/node/src/external_snark_worker/external_snark_worker_actions.rs @@ -73,7 +73,7 @@ impl EnablingCondition for ExternalSnarkWorkerAction { ExternalSnarkWorkerState::Starting ) && now .checked_sub(state.external_snark_worker.0.timestamp) - .map_or(false, |d| d > TIMEOUT) + .is_some_and(|d| d > TIMEOUT) } ExternalSnarkWorkerAction::Kill => !matches!( state.external_snark_worker.0.state, @@ -105,7 +105,7 @@ impl EnablingCondition for ExternalSnarkWorkerAction { &state.external_snark_worker.0.state { now.checked_sub(state.external_snark_worker.0.timestamp) - .map_or(false, |d| d > summary.estimated_duration()) + .is_some_and(|d| d > summary.estimated_duration()) } else { false } diff --git a/node/src/ledger/ledger_reducer.rs b/node/src/ledger/ledger_reducer.rs index cd5e3a2eff..1e8cbb0504 100644 --- a/node/src/ledger/ledger_reducer.rs +++ b/node/src/ledger/ledger_reducer.rs @@ -1,18 +1,30 @@ use crate::Substate; use super::{ - read::LedgerReadState, write::LedgerWriteState, LedgerAction, LedgerActionWithMetaRef, - LedgerState, + read::LedgerReadState, + write::{LedgerWriteAction, LedgerWriteResponse, LedgerWriteState}, + LedgerAction, LedgerActionWithMetaRef, LedgerState, }; impl LedgerState { - pub fn reducer(state_context: Substate, action: LedgerActionWithMetaRef<'_>) { + pub fn reducer(mut state_context: Substate, action: LedgerActionWithMetaRef<'_>) { let (action, meta) = action.split(); + match action { - LedgerAction::Write(action) => LedgerWriteState::reducer( - Substate::from_compatible_substate(state_context), - meta.with_action(action), - ), + LedgerAction::Write(action) => { + if let LedgerWriteAction::Success { + response: 
LedgerWriteResponse::Commit { result, .. }, + } = action + { + if let Ok(state) = state_context.get_substate_mut() { + state.alive_masks = result.alive_masks; + } + } + LedgerWriteState::reducer( + Substate::from_compatible_substate(state_context), + meta.with_action(action), + ) + } LedgerAction::Read(action) => LedgerReadState::reducer( Substate::from_compatible_substate(state_context), meta.with_action(action), diff --git a/node/src/ledger/ledger_service.rs b/node/src/ledger/ledger_service.rs index 257970a8b6..f8e34c15d3 100644 --- a/node/src/ledger/ledger_service.rs +++ b/node/src/ledger/ledger_service.rs @@ -12,17 +12,19 @@ use crate::{ block_producer_effectful::StagedLedgerDiffCreateOutput, ledger::{ ledger_manager::{LedgerManager, LedgerRequest}, - write::BlockApplyResult, + write::{BlockApplyResult, BlockApplyResultArchive}, }, p2p::channels::rpc::StagedLedgerAuxAndPendingCoinbases, rpc::{ RpcScanStateSummaryBlockTransaction, RpcScanStateSummaryScanStateJob, RpcScanStateSummaryScanStateJobKind, RpcSnarkPoolJobSnarkWorkDone, }, - transition_frontier::genesis::empty_pending_coinbase_hash, - transition_frontier::sync::{ - ledger::staged::StagedLedgerAuxAndPendingCoinbasesValid, - TransitionFrontierRootSnarkedLedgerUpdates, + transition_frontier::{ + genesis::empty_pending_coinbase_hash, + sync::{ + ledger::staged::StagedLedgerAuxAndPendingCoinbasesValid, + TransitionFrontierRootSnarkedLedgerUpdates, + }, }, }; use ark_ff::fields::arithmetic::InvalidBigInt; @@ -34,7 +36,9 @@ use ledger::{ local_state::LocalState, protocol_state::{protocol_state_view, ProtocolStateView}, transaction_partially_applied::TransactionPartiallyApplied, - valid, Transaction, + valid, + zkapp_command::AccessedOrNot, + Transaction, TransactionStatus, UserCommand, }, }, sparse_ledger::SparseLedger, @@ -44,7 +48,7 @@ use ledger::{ validate_block::block_body_hash, }, verifier::Verifier, - Account, AccountId, BaseLedger, Database, Mask, UnregisterBehavior, + Account, AccountId, AccountIndex, 
BaseLedger, Database, Mask, TokenId, UnregisterBehavior, }; use mina_hasher::Fp; use mina_p2p_messages::{ @@ -174,6 +178,8 @@ pub struct LedgerCtx { additional_snarked_ledgers: BTreeMap, staged_ledgers: StagedLedgersStorage, sync: LedgerSyncState, + /// Returns more data on block application necessary for archive node + archive_mode: bool, event_sender: Option>, } @@ -219,6 +225,10 @@ impl LedgerCtx { } } + pub fn set_archive_mode(&mut self) { + self.archive_mode = true; + } + // TODO(tizoc): Only used for the current workaround to make staged ledger // reconstruction async, can be removed when the ledger services are made async pub fn set_event_sender( @@ -684,7 +694,7 @@ impl LedgerCtx { &Verifier, &prev_state_view, prev_protocol_state.hashes(), - coinbase_receiver, + coinbase_receiver.clone(), supercharge_coinbase, ) .map_err(|err| format!("{err:?}"))?; @@ -714,12 +724,134 @@ impl LedgerCtx { panic!("staged ledger hash mismatch. found: {ledger_hashes:#?}, expected: {expected_ledger_hashes:#?}"); } + let archive_data = if self.archive_mode { + let senders = block + .body() + .transactions() + .filter_map(|tx| UserCommand::try_from(tx).ok().map(|cmd| cmd.fee_payer())) + .collect::>() + .into_iter(); + + let coinbase_receiver_id = AccountId::new(coinbase_receiver, TokenId::default()); + + // https://github.com/MinaProtocol/mina/blob/85149735ca3a76d026e8cf36b8ff22941a048e31/src/app/archive/lib/diff.ml#L78 + let (accessed, not_accessed): (BTreeSet<_>, BTreeSet<_>) = block + .body() + .tranasctions_with_status() + .flat_map(|(tx, status)| { + let status: TransactionStatus = status.into(); + UserCommand::try_from(tx) + .ok() + .map(|cmd| cmd.account_access_statuses(&status)) + .into_iter() + .flatten() + }) + .partition(|(_, status)| *status == AccessedOrNot::Accessed); + + let mut account_ids_accessed: BTreeSet<_> = + accessed.into_iter().map(|(id, _)| id).collect(); + let mut account_ids_not_accessed: BTreeSet<_> = + not_accessed.into_iter().map(|(id, _)| 
id).collect(); + + // Coinbase receiver is included only when the block has a coinbase transaction + // Note: If for whatever reason the network has set the coinbase amount to zero, + // to mimic the behavior of the ocaml node, we still include the coinbase receiver + // in the accessed accounts as a coinbase transaction is created regardless of the coinbase amount. + // https://github.com/MinaProtocol/mina/blob/b595a2bf00ae138d745737da628bd94bb2bd91e2/src/lib/staged_ledger/pre_diff_info.ml#L139 + let has_coinbase = block.body().has_coinbase(); + + if has_coinbase { + account_ids_accessed.insert(coinbase_receiver_id); + } else { + account_ids_not_accessed.insert(coinbase_receiver_id); + } + + // Include the coinbase fee transfer accounts + let fee_transfer_accounts = + block.body().coinbase_fee_transfers_iter().filter_map(|cb| { + let receiver: CompressedPubKey = cb.receiver_pk.inner().try_into().ok()?; + let account_id = AccountId::new(receiver, TokenId::default()); + Some(account_id) + }); + account_ids_accessed.extend(fee_transfer_accounts); + + // TODO(adonagy): Create a struct instead of tuple + let accounts_accessed: Vec<(AccountIndex, Account)> = account_ids_accessed + .iter() + .filter_map(|id| { + staged_ledger + .ledger() + .index_of_account(id.clone()) + .and_then(|index| { + staged_ledger + .ledger() + .get_at_index(index) + .map(|account| (index, *account)) + }) + }) + .collect(); + + let account_creation_fee = constraint_constants().account_creation_fee; + + // TODO(adonagy): Create a struct instead of tuple + let accounts_created: Vec<(AccountId, u64)> = staged_ledger + .latest_block_accounts_created(pred_block.hash().to_field()?) 
+ .iter() + .map(|id| (id.clone(), account_creation_fee)) + .collect(); + + // A token is used regardless of txn status + // https://github.com/MinaProtocol/mina/blob/85149735ca3a76d026e8cf36b8ff22941a048e31/src/app/archive/lib/diff.ml#L114 + let all_account_ids: BTreeSet<_> = account_ids_accessed + .union(&account_ids_not_accessed) + .collect(); + let tokens_used: BTreeSet<(TokenId, Option)> = if has_coinbase { + all_account_ids + .iter() + .map(|id| { + let token_id = id.token_id.clone(); + let token_owner = staged_ledger.ledger().token_owner(token_id.clone()); + (token_id, token_owner) + }) + .collect() + } else { + BTreeSet::new() + }; + + let sender_receipt_chains_from_parent_ledger = senders + .filter_map(|sender| { + if let Some(location) = staged_ledger.ledger().location_of_account(&sender) { + staged_ledger.ledger().get(location).map(|account| { + ( + sender, + v2::ReceiptChainHash::from(account.receipt_chain_hash), + ) + }) + } else { + None + } + }) + .collect(); + Some(BlockApplyResultArchive { + accounts_accessed, + accounts_created, + tokens_used, + sender_receipt_chains_from_parent_ledger, + }) + } else { + None + }; + self.sync .staged_ledgers .insert(Arc::new(ledger_hashes), staged_ledger); + // staged_ledger.ledger().get_at_index(index) + Ok(BlockApplyResult { + block, just_emitted_a_proof, + archive_data, }) } @@ -842,6 +974,7 @@ impl LedgerCtx { ); CommitResult { + alive_masks: ::ledger::mask::alive_len(), available_jobs, needed_protocol_states, } diff --git a/node/src/ledger/ledger_state.rs b/node/src/ledger/ledger_state.rs index 8f92156bd2..d0aba84995 100644 --- a/node/src/ledger/ledger_state.rs +++ b/node/src/ledger/ledger_state.rs @@ -3,6 +3,7 @@ use serde::{Deserialize, Serialize}; #[derive(Serialize, Deserialize, Debug, Clone, Default)] pub struct LedgerState { + pub alive_masks: usize, pub write: LedgerWriteState, pub read: LedgerReadState, } diff --git a/node/src/ledger/read/ledger_read_reducer.rs 
b/node/src/ledger/read/ledger_read_reducer.rs index 52b4c9720b..f49b4583f6 100644 --- a/node/src/ledger/read/ledger_read_reducer.rs +++ b/node/src/ledger/read/ledger_read_reducer.rs @@ -78,7 +78,7 @@ impl LedgerReadState { LedgerReadResponse::DelegatorTable(table), ) => { let expected = state.block_producer.vrf_delegator_table_inputs(); - if !expected.map_or(false, |(expected_hash, producer)| { + if !expected.is_some_and(|(expected_hash, producer)| { ledger_hash == expected_hash && pub_key == producer }) { bug_condition!("delegator table unexpected"); @@ -361,9 +361,7 @@ fn find_peers_with_ledger_rpc( ) => state .transition_frontier .get_state_body(block_hash) - .map_or(false, |b| { - b.blockchain_state.staged_ledger_hash == data.ledger_hash - }), + .is_some_and(|b| b.blockchain_state.staged_ledger_hash == data.ledger_hash), _ => false, }) .map(|(peer_id, rpc_id, _)| (*peer_id, rpc_id, false)); @@ -379,9 +377,7 @@ fn find_peers_with_ledger_rpc( ) => state .transition_frontier .get_state_body(block_hash) - .map_or(false, |b| { - b.blockchain_state.staged_ledger_hash == data.ledger_hash - }), + .is_some_and(|b| b.blockchain_state.staged_ledger_hash == data.ledger_hash), _ => false, }) .map(|(rpc_id, _)| (*peer_id, rpc_id, true)); diff --git a/node/src/ledger/read/ledger_read_state.rs b/node/src/ledger/read/ledger_read_state.rs index d4ac2fc402..df1ce8a880 100644 --- a/node/src/ledger/read/ledger_read_state.rs +++ b/node/src/ledger/read/ledger_read_state.rs @@ -90,6 +90,15 @@ impl LedgerReadState { .iter() .any(|(_, pending)| pending.request() == req) } + + pub fn pending_requests( + &self, + ) -> impl Iterator { + self.pending.iter().filter_map(|(id, s)| match s { + LedgerReadRequestState::Pending { time, request } => Some((id, request, *time)), + _ => None, + }) + } } impl LedgerReadRequestState { diff --git a/node/src/ledger/write/ledger_write_reducer.rs b/node/src/ledger/write/ledger_write_reducer.rs index 7333bdb041..b8019024f4 100644 --- 
a/node/src/ledger/write/ledger_write_reducer.rs +++ b/node/src/ledger/write/ledger_write_reducer.rs @@ -141,6 +141,10 @@ impl LedgerWriteState { .push(TransitionFrontierSyncAction::BlocksNextApplyError { hash, error }); } Ok(result) => { + dispatcher.push(TransitionFrontierSyncAction::BlocksSendToArchive { + hash: hash.clone(), + data: result.clone(), + }); dispatcher.push(TransitionFrontierSyncAction::BlocksNextApplySuccess { hash, just_emitted_a_proof: result.just_emitted_a_proof, @@ -155,7 +159,7 @@ impl LedgerWriteState { }, ) => { let best_tip = state.transition_frontier.sync.best_tip(); - if best_tip.map_or(false, |tip| tip.hash() == &best_tip_hash) { + if best_tip.is_some_and(|tip| tip.hash() == &best_tip_hash) { dispatcher.push(TransitionFrontierSyncAction::CommitSuccess { result }); } } diff --git a/node/src/ledger/write/ledger_write_state.rs b/node/src/ledger/write/ledger_write_state.rs index 88d327c78e..af3298ba84 100644 --- a/node/src/ledger/write/ledger_write_state.rs +++ b/node/src/ledger/write/ledger_write_state.rs @@ -29,6 +29,16 @@ impl LedgerWriteState { | Self::Success { request, .. 
} => Some(request), } } + + pub fn pending_requests( + &self, + ) -> impl Iterator { + std::iter::once(match self { + Self::Pending { time, request } => Some((request, *time)), + _ => None, + }) + .flatten() + } } impl Default for LedgerWriteState { diff --git a/node/src/ledger/write/mod.rs b/node/src/ledger/write/mod.rs index 1ce5d91fa4..749f57c090 100644 --- a/node/src/ledger/write/mod.rs +++ b/node/src/ledger/write/mod.rs @@ -1,5 +1,6 @@ mod ledger_write_actions; use ledger::scan_state::transaction_logic::valid; +use ledger::{Account, AccountId, AccountIndex, TokenId}; pub use ledger_write_actions::*; mod ledger_write_state; @@ -13,7 +14,7 @@ use std::sync::Arc; use ledger::scan_state::scan_state::transaction_snark::OneOrTwo; use ledger::scan_state::scan_state::AvailableJobMessage; -use mina_p2p_messages::v2; +use mina_p2p_messages::v2::{self, StateBodyHash}; use serde::{Deserialize, Serialize}; use crate::block_producer_effectful::StagedLedgerDiffCreateOutput; @@ -84,11 +85,86 @@ pub enum LedgerWriteResponse { #[derive(Serialize, Deserialize, Debug, Clone)] pub struct BlockApplyResult { + pub block: ArcBlockWithHash, pub just_emitted_a_proof: bool, + pub archive_data: Option, +} + +#[derive(Serialize, Deserialize, Debug, Clone)] +pub struct BlockApplyResultArchive { + pub accounts_accessed: Vec<(AccountIndex, Account)>, + pub accounts_created: Vec<(AccountId, u64)>, + pub tokens_used: BTreeSet<(TokenId, Option)>, + pub sender_receipt_chains_from_parent_ledger: Vec<(AccountId, v2::ReceiptChainHash)>, +} + +impl TryFrom<&BlockApplyResult> for v2::ArchiveTransitionFronntierDiff { + type Error = String; + + fn try_from(value: &BlockApplyResult) -> Result { + if let Some(archive_data) = &value.archive_data { + let res = Self::BreadcrumbAdded { + // TODO(adonagy): check if we need the StateBodyHash, if no keep the None + block: ( + (*value.block.block).clone(), + ( + value + .block + .header() + .protocol_state + .body + .try_hash() + .ok() + 
.map(StateBodyHash::from), + value.block.hash().clone(), + ), + ), + accounts_accessed: archive_data + .accounts_accessed + .iter() + .map(|(index, account)| (index.0.into(), account.into())) + .collect(), + accounts_created: archive_data + .accounts_created + .iter() + .map(|(account_id, fee)| { + ( + (*account_id).clone().into(), + v2::CurrencyFeeStableV1((*fee).into()), + ) + }) + .collect(), + tokens_used: archive_data + .tokens_used + .iter() + .map(|(token_id, account_id)| { + ( + token_id.into(), + account_id.clone().map(|account_id| account_id.into()), + ) + }) + .collect(), + sender_receipt_chains_from_parent_ledger: archive_data + .sender_receipt_chains_from_parent_ledger + .iter() + .map(|(account_id, receipt_chain_hash)| { + ( + (*account_id).clone().into(), + receipt_chain_hash.clone().into_inner(), + ) + }) + .collect(), + }; + Ok(res) + } else { + Err("Archive data not available, not running in archive mode".to_string()) + } + } } #[derive(Serialize, Deserialize, Debug, Default, Clone)] pub struct CommitResult { + pub alive_masks: usize, pub available_jobs: Arc>>, pub needed_protocol_states: BTreeSet, } diff --git a/node/src/lib.rs b/node/src/lib.rs index 38b7f2a7c4..2a96fae397 100644 --- a/node/src/lib.rs +++ b/node/src/lib.rs @@ -1,5 +1,8 @@ #![allow(clippy::if_same_then_else)] +extern crate graphannis_malloc_size_of as malloc_size_of; +extern crate graphannis_malloc_size_of_derive as malloc_size_of_derive; + pub use openmina_core as core; #[macro_use] @@ -31,7 +34,6 @@ pub mod stats; pub mod block_producer; pub mod block_producer_effectful; -pub mod consensus; pub mod daemon_json; pub mod event_source; pub mod external_snark_worker; diff --git a/node/src/logger/logger_effects.rs b/node/src/logger/logger_effects.rs index 679e6d3c0e..0ee13da989 100644 --- a/node/src/logger/logger_effects.rs +++ b/node/src/logger/logger_effects.rs @@ -118,7 +118,6 @@ pub fn logger_effects(store: &Store, action: ActionWithMetaRef<'_ Action::SnarkPool(action) => 
action.action_event(&context), Action::Snark(SnarkAction::WorkVerify(a)) => a.action_event(&context), Action::Snark(SnarkAction::UserCommandVerify(a)) => a.action_event(&context), - Action::Consensus(a) => a.action_event(&context), Action::TransitionFrontier(a) => match a { TransitionFrontierAction::Synced { .. } => { let tip = store.state().transition_frontier.best_tip().unwrap(); diff --git a/node/src/p2p/callbacks/p2p_callbacks_actions.rs b/node/src/p2p/callbacks/p2p_callbacks_actions.rs index 92078b9fac..46fa16c04a 100644 --- a/node/src/p2p/callbacks/p2p_callbacks_actions.rs +++ b/node/src/p2p/callbacks/p2p_callbacks_actions.rs @@ -4,7 +4,7 @@ use p2p::{ rpc::{P2pRpcId, P2pRpcRequest, P2pRpcResponse}, streaming_rpc::P2pStreamingRpcResponseFull, }, - PeerId, + P2pNetworkPubsubMessageCacheId, PeerId, }; use serde::{Deserialize, Serialize}; @@ -46,6 +46,9 @@ pub enum P2pCallbacksAction { RpcRespondBestTip { peer_id: PeerId, }, + P2pPubsubValidateMessage { + message_id: P2pNetworkPubsubMessageCacheId, + }, } impl redux::EnablingCondition for P2pCallbacksAction { @@ -63,6 +66,7 @@ impl redux::EnablingCondition for P2pCallbacksAction { P2pCallbacksAction::RpcRespondBestTip { .. } => { state.transition_frontier.best_tip().is_some() } + P2pCallbacksAction::P2pPubsubValidateMessage { .. 
} => true, } } } diff --git a/node/src/p2p/callbacks/p2p_callbacks_reducer.rs b/node/src/p2p/callbacks/p2p_callbacks_reducer.rs index 0eb4826b90..06ba7640c6 100644 --- a/node/src/p2p/callbacks/p2p_callbacks_reducer.rs +++ b/node/src/p2p/callbacks/p2p_callbacks_reducer.rs @@ -1,6 +1,9 @@ use ark_ff::fields::arithmetic::InvalidBigInt; -use mina_p2p_messages::v2::{MinaLedgerSyncLedgerAnswerStableV2, StateHash}; -use openmina_core::{block::BlockWithHash, bug_condition, transaction::TransactionWithHash}; +use mina_p2p_messages::{ + gossip::GossipNetMessageV2, + v2::{MinaLedgerSyncLedgerAnswerStableV2, StateHash}, +}; +use openmina_core::{block::BlockWithHash, bug_condition, log, transaction::TransactionWithHash}; use p2p::{ channels::{ best_tip::P2pChannelsBestTipAction, @@ -8,14 +11,16 @@ use p2p::{ streaming_rpc::P2pStreamingRpcResponseFull, }, disconnection::{P2pDisconnectionAction, P2pDisconnectionReason}, - PeerId, + P2pNetworkPubsubAction, PeerId, }; use redux::{ActionMeta, ActionWithMeta, Dispatcher}; use crate::{ p2p_ready, snark_pool::candidate::SnarkPoolCandidateAction, + state::BlockPrevalidationError, transaction_pool::candidate::TransactionPoolCandidateAction, + transition_frontier::candidate::{allow_block_too_late, TransitionFrontierCandidateAction}, transition_frontier::sync::{ ledger::{ snarked::{ @@ -29,7 +34,7 @@ use crate::{ watched_accounts::{ WatchedAccountLedgerInitialState, WatchedAccountsLedgerInitialStateGetError, }, - Action, ConsensusAction, State, WatchedAccountsAction, + Action, State, WatchedAccountsAction, }; use super::P2pCallbacksAction; @@ -48,13 +53,14 @@ impl crate::State { action: ActionWithMeta<&P2pCallbacksAction>, ) { let (action, meta) = action.split(); + let time = meta.time(); let (dispatcher, state) = state_context.into_dispatcher_and_state(); match action { P2pCallbacksAction::P2pChannelsRpcReady { peer_id } => { let peer_id = *peer_id; - if state.p2p.get_peer(&peer_id).map_or(false, |p| p.is_libp2p) { + if 
state.p2p.get_peer(&peer_id).is_some_and(|p| p.is_libp2p) { // for webrtc peers, we don't need to send this rpc, as we // will receive current best tip in best tip channel anyways. dispatcher.push(P2pChannelsRpcAction::RequestSend { @@ -290,6 +296,80 @@ impl crate::State { best_tip: best_tip.clone(), }); } + P2pCallbacksAction::P2pPubsubValidateMessage { message_id } => { + let Some(message_content) = state.p2p.ready().and_then(|p2p| { + p2p.network + .scheduler + .broadcast_state + .mcache + .get_message(message_id) + }) else { + bug_condition!("Failed to find message for id: {:?}", message_id); + return; + }; + + let pre_validation_result = match message_content { + GossipNetMessageV2::NewState(new_best_tip) => { + match BlockWithHash::try_new(new_best_tip.clone()) { + Ok(block) => { + let allow_block_too_late = allow_block_too_late(state, &block); + match state.prevalidate_block(&block, allow_block_too_late) { + Ok(()) => PreValidationResult::Continue, + Err(error) + if matches!( + error, + BlockPrevalidationError::ReceivedTooEarly { .. 
} + ) => + { + PreValidationResult::Ignore { + reason: format!( + "Block prevalidation failed: {:?}", + error + ), + } + } + Err(error) => PreValidationResult::Reject { + reason: format!("Block prevalidation failed: {:?}", error), + }, + } + } + Err(_) => { + log::error!(time; "P2pCallbacksAction::P2pPubsubValidateMessage: Invalid bigint in block"); + PreValidationResult::Reject{reason: "P2pCallbacksAction::P2pPubsubValidateMessage: Invalid bigint in block".to_owned()} + } + } + } + _ => { + // TODO: add pre validation for Snark pool and Transaction pool diffs + PreValidationResult::Continue + } + }; + + match pre_validation_result { + PreValidationResult::Continue => { + dispatcher.push(P2pNetworkPubsubAction::ValidateIncomingMessage { + message_id: *message_id, + }); + } + PreValidationResult::Reject { reason } => { + dispatcher.push(P2pNetworkPubsubAction::RejectMessage { + message_id: Some(p2p::BroadcastMessageId::MessageId { + message_id: *message_id, + }), + peer_id: None, + reason, + }); + } + PreValidationResult::Ignore { reason } => { + dispatcher.push(P2pNetworkPubsubAction::IgnoreMessage { + message_id: Some(p2p::BroadcastMessageId::MessageId { + message_id: *message_id, + }), + reason, + }); + } + } + } } } @@ -491,7 +571,7 @@ impl crate::State { return; } } - dispatcher.push(ConsensusAction::BlockChainProofUpdate { + dispatcher.push(TransitionFrontierCandidateAction::BlockChainProofUpdate { hash: best_tip.hash, chain_proof: (hashes, root_block), }); @@ -574,3 +654,9 @@ impl crate::State { } } } + +enum PreValidationResult { + Continue, + Reject { reason: String }, + Ignore { reason: String }, +} diff --git a/node/src/reducer.rs b/node/src/reducer.rs index 2d040bf214..6f64e13e0b 100644 --- a/node/src/reducer.rs +++ b/node/src/reducer.rs @@ -5,7 +5,8 @@ use crate::{ external_snark_worker::ExternalSnarkWorkers, rpc::RpcState, state::{BlockProducerState, LedgerState}, - Action, ActionWithMeta, ConsensusAction, EventSourceAction, P2p, State, + 
transition_frontier::candidate::TransitionFrontierCandidateAction, + Action, ActionWithMeta, EventSourceAction, P2p, State, }; pub fn reducer( @@ -23,7 +24,7 @@ pub fn reducer( bug_condition!("{}", error); }; } - dispatcher.push(ConsensusAction::TransitionFrontierSyncTargetUpdate); + dispatcher.push(TransitionFrontierCandidateAction::TransitionFrontierSyncTargetUpdate); } Action::EventSource(EventSourceAction::NewEvent { .. }) => {} Action::EventSource(_) => {} @@ -63,12 +64,6 @@ pub fn reducer( Action::Snark(a) => { snark::SnarkState::reducer(Substate::new(state, dispatcher), meta.with_action(a)); } - Action::Consensus(a) => { - crate::consensus::ConsensusState::reducer( - Substate::new(state, dispatcher), - meta.with_action(a), - ); - } Action::TransitionFrontier(a) => { crate::transition_frontier::TransitionFrontierState::reducer( Substate::new(state, dispatcher), diff --git a/node/src/rpc/heartbeat.rs b/node/src/rpc/heartbeat.rs new file mode 100644 index 0000000000..1be92a6ac4 --- /dev/null +++ b/node/src/rpc/heartbeat.rs @@ -0,0 +1,289 @@ +use ledger::FpExt; +use mina_p2p_messages::bigint::BigInt; +use mina_signer::Signature; +use redux::Timestamp; +use serde::{Deserialize, Serialize}; + +use super::{ + RpcNodeStatus, RpcNodeStatusSnarkPool, RpcNodeStatusTransactionPool, + RpcNodeStatusTransitionFrontier, +}; +use crate::{p2p::PeerId, stats::block_producer::BlockProductionAttempt}; +use openmina_node_account::{AccountPublicKey, AccountSecretKey}; + +/// Matches the representation used by o1js where each field is a string +/// containing a decimal representation of the field. 
+#[derive(Serialize, Debug, Clone, PartialEq, Eq)] +pub struct SignatureJson { + pub field: String, + pub scalar: String, +} + +impl From for SignatureJson { + fn from(sig: Signature) -> Self { + Self { + field: sig.rx.to_decimal(), + scalar: sig.s.to_decimal(), + } + } +} + +impl TryInto for SignatureJson { + type Error = String; + + fn try_into(self) -> Result { + let rx = BigInt::from_decimal(&self.field) + .map_err(|_| "Failed to parse decimals as BigInt")? + .try_into() + .map_err(|_| "Failed to convert rx BigInt to field element")?; + let s = BigInt::from_decimal(&self.scalar) + .map_err(|_| "Failed to parse decimals as BigInt")? + .try_into() + .map_err(|_| "Failed to convert rx BigInt to field element")?; + + Ok(Signature::new(rx, s)) + } +} + +/// A signed heartbeat message from a node +#[derive(Serialize, Debug, Clone)] +pub struct SignedNodeHeartbeat { + pub version: u8, + /// base64 encoded json of the payload + pub payload: String, + pub submitter: AccountPublicKey, + pub signature: SignatureJson, +} + +impl SignedNodeHeartbeat { + /// Verifies that the signature is valid for this heartbeat + pub fn verify_signature(&self) -> bool { + use blake2::digest::{Update, VariableOutput}; + use mina_signer::{CompressedPubKey, PubKey, Signer}; + + let signature = match self.signature.clone().try_into() { + Ok(sig) => sig, + Err(_) => return false, + }; + + let pk: CompressedPubKey = match self.submitter.clone().try_into() { + Ok(pk) => pk, + Err(_) => return false, + }; + + let pk = match PubKey::from_address(&pk.into_address()) { + Ok(pk) => pk, + Err(_) => return false, + }; + + // Calculate digest from payload + let mut hasher = blake2::Blake2bVar::new(32).expect("Invalid Blake2bVar output size"); + let mut blake2_hash = [0u8; 32]; + hasher.update(self.payload.as_bytes()); + hasher.finalize_variable(&mut blake2_hash).unwrap(); + + let digest = NodeHeartbeatPayloadDigest(blake2_hash); + let mut signer = mina_signer::create_legacy::( + 
mina_signer::NetworkId::TESTNET, + ); + + signer.verify(&signature, &pk, &digest) + } +} + +/// Node heartbeat +#[derive(Serialize, Deserialize, Debug, Clone)] +pub struct NodeHeartbeat { + pub status: NodeStatus, + pub node_timestamp: Timestamp, + pub peer_id: PeerId, + // binprot+base64 encoded block + pub last_produced_block: Option, +} + +#[derive(Serialize, Deserialize, Debug, Clone)] +pub struct NodeStatus { + pub chain_id: Option, + pub transition_frontier: RpcNodeStatusTransitionFrontier, + pub peers_count: u32, + pub snark_pool: RpcNodeStatusSnarkPool, + pub transaction_pool: RpcNodeStatusTransactionPool, + pub current_block_production_attempt: Option, +} + +impl From for NodeStatus { + fn from(status: RpcNodeStatus) -> Self { + Self { + chain_id: status.chain_id, + transition_frontier: status.transition_frontier, + peers_count: status.peers.len() as u32, + snark_pool: status.snark_pool, + transaction_pool: status.transaction_pool, + current_block_production_attempt: status.current_block_production_attempt, + } + } +} + +/// Blake2b hash of the encoded heartbeat payload +#[derive(Clone, Debug)] +pub struct NodeHeartbeatPayloadDigest([u8; 32]); + +impl mina_hasher::Hashable for NodeHeartbeatPayloadDigest { + type D = mina_signer::NetworkId; + + fn to_roinput(&self) -> mina_hasher::ROInput { + let mut hex = [0u8; 64]; + hex::encode_to_slice(self.0, &mut hex).unwrap(); + + // Bits must be reversed to match the JS implementation + for b in hex.iter_mut() { + *b = b.reverse_bits(); + } + + mina_hasher::ROInput::new().append_bytes(&hex) + } + + fn domain_string(network_id: Self::D) -> Option { + match network_id { + Self::D::MAINNET => openmina_core::network::mainnet::SIGNATURE_PREFIX, + Self::D::TESTNET => openmina_core::network::devnet::SIGNATURE_PREFIX, + } + .to_string() + .into() + } +} + +impl NodeHeartbeat { + const CURRENT_VERSION: u8 = 1; + + /// Creates base64 encoded payload and its Blake2b digest + fn payload_and_digest(&self) -> (String, 
NodeHeartbeatPayloadDigest) { + use base64::{engine::general_purpose::URL_SAFE, Engine as _}; + use blake2::{ + digest::{Update, VariableOutput}, + Blake2bVar, + }; + + let payload = serde_json::to_string(self).unwrap(); + let encoded_payload = URL_SAFE.encode(&payload); + + let mut hasher = Blake2bVar::new(32).expect("Invalid Blake2bVar output size"); + let mut blake2_hash = [0u8; 32]; + + hasher.update(encoded_payload.as_bytes()); + hasher.finalize_variable(&mut blake2_hash).unwrap(); + + (encoded_payload, NodeHeartbeatPayloadDigest(blake2_hash)) + } + + /// Signs the heartbeat using the provided secret key + pub fn sign(&self, secret_key: &AccountSecretKey) -> SignedNodeHeartbeat { + let (payload, digest) = self.payload_and_digest(); + let submitter = secret_key.public_key(); + + let signature = { + use mina_signer::{Keypair, Signer}; + let mut signer = mina_signer::create_legacy::( + mina_signer::NetworkId::TESTNET, + ); + let kp = Keypair::from(secret_key.clone()); + + let signature = signer.sign(&kp, &digest); + signature.into() + }; + + SignedNodeHeartbeat { + version: Self::CURRENT_VERSION, + payload, + submitter, + signature, + } + } +} + +#[cfg(test)] +pub(crate) mod tests { + + use crate::rpc::{ + RpcNodeStatusSnarkPool, RpcNodeStatusTransactionPool, RpcNodeStatusTransitionFrontier, + RpcNodeStatusTransitionFrontierSync, + }; + + use super::*; + use redux::Timestamp; + + #[test] + fn test_heartbeat_signing() { + let heartbeat = create_test_heartbeat(); + let secret_key = AccountSecretKey::deterministic(0); + let signed = heartbeat.sign(&secret_key); + + println!("Private key: {}", secret_key); + println!("Public key: {}", secret_key.public_key()); + println!("Payload: {}", signed.payload); + println!("Signature: {:?}", signed.signature); + + assert_eq!(&signed.payload, 
"eyJzdGF0dXMiOnsiY2hhaW5faWQiOm51bGwsInRyYW5zaXRpb25fZnJvbnRpZXIiOnsiYmVzdF90aXAiOm51bGwsInN5bmMiOnsidGltZSI6bnVsbCwic3RhdHVzIjoiU3luY2VkIiwicGhhc2UiOiJSdW5uaW5nIiwidGFyZ2V0IjpudWxsfX0sInBlZXJzX2NvdW50IjoxMCwic25hcmtfcG9vbCI6eyJ0b3RhbF9qb2JzIjowLCJzbmFya3MiOjB9LCJ0cmFuc2FjdGlvbl9wb29sIjp7InRyYW5zYWN0aW9ucyI6MCwidHJhbnNhY3Rpb25zX2Zvcl9wcm9wYWdhdGlvbiI6MCwidHJhbnNhY3Rpb25fY2FuZGlkYXRlcyI6MH0sImN1cnJlbnRfYmxvY2tfcHJvZHVjdGlvbl9hdHRlbXB0IjpudWxsfSwibm9kZV90aW1lc3RhbXAiOjAsInBlZXJfaWQiOiIyYkVnQnJQVHpMOHdvdjJENEt6MzRXVkxDeFI0dUNhcnNCbUhZWFdLUUE1d3ZCUXpkOUgiLCJsYXN0X3Byb2R1Y2VkX2Jsb2NrIjpudWxsfQ=="); + assert_eq!( + &signed.signature.field, + "9079786479394174309544438559429014966597223472549276883268325308999016287311" + ); + assert_eq!( + &signed.signature.scalar, + "23390017492020277578751321763314031415515010579676039556553777274088622112706" + ); + assert!(signed.verify_signature()); + } + + #[test] + fn test_heartbeat_signature_deterministic() { + let heartbeat = create_test_heartbeat(); + let secret_key = AccountSecretKey::deterministic(0); + + let signed1 = heartbeat.sign(&secret_key); + let signed2 = heartbeat.sign(&secret_key); + + assert_eq!(signed1.payload, signed2.payload); + assert_eq!(signed1.signature, signed2.signature); + } + + #[test] + fn test_heartbeat_different_keys_different_sigs() { + let heartbeat = create_test_heartbeat(); + let sk1 = AccountSecretKey::deterministic(0); + let sk2 = AccountSecretKey::deterministic(1); + + let signed1 = heartbeat.sign(&sk1); + let signed2 = heartbeat.sign(&sk2); + + assert_eq!(signed1.payload, signed2.payload); + assert_ne!(signed1.signature, signed2.signature); + assert_ne!(signed1.submitter, signed2.submitter); + } + + fn create_test_heartbeat() -> NodeHeartbeat { + NodeHeartbeat { + status: NodeStatus { + chain_id: None, + transition_frontier: RpcNodeStatusTransitionFrontier { + best_tip: None, + sync: RpcNodeStatusTransitionFrontierSync { + time: None, + status: "Synced".to_string(), + phase: 
"Running".to_string(), + target: None, + }, + }, + peers_count: 10, + snark_pool: RpcNodeStatusSnarkPool::default(), + transaction_pool: RpcNodeStatusTransactionPool::default(), + current_block_production_attempt: None, + }, + node_timestamp: Timestamp::ZERO, + peer_id: "2bEgBrPTzL8wov2D4Kz34WVLCxR4uCarsBmHYXWKQA5wvBQzd9H" + .parse() + .unwrap(), + last_produced_block: None, + } + } +} diff --git a/node/src/rpc/mod.rs b/node/src/rpc/mod.rs index 86ad943b27..629874f092 100644 --- a/node/src/rpc/mod.rs +++ b/node/src/rpc/mod.rs @@ -28,6 +28,9 @@ pub use rpc_reducer::collect_rpc_peers_info; mod rpc_impls; +mod heartbeat; +pub use heartbeat::{NodeHeartbeat, SignedNodeHeartbeat}; + pub use openmina_core::requests::{RpcId, RpcIdType}; use ledger::scan_state::scan_state::transaction_snark::OneOrTwo; @@ -40,6 +43,8 @@ use serde::{Deserialize, Serialize}; use crate::external_snark_worker::{ ExternalSnarkWorkerError, ExternalSnarkWorkerWorkError, SnarkWorkSpecError, }; +use crate::ledger::read::{LedgerReadId, LedgerReadKind}; +use crate::ledger::write::LedgerWriteKind; use crate::p2p::connection::incoming::P2pConnectionIncomingInitOpts; use crate::p2p::connection::outgoing::P2pConnectionOutgoingInitOpts; use crate::p2p::PeerId; @@ -54,6 +59,7 @@ use crate::stats::sync::SyncStatsSnapshot; pub enum RpcRequest { StateGet(Option), StatusGet, + HeartbeatGet, ActionStatsGet(ActionStatsQuery), SyncStatsGet(SyncStatsQuery), BlockProducerStatsGet, @@ -153,6 +159,7 @@ pub enum ActionStatsResponse { #[derive(Serialize, Deserialize, Debug, Clone)] pub enum PeerConnectionStatus { + Disconnecting, Disconnected, Connecting, Connected, @@ -166,8 +173,10 @@ pub struct RpcPeerInfo { pub best_tip_global_slot: Option, pub best_tip_timestamp: Option, pub connection_status: PeerConnectionStatus, + pub connecting_details: Option, pub address: Option, pub incoming: bool, + pub is_libp2p: bool, pub time: u64, } @@ -339,6 +348,7 @@ pub enum RpcStateGetError { pub type RpcStateGetResponse = Result; 
pub type RpcStatusGetResponse = Option; +pub type RpcHeartbeatGetResponse = Option; pub type RpcActionStatsGetResponse = Option; pub type RpcSyncStatsGetResponse = Option>; pub type RpcBlockProducerStatsGetResponse = Option; @@ -452,19 +462,35 @@ impl From for AccountSlim { pub struct RpcNodeStatus { pub chain_id: Option, pub transition_frontier: RpcNodeStatusTransitionFrontier, - pub peers: Vec, + pub ledger: RpcNodeStatusLedger, pub snark_pool: RpcNodeStatusSnarkPool, pub transaction_pool: RpcNodeStatusTransactionPool, pub current_block_production_attempt: Option, + pub peers: Vec, + pub resources_status: RpcNodeStatusResources, } #[derive(Serialize, Debug, Clone)] +pub struct RpcNodeStatusLedger { + pub alive_masks_after_last_commit: usize, + pub pending_writes: Vec<(LedgerWriteKind, redux::Timestamp)>, + pub pending_reads: Vec<(LedgerReadId, LedgerReadKind, redux::Timestamp)>, +} + +#[derive(Serialize, Debug, Clone)] +pub struct RpcNodeStatusResources { + pub p2p_malloc_size: usize, + pub transition_frontier: serde_json::Value, + pub snark_pool: serde_json::Value, +} + +#[derive(Serialize, Deserialize, Debug, Clone)] pub struct RpcNodeStatusTransitionFrontier { pub best_tip: Option, pub sync: RpcNodeStatusTransitionFrontierSync, } -#[derive(Serialize, Debug, Clone)] +#[derive(Serialize, Deserialize, Debug, Clone)] pub struct RpcNodeStatusTransitionFrontierSync { pub time: Option, pub status: String, @@ -472,21 +498,21 @@ pub struct RpcNodeStatusTransitionFrontierSync { pub target: Option, } -#[derive(Serialize, Debug, Clone)] +#[derive(Serialize, Deserialize, Debug, Clone)] pub struct RpcNodeStatusTransitionFrontierBlockSummary { pub hash: StateHash, pub height: u32, pub global_slot: u32, } -#[derive(Serialize, Debug, Default, Clone)] +#[derive(Serialize, Deserialize, Debug, Default, Clone)] pub struct RpcNodeStatusTransactionPool { pub transactions: usize, pub transactions_for_propagation: usize, pub transaction_candidates: usize, } -#[derive(Serialize, Debug, 
Default, Clone)] +#[derive(Serialize, Deserialize, Debug, Default, Clone)] pub struct RpcNodeStatusSnarkPool { pub total_jobs: usize, pub snarks: usize, diff --git a/node/src/rpc/rpc_actions.rs b/node/src/rpc/rpc_actions.rs index a933af061e..2701c70202 100644 --- a/node/src/rpc/rpc_actions.rs +++ b/node/src/rpc/rpc_actions.rs @@ -28,6 +28,9 @@ pub enum RpcAction { StatusGet { rpc_id: RpcId, }, + HeartbeatGet { + rpc_id: RpcId, + }, // Stats ActionStatsGet { @@ -220,6 +223,7 @@ impl redux::EnablingCondition for RpcAction { match self { RpcAction::GlobalStateGet { .. } => true, RpcAction::StatusGet { .. } => true, + RpcAction::HeartbeatGet { .. } => true, RpcAction::ActionStatsGet { .. } => true, RpcAction::SyncStatsGet { .. } => true, RpcAction::BlockProducerStatsGet { .. } => true, @@ -232,17 +236,17 @@ impl redux::EnablingCondition for RpcAction { .rpc .requests .get(rpc_id) - .map_or(false, |v| v.status.is_init()), + .is_some_and(|v| v.status.is_init()), RpcAction::P2pConnectionOutgoingError { rpc_id, .. } => state .rpc .requests .get(rpc_id) - .map_or(false, |v| v.status.is_pending()), + .is_some_and(|v| v.status.is_pending()), RpcAction::P2pConnectionOutgoingSuccess { rpc_id } => state .rpc .requests .get(rpc_id) - .map_or(false, |v| v.status.is_pending()), + .is_some_and(|v| v.status.is_pending()), RpcAction::P2pConnectionIncomingInit { rpc_id, .. } => { !state.rpc.requests.contains_key(rpc_id) } @@ -250,39 +254,39 @@ impl redux::EnablingCondition for RpcAction { .rpc .requests .get(rpc_id) - .map_or(false, |v| v.status.is_init()), + .is_some_and(|v| v.status.is_init()), RpcAction::P2pConnectionIncomingRespond { rpc_id, .. } | RpcAction::P2pConnectionIncomingAnswerReady { rpc_id, .. } => state .rpc .requests .get(rpc_id) - .map_or(false, |v| v.status.is_init() || v.status.is_pending()), + .is_some_and(|v| v.status.is_init() || v.status.is_pending()), RpcAction::P2pConnectionIncomingError { rpc_id, .. 
} => state .rpc .requests .get(rpc_id) - .map_or(false, |v| v.status.is_init() || v.status.is_pending()), + .is_some_and(|v| v.status.is_init() || v.status.is_pending()), RpcAction::P2pConnectionIncomingSuccess { rpc_id } => state .rpc .requests .get(rpc_id) - .map_or(false, |v| v.status.is_pending()), + .is_some_and(|v| v.status.is_pending()), RpcAction::ScanStateSummaryGetInit { .. } => true, RpcAction::ScanStateSummaryLedgerGetInit { rpc_id, .. } => state .rpc .requests .get(rpc_id) - .map_or(false, |v| v.status.is_init()), + .is_some_and(|v| v.status.is_init()), RpcAction::ScanStateSummaryGetPending { rpc_id, .. } => state .rpc .requests .get(rpc_id) - .map_or(false, |v| v.status.is_init()), + .is_some_and(|v| v.status.is_init()), RpcAction::ScanStateSummaryGetSuccess { rpc_id, .. } => state .rpc .requests .get(rpc_id) - .map_or(false, |v| v.status.is_pending()), + .is_some_and(|v| v.status.is_pending()), RpcAction::SnarkPoolAvailableJobsGet { .. } => true, RpcAction::SnarkPoolJobGet { .. } => true, RpcAction::SnarkerConfigGet { .. } => true, @@ -304,40 +308,40 @@ impl redux::EnablingCondition for RpcAction { .rpc .requests .get(rpc_id) - .map_or(false, |v| v.status.is_init()), + .is_some_and(|v| v.status.is_init()), RpcAction::LedgerAccountsGetSuccess { rpc_id, .. } => state .rpc .requests .get(rpc_id) - .map_or(false, |v| v.status.is_pending()), + .is_some_and(|v| v.status.is_pending()), RpcAction::TransactionInjectInit { .. } => true, RpcAction::TransactionInjectPending { rpc_id } => state .rpc .requests .get(rpc_id) - .map_or(false, |v| v.status.is_init()), + .is_some_and(|v| v.status.is_init()), RpcAction::TransactionInjectSuccess { rpc_id, .. } => state .rpc .requests .get(rpc_id) - .map_or(false, |v| v.status.is_pending()), + .is_some_and(|v| v.status.is_pending()), RpcAction::TransactionInjectRejected { rpc_id, .. 
} => state .rpc .requests .get(rpc_id) - .map_or(false, |v| v.status.is_pending()), + .is_some_and(|v| v.status.is_pending()), RpcAction::TransactionInjectFailure { rpc_id, .. } => state .rpc .requests .get(rpc_id) - .map_or(false, |v| v.status.is_pending()), + .is_some_and(|v| v.status.is_pending()), RpcAction::TransitionFrontierUserCommandsGet { .. } => true, RpcAction::Finish { rpc_id } => state .rpc .requests .get(rpc_id) - .map_or(false, |v| v.status.is_finished()), + .is_some_and(|v| v.status.is_finished()), } } } diff --git a/node/src/rpc/rpc_reducer.rs b/node/src/rpc/rpc_reducer.rs index 7c847bc4c1..707d0fe90a 100644 --- a/node/src/rpc/rpc_reducer.rs +++ b/node/src/rpc/rpc_reducer.rs @@ -42,6 +42,10 @@ impl RpcState { let dispatcher = state_context.into_dispatcher(); dispatcher.push(RpcEffectfulAction::StatusGet { rpc_id: *rpc_id }); } + RpcAction::HeartbeatGet { rpc_id } => { + let dispatcher = state_context.into_dispatcher(); + dispatcher.push(RpcEffectfulAction::HeartbeatGet { rpc_id: *rpc_id }); + } RpcAction::ActionStatsGet { rpc_id, query } => { let dispatcher = state_context.into_dispatcher(); dispatcher.push(RpcEffectfulAction::ActionStatsGet { @@ -637,31 +641,46 @@ pub fn collect_rpc_peers_info(state: &crate::State) -> Vec { .iter() .map(|(peer_id, state)| { let best_tip = state.status.as_ready().and_then(|r| r.best_tip.as_ref()); - let (connection_status, time, incoming) = match &state.status { + let (connection_status, time, incoming, connecting_details) = match &state.status { p2p::P2pPeerStatus::Connecting(c) => match c { - p2p::connection::P2pConnectionState::Outgoing(o) => { - (PeerConnectionStatus::Connecting, o.time().into(), false) - } - p2p::connection::P2pConnectionState::Incoming(i) => { - (PeerConnectionStatus::Connecting, i.time().into(), true) - } + p2p::connection::P2pConnectionState::Outgoing(o) => ( + PeerConnectionStatus::Connecting, + o.time().into(), + false, + Some(format!("{o:?}")), + ), + 
p2p::connection::P2pConnectionState::Incoming(i) => ( + PeerConnectionStatus::Connecting, + i.time().into(), + true, + Some(format!("{i:?}")), + ), }, - p2p::P2pPeerStatus::Disconnecting { time } => { - (PeerConnectionStatus::Disconnected, (*time).into(), false) - } - p2p::P2pPeerStatus::Disconnected { time } => { - (PeerConnectionStatus::Disconnected, (*time).into(), false) - } + p2p::P2pPeerStatus::Disconnecting { time } => ( + PeerConnectionStatus::Disconnecting, + (*time).into(), + false, + None, + ), + p2p::P2pPeerStatus::Disconnected { time } => ( + PeerConnectionStatus::Disconnected, + (*time).into(), + false, + None, + ), p2p::P2pPeerStatus::Ready(r) => ( PeerConnectionStatus::Connected, r.connected_since.into(), r.is_incoming, + None, ), }; RpcPeerInfo { peer_id: *peer_id, connection_status, + connecting_details, address: state.dial_opts.as_ref().map(|opts| opts.to_string()), + is_libp2p: state.is_libp2p, incoming, best_tip: best_tip.map(|bt| bt.hash.clone()), best_tip_height: best_tip.map(|bt| bt.height()), diff --git a/node/src/rpc_effectful/rpc_effectful_action.rs b/node/src/rpc_effectful/rpc_effectful_action.rs index d1c7d41714..1159ed2885 100644 --- a/node/src/rpc_effectful/rpc_effectful_action.rs +++ b/node/src/rpc_effectful/rpc_effectful_action.rs @@ -28,6 +28,9 @@ pub enum RpcEffectfulAction { StatusGet { rpc_id: RpcId, }, + HeartbeatGet { + rpc_id: RpcId, + }, ActionStatsGet { rpc_id: RpcId, query: ActionStatsQuery, diff --git a/node/src/rpc_effectful/rpc_effectful_effects.rs b/node/src/rpc_effectful/rpc_effectful_effects.rs index feb513a973..5b7643e8a3 100644 --- a/node/src/rpc_effectful/rpc_effectful_effects.rs +++ b/node/src/rpc_effectful/rpc_effectful_effects.rs @@ -1,3 +1,9 @@ +use std::{ + collections::{BTreeMap, BTreeSet}, + ffi::c_void, + time::Duration, +}; + use super::{super::rpc, RpcEffectfulAction}; use crate::{ block_producer::BlockProducerWonSlot, @@ -6,15 +12,16 @@ use crate::{ p2p_ready, rpc::{ AccountQuery, AccountSlim, 
ActionStatsQuery, ActionStatsResponse, CurrentMessageProgress, - MessagesStats, RootLedgerSyncProgress, RootStagedLedgerSyncProgress, RpcAction, - RpcBlockProducerStats, RpcMessageProgressResponse, RpcNodeStatus, - RpcNodeStatusTransactionPool, RpcNodeStatusTransitionFrontier, - RpcNodeStatusTransitionFrontierBlockSummary, RpcNodeStatusTransitionFrontierSync, - RpcRequestExtraData, RpcScanStateSummary, RpcScanStateSummaryBlock, - RpcScanStateSummaryBlockTransaction, RpcScanStateSummaryBlockTransactionKind, - RpcScanStateSummaryScanStateJob, RpcSnarkPoolJobFull, RpcSnarkPoolJobSnarkWork, - RpcSnarkPoolJobSummary, RpcSnarkerJobCommitResponse, RpcSnarkerJobSpecResponse, - RpcTransactionInjectResponse, TransactionStatus, + MessagesStats, NodeHeartbeat, RootLedgerSyncProgress, RootStagedLedgerSyncProgress, + RpcAction, RpcBlockProducerStats, RpcMessageProgressResponse, RpcNodeStatus, + RpcNodeStatusLedger, RpcNodeStatusResources, RpcNodeStatusTransactionPool, + RpcNodeStatusTransitionFrontier, RpcNodeStatusTransitionFrontierBlockSummary, + RpcNodeStatusTransitionFrontierSync, RpcRequestExtraData, RpcScanStateSummary, + RpcScanStateSummaryBlock, RpcScanStateSummaryBlockTransaction, + RpcScanStateSummaryBlockTransactionKind, RpcScanStateSummaryScanStateJob, + RpcSnarkPoolJobFull, RpcSnarkPoolJobSnarkWork, RpcSnarkPoolJobSummary, + RpcSnarkerJobCommitResponse, RpcSnarkerJobSpecResponse, RpcTransactionInjectResponse, + TransactionStatus, }, snark_pool::SnarkPoolAction, transition_frontier::sync::{ @@ -26,14 +33,14 @@ use ledger::{ scan_state::currency::{Balance, Magnitude}, Account, }; -use mina_p2p_messages::{rpc_kernel::QueryHeader, v2::MinaBaseTransactionStatusStableV2}; +use malloc_size_of::{MallocSizeOf, MallocSizeOfOps}; +use mina_p2p_messages::{rpc_kernel::QueryHeader, v2}; use mina_signer::CompressedPubKey; -use openmina_core::block::ArcBlockWithHash; +use openmina_core::{block::ArcBlockWithHash, bug_condition}; use p2p::channels::streaming_rpc::{ 
staged_ledger_parts::calc_total_pieces_to_transfer, P2pStreamingRpcReceiveProgress, }; use redux::ActionWithMeta; -use std::{collections::BTreeMap, time::Duration}; macro_rules! respond_or_log { ($e:expr, $t:expr) => { @@ -53,48 +60,35 @@ pub fn rpc_effects(store: &mut Store, action: ActionWithMeta { - let state = store.state.get(); - let chain_id = state.p2p.ready().map(|p2p| p2p.chain_id.to_hex()); - let block_summary = - |b: &ArcBlockWithHash| RpcNodeStatusTransitionFrontierBlockSummary { - hash: b.hash().clone(), - height: b.height(), - global_slot: b.global_slot(), - }; - let current_block_production_attempt = store + let status = compute_node_status(store); + let _ = store.service.respond_status_get(rpc_id, Some(status)); + } + RpcEffectfulAction::HeartbeatGet { rpc_id } => { + let status = compute_node_status(store); + let last_produced_block = store .service .stats() - .and_then(|stats| Some(stats.block_producer().collect_attempts().last()?.clone())); - let status = RpcNodeStatus { - chain_id, - transition_frontier: RpcNodeStatusTransitionFrontier { - best_tip: state.transition_frontier.best_tip().map(block_summary), - sync: RpcNodeStatusTransitionFrontierSync { - time: state.transition_frontier.sync.time(), - status: state.transition_frontier.sync.to_string(), - phase: state.transition_frontier.sync.sync_phase().to_string(), - target: state.transition_frontier.sync.best_tip().map(block_summary), - }, - }, - peers: rpc::collect_rpc_peers_info(state), - snark_pool: state.snark_pool.jobs_iter().fold( - Default::default(), - |mut acc, job| { - if job.snark.is_some() { - acc.snarks = acc.snarks.saturating_add(1); - } - acc.total_jobs = acc.total_jobs.saturating_add(1); - acc - }, - ), - transaction_pool: RpcNodeStatusTransactionPool { - transactions: state.transaction_pool.size(), - transactions_for_propagation: state.transaction_pool.for_propagation_size(), - transaction_candidates: state.transaction_pool.candidates.transactions_count(), - }, - 
current_block_production_attempt, + .and_then(|stats| stats.block_producer().last_produced_block.take()); + + let last_produced_block = match base64_encode_block(last_produced_block) { + Ok(block) => block, + Err(error) => { + bug_condition!("HeartbeatGet: Failed to encode block, returning None: {error}"); + None + } }; - let _ = store.service.respond_status_get(rpc_id, Some(status)); + + let heartbeat = NodeHeartbeat { + status: status.into(), + node_timestamp: meta.time(), + peer_id: store.state().p2p.my_id(), + last_produced_block, + }; + let response = store + .service() + .with_producer_keypair(move |sk| heartbeat.sign(sk)); + + let _ = store.service.respond_heartbeat_get(rpc_id, response); } RpcEffectfulAction::ActionStatsGet { rpc_id, query } => match query { ActionStatsQuery::SinceStart => { @@ -369,13 +363,14 @@ pub fn rpc_effects(store: &mut Store, action: ActionWithMeta(store: &mut Store, action: ActionWithMeta(store: &mut Store) -> RpcNodeStatus { + let state = store.state.get(); + let chain_id = state.p2p.ready().map(|p2p| p2p.chain_id.to_hex()); + let block_summary = |b: &ArcBlockWithHash| RpcNodeStatusTransitionFrontierBlockSummary { + hash: b.hash().clone(), + height: b.height(), + global_slot: b.global_slot(), + }; + let current_block_production_attempt = store + .service + .stats() + .and_then(|stats| Some(stats.block_producer().collect_attempts().last()?.clone())); + let status = RpcNodeStatus { + chain_id, + transition_frontier: RpcNodeStatusTransitionFrontier { + best_tip: state.transition_frontier.best_tip().map(block_summary), + sync: RpcNodeStatusTransitionFrontierSync { + time: state.transition_frontier.sync.time(), + status: state.transition_frontier.sync.to_string(), + phase: state.transition_frontier.sync.sync_phase().to_string(), + target: state.transition_frontier.sync.best_tip().map(block_summary), + }, + }, + ledger: RpcNodeStatusLedger { + alive_masks_after_last_commit: state.ledger.alive_masks, + pending_writes: state + .ledger + 
.write + .pending_requests() + .map(|(req, time)| (req.kind(), time)) + .collect(), + pending_reads: state + .ledger + .read + .pending_requests() + .map(|(id, req, time)| (id, req.kind(), time)) + .collect(), + }, + peers: rpc::collect_rpc_peers_info(state), + snark_pool: state + .snark_pool + .jobs_iter() + .fold(Default::default(), |mut acc, job| { + if job.snark.is_some() { + acc.snarks = acc.snarks.saturating_add(1); + } + acc.total_jobs = acc.total_jobs.saturating_add(1); + acc + }), + transaction_pool: RpcNodeStatusTransactionPool { + transactions: state.transaction_pool.size(), + transactions_for_propagation: state.transaction_pool.for_propagation_size(), + transaction_candidates: state.transaction_pool.candidates.transactions_count(), + }, + current_block_production_attempt, + resources_status: RpcNodeStatusResources { + p2p_malloc_size: { + let mut set = BTreeSet::new(); + let fun = move |ptr: *const c_void| !set.insert(ptr.addr()); + let mut ops = MallocSizeOfOps::new(None, Some(Box::new(fun))); + size_of_val(&state.p2p).saturating_add(state.p2p.size_of(&mut ops)) + }, + transition_frontier: state.transition_frontier.resources_usage(), + snark_pool: state.snark_pool.resources_usage(), + }, + }; + status +} + +fn base64_encode_block(block: Option) -> std::io::Result> { + use base64::{engine::general_purpose::URL_SAFE, Engine as _}; + use mina_p2p_messages::binprot::BinProtWrite; + + let Some(block) = block else { return Ok(None) }; + + let mut buf = Vec::with_capacity(10 * 1024 * 1024); + v2::MinaBlockBlockStableV2::binprot_write(&block.block, &mut buf)?; + + let base64_encoded = URL_SAFE.encode(&buf); + + Ok(Some(base64_encoded)) +} diff --git a/node/src/rpc_effectful/rpc_service.rs b/node/src/rpc_effectful/rpc_service.rs index 11e524f3e0..8400db0575 100644 --- a/node/src/rpc_effectful/rpc_service.rs +++ b/node/src/rpc_effectful/rpc_service.rs @@ -3,13 +3,14 @@ use crate::{ rpc::{ RpcActionStatsGetResponse, RpcBestChainResponse, 
RpcBlockProducerStatsGetResponse, RpcDiscoveryBoostrapStatsResponse, RpcDiscoveryRoutingTableResponse, - RpcHealthCheckResponse, RpcId, RpcLedgerAccountsResponse, RpcLedgerSlimAccountsResponse, - RpcMessageProgressResponse, RpcP2pConnectionOutgoingResponse, RpcPeersGetResponse, - RpcReadinessCheckResponse, RpcScanStateSummaryGetResponse, RpcSnarkPoolGetResponse, - RpcSnarkPoolJobGetResponse, RpcSnarkerConfigGetResponse, RpcSnarkerJobCommitResponse, - RpcSnarkerJobSpecResponse, RpcSnarkerWorkersResponse, RpcStatusGetResponse, - RpcSyncStatsGetResponse, RpcTransactionInjectResponse, RpcTransactionPoolResponse, - RpcTransactionStatusGetResponse, RpcTransitionFrontierUserCommandsResponse, + RpcHealthCheckResponse, RpcHeartbeatGetResponse, RpcId, RpcLedgerAccountsResponse, + RpcLedgerSlimAccountsResponse, RpcMessageProgressResponse, + RpcP2pConnectionOutgoingResponse, RpcPeersGetResponse, RpcReadinessCheckResponse, + RpcScanStateSummaryGetResponse, RpcSnarkPoolGetResponse, RpcSnarkPoolJobGetResponse, + RpcSnarkerConfigGetResponse, RpcSnarkerJobCommitResponse, RpcSnarkerJobSpecResponse, + RpcSnarkerWorkersResponse, RpcStatusGetResponse, RpcSyncStatsGetResponse, + RpcTransactionInjectResponse, RpcTransactionPoolResponse, RpcTransactionStatusGetResponse, + RpcTransitionFrontierUserCommandsResponse, }, State, }; @@ -52,6 +53,11 @@ pub trait RpcService { rpc_id: RpcId, response: RpcStatusGetResponse, ) -> Result<(), RespondError>; + fn respond_heartbeat_get( + &mut self, + rpc_id: RpcId, + response: RpcHeartbeatGetResponse, + ) -> Result<(), RespondError>; fn respond_action_stats_get( &mut self, rpc_id: RpcId, diff --git a/node/src/service.rs b/node/src/service.rs index c40d3fc3c0..9ad4c45671 100644 --- a/node/src/service.rs +++ b/node/src/service.rs @@ -9,6 +9,7 @@ pub use crate::rpc_effectful::RpcService; pub use crate::snark::block_verify_effectful::SnarkBlockVerifyService; pub use crate::snark::work_verify_effectful::SnarkWorkVerifyService; pub use 
crate::snark_pool::SnarkPoolService; +pub use crate::transition_frontier::archive::archive_service::ArchiveService; pub use crate::transition_frontier::genesis_effectful::TransitionFrontierGenesisService; pub use crate::transition_frontier::sync::ledger::snarked::TransitionFrontierSyncLedgerSnarkedService; pub use redux::TimeService; @@ -31,6 +32,7 @@ pub trait Service: + BlockProducerService + ExternalSnarkWorkerService + RpcService + + ArchiveService { fn stats(&mut self) -> Option<&mut Stats>; fn recorder(&mut self) -> &mut Recorder; diff --git a/node/src/snark_pool/candidate/snark_pool_candidate_actions.rs b/node/src/snark_pool/candidate/snark_pool_candidate_actions.rs index 7ddbb16731..f35ce599a1 100644 --- a/node/src/snark_pool/candidate/snark_pool_candidate_actions.rs +++ b/node/src/snark_pool/candidate/snark_pool_candidate_actions.rs @@ -76,15 +76,13 @@ impl redux::EnablingCondition for SnarkPoolCandidateAction { let is_peer_available = state .p2p .get_ready_peer(peer_id) - .map_or(false, |peer| peer.channels.rpc.can_send_request()); + .is_some_and(|peer| peer.channels.rpc.can_send_request()); is_peer_available && state .snark_pool .candidates .get(*peer_id, job_id) - .map_or(false, |s| { - matches!(s, SnarkPoolCandidateState::InfoReceived { .. }) - }) + .is_some_and(|s| matches!(s, SnarkPoolCandidateState::InfoReceived { .. })) } SnarkPoolCandidateAction::WorkFetchPending { peer_id, job_id, .. @@ -92,16 +90,12 @@ impl redux::EnablingCondition for SnarkPoolCandidateAction { .snark_pool .candidates .get(*peer_id, job_id) - .map_or(false, |s| { - matches!(s, SnarkPoolCandidateState::InfoReceived { .. }) - }), + .is_some_and(|s| matches!(s, SnarkPoolCandidateState::InfoReceived { .. })), SnarkPoolCandidateAction::WorkFetchError { peer_id, job_id } => state .snark_pool .candidates .get(*peer_id, job_id) - .map_or(false, |s| { - matches!(s, SnarkPoolCandidateState::WorkFetchPending { .. 
}) - }), + .is_some_and(|s| matches!(s, SnarkPoolCandidateState::WorkFetchPending { .. })), SnarkPoolCandidateAction::WorkFetchSuccess { peer_id, work } => { let job_id = work.job_id(); state.snark_pool.contains(&job_id) diff --git a/node/src/snark_pool/candidate/snark_pool_candidate_state.rs b/node/src/snark_pool/candidate/snark_pool_candidate_state.rs index 6a1bb5df62..61f52d1778 100644 --- a/node/src/snark_pool/candidate/snark_pool_candidate_state.rs +++ b/node/src/snark_pool/candidate/snark_pool_candidate_state.rs @@ -51,6 +51,22 @@ impl SnarkPoolCandidatesState { Self::default() } + pub fn check(&self) -> (usize, Vec<(PeerId, SnarkJobId)>) { + let len = self.by_peer.values().map(BTreeMap::len).sum::(); + let lhs = self + .by_job_id + .iter() + .flat_map(|(job_id, v)| v.iter().map(|peer_id| (*peer_id, job_id.clone()))) + .collect::>(); + let rhs = self + .by_peer + .iter() + .flat_map(|(peer_id, v)| v.keys().map(|job_id| (*peer_id, job_id.clone()))) + .collect::>(); + let inconsistency = lhs.symmetric_difference(&rhs).cloned().collect(); + (len, inconsistency) + } + pub fn peer_work_count(&self, peer_id: &PeerId) -> usize { self.by_peer.get(peer_id).map(|v| v.len()).unwrap_or(0) } diff --git a/node/src/snark_pool/snark_pool_actions.rs b/node/src/snark_pool/snark_pool_actions.rs index 2ecb579494..189b5e2352 100644 --- a/node/src/snark_pool/snark_pool_actions.rs +++ b/node/src/snark_pool/snark_pool_actions.rs @@ -55,11 +55,9 @@ impl redux::EnablingCondition for SnarkPoolAction { fn is_enabled(&self, state: &crate::State, time: redux::Timestamp) -> bool { match self { SnarkPoolAction::Candidate(action) => action.is_enabled(state, time), - SnarkPoolAction::AutoCreateCommitment => state - .config - .snarker - .as_ref() - .map_or(false, |v| v.auto_commit), + SnarkPoolAction::AutoCreateCommitment => { + state.config.snarker.as_ref().is_some_and(|v| v.auto_commit) + } SnarkPoolAction::CommitmentCreateMany { .. 
} => state.config.snarker.is_some(), SnarkPoolAction::CommitmentCreate { job_id } => { state.config.snarker.is_some() && state.snark_pool.should_create_commitment(job_id) @@ -67,19 +65,17 @@ impl redux::EnablingCondition for SnarkPoolAction { SnarkPoolAction::CommitmentAdd { commitment, .. } => state .snark_pool .get(&commitment.job_id) - .map_or(false, |s| match s.commitment.as_ref() { + .is_some_and(|s| match s.commitment.as_ref() { Some(cur) => commitment > &cur.commitment, None => true, }), - SnarkPoolAction::WorkAdd { snark, .. } => { - state - .snark_pool - .get(&snark.job_id()) - .map_or(false, |s| match s.snark.as_ref() { - Some(cur) => snark > &cur.work, - None => true, - }) - } + SnarkPoolAction::WorkAdd { snark, .. } => state + .snark_pool + .get(&snark.job_id()) + .is_some_and(|s| match s.snark.as_ref() { + Some(cur) => snark > &cur.work, + None => true, + }), SnarkPoolAction::P2pSend { peer_id } => state .p2p .get_ready_peer(peer_id) @@ -101,7 +97,7 @@ impl redux::EnablingCondition for SnarkPoolAction { || peer_best_tip.pred_hash() == our_best_tip }) }) - .map_or(false, |p| { + .is_some_and(|p| { let check = |(next_index, limit), last_index| limit > 0 && next_index <= last_index; let last_index = state.snark_pool.last_index(); @@ -113,7 +109,7 @@ impl redux::EnablingCondition for SnarkPoolAction { }), SnarkPoolAction::CheckTimeouts => time .checked_sub(state.snark_pool.last_check_timeouts) - .map_or(false, |dur| dur.as_secs() >= 5), + .is_some_and(|dur| dur.as_secs() >= 5), SnarkPoolAction::JobCommitmentTimeout { job_id } => { state.snark_pool.is_commitment_timed_out(job_id, time) } diff --git a/node/src/snark_pool/snark_pool_reducer.rs b/node/src/snark_pool/snark_pool_reducer.rs index b8d4c6f6ad..ad0cd19057 100644 --- a/node/src/snark_pool/snark_pool_reducer.rs +++ b/node/src/snark_pool/snark_pool_reducer.rs @@ -202,15 +202,13 @@ impl SnarkPoolState { } } - // TODO: we only rebroadcast locally produced snarks here. 
- // libp2p logic already broadcasts everything right now and doesn't + // TODO: libp2p logic already broadcasts everything right now and doesn't // wait for validation, thad needs to be fixed. See #952 - if *is_sender_local { - dispatcher.push(P2pChannelsSnarkAction::Libp2pBroadcast { - snark: snark.clone(), - nonce: 0, - }); - } + dispatcher.push(P2pChannelsSnarkAction::Libp2pBroadcast { + snark: snark.clone(), + nonce: 0, + is_local: *is_sender_local, + }); } SnarkPoolAction::P2pSendAll { .. } => { let (dispatcher, global_state) = state_context.into_dispatcher_and_state(); diff --git a/node/src/snark_pool/snark_pool_state.rs b/node/src/snark_pool/snark_pool_state.rs index 93ce18faa1..a5c6de0d53 100644 --- a/node/src/snark_pool/snark_pool_state.rs +++ b/node/src/snark_pool/snark_pool_state.rs @@ -130,12 +130,12 @@ impl SnarkPoolState { } pub fn should_create_commitment(&self, job_id: &SnarkJobId) -> bool { - self.get(job_id).map_or(false, |s| s.is_available()) + self.get(job_id).is_some_and(|s| s.is_available()) } pub fn is_commitment_timed_out(&self, id: &SnarkJobId, time_now: Timestamp) -> bool { self.get(id) - .map_or(false, |job| is_job_commitment_timed_out(job, time_now)) + .is_some_and(|job| is_job_commitment_timed_out(job, time_now)) } pub fn timed_out_commitments_iter( @@ -203,6 +203,16 @@ impl SnarkPoolState { self.pool .next_messages_to_send(index_and_limit, |job| job.snark_msg()) } + + pub fn resources_usage(&self) -> serde_json::Value { + let (size, inconsistency) = self.candidates.check(); + + serde_json::json!({ + "pool_size": self.pool.len(), + "candidates_size": size, + "candidates_inconsistency": inconsistency, + }) + } } fn is_job_commitment_timed_out(job: &JobState, time_now: Timestamp) -> bool { @@ -212,7 +222,7 @@ fn is_job_commitment_timed_out(job: &JobState, time_now: Timestamp) -> bool { let timeout = job.estimated_duration(); let passed_time = time_now.checked_sub(commitment.commitment.timestamp()); - let is_timed_out = 
passed_time.map_or(false, |dur| dur >= timeout); + let is_timed_out = passed_time.is_some_and(|dur| dur >= timeout); let didnt_deliver = job .snark .as_ref() diff --git a/node/src/state.rs b/node/src/state.rs index 293301906f..4c829a4d9d 100644 --- a/node/src/state.rs +++ b/node/src/state.rs @@ -1,9 +1,11 @@ use std::sync::Arc; use std::time::Duration; +use malloc_size_of_derive::MallocSizeOf; use mina_p2p_messages::v2; use openmina_core::constants::PROTOCOL_VERSION; use openmina_core::transaction::{TransactionInfo, TransactionWithHash}; +use p2p::P2pNetworkPubsubMessageCacheId; use rand::prelude::*; use openmina_core::block::BlockWithHash; @@ -29,7 +31,6 @@ use snark::work_verify::SnarkWorkVerifyState; use crate::block_producer::vrf_evaluator::BlockProducerVrfEvaluatorState; pub use crate::block_producer::BlockProducerState; -pub use crate::consensus::ConsensusState; use crate::external_snark_worker::{ExternalSnarkWorker, ExternalSnarkWorkers}; use crate::ledger::read::LedgerReadState; use crate::ledger::write::LedgerWriteState; @@ -45,6 +46,8 @@ use crate::transaction_pool::candidate::{ TransactionPoolCandidateAction, TransactionPoolCandidatesState, }; use crate::transaction_pool::TransactionPoolState; +use crate::transition_frontier::candidate::TransitionFrontierCandidateAction; +pub use crate::transition_frontier::candidate::TransitionFrontierCandidatesState; use crate::transition_frontier::genesis::TransitionFrontierGenesisState; use crate::transition_frontier::sync::ledger::snarked::TransitionFrontierSyncLedgerSnarkedState; use crate::transition_frontier::sync::ledger::staged::TransitionFrontierSyncLedgerStagedState; @@ -54,7 +57,7 @@ pub use crate::transition_frontier::TransitionFrontierState; pub use crate::watched_accounts::WatchedAccountsState; pub use crate::Config; use crate::{config::GlobalConfig, SnarkPoolAction}; -use crate::{ActionWithMeta, ConsensusAction, RpcAction, TransactionPoolAction}; +use crate::{ActionWithMeta, RpcAction, 
TransactionPoolAction}; #[derive(Serialize, Deserialize, Debug, Clone)] pub struct State { @@ -63,7 +66,6 @@ pub struct State { pub p2p: P2p, pub ledger: LedgerState, pub snark: SnarkState, - pub consensus: ConsensusState, pub transition_frontier: TransitionFrontierState, pub snark_pool: SnarkPoolState, pub external_snark_worker: ExternalSnarkWorkers, @@ -108,8 +110,12 @@ impl_substate_access!( SnarkUserCommandVerifyState, snark.user_command_verify ); -impl_substate_access!(State, ConsensusState, consensus); impl_substate_access!(State, TransitionFrontierState, transition_frontier); +impl_substate_access!( + State, + TransitionFrontierCandidatesState, + transition_frontier.candidates +); impl_substate_access!(State, TransactionPoolState, transaction_pool); impl_substate_access!( State, @@ -281,8 +287,10 @@ impl State { ledger: LedgerState::new(config.ledger), snark_pool: SnarkPoolState::new(), snark: SnarkState::new(config.snark), - consensus: ConsensusState::new(), - transition_frontier: TransitionFrontierState::new(config.transition_frontier), + transition_frontier: TransitionFrontierState::new( + config.transition_frontier, + config.archive.is_some(), + ), external_snark_worker: ExternalSnarkWorkers::new(now), block_producer: BlockProducerState::new(now, config.block_producer), rpc: RpcState::new(), @@ -373,7 +381,7 @@ impl State { let two_mins_in_future = self.time() + Duration::from_secs(2 * 60); self.block_producer.with(false, |bp| { bp.current.won_slot_should_produce(two_mins_in_future) - }) && self.genesis_block().map_or(false, |b| { + }) && self.genesis_block().is_some_and(|b| { let slot = &b.consensus_state().curr_global_slot_since_hard_fork; let epoch = slot .slot_number @@ -472,9 +480,9 @@ impl State { } #[serde_with::serde_as] -#[derive(Debug, Clone, Serialize, Deserialize)] +#[derive(Debug, Clone, Serialize, Deserialize, MallocSizeOf)] pub enum P2p { - Pending(P2pConfig), + Pending(#[ignore_malloc_size_of = "constant"] P2pConfig), Ready(P2pState), } 
@@ -595,7 +603,7 @@ impl P2p { )), on_p2p_peer_best_tip_update: Some(redux::callback!( on_p2p_peer_best_tip_update(best_tip: BlockWithHash>) -> crate::Action { - ConsensusAction::P2pBestTipUpdate { best_tip } + TransitionFrontierCandidateAction::P2pBestTipUpdate { best_tip } } )), on_p2p_channels_rpc_ready: Some(redux::callback!( @@ -628,6 +636,11 @@ impl P2p { P2pCallbacksAction::P2pChannelsStreamingRpcTimeout { peer_id, id } } )), + on_p2p_pubsub_message_received: Some(redux::callback!( + on_p2p_pubsub_message_received((message_id: P2pNetworkPubsubMessageCacheId)) -> crate::Action{ + P2pCallbacksAction::P2pPubsubValidateMessage { message_id } + } + )), } } diff --git a/node/src/stats/stats_block_producer.rs b/node/src/stats/stats_block_producer.rs index afdc28dc8a..e0c7785612 100644 --- a/node/src/stats/stats_block_producer.rs +++ b/node/src/stats/stats_block_producer.rs @@ -2,7 +2,7 @@ use std::collections::{BTreeMap, VecDeque}; use ledger::AccountIndex; use mina_p2p_messages::v2; -use openmina_core::block::AppliedBlock; +use openmina_core::block::{AppliedBlock, ArcBlockWithHash}; use serde::{Deserialize, Serialize}; use crate::{ @@ -16,6 +16,7 @@ const MAX_HISTORY: usize = 2048; pub struct BlockProducerStats { pub(super) attempts: VecDeque, pub vrf_evaluator: BTreeMap, + pub last_produced_block: Option, } #[derive(Serialize, Deserialize, Debug, Clone)] @@ -111,7 +112,7 @@ impl BlockProducerStats { self.attempts .back() .and_then(|v| v.block.as_ref()) - .map_or(false, |b| &b.hash == hash) + .is_some_and(|b| &b.hash == hash) } pub fn collect_attempts(&self) -> Vec { @@ -393,7 +394,11 @@ impl From<(&BlockHash, &BlockWithoutProof)> for ProducedBlock { .as_u32(), transactions: block.into(), completed_works_count: block.body.completed_works_count(), - coinbase: block.body.coinbase_sum(), + coinbase: if block.body.has_coinbase() { + openmina_core::constants::constraint_constants().coinbase_amount + } else { + 0 + }, fees: block.body.fees_sum(), snark_fees: 
block.body.snark_fees_sum(), } diff --git a/node/src/stats/stats_sync.rs b/node/src/stats/stats_sync.rs index 04a323288c..87132ed02c 100644 --- a/node/src/stats/stats_sync.rs +++ b/node/src/stats/stats_sync.rs @@ -305,7 +305,7 @@ impl SyncStats { .iter() .rev() // .take_while(|s| { - // !s.is_apply_success() || s.block().map_or(false, |b| b.height() == root_height) + // !s.is_apply_success() || s.block().is_some_and( |b| b.height() == root_height) // }) .enumerate() .map(|(i, s)| { diff --git a/node/src/transaction_pool/candidate/transaction_pool_candidate_actions.rs b/node/src/transaction_pool/candidate/transaction_pool_candidate_actions.rs index b2f1875c52..42d5e4ec2d 100644 --- a/node/src/transaction_pool/candidate/transaction_pool_candidate_actions.rs +++ b/node/src/transaction_pool/candidate/transaction_pool_candidate_actions.rs @@ -72,13 +72,13 @@ impl redux::EnablingCondition for TransactionPoolCandidateAction { let is_peer_available = state .p2p .get_ready_peer(peer_id) - .map_or(false, |peer| peer.channels.rpc.can_send_request()); + .is_some_and(|peer| peer.channels.rpc.can_send_request()); is_peer_available && state .transaction_pool .candidates .get(*peer_id, hash) - .map_or(false, |s| { + .is_some_and(|s| { matches!(s, TransactionPoolCandidateState::InfoReceived { .. }) }) } @@ -86,9 +86,7 @@ impl redux::EnablingCondition for TransactionPoolCandidateAction { .transaction_pool .candidates .get(*peer_id, hash) - .map_or(false, |s| { - matches!(s, TransactionPoolCandidateState::InfoReceived { .. }) - }), + .is_some_and(|s| matches!(s, TransactionPoolCandidateState::InfoReceived { .. 
})), TransactionPoolCandidateAction::FetchError { peer_id, hash } => state .transaction_pool .candidates diff --git a/node/src/transaction_pool/candidate/transaction_pool_candidate_state.rs b/node/src/transaction_pool/candidate/transaction_pool_candidate_state.rs index bcb33c1326..132adccc93 100644 --- a/node/src/transaction_pool/candidate/transaction_pool_candidate_state.rs +++ b/node/src/transaction_pool/candidate/transaction_pool_candidate_state.rs @@ -71,7 +71,7 @@ impl TransactionPoolCandidatesState { pub fn peer_contains(&self, peer_id: PeerId, hash: &TransactionHash) -> bool { self.by_peer .get(&peer_id) - .map_or(false, |txs| txs.contains_key(hash)) + .is_some_and(|txs| txs.contains_key(hash)) } pub fn get( diff --git a/node/src/transaction_pool/transaction_pool_actions.rs b/node/src/transaction_pool/transaction_pool_actions.rs index 1d3eb8b597..f271feff89 100644 --- a/node/src/transaction_pool/transaction_pool_actions.rs +++ b/node/src/transaction_pool/transaction_pool_actions.rs @@ -69,6 +69,7 @@ pub enum TransactionPoolAction { Rebroadcast { accepted: Vec, rejected: Vec<(ValidCommandWithHash, diff::Error)>, + is_local: bool, }, CollectTransactionsByFee, #[action_event(level = trace)] @@ -105,7 +106,7 @@ impl redux::EnablingCondition for TransactionPoolAction { || peer_best_tip.pred_hash() == our_best_tip }) }) - .map_or(false, |p| { + .is_some_and(|p| { let check = |(next_index, limit), last_index| limit > 0 && next_index <= last_index; let last_index = state.transaction_pool.dpool.last_index(); @@ -115,9 +116,9 @@ impl redux::EnablingCondition for TransactionPoolAction { last_index, ) }), - TransactionPoolAction::Rebroadcast { accepted, rejected } => { - !(accepted.is_empty() && rejected.is_empty()) - } + TransactionPoolAction::Rebroadcast { + accepted, rejected, .. 
+ } => !(accepted.is_empty() && rejected.is_empty()), _ => true, } } diff --git a/node/src/transaction_pool/transaction_pool_reducer.rs b/node/src/transaction_pool/transaction_pool_reducer.rs index d0f7c5f6af..c1b4a4e1c4 100644 --- a/node/src/transaction_pool/transaction_pool_reducer.rs +++ b/node/src/transaction_pool/transaction_pool_reducer.rs @@ -300,11 +300,14 @@ impl TransactionPoolState { if let Some(rpc_action) = rpc_action { dispatcher.push(rpc_action); } - // TODO: we only rebroadcast locally injected transactions here. - // libp2p logic already broadcasts everything right now and doesn't + // TODO: libp2p logic already broadcasts everything right now and doesn't // wait for validation, thad needs to be fixed. See #952 - if is_sender_local && was_accepted { - dispatcher.push(TransactionPoolAction::Rebroadcast { accepted, rejected }); + if was_accepted { + dispatcher.push(TransactionPoolAction::Rebroadcast { + accepted, + rejected, + is_local: is_sender_local, + }); } } TransactionPoolAction::ApplyTransitionFrontierDiff { @@ -372,7 +375,11 @@ impl TransactionPoolState { ); } } - TransactionPoolAction::Rebroadcast { accepted, rejected } => { + TransactionPoolAction::Rebroadcast { + accepted, + rejected, + is_local, + } => { let rejected = rejected.iter().map(|(cmd, _)| cmd.data.forget_check()); let all_commands = accepted @@ -387,6 +394,7 @@ impl TransactionPoolState { dispatcher.push(P2pChannelsTransactionAction::Libp2pBroadcast { transaction: Box::new((&cmd).into()), nonce: 0, + is_local: *is_local, }); } } diff --git a/node/src/transition_frontier/archive/archive_config.rs b/node/src/transition_frontier/archive/archive_config.rs new file mode 100644 index 0000000000..5ff5a382be --- /dev/null +++ b/node/src/transition_frontier/archive/archive_config.rs @@ -0,0 +1,14 @@ +use serde::{Deserialize, Serialize}; + +#[derive(Serialize, Deserialize, Debug, Clone)] +pub struct ArchiveConfig { + pub address: String, +} + +impl ArchiveConfig { + pub fn new(address: 
&str) -> Self { + Self { + address: address.to_string(), + } + } +} diff --git a/node/src/transition_frontier/archive/archive_service.rs b/node/src/transition_frontier/archive/archive_service.rs new file mode 100644 index 0000000000..b61ba9a736 --- /dev/null +++ b/node/src/transition_frontier/archive/archive_service.rs @@ -0,0 +1,5 @@ +use mina_p2p_messages::v2::ArchiveTransitionFronntierDiff; + +pub trait ArchiveService: redux::Service { + fn send_to_archive(&mut self, data: ArchiveTransitionFronntierDiff); +} diff --git a/node/src/transition_frontier/archive/mod.rs b/node/src/transition_frontier/archive/mod.rs new file mode 100644 index 0000000000..bbc5c46837 --- /dev/null +++ b/node/src/transition_frontier/archive/mod.rs @@ -0,0 +1,2 @@ +pub mod archive_config; +pub mod archive_service; diff --git a/node/src/transition_frontier/candidate/mod.rs b/node/src/transition_frontier/candidate/mod.rs new file mode 100644 index 0000000000..0234bbb07a --- /dev/null +++ b/node/src/transition_frontier/candidate/mod.rs @@ -0,0 +1,8 @@ +mod transition_frontier_candidate_state; +pub use transition_frontier_candidate_state::*; + +mod transition_frontier_candidate_actions; +pub use transition_frontier_candidate_actions::*; + +mod transition_frontier_candidate_reducer; +pub use transition_frontier_candidate_reducer::allow_block_too_late; diff --git a/node/src/consensus/consensus_actions.rs b/node/src/transition_frontier/candidate/transition_frontier_candidate_actions.rs similarity index 53% rename from node/src/consensus/consensus_actions.rs rename to node/src/transition_frontier/candidate/transition_frontier_candidate_actions.rs index 0aeb10923a..8bb3df049e 100644 --- a/node/src/consensus/consensus_actions.rs +++ b/node/src/transition_frontier/candidate/transition_frontier_candidate_actions.rs @@ -7,18 +7,21 @@ use openmina_core::{action_event, ActionEvent}; use serde::{Deserialize, Serialize}; use snark::block_verify::SnarkBlockVerifyError; -use 
crate::consensus::ConsensusBlockStatus; use crate::snark::block_verify::SnarkBlockVerifyId; use crate::state::BlockPrevalidationError; -pub type ConsensusActionWithMeta = redux::ActionWithMeta; -pub type ConsensusActionWithMetaRef<'a> = redux::ActionWithMeta<&'a ConsensusAction>; +use super::TransitionFrontierCandidateStatus; + +pub type TransitionFrontierCandidateActionWithMeta = + redux::ActionWithMeta; +pub type TransitionFrontierCandidateActionWithMetaRef<'a> = + redux::ActionWithMeta<&'a TransitionFrontierCandidateAction>; // NOTE: `debug(hash)` must be used instead of `display(hash)` because // for some reason the later breaks CI. `hash = display(&hash)` works too. #[derive(Serialize, Deserialize, Debug, Clone, ActionEvent)] #[action_event(level = debug, fields(debug(hash), debug(error)))] -pub enum ConsensusAction { +pub enum TransitionFrontierCandidateAction { #[action_event(level = info)] BlockReceived { hash: StateHash, @@ -69,104 +72,111 @@ pub enum ConsensusAction { Prune, } -impl redux::EnablingCondition for ConsensusAction { +impl redux::EnablingCondition for TransitionFrontierCandidateAction { fn is_enabled(&self, state: &crate::State, _time: redux::Timestamp) -> bool { match self { - ConsensusAction::BlockReceived { hash, block, .. } => { + TransitionFrontierCandidateAction::BlockReceived { hash, block, .. } => { let block = ArcBlockWithHash { hash: hash.clone(), block: block.clone() }; - !block.is_genesis() && !state.consensus.blocks.contains_key(hash) + !block.is_genesis() && !state.transition_frontier.candidates.blocks.contains_key(hash) }, - ConsensusAction::BlockPrevalidateSuccess { hash } - | ConsensusAction::BlockPrevalidateError { hash, .. } => state - .consensus + TransitionFrontierCandidateAction::BlockPrevalidateSuccess { hash } + | TransitionFrontierCandidateAction::BlockPrevalidateError { hash, .. 
} => state + .transition_frontier.candidates .blocks .get(hash) - .map_or(false, |block| block.status.is_received()), - ConsensusAction::BlockChainProofUpdate { hash, .. } => { - (state.consensus.best_tip.as_ref() == Some(hash) - && state.consensus.best_tip_chain_proof.is_none()) - || state - .consensus + .is_some_and(|block| block.status.is_received()), + TransitionFrontierCandidateAction::BlockChainProofUpdate { hash, .. } => { + (state.transition_frontier.candidates.best_tip.as_ref() == Some(hash) + && state.transition_frontier.candidates.best_tip_chain_proof.is_none()) + || state.transition_frontier + .candidates .blocks .get(hash) - .map_or(false, |b| b.status.is_pending() && b.chain_proof.is_none()) + .is_some_and( |b| b.status.is_pending() && b.chain_proof.is_none()) }, - ConsensusAction::BlockSnarkVerifyPending { req_id, hash } => { + TransitionFrontierCandidateAction::BlockSnarkVerifyPending { req_id, hash } => { state - .consensus + .transition_frontier + .candidates .blocks .get(hash) - .map_or(false, |block| block.status.is_prevalidated()) + .is_some_and( |block| block.status.is_prevalidated()) && state.snark.block_verify.jobs.contains(*req_id) }, - ConsensusAction::BlockSnarkVerifySuccess { hash } => { + TransitionFrontierCandidateAction::BlockSnarkVerifySuccess { hash } => { state - .consensus + .transition_frontier + .candidates .blocks .get(hash) - .map_or(false, |block| block.status.is_snark_verify_pending()) + .is_some_and( |block| block.status.is_snark_verify_pending()) }, - ConsensusAction::BlockSnarkVerifyError { hash, .. } => { + TransitionFrontierCandidateAction::BlockSnarkVerifyError { hash, .. 
} => { state - .consensus + .transition_frontier + .candidates .blocks .get(hash) - .map_or(false, |block| block.status.is_snark_verify_pending()) + .is_some_and( |block| block.status.is_snark_verify_pending()) }, - ConsensusAction::DetectForkRange { hash } => { + TransitionFrontierCandidateAction::DetectForkRange { hash } => { state - .consensus + .transition_frontier + .candidates .blocks .get(hash) - .map_or(false, |block| { + .is_some_and( |block| { matches!( block.status, - ConsensusBlockStatus::SnarkVerifySuccess { .. } + TransitionFrontierCandidateStatus::SnarkVerifySuccess { .. } ) }) }, - ConsensusAction::ShortRangeForkResolve { hash } => { + TransitionFrontierCandidateAction::ShortRangeForkResolve { hash } => { state - .consensus + .transition_frontier + .candidates .blocks .get(hash) - .map_or(false, |block| match state.consensus.best_tip() { + .is_some_and( |block| match state.transition_frontier.candidates.best_tip() { Some(tip) => { matches!( &block.status, - ConsensusBlockStatus::ForkRangeDetected { compared_with, short_fork, .. } + TransitionFrontierCandidateStatus::ForkRangeDetected { compared_with, short_fork, .. } if compared_with.as_ref() == Some(tip.hash) && *short_fork ) } None => true, }) }, - ConsensusAction::LongRangeForkResolve { hash } => { + TransitionFrontierCandidateAction::LongRangeForkResolve { hash } => { state - .consensus + .transition_frontier + .candidates .blocks .get(hash) - .map_or(false, |block| match state.consensus.best_tip() { + .is_some_and( |block| match state.transition_frontier.candidates.best_tip() { Some(tip) => { matches!( &block.status, - ConsensusBlockStatus::ForkRangeDetected { compared_with, short_fork, .. } + TransitionFrontierCandidateStatus::ForkRangeDetected { compared_with, short_fork, .. 
} if compared_with.as_ref() == Some(tip.hash) && !*short_fork ) } None => false, }) }, - ConsensusAction::BestTipUpdate { hash } => { + TransitionFrontierCandidateAction::BestTipUpdate { hash } => { state - .consensus + .transition_frontier + .candidates .is_candidate_decided_to_use_as_tip(hash) }, - ConsensusAction::TransitionFrontierSyncTargetUpdate => { - let Some(best_tip) = state.consensus.best_tip_block_with_hash() else { + TransitionFrontierCandidateAction::TransitionFrontierSyncTargetUpdate => { + let Some(best_tip) = state.transition_frontier.candidates.best_tip_block_with_hash() else { return false; }; // do not need to update transition frontier sync target. @@ -181,12 +191,18 @@ impl redux::EnablingCondition for ConsensusAction { } // has enough data - state.consensus.best_tip_chain_proof(&state.transition_frontier).is_some() + state.transition_frontier.candidates.best_tip_chain_proof(&state.transition_frontier).is_some() }, - ConsensusAction::P2pBestTipUpdate { .. } => true, - ConsensusAction::Prune => { - state.consensus.best_tip().is_some() + TransitionFrontierCandidateAction::P2pBestTipUpdate { .. 
} => true, + TransitionFrontierCandidateAction::Prune => { + state.transition_frontier.candidates.best_tip().is_some() }, } } } + +impl From for crate::Action { + fn from(value: TransitionFrontierCandidateAction) -> Self { + Self::TransitionFrontier(crate::TransitionFrontierAction::Candidate(value)) + } +} diff --git a/node/src/consensus/consensus_reducer.rs b/node/src/transition_frontier/candidate/transition_frontier_candidate_reducer.rs similarity index 74% rename from node/src/consensus/consensus_reducer.rs rename to node/src/transition_frontier/candidate/transition_frontier_candidate_reducer.rs index 0f7c977c1e..813c54e91a 100644 --- a/node/src/consensus/consensus_reducer.rs +++ b/node/src/transition_frontier/candidate/transition_frontier_candidate_reducer.rs @@ -17,14 +17,16 @@ use crate::{ }; use super::{ - ConsensusAction, ConsensusActionWithMetaRef, ConsensusBlockState, ConsensusBlockStatus, - ConsensusLongRangeForkDecision, ConsensusShortRangeForkDecision, ConsensusState, + ConsensusLongRangeForkDecision, ConsensusShortRangeForkDecision, + TransitionFrontierCandidateAction, TransitionFrontierCandidateActionWithMetaRef, + TransitionFrontierCandidateState, TransitionFrontierCandidateStatus, + TransitionFrontierCandidatesState, }; -impl ConsensusState { +impl TransitionFrontierCandidatesState { pub fn reducer( mut state_context: crate::Substate, - action: ConsensusActionWithMetaRef<'_>, + action: TransitionFrontierCandidateActionWithMetaRef<'_>, ) { let Ok(state) = state_context.get_substate_mut() else { // TODO: log or propagate @@ -33,16 +35,16 @@ impl ConsensusState { let (action, meta) = action.split(); match action { - ConsensusAction::BlockReceived { + TransitionFrontierCandidateAction::BlockReceived { hash, block, chain_proof, } => { state.blocks.insert( hash.clone(), - ConsensusBlockState { + TransitionFrontierCandidateState { block: block.clone(), - status: ConsensusBlockStatus::Received { time: meta.time() }, + status: 
TransitionFrontierCandidateStatus::Received { time: meta.time() }, chain_proof: chain_proof.clone(), }, ); @@ -59,18 +61,23 @@ impl ConsensusState { match state.prevalidate_block(&block, allow_block_too_late) { Ok(()) => { - dispatcher.push(ConsensusAction::BlockPrevalidateSuccess { hash }); + dispatcher.push( + TransitionFrontierCandidateAction::BlockPrevalidateSuccess { hash }, + ); } Err(error) => { - dispatcher.push(ConsensusAction::BlockPrevalidateError { hash, error }); + dispatcher.push(TransitionFrontierCandidateAction::BlockPrevalidateError { + hash, + error, + }); } } } - ConsensusAction::BlockPrevalidateSuccess { hash } => { + TransitionFrontierCandidateAction::BlockPrevalidateSuccess { hash } => { let Some(block) = state.blocks.get_mut(hash) else { return; }; - block.status = ConsensusBlockStatus::Prevalidated; + block.status = TransitionFrontierCandidateStatus::Prevalidated; // Dispatch let block = (hash.clone(), block.block.clone()).into(); @@ -79,22 +86,22 @@ impl ConsensusState { block, on_init: redux::callback!( on_received_block_snark_verify_init((hash: BlockHash, req_id: SnarkBlockVerifyId)) -> crate::Action { - ConsensusAction::BlockSnarkVerifyPending { hash, req_id } + TransitionFrontierCandidateAction::BlockSnarkVerifyPending { hash, req_id } }), on_success: redux::callback!( on_received_block_snark_verify_success(hash: BlockHash) -> crate::Action { - ConsensusAction::BlockSnarkVerifySuccess { hash } + TransitionFrontierCandidateAction::BlockSnarkVerifySuccess { hash } }), on_error: redux::callback!( on_received_block_snark_verify_error((hash: BlockHash, error: SnarkBlockVerifyError)) -> crate::Action { - ConsensusAction::BlockSnarkVerifyError { hash, error } + TransitionFrontierCandidateAction::BlockSnarkVerifyError { hash, error } }), }); } - ConsensusAction::BlockPrevalidateError { hash, .. } => { + TransitionFrontierCandidateAction::BlockPrevalidateError { hash, .. 
} => { state.blocks.remove(hash); } - ConsensusAction::BlockChainProofUpdate { hash, chain_proof } => { + TransitionFrontierCandidateAction::BlockChainProofUpdate { hash, chain_proof } => { if state.best_tip.as_ref() == Some(hash) { state.best_tip_chain_proof = Some(chain_proof.clone()); } else if let Some(block) = state.blocks.get_mut(hash) { @@ -102,34 +109,42 @@ impl ConsensusState { } let (dispatcher, global_state) = state_context.into_dispatcher_and_state(); - if global_state.consensus.best_tip.as_ref() != Some(hash) { + if global_state + .transition_frontier + .candidates + .best_tip + .as_ref() + != Some(hash) + { return; } - dispatcher.push(ConsensusAction::TransitionFrontierSyncTargetUpdate); + dispatcher + .push(TransitionFrontierCandidateAction::TransitionFrontierSyncTargetUpdate); } - ConsensusAction::BlockSnarkVerifyPending { req_id, hash } => { + TransitionFrontierCandidateAction::BlockSnarkVerifyPending { req_id, hash } => { if let Some(block) = state.blocks.get_mut(hash) { - block.status = ConsensusBlockStatus::SnarkVerifyPending { + block.status = TransitionFrontierCandidateStatus::SnarkVerifyPending { time: meta.time(), req_id: *req_id, }; } } - ConsensusAction::BlockSnarkVerifySuccess { hash } => { + TransitionFrontierCandidateAction::BlockSnarkVerifySuccess { hash } => { if let Some(block) = state.blocks.get_mut(hash) { - block.status = ConsensusBlockStatus::SnarkVerifySuccess { time: meta.time() }; + block.status = + TransitionFrontierCandidateStatus::SnarkVerifySuccess { time: meta.time() }; } // Dispatch let hash = hash.clone(); let dispatcher = state_context.into_dispatcher(); - dispatcher.push(ConsensusAction::DetectForkRange { hash }); + dispatcher.push(TransitionFrontierCandidateAction::DetectForkRange { hash }); } - ConsensusAction::BlockSnarkVerifyError { .. } => { + TransitionFrontierCandidateAction::BlockSnarkVerifyError { .. } => { // TODO: handle block verification error. 
} - ConsensusAction::DetectForkRange { hash } => { + TransitionFrontierCandidateAction::DetectForkRange { hash } => { let candidate_hash = hash; let Some(candidate_state) = state.blocks.get(candidate_hash) else { return; @@ -148,7 +163,7 @@ impl ConsensusState { (None, true) }; if let Some(candidate_state) = state.blocks.get_mut(candidate_hash) { - candidate_state.status = ConsensusBlockStatus::ForkRangeDetected { + candidate_state.status = TransitionFrontierCandidateStatus::ForkRangeDetected { time: meta.time(), compared_with: tip_hash, short_fork, @@ -160,10 +175,12 @@ impl ConsensusState { // Dispatch let hash = hash.clone(); let dispatcher = state_context.into_dispatcher(); - dispatcher.push(ConsensusAction::ShortRangeForkResolve { hash: hash.clone() }); - dispatcher.push(ConsensusAction::LongRangeForkResolve { hash }); + dispatcher.push(TransitionFrontierCandidateAction::ShortRangeForkResolve { + hash: hash.clone(), + }); + dispatcher.push(TransitionFrontierCandidateAction::LongRangeForkResolve { hash }); } - ConsensusAction::ShortRangeForkResolve { hash } => { + TransitionFrontierCandidateAction::ShortRangeForkResolve { hash } => { let candidate_hash = hash; if let Some(candidate) = state.blocks.get(candidate_hash) { let (best_tip_hash, decision): (_, ConsensusShortRangeForkDecision) = @@ -194,20 +211,21 @@ impl ConsensusState { candidate.chain_proof = None; } - candidate.status = ConsensusBlockStatus::ShortRangeForkResolve { - time: meta.time(), - compared_with: best_tip_hash, - decision, - }; + candidate.status = + TransitionFrontierCandidateStatus::ShortRangeForkResolve { + time: meta.time(), + compared_with: best_tip_hash, + decision, + }; } } // Dispatch let hash = hash.clone(); let dispatcher = state_context.into_dispatcher(); - dispatcher.push(ConsensusAction::BestTipUpdate { hash }); + dispatcher.push(TransitionFrontierCandidateAction::BestTipUpdate { hash }); } - ConsensusAction::LongRangeForkResolve { hash } => { + 
TransitionFrontierCandidateAction::LongRangeForkResolve { hash } => { openmina_core::log::debug!(openmina_core::log::system_time(); kind = "ConsensusAction::LongRangeForkResolve"); let candidate_hash = hash; let Some(tip_ref) = state.best_tip() else { @@ -229,7 +247,7 @@ impl ConsensusState { let Some(candidate_state) = state.blocks.get_mut(candidate_hash) else { return; }; - candidate_state.status = ConsensusBlockStatus::LongRangeForkResolve { + candidate_state.status = TransitionFrontierCandidateStatus::LongRangeForkResolve { time: meta.time(), compared_with: tip_hash, decision: if take { @@ -244,9 +262,9 @@ impl ConsensusState { // Dispatch let hash = hash.clone(); let dispatcher = state_context.into_dispatcher(); - dispatcher.push(ConsensusAction::BestTipUpdate { hash }); + dispatcher.push(TransitionFrontierCandidateAction::BestTipUpdate { hash }); } - ConsensusAction::BestTipUpdate { hash } => { + TransitionFrontierCandidateAction::BestTipUpdate { hash } => { state.best_tip = Some(hash.clone()); if let Some(tip) = state.blocks.get_mut(hash) { @@ -255,7 +273,11 @@ impl ConsensusState { // Dispatch let (dispatcher, global_state) = state_context.into_dispatcher_and_state(); - let Some(block) = global_state.consensus.best_tip_block_with_hash() else { + let Some(block) = global_state + .transition_frontier + .candidates + .best_tip_block_with_hash() + else { return; }; for pub_key in global_state.watched_accounts.accounts() { @@ -268,11 +290,16 @@ impl ConsensusState { }); } - dispatcher.push(ConsensusAction::TransitionFrontierSyncTargetUpdate); + dispatcher + .push(TransitionFrontierCandidateAction::TransitionFrontierSyncTargetUpdate); } - ConsensusAction::TransitionFrontierSyncTargetUpdate => { + TransitionFrontierCandidateAction::TransitionFrontierSyncTargetUpdate => { let (dispatcher, state) = state_context.into_dispatcher_and_state(); - let Some(best_tip) = state.consensus.best_tip_block_with_hash() else { + let Some(best_tip) = state + .transition_frontier + 
.candidates + .best_tip_block_with_hash() + else { bug_condition!( "ConsensusAction::TransitionFrontierSyncTargetUpdate | no chosen best tip" ); @@ -280,7 +307,8 @@ impl ConsensusState { }; let Some((blocks_inbetween, root_block)) = state - .consensus + .transition_frontier + .candidates .best_tip_chain_proof(&state.transition_frontier) else { bug_condition!("ConsensusAction::TransitionFrontierSyncTargetUpdate | no best tip chain proof"); @@ -300,9 +328,9 @@ impl ConsensusState { on_success: None, }); } - ConsensusAction::P2pBestTipUpdate { best_tip } => { + TransitionFrontierCandidateAction::P2pBestTipUpdate { best_tip } => { let dispatcher = state_context.into_dispatcher(); - dispatcher.push(ConsensusAction::BlockReceived { + dispatcher.push(TransitionFrontierCandidateAction::BlockReceived { hash: best_tip.hash.clone(), block: best_tip.block.clone(), chain_proof: None, @@ -312,7 +340,7 @@ impl ConsensusState { dispatcher.push(TransitionFrontierSyncLedgerStagedAction::PartsPeerFetchInit); dispatcher.push(TransitionFrontierSyncAction::BlocksPeersQuery); } - ConsensusAction::Prune => { + TransitionFrontierCandidateAction::Prune => { let Some(best_tip_hash) = state.best_tip.clone() else { return; }; @@ -345,7 +373,7 @@ impl ConsensusState { /// Ideally we would differentiate between requested blocks and blocks /// received from gossip, but this difference doesn't really exist /// in the WebRTC transport, hence this heuristic. 
-fn allow_block_too_late(state: &crate::State, block: &ArcBlockWithHash) -> bool { +pub fn allow_block_too_late(state: &crate::State, block: &ArcBlockWithHash) -> bool { let (has_greater_blobal_slot, diff_with_best_tip) = state .transition_frontier .best_tip() diff --git a/node/src/consensus/consensus_state.rs b/node/src/transition_frontier/candidate/transition_frontier_candidate_state.rs similarity index 87% rename from node/src/consensus/consensus_state.rs rename to node/src/transition_frontier/candidate/transition_frontier_candidate_state.rs index 330e89421b..40a4abafac 100644 --- a/node/src/consensus/consensus_state.rs +++ b/node/src/transition_frontier/candidate/transition_frontier_candidate_state.rs @@ -40,7 +40,7 @@ impl ConsensusLongRangeForkDecision { } #[derive(Serialize, Deserialize, Debug, Clone)] -pub enum ConsensusBlockStatus { +pub enum TransitionFrontierCandidateStatus { Received { time: redux::Timestamp, }, @@ -69,7 +69,7 @@ pub enum ConsensusBlockStatus { }, } -impl ConsensusBlockStatus { +impl TransitionFrontierCandidateStatus { pub fn is_received(&self) -> bool { matches!(self, Self::Received { .. }) } @@ -99,13 +99,13 @@ impl ConsensusBlockStatus { } #[derive(Serialize, Deserialize, Debug, Clone)] -pub struct ConsensusBlockState { +pub struct TransitionFrontierCandidateState { pub block: Arc, - pub status: ConsensusBlockStatus, + pub status: TransitionFrontierCandidateStatus, pub chain_proof: Option<(Vec, ArcBlockWithHash)>, } -impl ConsensusBlockState { +impl TransitionFrontierCandidateState { pub fn height(&self) -> u32 { self.block .header @@ -119,14 +119,14 @@ impl ConsensusBlockState { } #[derive(Serialize, Deserialize, Debug, Clone, Default)] -pub struct ConsensusState { - pub blocks: BTreeMap, +pub struct TransitionFrontierCandidatesState { + pub blocks: BTreeMap, // TODO(binier): rename to best candidate. Best tip will be in transition_frontier state. 
pub best_tip: Option, pub best_tip_chain_proof: Option<(Vec, ArcBlockWithHash)>, } -impl ConsensusState { +impl TransitionFrontierCandidatesState { pub fn new() -> Self { Self::default() } @@ -171,17 +171,17 @@ impl ConsensusState { return false; }; match &candidate.status { - ConsensusBlockStatus::Received { .. } => false, - ConsensusBlockStatus::Prevalidated => false, - ConsensusBlockStatus::SnarkVerifyPending { .. } => false, - ConsensusBlockStatus::SnarkVerifySuccess { .. } => false, - ConsensusBlockStatus::ForkRangeDetected { .. } => false, - ConsensusBlockStatus::ShortRangeForkResolve { + TransitionFrontierCandidateStatus::Received { .. } => false, + TransitionFrontierCandidateStatus::Prevalidated => false, + TransitionFrontierCandidateStatus::SnarkVerifyPending { .. } => false, + TransitionFrontierCandidateStatus::SnarkVerifySuccess { .. } => false, + TransitionFrontierCandidateStatus::ForkRangeDetected { .. } => false, + TransitionFrontierCandidateStatus::ShortRangeForkResolve { compared_with, decision, .. } => decision.use_as_best_tip() && &self.best_tip == compared_with, - ConsensusBlockStatus::LongRangeForkResolve { + TransitionFrontierCandidateStatus::LongRangeForkResolve { compared_with, decision, .. 
@@ -221,7 +221,7 @@ pub struct BlockRef<'a> { pub hash: &'a StateHash, pub header: &'a MinaBlockHeaderStableV2, pub body: &'a StagedLedgerDiffDiffStableV2, - pub status: &'a ConsensusBlockStatus, + pub status: &'a TransitionFrontierCandidateStatus, } impl BlockRef<'_> { diff --git a/node/src/transition_frontier/genesis/transition_frontier_genesis_config.rs b/node/src/transition_frontier/genesis/transition_frontier_genesis_config.rs index 3b561d8776..6fe757c913 100644 --- a/node/src/transition_frontier/genesis/transition_frontier_genesis_config.rs +++ b/node/src/transition_frontier/genesis/transition_frontier_genesis_config.rs @@ -651,6 +651,7 @@ impl PrebuiltGenesisConfig { Ok((masks, load_result)) } + #[allow(clippy::result_unit_err)] pub fn from_loaded( (masks, data): (Vec, GenesisConfigLoaded), ) -> Result { diff --git a/node/src/transition_frontier/mod.rs b/node/src/transition_frontier/mod.rs index e767d704f7..7b93cf7a2a 100644 --- a/node/src/transition_frontier/mod.rs +++ b/node/src/transition_frontier/mod.rs @@ -1,3 +1,5 @@ +pub mod archive; +pub mod candidate; pub mod genesis; pub mod genesis_effectful; pub mod sync; diff --git a/node/src/transition_frontier/sync/ledger/snarked/transition_frontier_sync_ledger_snarked_actions.rs b/node/src/transition_frontier/sync/ledger/snarked/transition_frontier_sync_ledger_snarked_actions.rs index 331cf7bf08..8fa768b891 100644 --- a/node/src/transition_frontier/sync/ledger/snarked/transition_frontier_sync_ledger_snarked_actions.rs +++ b/node/src/transition_frontier/sync/ledger/snarked/transition_frontier_sync_ledger_snarked_actions.rs @@ -135,11 +135,11 @@ pub enum TransitionFrontierSyncLedgerSnarkedAction { impl redux::EnablingCondition for TransitionFrontierSyncLedgerSnarkedAction { fn is_enabled(&self, state: &crate::State, _time: redux::Timestamp) -> bool { match self { - TransitionFrontierSyncLedgerSnarkedAction::Pending => { - state.transition_frontier.sync.ledger().map_or(false, |s| { - matches!(s, 
TransitionFrontierSyncLedgerState::Init { .. }) - }) - } + TransitionFrontierSyncLedgerSnarkedAction::Pending => state + .transition_frontier + .sync + .ledger() + .is_some_and(|s| matches!(s, TransitionFrontierSyncLedgerState::Init { .. })), TransitionFrontierSyncLedgerSnarkedAction::PeersQuery => { // This condition passes if: // - there are available peers to query @@ -155,7 +155,7 @@ impl redux::EnablingCondition for TransitionFrontierSyncLedgerSnar .sync .ledger() .and_then(|s| s.snarked()) - .map_or(false, |s| { + .is_some_and(|s| { s.is_num_accounts_query_next() || s.contains_pending_address_queries() }); peers_available && sync_next_available @@ -187,7 +187,7 @@ impl redux::EnablingCondition for TransitionFrontierSyncLedgerSnar .sync .ledger() .and_then(|s| s.snarked()?.num_accounts_pending()) - .map_or(false, |pending| { + .is_some_and(|pending| { pending .attempts .get(peer_id) @@ -221,10 +221,10 @@ impl redux::EnablingCondition for TransitionFrontierSyncLedgerSnar .sync .ledger() .and_then(|s| s.snarked()) - .map_or(false, |s| { + .is_some_and(|s| { s.peer_num_account_query_get(peer_id, *rpc_id) .and_then(|s| s.attempts.get(peer_id)) - .map_or(false, |s| matches!(s, PeerRpcState::Pending { .. })) + .is_some_and(|s| matches!(s, PeerRpcState::Pending { .. })) }), TransitionFrontierSyncLedgerSnarkedAction::PeerQueryNumAccountsSuccess { peer_id, @@ -235,11 +235,11 @@ impl redux::EnablingCondition for TransitionFrontierSyncLedgerSnar .sync .ledger() .and_then(|s| s.snarked()) - .map_or(false, |s| { + .is_some_and(|s| { // TODO(tizoc): check if expected response kind is correct. s.peer_num_account_query_get(peer_id, *rpc_id) .and_then(|s| s.attempts.get(peer_id)) - .map_or(false, |s| matches!(s, PeerRpcState::Pending { .. })) + .is_some_and(|s| matches!(s, PeerRpcState::Pending { .. })) }), TransitionFrontierSyncLedgerSnarkedAction::NumAccountsReceived { sender, .. } | TransitionFrontierSyncLedgerSnarkedAction::NumAccountsAccepted { sender, .. 
} @@ -250,7 +250,7 @@ impl redux::EnablingCondition for TransitionFrontierSyncLedgerSnar .ledger() .and_then(|s| s.snarked()?.num_accounts_pending()) .and_then(|s| s.attempts.get(sender)) - .map_or(false, |s| s.is_success()) + .is_some_and(|s| s.is_success()) } TransitionFrontierSyncLedgerSnarkedAction::NumAccountsSuccess { .. } => state .transition_frontier @@ -264,7 +264,7 @@ impl redux::EnablingCondition for TransitionFrontierSyncLedgerSnar .sync .ledger() .and_then(|s| s.snarked()) - .map_or(false, |s| { + .is_some_and(|s| { matches!( s, TransitionFrontierSyncLedgerSnarkedState::NumAccountsSuccess { .. } @@ -275,7 +275,7 @@ impl redux::EnablingCondition for TransitionFrontierSyncLedgerSnar .sync .ledger() .and_then(|s| s.snarked()) - .map_or(false, |s| match s { + .is_some_and(|s| match s { TransitionFrontierSyncLedgerSnarkedState::MerkleTreeSyncPending { queue, pending_addresses: pending, @@ -349,7 +349,7 @@ impl redux::EnablingCondition for TransitionFrontierSyncLedgerSnar .sync .ledger() .and_then(|s| s.snarked()?.fetch_pending()) - .map_or(false, |pending| { + .is_some_and(|pending| { pending .iter() .filter_map(|(_, query_state)| query_state.attempts.get(peer_id)) @@ -364,10 +364,10 @@ impl redux::EnablingCondition for TransitionFrontierSyncLedgerSnar .sync .ledger() .and_then(|s| s.snarked()) - .map_or(false, |s| { + .is_some_and(|s| { s.peer_address_query_get(peer_id, *rpc_id) .and_then(|(_, s)| s.attempts.get(peer_id)) - .map_or(false, |s| matches!(s, PeerRpcState::Pending { .. })) + .is_some_and(|s| matches!(s, PeerRpcState::Pending { .. })) }), TransitionFrontierSyncLedgerSnarkedAction::PeerQueryAddressSuccess { peer_id, @@ -379,11 +379,11 @@ impl redux::EnablingCondition for TransitionFrontierSyncLedgerSnar .sync .ledger() .and_then(|s| s.snarked()) - .map_or(false, |s| { + .is_some_and(|s| { // TODO(binier): check if expected response kind is correct. 
s.peer_address_query_get(peer_id, *rpc_id) .and_then(|(_, s)| s.attempts.get(peer_id)) - .map_or(false, |s| matches!(s, PeerRpcState::Pending { .. })) + .is_some_and(|s| matches!(s, PeerRpcState::Pending { .. })) }) } TransitionFrontierSyncLedgerSnarkedAction::ChildHashesReceived { @@ -408,7 +408,7 @@ impl redux::EnablingCondition for TransitionFrontierSyncLedgerSnar .ledger() .and_then(|s| s.snarked()?.fetch_pending()?.get(address)) .and_then(|s| s.attempts.get(sender)) - .map_or(false, |s| s.is_success()) + .is_some_and(|s| s.is_success()) } TransitionFrontierSyncLedgerSnarkedAction::ChildAccountsReceived { address, @@ -424,7 +424,7 @@ impl redux::EnablingCondition for TransitionFrontierSyncLedgerSnar .ledger() .and_then(|s| s.snarked()?.fetch_pending()?.get(address)) .and_then(|s| s.attempts.get(sender)) - .map_or(false, |s| s.is_success()), + .is_some_and(|s| s.is_success()), TransitionFrontierSyncLedgerSnarkedAction::ChildAccountsAccepted { address, count, @@ -437,14 +437,14 @@ impl redux::EnablingCondition for TransitionFrontierSyncLedgerSnar .ledger() .and_then(|s| s.snarked()?.fetch_pending()?.get(address)) .and_then(|s| s.attempts.get(sender)) - .map_or(false, |s| s.is_success()) + .is_some_and(|s| s.is_success()) } TransitionFrontierSyncLedgerSnarkedAction::Success => state .transition_frontier .sync .ledger() .and_then(|s| s.snarked()) - .map_or(false, |s| { + .is_some_and(|s| { matches!( s, TransitionFrontierSyncLedgerSnarkedState::MerkleTreeSyncSuccess { .. 
} diff --git a/node/src/transition_frontier/sync/ledger/snarked/transition_frontier_sync_ledger_snarked_state.rs b/node/src/transition_frontier/sync/ledger/snarked/transition_frontier_sync_ledger_snarked_state.rs index ea85c186be..d05d9ad5e3 100644 --- a/node/src/transition_frontier/sync/ledger/snarked/transition_frontier_sync_ledger_snarked_state.rs +++ b/node/src/transition_frontier/sync/ledger/snarked/transition_frontier_sync_ledger_snarked_state.rs @@ -309,7 +309,7 @@ impl TransitionFrontierSyncLedgerSnarkedState { pending_addresses.iter().find(|(_, s)| { s.attempts .get(peer_id) - .map_or(false, |s| s.rpc_id() == Some(expected_rpc_id)) + .is_some_and(|s| s.rpc_id() == Some(expected_rpc_id)) }) } _ => None, diff --git a/node/src/transition_frontier/sync/ledger/staged/transition_frontier_sync_ledger_staged_actions.rs b/node/src/transition_frontier/sync/ledger/staged/transition_frontier_sync_ledger_staged_actions.rs index c4bb9ac58b..19434a8704 100644 --- a/node/src/transition_frontier/sync/ledger/staged/transition_frontier_sync_ledger_staged_actions.rs +++ b/node/src/transition_frontier/sync/ledger/staged/transition_frontier_sync_ledger_staged_actions.rs @@ -68,7 +68,7 @@ impl redux::EnablingCondition for TransitionFrontierSyncLedgerStag .sync .ledger() .and_then(|s| s.snarked()) - .map_or(false, |s| match s { + .is_some_and(|s| match s { TransitionFrontierSyncLedgerSnarkedState::Success { target, .. 
} => { target.staged.is_some() } @@ -79,11 +79,11 @@ impl redux::EnablingCondition for TransitionFrontierSyncLedgerStag .sync .ledger() .and_then(|s| s.staged()) - .map_or(false, |staged| { + .is_some_and(|staged| { let Some(p2p) = state.p2p.ready() else { return false; }; - staged.fetch_attempts().map_or(false, |attempts| { + staged.fetch_attempts().is_some_and(|attempts| { attempts.is_empty() || attempts.iter().all(|(_, s)| s.is_error()) }) && p2p.ready_rpc_peers_iter().next().is_some() }), @@ -92,7 +92,7 @@ impl redux::EnablingCondition for TransitionFrontierSyncLedgerStag .sync .ledger() .and_then(|s| s.staged()) - .map_or(false, |s| { + .is_some_and(|s| { matches!( s, TransitionFrontierSyncLedgerStagedState::PartsFetchPending { .. } @@ -108,7 +108,7 @@ impl redux::EnablingCondition for TransitionFrontierSyncLedgerStag .ledger() .and_then(|s| s.staged()?.fetch_attempts()?.get(peer_id)) .and_then(|s| s.fetch_pending_rpc_id()) - .map_or(false, |fetch_rpc_id| fetch_rpc_id == *rpc_id), + .is_some_and(|fetch_rpc_id| fetch_rpc_id == *rpc_id), TransitionFrontierSyncLedgerStagedAction::PartsPeerFetchSuccess { peer_id, rpc_id, @@ -119,13 +119,13 @@ impl redux::EnablingCondition for TransitionFrontierSyncLedgerStag .ledger() .and_then(|s| s.staged()?.fetch_attempts()?.get(peer_id)) .and_then(|s| s.fetch_pending_rpc_id()) - .map_or(false, |fetch_rpc_id| fetch_rpc_id == *rpc_id), + .is_some_and(|fetch_rpc_id| fetch_rpc_id == *rpc_id), TransitionFrontierSyncLedgerStagedAction::PartsPeerInvalid { sender, .. } => state .transition_frontier .sync .ledger() .and_then(|s| s.staged()?.fetch_attempts()?.get(sender)) - .map_or(false, |s| match s { + .is_some_and(|s| match s { PeerStagedLedgerPartsFetchState::Success { parts, .. 
} => !parts.is_valid(), _ => false, }), @@ -134,7 +134,7 @@ impl redux::EnablingCondition for TransitionFrontierSyncLedgerStag .sync .ledger() .and_then(|s| s.staged()?.fetch_attempts()?.get(sender)) - .map_or(false, |s| match s { + .is_some_and(|s| match s { PeerStagedLedgerPartsFetchState::Success { parts, .. } => parts.is_valid(), _ => false, }), @@ -143,7 +143,7 @@ impl redux::EnablingCondition for TransitionFrontierSyncLedgerStag .sync .ledger() .and_then(|s| s.staged()?.fetch_attempts()?.get(sender)) - .map_or(false, |s| s.is_valid()), + .is_some_and(|s| s.is_valid()), TransitionFrontierSyncLedgerStagedAction::ReconstructEmpty => state .transition_frontier .sync @@ -155,7 +155,7 @@ impl redux::EnablingCondition for TransitionFrontierSyncLedgerStag } _ => None, }) - .map_or(false, |target| { + .is_some_and(|target| { let hashes = &target.staged.hashes; target.snarked_ledger_hash == hashes.non_snark.ledger_hash && hashes.non_snark.aux_hash == v2::StagedLedgerHashAuxHash::zero() @@ -169,7 +169,7 @@ impl redux::EnablingCondition for TransitionFrontierSyncLedgerStag .sync .ledger() .and_then(|s| s.staged()) - .map_or(false, |s| { + .is_some_and(|s| { matches!( s, TransitionFrontierSyncLedgerStagedState::PartsFetchSuccess { .. } @@ -181,7 +181,7 @@ impl redux::EnablingCondition for TransitionFrontierSyncLedgerStag .sync .ledger() .and_then(|s| s.staged()) - .map_or(false, |s| { + .is_some_and(|s| { matches!( s, TransitionFrontierSyncLedgerStagedState::PartsFetchSuccess { .. } @@ -193,7 +193,7 @@ impl redux::EnablingCondition for TransitionFrontierSyncLedgerStag .sync .ledger() .and_then(|s| s.staged()) - .map_or(false, |s| { + .is_some_and(|s| { matches!( s, TransitionFrontierSyncLedgerStagedState::ReconstructPending { .. 
} @@ -204,7 +204,7 @@ impl redux::EnablingCondition for TransitionFrontierSyncLedgerStag .sync .ledger() .and_then(|s| s.staged()) - .map_or(false, |s| { + .is_some_and(|s| { // Assumption here is that if the hash doesn't match, it is because the reconstruct // is stale (best tip changed while reconstruction was happening). The staging // ledger reconstruction logic itself will already validate that the resulting @@ -220,7 +220,7 @@ impl redux::EnablingCondition for TransitionFrontierSyncLedgerStag .sync .ledger() .and_then(|s| s.staged()) - .map_or(false, |s| { + .is_some_and(|s| { matches!( s, TransitionFrontierSyncLedgerStagedState::ReconstructSuccess { .. } diff --git a/node/src/transition_frontier/sync/ledger/staged/transition_frontier_sync_ledger_staged_reducer.rs b/node/src/transition_frontier/sync/ledger/staged/transition_frontier_sync_ledger_staged_reducer.rs index d7f2934f92..a008dbe624 100644 --- a/node/src/transition_frontier/sync/ledger/staged/transition_frontier_sync_ledger_staged_reducer.rs +++ b/node/src/transition_frontier/sync/ledger/staged/transition_frontier_sync_ledger_staged_reducer.rs @@ -88,7 +88,7 @@ impl TransitionFrontierSyncLedgerStagedState { if list .last() - .map_or(false, |(_, _, ord): &(_, _, u8)| *ord > order) + .is_some_and(|(_, _, ord): &(_, _, u8)| *ord > order) { // remove less priority peers list.clear(); diff --git a/node/src/transition_frontier/sync/ledger/transition_frontier_sync_ledger_actions.rs b/node/src/transition_frontier/sync/ledger/transition_frontier_sync_ledger_actions.rs index d2a5410b60..bd8fa7a0be 100644 --- a/node/src/transition_frontier/sync/ledger/transition_frontier_sync_ledger_actions.rs +++ b/node/src/transition_frontier/sync/ledger/transition_frontier_sync_ledger_actions.rs @@ -24,11 +24,11 @@ pub enum TransitionFrontierSyncLedgerAction { impl redux::EnablingCondition for TransitionFrontierSyncLedgerAction { fn is_enabled(&self, state: &crate::State, time: redux::Timestamp) -> bool { match self { - 
TransitionFrontierSyncLedgerAction::Init => { - state.transition_frontier.sync.ledger().map_or(false, |s| { - matches!(s, TransitionFrontierSyncLedgerState::Init { .. }) - }) - } + TransitionFrontierSyncLedgerAction::Init => state + .transition_frontier + .sync + .ledger() + .is_some_and(|s| matches!(s, TransitionFrontierSyncLedgerState::Init { .. })), TransitionFrontierSyncLedgerAction::Snarked(a) => a.is_enabled(state, time), TransitionFrontierSyncLedgerAction::Staged(a) => a.is_enabled(state, time), TransitionFrontierSyncLedgerAction::Success => { diff --git a/node/src/transition_frontier/sync/ledger/transition_frontier_sync_ledger_state.rs b/node/src/transition_frontier/sync/ledger/transition_frontier_sync_ledger_state.rs index 755d95f92f..55be085367 100644 --- a/node/src/transition_frontier/sync/ledger/transition_frontier_sync_ledger_state.rs +++ b/node/src/transition_frontier/sync/ledger/transition_frontier_sync_ledger_state.rs @@ -98,7 +98,7 @@ impl TransitionFrontierSyncLedgerState { if new_target .staged .as_ref() - .map_or(false, |cur| cur.hashes == staged.target().staged.hashes) + .is_some_and(|cur| cur.hashes == staged.target().staged.hashes) { // root staged ledger hash is still the same. Do nothing. 
} else if staged.target().snarked_ledger_hash == new_target.snarked_ledger_hash { diff --git a/node/src/transition_frontier/sync/transition_frontier_sync_actions.rs b/node/src/transition_frontier/sync/transition_frontier_sync_actions.rs index cb686091d7..516df9eacf 100644 --- a/node/src/transition_frontier/sync/transition_frontier_sync_actions.rs +++ b/node/src/transition_frontier/sync/transition_frontier_sync_actions.rs @@ -5,7 +5,7 @@ use openmina_core::ActionEvent; use redux::Callback; use serde::{Deserialize, Serialize}; -use crate::ledger::write::CommitResult; +use crate::ledger::write::{BlockApplyResult, CommitResult}; use crate::p2p::channels::rpc::P2pRpcId; use crate::p2p::PeerId; use crate::transition_frontier::sync::TransitionFrontierSyncLedgerPending; @@ -106,12 +106,21 @@ pub enum TransitionFrontierSyncAction { hash: StateHash, just_emitted_a_proof: bool, }, + /// Sending block to archive + #[action_event(level = info, fields( + block_hash = display(&hash), + ))] + BlocksSendToArchive { + hash: StateHash, + data: BlockApplyResult, + }, /// Done applying all pending blocks BlocksSuccess, - /// Commit snarked ledger to transition frontier root + /// Commit all the accumulated changes after the + /// synchronization is done to the ledger service. CommitInit, CommitPending, - /// Root snarked ledger commited succesfully + /// Committing changes after sync finished. 
CommitSuccess { result: CommitResult, }, @@ -130,9 +139,10 @@ impl redux::EnablingCondition for TransitionFrontierSyncAction { .best_tip() .map_or(true, |tip| best_tip.hash != tip.hash) && state - .consensus + .transition_frontier + .candidates .best_tip() - .map_or(false, |tip| &best_tip.hash == tip.hash) + .is_some_and(|tip| &best_tip.hash == tip.hash) } TransitionFrontierSyncAction::BestTipUpdate { best_tip, @@ -146,7 +156,7 @@ impl redux::EnablingCondition for TransitionFrontierSyncAction { && state .transition_frontier .best_tip() - .map_or(false, |tip| best_tip.hash != tip.hash) + .is_some_and( |tip| best_tip.hash != tip.hash) && state .transition_frontier .sync @@ -158,7 +168,7 @@ impl redux::EnablingCondition for TransitionFrontierSyncAction { .sync .best_tip() .or(state.transition_frontier.best_tip()) - .map_or(false, |tip| { + .is_some_and( |tip| { if tip.is_genesis() && best_tip.height() > tip.height() { // TODO(binier): once genesis blocks are same, uncomment below. // tip.hash() == &best_tip.header().protocol_state.body.genesis_state_hash @@ -269,7 +279,7 @@ impl redux::EnablingCondition for TransitionFrontierSyncAction { .transition_frontier .sync .blocks_fetch_next() - .map_or(false, |expected| &expected == hash); + .is_some_and(|expected| &expected == hash); let check_peer_available = state .p2p @@ -279,7 +289,7 @@ impl redux::EnablingCondition for TransitionFrontierSyncAction { let peer_best_tip = p.best_tip.as_ref()?; Some(p).filter(|_| sync_best_tip.hash == peer_best_tip.hash) }) - .map_or(false, |p| p.channels.rpc.can_send_request()); + .is_some_and(|p| p.channels.rpc.can_send_request()); check_next_hash && check_peer_available } @@ -289,7 +299,7 @@ impl redux::EnablingCondition for TransitionFrontierSyncAction { .sync .blocks_fetch_retry_iter() .next() - .map_or(false, |expected| &expected == hash); + .is_some_and(|expected| &expected == hash); let check_peer_available = state .p2p @@ -299,7 +309,7 @@ impl redux::EnablingCondition for 
TransitionFrontierSyncAction { let peer_best_tip = p.best_tip.as_ref()?; Some(p).filter(|_| sync_best_tip.hash == peer_best_tip.hash) }) - .map_or(false, |p| p.channels.rpc.can_send_request()); + .is_some_and(|p| p.channels.rpc.can_send_request()); check_next_hash && check_peer_available } @@ -307,7 +317,7 @@ impl redux::EnablingCondition for TransitionFrontierSyncAction { .transition_frontier .sync .block_state(hash) - .map_or(false, |b| b.is_fetch_init_from_peer(peer_id)), + .is_some_and(|b| b.is_fetch_init_from_peer(peer_id)), TransitionFrontierSyncAction::BlocksPeerQueryError { peer_id, rpc_id, .. } => state @@ -324,12 +334,12 @@ impl redux::EnablingCondition for TransitionFrontierSyncAction { .sync .block_state(&response.hash) .filter(|s| s.is_fetch_pending_from_peer(peer_id, *rpc_id)) - .map_or(false, |s| s.block_hash() == &response.hash), + .is_some_and(|s| s.block_hash() == &response.hash), TransitionFrontierSyncAction::BlocksFetchSuccess { hash } => state .transition_frontier .sync .block_state(hash) - .map_or(false, |s| s.fetch_pending_fetched_block().is_some()), + .is_some_and(|s| s.fetch_pending_fetched_block().is_some()), TransitionFrontierSyncAction::BlocksNextApplyInit => { state.transition_frontier.sync.blocks_apply_next().is_some() } @@ -337,12 +347,12 @@ impl redux::EnablingCondition for TransitionFrontierSyncAction { .transition_frontier .sync .blocks_apply_next() - .map_or(false, |(b, _)| &b.hash == hash), + .is_some_and(|(b, _)| &b.hash == hash), TransitionFrontierSyncAction::BlocksNextApplyError { hash, .. 
} => state .transition_frontier .sync .blocks_apply_pending() - .map_or(false, |b| &b.hash == hash), + .is_some_and(|b| &b.hash == hash), TransitionFrontierSyncAction::BlocksNextApplySuccess { hash, just_emitted_a_proof: _, @@ -350,13 +360,16 @@ impl redux::EnablingCondition for TransitionFrontierSyncAction { .transition_frontier .sync .blocks_apply_pending() - .map_or(false, |b| &b.hash == hash), + .is_some_and(|b| &b.hash == hash), TransitionFrontierSyncAction::BlocksSuccess => match &state.transition_frontier.sync { TransitionFrontierSyncState::BlocksPending { chain, .. } => { chain.iter().all(|v| v.is_apply_success()) } _ => false, }, + TransitionFrontierSyncAction::BlocksSendToArchive { .. } => { + state.transition_frontier.archive_enabled + } TransitionFrontierSyncAction::CommitInit => matches!( state.transition_frontier.sync, TransitionFrontierSyncState::BlocksSuccess { .. }, diff --git a/node/src/transition_frontier/sync/transition_frontier_sync_effects.rs b/node/src/transition_frontier/sync/transition_frontier_sync_effects.rs index ac92f9569c..8ba7ae8071 100644 --- a/node/src/transition_frontier/sync/transition_frontier_sync_effects.rs +++ b/node/src/transition_frontier/sync/transition_frontier_sync_effects.rs @@ -1,7 +1,7 @@ use mina_p2p_messages::v2::LedgerHash; use openmina_core::block::{AppliedBlock, ArcBlockWithHash}; use p2p::channels::rpc::{P2pChannelsRpcAction, P2pRpcId}; -use p2p::PeerId; +use p2p::{P2pNetworkPubsubAction, PeerId}; use redux::ActionMeta; use crate::ledger::write::{LedgerWriteAction, LedgerWriteRequest}; @@ -304,6 +304,12 @@ impl TransitionFrontierSyncAction { }; let error = SyncError::BlockApplyFailed(failed_block.clone(), error.clone()); store.dispatch(TransitionFrontierAction::SyncFailed { best_tip, error }); + // TODO this should be handled by a callback + store.dispatch(P2pNetworkPubsubAction::RejectMessage { + message_id: Some(p2p::BroadcastMessageId::BlockHash { hash: hash.clone() }), + peer_id: None, + reason: "Failed to 
apply block".to_owned(), + }); } TransitionFrontierSyncAction::BlocksNextApplySuccess { hash, @@ -317,6 +323,12 @@ impl TransitionFrontierSyncAction { store.dispatch(TransitionFrontierSyncAction::BlocksSuccess); } } + TransitionFrontierSyncAction::BlocksSendToArchive { data, .. } => { + // Should be safe to unwrap because archive mode contains the necessary data, and this action is only called in archive mode + if let Ok(data) = data.try_into() { + store.service().send_to_archive(data); + } + } TransitionFrontierSyncAction::BlocksSuccess => {} // Bootstrap/Catchup is practically complete at this point. // This effect is where the finalization part needs to be diff --git a/node/src/transition_frontier/sync/transition_frontier_sync_reducer.rs b/node/src/transition_frontier/sync/transition_frontier_sync_reducer.rs index 7f8f742b65..4af35b667b 100644 --- a/node/src/transition_frontier/sync/transition_frontier_sync_reducer.rs +++ b/node/src/transition_frontier/sync/transition_frontier_sync_reducer.rs @@ -89,7 +89,7 @@ impl TransitionFrontierSyncState { } else if substate .ledger .staged() - .map_or(false, |s| s.is_parts_fetched()) + .is_some_and(|s| s.is_parts_fetched()) && root_block.pred_hash() == old_root_block.hash() { // Optimization. Prevent changing staging ledger target, @@ -144,7 +144,7 @@ impl TransitionFrontierSyncState { let old_chain_has_new_root_applied = old_chain .iter() .find(|b| b.block_hash() == &new_root.hash) - .map_or(false, |b| b.is_apply_success()); + .is_some_and(|b| b.is_apply_success()); if applied_blocks.contains_key(&new_root.hash) || old_chain_has_new_root_applied { @@ -636,6 +636,7 @@ impl TransitionFrontierSyncState { }, }; } + TransitionFrontierSyncAction::BlocksSendToArchive { .. 
} => {} TransitionFrontierSyncAction::BlocksSuccess => { let Self::BlocksPending { chain, @@ -753,9 +754,8 @@ fn next_required_ledger_to_sync( .into(); (SyncLedgerTargetKind::NextEpoch, ledger) } else if old_root.snarked_ledger_hash() == new_root.snarked_ledger_hash() - || cur_best_root.map_or(false, |cur| { - cur.snarked_ledger_hash() == new_root.snarked_ledger_hash() - }) + || cur_best_root + .is_some_and(|cur| cur.snarked_ledger_hash() == new_root.snarked_ledger_hash()) { let ledger = TransitionFrontierSyncLedgerSnarkedState::Success { time, diff --git a/node/src/transition_frontier/sync/transition_frontier_sync_state.rs b/node/src/transition_frontier/sync/transition_frontier_sync_state.rs index 848fbd8684..e95ec4e0db 100644 --- a/node/src/transition_frontier/sync/transition_frontier_sync_state.rs +++ b/node/src/transition_frontier/sync/transition_frontier_sync_state.rs @@ -264,7 +264,7 @@ impl TransitionFrontierSyncState { match self { Self::StakingLedgerPending(s) => s.ledger.is_snarked_ledger_synced(), Self::NextEpochLedgerPending(s) => s.ledger.is_snarked_ledger_synced(), - Self::RootLedgerPending(s) => s.ledger.staged().map_or(false, |s| s.is_success()), + Self::RootLedgerPending(s) => s.ledger.staged().is_some_and(|s| s.is_success()), _ => false, } } @@ -319,7 +319,7 @@ impl TransitionFrontierSyncState { rpc_id: P2pRpcId, ) -> bool { self.block_state(hash) - .map_or(false, |s| s.is_fetch_pending_from_peer(peer_id, rpc_id)) + .is_some_and(|s| s.is_fetch_pending_from_peer(peer_id, rpc_id)) } pub fn blocks_fetch_from_peer_pending_rpc_ids<'a>( @@ -467,7 +467,7 @@ impl TransitionFrontierSyncBlockState { let Self::FetchPending { attempts, .. 
} = self else { return false; }; - attempts.get(peer_id).map_or(false, |s| s.is_fetch_init()) + attempts.get(peer_id).is_some_and(|s| s.is_fetch_init()) } pub fn is_fetch_pending_from_peer(&self, peer_id: &PeerId, rpc_id: P2pRpcId) -> bool { @@ -477,7 +477,7 @@ impl TransitionFrontierSyncBlockState { attempts .get(peer_id) .and_then(|s| s.fetch_pending_rpc_id()) - .map_or(false, |expected| expected == rpc_id) + .is_some_and(|expected| expected == rpc_id) } pub fn fetch_pending_attempts_mut(&mut self) -> Option<&mut BTreeMap> { diff --git a/node/src/transition_frontier/transition_frontier_actions.rs b/node/src/transition_frontier/transition_frontier_actions.rs index 679cafa3dc..6a1e64db84 100644 --- a/node/src/transition_frontier/transition_frontier_actions.rs +++ b/node/src/transition_frontier/transition_frontier_actions.rs @@ -6,6 +6,7 @@ use openmina_core::block::ArcBlockWithHash; use openmina_core::ActionEvent; use serde::{Deserialize, Serialize}; +use super::candidate::TransitionFrontierCandidateAction; use super::genesis::TransitionFrontierGenesisAction; use super::genesis_effectful::TransitionFrontierGenesisEffectfulAction; use super::sync::{SyncError, TransitionFrontierSyncAction, TransitionFrontierSyncState}; @@ -29,6 +30,7 @@ pub enum TransitionFrontierAction { #[action_event(level = info)] GenesisProvenInject, + Candidate(TransitionFrontierCandidateAction), Sync(TransitionFrontierSyncAction), /// Transition frontier synced. Synced { @@ -62,6 +64,7 @@ impl redux::EnablingCondition for TransitionFrontierAction { b.is_genesis() && !Arc::ptr_eq(&genesis.block, &b.block) }) } + TransitionFrontierAction::Candidate(a) => a.is_enabled(state, time), TransitionFrontierAction::Sync(a) => a.is_enabled(state, time), TransitionFrontierAction::Synced { .. 
} => matches!( state.transition_frontier.sync, @@ -69,12 +72,11 @@ impl redux::EnablingCondition for TransitionFrontierAction { ), TransitionFrontierAction::SyncFailed { best_tip, error } => { let sync = &state.transition_frontier.sync; - sync.best_tip() - .map_or(false, |b| b.hash() == best_tip.hash()) + sync.best_tip().is_some_and(|b| b.hash() == best_tip.hash()) && match error { SyncError::BlockApplyFailed(block, _) => sync .block_state(block.hash()) - .map_or(false, |s| s.is_apply_error()), + .is_some_and(|s| s.is_apply_error()), } } } diff --git a/node/src/transition_frontier/transition_frontier_effects.rs b/node/src/transition_frontier/transition_frontier_effects.rs index c8b2ae97fe..cb990619a8 100644 --- a/node/src/transition_frontier/transition_frontier_effects.rs +++ b/node/src/transition_frontier/transition_frontier_effects.rs @@ -1,13 +1,15 @@ +use mina_p2p_messages::gossip::GossipNetMessageV2; use redux::Timestamp; use crate::block_producer::BlockProducerAction; -use crate::consensus::ConsensusAction; use crate::ledger::LEDGER_DEPTH; use crate::p2p::channels::best_tip::P2pChannelsBestTipAction; +use crate::p2p::P2pNetworkPubsubAction; use crate::snark_pool::{SnarkPoolAction, SnarkWork}; use crate::stats::sync::SyncingLedger; use crate::{Store, TransactionPoolAction}; +use super::candidate::TransitionFrontierCandidateAction; use super::genesis::TransitionFrontierGenesisAction; use super::sync::ledger::snarked::{ TransitionFrontierSyncLedgerSnarkedAction, ACCOUNT_SUBTREE_HEIGHT, @@ -54,6 +56,7 @@ pub fn transition_frontier_effects( synced_effects(&meta, store); } } + TransitionFrontierAction::Candidate(_) => {} TransitionFrontierAction::Sync(a) => { match a { TransitionFrontierSyncAction::Init { @@ -210,6 +213,7 @@ pub fn transition_frontier_effects( } } } + TransitionFrontierSyncAction::BlocksSendToArchive { .. 
} => {} TransitionFrontierSyncAction::BlocksSuccess => { store.dispatch(TransitionFrontierSyncAction::CommitInit); } @@ -305,9 +309,21 @@ fn synced_effects( best_tip: best_tip.block.clone(), }); } + // TODO this should be handled by a callback + // If this get dispatched, we received block from libp2p. + if !store.dispatch(P2pNetworkPubsubAction::BroadcastValidatedMessage { + message_id: p2p::BroadcastMessageId::BlockHash { + hash: best_tip.hash().clone(), + }, + }) { + // Otherwise block was received from WebRTC so inject it in libp2p. + store.dispatch(P2pNetworkPubsubAction::WebRtcRebroadcast { + message: GossipNetMessageV2::NewState(best_tip.block().clone()), + }); + } let best_tip_hash = best_tip.merkle_root_hash().clone(); - store.dispatch(ConsensusAction::Prune); + store.dispatch(TransitionFrontierCandidateAction::Prune); store.dispatch(BlockProducerAction::BestTipUpdate { best_tip: best_tip.block.clone(), }); diff --git a/node/src/transition_frontier/transition_frontier_reducer.rs b/node/src/transition_frontier/transition_frontier_reducer.rs index 559e2f7e6d..af4d8f0c91 100644 --- a/node/src/transition_frontier/transition_frontier_reducer.rs +++ b/node/src/transition_frontier/transition_frontier_reducer.rs @@ -54,6 +54,12 @@ impl TransitionFrontierState { state.sync = TransitionFrontierSyncState::Synced { time: meta.time() }; } } + TransitionFrontierAction::Candidate(a) => { + super::candidate::TransitionFrontierCandidatesState::reducer( + openmina_core::Substate::from_compatible_substate(state_context), + meta.with_action(a), + ); + } TransitionFrontierAction::Sync(a) => { let best_chain = state.best_chain.clone(); super::sync::TransitionFrontierSyncState::reducer( diff --git a/node/src/transition_frontier/transition_frontier_state.rs b/node/src/transition_frontier/transition_frontier_state.rs index 9fdde1e3b5..9c888ca833 100644 --- a/node/src/transition_frontier/transition_frontier_state.rs +++ b/node/src/transition_frontier/transition_frontier_state.rs @@ 
-9,6 +9,7 @@ use openmina_core::block::{AppliedBlock, ArcBlockWithHash}; use openmina_core::bug_condition; use serde::{Deserialize, Serialize}; +use super::candidate::TransitionFrontierCandidatesState; use super::genesis::TransitionFrontierGenesisState; use super::sync::TransitionFrontierSyncState; use super::TransitionFrontierConfig; @@ -23,6 +24,7 @@ pub struct TransitionFrontierState { /// Needed protocol states for applying transactions in the root /// scan state that we don't have in the `best_chain` list. pub needed_protocol_states: BTreeMap, + pub candidates: TransitionFrontierCandidatesState, /// Transition frontier synchronization state pub sync: TransitionFrontierSyncState, @@ -31,18 +33,22 @@ pub struct TransitionFrontierState { pub blacklist: BTreeMap, /// The diff of `Self::best_chain` with the previous one pub chain_diff: Option, + /// Archive mode enabled + pub archive_enabled: bool, } impl TransitionFrontierState { - pub fn new(config: TransitionFrontierConfig) -> Self { + pub fn new(config: TransitionFrontierConfig, archive_enabled: bool) -> Self { Self { config, genesis: TransitionFrontierGenesisState::Idle, + candidates: TransitionFrontierCandidatesState::new(), best_chain: Vec::with_capacity(290), needed_protocol_states: Default::default(), sync: TransitionFrontierSyncState::Idle, blacklist: Default::default(), chain_diff: None, + archive_enabled, } } @@ -196,4 +202,21 @@ impl TransitionFrontierState { reorg_best_tip: false, // TODO: Unused for now }) } + + pub fn resources_usage(&self) -> serde_json::Value { + serde_json::json!({ + "best_chain_size": self.best_chain.len(), + "needed_protocol_states_size": self + .needed_protocol_states + .len(), + "blacklist_size": self.blacklist.len(), + "diff_tx_size": self + .chain_diff + .as_ref() + // `saturating_add` is not needed here as collection size cannot overflow usize + // but it makes clippy satisfied + .map(|d| d.new_commands.len().saturating_add(d.removed_commands.len())) + .unwrap_or_default() 
+ }) + } } diff --git a/node/src/watched_accounts/watched_accounts_actions.rs b/node/src/watched_accounts/watched_accounts_actions.rs index a7ee015208..dd18630856 100644 --- a/node/src/watched_accounts/watched_accounts_actions.rs +++ b/node/src/watched_accounts/watched_accounts_actions.rs @@ -64,12 +64,12 @@ fn should_request_ledger_initial_state(state: &crate::State, pub_key: &NonZeroCu state .watched_accounts .get(pub_key) - .filter(|_| state.consensus.best_tip.is_some()) - .map_or(false, |a| match &a.initial_state { + .filter(|_| state.transition_frontier.candidates.best_tip.is_some()) + .is_some_and(|a| match &a.initial_state { WatchedAccountLedgerInitialState::Idle { .. } => true, WatchedAccountLedgerInitialState::Error { .. } => true, WatchedAccountLedgerInitialState::Pending { block, .. } => { - let Some(best_tip) = state.consensus.best_tip() else { + let Some(best_tip) = state.transition_frontier.candidates.best_tip() else { return false; }; &block.hash != best_tip.hash @@ -94,7 +94,7 @@ impl redux::EnablingCondition for WatchedAccountsAction { should_request_ledger_initial_state(state, pub_key) } WatchedAccountsAction::LedgerInitialStateGetError { pub_key, .. } => { - state.watched_accounts.get(pub_key).map_or(false, |a| { + state.watched_accounts.get(pub_key).is_some_and(|a| { matches!( &a.initial_state, WatchedAccountLedgerInitialState::Pending { .. } @@ -104,14 +104,14 @@ impl redux::EnablingCondition for WatchedAccountsAction { WatchedAccountsAction::LedgerInitialStateGetRetry { pub_key } => state .watched_accounts .get(pub_key) - .map_or(false, |a| match &a.initial_state { + .is_some_and(|a| match &a.initial_state { WatchedAccountLedgerInitialState::Error { time: t, .. } => { - time.checked_sub(*t).map_or(false, |d| d.as_secs() >= 3) + time.checked_sub(*t).is_some_and(|d| d.as_secs() >= 3) } _ => false, }), WatchedAccountsAction::LedgerInitialStateGetSuccess { pub_key, .. 
} => { - state.watched_accounts.get(pub_key).map_or(false, |a| { + state.watched_accounts.get(pub_key).is_some_and(|a| { matches!( &a.initial_state, WatchedAccountLedgerInitialState::Pending { .. } @@ -123,7 +123,7 @@ impl redux::EnablingCondition for WatchedAccountsAction { state .watched_accounts .get(pub_key) - .map_or(false, |v| v.initial_state.is_success()) + .is_some_and(|v| v.initial_state.is_success()) && super::account_relevant_transactions_in_diff_iter(pub_key, diff) .any(|_| true) } @@ -163,7 +163,7 @@ impl redux::EnablingCondition for WatchedAccountsAction { // let peer = state.p2p.get_ready_peer(&self.peer_id)?; // peer.rpc.outgoing.get(self.p2p_rpc_id) // }) - // .map_or(false, |v| v.is_init() || v.is_pending()); + // .is_some_and( |v| v.is_init() || v.is_pending()); should_req_for_block && p2p_rpc_is_pending } diff --git a/node/testing/Cargo.toml b/node/testing/Cargo.toml index 997d0290ee..bd82527d62 100644 --- a/node/testing/Cargo.toml +++ b/node/testing/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "openmina-node-testing" -version = "0.13.0" +version = "0.14.0" edition = "2021" license = "Apache-2.0" diff --git a/node/testing/docker/Dockerfile.openmina b/node/testing/docker/Dockerfile.openmina index 18250403c8..acf620b5e3 100644 --- a/node/testing/docker/Dockerfile.openmina +++ b/node/testing/docker/Dockerfile.openmina @@ -9,7 +9,7 @@ RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y RUN rm /bin/sh && ln -s /bin/bash /bin/sh -RUN source ~/.cargo/env && rustup update 1.83 +RUN source ~/.cargo/env && rustup update 1.84 RUN git clone https://github.com/openmina/openmina diff --git a/node/testing/docker/Dockerfile.test b/node/testing/docker/Dockerfile.test index 8227afd053..9ecf3095f5 100644 --- a/node/testing/docker/Dockerfile.test +++ b/node/testing/docker/Dockerfile.test @@ -2,7 +2,7 @@ FROM vladsimplestakingcom/mina-openmina-builder:focal AS builder RUN git fetch && git checkout feat/tests-with-debugger -RUN source 
~/.cargo/env && cargo +1.83 build --release -p openmina-node-testing --bin runner --bin openmina-node-testing +RUN source ~/.cargo/env && cargo +1.84 build --release -p openmina-node-testing --bin runner --bin openmina-node-testing FROM vladsimplestakingcom/mina-debugger:2.0.0rampup4-focal diff --git a/node/testing/src/cluster/mod.rs b/node/testing/src/cluster/mod.rs index c65d65d242..e1a8a300f3 100644 --- a/node/testing/src/cluster/mod.rs +++ b/node/testing/src/cluster/mod.rs @@ -280,7 +280,7 @@ impl Cluster { initial_peers, external_addrs: vec![], enabled_channels: ChannelId::iter_all().collect(), - peer_discovery: true, + peer_discovery: testing_config.peer_discovery, timeouts: testing_config.timeouts, limits: P2pLimits::default().with_max_peers(Some(testing_config.max_peers)), meshsub: P2pMeshsubConfig { @@ -293,6 +293,7 @@ impl Cluster { }, transition_frontier: TransitionFrontierConfig::new(testing_config.genesis), block_producer: block_producer_config, + archive: None, tx_pool: ledger::transaction_pool::Config { trust_system: (), pool_max_size: 3000, @@ -647,7 +648,7 @@ impl Cluster { while self .scenario .peek() - .map_or(false, |(scenario, _)| scenario.info.id != target_scenario) + .is_some_and(|(scenario, _)| scenario.info.id != target_scenario) { if !self.exec_next().await? 
{ break; @@ -657,7 +658,7 @@ impl Cluster { while self .scenario .peek() - .map_or(false, |(scenario, _)| scenario.info.id == target_scenario) + .is_some_and(|(scenario, _)| scenario.info.id == target_scenario) { if let Some(step_i) = step_i { if self.scenario.peek_i().unwrap().1 >= step_i { diff --git a/node/testing/src/cluster/runner/run.rs b/node/testing/src/cluster/runner/run.rs index 057497d8df..8b8a23e6ff 100644 --- a/node/testing/src/cluster/runner/run.rs +++ b/node/testing/src/cluster/runner/run.rs @@ -47,7 +47,7 @@ pub enum RunDecision { pub struct DynEffectsData(Arc>); -impl<'a> super::ClusterRunner<'a> { +impl super::ClusterRunner<'_> { /// Execute cluster in the infinite loop, based on conditions specified /// in the `RunCfg`. pub async fn run( diff --git a/node/testing/src/node/ocaml/mod.rs b/node/testing/src/node/ocaml/mod.rs index 0f7f63f4ef..e9adf0b36a 100644 --- a/node/testing/src/node/ocaml/mod.rs +++ b/node/testing/src/node/ocaml/mod.rs @@ -368,11 +368,7 @@ impl OcamlNode { tokio::time::timeout(timeout, async { loop { interval.tick().await; - if self - .synced_best_tip() - .await - .map_or(false, |tip| tip.is_some()) - { + if self.synced_best_tip().await.is_ok_and(|tip| tip.is_some()) { return; } } diff --git a/node/testing/src/node/rust/config.rs b/node/testing/src/node/rust/config.rs index 4754ebad74..1ab15a902b 100644 --- a/node/testing/src/node/rust/config.rs +++ b/node/testing/src/node/rust/config.rs @@ -40,6 +40,7 @@ pub struct RustNodeTestingConfig { pub libp2p_port: Option, #[serde(default)] pub recorder: Recorder, + pub peer_discovery: bool, } #[derive(Serialize, Deserialize, Debug, Default, Clone)] @@ -68,6 +69,7 @@ impl RustNodeTestingConfig { timeouts: P2pTimeouts::default(), libp2p_port: None, recorder: Default::default(), + peer_discovery: true, } } @@ -83,6 +85,7 @@ impl RustNodeTestingConfig { timeouts: P2pTimeouts::without_rpc(), libp2p_port: None, recorder: Default::default(), + peer_discovery: true, } } @@ -118,4 +121,9 @@ impl 
RustNodeTestingConfig { )); self } + + pub fn with_no_peer_discovery(mut self) -> Self { + self.peer_discovery = false; + self + } } diff --git a/node/testing/src/node/rust/event.rs b/node/testing/src/node/rust/event.rs index 85d34b6d4d..27db833f6b 100644 --- a/node/testing/src/node/rust/event.rs +++ b/node/testing/src/node/rust/event.rs @@ -40,7 +40,7 @@ impl NonDeterministicEvent { } pub fn should_drop_event(event: &Event) -> bool { - Self::new(event).map_or(false, |e| e.should_drop()) + Self::new(event).is_some_and(|e| e.should_drop()) } pub fn should_drop(&self) -> bool { diff --git a/node/testing/src/scenarios/driver.rs b/node/testing/src/scenarios/driver.rs index 364168e275..a863b9cc31 100644 --- a/node/testing/src/scenarios/driver.rs +++ b/node/testing/src/scenarios/driver.rs @@ -190,6 +190,26 @@ impl<'cluster> Driver<'cluster> { } } + /// Waits for a specific event that satisfies the given predicate, executing all events encountered along the way. + /// + /// # Arguments + /// + /// * `duration` - Maximum time to wait for the event + /// * `f` - A predicate function that takes a node ID, event, and state, returning true when the desired event is found + /// + /// # Returns + /// + /// Returns a Result containing: + /// * `Some((node_id, event))` - If an event satisfying the predicate is found before the timeout + /// * `None` - If no matching event is found within the timeout period + /// + /// # Example + /// + /// ```no_run + /// driver.wait_for(Duration::from_secs(5), |node_id, event, state| { + /// matches!(event, Event::BlockReceived { .. }) + /// }).await?; + /// ``` pub async fn wait_for( &mut self, duration: Duration, @@ -402,8 +422,8 @@ impl<'cluster> Driver<'cluster> { } /// Runs the cluster until each of the `nodes` is listening on the localhost interface. 
-pub async fn wait_for_nodes_listening_on_localhost<'cluster>( - driver: &mut Driver<'cluster>, +pub async fn wait_for_nodes_listening_on_localhost( + driver: &mut Driver<'_>, duration: Duration, nodes: impl IntoIterator, ) -> anyhow::Result { @@ -483,8 +503,8 @@ fn is_network_connection_finalized(conn_state: &P2pNetworkConnectionState) -> Op } /// Runst the cluster until the node is connected to the node that satisfies the predicate. -pub async fn wait_for_connection_established<'cluster, F: PeerPredicate>( - driver: &mut Driver<'cluster>, +pub async fn wait_for_connection_established( + driver: &mut Driver<'_>, duration: Duration, mut f: F, ) -> anyhow::Result { @@ -565,8 +585,8 @@ where /// Runs cluster until there is a `quiet_dur` period of no events, returning /// `Ok(true)` in this case. If there is no such period for `timeout` period of /// time, then returns `Ok(false)` -pub async fn run_until_no_events<'cluster>( - driver: &mut Driver<'cluster>, +pub async fn run_until_no_events( + driver: &mut Driver<'_>, quiet_dur: Duration, timeout: Duration, ) -> anyhow::Result { @@ -651,8 +671,8 @@ where } } -pub async fn wait_for_connection_event<'cluster, F>( - driver: &mut Driver<'cluster>, +pub async fn wait_for_connection_event( + driver: &mut Driver<'_>, duration: Duration, mut f: F, ) -> anyhow::Result @@ -679,15 +699,13 @@ where }) }) }) - .map_or(false, |(peer_id, peer)| { - f.matches(node_id, peer_id, &peer.status) - }) + .is_some_and(|(peer_id, peer)| f.matches(node_id, peer_id, &peer.status)) }; driver.exec_steps_until(duration, pred).await } -pub async fn wait_for_connection_error<'cluster, F>( - driver: &mut Driver<'cluster>, +pub async fn wait_for_connection_error( + driver: &mut Driver<'_>, duration: Duration, mut f: F, ) -> anyhow::Result @@ -700,9 +718,7 @@ where let p2p = state.p2p.ready()?; p2p.peers.iter().find(|(_, peer)| peer_has_addr(peer, addr)) }) - .map_or(false, |(peer_id, peer)| { - f.matches(node_id, peer_id, &peer.status) - }) + 
.is_some_and(|(peer_id, peer)| f.matches(node_id, peer_id, &peer.status)) }; driver.exec_steps_until(duration, pred).await } diff --git a/node/testing/src/scenarios/mod.rs b/node/testing/src/scenarios/mod.rs index 9bacc6dbe1..06ba500a8f 100644 --- a/node/testing/src/scenarios/mod.rs +++ b/node/testing/src/scenarios/mod.rs @@ -51,7 +51,7 @@ use self::p2p::basic_outgoing_connections::{ MakeMultipleOutgoingConnections, MakeOutgoingConnection, }; use self::p2p::kademlia::KademliaBootstrap; -use self::p2p::pubsub::P2pReceiveBlock; +use self::p2p::pubsub::P2pReceiveMessage; use self::p2p::signaling::P2pSignaling; use self::record_replay::block_production::RecordReplayBlockProduction; use self::record_replay::bootstrap::RecordReplayBootstrap; @@ -83,7 +83,7 @@ pub enum Scenarios { MultiNodeBasicConnectivityPeerDiscovery(MultiNodeBasicConnectivityPeerDiscovery), SimulationSmall(SimulationSmall), SimulationSmallForeverRealTime(SimulationSmallForeverRealTime), - P2pReceiveBlock(P2pReceiveBlock), + P2pReceiveMessage(P2pReceiveMessage), P2pSignaling(P2pSignaling), P2pConnectionDiscoveryRustNodeAsSeed(P2pConnectionDiscoveryRustNodeAsSeed), MultiNodePubsubPropagateBlock(MultiNodePubsubPropagateBlock), @@ -189,7 +189,7 @@ impl Scenarios { } Self::SimulationSmall(_) => SimulationSmall::DOCS, Self::SimulationSmallForeverRealTime(_) => SimulationSmallForeverRealTime::DOCS, - Self::P2pReceiveBlock(_) => P2pReceiveBlock::DOCS, + Self::P2pReceiveMessage(_) => P2pReceiveMessage::DOCS, Self::P2pSignaling(_) => P2pSignaling::DOCS, Self::P2pConnectionDiscoveryRustNodeAsSeed(_) => { P2pConnectionDiscoveryRustNodeAsSeed::DOCS @@ -260,7 +260,7 @@ impl Scenarios { Self::MultiNodeBasicConnectivityPeerDiscovery(v) => v.run(runner).await, Self::SimulationSmall(v) => v.run(runner).await, Self::SimulationSmallForeverRealTime(v) => v.run(runner).await, - Self::P2pReceiveBlock(v) => v.run(runner).await, + Self::P2pReceiveMessage(v) => v.run(runner).await, Self::P2pSignaling(v) => v.run(runner).await, 
Self::P2pConnectionDiscoveryRustNodeAsSeed(v) => v.run(runner).await, Self::MultiNodePubsubPropagateBlock(v) => v.run(runner).await, diff --git a/node/testing/src/scenarios/multi_node/basic_connectivity_peer_discovery.rs b/node/testing/src/scenarios/multi_node/basic_connectivity_peer_discovery.rs index 4156525d5c..0b4fe5b05e 100644 --- a/node/testing/src/scenarios/multi_node/basic_connectivity_peer_discovery.rs +++ b/node/testing/src/scenarios/multi_node/basic_connectivity_peer_discovery.rs @@ -125,7 +125,7 @@ impl MultiNodeBasicConnectivityPeerDiscovery { .p2p .ready() .and_then(|p2p| p2p.network.scheduler.discovery_state()) - .map_or(false, |discovery_state| discovery_state.is_bootstrapped()) + .is_some_and(|discovery_state| discovery_state.is_bootstrapped()) { // the node must find all already running OCaml nodes // assert_eq!(this.state().p2p.peers.len(), TOTAL_OCAML_NODES as usize); diff --git a/node/testing/src/scenarios/multi_node/vrf_correct_ledgers.rs b/node/testing/src/scenarios/multi_node/vrf_correct_ledgers.rs index 07fd97d8ff..1e4898f8a5 100644 --- a/node/testing/src/scenarios/multi_node/vrf_correct_ledgers.rs +++ b/node/testing/src/scenarios/multi_node/vrf_correct_ledgers.rs @@ -51,6 +51,7 @@ impl MultiNodeVrfGetCorrectLedgers { timeouts: P2pTimeouts::default(), libp2p_port: None, recorder: Default::default(), + peer_discovery: true, }); tokio::time::sleep(Duration::from_secs(2)).await; diff --git a/node/testing/src/scenarios/multi_node/vrf_correct_slots.rs b/node/testing/src/scenarios/multi_node/vrf_correct_slots.rs index 97c0e0a8c7..03e5733076 100644 --- a/node/testing/src/scenarios/multi_node/vrf_correct_slots.rs +++ b/node/testing/src/scenarios/multi_node/vrf_correct_slots.rs @@ -57,6 +57,7 @@ impl MultiNodeVrfGetCorrectSlots { timeouts: P2pTimeouts::default(), libp2p_port: None, recorder: Default::default(), + peer_discovery: true, }); tokio::time::sleep(Duration::from_secs(2)).await; diff --git 
a/node/testing/src/scenarios/multi_node/vrf_epoch_bounds_correct_ledgers.rs b/node/testing/src/scenarios/multi_node/vrf_epoch_bounds_correct_ledgers.rs index 886ae23b27..98e2c1d87d 100644 --- a/node/testing/src/scenarios/multi_node/vrf_epoch_bounds_correct_ledgers.rs +++ b/node/testing/src/scenarios/multi_node/vrf_epoch_bounds_correct_ledgers.rs @@ -58,6 +58,7 @@ impl MultiNodeVrfEpochBoundsCorrectLedger { timeouts: P2pTimeouts::default(), libp2p_port: None, recorder: Default::default(), + peer_discovery: true, }; let producer_node = runner.add_rust_node(RustNodeTestingConfig { diff --git a/node/testing/src/scenarios/multi_node/vrf_epoch_bounds_evaluation.rs b/node/testing/src/scenarios/multi_node/vrf_epoch_bounds_evaluation.rs index 77eb09bc8f..a126aac773 100644 --- a/node/testing/src/scenarios/multi_node/vrf_epoch_bounds_evaluation.rs +++ b/node/testing/src/scenarios/multi_node/vrf_epoch_bounds_evaluation.rs @@ -44,6 +44,7 @@ impl MultiNodeVrfEpochBoundsEvaluation { timeouts: P2pTimeouts::default(), libp2p_port: None, recorder: Default::default(), + peer_discovery: true, }; let producer_node = runner.add_rust_node(RustNodeTestingConfig { diff --git a/node/testing/src/scenarios/p2p/pubsub.rs b/node/testing/src/scenarios/p2p/pubsub.rs index 225e1d5eb5..e6fbf6ad8b 100644 --- a/node/testing/src/scenarios/p2p/pubsub.rs +++ b/node/testing/src/scenarios/p2p/pubsub.rs @@ -1,53 +1,57 @@ use std::time::Duration; +use node::ActionKind; + use crate::{ hosts, node::RustNodeTestingConfig, - scenarios::{ClusterRunner, Driver}, + scenarios::{ClusterRunner, RunCfg, RunCfgAdvanceTime}, }; -/// Receive a block via meshsub +/// Receive a message via meshsub +/// 1. Create a normal node with default devnet config, with devnet peers as initial peers +/// 2. Wait for 2 minutes +/// 3. Create a node with discovery disabled and first node as only peer +/// 4. 
Wait for first node to broadcast message to second one #[derive(documented::Documented, Default, Clone, Copy)] -pub struct P2pReceiveBlock; +pub struct P2pReceiveMessage; -impl P2pReceiveBlock { +impl P2pReceiveMessage { pub async fn run(self, mut runner: ClusterRunner<'_>) { - let config = RustNodeTestingConfig::devnet_default() - // make sure it will not ask initial peers - .max_peers(1) - .initial_peers(vec![hosts::devnet()[0].clone()]); + let config = RustNodeTestingConfig::devnet_default().initial_peers(hosts::devnet()); + let retransmitter_openmina_node = runner.add_rust_node(config); - let retransmitter_peer_id = runner - .node(retransmitter_openmina_node) - .unwrap() - .state() - .p2p - .my_id(); + + let _ = runner + .run( + RunCfg::default() + .timeout(Duration::from_secs(120)) + .advance_time(RunCfgAdvanceTime::Real) + .action_handler(|_, _, _, _| false), + ) + .await; let config = RustNodeTestingConfig::devnet_default() - // make sure it will not ask initial peers - .max_peers(1) + // Make sure it doesn't connect to any more peers + .with_no_peer_discovery() .initial_peers(vec![retransmitter_openmina_node.into()]); + let receiver_openmina_node = runner.add_rust_node(config); - let mut driver = Driver::new(runner); - driver - .wait_for(Duration::from_secs(20 * 60), |node, _, state| { - let Some(p2p) = state.p2p.ready() else { - return false; - }; - node == receiver_openmina_node - && p2p - .network - .scheduler - .broadcast_state - .incoming_block - .as_ref() - .map_or(false, |(peer_id, _)| peer_id.eq(&retransmitter_peer_id)) - }) + runner + .run( + RunCfg::default() + .timeout(Duration::from_secs(60 * 30)) + .advance_time(RunCfgAdvanceTime::Real) + .action_handler(move |node, _state, _, action| { + node == receiver_openmina_node + && matches!( + action.action().kind(), + ActionKind::P2pNetworkPubsubValidateIncomingMessage + ) + }), + ) .await - .unwrap(); - - eprintln!("passed"); + .expect("Test failed"); } } diff --git 
a/node/testing/src/scenarios/record_replay/bootstrap.rs b/node/testing/src/scenarios/record_replay/bootstrap.rs index d6becca307..382ac644ff 100644 --- a/node/testing/src/scenarios/record_replay/bootstrap.rs +++ b/node/testing/src/scenarios/record_replay/bootstrap.rs @@ -37,7 +37,7 @@ impl RecordReplayBootstrap { && state .transition_frontier .best_tip() - .map_or(false, |tip| !tip.is_genesis()) + .is_some_and(|tip| !tip.is_genesis()) }), ) .await diff --git a/node/testing/src/scenarios/solo_node/sync_root_snarked_ledger.rs b/node/testing/src/scenarios/solo_node/sync_root_snarked_ledger.rs index c8a79ec755..6b718972e5 100644 --- a/node/testing/src/scenarios/solo_node/sync_root_snarked_ledger.rs +++ b/node/testing/src/scenarios/solo_node/sync_root_snarked_ledger.rs @@ -246,6 +246,6 @@ impl SoloNodeSyncRootSnarkedLedger { fn is_event_first_ledger_query(self, state: &State, event: &Event) -> bool { self.event_ledger_query_addr(state, event) - .map_or(false, |addr| addr == LedgerAddress::first(addr.length())) + .is_some_and(|addr| addr == LedgerAddress::first(addr.length())) } } diff --git a/node/testing/src/scenarios/solo_node/sync_to_genesis.rs b/node/testing/src/scenarios/solo_node/sync_to_genesis.rs index f5ffc5eb6a..b7af3fedad 100644 --- a/node/testing/src/scenarios/solo_node/sync_to_genesis.rs +++ b/node/testing/src/scenarios/solo_node/sync_to_genesis.rs @@ -54,6 +54,7 @@ impl SoloNodeSyncToGenesis { timeouts: Default::default(), libp2p_port: None, recorder: Default::default(), + peer_discovery: true, }); runner diff --git a/node/testing/src/scenarios/solo_node/sync_to_genesis_custom.rs b/node/testing/src/scenarios/solo_node/sync_to_genesis_custom.rs index 6e795b221f..d0957d931f 100644 --- a/node/testing/src/scenarios/solo_node/sync_to_genesis_custom.rs +++ b/node/testing/src/scenarios/solo_node/sync_to_genesis_custom.rs @@ -70,6 +70,7 @@ impl SoloNodeSyncToGenesisCustom { timeouts: P2pTimeouts::default(), libp2p_port: None, recorder: Default::default(), + 
peer_discovery: true, }); runner diff --git a/node/testing/src/service/mod.rs b/node/testing/src/service/mod.rs index b32a8f3492..d3b77c84d9 100644 --- a/node/testing/src/service/mod.rs +++ b/node/testing/src/service/mod.rs @@ -12,8 +12,8 @@ use ledger::scan_state::transaction_logic::{verifiable, WithStatus}; use ledger::Mask; use mina_p2p_messages::string::ByteString; use mina_p2p_messages::v2::{ - CurrencyFeeStableV1, LedgerHash, LedgerProofProdStableV2, MinaBaseProofStableV2, - MinaStateSnarkedLedgerStateWithSokStableV2, NonZeroCurvePoint, + ArchiveTransitionFronntierDiff, CurrencyFeeStableV1, LedgerHash, LedgerProofProdStableV2, + MinaBaseProofStableV2, MinaStateSnarkedLedgerStateWithSokStableV2, NonZeroCurvePoint, ProverExtendBlockchainInputStableV2, SnarkWorkerWorkerRpcsVersionedGetWorkV2TResponseA0Single, StateHash, TransactionSnarkStableV2, TransactionSnarkWorkTStableV2Proofs, }; @@ -39,6 +39,7 @@ use node::snark::work_verify::{SnarkWorkVerifyId, SnarkWorkVerifyService}; use node::snark::{BlockVerifier, SnarkEvent, TransactionVerifier, VerifierSRS}; use node::snark_pool::SnarkPoolService; use node::stats::Stats; +use node::transition_frontier::archive::archive_service::ArchiveService; use node::transition_frontier::genesis::GenesisConfig; use node::{ event_source::Event, @@ -499,6 +500,12 @@ impl BlockProducerVrfEvaluatorService for NodeTestingService { } } +impl ArchiveService for NodeTestingService { + fn send_to_archive(&mut self, data: ArchiveTransitionFronntierDiff) { + self.real.send_to_archive(data); + } +} + use std::cell::RefCell; thread_local! 
{ static GENESIS_PROOF: RefCell)>> = const { RefCell::new(None)}; @@ -574,6 +581,13 @@ impl BlockProducerService for NodeTestingService { } } } + + fn with_producer_keypair( + &self, + _f: impl FnOnce(&node::account::AccountSecretKey) -> T, + ) -> Option { + None + } } impl ExternalSnarkWorkerService for NodeTestingService { diff --git a/node/testing/src/service/rpc_service.rs b/node/testing/src/service/rpc_service.rs index 91a923ba55..475a3339c1 100644 --- a/node/testing/src/service/rpc_service.rs +++ b/node/testing/src/service/rpc_service.rs @@ -15,6 +15,7 @@ macro_rules! to_real { impl RpcService for super::NodeTestingService { to_real!(respond_state_get, (&State, Option<&str>)); to_real!(respond_status_get, node::rpc::RpcStatusGetResponse); + to_real!(respond_heartbeat_get, node::rpc::RpcHeartbeatGetResponse); to_real!(respond_sync_stats_get, node::rpc::RpcSyncStatsGetResponse); to_real!( respond_block_producer_stats_get, diff --git a/node/testing/src/simulator/mod.rs b/node/testing/src/simulator/mod.rs index af3c5c555f..0633284716 100644 --- a/node/testing/src/simulator/mod.rs +++ b/node/testing/src/simulator/mod.rs @@ -50,6 +50,7 @@ impl Simulator { timeouts: Default::default(), libp2p_port: None, recorder: self.config.recorder.clone(), + peer_discovery: true, } } @@ -220,9 +221,9 @@ impl Simulator { self.wait_for_all_nodes_synced(runner).await; } - pub async fn setup_and_run_with_listener<'a, AL, ALF>( + pub async fn setup_and_run_with_listener( &mut self, - runner: &mut ClusterRunner<'a>, + runner: &mut ClusterRunner<'_>, listener: ALF, ) where ALF: FnMut() -> AL, @@ -234,25 +235,25 @@ impl Simulator { self.run_with_listener(runner, listener).await; } - pub async fn setup_and_run<'a>(&mut self, runner: &mut ClusterRunner<'a>) { + pub async fn setup_and_run(&mut self, runner: &mut ClusterRunner<'_>) { self.setup(runner).await; self.run_with_listener(runner, || |_, _, _, _| false).await; } - pub async fn setup<'a>(&mut self, runner: &mut ClusterRunner<'a>) { 
+ pub async fn setup(&mut self, runner: &mut ClusterRunner<'_>) { self.set_up_seed_nodes(runner).await; self.set_up_normal_nodes(runner).await; self.set_up_snark_worker_nodes(runner).await; self.set_up_block_producer_nodes(runner).await; } - pub async fn run<'a>(&mut self, runner: &mut ClusterRunner<'a>) { + pub async fn run(&mut self, runner: &mut ClusterRunner<'_>) { self.run_with_listener(runner, || |_, _, _, _| false).await; } - pub async fn run_with_listener<'a, AL, ALF>( + pub async fn run_with_listener( &mut self, - runner: &mut ClusterRunner<'a>, + runner: &mut ClusterRunner<'_>, mut listener: ALF, ) where ALF: FnMut() -> AL, diff --git a/node/testing/tests/common.rs b/node/testing/tests/common.rs index e62bb8caf2..d26df0288c 100644 --- a/node/testing/tests/common.rs +++ b/node/testing/tests/common.rs @@ -1,7 +1,7 @@ #[macro_export] macro_rules! scenario_doc { ($doc:expr) => { - if std::env::var("SCENARIO_INFO").map_or(false, |s| !s.is_empty()) { + if std::env::var("SCENARIO_INFO").is_some_and(|s| !s.is_empty()) { println!("{}", $doc); return; } diff --git a/node/testing/tests/p2p_pubsub.rs b/node/testing/tests/p2p_pubsub.rs index a870b346bc..025853d646 100644 --- a/node/testing/tests/p2p_pubsub.rs +++ b/node/testing/tests/p2p_pubsub.rs @@ -1,5 +1,5 @@ -use openmina_node_testing::scenarios::p2p::pubsub::P2pReceiveBlock; +use openmina_node_testing::scenarios::p2p::pubsub::P2pReceiveMessage; mod common; -scenario_test!(pubsub_receive_block, P2pReceiveBlock, P2pReceiveBlock); +scenario_test!(pubsub_receive_block, P2pReceiveMessage, P2pReceiveMessage); diff --git a/node/web/Cargo.toml b/node/web/Cargo.toml index 76668323cc..11c07fdba0 100644 --- a/node/web/Cargo.toml +++ b/node/web/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "openmina-node-web" -version = "0.13.0" +version = "0.14.0" edition = "2021" license = "Apache-2.0" diff --git a/node/web/src/node/builder.rs b/node/web/src/node/builder.rs index 125ed29617..06b6c7ba7d 100644 --- 
a/node/web/src/node/builder.rs +++ b/node/web/src/node/builder.rs @@ -258,6 +258,7 @@ impl NodeBuilder { pool_max_size: node::daemon_json::Daemon::DEFAULT.tx_pool_max_size(), slot_tx_end: node::daemon_json::Daemon::DEFAULT.slot_tx_end(), }, + archive: None, }; // build service diff --git a/p2p/Cargo.toml b/p2p/Cargo.toml index 914a00beac..81dfc2e8be 100644 --- a/p2p/Cargo.toml +++ b/p2p/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "p2p" -version = "0.13.0" +version = "0.14.0" edition = "2021" license = "Apache-2.0" @@ -12,7 +12,7 @@ serde_with = { workspace = true } strum = "0.26" strum_macros = "0.26" derive_more = "0.99.17" -rand = { version = "0.8", features = [ "small_rng" ] } +rand = { version = "0.8", features = ["small_rng"] } bytes = "*" bs58 = "0.4.0" base64 = "0.22" @@ -32,13 +32,21 @@ reqwest = { version = "0.11.22", optional = true } unsigned-varint = { version = "0.8.0" } hex = { version = "0.4.3" } bitflags = { version = "2.4.1", features = ["serde"] } + +graphannis-malloc_size_of = { workspace = true } +graphannis-malloc_size_of_derive = { workspace = true } + # crypto zeroize = { version = "1.7" } hkdf = { version = "0.12.4" } blake2 = { version = "0.10.6" } chacha20poly1305 = { version = "0.10.1" } curve25519-dalek = { version = "4.1", features = ["legacy_compatibility"] } -libp2p-identity = { version = "=0.2.7", features = ["ed25519", "rand", "serde"] } +libp2p-identity = { version = "=0.2.7", features = [ + "ed25519", + "rand", + "serde", +] } multiaddr = { version = "0.18.1" } redux = { workspace = true } @@ -48,7 +56,11 @@ salsa-simple = { path = "../tools/salsa-simple" } openmina-core = { path = "../core" } openmina-macros = { path = "../macros" } quick-protobuf = "0.8.1" -crypto-bigint = { version = "0.5.5", features = ["generic-array", "serde", "alloc"] } +crypto-bigint = { version = "0.5.5", features = [ + "generic-array", + "serde", + "alloc", +] } prost = { version = "0.12.4" } @@ -66,10 +78,12 @@ p2p-testing = { path = "testing" } 
[target.'cfg(not(target_arch = "wasm32"))'.dependencies] -redux = { workspace = true, features=["serializable_callbacks"] } +redux = { workspace = true, features = ["serializable_callbacks"] } tokio = { version = "1.26", features = ["rt"] } webrtc = { git = "https://github.com/openmina/webrtc.git", rev = "e8705db39af1b198b324a5db6ff57fb213ba75e9", optional = true } -datachannel = { git = "https://github.com/openmina/datachannel-rs.git", rev = "1bfb064d0ff3e54a93ae0288409902aab8d102d3", optional = true, features = ["vendored"] } +datachannel = { git = "https://github.com/openmina/datachannel-rs.git", rev = "1bfb064d0ff3e54a93ae0288409902aab8d102d3", optional = true, features = [ + "vendored", +] } reqwest = { version = "0.11", features = ["json"] } mio = { version = "0.8.11", features = ["os-poll", "net"] } libc = { version = "0.2.151" } @@ -81,11 +95,29 @@ wasm-bindgen-futures = "0.4" gloo-timers = { version = "0.3", features = ["futures"] } gloo-utils = "0.2" js-sys = "0.3.64" -web-sys = { version = "0.3", features = ["MessageEvent", "RtcPeerConnection", "RtcConfiguration", "RtcIceTransportPolicy", "RtcDataChannel", "RtcDataChannelInit", "RtcSessionDescription", "RtcSessionDescriptionInit", "RtcSdpType", "RtcPeerConnectionState", "RtcIceGatheringState", "Window", "Request", "RequestInit", "Headers", "Response"] } +web-sys = { version = "0.3", features = [ + "MessageEvent", + "RtcPeerConnection", + "RtcConfiguration", + "RtcIceTransportPolicy", + "RtcDataChannel", + "RtcDataChannelInit", + "RtcSessionDescription", + "RtcSessionDescriptionInit", + "RtcSdpType", + "RtcPeerConnectionState", + "RtcIceGatheringState", + "Window", + "Request", + "RequestInit", + "Headers", + "Response", +] } tokio = { version = "1.26", features = ["macros"] } getrandom = { version = "0.2", features = ["js"] } [features] +serializable_callbacks = [] p2p-webrtc = ["p2p-webrtc-rs"] p2p-webrtc-rs = ["webrtc"] p2p-webrtc-cpp = ["datachannel"] diff --git a/p2p/build.rs b/p2p/build.rs index 
9ece6fde94..21d8e18910 100644 --- a/p2p/build.rs +++ b/p2p/build.rs @@ -1,6 +1,9 @@ fn main() { let mut cfg = prost_build::Config::new(); - cfg.type_attribute(".", "#[derive(serde::Serialize, serde::Deserialize)]"); + cfg.type_attribute( + ".", + "#[derive(serde::Serialize, serde::Deserialize, malloc_size_of_derive::MallocSizeOf)]", + ); cfg.compile_protos( &[ "src/network/pubsub/message.proto", diff --git a/p2p/libp2p-rpc-behaviour/Cargo.toml b/p2p/libp2p-rpc-behaviour/Cargo.toml index bed5d2fd7f..4647e0eb6e 100644 --- a/p2p/libp2p-rpc-behaviour/Cargo.toml +++ b/p2p/libp2p-rpc-behaviour/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "libp2p-rpc-behaviour" -version = "0.13.0" +version = "0.14.0" edition = "2021" license = "Apache-2.0" diff --git a/p2p/src/channels/best_tip/p2p_channels_best_tip_actions.rs b/p2p/src/channels/best_tip/p2p_channels_best_tip_actions.rs index 4e7e9cb276..03b3759110 100644 --- a/p2p/src/channels/best_tip/p2p_channels_best_tip_actions.rs +++ b/p2p/src/channels/best_tip/p2p_channels_best_tip_actions.rs @@ -59,18 +59,16 @@ impl P2pChannelsBestTipAction { impl redux::EnablingCondition for P2pChannelsBestTipAction { fn is_enabled(&self, state: &P2pState, _time: redux::Timestamp) -> bool { match self { - P2pChannelsBestTipAction::Init { peer_id } => { - state.get_ready_peer(peer_id).map_or(false, |p| { - matches!(&p.channels.best_tip, P2pChannelsBestTipState::Enabled) - }) - } + P2pChannelsBestTipAction::Init { peer_id } => state + .get_ready_peer(peer_id) + .is_some_and(|p| matches!(&p.channels.best_tip, P2pChannelsBestTipState::Enabled)), P2pChannelsBestTipAction::Pending { peer_id } => { - state.get_ready_peer(peer_id).map_or(false, |p| { + state.get_ready_peer(peer_id).is_some_and(|p| { matches!(&p.channels.best_tip, P2pChannelsBestTipState::Init { .. 
}) }) } P2pChannelsBestTipAction::Ready { peer_id } => { - state.get_ready_peer(peer_id).map_or(false, |p| { + state.get_ready_peer(peer_id).is_some_and(|p| { matches!( &p.channels.best_tip, P2pChannelsBestTipState::Pending { .. } @@ -79,7 +77,7 @@ impl redux::EnablingCondition for P2pChannelsBestTipAction { } P2pChannelsBestTipAction::RequestSend { peer_id } => state .get_ready_peer(peer_id) - .map_or(false, |p| match &p.channels.best_tip { + .is_some_and(|p| match &p.channels.best_tip { P2pChannelsBestTipState::Ready { local, .. } => matches!( local, BestTipPropagationState::WaitingForRequest { .. } @@ -92,7 +90,7 @@ impl redux::EnablingCondition for P2pChannelsBestTipAction { // us inferior block than it has in the past. state .get_ready_peer(peer_id) - .map_or(false, |p| match &p.channels.best_tip { + .is_some_and(|p| match &p.channels.best_tip { P2pChannelsBestTipState::Ready { local, .. } => { matches!(local, BestTipPropagationState::Requested { .. }) } @@ -101,7 +99,7 @@ impl redux::EnablingCondition for P2pChannelsBestTipAction { } P2pChannelsBestTipAction::RequestReceived { peer_id } => state .get_ready_peer(peer_id) - .map_or(false, |p| match &p.channels.best_tip { + .is_some_and(|p| match &p.channels.best_tip { P2pChannelsBestTipState::Ready { remote, .. } => matches!( remote, BestTipPropagationState::WaitingForRequest { .. } @@ -111,7 +109,7 @@ impl redux::EnablingCondition for P2pChannelsBestTipAction { }), P2pChannelsBestTipAction::ResponseSend { peer_id, best_tip } => state .get_ready_peer(peer_id) - .map_or(false, |p| match &p.channels.best_tip { + .is_some_and(|p| match &p.channels.best_tip { P2pChannelsBestTipState::Ready { remote, last_sent, .. 
} => { diff --git a/p2p/src/channels/p2p_channels_actions.rs b/p2p/src/channels/p2p_channels_actions.rs index d456bce283..2fa46205a1 100644 --- a/p2p/src/channels/p2p_channels_actions.rs +++ b/p2p/src/channels/p2p_channels_actions.rs @@ -115,9 +115,9 @@ pub struct P2pChannelsMessageReceivedAction { impl redux::EnablingCondition for P2pChannelsMessageReceivedAction { fn is_enabled(&self, state: &P2pState, _time: redux::Timestamp) -> bool { - state.get_ready_peer(&self.peer_id).map_or(false, |p| { - p.channels.is_channel_ready(self.message.channel_id()) - }) + state + .get_ready_peer(&self.peer_id) + .is_some_and(|p| p.channels.is_channel_ready(self.message.channel_id())) } } diff --git a/p2p/src/channels/rpc/p2p_channels_rpc_actions.rs b/p2p/src/channels/rpc/p2p_channels_rpc_actions.rs index 85d171f59a..a84f8e1b56 100644 --- a/p2p/src/channels/rpc/p2p_channels_rpc_actions.rs +++ b/p2p/src/channels/rpc/p2p_channels_rpc_actions.rs @@ -75,21 +75,15 @@ impl P2pChannelsRpcAction { impl redux::EnablingCondition for P2pChannelsRpcAction { fn is_enabled(&self, state: &P2pState, time: Timestamp) -> bool { match self { - P2pChannelsRpcAction::Init { peer_id } => { - state.get_ready_peer(peer_id).map_or(false, |p| { - matches!(p.channels.rpc, P2pChannelsRpcState::Enabled) - }) - } - P2pChannelsRpcAction::Pending { peer_id } => { - state.get_ready_peer(peer_id).map_or(false, |p| { - matches!(p.channels.rpc, P2pChannelsRpcState::Init { .. }) - }) - } - P2pChannelsRpcAction::Ready { peer_id } => { - state.get_ready_peer(peer_id).map_or(false, |p| { - matches!(p.channels.rpc, P2pChannelsRpcState::Pending { .. }) - }) - } + P2pChannelsRpcAction::Init { peer_id } => state + .get_ready_peer(peer_id) + .is_some_and(|p| matches!(p.channels.rpc, P2pChannelsRpcState::Enabled)), + P2pChannelsRpcAction::Pending { peer_id } => state + .get_ready_peer(peer_id) + .is_some_and(|p| matches!(p.channels.rpc, P2pChannelsRpcState::Init { .. 
})), + P2pChannelsRpcAction::Ready { peer_id } => state + .get_ready_peer(peer_id) + .is_some_and(|p| matches!(p.channels.rpc, P2pChannelsRpcState::Pending { .. })), P2pChannelsRpcAction::RequestSend { peer_id, id, @@ -100,7 +94,7 @@ impl redux::EnablingCondition for P2pChannelsRpcAction { .get(peer_id) .filter(|p| !p.is_libp2p() || request.kind().supported_by_libp2p()) .and_then(|p| p.status.as_ready()) - .map_or(false, |p| { + .is_some_and(|p| { matches!( &p.channels.rpc, P2pChannelsRpcState::Ready { @@ -111,7 +105,7 @@ impl redux::EnablingCondition for P2pChannelsRpcAction { ) }), P2pChannelsRpcAction::Timeout { peer_id, id } => { - state.get_ready_peer(peer_id).map_or(false, |p| { + state.get_ready_peer(peer_id).is_some_and(|p| { matches!( &p.channels.rpc, P2pChannelsRpcState::Ready { @@ -124,7 +118,7 @@ impl redux::EnablingCondition for P2pChannelsRpcAction { P2pChannelsRpcAction::ResponseReceived { peer_id, id, .. } => { // TODO(binier): use consensus to enforce that peer doesn't send // us inferior block than it has in the past. - state.get_ready_peer(peer_id).map_or(false, |p| { + state.get_ready_peer(peer_id).is_some_and(|p| { match &p.channels.rpc { P2pChannelsRpcState::Ready { local, .. } => { // TODO(binier): validate that response corresponds to request. @@ -140,7 +134,7 @@ impl redux::EnablingCondition for P2pChannelsRpcAction { } P2pChannelsRpcAction::RequestReceived { peer_id, id, .. } => state .get_ready_peer(peer_id) - .map_or(false, |p| match &p.channels.rpc { + .is_some_and(|p| match &p.channels.rpc { P2pChannelsRpcState::Ready { remote, .. 
} => { remote.pending_requests.len() < MAX_P2P_RPC_REMOTE_CONCURRENT_REQUESTS && remote.pending_requests.iter().all(|v| v.id != *id) @@ -149,7 +143,7 @@ impl redux::EnablingCondition for P2pChannelsRpcAction { }), P2pChannelsRpcAction::ResponsePending { peer_id, id } => state .get_ready_peer(peer_id) - .map_or(false, |p| match &p.channels.rpc { + .is_some_and(|p| match &p.channels.rpc { P2pChannelsRpcState::Ready { remote, .. } => remote .pending_requests .iter() @@ -177,7 +171,7 @@ impl redux::EnablingCondition for P2pChannelsRpcAction { }; } - state.get_ready_peer(peer_id).map_or(false, |p| { + state.get_ready_peer(peer_id).is_some_and(|p| { match &p.channels.rpc { P2pChannelsRpcState::Ready { remote, .. } => { // TODO(binier): validate that response corresponds to request. diff --git a/p2p/src/channels/signaling/discovery/p2p_channels_signaling_discovery_actions.rs b/p2p/src/channels/signaling/discovery/p2p_channels_signaling_discovery_actions.rs index 66f0a3b0b1..42f762028a 100644 --- a/p2p/src/channels/signaling/discovery/p2p_channels_signaling_discovery_actions.rs +++ b/p2p/src/channels/signaling/discovery/p2p_channels_signaling_discovery_actions.rs @@ -102,7 +102,7 @@ impl redux::EnablingCondition for P2pChannelsSignalingDiscoveryAction fn is_enabled(&self, state: &P2pState, now: redux::Timestamp) -> bool { match self { P2pChannelsSignalingDiscoveryAction::Init { peer_id } => { - state.get_ready_peer(peer_id).map_or(false, |p| { + state.get_ready_peer(peer_id).is_some_and(|p| { matches!( &p.channels.signaling.discovery, P2pChannelsSignalingDiscoveryState::Enabled @@ -110,7 +110,7 @@ impl redux::EnablingCondition for P2pChannelsSignalingDiscoveryAction }) } P2pChannelsSignalingDiscoveryAction::Pending { peer_id } => { - state.get_ready_peer(peer_id).map_or(false, |p| { + state.get_ready_peer(peer_id).is_some_and(|p| { matches!( &p.channels.signaling.discovery, P2pChannelsSignalingDiscoveryState::Init { .. 
} @@ -118,7 +118,7 @@ impl redux::EnablingCondition for P2pChannelsSignalingDiscoveryAction }) } P2pChannelsSignalingDiscoveryAction::Ready { peer_id } => { - state.get_ready_peer(peer_id).map_or(false, |p| { + state.get_ready_peer(peer_id).is_some_and(|p| { matches!( &p.channels.signaling.discovery, P2pChannelsSignalingDiscoveryState::Pending { .. } @@ -126,7 +126,7 @@ impl redux::EnablingCondition for P2pChannelsSignalingDiscoveryAction }) } P2pChannelsSignalingDiscoveryAction::RequestSend { peer_id } => { - state.get_ready_peer(peer_id).map_or(false, |p| { + state.get_ready_peer(peer_id).is_some_and(|p| { match &p.channels.signaling.discovery { P2pChannelsSignalingDiscoveryState::Ready { local, .. } => { match local { @@ -136,7 +136,7 @@ impl redux::EnablingCondition for P2pChannelsSignalingDiscoveryAction // Allow one discovery request per minute. // TODO(binier): make configurable now.checked_sub(*time) - .map_or(false, |dur| dur.as_secs() >= 60) + .is_some_and(|dur| dur.as_secs() >= 60) } _ => false, } @@ -147,7 +147,7 @@ impl redux::EnablingCondition for P2pChannelsSignalingDiscoveryAction } P2pChannelsSignalingDiscoveryAction::DiscoveryRequestReceived { peer_id, .. } => state .get_ready_peer(peer_id) - .map_or(false, |p| match &p.channels.signaling.discovery { + .is_some_and(|p| match &p.channels.signaling.discovery { P2pChannelsSignalingDiscoveryState::Ready { local, .. } => { matches!(local, SignalingDiscoveryState::Requested { .. }) } @@ -159,17 +159,16 @@ impl redux::EnablingCondition for P2pChannelsSignalingDiscoveryAction .. } => { let target_peer_id = target_public_key.peer_id(); - let has_peer_requested_discovery = - state.get_ready_peer(peer_id).map_or(false, |p| { - match &p.channels.signaling.discovery { - P2pChannelsSignalingDiscoveryState::Ready { local, .. } => { - matches!(local, SignalingDiscoveryState::DiscoveryRequested { .. 
}) - } - _ => false, + let has_peer_requested_discovery = state.get_ready_peer(peer_id).is_some_and(|p| { + match &p.channels.signaling.discovery { + P2pChannelsSignalingDiscoveryState::Ready { local, .. } => { + matches!(local, SignalingDiscoveryState::DiscoveryRequested { .. }) } - }); + _ => false, + } + }); let target_peer_already_discovering_them = - state.get_ready_peer(&target_peer_id).map_or(false, |p| { + state.get_ready_peer(&target_peer_id).is_some_and(|p| { p.channels.signaling.sent_discovered_peer_id() == Some(*peer_id) }); has_peer_requested_discovery @@ -180,7 +179,7 @@ impl redux::EnablingCondition for P2pChannelsSignalingDiscoveryAction } P2pChannelsSignalingDiscoveryAction::DiscoveredRejectReceived { peer_id, .. } => state .get_ready_peer(peer_id) - .map_or(false, |p| match &p.channels.signaling.discovery { + .is_some_and(|p| match &p.channels.signaling.discovery { P2pChannelsSignalingDiscoveryState::Ready { local, .. } => { matches!(local, SignalingDiscoveryState::Discovered { .. }) } @@ -188,7 +187,7 @@ impl redux::EnablingCondition for P2pChannelsSignalingDiscoveryAction }), P2pChannelsSignalingDiscoveryAction::DiscoveredAcceptReceived { peer_id, .. } => state .get_ready_peer(peer_id) - .map_or(false, |p| match &p.channels.signaling.discovery { + .is_some_and(|p| match &p.channels.signaling.discovery { P2pChannelsSignalingDiscoveryState::Ready { local, .. } => { matches!(local, SignalingDiscoveryState::Discovered { .. }) } @@ -196,7 +195,7 @@ impl redux::EnablingCondition for P2pChannelsSignalingDiscoveryAction }), P2pChannelsSignalingDiscoveryAction::AnswerSend { peer_id, .. } => state .get_ready_peer(peer_id) - .map_or(false, |p| match &p.channels.signaling.discovery { + .is_some_and(|p| match &p.channels.signaling.discovery { P2pChannelsSignalingDiscoveryState::Ready { local, .. } => { matches!(local, SignalingDiscoveryState::DiscoveredAccepted { .. 
}) } @@ -204,7 +203,7 @@ impl redux::EnablingCondition for P2pChannelsSignalingDiscoveryAction }), P2pChannelsSignalingDiscoveryAction::RequestReceived { peer_id } => state .get_ready_peer(peer_id) - .map_or(false, |p| match &p.channels.signaling.discovery { + .is_some_and(|p| match &p.channels.signaling.discovery { P2pChannelsSignalingDiscoveryState::Ready { remote, .. } => matches!( remote, SignalingDiscoveryState::WaitingForRequest { .. } @@ -217,7 +216,7 @@ impl redux::EnablingCondition for P2pChannelsSignalingDiscoveryAction // to handle malicious peers. P2pChannelsSignalingDiscoveryAction::DiscoveryRequestSend { peer_id, .. } => { !state.already_has_min_peers() - && state.get_ready_peer(peer_id).map_or(false, |p| { + && state.get_ready_peer(peer_id).is_some_and(|p| { match &p.channels.signaling.discovery { P2pChannelsSignalingDiscoveryState::Ready { remote, .. } => { matches!(remote, SignalingDiscoveryState::Requested { .. }) @@ -228,7 +227,7 @@ impl redux::EnablingCondition for P2pChannelsSignalingDiscoveryAction } P2pChannelsSignalingDiscoveryAction::DiscoveredReceived { peer_id, .. } => state .get_ready_peer(peer_id) - .map_or(false, |p| match &p.channels.signaling.discovery { + .is_some_and(|p| match &p.channels.signaling.discovery { P2pChannelsSignalingDiscoveryState::Ready { remote, .. } => { matches!(remote, SignalingDiscoveryState::DiscoveryRequested { .. }) } @@ -236,7 +235,7 @@ impl redux::EnablingCondition for P2pChannelsSignalingDiscoveryAction }), P2pChannelsSignalingDiscoveryAction::DiscoveredReject { peer_id, .. } => state .get_ready_peer(peer_id) - .map_or(false, |p| match &p.channels.signaling.discovery { + .is_some_and(|p| match &p.channels.signaling.discovery { P2pChannelsSignalingDiscoveryState::Ready { remote, .. } => { matches!(remote, SignalingDiscoveryState::Discovered { .. 
}) } @@ -244,7 +243,7 @@ impl redux::EnablingCondition for P2pChannelsSignalingDiscoveryAction }), P2pChannelsSignalingDiscoveryAction::DiscoveredAccept { peer_id, .. } => state .get_ready_peer(peer_id) - .map_or(false, |p| match &p.channels.signaling.discovery { + .is_some_and(|p| match &p.channels.signaling.discovery { P2pChannelsSignalingDiscoveryState::Ready { remote, .. } => { matches!(remote, SignalingDiscoveryState::Discovered { .. }) } @@ -252,7 +251,7 @@ impl redux::EnablingCondition for P2pChannelsSignalingDiscoveryAction }), P2pChannelsSignalingDiscoveryAction::AnswerReceived { peer_id, .. } => state .get_ready_peer(peer_id) - .map_or(false, |p| match &p.channels.signaling.discovery { + .is_some_and(|p| match &p.channels.signaling.discovery { P2pChannelsSignalingDiscoveryState::Ready { remote, .. } => { matches!(remote, SignalingDiscoveryState::DiscoveredAccepted { .. }) } @@ -260,7 +259,7 @@ impl redux::EnablingCondition for P2pChannelsSignalingDiscoveryAction }), P2pChannelsSignalingDiscoveryAction::AnswerDecrypted { peer_id, .. } => state .get_ready_peer(peer_id) - .map_or(false, |p| match &p.channels.signaling.discovery { + .is_some_and(|p| match &p.channels.signaling.discovery { P2pChannelsSignalingDiscoveryState::Ready { remote, .. } => { matches!(remote, SignalingDiscoveryState::DiscoveredAccepted { .. 
}) } diff --git a/p2p/src/channels/signaling/exchange/p2p_channels_signaling_exchange_actions.rs b/p2p/src/channels/signaling/exchange/p2p_channels_signaling_exchange_actions.rs index 93d64d4f6d..c5842b89bd 100644 --- a/p2p/src/channels/signaling/exchange/p2p_channels_signaling_exchange_actions.rs +++ b/p2p/src/channels/signaling/exchange/p2p_channels_signaling_exchange_actions.rs @@ -82,7 +82,7 @@ impl redux::EnablingCondition for P2pChannelsSignalingExchangeAction { fn is_enabled(&self, state: &P2pState, _time: redux::Timestamp) -> bool { match self { P2pChannelsSignalingExchangeAction::Init { peer_id } => { - state.get_ready_peer(peer_id).map_or(false, |p| { + state.get_ready_peer(peer_id).is_some_and(|p| { matches!( &p.channels.signaling.exchange, P2pChannelsSignalingExchangeState::Enabled @@ -90,7 +90,7 @@ impl redux::EnablingCondition for P2pChannelsSignalingExchangeAction { }) } P2pChannelsSignalingExchangeAction::Pending { peer_id } => { - state.get_ready_peer(peer_id).map_or(false, |p| { + state.get_ready_peer(peer_id).is_some_and(|p| { matches!( &p.channels.signaling.exchange, P2pChannelsSignalingExchangeState::Init { .. } @@ -98,7 +98,7 @@ impl redux::EnablingCondition for P2pChannelsSignalingExchangeAction { }) } P2pChannelsSignalingExchangeAction::Ready { peer_id } => { - state.get_ready_peer(peer_id).map_or(false, |p| { + state.get_ready_peer(peer_id).is_some_and(|p| { matches!( &p.channels.signaling.exchange, P2pChannelsSignalingExchangeState::Pending { .. } @@ -107,7 +107,7 @@ impl redux::EnablingCondition for P2pChannelsSignalingExchangeAction { } P2pChannelsSignalingExchangeAction::RequestSend { peer_id } => { !state.already_has_max_peers() - && state.get_ready_peer(peer_id).map_or(false, |p| { + && state.get_ready_peer(peer_id).is_some_and(|p| { match &p.channels.signaling.exchange { P2pChannelsSignalingExchangeState::Ready { local, .. 
} => matches!( local, @@ -120,7 +120,7 @@ impl redux::EnablingCondition for P2pChannelsSignalingExchangeAction { } P2pChannelsSignalingExchangeAction::OfferReceived { peer_id, .. } => state .get_ready_peer(peer_id) - .map_or(false, |p| match &p.channels.signaling.exchange { + .is_some_and(|p| match &p.channels.signaling.exchange { P2pChannelsSignalingExchangeState::Ready { local, .. } => { matches!(local, SignalingExchangeState::Requested { .. }) } @@ -128,7 +128,7 @@ impl redux::EnablingCondition for P2pChannelsSignalingExchangeAction { }), P2pChannelsSignalingExchangeAction::OfferDecryptError { peer_id } => state .get_ready_peer(peer_id) - .map_or(false, |p| match &p.channels.signaling.exchange { + .is_some_and(|p| match &p.channels.signaling.exchange { P2pChannelsSignalingExchangeState::Ready { local, .. } => { matches!(local, SignalingExchangeState::Offered { .. }) } @@ -136,7 +136,7 @@ impl redux::EnablingCondition for P2pChannelsSignalingExchangeAction { }), P2pChannelsSignalingExchangeAction::OfferDecryptSuccess { peer_id, .. } => state .get_ready_peer(peer_id) - .map_or(false, |p| match &p.channels.signaling.exchange { + .is_some_and(|p| match &p.channels.signaling.exchange { P2pChannelsSignalingExchangeState::Ready { local, .. } => { matches!(local, SignalingExchangeState::Offered { .. }) } @@ -144,7 +144,7 @@ impl redux::EnablingCondition for P2pChannelsSignalingExchangeAction { }), P2pChannelsSignalingExchangeAction::AnswerSend { peer_id, .. } => state .get_ready_peer(peer_id) - .map_or(false, |p| match &p.channels.signaling.exchange { + .is_some_and(|p| match &p.channels.signaling.exchange { P2pChannelsSignalingExchangeState::Ready { local, .. } => { matches!(local, SignalingExchangeState::Offered { .. 
}) } @@ -152,7 +152,7 @@ impl redux::EnablingCondition for P2pChannelsSignalingExchangeAction { }), P2pChannelsSignalingExchangeAction::RequestReceived { peer_id } => state .get_ready_peer(peer_id) - .map_or(false, |p| match &p.channels.signaling.exchange { + .is_some_and(|p| match &p.channels.signaling.exchange { P2pChannelsSignalingExchangeState::Ready { remote, .. } => matches!( remote, SignalingExchangeState::WaitingForRequest { .. } @@ -162,7 +162,7 @@ impl redux::EnablingCondition for P2pChannelsSignalingExchangeAction { }), P2pChannelsSignalingExchangeAction::OfferSend { peer_id, .. } => state .get_ready_peer(peer_id) - .map_or(false, |p| match &p.channels.signaling.exchange { + .is_some_and(|p| match &p.channels.signaling.exchange { P2pChannelsSignalingExchangeState::Ready { remote, .. } => { matches!(remote, SignalingExchangeState::Requested { .. }) } @@ -170,7 +170,7 @@ impl redux::EnablingCondition for P2pChannelsSignalingExchangeAction { }), P2pChannelsSignalingExchangeAction::AnswerReceived { peer_id, .. } => state .get_ready_peer(peer_id) - .map_or(false, |p| match &p.channels.signaling.exchange { + .is_some_and(|p| match &p.channels.signaling.exchange { P2pChannelsSignalingExchangeState::Ready { remote, .. } => { matches!(remote, SignalingExchangeState::Offered { .. 
}) } diff --git a/p2p/src/channels/snark/p2p_channels_snark_actions.rs b/p2p/src/channels/snark/p2p_channels_snark_actions.rs index cfe2171350..dba737da18 100644 --- a/p2p/src/channels/snark/p2p_channels_snark_actions.rs +++ b/p2p/src/channels/snark/p2p_channels_snark_actions.rs @@ -54,6 +54,7 @@ pub enum P2pChannelsSnarkAction { Libp2pBroadcast { snark: Snark, nonce: u32, + is_local: bool, }, } @@ -77,23 +78,19 @@ impl P2pChannelsSnarkAction { impl redux::EnablingCondition for P2pChannelsSnarkAction { fn is_enabled(&self, state: &P2pState, _time: redux::Timestamp) -> bool { match self { - P2pChannelsSnarkAction::Init { peer_id } => { - state.get_ready_peer(peer_id).map_or(false, |p| { - matches!(&p.channels.snark, P2pChannelsSnarkState::Enabled) - }) - } - P2pChannelsSnarkAction::Pending { peer_id } => { - state.get_ready_peer(peer_id).map_or(false, |p| { - matches!(&p.channels.snark, P2pChannelsSnarkState::Init { .. }) - }) - } + P2pChannelsSnarkAction::Init { peer_id } => state + .get_ready_peer(peer_id) + .is_some_and(|p| matches!(&p.channels.snark, P2pChannelsSnarkState::Enabled)), + P2pChannelsSnarkAction::Pending { peer_id } => state + .get_ready_peer(peer_id) + .is_some_and(|p| matches!(&p.channels.snark, P2pChannelsSnarkState::Init { .. })), P2pChannelsSnarkAction::Ready { peer_id } => { - state.get_ready_peer(peer_id).map_or(false, |p| { + state.get_ready_peer(peer_id).is_some_and(|p| { matches!(&p.channels.snark, P2pChannelsSnarkState::Pending { .. }) }) } P2pChannelsSnarkAction::RequestSend { peer_id, .. 
} => { - state.get_ready_peer(peer_id).map_or(false, |p| { + state.get_ready_peer(peer_id).is_some_and(|p| { matches!( &p.channels.snark, P2pChannelsSnarkState::Ready { @@ -107,7 +104,7 @@ impl redux::EnablingCondition for P2pChannelsSnarkAction { P2pChannelsSnarkAction::PromiseReceived { peer_id, promised_count, - } => state.get_ready_peer(peer_id).map_or(false, |p| { + } => state.get_ready_peer(peer_id).is_some_and(|p| { matches!( &p.channels.snark, P2pChannelsSnarkState::Ready { @@ -118,7 +115,7 @@ impl redux::EnablingCondition for P2pChannelsSnarkAction { ) }), P2pChannelsSnarkAction::Received { peer_id, .. } => { - state.get_ready_peer(peer_id).map_or(false, |p| { + state.get_ready_peer(peer_id).is_some_and(|p| { matches!( &p.channels.snark, P2pChannelsSnarkState::Ready { @@ -130,7 +127,7 @@ impl redux::EnablingCondition for P2pChannelsSnarkAction { } P2pChannelsSnarkAction::RequestReceived { peer_id, limit } => { *limit > 0 - && state.get_ready_peer(peer_id).map_or(false, |p| { + && state.get_ready_peer(peer_id).is_some_and(|p| { matches!( &p.channels.snark, P2pChannelsSnarkState::Ready { @@ -151,7 +148,7 @@ impl redux::EnablingCondition for P2pChannelsSnarkAction { && first_index <= last_index && state .get_ready_peer(peer_id) - .map_or(false, |p| match &p.channels.snark { + .is_some_and(|p| match &p.channels.snark { P2pChannelsSnarkState::Ready { remote, next_send_index, @@ -177,7 +174,7 @@ impl redux::EnablingCondition for P2pChannelsSnarkAction { .get(peer_id) .filter(|p| p.is_libp2p()) .and_then(|p| p.status.as_ready()) - .map_or(false, |p| p.channels.snark.is_ready()) + .is_some_and(|p| p.channels.snark.is_ready()) } P2pChannelsSnarkAction::Libp2pBroadcast { .. 
} => { cfg!(feature = "p2p-libp2p") diff --git a/p2p/src/channels/snark/p2p_channels_snark_reducer.rs b/p2p/src/channels/snark/p2p_channels_snark_reducer.rs index b92755a073..bb6ab1e57b 100644 --- a/p2p/src/channels/snark/p2p_channels_snark_reducer.rs +++ b/p2p/src/channels/snark/p2p_channels_snark_reducer.rs @@ -210,18 +210,27 @@ impl P2pChannelsSnarkState { } Ok(()) } + #[cfg(not(feature = "p2p-libp2p"))] + P2pChannelsSnarkAction::Libp2pBroadcast { .. } => Ok(()), #[cfg(feature = "p2p-libp2p")] - P2pChannelsSnarkAction::Libp2pBroadcast { snark, nonce } => { + P2pChannelsSnarkAction::Libp2pBroadcast { + snark, + nonce, + is_local, + } => { let dispatcher = state_context.into_dispatcher(); let message = Box::new((snark.statement(), (&snark).into())); let message = v2::NetworkPoolSnarkPoolDiffVersionedStableV2::AddSolvedWork(message); let nonce = nonce.into(); let message = GossipNetMessageV2::SnarkPoolDiff { message, nonce }; - dispatcher.push(P2pNetworkPubsubAction::Broadcast { message }); + if is_local { + dispatcher.push(P2pNetworkPubsubAction::Broadcast { message }); + } else { + // rebroadcast snark if received from webrtc network, otherwise noop. + dispatcher.push(P2pNetworkPubsubAction::WebRtcRebroadcast { message }); + } Ok(()) } - #[cfg(not(feature = "p2p-libp2p"))] - P2pChannelsSnarkAction::Libp2pBroadcast { .. } => Ok(()), P2pChannelsSnarkAction::Libp2pReceived { peer_id, snark, .. 
} => { let (dispatcher, state) = state_context.into_dispatcher_and_state(); let p2p_state: &P2pState = state.substate()?; diff --git a/p2p/src/channels/snark_job_commitment/p2p_channels_snark_job_commitment_actions.rs b/p2p/src/channels/snark_job_commitment/p2p_channels_snark_job_commitment_actions.rs index eae9c84e1a..b1073ec0b6 100644 --- a/p2p/src/channels/snark_job_commitment/p2p_channels_snark_job_commitment_actions.rs +++ b/p2p/src/channels/snark_job_commitment/p2p_channels_snark_job_commitment_actions.rs @@ -65,7 +65,7 @@ impl redux::EnablingCondition for P2pChannelsSnarkJobCommitmentAction fn is_enabled(&self, state: &P2pState, _time: redux::Timestamp) -> bool { match self { P2pChannelsSnarkJobCommitmentAction::Init { peer_id } => { - state.get_ready_peer(peer_id).map_or(false, |p| { + state.get_ready_peer(peer_id).is_some_and(|p| { matches!( &p.channels.snark_job_commitment, P2pChannelsSnarkJobCommitmentState::Enabled @@ -73,7 +73,7 @@ impl redux::EnablingCondition for P2pChannelsSnarkJobCommitmentAction }) } P2pChannelsSnarkJobCommitmentAction::Pending { peer_id } => { - state.get_ready_peer(peer_id).map_or(false, |p| { + state.get_ready_peer(peer_id).is_some_and(|p| { matches!( &p.channels.snark_job_commitment, P2pChannelsSnarkJobCommitmentState::Init { .. } @@ -81,7 +81,7 @@ impl redux::EnablingCondition for P2pChannelsSnarkJobCommitmentAction }) } P2pChannelsSnarkJobCommitmentAction::Ready { peer_id } => { - state.get_ready_peer(peer_id).map_or(false, |p| { + state.get_ready_peer(peer_id).is_some_and(|p| { matches!( &p.channels.snark_job_commitment, P2pChannelsSnarkJobCommitmentState::Pending { .. } @@ -89,7 +89,7 @@ impl redux::EnablingCondition for P2pChannelsSnarkJobCommitmentAction }) } P2pChannelsSnarkJobCommitmentAction::RequestSend { peer_id, .. 
} => { - state.get_ready_peer(peer_id).map_or(false, |p| { + state.get_ready_peer(peer_id).is_some_and(|p| { matches!( &p.channels.snark_job_commitment, P2pChannelsSnarkJobCommitmentState::Ready { @@ -103,7 +103,7 @@ impl redux::EnablingCondition for P2pChannelsSnarkJobCommitmentAction P2pChannelsSnarkJobCommitmentAction::PromiseReceived { peer_id, promised_count, - } => state.get_ready_peer(peer_id).map_or(false, |p| { + } => state.get_ready_peer(peer_id).is_some_and(|p| { matches!( &p.channels.snark_job_commitment, P2pChannelsSnarkJobCommitmentState::Ready { @@ -115,7 +115,7 @@ impl redux::EnablingCondition for P2pChannelsSnarkJobCommitmentAction ) }), P2pChannelsSnarkJobCommitmentAction::Received { peer_id, .. } => { - state.get_ready_peer(peer_id).map_or(false, |p| { + state.get_ready_peer(peer_id).is_some_and(|p| { matches!( &p.channels.snark_job_commitment, P2pChannelsSnarkJobCommitmentState::Ready { @@ -127,7 +127,7 @@ impl redux::EnablingCondition for P2pChannelsSnarkJobCommitmentAction } P2pChannelsSnarkJobCommitmentAction::RequestReceived { peer_id, limit } => { *limit > 0 - && state.get_ready_peer(peer_id).map_or(false, |p| { + && state.get_ready_peer(peer_id).is_some_and(|p| { matches!( &p.channels.snark_job_commitment, P2pChannelsSnarkJobCommitmentState::Ready { @@ -146,7 +146,7 @@ impl redux::EnablingCondition for P2pChannelsSnarkJobCommitmentAction } => { !commitments.is_empty() && first_index <= last_index - && state.get_ready_peer(peer_id).map_or(false, |p| { + && state.get_ready_peer(peer_id).is_some_and(|p| { match &p.channels.snark_job_commitment { P2pChannelsSnarkJobCommitmentState::Ready { remote, diff --git a/p2p/src/channels/streaming_rpc/p2p_channels_streaming_rpc_actions.rs b/p2p/src/channels/streaming_rpc/p2p_channels_streaming_rpc_actions.rs index 748e584695..cb4373f579 100644 --- a/p2p/src/channels/streaming_rpc/p2p_channels_streaming_rpc_actions.rs +++ b/p2p/src/channels/streaming_rpc/p2p_channels_streaming_rpc_actions.rs @@ -112,14 
+112,14 @@ impl redux::EnablingCondition for P2pChannelsStreamingRpcAction { .get(peer_id) .filter(|p| !p.is_libp2p()) .and_then(|p| p.status.as_ready()) - .map_or(false, |p| { + .is_some_and(|p| { matches!( p.channels.streaming_rpc, P2pChannelsStreamingRpcState::Enabled ) }), P2pChannelsStreamingRpcAction::Pending { peer_id } => { - state.get_ready_peer(peer_id).map_or(false, |p| { + state.get_ready_peer(peer_id).is_some_and(|p| { matches!( p.channels.streaming_rpc, P2pChannelsStreamingRpcState::Init { .. } @@ -127,7 +127,7 @@ impl redux::EnablingCondition for P2pChannelsStreamingRpcAction { }) } P2pChannelsStreamingRpcAction::Ready { peer_id } => { - state.get_ready_peer(peer_id).map_or(false, |p| { + state.get_ready_peer(peer_id).is_some_and(|p| { matches!( p.channels.streaming_rpc, P2pChannelsStreamingRpcState::Pending { .. } @@ -135,7 +135,7 @@ impl redux::EnablingCondition for P2pChannelsStreamingRpcAction { }) } P2pChannelsStreamingRpcAction::RequestSend { peer_id, id, .. } => { - state.get_ready_peer(peer_id).map_or(false, |p| { + state.get_ready_peer(peer_id).is_some_and(|p| { matches!( &p.channels.streaming_rpc, P2pChannelsStreamingRpcState::Ready { @@ -147,7 +147,7 @@ impl redux::EnablingCondition for P2pChannelsStreamingRpcAction { }) } P2pChannelsStreamingRpcAction::Timeout { peer_id, id } => { - state.get_ready_peer(peer_id).map_or(false, |p| { + state.get_ready_peer(peer_id).is_some_and(|p| { matches!( &p.channels.streaming_rpc, P2pChannelsStreamingRpcState::Ready { @@ -160,7 +160,7 @@ impl redux::EnablingCondition for P2pChannelsStreamingRpcAction { } P2pChannelsStreamingRpcAction::ResponseNextPartGet { peer_id, id, .. 
} => state .get_ready_peer(peer_id) - .map_or(false, |p| match &p.channels.streaming_rpc { + .is_some_and(|p| match &p.channels.streaming_rpc { P2pChannelsStreamingRpcState::Ready { local: P2pStreamingRpcLocalState::Requested { @@ -178,7 +178,7 @@ impl redux::EnablingCondition for P2pChannelsStreamingRpcAction { response, } => state .get_ready_peer(peer_id) - .map_or(false, |p| match &p.channels.streaming_rpc { + .is_some_and(|p| match &p.channels.streaming_rpc { P2pChannelsStreamingRpcState::Ready { local: P2pStreamingRpcLocalState::Requested { @@ -196,7 +196,7 @@ impl redux::EnablingCondition for P2pChannelsStreamingRpcAction { response, } => state .get_ready_peer(peer_id) - .map_or(false, |p| match &p.channels.streaming_rpc { + .is_some_and(|p| match &p.channels.streaming_rpc { P2pChannelsStreamingRpcState::Ready { local: P2pStreamingRpcLocalState::Requested { @@ -217,7 +217,7 @@ impl redux::EnablingCondition for P2pChannelsStreamingRpcAction { }), P2pChannelsStreamingRpcAction::RequestReceived { peer_id, .. } => state .get_ready_peer(peer_id) - .map_or(false, |p| match &p.channels.streaming_rpc { + .is_some_and(|p| match &p.channels.streaming_rpc { P2pChannelsStreamingRpcState::Ready { remote, .. 
} => { matches!( remote, @@ -230,7 +230,7 @@ impl redux::EnablingCondition for P2pChannelsStreamingRpcAction { P2pChannelsStreamingRpcAction::ResponsePending { peer_id, id } => state .get_ready_peer(peer_id) .and_then(|p| p.channels.streaming_rpc.remote_todo_request()) - .map_or(false, |(rpc_id, _)| rpc_id == *id), + .is_some_and(|(rpc_id, _)| rpc_id == *id), P2pChannelsStreamingRpcAction::ResponseSendInit { peer_id, id, @@ -238,7 +238,7 @@ impl redux::EnablingCondition for P2pChannelsStreamingRpcAction { } => state .get_ready_peer(peer_id) .and_then(|p| p.channels.streaming_rpc.remote_pending_request()) - .map_or(false, |(rpc_id, req)| { + .is_some_and(|(rpc_id, req)| { rpc_id == *id && response .as_ref() @@ -246,7 +246,7 @@ impl redux::EnablingCondition for P2pChannelsStreamingRpcAction { }), P2pChannelsStreamingRpcAction::ResponsePartNextSend { peer_id, id } => state .get_ready_peer(peer_id) - .map_or(false, |p| match &p.channels.streaming_rpc { + .is_some_and(|p| match &p.channels.streaming_rpc { P2pChannelsStreamingRpcState::Ready { remote: P2pStreamingRpcRemoteState::Requested { @@ -264,7 +264,7 @@ impl redux::EnablingCondition for P2pChannelsStreamingRpcAction { response, } => state .get_ready_peer(peer_id) - .map_or(false, |p| match &p.channels.streaming_rpc { + .is_some_and(|p| match &p.channels.streaming_rpc { P2pChannelsStreamingRpcState::Ready { remote: P2pStreamingRpcRemoteState::Requested { @@ -279,7 +279,7 @@ impl redux::EnablingCondition for P2pChannelsStreamingRpcAction { }), P2pChannelsStreamingRpcAction::ResponseSent { peer_id, id } => state .get_ready_peer(peer_id) - .map_or(false, |p| match &p.channels.streaming_rpc { + .is_some_and(|p| match &p.channels.streaming_rpc { P2pChannelsStreamingRpcState::Ready { remote: P2pStreamingRpcRemoteState::Requested { diff --git a/p2p/src/channels/transaction/p2p_channels_transaction_actions.rs b/p2p/src/channels/transaction/p2p_channels_transaction_actions.rs index bd4365c1b3..fc75742ba8 100644 --- 
a/p2p/src/channels/transaction/p2p_channels_transaction_actions.rs +++ b/p2p/src/channels/transaction/p2p_channels_transaction_actions.rs @@ -52,6 +52,7 @@ pub enum P2pChannelsTransactionAction { Libp2pBroadcast { transaction: Box, nonce: u32, + is_local: bool, }, } @@ -76,7 +77,7 @@ impl redux::EnablingCondition for P2pChannelsTransactionAction { fn is_enabled(&self, state: &P2pState, _time: redux::Timestamp) -> bool { match self { P2pChannelsTransactionAction::Init { peer_id } => { - state.get_ready_peer(peer_id).map_or(false, |p| { + state.get_ready_peer(peer_id).is_some_and(|p| { matches!( &p.channels.transaction, P2pChannelsTransactionState::Enabled @@ -84,7 +85,7 @@ impl redux::EnablingCondition for P2pChannelsTransactionAction { }) } P2pChannelsTransactionAction::Pending { peer_id } => { - state.get_ready_peer(peer_id).map_or(false, |p| { + state.get_ready_peer(peer_id).is_some_and(|p| { matches!( &p.channels.transaction, P2pChannelsTransactionState::Init { .. } @@ -92,7 +93,7 @@ impl redux::EnablingCondition for P2pChannelsTransactionAction { }) } P2pChannelsTransactionAction::Ready { peer_id } => { - state.get_ready_peer(peer_id).map_or(false, |p| { + state.get_ready_peer(peer_id).is_some_and(|p| { matches!( &p.channels.transaction, P2pChannelsTransactionState::Pending { .. } @@ -100,7 +101,7 @@ impl redux::EnablingCondition for P2pChannelsTransactionAction { }) } P2pChannelsTransactionAction::RequestSend { peer_id, .. 
} => { - state.get_ready_peer(peer_id).map_or(false, |p| { + state.get_ready_peer(peer_id).is_some_and(|p| { matches!( &p.channels.transaction, P2pChannelsTransactionState::Ready { @@ -114,7 +115,7 @@ impl redux::EnablingCondition for P2pChannelsTransactionAction { P2pChannelsTransactionAction::PromiseReceived { peer_id, promised_count, - } => state.get_ready_peer(peer_id).map_or(false, |p| { + } => state.get_ready_peer(peer_id).is_some_and(|p| { matches!( &p.channels.transaction, P2pChannelsTransactionState::Ready { @@ -125,7 +126,7 @@ impl redux::EnablingCondition for P2pChannelsTransactionAction { ) }), P2pChannelsTransactionAction::Received { peer_id, .. } => { - state.get_ready_peer(peer_id).map_or(false, |p| { + state.get_ready_peer(peer_id).is_some_and(|p| { matches!( &p.channels.transaction, P2pChannelsTransactionState::Ready { @@ -137,7 +138,7 @@ impl redux::EnablingCondition for P2pChannelsTransactionAction { } P2pChannelsTransactionAction::RequestReceived { peer_id, limit } => { *limit > 0 - && state.get_ready_peer(peer_id).map_or(false, |p| { + && state.get_ready_peer(peer_id).is_some_and(|p| { matches!( &p.channels.transaction, P2pChannelsTransactionState::Ready { @@ -156,7 +157,7 @@ impl redux::EnablingCondition for P2pChannelsTransactionAction { } => { !transactions.is_empty() && first_index <= last_index - && state.get_ready_peer(peer_id).map_or(false, |p| { + && state.get_ready_peer(peer_id).is_some_and(|p| { match &p.channels.transaction { P2pChannelsTransactionState::Ready { remote, @@ -185,7 +186,7 @@ impl redux::EnablingCondition for P2pChannelsTransactionAction { .get(peer_id) .filter(|p| p.is_libp2p()) .and_then(|p| p.status.as_ready()) - .map_or(false, |p| p.channels.transaction.is_ready()) + .is_some_and(|p| p.channels.transaction.is_ready()) } P2pChannelsTransactionAction::Libp2pBroadcast { .. 
} => { cfg!(feature = "p2p-libp2p") diff --git a/p2p/src/channels/transaction/p2p_channels_transaction_reducer.rs b/p2p/src/channels/transaction/p2p_channels_transaction_reducer.rs index 48820e1436..9b9e483cde 100644 --- a/p2p/src/channels/transaction/p2p_channels_transaction_reducer.rs +++ b/p2p/src/channels/transaction/p2p_channels_transaction_reducer.rs @@ -230,14 +230,23 @@ impl P2pChannelsTransactionState { #[cfg(not(feature = "p2p-libp2p"))] P2pChannelsTransactionAction::Libp2pBroadcast { .. } => Ok(()), #[cfg(feature = "p2p-libp2p")] - P2pChannelsTransactionAction::Libp2pBroadcast { transaction, nonce } => { + P2pChannelsTransactionAction::Libp2pBroadcast { + transaction, + nonce, + is_local, + } => { let dispatcher = state_context.into_dispatcher(); let message = v2::NetworkPoolTransactionPoolDiffVersionedStableV2( std::iter::once(*transaction).collect(), ); let nonce = nonce.into(); let message = GossipNetMessageV2::TransactionPoolDiff { message, nonce }; - dispatcher.push(P2pNetworkPubsubAction::Broadcast { message }); + if is_local { + dispatcher.push(P2pNetworkPubsubAction::Broadcast { message }); + } else { + // rebroadcast transaction if received from webrtc network, otherwise noop. + dispatcher.push(P2pNetworkPubsubAction::WebRtcRebroadcast { message }); + } Ok(()) } } diff --git a/p2p/src/connection/incoming/mod.rs b/p2p/src/connection/incoming/mod.rs index ed69097dc0..923f68a43a 100644 --- a/p2p/src/connection/incoming/mod.rs +++ b/p2p/src/connection/incoming/mod.rs @@ -6,6 +6,7 @@ pub use p2p_connection_incoming_actions::*; mod p2p_connection_incoming_reducer; +use malloc_size_of_derive::MallocSizeOf; use serde::{Deserialize, Serialize}; use crate::connection::RejectionReason; @@ -19,7 +20,7 @@ pub struct P2pConnectionIncomingInitOpts { } // TODO(binier): maybe move to `crate::webrtc`?
-#[derive(Serialize, Deserialize, Eq, PartialEq, Debug, Clone, Copy)] +#[derive(Serialize, Deserialize, Eq, PartialEq, Debug, Clone, Copy, MallocSizeOf)] pub enum IncomingSignalingMethod { /// Http rpc is used for sending offer and getting answer as a response. Http, diff --git a/p2p/src/connection/incoming/p2p_connection_incoming_actions.rs b/p2p/src/connection/incoming/p2p_connection_incoming_actions.rs index aefdc64f4c..86c948cc82 100644 --- a/p2p/src/connection/incoming/p2p_connection_incoming_actions.rs +++ b/p2p/src/connection/incoming/p2p_connection_incoming_actions.rs @@ -105,7 +105,7 @@ impl redux::EnablingCondition for P2pConnectionIncomingAction { state.incoming_accept(opts.peer_id, &opts.offer).is_ok() } P2pConnectionIncomingAction::AnswerSdpCreatePending { peer_id } => { - state.peers.get(peer_id).map_or(false, |peer| { + state.peers.get(peer_id).is_some_and(|peer| { matches!( &peer.status, P2pPeerStatus::Connecting(P2pConnectionState::Incoming( @@ -115,7 +115,7 @@ impl redux::EnablingCondition for P2pConnectionIncomingAction { }) } P2pConnectionIncomingAction::AnswerSdpCreateError { peer_id, .. } => { - state.peers.get(peer_id).map_or(false, |peer| { + state.peers.get(peer_id).is_some_and(|peer| { matches!( &peer.status, P2pPeerStatus::Connecting(P2pConnectionState::Incoming( @@ -125,7 +125,7 @@ impl redux::EnablingCondition for P2pConnectionIncomingAction { }) } P2pConnectionIncomingAction::AnswerSdpCreateSuccess { peer_id, .. } => { - state.peers.get(peer_id).map_or(false, |peer| { + state.peers.get(peer_id).is_some_and(|peer| { matches!( &peer.status, P2pPeerStatus::Connecting(P2pConnectionState::Incoming( @@ -135,7 +135,7 @@ impl redux::EnablingCondition for P2pConnectionIncomingAction { }) } P2pConnectionIncomingAction::AnswerReady { peer_id, .. 
} => { - state.peers.get(peer_id).map_or(false, |peer| { + state.peers.get(peer_id).is_some_and(|peer| { matches!( &peer.status, P2pPeerStatus::Connecting(P2pConnectionState::Incoming( @@ -145,7 +145,7 @@ impl redux::EnablingCondition for P2pConnectionIncomingAction { }) } P2pConnectionIncomingAction::AnswerSendSuccess { peer_id } => { - state.peers.get(peer_id).map_or(false, |peer| { + state.peers.get(peer_id).is_some_and(|peer| { matches!( &peer.status, P2pPeerStatus::Connecting(P2pConnectionState::Incoming( @@ -155,7 +155,7 @@ impl redux::EnablingCondition for P2pConnectionIncomingAction { }) } P2pConnectionIncomingAction::FinalizePending { peer_id } => { - state.peers.get(peer_id).map_or(false, |peer| { + state.peers.get(peer_id).is_some_and(|peer| { matches!( &peer.status, P2pPeerStatus::Connecting(P2pConnectionState::Incoming( @@ -165,7 +165,7 @@ impl redux::EnablingCondition for P2pConnectionIncomingAction { }) } P2pConnectionIncomingAction::FinalizeError { peer_id, .. } => { - state.peers.get(peer_id).map_or(false, |peer| { + state.peers.get(peer_id).is_some_and(|peer| { matches!( &peer.status, P2pPeerStatus::Connecting(P2pConnectionState::Incoming( @@ -175,7 +175,7 @@ impl redux::EnablingCondition for P2pConnectionIncomingAction { }) } P2pConnectionIncomingAction::FinalizeSuccess { peer_id, .. 
} => { - state.peers.get(peer_id).map_or(false, |peer| { + state.peers.get(peer_id).is_some_and(|peer| { matches!( &peer.status, P2pPeerStatus::Connecting(P2pConnectionState::Incoming( @@ -188,11 +188,11 @@ impl redux::EnablingCondition for P2pConnectionIncomingAction { .peers .get(peer_id) .and_then(|peer| peer.status.as_connecting()?.as_incoming()) - .map_or(false, |s| s.is_timed_out(time, &state.config.timeouts)), + .is_some_and(|s| s.is_timed_out(time, &state.config.timeouts)), P2pConnectionIncomingAction::Error { peer_id, error } => state .peers .get(peer_id) - .map_or(false, |peer| match &peer.status { + .is_some_and(|peer| match &peer.status { P2pPeerStatus::Connecting(P2pConnectionState::Incoming(s)) => match error { P2pConnectionIncomingError::SdpCreateError(_) => { matches!(s, P2pConnectionIncomingState::AnswerSdpCreatePending { .. }) @@ -208,7 +208,7 @@ impl redux::EnablingCondition for P2pConnectionIncomingAction { _ => false, }), P2pConnectionIncomingAction::Success { peer_id } => { - state.peers.get(peer_id).map_or(false, |peer| { + state.peers.get(peer_id).is_some_and(|peer| { matches!( &peer.status, P2pPeerStatus::Connecting(P2pConnectionState::Incoming( @@ -222,7 +222,7 @@ impl redux::EnablingCondition for P2pConnectionIncomingAction { } P2pConnectionIncomingAction::Libp2pReceived { peer_id, .. 
} => { cfg!(feature = "p2p-libp2p") - && state.peers.get(peer_id).map_or(false, |peer| { + && state.peers.get(peer_id).is_some_and(|peer| { matches!( &peer.status, P2pPeerStatus::Connecting(P2pConnectionState::Incoming( diff --git a/p2p/src/connection/incoming/p2p_connection_incoming_state.rs b/p2p/src/connection/incoming/p2p_connection_incoming_state.rs index 108a83cf7c..638692411f 100644 --- a/p2p/src/connection/incoming/p2p_connection_incoming_state.rs +++ b/p2p/src/connection/incoming/p2p_connection_incoming_state.rs @@ -1,5 +1,6 @@ use std::net::SocketAddr; +use malloc_size_of_derive::MallocSizeOf; use redux::Timestamp; use serde::{Deserialize, Serialize}; @@ -9,21 +10,24 @@ use crate::{webrtc, P2pTimeouts}; use super::IncomingSignalingMethod; -#[derive(Serialize, Deserialize, Debug, Clone)] +#[derive(Serialize, Deserialize, Debug, Clone, MallocSizeOf)] pub enum P2pConnectionIncomingState { Init { + #[ignore_malloc_size_of = "doesn't allocate"] time: redux::Timestamp, signaling: IncomingSignalingMethod, offer: Box, rpc_id: Option, }, AnswerSdpCreatePending { + #[ignore_malloc_size_of = "doesn't allocate"] time: redux::Timestamp, signaling: IncomingSignalingMethod, offer: Box, rpc_id: Option, }, AnswerSdpCreateSuccess { + #[ignore_malloc_size_of = "doesn't allocate"] time: redux::Timestamp, signaling: IncomingSignalingMethod, offer: Box, @@ -31,6 +35,7 @@ pub enum P2pConnectionIncomingState { rpc_id: Option, }, AnswerReady { + #[ignore_malloc_size_of = "doesn't allocate"] time: redux::Timestamp, signaling: IncomingSignalingMethod, offer: Box, @@ -38,6 +43,7 @@ pub enum P2pConnectionIncomingState { rpc_id: Option, }, AnswerSendSuccess { + #[ignore_malloc_size_of = "doesn't allocate"] time: redux::Timestamp, signaling: IncomingSignalingMethod, offer: Box, @@ -45,6 +51,7 @@ pub enum P2pConnectionIncomingState { rpc_id: Option, }, FinalizePending { + #[ignore_malloc_size_of = "doesn't allocate"] time: redux::Timestamp, signaling: IncomingSignalingMethod, offer: 
Box, @@ -52,6 +59,7 @@ pub enum P2pConnectionIncomingState { rpc_id: Option, }, FinalizeSuccess { + #[ignore_malloc_size_of = "doesn't allocate"] time: redux::Timestamp, signaling: IncomingSignalingMethod, offer: Box, @@ -59,11 +67,13 @@ pub enum P2pConnectionIncomingState { rpc_id: Option, }, Error { + #[ignore_malloc_size_of = "doesn't allocate"] time: redux::Timestamp, error: P2pConnectionIncomingError, rpc_id: Option, }, Success { + #[ignore_malloc_size_of = "doesn't allocate"] time: redux::Timestamp, signaling: IncomingSignalingMethod, offer: Box, @@ -71,11 +81,15 @@ pub enum P2pConnectionIncomingState { rpc_id: Option, }, FinalizePendingLibp2p { + #[ignore_malloc_size_of = "doesn't allocate"] addr: SocketAddr, + #[with_malloc_size_of_func = "measurement::socket_vec"] close_duplicates: Vec, + #[ignore_malloc_size_of = "doesn't allocate"] time: redux::Timestamp, }, Libp2pReceived { + #[ignore_malloc_size_of = "doesn't allocate"] time: redux::Timestamp, }, } @@ -121,7 +135,7 @@ impl P2pConnectionIncomingState { } } -#[derive(Serialize, Deserialize, Debug, Clone, thiserror::Error)] +#[derive(Serialize, Deserialize, Debug, Clone, thiserror::Error, MallocSizeOf)] pub enum P2pConnectionIncomingError { #[error("error creating SDP: {0}")] SdpCreateError(String), @@ -132,3 +146,13 @@ pub enum P2pConnectionIncomingError { #[error("timeout error")] Timeout, } + +mod measurement { + use std::{mem, net::SocketAddr}; + + use malloc_size_of::MallocSizeOfOps; + + pub fn socket_vec(val: &Vec, _ops: &mut MallocSizeOfOps) -> usize { + val.capacity() * mem::size_of::() + } +} diff --git a/p2p/src/connection/incoming_effectful/p2p_connection_incoming_effectful_effects.rs b/p2p/src/connection/incoming_effectful/p2p_connection_incoming_effectful_effects.rs index 6b7dc37a9c..0615afc9aa 100644 --- a/p2p/src/connection/incoming_effectful/p2p_connection_incoming_effectful_effects.rs +++ b/p2p/src/connection/incoming_effectful/p2p_connection_incoming_effectful_effects.rs @@ -36,7 +36,7 
@@ impl P2pConnectionIncomingEffectfulAction { if store .service() .auth_decrypt(&other_pub_key, auth) - .map_or(false, |remote_auth| remote_auth == expected_auth) + .is_some_and(|remote_auth| remote_auth == expected_auth) { store.dispatch(P2pConnectionIncomingAction::Success { peer_id }); } else { diff --git a/p2p/src/connection/outgoing/mod.rs b/p2p/src/connection/outgoing/mod.rs index 58dbb7d75d..1b0cfeed66 100644 --- a/p2p/src/connection/outgoing/mod.rs +++ b/p2p/src/connection/outgoing/mod.rs @@ -503,3 +503,17 @@ impl TryFrom<&multiaddr::Multiaddr> for P2pConnectionOutgoingInitLibp2pOpts { }) } } + +mod measurement { + use malloc_size_of::{MallocSizeOf, MallocSizeOfOps}; + + use super::P2pConnectionOutgoingInitOpts; + + // `Host` may contain `String` which allocates + // but hostname usually small, compared to `String` container size 24 bytes + impl MallocSizeOf for P2pConnectionOutgoingInitOpts { + fn size_of(&self, _ops: &mut MallocSizeOfOps) -> usize { + 0 + } + } +} diff --git a/p2p/src/connection/outgoing/p2p_connection_outgoing_actions.rs b/p2p/src/connection/outgoing/p2p_connection_outgoing_actions.rs index ae50f9b742..dcc3b1cee9 100644 --- a/p2p/src/connection/outgoing/p2p_connection_outgoing_actions.rs +++ b/p2p/src/connection/outgoing/p2p_connection_outgoing_actions.rs @@ -103,62 +103,62 @@ impl redux::EnablingCondition for P2pConnectionOutgoingAction { } P2pConnectionOutgoingAction::Reconnect { opts, .. } => { !state.already_has_min_peers() - && state.peers.get(opts.peer_id()).map_or(false, |peer| { + && state.peers.get(opts.peer_id()).is_some_and( |peer| { peer.can_reconnect(time, &state.config.timeouts) }) } P2pConnectionOutgoingAction::OfferSdpCreatePending { peer_id } => state .peers .get(peer_id) - .map_or(false, |peer| matches!(&peer.status, P2pPeerStatus::Connecting(P2pConnectionState::Outgoing( + .is_some_and( |peer| matches!(&peer.status, P2pPeerStatus::Connecting(P2pConnectionState::Outgoing( P2pConnectionOutgoingState::Init { .. 
}, )))), P2pConnectionOutgoingAction::OfferSdpCreateError { peer_id, .. } => state .peers .get(peer_id) - .map_or(false, |peer| matches!(&peer.status, P2pPeerStatus::Connecting(P2pConnectionState::Outgoing( + .is_some_and( |peer| matches!(&peer.status, P2pPeerStatus::Connecting(P2pConnectionState::Outgoing( P2pConnectionOutgoingState::OfferSdpCreatePending { .. }, )))), P2pConnectionOutgoingAction::OfferSdpCreateSuccess { peer_id, .. } => state .peers .get(peer_id) - .map_or(false, |peer| matches!(&peer.status, P2pPeerStatus::Connecting(P2pConnectionState::Outgoing( + .is_some_and( |peer| matches!(&peer.status, P2pPeerStatus::Connecting(P2pConnectionState::Outgoing( P2pConnectionOutgoingState::OfferSdpCreatePending { .. }, )))), P2pConnectionOutgoingAction::OfferReady { peer_id, .. } => state .peers .get(peer_id) - .map_or(false, |peer| matches!(&peer.status, P2pPeerStatus::Connecting(P2pConnectionState::Outgoing( + .is_some_and( |peer| matches!(&peer.status, P2pPeerStatus::Connecting(P2pConnectionState::Outgoing( P2pConnectionOutgoingState::OfferSdpCreateSuccess { .. }, )))), P2pConnectionOutgoingAction::OfferSendSuccess { peer_id } => state .peers .get(peer_id) - .map_or(false, |peer| matches!(&peer.status, P2pPeerStatus::Connecting(P2pConnectionState::Outgoing( + .is_some_and( |peer| matches!(&peer.status, P2pPeerStatus::Connecting(P2pConnectionState::Outgoing( P2pConnectionOutgoingState::OfferReady { .. }, )))), P2pConnectionOutgoingAction::AnswerRecvPending { peer_id } => state .peers .get(peer_id) - .map_or(false, |peer| matches!(&peer.status, P2pPeerStatus::Connecting(P2pConnectionState::Outgoing( + .is_some_and( |peer| matches!(&peer.status, P2pPeerStatus::Connecting(P2pConnectionState::Outgoing( P2pConnectionOutgoingState::OfferSendSuccess { .. }, )))), P2pConnectionOutgoingAction::AnswerRecvError { peer_id, .. 
} => state .peers .get(peer_id) - .map_or(false, |peer| matches!(&peer.status, P2pPeerStatus::Connecting(P2pConnectionState::Outgoing( + .is_some_and( |peer| matches!(&peer.status, P2pPeerStatus::Connecting(P2pConnectionState::Outgoing( P2pConnectionOutgoingState::AnswerRecvPending { .. }, )))), P2pConnectionOutgoingAction::AnswerRecvSuccess { peer_id, .. } => state .peers .get(peer_id) - .map_or(false, |peer| matches!(&peer.status, P2pPeerStatus::Connecting(P2pConnectionState::Outgoing( + .is_some_and( |peer| matches!(&peer.status, P2pPeerStatus::Connecting(P2pConnectionState::Outgoing( P2pConnectionOutgoingState::AnswerRecvPending { .. }, )))), P2pConnectionOutgoingAction::FinalizePending { peer_id } => state .peers .get(peer_id) - .map_or(false, |peer| matches!(&peer.status, P2pPeerStatus::Connecting(P2pConnectionState::Outgoing(v)) if match v { + .is_some_and( |peer| matches!(&peer.status, P2pPeerStatus::Connecting(P2pConnectionState::Outgoing(v)) if match v { P2pConnectionOutgoingState::Init { opts, .. } => opts.is_libp2p(), P2pConnectionOutgoingState::AnswerRecvSuccess { .. } => true, _ => false, @@ -166,25 +166,25 @@ impl redux::EnablingCondition for P2pConnectionOutgoingAction { P2pConnectionOutgoingAction::FinalizeError { peer_id, .. } => state .peers .get(peer_id) - .map_or(false, |peer| matches!(&peer.status, P2pPeerStatus::Connecting(P2pConnectionState::Outgoing( + .is_some_and( |peer| matches!(&peer.status, P2pPeerStatus::Connecting(P2pConnectionState::Outgoing( P2pConnectionOutgoingState::FinalizePending { .. }, )))), P2pConnectionOutgoingAction::FinalizeSuccess { peer_id, remote_auth: auth } => state .peers .get(peer_id) .filter(|p| auth.is_some() || p.is_libp2p()) - .map_or(false, |peer| matches!(&peer.status, P2pPeerStatus::Connecting(P2pConnectionState::Outgoing( + .is_some_and( |peer| matches!(&peer.status, P2pPeerStatus::Connecting(P2pConnectionState::Outgoing( P2pConnectionOutgoingState::FinalizePending { .. 
}, )))), P2pConnectionOutgoingAction::Timeout { peer_id } => state .peers .get(peer_id) .and_then(|peer| peer.status.as_connecting()?.as_outgoing()) - .map_or(false, |s| s.is_timed_out(time, &state.config.timeouts)), + .is_some_and( |s| s.is_timed_out(time, &state.config.timeouts)), P2pConnectionOutgoingAction::Error { peer_id, error } => state .peers .get(peer_id) - .map_or(false, |peer| match &peer.status { + .is_some_and( |peer| match &peer.status { P2pPeerStatus::Connecting(P2pConnectionState::Outgoing(s)) => match error { P2pConnectionOutgoingError::SdpCreateError(_) => { matches!(s, P2pConnectionOutgoingState::OfferSdpCreatePending { .. }) @@ -208,7 +208,7 @@ impl redux::EnablingCondition for P2pConnectionOutgoingAction { state .peers .get(peer_id) - .map_or(false, |peer| matches!(&peer.status, P2pPeerStatus::Connecting(P2pConnectionState::Outgoing( + .is_some_and( |peer| matches!(&peer.status, P2pPeerStatus::Connecting(P2pConnectionState::Outgoing( P2pConnectionOutgoingState::FinalizeSuccess { .. 
}, )))) } diff --git a/p2p/src/connection/outgoing/p2p_connection_outgoing_state.rs b/p2p/src/connection/outgoing/p2p_connection_outgoing_state.rs index 385ba82dff..2161ba6bc7 100644 --- a/p2p/src/connection/outgoing/p2p_connection_outgoing_state.rs +++ b/p2p/src/connection/outgoing/p2p_connection_outgoing_state.rs @@ -1,3 +1,4 @@ +use malloc_size_of_derive::MallocSizeOf; use redux::{Callback, Timestamp}; use serde::{Deserialize, Serialize}; @@ -7,78 +8,98 @@ use crate::{connection::RejectionReason, webrtc, P2pTimeouts, PeerId}; use super::P2pConnectionOutgoingInitOpts; -#[derive(Serialize, Deserialize, Debug, Clone)] +#[derive(Serialize, Deserialize, Debug, Clone, MallocSizeOf)] pub enum P2pConnectionOutgoingState { Init { + #[ignore_malloc_size_of = "doesn't allocate"] time: Timestamp, opts: P2pConnectionOutgoingInitOpts, rpc_id: Option, + #[ignore_malloc_size_of = "negligible"] on_success: Option)>>, }, OfferSdpCreatePending { + #[ignore_malloc_size_of = "doesn't allocate"] time: Timestamp, opts: P2pConnectionOutgoingInitOpts, rpc_id: Option, + #[ignore_malloc_size_of = "negligible"] on_success: Option)>>, }, OfferSdpCreateSuccess { + #[ignore_malloc_size_of = "doesn't allocate"] time: Timestamp, opts: P2pConnectionOutgoingInitOpts, sdp: String, rpc_id: Option, + #[ignore_malloc_size_of = "negligible"] on_success: Option)>>, }, OfferReady { + #[ignore_malloc_size_of = "doesn't allocate"] time: Timestamp, opts: P2pConnectionOutgoingInitOpts, offer: Box, rpc_id: Option, + #[ignore_malloc_size_of = "negligible"] on_success: Option)>>, }, OfferSendSuccess { + #[ignore_malloc_size_of = "doesn't allocate"] time: Timestamp, opts: P2pConnectionOutgoingInitOpts, offer: Box, rpc_id: Option, + #[ignore_malloc_size_of = "negligible"] on_success: Option)>>, }, AnswerRecvPending { + #[ignore_malloc_size_of = "doesn't allocate"] time: Timestamp, opts: P2pConnectionOutgoingInitOpts, offer: Box, rpc_id: Option, + #[ignore_malloc_size_of = "negligible"] on_success: Option)>>, }, 
AnswerRecvSuccess { + #[ignore_malloc_size_of = "doesn't allocate"] time: Timestamp, opts: P2pConnectionOutgoingInitOpts, offer: Box, answer: Box, rpc_id: Option, + #[ignore_malloc_size_of = "negligible"] on_success: Option)>>, }, FinalizePending { + #[ignore_malloc_size_of = "doesn't allocate"] time: Timestamp, opts: P2pConnectionOutgoingInitOpts, offer: Option>, answer: Option>, rpc_id: Option, + #[ignore_malloc_size_of = "negligible"] on_success: Option)>>, }, FinalizeSuccess { + #[ignore_malloc_size_of = "doesn't allocate"] time: Timestamp, opts: P2pConnectionOutgoingInitOpts, offer: Option>, answer: Option>, rpc_id: Option, + #[ignore_malloc_size_of = "negligible"] on_success: Option)>>, }, Error { + #[ignore_malloc_size_of = "doesn't allocate"] time: Timestamp, error: P2pConnectionOutgoingError, rpc_id: Option, }, Success { + #[ignore_malloc_size_of = "doesn't allocate"] time: Timestamp, offer: Option>, answer: Option>, @@ -128,7 +149,7 @@ impl P2pConnectionOutgoingState { } } -#[derive(Serialize, Deserialize, Debug, Clone, thiserror::Error)] +#[derive(Serialize, Deserialize, Debug, Clone, thiserror::Error, MallocSizeOf)] pub enum P2pConnectionOutgoingError { #[error("error creating SDP: {0}")] SdpCreateError(String), diff --git a/p2p/src/connection/outgoing_effectful/p2p_connection_outgoing_effectful_effects.rs b/p2p/src/connection/outgoing_effectful/p2p_connection_outgoing_effectful_effects.rs index 8fd75164fc..c01c82b64c 100644 --- a/p2p/src/connection/outgoing_effectful/p2p_connection_outgoing_effectful_effects.rs +++ b/p2p/src/connection/outgoing_effectful/p2p_connection_outgoing_effectful_effects.rs @@ -77,7 +77,7 @@ impl P2pConnectionOutgoingEffectfulAction { if store .service() .auth_decrypt(&other_pub_key, auth) - .map_or(false, |remote_auth| remote_auth == expected_auth) + .is_some_and(|remote_auth| remote_auth == expected_auth) { store.dispatch(P2pConnectionOutgoingAction::Success { peer_id }); } else { diff --git 
a/p2p/src/connection/p2p_connection_state.rs b/p2p/src/connection/p2p_connection_state.rs index 30046f7ccb..f3aa46c002 100644 --- a/p2p/src/connection/p2p_connection_state.rs +++ b/p2p/src/connection/p2p_connection_state.rs @@ -1,5 +1,7 @@ use openmina_core::requests::RpcId; use redux::Timestamp; + +use malloc_size_of_derive::MallocSizeOf; use serde::{Deserialize, Serialize}; use crate::P2pTimeouts; @@ -7,7 +9,7 @@ use crate::P2pTimeouts; use super::incoming::{P2pConnectionIncomingInitOpts, P2pConnectionIncomingState}; use super::outgoing::{P2pConnectionOutgoingInitOpts, P2pConnectionOutgoingState}; -#[derive(Serialize, Deserialize, Debug, Clone)] +#[derive(Serialize, Deserialize, Debug, Clone, MallocSizeOf)] #[serde(tag = "direction")] pub enum P2pConnectionState { Outgoing(P2pConnectionOutgoingState), diff --git a/p2p/src/disconnection/mod.rs b/p2p/src/disconnection/mod.rs index 29debe3d77..a2fbc649c5 100644 --- a/p2p/src/disconnection/mod.rs +++ b/p2p/src/disconnection/mod.rs @@ -41,4 +41,6 @@ pub enum P2pDisconnectionReason { Timeout, #[error("rpc protocol not supported")] Unsupported, + #[error("invalid pubsub message")] + InvalidMessage, } diff --git a/p2p/src/disconnection/p2p_disconnection_actions.rs b/p2p/src/disconnection/p2p_disconnection_actions.rs index af76511d1f..e8fb710170 100644 --- a/p2p/src/disconnection/p2p_disconnection_actions.rs +++ b/p2p/src/disconnection/p2p_disconnection_actions.rs @@ -41,27 +41,27 @@ impl redux::EnablingCondition for P2pDisconnectionAction { match self { P2pDisconnectionAction::RandomTry => time .checked_sub(state.last_random_disconnection_try) - .map_or(false, |dur| dur >= RANDOM_DISCONNECTION_TRY_FREQUENCY), + .is_some_and(|dur| dur >= RANDOM_DISCONNECTION_TRY_FREQUENCY), P2pDisconnectionAction::Init { peer_id, .. 
} => { - state.peers.get(peer_id).map_or(false, |peer| { + state.peers.get(peer_id).is_some_and(|peer| { !peer.status.is_disconnected_or_disconnecting() && !peer.status.is_error() }) } P2pDisconnectionAction::Finish { peer_id } => { - state.peers.get(peer_id).map_or(false, |peer| { + state.peers.get(peer_id).is_some_and(|peer| { !matches!(peer.status, P2pPeerStatus::Disconnected { .. }) && !peer.status.is_error() }) } P2pDisconnectionAction::PeerClosed { peer_id, .. } => { - state.peers.get(peer_id).map_or(false, |peer| { + state.peers.get(peer_id).is_some_and(|peer| { !peer.status.is_disconnected_or_disconnecting() && !peer.status.is_error() }) } P2pDisconnectionAction::FailedCleanup { peer_id } => state .peers .get(peer_id) - .map_or(false, |peer| !peer.is_libp2p() && peer.status.is_error()), + .is_some_and(|peer| !peer.is_libp2p() && peer.status.is_error()), } } } diff --git a/p2p/src/disconnection_effectful/p2p_disconnection_effectful_actions.rs b/p2p/src/disconnection_effectful/p2p_disconnection_effectful_actions.rs index 5bd7a1e550..33573a22b4 100644 --- a/p2p/src/disconnection_effectful/p2p_disconnection_effectful_actions.rs +++ b/p2p/src/disconnection_effectful/p2p_disconnection_effectful_actions.rs @@ -13,11 +13,10 @@ pub enum P2pDisconnectionEffectfulAction { impl redux::EnablingCondition for P2pDisconnectionEffectfulAction { fn is_enabled(&self, state: &P2pState, _time: redux::Timestamp) -> bool { match self { - P2pDisconnectionEffectfulAction::Init { peer_id } => { - state.peers.get(peer_id).map_or(false, |peer| { - !matches!(peer.status, P2pPeerStatus::Disconnected { .. }) - }) - } + P2pDisconnectionEffectfulAction::Init { peer_id } => state + .peers + .get(peer_id) + .is_some_and(|peer| !matches!(peer.status, P2pPeerStatus::Disconnected { .. 
})), } } } diff --git a/p2p/src/identify/p2p_identify_reducer.rs b/p2p/src/identify/p2p_identify_reducer.rs index 0b887f2268..d492b240ef 100644 --- a/p2p/src/identify/p2p_identify_reducer.rs +++ b/p2p/src/identify/p2p_identify_reducer.rs @@ -111,7 +111,7 @@ impl P2pState { let kad_state: Option<&P2pNetworkKadState> = state.substate().ok(); let protocol = StreamKind::Discovery(DiscoveryAlgorithm::Kademlia1_0_0); - if kad_state.map_or(false, |state| state.request(&peer_id).is_some()) + if kad_state.is_some_and(|state| state.request(&peer_id).is_some()) && info.protocols.contains(&protocol) { dispatcher.push(P2pNetworkKadRequestAction::MuxReady { peer_id, addr }); diff --git a/p2p/src/identity/mod.rs b/p2p/src/identity/mod.rs index 50947d1a9e..55a59e2151 100644 --- a/p2p/src/identity/mod.rs +++ b/p2p/src/identity/mod.rs @@ -6,3 +6,6 @@ pub use public_key::PublicKey; mod secret_key; pub use secret_key::{EncryptableType, SecretKey}; + +mod signature; +pub use signature::Signature; diff --git a/p2p/src/identity/peer_id.rs b/p2p/src/identity/peer_id.rs index da0e04bf95..e5b33061ce 100644 --- a/p2p/src/identity/peer_id.rs +++ b/p2p/src/identity/peer_id.rs @@ -2,11 +2,12 @@ use std::{fmt, str::FromStr}; use binprot::{BinProtRead, BinProtWrite, Nat0}; use libp2p_identity::DecodingError; +use malloc_size_of_derive::MallocSizeOf; use serde::{Deserialize, Serialize}; use super::PublicKey; -#[derive(Ord, PartialOrd, Eq, PartialEq, Clone, Copy)] +#[derive(Ord, PartialOrd, Eq, PartialEq, Clone, Copy, MallocSizeOf)] pub struct PeerId([u64; 4]); impl PeerId { @@ -73,7 +74,7 @@ impl fmt::Debug for PeerId { } } -#[derive(Clone, Debug, PartialEq, thiserror::Error, Serialize, Deserialize)] +#[derive(Clone, Debug, PartialEq, thiserror::Error, Serialize, Deserialize, MallocSizeOf)] pub enum PeerIdFromLibp2pPeerId { #[error("error decoding public key from protobuf: {0}")] Protobuf(String), diff --git a/p2p/src/identity/secret_key.rs b/p2p/src/identity/secret_key.rs index 
9b4b272f4f..7e6ba1ad81 100644 --- a/p2p/src/identity/secret_key.rs +++ b/p2p/src/identity/secret_key.rs @@ -1,12 +1,12 @@ use std::{fmt, path::Path, str::FromStr}; use base64::Engine; -use ed25519_dalek::SigningKey as Ed25519SecretKey; +use ed25519_dalek::{ed25519::signature::SignerMut, SigningKey as Ed25519SecretKey}; use openmina_core::{EncryptedSecretKey, EncryptedSecretKeyFile, EncryptionError}; use rand::{CryptoRng, Rng}; use serde::{Deserialize, Serialize}; -use crate::identity::PublicKey; +use super::{PublicKey, Signature}; #[derive(Clone)] pub struct SecretKey(Ed25519SecretKey); @@ -170,6 +170,23 @@ impl SecretKey { let data: Vec = self.decrypt_raw(other_pk, ciphertext.as_ref())?; serde_json::from_slice(&data).map_err(Box::::from) } + + pub fn sign(&mut self, data: &[u8]) -> Signature { + Signature(self.0.sign(data)) + } + + pub fn libp2p_pubsub_sign(&mut self, msg: &[u8]) -> Signature { + self.sign(&[b"libp2p-pubsub:", msg].concat()) + } + + pub fn libp2p_pubsub_pb_message_sign( + &mut self, + msg: &crate::pb::Message, + ) -> Result { + let mut buf = Vec::new(); + prost::Message::encode(msg, &mut buf)?; + Ok(self.libp2p_pubsub_sign(&buf)) + } } pub trait EncryptableType: Serialize + for<'a> Deserialize<'a> { diff --git a/p2p/src/identity/signature.rs b/p2p/src/identity/signature.rs new file mode 100644 index 0000000000..7b57fa0d3b --- /dev/null +++ b/p2p/src/identity/signature.rs @@ -0,0 +1,128 @@ +use std::{ + fmt, + io::{Read, Write}, + str::FromStr, +}; + +use binprot::{BinProtRead, BinProtWrite}; +use ed25519_dalek::Signature as Ed25519Signature; +use serde::{ + de::{SeqAccess, Visitor}, + Deserialize, Serialize, +}; + +#[derive(Eq, PartialEq, Clone)] +pub struct Signature(pub(super) Ed25519Signature); + +impl Signature { + const BYTE_SIZE: usize = Ed25519Signature::BYTE_SIZE; + + pub fn from_bytes(bytes: [u8; Self::BYTE_SIZE]) -> Self { + Self(Ed25519Signature::from_bytes(&bytes)) + } + + pub fn to_bytes(&self) -> [u8; Self::BYTE_SIZE] { + 
self.0.to_bytes() + } +} + +impl FromStr for Signature { + type Err = hex::FromHexError; + + fn from_str(s: &str) -> Result { + hex::decode(s)? + .try_into() + .map(Self::from_bytes) + .or(Err(hex::FromHexError::InvalidStringLength)) + } +} + +impl fmt::Display for Signature { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}", hex::encode(self.to_bytes())) + } +} + +impl fmt::Debug for Signature { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "Signature({self})") + } +} + +impl From for [u8; Signature::BYTE_SIZE] { + fn from(value: Signature) -> Self { + value.to_bytes() + } +} + +impl Serialize for Signature { + fn serialize(&self, serializer: S) -> Result + where + S: serde::Serializer, + { + if serializer.is_human_readable() { + serializer.serialize_str(&self.to_string()) + } else { + self.to_bytes().serialize(serializer) + } + } +} + +impl<'de> serde::Deserialize<'de> for Signature { + fn deserialize(deserializer: D) -> Result + where + D: serde::Deserializer<'de>, + { + if deserializer.is_human_readable() { + let s: String = Deserialize::deserialize(deserializer)?; + Ok(s.parse().map_err(serde::de::Error::custom)?) + } else { + struct ArrayVisitor; + + impl<'de> Visitor<'de> for ArrayVisitor { + type Value = [u8; Signature::BYTE_SIZE]; + + fn expecting(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "signature bytes({})", Signature::BYTE_SIZE) + } + + #[inline] + fn visit_seq(self, mut a: A) -> Result + where + A: SeqAccess<'de>, + { + let mut bytes: Self::Value = [0; Signature::BYTE_SIZE]; + + for (i, byte) in bytes.iter_mut().enumerate() { + *byte = a + .next_element()? 
+ .ok_or(serde::de::Error::invalid_length(i + 1, &self))?; + } + + Ok(bytes) + } + } + + deserializer + .deserialize_tuple(Self::BYTE_SIZE, ArrayVisitor) + .map(Self::from_bytes) + } + } +} + +impl BinProtWrite for Signature { + fn binprot_write(&self, w: &mut W) -> std::io::Result<()> { + w.write_all(&self.to_bytes()) + } +} + +impl BinProtRead for Signature { + fn binprot_read(r: &mut R) -> Result + where + Self: Sized, + { + let mut buf = [0; Ed25519Signature::BYTE_SIZE]; + r.read_exact(&mut buf)?; + Ok(Self::from_bytes(buf)) + } +} diff --git a/p2p/src/lib.rs b/p2p/src/lib.rs index ed642d2939..17506046a3 100644 --- a/p2p/src/lib.rs +++ b/p2p/src/lib.rs @@ -1,4 +1,8 @@ -///#![feature(trivial_bounds)] +//#![feature(trivial_bounds)] + +extern crate graphannis_malloc_size_of as malloc_size_of; +extern crate graphannis_malloc_size_of_derive as malloc_size_of_derive; + pub mod channels; pub mod connection; pub mod disconnection; @@ -89,7 +93,7 @@ fn is_time_passed( then: redux::Timestamp, duration: Option, ) -> bool { - duration.map_or(false, |d| now.checked_sub(then) >= Some(d)) + duration.is_some_and(|d| now.checked_sub(then) >= Some(d)) } pub trait P2pStateTrait: diff --git a/p2p/src/network/identify/p2p_network_identify_protocol.rs b/p2p/src/network/identify/p2p_network_identify_protocol.rs index 18be379241..12b4010922 100644 --- a/p2p/src/network/identify/p2p_network_identify_protocol.rs +++ b/p2p/src/network/identify/p2p_network_identify_protocol.rs @@ -3,16 +3,21 @@ use crate::{ identity::PublicKey, token::{self, StreamKind}, }; + +use malloc_size_of_derive::MallocSizeOf; use multiaddr::Multiaddr; use quick_protobuf::{BytesReader, MessageRead, MessageWrite, Writer}; use serde::{Deserialize, Serialize}; -#[derive(Clone, Debug, Serialize, Deserialize)] +#[derive(Clone, Debug, Serialize, Deserialize, MallocSizeOf)] pub struct P2pNetworkIdentify { pub protocol_version: Option, pub agent_version: Option, + #[ignore_malloc_size_of = "doesn't allocate"] pub 
public_key: Option, + #[with_malloc_size_of_func = "measurement::multiaddr_vec"] pub listen_addrs: Vec, + #[with_malloc_size_of_func = "measurement::multiaddr_opt"] pub observed_addr: Option, pub protocols: Vec, } @@ -184,3 +189,19 @@ impl From for P2pNetworkIdentifyMultiaddrError { P2pNetworkIdentifyMultiaddrError(value.to_string()) } } + +mod measurement { + use std::mem; + + use malloc_size_of::MallocSizeOfOps; + + use super::Multiaddr; + + pub fn multiaddr_vec(v: &Vec, _ops: &mut MallocSizeOfOps) -> usize { + v.capacity() * mem::size_of::() + v.iter().map(Multiaddr::len).sum::() + } + + pub fn multiaddr_opt(v: &Option, _ops: &mut MallocSizeOfOps) -> usize { + v.as_ref().map_or(0, Multiaddr::len) + } +} diff --git a/p2p/src/network/identify/p2p_network_identify_state.rs b/p2p/src/network/identify/p2p_network_identify_state.rs index bcd1c2dc5c..a8017dba53 100644 --- a/p2p/src/network/identify/p2p_network_identify_state.rs +++ b/p2p/src/network/identify/p2p_network_identify_state.rs @@ -1,7 +1,8 @@ use crate::{network::identify::stream::P2pNetworkIdentifyStreamState, PeerId, StreamId}; +use malloc_size_of_derive::MallocSizeOf; use serde::{Deserialize, Serialize}; -#[derive(Clone, Debug, Default, Serialize, Deserialize)] +#[derive(Clone, Debug, Default, Serialize, Deserialize, MallocSizeOf)] pub struct P2pNetworkIdentifyState { pub streams: crate::network::scheduler::StreamState, } @@ -37,7 +38,7 @@ impl P2pNetworkIdentifyState { pub fn remove_identify_stream_state(&mut self, peer_id: &PeerId, stream_id: &StreamId) -> bool { self.streams .get_mut(peer_id) - .map_or(false, |m| m.remove(stream_id).is_some()) + .is_some_and(|m| m.remove(stream_id).is_some()) } pub fn prune_peer_state(&mut self, peer_id: &PeerId) { diff --git a/p2p/src/network/identify/stream/p2p_network_identify_stream_state.rs b/p2p/src/network/identify/stream/p2p_network_identify_stream_state.rs index 31914479ed..8d5854fbf1 100644 --- 
a/p2p/src/network/identify/stream/p2p_network_identify_stream_state.rs +++ b/p2p/src/network/identify/stream/p2p_network_identify_stream_state.rs @@ -2,6 +2,7 @@ use crate::{ network::identify::{P2pNetworkIdentify, P2pNetworkIdentifyFromMessageError}, P2pNetworkStreamProtobufError, }; +use malloc_size_of_derive::MallocSizeOf; use serde::{Deserialize, Serialize}; #[derive(Clone, Copy, Debug, Serialize, Deserialize)] @@ -20,7 +21,7 @@ impl From for P2pNetworkIdentifyStreamKind { } } -#[derive(Clone, Debug, Default, Serialize, Deserialize)] +#[derive(Clone, Debug, Default, Serialize, Deserialize, MallocSizeOf)] pub enum P2pNetworkIdentifyStreamState { #[default] Default, @@ -38,7 +39,10 @@ pub enum P2pNetworkIdentifyStreamState { data: Box, }, /// Error handling the stream. - Error(P2pNetworkStreamProtobufError), + Error( + #[ignore_malloc_size_of = "error message"] + P2pNetworkStreamProtobufError, + ), } impl P2pNetworkIdentifyStreamState { diff --git a/p2p/src/network/kad/bootstrap/p2p_network_kad_bootstrap_actions.rs b/p2p/src/network/kad/bootstrap/p2p_network_kad_bootstrap_actions.rs index a2f3a11f94..fd417e3f45 100644 --- a/p2p/src/network/kad/bootstrap/p2p_network_kad_bootstrap_actions.rs +++ b/p2p/src/network/kad/bootstrap/p2p_network_kad_bootstrap_actions.rs @@ -39,21 +39,18 @@ impl EnablingCondition for P2pNetworkKadBootstrapAction { .and_then(|discovery_state| discovery_state.bootstrap_state()); match self { P2pNetworkKadBootstrapAction::CreateRequests => { - state.map_or(false, |bootstrap_state| bootstrap_state.requests.len() < 3) + state.is_some_and(|bootstrap_state| bootstrap_state.requests.len() < 3) + } + P2pNetworkKadBootstrapAction::AppendRequest { .. } => { + state.is_some_and(|bootstrap_state| bootstrap_state.peer_id_req_vec.len() < 3) + } + P2pNetworkKadBootstrapAction::FinalizeRequests => { + state.is_some_and(|bootstrap_state| bootstrap_state.peer_id_req_vec.len() <= 3) } - P2pNetworkKadBootstrapAction::AppendRequest { .. 
} => state - .map_or(false, |bootstrap_state| { - bootstrap_state.peer_id_req_vec.len() < 3 - }), - P2pNetworkKadBootstrapAction::FinalizeRequests => state - .map_or(false, |bootstrap_state| { - bootstrap_state.peer_id_req_vec.len() <= 3 - }), P2pNetworkKadBootstrapAction::RequestDone { peer_id, .. } - | P2pNetworkKadBootstrapAction::RequestError { peer_id, .. } => state - .map_or(false, |bootstrap_state| { - bootstrap_state.request(peer_id).is_some() - }), + | P2pNetworkKadBootstrapAction::RequestError { peer_id, .. } => { + state.is_some_and(|bootstrap_state| bootstrap_state.request(peer_id).is_some()) + } } } } diff --git a/p2p/src/network/kad/bootstrap/p2p_network_kad_bootstrap_state.rs b/p2p/src/network/kad/bootstrap/p2p_network_kad_bootstrap_state.rs index 4f5722349c..3c25bd014e 100644 --- a/p2p/src/network/kad/bootstrap/p2p_network_kad_bootstrap_state.rs +++ b/p2p/src/network/kad/bootstrap/p2p_network_kad_bootstrap_state.rs @@ -3,6 +3,7 @@ use std::{ net::SocketAddr, }; +use malloc_size_of_derive::MallocSizeOf; use redux::Timestamp; use serde::{Deserialize, Serialize}; @@ -11,17 +12,19 @@ use crate::{ P2pNetworkKadLatestRequestPeers, PeerId, }; -#[derive(Clone, Debug, Serialize, Deserialize)] +#[derive(Clone, Debug, Serialize, Deserialize, MallocSizeOf)] pub struct P2pNetworkKadBootstrapState { /// Key that is used to request closest peers. Usually self peer_id. pub key: PeerId, /// Kademlia key, `sha265(self.key)`. pub kademlia_key: P2pNetworkKadKey, /// Peers that already been contacted (successfully or not) for FIND_NODE. + #[with_malloc_size_of_func = "measurement::peer_id_map"] pub processed_peers: BTreeSet, /// Ongoing FIND_NODE requests. /// /// TODO: replace with something more lightweight. 
+ #[with_malloc_size_of_func = "measurement::requests_map"] pub requests: BTreeMap, /// Number of successful requests pub successful_requests: usize, @@ -52,24 +55,27 @@ impl P2pNetworkKadBootstrapState { } } -#[derive(Clone, Debug, Serialize, Deserialize)] +#[derive(Clone, Debug, Serialize, Deserialize, MallocSizeOf)] pub struct P2pNetworkKadBoostrapRequestState { /// Address that is used for the current connection. // TODO: generalize to DNS addrs + #[ignore_malloc_size_of = "doesn't allocate"] pub addr: SocketAddr, /// When connection to the peer was initiated. + #[ignore_malloc_size_of = "doesn't allocate"] pub time: Timestamp, /// Addresses yet to be used, if current connection will fail. // TODO: use Multiaddr + #[with_malloc_size_of_func = "measurement::socket_addr_vec"] pub addrs_to_use: Vec, } -#[derive(Clone, Debug, Serialize, Deserialize, Default)] +#[derive(Clone, Debug, Serialize, Deserialize, Default, MallocSizeOf)] pub struct P2pNetworkKadBootstrapStats { pub requests: Vec, } -#[derive(Clone, Debug, Serialize, Deserialize)] +#[derive(Clone, Debug, Serialize, Deserialize, MallocSizeOf)] #[serde(tag = "type")] pub enum P2pNetworkKadBootstrapRequestStat { Ongoing(P2pNetworkKadBootstrapOngoingRequest), @@ -77,27 +83,62 @@ pub enum P2pNetworkKadBootstrapRequestStat { Failed(P2pNetworkKadBootstrapFailedRequest), } -#[derive(Clone, Debug, Serialize, Deserialize)] +#[derive(Clone, Debug, Serialize, Deserialize, MallocSizeOf)] pub struct P2pNetworkKadBootstrapOngoingRequest { pub peer_id: PeerId, pub address: P2pConnectionOutgoingInitOpts, + #[ignore_malloc_size_of = "doesn't allocate"] pub start: Timestamp, } -#[derive(Clone, Debug, Serialize, Deserialize)] +#[derive(Clone, Debug, Serialize, Deserialize, MallocSizeOf)] pub struct P2pNetworkKadBootstrapSuccessfulRequest { pub peer_id: PeerId, pub address: P2pConnectionOutgoingInitOpts, + #[ignore_malloc_size_of = "doesn't allocate"] pub start: Timestamp, + #[ignore_malloc_size_of = "doesn't allocate"] pub 
finish: Timestamp, pub closest_peers: P2pNetworkKadLatestRequestPeers, } -#[derive(Clone, Debug, Serialize, Deserialize)] +#[derive(Clone, Debug, Serialize, Deserialize, MallocSizeOf)] pub struct P2pNetworkKadBootstrapFailedRequest { pub peer_id: PeerId, pub address: P2pConnectionOutgoingInitOpts, + #[ignore_malloc_size_of = "doesn't allocate"] pub start: Timestamp, + #[ignore_malloc_size_of = "doesn't allocate"] pub finish: Timestamp, pub error: String, } + +mod measurement { + use std::{ + collections::{BTreeMap, BTreeSet}, + mem, + net::SocketAddr, + }; + + use malloc_size_of::{MallocSizeOf, MallocSizeOfOps}; + + use super::P2pNetworkKadBoostrapRequestState; + use crate::PeerId; + + pub fn socket_addr_vec(val: &Vec, _ops: &mut MallocSizeOfOps) -> usize { + val.capacity() * mem::size_of::() + } + + pub fn peer_id_map(val: &BTreeSet, _ops: &mut MallocSizeOfOps) -> usize { + val.len() * mem::size_of::() + } + + pub fn requests_map( + val: &BTreeMap, + ops: &mut MallocSizeOfOps, + ) -> usize { + val.iter() + .map(|(k, v)| mem::size_of_val(k) + mem::size_of_val(v) + v.size_of(ops)) + .sum() + } +} diff --git a/p2p/src/network/kad/p2p_network_kad_internals.rs b/p2p/src/network/kad/p2p_network_kad_internals.rs index 8ad5b92b62..fd98066317 100644 --- a/p2p/src/network/kad/p2p_network_kad_internals.rs +++ b/p2p/src/network/kad/p2p_network_kad_internals.rs @@ -6,6 +6,7 @@ use std::{ use crypto_bigint::{ArrayEncoding, Encoding, U256}; use derive_more::From; use libp2p_identity::DecodingError; +use malloc_size_of_derive::MallocSizeOf; use multiaddr::Multiaddr; use openmina_core::bug_condition; use serde::{Deserialize, Serialize}; @@ -63,8 +64,12 @@ mod u256_serde { } /// Kademlia key, sha256 of the node's peer id. 
-#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)] -pub struct P2pNetworkKadKey(#[serde(with = "u256_serde")] U256); +#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize, MallocSizeOf)] +pub struct P2pNetworkKadKey( + #[serde(with = "u256_serde")] + #[ignore_malloc_size_of = "doesn't allocate"] + U256, +); impl P2pNetworkKadKey { pub fn distance(self, rhs: &Self) -> P2pNetworkKadDist { @@ -72,7 +77,7 @@ impl P2pNetworkKadKey { } } -#[derive(Clone, Debug, Serialize, PartialEq, Deserialize, thiserror::Error)] +#[derive(Clone, Debug, Serialize, PartialEq, Deserialize, thiserror::Error, MallocSizeOf)] pub enum P2pNetworkKadKeyError { #[error("decoding error")] DecodingError, @@ -374,11 +379,13 @@ impl Extend for P2pNetworkKadRoutingTable { } } -#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)] +#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize, MallocSizeOf)] pub struct P2pNetworkKadEntry { pub key: P2pNetworkKadKey, pub peer_id: PeerId, + #[with_malloc_size_of_func = "measurement::multiaddr_vec"] addrs: Vec, + #[ignore_malloc_size_of = "doesn't allocate"] pub connection: ConnectionType, } @@ -411,7 +418,7 @@ impl P2pNetworkKadEntry { } } -#[derive(Clone, Debug, Serialize, PartialEq, Deserialize, thiserror::Error)] +#[derive(Clone, Debug, Serialize, PartialEq, Deserialize, thiserror::Error, MallocSizeOf)] pub enum P2pNetworkKadEntryTryFromError { #[error(transparent)] PeerId(#[from] P2pNetworkKademliaPeerIdError), @@ -529,7 +536,7 @@ impl<'a, const K: usize> Iterator for ClosestPeers<'a, K> { } } -#[derive(Clone, Debug, Default, Serialize, Deserialize, From)] +#[derive(Clone, Debug, Default, Serialize, Deserialize, From, MallocSizeOf)] pub struct P2pNetworkKadBucket(Vec); impl P2pNetworkKadBucket { @@ -884,3 +891,22 @@ mod tests { } } } +mod measurement { + use std::mem; + + use malloc_size_of::{MallocSizeOf, MallocSizeOfOps}; + + use super::{Multiaddr, 
P2pNetworkKadBucket, P2pNetworkKadRoutingTable}; + + pub fn multiaddr_vec(v: &Vec, _ops: &mut MallocSizeOfOps) -> usize { + v.capacity() * mem::size_of::() + v.iter().map(Multiaddr::len).sum::() + } + + impl MallocSizeOf for P2pNetworkKadRoutingTable { + fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize { + self.this_key.size_of(ops) + + self.buckets.capacity() * mem::size_of::>() + + self.buckets.iter().map(|b| b.size_of(ops)).sum::() + } + } +} diff --git a/p2p/src/network/kad/p2p_network_kad_protocol.rs b/p2p/src/network/kad/p2p_network_kad_protocol.rs index 2077f72cbd..e2d28968d6 100644 --- a/p2p/src/network/kad/p2p_network_kad_protocol.rs +++ b/p2p/src/network/kad/p2p_network_kad_protocol.rs @@ -1,6 +1,7 @@ use std::borrow::Cow; use libp2p_identity::DecodingError; +use malloc_size_of_derive::MallocSizeOf; use multiaddr::Multiaddr; use serde::{Deserialize, Serialize}; @@ -37,7 +38,7 @@ impl From for super::mod_Message::ConnectionType { } } -#[derive(Clone, Serialize, Deserialize, PartialEq, Eq)] +#[derive(Clone, Serialize, Deserialize, PartialEq, Eq, MallocSizeOf)] pub struct CID(pub Vec); #[cfg(test)] @@ -69,12 +70,12 @@ impl std::fmt::Debug for CID { } } -#[derive(Clone, Debug, Serialize, Deserialize)] +#[derive(Clone, Debug, Serialize, Deserialize, MallocSizeOf)] pub enum P2pNetworkKademliaRpcRequest { FindNode { key: CID }, } -#[derive(Clone, Debug, Serialize, Deserialize)] +#[derive(Clone, Debug, Serialize, Deserialize, MallocSizeOf)] pub enum P2pNetworkKademliaRpcReply { FindNode { closer_peers: Vec, @@ -91,7 +92,7 @@ impl P2pNetworkKademliaRpcRequest { } } -#[derive(Clone, Debug, Serialize, PartialEq, Deserialize, thiserror::Error)] +#[derive(Clone, Debug, Serialize, PartialEq, Deserialize, thiserror::Error, MallocSizeOf)] pub enum P2pNetworkKademliaPeerIdError { #[error("error decoding PeerId from bytes: lenght {0} while expected 32")] Parse(String), @@ -135,7 +136,7 @@ pub enum P2pNetworkKademliaRpcPeerTryFromError { Multiaddr(#[from] 
P2pNetworkKademliaMultiaddrError), } -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, thiserror::Error)] +#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, thiserror::Error, MallocSizeOf)] #[error("error decoding Multiaddr from bytes: {0}")] pub struct P2pNetworkKademliaMultiaddrError(String); @@ -151,7 +152,7 @@ impl From for P2pNetworkKademliaMultiaddrError { } } -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, thiserror::Error)] +#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, thiserror::Error, MallocSizeOf)] pub enum P2pNetworkKademliaRpcFromMessageError { #[error(transparent)] PeerId(#[from] P2pNetworkKademliaPeerIdError), diff --git a/p2p/src/network/kad/p2p_network_kad_reducer.rs b/p2p/src/network/kad/p2p_network_kad_reducer.rs index 50f0327472..de1503ef2d 100644 --- a/p2p/src/network/kad/p2p_network_kad_reducer.rs +++ b/p2p/src/network/kad/p2p_network_kad_reducer.rs @@ -115,9 +115,10 @@ impl super::P2pNetworkKadState { P2pNetworkKadBootstrapState::new(key).map_err(|k| k.to_string())?, ); - if state.bootstrap_state().map_or(false, |bootstrap_state| { - bootstrap_state.requests.len() < super::ALPHA - }) { + if state + .bootstrap_state() + .is_some_and(|bootstrap_state| bootstrap_state.requests.len() < super::ALPHA) + { let dispatcher = state_context.into_dispatcher(); dispatcher.push(P2pNetworkKadBootstrapAction::CreateRequests); } diff --git a/p2p/src/network/kad/p2p_network_kad_state.rs b/p2p/src/network/kad/p2p_network_kad_state.rs index 4b03501fbd..927e5ce77a 100644 --- a/p2p/src/network/kad/p2p_network_kad_state.rs +++ b/p2p/src/network/kad/p2p_network_kad_state.rs @@ -1,5 +1,6 @@ use std::{collections::BTreeMap, net::SocketAddr}; +use malloc_size_of_derive::MallocSizeOf; use redux::Timestamp; use serde::{Deserialize, Serialize}; @@ -9,11 +10,11 @@ use super::{ }; use crate::{ bootstrap::{P2pNetworkKadBootstrapRequestStat, P2pNetworkKadBootstrapStats}, - is_time_passed, P2pTimeouts, PeerId, StreamId, + 
is_time_passed, P2pTimeouts, PeerId, StreamId, StreamState, }; /// Kademlia status. -#[derive(Clone, Debug, Default, Serialize, Deserialize)] +#[derive(Clone, Debug, Default, Serialize, Deserialize, MallocSizeOf)] #[serde(tag = "type")] pub enum P2pNetworkKadStatus { /// Initial state. @@ -24,6 +25,7 @@ pub enum P2pNetworkKadStatus { /// Kademlia is bootstrapped. Bootstrapped { /// Timestamp of the bootstrap. + #[ignore_malloc_size_of = "doesn't allocate"] time: Timestamp, /// Stats for the latest bootstrap process. stats: P2pNetworkKadBootstrapStats, @@ -49,12 +51,13 @@ impl P2pNetworkKadStatus { } } -#[derive(Clone, Debug, Serialize, Deserialize)] +#[derive(Clone, Debug, Serialize, Deserialize, MallocSizeOf)] pub struct P2pNetworkKadState { pub routing_table: P2pNetworkKadRoutingTable, pub latest_request_peers: P2pNetworkKadLatestRequestPeers, + #[with_malloc_size_of_func = "measurement::requests_map"] pub requests: BTreeMap, - pub streams: crate::network::scheduler::StreamState, + pub streams: StreamState, pub status: P2pNetworkKadStatus, pub filter_addrs: bool, } @@ -160,11 +163,20 @@ impl P2pNetworkKadState { pub fn remove_kad_stream_state(&mut self, peer_id: &PeerId, stream_id: &StreamId) -> bool { self.streams .get_mut(peer_id) - .map_or(false, |m| m.remove(stream_id).is_some()) + .is_some_and(|m| m.remove(stream_id).is_some()) } } -#[derive(Clone, Debug, Default, Serialize, Deserialize, derive_more::Deref, derive_more::From)] +#[derive( + Clone, + Debug, + Default, + Serialize, + Deserialize, + derive_more::Deref, + derive_more::From, + MallocSizeOf, +)] pub struct P2pNetworkKadLatestRequestPeers(Vec<(PeerId, P2pNetworkKadLatestRequestPeerKind)>); impl P2pNetworkKadLatestRequestPeers { @@ -189,9 +201,26 @@ impl P2pNetworkKadLatestRequestPeers { } } -#[derive(Clone, Copy, Debug, Serialize, Deserialize, PartialEq, Eq)] +#[derive(Clone, Copy, Debug, Serialize, Deserialize, PartialEq, Eq, MallocSizeOf)] pub enum P2pNetworkKadLatestRequestPeerKind { New, 
Existing, Discarded, } +mod measurement { + use std::{collections::BTreeMap, mem}; + + use malloc_size_of::{MallocSizeOf, MallocSizeOfOps}; + + use super::P2pNetworkKadRequestState; + use crate::PeerId; + + pub fn requests_map( + val: &BTreeMap, + ops: &mut MallocSizeOfOps, + ) -> usize { + val.iter() + .map(|(k, v)| mem::size_of_val(k) + mem::size_of_val(v) + v.size_of(ops)) + .sum() + } +} diff --git a/p2p/src/network/kad/request/p2p_network_kad_request_actions.rs b/p2p/src/network/kad/request/p2p_network_kad_request_actions.rs index 020ad42977..2275615fcf 100644 --- a/p2p/src/network/kad/request/p2p_network_kad_request_actions.rs +++ b/p2p/src/network/kad/request/p2p_network_kad_request_actions.rs @@ -82,7 +82,7 @@ impl EnablingCondition for P2pNetworkKadRequestAction { .network .scheduler .discovery_state() - .map_or(false, |discovery_state| { + .is_some_and(|discovery_state| { // no request for New, some request for anything else. discovery_state.request(self.peer_id()).is_none() == matches!(self, P2pNetworkKadRequestAction::New { .. }) diff --git a/p2p/src/network/kad/request/p2p_network_kad_request_state.rs b/p2p/src/network/kad/request/p2p_network_kad_request_state.rs index 24c71047fe..4c14131554 100644 --- a/p2p/src/network/kad/request/p2p_network_kad_request_state.rs +++ b/p2p/src/network/kad/request/p2p_network_kad_request_state.rs @@ -1,22 +1,24 @@ use std::net::SocketAddr; +use malloc_size_of_derive::MallocSizeOf; use serde::{Deserialize, Serialize}; use crate::{P2pNetworkKadEntry, PeerId, StreamId}; -#[derive(Debug, Clone, Serialize, Deserialize)] +#[derive(Debug, Clone, Serialize, Deserialize, MallocSizeOf)] pub struct P2pNetworkKadRequestState { /// ID of the peer we want to send request to. pub peer_id: PeerId, /// Request key, resulting entries will be those that closest to it. pub key: PeerId, /// Address + #[ignore_malloc_size_of = "doesn't allocate"] pub addr: SocketAddr, /// Request status. 
pub status: P2pNetworkKadRequestStatus, } -#[derive(Debug, Clone, Serialize, Deserialize, Default)] +#[derive(Debug, Clone, Serialize, Deserialize, Default, MallocSizeOf)] pub enum P2pNetworkKadRequestStatus { #[default] Default, diff --git a/p2p/src/network/kad/stream/p2p_network_kad_stream_state.rs b/p2p/src/network/kad/stream/p2p_network_kad_stream_state.rs index 5d98396264..fc421064d9 100644 --- a/p2p/src/network/kad/stream/p2p_network_kad_stream_state.rs +++ b/p2p/src/network/kad/stream/p2p_network_kad_stream_state.rs @@ -1,3 +1,4 @@ +use malloc_size_of_derive::MallocSizeOf; use serde::{Deserialize, Serialize}; use crate::{ @@ -5,7 +6,7 @@ use crate::{ P2pNetworkKademliaRpcRequest, P2pNetworkStreamProtobufError, }; -#[derive(Clone, Debug, Serialize, Deserialize)] +#[derive(Clone, Debug, Serialize, Deserialize, MallocSizeOf)] pub enum P2pNetworkKadStreamState { Incoming(P2pNetworkKadIncomingStreamState), Outgoing(P2pNetworkKadOutgoingStreamState), @@ -22,7 +23,7 @@ impl P2pNetworkKadStreamState { } /// Incoming Kademlia stream is used by a remote peer to perform a Kademlia request. -#[derive(Clone, Debug, Default, Serialize, Deserialize)] +#[derive(Clone, Debug, Default, Serialize, Deserialize, MallocSizeOf)] pub enum P2pNetworkKadIncomingStreamState { #[default] Default, @@ -44,7 +45,8 @@ pub enum P2pNetworkKadIncomingStreamState { /// TODO: use enum for errors. Error(P2pNetworkStreamProtobufError), } -#[derive(Clone, Debug, Default, Serialize, Deserialize)] + +#[derive(Clone, Debug, Default, Serialize, Deserialize, MallocSizeOf)] pub enum P2pNetworkKadOutgoingStreamState { #[default] Default, @@ -63,7 +65,6 @@ pub enum P2pNetworkKadOutgoingStreamState { /// The stream is closed. Closed, /// Error handling the stream. - /// TODO: use enum for errors. 
Error(P2pNetworkStreamProtobufError), } diff --git a/p2p/src/network/mod.rs b/p2p/src/network/mod.rs index 49c1fc9f15..a397fd9847 100644 --- a/p2p/src/network/mod.rs +++ b/p2p/src/network/mod.rs @@ -1,4 +1,5 @@ mod p2p_network_actions; +use malloc_size_of_derive::MallocSizeOf; use serde::Deserialize; use serde::Serialize; @@ -56,12 +57,13 @@ pub use self::data::{Data, DataSized}; mod data { use std::{fmt, ops}; + use malloc_size_of_derive::MallocSizeOf; use serde::{Deserialize, Serialize}; - #[derive(Clone)] + #[derive(Clone, MallocSizeOf)] pub struct DataSized(pub [u8; N]); - #[derive(Clone, Default)] + #[derive(Clone, Default, MallocSizeOf)] pub struct Data(pub Box<[u8]>); impl Data { @@ -185,12 +187,15 @@ mod data { } /// Errors that might happen while handling protobuf messages received via a stream. -#[derive(Debug, Clone, PartialEq, thiserror::Error, Serialize, Deserialize)] +#[derive(Debug, Clone, PartialEq, thiserror::Error, Serialize, Deserialize, MallocSizeOf)] pub enum P2pNetworkStreamProtobufError { #[error("error reading message length")] MessageLength, #[error("message is too long: {0} exceeds {1}")] - Limit(usize, Limit), + Limit( + usize, + #[ignore_malloc_size_of = "doesn't allocate"] Limit, + ), #[error("error reading message: {0}")] Message(String), #[error("error converting protobuf message: {0}")] diff --git a/p2p/src/network/noise/p2p_network_noise_reducer.rs b/p2p/src/network/noise/p2p_network_noise_reducer.rs index 27b06329b9..a08d864c8f 100644 --- a/p2p/src/network/noise/p2p_network_noise_reducer.rs +++ b/p2p/src/network/noise/p2p_network_noise_reducer.rs @@ -242,7 +242,7 @@ impl P2pNetworkNoiseState { .get(&peer_id) .and_then(|peer_state| peer_state.status.as_connecting()) .and_then(|connecting| connecting.as_incoming()) - .map_or(false, |incoming| matches!(incoming, P2pConnectionIncomingState::FinalizePendingLibp2p { addr: a, .. 
} if a == &addr.sock_addr)); + .is_some_and( |incoming| matches!(incoming, P2pConnectionIncomingState::FinalizePendingLibp2p { addr: a, .. } if a == &addr.sock_addr)); if !this_connection_is_kept { return Ok(()); diff --git a/p2p/src/network/noise/p2p_network_noise_state.rs b/p2p/src/network/noise/p2p_network_noise_state.rs index af5c255573..6977d4813a 100644 --- a/p2p/src/network/noise/p2p_network_noise_state.rs +++ b/p2p/src/network/noise/p2p_network_noise_state.rs @@ -1,5 +1,6 @@ use std::collections::VecDeque; +use malloc_size_of_derive::MallocSizeOf; use serde::{Deserialize, Serialize}; use thiserror::Error; use zeroize::Zeroize; @@ -15,8 +16,9 @@ use crate::{identity::PublicKey, PeerId}; use super::super::*; -#[derive(Serialize, Deserialize, Debug, Clone)] +#[derive(Serialize, Deserialize, Debug, Clone, MallocSizeOf)] pub struct P2pNetworkNoiseState { + #[ignore_malloc_size_of = "doesn't allocate"] pub local_pk: PublicKey, pub buffer: Vec, pub incoming_chunks: VecDeque>, @@ -94,7 +96,7 @@ impl P2pNetworkNoiseState { } } -#[derive(Serialize, Deserialize, Debug, Clone)] +#[derive(Serialize, Deserialize, Debug, Clone, MallocSizeOf)] pub enum P2pNetworkNoiseStateInner { Initiator(P2pNetworkNoiseStateInitiator), Responder(P2pNetworkNoiseStateResponder), @@ -105,10 +107,11 @@ pub enum P2pNetworkNoiseStateInner { // noise_hash: DataSized<32>, recv_nonce: u64, send_nonce: u64, + #[ignore_malloc_size_of = "doesn't allocate"] remote_pk: PublicKey, remote_peer_id: PeerId, }, - Error(NoiseError), + Error(#[ignore_malloc_size_of = "error"] NoiseError), } #[derive(Serialize, Deserialize, Debug, Clone)] @@ -598,3 +601,26 @@ mod wrapper { } } } + +mod measurement { + use malloc_size_of::{MallocSizeOf, MallocSizeOfOps}; + + use super::{P2pNetworkNoiseStateInitiator, P2pNetworkNoiseStateResponder}; + + impl MallocSizeOf for P2pNetworkNoiseStateInitiator { + fn size_of(&self, _ops: &mut MallocSizeOfOps) -> usize { + self.payload.len() + } + } + + impl MallocSizeOf for 
P2pNetworkNoiseStateResponder { + fn size_of(&self, _ops: &mut MallocSizeOfOps) -> usize { + match self { + Self::Init { + buffer, payload, .. + } => buffer.capacity() + payload.len(), + _ => 0, + } + } + } +} diff --git a/p2p/src/network/p2p_network_reducer.rs b/p2p/src/network/p2p_network_reducer.rs index c0560551c0..fd4073f6bc 100644 --- a/p2p/src/network/p2p_network_reducer.rs +++ b/p2p/src/network/p2p_network_reducer.rs @@ -83,7 +83,7 @@ impl P2pNetworkState { if state .pending .as_ref() - .map_or(false, |query_header| query_header.id == id) + .is_some_and(|query_header| query_header.id == id) { Some(state) } else { diff --git a/p2p/src/network/p2p_network_state.rs b/p2p/src/network/p2p_network_state.rs index 3f2ca1a84f..5488b69b6a 100644 --- a/p2p/src/network/p2p_network_state.rs +++ b/p2p/src/network/p2p_network_state.rs @@ -72,7 +72,7 @@ impl P2pNetworkState { if state .pending .as_ref() - .map_or(false, |query_header| query_header.id == id) + .is_some_and(|query_header| query_header.id == id) { Some(state) } else { diff --git a/p2p/src/network/pnet/p2p_network_pnet_state.rs b/p2p/src/network/pnet/p2p_network_pnet_state.rs index a0cc44739a..9bd92be19a 100644 --- a/p2p/src/network/pnet/p2p_network_pnet_state.rs +++ b/p2p/src/network/pnet/p2p_network_pnet_state.rs @@ -1,3 +1,4 @@ +use malloc_size_of_derive::MallocSizeOf; use redux::Timestamp; use serde::{Deserialize, Serialize}; use zeroize::Zeroize; @@ -7,8 +8,9 @@ use salsa_simple::XSalsa20; use crate::P2pTimeouts; #[serde_with::serde_as] -#[derive(Serialize, Deserialize, Debug, Clone)] +#[derive(Serialize, Deserialize, Debug, Clone, MallocSizeOf)] pub struct P2pNetworkPnetState { + #[ignore_malloc_size_of = "doesn't allocate"] pub time: Option, #[serde_as(as = "serde_with::hex::Hex")] @@ -63,3 +65,18 @@ pub enum Half { Buffering { buffer: [u8; 24], offset: usize }, Done { cipher: XSalsa20, to_send: Vec }, } + +mod measurement { + use malloc_size_of::{MallocSizeOf, MallocSizeOfOps}; + + use super::*; + + 
impl MallocSizeOf for Half { + fn size_of(&self, _ops: &mut MallocSizeOfOps) -> usize { + match self { + Self::Done { to_send, .. } => to_send.capacity(), + _ => 0, + } + } + } +} diff --git a/p2p/src/network/pubsub/mod.rs b/p2p/src/network/pubsub/mod.rs index 9f9bdbc94e..79ab3d009f 100644 --- a/p2p/src/network/pubsub/mod.rs +++ b/p2p/src/network/pubsub/mod.rs @@ -1,4 +1,4 @@ -mod pb { +pub mod pb { include!(concat!(env!("OUT_DIR"), "/gossipsub.rs")); } @@ -7,7 +7,8 @@ pub use self::p2p_network_pubsub_actions::P2pNetworkPubsubAction; mod p2p_network_pubsub_state; pub use self::p2p_network_pubsub_state::{ - P2pNetworkPubsubClientState, P2pNetworkPubsubClientTopicState, P2pNetworkPubsubState, + P2pNetworkPubsubClientState, P2pNetworkPubsubClientTopicState, P2pNetworkPubsubMessageCacheId, + P2pNetworkPubsubState, }; #[cfg(feature = "p2p-libp2p")] @@ -18,3 +19,46 @@ const TOPIC: &str = "coda/consensus-messages/0.0.1"; pub mod pubsub_effectful; pub use pubsub_effectful::P2pNetworkPubsubEffectfulAction; + +use binprot::BinProtWrite; +use mina_p2p_messages::gossip::GossipNetMessageV2; +use openmina_core::bug_condition; +use sha2::{Digest, Sha256}; + +use crate::identity::SecretKey; + +#[derive(serde::Serialize, serde:: Deserialize, Debug, Clone)] +pub enum BroadcastMessageId { + BlockHash { + hash: mina_p2p_messages::v2::StateHash, + }, + MessageId { + message_id: P2pNetworkPubsubMessageCacheId, + }, +} + +pub(super) fn webrtc_source_sk(message: &GossipNetMessageV2) -> SecretKey { + let mut hasher = Sha256::new(); + if let Err(err) = message.binprot_write(&mut hasher) { + bug_condition!("trying to broadcast message which can't be binprot serialized! 
err: {err}"); + return SecretKey::from_bytes([0; 32]); + } + SecretKey::from_bytes(hasher.finalize().into()) +} + +pub(super) fn webrtc_source_sk_from_bytes(bytes: &[u8]) -> SecretKey { + let mut hasher = Sha256::new(); + hasher.update(bytes); + SecretKey::from_bytes(hasher.finalize().into()) +} + +pub(super) fn encode_message(message: &GossipNetMessageV2) -> std::io::Result> { + let mut buffer = vec![0; 8]; + + message.binprot_write(&mut buffer)?; + + let len = buffer.len() - 8; + buffer[..8].clone_from_slice(&(len as u64).to_le_bytes()); + + Ok(buffer) +} diff --git a/p2p/src/network/pubsub/p2p_network_pubsub_actions.rs b/p2p/src/network/pubsub/p2p_network_pubsub_actions.rs index ee12863b77..63ff30c9d4 100644 --- a/p2p/src/network/pubsub/p2p_network_pubsub_actions.rs +++ b/p2p/src/network/pubsub/p2p_network_pubsub_actions.rs @@ -1,4 +1,4 @@ -use super::pb; +use super::{p2p_network_pubsub_state::P2pNetworkPubsubMessageCacheId, pb, BroadcastMessageId}; use crate::{token::BroadcastAlgorithm, ConnectionAddr, Data, P2pState, PeerId, StreamId}; use mina_p2p_messages::gossip::GossipNetMessageV2; use openmina_core::ActionEvent; @@ -61,19 +61,41 @@ pub enum P2pNetworkPubsubAction { }, /// Clean up temporary states after processing an incoming message. - IncomingMessageCleanup { peer_id: PeerId }, + IncomingMessageCleanup { + peer_id: PeerId, + }, /// Add a peer to the mesh network for a specific topic. - Graft { peer_id: PeerId, topic_id: String }, + Graft { + peer_id: PeerId, + topic_id: String, + }, /// Remove a peer from the mesh network for a specific topic. - Prune { peer_id: PeerId, topic_id: String }, + Prune { + peer_id: PeerId, + topic_id: String, + }, + + /// Rebroadcast message received from WebRTC connection. + /// + /// Expected to be dispatched after the message has been processed, + /// in spite of whether it was received from libp2p or webrtc network. 
+ /// + /// If received from libp2p network, or if we have already broadcasted + /// this message, the message will be in the `mcache` state, + /// in which case the action won't be enabled (will be filtered out). + WebRtcRebroadcast { + message: GossipNetMessageV2, + }, /// Initiate the broadcasting of a message to all subscribed peers. /// /// **Fields:** /// - `message`: The gossip network message to broadcast. - Broadcast { message: GossipNetMessageV2 }, + Broadcast { + message: GossipNetMessageV2, + }, /// Prepare a message for signing before broadcasting. /// @@ -91,32 +113,75 @@ pub enum P2pNetworkPubsubAction { /// An error occured during the signing process. #[action_event(level = warn, fields(display(author), display(topic)))] - SignError { author: PeerId, topic: String }, + SignError { + author: PeerId, + topic: String, + }, /// Finalize the broadcasting of a signed message by attaching the signature. /// /// **Fields:** /// - `signature`: The cryptographic signature of the message. - BroadcastSigned { signature: Data }, + BroadcastSigned { + signature: Data, + }, /// Prepare an outgoing message to send to a specific peer. - OutgoingMessage { peer_id: PeerId }, + OutgoingMessage { + peer_id: PeerId, + }, /// Clear the outgoing message state for a specific peer after sending. - OutgoingMessageClear { peer_id: PeerId }, + OutgoingMessageClear { + peer_id: PeerId, + }, /// An error occured during the sending of an outgoing message. /// /// **Fields:** /// - `msg`: The protobuf message that failed to send. #[action_event(level = warn, fields(display(peer_id), debug(msg)))] - OutgoingMessageError { msg: pb::Rpc, peer_id: PeerId }, + OutgoingMessageError { + msg: pb::Rpc, + peer_id: PeerId, + }, /// Send encoded data over an outgoing stream to a specific peer. /// /// **Fields:** /// - `data`: The encoded data to be sent. 
- OutgoingData { data: Data, peer_id: PeerId }, + OutgoingData { + data: Data, + peer_id: PeerId, + }, + + HandleIncomingMessage { + message: pb::Message, + message_content: GossipNetMessageV2, + peer_id: PeerId, + }, + + ValidateIncomingMessage { + message_id: P2pNetworkPubsubMessageCacheId, + }, + + /// Delete expired messages from state + PruneMessages {}, + + RejectMessage { + message_id: Option, + peer_id: Option, + reason: String, + }, + IgnoreMessage { + message_id: Option, + reason: String, + }, + + // After message is fully validated, broadcast it to other peers + BroadcastValidatedMessage { + message_id: BroadcastMessageId, + }, } impl From for crate::P2pAction { @@ -127,14 +192,32 @@ impl From for crate::P2pAction { impl redux::EnablingCondition for P2pNetworkPubsubAction { fn is_enabled(&self, state: &P2pState, _time: redux::Timestamp) -> bool { + let pubsub = &state.network.scheduler.broadcast_state; match self { - P2pNetworkPubsubAction::OutgoingMessage { peer_id } => state - .network - .scheduler - .broadcast_state + P2pNetworkPubsubAction::OutgoingMessage { peer_id } => pubsub .clients .get(peer_id) - .map_or(false, |s| !s.message_is_empty()), + .is_some_and(|s| !s.message_is_empty()), + P2pNetworkPubsubAction::Prune { peer_id, topic_id } => pubsub + .topics + .get(topic_id) + .is_some_and(|topics| topics.contains_key(peer_id)), + P2pNetworkPubsubAction::WebRtcRebroadcast { message } => { + let source = super::webrtc_source_sk(message) + .public_key() + .peer_id() + .try_into() + .unwrap(); + pubsub + .mcache + .get_message(&P2pNetworkPubsubMessageCacheId { source, seqno: 0 }) + .is_none() + } + P2pNetworkPubsubAction::BroadcastValidatedMessage { message_id } + | P2pNetworkPubsubAction::RejectMessage { + message_id: Some(message_id), + .. 
+ } => pubsub.mcache.contains_broadcast_id(message_id), _ => true, } } diff --git a/p2p/src/network/pubsub/p2p_network_pubsub_reducer.rs b/p2p/src/network/pubsub/p2p_network_pubsub_reducer.rs index 7782ad9965..95b616f33a 100644 --- a/p2p/src/network/pubsub/p2p_network_pubsub_reducer.rs +++ b/p2p/src/network/pubsub/p2p_network_pubsub_reducer.rs @@ -1,23 +1,32 @@ -use std::collections::btree_map::Entry; +use std::{collections::btree_map::Entry, time::Duration}; use binprot::BinProtRead; -use mina_p2p_messages::{gossip, v2}; +use mina_p2p_messages::{ + gossip::{self, GossipNetMessageV2}, + v2::NetworkPoolSnarkPoolDiffVersionedStableV2, +}; use openmina_core::{block::BlockWithHash, bug_condition, fuzz_maybe, fuzzed_maybe, Substate}; use redux::{Dispatcher, Timestamp}; use crate::{ channels::{snark::P2pChannelsSnarkAction, transaction::P2pChannelsTransactionAction}, + disconnection::{P2pDisconnectionAction, P2pDisconnectionReason}, peer::P2pPeerAction, - Data, P2pConfig, P2pNetworkYamuxAction, PeerId, + Data, P2pConfig, P2pNetworkYamuxAction, P2pState, PeerId, }; use super::{ - p2p_network_pubsub_state::P2pNetworkPubsubClientMeshAddingState, + p2p_network_pubsub_state::{ + source_from_message, P2pNetworkPubsubClientMeshAddingState, + P2pNetworkPubsubMessageCacheMessage, + }, pb::{self, Message}, P2pNetworkPubsubAction, P2pNetworkPubsubClientState, P2pNetworkPubsubEffectfulAction, - P2pNetworkPubsubState, TOPIC, + P2pNetworkPubsubMessageCacheId, P2pNetworkPubsubState, TOPIC, }; +const MAX_MESSAGE_KEEP_DURATION: Duration = Duration::from_secs(300); + impl P2pNetworkPubsubState { pub fn reducer( mut state_context: Substate, @@ -29,6 +38,7 @@ impl P2pNetworkPubsubState { { let pubsub_state = state_context.get_substate_mut()?; let (action, meta) = action.split(); + let time = meta.time(); match action { P2pNetworkPubsubAction::NewStream { @@ -173,22 +183,32 @@ impl P2pNetworkPubsubState { message, seen_limit, } => { + // Check that if we can extract source from message, this 
is pre check + if source_from_message(&message).is_err() { + let dispatcher = state_context.into_dispatcher(); + dispatcher.push(P2pNetworkPubsubAction::RejectMessage { + message_id: None, + peer_id: Some(peer_id), + reason: "Invalid originator in message".to_owned(), + }); + return Ok(()); + } + // Check result later to ensure we always dispatch the cleanup action let reduce_incoming_result = - pubsub_state.reduce_incoming_message(peer_id, message, seen_limit); + pubsub_state.reduce_incoming_message(&message, seen_limit); let (dispatcher, global_state) = state_context.into_dispatcher_and_state(); + let p2p_state: &P2pState = global_state.substate()?; + let state: &Self = global_state.substate()?; dispatcher.push(P2pNetworkPubsubAction::IncomingMessageCleanup { peer_id }); - reduce_incoming_result?; - - let state: &Self = global_state.substate()?; - let config: &P2pConfig = global_state.substate()?; + let message_content = reduce_incoming_result?; for (topic_id, map) in &state.topics { let mesh_size = map.values().filter(|s| s.on_mesh()).count(); - let could_accept = mesh_size < config.meshsub.outbound_degree_high; + let could_accept = mesh_size < p2p_state.config.meshsub.outbound_degree_high; if !could_accept { if let Some(topic_state) = map.get(&peer_id) { @@ -200,29 +220,43 @@ impl P2pNetworkPubsubState { } } - if let Err(error) = Self::broadcast(dispatcher, global_state) { - bug_condition!( - "Failure when trying to broadcast incoming pubsub message: {error}" - ); - }; - - if let Some((_, block)) = state.incoming_block.as_ref() { - let best_tip = BlockWithHash::try_new(block.clone())?; - dispatcher.push(P2pPeerAction::BestTipUpdate { peer_id, best_tip }); - } - for (transaction, nonce) in &state.incoming_transactions { - dispatcher.push(P2pChannelsTransactionAction::Libp2pReceived { + // This happens if message was already seen + if let Some(message_content) = message_content { + dispatcher.push(P2pNetworkPubsubAction::HandleIncomingMessage { + message, + 
message_content, peer_id, - transaction: Box::new(transaction.clone()), - nonce: *nonce, }); - } - for (snark, nonce) in &state.incoming_snarks { - dispatcher.push(P2pChannelsSnarkAction::Libp2pReceived { - peer_id, - snark: Box::new(snark.clone()), - nonce: *nonce, + } else { + dispatcher.push(P2pNetworkPubsubAction::IgnoreMessage { + message_id: None, + reason: "Message already seen".to_owned(), }); + }; + + Ok(()) + } + P2pNetworkPubsubAction::HandleIncomingMessage { + message, + message_content, + peer_id, + } => { + let Ok(message_id) = + pubsub_state + .mcache + .put(message, message_content, peer_id, time) + else { + bug_condition!("Unable to add message to `mcache`"); + return Ok(()); + }; + + let (dispatcher, state) = state_context.into_dispatcher_and_state(); + let p2p_state: &P2pState = state.substate()?; + + if let Some(callback) = p2p_state.callbacks.on_p2p_pubsub_message_received.clone() { + dispatcher.push_callback(callback, message_id); + } else { + dispatcher.push(P2pNetworkPubsubAction::ValidateIncomingMessage { message_id }); } Ok(()) } @@ -355,18 +389,74 @@ impl P2pNetworkPubsubState { Ok(()) } P2pNetworkPubsubAction::OutgoingMessageError { .. 
} => Ok(()), - P2pNetworkPubsubAction::Broadcast { message } => { - let mut buffer = vec![0; 8]; + P2pNetworkPubsubAction::WebRtcRebroadcast { message } => { + let data = match super::encode_message(&message) { + Err(err) => { + bug_condition!("binprot serialization error: {err}"); + return Ok(()); + } + Ok(data) => data, + }; - if binprot::BinProtWrite::binprot_write(&message, &mut buffer).is_err() { - bug_condition!("binprot serialization error"); - return Ok(()); - } + let mut source_sk = super::webrtc_source_sk_from_bytes(&data[8..]); + let source_peer_id = source_sk.public_key().peer_id(); + let message_id = P2pNetworkPubsubMessageCacheId { + source: libp2p_identity::PeerId::try_from(source_peer_id).unwrap(), + seqno: 0, + }; + let mut msg = pb::Message { + from: Some(message_id.source.to_bytes().to_vec()), + data: Some(data), + seqno: Some(message_id.seqno.to_be_bytes().to_vec()), + topic: super::TOPIC.to_owned(), + signature: None, + key: None, + }; + + msg.signature = match source_sk.libp2p_pubsub_pb_message_sign(&msg) { + Err(err) => { + bug_condition!("pubsub prost encode error: {err}"); + return Ok(()); + } + Ok(sig) => Some(sig.to_bytes().to_vec()), + }; + + let message_state = match &message { + GossipNetMessageV2::NewState(block) => { + P2pNetworkPubsubMessageCacheMessage::PreValidatedBlockMessage { + block_hash: block.try_hash()?, + message: msg, + peer_id: source_peer_id, + time, + } + } + _ => P2pNetworkPubsubMessageCacheMessage::PreValidated { + message: msg, + peer_id: source_peer_id, + time, + }, + }; + + pubsub_state.mcache.map.insert(message_id, message_state); + + let dispatcher = state_context.into_dispatcher(); + + dispatcher.push(P2pNetworkPubsubAction::BroadcastValidatedMessage { + message_id: super::BroadcastMessageId::MessageId { message_id }, + }); - let len = buffer.len() - 8; - buffer[..8].clone_from_slice(&(len as u64).to_le_bytes()); + Ok(()) + } + P2pNetworkPubsubAction::Broadcast { message } => { + let data = match 
super::encode_message(&message) { + Err(err) => { + bug_condition!("binprot serialization error: {err}"); + return Ok(()); + } + Ok(data) => data, + }; - Self::prepare_to_sign(state_context, buffer) + Self::prepare_to_sign(state_context, data) } P2pNetworkPubsubAction::Sign { seqno, @@ -394,11 +484,7 @@ impl P2pNetworkPubsubState { }; let dispatcher = state_context.into_dispatcher(); - dispatcher.push(P2pNetworkPubsubEffectfulAction::Sign { - author, - topic, - message, - }); + dispatcher.push(P2pNetworkPubsubEffectfulAction::Sign { author, message }); Ok(()) } P2pNetworkPubsubAction::SignError { .. } => { @@ -440,6 +526,167 @@ impl P2pNetworkPubsubState { } Ok(()) } + P2pNetworkPubsubAction::ValidateIncomingMessage { message_id } => { + let Some(message) = pubsub_state.mcache.map.remove(&message_id) else { + bug_condition!("Message with id: {:?} not found", message_id); + return Ok(()); + }; + + let P2pNetworkPubsubMessageCacheMessage::Init { + message, + content, + time, + peer_id, + } = message + else { + bug_condition!( + "`P2pNetworkPubsubAction::ValidateIncomingMessage` called on invalid state" + ); + return Ok(()); + }; + + let new_message_state = match &content { + GossipNetMessageV2::NewState(block) => { + let block_hash = block.try_hash()?; + P2pNetworkPubsubMessageCacheMessage::PreValidatedBlockMessage { + block_hash, + message, + peer_id, + time, + } + } + _ => P2pNetworkPubsubMessageCacheMessage::PreValidated { + message, + peer_id, + time, + }, + }; + pubsub_state + .mcache + .map + .insert(message_id, new_message_state); + + let dispatcher = state_context.into_dispatcher(); + + match content { + GossipNetMessageV2::NewState(block) => { + let best_tip = BlockWithHash::try_new(block.clone())?; + dispatcher.push(P2pPeerAction::BestTipUpdate { peer_id, best_tip }); + return Ok(()); + } + GossipNetMessageV2::TransactionPoolDiff { message, nonce } => { + let nonce = nonce.as_u32(); + for transaction in message.0 { + 
dispatcher.push(P2pChannelsTransactionAction::Libp2pReceived { + peer_id, + transaction: Box::new(transaction), + nonce, + }); + } + } + GossipNetMessageV2::SnarkPoolDiff { + message: NetworkPoolSnarkPoolDiffVersionedStableV2::AddSolvedWork(work), + nonce, + } => { + dispatcher.push(P2pChannelsSnarkAction::Libp2pReceived { + peer_id, + snark: Box::new(work.1.into()), + nonce: nonce.as_u32(), + }); + } + _ => {} + } + + dispatcher.push(P2pNetworkPubsubAction::BroadcastValidatedMessage { + message_id: super::BroadcastMessageId::MessageId { message_id }, + }); + Ok(()) + } + P2pNetworkPubsubAction::BroadcastValidatedMessage { message_id } => { + let Some((mcache_message_id, message)) = + pubsub_state.mcache.get_message_id_and_message(&message_id) + else { + bug_condition!("Message with id: {:?} not found", message_id); + return Ok(()); + }; + let raw_message = message.message().clone(); + let peer_id = message.peer_id(); + + pubsub_state.reduce_incoming_validated_message( + mcache_message_id, + peer_id, + &raw_message, + ); + + let Some((_message_id, message)) = + pubsub_state.mcache.get_message_id_and_message(&message_id) + else { + bug_condition!("Message with id: {:?} not found", message_id); + return Ok(()); + }; + + *message = P2pNetworkPubsubMessageCacheMessage::Validated { + message: raw_message, + peer_id, + time: message.time(), + }; + + let (dispatcher, state) = state_context.into_dispatcher_and_state(); + + Self::broadcast(dispatcher, state) + } + P2pNetworkPubsubAction::PruneMessages {} => { + let messages = pubsub_state + .mcache + .map + .iter() + .filter_map(|(message_id, message)| { + if message.time() + MAX_MESSAGE_KEEP_DURATION > time { + Some(message_id.to_owned()) + } else { + None + } + }) + .collect::>(); + + for message_id in messages { + pubsub_state.mcache.remove_message(message_id); + } + Ok(()) + } + P2pNetworkPubsubAction::RejectMessage { + message_id, + peer_id, + .. 
+ } => { + let mut peer_id = peer_id; + if let Some(message_id) = message_id { + let Some((_message_id, message)) = + pubsub_state.mcache.get_message_id_and_message(&message_id) + else { + bug_condition!("Message not found for id: {:?}", message_id); + return Ok(()); + }; + + if peer_id.is_none() { + peer_id = Some(message.peer_id()); + } + + pubsub_state.mcache.remove_message(_message_id); + } + + let dispatcher = state_context.into_dispatcher(); + + if let Some(peer_id) = peer_id { + dispatcher.push(P2pDisconnectionAction::Init { + peer_id, + reason: P2pDisconnectionReason::InvalidMessage, + }); + } + + Ok(()) + } + P2pNetworkPubsubAction::IgnoreMessage { .. } => Ok(()), } } @@ -468,58 +715,14 @@ impl P2pNetworkPubsubState { Ok(()) } - #[inline(never)] - fn reduce_incoming_message( + fn reduce_incoming_validated_message( &mut self, + message_id: P2pNetworkPubsubMessageCacheId, peer_id: PeerId, - message: Message, - seen_limit: usize, - ) -> Result<(), String> { + message: &Message, + ) { let topic = self.topics.entry(message.topic.clone()).or_default(); - if let Some(signature) = &message.signature { - // skip recently seen message - if !self.seen.contains(signature) { - self.seen.push_back(signature.clone()); - // keep only last `n` to avoid memory leak - if self.seen.len() > seen_limit { - self.seen.pop_front(); - } - } else { - return Ok(()); - } - } - - if let Some(data) = &message.data { - if data.len() > 8 { - let mut slice = &data[8..]; - match gossip::GossipNetMessageV2::binprot_read(&mut slice) { - Ok(gossip::GossipNetMessageV2::NewState(block)) => { - self.incoming_block = Some((peer_id, block)); - } - Ok(gossip::GossipNetMessageV2::TransactionPoolDiff { message, nonce }) => { - let nonce = nonce.as_u32(); - let txs = message.0.into_iter().map(|tx| (tx, nonce)); - self.incoming_transactions.extend(txs); - } - Ok(gossip::GossipNetMessageV2::SnarkPoolDiff { message, nonce }) => { - if let v2::NetworkPoolSnarkPoolDiffVersionedStableV2::AddSolvedWork(work) 
= - message - { - self.incoming_snarks.push((work.1.into(), nonce.as_u32())); - } - } - Err(err) => { - return Err(err.to_string()); - } - } - } - } - - let message_id = self.mcache.put(message.clone()); - - // TODO: this should only happen after the contents have been validated. - // The only validation that has happened so far is that the message can be parsed. self.clients .iter_mut() .filter(|(c, _)| { @@ -531,17 +734,66 @@ impl P2pNetworkPubsubState { return; }; if topic_state.on_mesh() { - state.publish(&message) + state.publish(message) } else { let ctr = state.message.control.get_or_insert_with(Default::default); ctr.ihave.push(pb::ControlIHave { topic_id: Some(message.topic.clone()), - message_ids: message_id.clone().into_iter().collect(), + message_ids: vec![message_id.to_raw_bytes()], }) } }); + } - Ok(()) + /// Processes an incoming message by checking for duplicates and deserializing its contents. + /// + /// This function performs two main operations: + /// 1. Deduplication: Tracks recently seen messages using their signatures to avoid processing duplicates + /// 2. 
Deserialization: Converts valid message data into a `GossipNetMessageV2` structure + /// + /// # Arguments + /// + /// * `message` - The incoming message to process + /// * `seen_limit` - Maximum number of message signatures to keep in the deduplication cache + /// + /// # Returns + /// + /// * `Ok(Some(GossipNetMessageV2))` - Successfully processed and deserialized message + /// * `Ok(None)` - Message was a duplicate (already seen) + /// * `Err(String)` - Error during processing (invalid message format or deserialization failure) + /// + #[inline(never)] + fn reduce_incoming_message( + &mut self, + message: &Message, + seen_limit: usize, + ) -> Result, String> { + let Some(signature) = &message.signature else { + bug_condition!("Validation failed: missing signature"); + return Ok(None); + }; + + // skip recently seen message + if !self.seen.contains(signature) { + self.seen.push_back(signature.clone()); + // keep only last `n` to avoid memory leak + if self.seen.len() > seen_limit { + self.seen.pop_front(); + } + } else { + return Ok(None); + } + + match &message.data { + Some(data) if data.len() > 8 => { + let mut slice = &data[8..]; + Ok(Some( + gossip::GossipNetMessageV2::binprot_read(&mut slice) + .map_err(|e| format!("Invalid `GossipNetMessageV2` message, error: {e}"))?, + )) + } + _ => Err("Invalid message".to_owned()), + } } fn combined_with_pending_buffer<'a>(buffer: &'a mut Vec, data: &'a [u8]) -> &'a [u8] { @@ -644,9 +896,9 @@ impl P2pNetworkPubsubState { // Respond to iwant requests by publishing available messages from the cache. 
for iwant in iwant_requests { for msg_id in &iwant.message_ids { - if let Some(msg) = self.mcache.map.get(msg_id) { + if let Some(msg) = self.mcache.get_message_from_raw_message_id(msg_id) { if let Some(client) = self.clients.get_mut(peer_id) { - client.publish(msg); + client.publish(msg.message()); } } } diff --git a/p2p/src/network/pubsub/p2p_network_pubsub_state.rs b/p2p/src/network/pubsub/p2p_network_pubsub_state.rs index 8ece03753e..e09f9dfda6 100644 --- a/p2p/src/network/pubsub/p2p_network_pubsub_state.rs +++ b/p2p/src/network/pubsub/p2p_network_pubsub_state.rs @@ -1,16 +1,17 @@ -use super::pb; +use super::{pb, BroadcastMessageId}; use crate::{token::BroadcastAlgorithm, ConnectionAddr, PeerId, StreamId}; +use libp2p_identity::ParseError; +use mina_p2p_messages::gossip::GossipNetMessageV2; +use openmina_core::{snark::Snark, transaction::Transaction}; +use redux::Timestamp; +use serde::{Deserialize, Serialize}; use std::{ collections::{BTreeMap, BTreeSet, VecDeque}, - sync::Arc, time::Duration, }; -use mina_p2p_messages::v2; -use openmina_core::{snark::Snark, transaction::Transaction}; -use redux::Timestamp; -use serde::{Deserialize, Serialize}; +use malloc_size_of_derive::MallocSizeOf; pub const IWANT_TIMEOUT_DURATION: Duration = Duration::from_secs(5); @@ -20,9 +21,10 @@ pub const IWANT_TIMEOUT_DURATION: Duration = Duration::from_secs(5); /// message caching, and topic subscriptions. It handles incoming and outgoing /// messages, manages the mesh network topology, and ensures efficient message /// broadcasting across the network. -#[derive(Default, Serialize, Deserialize, Debug, Clone)] +#[derive(Default, Serialize, Deserialize, Debug, Clone, MallocSizeOf)] pub struct P2pNetworkPubsubState { /// State of each connected peer. + #[with_malloc_size_of_func = "measurement::clients"] pub clients: BTreeMap, /// Current message sequence number. 
@@ -44,9 +46,6 @@ pub struct P2pNetworkPubsubState { /// For quick access and reducing redundant data transmission across peers. pub mcache: P2pNetworkPubsubMessageCache, - /// Incoming block from a peer, if any. - pub incoming_block: Option<(PeerId, Arc)>, - /// Incoming transactions from peers along with their nonces. pub incoming_transactions: Vec<(Transaction, u32)>, @@ -54,15 +53,17 @@ pub struct P2pNetworkPubsubState { pub incoming_snarks: Vec<(Snark, u32)>, /// Topics and their subscribed peers. + #[with_malloc_size_of_func = "measurement::topics"] pub topics: BTreeMap>, /// `iwant` requests, tracking the number of times peers have expressed interest in specific messages. pub iwant: VecDeque, } -#[derive(Default, Serialize, Deserialize, Debug, Clone)] +#[derive(Default, Serialize, Deserialize, Debug, Clone, MallocSizeOf)] pub struct P2pNetworkPubsubIwantRequestCount { pub message_id: Vec, + #[with_malloc_size_of_func = "measurement::timestamps"] pub count: Vec, } @@ -71,15 +72,19 @@ impl P2pNetworkPubsubState { self.clients.remove(peer_id); } - pub fn filter_iwant_message_ids(&mut self, message_id: &Vec, timestamp: Timestamp) -> bool { - if self.mcache.map.contains_key(message_id) { + pub fn filter_iwant_message_ids(&mut self, message_id: &[u8], timestamp: Timestamp) -> bool { + if self + .mcache + .get_message_from_raw_message_id(message_id) + .is_some() + { return false; } let message_count = self .iwant .iter_mut() - .find(|message| &message.message_id == message_id); + .find(|message| message.message_id == message_id); match message_count { Some(message) => { @@ -90,7 +95,7 @@ impl P2pNetworkPubsubState { .filter(|time| { timestamp .checked_sub(*time) - .map_or(false, |duration| duration < IWANT_TIMEOUT_DURATION) + .is_some_and(|duration| duration < IWANT_TIMEOUT_DURATION) }) .collect(); @@ -103,7 +108,7 @@ impl P2pNetworkPubsubState { } None => { let message_count = P2pNetworkPubsubIwantRequestCount { - message_id: message_id.to_owned(), + message_id: 
message_id.to_vec(), count: vec![timestamp], }; @@ -123,8 +128,6 @@ impl P2pNetworkPubsubState { self.incoming_transactions.shrink_to(0x20); self.incoming_snarks.shrink_to(0x20); - - self.incoming_block = None; } } @@ -133,7 +136,7 @@ impl P2pNetworkPubsubState { /// This struct maintains essential information about the client's protocol, /// connection details, message buffers, and caching mechanisms. It facilitates /// efficient message handling and broadcasting within the pubsub system. -#[derive(Serialize, Deserialize, Debug, Clone)] +#[derive(Serialize, Deserialize, Debug, Clone, MallocSizeOf)] pub struct P2pNetworkPubsubClientState { /// Broadcast algorithm used for this client. pub protocol: BroadcastAlgorithm, @@ -172,11 +175,11 @@ pub struct P2pNetworkPubsubClientState { impl P2pNetworkPubsubClientState { pub fn publish(&mut self, message: &pb::Message) { - let Some(id) = compute_message_id(message) else { + let Ok(id) = P2pNetworkPubsubMessageCacheId::compute_message_id(message) else { self.message.publish.push(message.clone()); return; }; - if self.cache.map.insert(id.clone()) { + if self.cache.map.insert(id) { self.message.publish.push(message.clone()); } self.cache.queue.push_back(id); @@ -200,54 +203,207 @@ impl P2pNetworkPubsubClientState { #[derive(Default, Serialize, Deserialize, Debug, Clone)] pub struct P2pNetworkPubsubRecentlyPublishCache { - pub map: BTreeSet>, - pub queue: VecDeque>, + pub map: BTreeSet, + pub queue: VecDeque, } // TODO: store blocks, snarks and txs separately #[derive(Default, Serialize, Deserialize, Debug, Clone)] pub struct P2pNetworkPubsubMessageCache { - pub map: BTreeMap, pb::Message>, - pub queue: VecDeque>, + pub map: BTreeMap, + pub queue: VecDeque, +} + +#[derive(Serialize, Deserialize, Debug, Clone)] +pub enum P2pNetworkPubsubMessageCacheMessage { + Init { + message: pb::Message, + content: GossipNetMessageV2, + peer_id: PeerId, + time: Timestamp, + }, + PreValidatedBlockMessage { + block_hash: 
mina_p2p_messages::v2::StateHash, + message: pb::Message, + peer_id: PeerId, + time: Timestamp, + }, + // This is temporary handling for transactions and snark pool + PreValidated { + message: pb::Message, + peer_id: PeerId, + time: Timestamp, + }, + Validated { + message: pb::Message, + peer_id: PeerId, + time: Timestamp, + }, +} + +#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord, Copy)] +pub struct P2pNetworkPubsubMessageCacheId { + pub source: libp2p_identity::PeerId, + pub seqno: u64, +} + +impl P2pNetworkPubsubMessageCacheId { + // TODO: what if wasm32? + // How to test it? + pub fn compute_message_id( + message: &pb::Message, + ) -> Result { + let source = source_from_message(message)?; + + let seqno = message + .seqno + .as_ref() + .and_then(|b| <[u8; 8]>::try_from(b.as_slice()).ok()) + .map(u64::from_be_bytes) + .unwrap_or_default(); + + Ok(P2pNetworkPubsubMessageCacheId { source, seqno }) + } + + pub fn to_raw_bytes(&self) -> Vec { + let mut message_id = self.source.to_base58(); + message_id.push_str(&self.seqno.to_string()); + message_id.into_bytes() + } +} + +impl P2pNetworkPubsubMessageCacheMessage { + pub fn message(&self) -> &pb::Message { + match self { + Self::Init { message, .. } => message, + Self::PreValidated { message, .. } => message, + Self::PreValidatedBlockMessage { message, .. } => message, + Self::Validated { message, .. } => message, + } + } + pub fn time(&self) -> Timestamp { + *match self { + Self::Init { time, .. } => time, + Self::PreValidated { time, .. } => time, + Self::PreValidatedBlockMessage { time, .. } => time, + Self::Validated { time, .. } => time, + } + } + pub fn peer_id(&self) -> PeerId { + *match self { + Self::Init { peer_id, .. } => peer_id, + Self::PreValidated { peer_id, .. } => peer_id, + Self::PreValidatedBlockMessage { peer_id, .. } => peer_id, + Self::Validated { peer_id, .. 
} => peer_id, + } + } } impl P2pNetworkPubsubMessageCache { const CAPACITY: usize = 100; - pub fn put(&mut self, message: pb::Message) -> Option> { - let id = compute_message_id(&message)?; - self.map.insert(id.clone(), message); - self.queue.push_back(id.clone()); + pub fn put( + &mut self, + message: pb::Message, + content: GossipNetMessageV2, + peer_id: PeerId, + time: Timestamp, + ) -> Result { + let id = P2pNetworkPubsubMessageCacheId::compute_message_id(&message)?; + self.map.insert( + id, + P2pNetworkPubsubMessageCacheMessage::Init { + message, + content, + time, + peer_id, + }, + ); + + self.queue.push_back(id); if self.queue.len() > Self::CAPACITY { if let Some(id) = self.queue.pop_front() { self.map.remove(&id); } } - Some(id) + Ok(id) + } + + pub fn get_message(&self, id: &P2pNetworkPubsubMessageCacheId) -> Option<&GossipNetMessageV2> { + let message = self.map.get(id)?; + match message { + P2pNetworkPubsubMessageCacheMessage::Init { content, .. } => Some(content), + _ => None, + } + } + + pub fn contains_broadcast_id(&self, message_id: &BroadcastMessageId) -> bool { + match message_id { + super::BroadcastMessageId::BlockHash { hash } => self + .map + .values() + .any(|message| matches!(message, P2pNetworkPubsubMessageCacheMessage::PreValidatedBlockMessage { block_hash, .. } if block_hash == hash)), + super::BroadcastMessageId::MessageId { message_id } => { + self.map.contains_key(message_id) + } + } + } + + pub fn get_message_id_and_message( + &mut self, + message_id: &BroadcastMessageId, + ) -> Option<( + P2pNetworkPubsubMessageCacheId, + &mut P2pNetworkPubsubMessageCacheMessage, + )> { + match message_id { + super::BroadcastMessageId::BlockHash { hash } => { + self.map + .iter_mut() + .find_map(|(message_id, message)| match message { + P2pNetworkPubsubMessageCacheMessage::PreValidatedBlockMessage { + block_hash, + .. 
+ } if block_hash == hash => Some((*message_id, message)), + _ => None, + }) + } + super::BroadcastMessageId::MessageId { message_id } => self + .map + .get_mut(message_id) + .map(|content| (*message_id, content)), + } + } + + pub fn remove_message(&mut self, message_id: P2pNetworkPubsubMessageCacheId) { + let _ = self.map.remove(&message_id); + if let Some(position) = self.queue.iter().position(|id| id == &message_id) { + self.queue.remove(position); + } + } + + pub fn get_message_from_raw_message_id( + &self, + message_id: &[u8], + ) -> Option<&P2pNetworkPubsubMessageCacheMessage> { + self.map.iter().find_map(|(key, value)| { + if key.to_raw_bytes() == message_id { + Some(value) + } else { + None + } + }) } } -// TODO: what if wasm32? -// How to test it? -pub fn compute_message_id(message: &pb::Message) -> Option> { +pub fn source_from_message(message: &pb::Message) -> Result { let source_bytes = message .from .as_ref() .map(AsRef::as_ref) .unwrap_or(&[0, 1, 0][..]); - let mut source_string = libp2p_identity::PeerId::from_bytes(source_bytes) - .ok()? 
- .to_base58(); - - let sequence_number = message - .seqno - .as_ref() - .and_then(|b| <[u8; 8]>::try_from(b.as_slice()).ok()) - .map(u64::from_be_bytes) - .unwrap_or_default(); - source_string.push_str(&sequence_number.to_string()); - Some(source_string.into_bytes()) + libp2p_identity::PeerId::from_bytes(source_bytes) } #[derive(Default, Serialize, Deserialize, Debug, Clone, PartialEq, Eq, PartialOrd, Ord)] @@ -277,3 +433,54 @@ impl P2pNetworkPubsubClientTopicState { matches!(&self.mesh, P2pNetworkPubsubClientMeshAddingState::Added) } } + +mod measurement { + use malloc_size_of::{MallocSizeOf, MallocSizeOfOps}; + use std::mem; + + use super::*; + + pub fn clients( + val: &BTreeMap, + ops: &mut MallocSizeOfOps, + ) -> usize { + val.values().map(|v| v.size_of(ops)).sum() + } + + pub fn topics( + val: &BTreeMap>, + ops: &mut MallocSizeOfOps, + ) -> usize { + val.iter() + .map(|(k, v)| k.size_of(ops) + v.size_of(ops)) + .sum() + } + + pub fn timestamps(val: &Vec, _ops: &mut MallocSizeOfOps) -> usize { + val.capacity() * mem::size_of::() + } + + impl MallocSizeOf for P2pNetworkPubsubRecentlyPublishCache { + fn size_of(&self, _ops: &mut malloc_size_of::MallocSizeOfOps) -> usize { + let map_size = self.map.len() * size_of::(); + let queue_size = self.queue.capacity() * size_of::(); + map_size + queue_size + } + } + + impl MallocSizeOf for P2pNetworkPubsubMessageCache { + fn size_of(&self, _ops: &mut malloc_size_of::MallocSizeOfOps) -> usize { + let map_size = self.map.len() + * (size_of::() + + size_of::()); + let queue_size = self.queue.capacity() * size_of::(); + map_size + queue_size + } + } + + impl MallocSizeOf for P2pNetworkPubsubClientTopicState { + fn size_of(&self, _ops: &mut MallocSizeOfOps) -> usize { + 0 + } + } +} diff --git a/p2p/src/network/pubsub/pubsub_effectful/p2p_network_pubsub_effectful_actions.rs b/p2p/src/network/pubsub/pubsub_effectful/p2p_network_pubsub_effectful_actions.rs index 3120fa3452..49a7fb8f3c 100644 --- 
a/p2p/src/network/pubsub/pubsub_effectful/p2p_network_pubsub_effectful_actions.rs +++ b/p2p/src/network/pubsub/pubsub_effectful/p2p_network_pubsub_effectful_actions.rs @@ -9,13 +9,8 @@ pub enum P2pNetworkPubsubEffectfulAction { /// /// **Fields:** /// - `author`: The identifier of the peer authoring the message. - /// - `topic`: The topic under which the message is published. /// - `message`: The protobuf message to be signed. - Sign { - author: PeerId, - topic: String, - message: Message, - }, + Sign { author: PeerId, message: Message }, /// Validate a batch of incoming messages from a peer. /// diff --git a/p2p/src/network/pubsub/pubsub_effectful/p2p_network_pubsub_effectful_effects.rs b/p2p/src/network/pubsub/pubsub_effectful/p2p_network_pubsub_effectful_effects.rs index d47bce28ee..028df638b6 100644 --- a/p2p/src/network/pubsub/pubsub_effectful/p2p_network_pubsub_effectful_effects.rs +++ b/p2p/src/network/pubsub/pubsub_effectful/p2p_network_pubsub_effectful_effects.rs @@ -29,14 +29,13 @@ impl P2pNetworkPubsubEffectfulAction { Store::Service: P2pCryptoService, { match self { - P2pNetworkPubsubEffectfulAction::Sign { - author, - topic, - message, - } => { + P2pNetworkPubsubEffectfulAction::Sign { author, message } => { let mut publication = vec![]; if prost::Message::encode(&message, &mut publication).is_err() { - store.dispatch(P2pNetworkPubsubAction::SignError { author, topic }); + store.dispatch(P2pNetworkPubsubAction::SignError { + author, + topic: message.topic, + }); } else { let signature = store.service().sign_publication(&publication).into(); store.dispatch(P2pNetworkPubsubAction::BroadcastSigned { signature }); diff --git a/p2p/src/network/rpc/p2p_network_rpc_state.rs b/p2p/src/network/rpc/p2p_network_rpc_state.rs index 37c127969c..a80c47826c 100644 --- a/p2p/src/network/rpc/p2p_network_rpc_state.rs +++ b/p2p/src/network/rpc/p2p_network_rpc_state.rs @@ -20,14 +20,16 @@ use super::super::*; const HEARTBEAT_INTERVAL: Duration = Duration::from_secs(10); 
#[serde_with::serde_as] -#[derive(Serialize, Deserialize, Debug, Clone)] +#[derive(Serialize, Deserialize, Debug, Clone, MallocSizeOf)] pub struct P2pNetworkRpcState { pub addr: ConnectionAddr, pub stream_id: StreamId, pub last_id: P2pRpcId, + #[ignore_malloc_size_of = "primitive"] pub last_heartbeat_sent: Option, pub pending: Option, #[serde_as(as = "Vec<(_, _)>")] + #[ignore_malloc_size_of = "TODO(vlad)"] pub total_stats: BTreeMap<(CharString, Ver), usize>, pub is_incoming: bool, pub buffer: Vec, @@ -54,12 +56,12 @@ impl P2pNetworkRpcState { pub fn should_send_heartbeat(&self, now: redux::Timestamp) -> bool { self.last_heartbeat_sent.map_or(true, |last_sent| { now.checked_sub(last_sent) - .map_or(false, |dur| dur >= HEARTBEAT_INTERVAL) + .is_some_and(|dur| dur >= HEARTBEAT_INTERVAL) }) } } -#[derive(Serialize, Deserialize, Debug, Clone)] +#[derive(Serialize, Deserialize, Debug, Clone, MallocSizeOf)] pub enum RpcMessage { Handshake, Heartbeat, @@ -104,10 +106,14 @@ impl RpcMessage { } } -#[derive(Debug, Clone, Serialize, Deserialize, thiserror::Error)] +#[derive(Debug, Clone, Serialize, Deserialize, thiserror::Error, MallocSizeOf)] pub enum P2pNetworkRpcError { #[error("error reading binprot message: {0}")] Binprot(String), #[error("message {0} with size {1} exceeds limit of {2}")] - Limit(String, usize, Limit), + Limit( + String, + usize, + #[ignore_malloc_size_of = "primitive"] Limit, + ), } diff --git a/p2p/src/network/scheduler/p2p_network_scheduler_actions.rs b/p2p/src/network/scheduler/p2p_network_scheduler_actions.rs index 45539aa20d..1465090e5f 100644 --- a/p2p/src/network/scheduler/p2p_network_scheduler_actions.rs +++ b/p2p/src/network/scheduler/p2p_network_scheduler_actions.rs @@ -126,11 +126,9 @@ impl redux::EnablingCondition for P2pNetworkSchedulerAction { | P2pNetworkSchedulerAction::IncomingConnectionIsReady { .. } | P2pNetworkSchedulerAction::SelectDone { .. } | P2pNetworkSchedulerAction::SelectError { .. 
} => true, - P2pNetworkSchedulerAction::IncomingDidAccept { addr, .. } => { - addr.as_ref().map_or(false, |addr| { - !state.network.scheduler.connections.contains_key(addr) - }) - } + P2pNetworkSchedulerAction::IncomingDidAccept { addr, .. } => addr + .as_ref() + .is_some_and(|addr| !state.network.scheduler.connections.contains_key(addr)), P2pNetworkSchedulerAction::OutgoingConnect { addr } => state .network .scheduler @@ -145,7 +143,7 @@ impl redux::EnablingCondition for P2pNetworkSchedulerAction { .scheduler .connections .get(addr) - .map_or(false, |conn_state| !conn_state.incoming), + .is_some_and(|conn_state| !conn_state.incoming), P2pNetworkSchedulerAction::IncomingDataDidReceive { addr, .. } | P2pNetworkSchedulerAction::IncomingDataIsReady { addr } | P2pNetworkSchedulerAction::YamuxDidInit { addr, .. } => { @@ -157,22 +155,20 @@ impl redux::EnablingCondition for P2pNetworkSchedulerAction { .scheduler .connections .get(addr) - .map_or(false, |conn_state| conn_state.closed.is_none()), + .is_some_and(|conn_state| conn_state.closed.is_none()), P2pNetworkSchedulerAction::Disconnected { addr, reason } => state .network .scheduler .connections .get(addr) - .map_or(false, |conn_state| { - conn_state.closed.as_ref() == Some(reason) - }), + .is_some_and(|conn_state| conn_state.closed.as_ref() == Some(reason)), // TODO: introduce state for closed connection P2pNetworkSchedulerAction::Prune { addr } => state .network .scheduler .connections .get(addr) - .map_or(false, |conn_state| conn_state.closed.is_some()), + .is_some_and(|conn_state| conn_state.closed.is_some()), P2pNetworkSchedulerAction::PruneStream { peer_id, stream_id } => state .network .scheduler diff --git a/p2p/src/network/scheduler/p2p_network_scheduler_state.rs b/p2p/src/network/scheduler/p2p_network_scheduler_state.rs index 63e51ab04f..6cae923c02 100644 --- a/p2p/src/network/scheduler/p2p_network_scheduler_state.rs +++ b/p2p/src/network/scheduler/p2p_network_scheduler_state.rs @@ -1,8 +1,10 @@ use std::{ 
collections::{BTreeMap, BTreeSet}, net::{IpAddr, SocketAddr}, + ops::{Deref, DerefMut}, }; +use malloc_size_of_derive::MallocSizeOf; use redux::Timestamp; use serde::{Deserialize, Serialize}; @@ -10,13 +12,35 @@ use crate::{disconnection::P2pDisconnectionReason, identity::PublicKey, PeerId}; use super::super::*; -pub type StreamState = BTreeMap>; +#[derive(Serialize, Deserialize, Debug, Clone)] +pub struct StreamState(pub BTreeMap>); + +impl Default for StreamState { + fn default() -> Self { + Self(Default::default()) + } +} + +impl Deref for StreamState { + type Target = BTreeMap>; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl DerefMut for StreamState { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} #[derive(Serialize, Deserialize, PartialEq, PartialOrd, Eq, Ord, Debug, Clone, Copy)] pub struct ConnectionAddr { pub sock_addr: SocketAddr, pub incoming: bool, } + impl std::fmt::Display for ConnectionAddr { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { write!(f, "{} (incoming: {})", self.sock_addr, self.incoming) @@ -77,7 +101,7 @@ impl P2pNetworkSchedulerState { } } -#[derive(Serialize, Deserialize, Debug, Clone)] +#[derive(Serialize, Deserialize, Debug, Clone, MallocSizeOf)] pub struct P2pNetworkConnectionState { pub incoming: bool, pub pnet: P2pNetworkPnetState, @@ -85,7 +109,9 @@ pub struct P2pNetworkConnectionState { pub auth: Option, pub select_mux: P2pNetworkSelectState, pub mux: Option, + #[with_malloc_size_of_func = "measurement::streams_map"] pub streams: BTreeMap, + #[ignore_malloc_size_of = "error"] pub closed: Option, // the number of bytes that peer allowed to send us before yamux is negotiated pub limit: usize, @@ -202,7 +228,7 @@ pub enum P2pNetworkConnectionError { YamuxBadWindowUpdate(StreamId), } -#[derive(Serialize, Deserialize, Debug, Clone)] +#[derive(Serialize, Deserialize, Debug, Clone, MallocSizeOf)] pub enum P2pNetworkAuthState { Noise(P2pNetworkNoiseState), } @@ -215,7 +241,7 @@ 
impl P2pNetworkAuthState { } } -#[derive(Serialize, Deserialize, Debug, Clone)] +#[derive(Serialize, Deserialize, Debug, Clone, MallocSizeOf)] pub enum P2pNetworkConnectionMuxState { Yamux(P2pNetworkYamuxState), } @@ -258,3 +284,62 @@ pub enum P2pNetworkStreamHandlerState { Broadcast, Discovery, } +mod measurement { + use std::mem; + + use malloc_size_of::{MallocSizeOf, MallocSizeOfOps}; + + use super::*; + + pub fn streams_map( + val: &BTreeMap, + ops: &mut MallocSizeOfOps, + ) -> usize { + val.iter() + .map(|(k, v)| mem::size_of_val(k) + mem::size_of_val(v) + v.size_of(ops)) + .sum() + } + + impl MallocSizeOf for StreamState + where + T: MallocSizeOf, + { + fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize { + self.0 + .iter() + .map(|(k, v)| { + mem::size_of_val(k) + + mem::size_of_val(v) + + v.iter() + .map(|(k, v)| { + mem::size_of_val(k) + mem::size_of_val(v) + v.size_of(ops) + }) + .sum::() + }) + .sum() + } + } + + impl MallocSizeOf for ConnectionAddr { + fn size_of(&self, _ops: &mut MallocSizeOfOps) -> usize { + 0 + } + } + + impl MallocSizeOf for P2pNetworkSchedulerState { + fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize { + self.interfaces.len() * mem::size_of::() + + self.listeners.len() * mem::size_of::() + + self + .connections + .iter() + .map(|(k, v)| mem::size_of_val(k) + mem::size_of_val(v) + v.size_of(ops)) + .sum::() + + self.broadcast_state.size_of(ops) + + self.identify_state.size_of(ops) + + self.discovery_state.size_of(ops) + + self.rpc_incoming_streams.size_of(ops) + + self.rpc_outgoing_streams.size_of(ops) + } + } +} diff --git a/p2p/src/network/select/p2p_network_select_state.rs b/p2p/src/network/select/p2p_network_select_state.rs index 6829acbf8b..19d1364a45 100644 --- a/p2p/src/network/select/p2p_network_select_state.rs +++ b/p2p/src/network/select/p2p_network_select_state.rs @@ -1,5 +1,6 @@ use std::collections::VecDeque; +use malloc_size_of_derive::MallocSizeOf; use redux::Timestamp; use serde::{Deserialize, Serialize}; use 
token::Token; @@ -8,8 +9,9 @@ use crate::{ConnectionAddr, Data, P2pTimeouts}; use super::*; -#[derive(Default, Serialize, Deserialize, Debug, Clone)] +#[derive(Default, Serialize, Deserialize, Debug, Clone, MallocSizeOf)] pub struct P2pNetworkSelectState { + #[ignore_malloc_size_of = "doesn't allocate"] pub time: Option, pub recv: token::State, pub tokens: VecDeque, @@ -110,7 +112,7 @@ impl P2pNetworkSelectState { } } -#[derive(Serialize, Deserialize, Debug, Clone)] +#[derive(Serialize, Deserialize, Debug, Clone, MallocSizeOf)] pub enum P2pNetworkSelectStateInner { Error(String), Initiator { proposing: token::Protocol }, diff --git a/p2p/src/network/select/token.rs b/p2p/src/network/select/token.rs index a8889bec14..cf02302957 100644 --- a/p2p/src/network/select/token.rs +++ b/p2p/src/network/select/token.rs @@ -1,3 +1,4 @@ +use malloc_size_of_derive::MallocSizeOf; use serde::{Deserialize, Serialize}; use crate::Data; @@ -5,7 +6,7 @@ use crate::Data; const MAX_TOKEN_LENGTH: usize = 256; /// Possible valid token of multistream-select protocol -#[derive(Clone, Serialize, Deserialize, Debug)] +#[derive(Clone, Serialize, Deserialize, Debug, MallocSizeOf)] pub enum Token { Handshake, Na, @@ -78,6 +79,12 @@ pub enum Protocol { Stream(StreamKind), } +impl malloc_size_of::MallocSizeOf for Protocol { + fn size_of(&self, _ops: &mut malloc_size_of::MallocSizeOfOps) -> usize { + 0 + } +} + impl Protocol { pub const fn name(&self) -> &'static [u8] { match self { @@ -144,6 +151,12 @@ pub enum StreamKind { Rpc(RpcAlgorithm), } +impl malloc_size_of::MallocSizeOf for StreamKind { + fn size_of(&self, _ops: &mut malloc_size_of::MallocSizeOfOps) -> usize { + 0 + } +} + impl StreamKind { pub const fn name(&self) -> &'static [u8] { match self { @@ -197,6 +210,12 @@ pub enum BitswapAlgorithm { MinaBitswap1_2_0, } +impl malloc_size_of::MallocSizeOf for BitswapAlgorithm { + fn size_of(&self, _ops: &mut malloc_size_of::MallocSizeOfOps) -> usize { + 0 + } +} + impl BitswapAlgorithm { pub 
const fn name(&self) -> &'static [u8] { match self { @@ -284,6 +303,12 @@ pub enum BroadcastAlgorithm { Meshsub1_1_0, } +impl malloc_size_of::MallocSizeOf for BroadcastAlgorithm { + fn size_of(&self, _ops: &mut malloc_size_of::MallocSizeOfOps) -> usize { + 0 + } +} + impl BroadcastAlgorithm { pub const fn name(&self) -> &'static [u8] { match self { @@ -321,7 +346,7 @@ impl RpcAlgorithm { } } -#[derive(Default, Serialize, Deserialize, Debug, Clone)] +#[derive(Default, Serialize, Deserialize, Debug, Clone, MallocSizeOf)] pub struct State { pub buffer: Vec, } diff --git a/p2p/src/network/yamux/p2p_network_yamux_actions.rs b/p2p/src/network/yamux/p2p_network_yamux_actions.rs index 1e86296d73..f34166d421 100644 --- a/p2p/src/network/yamux/p2p_network_yamux_actions.rs +++ b/p2p/src/network/yamux/p2p_network_yamux_actions.rs @@ -20,7 +20,6 @@ pub enum P2pNetworkYamuxAction { #[action_event(level = trace)] IncomingFrame { addr: ConnectionAddr, - frame: YamuxFrame, }, #[action_event(level = trace)] OutgoingFrame { diff --git a/p2p/src/network/yamux/p2p_network_yamux_reducer.rs b/p2p/src/network/yamux/p2p_network_yamux_reducer.rs index ff8074f88f..e3fbdb72c1 100644 --- a/p2p/src/network/yamux/p2p_network_yamux_reducer.rs +++ b/p2p/src/network/yamux/p2p_network_yamux_reducer.rs @@ -2,26 +2,43 @@ use std::collections::VecDeque; use openmina_core::{bug_condition, fuzz_maybe, fuzzed_maybe, Substate, SubstateAccess}; -use crate::P2pLimits; - -use self::p2p_network_yamux_state::{ - YamuxFlags, YamuxFrame, YamuxFrameInner, YamuxFrameParseError, YamuxSessionError, - YamuxStreamState, +use crate::{ + yamux::p2p_network_yamux_state::{YamuxFrame, YamuxFrameInner}, + Data, Limit, P2pLimits, P2pNetworkAuthState, P2pNetworkConnectionError, + P2pNetworkConnectionMuxState, P2pNetworkNoiseAction, P2pNetworkSchedulerAction, + P2pNetworkSchedulerState, P2pNetworkSelectAction, P2pNetworkStreamState, SelectKind, }; -use super::{super::*, *}; +use super::{ + 
p2p_network_yamux_state::{YamuxStreamState, MAX_WINDOW_SIZE}, + P2pNetworkYamuxAction, P2pNetworkYamuxState, YamuxFlags, YamuxPing, +}; impl P2pNetworkYamuxState { - pub fn set_err(&mut self, err: YamuxFrameParseError) { - self.terminated = Some(Err(err)); - } - - pub fn set_res(&mut self, res: Result<(), YamuxSessionError>) { - self.terminated = Some(Ok(res)); - } - - /// Substate is accessed + /// Handles the main reducer logic for Yamux protocol actions. It processes incoming and outgoing + /// data, selects appropriate behavior based on frame types, and manages the state of streams + /// within a Yamux session. + /// + /// # High-Level Overview + /// + /// - When data arrives, it is appended to an internal buffer. The buffer is then parsed for + /// valid Yamux frames (using protocol-specific header fields and logic). Incomplete data + /// remains in the buffer for future parsing. + /// - On successful parsing, frames are enqueued for further handling (e.g., dispatching + /// actions to notify higher-level protocols or responding to pings). + /// - If protocol inconsistencies or invalid headers are encountered, it marks an error or + /// terminates gracefully, preventing further processing of unexpected data. + /// - Outgoing data is prepared as frames that respect the window constraints and established + /// flags (e.g., SYN, ACK, FIN), and they are dispatched for transmission. + /// - Once frames are processed, the function checks if the buffer has grown beyond a certain + /// threshold relative to its initial capacity. If so, and if the remaining data is small, + /// it resets the buffer capacity to a default size to avoid excessive memory usage. + /// - The function also manages streams and their states, ensuring that proper handshake + /// flags are set (SYN, ACK) when a new stream is opened or accepted, enforcing limits on + /// the number of streams, and notifying higher-level components about events like + /// incoming data or connection errors. 
pub fn reducer( + // Substate is accessed mut state_context: Substate, action: redux::ActionWithMeta, ) -> Result<(), String> @@ -47,107 +64,15 @@ impl P2pNetworkYamuxState { match action { P2pNetworkYamuxAction::IncomingData { data, addr } => { - yamux_state.buffer.extend_from_slice(&data); - let mut offset = 0; - loop { - let buf = &yamux_state.buffer[offset..]; - if buf.len() >= 12 { - let _version = match buf[0] { - 0 => 0, - unknown => { - yamux_state.set_err(YamuxFrameParseError::Version(unknown)); - break; - } - }; - let flags = u16::from_be_bytes(buf[2..4].try_into().expect("cannot fail")); - let Some(flags) = YamuxFlags::from_bits(flags) else { - yamux_state.set_err(YamuxFrameParseError::Flags(flags)); - break; - }; - let stream_id = - u32::from_be_bytes(buf[4..8].try_into().expect("cannot fail")); - let b = buf[8..12].try_into().expect("cannot fail"); - - match buf[1] { - 0 => { - let len = u32::from_be_bytes(b) as usize; - if len > yamux_state.message_size_limit { - yamux_state.set_res(Err(YamuxSessionError::Internal)); - break; - } - if buf.len() >= 12 + len { - let frame = YamuxFrame { - flags, - stream_id, - inner: YamuxFrameInner::Data( - buf[12..(12 + len)].to_vec().into(), - ), - }; - yamux_state.incoming.push_back(frame); - offset += 12 + len; - continue; - } - } - 1 => { - let difference = i32::from_be_bytes(b); - let frame = YamuxFrame { - flags, - stream_id, - inner: YamuxFrameInner::WindowUpdate { difference }, - }; - yamux_state.incoming.push_back(frame); - offset += 12; - continue; - } - 2 => { - let opaque = i32::from_be_bytes(b); - let frame = YamuxFrame { - flags, - stream_id, - inner: YamuxFrameInner::Ping { opaque }, - }; - yamux_state.incoming.push_back(frame); - offset += 12; - continue; - } - 3 => { - let code = u32::from_be_bytes(b); - let result = match code { - 0 => Ok(()), - 1 => Err(YamuxSessionError::Protocol), - 2 => Err(YamuxSessionError::Internal), - unknown => { - yamux_state - 
.set_err(YamuxFrameParseError::ErrorCode(unknown)); - break; - } - }; - let frame = YamuxFrame { - flags, - stream_id, - inner: YamuxFrameInner::GoAway(result), - }; - yamux_state.incoming.push_back(frame); - offset += 12; - continue; - } - unknown => { - yamux_state.set_err(YamuxFrameParseError::Type(unknown)); - break; - } - } - } - - break; - } - - yamux_state.buffer = yamux_state.buffer[offset..].to_vec(); + yamux_state.extend_buffer(&data); + yamux_state.parse_frames(); - let incoming_data = yamux_state.incoming.clone(); + let frame_count = yamux_state.incoming_frame_count(); let dispatcher = state_context.into_dispatcher(); - incoming_data.into_iter().for_each(|frame| { - dispatcher.push(P2pNetworkYamuxAction::IncomingFrame { addr, frame }) - }); + + for _ in 0..frame_count { + dispatcher.push(P2pNetworkYamuxAction::IncomingFrame { addr }) + } Ok(()) } @@ -181,69 +106,69 @@ impl P2pNetworkYamuxState { Ok(()) } - P2pNetworkYamuxAction::IncomingFrame { addr, frame } => { + P2pNetworkYamuxAction::IncomingFrame { addr } => { let mut pending_outgoing = VecDeque::default(); - if let Some(frame) = yamux_state.incoming.pop_front() { - if frame.flags.contains(YamuxFlags::SYN) { - yamux_state - .streams - .insert(frame.stream_id, YamuxStreamState::incoming()); + let Some(frame) = yamux_state.incoming.pop_front() else { + bug_condition!( + "Frame not found for action `P2pNetworkYamuxAction::IncomingFrame`" + ); + return Ok(()); + }; + + if frame.flags.contains(YamuxFlags::SYN) { + yamux_state + .streams + .insert(frame.stream_id, YamuxStreamState::incoming()); - if frame.stream_id != 0 { - connection_state.streams.insert( - frame.stream_id, - P2pNetworkStreamState::new_incoming(meta.time()), - ); + if frame.stream_id != 0 { + connection_state.streams.insert( + frame.stream_id, + P2pNetworkStreamState::new_incoming(meta.time()), + ); + } + } + if frame.flags.contains(YamuxFlags::ACK) { + yamux_state + .streams + .entry(frame.stream_id) + .or_default() + .established = 
true; + } + + match &frame.inner { + YamuxFrameInner::Data(_) => { + if let Some(stream) = yamux_state.streams.get_mut(&frame.stream_id) { + // must not underflow + // TODO: check it and disconnect peer that violates flow rules + stream.window_ours = + stream.window_ours.saturating_sub(frame.len_as_u32()); } } - if frame.flags.contains(YamuxFlags::ACK) { - yamux_state + YamuxFrameInner::WindowUpdate { difference } => { + let stream = yamux_state .streams .entry(frame.stream_id) - .or_default() - .established = true; - } - - match frame.inner { - YamuxFrameInner::Data(data) => { - if let Some(stream) = yamux_state.streams.get_mut(&frame.stream_id) { - // must not underflow - // TODO: check it and disconnect peer that violates flow rules - stream.window_ours = - stream.window_ours.wrapping_sub(data.len() as u32); - } - } - YamuxFrameInner::WindowUpdate { difference } => { - let stream = yamux_state - .streams - .entry(frame.stream_id) - .or_insert_with(YamuxStreamState::incoming); - stream.update_window(false, difference); - if difference > 0 { - // have some fresh space in the window - // try send as many frames as can - let mut window = stream.window_theirs; - while let Some(mut frame) = stream.pending.pop_front() { - let len = frame.len() as u32; - if let Some(new_window) = window.checked_sub(len) { - pending_outgoing.push_back(frame); - window = new_window; - } else { - if let Some(remaining) = - frame.split_at((len - window) as usize) - { - stream.pending.push_front(remaining); - } - pending_outgoing.push_back(frame); - - break; - } + .or_insert_with(YamuxStreamState::incoming); + + stream.window_theirs = stream.window_theirs.saturating_add(*difference); + + if *difference > 0 { + // have some fresh space in the window + // try send as many frames as can + let mut window = stream.window_theirs; + while let Some(frame) = stream.pending.pop_front() { + let len = frame.len_as_u32(); + pending_outgoing.push_back(frame); + if let Some(new_window) = 
window.checked_sub(len) { + window = new_window; + } else { + break; } } } - YamuxFrameInner::Ping { .. } => {} - YamuxFrameInner::GoAway(res) => yamux_state.set_res(res), } + YamuxFrameInner::Ping { .. } => {} + YamuxFrameInner::GoAway(res) => yamux_state.set_res(*res), } let (dispatcher, state) = state_context.into_dispatcher_and_state(); @@ -306,11 +231,11 @@ impl P2pNetworkYamuxState { } match &frame.inner { YamuxFrameInner::Data(data) => { - // here we are very permissive - // always when our window is smaller 64 kb, just increase it by 256 kb - // if we need fine grained back pressure, it should be implemented here - if stream.window_ours < 64 * 1024 { - let difference = 256 * 1024; + // when our window size is less than half of the max window size send window update + if stream.window_ours < stream.max_window_size / 2 { + let difference = + stream.max_window_size.saturating_mul(2).min(1024 * 1024); + dispatcher.push(P2pNetworkYamuxAction::OutgoingFrame { addr, frame: YamuxFrame { @@ -344,16 +269,9 @@ impl P2pNetworkYamuxState { }); } } - YamuxFrameInner::WindowUpdate { difference } => { - if *difference < 0 { - let error = - P2pNetworkConnectionError::YamuxBadWindowUpdate(frame.stream_id); - dispatcher.push(P2pNetworkSchedulerAction::Error { addr, error }); - } else { - while let Some(frame) = pending_outgoing.pop_front() { - dispatcher - .push(P2pNetworkYamuxAction::OutgoingFrame { addr, frame }); - } + YamuxFrameInner::WindowUpdate { .. 
} => { + while let Some(frame) = pending_outgoing.pop_front() { + dispatcher.push(P2pNetworkYamuxAction::OutgoingFrame { addr, frame }); } } _ => {} @@ -367,41 +285,39 @@ impl P2pNetworkYamuxState { return Ok(()); }; match &mut frame.inner { - YamuxFrameInner::Data(data) => { + YamuxFrameInner::Data(_) => { if let Some(new_window) = - stream.window_theirs.checked_sub(data.len() as u32) + stream.window_theirs.checked_sub(frame.len_as_u32()) { // their window is big enough, decrease the size // and send the whole frame stream.window_theirs = new_window; - } else if stream.window_theirs != 0 && stream.pending.is_empty() { - // their window is not big enough, but has some space, - // and the queue is empty, - // do not send the whole frame, - // split it and put remaining in the queue, + } else { + // their window is not big enough + // split the frame to send as much as you can and put the rest in the queue if let Some(remaining) = frame.split_at(stream.window_theirs as usize) { - stream.pending.push_back(remaining); + stream.pending.push_front(remaining); } + // the window will be zero after sending stream.window_theirs = 0; - } else { - // either the window cannot accept any byte, - // or the queue is already not empty - // in both cases the whole frame goes in the queue and nothing to send - stream.pending.push_back(frame); + + // if size of pending that is above the limit, ignore the peer if stream.pending.iter().map(YamuxFrame::len).sum::() > yamux_state.pending_outgoing_limit { let dispatcher = state_context.into_dispatcher(); let error = P2pNetworkConnectionError::YamuxOverflow(stream_id); dispatcher.push(P2pNetworkSchedulerAction::Error { addr, error }); + return Ok(()); } - - return Ok(()); } } YamuxFrameInner::WindowUpdate { difference } => { - stream.update_window(true, *difference); + stream.window_ours = stream.window_ours.saturating_add(*difference); + if stream.window_ours > stream.max_window_size { + stream.max_window_size = 
stream.window_ours.min(MAX_WINDOW_SIZE); + } } _ => {} } @@ -468,24 +384,3 @@ impl P2pNetworkYamuxState { } } } - -impl YamuxStreamState { - pub fn update_window(&mut self, ours: bool, difference: i32) { - let window = if ours { - &mut self.window_ours - } else { - &mut self.window_theirs - }; - if difference < 0 { - let decreasing = (-difference) as u32; - if *window < decreasing { - *window = 0; - } else { - *window = (*window).wrapping_sub(decreasing); - } - } else { - let increasing = difference as u32; - *window = (*window).wrapping_add(increasing); - } - } -} diff --git a/p2p/src/network/yamux/p2p_network_yamux_state.rs b/p2p/src/network/yamux/p2p_network_yamux_state.rs index 766465a786..938ab3d68b 100644 --- a/p2p/src/network/yamux/p2p_network_yamux_state.rs +++ b/p2p/src/network/yamux/p2p_network_yamux_state.rs @@ -1,9 +1,14 @@ use std::collections::{BTreeMap, VecDeque}; +use malloc_size_of_derive::MallocSizeOf; use serde::{Deserialize, Serialize}; use super::super::*; +pub const INITIAL_RECV_BUFFER_CAPACITY: usize = 0x40000; // 256kb +pub const INITIAL_WINDOW_SIZE: u32 = INITIAL_RECV_BUFFER_CAPACITY as u32; +pub const MAX_WINDOW_SIZE: u32 = 16 * 1024 * 1024; // 16mb + #[derive(Serialize, Deserialize, Debug, Clone, Default)] pub struct P2pNetworkYamuxState { pub message_size_limit: Limit, @@ -44,9 +49,150 @@ impl P2pNetworkYamuxState { windows + headers * SIZE_OF_HEADER } + + pub fn set_err(&mut self, err: YamuxFrameParseError) { + self.terminated = Some(Err(err)); + } + + pub fn set_res(&mut self, res: Result<(), YamuxSessionError>) { + self.terminated = Some(Ok(res)); + } + + /// Attempts to parse a Yamux frame from the buffer starting at the given offset. + /// Returns the number of bytes consumed if a frame was successfully parsed. 
+ pub fn try_parse_frame(&mut self, offset: usize) -> Option { + let buf = &self.buffer[offset..]; + if buf.len() < 12 { + return None; + } + + let _version = match buf[0] { + 0 => 0, + unknown => { + self.set_err(YamuxFrameParseError::Version(unknown)); + return None; + } + }; + + let flags = u16::from_be_bytes(buf[2..4].try_into().expect("cannot fail")); + let Some(flags) = YamuxFlags::from_bits(flags) else { + self.set_err(YamuxFrameParseError::Flags(flags)); + return None; + }; + let stream_id = u32::from_be_bytes(buf[4..8].try_into().expect("cannot fail")); + let b = buf[8..12].try_into().expect("cannot fail"); + + match buf[1] { + // Data frame - contains actual payload data for the stream + 0 => { + let len = u32::from_be_bytes(b) as usize; + if len > self.message_size_limit { + self.set_res(Err(YamuxSessionError::Internal)); + return None; + } + if buf.len() >= 12 + len { + let frame = YamuxFrame { + flags, + stream_id, + inner: YamuxFrameInner::Data(buf[12..(12 + len)].to_vec().into()), + }; + self.incoming.push_back(frame); + Some(12 + len) + } else { + None + } + } + // Window Update frame - used for flow control, updates available window size + 1 => { + let difference = u32::from_be_bytes(b); + let frame = YamuxFrame { + flags, + stream_id, + inner: YamuxFrameInner::WindowUpdate { difference }, + }; + self.incoming.push_back(frame); + Some(12) + } + // Ping frame - used for keepalive and round-trip time measurements + 2 => { + let opaque = u32::from_be_bytes(b); + let frame = YamuxFrame { + flags, + stream_id, + inner: YamuxFrameInner::Ping { opaque }, + }; + self.incoming.push_back(frame); + Some(12) + } + // GoAway frame - signals session termination with optional error code + 3 => { + let code = u32::from_be_bytes(b); + let result = match code { + 0 => Ok(()), // Normal termination + 1 => Err(YamuxSessionError::Protocol), // Protocol error + 2 => Err(YamuxSessionError::Internal), // Internal error + unknown => { + 
self.set_err(YamuxFrameParseError::ErrorCode(unknown)); + return None; + } + }; + let frame = YamuxFrame { + flags, + stream_id, + inner: YamuxFrameInner::GoAway(result), + }; + self.incoming.push_back(frame); + Some(12) + } + // Unknown frame type + unknown => { + self.set_err(YamuxFrameParseError::Type(unknown)); + None + } + } + } + + /// Attempts to parse all available complete frames from the buffer, + /// then shifts and compacts the buffer as needed. + pub fn parse_frames(&mut self) { + let mut offset = 0; + while let Some(consumed) = self.try_parse_frame(offset) { + offset += consumed; + } + self.shift_and_compact_buffer(offset); + } + + fn shift_and_compact_buffer(&mut self, offset: usize) { + let new_len = self.buffer.len() - offset; + if self.buffer.capacity() > INITIAL_RECV_BUFFER_CAPACITY * 2 + && new_len < INITIAL_RECV_BUFFER_CAPACITY / 2 + { + let old_buffer = &self.buffer; + let mut new_buffer = Vec::with_capacity(INITIAL_RECV_BUFFER_CAPACITY); + new_buffer.extend_from_slice(&old_buffer[offset..]); + self.buffer = new_buffer; + } else { + self.buffer.copy_within(offset.., 0); + self.buffer.truncate(new_len); + } + } + + /// Extends the internal buffer with new data, ensuring it has appropriate capacity. + /// On first use, reserves the initial capacity. + pub fn extend_buffer(&mut self, data: &[u8]) { + if self.buffer.capacity() == 0 { + self.buffer.reserve(INITIAL_RECV_BUFFER_CAPACITY); + } + self.buffer.extend_from_slice(data); + } + + /// Returns the number of incoming frames that have been parsed and are ready for processing. 
+ pub fn incoming_frame_count(&self) -> usize { + self.incoming.len() + } } -#[derive(Serialize, Deserialize, Debug, Clone)] +#[derive(Serialize, Deserialize, Debug, Clone, MallocSizeOf)] pub struct YamuxStreamState { pub incoming: bool, pub syn_sent: bool, @@ -55,6 +201,7 @@ pub struct YamuxStreamState { pub writable: bool, pub window_theirs: u32, pub window_ours: u32, + pub max_window_size: u32, pub pending: VecDeque, } @@ -66,8 +213,9 @@ impl Default for YamuxStreamState { established: false, readable: false, writable: false, - window_theirs: 256 * 1024, - window_ours: 256 * 1024, + window_theirs: INITIAL_WINDOW_SIZE, + window_ours: INITIAL_WINDOW_SIZE, + max_window_size: INITIAL_WINDOW_SIZE, pending: VecDeque::default(), } } @@ -95,7 +243,7 @@ bitflags::bitflags! { #[derive(Serialize, Deserialize, Debug, Clone, Copy)] pub struct YamuxPing { pub stream_id: StreamId, - pub opaque: i32, + pub opaque: u32, pub response: bool, } @@ -134,8 +282,9 @@ pub enum YamuxFrameParseError { ErrorCode(u32), } -#[derive(Serialize, Deserialize, Debug, Clone)] +#[derive(Serialize, Deserialize, Debug, Clone, MallocSizeOf)] pub struct YamuxFrame { + #[ignore_malloc_size_of = "doesn't allocate"] pub flags: YamuxFlags, pub stream_id: StreamId, pub inner: YamuxFrameInner, @@ -194,6 +343,15 @@ impl YamuxFrame { } } + // When we parse the frame we parse length as u32 and so `data.len()` should always be representable as u32 + pub fn len_as_u32(&self) -> u32 { + if let YamuxFrameInner::Data(data) = &self.inner { + u32::try_from(data.len()).unwrap_or(u32::MAX) + } else { + 0 + } + } + /// If this data is bigger then `pos`, keep only first `pos` bytes and return some remaining /// otherwise return none pub fn split_at(&mut self, pos: usize) -> Option { @@ -224,15 +382,15 @@ impl YamuxFrame { } } -#[derive(Serialize, Deserialize, Debug, Clone)] +#[derive(Serialize, Deserialize, Debug, Clone, MallocSizeOf)] pub enum YamuxFrameInner { Data(Data), - WindowUpdate { difference: i32 }, - Ping { 
opaque: i32 }, - GoAway(Result<(), YamuxSessionError>), + WindowUpdate { difference: u32 }, + Ping { opaque: u32 }, + GoAway(#[ignore_malloc_size_of = "doesn't allocate"] Result<(), YamuxSessionError>), } -#[derive(Serialize, Deserialize, Debug, Clone)] +#[derive(Serialize, Deserialize, Debug, Clone, Copy)] pub enum YamuxSessionError { Protocol, Internal, @@ -263,3 +421,28 @@ mod tests { assert_eq!(Kademlia.stream_id(true), 6); } } + +mod measurement { + use std::mem; + + use malloc_size_of::{MallocSizeOf, MallocSizeOfOps}; + + use super::{P2pNetworkYamuxState, YamuxFrame}; + + impl MallocSizeOf for P2pNetworkYamuxState { + fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize { + self.buffer.capacity() + + self.incoming.capacity() * mem::size_of::() + + self + .incoming + .iter() + .map(|frame| frame.size_of(ops)) + .sum::() + + self + .streams + .iter() + .map(|(k, v)| mem::size_of_val(k) + mem::size_of_val(v) + v.size_of(ops)) + .sum::() + } + } +} diff --git a/p2p/src/p2p_reducer.rs b/p2p/src/p2p_reducer.rs index e98125bad5..7b85ca71e7 100644 --- a/p2p/src/p2p_reducer.rs +++ b/p2p/src/p2p_reducer.rs @@ -9,7 +9,8 @@ use crate::{ }, disconnection::{P2pDisconnectedState, P2pDisconnectionAction}, P2pAction, P2pNetworkKadKey, P2pNetworkKademliaAction, P2pNetworkPnetAction, - P2pNetworkRpcAction, P2pNetworkSelectAction, P2pNetworkState, P2pPeerState, P2pState, PeerId, + P2pNetworkPubsubAction, P2pNetworkRpcAction, P2pNetworkSelectAction, P2pNetworkState, + P2pPeerState, P2pState, PeerId, }; use openmina_core::{bug_condition, Substate}; use redux::{ActionMeta, ActionWithMeta, Dispatcher, Timestamp}; @@ -92,6 +93,7 @@ impl P2pState { state.p2p_pnet_timeouts(dispatcher, time)?; state.p2p_select_timeouts(dispatcher, time)?; state.p2p_rpc_heartbeats(dispatcher, time)?; + dispatcher.push(P2pNetworkPubsubAction::PruneMessages {}); } state.rpc_timeouts(dispatcher, time)?; @@ -346,8 +348,9 @@ impl P2pState { scheduler .rpc_incoming_streams + .0 .iter() - 
.chain(&scheduler.rpc_outgoing_streams) + .chain(&scheduler.rpc_outgoing_streams.0) .flat_map(|(peer_id, state)| { state .iter() diff --git a/p2p/src/p2p_state.rs b/p2p/src/p2p_state.rs index 664b7682a7..c1f57f4aa9 100644 --- a/p2p/src/p2p_state.rs +++ b/p2p/src/p2p_state.rs @@ -1,3 +1,9 @@ +use std::{ + collections::{BTreeMap, BTreeSet}, + sync::Arc, + time::Duration, +}; + use openmina_core::{ block::{ArcBlockWithHash, BlockWithHash}, impl_substate_access, @@ -6,13 +12,10 @@ use openmina_core::{ transaction::{TransactionInfo, TransactionWithHash}, ChainId, SubstateAccess, }; + +use malloc_size_of_derive::MallocSizeOf; use redux::{Callback, Timestamp}; use serde::{Deserialize, Serialize}; -use std::{ - collections::{BTreeMap, BTreeSet}, - sync::Arc, - time::Duration, -}; use crate::{ bootstrap::P2pNetworkKadBootstrapState, @@ -33,8 +36,8 @@ use crate::{ identify::{P2pNetworkIdentify, P2pNetworkIdentifyState}, P2pNetworkState, }, - Limit, P2pConfig, P2pLimits, P2pNetworkKadState, P2pNetworkPubsubState, - P2pNetworkSchedulerState, P2pTimeouts, PeerId, + Limit, P2pConfig, P2pLimits, P2pNetworkKadState, P2pNetworkPubsubMessageCacheId, + P2pNetworkPubsubState, P2pNetworkSchedulerState, P2pTimeouts, PeerId, }; use mina_p2p_messages::v2; @@ -184,17 +187,17 @@ impl P2pState { self.peers .get(peer_id) .and_then(|p| p.status.as_connecting()) - .map_or(false, |p| !p.is_error()) + .is_some_and(|p| !p.is_error()) } pub fn is_peer_connected_or_connecting(&self, peer_id: &PeerId) -> bool { self.peers .get(peer_id) - .map_or(false, |p| p.status.is_connected_or_connecting()) + .is_some_and(|p| p.status.is_connected_or_connecting()) } pub fn is_libp2p_peer(&self, peer_id: &PeerId) -> bool { - self.peers.get(peer_id).map_or(false, |p| p.is_libp2p()) + self.peers.get(peer_id).is_some_and(|p| p.is_libp2p()) } pub fn is_peer_rpc_timed_out( @@ -203,7 +206,7 @@ impl P2pState { rpc_id: P2pRpcId, now: redux::Timestamp, ) -> bool { - self.get_ready_peer(peer_id).map_or(false, |p| { + 
self.get_ready_peer(peer_id).is_some_and(|p| { p.channels .rpc .is_timed_out(rpc_id, now, &self.config.timeouts) @@ -216,7 +219,7 @@ impl P2pState { rpc_id: P2pStreamingRpcId, now: redux::Timestamp, ) -> bool { - self.get_ready_peer(peer_id).map_or(false, |p| { + self.get_ready_peer(peer_id).is_some_and(|p| { p.channels .streaming_rpc .is_timed_out(rpc_id, now, &self.config.timeouts) @@ -343,7 +346,7 @@ impl P2pState { } } -#[derive(Serialize, Deserialize, Debug, Clone)] +#[derive(Serialize, Deserialize, Debug, Clone, MallocSizeOf)] pub struct P2pPeerState { pub is_libp2p: bool, pub dial_opts: Option, @@ -399,12 +402,18 @@ impl P2pPeerState { } } -#[derive(Serialize, Deserialize, Debug, Clone)] +#[derive(Serialize, Deserialize, Debug, Clone, MallocSizeOf)] #[serde(tag = "state")] pub enum P2pPeerStatus { Connecting(P2pConnectionState), - Disconnecting { time: redux::Timestamp }, - Disconnected { time: redux::Timestamp }, + Disconnecting { + #[ignore_malloc_size_of = "doesn't allocate"] + time: redux::Timestamp, + }, + Disconnected { + #[ignore_malloc_size_of = "doesn't allocate"] + time: redux::Timestamp, + }, Ready(P2pPeerStatusReady), } @@ -562,6 +571,9 @@ pub struct P2pCallbacks { /// Callback for [`P2pChannelsStreamingRpcAction::ResponseReceived`] pub on_p2p_channels_streaming_rpc_response_received: OptionalCallback<(PeerId, P2pRpcId, Option)>, + + /// Callback for received pubsub message + pub on_p2p_pubsub_message_received: OptionalCallback, } impl_substate_access!(P2pState, P2pNetworkState, network); @@ -612,3 +624,29 @@ impl_substate_access!( network.scheduler.broadcast_state ); impl_substate_access!(P2pState, P2pConfig, config); + +mod measurement { + use malloc_size_of::{MallocSizeOf, MallocSizeOfOps}; + + use super::*; + + impl MallocSizeOf for P2pState { + fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize { + self.peers.values().map(|v| v.size_of(ops)).sum::() + + self.network.scheduler.size_of(ops) + } + } + + impl MallocSizeOf for 
P2pPeerStatusReady { + fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize { + self.best_tip + .as_ref() + .map(|v| { + usize::from(!ops.have_seen_ptr(Arc::as_ptr(&v.block))) + * (size_of::() + v.block.size_of(ops)) + }) + .unwrap_or_default() + // TODO(vlad): `channels` + } + } +} diff --git a/p2p/src/peer/p2p_peer_actions.rs b/p2p/src/peer/p2p_peer_actions.rs index 547c61efc9..1f298c66c7 100644 --- a/p2p/src/peer/p2p_peer_actions.rs +++ b/p2p/src/peer/p2p_peer_actions.rs @@ -48,7 +48,7 @@ impl redux::EnablingCondition for P2pPeerAction { P2pPeerAction::Ready { peer_id, .. } => state .peers .get(peer_id) - .map_or(false, |p| p.status.is_connecting_success()), + .is_some_and(|p| p.status.is_connecting_success()), P2pPeerAction::BestTipUpdate { peer_id, .. } => { // TODO: don't enable if block inferior than existing peer's // best tip. diff --git a/p2p/src/service_impl/mio/mod.rs b/p2p/src/service_impl/mio/mod.rs index 0ab78ab33a..8ab9473545 100644 --- a/p2p/src/service_impl/mio/mod.rs +++ b/p2p/src/service_impl/mio/mod.rs @@ -484,6 +484,11 @@ where if limit > self.recv_buf.len() { // TODO: upper bound? resize to `limit` or try to allocate some extra space too? 
self.recv_buf.resize(limit, 0); + + openmina_core::warn!( + openmina_core::log::system_time(); + summary = format!("Increasing buffer size to {}kb", limit / 1024) + ); } let mut keep = false; diff --git a/p2p/src/service_impl/mod.rs b/p2p/src/service_impl/mod.rs index bd7b9ccfb2..421343a815 100644 --- a/p2p/src/service_impl/mod.rs +++ b/p2p/src/service_impl/mod.rs @@ -87,6 +87,14 @@ pub mod webrtc { encrypted: &T::Encrypted, ) -> Result>; + fn auth_send( + &mut self, + peer_id: PeerId, + other_pub_key: &PublicKey, + auth: Option, + ) { + } + fn auth_encrypt_and_send( &mut self, peer_id: PeerId, diff --git a/p2p/src/service_impl/webrtc/mod.rs b/p2p/src/service_impl/webrtc/mod.rs index 3c4c45ebdf..1ca88e3a37 100644 --- a/p2p/src/service_impl/webrtc/mod.rs +++ b/p2p/src/service_impl/webrtc/mod.rs @@ -883,6 +883,19 @@ pub trait P2pServiceWebrtc: redux::Service { encrypted: &T::Encrypted, ) -> Result>; + fn auth_send( + &mut self, + peer_id: PeerId, + _other_pub_key: &PublicKey, + auth: Option, + ) { + if let Some(peer) = self.peers().get(&peer_id) { + let _ = peer + .cmd_sender + .send(PeerCmd::ConnectionAuthorizationSend(auth)); + } + } + fn auth_encrypt_and_send( &mut self, peer_id: PeerId, diff --git a/p2p/src/service_impl/webrtc/web.rs b/p2p/src/service_impl/webrtc/web.rs index baee93fc06..df83300143 100644 --- a/p2p/src/service_impl/webrtc/web.rs +++ b/p2p/src/service_impl/webrtc/web.rs @@ -113,7 +113,7 @@ impl RTCConnection { let mut tx = Some(tx); let conn = self.weak_ref(); let callback = Closure::::new(move || { - if conn.upgrade().map_or(false, |conn| { + if conn.upgrade().is_some_and(|conn| { matches!(conn.ice_gathering_state(), RtcIceGatheringState::Complete) }) { if let Some(tx) = tx.take() { diff --git a/p2p/src/webrtc/signal.rs b/p2p/src/webrtc/signal.rs index 72e9d42949..a47a5d0066 100644 --- a/p2p/src/webrtc/signal.rs +++ b/p2p/src/webrtc/signal.rs @@ -1,5 +1,6 @@ use binprot_derive::{BinProtRead, BinProtWrite}; use derive_more::From; +use 
malloc_size_of_derive::MallocSizeOf; use openmina_core::ChainId; use serde::{Deserialize, Serialize}; @@ -7,25 +8,29 @@ use crate::identity::{EncryptableType, PeerId, PublicKey}; use super::{ConnectionAuth, Host}; -#[derive(Serialize, Deserialize, Eq, PartialEq, Debug, Clone)] +#[derive(Serialize, Deserialize, Eq, PartialEq, Debug, Clone, MallocSizeOf)] pub struct Offer { pub sdp: String, + #[ignore_malloc_size_of = "doesn't allocate"] pub chain_id: ChainId, /// Offerer's identity public key. + #[ignore_malloc_size_of = "doesn't allocate"] pub identity_pub_key: PublicKey, /// Peer id that the offerer wants to connect to. pub target_peer_id: PeerId, // TODO(binier): remove host and get ip from ice candidates instead /// Host name or IP of the signaling server of the offerer. + #[ignore_malloc_size_of = "neglectible"] pub host: Host, /// Port of the signaling server of the offerer. pub listen_port: Option, } -#[derive(Serialize, Deserialize, Eq, PartialEq, Debug, Clone)] +#[derive(Serialize, Deserialize, Eq, PartialEq, Debug, Clone, MallocSizeOf)] pub struct Answer { pub sdp: String, /// Offerer's identity public key. + #[ignore_malloc_size_of = "doesn't allocate"] pub identity_pub_key: PublicKey, /// Peer id that the offerer wants to connect to. 
pub target_peer_id: PeerId, @@ -37,7 +42,9 @@ pub enum Signal { Answer(Answer), } -#[derive(Serialize, Deserialize, Eq, PartialEq, Debug, Clone, Copy, thiserror::Error)] +#[derive( + Serialize, Deserialize, Eq, PartialEq, Debug, Clone, Copy, thiserror::Error, MallocSizeOf, +)] pub enum RejectionReason { #[error("peer is on a different chain")] ChainIdMismatch, diff --git a/p2p/testing/Cargo.toml b/p2p/testing/Cargo.toml index 2e7eb727e5..6941fcb38d 100644 --- a/p2p/testing/Cargo.toml +++ b/p2p/testing/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "p2p-testing" -version = "0.13.0" +version = "0.14.0" edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html diff --git a/p2p/testing/src/libp2p_node.rs b/p2p/testing/src/libp2p_node.rs index c6c7e19e14..9e455ef367 100644 --- a/p2p/testing/src/libp2p_node.rs +++ b/p2p/testing/src/libp2p_node.rs @@ -78,7 +78,6 @@ pub struct Libp2pBehaviour { #[behaviour(ignore)] port: u16, - // TODO(vlad9486): move maps inside `RpcBehaviour` // map msg_id into (tag, version) #[behaviour(ignore)] pub ongoing: BTreeMap<(PeerId, u64), (RpcTag, u32)>, diff --git a/p2p/testing/src/predicates.rs b/p2p/testing/src/predicates.rs index 3762c13f7a..c0235b000d 100644 --- a/p2p/testing/src/predicates.rs +++ b/p2p/testing/src/predicates.rs @@ -194,7 +194,7 @@ where .iter() .position(|(_id, _v)| _id == &id && _v == &v) }) - .map_or(false, |i| { + .is_some_and(|i| { nodes_items.swap_remove(i); nodes_items.is_empty() }) diff --git a/p2p/testing/src/stream.rs b/p2p/testing/src/stream.rs index 2f01cb881e..85dc7a24db 100644 --- a/p2p/testing/src/stream.rs +++ b/p2p/testing/src/stream.rs @@ -254,7 +254,7 @@ mod tests { let take_during = cluster.stream().take_during(d); let all_under_timeout = take_during - .all(|event| ready(event.timestamp().map_or(false, |t| t < timeout))) + .all(|event| ready(event.timestamp().is_some_and(|t| t < timeout))) .await; assert!(all_under_timeout); } diff --git 
a/poseidon/Cargo.toml b/poseidon/Cargo.toml index ee132fbc08..1e79ed21d1 100644 --- a/poseidon/Cargo.toml +++ b/poseidon/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "poseidon" -version = "0.13.0" +version = "0.14.0" edition = "2021" [lints] diff --git a/producer-dashboard/.sqlx/query-ed33d9e57251b18ec97b7c5defc895b365ffe2efda32252d0f519c587be839c6.json b/producer-dashboard/.sqlx/query-009408bec267f7f8ffd0c41abc0ff5a9c0dbefe5b2ef90b137922e28be92d519.json similarity index 63% rename from producer-dashboard/.sqlx/query-ed33d9e57251b18ec97b7c5defc895b365ffe2efda32252d0f519c587be839c6.json rename to producer-dashboard/.sqlx/query-009408bec267f7f8ffd0c41abc0ff5a9c0dbefe5b2ef90b137922e28be92d519.json index 1724db8298..5dccef59b6 100644 --- a/producer-dashboard/.sqlx/query-ed33d9e57251b18ec97b7c5defc895b365ffe2efda32252d0f519c587be839c6.json +++ b/producer-dashboard/.sqlx/query-009408bec267f7f8ffd0c41abc0ff5a9c0dbefe5b2ef90b137922e28be92d519.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "SELECT \n b.id, \n b.state_hash, \n b.height, \n b.timestamp, \n b.chain_status AS \"chain_status: ChainStatus\",\n pk_creator.value AS \"creator_key\",\n pk_winner.value AS \"winner_key\",\n b.global_slot_since_genesis,\n b.global_slot_since_hard_fork,\n b.parent_id\n FROM \n blocks b\n JOIN \n public_keys pk_creator ON b.creator_id = pk_creator.id\n JOIN \n public_keys pk_winner ON b.block_winner_id = pk_winner.id\n WHERE \n pk_creator.value = $1", + "query": "SELECT \n b.id, \n b.state_hash, \n b.height, \n b.timestamp, \n b.chain_status AS \"chain_status: ChainStatus\",\n pk_creator.value AS \"creator_key\",\n pk_winner.value AS \"winner_key\",\n b.global_slot_since_genesis,\n b.global_slot_since_hard_fork,\n b.parent_id\nFROM \n blocks b\nJOIN \n public_keys pk_creator ON b.creator_id = pk_creator.id\nJOIN \n public_keys pk_winner ON b.block_winner_id = pk_winner.id\nWHERE \n pk_creator.value = $1", "describe": { "columns": [ { @@ -83,5 +83,5 @@ true ] }, - "hash": 
"ed33d9e57251b18ec97b7c5defc895b365ffe2efda32252d0f519c587be839c6" + "hash": "009408bec267f7f8ffd0c41abc0ff5a9c0dbefe5b2ef90b137922e28be92d519" } diff --git a/producer-dashboard/.sqlx/query-51208aef4730b5cd251ca00745b48abf2445ff5c7ea1a3db14d78794b08f3045.json b/producer-dashboard/.sqlx/query-51208aef4730b5cd251ca00745b48abf2445ff5c7ea1a3db14d78794b08f3045.json new file mode 100644 index 0000000000..1bff8c57c9 --- /dev/null +++ b/producer-dashboard/.sqlx/query-51208aef4730b5cd251ca00745b48abf2445ff5c7ea1a3db14d78794b08f3045.json @@ -0,0 +1,20 @@ +{ + "db_name": "PostgreSQL", + "query": "SELECT\n b.state_hash AS \"state_hash!\"\nFROM blocks b\nORDER BY b.id DESC\nLIMIT 1;", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "state_hash!", + "type_info": "Text" + } + ], + "parameters": { + "Left": [] + }, + "nullable": [ + false + ] + }, + "hash": "51208aef4730b5cd251ca00745b48abf2445ff5c7ea1a3db14d78794b08f3045" +} diff --git a/producer-dashboard/.sqlx/query-5ebfde9470ac007c38bbeee89ca24bf71ae658b976ed75bb7beba547d2ec3a8b.json b/producer-dashboard/.sqlx/query-5ebfde9470ac007c38bbeee89ca24bf71ae658b976ed75bb7beba547d2ec3a8b.json new file mode 100644 index 0000000000..71a278461b --- /dev/null +++ b/producer-dashboard/.sqlx/query-5ebfde9470ac007c38bbeee89ca24bf71ae658b976ed75bb7beba547d2ec3a8b.json @@ -0,0 +1,88 @@ +{ + "db_name": "PostgreSQL", + "query": "WITH RECURSIVE chain AS (\n (SELECT * FROM blocks WHERE state_hash = $1)\n\n UNION ALL\n\n SELECT b.* FROM blocks b\n INNER JOIN chain\n ON b.id = chain.parent_id AND chain.id <> chain.parent_id\n)\n\nSELECT \n c.id AS \"id!\", \n c.state_hash AS \"state_hash!\", \n c.height AS \"height!\", \n c.timestamp AS \"timestamp!\", \n c.chain_status AS \"chain_status!: ChainStatus\",\n pk_creator.value AS \"creator_key\",\n pk_winner.value AS \"winner_key\",\n c.global_slot_since_genesis AS \"global_slot_since_genesis!\",\n c.global_slot_since_hard_fork AS \"global_slot_since_hard_fork!\",\n c.parent_id\nFROM \n 
chain c\nJOIN \n public_keys pk_creator ON c.creator_id = pk_creator.id\nJOIN \n public_keys pk_winner ON c.block_winner_id = pk_winner.id\nLIMIT $2", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id!", + "type_info": "Int4" + }, + { + "ordinal": 1, + "name": "state_hash!", + "type_info": "Text" + }, + { + "ordinal": 2, + "name": "height!", + "type_info": "Int8" + }, + { + "ordinal": 3, + "name": "timestamp!", + "type_info": "Text" + }, + { + "ordinal": 4, + "name": "chain_status!: ChainStatus", + "type_info": { + "Custom": { + "name": "chain_status_type", + "kind": { + "Enum": [ + "canonical", + "orphaned", + "pending" + ] + } + } + } + }, + { + "ordinal": 5, + "name": "creator_key", + "type_info": "Text" + }, + { + "ordinal": 6, + "name": "winner_key", + "type_info": "Text" + }, + { + "ordinal": 7, + "name": "global_slot_since_genesis!", + "type_info": "Int8" + }, + { + "ordinal": 8, + "name": "global_slot_since_hard_fork!", + "type_info": "Int8" + }, + { + "ordinal": 9, + "name": "parent_id", + "type_info": "Int4" + } + ], + "parameters": { + "Left": [ + "Text", + "Int8" + ] + }, + "nullable": [ + null, + null, + null, + null, + null, + false, + false, + null, + null, + null + ] + }, + "hash": "5ebfde9470ac007c38bbeee89ca24bf71ae658b976ed75bb7beba547d2ec3a8b" +} diff --git a/producer-dashboard/.sqlx/query-b379c0ac7a9917a84f112237c0e6afb45c407854ff4d6b014bb6a1691d27eb14.json b/producer-dashboard/.sqlx/query-640b32fbf6b8d598b4d083399f09b606e8b0053498e1d5437059460311980f30.json similarity index 63% rename from producer-dashboard/.sqlx/query-b379c0ac7a9917a84f112237c0e6afb45c407854ff4d6b014bb6a1691d27eb14.json rename to producer-dashboard/.sqlx/query-640b32fbf6b8d598b4d083399f09b606e8b0053498e1d5437059460311980f30.json index 351ee21002..001b77b97f 100644 --- a/producer-dashboard/.sqlx/query-b379c0ac7a9917a84f112237c0e6afb45c407854ff4d6b014bb6a1691d27eb14.json +++ 
b/producer-dashboard/.sqlx/query-640b32fbf6b8d598b4d083399f09b606e8b0053498e1d5437059460311980f30.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "SELECT \n b.id, \n b.state_hash, \n b.height, \n b.timestamp, \n b.chain_status AS \"chain_status: ChainStatus\",\n pk_creator.value AS \"creator_key\",\n pk_winner.value AS \"winner_key\",\n b.global_slot_since_genesis,\n b.global_slot_since_hard_fork,\n b.parent_id\n FROM \n blocks b\n JOIN \n public_keys pk_creator ON b.creator_id = pk_creator.id\n JOIN \n public_keys pk_winner ON b.block_winner_id = pk_winner.id\n WHERE \n b.global_slot_since_hard_fork BETWEEN $1 AND $2", + "query": "SELECT \n b.id, \n b.state_hash, \n b.height, \n b.timestamp, \n b.chain_status AS \"chain_status: ChainStatus\",\n pk_creator.value AS \"creator_key\",\n pk_winner.value AS \"winner_key\",\n b.global_slot_since_genesis,\n b.global_slot_since_hard_fork,\n b.parent_id\nFROM \n blocks b\nJOIN \n public_keys pk_creator ON b.creator_id = pk_creator.id\nJOIN \n public_keys pk_winner ON b.block_winner_id = pk_winner.id\nWHERE \n b.global_slot_since_hard_fork BETWEEN $1 AND $2", "describe": { "columns": [ { @@ -84,5 +84,5 @@ true ] }, - "hash": "b379c0ac7a9917a84f112237c0e6afb45c407854ff4d6b014bb6a1691d27eb14" + "hash": "640b32fbf6b8d598b4d083399f09b606e8b0053498e1d5437059460311980f30" } diff --git a/producer-dashboard/.sqlx/query-9114bb761d7026cd21152c8ba5bd640de95a0767a7009dd3acc8344895079628.json b/producer-dashboard/.sqlx/query-d2cfd7b40cf0c5af718390dc2e57b46a82306adc0993d1f0de8783a60d8aa9a7.json similarity index 54% rename from producer-dashboard/.sqlx/query-9114bb761d7026cd21152c8ba5bd640de95a0767a7009dd3acc8344895079628.json rename to producer-dashboard/.sqlx/query-d2cfd7b40cf0c5af718390dc2e57b46a82306adc0993d1f0de8783a60d8aa9a7.json index eff3408b20..2ff06e31d5 100644 --- a/producer-dashboard/.sqlx/query-9114bb761d7026cd21152c8ba5bd640de95a0767a7009dd3acc8344895079628.json +++ 
b/producer-dashboard/.sqlx/query-d2cfd7b40cf0c5af718390dc2e57b46a82306adc0993d1f0de8783a60d8aa9a7.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "WITH RECURSIVE chain AS (\n (SELECT * FROM blocks WHERE state_hash = $1)\n \n UNION ALL\n \n SELECT b.* FROM blocks b\n INNER JOIN chain\n ON b.id = chain.parent_id AND chain.id <> chain.parent_id\n )\n \n SELECT \n c.id AS \"id!\", \n c.state_hash AS \"state_hash!\", \n c.height AS \"height!\", \n c.timestamp AS \"timestamp!\", \n c.chain_status AS \"chain_status!: ChainStatus\",\n pk_creator.value AS \"creator_key\",\n pk_winner.value AS \"winner_key\",\n c.global_slot_since_genesis AS \"global_slot_since_genesis!\",\n c.global_slot_since_hard_fork AS \"global_slot_since_hard_fork!\",\n c.parent_id\n FROM \n chain c\n JOIN \n public_keys pk_creator ON c.creator_id = pk_creator.id\n JOIN \n public_keys pk_winner ON c.block_winner_id = pk_winner.id\n WHERE \n c.global_slot_since_hard_fork BETWEEN $2 AND $3\n ", + "query": "WITH RECURSIVE chain AS (\n (SELECT * FROM blocks WHERE state_hash = $1)\n UNION ALL\n SELECT b.* FROM blocks b\n INNER JOIN chain\n ON b.id = chain.parent_id AND chain.id <> chain.parent_id\n )\n\n SELECT \n c.id AS \"id!\", \n c.state_hash AS \"state_hash!\", \n c.height AS \"height!\", \n c.timestamp AS \"timestamp!\", \n c.chain_status AS \"chain_status!: ChainStatus\",\n pk_creator.value AS \"creator_key\",\n pk_winner.value AS \"winner_key\",\n c.global_slot_since_genesis AS \"global_slot_since_genesis!\",\n c.global_slot_since_hard_fork AS \"global_slot_since_hard_fork!\",\n c.parent_id\n FROM \n chain c\n JOIN \n public_keys pk_creator ON c.creator_id = pk_creator.id\n JOIN \n public_keys pk_winner ON c.block_winner_id = pk_winner.id\n WHERE \n c.global_slot_since_hard_fork BETWEEN $2 AND $3", "describe": { "columns": [ { @@ -85,5 +85,5 @@ null ] }, - "hash": "9114bb761d7026cd21152c8ba5bd640de95a0767a7009dd3acc8344895079628" + "hash": 
"d2cfd7b40cf0c5af718390dc2e57b46a82306adc0993d1f0de8783a60d8aa9a7" } diff --git a/producer-dashboard/Cargo.toml b/producer-dashboard/Cargo.toml index cb046b2cf1..69a56d0d27 100644 --- a/producer-dashboard/Cargo.toml +++ b/producer-dashboard/Cargo.toml @@ -1,12 +1,12 @@ [package] name = "openmina-producer-dashboard" -version = "0.13.0" +version = "0.14.0" edition = "2021" license = "Apache-2.0" [[bin]] name = "openmina-producer-dashboard" -path = "src/main.rs" +path = "src/bin/producer_dashboard.rs" [dependencies] tokio = { version = "1", features = ["full"] } @@ -23,7 +23,7 @@ openmina-node-account = { workspace = true } mina-p2p-messages = { workspace = true } time = {version = "0.3", features = ["formatting", "parsing"]} clap = { version = "4.5", features = ["derive"]} -sqlx = { version = "0.7", features = [ "runtime-tokio", "postgres", "json"] } +sqlx = { version = "0.8", features = [ "runtime-tokio", "postgres", "json"] } dotenvy = "0.15" sled = "0.34" bincode = "1.3.3" diff --git a/producer-dashboard/src/archive/mod.rs b/producer-dashboard/src/archive/mod.rs index ff5c4eae02..ec00c1bb1d 100644 --- a/producer-dashboard/src/archive/mod.rs +++ b/producer-dashboard/src/archive/mod.rs @@ -1,6 +1,9 @@ +use postgres_types::ChainStatus; use serde::{Deserialize, Serialize}; use sqlx::PgPool; +pub mod postgres_types; +pub mod raw_types; pub mod watchdog; #[derive(Debug, Clone)] @@ -8,42 +11,34 @@ pub struct ArchiveConnector { pool: PgPool, } +pub enum ArchiveUrl { + Url(String), + Env, +} + impl ArchiveConnector { - pub async fn connect() -> Self { - // TODO(adonagy): unwrap - let db_url = if let Ok(url) = dotenvy::var("DATABASE_URL") { - url - } else { - std::env::var("DATABASE_URL").expect("No db url found, check env var DATABASE_URL") + pub async fn connect(postgres_url: ArchiveUrl) -> Self { + let db_url = match postgres_url { + ArchiveUrl::Url(url) => url, + ArchiveUrl::Env => { + if let Ok(url) = dotenvy::var("DATABASE_URL") { + url + } else { + 
std::env::var("DATABASE_URL") + .expect("No db url found, check env var DATABASE_URL") + } + } }; - + // TODO(adonagy): unwrap let pool = PgPool::connect(&db_url).await.unwrap(); Self { pool } } pub async fn _get_producer_blocks(&self, producer_pk: &str) -> Result, sqlx::Error> { - sqlx::query_as!( + sqlx::query_file_as!( Block, - r#"SELECT - b.id, - b.state_hash, - b.height, - b.timestamp, - b.chain_status AS "chain_status: ChainStatus", - pk_creator.value AS "creator_key", - pk_winner.value AS "winner_key", - b.global_slot_since_genesis, - b.global_slot_since_hard_fork, - b.parent_id - FROM - blocks b - JOIN - public_keys pk_creator ON b.creator_id = pk_creator.id - JOIN - public_keys pk_winner ON b.block_winner_id = pk_winner.id - WHERE - pk_creator.value = $1"#, + "src/archive/sql/query_producer_blocks.sql", producer_pk ) .fetch_all(&self.pool) @@ -55,27 +50,9 @@ impl ArchiveConnector { start_slot: i64, finish_slot: i64, ) -> Result, sqlx::Error> { - sqlx::query_as!( + sqlx::query_file_as!( Block, - r#"SELECT - b.id, - b.state_hash, - b.height, - b.timestamp, - b.chain_status AS "chain_status: ChainStatus", - pk_creator.value AS "creator_key", - pk_winner.value AS "winner_key", - b.global_slot_since_genesis, - b.global_slot_since_hard_fork, - b.parent_id - FROM - blocks b - JOIN - public_keys pk_creator ON b.creator_id = pk_creator.id - JOIN - public_keys pk_winner ON b.block_winner_id = pk_winner.id - WHERE - b.global_slot_since_hard_fork BETWEEN $1 AND $2"#, + "src/archive/sql/query_blocks_in_slot_range.sql", start_slot, finish_slot ) @@ -89,38 +66,9 @@ impl ArchiveConnector { finish_slot: i64, best_tip_hash: String, ) -> Result, sqlx::Error> { - sqlx::query_as!( + sqlx::query_file_as!( Block, - r#"WITH RECURSIVE chain AS ( - (SELECT * FROM blocks WHERE state_hash = $1) - - UNION ALL - - SELECT b.* FROM blocks b - INNER JOIN chain - ON b.id = chain.parent_id AND chain.id <> chain.parent_id - ) - - SELECT - c.id AS "id!", - c.state_hash AS "state_hash!", - 
c.height AS "height!", - c.timestamp AS "timestamp!", - c.chain_status AS "chain_status!: ChainStatus", - pk_creator.value AS "creator_key", - pk_winner.value AS "winner_key", - c.global_slot_since_genesis AS "global_slot_since_genesis!", - c.global_slot_since_hard_fork AS "global_slot_since_hard_fork!", - c.parent_id - FROM - chain c - JOIN - public_keys pk_creator ON c.creator_id = pk_creator.id - JOIN - public_keys pk_winner ON c.block_winner_id = pk_winner.id - WHERE - c.global_slot_since_hard_fork BETWEEN $2 AND $3 - "#, + "src/archive/sql/query_canonical_chain.sql", best_tip_hash, start_slot, finish_slot @@ -128,14 +76,34 @@ impl ArchiveConnector { .fetch_all(&self.pool) .await } + + pub async fn get_last_canonical_blocks( + &self, + best_tip_hash: String, + limit: i64, + ) -> Result, sqlx::Error> { + sqlx::query_file_as!( + Block, + "src/archive/sql/query_last_canonical_blocks.sql", + best_tip_hash, + limit + ) + .fetch_all(&self.pool) + .await + } + + pub async fn get_latest_block(&self) -> Result { + let block = sqlx::query_file_as!(LatestBlock, "src/archive/sql/query_latest_block.sql") + .fetch_one(&self.pool) + .await?; + + Ok(block.state_hash) + } } -#[derive(sqlx::Type, Debug, Clone, Serialize, Deserialize, PartialEq)] -#[sqlx(type_name = "chain_status_type", rename_all = "lowercase")] -pub enum ChainStatus { - Canonical, - Orphaned, - Pending, +pub type StateHash = String; +struct LatestBlock { + state_hash: String, } #[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] @@ -164,7 +132,7 @@ mod test { #[tokio::test] async fn test() { - let db = ArchiveConnector::connect().await; + let db = ArchiveConnector::connect(ArchiveUrl::Env).await; let blocks = db ._get_producer_blocks("B62qkPpK6z4ktWjxcmFzM4cFWjWLzrjNh6USjUMiYGcF3YAVbdo2p4H") diff --git a/producer-dashboard/src/archive/postgres_types.rs b/producer-dashboard/src/archive/postgres_types.rs new file mode 100644 index 0000000000..b5e1d166a8 --- /dev/null +++ 
b/producer-dashboard/src/archive/postgres_types.rs @@ -0,0 +1,59 @@ +use serde::{Deserialize, Serialize}; + +#[derive(sqlx::Type, Debug, Clone, Serialize, Deserialize, PartialEq)] +#[sqlx(type_name = "chain_status_type", rename_all = "snake_case")] +pub enum ChainStatus { + Canonical, + Orphaned, + Pending, +} + +#[derive(sqlx::Type, Debug, Clone, Serialize, Deserialize, PartialEq)] +#[sqlx(type_name = "authorization_kind_type")] +pub enum AuthorizationKind { + #[sqlx(rename = "None_given")] + NoneGiven, + Signature, + Proof, +} + +#[derive(sqlx::Type, Debug, Clone, Serialize, Deserialize, PartialEq)] +#[sqlx(type_name = "transaction_status_type", rename_all = "snake_case")] +pub enum TransactionStatus { + Applied, + Failed, +} + +#[derive(sqlx::Type, Debug, Clone, Serialize, Deserialize, PartialEq)] +#[sqlx(type_name = "internal_command_type", rename_all = "snake_case")] +pub enum InternalCommandType { + FeeTransferViaCoinbase, + FeeTransfer, + Coinbase, +} + +#[derive(sqlx::Type, Debug, Clone, Serialize, Deserialize, PartialEq)] +#[sqlx(type_name = "may_use_token")] +pub enum MayUseToken { + No, + ParentsOwnToken, + InheritFromParent, +} + +#[derive(sqlx::Type, Debug, Clone, Serialize, Deserialize, PartialEq)] +#[sqlx(type_name = "user_command_type", rename_all = "snake_case")] +pub enum UserCommandType { + Payment, + Delegation, +} + +#[derive(sqlx::Type, Debug, Clone, Serialize, Deserialize, PartialEq)] +#[sqlx(type_name = "zkapp_auth_required_type", rename_all = "snake_case")] +pub enum ZkappAuthRequiredType { + None, + Either, + Proof, + Signature, + Both, + Impossible, +} diff --git a/producer-dashboard/src/archive/raw_types.rs b/producer-dashboard/src/archive/raw_types.rs new file mode 100644 index 0000000000..e07f788a43 --- /dev/null +++ b/producer-dashboard/src/archive/raw_types.rs @@ -0,0 +1,464 @@ +use serde::{Deserialize, Serialize}; + +use super::{ + postgres_types::{ + AuthorizationKind, InternalCommandType, MayUseToken, TransactionStatus, 
UserCommandType, + ZkappAuthRequiredType, + }, + ArchiveConnector, ArchiveUrl, ChainStatus, +}; + +// macro_rules! define_fetch_all { +// ($fn_name:ident, $struct_name:ty, $sql_file:expr) => { +// pub async fn $fn_name(&self) -> Result, sqlx::Error> { +// sqlx::query_file_as!($struct_name, $sql_file) +// .fetch_all(&self.inner.pool) +// .await +// } +// }; +// } + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub struct RawBlock { + id: i32, + state_hash: String, + parent_id: Option, + parent_hash: String, + creator_id: i32, + block_winner_id: i32, + last_vrf_output: String, + snarked_ledger_hash_id: i32, + staking_epoch_data_id: i32, + next_epoch_data_id: i32, + min_window_density: i64, + sub_window_densities: Vec, + total_currency: String, + ledger_hash: String, + height: i64, + global_slot_since_hard_fork: i64, + global_slot_since_genesis: i64, + protocol_version_id: i32, + proposed_protocol_version_id: Option, + timestamp: String, + chain_status: ChainStatus, +} + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub struct RawAccountIdentifier { + id: i32, + public_key_id: i32, + token_id: i32, +} + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub struct RawAccountsAccessed { + ledger_index: i32, + block_id: i32, + account_identifier_id: i32, + token_symbol_id: i32, + balance: String, + nonce: i64, + receipt_chain_hash: String, + delegate_id: Option, + voting_for_id: i32, + timing_id: Option, + permissions_id: i32, + zkapp_id: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub struct RawAccountsCreated { + block_id: i32, + account_identifier_id: i32, + creation_fee: String, +} + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub struct RawBlocksInternalCommands { + block_id: i32, + internal_command_id: i32, + sequence_no: i32, + secondary_sequence_no: i32, + status: TransactionStatus, + failure_reason: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub 
struct RawBlocksUserCommands { + block_id: i32, + user_command_id: i32, + sequence_no: i32, + status: TransactionStatus, + failure_reason: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub struct RawBlocksZkappCommands { + block_id: i32, + zkapp_command_id: i32, + sequence_no: i32, + status: TransactionStatus, + failure_reasons_ids: Option>, +} + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub struct RawEpochData { + id: i32, + seed: String, + ledger_hash_id: i32, + total_currency: String, + start_checkpoint: String, + lock_checkpoint: String, + epoch_length: i64, +} + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub struct RawInternalCommands { + id: i32, + command_type: InternalCommandType, + receiver_id: i32, + fee: String, + hash: String, +} + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub struct RawProtocolVersion { + id: i32, + transaction: i32, + network: i32, + patch: i32, +} + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub struct RawPublicKeys { + id: i32, + value: String, +} + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub struct RawSnarkedLedgerHashes { + id: i32, + value: String, +} + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub struct RawTimingInfo { + id: i32, + account_identifier_id: i32, + initial_minimum_balance: String, + cliff_time: i64, + cliff_amount: String, + vesting_period: i64, + vesting_increment: String, +} + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub struct RawTokenSymbols { + id: i32, + value: String, +} + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub struct RawTokens { + id: i32, + value: String, + owner_public_key_id: Option, + owner_token_id: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub struct RawUserCommands { + id: i32, + command_type: UserCommandType, + fee_payer_id: i32, + source_id: i32, + receiver_id: i32, + nonce: i64, + 
amount: Option, + fee: String, + valid_until: Option, + memo: String, + hash: String, +} + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub struct RawVotingFor { + id: i32, + value: String, +} + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub struct RawZkappAccountPrecondition { + id: i32, + balance_id: Option, + nonce_id: Option, + receipt_chain_hash: Option, + delegate_id: Option, + state_id: i32, + action_state_id: Option, + proved_state: Option, + is_new: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub struct RawZkappAccountUpdate { + id: i32, + body_id: i32, +} + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub struct RawZkappAccountUpdateBody { + id: i32, + account_identifier_id: i32, + update_id: i32, + balance_change: String, + increment_nonce: bool, + events_id: i32, + actions_id: i32, + call_data_id: i32, + call_depth: i32, + zkapp_network_precondition_id: i32, + zkapp_account_precondition_id: i32, + zkapp_valid_while_precondition_id: Option, + use_full_commitment: bool, + implicit_account_creation_fee: bool, + may_use_token: MayUseToken, + authorization_kind: AuthorizationKind, + verification_key_hash_id: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub struct RawZkappAccountUpdateFailure { + id: i32, + index: i32, + failures: Vec, +} + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub struct RawZkappAccount { + id: i32, + app_state_id: i32, + verification_key_id: Option, + zkapp_version: i64, + action_state_id: i32, + last_action_slot: i64, + proved_state: bool, + zkapp_uri_id: i32, +} + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub struct RawZkappActionState { + id: i32, + element0: i32, + element1: i32, + element2: i32, + element3: i32, + element4: i32, +} + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub struct RawZkappAmountBounds { + id: i32, + amount_lower_bound: String, + 
amount_upper_bound: String, +} + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub struct RawZkappBalanceBounds { + id: i32, + balance_lower_bound: String, + balance_upper_bound: String, +} + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub struct RawZkappCommands { + id: i32, + zkapp_fee_payer_body_id: i32, + zkapp_account_updates_ids: Vec, + memo: String, + hash: String, +} + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub struct RawZkappEpochData { + id: i32, + epoch_ledger_id: Option, + epoch_seed: Option, + start_checkpoint: Option, + lock_checkpoint: Option, + epoch_length_id: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub struct RawZkappEpochLedger { + id: i32, + hash_id: Option, + total_currency_id: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub struct RawZkappEvents { + id: i32, + element_ids: Vec, +} + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub struct RawZkappFeePayerBody { + id: i32, + public_key_id: i32, + fee: String, + valid_until: Option, + nonce: i64, +} + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub struct RawZkappField { + id: i32, + field: String, +} + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub struct RawZkappFieldArray { + id: i32, + element_ids: Vec, +} + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub struct RawZkappGlobalSlotBounds { + id: i32, + global_slot_lower_bound: i64, + global_slot_upper_bound: i64, +} + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub struct RawZkappLengthBounds { + id: i32, + length_lower_bound: i64, + length_upper_bound: i64, +} + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub struct RawZkappNetworkPrecondition { + id: i32, + snarked_ledger_hash_id: Option, + blockchain_length_id: Option, + min_window_density_id: Option, + total_currency_id: Option, + global_slot_since_genesis: Option, + 
staking_epoch_data_id: Option, + next_epoch_data_id: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub struct RawZkappNonceBounds { + id: i32, + nonce_lower_bound: i64, + nonce_upper_bound: i64, +} + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub struct RawZkappPermissions { + id: i32, + edit_state: ZkappAuthRequiredType, + send: ZkappAuthRequiredType, + receive: ZkappAuthRequiredType, + access: ZkappAuthRequiredType, + set_delegate: ZkappAuthRequiredType, + set_permissions: ZkappAuthRequiredType, + set_verification_key_auth: ZkappAuthRequiredType, + set_verification_key_txn_version: i32, + set_zkapp_uri: ZkappAuthRequiredType, + edit_action_state: ZkappAuthRequiredType, + set_token_symbol: ZkappAuthRequiredType, + increment_nonce: ZkappAuthRequiredType, + set_voting_for: ZkappAuthRequiredType, + set_timing: ZkappAuthRequiredType, +} + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub struct RawZkappStates { + id: i32, + element0: i32, + element1: i32, + element2: i32, + element3: i32, + element4: i32, + element5: i32, + element6: i32, + element7: i32, +} + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub struct RawZkappStatesNullable { + id: i32, + element0: Option, + element1: Option, + element2: Option, + element3: Option, + element4: Option, + element5: Option, + element6: Option, + element7: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub struct RawZkappTimingInfo { + id: i32, + initial_minimum_balance: String, + cliff_time: i64, + cliff_amount: String, + vesting_period: i64, + vesting_increment: String, +} + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub struct RawZkappTokenIdBounds { + id: i32, + token_id_lower_bound: String, + token_id_upper_bound: String, +} + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub struct RawZkappUpdates { + id: i32, + app_state_id: i32, + delegate_id: Option, + verification_key_id: Option, 
+ permissions_id: Option, + zkapp_uri_id: Option, + token_symbol_id: Option, + timing_id: Option, + voting_for_id: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub struct RawZkappUris { + id: i32, + value: String, +} + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub struct RawZkappVerificationKeyHashes { + id: i32, + value: String, +} + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub struct RawZkappVerificationKeys { + id: i32, + verification_key: String, + hash_id: i32, +} + +#[allow(dead_code)] +pub struct ArchiveConnectorForTest { + inner: ArchiveConnector, +} + +impl ArchiveConnectorForTest { + pub async fn new(url: ArchiveUrl) -> Self { + Self { + inner: ArchiveConnector::connect(url).await, + } + } +} diff --git a/producer-dashboard/src/archive/sql/archive_schema.sql b/producer-dashboard/src/archive/sql/archive_schema.sql new file mode 100644 index 0000000000..df06ad9727 --- /dev/null +++ b/producer-dashboard/src/archive/sql/archive_schema.sql @@ -0,0 +1,3406 @@ +-- +-- PostgreSQL database dump +-- + +-- Dumped from database version 16.4 (Debian 16.4-1.pgdg120+1) +-- Dumped by pg_dump version 16.4 (Debian 16.4-1.pgdg120+1) + +SET statement_timeout = 0; +SET lock_timeout = 0; +SET idle_in_transaction_session_timeout = 0; +SET client_encoding = 'UTF8'; +SET standard_conforming_strings = on; +SELECT pg_catalog.set_config('search_path', '', false); +SET check_function_bodies = false; +SET xmloption = content; +SET client_min_messages = warning; +SET row_security = off; + +-- +-- Name: authorization_kind_type; Type: TYPE; Schema: public; Owner: postgres +-- + +CREATE TYPE public.authorization_kind_type AS ENUM ( + 'None_given', + 'Signature', + 'Proof' +); + + +ALTER TYPE public.authorization_kind_type OWNER TO postgres; + +-- +-- Name: chain_status_type; Type: TYPE; Schema: public; Owner: postgres +-- + +CREATE TYPE public.chain_status_type AS ENUM ( + 'canonical', + 'orphaned', + 'pending' +); + + 
+ALTER TYPE public.chain_status_type OWNER TO postgres; + +-- +-- Name: internal_command_type; Type: TYPE; Schema: public; Owner: postgres +-- + +CREATE TYPE public.internal_command_type AS ENUM ( + 'fee_transfer_via_coinbase', + 'fee_transfer', + 'coinbase' +); + + +ALTER TYPE public.internal_command_type OWNER TO postgres; + +-- +-- Name: may_use_token; Type: TYPE; Schema: public; Owner: postgres +-- + +CREATE TYPE public.may_use_token AS ENUM ( + 'No', + 'ParentsOwnToken', + 'InheritFromParent' +); + + +ALTER TYPE public.may_use_token OWNER TO postgres; + +-- +-- Name: transaction_status; Type: TYPE; Schema: public; Owner: postgres +-- + +CREATE TYPE public.transaction_status AS ENUM ( + 'applied', + 'failed' +); + + +ALTER TYPE public.transaction_status OWNER TO postgres; + +-- +-- Name: user_command_type; Type: TYPE; Schema: public; Owner: postgres +-- + +CREATE TYPE public.user_command_type AS ENUM ( + 'payment', + 'delegation' +); + + +ALTER TYPE public.user_command_type OWNER TO postgres; + +-- +-- Name: zkapp_auth_required_type; Type: TYPE; Schema: public; Owner: postgres +-- + +CREATE TYPE public.zkapp_auth_required_type AS ENUM ( + 'none', + 'either', + 'proof', + 'signature', + 'both', + 'impossible' +); + + +ALTER TYPE public.zkapp_auth_required_type OWNER TO postgres; + +SET default_tablespace = ''; + +SET default_table_access_method = heap; + +-- +-- Name: account_identifiers; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.account_identifiers ( + id integer NOT NULL, + public_key_id integer NOT NULL, + token_id integer NOT NULL +); + + +ALTER TABLE public.account_identifiers OWNER TO postgres; + +-- +-- Name: account_identifiers_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.account_identifiers_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER SEQUENCE public.account_identifiers_id_seq OWNER TO postgres; + +-- +-- Name: 
account_identifiers_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.account_identifiers_id_seq OWNED BY public.account_identifiers.id; + + +-- +-- Name: accounts_accessed; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.accounts_accessed ( + ledger_index integer NOT NULL, + block_id integer NOT NULL, + account_identifier_id integer NOT NULL, + token_symbol_id integer NOT NULL, + balance text NOT NULL, + nonce bigint NOT NULL, + receipt_chain_hash text NOT NULL, + delegate_id integer, + voting_for_id integer NOT NULL, + timing_id integer, + permissions_id integer NOT NULL, + zkapp_id integer +); + + +ALTER TABLE public.accounts_accessed OWNER TO postgres; + +-- +-- Name: accounts_created; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.accounts_created ( + block_id integer NOT NULL, + account_identifier_id integer NOT NULL, + creation_fee text NOT NULL +); + + +ALTER TABLE public.accounts_created OWNER TO postgres; + +-- +-- Name: blocks; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.blocks ( + id integer NOT NULL, + state_hash text NOT NULL, + parent_id integer, + parent_hash text NOT NULL, + creator_id integer NOT NULL, + block_winner_id integer NOT NULL, + last_vrf_output text NOT NULL, + snarked_ledger_hash_id integer NOT NULL, + staking_epoch_data_id integer NOT NULL, + next_epoch_data_id integer NOT NULL, + min_window_density bigint NOT NULL, + sub_window_densities bigint[] NOT NULL, + total_currency text NOT NULL, + ledger_hash text NOT NULL, + height bigint NOT NULL, + global_slot_since_hard_fork bigint NOT NULL, + global_slot_since_genesis bigint NOT NULL, + protocol_version_id integer NOT NULL, + proposed_protocol_version_id integer, + "timestamp" text NOT NULL, + chain_status public.chain_status_type NOT NULL +); + + +ALTER TABLE public.blocks OWNER TO postgres; + +-- +-- Name: blocks_id_seq; Type: SEQUENCE; Schema: public; Owner: 
postgres +-- + +CREATE SEQUENCE public.blocks_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER SEQUENCE public.blocks_id_seq OWNER TO postgres; + +-- +-- Name: blocks_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.blocks_id_seq OWNED BY public.blocks.id; + + +-- +-- Name: blocks_internal_commands; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.blocks_internal_commands ( + block_id integer NOT NULL, + internal_command_id integer NOT NULL, + sequence_no integer NOT NULL, + secondary_sequence_no integer NOT NULL, + status public.transaction_status NOT NULL, + failure_reason text +); + + +ALTER TABLE public.blocks_internal_commands OWNER TO postgres; + +-- +-- Name: blocks_user_commands; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.blocks_user_commands ( + block_id integer NOT NULL, + user_command_id integer NOT NULL, + sequence_no integer NOT NULL, + status public.transaction_status NOT NULL, + failure_reason text +); + + +ALTER TABLE public.blocks_user_commands OWNER TO postgres; + +-- +-- Name: blocks_zkapp_commands; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.blocks_zkapp_commands ( + block_id integer NOT NULL, + zkapp_command_id integer NOT NULL, + sequence_no integer NOT NULL, + status public.transaction_status NOT NULL, + failure_reasons_ids integer[] +); + + +ALTER TABLE public.blocks_zkapp_commands OWNER TO postgres; + +-- +-- Name: epoch_data; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.epoch_data ( + id integer NOT NULL, + seed text NOT NULL, + ledger_hash_id integer NOT NULL, + total_currency text NOT NULL, + start_checkpoint text NOT NULL, + lock_checkpoint text NOT NULL, + epoch_length bigint NOT NULL +); + + +ALTER TABLE public.epoch_data OWNER TO postgres; + +-- +-- Name: epoch_data_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + 
+CREATE SEQUENCE public.epoch_data_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER SEQUENCE public.epoch_data_id_seq OWNER TO postgres; + +-- +-- Name: epoch_data_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.epoch_data_id_seq OWNED BY public.epoch_data.id; + + +-- +-- Name: internal_commands; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.internal_commands ( + id integer NOT NULL, + command_type public.internal_command_type NOT NULL, + receiver_id integer NOT NULL, + fee text NOT NULL, + hash text NOT NULL +); + + +ALTER TABLE public.internal_commands OWNER TO postgres; + +-- +-- Name: internal_commands_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.internal_commands_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER SEQUENCE public.internal_commands_id_seq OWNER TO postgres; + +-- +-- Name: internal_commands_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.internal_commands_id_seq OWNED BY public.internal_commands.id; + + +-- +-- Name: protocol_versions; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.protocol_versions ( + id integer NOT NULL, + transaction integer NOT NULL, + network integer NOT NULL, + patch integer NOT NULL +); + + +ALTER TABLE public.protocol_versions OWNER TO postgres; + +-- +-- Name: protocol_versions_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.protocol_versions_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER SEQUENCE public.protocol_versions_id_seq OWNER TO postgres; + +-- +-- Name: protocol_versions_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.protocol_versions_id_seq OWNED BY public.protocol_versions.id; + + +-- 
+-- Name: public_keys; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.public_keys ( + id integer NOT NULL, + value text NOT NULL +); + + +ALTER TABLE public.public_keys OWNER TO postgres; + +-- +-- Name: public_keys_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.public_keys_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER SEQUENCE public.public_keys_id_seq OWNER TO postgres; + +-- +-- Name: public_keys_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.public_keys_id_seq OWNED BY public.public_keys.id; + + +-- +-- Name: snarked_ledger_hashes; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.snarked_ledger_hashes ( + id integer NOT NULL, + value text NOT NULL +); + + +ALTER TABLE public.snarked_ledger_hashes OWNER TO postgres; + +-- +-- Name: snarked_ledger_hashes_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.snarked_ledger_hashes_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER SEQUENCE public.snarked_ledger_hashes_id_seq OWNER TO postgres; + +-- +-- Name: snarked_ledger_hashes_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.snarked_ledger_hashes_id_seq OWNED BY public.snarked_ledger_hashes.id; + + +-- +-- Name: timing_info; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.timing_info ( + id integer NOT NULL, + account_identifier_id integer NOT NULL, + initial_minimum_balance text NOT NULL, + cliff_time bigint NOT NULL, + cliff_amount text NOT NULL, + vesting_period bigint NOT NULL, + vesting_increment text NOT NULL +); + + +ALTER TABLE public.timing_info OWNER TO postgres; + +-- +-- Name: timing_info_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.timing_info_id_seq + AS integer + START 
WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER SEQUENCE public.timing_info_id_seq OWNER TO postgres; + +-- +-- Name: timing_info_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.timing_info_id_seq OWNED BY public.timing_info.id; + + +-- +-- Name: token_symbols; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.token_symbols ( + id integer NOT NULL, + value text NOT NULL +); + + +ALTER TABLE public.token_symbols OWNER TO postgres; + +-- +-- Name: token_symbols_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.token_symbols_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER SEQUENCE public.token_symbols_id_seq OWNER TO postgres; + +-- +-- Name: token_symbols_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.token_symbols_id_seq OWNED BY public.token_symbols.id; + + +-- +-- Name: tokens; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.tokens ( + id integer NOT NULL, + value text NOT NULL, + owner_public_key_id integer, + owner_token_id integer +); + + +ALTER TABLE public.tokens OWNER TO postgres; + +-- +-- Name: tokens_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.tokens_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER SEQUENCE public.tokens_id_seq OWNER TO postgres; + +-- +-- Name: tokens_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.tokens_id_seq OWNED BY public.tokens.id; + + +-- +-- Name: user_commands; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.user_commands ( + id integer NOT NULL, + command_type public.user_command_type NOT NULL, + fee_payer_id integer NOT NULL, + source_id integer NOT NULL, + receiver_id integer NOT NULL, + nonce bigint NOT NULL, + 
amount text, + fee text NOT NULL, + valid_until bigint, + memo text NOT NULL, + hash text NOT NULL +); + + +ALTER TABLE public.user_commands OWNER TO postgres; + +-- +-- Name: user_commands_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.user_commands_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER SEQUENCE public.user_commands_id_seq OWNER TO postgres; + +-- +-- Name: user_commands_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.user_commands_id_seq OWNED BY public.user_commands.id; + + +-- +-- Name: voting_for; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.voting_for ( + id integer NOT NULL, + value text NOT NULL +); + + +ALTER TABLE public.voting_for OWNER TO postgres; + +-- +-- Name: voting_for_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.voting_for_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER SEQUENCE public.voting_for_id_seq OWNER TO postgres; + +-- +-- Name: voting_for_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.voting_for_id_seq OWNED BY public.voting_for.id; + + +-- +-- Name: zkapp_account_precondition; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.zkapp_account_precondition ( + id integer NOT NULL, + balance_id integer, + nonce_id integer, + receipt_chain_hash text, + delegate_id integer, + state_id integer NOT NULL, + action_state_id integer, + proved_state boolean, + is_new boolean +); + + +ALTER TABLE public.zkapp_account_precondition OWNER TO postgres; + +-- +-- Name: zkapp_account_precondition_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.zkapp_account_precondition_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER SEQUENCE 
public.zkapp_account_precondition_id_seq OWNER TO postgres; + +-- +-- Name: zkapp_account_precondition_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.zkapp_account_precondition_id_seq OWNED BY public.zkapp_account_precondition.id; + + +-- +-- Name: zkapp_account_update; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.zkapp_account_update ( + id integer NOT NULL, + body_id integer NOT NULL +); + + +ALTER TABLE public.zkapp_account_update OWNER TO postgres; + +-- +-- Name: zkapp_account_update_body; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.zkapp_account_update_body ( + id integer NOT NULL, + account_identifier_id integer NOT NULL, + update_id integer NOT NULL, + balance_change text NOT NULL, + increment_nonce boolean NOT NULL, + events_id integer NOT NULL, + actions_id integer NOT NULL, + call_data_id integer NOT NULL, + call_depth integer NOT NULL, + zkapp_network_precondition_id integer NOT NULL, + zkapp_account_precondition_id integer NOT NULL, + zkapp_valid_while_precondition_id integer, + use_full_commitment boolean NOT NULL, + implicit_account_creation_fee boolean NOT NULL, + may_use_token public.may_use_token NOT NULL, + authorization_kind public.authorization_kind_type NOT NULL, + verification_key_hash_id integer +); + + +ALTER TABLE public.zkapp_account_update_body OWNER TO postgres; + +-- +-- Name: zkapp_account_update_body_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.zkapp_account_update_body_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER SEQUENCE public.zkapp_account_update_body_id_seq OWNER TO postgres; + +-- +-- Name: zkapp_account_update_body_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.zkapp_account_update_body_id_seq OWNED BY public.zkapp_account_update_body.id; + + +-- +-- Name: 
zkapp_account_update_failures; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.zkapp_account_update_failures ( + id integer NOT NULL, + index integer NOT NULL, + failures text[] NOT NULL +); + + +ALTER TABLE public.zkapp_account_update_failures OWNER TO postgres; + +-- +-- Name: zkapp_account_update_failures_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.zkapp_account_update_failures_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER SEQUENCE public.zkapp_account_update_failures_id_seq OWNER TO postgres; + +-- +-- Name: zkapp_account_update_failures_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.zkapp_account_update_failures_id_seq OWNED BY public.zkapp_account_update_failures.id; + + +-- +-- Name: zkapp_account_update_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.zkapp_account_update_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER SEQUENCE public.zkapp_account_update_id_seq OWNER TO postgres; + +-- +-- Name: zkapp_account_update_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.zkapp_account_update_id_seq OWNED BY public.zkapp_account_update.id; + + +-- +-- Name: zkapp_accounts; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.zkapp_accounts ( + id integer NOT NULL, + app_state_id integer NOT NULL, + verification_key_id integer, + zkapp_version bigint NOT NULL, + action_state_id integer NOT NULL, + last_action_slot bigint NOT NULL, + proved_state boolean NOT NULL, + zkapp_uri_id integer NOT NULL +); + + +ALTER TABLE public.zkapp_accounts OWNER TO postgres; + +-- +-- Name: zkapp_accounts_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.zkapp_accounts_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO 
MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER SEQUENCE public.zkapp_accounts_id_seq OWNER TO postgres; + +-- +-- Name: zkapp_accounts_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.zkapp_accounts_id_seq OWNED BY public.zkapp_accounts.id; + + +-- +-- Name: zkapp_action_states; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.zkapp_action_states ( + id integer NOT NULL, + element0 integer NOT NULL, + element1 integer NOT NULL, + element2 integer NOT NULL, + element3 integer NOT NULL, + element4 integer NOT NULL +); + + +ALTER TABLE public.zkapp_action_states OWNER TO postgres; + +-- +-- Name: zkapp_action_states_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.zkapp_action_states_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER SEQUENCE public.zkapp_action_states_id_seq OWNER TO postgres; + +-- +-- Name: zkapp_action_states_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.zkapp_action_states_id_seq OWNED BY public.zkapp_action_states.id; + + +-- +-- Name: zkapp_amount_bounds; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.zkapp_amount_bounds ( + id integer NOT NULL, + amount_lower_bound text NOT NULL, + amount_upper_bound text NOT NULL +); + + +ALTER TABLE public.zkapp_amount_bounds OWNER TO postgres; + +-- +-- Name: zkapp_amount_bounds_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.zkapp_amount_bounds_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER SEQUENCE public.zkapp_amount_bounds_id_seq OWNER TO postgres; + +-- +-- Name: zkapp_amount_bounds_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.zkapp_amount_bounds_id_seq OWNED BY public.zkapp_amount_bounds.id; + + +-- +-- Name: zkapp_balance_bounds; Type: 
TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.zkapp_balance_bounds ( + id integer NOT NULL, + balance_lower_bound text NOT NULL, + balance_upper_bound text NOT NULL +); + + +ALTER TABLE public.zkapp_balance_bounds OWNER TO postgres; + +-- +-- Name: zkapp_balance_bounds_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.zkapp_balance_bounds_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER SEQUENCE public.zkapp_balance_bounds_id_seq OWNER TO postgres; + +-- +-- Name: zkapp_balance_bounds_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.zkapp_balance_bounds_id_seq OWNED BY public.zkapp_balance_bounds.id; + + +-- +-- Name: zkapp_commands; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.zkapp_commands ( + id integer NOT NULL, + zkapp_fee_payer_body_id integer NOT NULL, + zkapp_account_updates_ids integer[] NOT NULL, + memo text NOT NULL, + hash text NOT NULL +); + + +ALTER TABLE public.zkapp_commands OWNER TO postgres; + +-- +-- Name: zkapp_commands_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.zkapp_commands_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER SEQUENCE public.zkapp_commands_id_seq OWNER TO postgres; + +-- +-- Name: zkapp_commands_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.zkapp_commands_id_seq OWNED BY public.zkapp_commands.id; + + +-- +-- Name: zkapp_epoch_data; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.zkapp_epoch_data ( + id integer NOT NULL, + epoch_ledger_id integer, + epoch_seed text, + start_checkpoint text, + lock_checkpoint text, + epoch_length_id integer +); + + +ALTER TABLE public.zkapp_epoch_data OWNER TO postgres; + +-- +-- Name: zkapp_epoch_data_id_seq; Type: SEQUENCE; Schema: public; Owner: 
postgres +-- + +CREATE SEQUENCE public.zkapp_epoch_data_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER SEQUENCE public.zkapp_epoch_data_id_seq OWNER TO postgres; + +-- +-- Name: zkapp_epoch_data_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.zkapp_epoch_data_id_seq OWNED BY public.zkapp_epoch_data.id; + + +-- +-- Name: zkapp_epoch_ledger; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.zkapp_epoch_ledger ( + id integer NOT NULL, + hash_id integer, + total_currency_id integer +); + + +ALTER TABLE public.zkapp_epoch_ledger OWNER TO postgres; + +-- +-- Name: zkapp_epoch_ledger_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.zkapp_epoch_ledger_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER SEQUENCE public.zkapp_epoch_ledger_id_seq OWNER TO postgres; + +-- +-- Name: zkapp_epoch_ledger_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.zkapp_epoch_ledger_id_seq OWNED BY public.zkapp_epoch_ledger.id; + + +-- +-- Name: zkapp_events; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.zkapp_events ( + id integer NOT NULL, + element_ids integer[] NOT NULL +); + + +ALTER TABLE public.zkapp_events OWNER TO postgres; + +-- +-- Name: zkapp_events_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.zkapp_events_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER SEQUENCE public.zkapp_events_id_seq OWNER TO postgres; + +-- +-- Name: zkapp_events_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.zkapp_events_id_seq OWNED BY public.zkapp_events.id; + + +-- +-- Name: zkapp_fee_payer_body; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.zkapp_fee_payer_body ( 
+ id integer NOT NULL, + public_key_id integer NOT NULL, + fee text NOT NULL, + valid_until bigint, + nonce bigint NOT NULL +); + + +ALTER TABLE public.zkapp_fee_payer_body OWNER TO postgres; + +-- +-- Name: zkapp_fee_payer_body_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.zkapp_fee_payer_body_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER SEQUENCE public.zkapp_fee_payer_body_id_seq OWNER TO postgres; + +-- +-- Name: zkapp_fee_payer_body_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.zkapp_fee_payer_body_id_seq OWNED BY public.zkapp_fee_payer_body.id; + + +-- +-- Name: zkapp_field; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.zkapp_field ( + id integer NOT NULL, + field text NOT NULL +); + + +ALTER TABLE public.zkapp_field OWNER TO postgres; + +-- +-- Name: zkapp_field_array; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.zkapp_field_array ( + id integer NOT NULL, + element_ids integer[] NOT NULL +); + + +ALTER TABLE public.zkapp_field_array OWNER TO postgres; + +-- +-- Name: zkapp_field_array_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.zkapp_field_array_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER SEQUENCE public.zkapp_field_array_id_seq OWNER TO postgres; + +-- +-- Name: zkapp_field_array_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.zkapp_field_array_id_seq OWNED BY public.zkapp_field_array.id; + + +-- +-- Name: zkapp_field_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.zkapp_field_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER SEQUENCE public.zkapp_field_id_seq OWNER TO postgres; + +-- +-- Name: zkapp_field_id_seq; Type: SEQUENCE 
OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.zkapp_field_id_seq OWNED BY public.zkapp_field.id; + + +-- +-- Name: zkapp_global_slot_bounds; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.zkapp_global_slot_bounds ( + id integer NOT NULL, + global_slot_lower_bound bigint NOT NULL, + global_slot_upper_bound bigint NOT NULL +); + + +ALTER TABLE public.zkapp_global_slot_bounds OWNER TO postgres; + +-- +-- Name: zkapp_global_slot_bounds_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.zkapp_global_slot_bounds_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER SEQUENCE public.zkapp_global_slot_bounds_id_seq OWNER TO postgres; + +-- +-- Name: zkapp_global_slot_bounds_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.zkapp_global_slot_bounds_id_seq OWNED BY public.zkapp_global_slot_bounds.id; + + +-- +-- Name: zkapp_length_bounds; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.zkapp_length_bounds ( + id integer NOT NULL, + length_lower_bound bigint NOT NULL, + length_upper_bound bigint NOT NULL +); + + +ALTER TABLE public.zkapp_length_bounds OWNER TO postgres; + +-- +-- Name: zkapp_length_bounds_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.zkapp_length_bounds_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER SEQUENCE public.zkapp_length_bounds_id_seq OWNER TO postgres; + +-- +-- Name: zkapp_length_bounds_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.zkapp_length_bounds_id_seq OWNED BY public.zkapp_length_bounds.id; + + +-- +-- Name: zkapp_network_precondition; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.zkapp_network_precondition ( + id integer NOT NULL, + snarked_ledger_hash_id integer, + 
blockchain_length_id integer, + min_window_density_id integer, + total_currency_id integer, + global_slot_since_genesis integer, + staking_epoch_data_id integer, + next_epoch_data_id integer +); + + +ALTER TABLE public.zkapp_network_precondition OWNER TO postgres; + +-- +-- Name: zkapp_network_precondition_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.zkapp_network_precondition_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER SEQUENCE public.zkapp_network_precondition_id_seq OWNER TO postgres; + +-- +-- Name: zkapp_network_precondition_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.zkapp_network_precondition_id_seq OWNED BY public.zkapp_network_precondition.id; + + +-- +-- Name: zkapp_nonce_bounds; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.zkapp_nonce_bounds ( + id integer NOT NULL, + nonce_lower_bound bigint NOT NULL, + nonce_upper_bound bigint NOT NULL +); + + +ALTER TABLE public.zkapp_nonce_bounds OWNER TO postgres; + +-- +-- Name: zkapp_nonce_bounds_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.zkapp_nonce_bounds_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER SEQUENCE public.zkapp_nonce_bounds_id_seq OWNER TO postgres; + +-- +-- Name: zkapp_nonce_bounds_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.zkapp_nonce_bounds_id_seq OWNED BY public.zkapp_nonce_bounds.id; + + +-- +-- Name: zkapp_permissions; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.zkapp_permissions ( + id integer NOT NULL, + edit_state public.zkapp_auth_required_type NOT NULL, + send public.zkapp_auth_required_type NOT NULL, + receive public.zkapp_auth_required_type NOT NULL, + access public.zkapp_auth_required_type NOT NULL, + set_delegate 
public.zkapp_auth_required_type NOT NULL, + set_permissions public.zkapp_auth_required_type NOT NULL, + set_verification_key_auth public.zkapp_auth_required_type NOT NULL, + set_verification_key_txn_version integer NOT NULL, + set_zkapp_uri public.zkapp_auth_required_type NOT NULL, + edit_action_state public.zkapp_auth_required_type NOT NULL, + set_token_symbol public.zkapp_auth_required_type NOT NULL, + increment_nonce public.zkapp_auth_required_type NOT NULL, + set_voting_for public.zkapp_auth_required_type NOT NULL, + set_timing public.zkapp_auth_required_type NOT NULL +); + + +ALTER TABLE public.zkapp_permissions OWNER TO postgres; + +-- +-- Name: zkapp_permissions_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.zkapp_permissions_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER SEQUENCE public.zkapp_permissions_id_seq OWNER TO postgres; + +-- +-- Name: zkapp_permissions_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.zkapp_permissions_id_seq OWNED BY public.zkapp_permissions.id; + + +-- +-- Name: zkapp_states; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.zkapp_states ( + id integer NOT NULL, + element0 integer NOT NULL, + element1 integer NOT NULL, + element2 integer NOT NULL, + element3 integer NOT NULL, + element4 integer NOT NULL, + element5 integer NOT NULL, + element6 integer NOT NULL, + element7 integer NOT NULL +); + + +ALTER TABLE public.zkapp_states OWNER TO postgres; + +-- +-- Name: zkapp_states_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.zkapp_states_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER SEQUENCE public.zkapp_states_id_seq OWNER TO postgres; + +-- +-- Name: zkapp_states_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.zkapp_states_id_seq 
OWNED BY public.zkapp_states.id; + + +-- +-- Name: zkapp_states_nullable; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.zkapp_states_nullable ( + id integer NOT NULL, + element0 integer, + element1 integer, + element2 integer, + element3 integer, + element4 integer, + element5 integer, + element6 integer, + element7 integer +); + + +ALTER TABLE public.zkapp_states_nullable OWNER TO postgres; + +-- +-- Name: zkapp_states_nullable_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.zkapp_states_nullable_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER SEQUENCE public.zkapp_states_nullable_id_seq OWNER TO postgres; + +-- +-- Name: zkapp_states_nullable_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.zkapp_states_nullable_id_seq OWNED BY public.zkapp_states_nullable.id; + + +-- +-- Name: zkapp_timing_info; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.zkapp_timing_info ( + id integer NOT NULL, + initial_minimum_balance text NOT NULL, + cliff_time bigint NOT NULL, + cliff_amount text NOT NULL, + vesting_period bigint NOT NULL, + vesting_increment text NOT NULL +); + + +ALTER TABLE public.zkapp_timing_info OWNER TO postgres; + +-- +-- Name: zkapp_timing_info_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.zkapp_timing_info_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER SEQUENCE public.zkapp_timing_info_id_seq OWNER TO postgres; + +-- +-- Name: zkapp_timing_info_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.zkapp_timing_info_id_seq OWNED BY public.zkapp_timing_info.id; + + +-- +-- Name: zkapp_token_id_bounds; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.zkapp_token_id_bounds ( + id integer NOT NULL, + token_id_lower_bound 
text NOT NULL, + token_id_upper_bound text NOT NULL +); + + +ALTER TABLE public.zkapp_token_id_bounds OWNER TO postgres; + +-- +-- Name: zkapp_token_id_bounds_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.zkapp_token_id_bounds_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER SEQUENCE public.zkapp_token_id_bounds_id_seq OWNER TO postgres; + +-- +-- Name: zkapp_token_id_bounds_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.zkapp_token_id_bounds_id_seq OWNED BY public.zkapp_token_id_bounds.id; + + +-- +-- Name: zkapp_updates; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.zkapp_updates ( + id integer NOT NULL, + app_state_id integer NOT NULL, + delegate_id integer, + verification_key_id integer, + permissions_id integer, + zkapp_uri_id integer, + token_symbol_id integer, + timing_id integer, + voting_for_id integer +); + + +ALTER TABLE public.zkapp_updates OWNER TO postgres; + +-- +-- Name: zkapp_updates_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.zkapp_updates_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER SEQUENCE public.zkapp_updates_id_seq OWNER TO postgres; + +-- +-- Name: zkapp_updates_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.zkapp_updates_id_seq OWNED BY public.zkapp_updates.id; + + +-- +-- Name: zkapp_uris; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.zkapp_uris ( + id integer NOT NULL, + value text NOT NULL +); + + +ALTER TABLE public.zkapp_uris OWNER TO postgres; + +-- +-- Name: zkapp_uris_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.zkapp_uris_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER SEQUENCE public.zkapp_uris_id_seq 
OWNER TO postgres; + +-- +-- Name: zkapp_uris_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.zkapp_uris_id_seq OWNED BY public.zkapp_uris.id; + + +-- +-- Name: zkapp_verification_key_hashes; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.zkapp_verification_key_hashes ( + id integer NOT NULL, + value text NOT NULL +); + + +ALTER TABLE public.zkapp_verification_key_hashes OWNER TO postgres; + +-- +-- Name: zkapp_verification_key_hashes_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.zkapp_verification_key_hashes_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER SEQUENCE public.zkapp_verification_key_hashes_id_seq OWNER TO postgres; + +-- +-- Name: zkapp_verification_key_hashes_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.zkapp_verification_key_hashes_id_seq OWNED BY public.zkapp_verification_key_hashes.id; + + +-- +-- Name: zkapp_verification_keys; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.zkapp_verification_keys ( + id integer NOT NULL, + verification_key text NOT NULL, + hash_id integer NOT NULL +); + + +ALTER TABLE public.zkapp_verification_keys OWNER TO postgres; + +-- +-- Name: zkapp_verification_keys_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.zkapp_verification_keys_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER SEQUENCE public.zkapp_verification_keys_id_seq OWNER TO postgres; + +-- +-- Name: zkapp_verification_keys_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.zkapp_verification_keys_id_seq OWNED BY public.zkapp_verification_keys.id; + + +-- +-- Name: account_identifiers id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.account_identifiers ALTER 
COLUMN id SET DEFAULT nextval('public.account_identifiers_id_seq'::regclass); + + +-- +-- Name: blocks id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.blocks ALTER COLUMN id SET DEFAULT nextval('public.blocks_id_seq'::regclass); + + +-- +-- Name: epoch_data id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.epoch_data ALTER COLUMN id SET DEFAULT nextval('public.epoch_data_id_seq'::regclass); + + +-- +-- Name: internal_commands id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.internal_commands ALTER COLUMN id SET DEFAULT nextval('public.internal_commands_id_seq'::regclass); + + +-- +-- Name: protocol_versions id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.protocol_versions ALTER COLUMN id SET DEFAULT nextval('public.protocol_versions_id_seq'::regclass); + + +-- +-- Name: public_keys id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.public_keys ALTER COLUMN id SET DEFAULT nextval('public.public_keys_id_seq'::regclass); + + +-- +-- Name: snarked_ledger_hashes id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.snarked_ledger_hashes ALTER COLUMN id SET DEFAULT nextval('public.snarked_ledger_hashes_id_seq'::regclass); + + +-- +-- Name: timing_info id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.timing_info ALTER COLUMN id SET DEFAULT nextval('public.timing_info_id_seq'::regclass); + + +-- +-- Name: token_symbols id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.token_symbols ALTER COLUMN id SET DEFAULT nextval('public.token_symbols_id_seq'::regclass); + + +-- +-- Name: tokens id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.tokens ALTER COLUMN id SET DEFAULT nextval('public.tokens_id_seq'::regclass); + + +-- +-- Name: user_commands id; Type: DEFAULT; Schema: public; Owner: postgres 
+-- + +ALTER TABLE ONLY public.user_commands ALTER COLUMN id SET DEFAULT nextval('public.user_commands_id_seq'::regclass); + + +-- +-- Name: voting_for id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.voting_for ALTER COLUMN id SET DEFAULT nextval('public.voting_for_id_seq'::regclass); + + +-- +-- Name: zkapp_account_precondition id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_account_precondition ALTER COLUMN id SET DEFAULT nextval('public.zkapp_account_precondition_id_seq'::regclass); + + +-- +-- Name: zkapp_account_update id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_account_update ALTER COLUMN id SET DEFAULT nextval('public.zkapp_account_update_id_seq'::regclass); + + +-- +-- Name: zkapp_account_update_body id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_account_update_body ALTER COLUMN id SET DEFAULT nextval('public.zkapp_account_update_body_id_seq'::regclass); + + +-- +-- Name: zkapp_account_update_failures id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_account_update_failures ALTER COLUMN id SET DEFAULT nextval('public.zkapp_account_update_failures_id_seq'::regclass); + + +-- +-- Name: zkapp_accounts id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_accounts ALTER COLUMN id SET DEFAULT nextval('public.zkapp_accounts_id_seq'::regclass); + + +-- +-- Name: zkapp_action_states id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_action_states ALTER COLUMN id SET DEFAULT nextval('public.zkapp_action_states_id_seq'::regclass); + + +-- +-- Name: zkapp_amount_bounds id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_amount_bounds ALTER COLUMN id SET DEFAULT nextval('public.zkapp_amount_bounds_id_seq'::regclass); + + +-- +-- Name: zkapp_balance_bounds id; Type: 
DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_balance_bounds ALTER COLUMN id SET DEFAULT nextval('public.zkapp_balance_bounds_id_seq'::regclass); + + +-- +-- Name: zkapp_commands id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_commands ALTER COLUMN id SET DEFAULT nextval('public.zkapp_commands_id_seq'::regclass); + + +-- +-- Name: zkapp_epoch_data id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_epoch_data ALTER COLUMN id SET DEFAULT nextval('public.zkapp_epoch_data_id_seq'::regclass); + + +-- +-- Name: zkapp_epoch_ledger id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_epoch_ledger ALTER COLUMN id SET DEFAULT nextval('public.zkapp_epoch_ledger_id_seq'::regclass); + + +-- +-- Name: zkapp_events id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_events ALTER COLUMN id SET DEFAULT nextval('public.zkapp_events_id_seq'::regclass); + + +-- +-- Name: zkapp_fee_payer_body id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_fee_payer_body ALTER COLUMN id SET DEFAULT nextval('public.zkapp_fee_payer_body_id_seq'::regclass); + + +-- +-- Name: zkapp_field id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_field ALTER COLUMN id SET DEFAULT nextval('public.zkapp_field_id_seq'::regclass); + + +-- +-- Name: zkapp_field_array id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_field_array ALTER COLUMN id SET DEFAULT nextval('public.zkapp_field_array_id_seq'::regclass); + + +-- +-- Name: zkapp_global_slot_bounds id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_global_slot_bounds ALTER COLUMN id SET DEFAULT nextval('public.zkapp_global_slot_bounds_id_seq'::regclass); + + +-- +-- Name: zkapp_length_bounds id; Type: DEFAULT; Schema: public; Owner: postgres 
+-- + +ALTER TABLE ONLY public.zkapp_length_bounds ALTER COLUMN id SET DEFAULT nextval('public.zkapp_length_bounds_id_seq'::regclass); + + +-- +-- Name: zkapp_network_precondition id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_network_precondition ALTER COLUMN id SET DEFAULT nextval('public.zkapp_network_precondition_id_seq'::regclass); + + +-- +-- Name: zkapp_nonce_bounds id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_nonce_bounds ALTER COLUMN id SET DEFAULT nextval('public.zkapp_nonce_bounds_id_seq'::regclass); + + +-- +-- Name: zkapp_permissions id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_permissions ALTER COLUMN id SET DEFAULT nextval('public.zkapp_permissions_id_seq'::regclass); + + +-- +-- Name: zkapp_states id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_states ALTER COLUMN id SET DEFAULT nextval('public.zkapp_states_id_seq'::regclass); + + +-- +-- Name: zkapp_states_nullable id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_states_nullable ALTER COLUMN id SET DEFAULT nextval('public.zkapp_states_nullable_id_seq'::regclass); + + +-- +-- Name: zkapp_timing_info id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_timing_info ALTER COLUMN id SET DEFAULT nextval('public.zkapp_timing_info_id_seq'::regclass); + + +-- +-- Name: zkapp_token_id_bounds id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_token_id_bounds ALTER COLUMN id SET DEFAULT nextval('public.zkapp_token_id_bounds_id_seq'::regclass); + + +-- +-- Name: zkapp_updates id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_updates ALTER COLUMN id SET DEFAULT nextval('public.zkapp_updates_id_seq'::regclass); + + +-- +-- Name: zkapp_uris id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER 
TABLE ONLY public.zkapp_uris ALTER COLUMN id SET DEFAULT nextval('public.zkapp_uris_id_seq'::regclass); + + +-- +-- Name: zkapp_verification_key_hashes id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_verification_key_hashes ALTER COLUMN id SET DEFAULT nextval('public.zkapp_verification_key_hashes_id_seq'::regclass); + + +-- +-- Name: zkapp_verification_keys id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_verification_keys ALTER COLUMN id SET DEFAULT nextval('public.zkapp_verification_keys_id_seq'::regclass); + + +-- +-- Name: account_identifiers account_identifiers_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.account_identifiers + ADD CONSTRAINT account_identifiers_pkey PRIMARY KEY (id); + + +-- +-- Name: account_identifiers account_identifiers_public_key_id_token_id_key; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.account_identifiers + ADD CONSTRAINT account_identifiers_public_key_id_token_id_key UNIQUE (public_key_id, token_id); + + +-- +-- Name: accounts_accessed accounts_accessed_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.accounts_accessed + ADD CONSTRAINT accounts_accessed_pkey PRIMARY KEY (block_id, account_identifier_id); + + +-- +-- Name: accounts_created accounts_created_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.accounts_created + ADD CONSTRAINT accounts_created_pkey PRIMARY KEY (block_id, account_identifier_id); + + +-- +-- Name: blocks_internal_commands blocks_internal_commands_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.blocks_internal_commands + ADD CONSTRAINT blocks_internal_commands_pkey PRIMARY KEY (block_id, internal_command_id, sequence_no, secondary_sequence_no); + + +-- +-- Name: blocks blocks_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + 
+ALTER TABLE ONLY public.blocks + ADD CONSTRAINT blocks_pkey PRIMARY KEY (id); + + +-- +-- Name: blocks blocks_state_hash_key; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.blocks + ADD CONSTRAINT blocks_state_hash_key UNIQUE (state_hash); + + +-- +-- Name: blocks_user_commands blocks_user_commands_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.blocks_user_commands + ADD CONSTRAINT blocks_user_commands_pkey PRIMARY KEY (block_id, user_command_id, sequence_no); + + +-- +-- Name: blocks_zkapp_commands blocks_zkapp_commands_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.blocks_zkapp_commands + ADD CONSTRAINT blocks_zkapp_commands_pkey PRIMARY KEY (block_id, zkapp_command_id, sequence_no); + + +-- +-- Name: epoch_data epoch_data_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.epoch_data + ADD CONSTRAINT epoch_data_pkey PRIMARY KEY (id); + + +-- +-- Name: epoch_data epoch_data_seed_ledger_hash_id_total_currency_start_checkpo_key; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.epoch_data + ADD CONSTRAINT epoch_data_seed_ledger_hash_id_total_currency_start_checkpo_key UNIQUE (seed, ledger_hash_id, total_currency, start_checkpoint, lock_checkpoint, epoch_length); + + +-- +-- Name: internal_commands internal_commands_hash_command_type_key; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.internal_commands + ADD CONSTRAINT internal_commands_hash_command_type_key UNIQUE (hash, command_type); + + +-- +-- Name: internal_commands internal_commands_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.internal_commands + ADD CONSTRAINT internal_commands_pkey PRIMARY KEY (id); + + +-- +-- Name: protocol_versions protocol_versions_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.protocol_versions 
+ ADD CONSTRAINT protocol_versions_pkey PRIMARY KEY (id); + + +-- +-- Name: protocol_versions protocol_versions_transaction_network_patch_key; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.protocol_versions + ADD CONSTRAINT protocol_versions_transaction_network_patch_key UNIQUE (transaction, network, patch); + + +-- +-- Name: public_keys public_keys_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.public_keys + ADD CONSTRAINT public_keys_pkey PRIMARY KEY (id); + + +-- +-- Name: public_keys public_keys_value_key; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.public_keys + ADD CONSTRAINT public_keys_value_key UNIQUE (value); + + +-- +-- Name: snarked_ledger_hashes snarked_ledger_hashes_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.snarked_ledger_hashes + ADD CONSTRAINT snarked_ledger_hashes_pkey PRIMARY KEY (id); + + +-- +-- Name: snarked_ledger_hashes snarked_ledger_hashes_value_key; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.snarked_ledger_hashes + ADD CONSTRAINT snarked_ledger_hashes_value_key UNIQUE (value); + + +-- +-- Name: timing_info timing_info_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.timing_info + ADD CONSTRAINT timing_info_pkey PRIMARY KEY (id); + + +-- +-- Name: token_symbols token_symbols_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.token_symbols + ADD CONSTRAINT token_symbols_pkey PRIMARY KEY (id); + + +-- +-- Name: tokens tokens_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.tokens + ADD CONSTRAINT tokens_pkey PRIMARY KEY (id); + + +-- +-- Name: tokens tokens_value_key; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.tokens + ADD CONSTRAINT tokens_value_key UNIQUE (value); + + +-- +-- Name: user_commands 
user_commands_hash_key; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.user_commands + ADD CONSTRAINT user_commands_hash_key UNIQUE (hash); + + +-- +-- Name: user_commands user_commands_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.user_commands + ADD CONSTRAINT user_commands_pkey PRIMARY KEY (id); + + +-- +-- Name: voting_for voting_for_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.voting_for + ADD CONSTRAINT voting_for_pkey PRIMARY KEY (id); + + +-- +-- Name: zkapp_account_precondition zkapp_account_precondition_balance_id_receipt_chain_hash_de_key; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_account_precondition + ADD CONSTRAINT zkapp_account_precondition_balance_id_receipt_chain_hash_de_key UNIQUE (balance_id, receipt_chain_hash, delegate_id, state_id, action_state_id, proved_state, is_new, nonce_id); + + +-- +-- Name: zkapp_account_precondition zkapp_account_precondition_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_account_precondition + ADD CONSTRAINT zkapp_account_precondition_pkey PRIMARY KEY (id); + + +-- +-- Name: zkapp_account_update_body zkapp_account_update_body_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_account_update_body + ADD CONSTRAINT zkapp_account_update_body_pkey PRIMARY KEY (id); + + +-- +-- Name: zkapp_account_update_failures zkapp_account_update_failures_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_account_update_failures + ADD CONSTRAINT zkapp_account_update_failures_pkey PRIMARY KEY (id); + + +-- +-- Name: zkapp_account_update zkapp_account_update_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_account_update + ADD CONSTRAINT zkapp_account_update_pkey PRIMARY KEY (id); + + +-- +-- Name: 
zkapp_accounts zkapp_accounts_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_accounts + ADD CONSTRAINT zkapp_accounts_pkey PRIMARY KEY (id); + + +-- +-- Name: zkapp_action_states zkapp_action_states_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_action_states + ADD CONSTRAINT zkapp_action_states_pkey PRIMARY KEY (id); + + +-- +-- Name: zkapp_amount_bounds zkapp_amount_bounds_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_amount_bounds + ADD CONSTRAINT zkapp_amount_bounds_pkey PRIMARY KEY (id); + + +-- +-- Name: zkapp_balance_bounds zkapp_balance_bounds_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_balance_bounds + ADD CONSTRAINT zkapp_balance_bounds_pkey PRIMARY KEY (id); + + +-- +-- Name: zkapp_commands zkapp_commands_hash_key; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_commands + ADD CONSTRAINT zkapp_commands_hash_key UNIQUE (hash); + + +-- +-- Name: zkapp_commands zkapp_commands_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_commands + ADD CONSTRAINT zkapp_commands_pkey PRIMARY KEY (id); + + +-- +-- Name: zkapp_epoch_data zkapp_epoch_data_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_epoch_data + ADD CONSTRAINT zkapp_epoch_data_pkey PRIMARY KEY (id); + + +-- +-- Name: zkapp_epoch_ledger zkapp_epoch_ledger_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_epoch_ledger + ADD CONSTRAINT zkapp_epoch_ledger_pkey PRIMARY KEY (id); + + +-- +-- Name: zkapp_events zkapp_events_element_ids_key; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_events + ADD CONSTRAINT zkapp_events_element_ids_key UNIQUE (element_ids); + + +-- +-- Name: zkapp_events zkapp_events_pkey; Type: 
CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_events + ADD CONSTRAINT zkapp_events_pkey PRIMARY KEY (id); + + +-- +-- Name: zkapp_fee_payer_body zkapp_fee_payer_body_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_fee_payer_body + ADD CONSTRAINT zkapp_fee_payer_body_pkey PRIMARY KEY (id); + + +-- +-- Name: zkapp_field_array zkapp_field_array_element_ids_key; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_field_array + ADD CONSTRAINT zkapp_field_array_element_ids_key UNIQUE (element_ids); + + +-- +-- Name: zkapp_field_array zkapp_field_array_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_field_array + ADD CONSTRAINT zkapp_field_array_pkey PRIMARY KEY (id); + + +-- +-- Name: zkapp_field zkapp_field_field_key; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_field + ADD CONSTRAINT zkapp_field_field_key UNIQUE (field); + + +-- +-- Name: zkapp_field zkapp_field_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_field + ADD CONSTRAINT zkapp_field_pkey PRIMARY KEY (id); + + +-- +-- Name: zkapp_global_slot_bounds zkapp_global_slot_bounds_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_global_slot_bounds + ADD CONSTRAINT zkapp_global_slot_bounds_pkey PRIMARY KEY (id); + + +-- +-- Name: zkapp_length_bounds zkapp_length_bounds_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_length_bounds + ADD CONSTRAINT zkapp_length_bounds_pkey PRIMARY KEY (id); + + +-- +-- Name: zkapp_network_precondition zkapp_network_precondition_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_network_precondition + ADD CONSTRAINT zkapp_network_precondition_pkey PRIMARY KEY (id); + + +-- +-- Name: zkapp_nonce_bounds 
zkapp_nonce_bounds_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_nonce_bounds + ADD CONSTRAINT zkapp_nonce_bounds_pkey PRIMARY KEY (id); + + +-- +-- Name: zkapp_permissions zkapp_permissions_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_permissions + ADD CONSTRAINT zkapp_permissions_pkey PRIMARY KEY (id); + + +-- +-- Name: zkapp_states_nullable zkapp_states_nullable_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_states_nullable + ADD CONSTRAINT zkapp_states_nullable_pkey PRIMARY KEY (id); + + +-- +-- Name: zkapp_states zkapp_states_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_states + ADD CONSTRAINT zkapp_states_pkey PRIMARY KEY (id); + + +-- +-- Name: zkapp_timing_info zkapp_timing_info_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_timing_info + ADD CONSTRAINT zkapp_timing_info_pkey PRIMARY KEY (id); + + +-- +-- Name: zkapp_token_id_bounds zkapp_token_id_bounds_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_token_id_bounds + ADD CONSTRAINT zkapp_token_id_bounds_pkey PRIMARY KEY (id); + + +-- +-- Name: zkapp_updates zkapp_updates_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_updates + ADD CONSTRAINT zkapp_updates_pkey PRIMARY KEY (id); + + +-- +-- Name: zkapp_uris zkapp_uris_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_uris + ADD CONSTRAINT zkapp_uris_pkey PRIMARY KEY (id); + + +-- +-- Name: zkapp_uris zkapp_uris_value_key; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_uris + ADD CONSTRAINT zkapp_uris_value_key UNIQUE (value); + + +-- +-- Name: zkapp_verification_key_hashes zkapp_verification_key_hashes_pkey; Type: CONSTRAINT; Schema: public; Owner: 
postgres +-- + +ALTER TABLE ONLY public.zkapp_verification_key_hashes + ADD CONSTRAINT zkapp_verification_key_hashes_pkey PRIMARY KEY (id); + + +-- +-- Name: zkapp_verification_key_hashes zkapp_verification_key_hashes_value_key; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_verification_key_hashes + ADD CONSTRAINT zkapp_verification_key_hashes_value_key UNIQUE (value); + + +-- +-- Name: zkapp_verification_keys zkapp_verification_keys_hash_id_key; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_verification_keys + ADD CONSTRAINT zkapp_verification_keys_hash_id_key UNIQUE (hash_id); + + +-- +-- Name: zkapp_verification_keys zkapp_verification_keys_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_verification_keys + ADD CONSTRAINT zkapp_verification_keys_pkey PRIMARY KEY (id); + + +-- +-- Name: zkapp_verification_keys zkapp_verification_keys_verification_key_key; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_verification_keys + ADD CONSTRAINT zkapp_verification_keys_verification_key_key UNIQUE (verification_key); + + +-- +-- Name: idx_accounts_accessed_block_account_identifier_id; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX idx_accounts_accessed_block_account_identifier_id ON public.accounts_accessed USING btree (account_identifier_id); + + +-- +-- Name: idx_accounts_accessed_block_id; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX idx_accounts_accessed_block_id ON public.accounts_accessed USING btree (block_id); + + +-- +-- Name: idx_accounts_created_block_account_identifier_id; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX idx_accounts_created_block_account_identifier_id ON public.accounts_created USING btree (account_identifier_id); + + +-- +-- Name: idx_accounts_created_block_id; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX 
idx_accounts_created_block_id ON public.accounts_created USING btree (block_id); + + +-- +-- Name: idx_blocks_creator_id; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX idx_blocks_creator_id ON public.blocks USING btree (creator_id); + + +-- +-- Name: idx_blocks_height; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX idx_blocks_height ON public.blocks USING btree (height); + + +-- +-- Name: idx_blocks_internal_commands_block_id; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX idx_blocks_internal_commands_block_id ON public.blocks_internal_commands USING btree (block_id); + + +-- +-- Name: idx_blocks_internal_commands_internal_command_id; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX idx_blocks_internal_commands_internal_command_id ON public.blocks_internal_commands USING btree (internal_command_id); + + +-- +-- Name: idx_blocks_internal_commands_secondary_sequence_no; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX idx_blocks_internal_commands_secondary_sequence_no ON public.blocks_internal_commands USING btree (secondary_sequence_no); + + +-- +-- Name: idx_blocks_internal_commands_sequence_no; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX idx_blocks_internal_commands_sequence_no ON public.blocks_internal_commands USING btree (sequence_no); + + +-- +-- Name: idx_blocks_parent_id; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX idx_blocks_parent_id ON public.blocks USING btree (parent_id); + + +-- +-- Name: idx_blocks_user_commands_block_id; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX idx_blocks_user_commands_block_id ON public.blocks_user_commands USING btree (block_id); + + +-- +-- Name: idx_blocks_user_commands_sequence_no; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX idx_blocks_user_commands_sequence_no ON public.blocks_user_commands USING btree (sequence_no); + + +-- +-- Name: 
idx_blocks_user_commands_user_command_id; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX idx_blocks_user_commands_user_command_id ON public.blocks_user_commands USING btree (user_command_id); + + +-- +-- Name: idx_blocks_zkapp_commands_block_id; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX idx_blocks_zkapp_commands_block_id ON public.blocks_zkapp_commands USING btree (block_id); + + +-- +-- Name: idx_blocks_zkapp_commands_sequence_no; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX idx_blocks_zkapp_commands_sequence_no ON public.blocks_zkapp_commands USING btree (sequence_no); + + +-- +-- Name: idx_blocks_zkapp_commands_zkapp_command_id; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX idx_blocks_zkapp_commands_zkapp_command_id ON public.blocks_zkapp_commands USING btree (zkapp_command_id); + + +-- +-- Name: idx_chain_status; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX idx_chain_status ON public.blocks USING btree (chain_status); + + +-- +-- Name: idx_token_symbols_value; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX idx_token_symbols_value ON public.token_symbols USING btree (value); + + +-- +-- Name: idx_voting_for_value; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX idx_voting_for_value ON public.voting_for USING btree (value); + + +-- +-- Name: idx_zkapp_events_element_ids; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX idx_zkapp_events_element_ids ON public.zkapp_events USING btree (element_ids); + + +-- +-- Name: idx_zkapp_field_array_element_ids; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX idx_zkapp_field_array_element_ids ON public.zkapp_field_array USING btree (element_ids); + + +-- +-- Name: account_identifiers account_identifiers_public_key_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.account_identifiers + ADD CONSTRAINT 
account_identifiers_public_key_id_fkey FOREIGN KEY (public_key_id) REFERENCES public.public_keys(id) ON DELETE CASCADE; + + +-- +-- Name: account_identifiers account_identifiers_token_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.account_identifiers + ADD CONSTRAINT account_identifiers_token_id_fkey FOREIGN KEY (token_id) REFERENCES public.tokens(id) ON DELETE CASCADE; + + +-- +-- Name: accounts_accessed accounts_accessed_account_identifier_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.accounts_accessed + ADD CONSTRAINT accounts_accessed_account_identifier_id_fkey FOREIGN KEY (account_identifier_id) REFERENCES public.account_identifiers(id); + + +-- +-- Name: accounts_accessed accounts_accessed_block_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.accounts_accessed + ADD CONSTRAINT accounts_accessed_block_id_fkey FOREIGN KEY (block_id) REFERENCES public.blocks(id); + + +-- +-- Name: accounts_accessed accounts_accessed_delegate_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.accounts_accessed + ADD CONSTRAINT accounts_accessed_delegate_id_fkey FOREIGN KEY (delegate_id) REFERENCES public.public_keys(id); + + +-- +-- Name: accounts_accessed accounts_accessed_permissions_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.accounts_accessed + ADD CONSTRAINT accounts_accessed_permissions_id_fkey FOREIGN KEY (permissions_id) REFERENCES public.zkapp_permissions(id); + + +-- +-- Name: accounts_accessed accounts_accessed_timing_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.accounts_accessed + ADD CONSTRAINT accounts_accessed_timing_id_fkey FOREIGN KEY (timing_id) REFERENCES public.timing_info(id); + + +-- +-- Name: accounts_accessed accounts_accessed_token_symbol_id_fkey; Type: FK CONSTRAINT; Schema: public; 
Owner: postgres +-- + +ALTER TABLE ONLY public.accounts_accessed + ADD CONSTRAINT accounts_accessed_token_symbol_id_fkey FOREIGN KEY (token_symbol_id) REFERENCES public.token_symbols(id); + + +-- +-- Name: accounts_accessed accounts_accessed_voting_for_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.accounts_accessed + ADD CONSTRAINT accounts_accessed_voting_for_id_fkey FOREIGN KEY (voting_for_id) REFERENCES public.voting_for(id); + + +-- +-- Name: accounts_accessed accounts_accessed_zkapp_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.accounts_accessed + ADD CONSTRAINT accounts_accessed_zkapp_id_fkey FOREIGN KEY (zkapp_id) REFERENCES public.zkapp_accounts(id); + + +-- +-- Name: accounts_created accounts_created_account_identifier_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.accounts_created + ADD CONSTRAINT accounts_created_account_identifier_id_fkey FOREIGN KEY (account_identifier_id) REFERENCES public.account_identifiers(id); + + +-- +-- Name: accounts_created accounts_created_block_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.accounts_created + ADD CONSTRAINT accounts_created_block_id_fkey FOREIGN KEY (block_id) REFERENCES public.blocks(id); + + +-- +-- Name: blocks blocks_block_winner_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.blocks + ADD CONSTRAINT blocks_block_winner_id_fkey FOREIGN KEY (block_winner_id) REFERENCES public.public_keys(id); + + +-- +-- Name: blocks blocks_creator_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.blocks + ADD CONSTRAINT blocks_creator_id_fkey FOREIGN KEY (creator_id) REFERENCES public.public_keys(id); + + +-- +-- Name: blocks_internal_commands blocks_internal_commands_block_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER 
TABLE ONLY public.blocks_internal_commands + ADD CONSTRAINT blocks_internal_commands_block_id_fkey FOREIGN KEY (block_id) REFERENCES public.blocks(id) ON DELETE CASCADE; + + +-- +-- Name: blocks_internal_commands blocks_internal_commands_internal_command_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.blocks_internal_commands + ADD CONSTRAINT blocks_internal_commands_internal_command_id_fkey FOREIGN KEY (internal_command_id) REFERENCES public.internal_commands(id) ON DELETE CASCADE; + + +-- +-- Name: blocks blocks_next_epoch_data_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.blocks + ADD CONSTRAINT blocks_next_epoch_data_id_fkey FOREIGN KEY (next_epoch_data_id) REFERENCES public.epoch_data(id); + + +-- +-- Name: blocks blocks_parent_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.blocks + ADD CONSTRAINT blocks_parent_id_fkey FOREIGN KEY (parent_id) REFERENCES public.blocks(id); + + +-- +-- Name: blocks blocks_proposed_protocol_version_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.blocks + ADD CONSTRAINT blocks_proposed_protocol_version_id_fkey FOREIGN KEY (proposed_protocol_version_id) REFERENCES public.protocol_versions(id); + + +-- +-- Name: blocks blocks_protocol_version_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.blocks + ADD CONSTRAINT blocks_protocol_version_id_fkey FOREIGN KEY (protocol_version_id) REFERENCES public.protocol_versions(id); + + +-- +-- Name: blocks blocks_snarked_ledger_hash_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.blocks + ADD CONSTRAINT blocks_snarked_ledger_hash_id_fkey FOREIGN KEY (snarked_ledger_hash_id) REFERENCES public.snarked_ledger_hashes(id); + + +-- +-- Name: blocks blocks_staking_epoch_data_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres 
+-- + +ALTER TABLE ONLY public.blocks + ADD CONSTRAINT blocks_staking_epoch_data_id_fkey FOREIGN KEY (staking_epoch_data_id) REFERENCES public.epoch_data(id); + + +-- +-- Name: blocks_user_commands blocks_user_commands_block_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.blocks_user_commands + ADD CONSTRAINT blocks_user_commands_block_id_fkey FOREIGN KEY (block_id) REFERENCES public.blocks(id) ON DELETE CASCADE; + + +-- +-- Name: blocks_user_commands blocks_user_commands_user_command_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.blocks_user_commands + ADD CONSTRAINT blocks_user_commands_user_command_id_fkey FOREIGN KEY (user_command_id) REFERENCES public.user_commands(id) ON DELETE CASCADE; + + +-- +-- Name: blocks_zkapp_commands blocks_zkapp_commands_block_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.blocks_zkapp_commands + ADD CONSTRAINT blocks_zkapp_commands_block_id_fkey FOREIGN KEY (block_id) REFERENCES public.blocks(id) ON DELETE CASCADE; + + +-- +-- Name: blocks_zkapp_commands blocks_zkapp_commands_zkapp_command_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.blocks_zkapp_commands + ADD CONSTRAINT blocks_zkapp_commands_zkapp_command_id_fkey FOREIGN KEY (zkapp_command_id) REFERENCES public.zkapp_commands(id) ON DELETE CASCADE; + + +-- +-- Name: epoch_data epoch_data_ledger_hash_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.epoch_data + ADD CONSTRAINT epoch_data_ledger_hash_id_fkey FOREIGN KEY (ledger_hash_id) REFERENCES public.snarked_ledger_hashes(id); + + +-- +-- Name: internal_commands internal_commands_receiver_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.internal_commands + ADD CONSTRAINT internal_commands_receiver_id_fkey FOREIGN KEY (receiver_id) REFERENCES 
public.public_keys(id); + + +-- +-- Name: timing_info timing_info_account_identifier_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.timing_info + ADD CONSTRAINT timing_info_account_identifier_id_fkey FOREIGN KEY (account_identifier_id) REFERENCES public.account_identifiers(id); + + +-- +-- Name: tokens tokens_owner_public_key_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.tokens + ADD CONSTRAINT tokens_owner_public_key_id_fkey FOREIGN KEY (owner_public_key_id) REFERENCES public.public_keys(id) ON DELETE CASCADE; + + +-- +-- Name: tokens tokens_owner_token_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.tokens + ADD CONSTRAINT tokens_owner_token_id_fkey FOREIGN KEY (owner_token_id) REFERENCES public.tokens(id); + + +-- +-- Name: user_commands user_commands_fee_payer_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.user_commands + ADD CONSTRAINT user_commands_fee_payer_id_fkey FOREIGN KEY (fee_payer_id) REFERENCES public.public_keys(id); + + +-- +-- Name: user_commands user_commands_receiver_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.user_commands + ADD CONSTRAINT user_commands_receiver_id_fkey FOREIGN KEY (receiver_id) REFERENCES public.public_keys(id); + + +-- +-- Name: user_commands user_commands_source_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.user_commands + ADD CONSTRAINT user_commands_source_id_fkey FOREIGN KEY (source_id) REFERENCES public.public_keys(id); + + +-- +-- Name: zkapp_account_precondition zkapp_account_precondition_action_state_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_account_precondition + ADD CONSTRAINT zkapp_account_precondition_action_state_id_fkey FOREIGN KEY (action_state_id) REFERENCES 
public.zkapp_field(id); + + +-- +-- Name: zkapp_account_precondition zkapp_account_precondition_balance_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_account_precondition + ADD CONSTRAINT zkapp_account_precondition_balance_id_fkey FOREIGN KEY (balance_id) REFERENCES public.zkapp_balance_bounds(id); + + +-- +-- Name: zkapp_account_precondition zkapp_account_precondition_delegate_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_account_precondition + ADD CONSTRAINT zkapp_account_precondition_delegate_id_fkey FOREIGN KEY (delegate_id) REFERENCES public.public_keys(id); + + +-- +-- Name: zkapp_account_precondition zkapp_account_precondition_nonce_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_account_precondition + ADD CONSTRAINT zkapp_account_precondition_nonce_id_fkey FOREIGN KEY (nonce_id) REFERENCES public.zkapp_nonce_bounds(id); + + +-- +-- Name: zkapp_account_precondition zkapp_account_precondition_state_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_account_precondition + ADD CONSTRAINT zkapp_account_precondition_state_id_fkey FOREIGN KEY (state_id) REFERENCES public.zkapp_states_nullable(id); + + +-- +-- Name: zkapp_account_update_body zkapp_account_update_body_account_identifier_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_account_update_body + ADD CONSTRAINT zkapp_account_update_body_account_identifier_id_fkey FOREIGN KEY (account_identifier_id) REFERENCES public.account_identifiers(id); + + +-- +-- Name: zkapp_account_update_body zkapp_account_update_body_actions_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_account_update_body + ADD CONSTRAINT zkapp_account_update_body_actions_id_fkey FOREIGN KEY (actions_id) REFERENCES public.zkapp_events(id); + + 
+-- +-- Name: zkapp_account_update_body zkapp_account_update_body_call_data_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_account_update_body + ADD CONSTRAINT zkapp_account_update_body_call_data_id_fkey FOREIGN KEY (call_data_id) REFERENCES public.zkapp_field(id); + + +-- +-- Name: zkapp_account_update_body zkapp_account_update_body_events_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_account_update_body + ADD CONSTRAINT zkapp_account_update_body_events_id_fkey FOREIGN KEY (events_id) REFERENCES public.zkapp_events(id); + + +-- +-- Name: zkapp_account_update zkapp_account_update_body_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_account_update + ADD CONSTRAINT zkapp_account_update_body_id_fkey FOREIGN KEY (body_id) REFERENCES public.zkapp_account_update_body(id); + + +-- +-- Name: zkapp_account_update_body zkapp_account_update_body_update_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_account_update_body + ADD CONSTRAINT zkapp_account_update_body_update_id_fkey FOREIGN KEY (update_id) REFERENCES public.zkapp_updates(id); + + +-- +-- Name: zkapp_account_update_body zkapp_account_update_body_verification_key_hash_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_account_update_body + ADD CONSTRAINT zkapp_account_update_body_verification_key_hash_id_fkey FOREIGN KEY (verification_key_hash_id) REFERENCES public.zkapp_verification_key_hashes(id); + + +-- +-- Name: zkapp_account_update_body zkapp_account_update_body_zkapp_account_precondition_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_account_update_body + ADD CONSTRAINT zkapp_account_update_body_zkapp_account_precondition_id_fkey FOREIGN KEY (zkapp_account_precondition_id) REFERENCES 
public.zkapp_account_precondition(id); + + +-- +-- Name: zkapp_account_update_body zkapp_account_update_body_zkapp_network_precondition_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_account_update_body + ADD CONSTRAINT zkapp_account_update_body_zkapp_network_precondition_id_fkey FOREIGN KEY (zkapp_network_precondition_id) REFERENCES public.zkapp_network_precondition(id); + + +-- +-- Name: zkapp_account_update_body zkapp_account_update_body_zkapp_valid_while_precondition_i_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_account_update_body + ADD CONSTRAINT zkapp_account_update_body_zkapp_valid_while_precondition_i_fkey FOREIGN KEY (zkapp_valid_while_precondition_id) REFERENCES public.zkapp_global_slot_bounds(id); + + +-- +-- Name: zkapp_accounts zkapp_accounts_action_state_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_accounts + ADD CONSTRAINT zkapp_accounts_action_state_id_fkey FOREIGN KEY (action_state_id) REFERENCES public.zkapp_action_states(id); + + +-- +-- Name: zkapp_accounts zkapp_accounts_app_state_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_accounts + ADD CONSTRAINT zkapp_accounts_app_state_id_fkey FOREIGN KEY (app_state_id) REFERENCES public.zkapp_states(id); + + +-- +-- Name: zkapp_accounts zkapp_accounts_verification_key_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_accounts + ADD CONSTRAINT zkapp_accounts_verification_key_id_fkey FOREIGN KEY (verification_key_id) REFERENCES public.zkapp_verification_keys(id); + + +-- +-- Name: zkapp_accounts zkapp_accounts_zkapp_uri_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_accounts + ADD CONSTRAINT zkapp_accounts_zkapp_uri_id_fkey FOREIGN KEY (zkapp_uri_id) REFERENCES public.zkapp_uris(id); + + +-- +-- 
Name: zkapp_action_states zkapp_action_states_element0_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_action_states + ADD CONSTRAINT zkapp_action_states_element0_fkey FOREIGN KEY (element0) REFERENCES public.zkapp_field(id); + + +-- +-- Name: zkapp_action_states zkapp_action_states_element1_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_action_states + ADD CONSTRAINT zkapp_action_states_element1_fkey FOREIGN KEY (element1) REFERENCES public.zkapp_field(id); + + +-- +-- Name: zkapp_action_states zkapp_action_states_element2_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_action_states + ADD CONSTRAINT zkapp_action_states_element2_fkey FOREIGN KEY (element2) REFERENCES public.zkapp_field(id); + + +-- +-- Name: zkapp_action_states zkapp_action_states_element3_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_action_states + ADD CONSTRAINT zkapp_action_states_element3_fkey FOREIGN KEY (element3) REFERENCES public.zkapp_field(id); + + +-- +-- Name: zkapp_action_states zkapp_action_states_element4_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_action_states + ADD CONSTRAINT zkapp_action_states_element4_fkey FOREIGN KEY (element4) REFERENCES public.zkapp_field(id); + + +-- +-- Name: zkapp_commands zkapp_commands_zkapp_fee_payer_body_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_commands + ADD CONSTRAINT zkapp_commands_zkapp_fee_payer_body_id_fkey FOREIGN KEY (zkapp_fee_payer_body_id) REFERENCES public.zkapp_fee_payer_body(id); + + +-- +-- Name: zkapp_epoch_data zkapp_epoch_data_epoch_ledger_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_epoch_data + ADD CONSTRAINT zkapp_epoch_data_epoch_ledger_id_fkey FOREIGN KEY (epoch_ledger_id) 
REFERENCES public.zkapp_epoch_ledger(id); + + +-- +-- Name: zkapp_epoch_data zkapp_epoch_data_epoch_length_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_epoch_data + ADD CONSTRAINT zkapp_epoch_data_epoch_length_id_fkey FOREIGN KEY (epoch_length_id) REFERENCES public.zkapp_length_bounds(id); + + +-- +-- Name: zkapp_epoch_ledger zkapp_epoch_ledger_hash_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_epoch_ledger + ADD CONSTRAINT zkapp_epoch_ledger_hash_id_fkey FOREIGN KEY (hash_id) REFERENCES public.snarked_ledger_hashes(id); + + +-- +-- Name: zkapp_epoch_ledger zkapp_epoch_ledger_total_currency_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_epoch_ledger + ADD CONSTRAINT zkapp_epoch_ledger_total_currency_id_fkey FOREIGN KEY (total_currency_id) REFERENCES public.zkapp_amount_bounds(id); + + +-- +-- Name: zkapp_fee_payer_body zkapp_fee_payer_body_public_key_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_fee_payer_body + ADD CONSTRAINT zkapp_fee_payer_body_public_key_id_fkey FOREIGN KEY (public_key_id) REFERENCES public.public_keys(id); + + +-- +-- Name: zkapp_network_precondition zkapp_network_precondition_blockchain_length_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_network_precondition + ADD CONSTRAINT zkapp_network_precondition_blockchain_length_id_fkey FOREIGN KEY (blockchain_length_id) REFERENCES public.zkapp_length_bounds(id); + + +-- +-- Name: zkapp_network_precondition zkapp_network_precondition_global_slot_since_genesis_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_network_precondition + ADD CONSTRAINT zkapp_network_precondition_global_slot_since_genesis_fkey FOREIGN KEY (global_slot_since_genesis) REFERENCES public.zkapp_global_slot_bounds(id); + + +-- 
+-- Name: zkapp_network_precondition zkapp_network_precondition_min_window_density_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_network_precondition + ADD CONSTRAINT zkapp_network_precondition_min_window_density_id_fkey FOREIGN KEY (min_window_density_id) REFERENCES public.zkapp_length_bounds(id); + + +-- +-- Name: zkapp_network_precondition zkapp_network_precondition_next_epoch_data_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_network_precondition + ADD CONSTRAINT zkapp_network_precondition_next_epoch_data_id_fkey FOREIGN KEY (next_epoch_data_id) REFERENCES public.zkapp_epoch_data(id); + + +-- +-- Name: zkapp_network_precondition zkapp_network_precondition_snarked_ledger_hash_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_network_precondition + ADD CONSTRAINT zkapp_network_precondition_snarked_ledger_hash_id_fkey FOREIGN KEY (snarked_ledger_hash_id) REFERENCES public.snarked_ledger_hashes(id); + + +-- +-- Name: zkapp_network_precondition zkapp_network_precondition_staking_epoch_data_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_network_precondition + ADD CONSTRAINT zkapp_network_precondition_staking_epoch_data_id_fkey FOREIGN KEY (staking_epoch_data_id) REFERENCES public.zkapp_epoch_data(id); + + +-- +-- Name: zkapp_network_precondition zkapp_network_precondition_total_currency_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_network_precondition + ADD CONSTRAINT zkapp_network_precondition_total_currency_id_fkey FOREIGN KEY (total_currency_id) REFERENCES public.zkapp_amount_bounds(id); + + +-- +-- Name: zkapp_states zkapp_states_element0_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_states + ADD CONSTRAINT zkapp_states_element0_fkey FOREIGN KEY (element0) 
REFERENCES public.zkapp_field(id); + + +-- +-- Name: zkapp_states zkapp_states_element1_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_states + ADD CONSTRAINT zkapp_states_element1_fkey FOREIGN KEY (element1) REFERENCES public.zkapp_field(id); + + +-- +-- Name: zkapp_states zkapp_states_element2_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_states + ADD CONSTRAINT zkapp_states_element2_fkey FOREIGN KEY (element2) REFERENCES public.zkapp_field(id); + + +-- +-- Name: zkapp_states zkapp_states_element3_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_states + ADD CONSTRAINT zkapp_states_element3_fkey FOREIGN KEY (element3) REFERENCES public.zkapp_field(id); + + +-- +-- Name: zkapp_states zkapp_states_element4_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_states + ADD CONSTRAINT zkapp_states_element4_fkey FOREIGN KEY (element4) REFERENCES public.zkapp_field(id); + + +-- +-- Name: zkapp_states zkapp_states_element5_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_states + ADD CONSTRAINT zkapp_states_element5_fkey FOREIGN KEY (element5) REFERENCES public.zkapp_field(id); + + +-- +-- Name: zkapp_states zkapp_states_element6_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_states + ADD CONSTRAINT zkapp_states_element6_fkey FOREIGN KEY (element6) REFERENCES public.zkapp_field(id); + + +-- +-- Name: zkapp_states zkapp_states_element7_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_states + ADD CONSTRAINT zkapp_states_element7_fkey FOREIGN KEY (element7) REFERENCES public.zkapp_field(id); + + +-- +-- Name: zkapp_states_nullable zkapp_states_nullable_element0_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY 
public.zkapp_states_nullable + ADD CONSTRAINT zkapp_states_nullable_element0_fkey FOREIGN KEY (element0) REFERENCES public.zkapp_field(id); + + +-- +-- Name: zkapp_states_nullable zkapp_states_nullable_element1_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_states_nullable + ADD CONSTRAINT zkapp_states_nullable_element1_fkey FOREIGN KEY (element1) REFERENCES public.zkapp_field(id); + + +-- +-- Name: zkapp_states_nullable zkapp_states_nullable_element2_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_states_nullable + ADD CONSTRAINT zkapp_states_nullable_element2_fkey FOREIGN KEY (element2) REFERENCES public.zkapp_field(id); + + +-- +-- Name: zkapp_states_nullable zkapp_states_nullable_element3_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_states_nullable + ADD CONSTRAINT zkapp_states_nullable_element3_fkey FOREIGN KEY (element3) REFERENCES public.zkapp_field(id); + + +-- +-- Name: zkapp_states_nullable zkapp_states_nullable_element4_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_states_nullable + ADD CONSTRAINT zkapp_states_nullable_element4_fkey FOREIGN KEY (element4) REFERENCES public.zkapp_field(id); + + +-- +-- Name: zkapp_states_nullable zkapp_states_nullable_element5_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_states_nullable + ADD CONSTRAINT zkapp_states_nullable_element5_fkey FOREIGN KEY (element5) REFERENCES public.zkapp_field(id); + + +-- +-- Name: zkapp_states_nullable zkapp_states_nullable_element6_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_states_nullable + ADD CONSTRAINT zkapp_states_nullable_element6_fkey FOREIGN KEY (element6) REFERENCES public.zkapp_field(id); + + +-- +-- Name: zkapp_states_nullable zkapp_states_nullable_element7_fkey; Type: FK 
CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_states_nullable + ADD CONSTRAINT zkapp_states_nullable_element7_fkey FOREIGN KEY (element7) REFERENCES public.zkapp_field(id); + + +-- +-- Name: zkapp_updates zkapp_updates_app_state_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_updates + ADD CONSTRAINT zkapp_updates_app_state_id_fkey FOREIGN KEY (app_state_id) REFERENCES public.zkapp_states_nullable(id); + + +-- +-- Name: zkapp_updates zkapp_updates_delegate_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_updates + ADD CONSTRAINT zkapp_updates_delegate_id_fkey FOREIGN KEY (delegate_id) REFERENCES public.public_keys(id); + + +-- +-- Name: zkapp_updates zkapp_updates_permissions_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_updates + ADD CONSTRAINT zkapp_updates_permissions_id_fkey FOREIGN KEY (permissions_id) REFERENCES public.zkapp_permissions(id); + + +-- +-- Name: zkapp_updates zkapp_updates_timing_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_updates + ADD CONSTRAINT zkapp_updates_timing_id_fkey FOREIGN KEY (timing_id) REFERENCES public.zkapp_timing_info(id); + + +-- +-- Name: zkapp_updates zkapp_updates_token_symbol_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_updates + ADD CONSTRAINT zkapp_updates_token_symbol_id_fkey FOREIGN KEY (token_symbol_id) REFERENCES public.token_symbols(id); + + +-- +-- Name: zkapp_updates zkapp_updates_verification_key_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_updates + ADD CONSTRAINT zkapp_updates_verification_key_id_fkey FOREIGN KEY (verification_key_id) REFERENCES public.zkapp_verification_keys(id); + + +-- +-- Name: zkapp_updates zkapp_updates_voting_for_id_fkey; Type: FK CONSTRAINT; 
Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_updates + ADD CONSTRAINT zkapp_updates_voting_for_id_fkey FOREIGN KEY (voting_for_id) REFERENCES public.voting_for(id); + + +-- +-- Name: zkapp_updates zkapp_updates_zkapp_uri_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_updates + ADD CONSTRAINT zkapp_updates_zkapp_uri_id_fkey FOREIGN KEY (zkapp_uri_id) REFERENCES public.zkapp_uris(id); + + +-- +-- Name: zkapp_verification_keys zkapp_verification_keys_hash_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.zkapp_verification_keys + ADD CONSTRAINT zkapp_verification_keys_hash_id_fkey FOREIGN KEY (hash_id) REFERENCES public.zkapp_verification_key_hashes(id); + + +-- +-- PostgreSQL database dump complete +-- + diff --git a/producer-dashboard/src/archive/sql/query_blocks_in_slot_range.sql b/producer-dashboard/src/archive/sql/query_blocks_in_slot_range.sql new file mode 100644 index 0000000000..642c23842c --- /dev/null +++ b/producer-dashboard/src/archive/sql/query_blocks_in_slot_range.sql @@ -0,0 +1,19 @@ +SELECT + b.id, + b.state_hash, + b.height, + b.timestamp, + b.chain_status AS "chain_status: ChainStatus", + pk_creator.value AS "creator_key", + pk_winner.value AS "winner_key", + b.global_slot_since_genesis, + b.global_slot_since_hard_fork, + b.parent_id +FROM + blocks b +JOIN + public_keys pk_creator ON b.creator_id = pk_creator.id +JOIN + public_keys pk_winner ON b.block_winner_id = pk_winner.id +WHERE + b.global_slot_since_hard_fork BETWEEN $1 AND $2 \ No newline at end of file diff --git a/producer-dashboard/src/archive/sql/query_canonical_chain.sql b/producer-dashboard/src/archive/sql/query_canonical_chain.sql new file mode 100644 index 0000000000..be8bcecb9d --- /dev/null +++ b/producer-dashboard/src/archive/sql/query_canonical_chain.sql @@ -0,0 +1,27 @@ +WITH RECURSIVE chain AS ( + (SELECT * FROM blocks WHERE state_hash = $1) + UNION ALL + SELECT 
b.* FROM blocks b + INNER JOIN chain + ON b.id = chain.parent_id AND chain.id <> chain.parent_id + ) + + SELECT + c.id AS "id!", + c.state_hash AS "state_hash!", + c.height AS "height!", + c.timestamp AS "timestamp!", + c.chain_status AS "chain_status!: ChainStatus", + pk_creator.value AS "creator_key", + pk_winner.value AS "winner_key", + c.global_slot_since_genesis AS "global_slot_since_genesis!", + c.global_slot_since_hard_fork AS "global_slot_since_hard_fork!", + c.parent_id + FROM + chain c + JOIN + public_keys pk_creator ON c.creator_id = pk_creator.id + JOIN + public_keys pk_winner ON c.block_winner_id = pk_winner.id + WHERE + c.global_slot_since_hard_fork BETWEEN $2 AND $3 \ No newline at end of file diff --git a/producer-dashboard/src/archive/sql/query_last_canonical_blocks.sql b/producer-dashboard/src/archive/sql/query_last_canonical_blocks.sql new file mode 100644 index 0000000000..0a5615e765 --- /dev/null +++ b/producer-dashboard/src/archive/sql/query_last_canonical_blocks.sql @@ -0,0 +1,28 @@ +WITH RECURSIVE chain AS ( + (SELECT * FROM blocks WHERE state_hash = $1) + + UNION ALL + + SELECT b.* FROM blocks b + INNER JOIN chain + ON b.id = chain.parent_id AND chain.id <> chain.parent_id +) + +SELECT + c.id AS "id!", + c.state_hash AS "state_hash!", + c.height AS "height!", + c.timestamp AS "timestamp!", + c.chain_status AS "chain_status!: ChainStatus", + pk_creator.value AS "creator_key", + pk_winner.value AS "winner_key", + c.global_slot_since_genesis AS "global_slot_since_genesis!", + c.global_slot_since_hard_fork AS "global_slot_since_hard_fork!", + c.parent_id +FROM + chain c +JOIN + public_keys pk_creator ON c.creator_id = pk_creator.id +JOIN + public_keys pk_winner ON c.block_winner_id = pk_winner.id +LIMIT $2 \ No newline at end of file diff --git a/producer-dashboard/src/archive/sql/query_latest_block.sql b/producer-dashboard/src/archive/sql/query_latest_block.sql new file mode 100644 index 0000000000..568f2f0342 --- /dev/null +++ 
b/producer-dashboard/src/archive/sql/query_latest_block.sql @@ -0,0 +1,5 @@ +SELECT + b.state_hash AS "state_hash!" +FROM blocks b +ORDER BY b.id DESC +LIMIT 1; \ No newline at end of file diff --git a/producer-dashboard/src/archive/sql/query_producer_blocks.sql b/producer-dashboard/src/archive/sql/query_producer_blocks.sql new file mode 100644 index 0000000000..4d3a46e2c0 --- /dev/null +++ b/producer-dashboard/src/archive/sql/query_producer_blocks.sql @@ -0,0 +1,19 @@ +SELECT + b.id, + b.state_hash, + b.height, + b.timestamp, + b.chain_status AS "chain_status: ChainStatus", + pk_creator.value AS "creator_key", + pk_winner.value AS "winner_key", + b.global_slot_since_genesis, + b.global_slot_since_hard_fork, + b.parent_id +FROM + blocks b +JOIN + public_keys pk_creator ON b.creator_id = pk_creator.id +JOIN + public_keys pk_winner ON b.block_winner_id = pk_winner.id +WHERE + pk_creator.value = $1 \ No newline at end of file diff --git a/producer-dashboard/src/archive/watchdog.rs b/producer-dashboard/src/archive/watchdog.rs index ebe4428a92..4583a70117 100644 --- a/producer-dashboard/src/archive/watchdog.rs +++ b/producer-dashboard/src/archive/watchdog.rs @@ -15,7 +15,7 @@ impl ArchiveWatchdog { tokio::spawn(async move { Self { producer_pk, - archive_connector: ArchiveConnector::connect().await, + archive_connector: ArchiveConnector::connect(super::ArchiveUrl::Env).await, db, node_status, } diff --git a/producer-dashboard/src/main.rs b/producer-dashboard/src/bin/producer_dashboard.rs similarity index 81% rename from producer-dashboard/src/main.rs rename to producer-dashboard/src/bin/producer_dashboard.rs index 2052dd1b11..ee4c69a69b 100644 --- a/producer-dashboard/src/main.rs +++ b/producer-dashboard/src/bin/producer_dashboard.rs @@ -1,38 +1,17 @@ -use node::NodeData; use openmina_node_account::AccountSecretKey; - -use std::sync::Arc; -use tokio::sync::{mpsc, RwLock}; - -use clap::Parser; - -use crate::{ +use openmina_producer_dashboard::{ 
archive::watchdog::ArchiveWatchdog, + config, evaluator::{EpochInit, Evaluator}, node::{watchdog::spawn_watchdog, Node}, + rpc, storage::db_sled::Database, + NodeStatus, }; -mod archive; -mod config; -pub mod evaluator; -mod node; -mod rpc; -mod storage; +use tokio::sync::mpsc; -#[derive(Debug, thiserror::Error)] -pub enum StakingToolError { - #[error("Empty graphql response")] - EmptyGraphqlResponse, - #[error(transparent)] - Io(#[from] std::io::Error), - #[error(transparent)] - Serde(#[from] serde_json::Error), - #[error("Node offline")] - NodeOffline, -} - -pub type NodeStatus = Arc>; +use clap::Parser; #[tokio::main] async fn main() { diff --git a/producer-dashboard/src/evaluator/epoch.rs b/producer-dashboard/src/evaluator/epoch.rs index 15ec0c2bb2..faa822bc9c 100644 --- a/producer-dashboard/src/evaluator/epoch.rs +++ b/producer-dashboard/src/evaluator/epoch.rs @@ -3,7 +3,7 @@ use std::ops::AddAssign; use serde::{Deserialize, Serialize}; use crate::{ - archive::{Block, ChainStatus}, + archive::{postgres_types::ChainStatus, Block}, node::epoch_ledgers::{Balances, NanoMina}, }; diff --git a/producer-dashboard/src/lib.rs b/producer-dashboard/src/lib.rs new file mode 100644 index 0000000000..e861691473 --- /dev/null +++ b/producer-dashboard/src/lib.rs @@ -0,0 +1,30 @@ +pub mod archive; +pub mod config; +pub mod evaluator; +pub mod node; +pub mod rpc; +pub mod storage; + +use std::sync::Arc; + +pub use archive::ArchiveConnector; + +#[cfg(test)] +pub use archive::raw_types::ArchiveConnectorForTest; + +use node::NodeData; +use tokio::sync::RwLock; + +#[derive(Debug, thiserror::Error)] +pub enum StakingToolError { + #[error("Empty graphql response")] + EmptyGraphqlResponse, + #[error(transparent)] + Io(#[from] std::io::Error), + #[error(transparent)] + Serde(#[from] serde_json::Error), + #[error("Node offline")] + NodeOffline, +} + +pub type NodeStatus = Arc>; diff --git a/run.yaml b/run.yaml index bc69691964..8aad448ef3 100644 --- a/run.yaml +++ b/run.yaml @@ -72,11 
+72,11 @@ spec: - | apt-get update && apt-get -y install git curl gcc libssl-dev pkg-config curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y - PATH=$PATH:~/.cargo/bin && rustup update 1.83 + PATH=$PATH:~/.cargo/bin && rustup update 1.84 git clone https://github.com/openmina/openmina cd openmina git fetch && git checkout feat/tweak-for-debugger - PATH=$PATH:~/.cargo/bin && cargo +1.83 build --release --bin openmina -p cli --no-default-features + PATH=$PATH:~/.cargo/bin && cargo +1.84 build --release --bin openmina -p cli --no-default-features cp target/release/openmina /usr/local/bin/openmina openmina node -p 10000 --libp2p-port 8302 ports: diff --git a/snark/Cargo.toml b/snark/Cargo.toml index da710f35e8..a853c3a3f5 100644 --- a/snark/Cargo.toml +++ b/snark/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "snark" -version = "0.13.0" +version = "0.14.0" edition = "2021" license = "Apache-2.0" diff --git a/snark/src/block_verify/snark_block_verify_actions.rs b/snark/src/block_verify/snark_block_verify_actions.rs index ed5f5daf0c..0ef82dfc67 100644 --- a/snark/src/block_verify/snark_block_verify_actions.rs +++ b/snark/src/block_verify/snark_block_verify_actions.rs @@ -38,22 +38,22 @@ impl redux::EnablingCondition for SnarkBlockVerifyAction { .block_verify .jobs .get(*req_id) - .map_or(false, |v| v.is_init()), + .is_some_and(|v| v.is_init()), SnarkBlockVerifyAction::Error { req_id, .. 
} => state .block_verify .jobs .get(*req_id) - .map_or(false, |v| v.is_pending()), + .is_some_and(|v| v.is_pending()), SnarkBlockVerifyAction::Success { req_id } => state .block_verify .jobs .get(*req_id) - .map_or(false, |v| v.is_pending()), + .is_some_and(|v| v.is_pending()), SnarkBlockVerifyAction::Finish { req_id } => state .block_verify .jobs .get(*req_id) - .map_or(false, |v| v.is_finished()), + .is_some_and(|v| v.is_finished()), } } } @@ -65,6 +65,6 @@ where fn is_enabled(&self, state: &T, time: redux::Timestamp) -> bool { state .substate() - .map_or(false, |state| self.is_enabled(state, time)) + .is_ok_and(|state| self.is_enabled(state, time)) } } diff --git a/snark/src/user_command_verify/snark_user_command_verify_actions.rs b/snark/src/user_command_verify/snark_user_command_verify_actions.rs index 6ec0f05755..54f936b907 100644 --- a/snark/src/user_command_verify/snark_user_command_verify_actions.rs +++ b/snark/src/user_command_verify/snark_user_command_verify_actions.rs @@ -55,22 +55,22 @@ impl redux::EnablingCondition for SnarkUserCommandVerifyActio .user_command_verify .jobs .get(*req_id) - .map_or(false, |v| v.is_init()), + .is_some_and(|v| v.is_init()), SnarkUserCommandVerifyAction::Error { req_id, .. } => state .user_command_verify .jobs .get(*req_id) - .map_or(false, |v| v.is_pending()), + .is_some_and(|v| v.is_pending()), SnarkUserCommandVerifyAction::Success { req_id, .. 
} => state .user_command_verify .jobs .get(*req_id) - .map_or(false, |v| v.is_pending()), + .is_some_and(|v| v.is_pending()), SnarkUserCommandVerifyAction::Finish { req_id } => state .user_command_verify .jobs .get(*req_id) - .map_or(false, |v| v.is_finished()), + .is_some_and(|v| v.is_finished()), } } } diff --git a/snark/src/work_verify/snark_work_verify_actions.rs b/snark/src/work_verify/snark_work_verify_actions.rs index fc4b5f3d8c..287f835771 100644 --- a/snark/src/work_verify/snark_work_verify_actions.rs +++ b/snark/src/work_verify/snark_work_verify_actions.rs @@ -45,18 +45,18 @@ impl redux::EnablingCondition for SnarkWorkVerifyAction { .work_verify .jobs .get(*req_id) - .map_or(false, |v| v.is_init()), + .is_some_and(|v| v.is_init()), SnarkWorkVerifyAction::Error { req_id, .. } | SnarkWorkVerifyAction::Success { req_id } => state .work_verify .jobs .get(*req_id) - .map_or(false, |v| v.is_pending()), + .is_some_and(|v| v.is_pending()), SnarkWorkVerifyAction::Finish { req_id } => state .work_verify .jobs .get(*req_id) - .map_or(false, |v| v.is_finished()), + .is_some_and(|v| v.is_finished()), } } } @@ -68,6 +68,6 @@ where fn is_enabled(&self, state: &T, time: redux::Timestamp) -> bool { state .substate() - .map_or(false, |state| self.is_enabled(state, time)) + .is_ok_and(|state| self.is_enabled(state, time)) } } diff --git a/tests/files/archive-breadcrumb/3NK56ZbCS31qb8SvCtCCYza4beRDtKgXA2JL6s3evKouG2KkKtiy.bin b/tests/files/archive-breadcrumb/3NK56ZbCS31qb8SvCtCCYza4beRDtKgXA2JL6s3evKouG2KkKtiy.bin new file mode 100644 index 0000000000..0a9bf81229 Binary files /dev/null and b/tests/files/archive-breadcrumb/3NK56ZbCS31qb8SvCtCCYza4beRDtKgXA2JL6s3evKouG2KkKtiy.bin differ diff --git a/tools/archive-breadcrumb-compare/Cargo.toml b/tools/archive-breadcrumb-compare/Cargo.toml new file mode 100644 index 0000000000..de3b214149 --- /dev/null +++ b/tools/archive-breadcrumb-compare/Cargo.toml @@ -0,0 +1,15 @@ +[package] +name = "openmina-archive-breadcrumb-compare" 
+version = "0.14.0" +edition = "2021" + +[dependencies] +clap = { version = "4.5", features = [ "derive", "env" ] } +reqwest = { version = "0.11", features = ["json"] } +tokio = { version = "1.0", features = ["full"] } +serde = { workspace = true } +anyhow = "1.0" +mina-p2p-messages = { workspace = true } +binprot = { git = "https://github.com/openmina/binprot-rs", rev = "400b52c" } +serde_json = "1.0" +similar = "2.6.0" diff --git a/tools/archive-breadcrumb-compare/src/main.rs b/tools/archive-breadcrumb-compare/src/main.rs new file mode 100644 index 0000000000..028e86fca9 --- /dev/null +++ b/tools/archive-breadcrumb-compare/src/main.rs @@ -0,0 +1,538 @@ +use mina_p2p_messages::v2::ArchiveTransitionFronntierDiff; +use std::{collections::HashSet, path::PathBuf}; + +use anyhow::Result; +use clap::Parser; +use serde::{Deserialize, Serialize}; +use tokio::time::{interval, timeout, Duration}; + +#[derive(Parser, Debug)] +#[command(author, version, about, long_about = None)] +struct Args { + /// OCaml Node GraphQL endpoint + #[arg(env = "OCAML_NODE_GRAPHQL")] + ocaml_node_graphql: Option, + + /// OCaml Node directory path + #[arg(env = "OCAML_NODE_DIR", required = true)] + ocaml_node_dir: PathBuf, + + /// Openmina Node GraphQL endpoint + #[arg(env = "OPENMINA_NODE_GRAPHQL")] + openmina_node_graphql: Option, + + /// Openmina Node directory path + #[arg(env = "OPENMINA_NODE_DIR", required = true)] + openmina_node_dir: PathBuf, + + /// Check for missing breadcrumbs + #[arg(long)] + check_missing: bool, +} + +#[derive(Serialize)] +struct GraphQLQuery { + query: String, +} + +#[derive(Deserialize, Debug)] +struct SyncStatusResponse { + data: SyncStatusData, +} + +#[derive(Deserialize, Debug)] +#[serde(rename_all = "camelCase")] +struct SyncStatusData { + sync_status: String, +} + +#[derive(Deserialize, Debug)] +#[serde(rename_all = "camelCase")] +struct BlockInfo { + state_hash: String, +} + +#[derive(Deserialize, Debug)] +struct BestChainResponse { + data: BestChainData, +} 
+ +#[derive(Deserialize, Debug)] +#[serde(rename_all = "camelCase")] +struct BestChainData { + best_chain: Vec, +} + +async fn check_sync_status(endpoint: &str) -> Result { + let client = reqwest::Client::new(); + + let query = GraphQLQuery { + query: "query MyQuery { syncStatus }".to_string(), + }; + + let response = client + .post(endpoint) + .json(&query) + .send() + .await? + .json::() + .await?; + + Ok(response.data.sync_status) +} + +async fn get_best_chain(endpoint: &str) -> Result> { + let client = reqwest::Client::new(); + + let query = GraphQLQuery { + query: "query MyQuery { bestChain(maxLength: 290) { stateHash } }".to_string(), + }; + + let response = client + .post(endpoint) + .json(&query) + .send() + .await? + .json::() + .await?; + + Ok(response + .data + .best_chain + .into_iter() + .map(|block| block.state_hash) + .collect()) +} + +async fn wait_for_sync(endpoint: &str, node_name: &str) -> Result<()> { + const TIMEOUT_DURATION: Duration = Duration::from_secs(300); // 5 minutes timeout + const CHECK_INTERVAL: Duration = Duration::from_secs(5); + + let sync_check = async { + let mut interval = interval(CHECK_INTERVAL); + + loop { + interval.tick().await; + + let status = check_sync_status(endpoint).await?; + println!("{} sync status: {}", node_name, status); + + if status == "SYNCED" { + return Ok(()); + } + + println!("Waiting for {} to sync...", node_name); + } + }; + + timeout(TIMEOUT_DURATION, sync_check).await.map_err(|_| { + anyhow::anyhow!( + "Timeout waiting for {} to sync after {:?}", + node_name, + TIMEOUT_DURATION + ) + })? 
+} + +async fn compare_chains(ocaml_endpoint: &str, openmina_endpoint: &str) -> Result> { + const MAX_RETRIES: u32 = 3; + const RETRY_INTERVAL: Duration = Duration::from_secs(5); + let mut interval = interval(RETRY_INTERVAL); + + for attempt in 1..=MAX_RETRIES { + println!( + "\nAttempting chain comparison (attempt {}/{})", + attempt, MAX_RETRIES + ); + + let ocaml_chain = get_best_chain(ocaml_endpoint).await?; + let openmina_chain = get_best_chain(openmina_endpoint).await?; + + println!("Chain comparison:"); + println!("OCaml chain length: {}", ocaml_chain.len()); + println!("Openmina chain length: {}", openmina_chain.len()); + + // Try to compare chains + if let Err(e) = compare_chain_data(&ocaml_chain, &openmina_chain) { + if attempt == MAX_RETRIES { + return Err(e); + } + println!("Comparison failed: {}. Retrying in 5s...", e); + interval.tick().await; + continue; + } + + println!("✅ Chains match perfectly!"); + return Ok(ocaml_chain); + } + + unreachable!() +} + +fn compare_chain_data(ocaml_chain: &[String], openmina_chain: &[String]) -> Result<()> { + if ocaml_chain.len() != openmina_chain.len() { + anyhow::bail!( + "Chain lengths don't match! 
OCaml: {}, Openmina: {}", + ocaml_chain.len(), + openmina_chain.len() + ); + } + + for (i, (ocaml_hash, openmina_hash)) in + ocaml_chain.iter().zip(openmina_chain.iter()).enumerate() + { + if ocaml_hash != openmina_hash { + anyhow::bail!( + "Chain mismatch at position {}: \nOCaml: {}\nOpenmina: {}", + i, + ocaml_hash, + openmina_hash + ); + } + } + + Ok(()) +} + +#[derive(Debug)] +struct DiffMismatch { + state_hash: String, + reason: String, +} + +async fn compare_binary_diffs( + ocaml_dir: PathBuf, + openmina_dir: PathBuf, + state_hashes: &[String], +) -> Result> { + let mut mismatches = Vec::new(); + + if state_hashes.is_empty() { + println!("No state hashes provided, comparing all diffs"); + let files = openmina_dir.read_dir()?; + files.for_each(|file| { + let file = file.unwrap(); + let file_name = file.file_name(); + let file_name_str = file_name.to_str().unwrap(); + let ocaml_path = ocaml_dir.join(file_name_str); + let openmina_path = openmina_dir.join(file_name_str); + + // Load and deserialize both files + let ocaml_diff = match load_and_deserialize(&ocaml_path) { + Ok(diff) => diff, + Err(e) => { + mismatches.push(DiffMismatch { + state_hash: file_name_str.to_string(), + reason: format!("Failed to load OCaml diff: {}", e), + }); + return; + } + }; + + let openmina_diff = match load_and_deserialize(&openmina_path) { + Ok(diff) => diff, + Err(e) => { + mismatches.push(DiffMismatch { + state_hash: file_name_str.to_string(), + reason: format!("Failed to load Openmina diff: {}", e), + }); + return; + } + }; + + // Compare the diffs + if let Some(reason) = compare_diffs(&ocaml_diff, &openmina_diff) { + mismatches.push(DiffMismatch { + state_hash: file_name_str.to_string(), + reason, + }); + } + }); + Ok(mismatches) + } else { + for state_hash in state_hashes { + let ocaml_path = ocaml_dir.join(format!("{}.bin", state_hash)); + let openmina_path = openmina_dir.join(format!("{}.bin", state_hash)); + + // Load and deserialize both files + let ocaml_diff = match 
load_and_deserialize(&ocaml_path) { + Ok(diff) => diff, + Err(e) => { + mismatches.push(DiffMismatch { + state_hash: state_hash.clone(), + reason: format!("Failed to load OCaml diff: {}", e), + }); + continue; + } + }; + + let openmina_diff = match load_and_deserialize(&openmina_path) { + Ok(diff) => diff, + Err(e) => { + mismatches.push(DiffMismatch { + state_hash: state_hash.clone(), + reason: format!("Failed to load Openmina diff: {}", e), + }); + continue; + } + }; + + // Compare the diffs + if let Some(reason) = compare_diffs(&ocaml_diff, &openmina_diff) { + mismatches.push(DiffMismatch { + state_hash: state_hash.clone(), + reason, + }); + } + } + Ok(mismatches) + } +} + +fn load_and_deserialize(path: &PathBuf) -> Result { + let data = std::fs::read(path)?; + let diff = binprot::BinProtRead::binprot_read(&mut data.as_slice())?; + Ok(diff) +} + +fn compare_diffs( + ocaml: &ArchiveTransitionFronntierDiff, + openmina: &ArchiveTransitionFronntierDiff, +) -> Option { + match (ocaml, openmina) { + ( + ArchiveTransitionFronntierDiff::BreadcrumbAdded { + block: (b1, (body_hash1, state_hash1)), + accounts_accessed: a1, + accounts_created: c1, + tokens_used: t1, + sender_receipt_chains_from_parent_ledger: s1, + }, + ArchiveTransitionFronntierDiff::BreadcrumbAdded { + block: (b2, (body_hash2, state_hash2)), + accounts_accessed: a2, + accounts_created: c2, + tokens_used: t2, + sender_receipt_chains_from_parent_ledger: s2, + }, + ) => { + let mut mismatches = Vec::new(); + + if body_hash1 != body_hash2 { + if body_hash1.is_some() { + mismatches.push(format!( + "Body hash mismatch:\nOCaml: {:?}\nOpenmina: {:?}", + body_hash1, body_hash2 + )); + } + } else if state_hash1 != state_hash2 { + mismatches.push(format!( + "State hash mismatch:\nOCaml: {}\nOpenmina: {}", + state_hash1, state_hash2 + )); + } else if b1.header.protocol_state_proof != b2.header.protocol_state_proof { + // Note this is not a real mismatch, we can have different protocol state proofs for the same block. 
+ // If both proofs are valid, we can ignore the mismatch. + // Create a temporary copy of b1 with b2's proof for comparison + let mut b1_with_b2_proof = b1.clone(); + b1_with_b2_proof.header.protocol_state_proof = + b2.header.protocol_state_proof.clone(); + + if &b1_with_b2_proof != b2 { + let ocaml_json = + serde_json::to_string_pretty(&serde_json::to_value(b1).unwrap()).unwrap(); + let openmina_json = + serde_json::to_string_pretty(&serde_json::to_value(b2).unwrap()).unwrap(); + mismatches.push(format!( + "Block data mismatch:\nOCaml:\n{}\nOpenmina:\n{}", + ocaml_json, openmina_json + )); + } + } else if b1 != b2 { + let ocaml_json = + serde_json::to_string_pretty(&serde_json::to_value(b1).unwrap()).unwrap(); + let openmina_json = + serde_json::to_string_pretty(&serde_json::to_value(b2).unwrap()).unwrap(); + mismatches.push(format!( + "Block data mismatch:\nOCaml:\n{}\nOpenmina:\n{}", + ocaml_json, openmina_json + )); + } + + if a1 != a2 { + let ids_ocaml = a1.iter().map(|(id, _)| id.as_u64()).collect::>(); + let ids_openmina = a2.iter().map(|(id, _)| id.as_u64()).collect::>(); + + // Find missing IDs in openmina (present in ocaml but not in openmina) + let missing_in_openmina: Vec<_> = ids_ocaml.difference(&ids_openmina).collect(); + // Find extra IDs in openmina (present in openmina but not in ocaml) + let extra_in_openmina: Vec<_> = ids_openmina.difference(&ids_ocaml).collect(); + + if !missing_in_openmina.is_empty() { + println!("Missing in Openmina: {:?}", missing_in_openmina); + } + if !extra_in_openmina.is_empty() { + println!("Extra in Openmina: {:?}", extra_in_openmina); + } + + let ocaml_json = + serde_json::to_string_pretty(&serde_json::to_value(a1).unwrap()).unwrap(); + let openmina_json = + serde_json::to_string_pretty(&serde_json::to_value(a2).unwrap()).unwrap(); + mismatches.push(format!( + "Accounts accessed mismatch:\nOCaml:\n{}\nOpenmina:\n{}", + ocaml_json, openmina_json + )); + } + if c1 != c2 { + let ocaml_json = + 
serde_json::to_string_pretty(&serde_json::to_value(c1).unwrap()).unwrap(); + let openmina_json = + serde_json::to_string_pretty(&serde_json::to_value(c2).unwrap()).unwrap(); + mismatches.push(format!( + "Accounts created mismatch:\nOCaml:\n{}\nOpenmina:\n{}", + ocaml_json, openmina_json + )); + } + if t1 != t2 { + let ocaml_json = + serde_json::to_string_pretty(&serde_json::to_value(t1).unwrap()).unwrap(); + let openmina_json = + serde_json::to_string_pretty(&serde_json::to_value(t2).unwrap()).unwrap(); + mismatches.push(format!( + "Tokens used mismatch:\nOCaml:\n{}\nOpenmina:\n{}", + ocaml_json, openmina_json + )); + } + if s1 != s2 { + let ocaml_json = + serde_json::to_string_pretty(&serde_json::to_value(s1).unwrap()).unwrap(); + let openmina_json = + serde_json::to_string_pretty(&serde_json::to_value(s2).unwrap()).unwrap(); + mismatches.push(format!( + "Sender receipt chains mismatch:\nOCaml:\n{}\nOpenmina:\n{}", + ocaml_json, openmina_json + )); + } + + if mismatches.is_empty() { + None + } else { + Some(mismatches.join("\n\n")) + } + } + _ => { + let ocaml_json = + serde_json::to_string_pretty(&serde_json::to_value(ocaml).unwrap()).unwrap(); + let openmina_json = + serde_json::to_string_pretty(&serde_json::to_value(openmina).unwrap()).unwrap(); + Some(format!( + "Different diff types:\nOCaml:\n{}\nOpenmina:\n{}", + ocaml_json, openmina_json + )) + } + } +} + +async fn check_missing_breadcrumbs( + openmina_node_dir: PathBuf, + openmina_endpoint: &str, +) -> Result<()> { + let files = openmina_node_dir.read_dir()?; + let best_chain = get_best_chain(openmina_endpoint).await?; + let mut missing_breadcrumbs = Vec::new(); + + let file_names = files + .map(|file| { + file.unwrap() + .file_name() + .to_str() + .unwrap() + .to_string() + .strip_suffix(".bin") + .unwrap() + .to_owned() + }) + .collect::>(); + + for best_chain_hash in best_chain { + if !file_names.contains(&best_chain_hash.to_string()) { + missing_breadcrumbs.push(best_chain_hash.to_string()); + } + } + 
+ if !missing_breadcrumbs.is_empty() { + println!( + "❌ Found {} missing breadcrumbs:", + missing_breadcrumbs.len() + ); + for missing_breadcrumb in missing_breadcrumbs { + println!("{}", missing_breadcrumb); + } + } else { + println!("✅ All breadcrumbs present!"); + } + + Ok(()) +} + +#[tokio::main] +async fn main() -> Result<()> { + let args = Args::parse(); + + let mut best_chain = Vec::new(); + + println!("Checking for missing breadcrumbs..."); + + if args.check_missing { + check_missing_breadcrumbs( + args.openmina_node_dir, + args.openmina_node_graphql.as_deref().unwrap(), + ) + .await?; + return Ok(()); + } + + if let (Some(ocaml_graphql), Some(openmina_graphql)) = + (args.ocaml_node_graphql, args.openmina_node_graphql) + { + // Wait for both nodes to be synced + println!("Waiting for nodes to sync..."); + wait_for_sync(&ocaml_graphql, "OCaml Node").await?; + wait_for_sync(&openmina_graphql, "Openmina Node").await?; + println!("Both nodes are synced! ✅\n"); + // Compare chains with retry logic + let bc = compare_chains(&ocaml_graphql, &openmina_graphql).await?; + println!("Comparing binary diffs for {} blocks...", bc.len()); + best_chain.extend_from_slice(&bc); + } else { + println!("No graphql endpoints provided, skipping chain comparison"); + } + + let mismatches = + compare_binary_diffs(args.ocaml_node_dir, args.openmina_node_dir, &best_chain).await?; + + if mismatches.is_empty() { + println!("✅ All binary diffs match perfectly!"); + } else { + println!("\n❌ Found {} mismatches:", mismatches.len()); + + // let first_mismatch = mismatches.first().unwrap(); + // println!( + // "\nMismatch #{}: \nState Hash: {}\nReason: {}", + // 1, first_mismatch.state_hash, first_mismatch.reason + // ); + // println!("Another {} missmatches are pending", mismatches.len() - 1); + for (i, mismatch) in mismatches.iter().enumerate() { + println!( + "\nMismatch #{}: \nState Hash: {}\nReason: {}", + i + 1, + mismatch.state_hash, + mismatch.reason + ); + } + anyhow::bail!("Binary 
diff comparison failed"); + } + + Ok(()) +} diff --git a/tools/bootstrap-sandbox/Cargo.toml b/tools/bootstrap-sandbox/Cargo.toml index 1d17fac5d4..332daa5e4b 100644 --- a/tools/bootstrap-sandbox/Cargo.toml +++ b/tools/bootstrap-sandbox/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "openmina-bootstrap-sandbox" -version = "0.13.0" +version = "0.14.0" edition = "2021" [dependencies] @@ -13,7 +13,7 @@ thiserror = { version = "1.0" } bs58 = { version = "0.5.0", features = ["check"] } rand = { version = "0.8.5" } -base64 = { version = "0.21.7" } +base64 = { version = "0.22" } tokio = { version = "1.37", features = ["macros", "rt-multi-thread"] } diff --git a/tools/bootstrap-sandbox/Dockerfile b/tools/bootstrap-sandbox/Dockerfile index 7ab38d3a52..2fffd3462d 100644 --- a/tools/bootstrap-sandbox/Dockerfile +++ b/tools/bootstrap-sandbox/Dockerfile @@ -1,4 +1,4 @@ -FROM rust:1.83.0-bullseye AS builder +FROM rust:1.84.0-bullseye AS builder RUN mkdir -p -m 0700 ~/.ssh && ssh-keyscan github.com >> ~/.ssh/known_hosts diff --git a/tools/fuzzing/Cargo.toml b/tools/fuzzing/Cargo.toml index 39d7d0d674..83098fad5f 100644 --- a/tools/fuzzing/Cargo.toml +++ b/tools/fuzzing/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "transaction_fuzzer" -version = "0.13.0" +version = "0.14.0" edition = "2021" @@ -37,7 +37,7 @@ itertools = "0.11.0" binprot = { git = "https://github.com/openmina/binprot-rs", rev = "400b52c" } binprot_derive = { git = "https://github.com/openmina/binprot-rs", rev = "400b52c" } clap = "4.5.20" - +node = { path = "../../node" } [profile.release] opt-level = 3 diff --git a/tools/fuzzing/src/main.rs b/tools/fuzzing/src/main.rs index b8ebf02ecb..3fa13fa44e 100644 --- a/tools/fuzzing/src/main.rs +++ b/tools/fuzzing/src/main.rs @@ -8,10 +8,9 @@ pub mod transaction_fuzzer { pub mod generator; pub mod invariants; pub mod mutator; - use binprot::{ macros::{BinProtRead, BinProtWrite}, - BinProtRead, BinProtSize, BinProtWrite, + BinProtRead, BinProtSize, BinProtWrite, SmallString1k, }; 
use context::{ApplyTxResult, FuzzerCtx, FuzzerCtxBuilder}; use coverage::{ @@ -20,7 +19,7 @@ pub mod transaction_fuzzer { stats::Stats, }; use ledger::{ - scan_state::transaction_logic::{zkapp_command::ZkAppCommand, Transaction, UserCommand}, + scan_state::transaction_logic::{Transaction, UserCommand}, sparse_ledger::LedgerIntf, Account, BaseLedger, }; @@ -28,6 +27,7 @@ pub mod transaction_fuzzer { use mina_p2p_messages::bigint::BigInt; use openmina_core::constants::ConstraintConstantsUnversioned; use std::io::{Read, Write}; + use std::panic; use std::{ env, process::{ChildStdin, ChildStdout}, @@ -105,6 +105,11 @@ pub mod transaction_fuzzer { .filter_path(".rustup/") .filter_path("mina-p2p-messages/") .filter_path("core/") + .filter_path("tools/") + .filter_path("p2p/") + .filter_path("node/") + .filter_path("vrf/") + .filter_path("snark/") .filter_path("proofs/") ); } @@ -115,6 +120,8 @@ pub mod transaction_fuzzer { enum Action { SetConstraintConstants(ConstraintConstantsUnversioned), SetInitialAccounts(Vec), + SetupPool, + PoolVerify(UserCommand), GetAccounts, ApplyTx(UserCommand), #[allow(dead_code)] @@ -125,11 +132,39 @@ pub mod transaction_fuzzer { enum ActionOutput { ConstraintConstantsSet, InitialAccountsSet(BigInt), + SetupPool, + PoolVerify(Result, SmallString1k>), Accounts(Vec), TxApplied(ApplyTxResult), ExitAck, } + #[coverage(off)] + fn ocaml_setup_pool(stdin: &mut ChildStdin, stdout: &mut ChildStdout) { + let action = Action::SetupPool; + serialize(&action, stdin); + let output: ActionOutput = deserialize(stdout); + match output { + ActionOutput::SetupPool => (), + _ => panic!("Expected SetupPool"), + } + } + + #[coverage(off)] + fn ocaml_pool_verify( + stdin: &mut ChildStdin, + stdout: &mut ChildStdout, + user_command: UserCommand, + ) -> Result, SmallString1k> { + let action = Action::PoolVerify(user_command); + serialize(&action, stdin); + let output: ActionOutput = deserialize(stdout); + match output { + ActionOutput::PoolVerify(result) => result, 
+ _ => panic!("Expected SetupPool"), + } + } + #[coverage(off)] fn ocaml_set_initial_accounts( ctx: &mut FuzzerCtx, @@ -197,6 +232,8 @@ pub mod transaction_fuzzer { break_on_invariant: bool, seed: u64, minimum_fee: u64, + pool_fuzzing: bool, + transaction_application_fuzzing: bool, ) { *invariants::BREAK.write().unwrap() = break_on_invariant; let mut cov_stats = CoverageStats::new(); @@ -210,6 +247,10 @@ pub mod transaction_fuzzer { ocaml_set_constraint_constants(&mut ctx, stdin, stdout); ocaml_set_initial_accounts(&mut ctx, stdin, stdout); + if pool_fuzzing { + ocaml_setup_pool(stdin, stdout); + } + let mut fuzzer_made_progress = false; for iteration in 0.. { @@ -238,86 +279,145 @@ pub mod transaction_fuzzer { } let user_command: UserCommand = ctx.random_user_command(); - let ocaml_apply_result = ocaml_apply_transaction(stdin, stdout, user_command.clone()); - let mut ledger = ctx.get_ledger_inner().make_child(); - - // Apply transaction on the Rust side - if let Err(error) = - ctx.apply_transaction(&mut ledger, &user_command, &ocaml_apply_result) - { - println!("!!! 
{error}"); - // Diff generated command form serialized version (detect hash inconsitencies) - if let Transaction::Command(ocaml_user_command) = - ocaml_apply_result.apply_result[0].transaction().data - { - if let UserCommand::ZkAppCommand(command) = &ocaml_user_command { - command.account_updates.ensure_hashed(); + if pool_fuzzing { + let ocaml_pool_verify_result = + ocaml_pool_verify(stdin, stdout, user_command.clone()); + + match panic::catch_unwind( + #[coverage(off)] + || ctx.pool_verify(&user_command, &ocaml_pool_verify_result), + ) { + Ok(mismatch) => { + if mismatch { + let mut ledger = ctx.get_ledger_inner().make_child(); + let bigint: num_bigint::BigUint = + LedgerIntf::merkle_root(&mut ledger).into(); + ctx.save_fuzzcase(&user_command, &bigint.to_string()); + + std::process::exit(0); + } else { + if let Err(_error) = ocaml_pool_verify_result { + //println!("Skipping application: {:?}", _error); + continue; + } + } + } + Err(_) => { + println!("!!! PANIC detected"); + let mut ledger = ctx.get_ledger_inner().make_child(); + let bigint: num_bigint::BigUint = + LedgerIntf::merkle_root(&mut ledger).into(); + ctx.save_fuzzcase(&user_command, &bigint.to_string()); + + std::process::exit(0); } - - println!("{}", ctx.diagnostic(&user_command, &ocaml_user_command)); } + } + + if transaction_application_fuzzing { + let ocaml_apply_result = + ocaml_apply_transaction(stdin, stdout, user_command.clone()); + let mut ledger = ctx.get_ledger_inner().make_child(); - let ocaml_accounts = ocaml_get_accounts(stdin, stdout); - let rust_accounts = ledger.to_list(); + // Apply transaction on the Rust side + if let Err(error) = + ctx.apply_transaction(&mut ledger, &user_command, &ocaml_apply_result) + { + println!("!!! 
{error}"); + + // Diff generated command form serialized version (detect hash inconsitencies) + if let Transaction::Command(ocaml_user_command) = + ocaml_apply_result.apply_result[0].transaction().data + { + if let UserCommand::ZkAppCommand(command) = &ocaml_user_command { + command.account_updates.ensure_hashed(); + } + + println!("{}", ctx.diagnostic(&user_command, &ocaml_user_command)); + } - for ocaml_account in ocaml_accounts.iter() { - match rust_accounts.iter().find( - #[coverage(off)] - |account| account.public_key == ocaml_account.public_key, - ) { - Some(rust_account) => { - if rust_account != ocaml_account { + let ocaml_accounts = ocaml_get_accounts(stdin, stdout); + let rust_accounts = ledger.to_list(); + + for ocaml_account in ocaml_accounts.iter() { + match rust_accounts.iter().find( + #[coverage(off)] + |account| account.public_key == ocaml_account.public_key, + ) { + Some(rust_account) => { + if rust_account != ocaml_account { + println!( + "Content mismatch between OCaml and Rust account:\n{}", + ctx.diagnostic(rust_account, ocaml_account) + ); + } + } + None => { println!( - "Content mismatch between OCaml and Rust account:\n{}", - ctx.diagnostic(rust_account, ocaml_account) + "OCaml account not present in Rust ledger: {:?}", + ocaml_account ); } } - None => { + } + + for rust_account in rust_accounts.iter() { + if !ocaml_accounts.iter().any( + #[coverage(off)] + |account| account.public_key == rust_account.public_key, + ) { println!( - "OCaml account not present in Rust ledger: {:?}", - ocaml_account + "Rust account not present in Ocaml ledger: {:?}", + rust_account ); } } - } - for rust_account in rust_accounts.iter() { - if !ocaml_accounts.iter().any( - #[coverage(off)] - |account| account.public_key == rust_account.public_key, - ) { - println!( - "Rust account not present in Ocaml ledger: {:?}", - rust_account - ); - } - } + let bigint: num_bigint::BigUint = LedgerIntf::merkle_root(&mut ledger).into(); + ctx.save_fuzzcase(&user_command, 
&bigint.to_string()); - let bigint: num_bigint::BigUint = LedgerIntf::merkle_root(&mut ledger).into(); - ctx.save_fuzzcase(&user_command, &bigint.to_string()); - - // Exiting due to inconsistent state - std::process::exit(0); + // Exiting due to inconsistent state + std::process::exit(0); + } } } } #[coverage(off)] - pub fn reproduce(stdin: &mut ChildStdin, stdout: &mut ChildStdout, fuzzcase: &String) { + pub fn reproduce( + stdin: &mut ChildStdin, + stdout: &mut ChildStdout, + fuzzcase: &String, + pool_fuzzing: bool, + transaction_application_fuzzing: bool, + ) { let mut ctx = FuzzerCtxBuilder::new().build(); let user_command = ctx.load_fuzzcase(fuzzcase); ocaml_set_constraint_constants(&mut ctx, stdin, stdout); ocaml_set_initial_accounts(&mut ctx, stdin, stdout); - let mut ledger = ctx.get_ledger_inner().make_child(); - let ocaml_apply_result = ocaml_apply_transaction(stdin, stdout, user_command.clone()); - let rust_apply_result = - ctx.apply_transaction(&mut ledger, &user_command, &ocaml_apply_result); + if pool_fuzzing { + ocaml_setup_pool(stdin, stdout); - println!("apply_transaction: {:?}", rust_apply_result); + let ocaml_pool_verify_result = ocaml_pool_verify(stdin, stdout, user_command.clone()); + + println!("OCaml pool verify: {:?}", ocaml_pool_verify_result); + + if ctx.pool_verify(&user_command, &ocaml_pool_verify_result) { + return; + } + } + + if transaction_application_fuzzing { + let mut ledger = ctx.get_ledger_inner().make_child(); + let ocaml_apply_result = ocaml_apply_transaction(stdin, stdout, user_command.clone()); + let rust_apply_result = + ctx.apply_transaction(&mut ledger, &user_command, &ocaml_apply_result); + + println!("apply_transaction: {:?}", rust_apply_result); + } } } @@ -340,6 +440,18 @@ fn main() { .default_value("42") .value_parser(clap::value_parser!(u64)), ) + .arg( + clap::Arg::new("pool-fuzzing") + .long("pool-fuzzing") + .default_value("true") + .value_parser(clap::value_parser!(bool)), + ) + .arg( + 
clap::Arg::new("transaction-application-fuzzing") + .long("transaction-application-fuzzing") + .default_value("true") + .value_parser(clap::value_parser!(bool)), + ) .get_matches(); let mut child = Command::new( @@ -363,16 +475,33 @@ fn main() { let stdin = child.stdin.as_mut().expect("Failed to open stdin"); let stdout = child.stdout.as_mut().expect("Failed to open stdout"); + let pool_fuzzing = *matches.get_one::("pool-fuzzing").unwrap(); + let transaction_application_fuzzing = *matches + .get_one::("transaction-application-fuzzing") + .unwrap(); + if let Some(fuzzcase) = matches.get_one::("fuzzcase") { println!("Reproducing fuzzcase from file: {}", fuzzcase); - transaction_fuzzer::reproduce(stdin, stdout, fuzzcase); + transaction_fuzzer::reproduce( + stdin, + stdout, + fuzzcase, + pool_fuzzing, + transaction_application_fuzzing, + ); } else { - let Some(seed) = matches.get_one::("seed") else { - unreachable!() - }; - - println!("Running the fuzzer with seed {seed}..."); - transaction_fuzzer::fuzz(stdin, stdout, true, *seed, 1000); + let seed = *matches.get_one::("seed").unwrap(); + println!("Fuzzing [seed: {seed}] [transaction application: {transaction_application_fuzzing} ] [pool: {pool_fuzzing}]..."); + + transaction_fuzzer::fuzz( + stdin, + stdout, + true, + seed, + 1000, + pool_fuzzing, + transaction_application_fuzzing, + ); } } } diff --git a/tools/fuzzing/src/transaction_fuzzer/context.rs b/tools/fuzzing/src/transaction_fuzzer/context.rs index fa9417a9e7..0ae131484b 100644 --- a/tools/fuzzing/src/transaction_fuzzer/context.rs +++ b/tools/fuzzing/src/transaction_fuzzer/context.rs @@ -5,10 +5,6 @@ use crate::transaction_fuzzer::{ }; use ark_ff::fields::arithmetic::InvalidBigInt; use ark_ff::Zero; -use ledger::scan_state::currency::{Amount, Fee, Length, Magnitude, Nonce, Signed, Slot}; -use ledger::scan_state::transaction_logic::protocol_state::{ - protocol_state_view, EpochData, EpochLedger, ProtocolStateView, -}; use 
ledger::scan_state::transaction_logic::transaction_applied::{ signed_command_applied, CommandApplied, TransactionApplied, Varying, }; @@ -18,6 +14,16 @@ use ledger::scan_state::transaction_logic::{ use ledger::sparse_ledger::LedgerIntf; use ledger::staged_ledger::staged_ledger::StagedLedger; use ledger::{dummy, Account, AccountId, Database, Mask, Timing, TokenId}; +use ledger::{ + scan_state::currency::{Amount, Fee, Length, Magnitude, Nonce, Signed, Slot}, + transaction_pool::TransactionPool, +}; +use ledger::{ + scan_state::transaction_logic::protocol_state::{ + protocol_state_view, EpochData, EpochLedger, ProtocolStateView, + }, + transaction_pool, +}; use mina_curves::pasta::Fq; use mina_hasher::Fp; use mina_p2p_messages::binprot::SmallString1k; @@ -29,27 +35,14 @@ use mina_p2p_messages::{ }, }; use mina_signer::{CompressedPubKey, Keypair}; -use openmina_core::constants::ConstraintConstants; +use node::DEVNET_CONFIG; +use openmina_core::{consensus::ConsensusConstants, constants::ConstraintConstants, NetworkConfig}; use rand::{rngs::SmallRng, seq::SliceRandom, Rng, SeedableRng}; use ring_buffer::RingBuffer; -use std::collections::HashMap; +use std::collections::{BTreeMap, HashMap}; use std::fmt::Debug; use std::{fs, str::FromStr}; -/// Same values when we run `dune runtest src/lib/staged_ledger -f` -pub const CONSTRAINT_CONSTANTS: ConstraintConstants = ConstraintConstants { - sub_windows_per_window: 11, - ledger_depth: 35, - work_delay: 2, - block_window_duration_ms: 180000, - transaction_capacity_log_2: 7, - pending_coinbase_depth: 5, - coinbase_amount: 720000000000, - supercharged_coinbase_factor: 2, - account_creation_fee: 1000000000, - fork: None, -}; - // Taken from ocaml_tests /// Same values when we run `dune runtest src/lib/staged_ledger -f` #[coverage(off)] @@ -267,6 +260,7 @@ pub struct GeneratorCtx { pub struct FuzzerCtx { pub constraint_constants: ConstraintConstants, pub txn_state_view: ProtocolStateView, + pub pool: TransactionPool, pub 
fuzzcases_path: String, pub gen: GeneratorCtx, pub state: FuzzerState, @@ -349,11 +343,13 @@ impl FuzzerCtx { } #[coverage(off)] - pub fn get_account(&mut self, pkey: &CompressedPubKey) -> Option { - let account_location = LedgerIntf::location_of_account( - self.get_ledger_inner(), - &AccountId::new(pkey.clone(), TokenId::default()), - ); + pub fn get_account(&self, pkey: &CompressedPubKey) -> Option { + self.get_account_by_id(&AccountId::new(pkey.clone(), TokenId::default())) + } + + #[coverage(off)] + pub fn get_account_by_id(&self, account_id: &AccountId) -> Option { + let account_location = LedgerIntf::location_of_account(self.get_ledger_inner(), account_id); account_location.map( #[coverage(off)] @@ -362,7 +358,7 @@ impl FuzzerCtx { } #[coverage(off)] - pub fn find_sender(&mut self, pkey: &CompressedPubKey) -> Option<&(Keypair, PermissionModel)> { + pub fn find_sender(&self, pkey: &CompressedPubKey) -> Option<&(Keypair, PermissionModel)> { self.state.potential_senders.iter().find( #[coverage(off)] |(kp, _)| kp.public.into_compressed() == *pkey, @@ -370,7 +366,7 @@ impl FuzzerCtx { } #[coverage(off)] - pub fn find_permissions(&mut self, pkey: &CompressedPubKey) -> Option<&PermissionModel> { + pub fn find_permissions(&self, pkey: &CompressedPubKey) -> Option<&PermissionModel> { self.find_sender(pkey).map( #[coverage(off)] |(_, pm)| pm, @@ -378,7 +374,7 @@ impl FuzzerCtx { } #[coverage(off)] - pub fn find_keypair(&mut self, pkey: &CompressedPubKey) -> Option<&Keypair> { + pub fn find_keypair(&self, pkey: &CompressedPubKey) -> Option<&Keypair> { self.find_sender(pkey).map( #[coverage(off)] |(kp, _)| kp, @@ -541,6 +537,78 @@ impl FuzzerCtx { ret } + #[coverage(off)] + pub fn pool_verify( + &self, + user_command: &UserCommand, + ocaml_pool_verify_result: &Result, SmallString1k>, + ) -> bool { + let diff = transaction_pool::diff::Diff { + list: vec![user_command.clone()], + }; + let account_ids = user_command.accounts_referenced(); + let accounts = account_ids + 
.iter() + .filter_map( + #[coverage(off)] + |account_id| { + self.get_account_by_id(account_id).and_then( + #[coverage(off)] + |account| Some((account_id.clone(), account)), + ) + }, + ) + .collect::>(); + + let rust_pool_result = self.pool.prevalidate(diff); + let mismatch; + + if let Ok(diff) = rust_pool_result { + let convert_diff_result = self.pool.convert_diff_to_verifiable(diff, &accounts); + + if let Ok(commands) = convert_diff_result { + let verify_result = &ledger::verifier::Verifier.verify_commands(commands, None)[0]; + let ocaml_pool_verify_result = ocaml_pool_verify_result.clone().map( + #[coverage(off)] + |commands| commands[0].clone(), + ); + + *ledger::GLOBAL_SKIP_PARTIAL_EQ.write().unwrap() = true; + mismatch = ocaml_pool_verify_result.is_ok() + && (verify_result.is_err() + || verify_result.as_ref().unwrap().forget_check() + != ocaml_pool_verify_result.clone().unwrap()); + + if mismatch { + println!( + "verify_commands: Mismatch between Rust and OCaml pool_verify_result\n{}", + self.diagnostic(&verify_result, &ocaml_pool_verify_result) + ); + } + } else { + mismatch = ocaml_pool_verify_result.is_ok(); + + if mismatch { + println!( + "convert_diff_to_verifiable: Mismatch between Rust and OCaml pool_verify_result\n{}", + self.diagnostic(&convert_diff_result, &ocaml_pool_verify_result) + ); + } + } + } else { + mismatch = ocaml_pool_verify_result.is_ok(); + + if mismatch { + println!( + "prevalidate: Mismatch between Rust and OCaml pool_verify_result\n{}", + self.diagnostic(&rust_pool_result, &ocaml_pool_verify_result) + ); + } + } + + return mismatch; + } + #[coverage(off)] pub fn apply_transaction( &mut self, @@ -694,6 +762,7 @@ impl FuzzerCtx { pub struct FuzzerCtxBuilder { constraint_constants: Option, txn_state_view: Option, + pool: Option, fuzzcases_path: Option, seed: u64, minimum_fee: u64, @@ -710,6 +779,7 @@ impl Default for FuzzerCtxBuilder { Self { constraint_constants: None, txn_state_view: None, + pool: None, fuzzcases_path: None, seed: 0, 
minimum_fee: 1_000_000, @@ -740,6 +810,11 @@ impl FuzzerCtxBuilder { self } + pub fn transaction_pool(&mut self, pool: TransactionPool) -> &mut Self { + self.pool = Some(pool); + self + } + #[coverage(off)] pub fn fuzzcases_path(&mut self, fuzzcases_path: String) -> &mut Self { self.fuzzcases_path = Some(fuzzcases_path); @@ -786,16 +861,35 @@ impl FuzzerCtxBuilder { #[coverage(off)] pub fn build(&mut self) -> FuzzerCtx { - let constraint_constants = self + let mut constraint_constants = self .constraint_constants .clone() - .unwrap_or(CONSTRAINT_CONSTANTS); + .unwrap_or(NetworkConfig::global().constraint_constants.clone()); + + // HACK (binprot breaks in the OCaml side) + constraint_constants.fork = None; + let depth = constraint_constants.ledger_depth as usize; let root = Mask::new_root(Database::create(depth.try_into().unwrap())); let txn_state_view = self .txn_state_view .clone() .unwrap_or(dummy_state_view(None)); + + let protocol_constants = DEVNET_CONFIG + .protocol_constants() + .expect("wrong protocol constants"); + + let default_pool = TransactionPool::new( + ledger::transaction_pool::Config { + trust_system: (), + pool_max_size: 3000, + slot_tx_end: None, + }, + &ConsensusConstants::create(&constraint_constants, &protocol_constants), + ); + + let pool = self.pool.clone().unwrap_or(default_pool); let fuzzcases_path = self.fuzzcases_path.clone().unwrap_or("./".to_string()); let ledger = match self.is_staged_ledger { @@ -813,6 +907,7 @@ impl FuzzerCtxBuilder { let mut ctx = FuzzerCtx { constraint_constants, txn_state_view, + pool, fuzzcases_path, gen: GeneratorCtx { rng: SmallRng::seed_from_u64(self.seed), diff --git a/tools/gossipsub-sandbox/Cargo.toml b/tools/gossipsub-sandbox/Cargo.toml index 3ad065f204..896f354af8 100644 --- a/tools/gossipsub-sandbox/Cargo.toml +++ b/tools/gossipsub-sandbox/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "openmina-gossipsub-sandbox" -version = "0.13.0" +version = "0.14.0" edition = "2021" [dependencies] diff --git 
a/tools/hash-tool/Cargo.toml b/tools/hash-tool/Cargo.toml index 10dcacd052..6eb42d8eca 100644 --- a/tools/hash-tool/Cargo.toml +++ b/tools/hash-tool/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "hash-tool" -version = "0.13.0" +version = "0.14.0" edition = "2021" [dependencies] diff --git a/tools/heartbeats-processor/.gitignore b/tools/heartbeats-processor/.gitignore new file mode 100644 index 0000000000..c6f9ae0c43 --- /dev/null +++ b/tools/heartbeats-processor/.gitignore @@ -0,0 +1,5 @@ + +/data +/credentials +.env +*.db \ No newline at end of file diff --git a/tools/heartbeats-processor/.sqlx/query-12261ecc56a9408bc7b95eb66dd939823a72b071adc7428ef353375982274d7f.json b/tools/heartbeats-processor/.sqlx/query-12261ecc56a9408bc7b95eb66dd939823a72b071adc7428ef353375982274d7f.json new file mode 100644 index 0000000000..fc5b537638 --- /dev/null +++ b/tools/heartbeats-processor/.sqlx/query-12261ecc56a9408bc7b95eb66dd939823a72b071adc7428ef353375982274d7f.json @@ -0,0 +1,38 @@ +{ + "db_name": "SQLite", + "query": "\n SELECT\n pk.public_key,\n ss.score,\n ss.blocks_produced,\n ss.last_updated\n FROM submitter_scores ss\n JOIN public_keys pk ON pk.id = ss.public_key_id\n ORDER BY ss.score DESC\n ", + "describe": { + "columns": [ + { + "name": "public_key", + "ordinal": 0, + "type_info": "Text" + }, + { + "name": "score", + "ordinal": 1, + "type_info": "Integer" + }, + { + "name": "blocks_produced", + "ordinal": 2, + "type_info": "Integer" + }, + { + "name": "last_updated", + "ordinal": 3, + "type_info": "Integer" + } + ], + "parameters": { + "Right": 0 + }, + "nullable": [ + false, + false, + false, + false + ] + }, + "hash": "12261ecc56a9408bc7b95eb66dd939823a72b071adc7428ef353375982274d7f" +} diff --git a/tools/heartbeats-processor/.sqlx/query-1d954275ae05319000ad2b298491ffab0747985e8af22a52ac0ed77c3cf27e64.json b/tools/heartbeats-processor/.sqlx/query-1d954275ae05319000ad2b298491ffab0747985e8af22a52ac0ed77c3cf27e64.json new file mode 100644 index 0000000000..ec9715b3df 
--- /dev/null +++ b/tools/heartbeats-processor/.sqlx/query-1d954275ae05319000ad2b298491ffab0747985e8af22a52ac0ed77c3cf27e64.json @@ -0,0 +1,12 @@ +{ + "db_name": "SQLite", + "query": "\n UPDATE time_windows\n SET disabled = ?1\n WHERE start_time >= ?2 AND end_time < ?3\n ", + "describe": { + "columns": [], + "parameters": { + "Right": 3 + }, + "nullable": [] + }, + "hash": "1d954275ae05319000ad2b298491ffab0747985e8af22a52ac0ed77c3cf27e64" +} diff --git a/tools/heartbeats-processor/.sqlx/query-22a52592b753ea43f7e33af6166f9b8f74f03c087618df92e9b9062c8af30314.json b/tools/heartbeats-processor/.sqlx/query-22a52592b753ea43f7e33af6166f9b8f74f03c087618df92e9b9062c8af30314.json new file mode 100644 index 0000000000..7adb655c92 --- /dev/null +++ b/tools/heartbeats-processor/.sqlx/query-22a52592b753ea43f7e33af6166f9b8f74f03c087618df92e9b9062c8af30314.json @@ -0,0 +1,20 @@ +{ + "db_name": "SQLite", + "query": "INSERT INTO time_windows (start_time, end_time) VALUES (?1, ?2) RETURNING id", + "describe": { + "columns": [ + { + "name": "id", + "ordinal": 0, + "type_info": "Integer" + } + ], + "parameters": { + "Right": 2 + }, + "nullable": [ + false + ] + }, + "hash": "22a52592b753ea43f7e33af6166f9b8f74f03c087618df92e9b9062c8af30314" +} diff --git a/tools/heartbeats-processor/.sqlx/query-25c9e074156b792e92cbfbaf5954647bfdda59b680b5a393c5324dd6d8a19683.json b/tools/heartbeats-processor/.sqlx/query-25c9e074156b792e92cbfbaf5954647bfdda59b680b5a393c5324dd6d8a19683.json new file mode 100644 index 0000000000..ea821622ea --- /dev/null +++ b/tools/heartbeats-processor/.sqlx/query-25c9e074156b792e92cbfbaf5954647bfdda59b680b5a393c5324dd6d8a19683.json @@ -0,0 +1,38 @@ +{ + "db_name": "SQLite", + "query": "\n SELECT\n pk.public_key,\n ss.score,\n ss.blocks_produced,\n datetime(ss.last_updated, 'unixepoch') as last_updated\n FROM submitter_scores ss\n JOIN public_keys pk ON pk.id = ss.public_key_id\n ORDER BY ss.score DESC, ss.blocks_produced DESC\n ", + "describe": { + "columns": [ + { + 
"name": "public_key", + "ordinal": 0, + "type_info": "Text" + }, + { + "name": "score", + "ordinal": 1, + "type_info": "Integer" + }, + { + "name": "blocks_produced", + "ordinal": 2, + "type_info": "Integer" + }, + { + "name": "last_updated", + "ordinal": 3, + "type_info": "Text" + } + ], + "parameters": { + "Right": 0 + }, + "nullable": [ + false, + false, + false, + true + ] + }, + "hash": "25c9e074156b792e92cbfbaf5954647bfdda59b680b5a393c5324dd6d8a19683" +} diff --git a/tools/heartbeats-processor/.sqlx/query-30837ba4832ee31eee8b4568a9b431d3277c344bd0e396e6ae199de5ad85f82a.json b/tools/heartbeats-processor/.sqlx/query-30837ba4832ee31eee8b4568a9b431d3277c344bd0e396e6ae199de5ad85f82a.json new file mode 100644 index 0000000000..9ae54e9c3c --- /dev/null +++ b/tools/heartbeats-processor/.sqlx/query-30837ba4832ee31eee8b4568a9b431d3277c344bd0e396e6ae199de5ad85f82a.json @@ -0,0 +1,12 @@ +{ + "db_name": "SQLite", + "query": "\n UPDATE time_windows\n SET disabled = TRUE\n WHERE start_time >= ? AND end_time <= ?\n ", + "describe": { + "columns": [], + "parameters": { + "Right": 2 + }, + "nullable": [] + }, + "hash": "30837ba4832ee31eee8b4568a9b431d3277c344bd0e396e6ae199de5ad85f82a" +} diff --git a/tools/heartbeats-processor/.sqlx/query-45341663b6eb18a14a7106b3bf53a63c585f507ac1a957ee60a89b0de50fc9fc.json b/tools/heartbeats-processor/.sqlx/query-45341663b6eb18a14a7106b3bf53a63c585f507ac1a957ee60a89b0de50fc9fc.json new file mode 100644 index 0000000000..ea8cb74426 --- /dev/null +++ b/tools/heartbeats-processor/.sqlx/query-45341663b6eb18a14a7106b3bf53a63c585f507ac1a957ee60a89b0de50fc9fc.json @@ -0,0 +1,32 @@ +{ + "db_name": "SQLite", + "query": "\n SELECT id, start_time, end_time\n FROM time_windows\n WHERE start_time <= ?2 AND end_time >= ?1 AND disabled = FALSE\n ORDER BY start_time ASC\n ", + "describe": { + "columns": [ + { + "name": "id", + "ordinal": 0, + "type_info": "Integer" + }, + { + "name": "start_time", + "ordinal": 1, + "type_info": "Integer" + }, + { + "name": 
"end_time", + "ordinal": 2, + "type_info": "Integer" + } + ], + "parameters": { + "Right": 2 + }, + "nullable": [ + true, + false, + false + ] + }, + "hash": "45341663b6eb18a14a7106b3bf53a63c585f507ac1a957ee60a89b0de50fc9fc" +} diff --git a/tools/heartbeats-processor/.sqlx/query-93720d0caecab27616c2826d0a188fabef65f281986d72ffffab0dd58369a673.json b/tools/heartbeats-processor/.sqlx/query-93720d0caecab27616c2826d0a188fabef65f281986d72ffffab0dd58369a673.json new file mode 100644 index 0000000000..2a250fe40d --- /dev/null +++ b/tools/heartbeats-processor/.sqlx/query-93720d0caecab27616c2826d0a188fabef65f281986d72ffffab0dd58369a673.json @@ -0,0 +1,20 @@ +{ + "db_name": "SQLite", + "query": "SELECT last_processed_time FROM processing_state WHERE id = 1", + "describe": { + "columns": [ + { + "name": "last_processed_time", + "ordinal": 0, + "type_info": "Integer" + } + ], + "parameters": { + "Right": 0 + }, + "nullable": [ + false + ] + }, + "hash": "93720d0caecab27616c2826d0a188fabef65f281986d72ffffab0dd58369a673" +} diff --git a/tools/heartbeats-processor/.sqlx/query-98b0cf1049d82f39c4ba61515174126b1be6873040411debedab5b27ca2606b9.json b/tools/heartbeats-processor/.sqlx/query-98b0cf1049d82f39c4ba61515174126b1be6873040411debedab5b27ca2606b9.json new file mode 100644 index 0000000000..aa1bf181f6 --- /dev/null +++ b/tools/heartbeats-processor/.sqlx/query-98b0cf1049d82f39c4ba61515174126b1be6873040411debedab5b27ca2606b9.json @@ -0,0 +1,20 @@ +{ + "db_name": "SQLite", + "query": "SELECT COUNT(*) as count FROM time_windows WHERE disabled = FALSE", + "describe": { + "columns": [ + { + "name": "count", + "ordinal": 0, + "type_info": "Integer" + } + ], + "parameters": { + "Right": 0 + }, + "nullable": [ + false + ] + }, + "hash": "98b0cf1049d82f39c4ba61515174126b1be6873040411debedab5b27ca2606b9" +} diff --git a/tools/heartbeats-processor/.sqlx/query-a29bafe3ddeef887e7e08390d03f664302deac08315a30a499a569310799055a.json 
b/tools/heartbeats-processor/.sqlx/query-a29bafe3ddeef887e7e08390d03f664302deac08315a30a499a569310799055a.json new file mode 100644 index 0000000000..9434b2dee2 --- /dev/null +++ b/tools/heartbeats-processor/.sqlx/query-a29bafe3ddeef887e7e08390d03f664302deac08315a30a499a569310799055a.json @@ -0,0 +1,12 @@ +{ + "db_name": "SQLite", + "query": "\n UPDATE time_windows \n SET disabled = TRUE \n WHERE (start_time < ?1 OR end_time > ?2) \n AND disabled = FALSE\n ", + "describe": { + "columns": [], + "parameters": { + "Right": 2 + }, + "nullable": [] + }, + "hash": "a29bafe3ddeef887e7e08390d03f664302deac08315a30a499a569310799055a" +} diff --git a/tools/heartbeats-processor/.sqlx/query-a76de53b5c3443bee6539d261edd2044fc771fbf0e9e6d94d61be4e5a31d91f6.json b/tools/heartbeats-processor/.sqlx/query-a76de53b5c3443bee6539d261edd2044fc771fbf0e9e6d94d61be4e5a31d91f6.json new file mode 100644 index 0000000000..e41d04efc1 --- /dev/null +++ b/tools/heartbeats-processor/.sqlx/query-a76de53b5c3443bee6539d261edd2044fc771fbf0e9e6d94d61be4e5a31d91f6.json @@ -0,0 +1,20 @@ +{ + "db_name": "SQLite", + "query": "SELECT id FROM time_windows WHERE start_time = ?1 AND end_time = ?2", + "describe": { + "columns": [ + { + "name": "id", + "ordinal": 0, + "type_info": "Integer" + } + ], + "parameters": { + "Right": 2 + }, + "nullable": [ + true + ] + }, + "hash": "a76de53b5c3443bee6539d261edd2044fc771fbf0e9e6d94d61be4e5a31d91f6" +} diff --git a/tools/heartbeats-processor/.sqlx/query-bc586a064ad3094fe93bf09715e00c3638e403705c437816d56de4af3fcbdb17.json b/tools/heartbeats-processor/.sqlx/query-bc586a064ad3094fe93bf09715e00c3638e403705c437816d56de4af3fcbdb17.json new file mode 100644 index 0000000000..f8a843138a --- /dev/null +++ b/tools/heartbeats-processor/.sqlx/query-bc586a064ad3094fe93bf09715e00c3638e403705c437816d56de4af3fcbdb17.json @@ -0,0 +1,12 @@ +{ + "db_name": "SQLite", + "query": "\n INSERT INTO submitter_scores (public_key_id, score, blocks_produced)\n SELECT\n pk.id,\n COUNT(DISTINCT 
hp.window_id) as score,\n COUNT(DISTINCT pb.id) as blocks_produced\n FROM public_keys pk\n LEFT JOIN heartbeat_presence hp ON pk.id = hp.public_key_id\n LEFT JOIN time_windows tw ON hp.window_id = tw.id\n LEFT JOIN produced_blocks pb ON pk.id = pb.public_key_id\n WHERE tw.disabled = FALSE\n GROUP BY pk.id\n ON CONFLICT(public_key_id) DO UPDATE SET\n score = excluded.score,\n blocks_produced = excluded.blocks_produced,\n last_updated = strftime('%s', 'now')\n ", + "describe": { + "columns": [], + "parameters": { + "Right": 0 + }, + "nullable": [] + }, + "hash": "bc586a064ad3094fe93bf09715e00c3638e403705c437816d56de4af3fcbdb17" +} diff --git a/tools/heartbeats-processor/.sqlx/query-d2f074d1223c3a5e1b0cb54a2c18828b5e176624ae2ae9a5fb0412e8bf3f29f0.json b/tools/heartbeats-processor/.sqlx/query-d2f074d1223c3a5e1b0cb54a2c18828b5e176624ae2ae9a5fb0412e8bf3f29f0.json new file mode 100644 index 0000000000..6d645480c0 --- /dev/null +++ b/tools/heartbeats-processor/.sqlx/query-d2f074d1223c3a5e1b0cb54a2c18828b5e176624ae2ae9a5fb0412e8bf3f29f0.json @@ -0,0 +1,12 @@ +{ + "db_name": "SQLite", + "query": "UPDATE processing_state SET last_processed_time = ? 
WHERE id = 1", + "describe": { + "columns": [], + "parameters": { + "Right": 1 + }, + "nullable": [] + }, + "hash": "d2f074d1223c3a5e1b0cb54a2c18828b5e176624ae2ae9a5fb0412e8bf3f29f0" +} diff --git a/tools/heartbeats-processor/Cargo.toml b/tools/heartbeats-processor/Cargo.toml new file mode 100644 index 0000000000..fa55fddd83 --- /dev/null +++ b/tools/heartbeats-processor/Cargo.toml @@ -0,0 +1,20 @@ +[package] +name = "heartbeats-processor" +version = "0.14.0" +edition = "2021" + +[dependencies] +tokio = { version = "1.28", features = ["full", "time"] } +firestore = "0.44" +serde = { version = "1.0", features = ["derive"] } +serde_json = "1.0" +anyhow = "1.0" +chrono = "0.4" +sqlx = { version = "0.8", features = ["runtime-tokio-native-tls", "sqlite", "chrono"] } +dotenv = "0.15" +clap = { version = "4.4", features = ["derive"] } +gcloud-sdk = { version = "0.26.0", default-features = false, features = ["google-firestore-v1"] } +base64 = "0.22" + +mina-p2p-messages = { workspace = true } +openmina-core = { path = "../../core" } \ No newline at end of file diff --git a/tools/heartbeats-processor/Dockerfile b/tools/heartbeats-processor/Dockerfile new file mode 100644 index 0000000000..f13a04b697 --- /dev/null +++ b/tools/heartbeats-processor/Dockerfile @@ -0,0 +1,24 @@ +# Build stage +FROM rust:1.84-slim-bookworm AS builder + +WORKDIR /usr/src/app +RUN apt-get update && apt-get install -y pkg-config libssl-dev && rm -rf /var/lib/apt/lists/* + +COPY . . +RUN ls -la tools/heartbeats-processor +RUN cargo build --release -p heartbeats-processor + +# Runtime stage +FROM debian:bookworm-slim + +RUN apt-get update && apt-get install -y libsqlite3-0 ca-certificates && rm -rf /var/lib/apt/lists/* + +WORKDIR /app + +COPY --from=builder /usr/src/app/target/release/heartbeats-processor . +COPY tools/heartbeats-processor/schema.sql . 
+ +ENV DATABASE_PATH=/app/data/heartbeats.db + +ENTRYPOINT ["./heartbeats-processor"] +CMD ["process-loop"] diff --git a/tools/heartbeats-processor/README.md b/tools/heartbeats-processor/README.md new file mode 100644 index 0000000000..3334d2a05b --- /dev/null +++ b/tools/heartbeats-processor/README.md @@ -0,0 +1,44 @@ +# Heartbeats Processor + +This application processes "heartbeat" entries from Firestore. It fetches data, groups it by time windows, and stores the results into a local SQLite database for further analysis or reporting. + +## Environment Variables + +The following environment variables control the program's behavior. + +These variables can be set in your shell environment or in a `.env` file in the project root directory. + +### Required Variables +* `DATABASE_PATH` - SQLite database path (e.g., "./data.db") +* `GOOGLE_CLOUD_PROJECT` - Google Cloud project ID +* `WINDOW_RANGE_START` - Start time for window creation in RFC3339 format +* `WINDOW_RANGE_END` - End time for window creation in RFC3339 format + +### Optional Variables +* `GOOGLE_APPLICATION_CREDENTIALS` - Path to Google Cloud credentials file +* `DISABLED_WINDOWS` - Comma-separated list of time ranges to disable in RFC3339 format (e.g., `2023-01-01T00:00:00Z/2023-01-02T00:00:00Z,2023-02-01T00:00:00Z/2023-02-02T00:00:00Z`) + +## Development With Firestore Emulator + +To develop locally using the Firestore Emulator, do the following: + +1. Set these environment variables in your shell: + + ``` + FIRESTORE_EMULATOR_HOST=127.0.0.1:8080 + GOOGLE_CLOUD_PROJECT=staging + ``` + +2. From the "frontend/firestore" directory, start the emulator by running: + + ``` + npm run serve + ``` + +3. Authenticate on your local machine with Google Cloud to allow proper credential usage: + + ``` + gcloud auth application-default login + ``` + +Once these steps are complete, the application can connect to the local emulator to simulate production-like Firestore behavior for debugging or development. 
diff --git a/tools/heartbeats-processor/docker-compose.yml b/tools/heartbeats-processor/docker-compose.yml new file mode 100644 index 0000000000..0648fd8283 --- /dev/null +++ b/tools/heartbeats-processor/docker-compose.yml @@ -0,0 +1,23 @@ +version: '3.8' + +services: + heartbeats-processor: + #build: . + image: openmina/heartbeat-processor:local + environment: + - GOOGLE_CLOUD_PROJECT=${GOOGLE_CLOUD_PROJECT:-staging} + - WINDOW_RANGE_START=${WINDOW_RANGE_START:-} + - WINDOW_RANGE_END=${WINDOW_RANGE_END:-} + #- FIRESTORE_EMULATOR_HOST=${FIRESTORE_EMULATOR_HOST:-} + - DISABLED_WINDOWS=${DISABLED_WINDOWS:-} + - GOOGLE_APPLICATION_CREDENTIALS=${GOOGLE_APPLICATION_CREDENTIALS:-/credentials/service-account.json} + - DATABASE_PATH=${DATABASE_PATH:-/app/data/store.db} + volumes: + - ./data:/app/data + - ./credentials:/credentials:ro + command: ["process-loop", "--interval-seconds", "300"] + restart: unless-stopped + +volumes: + data: + driver: local diff --git a/tools/heartbeats-processor/schema.sql b/tools/heartbeats-processor/schema.sql new file mode 100644 index 0000000000..f1a99dd4e6 --- /dev/null +++ b/tools/heartbeats-processor/schema.sql @@ -0,0 +1,112 @@ +CREATE TABLE IF NOT EXISTS public_keys ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + public_key TEXT NOT NULL UNIQUE +); + +CREATE TABLE IF NOT EXISTS submitter_counts ( + public_key_id INTEGER PRIMARY KEY, + count INTEGER NOT NULL, + last_seen INTEGER NOT NULL, -- Unix timestamp + updated_at INTEGER DEFAULT (strftime('%s', 'now')), + FOREIGN KEY (public_key_id) REFERENCES public_keys(id) +); + +CREATE TABLE IF NOT EXISTS processing_state ( + id INTEGER PRIMARY KEY, + last_processed_time INTEGER NOT NULL -- Unix timestamp +); + +INSERT OR IGNORE INTO processing_state (id, last_processed_time) +VALUES (1, 0); + +CREATE TABLE IF NOT EXISTS time_windows ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + start_time INTEGER NOT NULL, -- Unix timestamp + end_time INTEGER NOT NULL, -- Unix timestamp + disabled BOOLEAN NOT NULL 
DEFAULT FALSE, + UNIQUE(start_time, end_time) +); + +CREATE TABLE IF NOT EXISTS heartbeat_presence ( + window_id INTEGER NOT NULL, + public_key_id INTEGER NOT NULL, + best_tip_hash TEXT NOT NULL, + best_tip_height INTEGER NOT NULL, + best_tip_global_slot INTEGER NOT NULL, + heartbeat_time INTEGER NOT NULL, + disabled BOOLEAN NOT NULL DEFAULT FALSE, + PRIMARY KEY (window_id, public_key_id), + FOREIGN KEY (window_id) REFERENCES time_windows(id), + FOREIGN KEY (public_key_id) REFERENCES public_keys(id) +); + +CREATE TABLE IF NOT EXISTS submitter_scores ( + public_key_id INTEGER PRIMARY KEY, + score INTEGER NOT NULL DEFAULT 0, + last_updated INTEGER NOT NULL DEFAULT (strftime('%s', 'now')), + blocks_produced INTEGER NOT NULL DEFAULT 0, + FOREIGN KEY (public_key_id) REFERENCES public_keys(id) +); + +CREATE TABLE IF NOT EXISTS produced_blocks ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + window_id INTEGER NOT NULL, + public_key_id INTEGER NOT NULL, + block_hash TEXT NOT NULL, + block_height INTEGER NOT NULL, + block_global_slot INTEGER NOT NULL, + block_data_blob TEXT, -- Raw block data in base64-encoded binprot format + validated BOOLEAN NOT NULL DEFAULT FALSE, + UNIQUE(public_key_id, block_hash), + FOREIGN KEY (window_id, public_key_id) REFERENCES heartbeat_presence(window_id, public_key_id), + FOREIGN KEY (window_id) REFERENCES time_windows(id), + FOREIGN KEY (public_key_id) REFERENCES public_keys(id) +); + +-- Index for time window queries +CREATE INDEX IF NOT EXISTS idx_time_windows_start_end +ON time_windows(start_time, end_time); + +-- Index for public key lookups +CREATE INDEX IF NOT EXISTS idx_public_keys_key +ON public_keys(public_key); + +-- Index for presence queries by window +CREATE INDEX IF NOT EXISTS idx_heartbeat_presence_window +ON heartbeat_presence(window_id); + +-- Index for presence queries by public key +CREATE INDEX IF NOT EXISTS idx_heartbeat_presence_pubkey +ON heartbeat_presence(public_key_id); + +-- Index for presence queries by global slot 
+CREATE INDEX IF NOT EXISTS idx_heartbeat_presence_global_slot +ON heartbeat_presence(best_tip_global_slot); + +-- Index for submitter counts lookup +CREATE INDEX IF NOT EXISTS idx_submitter_counts_last_seen +ON submitter_counts(last_seen); + +-- Index for submitter scores lookup +CREATE INDEX IF NOT EXISTS idx_submitter_scores_score +ON submitter_scores(score DESC); + +-- Index for produced blocks queries by window +CREATE INDEX IF NOT EXISTS idx_produced_blocks_window +ON produced_blocks(window_id); + +-- Index for produced blocks queries by public key +CREATE INDEX IF NOT EXISTS idx_produced_blocks_pubkey +ON produced_blocks(public_key_id); + +-- Index for produced blocks queries by block hash +CREATE INDEX IF NOT EXISTS idx_produced_blocks_hash +ON produced_blocks(block_hash); + +-- Combined index for window and public key lookups +CREATE INDEX IF NOT EXISTS idx_produced_blocks_window_pubkey +ON produced_blocks(window_id, public_key_id); + +-- Index for global slot queries +CREATE INDEX IF NOT EXISTS idx_produced_blocks_global_slot +ON produced_blocks(block_global_slot); diff --git a/tools/heartbeats-processor/src/config.rs b/tools/heartbeats-processor/src/config.rs new file mode 100644 index 0000000000..17a6ac925f --- /dev/null +++ b/tools/heartbeats-processor/src/config.rs @@ -0,0 +1,116 @@ +use anyhow::{Context, Result}; +use chrono::{DateTime, Utc}; +use std::fmt; + +#[derive(Debug, Clone)] +pub struct Config { + pub google_cloud_project: String, + pub google_credentials_path: Option, + pub firestore_emulator_host: Option, + pub database_url: String, + pub window_range_start: DateTime, + pub window_range_end: DateTime, + pub disabled_windows: Vec<(DateTime, DateTime)>, +} + +impl Config { + pub fn from_env() -> Result { + dotenv::dotenv().ok(); + + let google_cloud_project = + std::env::var("GOOGLE_CLOUD_PROJECT").unwrap_or_else(|_| "local".to_string()); + + let google_credentials_path = std::env::var("GOOGLE_APPLICATION_CREDENTIALS").ok(); + let 
firestore_emulator_host = std::env::var("FIRESTORE_EMULATOR_HOST").ok(); + + let database_url = std::env::var("DATABASE_PATH") + .map(|path| format!("sqlite:{}", path)) + .unwrap_or_else(|_| format!("sqlite:heartbeats-{}.db", google_cloud_project)); + + let window_range_start = + std::env::var("WINDOW_RANGE_START").context("WINDOW_RANGE_START must be set")?; + let window_range_start = DateTime::parse_from_rfc3339(&window_range_start) + .context("Failed to parse WINDOW_RANGE_START as RFC3339")? + .with_timezone(&Utc); + + let window_range_end = + std::env::var("WINDOW_RANGE_END").context("WINDOW_RANGE_END must be set")?; + let window_range_end = DateTime::parse_from_rfc3339(&window_range_end) + .context("Failed to parse WINDOW_RANGE_END as RFC3339")? + .with_timezone(&Utc); + + if window_range_start >= window_range_end { + anyhow::bail!("WINDOW_RANGE_START must be before WINDOW_RANGE_END"); + } + + let disabled_windows = if let Ok(ranges) = std::env::var("DISABLED_WINDOWS") { + let mut windows = Vec::new(); + for range in ranges.split(',').filter(|s| !s.is_empty()) { + let mut parts = range.split('/'); + let start = parts.next().ok_or_else(|| { + anyhow::anyhow!("Missing start time in disabled window range") + })?; + let end = parts + .next() + .ok_or_else(|| anyhow::anyhow!("Missing end time in disabled window range"))?; + + let start = DateTime::parse_from_rfc3339(start) + .with_context(|| { + format!("Failed to parse disabled window start time: {}", start) + })? + .with_timezone(&Utc); + let end = DateTime::parse_from_rfc3339(end) + .with_context(|| format!("Failed to parse disabled window end time: {}", end))? 
+ .with_timezone(&Utc); + + if start >= end { + anyhow::bail!( + "Disabled window start time must be before end time: {start} >= {end}" + ); + } + if end < window_range_start || start > window_range_end { + println!("Warning: Disabled window {start} to {end} is outside the main window range"); + } + + windows.push((start, end)); + } + windows + } else { + Vec::new() + }; + + Ok(Config { + google_cloud_project, + google_credentials_path, + firestore_emulator_host, + database_url, + window_range_start, + window_range_end, + disabled_windows, + }) + } +} + +impl fmt::Display for Config { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + writeln!(f, "Configuration:")?; + writeln!(f, " Project: {}", self.google_cloud_project)?; + if let Some(creds) = &self.google_credentials_path { + writeln!(f, " Credentials: {}", creds)?; + } + if let Some(emu) = &self.firestore_emulator_host { + writeln!(f, " Firestore Emulator: {}", emu)?; + } + writeln!(f, " Database: {}", self.database_url)?; + writeln!(f, " Window Range:")?; + writeln!(f, " Start: {}", self.window_range_start)?; + writeln!(f, " End: {}", self.window_range_end)?; + if !self.disabled_windows.is_empty() { + writeln!(f, " Disabled Windows:")?; + for (start, end) in &self.disabled_windows { + writeln!(f, " {} to {}", start, end)?; + } + } + Ok(()) + } +} diff --git a/tools/heartbeats-processor/src/local_db.rs b/tools/heartbeats-processor/src/local_db.rs new file mode 100644 index 0000000000..6a9f62ffbd --- /dev/null +++ b/tools/heartbeats-processor/src/local_db.rs @@ -0,0 +1,713 @@ +use anyhow::Result; +use chrono::{DateTime, Utc}; +use firestore::FirestoreDb; +use serde::Serialize; + +use sqlx::{Row, SqlitePool}; +use std::collections::{HashMap, HashSet}; +use std::fs; + +use crate::config::Config; +use crate::remote_db::BlockInfo; +use crate::remote_db::HeartbeatChunkState; +use crate::time::*; + +#[derive(Debug)] +pub struct HeartbeatPresence { + pub window_id: i64, + pub public_key_id: i64, + pub 
best_tip: BlockInfo, + pub heartbeat_time: i64, +} + +#[derive(Debug)] +pub struct ProducedBlock { + pub window_id: i64, + pub public_key_id: i64, + pub block_hash: String, + pub block_height: u32, + pub block_global_slot: u32, + pub block_data: String, +} + +pub async fn get_last_processed_time( + pool: &SqlitePool, + config: Option<&Config>, +) -> Result> { + let record = sqlx::query!("SELECT last_processed_time FROM processing_state WHERE id = 1") + .fetch_one(pool) + .await?; + + let db_time = from_unix_timestamp(record.last_processed_time); + + Ok(match config { + Some(cfg) => db_time.max(cfg.window_range_start), + None => db_time, + }) +} + +pub async fn update_last_processed_time(pool: &SqlitePool, time: DateTime) -> Result<()> { + let current = get_last_processed_time(pool, None).await?; + let ts = to_unix_timestamp(time); + + println!("Updating last processed time: {} -> {}", current, time); + + sqlx::query!( + "UPDATE processing_state SET last_processed_time = ? WHERE id = 1", + ts + ) + .execute(pool) + .await?; + + Ok(()) +} + +pub async fn ensure_time_windows( + pool: &SqlitePool, + start: DateTime, + end: DateTime, +) -> Result> { + let windows = generate_fixed_time_windows(start, end); + let mut window_ids = Vec::new(); + + for window in windows { + let start_ts = to_unix_timestamp(window.start); + let end_ts = to_unix_timestamp(window.end); + + // Try to get existing window ID first + let existing_id = sqlx::query!( + "SELECT id FROM time_windows WHERE start_time = ?1 AND end_time = ?2", + start_ts, + end_ts, + ) + .fetch_optional(pool) + .await?; + + let id = if let Some(record) = existing_id { + record + .id + .expect("ID should not be None for an existing record") + } else { + sqlx::query!( + "INSERT INTO time_windows (start_time, end_time) VALUES (?1, ?2) RETURNING id", + start_ts, + end_ts, + ) + .fetch_one(pool) + .await? 
+ .id + }; + + window_ids.push(id); + } + + Ok(window_ids) +} + +pub async fn ensure_public_keys( + pool: &SqlitePool, + public_keys: &[&str], +) -> Result> { + let mut map = HashMap::new(); + + // Create a single query with multiple values + let values = public_keys + .iter() + .map(|k| format!("('{}')", k)) + .collect::>() + .join(","); + + let query = format!( + r#" + INSERT INTO public_keys (public_key) + VALUES {} + ON CONFLICT (public_key) DO UPDATE SET + public_key = excluded.public_key + RETURNING id, public_key + "#, + values + ); + + let rows = sqlx::query(&query).fetch_all(pool).await?; + + for row in rows { + let id: i64 = row.get("id"); + let key: String = row.get("public_key"); + map.insert(key, id); + } + + Ok(map) +} + +pub async fn batch_insert_presence( + pool: &SqlitePool, + presences: &[HeartbeatPresence], +) -> Result<()> { + if presences.is_empty() { + return Ok(()); + } + + let values = presences + .iter() + .map(|p| { + format!( + "({}, {}, '{}', {}, {}, {})", + p.window_id, + p.public_key_id, + p.best_tip.hash, + p.best_tip.height, + p.best_tip.global_slot, + p.heartbeat_time + ) + }) + .collect::>() + .join(","); + + let query = format!( + r#" + INSERT INTO heartbeat_presence ( + window_id, public_key_id, + best_tip_hash, best_tip_height, best_tip_global_slot, + heartbeat_time + ) + VALUES {} + ON CONFLICT(window_id, public_key_id) + DO UPDATE SET + best_tip_hash = CASE + WHEN excluded.best_tip_global_slot >= best_tip_global_slot + THEN excluded.best_tip_hash + ELSE best_tip_hash + END, + best_tip_height = CASE + WHEN excluded.best_tip_global_slot >= best_tip_global_slot + THEN excluded.best_tip_height + ELSE best_tip_height + END, + best_tip_global_slot = CASE + WHEN excluded.best_tip_global_slot >= best_tip_global_slot + THEN excluded.best_tip_global_slot + ELSE best_tip_global_slot + END, + heartbeat_time = CASE + WHEN excluded.best_tip_global_slot >= best_tip_global_slot + THEN excluded.heartbeat_time + ELSE heartbeat_time + END + "#, 
+ values + ); + + sqlx::query(&query).execute(pool).await?; + + Ok(()) +} + +async fn batch_insert_produced_blocks(pool: &SqlitePool, blocks: &[ProducedBlock]) -> Result<()> { + if blocks.is_empty() { + return Ok(()); + } + + let values = blocks + .iter() + .map(|b| { + format!( + "({}, {}, '{}', {}, {}, '{}')", + b.window_id, + b.public_key_id, + b.block_hash, + b.block_height, + b.block_global_slot, + b.block_data.replace('\'', "''") + ) + }) + .collect::>() + .join(","); + + let query = format!( + r#" + INSERT INTO produced_blocks ( + window_id, public_key_id, + block_hash, block_height, block_global_slot, + block_data_blob + ) + VALUES {} + ON CONFLICT(public_key_id, block_hash) DO NOTHING + "#, + values + ); + + sqlx::query(&query).execute(pool).await?; + + Ok(()) +} + +pub async fn process_heartbeats( + db: &FirestoreDb, + pool: &SqlitePool, + config: &Config, +) -> Result<()> { + let last_processed_time = get_last_processed_time(pool, Some(config)).await?; + let now = Utc::now(); + + let mut total_heartbeats = 0; + let mut latest_time = last_processed_time; + let mut seen_blocks: HashMap<(i64, String), DateTime> = HashMap::new(); + + // Statistics + let mut total_presence_count = 0; + let mut total_skipped_count = 0; + let mut total_blocks_recorded = 0; + let mut total_blocks_duplicate = 0; + let mut total_outside_windows = 0; + + let mut chunk_state = HeartbeatChunkState { + chunk_start: last_processed_time, + last_timestamp: None, + }; + + loop { + let heartbeats = crate::remote_db::fetch_heartbeat_chunk(db, &mut chunk_state, now).await?; + if heartbeats.is_empty() { + break; + } + + total_heartbeats += heartbeats.len(); + println!("Processing batch of {} heartbeats...", heartbeats.len()); + + latest_time = latest_time.max( + heartbeats + .iter() + .map(|h| h.create_time) + .max() + .unwrap_or(latest_time), + ); + + let start_ts = to_unix_timestamp(last_processed_time); + let end_ts = to_unix_timestamp(latest_time); + + let existing_windows = sqlx::query!( 
+ r#" + SELECT id, start_time, end_time + FROM time_windows + WHERE start_time <= ?2 AND end_time >= ?1 AND disabled = FALSE + ORDER BY start_time ASC + "#, + start_ts, + end_ts + ) + .fetch_all(pool) + .await?; + + let unique_submitters: HashSet<&str> = heartbeats + .iter() + .map(|entry| entry.submitter.as_str()) + .collect(); + + let public_key_map = + ensure_public_keys(pool, &unique_submitters.into_iter().collect::>()).await?; + + let mut presence_count = 0; + let mut skipped_count = 0; + let mut blocks_recorded = 0; + let mut blocks_duplicate = 0; + let mut processed_heartbeats = HashSet::new(); + let mut produced_blocks_batch = Vec::new(); + + for window in existing_windows { + let window_start = from_unix_timestamp(window.start_time); + let window_end = from_unix_timestamp(window.end_time); + let mut presence_batch = Vec::new(); + + for (idx, entry) in heartbeats.iter().enumerate() { + if entry.create_time >= window_start && entry.create_time < window_end { + processed_heartbeats.insert(idx); + + let best_tip = entry.best_tip_block(); + + if entry.is_synced() && best_tip.is_some() { + if let Some(&public_key_id) = public_key_map.get(&entry.submitter) { + presence_batch.push(HeartbeatPresence { + window_id: window.id.unwrap(), + public_key_id, + best_tip: best_tip.unwrap(), // Cannot fail due to the above check + heartbeat_time: to_unix_timestamp(entry.create_time), + }); + presence_count += 1; + + // Add produced block if it exists + match entry.last_produced_block_decoded() { + Ok(Some(block)) => { + let block_data = entry.last_produced_block_raw().unwrap(); // Cannot fail, we have the block + let key = (public_key_id, block.hash().to_string()); + + if let Some(first_seen) = seen_blocks.get(&key) { + blocks_duplicate += 1; + println!( + "Duplicate block detected: {} (height: {}, producer: {}, peer_id: {}) [first seen at {}, now at {}]", + key.1, + block.height(), + entry.submitter, + entry.peer_id().unwrap_or_else(|| "unknown".to_string()), + first_seen, + 
entry.create_time + ); + continue; + } + + seen_blocks.insert(key.clone(), entry.create_time); + produced_blocks_batch.push(ProducedBlock { + window_id: window.id.unwrap(), + public_key_id, + block_hash: block.hash().to_string(), + block_height: block.height(), + block_global_slot: block.global_slot(), + block_data, + }); + } + Ok(None) => (), // No block to process + Err(e) => { + println!( + "WARNING: Failed to decode block from {}: {}", + entry.submitter, e + ) + } + } + } + } else { + if let Ok(Some(block)) = entry.last_produced_block_decoded() { + println!( + "Skipping unsynced block: {} (height: {}, producer: {}, peer_id: {})", + block.hash(), + block.height(), + entry.submitter, + entry.peer_id().unwrap_or_else(|| "unknown".to_string()) + ); + } + skipped_count += 1; + } + } + } + + if !presence_batch.is_empty() { + batch_insert_presence(pool, &presence_batch).await?; + } + } + + if !produced_blocks_batch.is_empty() { + blocks_recorded = produced_blocks_batch.len(); + batch_insert_produced_blocks(pool, &produced_blocks_batch).await?; + } + + let outside_windows = heartbeats.len() - processed_heartbeats.len(); + + println!( + "Batch complete: {} presences, {} blocks ({} duplicates), {} skipped, {} outside windows", + presence_count, + blocks_recorded, + blocks_duplicate, + skipped_count, + outside_windows + ); + + total_presence_count += presence_count; + total_skipped_count += skipped_count; + total_blocks_recorded += blocks_recorded; + total_blocks_duplicate += blocks_duplicate; + total_outside_windows += outside_windows; + } + + println!( + "Processed {} total heartbeats ({} synced presences recorded, {} unique blocks recorded ({} duplicates skipped), {} unsynced skipped), {} outside of defined windows", + total_heartbeats, + total_presence_count, + total_blocks_recorded, + total_blocks_duplicate, + total_skipped_count, + total_outside_windows, + ); + + if latest_time > last_processed_time { + update_last_processed_time(pool, latest_time).await?; + } + + 
Ok(()) +} + +pub async fn create_tables_from_file(pool: &SqlitePool) -> Result<()> { + println!("Initializing SQLite database schema..."); + let schema = fs::read_to_string("schema.sql")?; + sqlx::query(&schema).execute(pool).await?; + Ok(()) +} + +pub async fn toggle_windows( + pool: &SqlitePool, + start: String, + end: String, + disabled: bool, +) -> Result<()> { + let start_time = parse_datetime(&start)?; + let end_time = parse_datetime(&end)?; + + if start_time >= end_time { + return Err(anyhow::anyhow!("Start time must be before end time")); + } + + let start_ts = to_unix_timestamp(start_time); + let end_ts = to_unix_timestamp(end_time); + + let affected = sqlx::query!( + r#" + UPDATE time_windows + SET disabled = ?1 + WHERE start_time >= ?2 AND end_time < ?3 + "#, + disabled, + start_ts, + end_ts + ) + .execute(pool) + .await?; + + if affected.rows_affected() > 0 { + println!( + "{} windows {} successfully between {} and {}", + affected.rows_affected(), + if disabled { "disabled" } else { "enabled" }, + start_time, + end_time + ); + } else { + println!("No windows found in the specified range"); + } + Ok(()) +} + +// TODO: multiple blocks for the same slot should be counted as one +// TODO: take into account the validated flag to count blocks +pub async fn update_scores(pool: &SqlitePool) -> Result<()> { + sqlx::query!( + r#" + INSERT INTO submitter_scores (public_key_id, score, blocks_produced) + SELECT + pk.id, + COUNT(DISTINCT hp.window_id) as score, + COUNT(DISTINCT pb.id) as blocks_produced + FROM public_keys pk + LEFT JOIN heartbeat_presence hp ON pk.id = hp.public_key_id + LEFT JOIN time_windows tw ON hp.window_id = tw.id + LEFT JOIN produced_blocks pb ON pk.id = pb.public_key_id + WHERE tw.disabled = FALSE + GROUP BY pk.id + ON CONFLICT(public_key_id) DO UPDATE SET + score = excluded.score, + blocks_produced = excluded.blocks_produced, + last_updated = strftime('%s', 'now') + "# + ) + .execute(pool) + .await?; + Ok(()) +} + +#[derive(Debug, 
Serialize)] +pub struct MaxScores { + pub total: i64, + pub current: i64, +} + +pub async fn get_max_scores(pool: &SqlitePool) -> Result { + let total = sqlx::query!("SELECT COUNT(*) as count FROM time_windows WHERE disabled = FALSE") + .fetch_one(pool) + .await? + .count as i64; + + let current = sqlx::query_as::<_, (i64,)>( + r#" + SELECT COUNT(*) as count + FROM time_windows + WHERE end_time <= strftime('%s', 'now') + AND disabled = FALSE + "#, + ) + .fetch_one(pool) + .await? + .0; + + Ok(MaxScores { total, current }) +} + +pub async fn view_scores(pool: &SqlitePool) -> Result<()> { + // Make sure scores are up to date + update_scores(pool).await?; + + let scores = sqlx::query!( + r#" + SELECT + pk.public_key, + ss.score, + ss.blocks_produced, + datetime(ss.last_updated, 'unixepoch') as last_updated + FROM submitter_scores ss + JOIN public_keys pk ON pk.id = ss.public_key_id + ORDER BY ss.score DESC, ss.blocks_produced DESC + "# + ) + .fetch_all(pool) + .await?; + + let max_scores = get_max_scores(pool).await?; + + println!("\nSubmitter Scores:"); + println!("----------------------------------------"); + println!( + "Public Key | Score | Blocks | Current Max | Total Max | Last Updated" + ); + println!("----------------------------------------"); + + for row in scores { + println!( + "{:<40} | {:>5} | {:>6} | {:>11} | {:>9} | {}", + row.public_key, + row.score, + row.blocks_produced, + max_scores.current, + max_scores.total, + row.last_updated.unwrap_or_default() + ); + } + + Ok(()) +} + +pub fn ensure_db_exists(db_path: &str) -> Result<()> { + let file_path = db_path.strip_prefix("sqlite:").unwrap_or(db_path); + + if !std::path::Path::new(file_path).exists() { + std::fs::File::create(file_path)?; + } + + Ok(()) +} + +pub async fn set_last_processed_time(pool: &SqlitePool, time_str: &str) -> Result<()> { + // Try parsing with different formats + let dt = if let Ok(dt) = DateTime::parse_from_str( + &format!("{} 00:00:00 +0000", time_str), + "%Y-%m-%d %H:%M:%S 
%z", + ) { + dt.with_timezone(&Utc) + } else if let Ok(dt) = + DateTime::parse_from_str(&format!("{} +0000", time_str), "%Y-%m-%d %H:%M:%S %z") + { + dt.with_timezone(&Utc) + } else { + return Err(anyhow::anyhow!( + "Invalid time format. Expected YYYY-MM-DD or YYYY-MM-DD HH:MM:SS" + )); + }; + + let ts = to_unix_timestamp(dt); + sqlx::query!( + "UPDATE processing_state SET last_processed_time = ? WHERE id = 1", + ts + ) + .execute(pool) + .await?; + + println!("Last processed time set to: {}", dt); + Ok(()) +} + +pub async fn create_windows(pool: &SqlitePool, start: String, end: String) -> Result<()> { + let start_time = parse_datetime(&start)?; + let end_time = parse_datetime(&end)?; + + if start_time >= end_time { + return Err(anyhow::anyhow!("Start time must be before end time")); + } + + let window_ids = ensure_time_windows(pool, start_time, end_time).await?; + println!("Created {} time windows", window_ids.len()); + Ok(()) +} + +/// Ensures time windows exist in the database for a configured time range. +/// +/// This function uses environment variables to determine the range of windows to create: +/// - `WINDOW_RANGE_START`: The start time for window creation (RFC3339 format) +/// If not set, defaults to the current time +/// - `WINDOW_RANGE_END`: The end time for window creation (RFC3339 format) +/// If not set, defaults to start + 28 days +/// +/// Time windows are created at 5-minute intervals within this range. +/// Windows that already exist will be preserved, new ones will be created. +/// Any windows outside this range will be disabled. 
+pub async fn ensure_initial_windows(pool: &SqlitePool, config: &Config) -> Result<()> { + let start = config.window_range_start; + let end = config.window_range_end; + + println!("Ensuring time windows exist from {} to {}", start, end); + let window_ids = ensure_time_windows(pool, start, end).await?; + println!("Created/verified {} time windows", window_ids.len()); + + // Disable windows outside the configured range + let start_ts = to_unix_timestamp(start); + let end_ts = to_unix_timestamp(end); + + let affected = sqlx::query!( + r#" + UPDATE time_windows + SET disabled = TRUE + WHERE (start_time < ?1 OR end_time > ?2) + AND disabled = FALSE + "#, + start_ts, + end_ts + ) + .execute(pool) + .await?; + + if affected.rows_affected() > 0 { + println!( + "Disabled {} windows outside the configured range", + affected.rows_affected() + ); + } + + Ok(()) +} + +pub async fn mark_disabled_windows(pool: &SqlitePool, config: &Config) -> Result<()> { + if !config.disabled_windows.is_empty() { + println!("Processing disabled window ranges:"); + let mut affected_total = 0; + + for (start, end) in &config.disabled_windows { + println!(" {} to {}", start, end); + let start_ts = to_unix_timestamp(*start); + let end_ts = to_unix_timestamp(*end); + + let result = sqlx::query!( + r#" + UPDATE time_windows + SET disabled = TRUE + WHERE start_time >= ? AND end_time <= ? + "#, + start_ts, + end_ts + ) + .execute(pool) + .await?; + + affected_total += result.rows_affected(); + } + + if affected_total > 0 { + println!("✓ Disabled {} windows in the above ranges", affected_total); + } else { + println!("! 
No windows found in the configured disabled ranges"); + } + } + Ok(()) +} diff --git a/tools/heartbeats-processor/src/main.rs b/tools/heartbeats-processor/src/main.rs new file mode 100644 index 0000000000..80e1c4ae7d --- /dev/null +++ b/tools/heartbeats-processor/src/main.rs @@ -0,0 +1,176 @@ +use anyhow::Result; +use clap::{Parser, Subcommand}; +use firestore::FirestoreDb; +use sqlx::SqlitePool; + +mod config; +mod local_db; +mod remote_db; +mod time; + +use config::Config; +use remote_db::ScoreDocument; + +#[derive(Parser)] +#[command(version, about, long_about = None)] +struct Cli { + #[command(subcommand)] + command: Commands, +} + +#[derive(Subcommand)] +enum Commands { + /// Create database schema + InitDb, + /// Process heartbeats from Firestore + Process, + /// Toggle windows disabled state for a time range + ToggleWindows { + /// Start time in UTC (format: YYYY-MM-DD HH:MM:SS) + #[arg(long)] + start: String, + /// End time in UTC (format: YYYY-MM-DD HH:MM:SS) + #[arg(long)] + end: String, + #[arg(long)] + disabled: bool, + }, + /// View scores for all submitters + ViewScores, + /// Post scores to Firestore + PostScores, + /// Set the last processing time + SetLastProcessed { + /// Time in UTC (format: YYYY-MM-DD or YYYY-MM-DD HH:MM:SS) + #[arg(long)] + time: String, + }, + /// Create time windows for a given time range + CreateWindows { + /// Start time in UTC (format: YYYY-MM-DD HH:MM:SS) + #[arg(long)] + start: String, + /// End time in UTC (format: YYYY-MM-DD HH:MM:SS) + #[arg(long)] + end: String, + }, + /// Run continuous processing loop + ProcessLoop { + #[arg(long, default_value = "300")] + interval_seconds: u64, + }, +} + +async fn post_scores_to_firestore(pool: &SqlitePool, db: &FirestoreDb) -> Result<()> { + // Make sure scores are up to date + local_db::update_scores(pool).await?; + + let scores = sqlx::query!( + r#" + SELECT + pk.public_key, + ss.score, + ss.blocks_produced, + ss.last_updated + FROM submitter_scores ss + JOIN public_keys pk ON 
pk.id = ss.public_key_id + ORDER BY ss.score DESC + "# + ) + .fetch_all(pool) + .await?; + + let scores: Vec = scores + .into_iter() + .map(|row| ScoreDocument { + public_key: row.public_key, + score: row.score, + blocks_produced: row.blocks_produced, + last_updated: row.last_updated, + }) + .collect(); + + let max_scores = local_db::get_max_scores(pool).await?; + remote_db::post_scores(db, scores, (max_scores.current, max_scores.total)).await?; + + Ok(()) +} + +async fn run_process_loop( + pool: &SqlitePool, + db: &FirestoreDb, + config: &Config, + interval_seconds: u64, +) -> Result<()> { + let interval = std::time::Duration::from_secs(interval_seconds); + + loop { + println!("Processing heartbeats..."); + local_db::process_heartbeats(db, pool, config).await?; + + println!("Posting scores..."); + post_scores_to_firestore(pool, db).await?; + + println!("Sleeping for {} seconds...", interval_seconds); + tokio::time::sleep(interval).await; + } +} + +#[tokio::main] +async fn main() -> Result<()> { + let config = Config::from_env()?; + println!("\n{}\n", config); + + let cli = Cli::parse(); + + local_db::ensure_db_exists(&config.database_url)?; + let pool = SqlitePool::connect(&config.database_url).await?; + + match cli.command { + Commands::InitDb => { + local_db::create_tables_from_file(&pool).await?; + local_db::ensure_initial_windows(&pool, &config).await?; + local_db::mark_disabled_windows(&pool, &config).await?; + } + Commands::Process => { + println!("Initializing firestore connection..."); + let db = remote_db::get_db(&config).await?; + local_db::create_tables_from_file(&pool).await?; + local_db::ensure_initial_windows(&pool, &config).await?; + local_db::mark_disabled_windows(&pool, &config).await?; + local_db::process_heartbeats(&db, &pool, &config).await?; + println!("Processing completed successfully!"); + } + Commands::ToggleWindows { + start, + end, + disabled, + } => { + local_db::toggle_windows(&pool, start, end, disabled).await?; + } + 
Commands::ViewScores => { + local_db::view_scores(&pool).await?; + } + Commands::PostScores => { + println!("Initializing firestore connection..."); + let db = remote_db::get_db(&config).await?; + post_scores_to_firestore(&pool, &db).await?; + } + Commands::SetLastProcessed { time } => { + local_db::set_last_processed_time(&pool, &time).await?; + } + Commands::CreateWindows { start, end } => { + local_db::create_windows(&pool, start, end).await?; + } + Commands::ProcessLoop { interval_seconds } => { + println!("Initializing firestore connection..."); + let db = remote_db::get_db(&config).await?; + local_db::create_tables_from_file(&pool).await?; + local_db::ensure_initial_windows(&pool, &config).await?; + local_db::mark_disabled_windows(&pool, &config).await?; + run_process_loop(&pool, &db, &config, interval_seconds).await?; + } + } + + Ok(()) +} diff --git a/tools/heartbeats-processor/src/remote_db.rs b/tools/heartbeats-processor/src/remote_db.rs new file mode 100644 index 0000000000..bec2f5b47c --- /dev/null +++ b/tools/heartbeats-processor/src/remote_db.rs @@ -0,0 +1,274 @@ +use std::sync::Arc; + +use anyhow::Result; +use base64::{engine::general_purpose, Engine as _}; +use chrono::{DateTime, Duration, Utc}; +use firestore::*; +use mina_p2p_messages::v2; +use openmina_core::block::{ArcBlockWithHash, BlockWithHash}; +use serde::{Deserialize, Serialize}; +use serde_json::Value; + +use crate::config::Config; + +const FIRESTORE_BATCH_SIZE: u32 = 1000; // Number of documents per batch +const MAX_TIME_CHUNK_HOURS: i64 = 24; + +#[derive(Debug, Serialize, Deserialize)] +pub struct SignatureJson { + pub field: String, + pub scalar: String, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct HeartbeatEntry { + pub version: u8, + pub payload: String, + pub submitter: String, + pub signature: SignatureJson, + #[serde(rename = "createTime")] + pub create_time: DateTime, + #[serde(skip_deserializing)] + pub decoded_payload: Option, +} + +#[derive(Debug)] +pub struct 
BlockInfo { + pub hash: String, + pub height: u64, + pub global_slot: u64, +} + +fn base64_decode_block( + encoded: &str, +) -> Result { + use base64::{engine::general_purpose::URL_SAFE, Engine as _}; + use mina_p2p_messages::binprot::BinProtRead; + + let decoded = URL_SAFE + .decode(encoded) + .map_err(|_| "Could not decode base64".to_string())?; + let block = v2::MinaBlockBlockStableV2::binprot_read(&mut &decoded[..]) + .map_err(|e| format!("Could not decode block: {:?}", e))?; + + Ok(block) +} + +impl HeartbeatEntry { + pub fn decode_payload(&mut self) -> Result<(), anyhow::Error> { + let decoded = general_purpose::URL_SAFE.decode(&self.payload)?; + let json_str = String::from_utf8(decoded)?; + self.decoded_payload = Some(serde_json::from_str(&json_str)?); + Ok(()) + } + + pub fn peer_id(&self) -> Option { + self.decoded_payload + .as_ref() + .and_then(|decoded| decoded.get("peer_id")) + .and_then(|peer_id| peer_id.as_str()) + .map(|s| s.to_string()) + } + + pub fn last_produced_block_raw(&self) -> Option { + self.decoded_payload + .as_ref() + .and_then(|status| status.get("last_produced_block")) + .and_then(|block| block.as_str()) + .map(|s| s.to_string()) + } + + pub fn last_produced_block_decoded(&self) -> Result, String> { + match self.last_produced_block_raw() { + None => Ok(None), + Some(encoded) => { + let block = base64_decode_block(&encoded)?; + let block = BlockWithHash::try_new(Arc::new(block)) + .map_err(|e| format!("Invalid block: {}", e))?; + Ok(Some(block)) + } + } + } + + fn transition_frontier(&self) -> Option<&Value> { + self.decoded_payload + .as_ref() + .and_then(|decoded| decoded.get("status")) + .and_then(|status| status.get("transition_frontier")) + } + + fn best_tip(&self) -> Option<&Value> { + self.transition_frontier() + .and_then(|tf| tf.get("best_tip")) + .filter(|v| !v.is_null()) + } + + pub fn best_tip_block(&self) -> Option { + self.best_tip().map(|best_tip| BlockInfo { + hash: 
best_tip.get("hash").unwrap().as_str().unwrap().to_string(), + height: best_tip.get("height").unwrap().as_u64().unwrap(), + global_slot: best_tip.get("global_slot").unwrap().as_u64().unwrap(), + }) + } + + pub fn sync_status(&self) -> Option { + self.transition_frontier() + .and_then(|tf| tf.get("sync")) + .and_then(|sync| sync.get("status")) + .map(|status| status.as_str().unwrap().to_string()) + } + + pub fn is_synced(&self) -> bool { + self.sync_status() + .as_ref() + .map(|status| status == "Synced") + .unwrap_or(false) + } +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct ScoreDocument { + #[serde(rename = "publicKey")] + pub public_key: String, + pub score: i64, + #[serde(rename = "blocksProduced")] + pub blocks_produced: i64, + #[serde(rename = "lastUpdated")] + pub last_updated: i64, +} + +pub async fn get_db(config: &Config) -> Result { + if let Some(emulator_host) = &config.firestore_emulator_host { + // Using emulator + std::env::set_var("GOOGLE_CLOUD_PROJECT", "staging"); + let emulator_url = format!("http://{}", emulator_host); + let token_source = gcloud_sdk::TokenSourceType::Default; + Ok(FirestoreDb::with_options_token_source( + FirestoreDbOptions::new("staging".to_string()).with_firebase_api_url(emulator_url), + vec!["http://127.0.0.1:9099".to_string()], + token_source, + ) + .await?) + } else { + // Production mode - requires auth + Ok(FirestoreDb::new(&config.google_cloud_project).await?) + } +} + +pub struct HeartbeatChunkState { + pub chunk_start: DateTime, + pub last_timestamp: Option>, +} + +pub async fn fetch_heartbeat_chunk( + db: &FirestoreDb, + state: &mut HeartbeatChunkState, + end_time: DateTime, +) -> Result> { + let chunk_duration = Duration::try_hours(MAX_TIME_CHUNK_HOURS).unwrap(); + let chunk_end = (state.chunk_start + chunk_duration).min(end_time); + + if state.chunk_start >= end_time { + return Ok(Vec::new()); + } + + println!("Fetching heartbeat chunk... 
{}", state.chunk_start); + + let query = db + .fluent() + .select() + .from("heartbeats") + .filter(|q| { + let mut conditions = vec![ + q.field("createTime") + .greater_than_or_equal(firestore::FirestoreTimestamp::from(state.chunk_start)), + q.field("createTime") + .less_than(firestore::FirestoreTimestamp::from(chunk_end)), + ]; + + if let Some(ts) = &state.last_timestamp { + conditions.push( + q.field("createTime") + .greater_than(firestore::FirestoreTimestamp::from(*ts)), + ); + } + + q.for_all(conditions) + }) + .order_by([("createTime", FirestoreQueryDirection::Ascending)]) + .limit(FIRESTORE_BATCH_SIZE); + + let mut batch: Vec = query.obj().query().await?; + + if batch.is_empty() { + state.chunk_start = chunk_end; + state.last_timestamp = None; + } else { + state.last_timestamp = batch.last().map(|doc| doc.create_time); + if batch.len() < FIRESTORE_BATCH_SIZE as usize { + state.chunk_start = chunk_end; + state.last_timestamp = None; + } + } + + // Decode payloads + for heartbeat in &mut batch { + if let Err(e) = heartbeat.decode_payload() { + eprintln!("Failed to decode payload: {:?}", e); + } + } + + Ok(batch) +} + +pub async fn post_scores( + db: &FirestoreDb, + scores: Vec, + max_scores: (i64, i64), +) -> Result<()> { + let scores_count = scores.len(); + let now = FirestoreTimestamp::from(Utc::now()); + let (current_max, total_max) = max_scores; + + let mut transaction = db.begin_transaction().await?; + + // Store max scores in separate documents + db.fluent() + .update() + .in_col("maxScore") + .document_id("current") + .object(&serde_json::json!({ + "value": current_max, + "lastUpdated": now, + })) + .add_to_transaction(&mut transaction)?; + + db.fluent() + .update() + .in_col("maxScore") + .document_id("total") + .object(&serde_json::json!({ + "value": total_max, + "lastUpdated": now, + })) + .add_to_transaction(&mut transaction)?; + + // Per-key scores + for doc in scores { + db.fluent() + .update() + .in_col("scores") + .document_id(&doc.public_key) + 
.object(&doc) + .add_to_transaction(&mut transaction)?; + } + + println!( + "Successfully posted {scores_count} scores and max scores (current: {}, total: {}) to Firestore", + current_max, total_max + ); + + transaction.commit().await?; + + Ok(()) +} diff --git a/tools/heartbeats-processor/src/time.rs b/tools/heartbeats-processor/src/time.rs new file mode 100644 index 0000000000..25314d104e --- /dev/null +++ b/tools/heartbeats-processor/src/time.rs @@ -0,0 +1,51 @@ +use chrono::{DateTime, Duration, Utc}; + +const WINDOW_SIZE_MINUTES: i64 = 5; + +#[derive(Debug)] +pub struct TimeWindow { + pub start: DateTime, + pub end: DateTime, +} + +// Helper function to convert DateTime to Unix timestamp +pub fn to_unix_timestamp(dt: DateTime) -> i64 { + dt.timestamp() +} + +// Helper function to convert Unix timestamp to DateTime +pub fn from_unix_timestamp(ts: i64) -> DateTime { + DateTime::from_timestamp(ts, 0).unwrap() +} + +pub fn parse_datetime(s: &str) -> anyhow::Result> { + // Try parsing with different formats + if let Ok(dt) = DateTime::parse_from_str(&format!("{} +0000", s), "%Y-%m-%d %H:%M:%S %z") { + return Ok(dt.with_timezone(&Utc)); + } + + if let Ok(dt) = DateTime::parse_from_str(s, "%Y-%m-%dT%H:%M:%SZ") { + return Ok(dt.with_timezone(&Utc)); + } + + Err(anyhow::anyhow!( + "Invalid datetime format. 
Expected YYYY-MM-DD HH:MM:SS or YYYY-MM-DDThh:mm:ssZ" + )) +} + +pub fn generate_fixed_time_windows(start: DateTime, end: DateTime) -> Vec { + let window_duration = Duration::try_minutes(WINDOW_SIZE_MINUTES).unwrap(); + let mut windows = Vec::new(); + let mut current = start; + + while current < end { + let window_end = current + window_duration; + windows.push(TimeWindow { + start: current, + end: window_end, + }); + current = window_end; + } + + windows +} diff --git a/tools/ledger-tool/Cargo.toml b/tools/ledger-tool/Cargo.toml index d22390fe55..9f913efc62 100644 --- a/tools/ledger-tool/Cargo.toml +++ b/tools/ledger-tool/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "ledger-tool" -version = "0.13.0" +version = "0.14.0" edition = "2021" [dependencies] diff --git a/tools/salsa-simple/Cargo.toml b/tools/salsa-simple/Cargo.toml index ecc180bcd9..85a42065c2 100644 --- a/tools/salsa-simple/Cargo.toml +++ b/tools/salsa-simple/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "salsa-simple" -version = "0.13.0" +version = "0.14.0" edition = "2021" [dev-dependencies] diff --git a/tools/transport/Cargo.toml b/tools/transport/Cargo.toml index d08267b27c..77c6bfae90 100644 --- a/tools/transport/Cargo.toml +++ b/tools/transport/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "mina-transport" -version = "0.13.0" +version = "0.14.0" edition = "2021" [dependencies] diff --git a/vrf/Cargo.toml b/vrf/Cargo.toml index f032aea6b3..35f5aeb36a 100644 --- a/vrf/Cargo.toml +++ b/vrf/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "vrf" -version = "0.13.0" +version = "0.14.0" edition = "2021" license = "Apache-2.0" diff --git a/vrf/src/lib.rs b/vrf/src/lib.rs index 655fc8c791..6b3aba9c20 100644 --- a/vrf/src/lib.rs +++ b/vrf/src/lib.rs @@ -160,7 +160,7 @@ pub fn evaluate_vrf(vrf_input: VrfEvaluationInput) -> VrfResult Keypair { +pub fn keypair_from_bs58_string(str: &str) -> Keypair { let mut secret_hex_vec = bs58::decode(str).into_vec().unwrap(); secret_hex_vec = secret_hex_vec[2..secret_hex_vec.len() - 
4].to_vec(); secret_hex_vec.reverse();