diff --git a/.github/workflows/docker.yaml b/.github/workflows/docker.yaml index be50bc3e66..716645eaa7 100644 --- a/.github/workflows/docker.yaml +++ b/.github/workflows/docker.yaml @@ -123,7 +123,7 @@ jobs: runs-on: ubuntu-arm64 configuration: - build_configuration: compose - - build_configuration: block_producers + - build_configuration: staging runs-on: ${{ matrix.arch.runs-on }} steps: - name: Prepare @@ -175,7 +175,7 @@ jobs: configuration: - build_configuration: compose tag_suffix: "" - - build_configuration: block_producers + - build_configuration: staging tag_suffix: "-producer-demo" runs-on: ubuntu-latest needs: diff --git a/ARCHITECTURE.md b/ARCHITECTURE.md index b410a6b705..c5bc94c501 100644 --- a/ARCHITECTURE.md +++ b/ARCHITECTURE.md @@ -860,3 +860,93 @@ if let Some((callback, block_hash)) = callback_and_arg { dispatcher.push(SnarkBlockVerifyAction::Finish { req_id: *req_id }); ``` + +#### Callbacks instead of conditional dispatches + +A common pattern seen in the current code is conditional dispatches: + +```rust +if store.dispatch(SomeAction) { + store.dispatch(SomeOtherAction); +} +``` + +The equivalent with queueing is to use `dispatcher.push_if_enabled` which will return `true` if the enabling condition for that action returns `true`. This will work most of the time, but it is possible for the state to change between the time the action was enqueued and when it is finally going to be dispatched, so the enabling condition may not be `true` anymore. This means that the equivalence is not strict. + +A better approach is to add a callback to the first action. This ensures that the second action only happens when intended, avoiding the potential race condition of the state changing between enqueuing and dispatching. 
+ +First a callback is added to the action: + +```diff + #[derive(Serialize, Deserialize, Debug, Clone)] + pub enum LedgerWriteAction { +- Init { request: LedgerWriteRequest }, ++ Init { ++ request: LedgerWriteRequest, ++ on_init: redux::Callback, ++ }, + Pending, + // ... +``` + +Then the handling code is updated to dispatch the callback: + +```diff + match action { + LedgerAction::Write(a) => match a { +- LedgerWriteAction::Init { request } => { +- store.service.write_init(request); ++ LedgerWriteAction::Init { request, on_init } => { ++ store.service.write_init(request.clone()); + store.dispatch(LedgerWriteAction::Pending); ++ store.dispatch_callback(on_init, request); + } +``` + +Finally the dispatching of that action is updated to provide a callback that will return the same action that was inside the body of the conditional dispatch: + +```diff +- if store.dispatch(LedgerWriteAction::Init { ++ store.dispatch(LedgerWriteAction::Init { + request: LedgerWriteRequest::StagedLedgerDiffCreate { + pred_block: pred_block.clone(), + global_slot_since_genesis: won_slot + // ... + supercharge_coinbase, + transactions_by_fee, + }, +- }) { +- store.dispatch(BlockProducerAction::StagedLedgerDiffCreatePending); +- } ++ on_init: redux::callback!( ++ on_staged_ledger_diff_create_init(_request: LedgerWriteRequest) -> crate::Action { ++ BlockProducerAction::StagedLedgerDiffCreatePending ++ } ++ ), ++ }); +``` + +In the above example the passed argument is not used, but for other callbacks it is useful. 
Consider this example where we need the block hash for the next action, which can be extracted from the data contained in the request: + +```diff +- if store.dispatch(LedgerWriteAction::Init { ++ store.dispatch(LedgerWriteAction::Init { + request: LedgerWriteRequest::BlockApply { block, pred_block }, +- }) { +- store.dispatch(TransitionFrontierSyncAction::BlocksNextApplyPending { +- hash: hash.clone(), +- }); +- } ++ on_init: redux::callback!( ++ on_block_next_apply_init(request: LedgerWriteRequest) -> crate::Action { ++ let LedgerWriteRequest::BlockApply { block, .. } = request ++ else { ++ // Cannot happen because this is the same value we passed above ++ unreachable!() ++ }; ++ let hash = block.hash().clone(); ++ TransitionFrontierSyncAction::BlocksNextApplyPending { hash } ++ } ++ ), ++ }); +``` \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index 0befed1bc2..daab8cbfed 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,19 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [Unreleased] +## [0.8.2] - 2024-09-06 + +### Fixed + +- Include circuit blobs in docker images, required for block production. +- Add missing bounds to ZkAppUri and TokenSymbol fields. +- Various stability improvements to make sure the node will not crash in certain circumstances. + +### Changed + +- Root snarked ledger re-syncs now reuse the previously in-progress root snarked ledger instead of starting again from the next-epoch ledger. +- Added `--libp2p-keypair=` flag to specify encrypted secret key (with passphrase from `MINA_LIBP2P_PASS` environment variable). + ## [0.8.1] - 2024-09-02 ### Fixed @@ -215,7 +228,8 @@ First public release. - Alpha version of the node which can connect and syncup to the berkeleynet network, and keep applying new blocks to maintain consensus state and ledger up to date. - Web-based frontend for the node. 
-[Unreleased]: https://github.com/openmina/openmina/compare/v0.8.1...develop +[Unreleased]: https://github.com/openmina/openmina/compare/v0.8.2...develop +[0.8.2]: https://github.com/openmina/openmina/releases/tag/v0.8.1...v0.8.2 [0.8.1]: https://github.com/openmina/openmina/releases/tag/v0.8.0...v0.8.1 [0.8.0]: https://github.com/openmina/openmina/releases/tag/v0.7.0...v0.8.0 [0.7.0]: https://github.com/openmina/openmina/releases/tag/v0.6.0...v0.7.0 diff --git a/Cargo.lock b/Cargo.lock index d8e68b74e4..f90e0d45b4 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1013,7 +1013,7 @@ checksum = "98cc8fbded0c607b7ba9dd60cd98df59af97e84d24e49c8557331cfc26d301ce" [[package]] name = "cli" -version = "0.8.1" +version = "0.8.2" dependencies = [ "anyhow", "bytes", @@ -1028,6 +1028,7 @@ dependencies = [ "node", "num_cpus", "openmina-core", + "openmina-node-account", "openmina-node-native", "rand 0.8.5", "rayon", @@ -2411,7 +2412,7 @@ dependencies = [ [[package]] name = "hash-tool" -version = "0.8.1" +version = "0.8.2" dependencies = [ "bs58 0.5.0", "hex", @@ -3009,7 +3010,7 @@ dependencies = [ [[package]] name = "ledger-tool" -version = "0.8.1" +version = "0.8.2" dependencies = [ "anyhow", "mina-curves", @@ -3335,7 +3336,7 @@ dependencies = [ [[package]] name = "libp2p-rpc-behaviour" -version = "0.8.1" +version = "0.8.2" dependencies = [ "libp2p", "log", @@ -3714,7 +3715,7 @@ dependencies = [ [[package]] name = "mina-transport" -version = "0.8.1" +version = "0.8.2" dependencies = [ "blake2", "hex", @@ -3725,7 +3726,7 @@ dependencies = [ [[package]] name = "mina-tree" -version = "0.8.1" +version = "0.8.2" dependencies = [ "anyhow", "ark-ec", @@ -4058,7 +4059,7 @@ dependencies = [ [[package]] name = "node" -version = "0.8.1" +version = "0.8.2" dependencies = [ "anyhow", "derive_more", @@ -4403,7 +4404,7 @@ checksum = "624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5" [[package]] name = "openmina-bootstrap-sandbox" -version = "0.8.1" +version = "0.8.2" dependencies = 
[ "base64 0.21.7", "binprot", @@ -4428,7 +4429,7 @@ dependencies = [ [[package]] name = "openmina-core" -version = "0.8.1" +version = "0.8.2" dependencies = [ "ark-ff", "binprot", @@ -4455,7 +4456,7 @@ dependencies = [ [[package]] name = "openmina-fuzzer" -version = "0.8.1" +version = "0.8.2" dependencies = [ "lazy_static", "rand 0.8.5", @@ -4466,7 +4467,7 @@ dependencies = [ [[package]] name = "openmina-gossipsub-sandbox" -version = "0.8.1" +version = "0.8.2" dependencies = [ "bs58 0.5.0", "env_logger", @@ -4480,7 +4481,7 @@ dependencies = [ [[package]] name = "openmina-macros" -version = "0.8.1" +version = "0.8.2" dependencies = [ "anyhow", "openmina-core", @@ -4493,7 +4494,7 @@ dependencies = [ [[package]] name = "openmina-node-account" -version = "0.8.1" +version = "0.8.2" dependencies = [ "anyhow", "argon2", @@ -4514,7 +4515,7 @@ dependencies = [ [[package]] name = "openmina-node-common" -version = "0.8.1" +version = "0.8.2" dependencies = [ "gloo-timers", "gloo-utils", @@ -4542,7 +4543,7 @@ dependencies = [ [[package]] name = "openmina-node-invariants" -version = "0.8.1" +version = "0.8.2" dependencies = [ "documented", "lazy_static", @@ -4556,7 +4557,7 @@ dependencies = [ [[package]] name = "openmina-node-native" -version = "0.8.1" +version = "0.8.2" dependencies = [ "anyhow", "bytes", @@ -4590,7 +4591,7 @@ dependencies = [ [[package]] name = "openmina-node-testing" -version = "0.8.1" +version = "0.8.2" dependencies = [ "anyhow", "axum", @@ -4634,7 +4635,7 @@ dependencies = [ [[package]] name = "openmina-node-web" -version = "0.8.1" +version = "0.8.2" dependencies = [ "anyhow", "bytes", @@ -4660,7 +4661,7 @@ dependencies = [ [[package]] name = "openmina-producer-dashboard" -version = "0.8.1" +version = "0.8.2" dependencies = [ "bincode", "clap 4.5.2", @@ -4752,7 +4753,7 @@ dependencies = [ [[package]] name = "p2p" -version = "0.8.1" +version = "0.8.2" dependencies = [ "anyhow", "binprot", @@ -4815,7 +4816,7 @@ dependencies = [ [[package]] name = 
"p2p-testing" -version = "0.8.1" +version = "0.8.2" dependencies = [ "derive_more", "futures", @@ -5670,7 +5671,7 @@ checksum = "c08c74e62047bb2de4ff487b251e4a92e24f48745648451635cec7d591162d9f" [[package]] name = "replay_dynamic_effects" -version = "0.8.1" +version = "0.8.2" dependencies = [ "node", "openmina-node-invariants", @@ -6014,7 +6015,7 @@ checksum = "1ad4cc8da4ef723ed60bced201181d83791ad433213d8c24efffda1eec85d741" [[package]] name = "salsa-simple" -version = "0.8.1" +version = "0.8.2" dependencies = [ "generic-array", "hex", @@ -6407,7 +6408,7 @@ dependencies = [ [[package]] name = "snark" -version = "0.8.1" +version = "0.8.2" dependencies = [ "ark-ec", "ark-ff", @@ -7590,7 +7591,7 @@ checksum = "6a02e4885ed3bc0f2de90ea6dd45ebcbb66dacffe03547fadbb0eeae2770887d" [[package]] name = "vrf" -version = "0.8.1" +version = "0.8.2" dependencies = [ "anyhow", "ark-ec", @@ -7606,6 +7607,7 @@ dependencies = [ "mina-tree", "num", "o1-utils", + "openmina-node-account", "rand 0.8.5", "redux", "serde", diff --git a/Cargo.toml b/Cargo.toml index 5adff0a341..c795dcf775 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -46,6 +46,7 @@ mina-poseidon = {git = "https://github.com/openmina/proof-systems", branch = "le poly-commitment = {git = "https://github.com/openmina/proof-systems", branch = "ledger-newtypes-rampup4-vrf"} libp2p = { git = "https://github.com/openmina/rust-libp2p", rev = "5c44c7d9", default-features = false } vrf = { path = "vrf" } +openmina-node-account = { path = "node/account" } redux = { git = "https://github.com/openmina/redux-rs.git", rev = "588dd76c", features = ["serde"] } serde = "1.0.190" serde_json = "1.0.107" diff --git a/Dockerfile b/Dockerfile index 0f830a4bbb..994e211bc1 100644 --- a/Dockerfile +++ b/Dockerfile @@ -8,6 +8,10 @@ COPY . . RUN cargo build --release --package=cli --bin=openmina RUN cargo build --release --features scenario-generators --bin openmina-node-testing +# necessary for proof generation when running a block producer. 
+RUN git clone --depth 1 https://github.com/openmina/circuit-blobs.git \ + && rm -rf circuit-blobs/berkeley_rc1 circuit-blobs/*/tests + FROM openmina/mina-snark-worker-prover:${MINA_SNARK_WORKER_TAG} AS prover FROM debian:buster @@ -15,5 +19,7 @@ RUN apt-get update && apt-get install -y libjemalloc2 libssl1.1 libpq5 curl jq p COPY --from=build /openmina/cli/bin/snark-worker /usr/local/bin/ COPY --from=build /openmina/target/release/openmina /usr/local/bin/ COPY --from=build /openmina/target/release/openmina-node-testing /usr/local/bin/ +RUN mkdir -p /usr/local/lib/openmina/circuit-blobs +COPY --from=build /openmina/circuit-blobs/ /usr/local/lib/openmina/circuit-blobs/ COPY --from=prover /usr/local/bin/mina /usr/local/bin -ENTRYPOINT [ "openmina" ] \ No newline at end of file +ENTRYPOINT [ "openmina" ] diff --git a/cli/Cargo.toml b/cli/Cargo.toml index 90839ed066..908b11de69 100644 --- a/cli/Cargo.toml +++ b/cli/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "cli" -version = "0.8.1" +version = "0.8.2" edition = "2021" license = "Apache-2.0" @@ -31,6 +31,7 @@ reqwest = { version = "0.11.24", features = ["blocking", "json"] } openmina-core = { path = "../core" } node = { path = "../node", features = ["replay"] } openmina-node-native = { path = "../node/native" } +openmina-node-account = { path = "../node/account" } bytes = "1.4.0" tracing = "0.1.37" nix = { version = "0.26.2", features = ["signal"] } diff --git a/cli/replay_dynamic_effects/Cargo.toml b/cli/replay_dynamic_effects/Cargo.toml index 1cb3819c8d..0d11b20e1a 100644 --- a/cli/replay_dynamic_effects/Cargo.toml +++ b/cli/replay_dynamic_effects/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "replay_dynamic_effects" -version = "0.8.1" +version = "0.8.2" edition = "2021" license = "Apache-2.0" diff --git a/cli/src/commands/misc.rs b/cli/src/commands/misc.rs index b1bb6e0e83..c53c8487c5 100644 --- a/cli/src/commands/misc.rs +++ b/cli/src/commands/misc.rs @@ -34,7 +34,7 @@ impl P2PKeyPair { let secret_key = 
self.p2p_secret_key.unwrap_or_else(SecretKey::rand); let public_key = secret_key.public_key(); let peer_id = public_key.peer_id(); - let libp2p_peer_id = PeerId::from(peer_id); + let libp2p_peer_id = PeerId::try_from(peer_id)?; println!("secret key: {secret_key}"); println!("public key: {public_key}"); println!("peer_id: {peer_id}"); diff --git a/cli/src/commands/node/mod.rs b/cli/src/commands/node/mod.rs index ddda518215..f0dabbda60 100644 --- a/cli/src/commands/node/mod.rs +++ b/cli/src/commands/node/mod.rs @@ -23,6 +23,16 @@ pub struct Node { #[arg(long, short = 's', env = "OPENMINA_P2P_SEC_KEY")] pub p2p_secret_key: Option, + // warning, this overrides `OPENMINA_P2P_SEC_KEY` + /// Compatibility with OCaml Mina node + #[arg(long)] + pub libp2p_keypair: Option, + + // warning, this overrides `OPENMINA_P2P_SEC_KEY` + /// Compatibility with OCaml Mina node + #[arg(env = "MINA_LIBP2P_PASS")] + pub libp2p_password: Option, + /// Http port to listen on #[arg(long, short, env, default_value = "3000")] pub port: u16, @@ -65,6 +75,8 @@ pub struct Node { /// MINA_PRIVKEY_PASS must be set to decrypt the keyfile #[arg(long, env)] pub producer_key: Option, + #[arg(env = "MINA_PRIVKEY_PASS")] + pub producer_key_password: Option, /// Snark fee, in Mina #[arg(long, env, default_value_t = 1_000_000)] pub snarker_fee: u64, @@ -124,6 +136,38 @@ impl Node { if let Some(sec_key) = self.p2p_secret_key { node_builder.p2p_sec_key(sec_key); } + + // warning, this overrides `OPENMINA_P2P_SEC_KEY` + if let (Some(key_file), Some(password)) = (&self.libp2p_keypair, &self.libp2p_password) { + match AccountSecretKey::from_encrypted_file(key_file, password) { + Ok(sk) => { + node_builder.p2p_sec_key(SecretKey::from_bytes(sk.to_bytes())); + node::core::info!( + node::core::log::system_time(); + summary = "read secret key from file", + file_name = key_file, + pk = sk.public_key().to_string(), + ) + } + Err(err) => { + node::core::error!( + node::core::log::system_time(); + summary = "failed to 
read secret key", + file_name = key_file, + err = err.to_string(), + ); + return Err(err.into()); + } + } + } else if self.libp2p_keypair.is_some() && self.libp2p_password.is_none() { + let error = "keyfile is specified, but `MINA_LIBP2P_PASS` is not set"; + node::core::error!( + node::core::log::system_time(); + summary = error, + ); + return Err(anyhow::anyhow!(error)); + } + node_builder.p2p_libp2p_port(self.libp2p_port); self.seed.then(|| node_builder.p2p_seed_node()); @@ -138,8 +182,13 @@ impl Node { node_builder.initial_peers_from_url(url)?; } - if let Some(producer_key_path) = self.producer_key { - node_builder.block_producer_from_file(producer_key_path)?; + if let (Some(producer_key_path), Some(password)) = + (self.producer_key, &self.producer_key_password) + { + node::core::info!(node::core::log::system_time(); summary = "loading provers index"); + ledger::proofs::gates::get_provers(); + node::core::info!(node::core::log::system_time(); summary = "loaded provers index"); + node_builder.block_producer_from_file(producer_key_path, password)?; } if let Some(sec_key) = self.run_snarker { diff --git a/core/Cargo.toml b/core/Cargo.toml index 08883b5183..adba0be8a8 100644 --- a/core/Cargo.toml +++ b/core/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "openmina-core" -version = "0.8.1" +version = "0.8.2" edition = "2021" license = "Apache-2.0" diff --git a/core/src/log.rs b/core/src/log.rs index 7662cdeab0..fcdd59b81c 100644 --- a/core/src/log.rs +++ b/core/src/log.rs @@ -140,3 +140,16 @@ pub trait ActionEvent { use tracing::Value; pub use crate::{debug, error, info, trace, warn}; + +#[macro_export] +macro_rules! 
bug_condition { + ($($arg:tt)*) => {{ + if std::env::var("OPENMINA_PANIC_ON_BUG") + .map(|v| v.to_lowercase() == "true") + .unwrap_or(false) { + panic!($($arg)*) + } else { + $crate::log::inner::error!($($arg)*) + } + }}; +} diff --git a/docker-compose.local.producers.yml b/docker-compose.local.producers.yml index 8156e1c683..bdc8bb8b65 100644 --- a/docker-compose.local.producers.yml +++ b/docker-compose.local.producers.yml @@ -1,7 +1,7 @@ services: local-producer-cluster: container_name: local-producer-cluster - image: openmina/openmina:0.8.1 + image: openmina/openmina:0.8.2 environment: - RUST_BACKTRACE=1 entrypoint: ["openmina-node-testing", "scenarios-generate", "--name", "simulation-small-forever-real-time"] @@ -12,7 +12,7 @@ services: frontend: container_name: frontend - image: openmina/frontend:0.8.1-producer-demo + image: openmina/frontend:0.8.2-producer-demo # build: # context: . # dockerfile: Dockerfile_FE diff --git a/frontend/Dockerfile b/frontend/Dockerfile index e8684fddba..ece84a24fa 100644 --- a/frontend/Dockerfile +++ b/frontend/Dockerfile @@ -1,5 +1,5 @@ FROM node:18 AS BUILD_IMAGE -ARG BUILD_CONFIGURATION=producer +ARG BUILD_CONFIGURATION=staging WORKDIR /app COPY . . 
RUN npm install diff --git a/frontend/angular.json b/frontend/angular.json index 1d297b6deb..b6f5338908 100644 --- a/frontend/angular.json +++ b/frontend/angular.json @@ -109,6 +109,15 @@ } ], "outputHashing": "all" + }, + "staging": { + "fileReplacements": [ + { + "replace": "src/environments/environment.ts", + "with": "src/environments/environment.staging.ts" + } + ], + "outputHashing": "all" } }, "defaultConfiguration": "production" @@ -126,6 +135,12 @@ "browserTarget": "frontend:build:local" } }, + "options": { + "headers": { + "Cross-Origin-Opener-Policy": "same-origin", + "Cross-Origin-Embedder-Policy": "require-corp" + } + }, "defaultConfiguration": "development" }, "extract-i18n": { diff --git a/frontend/cypress/e2e/block-production/won-slots/apis.cy.ts b/frontend/cypress/e2e/block-production/won-slots/apis.cy.ts index 5c0dc69e91..30bd37f20d 100644 --- a/frontend/cypress/e2e/block-production/won-slots/apis.cy.ts +++ b/frontend/cypress/e2e/block-production/won-slots/apis.cy.ts @@ -14,8 +14,8 @@ let response: WonSlotResponse; describe('BLOCK PRODUCTION WON SLOTS APIS', () => { beforeEach(() => { - console.log('beforeEach'); cy + .wait(10000) .visit(Cypress.config().baseUrl) .window() .its('store') diff --git a/frontend/cypress/e2e/block-production/won-slots/filters.cy.ts b/frontend/cypress/e2e/block-production/won-slots/filters.cy.ts index ad2b383eb2..fdcf0b5e3c 100644 --- a/frontend/cypress/e2e/block-production/won-slots/filters.cy.ts +++ b/frontend/cypress/e2e/block-production/won-slots/filters.cy.ts @@ -83,7 +83,7 @@ describe('BLOCK PRODUCTION WON SLOTS FILTERS', () => { if (condition(state)) { cy.get('mina-block-production-won-slots-filters .overflow-hidden > div:nth-child(3)') .then((div: any) => expect(div.text().trim()).equals( - `${state.slots.filter(s => s.status === BlockProductionWonSlotsStatus.Discarded || s.status === BlockProductionWonSlotsStatus.Orphaned).length} Dropped`, + `${state.slots.filter(s => s.status === 
BlockProductionWonSlotsStatus.Orphaned).length} Orphaned`, )); } }); @@ -95,7 +95,7 @@ describe('BLOCK PRODUCTION WON SLOTS FILTERS', () => { .then(getBPWonSlots) .then((state: BlockProductionWonSlotsState) => { if (condition(state)) { - cy.get('mina-block-production-won-slots-filters .overflow-hidden > div:nth-child(4)') + cy.get('mina-block-production-won-slots-filters .overflow-hidden > div:nth-child(5)') .then((div: any) => expect(div.text().trim()).equals( `${state.slots.filter(s => !s.status || s.status === BlockProductionWonSlotsStatus.Scheduled).length} Upcoming`, )); @@ -109,14 +109,18 @@ describe('BLOCK PRODUCTION WON SLOTS FILTERS', () => { .then(getBPWonSlots) .then((state: BlockProductionWonSlotsState) => { if (condition(state)) { - if (hasDropped(state)) { - cy.get('mina-block-production-won-slots-filters .overflow-hidden > div.aware-primary', { timeout: 500 }) + if (hasOrphaned(state)) { + cy.get('mina-block-production-won-slots-filters .overflow-hidden > div:nth-child(3)', { timeout: 500 }) .click(); } if (hasUpcoming(state)) { cy.get('mina-block-production-won-slots-filters .overflow-hidden > div.bg-container.primary', { timeout: 500 }) .click(); } + if (hasDiscarded(state)) { + cy.get('mina-block-production-won-slots-filters .overflow-hidden > div:nth-child(4)', { timeout: 500 }) + .click(); + } cy .wait(1000) .window() @@ -131,7 +135,7 @@ describe('BLOCK PRODUCTION WON SLOTS FILTERS', () => { }); })); - it('show only dropped blocks', () => execute(() => { + it('show only orphaned blocks', () => execute(() => { cy.window() .its('store') .then(getBPWonSlots) @@ -145,6 +149,10 @@ describe('BLOCK PRODUCTION WON SLOTS FILTERS', () => { cy.get('mina-block-production-won-slots-filters .overflow-hidden > div.bg-container.primary') .click(); } + if (hasDiscarded(state)) { + cy.get('mina-block-production-won-slots-filters .overflow-hidden > div:nth-child(4)', { timeout: 500 }) + .click(); + } cy .wait(1000) .window() @@ -153,7 +161,7 @@ describe('BLOCK 
PRODUCTION WON SLOTS FILTERS', () => { .then((state: BlockProductionWonSlotsState) => { const producing = state.slots.filter(s => s.active || s.status === BlockProductionWonSlotsStatus.Committed).length; const scheduled = state.slots.filter(s => s.status === BlockProductionWonSlotsStatus.Scheduled).length; - expect(state.filteredSlots.length).equals(state.slots.filter(s => s.status === BlockProductionWonSlotsStatus.Orphaned || s.status === BlockProductionWonSlotsStatus.Discarded).length + producing + scheduled); + expect(state.filteredSlots.length).equals(state.slots.filter(s => s.status === BlockProductionWonSlotsStatus.Orphaned).length + producing + scheduled); }); } }); @@ -169,8 +177,12 @@ describe('BLOCK PRODUCTION WON SLOTS FILTERS', () => { cy.get('mina-block-production-won-slots-filters .overflow-hidden > div.success-primary') .click(); } - if (hasDropped(state)) { - cy.get('mina-block-production-won-slots-filters .overflow-hidden > div.aware-primary') + if (hasOrphaned(state)) { + cy.get('mina-block-production-won-slots-filters .overflow-hidden > div:nth-child(3)', { timeout: 500 }) + .click(); + } + if (hasDiscarded(state)) { + cy.get('mina-block-production-won-slots-filters .overflow-hidden > div:nth-child(4)', { timeout: 500 }) .click(); } cy @@ -193,8 +205,12 @@ function hasCanonical(state: BlockProductionWonSlotsState): boolean { return state.slots.some(s => s.status === BlockProductionWonSlotsStatus.Canonical); } -function hasDropped(state: BlockProductionWonSlotsState): boolean { - return state.slots.some(s => s.status === BlockProductionWonSlotsStatus.Discarded || s.status === BlockProductionWonSlotsStatus.Orphaned); +function hasOrphaned(state: BlockProductionWonSlotsState): boolean { + return state.slots.some(s => s.status === BlockProductionWonSlotsStatus.Orphaned); +} + +function hasDiscarded(state: BlockProductionWonSlotsState): boolean { + return state.slots.some(s => s.status === BlockProductionWonSlotsStatus.Discarded); } function 
hasUpcoming(state: BlockProductionWonSlotsState): boolean { diff --git a/frontend/cypress/e2e/block-production/won-slots/side-panel.cy.ts b/frontend/cypress/e2e/block-production/won-slots/side-panel.cy.ts index f8ca7585ad..ae20b18205 100644 --- a/frontend/cypress/e2e/block-production/won-slots/side-panel.cy.ts +++ b/frontend/cypress/e2e/block-production/won-slots/side-panel.cy.ts @@ -6,7 +6,6 @@ import { BlockProductionWonSlotsState } from '@block-production/won-slots/block- import { BlockProductionWonSlotsStatus, } from '@shared/types/block-production/won-slots/block-production-won-slots-slot.type'; -import { hasValue } from '@openmina/shared'; const condition = (state: BlockProductionWonSlotsState): boolean => state && state.slots?.length > 0; const getBPWonSlots = (store: Store): BlockProductionWonSlotsState => stateSliceAsPromise(store, condition, 'blockProduction', 'wonSlots'); @@ -77,7 +76,7 @@ describe('BLOCK PRODUCTION WON SLOTS SIDE PANEL', () => { }); })); - it.only('selecting first slot should display its data in the side panel', () => execute(() => { + it('selecting first slot should display its data in the side panel', () => execute(() => { cy.window() .its('store') .then(getBPWonSlots) @@ -102,6 +101,7 @@ describe('BLOCK PRODUCTION WON SLOTS SIDE PANEL', () => { .then((state: BlockProductionWonSlotsState) => { expect(state.activeSlot.globalSlot).to.equal(expectedActiveSlot.globalSlot); expect(state.activeSlot.height).to.equal(expectedActiveSlot.height); + console.log(expectedActiveSlot.times); }) .get('mina-block-production-won-slots-side-panel .percentage') .should('have.text', ([ @@ -110,7 +110,7 @@ describe('BLOCK PRODUCTION WON SLOTS SIDE PANEL', () => { expectedActiveSlot.times?.proofCreate, expectedActiveSlot.times?.blockApply, expectedActiveSlot.times?.committed, - ].filter(t => t !== undefined).length * 20) + '%'); + ].filter(t => t !== null && t !== undefined).length * 20) + '%'); }); } diff --git 
a/frontend/cypress/e2e/block-production/won-slots/table.cy.ts b/frontend/cypress/e2e/block-production/won-slots/table.cy.ts index 0f2f93b475..1262ddae74 100644 --- a/frontend/cypress/e2e/block-production/won-slots/table.cy.ts +++ b/frontend/cypress/e2e/block-production/won-slots/table.cy.ts @@ -77,7 +77,7 @@ describe('BLOCK PRODUCTION WON SLOTS TABLE', () => { .then(getBPWonSlots) .then((state: BlockProductionWonSlotsState) => { if (condition(state)) { - checkSorting(state.filteredSlots, 'slotTime', Sort.DSC); + checkSorting(state.filteredSlots, 'slotTime', Sort.ASC); } }); })); @@ -103,7 +103,7 @@ describe('BLOCK PRODUCTION WON SLOTS TABLE', () => { .then(getBPWonSlots) .then((state: BlockProductionWonSlotsState) => { if (condition(state)) { - checkSorting(state.filteredSlots, 'height', Sort.DSC); + checkSorting(state.filteredSlots, 'height', Sort.ASC); } }); })); @@ -116,7 +116,7 @@ describe('BLOCK PRODUCTION WON SLOTS TABLE', () => { .then(getBPWonSlots) .then((state: BlockProductionWonSlotsState) => { if (condition(state)) { - checkSorting(state.filteredSlots, 'globalSlot', Sort.DSC); + checkSorting(state.filteredSlots, 'globalSlot', Sort.ASC); } }); })); @@ -129,46 +129,46 @@ describe('BLOCK PRODUCTION WON SLOTS TABLE', () => { .then(getBPWonSlots) .then((state: BlockProductionWonSlotsState) => { if (condition(state)) { - checkSorting(state.filteredSlots, 'transactionsTotal', Sort.DSC); + checkSorting(state.filteredSlots, 'transactionsTotal', Sort.ASC); } }); })); it('sort by snark fees', () => execute(() => { - cy.get('mina-block-production-won-slots-table .head > span:nth-child(6)') + cy.get('mina-block-production-won-slots-table .head > span:nth-child(7)') .click() .window() .its('store') .then(getBPWonSlots) .then((state: BlockProductionWonSlotsState) => { if (condition(state)) { - checkSorting(state.filteredSlots, 'snarkFees', Sort.DSC); + checkSorting(state.filteredSlots, 'snarkFees', Sort.ASC); } }); })); it('sort by snark coinbase rewards', () => 
execute(() => { - cy.get('mina-block-production-won-slots-table .head > span:nth-child(7)') + cy.get('mina-block-production-won-slots-table .head > span:nth-child(8)') .click() .window() .its('store') .then(getBPWonSlots) .then((state: BlockProductionWonSlotsState) => { if (condition(state)) { - checkSorting(state.filteredSlots, 'coinbaseRewards', Sort.DSC); + checkSorting(state.filteredSlots, 'coinbaseRewards', Sort.ASC); } }); })); it('sort by snark tx fees rewards', () => execute(() => { - cy.get('mina-block-production-won-slots-table .head > span:nth-child(8)') + cy.get('mina-block-production-won-slots-table .head > span:nth-child(9)') .click() .window() .its('store') .then(getBPWonSlots) .then((state: BlockProductionWonSlotsState) => { if (condition(state)) { - checkSorting(state.filteredSlots, 'txFeesRewards', Sort.DSC); + checkSorting(state.filteredSlots, 'txFeesRewards', Sort.ASC); } }); })); diff --git a/frontend/package-lock.json b/frontend/package-lock.json index 537750d3b5..870eadb770 100644 --- a/frontend/package-lock.json +++ b/frontend/package-lock.json @@ -24,6 +24,7 @@ "@ngrx/router-store": "^16.2.0", "@ngrx/store": "^16.2.0", "@openmina/shared": "^0.96.0", + "base-x": "^5.0.0", "buffer": "^6.0.3", "d3": "^7.8.4", "eigen": "^0.2.2", @@ -5155,6 +5156,11 @@ "dev": true, "license": "MIT" }, + "node_modules/base-x": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/base-x/-/base-x-5.0.0.tgz", + "integrity": "sha512-sMW3VGSX1QWVFA6l8U62MLKz29rRfpTlYdCqLdpLo1/Yd4zZwSbnUaDfciIAowAqvq7YFnWq9hrhdg1KYgc1lQ==" + }, "node_modules/base64-js": { "version": "1.5.1", "funding": [ diff --git a/frontend/package.json b/frontend/package.json index d1c245bc9c..0b2b7c991e 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -30,6 +30,7 @@ "@ngrx/router-store": "^16.2.0", "@ngrx/store": "^16.2.0", "@openmina/shared": "^0.96.0", + "base-x": "^5.0.0", "buffer": "^6.0.3", "d3": "^7.8.4", "eigen": "^0.2.2", diff --git 
a/frontend/src/app/app.effects.ts b/frontend/src/app/app.effects.ts index a60391efba..3adfcc79ea 100644 --- a/frontend/src/app/app.effects.ts +++ b/frontend/src/app/app.effects.ts @@ -3,7 +3,7 @@ import { MinaState, selectMinaState } from '@app/app.setup'; import { Actions, createEffect, ofType } from '@ngrx/effects'; import { Store } from '@ngrx/store'; import { createNonDispatchableEffect, Effect, removeParamsFromURL } from '@openmina/shared'; -import { filter, map, switchMap, tap } from 'rxjs'; +import { filter, from, map, switchMap, tap } from 'rxjs'; import { AppActions } from '@app/app.actions'; import { Router } from '@angular/router'; import { FeatureType, MinaNode } from '@shared/types/core/environment/mina-env.type'; @@ -11,6 +11,7 @@ import { AppService } from '@app/app.service'; import { getFirstFeature, isFeatureEnabled } from '@shared/constants/config'; import { RustService } from '@core/services/rust.service'; import { BaseEffect } from '@shared/base-classes/mina-rust-base.effect'; +import { WebNodeService } from '@core/services/web-node.service'; const INIT_EFFECTS = '@ngrx/effects/init'; @@ -30,6 +31,7 @@ export class AppEffects extends BaseEffect { private appService: AppService, private rustNode: RustService, private router: Router, + private webNodeService: WebNodeService, store: Store) { super(store, selectMinaState); @@ -62,6 +64,14 @@ export class AppEffects extends BaseEffect { this.router.navigate([getFirstFeature(state.app.activeNode)]); } }), + switchMap(({ state }) => { + if (state.app.activeNode.isWebNode) { + return this.webNodeService.loadWasm$().pipe( + switchMap(() => this.webNodeService.startWasm$()), + ); + } + return from([]); + }), map(() => AppActions.getNodeDetails()), )); diff --git a/frontend/src/app/core/services/rust.service.ts b/frontend/src/app/core/services/rust.service.ts index 019f9d5c40..26c8e8d01c 100644 --- a/frontend/src/app/core/services/rust.service.ts +++ b/frontend/src/app/core/services/rust.service.ts @@ -1,7 
+1,8 @@ import { Injectable } from '@angular/core'; import { MinaNode } from '@shared/types/core/environment/mina-env.type'; import { HttpClient } from '@angular/common/http'; -import { Observable } from 'rxjs'; +import { EMPTY, map, Observable, of } from 'rxjs'; +import { WebNodeService } from '@core/services/web-node.service'; @Injectable({ providedIn: 'root', @@ -10,7 +11,8 @@ export class RustService { private node: MinaNode; - constructor(private http: HttpClient) {} + constructor(private http: HttpClient, + private webNodeService: WebNodeService) {} changeRustNode(node: MinaNode): void { this.node = node; @@ -25,6 +27,12 @@ export class RustService { } get(path: string): Observable { + if (this.node.isWebNode) { + return this.getFromWebNode(path).pipe(map((response: any) => { + // console.log(path, response); + return response; + })); + } return this.http.get(this.URL + path); } @@ -35,4 +43,21 @@ export class RustService { getMemProfiler(path: string): Observable { return this.http.get(this.node.memoryProfiler + path); } + + private getFromWebNode(path: string): Observable { + switch (path) { + case '/status': + return this.webNodeService.status$; + case '/state/peers': + return this.webNodeService.peers$; + case '/state/message-progress': + return this.webNodeService.messageProgress$; + case '/stats/sync?limit=1': + return this.webNodeService.sync$; + case '/stats/block_producer': + return this.webNodeService.blockProducerStats$; + default: + throw new Error('Unknown path for web node'); + } + } } diff --git a/frontend/src/app/core/services/web-node.service.ts b/frontend/src/app/core/services/web-node.service.ts new file mode 100644 index 0000000000..db550faf6d --- /dev/null +++ b/frontend/src/app/core/services/web-node.service.ts @@ -0,0 +1,87 @@ +import { Injectable } from '@angular/core'; +import { BehaviorSubject, filter, from, fromEvent, map, Observable, of, switchMap, tap } from 'rxjs'; +import base from 'base-x'; +import { any, log } from 
'@openmina/shared'; + +@Injectable({ + providedIn: 'root', +}) +export class WebNodeService { + + private readonly backendSubject$: BehaviorSubject = new BehaviorSubject(null); + private backend: any; + webNodeState: string = 'notLoaded'; + + constructor() { + const basex = base('123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz'); + any(window)['bs58btc'] = { + encode: (buffer: Uint8Array | number[]) => 'z' + basex.encode(buffer), + decode: (string: string) => basex.decode(string.substring(1)), + }; + } + + loadWasm$(): Observable { + console.log('loading WASM'); + console.log((window as any).webnode); + if ((window as any).webnode) { + return of(void 0); + } + return fromEvent(window, 'webNodeLoaded').pipe(map(() => void 0)); + } + + startWasm$(): Observable { + console.log('starting WASM'); + return of((window as any).webnode) + .pipe( + switchMap((wasm: any) => from(wasm.default('assets/webnode/pkg/openmina_node_web_bg.wasm')).pipe(map(() => wasm))), + switchMap((wasm) => { + console.log(wasm); + return from(wasm.run()); + }), + tap((jsHandle: any) => { + this.backend = jsHandle; + console.log('----------------JS HANDLE----------------'); + console.log(jsHandle); + console.log('----------------JS HANDLE----------------'); + this.backendSubject$.next(jsHandle); + }), + switchMap(() => this.backendSubject$.asObservable()), + filter(Boolean), + ); + } + + get status$(): Observable { + return this.backendSubject$.asObservable().pipe( + filter(Boolean), + switchMap(handle => from((handle as any).status())), + ); + } + + get blockProducerStats$(): Observable { + return this.backendSubject$.asObservable().pipe( + filter(Boolean), + switchMap(handle => from((handle as any).stats().block_producer())), + ); + } + + get peers$(): Observable { + return this.backendSubject$.asObservable().pipe( + filter(Boolean), + switchMap(handle => from(any(handle).state().peers())), + ); + } + + get messageProgress$(): Observable { + return 
this.backendSubject$.asObservable().pipe( + filter(Boolean), + switchMap(handle => from((handle as any).state().message_progress())), + ); + } + + get sync$(): Observable { + return this.backendSubject$.asObservable().pipe( + filter(Boolean), + switchMap(handle => from((handle as any).stats().sync())), + ); + } +} diff --git a/frontend/src/app/features/block-production/won-slots/block-production-won-slots.actions.ts b/frontend/src/app/features/block-production/won-slots/block-production-won-slots.actions.ts index 706105d88b..cdb308fdd4 100644 --- a/frontend/src/app/features/block-production/won-slots/block-production-won-slots.actions.ts +++ b/frontend/src/app/features/block-production/won-slots/block-production-won-slots.actions.ts @@ -16,7 +16,7 @@ export const BLOCK_PRODUCTION_WON_SLOTS_KEY = 'wonSlots'; const type = (type: T) => createType(BLOCK_PRODUCTION_PREFIX, 'Won Slots', type); -const init = createAction(type('Init')); +const init = createAction(type('Init'), props<{ activeSlotRoute: string }>()); const close = createAction(type('Close')); const getSlots = createAction(type('Get Slots')); const getSlotsSuccess = createAction(type('Get Slots Success'), props<{ diff --git a/frontend/src/app/features/block-production/won-slots/block-production-won-slots.component.ts b/frontend/src/app/features/block-production/won-slots/block-production-won-slots.component.ts index b3f0984e42..fa9a1ce8c7 100644 --- a/frontend/src/app/features/block-production/won-slots/block-production-won-slots.component.ts +++ b/frontend/src/app/features/block-production/won-slots/block-production-won-slots.component.ts @@ -1,7 +1,7 @@ import { ChangeDetectionStrategy, Component, ElementRef, OnDestroy, OnInit } from '@angular/core'; import { StoreDispatcher } from '@shared/base-classes/store-dispatcher.class'; -import { isDesktop, isMobile } from '@openmina/shared'; -import { debounceTime, filter, fromEvent, timer } from 'rxjs'; +import { getMergedRoute, isDesktop, isMobile, MergedRoute } 
from '@openmina/shared'; +import { debounceTime, filter, fromEvent, take, timer } from 'rxjs'; import { untilDestroyed } from '@ngneat/until-destroy'; import { BlockProductionWonSlotsActions } from '@block-production/won-slots/block-production-won-slots.actions'; import { AppSelectors } from '@app/app.state'; @@ -30,7 +30,9 @@ export class BlockProductionWonSlotsComponent extends StoreDispatcher implements private listenToActiveNode(): void { this.select(AppSelectors.activeNode, () => { - this.dispatch2(BlockProductionWonSlotsActions.init()); + this.select(getMergedRoute, (data: MergedRoute) => { + this.dispatch2(BlockProductionWonSlotsActions.init({ activeSlotRoute: data.params['id'] })); + }, take(1)); }); } diff --git a/frontend/src/app/features/block-production/won-slots/block-production-won-slots.effects.ts b/frontend/src/app/features/block-production/won-slots/block-production-won-slots.effects.ts index 73d5c41c71..6cb1e24b4f 100644 --- a/frontend/src/app/features/block-production/won-slots/block-production-won-slots.effects.ts +++ b/frontend/src/app/features/block-production/won-slots/block-production-won-slots.effects.ts @@ -9,7 +9,10 @@ import { Store } from '@ngrx/store'; import { BaseEffect } from '@shared/base-classes/mina-rust-base.effect'; import { BlockProductionModule } from '@block-production/block-production.module'; import { BlockProductionWonSlotsService } from '@block-production/won-slots/block-production-won-slots.service'; -import { BlockProductionWonSlotsActions } from '@block-production/won-slots/block-production-won-slots.actions'; +import { + BLOCK_PRODUCTION_WON_SLOTS_KEY, + BlockProductionWonSlotsActions, +} from '@block-production/won-slots/block-production-won-slots.actions'; import { BlockProductionWonSlotsStatus, } from '@shared/types/block-production/won-slots/block-production-won-slots-slot.type'; @@ -44,10 +47,9 @@ export class BlockProductionWonSlotsEffects extends BaseEffect { ? 
EMPTY : this.wonSlotsService.getSlots().pipe( switchMap(({ slots, epoch }) => { - const initialActiveSlot = state.blockProduction.wonSlots.activeSlot; - let newActiveSlot = slots.find(s => s.globalSlot === initialActiveSlot?.globalSlot); - - if (!initialActiveSlot || initialActiveSlot && !newActiveSlot) { + const activeSlotRoute = state.blockProduction[BLOCK_PRODUCTION_WON_SLOTS_KEY].activeSlotRoute; + let newActiveSlot = slots.find(s => s.globalSlot.toString() === activeSlotRoute); + if (!activeSlotRoute || (activeSlotRoute && !newActiveSlot)) { newActiveSlot = slots.find(s => s.active) ?? slots.find(s => s.status === BlockProductionWonSlotsStatus.Committed) ?? slots.find(s => s.status === BlockProductionWonSlotsStatus.Scheduled) diff --git a/frontend/src/app/features/block-production/won-slots/block-production-won-slots.reducer.ts b/frontend/src/app/features/block-production/won-slots/block-production-won-slots.reducer.ts index 2cdcae7d27..187fdf4725 100644 --- a/frontend/src/app/features/block-production/won-slots/block-production-won-slots.reducer.ts +++ b/frontend/src/app/features/block-production/won-slots/block-production-won-slots.reducer.ts @@ -12,11 +12,12 @@ const initialState: BlockProductionWonSlotsState = { slots: [], filteredSlots: [], activeSlot: undefined, + activeSlotRoute: undefined, filters: { accepted: true, - rejected: true, + orphaned: true, upcoming: true, - missed: true, + discarded: true, }, sort: { sortBy: 'slotTime', @@ -26,6 +27,10 @@ const initialState: BlockProductionWonSlotsState = { export const blockProductionWonSlotsReducer = createReducer( initialState, + on(BlockProductionWonSlotsActions.init, (state, { activeSlotRoute }) => ({ + ...state, + activeSlotRoute, + })), on(BlockProductionWonSlotsActions.getSlotsSuccess, (state, { slots, epoch, activeSlot }) => ({ ...state, slots, @@ -58,7 +63,8 @@ function filterSlots(slots: BlockProductionWonSlotsSlot[], filters: BlockProduct return slots.filter(slot => { if ( (filters.accepted && 
slot.status === BlockProductionWonSlotsStatus.Canonical) - || (filters.rejected && (slot.status === BlockProductionWonSlotsStatus.Orphaned || slot.status === BlockProductionWonSlotsStatus.Discarded)) + || (filters.orphaned && slot.status === BlockProductionWonSlotsStatus.Orphaned) + || (filters.discarded && slot.status === BlockProductionWonSlotsStatus.Discarded) || slot.active || slot.status === BlockProductionWonSlotsStatus.Committed ) { diff --git a/frontend/src/app/features/block-production/won-slots/block-production-won-slots.service.ts b/frontend/src/app/features/block-production/won-slots/block-production-won-slots.service.ts index c62669d006..99fbd1506c 100644 --- a/frontend/src/app/features/block-production/won-slots/block-production-won-slots.service.ts +++ b/frontend/src/app/features/block-production/won-slots/block-production-won-slots.service.ts @@ -24,6 +24,9 @@ export class BlockProductionWonSlotsService { return this.rust.get('/stats/block_producer') .pipe( map((response: WonSlotResponse) => { + if (!response) { + throw new Error('Empty response from /stats/block_producer'); + } const attemptsSlots = response.attempts.map((attempt: Attempt) => { attempt.won_slot.slot_time = Math.floor(attempt.won_slot.slot_time / ONE_MILLION); // converted to milliseconds attempt.active = BlockProductionWonSlotsService.getActive(attempt); @@ -122,8 +125,10 @@ export class BlockProductionWonSlotsService { return 'Production Scheduled'; } else if (attempt.status === BlockProductionWonSlotsStatus.Canonical) { return 'Produced Block'; - } else if (attempt.status === BlockProductionWonSlotsStatus.Orphaned || attempt.status == BlockProductionWonSlotsStatus.Discarded) { - return 'Dropped Block'; + } else if (attempt.status === BlockProductionWonSlotsStatus.Orphaned) { + return BlockProductionWonSlotsStatus.Orphaned + ' Block'; + } else if (attempt.status === BlockProductionWonSlotsStatus.Discarded) { + return BlockProductionWonSlotsStatus.Discarded + ' Block'; } else if 
(attempt.status === BlockProductionWonSlotsStatus.Committed) { return 'Waiting for Confirmation'; } diff --git a/frontend/src/app/features/block-production/won-slots/block-production-won-slots.state.ts b/frontend/src/app/features/block-production/won-slots/block-production-won-slots.state.ts index 3613c06daf..e45d0147ea 100644 --- a/frontend/src/app/features/block-production/won-slots/block-production-won-slots.state.ts +++ b/frontend/src/app/features/block-production/won-slots/block-production-won-slots.state.ts @@ -17,6 +17,7 @@ export interface BlockProductionWonSlotsState { slots: BlockProductionWonSlotsSlot[]; filteredSlots: BlockProductionWonSlotsSlot[]; activeSlot: BlockProductionWonSlotsSlot; + activeSlotRoute: string; filters: BlockProductionWonSlotsFilters; sort: TableSort; } diff --git a/frontend/src/app/features/block-production/won-slots/filters/block-production-won-slots-filters.component.html b/frontend/src/app/features/block-production/won-slots/filters/block-production-won-slots-filters.component.html index 79faf68b35..0740b702c7 100644 --- a/frontend/src/app/features/block-production/won-slots/filters/block-production-won-slots-filters.component.html +++ b/frontend/src/app/features/block-production/won-slots/filters/block-production-won-slots-filters.component.html @@ -16,10 +16,19 @@
+ [ngClass]="(filters.orphaned && totalOrphaned !== 0) ? 'bg-aware-container aware-primary' : 'bg-container tertiary'" + (click)="changeFilter('orphaned', !filters.orphaned)"> {{ totalOrphaned }} - Dropped + Orphaned +
+ +
+ {{ totalDiscarded }} + Discarded
{ this.totalWonSlots = slots.length; this.totalCanonical = slots.filter(s => s.status === BlockProductionWonSlotsStatus.Canonical).length; - this.totalOrphaned = slots.filter(s => s.status === BlockProductionWonSlotsStatus.Orphaned || s.status == BlockProductionWonSlotsStatus.Discarded).length; + this.totalOrphaned = slots.filter(s => s.status === BlockProductionWonSlotsStatus.Orphaned).length; this.totalFuture = slots.filter(s => !s.status || s.status === BlockProductionWonSlotsStatus.Scheduled).length; + this.totalDiscarded = slots.filter(s => s.status === BlockProductionWonSlotsStatus.Discarded).length; this.detect(); }); } diff --git a/frontend/src/app/features/block-production/won-slots/table/block-production-won-slots-table.component.scss b/frontend/src/app/features/block-production/won-slots/table/block-production-won-slots-table.component.scss index a7b778f714..25d6f79317 100644 --- a/frontend/src/app/features/block-production/won-slots/table/block-production-won-slots-table.component.scss +++ b/frontend/src/app/features/block-production/won-slots/table/block-production-won-slots-table.component.scss @@ -15,8 +15,10 @@ color: $success-primary !important; } - .mina-table .row.active > span.Dropped, - .Dropped { + .mina-table .row.active > span.Orphaned, + .mina-table .row.active > span.Discarded, + .Orphaned, + .Discarded { color: $aware-primary !important; } diff --git a/frontend/src/app/features/dashboard/dashboard-errors/dashboard-errors.component.html b/frontend/src/app/features/dashboard/dashboard-errors/dashboard-errors.component.html index e09294972c..86db8302e7 100644 --- a/frontend/src/app/features/dashboard/dashboard-errors/dashboard-errors.component.html +++ b/frontend/src/app/features/dashboard/dashboard-errors/dashboard-errors.component.html @@ -5,7 +5,7 @@ unfold_{{ open ? 'less' : 'more' }}
+ [style.height.px]="resyncs.length * 36">
diff --git a/frontend/src/app/features/dashboard/dashboard-errors/dashboard-errors.component.ts b/frontend/src/app/features/dashboard/dashboard-errors/dashboard-errors.component.ts index 28ece4f6e7..2f92a5361d 100644 --- a/frontend/src/app/features/dashboard/dashboard-errors/dashboard-errors.component.ts +++ b/frontend/src/app/features/dashboard/dashboard-errors/dashboard-errors.component.ts @@ -44,13 +44,11 @@ export class DashboardErrorsComponent extends StoreDispatcher implements OnInit } private mapResyncs(resyncs: NodesOverviewResync[]): void { - this.resyncs = resyncs.reverse().map(resync => { - return { - ...resync, - description: resync.description ?? descriptionMap[resync.kind], - timeAgo: this.calculateProgressTime(resync.time), - } as NodesOverviewResyncUI; - }); + this.resyncs = resyncs.slice().reverse().map(resync => ({ + ...resync, + description: resync.description ?? descriptionMap[resync.kind], + timeAgo: this.calculateProgressTime(resync.time), + } as NodesOverviewResyncUI)); } private calculateProgressTime(timestamp: number): string { diff --git a/frontend/src/app/features/dashboard/dashboard.actions.ts b/frontend/src/app/features/dashboard/dashboard.actions.ts index 43d4771474..0ca88de140 100644 --- a/frontend/src/app/features/dashboard/dashboard.actions.ts +++ b/frontend/src/app/features/dashboard/dashboard.actions.ts @@ -31,6 +31,8 @@ export class DashboardInit implements DashboardAction { export class DashboardGetData implements DashboardAction { readonly type = DASHBOARD_GET_DATA; + + constructor(public payload?: { force: boolean }) {} } export class DashboardGetDataSuccess implements DashboardAction { diff --git a/frontend/src/app/features/dashboard/dashboard.component.ts b/frontend/src/app/features/dashboard/dashboard.component.ts index 044a366206..cc2dbaaf7e 100644 --- a/frontend/src/app/features/dashboard/dashboard.component.ts +++ b/frontend/src/app/features/dashboard/dashboard.component.ts @@ -1,8 +1,9 @@ import { 
ChangeDetectionStrategy, Component, OnInit } from '@angular/core'; import { StoreDispatcher } from '@shared/base-classes/store-dispatcher.class'; import { DashboardGetData, DashboardInit } from '@dashboard/dashboard.actions'; -import { tap, timer } from 'rxjs'; +import { filter, skip, tap, timer } from 'rxjs'; import { untilDestroyed } from '@ngneat/until-destroy'; +import { AppSelectors } from '@app/app.state'; @Component({ selector: 'mina-dashboard', @@ -14,12 +15,19 @@ import { untilDestroyed } from '@ngneat/until-destroy'; export class DashboardComponent extends StoreDispatcher implements OnInit { ngOnInit(): void { + this.listenToNodeChanging(); this.dispatch(DashboardInit); - timer(2000, 2000) + timer(4000, 4000) .pipe( tap(() => this.dispatch(DashboardGetData)), untilDestroyed(this), ) .subscribe(); } + + private listenToNodeChanging(): void { + this.select(AppSelectors.activeNode, () => { + this.dispatch(DashboardGetData, { force: true }); + }, filter(Boolean), skip(1), untilDestroyed(this)); + } } diff --git a/frontend/src/app/features/dashboard/dashboard.effects.ts b/frontend/src/app/features/dashboard/dashboard.effects.ts index 9610ff96f2..193f982722 100644 --- a/frontend/src/app/features/dashboard/dashboard.effects.ts +++ b/frontend/src/app/features/dashboard/dashboard.effects.ts @@ -3,7 +3,7 @@ import { Effect } from '@openmina/shared'; import { Actions, createEffect, ofType } from '@ngrx/effects'; import { Store } from '@ngrx/store'; import { MinaState, selectMinaState } from '@app/app.setup'; -import { EMPTY, filter, forkJoin, map, switchMap, tap } from 'rxjs'; +import { catchError, combineLatest, EMPTY, filter, forkJoin, map, mergeMap, switchMap, tap } from 'rxjs'; import { catchErrorAndRepeat } from '@shared/constants/store-functions'; import { MinaErrorType } from '@shared/types/error-preview/mina-error-type.enum'; import { MinaRustBaseEffect } from '@shared/base-classes/mina-rust-base.effect'; @@ -18,9 +18,8 @@ import { } from 
'@dashboard/dashboard.actions'; import { DashboardService } from '@dashboard/dashboard.service'; import { DashboardPeer } from '@shared/types/dashboard/dashboard.peer'; -import { NodesOverviewService } from '@nodes/overview/nodes-overview.service'; import { NodesOverviewNode } from '@shared/types/nodes/dashboard/nodes-overview-node.type'; -import { DashboardPeerRpcResponses, DashboardRpcStats } from '@shared/types/dashboard/dashboard-rpc-stats.type'; +import { DashboardRpcStats } from '@shared/types/dashboard/dashboard-rpc-stats.type'; @Injectable({ providedIn: 'root', @@ -34,7 +33,6 @@ export class DashboardEffects extends MinaRustBaseEffect { constructor(private actions$: Actions, private dashboardService: DashboardService, - private nodesOverviewService: NodesOverviewService, store: Store) { super(store, selectMinaState); @@ -43,30 +41,10 @@ export class DashboardEffects extends MinaRustBaseEffect { map(() => ({ type: DASHBOARD_GET_DATA })), )); - // !!! add to loading reducer as well when uncomment - // this.getPeers$ = createEffect(() => this.actions$.pipe( - // ofType(DASHBOARD_GET_PEERS, DASHBOARD_CLOSE), - // this.latestActionState(), - // filter(() => !this.pendingRequest), - // tap(({ action }) => { - // if (action.type === DASHBOARD_GET_PEERS) { - // this.pendingRequest = true; - // } - // }), - // switchMap(({ action }) => - // action.type === DASHBOARD_CLOSE - // ? 
EMPTY - // : this.dashboardService.getPeers(), - // ), - // map((payload: DashboardPeer[]) => ({ type: DASHBOARD_GET_PEERS_SUCCESS, payload })), - // catchErrorAndRepeat(MinaErrorType.GENERIC, DASHBOARD_GET_PEERS_SUCCESS, []), - // tap(() => this.pendingRequest = false), - // )); - this.getData$ = createEffect(() => this.actions$.pipe( ofType(DASHBOARD_GET_DATA, DASHBOARD_CLOSE), this.latestActionState(), - filter(() => !this.pendingRequest), + filter(({ action }) => (action as DashboardGetData).payload?.force || !this.pendingRequest), tap(({ action }) => { if (action.type === DASHBOARD_GET_DATA) { this.pendingRequest = true; @@ -75,14 +53,22 @@ export class DashboardEffects extends MinaRustBaseEffect { switchMap(({ action, state }) => action.type === DASHBOARD_CLOSE ? EMPTY - : forkJoin([ + : combineLatest([ this.dashboardService.getPeers(), - this.nodesOverviewService.getNodeTips({ + this.dashboardService.getTips({ url: state.app.activeNode.url, name: state.app.activeNode.name, - }, '?limit=1', true), + }), this.dashboardService.getRpcCalls(), - ]) + ]).pipe( + // tap((r) => { + // console.log('RESPONSE FROM COMBINATION', r); + // }), + // catchError((err) => { + // console.log('ERROR FROM COMBINATION', err); + // return EMPTY; + // }), + ), ), map((payload: [DashboardPeer[], NodesOverviewNode[], DashboardRpcStats]) => ({ type: DASHBOARD_GET_DATA_SUCCESS, payload: { peers: payload[0], ledger: payload[1], rpcStats: payload[2] }, @@ -90,7 +76,7 @@ export class DashboardEffects extends MinaRustBaseEffect { catchErrorAndRepeat(MinaErrorType.GENERIC, DASHBOARD_GET_DATA_SUCCESS, { peers: [], ledger: [], - rpcStats: { peerResponses: [], stakingLedger: null, nextLedger: null, rootLedger: null } + rpcStats: { peerResponses: [], stakingLedger: null, nextLedger: null, rootLedger: null }, }), tap(() => this.pendingRequest = false), )); diff --git a/frontend/src/app/features/dashboard/dashboard.service.ts b/frontend/src/app/features/dashboard/dashboard.service.ts index 
743a1f3037..68a742cba2 100644 --- a/frontend/src/app/features/dashboard/dashboard.service.ts +++ b/frontend/src/app/features/dashboard/dashboard.service.ts @@ -1,14 +1,17 @@ import { Injectable } from '@angular/core'; -import { map, Observable } from 'rxjs'; +import { catchError, map, Observable, tap } from 'rxjs'; import { DashboardPeer, DashboardPeerStatus } from '@shared/types/dashboard/dashboard.peer'; import { RustService } from '@core/services/rust.service'; import { ONE_MILLION, toReadableDate } from '@openmina/shared'; import { DashboardPeerRpcResponses, DashboardRpcStats } from '@shared/types/dashboard/dashboard-rpc-stats.type'; +import { NodesOverviewNode } from '@shared/types/nodes/dashboard/nodes-overview-node.type'; +import { NodesOverviewService } from '@nodes/overview/nodes-overview.service'; @Injectable({ providedIn: 'root' }) export class DashboardService { - constructor(private rust: RustService) { } + constructor(private rust: RustService, + private nodesOverviewService: NodesOverviewService) { } getPeers(): Observable { return this.rust.get('/state/peers').pipe( @@ -27,12 +30,40 @@ export class DashboardService { requests: 0, } as DashboardPeer)), ), + // tap((peers: any) => { + // console.log('----------------PEERS----------------'); + // console.log(peers); + // console.log('----------------PEERS----------------'); + // }), + ); + } + + getTips({ url, name }: { url: string, name: string }): Observable { + return this.rust.get('/stats/sync?limit=1').pipe( + map((response: NodesOverviewNode[]) => this.nodesOverviewService.mapNodeTipsResponse(response, true, { + name, + url, + })), + catchError(() => this.nodesOverviewService.mapNodeTipsErrorResponse({ + name, + url, + })), + // tap((peers: any) => { + // console.log('----------------SYNC----------------'); + // console.log(peers); + // console.log('----------------SYNC----------------'); + // }), ); } getRpcCalls(): Observable { return this.rust.get('/state/message-progress').pipe( 
map((response: MessageProgressResponse) => this.mapMessageProgressResponse(response)), + // tap((peers: any) => { + // console.log('----------------MESSAGES----------------'); + // console.log(peers); + // console.log('----------------MESSAGES----------------'); + // }), ); } diff --git a/frontend/src/app/features/mempool/table/mempool-table.component.html b/frontend/src/app/features/mempool/table/mempool-table.component.html index efd5aa15e8..f85fd86081 100644 --- a/frontend/src/app/features/mempool/table/mempool-table.component.html +++ b/frontend/src/app/features/mempool/table/mempool-table.component.html @@ -10,10 +10,13 @@ {{ row.memo }}
+ [tooltip]="'This transaction was sent from the Benchmarks page.'" + *ngIf="row.sentFromStressingTool"> dynamic_form
-
add_circle
New node
diff --git a/frontend/src/app/layout/node-picker/node-picker.component.ts b/frontend/src/app/layout/node-picker/node-picker.component.ts index 8e955af40d..203c360fad 100644 --- a/frontend/src/app/layout/node-picker/node-picker.component.ts +++ b/frontend/src/app/layout/node-picker/node-picker.component.ts @@ -5,6 +5,9 @@ import { untilDestroyed } from '@ngneat/until-destroy'; import { StoreDispatcher } from '@shared/base-classes/store-dispatcher.class'; import { AppActions } from '@app/app.actions'; import { animate, state, style, transition, trigger } from '@angular/animations'; +import { WebNodeService } from '@core/services/web-node.service'; +import { Router } from '@angular/router'; +import { CONFIG } from '@shared/constants/config'; @Component({ selector: 'mina-node-picker', @@ -37,8 +40,11 @@ export class NodePickerComponent extends StoreDispatcher implements AfterViewIni parentInitialWidth: number = 0; @ViewChild('searchNode') searchInput: ElementRef; + readonly canAddNodes: boolean = CONFIG.globalConfig?.canAddNodes; - constructor(private elementRef: ElementRef) { super(); } + constructor(private elementRef: ElementRef, + private webNodeService: WebNodeService, + private router: Router) { super(); } ngAfterViewInit(): void { this.listenToNodeSearch(); @@ -74,6 +80,10 @@ export class NodePickerComponent extends StoreDispatcher implements AfterViewIni if (node !== this.activeNode) { this.dispatch2(AppActions.changeActiveNode({ node })); } + if (node.isWebNode) { + this.webNodeService.webNodeState = 'loading'; + this.router.navigate(['web-node']); + } } addNode(event: MouseEvent): void { diff --git a/frontend/src/app/shared/types/block-production/won-slots/block-production-won-slots-filters.type.ts b/frontend/src/app/shared/types/block-production/won-slots/block-production-won-slots-filters.type.ts index e6769e1b69..d9daa20352 100644 --- a/frontend/src/app/shared/types/block-production/won-slots/block-production-won-slots-filters.type.ts +++ 
b/frontend/src/app/shared/types/block-production/won-slots/block-production-won-slots-filters.type.ts @@ -1,6 +1,6 @@ export interface BlockProductionWonSlotsFilters { accepted: boolean; - rejected: boolean; - missed: boolean; + orphaned: boolean; upcoming: boolean; + discarded: boolean; } diff --git a/frontend/src/app/shared/types/core/environment/mina-env.type.ts b/frontend/src/app/shared/types/core/environment/mina-env.type.ts index 8dc755418b..332acf96b8 100644 --- a/frontend/src/app/shared/types/core/environment/mina-env.type.ts +++ b/frontend/src/app/shared/types/core/environment/mina-env.type.ts @@ -10,12 +10,13 @@ export interface MinaEnv { export interface MinaNode { name: string; - url: string; + url?: string; memoryProfiler?: string; debugger?: string; features?: FeaturesConfig; - minaExplorerNetwork?: 'mainnet' | 'devnet' | 'berkeley'; + minaExplorerNetwork?: 'mainnet' | 'devnet'; isCustom?: boolean; + isWebNode?: boolean; } export type FeaturesConfig = Partial<{ diff --git a/frontend/src/app/shared/types/nodes/dashboard/nodes-overview-node.type.ts b/frontend/src/app/shared/types/nodes/dashboard/nodes-overview-node.type.ts index f0e01c1fc2..0e1d376d52 100644 --- a/frontend/src/app/shared/types/nodes/dashboard/nodes-overview-node.type.ts +++ b/frontend/src/app/shared/types/nodes/dashboard/nodes-overview-node.type.ts @@ -24,5 +24,6 @@ export enum NodesOverviewNodeKindType { BOOTSTRAP = 'Bootstrap', CATCHUP = 'Catchup', SYNCED = 'Synced', + PENDING = 'Pending', OFFLINE = 'Offline', } diff --git a/frontend/src/app/shared/types/web-node-demo/web-node-demo-loading-step.type.ts b/frontend/src/app/shared/types/web-node-demo/web-node-demo-loading-step.type.ts new file mode 100644 index 0000000000..d35d52d373 --- /dev/null +++ b/frontend/src/app/shared/types/web-node-demo/web-node-demo-loading-step.type.ts @@ -0,0 +1,6 @@ +export interface WebNodeDemoLoadingStep { + name: string; + loaded: boolean; + attempt?: number; + step: number; +} diff --git 
a/frontend/src/app/shared/types/web-node-demo/web-node-demo-transaction.type.ts b/frontend/src/app/shared/types/web-node-demo/web-node-demo-transaction.type.ts new file mode 100644 index 0000000000..0e51f4e4b4 --- /dev/null +++ b/frontend/src/app/shared/types/web-node-demo/web-node-demo-transaction.type.ts @@ -0,0 +1,15 @@ +export interface WebNodeDemoTransaction { + from: string; + to: string; + amount: string | number; + fee: string | number; + memo: string; + nonce: string; + status: string; + statusText: string; + priv_key: string; + hash?: string; + height?: number; + includedTime?: string; + time: number; +} diff --git a/frontend/src/app/shared/types/web-node-demo/web-node-demo-wallet.type.ts b/frontend/src/app/shared/types/web-node-demo/web-node-demo-wallet.type.ts new file mode 100644 index 0000000000..f98f8f36c1 --- /dev/null +++ b/frontend/src/app/shared/types/web-node-demo/web-node-demo-wallet.type.ts @@ -0,0 +1,6 @@ +export interface WebNodeDemoWallet { + publicKey: string; + privateKey: string; + balance: string; + nonce: string; +} diff --git a/frontend/src/app/shared/types/web-node-demo/web-node-demo-web-node.type.ts b/frontend/src/app/shared/types/web-node-demo/web-node-demo-web-node.type.ts new file mode 100644 index 0000000000..0a9edf8cf8 --- /dev/null +++ b/frontend/src/app/shared/types/web-node-demo/web-node-demo-web-node.type.ts @@ -0,0 +1,6 @@ +export interface WebNodeDemoWebNode { + network: string; + height: number; + timestamp: number; + hash: string; +} diff --git a/frontend/src/assets/webnode/pkg/openmina_node_web.d.ts b/frontend/src/assets/webnode/pkg/openmina_node_web.d.ts new file mode 100644 index 0000000000..f214a967d8 --- /dev/null +++ b/frontend/src/assets/webnode/pkg/openmina_node_web.d.ts @@ -0,0 +1,115 @@ +/* tslint:disable */ +/* eslint-disable */ +/** +* Automatically run after wasm is loaded. 
+*/ +export function main(): void; +/** +* @returns {Promise} +*/ +export function run(): Promise; +/** +* Entry point for web workers +* @param {number} ptr +*/ +export function wasm_thread_entry_point(ptr: number): void; +/** +*/ +export class RpcSender { + free(): void; +/** +* @returns {State} +*/ + state(): State; +/** +* @returns {Stats} +*/ + stats(): Stats; +/** +* @returns {Promise} +*/ + status(): Promise; +} +/** +*/ +export class State { + free(): void; +/** +* @returns {Promise} +*/ + peers(): Promise; +/** +* @returns {Promise} +*/ + message_progress(): Promise; +} +/** +*/ +export class Stats { + free(): void; +/** +* @param {number | undefined} [limit] +* @returns {Promise} +*/ + sync(limit?: number): Promise; +/** +* @returns {Promise} +*/ + block_producer(): Promise; +} + +export type InitInput = RequestInfo | URL | Response | BufferSource | WebAssembly.Module; + +export interface InitOutput { + readonly main: () => void; + readonly run: () => number; + readonly stats_sync: (a: number, b: number, c: number) => number; + readonly stats_block_producer: (a: number) => number; + readonly __wbg_state_free: (a: number, b: number) => void; + readonly state_peers: (a: number) => number; + readonly state_message_progress: (a: number) => number; + readonly __wbg_stats_free: (a: number, b: number) => void; + readonly __wbg_rpcsender_free: (a: number, b: number) => void; + readonly rpcsender_state: (a: number) => number; + readonly rpcsender_status: (a: number) => number; + readonly rpcsender_stats: (a: number) => number; + readonly wasm_thread_entry_point: (a: number) => void; + readonly memory: WebAssembly.Memory; + readonly __wbindgen_malloc: (a: number, b: number) => number; + readonly __wbindgen_realloc: (a: number, b: number, c: number, d: number) => number; + readonly __wbindgen_export_3: WebAssembly.Table; + readonly _dyn_core__ops__function__FnMut_____Output___R_as_wasm_bindgen__closure__WasmClosure___describe__invoke__hfc3cd25b53215e84: (a: number, 
b: number) => void; + readonly _dyn_core__ops__function__FnMut__A____Output___R_as_wasm_bindgen__closure__WasmClosure___describe__invoke__h45df3e6947fcd651: (a: number, b: number, c: number) => void; + readonly _dyn_core__ops__function__FnMut_____Output___R_as_wasm_bindgen__closure__WasmClosure___describe__invoke__hd4a56f8647b4502b: (a: number, b: number) => void; + readonly _dyn_core__ops__function__FnMut_____Output___R_as_wasm_bindgen__closure__WasmClosure___describe__invoke__h92f8e55d458a8e39: (a: number, b: number) => void; + readonly _dyn_core__ops__function__FnMut__A____Output___R_as_wasm_bindgen__closure__WasmClosure___describe__invoke__h90985823836ef819: (a: number, b: number, c: number) => void; + readonly _dyn_core__ops__function__FnMut___A____Output___R_as_wasm_bindgen__closure__WasmClosure___describe__invoke__hff3e249b51682479: (a: number, b: number, c: number) => void; + readonly __wbindgen_free: (a: number, b: number, c: number) => void; + readonly __wbindgen_exn_store: (a: number) => void; + readonly wasm_bindgen__convert__closures__invoke2_mut__h808c9af25a9c04f4: (a: number, b: number, c: number, d: number) => void; + readonly __wbindgen_thread_destroy: (a?: number, b?: number, c?: number) => void; + readonly __wbindgen_start: (a: number) => void; +} + +export type SyncInitInput = BufferSource | WebAssembly.Module; +/** +* Instantiates the given `module`, which can either be bytes or +* a precompiled `WebAssembly.Module`. +* +* @param {{ module: SyncInitInput, memory?: WebAssembly.Memory, thread_stack_size?: number }} module - Passing `SyncInitInput` directly is deprecated. +* @param {WebAssembly.Memory} memory - Deprecated. 
+* +* @returns {InitOutput} +*/ +export function initSync(module: { module: SyncInitInput, memory?: WebAssembly.Memory, thread_stack_size?: number } | SyncInitInput, memory?: WebAssembly.Memory): InitOutput; + +/** +* If `module_or_path` is {RequestInfo} or {URL}, makes a request and +* for everything else, calls `WebAssembly.instantiate` directly. +* +* @param {{ module_or_path: InitInput | Promise, memory?: WebAssembly.Memory, thread_stack_size?: number }} module_or_path - Passing `InitInput` directly is deprecated. +* @param {WebAssembly.Memory} memory - Deprecated. +* +* @returns {Promise} +*/ +export default function __wbg_init (module_or_path?: { module_or_path: InitInput | Promise, memory?: WebAssembly.Memory, thread_stack_size?: number } | InitInput | Promise, memory?: WebAssembly.Memory): Promise; diff --git a/frontend/src/assets/webnode/pkg/openmina_node_web.js b/frontend/src/assets/webnode/pkg/openmina_node_web.js new file mode 100644 index 0000000000..b4bc44213e --- /dev/null +++ b/frontend/src/assets/webnode/pkg/openmina_node_web.js @@ -0,0 +1,1107 @@ +import * as __wbg_star0 from './snippets/p2p-5e5eda2a182dce1e/src/service_impl/webrtc/web.js'; +import * as __wbg_star1 from './snippets/wasm_thread-8ee53d0673203880/src/wasm32/js/module_workers_polyfill.min.js'; + +let wasm; + +const heap = new Array(128).fill(undefined); + +heap.push(undefined, null, true, false); + +function getObject(idx) { return heap[idx]; } + +let heap_next = heap.length; + +function dropObject(idx) { + if (idx < 132) return; + heap[idx] = heap_next; + heap_next = idx; +} + +function takeObject(idx) { + const ret = getObject(idx); + dropObject(idx); + return ret; +} + +const cachedTextDecoder = (typeof TextDecoder !== 'undefined' ? 
new TextDecoder('utf-8', { ignoreBOM: true, fatal: true }) : { decode: () => { throw Error('TextDecoder not available') } } ); + +if (typeof TextDecoder !== 'undefined') { cachedTextDecoder.decode(); }; + +let cachedUint8ArrayMemory0 = null; + +function getUint8ArrayMemory0() { + if (cachedUint8ArrayMemory0 === null || cachedUint8ArrayMemory0.buffer !== wasm.memory.buffer) { + cachedUint8ArrayMemory0 = new Uint8Array(wasm.memory.buffer); + } + return cachedUint8ArrayMemory0; +} + +function getStringFromWasm0(ptr, len) { + ptr = ptr >>> 0; + return cachedTextDecoder.decode(getUint8ArrayMemory0().slice(ptr, ptr + len)); +} + +function addHeapObject(obj) { + if (heap_next === heap.length) heap.push(heap.length + 1); + const idx = heap_next; + heap_next = heap[idx]; + + heap[idx] = obj; + return idx; +} + +function isLikeNone(x) { + return x === undefined || x === null; +} + +let cachedDataViewMemory0 = null; + +function getDataViewMemory0() { + if (cachedDataViewMemory0 === null || cachedDataViewMemory0.buffer !== wasm.memory.buffer) { + cachedDataViewMemory0 = new DataView(wasm.memory.buffer); + } + return cachedDataViewMemory0; +} + +let WASM_VECTOR_LEN = 0; + +const cachedTextEncoder = (typeof TextEncoder !== 'undefined' ? 
new TextEncoder('utf-8') : { encode: () => { throw Error('TextEncoder not available') } } ); + +const encodeString = function (arg, view) { + const buf = cachedTextEncoder.encode(arg); + view.set(buf); + return { + read: arg.length, + written: buf.length + }; +}; + +function passStringToWasm0(arg, malloc, realloc) { + + if (realloc === undefined) { + const buf = cachedTextEncoder.encode(arg); + const ptr = malloc(buf.length, 1) >>> 0; + getUint8ArrayMemory0().subarray(ptr, ptr + buf.length).set(buf); + WASM_VECTOR_LEN = buf.length; + return ptr; + } + + let len = arg.length; + let ptr = malloc(len, 1) >>> 0; + + const mem = getUint8ArrayMemory0(); + + let offset = 0; + + for (; offset < len; offset++) { + const code = arg.charCodeAt(offset); + if (code > 0x7F) break; + mem[ptr + offset] = code; + } + + if (offset !== len) { + if (offset !== 0) { + arg = arg.slice(offset); + } + ptr = realloc(ptr, len, len = offset + arg.length * 3, 1) >>> 0; + const view = getUint8ArrayMemory0().subarray(ptr + offset, ptr + len); + const ret = encodeString(arg, view); + + offset += ret.written; + ptr = realloc(ptr, len, offset, 1) >>> 0; + } + + WASM_VECTOR_LEN = offset; + return ptr; +} + +function debugString(val) { + // primitive types + const type = typeof val; + if (type == 'number' || type == 'boolean' || val == null) { + return `${val}`; + } + if (type == 'string') { + return `"${val}"`; + } + if (type == 'symbol') { + const description = val.description; + if (description == null) { + return 'Symbol'; + } else { + return `Symbol(${description})`; + } + } + if (type == 'function') { + const name = val.name; + if (typeof name == 'string' && name.length > 0) { + return `Function(${name})`; + } else { + return 'Function'; + } + } + // objects + if (Array.isArray(val)) { + const length = val.length; + let debug = '['; + if (length > 0) { + debug += debugString(val[0]); + } + for(let i = 1; i < length; i++) { + debug += ', ' + debugString(val[i]); + } + debug += ']'; + return 
debug; + } + // Test for built-in + const builtInMatches = /\[object ([^\]]+)\]/.exec(toString.call(val)); + let className; + if (builtInMatches.length > 1) { + className = builtInMatches[1]; + } else { + // Failed to match the standard '[object ClassName]' + return toString.call(val); + } + if (className == 'Object') { + // we're a user defined class or Object + // JSON.stringify avoids problems with cycles, and is generally much + // easier than looping through ownProperties of `val`. + try { + return 'Object(' + JSON.stringify(val) + ')'; + } catch (_) { + return 'Object'; + } + } + // errors + if (val instanceof Error) { + return `${val.name}: ${val.message}\n${val.stack}`; + } + // TODO we could test for more things here, like `Set`s and `Map`s. + return className; +} + +const CLOSURE_DTORS = (typeof FinalizationRegistry === 'undefined') + ? { register: () => {}, unregister: () => {} } + : new FinalizationRegistry(state => { + wasm.__wbindgen_export_3.get(state.dtor)(state.a, state.b) +}); + +function makeMutClosure(arg0, arg1, dtor, f) { + const state = { a: arg0, b: arg1, cnt: 1, dtor }; + const real = (...args) => { + // First up with a closure we increment the internal reference + // count. This ensures that the Rust closure environment won't + // be deallocated while we're invoking it. 
+ state.cnt++; + const a = state.a; + state.a = 0; + try { + return f(a, state.b, ...args); + } finally { + if (--state.cnt === 0) { + wasm.__wbindgen_export_3.get(state.dtor)(a, state.b); + CLOSURE_DTORS.unregister(state); + } else { + state.a = a; + } + } + }; + real.original = state; + CLOSURE_DTORS.register(real, state, state); + return real; +} +function __wbg_adapter_34(arg0, arg1) { + wasm._dyn_core__ops__function__FnMut_____Output___R_as_wasm_bindgen__closure__WasmClosure___describe__invoke__hfc3cd25b53215e84(arg0, arg1); +} + +function __wbg_adapter_37(arg0, arg1, arg2) { + wasm._dyn_core__ops__function__FnMut__A____Output___R_as_wasm_bindgen__closure__WasmClosure___describe__invoke__h45df3e6947fcd651(arg0, arg1, addHeapObject(arg2)); +} + +function __wbg_adapter_42(arg0, arg1) { + wasm._dyn_core__ops__function__FnMut_____Output___R_as_wasm_bindgen__closure__WasmClosure___describe__invoke__hd4a56f8647b4502b(arg0, arg1); +} + +function __wbg_adapter_45(arg0, arg1) { + wasm._dyn_core__ops__function__FnMut_____Output___R_as_wasm_bindgen__closure__WasmClosure___describe__invoke__h92f8e55d458a8e39(arg0, arg1); +} + +function __wbg_adapter_48(arg0, arg1, arg2) { + wasm._dyn_core__ops__function__FnMut__A____Output___R_as_wasm_bindgen__closure__WasmClosure___describe__invoke__h90985823836ef819(arg0, arg1, addHeapObject(arg2)); +} + +let stack_pointer = 128; + +function addBorrowedObject(obj) { + if (stack_pointer == 1) throw new Error('out of js stack'); + heap[--stack_pointer] = obj; + return stack_pointer; +} +function __wbg_adapter_53(arg0, arg1, arg2) { + try { + wasm._dyn_core__ops__function__FnMut___A____Output___R_as_wasm_bindgen__closure__WasmClosure___describe__invoke__hff3e249b51682479(arg0, arg1, addBorrowedObject(arg2)); + } finally { + heap[stack_pointer++] = undefined; + } +} + +/** +* Automatically run after wasm is loaded. 
+*/ +export function main() { + wasm.main(); +} + +/** +* @returns {Promise} +*/ +export function run() { + const ret = wasm.run(); + return takeObject(ret); +} + +function handleError(f, args) { + try { + return f.apply(this, args); + } catch (e) { + wasm.__wbindgen_exn_store(addHeapObject(e)); + } +} +/** +* Entry point for web workers +* @param {number} ptr +*/ +export function wasm_thread_entry_point(ptr) { + wasm.wasm_thread_entry_point(ptr); +} + +function __wbg_adapter_218(arg0, arg1, arg2, arg3) { + wasm.wasm_bindgen__convert__closures__invoke2_mut__h808c9af25a9c04f4(arg0, arg1, addHeapObject(arg2), addHeapObject(arg3)); +} + +function notDefined(what) { return () => { throw new Error(`${what} is not defined`); }; } + +const RpcSenderFinalization = (typeof FinalizationRegistry === 'undefined') + ? { register: () => {}, unregister: () => {} } + : new FinalizationRegistry(ptr => wasm.__wbg_rpcsender_free(ptr >>> 0, 1)); +/** +*/ +export class RpcSender { + + static __wrap(ptr) { + ptr = ptr >>> 0; + const obj = Object.create(RpcSender.prototype); + obj.__wbg_ptr = ptr; + RpcSenderFinalization.register(obj, obj.__wbg_ptr, obj); + return obj; + } + + __destroy_into_raw() { + const ptr = this.__wbg_ptr; + this.__wbg_ptr = 0; + RpcSenderFinalization.unregister(this); + return ptr; + } + + free() { + const ptr = this.__destroy_into_raw(); + wasm.__wbg_rpcsender_free(ptr, 0); + } + /** + * @returns {State} + */ + state() { + const ret = wasm.rpcsender_state(this.__wbg_ptr); + return State.__wrap(ret); + } + /** + * @returns {Stats} + */ + stats() { + const ret = wasm.rpcsender_stats(this.__wbg_ptr); + return Stats.__wrap(ret); + } + /** + * @returns {Promise} + */ + status() { + const ret = wasm.rpcsender_status(this.__wbg_ptr); + return takeObject(ret); + } +} + +const StateFinalization = (typeof FinalizationRegistry === 'undefined') + ?
{ register: () => {}, unregister: () => {} } + : new FinalizationRegistry(ptr => wasm.__wbg_state_free(ptr >>> 0, 1)); +/** +*/ +export class State { + + static __wrap(ptr) { + ptr = ptr >>> 0; + const obj = Object.create(State.prototype); + obj.__wbg_ptr = ptr; + StateFinalization.register(obj, obj.__wbg_ptr, obj); + return obj; + } + + __destroy_into_raw() { + const ptr = this.__wbg_ptr; + this.__wbg_ptr = 0; + StateFinalization.unregister(this); + return ptr; + } + + free() { + const ptr = this.__destroy_into_raw(); + wasm.__wbg_state_free(ptr, 0); + } + /** + * @returns {Promise} + */ + peers() { + const ret = wasm.state_peers(this.__wbg_ptr); + return takeObject(ret); + } + /** + * @returns {Promise} + */ + message_progress() { + const ret = wasm.state_message_progress(this.__wbg_ptr); + return takeObject(ret); + } +} + +const StatsFinalization = (typeof FinalizationRegistry === 'undefined') + ? { register: () => {}, unregister: () => {} } + : new FinalizationRegistry(ptr => wasm.__wbg_stats_free(ptr >>> 0, 1)); +/** +*/ +export class Stats { + + static __wrap(ptr) { + ptr = ptr >>> 0; + const obj = Object.create(Stats.prototype); + obj.__wbg_ptr = ptr; + StatsFinalization.register(obj, obj.__wbg_ptr, obj); + return obj; + } + + __destroy_into_raw() { + const ptr = this.__wbg_ptr; + this.__wbg_ptr = 0; + StatsFinalization.unregister(this); + return ptr; + } + + free() { + const ptr = this.__destroy_into_raw(); + wasm.__wbg_stats_free(ptr, 0); + } + /** + * @param {number | undefined} [limit] + * @returns {Promise} + */ + sync(limit) { + const ret = wasm.stats_sync(this.__wbg_ptr, !isLikeNone(limit), isLikeNone(limit) ? 
0 : limit); + return takeObject(ret); + } + /** + * @returns {Promise} + */ + block_producer() { + const ret = wasm.stats_block_producer(this.__wbg_ptr); + return takeObject(ret); + } +} + +async function __wbg_load(module, imports) { + if (typeof Response === 'function' && module instanceof Response) { + if (typeof WebAssembly.instantiateStreaming === 'function') { + try { + return await WebAssembly.instantiateStreaming(module, imports); + + } catch (e) { + if (module.headers.get('Content-Type') != 'application/wasm') { + console.warn("`WebAssembly.instantiateStreaming` failed because your server does not serve wasm with `application/wasm` MIME type. Falling back to `WebAssembly.instantiate` which is slower. Original error:\n", e); + + } else { + throw e; + } + } + } + + const bytes = await module.arrayBuffer(); + return await WebAssembly.instantiate(bytes, imports); + + } else { + const instance = await WebAssembly.instantiate(module, imports); + + if (instance instanceof WebAssembly.Instance) { + return { instance, module }; + + } else { + return instance; + } + } +} + +function __wbg_get_imports() { + const imports = {}; + imports.wbg = {}; + imports.wbg.__wbindgen_object_drop_ref = function(arg0) { + takeObject(arg0); + }; + imports.wbg.__wbindgen_cb_drop = function(arg0) { + const obj = takeObject(arg0).original; + if (obj.cnt-- == 1) { + obj.a = 0; + return true; + } + const ret = false; + return ret; + }; + imports.wbg.__wbindgen_string_new = function(arg0, arg1) { + const ret = getStringFromWasm0(arg0, arg1); + return addHeapObject(ret); + }; + imports.wbg.__wbindgen_is_undefined = function(arg0) { + const ret = getObject(arg0) === undefined; + return ret; + }; + imports.wbg.__wbg_new_abda76e883ba8a5f = function() { + const ret = new Error(); + return addHeapObject(ret); + }; + imports.wbg.__wbg_stack_658279fe44541cf6 = function(arg0, arg1) { + const ret = getObject(arg1).stack; + const ptr1 = passStringToWasm0(ret, wasm.__wbindgen_malloc, 
wasm.__wbindgen_realloc); + const len1 = WASM_VECTOR_LEN; + getDataViewMemory0().setInt32(arg0 + 4 * 1, len1, true); + getDataViewMemory0().setInt32(arg0 + 4 * 0, ptr1, true); + }; + imports.wbg.__wbg_error_f851667af71bcfc6 = function(arg0, arg1) { + let deferred0_0; + let deferred0_1; + try { + deferred0_0 = arg0; + deferred0_1 = arg1; + console.error(getStringFromWasm0(arg0, arg1)); + } finally { + wasm.__wbindgen_free(deferred0_0, deferred0_1, 1); + } + }; + imports.wbg.__wbg_rpcsender_new = function(arg0) { + const ret = RpcSender.__wrap(arg0); + return addHeapObject(ret); + }; + imports.wbg.__wbindgen_object_clone_ref = function(arg0) { + const ret = getObject(arg0); + return addHeapObject(ret); + }; + imports.wbg.__wbg_clearTimeout_541ac0980ffcef74 = function(arg0) { + const ret = clearTimeout(takeObject(arg0)); + return addHeapObject(ret); + }; + imports.wbg.__wbg_setTimeout_7d81d052875b0f4f = function() { return handleError(function (arg0, arg1) { + const ret = setTimeout(getObject(arg0), arg1); + return addHeapObject(ret); + }, arguments) }; + imports.wbg.__wbg_waitAsync_cd62c81646382b45 = function() { + const ret = Atomics.waitAsync; + return addHeapObject(ret); + }; + imports.wbg.__wbg_waitAsync_3ed212d5e9450545 = function(arg0, arg1, arg2) { + const ret = Atomics.waitAsync(getObject(arg0), arg1 >>> 0, arg2); + return addHeapObject(ret); + }; + imports.wbg.__wbg_async_49a1efd7e3e4bd73 = function(arg0) { + const ret = getObject(arg0).async; + return ret; + }; + imports.wbg.__wbg_value_96cb463707ad2f31 = function(arg0) { + const ret = getObject(arg0).value; + return addHeapObject(ret); + }; + imports.wbg.__wbindgen_link_8b58b27602368eaa = function(arg0) { + const val = `onmessage = function (ev) { + let [ia, index, value] = ev.data; + ia = new Int32Array(ia.buffer); + let result = Atomics.wait(ia, index, value); + postMessage(result); + }; + `; + const ret = typeof URL.createObjectURL === 'undefined' ? 
"data:application/javascript," + encodeURIComponent(val) : URL.createObjectURL(new Blob([val], { type: "text/javascript" })); + const ptr1 = passStringToWasm0(ret, wasm.__wbindgen_malloc, wasm.__wbindgen_realloc); + const len1 = WASM_VECTOR_LEN; + getDataViewMemory0().setInt32(arg0 + 4 * 1, len1, true); + getDataViewMemory0().setInt32(arg0 + 4 * 0, ptr1, true); + }; + imports.wbg.__wbindgen_number_new = function(arg0) { + const ret = arg0; + return addHeapObject(ret); + }; + imports.wbg.__wbg_queueMicrotask_12a30234db4045d3 = function(arg0) { + queueMicrotask(getObject(arg0)); + }; + imports.wbg.__wbg_queueMicrotask_48421b3cc9052b68 = function(arg0) { + const ret = getObject(arg0).queueMicrotask; + return addHeapObject(ret); + }; + imports.wbg.__wbindgen_is_function = function(arg0) { + const ret = typeof(getObject(arg0)) === 'function'; + return ret; + }; + imports.wbg.__wbg_crypto_1d1f22824a6a080c = function(arg0) { + const ret = getObject(arg0).crypto; + return addHeapObject(ret); + }; + imports.wbg.__wbindgen_is_object = function(arg0) { + const val = getObject(arg0); + const ret = typeof(val) === 'object' && val !== null; + return ret; + }; + imports.wbg.__wbg_process_4a72847cc503995b = function(arg0) { + const ret = getObject(arg0).process; + return addHeapObject(ret); + }; + imports.wbg.__wbg_versions_f686565e586dd935 = function(arg0) { + const ret = getObject(arg0).versions; + return addHeapObject(ret); + }; + imports.wbg.__wbg_node_104a2ff8d6ea03a2 = function(arg0) { + const ret = getObject(arg0).node; + return addHeapObject(ret); + }; + imports.wbg.__wbindgen_is_string = function(arg0) { + const ret = typeof(getObject(arg0)) === 'string'; + return ret; + }; + imports.wbg.__wbg_require_cca90b1a94a0255b = function() { return handleError(function () { + const ret = module.require; + return addHeapObject(ret); + }, arguments) }; + imports.wbg.__wbg_msCrypto_eb05e62b530a1508 = function(arg0) { + const ret = getObject(arg0).msCrypto; + return 
addHeapObject(ret); + }; + imports.wbg.__wbg_randomFillSync_5c9c955aa56b6049 = function() { return handleError(function (arg0, arg1) { + getObject(arg0).randomFillSync(takeObject(arg1)); + }, arguments) }; + imports.wbg.__wbg_getRandomValues_3aa56aa6edec874c = function() { return handleError(function (arg0, arg1) { + getObject(arg0).getRandomValues(getObject(arg1)); + }, arguments) }; + imports.wbg.__wbindgen_number_get = function(arg0, arg1) { + const obj = getObject(arg1); + const ret = typeof(obj) === 'number' ? obj : undefined; + getDataViewMemory0().setFloat64(arg0 + 8 * 1, isLikeNone(ret) ? 0 : ret, true); + getDataViewMemory0().setInt32(arg0 + 4 * 0, !isLikeNone(ret), true); + }; + imports.wbg.__wbindgen_string_get = function(arg0, arg1) { + const obj = getObject(arg1); + const ret = typeof(obj) === 'string' ? obj : undefined; + var ptr1 = isLikeNone(ret) ? 0 : passStringToWasm0(ret, wasm.__wbindgen_malloc, wasm.__wbindgen_realloc); + var len1 = WASM_VECTOR_LEN; + getDataViewMemory0().setInt32(arg0 + 4 * 1, len1, true); + getDataViewMemory0().setInt32(arg0 + 4 * 0, ptr1, true); + }; + imports.wbg.__wbg_instanceof_Window_9029196b662bc42a = function(arg0) { + let result; + try { + result = getObject(arg0) instanceof Window; + } catch (_) { + result = false; + } + const ret = result; + return ret; + }; + imports.wbg.__wbg_navigator_7c9103698acde322 = function(arg0) { + const ret = getObject(arg0).navigator; + return addHeapObject(ret); + }; + imports.wbg.__wbg_fetch_336b6f0cb426b46e = function(arg0, arg1) { + const ret = getObject(arg0).fetch(getObject(arg1)); + return addHeapObject(ret); + }; + imports.wbg.__wbg_instanceof_WorkerGlobalScope_d9d741da0fb130ce = function(arg0) { + let result; + try { + result = getObject(arg0) instanceof WorkerGlobalScope; + } catch (_) { + result = false; + } + const ret = result; + return ret; + }; + imports.wbg.__wbg_navigator_41bd88b80ed4685e = function(arg0) { + const ret = getObject(arg0).navigator; + return 
addHeapObject(ret); + }; + imports.wbg.__wbg_set_b34caba58723c454 = function() { return handleError(function (arg0, arg1, arg2, arg3, arg4) { + getObject(arg0).set(getStringFromWasm0(arg1, arg2), getStringFromWasm0(arg3, arg4)); + }, arguments) }; + imports.wbg.__wbg_setonmessage_f0bd0280573b7084 = function(arg0, arg1) { + getObject(arg0).onmessage = getObject(arg1); + }; + imports.wbg.__wbg_new_8e7322f46d5d019c = function() { return handleError(function (arg0, arg1) { + const ret = new Worker(getStringFromWasm0(arg0, arg1)); + return addHeapObject(ret); + }, arguments) }; + imports.wbg.__wbg_newwithoptions_1bd20b45061ed935 = function() { return handleError(function (arg0, arg1, arg2) { + const ret = new Worker(getStringFromWasm0(arg0, arg1), getObject(arg2)); + return addHeapObject(ret); + }, arguments) }; + imports.wbg.__wbg_postMessage_8c609e2bde333d9c = function() { return handleError(function (arg0, arg1) { + getObject(arg0).postMessage(getObject(arg1)); + }, arguments) }; + imports.wbg.__wbg_hardwareConcurrency_af3a0cb6b3464bd9 = function(arg0) { + const ret = getObject(arg0).hardwareConcurrency; + return ret; + }; + imports.wbg.__wbg_localDescription_60b438182ca37beb = function(arg0) { + const ret = getObject(arg0).localDescription; + return isLikeNone(ret) ? 0 : addHeapObject(ret); + }; + imports.wbg.__wbg_iceGatheringState_fea35c457cf86f05 = function(arg0) { + const ret = getObject(arg0).iceGatheringState; + return {"new":0,"gathering":1,"complete":2,}[ret] ?? 3; + }; + imports.wbg.__wbg_connectionState_915abee2c4db3016 = function(arg0) { + const ret = getObject(arg0).connectionState; + return {"closed":0,"failed":1,"disconnected":2,"new":3,"connecting":4,"connected":5,}[ret] ?? 
6; + }; + imports.wbg.__wbg_setonicegatheringstatechange_e0c5a4ab4d37ab63 = function(arg0, arg1) { + getObject(arg0).onicegatheringstatechange = getObject(arg1); + }; + imports.wbg.__wbg_setonconnectionstatechange_98879070c908305e = function(arg0, arg1) { + getObject(arg0).onconnectionstatechange = getObject(arg1); + }; + imports.wbg.__wbg_newwithconfiguration_c2620a61f13be424 = function() { return handleError(function (arg0) { + const ret = new RTCPeerConnection(getObject(arg0)); + return addHeapObject(ret); + }, arguments) }; + imports.wbg.__wbg_close_ade28656aea74a4b = function(arg0) { + getObject(arg0).close(); + }; + imports.wbg.__wbg_createAnswer_ffe6dbcf7cd5ed2a = function(arg0) { + const ret = getObject(arg0).createAnswer(); + return addHeapObject(ret); + }; + imports.wbg.__wbg_createDataChannel_69cc16b4f9cad344 = function(arg0, arg1, arg2, arg3) { + const ret = getObject(arg0).createDataChannel(getStringFromWasm0(arg1, arg2), getObject(arg3)); + return addHeapObject(ret); + }; + imports.wbg.__wbg_createOffer_aa7098f1f4c2f40b = function(arg0) { + const ret = getObject(arg0).createOffer(); + return addHeapObject(ret); + }; + imports.wbg.__wbg_setLocalDescription_4744eb2c267efbb4 = function(arg0, arg1) { + const ret = getObject(arg0).setLocalDescription(getObject(arg1)); + return addHeapObject(ret); + }; + imports.wbg.__wbg_setRemoteDescription_0c7a66e1bd51121d = function(arg0, arg1) { + const ret = getObject(arg0).setRemoteDescription(getObject(arg1)); + return addHeapObject(ret); + }; + imports.wbg.__wbg_data_ab99ae4a2e1e8bc9 = function(arg0) { + const ret = getObject(arg0).data; + return addHeapObject(ret); + }; + imports.wbg.__wbg_headers_b439dcff02e808e5 = function(arg0) { + const ret = getObject(arg0).headers; + return addHeapObject(ret); + }; + imports.wbg.__wbg_newwithstrandinit_cad5cd6038c7ff5d = function() { return handleError(function (arg0, arg1, arg2) { + const ret = new Request(getStringFromWasm0(arg0, arg1), getObject(arg2)); + return 
addHeapObject(ret); + }, arguments) }; + imports.wbg.__wbg_instanceof_DedicatedWorkerGlobalScope_936e3c64be605b1e = function(arg0) { + let result; + try { + result = getObject(arg0) instanceof DedicatedWorkerGlobalScope; + } catch (_) { + result = false; + } + const ret = result; + return ret; + }; + imports.wbg.__wbg_postMessage_2f0b8369b84c3c1e = function() { return handleError(function (arg0, arg1) { + getObject(arg0).postMessage(getObject(arg1)); + }, arguments) }; + imports.wbg.__wbg_now_0cfdc90c97d0c24b = function(arg0) { + const ret = getObject(arg0).now(); + return ret; + }; + imports.wbg.__wbg_setonopen_37846cc10560e3c0 = function(arg0, arg1) { + getObject(arg0).onopen = getObject(arg1); + }; + imports.wbg.__wbg_setonerror_de2acc8492751dad = function(arg0, arg1) { + getObject(arg0).onerror = getObject(arg1); + }; + imports.wbg.__wbg_setonclose_d39802b4195bab2f = function(arg0, arg1) { + getObject(arg0).onclose = getObject(arg1); + }; + imports.wbg.__wbg_setonmessage_463c6aefedd50235 = function(arg0, arg1) { + getObject(arg0).onmessage = getObject(arg1); + }; + imports.wbg.__wbg_close_4df98968ad72ccde = function(arg0) { + getObject(arg0).close(); + }; + imports.wbg.__wbg_send_1e6563347168b789 = function() { return handleError(function (arg0, arg1) { + getObject(arg0).send(getObject(arg1)); + }, arguments) }; + imports.wbg.__wbg_instanceof_Response_fc4327dbfcdf5ced = function(arg0) { + let result; + try { + result = getObject(arg0) instanceof Response; + } catch (_) { + result = false; + } + const ret = result; + return ret; + }; + imports.wbg.__wbg_json_2a46ed5b7c4d30d1 = function() { return handleError(function (arg0) { + const ret = getObject(arg0).json(); + return addHeapObject(ret); + }, arguments) }; + imports.wbg.__wbg_size_b9bc39a333bd5d88 = function(arg0) { + const ret = getObject(arg0).size; + return ret; + }; + imports.wbg.__wbg_newwithstrsequence_6b9d515005eb94ac = function() { return handleError(function (arg0) { + const ret = new 
Blob(getObject(arg0)); + return addHeapObject(ret); + }, arguments) }; + imports.wbg.__wbg_slice_db5bccebb10de3fb = function() { return handleError(function (arg0, arg1, arg2, arg3, arg4) { + const ret = getObject(arg0).slice(arg1, arg2, getStringFromWasm0(arg3, arg4)); + return addHeapObject(ret); + }, arguments) }; + imports.wbg.__wbg_createObjectURL_d82f2880bada6a1d = function() { return handleError(function (arg0, arg1) { + const ret = URL.createObjectURL(getObject(arg1)); + const ptr1 = passStringToWasm0(ret, wasm.__wbindgen_malloc, wasm.__wbindgen_realloc); + const len1 = WASM_VECTOR_LEN; + getDataViewMemory0().setInt32(arg0 + 4 * 1, len1, true); + getDataViewMemory0().setInt32(arg0 + 4 * 0, ptr1, true); + }, arguments) }; + imports.wbg.__wbg_hardwareConcurrency_3b217d60cc0aa1a6 = function(arg0) { + const ret = getObject(arg0).hardwareConcurrency; + return ret; + }; + imports.wbg.__wbg_sdp_997ce3396a98ebc3 = function(arg0, arg1) { + const ret = getObject(arg1).sdp; + const ptr1 = passStringToWasm0(ret, wasm.__wbindgen_malloc, wasm.__wbindgen_realloc); + const len1 = WASM_VECTOR_LEN; + getDataViewMemory0().setInt32(arg0 + 4 * 1, len1, true); + getDataViewMemory0().setInt32(arg0 + 4 * 0, ptr1, true); + }; + imports.wbg.__wbg_new_a220cf903aa02ca2 = function() { + const ret = new Array(); + return addHeapObject(ret); + }; + imports.wbg.__wbg_newnoargs_76313bd6ff35d0f2 = function(arg0, arg1) { + const ret = new Function(getStringFromWasm0(arg0, arg1)); + return addHeapObject(ret); + }; + imports.wbg.__wbg_get_224d16597dbbfd96 = function() { return handleError(function (arg0, arg1) { + const ret = Reflect.get(getObject(arg0), getObject(arg1)); + return addHeapObject(ret); + }, arguments) }; + imports.wbg.__wbg_call_1084a111329e68ce = function() { return handleError(function (arg0, arg1) { + const ret = getObject(arg0).call(getObject(arg1)); + return addHeapObject(ret); + }, arguments) }; + imports.wbg.__wbg_new_525245e2b9901204 = function() { + const ret = new 
Object(); + return addHeapObject(ret); + }; + imports.wbg.__wbg_self_3093d5d1f7bcb682 = function() { return handleError(function () { + const ret = self.self; + return addHeapObject(ret); + }, arguments) }; + imports.wbg.__wbg_window_3bcfc4d31bc012f8 = function() { return handleError(function () { + const ret = window.window; + return addHeapObject(ret); + }, arguments) }; + imports.wbg.__wbg_globalThis_86b222e13bdf32ed = function() { return handleError(function () { + const ret = globalThis.globalThis; + return addHeapObject(ret); + }, arguments) }; + imports.wbg.__wbg_global_e5a3fe56f8be9485 = function() { return handleError(function () { + const ret = global.global; + return addHeapObject(ret); + }, arguments) }; + imports.wbg.__wbg_eval_6e4fc17d87772f52 = function() { return handleError(function (arg0, arg1) { + const ret = eval(getStringFromWasm0(arg0, arg1)); + return addHeapObject(ret); + }, arguments) }; + imports.wbg.__wbg_set_673dda6c73d19609 = function(arg0, arg1, arg2) { + getObject(arg0)[arg1 >>> 0] = takeObject(arg2); + }; + imports.wbg.__wbg_of_61f336d7eeabfca8 = function(arg0, arg1, arg2) { + const ret = Array.of(getObject(arg0), getObject(arg1), getObject(arg2)); + return addHeapObject(ret); + }; + imports.wbg.__wbg_push_37c89022f34c01ca = function(arg0, arg1) { + const ret = getObject(arg0).push(getObject(arg1)); + return ret; + }; + imports.wbg.__wbg_instanceof_ArrayBuffer_61dfc3198373c902 = function(arg0) { + let result; + try { + result = getObject(arg0) instanceof ArrayBuffer; + } catch (_) { + result = false; + } + const ret = result; + return ret; + }; + imports.wbg.__wbg_call_89af060b4e1523f2 = function() { return handleError(function (arg0, arg1, arg2) { + const ret = getObject(arg0).call(getObject(arg1), getObject(arg2)); + return addHeapObject(ret); + }, arguments) }; + imports.wbg.__wbg_now_b7a162010a9e75b4 = function() { + const ret = Date.now(); + return ret; + }; + imports.wbg.__wbg_new_b85e72ed1bfd57f9 = function(arg0, arg1) { + try 
{ + var state0 = {a: arg0, b: arg1}; + var cb0 = (arg0, arg1) => { + const a = state0.a; + state0.a = 0; + try { + return __wbg_adapter_218(a, state0.b, arg0, arg1); + } finally { + state0.a = a; + } + }; + const ret = new Promise(cb0); + return addHeapObject(ret); + } finally { + state0.a = state0.b = 0; + } + }; + imports.wbg.__wbg_resolve_570458cb99d56a43 = function(arg0) { + const ret = Promise.resolve(getObject(arg0)); + return addHeapObject(ret); + }; + imports.wbg.__wbg_then_95e6edc0f89b73b1 = function(arg0, arg1) { + const ret = getObject(arg0).then(getObject(arg1)); + return addHeapObject(ret); + }; + imports.wbg.__wbg_then_876bb3c633745cc6 = function(arg0, arg1, arg2) { + const ret = getObject(arg0).then(getObject(arg1), getObject(arg2)); + return addHeapObject(ret); + }; + imports.wbg.__wbg_buffer_b7b08af79b0b0974 = function(arg0) { + const ret = getObject(arg0).buffer; + return addHeapObject(ret); + }; + imports.wbg.__wbg_new_a0719a520adfdb99 = function(arg0) { + const ret = new Int32Array(getObject(arg0)); + return addHeapObject(ret); + }; + imports.wbg.__wbg_newwithbyteoffsetandlength_8a2cb9ca96b27ec9 = function(arg0, arg1, arg2) { + const ret = new Uint8Array(getObject(arg0), arg1 >>> 0, arg2 >>> 0); + return addHeapObject(ret); + }; + imports.wbg.__wbg_new_ea1883e1e5e86686 = function(arg0) { + const ret = new Uint8Array(getObject(arg0)); + return addHeapObject(ret); + }; + imports.wbg.__wbg_set_d1e79e2388520f18 = function(arg0, arg1, arg2) { + getObject(arg0).set(getObject(arg1), arg2 >>> 0); + }; + imports.wbg.__wbg_length_8339fcf5d8ecd12e = function(arg0) { + const ret = getObject(arg0).length; + return ret; + }; + imports.wbg.__wbg_newwithlength_ec548f448387c968 = function(arg0) { + const ret = new Uint8Array(arg0 >>> 0); + return addHeapObject(ret); + }; + imports.wbg.__wbg_buffer_0710d1b9dbe2eea6 = function(arg0) { + const ret = getObject(arg0).buffer; + return addHeapObject(ret); + }; + imports.wbg.__wbg_subarray_7c2e3576afe181d1 = 
function(arg0, arg1, arg2) { + const ret = getObject(arg0).subarray(arg1 >>> 0, arg2 >>> 0); + return addHeapObject(ret); + }; + imports.wbg.__wbg_random_4a6f48b07d1eab14 = typeof Math.random == 'function' ? Math.random : notDefined('Math.random'); + imports.wbg.__wbg_set_eacc7d73fefaafdf = function() { return handleError(function (arg0, arg1, arg2) { + const ret = Reflect.set(getObject(arg0), getObject(arg1), getObject(arg2)); + return ret; + }, arguments) }; + imports.wbg.__wbg_parse_52202f117ec9ecfa = function() { return handleError(function (arg0, arg1) { + const ret = JSON.parse(getStringFromWasm0(arg0, arg1)); + return addHeapObject(ret); + }, arguments) }; + imports.wbg.__wbg_stringify_bbf45426c92a6bf5 = function() { return handleError(function (arg0) { + const ret = JSON.stringify(getObject(arg0)); + return addHeapObject(ret); + }, arguments) }; + imports.wbg.__wbg_mark_40e050a77cc39fea = function(arg0, arg1) { + performance.mark(getStringFromWasm0(arg0, arg1)); + }; + imports.wbg.__wbg_log_c9486ca5d8e2cbe8 = function(arg0, arg1) { + let deferred0_0; + let deferred0_1; + try { + deferred0_0 = arg0; + deferred0_1 = arg1; + console.log(getStringFromWasm0(arg0, arg1)); + } finally { + wasm.__wbindgen_free(deferred0_0, deferred0_1, 1); + } + }; + imports.wbg.__wbg_log_aba5996d9bde071f = function(arg0, arg1, arg2, arg3, arg4, arg5, arg6, arg7) { + let deferred0_0; + let deferred0_1; + try { + deferred0_0 = arg0; + deferred0_1 = arg1; + console.log(getStringFromWasm0(arg0, arg1), getStringFromWasm0(arg2, arg3), getStringFromWasm0(arg4, arg5), getStringFromWasm0(arg6, arg7)); + } finally { + wasm.__wbindgen_free(deferred0_0, deferred0_1, 1); + } + }; + imports.wbg.__wbg_measure_aa7a73f17813f708 = function() { return handleError(function (arg0, arg1, arg2, arg3) { + let deferred0_0; + let deferred0_1; + let deferred1_0; + let deferred1_1; + try { + deferred0_0 = arg0; + deferred0_1 = arg1; + deferred1_0 = arg2; + deferred1_1 = arg3; + 
performance.measure(getStringFromWasm0(arg0, arg1), getStringFromWasm0(arg2, arg3)); + } finally { + wasm.__wbindgen_free(deferred0_0, deferred0_1, 1); + wasm.__wbindgen_free(deferred1_0, deferred1_1, 1); + } + }, arguments) }; + imports.wbg.__wbindgen_debug_string = function(arg0, arg1) { + const ret = debugString(getObject(arg1)); + const ptr1 = passStringToWasm0(ret, wasm.__wbindgen_malloc, wasm.__wbindgen_realloc); + const len1 = WASM_VECTOR_LEN; + getDataViewMemory0().setInt32(arg0 + 4 * 1, len1, true); + getDataViewMemory0().setInt32(arg0 + 4 * 0, ptr1, true); + }; + imports.wbg.__wbindgen_throw = function(arg0, arg1) { + throw new Error(getStringFromWasm0(arg0, arg1)); + }; + imports.wbg.__wbindgen_rethrow = function(arg0) { + throw takeObject(arg0); + }; + imports.wbg.__wbindgen_module = function() { + const ret = __wbg_init.__wbindgen_wasm_module; + return addHeapObject(ret); + }; + imports.wbg.__wbindgen_memory = function() { + const ret = wasm.memory; + return addHeapObject(ret); + }; + imports.wbg.__wbindgen_closure_wrapper1266 = function(arg0, arg1, arg2) { + const ret = makeMutClosure(arg0, arg1, 186, __wbg_adapter_34); + return addHeapObject(ret); + }; + imports.wbg.__wbindgen_closure_wrapper1268 = function(arg0, arg1, arg2) { + const ret = makeMutClosure(arg0, arg1, 186, __wbg_adapter_37); + return addHeapObject(ret); + }; + imports.wbg.__wbindgen_closure_wrapper1270 = function(arg0, arg1, arg2) { + const ret = makeMutClosure(arg0, arg1, 186, __wbg_adapter_37); + return addHeapObject(ret); + }; + imports.wbg.__wbindgen_closure_wrapper14345 = function(arg0, arg1, arg2) { + const ret = makeMutClosure(arg0, arg1, 4390, __wbg_adapter_42); + return addHeapObject(ret); + }; + imports.wbg.__wbindgen_closure_wrapper14884 = function(arg0, arg1, arg2) { + const ret = makeMutClosure(arg0, arg1, 4553, __wbg_adapter_45); + return addHeapObject(ret); + }; + imports.wbg.__wbindgen_closure_wrapper14934 = function(arg0, arg1, arg2) { + const ret = 
makeMutClosure(arg0, arg1, 4580, __wbg_adapter_48); + return addHeapObject(ret); + }; + imports.wbg.__wbindgen_closure_wrapper14936 = function(arg0, arg1, arg2) { + const ret = makeMutClosure(arg0, arg1, 4580, __wbg_adapter_48); + return addHeapObject(ret); + }; + imports.wbg.__wbindgen_closure_wrapper16260 = function(arg0, arg1, arg2) { + const ret = makeMutClosure(arg0, arg1, 5044, __wbg_adapter_53); + return addHeapObject(ret); + }; + imports['./snippets/p2p-5e5eda2a182dce1e/src/service_impl/webrtc/web.js'] = __wbg_star0; + imports['./snippets/wasm_thread-8ee53d0673203880/src/wasm32/js/module_workers_polyfill.min.js'] = __wbg_star1; + + return imports; +} + +function __wbg_init_memory(imports, memory) { + imports.wbg.memory = memory || new WebAssembly.Memory({initial:170,maximum:16384,shared:true}); +} + +function __wbg_finalize_init(instance, module, thread_stack_size) { + wasm = instance.exports; + __wbg_init.__wbindgen_wasm_module = module; + cachedDataViewMemory0 = null; + cachedUint8ArrayMemory0 = null; + +if (typeof thread_stack_size !== 'undefined' && (typeof thread_stack_size !== 'number' || thread_stack_size === 0 || thread_stack_size % 65536 !== 0)) { throw 'invalid stack size' } +wasm.__wbindgen_start(thread_stack_size); +return wasm; +} + +function initSync(module, memory) { + if (wasm !== undefined) return wasm; + + let thread_stack_size + if (typeof module !== 'undefined' && Object.getPrototypeOf(module) === Object.prototype) + ({module, memory, thread_stack_size} = module) + else + console.warn('using deprecated parameters for `initSync()`; pass a single object instead') + + const imports = __wbg_get_imports(); + + __wbg_init_memory(imports, memory); + + if (!(module instanceof WebAssembly.Module)) { + module = new WebAssembly.Module(module); + } + + const instance = new WebAssembly.Instance(module, imports); + + return __wbg_finalize_init(instance, module, thread_stack_size); +} + +async function __wbg_init(module_or_path, memory) { + if (wasm 
!== undefined) return wasm; + + let thread_stack_size + if (typeof module_or_path !== 'undefined' && Object.getPrototypeOf(module_or_path) === Object.prototype) + ({module_or_path, memory, thread_stack_size} = module_or_path) + else + console.warn('using deprecated parameters for the initialization function; pass a single object instead') + + if (typeof module_or_path === 'undefined') { + module_or_path = new URL('openmina_node_web_bg.wasm', import.meta.url); + } + const imports = __wbg_get_imports(); + + if (typeof module_or_path === 'string' || (typeof Request === 'function' && module_or_path instanceof Request) || (typeof URL === 'function' && module_or_path instanceof URL)) { + module_or_path = fetch(module_or_path); + } + + __wbg_init_memory(imports, memory); + + const { instance, module } = await __wbg_load(await module_or_path, imports); + + return __wbg_finalize_init(instance, module, thread_stack_size); +} + +export { initSync }; +export default __wbg_init; diff --git a/frontend/src/assets/webnode/pkg/openmina_node_web_bg.wasm b/frontend/src/assets/webnode/pkg/openmina_node_web_bg.wasm new file mode 100644 index 0000000000..927f5014d0 Binary files /dev/null and b/frontend/src/assets/webnode/pkg/openmina_node_web_bg.wasm differ diff --git a/frontend/src/assets/webnode/pkg/openmina_node_web_bg.wasm.d.ts b/frontend/src/assets/webnode/pkg/openmina_node_web_bg.wasm.d.ts new file mode 100644 index 0000000000..0ecafcda48 --- /dev/null +++ b/frontend/src/assets/webnode/pkg/openmina_node_web_bg.wasm.d.ts @@ -0,0 +1,30 @@ +/* tslint:disable */ +/* eslint-disable */ +export function main(): void; +export function run(): number; +export function stats_sync(a: number, b: number, c: number): number; +export function stats_block_producer(a: number): number; +export function __wbg_state_free(a: number, b: number): void; +export function state_peers(a: number): number; +export function state_message_progress(a: number): number; +export function __wbg_stats_free(a: number, 
b: number): void; +export function __wbg_rpcsender_free(a: number, b: number): void; +export function rpcsender_state(a: number): number; +export function rpcsender_status(a: number): number; +export function rpcsender_stats(a: number): number; +export function wasm_thread_entry_point(a: number): void; +export const memory: WebAssembly.Memory; +export function __wbindgen_malloc(a: number, b: number): number; +export function __wbindgen_realloc(a: number, b: number, c: number, d: number): number; +export const __wbindgen_export_3: WebAssembly.Table; +export function _dyn_core__ops__function__FnMut_____Output___R_as_wasm_bindgen__closure__WasmClosure___describe__invoke__hfc3cd25b53215e84(a: number, b: number): void; +export function _dyn_core__ops__function__FnMut__A____Output___R_as_wasm_bindgen__closure__WasmClosure___describe__invoke__h45df3e6947fcd651(a: number, b: number, c: number): void; +export function _dyn_core__ops__function__FnMut_____Output___R_as_wasm_bindgen__closure__WasmClosure___describe__invoke__hd4a56f8647b4502b(a: number, b: number): void; +export function _dyn_core__ops__function__FnMut_____Output___R_as_wasm_bindgen__closure__WasmClosure___describe__invoke__h92f8e55d458a8e39(a: number, b: number): void; +export function _dyn_core__ops__function__FnMut__A____Output___R_as_wasm_bindgen__closure__WasmClosure___describe__invoke__h90985823836ef819(a: number, b: number, c: number): void; +export function _dyn_core__ops__function__FnMut___A____Output___R_as_wasm_bindgen__closure__WasmClosure___describe__invoke__hff3e249b51682479(a: number, b: number, c: number): void; +export function __wbindgen_free(a: number, b: number, c: number): void; +export function __wbindgen_exn_store(a: number): void; +export function wasm_bindgen__convert__closures__invoke2_mut__h808c9af25a9c04f4(a: number, b: number, c: number, d: number): void; +export function __wbindgen_thread_destroy(a: number, b: number, c: number): void; +export function __wbindgen_start(a: number): 
void; diff --git a/frontend/src/assets/webnode/pkg/snippets/p2p-5e5eda2a182dce1e/src/service_impl/webrtc/web.js b/frontend/src/assets/webnode/pkg/snippets/p2p-5e5eda2a182dce1e/src/service_impl/webrtc/web.js new file mode 100644 index 0000000000..ea6963c1dc --- /dev/null +++ b/frontend/src/assets/webnode/pkg/snippets/p2p-5e5eda2a182dce1e/src/service_impl/webrtc/web.js @@ -0,0 +1,17 @@ +// https://bugs.chromium.org/p/chromium/issues/detail?id=825576 +// workaround: https://stackoverflow.com/questions/66546934/how-to-clear-closed-rtcpeerconnection-with-workaround +export function webrtcCleanup() { + queueMicrotask(() => { + console.warn("[WebRTC] doing heavy (around 50ms) GC for dangling peer connections"); + let img = document.createElement("img"); + img.src = window.URL.createObjectURL(new Blob([new ArrayBuffer(5e+7)])); // 50Mo or less or more depending as you wish to force/invoke GC cycle run + img.onerror = function() { + window.URL.revokeObjectURL(this.src); + img = null + } + }); +} + +export function schedulePeriodicWebrtcCleanup() { + setInterval(webrtcCleanup, 60 * 1000); +} diff --git a/frontend/src/assets/webnode/pkg/snippets/p2p-b52ece2615728d9c/src/service_impl/webrtc/web.js b/frontend/src/assets/webnode/pkg/snippets/p2p-b52ece2615728d9c/src/service_impl/webrtc/web.js new file mode 100644 index 0000000000..ea6963c1dc --- /dev/null +++ b/frontend/src/assets/webnode/pkg/snippets/p2p-b52ece2615728d9c/src/service_impl/webrtc/web.js @@ -0,0 +1,17 @@ +// https://bugs.chromium.org/p/chromium/issues/detail?id=825576 +// workaround: https://stackoverflow.com/questions/66546934/how-to-clear-closed-rtcpeerconnection-with-workaround +export function webrtcCleanup() { + queueMicrotask(() => { + console.warn("[WebRTC] doing heavy (around 50ms) GC for dangling peer connections"); + let img = document.createElement("img"); + img.src = window.URL.createObjectURL(new Blob([new ArrayBuffer(5e+7)])); // 50Mo or less or more depending as you wish to force/invoke GC cycle run 
+ img.onerror = function() { + window.URL.revokeObjectURL(this.src); + img = null + } + }); +} + +export function schedulePeriodicWebrtcCleanup() { + setInterval(webrtcCleanup, 60 * 1000); +} diff --git a/frontend/src/assets/webnode/pkg/snippets/wasm-bindgen-futures-a509390b5b548b61/src/task/worker.js b/frontend/src/assets/webnode/pkg/snippets/wasm-bindgen-futures-a509390b5b548b61/src/task/worker.js new file mode 100644 index 0000000000..d25dab6606 --- /dev/null +++ b/frontend/src/assets/webnode/pkg/snippets/wasm-bindgen-futures-a509390b5b548b61/src/task/worker.js @@ -0,0 +1,6 @@ +onmessage = function (ev) { + let [ia, index, value] = ev.data; + ia = new Int32Array(ia.buffer); + let result = Atomics.wait(ia, index, value); + postMessage(result); +}; diff --git a/frontend/src/assets/webnode/pkg/snippets/wasm_thread-8ee53d0673203880/src/wasm32/js/module_workers_polyfill.min.js b/frontend/src/assets/webnode/pkg/snippets/wasm_thread-8ee53d0673203880/src/wasm32/js/module_workers_polyfill.min.js new file mode 100644 index 0000000000..3ef050e317 --- /dev/null +++ b/frontend/src/assets/webnode/pkg/snippets/wasm_thread-8ee53d0673203880/src/wasm32/js/module_workers_polyfill.min.js @@ -0,0 +1,8 @@ +export function load_module_workers_polyfill() { + if(Worker._$P !== true) { + let polyfill = "!function(e){if(!e||!0!==e._$P){if(e){var n,r=Object.defineProperty({},\"type\",{get:function(){n=!0}});try{var t=URL.createObjectURL(new Blob([\"\"],{type:\"text/javascript\"}));new e(t,r).terminate(),URL.revokeObjectURL(t)}catch(e){}if(!n)try{new e(\"data:text/javascript,\",r).terminate()}catch(e){}if(n)return;(self.Worker=function(n,r){return r&&\"module\"==r.type&&(r={name:n+\"\\n\"+(r.name||\"\")},n=\"undefined\"==typeof document?location.href:document.currentScript&&document.currentScript.src||(new Error).stack.match(/[(@]((file|https?):\\/\\/[^)]+?):\\d+(:\\d+)?(?:\\)|$)/m)[1]),new e(n,r)})._$P=!0}\"undefined\"==typeof document&&function(){var e={},n={};function 
r(e,n){for(n=n.replace(/^(\\.\\.\\/|\\.\\/)/,e.replace(/[^/]+$/g,\"\")+\"$1\");n!==(n=n.replace(/[^/]+\\/\\.\\.\\//g,\"\")););return n.replace(/\\.\\//g,\"\")}var t=[],s=t.push.bind(t);addEventListener(\"message\",s);var a=self.name.match(/^[^\\n]+/)[0];self.name=self.name.replace(/^[^\\n]*\\n/g,\"\"),function t(s,a){var u,o=s;return a&&(s=r(a,s)),e[s]||(e[s]=fetch(s).then((function(a){if((o=a.url)!==s){if(null!=e[o])return e[o];e[o]=e[s]}return a.text().then((function(e){if(!a.ok)throw e;var c={exports:{}};u=n[o]||(n[o]=c.exports);var i=function(e){return t(e,o)},f=[];return e=function(e,n){n=n||[];var r,t=[],a=0;function u(e,n){for(var s,a=/(?:^|,)\\s*([\\w$]+)(?:\\s+as\\s+([\\w$]+))?\\s*/g,u=[];s=a.exec(e);)n?t.push((s[2]||s[1])+\":\"+s[1]):u.push((s[2]||s[1])+\"=\"+r+\".\"+s[1]);return u}return(e=e.replace(/(^\\s*|[;}\\s\\n]\\s*)import\\s*(?:(?:([\\w$]+)(?:\\s*\\,\\s*\\{([^}]+)\\})?|(?:\\*\\s*as\\s+([\\w$]+))|\\{([^}]*)\\})\\s*from)?\\s*(['\"])(.+?)\\6/g,(function(e,t,s,o,c,i,f,p){return n.push(p),t+=\"var \"+(r=\"$im$\"+ ++a)+\"=$require(\"+f+p+f+\")\",s&&(t+=\";var \"+s+\" = 'default' in \"+r+\" ? 
\"+r+\".default : \"+r),c&&(t+=\";var \"+c+\" = \"+r),(o=o||i)&&(t+=\";var \"+u(o,!1)),t})).replace(/((?:^|[;}\\s\\n])\\s*)export\\s*(?:\\s+(default)\\s+|((?:async\\s+)?function\\s*\\*?|class|const\\s|let\\s|var\\s)\\s*([a-zA-Z0-9$_{[]+))/g,(function(e,n,r,s,u){if(r){var o=\"$im$\"+ ++a;return t.push(\"default:\"+o),n+\"var \"+o+\"=\"}return t.push(u+\":\"+u),n+s+\" \"+u})).replace(/((?:^|[;}\\s\\n])\\s*)export\\s*\\{([^}]+)\\}\\s*;?/g,(function(e,n,r){return u(r,!0),n})).replace(/((?:^|[^a-zA-Z0-9$_@`'\".])\\s*)(import\\s*\\([\\s\\S]+?\\))/g,\"$1$$$2\")).replace(/((?:^|[^a-zA-Z0-9$_@`'\".])\\s*)import\\.meta\\.url/g,\"$1\"+JSON.stringify(s))+\"\\n$module.exports={\"+t.join(\",\")+\"}\"}(e,f),Promise.all(f.map((function(e){var s=r(o,e);return s in n?n[s]:t(s)}))).then((function(n){e+=\"\\n//# sourceURL=\"+s;try{var r=new Function(\"$import\",\"$require\",\"$module\",\"$exports\",e)}catch(n){var t=n.line-1,a=n.column,o=e.split(\"\\n\"),p=(o[t-2]||\"\")+\"\\n\"+o[t-1]+\"\\n\"+(null==a?\"\":new Array(a).join(\"-\")+\"^\\n\")+(o[t]||\"\"),l=new Error(n.message+\"\\n\\n\"+p,s,t);throw l.sourceURL=l.fileName=s,l.line=t,l.column=a,l}var m=r(i,(function(e){return n[f.indexOf(e)]}),c,c.exports);return null!=m&&(c.exports=m),Object.assign(u,c.exports),c.exports}))}))})))}(a).then((function(){removeEventListener(\"message\",s),t.map(dispatchEvent)})).catch((function(e){setTimeout((function(){throw e}))}))}()}}(self.Worker);"; + let blob = new Blob([polyfill], { type: 'text/javascript' }); + let blobUrl = URL.createObjectURL(blob); + !function(e){if(!e||!0!==e._$P){if(e){var n,r=Object.defineProperty({},"type",{get:function(){n=!0}});try{var t=URL.createObjectURL(new Blob([""],{type:"text/javascript"}));new e(t,r).terminate(),URL.revokeObjectURL(t)}catch(e){}if(!n)try{new e("data:text/javascript,",r).terminate()}catch(e){}if(n)return;(self.Worker=function(n,r){return r&&"module"==r.type&&(r={name:n+"\n"+(r.name||"")},n=blobUrl),new e(n,r)})._$P=!0}"undefined"==typeof 
document&&function(){var e={},n={};function r(e,n){for(n=n.replace(/^(\.\.\/|\.\/)/,e.replace(/[^/]+$/g,"")+"$1");n!==(n=n.replace(/[^/]+\/\.\.\//g,"")););return n.replace(/\.\//g,"")}var t=[],s=t.push.bind(t);addEventListener("message",s);var a=self.name.match(/^[^\n]+/)[0];self.name=self.name.replace(/^[^\n]*\n/g,""),function t(s,a){var u,o=s;return a&&(s=r(a,s)),e[s]||(e[s]=fetch(s).then((function(a){if((o=a.url)!==s){if(null!=e[o])return e[o];e[o]=e[s]}return a.text().then((function(e){if(!a.ok)throw e;var c={exports:{}};u=n[o]||(n[o]=c.exports);var i=function(e){return t(e,o)},f=[];return e=function(e,n){n=n||[];var r,t=[],a=0;function u(e,n){for(var s,a=/(?:^|,)\s*([\w$]+)(?:\s+as\s+([\w$]+))?\s*/g,u=[];s=a.exec(e);)n?t.push((s[2]||s[1])+":"+s[1]):u.push((s[2]||s[1])+"="+r+"."+s[1]);return u}return(e=e.replace(/(^\s*|[;}\s\n]\s*)import\s*(?:(?:([\w$]+)(?:\s*\,\s*\{([^}]+)\})?|(?:\*\s*as\s+([\w$]+))|\{([^}]*)\})\s*from)?\s*(['"])(.+?)\6/g,(function(e,t,s,o,c,i,f,p){return n.push(p),t+="var "+(r="$im$"+ ++a)+"=$require("+f+p+f+")",s&&(t+=";var "+s+" = 'default' in "+r+" ? 
"+r+".default : "+r),c&&(t+=";var "+c+" = "+r),(o=o||i)&&(t+=";var "+u(o,!1)),t})).replace(/((?:^|[;}\s\n])\s*)export\s*(?:\s+(default)\s+|((?:async\s+)?function\s*\*?|class|const\s|let\s|var\s)\s*([a-zA-Z0-9$_{[]+))/g,(function(e,n,r,s,u){if(r){var o="$im$"+ ++a;return t.push("default:"+o),n+"var "+o+"="}return t.push(u+":"+u),n+s+" "+u})).replace(/((?:^|[;}\s\n])\s*)export\s*\{([^}]+)\}\s*;?/g,(function(e,n,r){return u(r,!0),n})).replace(/((?:^|[^a-zA-Z0-9$_@`'".])\s*)(import\s*\([\s\S]+?\))/g,"$1$$$2")).replace(/((?:^|[^a-zA-Z0-9$_@`'".])\s*)import\.meta\.url/g,"$1"+JSON.stringify(s))+"\n$module.exports={"+t.join(",")+"}"}(e,f),Promise.all(f.map((function(e){var s=r(o,e);return s in n?n[s]:t(s)}))).then((function(n){e+="\n//# sourceURL="+s;try{var r=new Function("$import","$require","$module","$exports",e)}catch(n){var t=n.line-1,a=n.column,o=e.split("\n"),p=(o[t-2]||"")+"\n"+o[t-1]+"\n"+(null==a?"":new Array(a).join("-")+"^\n")+(o[t]||""),l=new Error(n.message+"\n\n"+p,s,t);throw l.sourceURL=l.fileName=s,l.line=t,l.column=a,l}var m=r(i,(function(e){return n[f.indexOf(e)]}),c,c.exports);return null!=m&&(c.exports=m),Object.assign(u,c.exports),c.exports}))}))})))}(a).then((function(){removeEventListener("message",s),t.map(dispatchEvent)})).catch((function(e){setTimeout((function(){throw e}))}))}()}}(self.Worker); + } +} \ No newline at end of file diff --git a/frontend/src/environments/environment.staging.ts b/frontend/src/environments/environment.staging.ts new file mode 100644 index 0000000000..c5e4399df3 --- /dev/null +++ b/frontend/src/environments/environment.staging.ts @@ -0,0 +1,34 @@ +import { MinaEnv } from '@shared/types/core/environment/mina-env.type'; + +export const environment: Readonly = { + production: true, + globalConfig: { + features: { + 'dashboard': [], + 'block-production': ['won-slots'], + 'nodes': ['overview', 'live', 'bootstrap'], + 'mempool': [], + 'state': ['actions'], + }, + canAddNodes: false, + }, + configs: [ + { + name: 
'staging-devnet-bp-0', + url: 'https://staging-devnet-openmina-bp-0.minaprotocol.network', + }, + { + name: 'staging-devnet-bp-1', + url: 'https://staging-devnet-openmina-bp-1.minaprotocol.network', + }, + { + name: 'staging-devnet-bp-2', + url: 'https://staging-devnet-openmina-bp-2.minaprotocol.network', + }, + { + name: 'staging-devnet-bp-3', + url: 'https://staging-devnet-openmina-bp-3.minaprotocol.network', + }, + ], +}; + diff --git a/frontend/src/environments/environment.ts b/frontend/src/environments/environment.ts index 4b342feba8..43168b7301 100644 --- a/frontend/src/environments/environment.ts +++ b/frontend/src/environments/environment.ts @@ -2,7 +2,6 @@ import { MinaEnv } from '@shared/types/core/environment/mina-env.type'; export const environment: Readonly = { production: false, - identifier: 'Development FE', globalConfig: { features: { dashboard: [], @@ -15,8 +14,12 @@ export const environment: Readonly = { 'block-production': ['overview', 'won-slots'], mempool: [], benchmarks: ['wallets'], + // 'block-production': ['won-slots'], + // 'nodes': ['overview', 'live', 'bootstrap'], + // 'mempool': [], + // 'state': ['actions'], }, - canAddNodes: true, + canAddNodes: false, }, configs: [ // { @@ -35,8 +38,20 @@ export const environment: Readonly = { // url: 'http://116.202.128.230:11010', // }, { - name: 'http://116.202.128.230:11010', - url: 'http://116.202.128.230:11010', + name: 'staging-devnet-bp-0', + url: 'https://staging-devnet-openmina-bp-0.minaprotocol.network', + }, + { + name: 'staging-devnet-bp-1', + url: 'https://staging-devnet-openmina-bp-1.minaprotocol.network', + }, + { + name: 'staging-devnet-bp-2', + url: 'https://staging-devnet-openmina-bp-2.minaprotocol.network', + }, + { + name: 'staging-devnet-bp-3', + url: 'https://staging-devnet-openmina-bp-3.minaprotocol.network', }, // { // name: 'Local rust node', @@ -44,18 +59,22 @@ export const environment: Readonly = { // memoryProfiler: 'http://1.k8.openmina.com:31164', // }, { - name: 
'Docker 11010', - url: 'http://localhost:11010', - }, - { - name: 'Docker 11012', - url: 'http://localhost:11012', - }, - { - name: 'Docker 11014', - url: 'http://localhost:11014', + name: 'Web Node 1', + isWebNode: true, }, // { + // name: 'Docker 11010', + // url: 'http://localhost:11010', + // }, + // { + // name: 'Docker 11012', + // url: 'http://localhost:11012', + // }, + // { + // name: 'Docker 11014', + // url: 'http://localhost:11014', + // }, + // { // name: 'Producer', // url: 'http://65.109.105.40:3000', // minaExplorerNetwork: 'devnet', diff --git a/frontend/src/index.html b/frontend/src/index.html index 38b402509d..0a9d08dac0 100644 --- a/frontend/src/index.html +++ b/frontend/src/index.html @@ -7,7 +7,6 @@ - + diff --git a/fuzzer/Cargo.toml b/fuzzer/Cargo.toml index 5383bae80a..fafebde9ec 100644 --- a/fuzzer/Cargo.toml +++ b/fuzzer/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "openmina-fuzzer" -version = "0.8.1" +version = "0.8.2" edition = "2021" license = "Apache-2.0" diff --git a/ledger/Cargo.toml b/ledger/Cargo.toml index a0b40151aa..1a920d3fff 100644 --- a/ledger/Cargo.toml +++ b/ledger/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "mina-tree" -version = "0.8.1" +version = "0.8.2" edition = "2021" license = "Apache-2.0" diff --git a/ledger/src/account/account.rs b/ledger/src/account/account.rs index 9733ae3e77..8da7efd729 100644 --- a/ledger/src/account/account.rs +++ b/ledger/src/account/account.rs @@ -113,15 +113,15 @@ impl Default for TokenSymbol { } } -impl TryFrom<&mina_p2p_messages::string::ByteString> for TokenSymbol { +impl TryFrom<&mina_p2p_messages::string::TokenSymbol> for TokenSymbol { type Error = std::string::FromUtf8Error; - fn try_from(value: &mina_p2p_messages::string::ByteString) -> Result { + fn try_from(value: &mina_p2p_messages::string::TokenSymbol) -> Result { Ok(Self(value.clone().try_into()?)) } } -impl From<&TokenSymbol> for mina_p2p_messages::string::ByteString { +impl From<&TokenSymbol> for 
mina_p2p_messages::string::TokenSymbol { fn from(value: &TokenSymbol) -> Self { value.0.as_bytes().into() } @@ -618,15 +618,15 @@ impl std::ops::Deref for ZkAppUri { } } -impl TryFrom<&mina_p2p_messages::string::ByteString> for ZkAppUri { +impl TryFrom<&mina_p2p_messages::string::ZkAppUri> for ZkAppUri { type Error = std::string::FromUtf8Error; - fn try_from(value: &mina_p2p_messages::string::ByteString) -> Result { + fn try_from(value: &mina_p2p_messages::string::ZkAppUri) -> Result { Ok(Self(value.clone().try_into()?)) } } -impl From<&ZkAppUri> for mina_p2p_messages::string::ByteString { +impl From<&ZkAppUri> for mina_p2p_messages::string::ZkAppUri { fn from(value: &ZkAppUri) -> Self { Self::from(value.0.as_bytes()) } diff --git a/ledger/src/proofs/gates.rs b/ledger/src/proofs/gates.rs index 1ab2dcadb4..fedc00256c 100644 --- a/ledger/src/proofs/gates.rs +++ b/ledger/src/proofs/gates.rs @@ -88,7 +88,14 @@ fn read_gates() -> Gates { Vec>, ) { let circuits_config = openmina_core::NetworkConfig::global().circuits_config; - let base_dir = Path::new(env!("CARGO_MANIFEST_DIR")); + let base_dir = std::env::var("OPENMINA_CIRCUIT_BLOBS_BASE_DIR") + .unwrap_or_else(|_| env!("CARGO_MANIFEST_DIR").to_string()); + let base_dir = Path::new(&base_dir); + let base_dir = if base_dir.exists() { + base_dir + } else { + Path::new("/usr/local/lib/openmina/circuit-blobs") + }; let base_dir = base_dir.join(circuits_config.directory_name); let internal_vars_path = base_dir.join(format!("{}_internal_vars.bin", filename)); diff --git a/ledger/src/scan_state/conv.rs b/ledger/src/scan_state/conv.rs index 536773c56f..72aa1f196e 100644 --- a/ledger/src/scan_state/conv.rs +++ b/ledger/src/scan_state/conv.rs @@ -1101,9 +1101,10 @@ impl From<&MinaBaseAccountUpdateTStableV1> for AccountUpdate { use mina_p2p_messages::v2::MinaBaseAccountUpdateUpdateStableV1Delegate as Delegate; use mina_p2p_messages::v2::MinaBaseAccountUpdateUpdateStableV1Permissions as Perm; use 
mina_p2p_messages::v2::MinaBaseAccountUpdateUpdateStableV1Timing as Timing; + use mina_p2p_messages::v2::MinaBaseAccountUpdateUpdateStableV1TokenSymbol as TokenSymbol; use mina_p2p_messages::v2::MinaBaseAccountUpdateUpdateStableV1VerificationKey as VK; use mina_p2p_messages::v2::MinaBaseAccountUpdateUpdateStableV1VotingFor as Voting; - use mina_p2p_messages::v2::MinaBaseAccountUpdateUpdateStableV1ZkappUri as BString; + use mina_p2p_messages::v2::MinaBaseAccountUpdateUpdateStableV1ZkappUri as ZkAppUri; use MinaBaseAccountUpdateUpdateStableV1AppStateA as AppState; Self { @@ -1128,12 +1129,12 @@ impl From<&MinaBaseAccountUpdateTStableV1> for AccountUpdate { Perm::Keep => SetOrKeep::Keep, }, zkapp_uri: match &value.body.update.zkapp_uri { - BString::Set(s) => SetOrKeep::Set(s.try_into().unwrap()), - BString::Keep => SetOrKeep::Keep, + ZkAppUri::Set(s) => SetOrKeep::Set(s.try_into().unwrap()), + ZkAppUri::Keep => SetOrKeep::Keep, }, token_symbol: match &value.body.update.token_symbol { - BString::Set(s) => SetOrKeep::Set(s.try_into().unwrap()), - BString::Keep => SetOrKeep::Keep, + TokenSymbol::Set(s) => SetOrKeep::Set(s.try_into().unwrap()), + TokenSymbol::Keep => SetOrKeep::Keep, }, timing: match &value.body.update.timing { Timing::Set(timing) => SetOrKeep::Set((&**timing).into()), @@ -1423,9 +1424,10 @@ impl From<&AccountUpdate> for MinaBaseAccountUpdateTStableV1 { use mina_p2p_messages::v2::MinaBaseAccountUpdateUpdateStableV1Delegate as Delegate; use mina_p2p_messages::v2::MinaBaseAccountUpdateUpdateStableV1Permissions as Perm; use mina_p2p_messages::v2::MinaBaseAccountUpdateUpdateStableV1Timing as Timing; + use mina_p2p_messages::v2::MinaBaseAccountUpdateUpdateStableV1TokenSymbol as TokenSymbol; use mina_p2p_messages::v2::MinaBaseAccountUpdateUpdateStableV1VerificationKey as VK; use mina_p2p_messages::v2::MinaBaseAccountUpdateUpdateStableV1VotingFor as Voting; - use mina_p2p_messages::v2::MinaBaseAccountUpdateUpdateStableV1ZkappUri as BString; + use 
mina_p2p_messages::v2::MinaBaseAccountUpdateUpdateStableV1ZkappUri as ZkAppUri; use MinaBaseAccountUpdateUpdateStableV1AppStateA as AppState; Self { @@ -1450,12 +1452,12 @@ impl From<&AccountUpdate> for MinaBaseAccountUpdateTStableV1 { SetOrKeep::Keep => Perm::Keep, }, zkapp_uri: match &value.body.update.zkapp_uri { - SetOrKeep::Set(s) => BString::Set(s.into()), - SetOrKeep::Keep => BString::Keep, + SetOrKeep::Set(s) => ZkAppUri::Set(s.into()), + SetOrKeep::Keep => ZkAppUri::Keep, }, token_symbol: match &value.body.update.token_symbol { - SetOrKeep::Set(s) => BString::Set(s.into()), - SetOrKeep::Keep => BString::Keep, + SetOrKeep::Set(s) => TokenSymbol::Set(s.into()), + SetOrKeep::Keep => TokenSymbol::Keep, }, timing: match &value.body.update.timing { SetOrKeep::Set(timing) => Timing::Set(Box::new(timing.into())), diff --git a/ledger/src/transaction_pool.rs b/ledger/src/transaction_pool.rs index a5e3c784a4..16ebb4bd57 100644 --- a/ledger/src/transaction_pool.rs +++ b/ledger/src/transaction_pool.rs @@ -1,6 +1,7 @@ +use backtrace::Backtrace; use serde::{Deserialize, Serialize}; use std::{ - borrow::Borrow, + borrow::{Borrow, Cow}, collections::{hash_map::Entry, BTreeMap, BTreeSet, HashMap, HashSet, VecDeque}, sync::Arc, }; @@ -8,7 +9,7 @@ use std::{ use itertools::Itertools; use mina_hasher::Fp; use mina_p2p_messages::{bigint::BigInt, v2}; -use openmina_core::consensus::ConsensusConstants; +use openmina_core::{bug_condition, consensus::ConsensusConstants}; use crate::{ scan_state::{ @@ -54,6 +55,17 @@ impl From for TransactionPoolErrors { } } +#[inline(never)] +fn my_assert(v: bool) -> Result<(), CommandError> { + if !v { + let backtrace = Backtrace::new(); + let s = format!("assert failed {:?}", backtrace); + bug_condition!("{:?}", s); + return Err(CommandError::Custom(Cow::Owned(s))); + } + Ok(()) +} + mod consensus { use crate::scan_state::currency::{BlockTimeSpan, Epoch, Length}; @@ -205,6 +217,9 @@ pub mod diff { FeePayerAccountNotFound, 
FeePayerNotPermittedToSend, AfterSlotTxEnd, + BacktrackNonceMismatch, + InvalidCurrencyConsumed, + Custom, } impl Error { @@ -218,7 +233,10 @@ pub mod diff { | Error::Overloaded | Error::FeePayerAccountNotFound | Error::FeePayerNotPermittedToSend - | Error::AfterSlotTxEnd => false, + | Error::AfterSlotTxEnd + | Error::InvalidCurrencyConsumed + | Error::Custom + | Error::BacktrackNonceMismatch => false, Error::Overflow | Error::BadToken | Error::UnwantedFeeToken => true, } } @@ -527,6 +545,18 @@ pub enum CommandError { token_id: TokenId, }, AfterSlotTxEnd, + BacktrackNonceMismatch { + expected_nonce: Nonce, + first_nonce: Nonce, + }, + InvalidCurrencyConsumed, + Custom(Cow<'static, str>), +} + +impl From for String { + fn from(value: CommandError) -> Self { + format!("{:?}", value) + } } impl From for diff::Error { @@ -540,6 +570,9 @@ impl From for diff::Error { CommandError::Expired { .. } => diff::Error::Expired, CommandError::UnwantedFeeToken { .. } => diff::Error::UnwantedFeeToken, CommandError::AfterSlotTxEnd => diff::Error::AfterSlotTxEnd, + CommandError::BacktrackNonceMismatch { .. 
} => diff::Error::BacktrackNonceMismatch, + CommandError::InvalidCurrencyConsumed => diff::Error::InvalidCurrencyConsumed, + CommandError::Custom(_) => diff::Error::Custom, } } } @@ -735,7 +768,7 @@ impl IndexedPool { let fee_payer = unchecked.fee_payer(); let fee_per_wu = unchecked.fee_per_wu(); - let consumed = currency_consumed(&unchecked).unwrap(); + let consumed = currency_consumed(&unchecked)?; match self.all_by_sender.get_mut(&fee_payer) { None => { @@ -752,12 +785,16 @@ impl IndexedPool { } Some((queue, currency_reserved)) => { let first_queued = queue.front().cloned().unwrap(); + let expected_nonce = unchecked.expected_target_nonce(); + let first_nonce = first_queued.data.forget_check().applicable_at_nonce(); - if unchecked.expected_target_nonce() - != first_queued.data.forget_check().applicable_at_nonce() - { + if expected_nonce != first_nonce { // Ocaml panics here as well - panic!("indexed pool nonces inconsistent when adding from backtrack.") + //panic!("indexed pool nonces inconsistent when adding from backtrack.") + return Err(CommandError::BacktrackNonceMismatch { + expected_nonce, + first_nonce, + }); } // update `self.all_by_sender` @@ -823,7 +860,7 @@ impl IndexedPool { fn remove_with_dependents_exn( &mut self, cmd: &ValidCommandWithHash, - ) -> VecDeque { + ) -> Result, CommandError> { let sender = cmd.data.fee_payer(); let mut by_sender = SenderState { state: self.all_by_sender.get(&sender).cloned(), @@ -844,11 +881,11 @@ impl IndexedPool { cmd: &ValidCommandWithHash, by_sender: &mut SenderState, updates: &mut Vec, - ) -> VecDeque { + ) -> Result, CommandError> { let (sender_queue, reserved_currency_ref) = by_sender.state.as_mut().unwrap(); let unchecked = cmd.data.forget_check(); - assert!(!sender_queue.is_empty()); + my_assert(!sender_queue.is_empty())?; let cmd_nonce = unchecked.applicable_at_nonce(); @@ -863,12 +900,12 @@ impl IndexedPool { let drop_queue = sender_queue.split_off(cmd_index); let keep_queue = sender_queue; - 
assert!(!drop_queue.is_empty()); + my_assert(!drop_queue.is_empty())?; - let currency_to_remove = drop_queue.iter().fold(Amount::zero(), |acc, cmd| { - let consumed = currency_consumed(&cmd.data.forget_check()).unwrap(); - consumed.checked_add(&acc).unwrap() - }); + let currency_to_remove = drop_queue.iter().try_fold(Amount::zero(), |acc, cmd| { + let consumed = currency_consumed(&cmd.data.forget_check())?; + Ok(consumed.checked_add(&acc).unwrap()) + })?; // This is safe because the currency in a subset of the commands much be <= // total currency in all the commands. @@ -892,11 +929,11 @@ impl IndexedPool { if !keep_queue.is_empty() { *reserved_currency_ref = reserved_currency; } else { - assert!(reserved_currency.is_zero()); + my_assert(reserved_currency.is_zero())?; by_sender.state = None; } - drop_queue + Ok(drop_queue) } fn apply_updates(&mut self, updates: Vec) { @@ -989,7 +1026,7 @@ impl IndexedPool { let consumed = { self.check_expiry(global_slot_since_genesis, &unchecked)?; - let consumed = currency_consumed(&unchecked).ok_or(CommandError::Overflow)?; + let consumed = currency_consumed(&unchecked).map_err(|_| CommandError::Overflow)?; if !unchecked.fee_token().is_default() { return Err(CommandError::UnwantedFeeToken { token_id: unchecked.fee_token(), @@ -1023,7 +1060,7 @@ impl IndexedPool { Ok((cmd.clone(), Self::make_queue())) } Some((mut queued_cmds, reserved_currency)) => { - assert!(!queued_cmds.is_empty()); + my_assert(!queued_cmds.is_empty())?; let queue_applicable_at_nonce = { let first = queued_cmds.front().unwrap(); first.data.forget_check().applicable_at_nonce() @@ -1077,7 +1114,7 @@ impl IndexedPool { let drop_queue = queued_cmds.split_off(replacement_index); let to_drop = drop_queue.front().unwrap().data.forget_check(); - assert!(cmd_applicable_at_nonce <= to_drop.applicable_at_nonce()); + my_assert(cmd_applicable_at_nonce <= to_drop.applicable_at_nonce())?; // We check the fee increase twice because we need to be sure the // subtraction is 
safe. @@ -1092,8 +1129,8 @@ impl IndexedPool { drop_queue.front().unwrap(), by_sender, updates, - ); - assert_eq!(drop_queue, dropped); + )?; + my_assert(drop_queue == dropped)?; let (cmd, _) = { let (v, dropped) = self.add_from_gossip_exn_impl( @@ -1106,7 +1143,7 @@ impl IndexedPool { updates, )?; // We've already removed them, so this should always be empty. - assert!(dropped.is_empty()); + my_assert(dropped.is_empty())?; (v, dropped) }; @@ -1144,7 +1181,7 @@ impl IndexedPool { &mut this_updates, ) { Ok((_cmd, dropped)) => { - assert!(dropped.is_empty()); + my_assert(dropped.is_empty())?; updates.append(&mut this_updates); } Err(_) => { @@ -1191,20 +1228,23 @@ impl IndexedPool { self.expired_by_global_slot(global_slot_since_genesis) } - fn remove_expired(&mut self, global_slot_since_genesis: Slot) -> Vec { + fn remove_expired( + &mut self, + global_slot_since_genesis: Slot, + ) -> Result, CommandError> { let mut dropped = Vec::with_capacity(128); for cmd in self.expired(global_slot_since_genesis) { if self.member(&cmd) { - let removed = self.remove_with_dependents_exn(&cmd); + let removed = self.remove_with_dependents_exn(&cmd)?; dropped.extend(removed); } } - dropped + Ok(dropped) } - fn remove_lowest_fee(&mut self) -> VecDeque { + fn remove_lowest_fee(&mut self) -> Result, CommandError> { let Some(set) = self.min_fee().and_then(|fee| self.all_by_fee.get(&fee)) else { - return VecDeque::new(); + return Ok(VecDeque::new()); }; // TODO: Should `self.all_by_fee` be a `BTreeSet` instead ? 
@@ -1221,21 +1261,24 @@ impl IndexedPool { mut queue: VecDeque, mut currency_reserved: Amount, current_balance: Amount, - ) -> ( - VecDeque, - Amount, - VecDeque, - ) { + ) -> Result< + ( + VecDeque, + Amount, + VecDeque, + ), + CommandError, + > { let mut dropped_so_far = VecDeque::with_capacity(queue.len()); while currency_reserved > current_balance { let last = queue.pop_back().unwrap(); - let consumed = currency_consumed(&last.data.forget_check()).unwrap(); + let consumed = currency_consumed(&last.data.forget_check())?; dropped_so_far.push_back(last); currency_reserved = currency_reserved.checked_sub(&consumed).unwrap(); } - (queue, currency_reserved, dropped_so_far) + Ok((queue, currency_reserved, dropped_so_far)) } fn revalidate( @@ -1243,9 +1286,9 @@ impl IndexedPool { global_slot_since_genesis: Slot, kind: RevalidateKind, get_account: F, - ) -> Vec + ) -> Result, CommandError> where - F: Fn(&AccountId) -> Account, + F: Fn(&AccountId) -> Option, { let requires_revalidation = |account_id: &AccountId| match kind { RevalidateKind::EntirePool => true, @@ -1258,7 +1301,8 @@ impl IndexedPool { if !requires_revalidation(&sender) { continue; } - let account: Account = get_account(&sender); + let account: Account = get_account(&sender) + .ok_or(CommandError::Custom(Cow::Borrowed("Account not find")))?; let current_balance = account .liquid_balance_at_slot(global_slot_since_genesis) .to_amount(); @@ -1268,7 +1312,7 @@ impl IndexedPool { if !(account.has_permission_to_send() && account.has_permission_to_increment_nonce()) || account.nonce < first_nonce { - let this_dropped = self.remove_with_dependents_exn(first_cmd); + let this_dropped = self.remove_with_dependents_exn(first_cmd)?; dropped.extend(this_dropped); } else { // current_nonce >= first_nonce @@ -1285,7 +1329,7 @@ impl IndexedPool { for cmd in &drop_queue { currency_reserved = currency_reserved - .checked_sub(¤cy_consumed(&cmd.data.forget_check()).unwrap()) + 
.checked_sub(¤cy_consumed(&cmd.data.forget_check())?) .unwrap(); } @@ -1294,7 +1338,7 @@ impl IndexedPool { keep_queue, currency_reserved, current_balance, - ); + )?; let to_drop: Vec<_> = drop_queue.into_iter().chain(dropped_for_balance).collect(); @@ -1325,7 +1369,7 @@ impl IndexedPool { } } - dropped + Ok(dropped) } // TODO(adonagy): clones too expensive? Optimize @@ -1461,7 +1505,7 @@ impl IndexedPool { } } -fn currency_consumed(cmd: &UserCommand) -> Option { +fn currency_consumed(cmd: &UserCommand) -> Result { use crate::scan_state::transaction_logic::signed_command::{Body::*, PaymentPayload}; let fee_amount = Amount::of_fee(&cmd.fee()); @@ -1478,7 +1522,9 @@ fn currency_consumed(cmd: &UserCommand) -> Option { UserCommand::ZkAppCommand(_) => Amount::zero(), }; - fee_amount.checked_add(&amount) + fee_amount + .checked_add(&amount) + .ok_or(CommandError::InvalidCurrencyConsumed) } type BlakeHash = Arc<[u8; 32]>; @@ -1646,17 +1692,19 @@ impl TransactionPool { &mut self, global_slot_since_genesis: Slot, accounts: &BTreeMap, - ) { + ) -> Result<(), CommandError> { let dropped = self.pool.revalidate( global_slot_since_genesis, RevalidateKind::EntirePool, |sender_id| { - accounts - .get(sender_id) - .cloned() - .unwrap_or_else(Account::empty) + Some( + accounts + .get(sender_id) + .cloned() + .unwrap_or_else(Account::empty), + ) }, - ); + )?; let dropped_locally_generated = dropped .iter() @@ -1675,6 +1723,8 @@ impl TransactionPool { dropped_locally_generated ) } + + Ok(()) } fn has_sufficient_fee(&self, pool_max_size: usize, cmd: &valid::UserCommand) -> bool { @@ -1690,16 +1740,19 @@ impl TransactionPool { } } - fn drop_until_below_max_size(&mut self, pool_max_size: usize) -> Vec { + fn drop_until_below_max_size( + &mut self, + pool_max_size: usize, + ) -> Result, CommandError> { let mut list = Vec::new(); while self.pool.size() > pool_max_size { - let dropped = self.pool.remove_lowest_fee(); - assert!(!dropped.is_empty()); + let dropped = 
self.pool.remove_lowest_fee()?; + my_assert(!dropped.is_empty())?; list.extend(dropped) } - list + Ok(list) } pub fn get_accounts_to_handle_transition_diff( @@ -1735,7 +1788,7 @@ impl TransactionPool { account_ids: &BTreeSet, accounts: &BTreeMap, uncommited: &BTreeMap, - ) { + ) -> Result<(), String> { let diff::BestTipDiff { new_commands, removed_commands, @@ -1784,8 +1837,8 @@ impl TransactionPool { current_global_slot, cmd, ) { - Ok(_) => self.drop_until_below_max_size(pool_max_size), - Err(_) => todo!(), // TODO: print error + Ok(_) => self.drop_until_below_max_size(pool_max_size)?, + Err(e) => return Err(format!("{:?}", e)), }; dropped_backtrack.extend(dropped_seq); } @@ -1804,16 +1857,17 @@ impl TransactionPool { let get_account = |id: &AccountId| { match existing_account_states_by_id.get(id) { - Some(account) => account.clone(), + Some(account) => Some(account.clone()), None => { if accounts_to_check.contains(id) { - Account::empty() + Some(Account::empty()) } else { + None // OCaml panic too, with same message - panic!( - "did not expect Indexed_pool.revalidate to call \ - get_account on account not in accounts_to_check" - ) + // panic!( + // "did not expect Indexed_pool.revalidate to call \ + // get_account on account not in accounts_to_check" + // ) } } } @@ -1823,7 +1877,7 @@ impl TransactionPool { global_slot_since_genesis, RevalidateKind::Subset(accounts_to_check), get_account, - ) + )? 
}; let (committed_commands, dropped_commit_conflicts): (Vec<_>, Vec<_>) = { @@ -1841,7 +1895,7 @@ impl TransactionPool { let old = self .locally_generated_committed .insert((*cmd).clone(), data); - assert!(old.is_none()); + my_assert(old.is_none())?; }; } @@ -1888,11 +1942,13 @@ impl TransactionPool { } } - let expired_commands = self.pool.remove_expired(global_slot_since_genesis); + let expired_commands = self.pool.remove_expired(global_slot_since_genesis)?; for cmd in &expired_commands { self.verification_key_table.decrement_hashed([cmd]); self.locally_generated_uncommitted.remove(cmd); } + + Ok(()) } pub fn get_accounts_to_apply_diff(&self, diff: &diff::DiffVerified) -> BTreeSet { @@ -1941,11 +1997,13 @@ impl TransactionPool { .list .iter() .map(|cmd| { + let account = fee_payer_accounts + .get(&fee_payer(cmd)) + .ok_or_else(|| "Fee payer not found".to_string())?; + let result: Result<_, diff::Error> = (|| { check_command(&self.pool, cmd)?; - let account = fee_payer_accounts.get(&fee_payer(cmd)).unwrap(); // OCaml panics too - match self.pool.add_from_gossip_exn( global_slot_since_genesis, current_global_slot, @@ -1962,11 +2020,11 @@ impl TransactionPool { })(); match result { - Ok((cmd, dropped)) => Ok((cmd, dropped)), - Err(err) => Err((cmd, err)), + Ok((cmd, dropped)) => Ok(Ok((cmd, dropped))), + Err(err) => Ok(Err((cmd, err))), } }) - .collect::>(); + .collect::>, String>>()?; let added_cmds = add_results .iter() @@ -1985,7 +2043,7 @@ impl TransactionPool { .flatten() .collect::>(); - let dropped_for_size = { self.drop_until_below_max_size(self.config.pool_max_size) }; + let dropped_for_size = self.drop_until_below_max_size(self.config.pool_max_size)?; let all_dropped_cmds = dropped_for_add .iter() diff --git a/macros/Cargo.toml b/macros/Cargo.toml index 3370610959..92158d505c 100644 --- a/macros/Cargo.toml +++ b/macros/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "openmina-macros" -version = "0.8.1" +version = "0.8.2" edition = "2021" license = "Apache-2.0" 
authors = [ "Alexander Koptelov " ] diff --git a/mina-p2p-messages/src/hash_input.rs b/mina-p2p-messages/src/hash_input.rs index df6e456ea1..f995f83389 100644 --- a/mina-p2p-messages/src/hash_input.rs +++ b/mina-p2p-messages/src/hash_input.rs @@ -11,6 +11,7 @@ use crate::{ number::{Int32, Int64, UInt32, UInt64}, pseq::PaddedSeq, string::ByteString, + string::ZkAppUri, }; pub trait ToInput { @@ -59,6 +60,12 @@ impl ToInput for ByteString { } } +impl ToInput for ZkAppUri { + fn to_input(&self, inputs: &mut Inputs) { + inputs.append_bytes(self.as_ref()) + } +} + impl ToInput for Vec where D: Deref, diff --git a/mina-p2p-messages/src/string.rs b/mina-p2p-messages/src/string.rs index 228864b521..a64d126aef 100644 --- a/mina-p2p-messages/src/string.rs +++ b/mina-p2p-messages/src/string.rs @@ -1,22 +1,35 @@ +use std::marker::PhantomData; + use binprot::Nat0; use serde::{de::Visitor, Deserialize, Serialize}; const MINA_STRING_MAX_LENGTH: usize = 100_000_000; const CHUNK_SIZE: usize = 5_000; +pub type ByteString = BoundedByteString; +pub type CharString = BoundedCharString; + +// https://github.com/MinaProtocol/mina/blob/c0c9d702b8cba34a603a28001c293ca462b1dfec/src/lib/mina_base/zkapp_account.ml#L140 +pub const ZKAPP_URI_MAX_LENGTH: usize = 255; +// https://github.com/MinaProtocol/mina/blob/c0c9d702b8cba34a603a28001c293ca462b1dfec/src/lib/mina_base/account.ml#L92 +pub const TOKEN_SYMBOL_MAX_LENGTH: usize = 6; + +pub type ZkAppUri = BoundedCharString; +pub type TokenSymbol = BoundedCharString; + /// String of bytes. 
#[derive(Clone, Default, PartialEq, Eq, PartialOrd, Ord)] -pub struct ByteString(pub Vec); +pub struct BoundedByteString(pub Vec, PhantomData<[u8; MAX_LENGTH]>); -impl std::fmt::Debug for ByteString { +impl std::fmt::Debug for BoundedByteString { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - let Self(inner) = self; + let Self(inner, _) = self; // Avoid vertical alignment - f.write_fmt(format_args!("ByteString({:?})", inner)) + f.write_fmt(format_args!("BoundedByteString<{MAX_LENGTH}>({:?})", inner)) } } -impl std::ops::Deref for ByteString { +impl std::ops::Deref for BoundedByteString { type Target = Vec; fn deref(&self) -> &Self::Target { @@ -24,47 +37,47 @@ impl std::ops::Deref for ByteString { } } -impl AsRef<[u8]> for ByteString { +impl AsRef<[u8]> for BoundedByteString { fn as_ref(&self) -> &[u8] { &self.0 } } -impl From> for ByteString { +impl From> for BoundedByteString { fn from(source: Vec) -> Self { - Self(source) + Self(source, PhantomData) } } -impl From<&[u8]> for ByteString { +impl From<&[u8]> for BoundedByteString { fn from(source: &[u8]) -> Self { - Self(source.to_vec()) + Self(source.to_vec(), PhantomData) } } -impl From<&str> for ByteString { +impl From<&str> for BoundedByteString { fn from(source: &str) -> Self { - Self(source.as_bytes().to_vec()) + Self(source.as_bytes().to_vec(), PhantomData) } } -impl TryFrom for String { +impl TryFrom> for String { type Error = std::string::FromUtf8Error; - fn try_from(value: ByteString) -> Result { + fn try_from(value: BoundedByteString) -> Result { String::from_utf8(value.0) } } -impl TryFrom<&ByteString> for String { +impl TryFrom<&BoundedByteString> for String { type Error = std::string::FromUtf8Error; - fn try_from(value: &ByteString) -> Result { + fn try_from(value: &BoundedByteString) -> Result { String::from_utf8(value.0.clone()) } } -impl Serialize for ByteString { +impl Serialize for BoundedByteString { fn serialize(&self, serializer: S) -> Result where S: serde::Serializer, 
@@ -76,13 +89,13 @@ impl Serialize for ByteString { } } -impl<'de> Deserialize<'de> for ByteString { +impl<'de, const MAX_LENGTH: usize> Deserialize<'de> for BoundedByteString { fn deserialize(deserializer: D) -> Result where D: serde::Deserializer<'de>, { if !deserializer.is_human_readable() { - return Vec::::deserialize(deserializer).map(Self); + return Vec::::deserialize(deserializer).map(|bs| Self(bs, PhantomData)); } struct V; impl<'de> Visitor<'de> for V { @@ -103,28 +116,30 @@ impl<'de> Deserialize<'de> for ByteString { .map_err(|_| serde::de::Error::custom("failed to decode hex str".to_string())) } } - deserializer.deserialize_str(V).map(Self) + deserializer + .deserialize_str(V) + .map(|bs| Self(bs, PhantomData)) } } -impl binprot::BinProtRead for ByteString { +impl binprot::BinProtRead for BoundedByteString { fn binprot_read(r: &mut R) -> Result where Self: Sized, { let len = Nat0::binprot_read(r)?.0 as usize; - if len > MINA_STRING_MAX_LENGTH { - return Err(MinaStringTooLong::as_binprot_err(len)); + if len > MAX_LENGTH { + return Err(MinaStringTooLong::as_binprot_err(MAX_LENGTH, len)); } - Ok(Self(maybe_read_in_chunks(len, r)?)) + Ok(Self(maybe_read_in_chunks(len, r)?, PhantomData)) } } -impl binprot::BinProtWrite for ByteString { +impl binprot::BinProtWrite for BoundedByteString { fn binprot_write(&self, w: &mut W) -> std::io::Result<()> { - if self.0.len() > MINA_STRING_MAX_LENGTH { - return Err(MinaStringTooLong::as_io_err(self.0.len())); + if self.0.len() > MAX_LENGTH { + return Err(MinaStringTooLong::as_io_err(MAX_LENGTH, self.0.len())); } Nat0(self.0.len() as u64).binprot_write(w)?; w.write_all(&self.0)?; @@ -134,65 +149,81 @@ impl binprot::BinProtWrite for ByteString { /// Human-readable string. 
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Default)] -pub struct CharString(Vec); +pub struct BoundedCharString(Vec, PhantomData<[u8; MAX_LENGTH]>); -impl std::fmt::Debug for CharString { +impl std::fmt::Debug for BoundedCharString { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - let Self(inner) = self; + let Self(inner, _) = self; // Avoid vertical alignment - f.write_fmt(format_args!("CharString({:?})", inner)) + f.write_fmt(format_args!("BoundedCharString({:?})", inner)) } } -impl CharString { +impl BoundedCharString { pub fn to_string_lossy(&self) -> std::string::String { std::string::String::from_utf8_lossy(&self.0).into_owned() } } -impl AsRef<[u8]> for CharString { +impl AsRef<[u8]> for BoundedCharString { fn as_ref(&self) -> &[u8] { self.0.as_ref() } } -impl From> for CharString { +impl From> for BoundedCharString { fn from(source: Vec) -> Self { - Self(source) + Self(source, PhantomData) } } -impl From<&[u8]> for CharString { +impl From<&[u8]> for BoundedCharString { fn from(source: &[u8]) -> Self { - Self(source.to_vec()) + Self(source.to_vec(), PhantomData) } } -impl From<&str> for CharString { +impl From<&str> for BoundedCharString { fn from(source: &str) -> Self { - Self(source.as_bytes().to_vec()) + Self(source.as_bytes().to_vec(), PhantomData) + } +} + +impl TryFrom> for String { + type Error = std::string::FromUtf8Error; + + fn try_from(value: BoundedCharString) -> Result { + String::from_utf8(value.0) } } -impl PartialEq<[u8]> for CharString { +impl TryFrom<&BoundedCharString> for String { + type Error = std::string::FromUtf8Error; + + fn try_from(value: &BoundedCharString) -> Result { + String::from_utf8(value.0.clone()) + } +} + +impl PartialEq<[u8]> for BoundedCharString { fn eq(&self, other: &[u8]) -> bool { self.as_ref() == other } } -impl PartialEq for CharString { +impl PartialEq for BoundedCharString { fn eq(&self, other: &str) -> bool { self.as_ref() == other.as_bytes() } } -impl std::fmt::Display for 
CharString { +impl std::fmt::Display for BoundedCharString { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { write!(f, "{}", self.to_string_lossy()) } } -impl Serialize for CharString { +impl Serialize for BoundedCharString { fn serialize(&self, serializer: S) -> Result where S: serde::Serializer, @@ -208,13 +239,13 @@ impl Serialize for CharString { } } -impl<'de> Deserialize<'de> for CharString { +impl<'de, const MAX_LENGTH: usize> Deserialize<'de> for BoundedCharString { fn deserialize(deserializer: D) -> Result where D: serde::Deserializer<'de>, { if !deserializer.is_human_readable() { - return Vec::::deserialize(deserializer).map(Self); + return Vec::::deserialize(deserializer).map(|cs| Self(cs, PhantomData)); } struct V; impl<'de> Visitor<'de> for V { @@ -234,28 +265,30 @@ impl<'de> Deserialize<'de> for CharString { Ok(v.as_bytes().to_vec()) } } - deserializer.deserialize_str(V).map(Self) + deserializer + .deserialize_str(V) + .map(|cs| Self(cs, PhantomData)) } } -impl binprot::BinProtRead for CharString { +impl binprot::BinProtRead for BoundedCharString { fn binprot_read(r: &mut R) -> Result where Self: Sized, { let len = Nat0::binprot_read(r)?.0 as usize; - if len > MINA_STRING_MAX_LENGTH { - return Err(MinaStringTooLong::as_binprot_err(len)); + if len > MAX_LENGTH { + return Err(MinaStringTooLong::as_binprot_err(MAX_LENGTH, len)); } - Ok(Self(maybe_read_in_chunks(len, r)?)) + Ok(Self(maybe_read_in_chunks(len, r)?, PhantomData)) } } -impl binprot::BinProtWrite for CharString { +impl binprot::BinProtWrite for BoundedCharString { fn binprot_write(&self, w: &mut W) -> std::io::Result<()> { - if self.0.len() > MINA_STRING_MAX_LENGTH { - return Err(MinaStringTooLong::as_io_err(self.0.len())); + if self.0.len() > MAX_LENGTH { + return Err(MinaStringTooLong::as_io_err(MAX_LENGTH, self.0.len())); } Nat0(self.0.len() as u64).binprot_write(w)?; w.write_all(&self.0)?; @@ -297,22 +330,19 @@ pub struct MinaStringTooLong { } impl MinaStringTooLong { - fn 
boxed(actual: usize) -> Box { - Box::new(MinaStringTooLong { - max: MINA_STRING_MAX_LENGTH, - actual, - }) + fn boxed(max: usize, actual: usize) -> Box { + Box::new(MinaStringTooLong { max, actual }) } - fn as_io_err(actual: usize) -> std::io::Error { + fn as_io_err(max: usize, actual: usize) -> std::io::Error { std::io::Error::new( std::io::ErrorKind::InvalidData, - MinaStringTooLong::boxed(actual), + MinaStringTooLong::boxed(max, actual), ) } - fn as_binprot_err(actual: usize) -> binprot::Error { - binprot::Error::CustomError(MinaStringTooLong::boxed(actual)) + fn as_binprot_err(max: usize, actual: usize) -> binprot::Error { + binprot::Error::CustomError(MinaStringTooLong::boxed(max, actual)) } } @@ -322,9 +352,31 @@ mod tests { use binprot::{BinProtRead, BinProtWrite, Nat0}; - use crate::string::CharString; + use super::{ByteString, CharString, MINA_STRING_MAX_LENGTH}; - use super::{ByteString, MINA_STRING_MAX_LENGTH}; + #[test] + fn bounded_string_serialize_deserialize() { + let valid_str = "a".repeat(MINA_STRING_MAX_LENGTH); // max-length string + let valid_uri = CharString::from(valid_str.as_str()); + let serialized = serde_json::to_string(&valid_uri).unwrap(); + let deserialized: CharString = serde_json::from_str(&serialized).unwrap(); + assert_eq!(deserialized.to_string_lossy(), valid_str); + + let invalid_str = "a".repeat(MINA_STRING_MAX_LENGTH + 1); // exceeding max-length string + let invalid_uri = CharString::from(invalid_str.as_str()); + let result = serde_json::to_string(&invalid_uri); + assert!( + result.is_err(), + "Expected serialization to fail for string longer than 255 bytes" + ); + + let invalid_json = format!("\"{}\"", "a".repeat(MINA_STRING_MAX_LENGTH + 1)); + let deserialization_result: Result = serde_json::from_str(&invalid_json); + assert!( + deserialization_result.is_err(), + "Expected deserialization to fail for string longer than 255 bytes" + ); + } #[test] fn bounded_string_binprot_write() { diff --git 
a/mina-p2p-messages/src/v2/generated.rs b/mina-p2p-messages/src/v2/generated.rs index 2189e2bdfc..a8815ed3ce 100644 --- a/mina-p2p-messages/src/v2/generated.rs +++ b/mina-p2p-messages/src/v2/generated.rs @@ -1456,10 +1456,21 @@ pub enum MinaBaseAccountUpdateUpdateStableV1AppStateA { /// /// Gid: `766` /// Location: [src/lib/mina_base/zkapp_basic.ml:100:6](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/mina_base/zkapp_basic.ml#L100) -/// Args: crate :: string :: ByteString +/// Args: crate :: string :: ZkAppUri #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite)] pub enum MinaBaseAccountUpdateUpdateStableV1ZkappUri { - Set(crate::string::ByteString), + Set(crate::string::ZkAppUri), + Keep, +} + +/// Derived name: `Mina_base__Account_update.Update.Stable.V1.token_symbol` +/// +/// Gid: `766` +/// Location: [src/lib/mina_base/zkapp_basic.ml:100:6](https://github.com/MinaProtocol/mina/blob/1551e2faaa/src/lib/mina_base/zkapp_basic.ml#L100) +/// Args: crate :: string :: TokenSymbol +#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, BinProtRead, BinProtWrite)] +pub enum MinaBaseAccountUpdateUpdateStableV1TokenSymbol { + Set(crate::string::TokenSymbol), Keep, } @@ -1571,7 +1582,7 @@ pub struct MinaBaseZkappAccountStableV2 { pub action_state: PaddedSeq, pub last_action_slot: MinaNumbersGlobalSlotSinceGenesisMStableV1, pub proved_state: bool, - pub zkapp_uri: crate::string::ByteString, + pub zkapp_uri: crate::string::ZkAppUri, } /// **OCaml name**: `Mina_base__Account.Index.Stable.V1` @@ -1830,7 +1841,7 @@ pub struct MinaBaseAccountUpdateUpdateStableV1 { pub verification_key: MinaBaseAccountUpdateUpdateStableV1VerificationKey, pub permissions: MinaBaseAccountUpdateUpdateStableV1Permissions, pub zkapp_uri: MinaBaseAccountUpdateUpdateStableV1ZkappUri, - pub token_symbol: MinaBaseAccountUpdateUpdateStableV1ZkappUri, + pub token_symbol: MinaBaseAccountUpdateUpdateStableV1TokenSymbol, pub timing: 
MinaBaseAccountUpdateUpdateStableV1Timing, pub voting_for: MinaBaseAccountUpdateUpdateStableV1VotingFor, } diff --git a/node/Cargo.toml b/node/Cargo.toml index 5a621754d6..02cc6ba3ad 100644 --- a/node/Cargo.toml +++ b/node/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "node" -version = "0.8.1" +version = "0.8.2" edition = "2021" license = "Apache-2.0" @@ -27,7 +27,7 @@ vrf = { workspace = true } openmina-core = { path = "../core" } snark = { path = "../snark" } p2p = { path = "../p2p" } -openmina-node-account = { path = "./account" } +openmina-node-account = { workspace = true } tokio = { version = "1.26.0" } postcard = { version = "1.0.8", features = ["use-std"] } static_assertions.workspace = true diff --git a/node/account/Cargo.toml b/node/account/Cargo.toml index b7c4eef5bc..ba47e5146c 100644 --- a/node/account/Cargo.toml +++ b/node/account/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "openmina-node-account" -version = "0.8.1" +version = "0.8.2" edition = "2021" license = "Apache-2.0" diff --git a/node/account/src/secret_key.rs b/node/account/src/secret_key.rs index 4a61a9ce7f..3e21932670 100644 --- a/node/account/src/secret_key.rs +++ b/node/account/src/secret_key.rs @@ -1,4 +1,4 @@ -use std::{env, fmt, fs, path::Path, str::FromStr}; +use std::{fmt, fs, path::Path, str::FromStr}; use argon2::{password_hash::SaltString, Argon2, Params, PasswordHasher}; use base64::Engine; @@ -73,19 +73,26 @@ impl AccountSecretKey { self.0.public.clone().into_compressed() } - pub fn from_encrypted_file(path: impl AsRef) -> Result { + pub fn from_encrypted_file( + path: impl AsRef, + password: &str, + ) -> Result { let key_file = fs::File::open(path)?; let encrypted: EncryptedSecretKey = serde_json::from_reader(key_file)?; - encrypted.try_decrypt() + encrypted.try_decrypt(password) } - pub fn to_encrypted_file(&self, path: impl AsRef) -> Result<(), EncryptionError> { + pub fn to_encrypted_file( + &self, + path: impl AsRef, + password: &str, + ) -> Result<(), EncryptionError> { if 
path.as_ref().exists() { panic!("File {} already exists", path.as_ref().display()) } let f = fs::File::create(path)?; - let encrypted = EncryptedSecretKey::encrypt(&self.to_bytes())?; + let encrypted = EncryptedSecretKey::encrypt(&self.to_bytes(), password)?; serde_json::to_writer(f, &encrypted)?; Ok(()) @@ -188,8 +195,6 @@ pub enum EncryptionError { Io(#[from] std::io::Error), #[error(transparent)] SerdeJson(#[from] serde_json::Error), - #[error("MINA_PRIVKEY_PASS environment variable must be set!")] - PasswordEnvVarMissing, } #[derive(Serialize, Deserialize, Debug)] @@ -222,10 +227,8 @@ impl EncryptedSecretKey { )) } - pub fn try_decrypt(&self) -> Result { + pub fn try_decrypt(&self, password: &str) -> Result { // prepare inputs to cipher - let password = - env::var("MINA_PRIVKEY_PASS").map_err(|_| EncryptionError::PasswordEnvVarMissing)?; let password = password.as_bytes(); let pwsalt = self.pwsalt.try_decode(Self::ENCRYPTION_DATA_VERSION_BYTE)?; let nonce = self.nonce.try_decode(Self::ENCRYPTION_DATA_VERSION_BYTE)?; @@ -253,14 +256,12 @@ impl EncryptedSecretKey { Ok(AccountSecretKey::from_bytes(&decrypted[1..])?) 
} - pub fn encrypt(key: &[u8]) -> Result { + pub fn encrypt(key: &[u8], password: &str) -> Result { let argon2 = Self::setup_argon(Self::PW_DIFF)?; // add the prefix byt to the key let mut key_prefixed = vec![Self::SECRET_KEY_PREFIX_BYTE]; key_prefixed.extend(key); - let password = - env::var("MINA_PRIVKEY_PASS").map_err(|_| EncryptionError::PasswordEnvVarMissing)?; let salt = SaltString::generate(&mut OsRng); let password_hash = argon2 @@ -290,6 +291,8 @@ impl EncryptedSecretKey { #[cfg(test)] mod tests { + use std::env; + use super::*; #[test] @@ -316,18 +319,19 @@ mod tests { #[test] fn test_encrypt_decrypt() { - env::set_var("MINA_PRIVKEY_PASS", "not-very-secure-pass"); + let password = "not-very-secure-pass"; + let new_key = AccountSecretKey::rand(); let tmp_dir = env::temp_dir(); let tmp_path = format!("{}/{}-key", tmp_dir.display(), new_key.public_key()); // dump encrypted file new_key - .to_encrypted_file(&tmp_path) + .to_encrypted_file(&tmp_path, password) .expect("Failed to encrypt secret key"); // load and decrypt - let decrypted = AccountSecretKey::from_encrypted_file(&tmp_path) + let decrypted = AccountSecretKey::from_encrypted_file(&tmp_path, password) .expect("Failed to decrypt secret key file"); assert_eq!( @@ -339,10 +343,10 @@ mod tests { #[test] fn test_ocaml_key_decrypt() { - env::set_var("MINA_PRIVKEY_PASS", "not-very-secure-pass"); + let password = "not-very-secure-pass"; let key_path = "../tests/files/accounts/test-key-1"; let expected_public_key = "B62qmg7n4XqU3SFwx9KD9B7gxsKwxJP5GmxtBpHp1uxyN3grujii9a1"; - let decrypted = AccountSecretKey::from_encrypted_file(key_path) + let decrypted = AccountSecretKey::from_encrypted_file(key_path, password) .expect("Failed to decrypt secret key file"); assert_eq!( diff --git a/node/common/Cargo.toml b/node/common/Cargo.toml index 7a66a74762..e05a4a103f 100644 --- a/node/common/Cargo.toml +++ b/node/common/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "openmina-node-common" -version = "0.8.1" +version = 
"0.8.2" edition = "2021" license = "Apache-2.0" diff --git a/node/common/src/service/block_producer/vrf_evaluator.rs b/node/common/src/service/block_producer/vrf_evaluator.rs index 7179bfcfa6..837a582a30 100644 --- a/node/common/src/service/block_producer/vrf_evaluator.rs +++ b/node/common/src/service/block_producer/vrf_evaluator.rs @@ -24,7 +24,7 @@ pub fn vrf_evaluator( let vrf_input = VrfEvaluationInput::new( keypair.clone(), vrf_evaluator_input.epoch_seed.clone(), - pub_key.to_string(), + pub_key.clone(), vrf_evaluator_input.global_slot, *index, (*stake).into(), diff --git a/node/common/src/service/rpc/mod.rs b/node/common/src/service/rpc/mod.rs index c608728a47..5d59e9b6d8 100644 --- a/node/common/src/service/rpc/mod.rs +++ b/node/common/src/service/rpc/mod.rs @@ -1,6 +1,7 @@ mod sender; pub use sender::RpcSender; +pub mod state; pub mod stats; use node::rpc::{ diff --git a/node/common/src/service/rpc/sender.rs b/node/common/src/service/rpc/sender.rs index 2ca82bd0b1..978f398a64 100644 --- a/node/common/src/service/rpc/sender.rs +++ b/node/common/src/service/rpc/sender.rs @@ -8,6 +8,7 @@ use node::core::channels::{mpsc, oneshot}; use node::p2p::connection::outgoing::P2pConnectionOutgoingInitOpts; use node::rpc::*; +use super::state::State; use super::stats::Stats; use super::NodeRpcRequest; @@ -68,6 +69,10 @@ impl RpcSender { #[cfg_attr(target_family = "wasm", wasm_bindgen)] impl RpcSender { + pub fn state(&self) -> State { + State::new(self.clone()) + } + pub fn stats(&self) -> Stats { Stats::new(self.clone()) } diff --git a/node/common/src/service/rpc/state.rs b/node/common/src/service/rpc/state.rs new file mode 100644 index 0000000000..5fec6f009e --- /dev/null +++ b/node/common/src/service/rpc/state.rs @@ -0,0 +1,41 @@ +#[cfg(target_family = "wasm")] +use gloo_utils::format::JsValueSerdeExt; +#[cfg(target_family = "wasm")] +use node::rpc::*; +#[cfg(target_family = "wasm")] +use wasm_bindgen::prelude::*; + +use super::RpcSender; + +#[derive(Clone)] 
+#[cfg_attr(target_family = "wasm", wasm_bindgen)] +pub struct State { + #[allow(unused)] + sender: RpcSender, +} + +impl State { + pub fn new(sender: RpcSender) -> Self { + Self { sender } + } +} + +#[cfg(target_family = "wasm")] +#[cfg_attr(target_family = "wasm", wasm_bindgen)] +impl State { + pub async fn peers(&self) -> JsValue { + let res = self + .sender + .oneshot_request::(RpcRequest::PeersGet) + .await; + JsValue::from_serde(&res).unwrap_or_default() + } + + pub async fn message_progress(&self) -> JsValue { + let res = self + .sender + .oneshot_request::(RpcRequest::MessageProgressGet) + .await; + JsValue::from_serde(&res).unwrap_or_default() + } +} diff --git a/node/common/src/service/rpc/stats.rs b/node/common/src/service/rpc/stats.rs index ef6756e211..4422810ca1 100644 --- a/node/common/src/service/rpc/stats.rs +++ b/node/common/src/service/rpc/stats.rs @@ -1,7 +1,7 @@ #[cfg(target_family = "wasm")] use gloo_utils::format::JsValueSerdeExt; #[cfg(target_family = "wasm")] -use node::rpc::{RpcBlockProducerStatsGetResponse, RpcRequest}; +use node::rpc::*; #[cfg(target_family = "wasm")] use wasm_bindgen::prelude::*; @@ -23,6 +23,16 @@ impl Stats { #[cfg(target_family = "wasm")] #[cfg_attr(target_family = "wasm", wasm_bindgen)] impl Stats { + pub async fn sync(&self, limit: Option) -> JsValue { + let query = SyncStatsQuery { limit }; + let res = self + .sender + .oneshot_request::(RpcRequest::SyncStatsGet(query)) + .await + .flatten(); + JsValue::from_serde(&res).unwrap_or_default() + } + pub async fn block_producer(&self) -> JsValue { let res = self .sender diff --git a/node/invariants/Cargo.toml b/node/invariants/Cargo.toml index 8ad1bdec31..97159f692b 100644 --- a/node/invariants/Cargo.toml +++ b/node/invariants/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "openmina-node-invariants" -version = "0.8.1" +version = "0.8.2" edition = "2021" license = "Apache-2.0" diff --git a/node/native/Cargo.toml b/node/native/Cargo.toml index fe9d787e90..356aa24c0f 100644 
--- a/node/native/Cargo.toml +++ b/node/native/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "openmina-node-native" -version = "0.8.1" +version = "0.8.2" edition = "2021" license = "Apache-2.0" diff --git a/node/native/src/http_server.rs b/node/native/src/http_server.rs index b319c15733..458b932a27 100644 --- a/node/native/src/http_server.rs +++ b/node/native/src/http_server.rs @@ -10,12 +10,7 @@ use warp::{ }; use node::core::snark::SnarkJobId; -use node::rpc::{ - ActionStatsQuery, RpcBlockProducerStatsGetResponse, RpcMessageProgressResponse, RpcPeerInfo, - RpcRequest, RpcScanStateSummaryGetQuery, RpcScanStateSummaryGetResponse, - RpcSnarkPoolJobGetResponse, RpcSnarkerWorkersResponse, RpcStateGetError, RpcStatusGetResponse, - SyncStatsQuery, -}; +use node::rpc::*; use openmina_node_common::rpc::{ RpcActionStatsGetResponse, RpcSender, RpcSnarkPoolGetResponse, RpcSnarkerJobCommitResponse, @@ -154,8 +149,9 @@ pub async fn run(port: u16, rpc_sender: RpcSender) { .then(move || { let rpc_sender_clone = rpc_sender_clone.clone(); async move { - let result: Option> = - rpc_sender_clone.oneshot_request(RpcRequest::PeersGet).await; + let result = rpc_sender_clone + .oneshot_request::(RpcRequest::PeersGet) + .await; with_json_reply(&result, StatusCode::OK) } @@ -295,7 +291,8 @@ pub async fn run(port: u16, rpc_sender: RpcSender) { &"response channel dropped", StatusCode::INTERNAL_SERVER_ERROR, ), - Some(resp) => with_json_reply(&resp, StatusCode::OK), + Some(Err(err)) => with_json_reply(&err, StatusCode::INTERNAL_SERVER_ERROR), + Some(Ok(data)) => with_json_reply(&data, StatusCode::OK), } } }); diff --git a/node/native/src/node/builder.rs b/node/native/src/node/builder.rs index 255873aedf..c01bbd322d 100644 --- a/node/native/src/node/builder.rs +++ b/node/native/src/node/builder.rs @@ -180,8 +180,9 @@ impl NodeBuilder { pub fn block_producer_from_file( &mut self, path: impl AsRef, + password: &str, ) -> anyhow::Result<&mut Self> { - let key = 
AccountSecretKey::from_encrypted_file(path) + let key = AccountSecretKey::from_encrypted_file(path, password) .context("Failed to decrypt secret key file")?; Ok(self.block_producer(key)) } diff --git a/node/src/block_producer/block_producer_actions.rs b/node/src/block_producer/block_producer_actions.rs index 4eb73fe74f..f64d3f352a 100644 --- a/node/src/block_producer/block_producer_actions.rs +++ b/node/src/block_producer/block_producer_actions.rs @@ -34,6 +34,12 @@ pub enum BlockProducerAction { WonSlot { won_slot: BlockProducerWonSlot, }, + #[action_event( + level = info, + fields( + reason = format!("{reason:?}"), + ) + )] WonSlotDiscard { reason: BlockProducerWonSlotDiscardReason, }, @@ -89,7 +95,7 @@ impl redux::EnablingCondition for BlockProducerAction { } this.current.won_slot_should_search() - && won_slot.global_slot() >= state.cur_global_slot().unwrap() + && Some(won_slot.global_slot()) >= state.cur_global_slot() && won_slot > best_tip }), BlockProducerAction::WonSlotWait => state diff --git a/node/src/block_producer/block_producer_effects.rs b/node/src/block_producer/block_producer_effects.rs index 0786fe9699..f36ff3bbd8 100644 --- a/node/src/block_producer/block_producer_effects.rs +++ b/node/src/block_producer/block_producer_effects.rs @@ -143,7 +143,7 @@ pub fn block_producer_effects( let transactions_by_fee = state.block_producer.pending_transactions(); - if store.dispatch(LedgerWriteAction::Init { + store.dispatch(LedgerWriteAction::Init { request: LedgerWriteRequest::StagedLedgerDiffCreate { pred_block: pred_block.clone(), global_slot_since_genesis: won_slot @@ -155,9 +155,12 @@ pub fn block_producer_effects( supercharge_coinbase, transactions_by_fee, }, - }) { - store.dispatch(BlockProducerAction::StagedLedgerDiffCreatePending); - } + on_init: redux::callback!( + on_staged_ledger_diff_create_init(_request: LedgerWriteRequest) -> crate::Action { + BlockProducerAction::StagedLedgerDiffCreatePending + } + ), + }); } 
BlockProducerAction::StagedLedgerDiffCreatePending => {} BlockProducerAction::StagedLedgerDiffCreateSuccess { .. } => { @@ -278,7 +281,14 @@ pub fn block_producer_effects( return; }; + let previous_root_snarked_ledger_hash = store + .state() + .transition_frontier + .root() + .map(|b| b.snarked_ledger_hash().clone()); + if store.dispatch(TransitionFrontierSyncAction::BestTipUpdate { + previous_root_snarked_ledger_hash, best_tip: best_tip.clone(), root_block, blocks_inbetween, diff --git a/node/src/block_producer/block_producer_reducer.rs b/node/src/block_producer/block_producer_reducer.rs index 06ac88b5b6..0c92c62e7a 100644 --- a/node/src/block_producer/block_producer_reducer.rs +++ b/node/src/block_producer/block_producer_reducer.rs @@ -46,6 +46,7 @@ impl BlockProducerEnabled { self.vrf_evaluator.reducer(meta.with_action(action)) } BlockProducerAction::BestTipUpdate { best_tip } => { + self.injected_blocks.remove(best_tip.hash()); // set the genesis timestamp on the first best tip update // TODO: move/remove once we can generate the genesis block if self.vrf_evaluator.genesis_timestamp == redux::Timestamp::ZERO { @@ -421,12 +422,18 @@ impl BlockProducerEnabled { chain_proof_len => { // TODO(binier): test let mut iter = chain.iter().rev().take(chain_proof_len + 1).rev(); - let first_hash = iter.next().unwrap().hash().clone(); - let body_hashes = iter - .map(|b| b.header().protocol_state.body.hash()) - .map(StateBodyHash::from) - .collect(); - (first_hash, body_hashes) + if let Some(first_block) = iter.next() { + let first_hash = first_block.hash().clone(); + let body_hashes = iter + .map(|b| b.header().protocol_state.body.hash()) + .map(StateBodyHash::from) + .collect(); + (first_hash, body_hashes) + } else { + // TODO: test this as well + // If the chain is empty, return the same as when chain_proof_len is 0 + (pred_block.hash().clone(), List::new()) + } } }; @@ -526,6 +533,7 @@ impl BlockProducerEnabled { .. 
} = &mut self.current { + self.injected_blocks.insert(block.hash().clone()); self.current = BlockProducerCurrentState::Injected { time: meta.time(), won_slot: won_slot.clone(), diff --git a/node/src/block_producer/block_producer_state.rs b/node/src/block_producer/block_producer_state.rs index 61ff93a69f..fe83508dc0 100644 --- a/node/src/block_producer/block_producer_state.rs +++ b/node/src/block_producer/block_producer_state.rs @@ -1,4 +1,4 @@ -use std::time::Duration; +use std::{collections::BTreeSet, time::Duration}; use ledger::scan_state::transaction_logic::valid; use mina_p2p_messages::v2; @@ -20,6 +20,9 @@ pub struct BlockProducerEnabled { pub config: BlockProducerConfig, pub vrf_evaluator: BlockProducerVrfEvaluatorState, pub current: BlockProducerCurrentState, + /// Blocks that were injected into transition frontier, but hasn't + /// become our best tip yet. + pub injected_blocks: BTreeSet, } #[derive(Serialize, Deserialize, Debug, Clone)] @@ -142,6 +145,7 @@ impl BlockProducerState { config: config.clone(), vrf_evaluator: BlockProducerVrfEvaluatorState::new(now), current: BlockProducerCurrentState::Idle { time: now }, + injected_blocks: Default::default(), })) } @@ -173,6 +177,13 @@ impl BlockProducerState { self.with(false, |this| producer == &this.config.pub_key) } + /// Checks if the block was produced by us recently. + pub fn is_produced_by_me(&self, block: &ArcBlockWithHash) -> bool { + self.with(false, |this| { + block.producer() == &this.config.pub_key && this.injected_blocks.contains(block.hash()) + }) + } + pub fn is_producing(&self) -> bool { self.with(false, |this| this.current.is_producing()) } @@ -275,15 +286,7 @@ impl BlockProducerCurrentState { } pub fn won_slot_should_wait(&self, now: redux::Timestamp) -> bool { - let slot_interval = Duration::from_secs(3 * 60).as_nanos() as u64; - match self { - Self::WonSlot { won_slot, .. 
} => { - // Make sure to only producer blocks when in the slot interval - let slot_upper_bound = won_slot.slot_time + slot_interval; - now < won_slot.slot_time && now >= slot_upper_bound - } - _ => false, - } + matches!(self, Self::WonSlot { .. }) && !self.won_slot_should_produce(now) } pub fn won_slot_should_produce(&self, now: redux::Timestamp) -> bool { diff --git a/node/src/block_producer/mod.rs b/node/src/block_producer/mod.rs index 574d54ccb0..0e20332b28 100644 --- a/node/src/block_producer/mod.rs +++ b/node/src/block_producer/mod.rs @@ -22,13 +22,10 @@ pub use block_producer_service::*; use ledger::AccountIndex; use mina_p2p_messages::{list::List, v2}; -use mina_signer::CompressedPubKey; use openmina_core::block::ArcBlockWithHash; use serde::{Deserialize, Serialize}; use vrf::output::VrfOutput; -use crate::account::AccountPublicKey; - use self::vrf_evaluator::VrfWonSlotWithHash; #[derive(Serialize, Deserialize, Debug, PartialEq, Clone)] @@ -63,10 +60,10 @@ impl BlockProducerWonSlot { let slot_time = Self::calculate_slot_time(genesis_timestamp, won_slot.global_slot); - let winner_pub_key = AccountPublicKey::from( - CompressedPubKey::from_address(&won_slot.winner_account).unwrap(), + let delegator = ( + won_slot.winner_account.clone().into(), + won_slot.account_index, ); - let delegator = (winner_pub_key.into(), won_slot.account_index); let global_slot = v2::ConsensusGlobalSlotStableV1 { slot_number: v2::MinaNumbersGlobalSlotSinceHardForkMStableV1::SinceHardFork( won_slot.global_slot.into(), @@ -200,6 +197,7 @@ pub fn calc_epoch_seed( prev_epoch_seed: &v2::EpochSeed, vrf_hash: mina_hasher::Fp, ) -> v2::EpochSeed { + // TODO(adonagy): fix this unwrap let old_seed = prev_epoch_seed.to_fp().unwrap(); let new_seed = ledger::hash_with_kimchi("MinaEpochSeed", &[old_seed, vrf_hash]); v2::MinaBaseEpochSeedStableV1(new_seed.into()).into() diff --git a/node/src/block_producer/vrf_evaluator/block_producer_vrf_evaluator_effects.rs 
b/node/src/block_producer/vrf_evaluator/block_producer_vrf_evaluator_effects.rs index 2b9720182a..634f21d4ff 100644 --- a/node/src/block_producer/vrf_evaluator/block_producer_vrf_evaluator_effects.rs +++ b/node/src/block_producer/vrf_evaluator/block_producer_vrf_evaluator_effects.rs @@ -74,8 +74,7 @@ impl BlockProducerVrfEvaluatorAction { ); let previous_epoch = epoch.saturating_sub(1); let last_height = if slot < k { - // TODO(adonagy): error handling - store + let found = store .state() .transition_frontier .best_chain @@ -83,12 +82,17 @@ impl BlockProducerVrfEvaluatorAction { .rev() .find(|b| { b.consensus_state().epoch_count.as_u32() == previous_epoch - }) - .unwrap() - .height() + }); + + if let Some(block) = found { + block.height() + } else { + Default::default() + } + } else if let Some(root_block) = store.state().transition_frontier.root() { + root_block.height() } else { - // TODO(adonagy): error handling - store.state().transition_frontier.root().unwrap().height() + Default::default() }; store.dispatch( BlockProducerVrfEvaluatorAction::FinalizeEvaluatorInitialization { diff --git a/node/src/block_producer/vrf_evaluator/block_producer_vrf_evaluator_state.rs b/node/src/block_producer/vrf_evaluator/block_producer_vrf_evaluator_state.rs index b9ba6b4259..dfa39c155d 100644 --- a/node/src/block_producer/vrf_evaluator/block_producer_vrf_evaluator_state.rs +++ b/node/src/block_producer/vrf_evaluator/block_producer_vrf_evaluator_state.rs @@ -646,6 +646,7 @@ mod test { UnsignedExtendedUInt64Int64ForVersionTagsStableV1, }, }; + use openmina_node_account::AccountSecretKey; use vrf::VrfWonSlot; use crate::block_producer::vrf_evaluator::{ @@ -951,8 +952,8 @@ mod test { (start_slot..=end_slot).map(move |slot| { let dummy_won_slot = VrfWonSlot { - producer: "Dummy".to_string(), - winner_account: "Dummy".to_string(), + producer: AccountSecretKey::genesis_producer().public_key(), + winner_account: AccountSecretKey::genesis_producer().public_key(), vrf_output: Box::new( 
vrf::genesis_vrf(EpochSeed::from(MinaBaseEpochSeedStableV1(BigInt::zero()))) .unwrap(), diff --git a/node/src/consensus/consensus_reducer.rs b/node/src/consensus/consensus_reducer.rs index a76423c0a4..90cdb314c7 100644 --- a/node/src/consensus/consensus_reducer.rs +++ b/node/src/consensus/consensus_reducer.rs @@ -299,7 +299,13 @@ fn transition_frontier_new_best_tip_handler( blocks_inbetween, }); } else { + let previous_root_snarked_ledger_hash = state + .transition_frontier + .root() + .map(|b| b.snarked_ledger_hash().clone()); + dispatcher.push(TransitionFrontierSyncAction::BestTipUpdate { + previous_root_snarked_ledger_hash, best_tip, root_block, blocks_inbetween, diff --git a/node/src/effects.rs b/node/src/effects.rs index 5f65b631b3..6b0b3b4fa0 100644 --- a/node/src/effects.rs +++ b/node/src/effects.rs @@ -122,6 +122,7 @@ fn request_best_tip(store: &mut Store, _consensus_best_tip_hash: peer_id, id, request: Box::new(P2pRpcRequest::BestTipWithProof), + on_init: None, }); } } diff --git a/node/src/ledger/ledger_effects.rs b/node/src/ledger/ledger_effects.rs index 5a9927ed74..62916adba3 100644 --- a/node/src/ledger/ledger_effects.rs +++ b/node/src/ledger/ledger_effects.rs @@ -21,9 +21,10 @@ pub fn ledger_effects(store: &mut Store, action: LedgerActi match action { LedgerAction::Write(a) => match a { - LedgerWriteAction::Init { request } => { - store.service.write_init(request); + LedgerWriteAction::Init { request, on_init } => { + store.service.write_init(request.clone()); store.dispatch(LedgerWriteAction::Pending); + store.dispatch_callback(on_init, request); } LedgerWriteAction::Pending => {} LedgerWriteAction::Success { response } => { diff --git a/node/src/ledger/ledger_manager.rs b/node/src/ledger/ledger_manager.rs index e2bd53d1e4..fbab2a4c09 100644 --- a/node/src/ledger/ledger_manager.rs +++ b/node/src/ledger/ledger_manager.rs @@ -42,7 +42,7 @@ pub(super) enum LedgerRequest { snarked_ledger_hash: LedgerHash, }, // expected response: Success 
CopySnarkedLedgerContentsForSync { - origin_snarked_ledger_hash: LedgerHash, + origin_snarked_ledger_hash: Vec, target_snarked_ledger_hash: LedgerHash, overwrite: bool, }, // expected response: SnarkedLedgerContentsCopied @@ -244,6 +244,16 @@ impl LedgerRequest { target_snarked_ledger_hash, overwrite, } => { + let origin_snarked_ledger_hash = origin_snarked_ledger_hash + .iter() + .find(|hash| ledger_ctx.contains_snarked_ledger(hash)) + .unwrap_or_else(|| { + origin_snarked_ledger_hash + .first() + .expect("origin_snarked_ledger_hash cannot be empty") + }) + .clone(); + let res = ledger_ctx.copy_snarked_ledger_contents_for_sync( origin_snarked_ledger_hash, target_snarked_ledger_hash, @@ -443,7 +453,7 @@ impl TransitionFrontierSyncLedgerSnarkedService for T { fn copy_snarked_ledger_contents_for_sync( &self, - origin_snarked_ledger_hash: LedgerHash, + origin_snarked_ledger_hash: Vec, target_snarked_ledger_hash: LedgerHash, overwrite: bool, ) -> Result { diff --git a/node/src/ledger/ledger_service.rs b/node/src/ledger/ledger_service.rs index e7508f08b2..ef8da84839 100644 --- a/node/src/ledger/ledger_service.rs +++ b/node/src/ledger/ledger_service.rs @@ -205,6 +205,10 @@ impl LedgerCtx { }) } + pub fn contains_snarked_ledger(&self, hash: &LedgerHash) -> bool { + self.snarked_ledgers.contains_key(hash) + } + /// Returns the mask for a snarked ledger being synchronized or an error if it is not present pub fn pending_sync_snarked_ledger_mask(&self, hash: &LedgerHash) -> Result { self.sync.pending_sync_snarked_ledger_mask(hash) @@ -937,11 +941,13 @@ impl LedgerCtx { pub fn scan_state_summary( &self, staged_ledger_hash: LedgerHash, - ) -> Vec> { + ) -> Result>, String> { use ledger::scan_state::scan_state::JobValue; let ledger = self.staged_ledgers.get(&staged_ledger_hash); - let Some(ledger) = ledger else { return vec![] }; + let Some(ledger) = ledger else { + return Ok(Vec::new()); + }; ledger .scan_state() .view() @@ -1021,11 +1027,10 @@ impl LedgerCtx { res.push(if 
is_done { let is_left = bundle.map_or_else(|| true, |(_, is_sibling_left)| !is_sibling_left); - let sok_message: MinaBaseSokMessageStableV1 = job - .parent() - .and_then(|parent| { - let job = jobs.get(parent)?; - let sok_message = match &job.job { + let parent = job.parent().ok_or_else(|| format!("job(depth: {}, index: {}) has no parent", job.depth(), job.index()))?; + let sok_message: MinaBaseSokMessageStableV1 = { + let job = jobs.get(parent).ok_or_else(|| format!("job(depth: {}, index: {}) parent not found", job.depth(), job.index()))?; + match &job.job { JobValue::Node(JobValueMerge::Part(job)) if is_left => { (&job.sok_message).into() } @@ -1036,14 +1041,11 @@ impl LedgerCtx { (&job.right.sok_message).into() } } - state => panic!( - "parent of a `Done` job can't be in this state: {:?}", - state - ), - }; - Some(sok_message) - }) - .unwrap(); + state => { + return Err(format!("parent of a `Done` job can't be in this state: {:?}", state)); + } + } + }; RpcScanStateSummaryScanStateJob::Done { job_id, bundle_job_id, @@ -1063,7 +1065,7 @@ impl LedgerCtx { } }) } - res + Ok(res) }) .collect() } diff --git a/node/src/ledger/read/mod.rs b/node/src/ledger/read/mod.rs index 6e9137c162..926b4633ce 100644 --- a/node/src/ledger/read/mod.rs +++ b/node/src/ledger/read/mod.rs @@ -58,7 +58,7 @@ pub enum LedgerReadResponse { GetChildAccountsAtAddr(Option>), GetStagedLedgerAuxAndPendingCoinbases(Option>), // rpcs - ScanStateSummary(Vec>), + ScanStateSummary(Result>, String>), AccountsForRpc(RpcId, Vec), } diff --git a/node/src/ledger/write/ledger_write_actions.rs b/node/src/ledger/write/ledger_write_actions.rs index 9503b21ae2..38e594f5d3 100644 --- a/node/src/ledger/write/ledger_write_actions.rs +++ b/node/src/ledger/write/ledger_write_actions.rs @@ -7,9 +7,14 @@ pub type LedgerWriteActionWithMetaRef<'a> = redux::ActionWithMeta<&'a LedgerWrit #[derive(Serialize, Deserialize, Debug, Clone)] pub enum LedgerWriteAction { - Init { request: LedgerWriteRequest }, + Init { + request: 
LedgerWriteRequest, + on_init: redux::Callback, + }, Pending, - Success { response: LedgerWriteResponse }, + Success { + response: LedgerWriteResponse, + }, } impl redux::EnablingCondition for LedgerWriteAction { diff --git a/node/src/ledger/write/ledger_write_reducer.rs b/node/src/ledger/write/ledger_write_reducer.rs index 6f036a96f4..fb5c9b0ddf 100644 --- a/node/src/ledger/write/ledger_write_reducer.rs +++ b/node/src/ledger/write/ledger_write_reducer.rs @@ -4,7 +4,10 @@ impl LedgerWriteState { pub fn reducer(&mut self, action: LedgerWriteActionWithMetaRef<'_>) { let (action, meta) = action.split(); match action { - LedgerWriteAction::Init { request } => { + LedgerWriteAction::Init { + request, + on_init: _, + } => { *self = Self::Init { time: meta.time(), request: request.clone(), diff --git a/node/src/logger/logger_effects.rs b/node/src/logger/logger_effects.rs index ddc9f0ce14..4828a3fc43 100644 --- a/node/src/logger/logger_effects.rs +++ b/node/src/logger/logger_effects.rs @@ -8,7 +8,9 @@ use crate::p2p::connection::P2pConnectionAction; use crate::p2p::network::P2pNetworkAction; use crate::p2p::P2pAction; use crate::snark::SnarkAction; -use crate::{Action, ActionWithMetaRef, Service, Store}; +use crate::{ + Action, ActionWithMetaRef, BlockProducerAction, Service, Store, TransitionFrontierAction, +}; struct ActionLoggerContext { time: redux::Timestamp, @@ -80,8 +82,53 @@ pub fn logger_effects(store: &Store, action: ActionWithMetaRef<'_ Action::SnarkPool(action) => action.action_event(&context), Action::Snark(SnarkAction::WorkVerify(a)) => a.action_event(&context), Action::Consensus(a) => a.action_event(&context), - Action::TransitionFrontier(a) => a.action_event(&context), - Action::BlockProducer(a) => a.action_event(&context), + Action::TransitionFrontier(a) => match a { + TransitionFrontierAction::Synced { .. 
} => { + let tip = store.state().transition_frontier.best_tip().unwrap(); + + if store.state().block_producer.is_produced_by_me(tip) { + openmina_core::action_info!( + context, + kind = "BlockProducerBlockIntegrated", + summary = "produced block integrated into frontier", + block_hash = tip.hash().to_string(), + block_height = tip.height(), + ); + } + + openmina_core::action_info!( + context, + kind = action.kind().to_string(), + summary = "transition frontier synced", + block_hash = tip.hash().to_string(), + block_height = tip.height(), + ); + } + a => a.action_event(&context), + }, + Action::BlockProducer(a) => match a { + BlockProducerAction::BlockProduced => { + let block = store.state().block_producer.produced_block().unwrap(); + openmina_core::action_info!( + context, + kind = action.kind().to_string(), + summary = "produced a block", + block_hash = block.hash().to_string(), + block_height = block.height(), + ); + } + BlockProducerAction::BlockInjected => { + let block = store.state().transition_frontier.sync.best_tip().unwrap(); + openmina_core::action_info!( + context, + kind = action.kind().to_string(), + summary = "produced block injected", + block_hash = block.hash().to_string(), + block_height = block.height(), + ); + } + a => a.action_event(&context), + }, Action::Rpc(a) => a.action_event(&context), Action::TransactionPool(a) => a.action_event(&context), _ => {} diff --git a/node/src/p2p/p2p_effects.rs b/node/src/p2p/p2p_effects.rs index 46ef74055d..3fa240391e 100644 --- a/node/src/p2p/p2p_effects.rs +++ b/node/src/p2p/p2p_effects.rs @@ -1,5 +1,6 @@ use mina_p2p_messages::v2::{MinaLedgerSyncLedgerAnswerStableV2, StateHash}; use openmina_core::block::BlockWithHash; +use openmina_core::bug_condition; use p2p::channels::streaming_rpc::{ P2pChannelsStreamingRpcAction, P2pStreamingRpcRequest, P2pStreamingRpcResponseFull, }; @@ -281,6 +282,7 @@ pub fn node_p2p_effects(store: &mut Store, action: P2pActionWithM peer_id, id: 0, request: 
Box::new(P2pRpcRequest::BestTipWithProof), + on_init: None, }); store.dispatch(TransitionFrontierSyncLedgerSnarkedAction::PeersQuery); @@ -289,8 +291,19 @@ pub fn node_p2p_effects(store: &mut Store, action: P2pActionWithM store.dispatch(TransitionFrontierSyncAction::BlocksPeersQuery); } P2pChannelsRpcAction::Timeout { peer_id, id } => { - let peer = store.state().p2p.get_ready_peer(&peer_id).unwrap(); - let rpc_kind = peer.channels.rpc.pending_local_rpc_kind().unwrap(); + let Some(peer) = store.state().p2p.get_ready_peer(&peer_id) else { + bug_condition!("get_ready_peer({:?}) returned None", peer_id); + return; + }; + + let Some(rpc_kind) = peer.channels.rpc.pending_local_rpc_kind() else { + bug_condition!( + "peer: {:?} pending_local_rpc_kind() returned None", + peer_id + ); + return; + }; + store.dispatch( TransitionFrontierSyncLedgerSnarkedAction::PeerQueryAddressError { peer_id, @@ -555,12 +568,18 @@ pub fn node_p2p_effects(store: &mut Store, action: P2pActionWithM .dispatch(TransitionFrontierSyncLedgerStagedAction::PartsPeerFetchInit); } P2pChannelsStreamingRpcAction::Timeout { peer_id, id } => { - let peer = store.state().p2p.get_ready_peer(&peer_id).unwrap(); - let rpc_kind = peer - .channels - .streaming_rpc - .pending_local_rpc_kind() - .unwrap(); + let Some(peer) = store.state().p2p.get_ready_peer(&peer_id) else { + bug_condition!("get_ready_peer({:?}) returned None", peer_id); + return; + }; + let Some(rpc_kind) = peer.channels.streaming_rpc.pending_local_rpc_kind() + else { + bug_condition!( + "peer: {:?} pending_local_rpc_kind() returned None", + peer_id + ); + return; + }; store.dispatch( TransitionFrontierSyncLedgerStagedAction::PartsPeerFetchError { peer_id, diff --git a/node/src/rpc/mod.rs b/node/src/rpc/mod.rs index 79f8fbf798..adebec8452 100644 --- a/node/src/rpc/mod.rs +++ b/node/src/rpc/mod.rs @@ -323,7 +323,7 @@ pub type RpcSyncStatsGetResponse = Option>; pub type RpcBlockProducerStatsGetResponse = Option; pub type RpcPeersGetResponse = Vec; 
pub type RpcP2pConnectionOutgoingResponse = Result<(), String>; -pub type RpcScanStateSummaryGetResponse = Option; +pub type RpcScanStateSummaryGetResponse = Result; pub type RpcSnarkPoolGetResponse = Vec; pub type RpcSnarkPoolJobGetResponse = Option; pub type RpcSnarkerConfigGetResponse = Option; @@ -539,8 +539,8 @@ pub type RpcDiscoveryBoostrapStatsResponse = Option pub mod discovery { use p2p::{ - ConnectionType, P2pNetworkKadBucket, P2pNetworkKadDist, P2pNetworkKadEntry, - P2pNetworkKadKey, P2pNetworkKadRoutingTable, PeerId, + libp2p_identity::DecodingError, ConnectionType, P2pNetworkKadBucket, P2pNetworkKadDist, + P2pNetworkKadEntry, P2pNetworkKadKey, P2pNetworkKadRoutingTable, PeerId, }; use serde::{Deserialize, Serialize}; @@ -550,17 +550,20 @@ pub mod discovery { buckets: Vec, } - impl From<&P2pNetworkKadRoutingTable> for RpcDiscoveryRoutingTable { - fn from(value: &P2pNetworkKadRoutingTable) -> Self { - RpcDiscoveryRoutingTable { - this_key: value.this_key.clone(), - buckets: value - .buckets - .iter() - .enumerate() - .map(|(i, b)| (b, P2pNetworkKadDist::from(i), &value.this_key).into()) - .collect(), + impl TryFrom<&P2pNetworkKadRoutingTable> for RpcDiscoveryRoutingTable { + type Error = DecodingError; + + fn try_from(value: &P2pNetworkKadRoutingTable) -> Result { + let mut buckets = Vec::new(); + + for (i, b) in value.buckets.iter().enumerate() { + buckets.push((b, P2pNetworkKadDist::from(i), &value.this_key).try_into()?); } + + Ok(RpcDiscoveryRoutingTable { + this_key: value.this_key.clone(), + buckets, + }) } } @@ -571,26 +574,27 @@ pub mod discovery { } impl - From<( + TryFrom<( &P2pNetworkKadBucket, P2pNetworkKadDist, &P2pNetworkKadKey, )> for RpcKBucket { - fn from( + type Error = DecodingError; + + fn try_from( (bucket, max_dist, this_key): ( &P2pNetworkKadBucket, P2pNetworkKadDist, &P2pNetworkKadKey, ), - ) -> Self { - RpcKBucket { - max_dist, - entries: bucket - .iter() - .map(|entry| (entry, this_key).into()) - .collect(), + ) -> Result { + let 
mut entries = Vec::new(); + + for entry in bucket.iter() { + entries.push((entry, this_key).try_into()?); } + Ok(RpcKBucket { max_dist, entries }) } } @@ -604,16 +608,20 @@ pub mod discovery { connection: ConnectionType, } - impl From<(&P2pNetworkKadEntry, &P2pNetworkKadKey)> for RpcEntry { - fn from((value, this_key): (&P2pNetworkKadEntry, &P2pNetworkKadKey)) -> Self { - RpcEntry { + impl TryFrom<(&P2pNetworkKadEntry, &P2pNetworkKadKey)> for RpcEntry { + type Error = DecodingError; + + fn try_from( + (value, this_key): (&P2pNetworkKadEntry, &P2pNetworkKadKey), + ) -> Result { + Ok(RpcEntry { peer_id: value.peer_id, - libp2p: value.peer_id.into(), + libp2p: value.peer_id.try_into()?, key: value.key.clone(), dist: this_key - &value.key, addrs: value.addrs.clone(), connection: value.connection, - } + }) } } } diff --git a/node/src/rpc/rpc_actions.rs b/node/src/rpc/rpc_actions.rs index 635a28b6b4..e1fce4b628 100644 --- a/node/src/rpc/rpc_actions.rs +++ b/node/src/rpc/rpc_actions.rs @@ -97,7 +97,7 @@ pub enum RpcAction { }, ScanStateSummaryGetSuccess { rpc_id: RpcId, - scan_state: Vec>, + scan_state: Result>, String>, }, SnarkPoolAvailableJobsGet { diff --git a/node/src/rpc/rpc_effects.rs b/node/src/rpc/rpc_effects.rs index 070f472b75..6de8103a10 100644 --- a/node/src/rpc/rpc_effects.rs +++ b/node/src/rpc/rpc_effects.rs @@ -6,6 +6,7 @@ use mina_p2p_messages::rpc_kernel::QueryHeader; use mina_p2p_messages::v2::MinaBaseTransactionStatusStableV2; use mina_signer::CompressedPubKey; use openmina_core::block::ArcBlockWithHash; +use openmina_core::bug_condition; use crate::block_producer::BlockProducerWonSlot; use crate::external_snark_worker::available_job_to_snark_worker_spec; @@ -338,7 +339,7 @@ pub fn rpc_effects(store: &mut Store, action: RpcActionWithMeta) }); store.dispatch(RpcAction::ScanStateSummaryGetSuccess { rpc_id, - scan_state: Vec::new(), + scan_state: Ok(Vec::new()), }); return; } @@ -362,7 +363,10 @@ pub fn rpc_effects(store: &mut Store, action: 
RpcActionWithMeta) RpcRequestExtraData::FullBlockOpt(opt) => opt.as_ref(), _ => None, }) else { - let _ = store.service.respond_scan_state_summary_get(rpc_id, None); + let _ = store.service.respond_scan_state_summary_get( + rpc_id, + Err("target block not found".to_string()), + ); return; }; let coinbases = block @@ -392,7 +396,7 @@ pub fn rpc_effects(store: &mut Store, action: RpcActionWithMeta) }; let snark_pool = &store.state().snark_pool; - scan_state.iter_mut().flatten().for_each(|job| { + scan_state.iter_mut().flatten().flatten().for_each(|job| { if let RpcScanStateSummaryScanStateJob::Todo { job_id, bundle_job_id, @@ -428,7 +432,7 @@ pub fn rpc_effects(store: &mut Store, action: RpcActionWithMeta) }; } }); - let res = Some(RpcScanStateSummary { + let res = scan_state.map(|scan_state| RpcScanStateSummary { block: block_summary, scan_state, }); @@ -621,7 +625,19 @@ pub fn rpc_effects(store: &mut Store, action: RpcActionWithMeta) .p2p .ready() .and_then(|p2p| p2p.network.scheduler.discovery_state()) - .map(|discovery_state| (&discovery_state.routing_table).into()); + .and_then( + |discovery_state| match (&discovery_state.routing_table).try_into() { + Ok(resp) => Some(resp), + Err(err) => { + bug_condition!( + "{:?} error converting routing table into response: {:?}", + err, + action + ); + None + } + }, + ); respond_or_log!( store .service() diff --git a/node/src/snark_pool/candidate/snark_pool_candidate_reducer.rs b/node/src/snark_pool/candidate/snark_pool_candidate_reducer.rs index bc2af02e4a..845ad0adab 100644 --- a/node/src/snark_pool/candidate/snark_pool_candidate_reducer.rs +++ b/node/src/snark_pool/candidate/snark_pool_candidate_reducer.rs @@ -3,8 +3,9 @@ use std::collections::BTreeMap; use crate::{p2p_ready, SnarkPoolAction}; use openmina_core::snark::Snark; use p2p::{ - channels::rpc::{P2pChannelsRpcAction, P2pRpcRequest}, + channels::rpc::{P2pChannelsRpcAction, P2pRpcId, P2pRpcRequest}, disconnection::{P2pDisconnectionAction, P2pDisconnectionReason}, 
+ PeerId, }; use snark::{work_verify::SnarkWorkVerifyAction, work_verify_effectful::SnarkWorkVerifyId}; @@ -56,15 +57,25 @@ impl SnarkPoolCandidatesState { return; }; let rpc_id = peer.channels.next_local_rpc_id(); + dispatcher.push(P2pChannelsRpcAction::RequestSend { peer_id, id: rpc_id, request: Box::new(P2pRpcRequest::Snark(job_id.clone())), - }); - dispatcher.push(SnarkPoolCandidateAction::WorkFetchPending { - peer_id, - job_id: job_id.clone(), - rpc_id, + on_init: Some(redux::callback!( + on_send_p2p_snark_rpc_request( + (peer_id: PeerId, rpc_id: P2pRpcId, request: P2pRpcRequest) + ) -> crate::Action { + let P2pRpcRequest::Snark(job_id) = request else { + unreachable!() + }; + SnarkPoolCandidateAction::WorkFetchPending { + job_id, + peer_id, + rpc_id, + } + } + )), }); } SnarkPoolCandidateAction::WorkFetchPending { diff --git a/node/src/transaction_pool/mod.rs b/node/src/transaction_pool/mod.rs index e5c07831df..ed3b16a367 100644 --- a/node/src/transaction_pool/mod.rs +++ b/node/src/transaction_pool/mod.rs @@ -11,7 +11,7 @@ use ledger::{ }; use mina_p2p_messages::v2; use openmina_core::{ - consensus::ConsensusConstants, constants::constraint_constants, requests::RpcId, + bug_condition, consensus::ConsensusConstants, constants::constraint_constants, requests::RpcId, }; use p2p::channels::transaction::P2pChannelsTransactionAction; use redux::callback; @@ -231,9 +231,12 @@ impl TransactionPoolState { }); } TransactionPoolAction::BestTipChangedWithAccounts { accounts } => { - substate + if let Err(e) = substate .pool - .on_new_best_tip(global_slot_from_genesis, accounts); + .on_new_best_tip(global_slot_from_genesis, accounts) + { + bug_condition!("transaction pool::on_new_best_tip failed: {:?}", e); + } } TransactionPoolAction::ApplyVerifiedDiff { best_tip_hash, @@ -358,14 +361,19 @@ impl TransactionPoolState { let in_cmds = collect(&account_ids); let uncommitted = collect(&uncommitted); - substate.pool.handle_transition_frontier_diff( + if let Err(e) = 
substate.pool.handle_transition_frontier_diff( global_slot_from_genesis, global_slot, &diff, &account_ids, &in_cmds, &uncommitted, - ); + ) { + bug_condition!( + "transaction pool::handle_transition_frontier_diff failed: {:?}", + e + ); + } } TransactionPoolAction::Rebroadcast { accepted, rejected } => { let rejected = rejected.iter().map(|(cmd, _)| cmd.data.forget_check()); diff --git a/node/src/transition_frontier/sync/ledger/snarked/transition_frontier_sync_ledger_snarked_reducer.rs b/node/src/transition_frontier/sync/ledger/snarked/transition_frontier_sync_ledger_snarked_reducer.rs index 822fac572a..49d40a91a8 100644 --- a/node/src/transition_frontier/sync/ledger/snarked/transition_frontier_sync_ledger_snarked_reducer.rs +++ b/node/src/transition_frontier/sync/ledger/snarked/transition_frontier_sync_ledger_snarked_reducer.rs @@ -2,11 +2,10 @@ use std::iter; use mina_p2p_messages::v2::MinaLedgerSyncLedgerQueryStableV1; use p2p::{ - channels::rpc::{P2pChannelsRpcAction, P2pRpcRequest}, + channels::rpc::{P2pChannelsRpcAction, P2pRpcId, P2pRpcRequest}, disconnection::{P2pDisconnectionAction, P2pDisconnectionReason}, PeerId, }; -use redux::ActionMeta; use crate::{ ledger::{ @@ -120,7 +119,7 @@ impl TransitionFrontierSyncLedgerSnarkedState { // Dispatch let (dispatcher, global_state) = state_context.into_dispatcher_and_state(); - peer_query_num_accounts_init(dispatcher, global_state, meta, *peer_id) + peer_query_num_accounts_init(dispatcher, global_state, *peer_id) } TransitionFrontierSyncLedgerSnarkedAction::PeerQueryNumAccountsPending { peer_id, @@ -156,7 +155,7 @@ impl TransitionFrontierSyncLedgerSnarkedState { // Dispatch let (dispatcher, global_state) = state_context.into_dispatcher_and_state(); - peer_query_num_accounts_init(dispatcher, global_state, meta, *peer_id) + peer_query_num_accounts_init(dispatcher, global_state, *peer_id) } TransitionFrontierSyncLedgerSnarkedAction::PeerQueryNumAccountsError { peer_id, @@ -379,7 +378,7 @@ impl 
TransitionFrontierSyncLedgerSnarkedState { // Dispatch let (dispatcher, global_state) = state_context.into_dispatcher_and_state(); - peer_query_address_init(dispatcher, global_state, meta, *peer_id, address.clone()); + peer_query_address_init(dispatcher, global_state, *peer_id, address.clone()); } TransitionFrontierSyncLedgerSnarkedAction::PeerQueryAddressRetry { address, @@ -399,7 +398,7 @@ impl TransitionFrontierSyncLedgerSnarkedState { // Dispatch let (dispatcher, global_state) = state_context.into_dispatcher_and_state(); - peer_query_address_init(dispatcher, global_state, meta, *peer_id, address.clone()); + peer_query_address_init(dispatcher, global_state, *peer_id, address.clone()); } TransitionFrontierSyncLedgerSnarkedAction::PeerQueryAddressPending { address, @@ -601,7 +600,6 @@ impl TransitionFrontierSyncLedgerSnarkedState { fn peer_query_num_accounts_init( dispatcher: &mut redux::Dispatcher, state: &State, - meta: ActionMeta, peer_id: PeerId, ) { let Some((ledger_hash, rpc_id)) = None.or_else(|| { @@ -616,31 +614,29 @@ fn peer_query_num_accounts_init( return; }; - if dispatcher.push_if_enabled( - P2pChannelsRpcAction::RequestSend { - peer_id, - id: rpc_id, - request: Box::new(P2pRpcRequest::LedgerQuery( - ledger_hash, - MinaLedgerSyncLedgerQueryStableV1::NumAccounts, - )), - }, - state, - meta.time(), - ) { - dispatcher.push( - TransitionFrontierSyncLedgerSnarkedAction::PeerQueryNumAccountsPending { - peer_id, - rpc_id, - }, - ); - } + dispatcher.push(P2pChannelsRpcAction::RequestSend { + peer_id, + id: rpc_id, + request: Box::new(P2pRpcRequest::LedgerQuery( + ledger_hash, + MinaLedgerSyncLedgerQueryStableV1::NumAccounts, + )), + on_init: Some(redux::callback!( + on_send_p2p_num_accounts_rpc_request( + (peer_id: PeerId, rpc_id: P2pRpcId, _request: P2pRpcRequest) + ) -> crate::Action { + TransitionFrontierSyncLedgerSnarkedAction::PeerQueryNumAccountsPending { + peer_id, + rpc_id, + } + } + )), + }); } fn peer_query_address_init( dispatcher: &mut 
redux::Dispatcher, state: &State, - meta: ActionMeta, peer_id: PeerId, address: LedgerAddress, ) { @@ -662,21 +658,29 @@ fn peer_query_address_init( MinaLedgerSyncLedgerQueryStableV1::WhatChildHashes(address.clone().into()) }; - if dispatcher.push_if_enabled( - P2pChannelsRpcAction::RequestSend { - peer_id, - id: rpc_id, - request: Box::new(P2pRpcRequest::LedgerQuery(ledger_hash, query)), - }, - state, - meta.time(), - ) { - dispatcher.push( - TransitionFrontierSyncLedgerSnarkedAction::PeerQueryAddressPending { - address, - peer_id, - rpc_id, - }, - ); - } + dispatcher.push(P2pChannelsRpcAction::RequestSend { + peer_id, + id: rpc_id, + request: Box::new(P2pRpcRequest::LedgerQuery(ledger_hash, query)), + on_init: Some(redux::callback!( + on_send_p2p_query_address_rpc_request( + (peer_id: PeerId, rpc_id: P2pRpcId, request: P2pRpcRequest) + ) -> crate::Action { + let P2pRpcRequest::LedgerQuery(_, query) = request else { + unreachable!() + }; + let address = match query { + MinaLedgerSyncLedgerQueryStableV1::WhatChildHashes(address) => address.into(), + MinaLedgerSyncLedgerQueryStableV1::WhatContents(address) => address.into(), + MinaLedgerSyncLedgerQueryStableV1::NumAccounts => unreachable!(), + }; + + TransitionFrontierSyncLedgerSnarkedAction::PeerQueryAddressPending { + peer_id, + rpc_id, + address, + } + } + )), + }); } diff --git a/node/src/transition_frontier/sync/ledger/snarked/transition_frontier_sync_ledger_snarked_service.rs b/node/src/transition_frontier/sync/ledger/snarked/transition_frontier_sync_ledger_snarked_service.rs index cf3b178375..f9d736b2a6 100644 --- a/node/src/transition_frontier/sync/ledger/snarked/transition_frontier_sync_ledger_snarked_service.rs +++ b/node/src/transition_frontier/sync/ledger/snarked/transition_frontier_sync_ledger_snarked_service.rs @@ -8,12 +8,12 @@ pub trait TransitionFrontierSyncLedgerSnarkedService: redux::Service { fn compute_snarked_ledger_hashes(&self, snarked_ledger_hash: &LedgerHash) -> Result<(), String>; - /// 
Creates a new copy of the ledger stored under the `origin` hash + /// Creates a new copy of the ledger stored under the first found `origin` hash /// and stores it under the `target` hash. If `overwrite` is false, /// only copy the ledger if the target doesn't exist already. fn copy_snarked_ledger_contents_for_sync( &self, - origin: LedgerHash, + origin: Vec, target: LedgerHash, overwrite: bool, ) -> Result; diff --git a/node/src/transition_frontier/sync/ledger/staged/transition_frontier_sync_ledger_staged_reducer.rs b/node/src/transition_frontier/sync/ledger/staged/transition_frontier_sync_ledger_staged_reducer.rs index 5f63ad8249..d27eb0cb99 100644 --- a/node/src/transition_frontier/sync/ledger/staged/transition_frontier_sync_ledger_staged_reducer.rs +++ b/node/src/transition_frontier/sync/ledger/staged/transition_frontier_sync_ledger_staged_reducer.rs @@ -1,8 +1,11 @@ use ledger::scan_state::protocol_state::MinaHash; use mina_p2p_messages::{list::List, v2}; -use p2p::channels::{ - rpc::{P2pChannelsRpcAction, P2pRpcRequest}, - streaming_rpc::{P2pChannelsStreamingRpcAction, P2pStreamingRpcRequest}, +use p2p::{ + channels::{ + rpc::{P2pChannelsRpcAction, P2pRpcId, P2pRpcRequest}, + streaming_rpc::{P2pChannelsStreamingRpcAction, P2pStreamingRpcRequest}, + }, + PeerId, }; use crate::ledger::write::{LedgerWriteAction, LedgerWriteRequest}; @@ -58,9 +61,19 @@ impl TransitionFrontierSyncLedgerStagedState { block_hash.clone(), ), ), + on_init: Some(redux::callback!( + on_send_p2p_staged_ledger_parts_rpc_request( + (peer_id: PeerId, rpc_id: P2pRpcId, _request: P2pRpcRequest) + ) -> crate::Action { + TransitionFrontierSyncLedgerStagedAction::PartsPeerFetchPending { + peer_id, + rpc_id, + } + } + )) }, global_state, - meta.time(), + meta.time() ) } else { // use streaming rpc for webrtc peers. 
@@ -71,20 +84,26 @@ impl TransitionFrontierSyncLedgerStagedState { request: Box::new(P2pStreamingRpcRequest::StagedLedgerParts( block_hash.clone(), )), + on_init: Some(redux::callback!( + on_send_streaming_p2p_staged_ledger_parts_rpc_request( + (peer_id: PeerId, rpc_id: P2pRpcId, _request: P2pStreamingRpcRequest) + ) -> crate::Action { + TransitionFrontierSyncLedgerStagedAction::PartsPeerFetchPending { + peer_id, + rpc_id, + } + })) }, global_state, - meta.time(), + meta.time() ) }; - // TODO(binier): maybe - // Enabling condition is true if the peer exists and is able to handle this request + + // TODO: instead add an intermediary action for the Peer request with an enabling condition + // that will make sure that only one staged ledger part request + // is ongoing. So here we dispatch the action for all peers, but + // after one picks it up the rest will be filtered out. if enqueued { - dispatcher.push( - TransitionFrontierSyncLedgerStagedAction::PartsPeerFetchPending { - peer_id, - rpc_id, - }, - ); break; } } @@ -238,18 +257,17 @@ impl TransitionFrontierSyncLedgerStagedState { let snarked_ledger_hash = target.snarked_ledger_hash.clone(); let parts = parts.cloned(); - if dispatcher.push_if_enabled( - LedgerWriteAction::Init { - request: LedgerWriteRequest::StagedLedgerReconstruct { - snarked_ledger_hash, - parts, - }, + dispatcher.push(LedgerWriteAction::Init { + request: LedgerWriteRequest::StagedLedgerReconstruct { + snarked_ledger_hash, + parts, }, - global_state, - meta.time(), - ) { - dispatcher.push(TransitionFrontierSyncLedgerStagedAction::ReconstructPending); - } + on_init: redux::callback!( + on_staged_ledger_reconstruct_init(_request: LedgerWriteRequest) -> crate::Action { + TransitionFrontierSyncLedgerStagedAction::ReconstructPending + } + ), + }); } TransitionFrontierSyncLedgerStagedAction::ReconstructPending => { let Some((target, parts)) = state.target_with_parts() else { diff --git 
a/node/src/transition_frontier/sync/transition_frontier_sync_actions.rs b/node/src/transition_frontier/sync/transition_frontier_sync_actions.rs index 54ca6c74f7..a5729de829 100644 --- a/node/src/transition_frontier/sync/transition_frontier_sync_actions.rs +++ b/node/src/transition_frontier/sync/transition_frontier_sync_actions.rs @@ -1,4 +1,4 @@ -use mina_p2p_messages::v2::StateHash; +use mina_p2p_messages::v2::{LedgerHash, StateHash}; use openmina_core::block::ArcBlockWithHash; use openmina_core::consensus::consensus_take; use openmina_core::ActionEvent; @@ -40,6 +40,8 @@ pub enum TransitionFrontierSyncAction { new_root_staged_ledger_hash = display(root_block.staged_ledger_hash()), ))] BestTipUpdate { + // Required to be able to reuse partially synced root ledgers + previous_root_snarked_ledger_hash: Option<LedgerHash>, best_tip: ArcBlockWithHash, root_block: ArcBlockWithHash, blocks_inbetween: Vec<StateHash>, diff --git a/node/src/transition_frontier/sync/transition_frontier_sync_effects.rs b/node/src/transition_frontier/sync/transition_frontier_sync_effects.rs index dc8c6e2b0d..5e4c0661cc 100644 --- a/node/src/transition_frontier/sync/transition_frontier_sync_effects.rs +++ b/node/src/transition_frontier/sync/transition_frontier_sync_effects.rs @@ -1,5 +1,7 @@ +use mina_p2p_messages::v2::LedgerHash; use openmina_core::block::ArcBlockWithHash; -use p2p::channels::rpc::P2pChannelsRpcAction; +use p2p::channels::rpc::{P2pChannelsRpcAction, P2pRpcId}; +use p2p::PeerId; use redux::ActionMeta; use crate::ledger::write::{LedgerWriteAction, LedgerWriteRequest}; @@ -44,10 +46,19 @@ impl TransitionFrontierSyncAction { store.dispatch(TransitionFrontierSyncAction::LedgerRootPending); } } - TransitionFrontierSyncAction::BestTipUpdate { best_tip, .. } => { + TransitionFrontierSyncAction::BestTipUpdate { + previous_root_snarked_ledger_hash, + best_tip, + .. 
+ } => { // TODO(tizoc): this is currently required because of how complicated the BestTipUpdate reducer is, // once that is simplified this should be handled in separate actions. - maybe_copy_ledgers_for_sync(store, best_tip).unwrap(); + maybe_copy_ledgers_for_sync( + store, + previous_root_snarked_ledger_hash.clone(), + best_tip, + ) + .unwrap(); // if root snarked ledger changed. store.dispatch(TransitionFrontierSyncLedgerAction::Init); @@ -92,6 +103,7 @@ impl TransitionFrontierSyncAction { TransitionFrontierSyncAction::LedgerRootPending => { prepare_transition_frontier_root_ledger_for_sync( store, + None, &sync_best_tip(store.state()), ) .unwrap(); @@ -156,17 +168,25 @@ impl TransitionFrontierSyncAction { return; }; - if store.dispatch(P2pChannelsRpcAction::RequestSend { + store.dispatch(P2pChannelsRpcAction::RequestSend { peer_id: *peer_id, id: rpc_id, request: Box::new(P2pRpcRequest::Block(hash.clone())), - }) { - store.dispatch(TransitionFrontierSyncAction::BlocksPeerQueryPending { - hash: hash.clone(), - peer_id: *peer_id, - rpc_id, - }); - } + on_init: Some(redux::callback!( + on_send_p2p_block_rpc_request( + (peer_id: PeerId, rpc_id: P2pRpcId, request: P2pRpcRequest) + ) -> crate::Action { + let P2pRpcRequest::Block(hash) = request else { + unreachable!() + }; + TransitionFrontierSyncAction::BlocksPeerQueryPending { + hash, + peer_id, + rpc_id, + } + } + )), + }); } TransitionFrontierSyncAction::BlocksPeerQueryRetry { hash, peer_id } => { let p2p = p2p_ready!(store.state().p2p, meta.time()); @@ -177,17 +197,25 @@ impl TransitionFrontierSyncAction { return; }; - if store.dispatch(P2pChannelsRpcAction::RequestSend { + store.dispatch(P2pChannelsRpcAction::RequestSend { peer_id: *peer_id, id: rpc_id, request: Box::new(P2pRpcRequest::Block(hash.clone())), - }) { - store.dispatch(TransitionFrontierSyncAction::BlocksPeerQueryPending { - hash: hash.clone(), - peer_id: *peer_id, - rpc_id, - }); - } + on_init: Some(redux::callback!( + 
on_send_p2p_block_rpc_request_retry( + (peer_id: PeerId, rpc_id: P2pRpcId, request: P2pRpcRequest) + ) -> crate::Action { + let P2pRpcRequest::Block(hash) = request else { + unreachable!() + }; + TransitionFrontierSyncAction::BlocksPeerQueryPending { + hash, + peer_id, + rpc_id, + } + } + )), + }); } TransitionFrontierSyncAction::BlocksPeerQueryPending { .. } => {} TransitionFrontierSyncAction::BlocksPeerQueryError { .. } => { @@ -219,13 +247,22 @@ impl TransitionFrontierSyncAction { stats.block_producer().block_apply_start(meta.time(), &hash); } - if store.dispatch(LedgerWriteAction::Init { + store.dispatch(LedgerWriteAction::Init { request: LedgerWriteRequest::BlockApply { block, pred_block }, - }) { - store.dispatch(TransitionFrontierSyncAction::BlocksNextApplyPending { - hash: hash.clone(), - }); - } + on_init: redux::callback!( + on_block_next_apply_init(request: LedgerWriteRequest) -> crate::Action { + let LedgerWriteRequest::BlockApply { + block, + pred_block: _, + } = request + else { + unreachable!() + }; + let hash = block.hash().clone(); + TransitionFrontierSyncAction::BlocksNextApplyPending { hash } + } + ), + }); } TransitionFrontierSyncAction::BlocksNextApplyPending { .. } => {} TransitionFrontierSyncAction::BlocksNextApplySuccess { hash } => { @@ -294,7 +331,7 @@ impl TransitionFrontierSyncAction { .collect() }; - if store.dispatch(LedgerWriteAction::Init { + store.dispatch(LedgerWriteAction::Init { request: LedgerWriteRequest::Commit { ledgers_to_keep, root_snarked_ledger_updates, @@ -302,9 +339,12 @@ impl TransitionFrontierSyncAction { new_root: new_root.clone(), new_best_tip: new_best_tip.clone(), }, - }) { - store.dispatch(TransitionFrontierSyncAction::CommitPending); - } + on_init: redux::callback!( + on_frontier_commit_init(_request: LedgerWriteRequest) -> crate::Action { + TransitionFrontierSyncAction::CommitPending + } + ), + }); } TransitionFrontierSyncAction::CommitPending => {} TransitionFrontierSyncAction::CommitSuccess { .. 
} => { @@ -325,6 +365,7 @@ fn sync_best_tip(state: &crate::State) -> ArcBlockWithHash { /// For snarked ledger sync targets, copy the previous snarked ledger if required fn maybe_copy_ledgers_for_sync( store: &mut Store, + previous_root_snarked_ledger_hash: Option, best_tip: &ArcBlockWithHash, ) -> Result where @@ -341,7 +382,11 @@ where } TransitionFrontierSyncState::RootLedgerPending(_) => { - prepare_transition_frontier_root_ledger_for_sync(store, best_tip) + prepare_transition_frontier_root_ledger_for_sync( + store, + previous_root_snarked_ledger_hash, + best_tip, + ) } _ => Ok(true), } @@ -361,7 +406,7 @@ where store .service() - .copy_snarked_ledger_contents_for_sync(origin, target, false) + .copy_snarked_ledger_contents_for_sync(vec![origin], target, false) } /// Copies (if necessary) the staking ledger into the sync ledger state @@ -383,26 +428,39 @@ where store .service() - .copy_snarked_ledger_contents_for_sync(origin, target, false) + .copy_snarked_ledger_contents_for_sync(vec![origin], target, false) } /// Copies (if necessary) the next epoch ledger into the sync ledger state /// for the transition frontier root ledger to use as a starting point. 
fn prepare_transition_frontier_root_ledger_for_sync<S>( store: &mut Store<S>, + previous_root_snarked_ledger_hash: Option<LedgerHash>, best_tip: &ArcBlockWithHash, ) -> Result<bool, String> where S: TransitionFrontierSyncLedgerSnarkedService, { let sync = &store.state().transition_frontier.sync; - let root_block = sync.root_block().unwrap(); - let next_epoch_sync = SyncLedgerTarget::next_epoch(best_tip, root_block) - .unwrap_or_else(|| SyncLedgerTarget::staking_epoch(best_tip)); + let root_block = sync + .root_block() + .expect("Sync root block cannot be missing"); + + // Attempt in order: previous root, next epoch ledger, staking ledger + let mut candidate_origins: Vec<LedgerHash> = + previous_root_snarked_ledger_hash.into_iter().collect(); + if let Some(next_epoch) = SyncLedgerTarget::next_epoch(best_tip, root_block) { + candidate_origins.push(next_epoch.snarked_ledger_hash.clone()); + } + candidate_origins.push( + SyncLedgerTarget::staking_epoch(best_tip) + .snarked_ledger_hash + .clone(), + ); + let target = root_block.snarked_ledger_hash().clone(); - let origin = next_epoch_sync.snarked_ledger_hash; store .service() - .copy_snarked_ledger_contents_for_sync(origin, target, false) + .copy_snarked_ledger_contents_for_sync(candidate_origins, target, false) } diff --git a/node/src/transition_frontier/sync/transition_frontier_sync_reducer.rs b/node/src/transition_frontier/sync/transition_frontier_sync_reducer.rs index 0846fd4173..3629ae95f9 100644 --- a/node/src/transition_frontier/sync/transition_frontier_sync_reducer.rs +++ b/node/src/transition_frontier/sync/transition_frontier_sync_reducer.rs @@ -40,6 +40,7 @@ impl TransitionFrontierSyncState { } // TODO(binier): refactor TransitionFrontierSyncAction::BestTipUpdate { + previous_root_snarked_ledger_hash: _, best_tip, root_block, blocks_inbetween, diff --git a/node/testing/Cargo.toml b/node/testing/Cargo.toml index 2cb6a4c469..d788319265 100644 --- a/node/testing/Cargo.toml +++ b/node/testing/Cargo.toml @@ -1,6 +1,6 @@ [package] name = 
"openmina-node-testing" -version = "0.8.1" +version = "0.8.2" edition = "2021" license = "Apache-2.0" diff --git a/node/testing/src/node/ocaml/mod.rs b/node/testing/src/node/ocaml/mod.rs index 6f637e6e1b..3266b36a33 100644 --- a/node/testing/src/node/ocaml/mod.rs +++ b/node/testing/src/node/ocaml/mod.rs @@ -169,7 +169,7 @@ impl OcamlNode { } pub fn peer_id(&self) -> PeerId { - self.peer_id.into() + self.peer_id.try_into().unwrap() } pub async fn exec(&mut self, step: OcamlStep) -> anyhow::Result { diff --git a/node/testing/src/scenarios/multi_node/basic_connectivity_initial_joining.rs b/node/testing/src/scenarios/multi_node/basic_connectivity_initial_joining.rs index e2db9854bb..73eb32db79 100644 --- a/node/testing/src/scenarios/multi_node/basic_connectivity_initial_joining.rs +++ b/node/testing/src/scenarios/multi_node/basic_connectivity_initial_joining.rs @@ -157,7 +157,7 @@ impl MultiNodeBasicConnectivityInitialJoining { .map_or(0, |discovery_state| { discovery_state .routing_table - .closest_peers(&my_id.into()) + .closest_peers(&my_id.try_into().unwrap()) .count() }); let state_machine_peers = if cfg!(feature = "p2p-webrtc") { diff --git a/node/testing/src/scenarios/multi_node/connection_discovery.rs b/node/testing/src/scenarios/multi_node/connection_discovery.rs index a0a0754c54..77672e6580 100644 --- a/node/testing/src/scenarios/multi_node/connection_discovery.rs +++ b/node/testing/src/scenarios/multi_node/connection_discovery.rs @@ -382,7 +382,7 @@ where Ok(peer_ids.into_iter().all(|peer_id| { table - .look_up(&peer_id.into()) + .look_up(&peer_id.try_into().unwrap()) .map(|entry| entry.peer_id == peer_id) .unwrap_or_default() })) diff --git a/node/testing/src/scenarios/solo_node/basic_connectivity_accept_incoming.rs b/node/testing/src/scenarios/solo_node/basic_connectivity_accept_incoming.rs index 915d1721a7..c7d0088ac2 100644 --- a/node/testing/src/scenarios/solo_node/basic_connectivity_accept_incoming.rs +++ 
b/node/testing/src/scenarios/solo_node/basic_connectivity_accept_incoming.rs @@ -99,7 +99,7 @@ impl SoloNodeBasicConnectivityAcceptIncoming { .map_or(0, |discovery_state| { discovery_state .routing_table - .closest_peers(&my_id.into()) + .closest_peers(&my_id.try_into().unwrap()) .count() }); diff --git a/node/testing/src/scenarios/solo_node/basic_connectivity_initial_joining.rs b/node/testing/src/scenarios/solo_node/basic_connectivity_initial_joining.rs index 6ce1807409..92a1e7e20d 100644 --- a/node/testing/src/scenarios/solo_node/basic_connectivity_initial_joining.rs +++ b/node/testing/src/scenarios/solo_node/basic_connectivity_initial_joining.rs @@ -50,14 +50,15 @@ impl SoloNodeBasicConnectivityInitialJoining { .initial_peers(initial_peers); let node_id = runner.add_rust_node(config); - let peer_id = libp2p::PeerId::from( + let peer_id = libp2p::PeerId::try_from( runner .node(node_id) .expect("must exist") .state() .p2p .my_id(), - ); + ) + .unwrap(); eprintln!("launch Openmina node, id: {node_id}, peer_id: {peer_id}"); for step in 0..STEPS { @@ -102,7 +103,7 @@ impl SoloNodeBasicConnectivityInitialJoining { .map_or(0, |discovery_state| { discovery_state .routing_table - .closest_peers(&my_id.into()) + .closest_peers(&my_id.try_into().unwrap()) .count() }); diff --git a/node/web/Cargo.toml b/node/web/Cargo.toml index 7f1ad4f76e..3af6f74b45 100644 --- a/node/web/Cargo.toml +++ b/node/web/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "openmina-node-web" -version = "0.8.1" +version = "0.8.2" edition = "2021" license = "Apache-2.0" diff --git a/p2p/Cargo.toml b/p2p/Cargo.toml index f86d08ca73..16fbdda90c 100644 --- a/p2p/Cargo.toml +++ b/p2p/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "p2p" -version = "0.8.1" +version = "0.8.2" edition = "2021" license = "Apache-2.0" diff --git a/p2p/libp2p-rpc-behaviour/Cargo.toml b/p2p/libp2p-rpc-behaviour/Cargo.toml index 253d194a69..a334c1e9b4 100644 --- a/p2p/libp2p-rpc-behaviour/Cargo.toml +++ 
b/p2p/libp2p-rpc-behaviour/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "libp2p-rpc-behaviour" -version = "0.8.1" +version = "0.8.2" edition = "2021" license = "Apache-2.0" diff --git a/p2p/src/channels/rpc/p2p_channels_rpc_actions.rs b/p2p/src/channels/rpc/p2p_channels_rpc_actions.rs index 00abf5061f..85d171f59a 100644 --- a/p2p/src/channels/rpc/p2p_channels_rpc_actions.rs +++ b/p2p/src/channels/rpc/p2p_channels_rpc_actions.rs @@ -26,6 +26,7 @@ pub enum P2pChannelsRpcAction { peer_id: PeerId, id: P2pRpcId, request: Box, + on_init: Option>, }, Timeout { peer_id: PeerId, @@ -78,59 +79,88 @@ impl redux::EnablingCondition for P2pChannelsRpcAction { state.get_ready_peer(peer_id).map_or(false, |p| { matches!(p.channels.rpc, P2pChannelsRpcState::Enabled) }) - }, + } P2pChannelsRpcAction::Pending { peer_id } => { state.get_ready_peer(peer_id).map_or(false, |p| { matches!(p.channels.rpc, P2pChannelsRpcState::Init { .. }) }) - }, + } P2pChannelsRpcAction::Ready { peer_id } => { state.get_ready_peer(peer_id).map_or(false, |p| { matches!(p.channels.rpc, P2pChannelsRpcState::Pending { .. }) }) - }, - P2pChannelsRpcAction::RequestSend { peer_id, id, request } => { - state.peers.get(peer_id) - .filter(|p| !p.is_libp2p() || request.kind().supported_by_libp2p()) - .and_then(|p| p.status.as_ready()) - .map_or(false, |p| matches!( + } + P2pChannelsRpcAction::RequestSend { + peer_id, + id, + request, + on_init: _, + } => state + .peers + .get(peer_id) + .filter(|p| !p.is_libp2p() || request.kind().supported_by_libp2p()) + .and_then(|p| p.status.as_ready()) + .map_or(false, |p| { + matches!( &p.channels.rpc, - P2pChannelsRpcState::Ready { local: P2pRpcLocalState::WaitingForRequest { .. } | P2pRpcLocalState::Responded { .. }, .. } if p.channels.next_local_rpc_id() == *id - )) - }, + P2pChannelsRpcState::Ready { + local: P2pRpcLocalState::WaitingForRequest { .. } + | P2pRpcLocalState::Responded { .. }, + .. 
+ } if p.channels.next_local_rpc_id() == *id + ) + }), P2pChannelsRpcAction::Timeout { peer_id, id } => { - state.get_ready_peer(peer_id).map_or(false, |p| matches!(&p.channels.rpc, P2pChannelsRpcState::Ready { local: P2pRpcLocalState::Requested { id: rpc_id, .. }, .. } if rpc_id == id)) - && state.is_peer_rpc_timed_out(peer_id, *id, time) - }, + state.get_ready_peer(peer_id).map_or(false, |p| { + matches!( + &p.channels.rpc, + P2pChannelsRpcState::Ready { + local: P2pRpcLocalState::Requested { id: rpc_id, .. }, + .. + } if rpc_id == id + ) + }) && state.is_peer_rpc_timed_out(peer_id, *id, time) + } P2pChannelsRpcAction::ResponseReceived { peer_id, id, .. } => { // TODO(binier): use consensus to enforce that peer doesn't send // us inferior block than it has in the past. - state.get_ready_peer(peer_id).map_or(false, |p| match &p.channels.rpc { - P2pChannelsRpcState::Ready { local, .. } => { - // TODO(binier): validate that response corresponds to request. - matches!(local, P2pRpcLocalState::Requested { id: rpc_id, .. } if rpc_id == id) - }, - _ => false, + state.get_ready_peer(peer_id).map_or(false, |p| { + match &p.channels.rpc { + P2pChannelsRpcState::Ready { local, .. } => { + // TODO(binier): validate that response corresponds to request. + matches!( + local, + P2pRpcLocalState::Requested { id: rpc_id, .. } + if rpc_id == id + ) + } + _ => false, + } }) - }, - P2pChannelsRpcAction::RequestReceived { peer_id, id, .. } => { - state.get_ready_peer(peer_id).map_or(false, |p| match &p.channels.rpc { + } + P2pChannelsRpcAction::RequestReceived { peer_id, id, .. } => state + .get_ready_peer(peer_id) + .map_or(false, |p| match &p.channels.rpc { P2pChannelsRpcState::Ready { remote, .. 
} => { - remote.pending_requests.len() < MAX_P2P_RPC_REMOTE_CONCURRENT_REQUESTS && - remote.pending_requests.iter().all(|v| v.id != *id) - }, + remote.pending_requests.len() < MAX_P2P_RPC_REMOTE_CONCURRENT_REQUESTS + && remote.pending_requests.iter().all(|v| v.id != *id) + } _ => false, - }) - }, - P2pChannelsRpcAction::ResponsePending { peer_id, id } => { - state.get_ready_peer(peer_id).map_or(false, |p| match &p.channels.rpc { - P2pChannelsRpcState::Ready { remote, .. } => { - remote.pending_requests.iter().any(|v| v.id == *id && !v.is_pending) - }, + }), + P2pChannelsRpcAction::ResponsePending { peer_id, id } => state + .get_ready_peer(peer_id) + .map_or(false, |p| match &p.channels.rpc { + P2pChannelsRpcState::Ready { remote, .. } => remote + .pending_requests + .iter() + .any(|v| v.id == *id && !v.is_pending), _ => false, - }) - }, - P2pChannelsRpcAction::ResponseSend { peer_id, id, response: _response } => { + }), + P2pChannelsRpcAction::ResponseSend { + peer_id, + id, + response: _response, + } => { #[cfg(feature = "p2p-libp2p")] if state.is_libp2p_peer(peer_id) { let Some(response) = _response.as_ref() else { @@ -138,11 +168,8 @@ impl redux::EnablingCondition for P2pChannelsRpcAction { }; return if !response.kind().supported_by_libp2p() { false - } else if let Some(streams) = state - .network - .scheduler - .rpc_incoming_streams - .get(peer_id) + } else if let Some(streams) = + state.network.scheduler.rpc_incoming_streams.get(peer_id) { !streams.is_empty() } else { @@ -150,14 +177,16 @@ impl redux::EnablingCondition for P2pChannelsRpcAction { }; } - state.get_ready_peer(peer_id).map_or(false, |p| match &p.channels.rpc { - P2pChannelsRpcState::Ready { remote, .. } => { - // TODO(binier): validate that response corresponds to request. - remote.pending_requests.iter().any(|v| v.id == *id) - }, - _ => false, + state.get_ready_peer(peer_id).map_or(false, |p| { + match &p.channels.rpc { + P2pChannelsRpcState::Ready { remote, .. 
} => { + // TODO(binier): validate that response corresponds to request. + remote.pending_requests.iter().any(|v| v.id == *id) + } + _ => false, + } }) - }, + } } } } diff --git a/p2p/src/channels/rpc/p2p_channels_rpc_effects.rs b/p2p/src/channels/rpc/p2p_channels_rpc_effects.rs index ad1509b7a8..6ac3278bbb 100644 --- a/p2p/src/channels/rpc/p2p_channels_rpc_effects.rs +++ b/p2p/src/channels/rpc/p2p_channels_rpc_effects.rs @@ -27,11 +27,12 @@ impl P2pChannelsRpcAction { peer_id, id, request, + on_init, } => { #[cfg(feature = "p2p-libp2p")] if store.state().is_libp2p_peer(&peer_id) { if let Some((query, data)) = - super::libp2p::internal_request_into_libp2p(*request, id) + super::libp2p::internal_request_into_libp2p(*request.clone(), id) { store.dispatch(P2pNetworkRpcAction::OutgoingQuery { peer_id, @@ -39,13 +40,19 @@ impl P2pChannelsRpcAction { data, }); } + if let Some(on_init) = on_init { + store.dispatch_callback(on_init, (peer_id, id, *request)); + } return; } - let msg = RpcChannelMsg::Request(id, *request); + let msg = RpcChannelMsg::Request(id, *request.clone()); store .service() .channel_send(peer_id, MsgId::first(), msg.into()); + if let Some(on_init) = on_init { + store.dispatch_callback(on_init, (peer_id, id, *request)); + } } P2pChannelsRpcAction::ResponseReceived { peer_id, response, .. 
diff --git a/p2p/src/channels/streaming_rpc/p2p_channels_streaming_rpc_actions.rs b/p2p/src/channels/streaming_rpc/p2p_channels_streaming_rpc_actions.rs index 9342848021..748e584695 100644 --- a/p2p/src/channels/streaming_rpc/p2p_channels_streaming_rpc_actions.rs +++ b/p2p/src/channels/streaming_rpc/p2p_channels_streaming_rpc_actions.rs @@ -31,6 +31,7 @@ pub enum P2pChannelsStreamingRpcAction { peer_id: PeerId, id: P2pStreamingRpcId, request: Box, + on_init: Option>, }, Timeout { peer_id: PeerId, @@ -106,103 +107,190 @@ impl P2pChannelsStreamingRpcAction { impl redux::EnablingCondition for P2pChannelsStreamingRpcAction { fn is_enabled(&self, state: &P2pState, time: Timestamp) -> bool { match self { - P2pChannelsStreamingRpcAction::Init { peer_id } => { - state.peers.get(peer_id).filter(|p| !p.is_libp2p()) - .and_then(|p| p.status.as_ready()) - .map_or(false, |p| matches!(p.channels.streaming_rpc, P2pChannelsStreamingRpcState::Enabled)) - }, + P2pChannelsStreamingRpcAction::Init { peer_id } => state + .peers + .get(peer_id) + .filter(|p| !p.is_libp2p()) + .and_then(|p| p.status.as_ready()) + .map_or(false, |p| { + matches!( + p.channels.streaming_rpc, + P2pChannelsStreamingRpcState::Enabled + ) + }), P2pChannelsStreamingRpcAction::Pending { peer_id } => { state.get_ready_peer(peer_id).map_or(false, |p| { - matches!(p.channels.streaming_rpc, P2pChannelsStreamingRpcState::Init { .. }) + matches!( + p.channels.streaming_rpc, + P2pChannelsStreamingRpcState::Init { .. } + ) }) - }, + } P2pChannelsStreamingRpcAction::Ready { peer_id } => { state.get_ready_peer(peer_id).map_or(false, |p| { - matches!(p.channels.streaming_rpc, P2pChannelsStreamingRpcState::Pending { .. }) + matches!( + p.channels.streaming_rpc, + P2pChannelsStreamingRpcState::Pending { .. } + ) }) - }, + } P2pChannelsStreamingRpcAction::RequestSend { peer_id, id, .. 
} => { - state.get_ready_peer(peer_id) - .map_or(false, |p| matches!( + state.get_ready_peer(peer_id).map_or(false, |p| { + matches!( &p.channels.streaming_rpc, - P2pChannelsStreamingRpcState::Ready { local: P2pStreamingRpcLocalState::WaitingForRequest { .. } | P2pStreamingRpcLocalState::Responded { .. }, .. } if p.channels.next_local_rpc_id() == *id - )) - }, + P2pChannelsStreamingRpcState::Ready { + local: P2pStreamingRpcLocalState::WaitingForRequest { .. } + | P2pStreamingRpcLocalState::Responded { .. }, + .. + } if p.channels.next_local_rpc_id() == *id + ) + }) + } P2pChannelsStreamingRpcAction::Timeout { peer_id, id } => { - state.get_ready_peer(peer_id).map_or(false, |p| - matches!(&p.channels.streaming_rpc, P2pChannelsStreamingRpcState::Ready { local: - P2pStreamingRpcLocalState::Requested { id: rpc_id, .. }, .. } if rpc_id == id)) - && state.is_peer_streaming_rpc_timed_out(peer_id, *id, time) - }, - P2pChannelsStreamingRpcAction::ResponseNextPartGet { peer_id, id, .. } => { - state.get_ready_peer(peer_id).map_or(false, |p| match &p.channels.streaming_rpc { - P2pChannelsStreamingRpcState::Ready { local: P2pStreamingRpcLocalState::Requested { id: rpc_id, progress, .. }, .. } => { - rpc_id == id && !progress.is_done() && !progress.is_part_pending() - }, + state.get_ready_peer(peer_id).map_or(false, |p| { + matches!( + &p.channels.streaming_rpc, + P2pChannelsStreamingRpcState::Ready { + local: P2pStreamingRpcLocalState::Requested { + id: rpc_id, .. }, + .. + } if rpc_id == id + ) + }) && state.is_peer_streaming_rpc_timed_out(peer_id, *id, time) + } + P2pChannelsStreamingRpcAction::ResponseNextPartGet { peer_id, id, .. } => state + .get_ready_peer(peer_id) + .map_or(false, |p| match &p.channels.streaming_rpc { + P2pChannelsStreamingRpcState::Ready { + local: + P2pStreamingRpcLocalState::Requested { + id: rpc_id, + progress, + .. + }, + .. 
+ } => rpc_id == id && !progress.is_done() && !progress.is_part_pending(), _ => false, - }) - }, - P2pChannelsStreamingRpcAction::ResponsePartReceived { peer_id, id, response } => { - state.get_ready_peer(peer_id).map_or(false, |p| match &p.channels.streaming_rpc { - P2pChannelsStreamingRpcState::Ready { local: P2pStreamingRpcLocalState::Requested { id: rpc_id, request, .. }, .. } => { - rpc_id == id && response.kind() == request.kind() - }, + }), + P2pChannelsStreamingRpcAction::ResponsePartReceived { + peer_id, + id, + response, + } => state + .get_ready_peer(peer_id) + .map_or(false, |p| match &p.channels.streaming_rpc { + P2pChannelsStreamingRpcState::Ready { + local: + P2pStreamingRpcLocalState::Requested { + id: rpc_id, + request, + .. + }, + .. + } => rpc_id == id && response.kind() == request.kind(), _ => false, - }) - }, - P2pChannelsStreamingRpcAction::ResponseReceived { peer_id, id, response } => { - state.get_ready_peer(peer_id).map_or(false, |p| match &p.channels.streaming_rpc { - P2pChannelsStreamingRpcState::Ready { local: P2pStreamingRpcLocalState::Requested { id: rpc_id, request, progress, .. }, .. } => { - rpc_id == id && (response.is_none() || progress.is_done()) && response.as_ref().map_or(true, |resp| resp.kind() == request.kind()) - }, + }), + P2pChannelsStreamingRpcAction::ResponseReceived { + peer_id, + id, + response, + } => state + .get_ready_peer(peer_id) + .map_or(false, |p| match &p.channels.streaming_rpc { + P2pChannelsStreamingRpcState::Ready { + local: + P2pStreamingRpcLocalState::Requested { + id: rpc_id, + request, + progress, + .. + }, + .. + } => { + rpc_id == id + && (response.is_none() || progress.is_done()) + && response + .as_ref() + .map_or(true, |resp| resp.kind() == request.kind()) + } _ => false, - }) - }, - P2pChannelsStreamingRpcAction::RequestReceived { peer_id, .. 
} => { - state.get_ready_peer(peer_id).map_or(false, |p| match &p.channels.streaming_rpc { + }), + P2pChannelsStreamingRpcAction::RequestReceived { peer_id, .. } => state + .get_ready_peer(peer_id) + .map_or(false, |p| match &p.channels.streaming_rpc { P2pChannelsStreamingRpcState::Ready { remote, .. } => { - matches!(remote, P2pStreamingRpcRemoteState::WaitingForRequest { .. } | P2pStreamingRpcRemoteState::Responded { ..}) - }, - _ => false, - }) - }, - P2pChannelsStreamingRpcAction::ResponsePending { peer_id, id } => { - state.get_ready_peer(peer_id) - .and_then(|p| p.channels.streaming_rpc.remote_todo_request()) - .map_or(false, |(rpc_id, _)| rpc_id == *id) - }, - P2pChannelsStreamingRpcAction::ResponseSendInit { peer_id, id, response } => { - state.get_ready_peer(peer_id) - .and_then(|p| p.channels.streaming_rpc.remote_pending_request()) - .map_or(false, |(rpc_id, req)| rpc_id == *id && response.as_ref().map_or(true, |resp| resp.kind() == req.kind())) - }, - P2pChannelsStreamingRpcAction::ResponsePartNextSend { peer_id, id } => { - state.get_ready_peer(peer_id) - .map_or(false, |p| match &p.channels.streaming_rpc { - P2pChannelsStreamingRpcState::Ready { remote: P2pStreamingRpcRemoteState::Requested { id: rpc_id, progress, .. }, .. } => { - rpc_id == id && !progress.is_done() + matches!( + remote, + P2pStreamingRpcRemoteState::WaitingForRequest { .. } + | P2pStreamingRpcRemoteState::Responded { .. } + ) } _ => false, - }) - } - P2pChannelsStreamingRpcAction::ResponsePartSend { peer_id, id, response } => { - state.get_ready_peer(peer_id) - .map_or(false, |p| match &p.channels.streaming_rpc { - P2pChannelsStreamingRpcState::Ready { remote: P2pStreamingRpcRemoteState::Requested { id: rpc_id, request, progress, .. }, .. 
} => { - rpc_id == id && !progress.is_done() && response.kind() == request.kind() - } + }), + P2pChannelsStreamingRpcAction::ResponsePending { peer_id, id } => state + .get_ready_peer(peer_id) + .and_then(|p| p.channels.streaming_rpc.remote_todo_request()) + .map_or(false, |(rpc_id, _)| rpc_id == *id), + P2pChannelsStreamingRpcAction::ResponseSendInit { + peer_id, + id, + response, + } => state + .get_ready_peer(peer_id) + .and_then(|p| p.channels.streaming_rpc.remote_pending_request()) + .map_or(false, |(rpc_id, req)| { + rpc_id == *id + && response + .as_ref() + .map_or(true, |resp| resp.kind() == req.kind()) + }), + P2pChannelsStreamingRpcAction::ResponsePartNextSend { peer_id, id } => state + .get_ready_peer(peer_id) + .map_or(false, |p| match &p.channels.streaming_rpc { + P2pChannelsStreamingRpcState::Ready { + remote: + P2pStreamingRpcRemoteState::Requested { + id: rpc_id, + progress, + .. + }, + .. + } => rpc_id == id && !progress.is_done(), _ => false, - }) - }, - P2pChannelsStreamingRpcAction::ResponseSent { peer_id, id} => { - state.get_ready_peer(peer_id) - .map_or(false, |p| match &p.channels.streaming_rpc { - P2pChannelsStreamingRpcState::Ready { remote: P2pStreamingRpcRemoteState::Requested { id: rpc_id, progress, .. }, .. } => { - rpc_id == id && progress.is_done() - } + }), + P2pChannelsStreamingRpcAction::ResponsePartSend { + peer_id, + id, + response, + } => state + .get_ready_peer(peer_id) + .map_or(false, |p| match &p.channels.streaming_rpc { + P2pChannelsStreamingRpcState::Ready { + remote: + P2pStreamingRpcRemoteState::Requested { + id: rpc_id, + request, + progress, + .. + }, + .. 
+ } => rpc_id == id && !progress.is_done() && response.kind() == request.kind(), _ => false, - }) - }, + }), + P2pChannelsStreamingRpcAction::ResponseSent { peer_id, id } => state + .get_ready_peer(peer_id) + .map_or(false, |p| match &p.channels.streaming_rpc { + P2pChannelsStreamingRpcState::Ready { + remote: + P2pStreamingRpcRemoteState::Requested { + id: rpc_id, + progress, + .. + }, + .. + } => rpc_id == id && progress.is_done(), + _ => false, + }), } } } diff --git a/p2p/src/channels/streaming_rpc/p2p_channels_streaming_rpc_effects.rs b/p2p/src/channels/streaming_rpc/p2p_channels_streaming_rpc_effects.rs index 4e53163376..ffafac51d9 100644 --- a/p2p/src/channels/streaming_rpc/p2p_channels_streaming_rpc_effects.rs +++ b/p2p/src/channels/streaming_rpc/p2p_channels_streaming_rpc_effects.rs @@ -21,11 +21,15 @@ impl P2pChannelsStreamingRpcAction { peer_id, id, request, + on_init, } => { - let msg = StreamingRpcChannelMsg::Request(id, *request); + let msg = StreamingRpcChannelMsg::Request(id, *request.clone()); store .service() .channel_send(peer_id, MsgId::first(), msg.into()); + if let Some(on_init) = on_init { + store.dispatch_callback(on_init, (peer_id, id, *request)); + } } P2pChannelsStreamingRpcAction::ResponseNextPartGet { peer_id, id } => { let msg = StreamingRpcChannelMsg::Next(id); diff --git a/p2p/src/connection/outgoing/mod.rs b/p2p/src/connection/outgoing/mod.rs index 5f4ac19e91..a022f2e184 100644 --- a/p2p/src/connection/outgoing/mod.rs +++ b/p2p/src/connection/outgoing/mod.rs @@ -179,12 +179,12 @@ impl P2pConnectionOutgoingInitOpts { _ => return None, }; Self::WebRTC { - peer_id: peer_id.into(), + peer_id: peer_id.try_into().ok()?, signaling, } } else { let opts = P2pConnectionOutgoingInitLibp2pOpts { - peer_id: peer_id.into(), + peer_id: peer_id.try_into().ok()?, host: host.parse().ok()?, port: msg.libp2p_port.as_u64() as u16, }; @@ -203,7 +203,8 @@ impl P2pConnectionOutgoingInitOpts { host: opts.host.to_string().as_bytes().into(), libp2p_port: 
(opts.port as u64).into(), peer_id: v2::NetworkPeerPeerIdStableV1( - libp2p_identity::PeerId::from(opts.peer_id) + libp2p_identity::PeerId::try_from(opts.peer_id) + .ok()? .to_string() .into_bytes() .into(), @@ -235,8 +236,8 @@ impl P2pConnectionOutgoingInitOpts { } impl P2pConnectionOutgoingInitLibp2pOpts { - pub fn to_maddr(&self) -> multiaddr::Multiaddr { - self.into() + pub fn to_maddr(&self) -> Option { + self.clone().try_into().ok() } } @@ -248,7 +249,11 @@ impl fmt::Display for P2pConnectionOutgoingInitOpts { } Self::LibP2P(v) => { - write!(f, "{}", v.to_maddr()) + if let Some(maddr) = v.to_maddr() { + write!(f, "{}", maddr) + } else { + write!(f, "*INVALID MULTIADDRESS*") + } } } } @@ -329,11 +334,13 @@ impl<'de> Deserialize<'de> for P2pConnectionOutgoingInitOpts { } } -impl From<&P2pConnectionOutgoingInitLibp2pOpts> for multiaddr::Multiaddr { - fn from(value: &P2pConnectionOutgoingInitLibp2pOpts) -> Self { +impl TryFrom for multiaddr::Multiaddr { + type Error = libp2p_identity::DecodingError; + + fn try_from(value: P2pConnectionOutgoingInitLibp2pOpts) -> Result { use multiaddr::Protocol; - Self::empty() + Ok(Self::empty() .with(match &value.host { // maybe should be just `Dns`? Host::Domain(v) => Protocol::Dns4(v.into()), @@ -341,7 +348,9 @@ impl From<&P2pConnectionOutgoingInitLibp2pOpts> for multiaddr::Multiaddr { Host::Ipv6(v) => Protocol::Ip6(*v), }) .with(Protocol::Tcp(value.port)) - .with(Protocol::P2p(libp2p_identity::PeerId::from(value.peer_id))) + .with(Protocol::P2p(libp2p_identity::PeerId::try_from( + value.peer_id, + )?))) } } @@ -405,7 +414,12 @@ impl TryFrom<&multiaddr::Multiaddr> for P2pConnectionOutgoingInitLibp2pOpts { "invalid peer_id multihash".to_string(), ) })? - .into(), + .try_into() + .map_err(|_| { + P2pConnectionOutgoingInitOptsParseError::Other( + "unexpected error converting PeerId".to_string(), + ) + })?, Some(_) => { return Err(P2pConnectionOutgoingInitOptsParseError::Other( "unexpected part in multiaddr! 
expected peer_id".to_string(), diff --git a/p2p/src/discovery/p2p_discovery_effects.rs b/p2p/src/discovery/p2p_discovery_effects.rs index d368981499..24156bd8a1 100644 --- a/p2p/src/discovery/p2p_discovery_effects.rs +++ b/p2p/src/discovery/p2p_discovery_effects.rs @@ -26,6 +26,7 @@ impl P2pDiscoveryAction { peer_id, id: status.channels.next_local_rpc_id(), request: Box::new(P2pRpcRequest::InitialPeers), + on_init: None, }); } P2pDiscoveryAction::Success { .. } => {} diff --git a/p2p/src/identity/peer_id.rs b/p2p/src/identity/peer_id.rs index 71d59e8ce5..ee7ea2f5fd 100644 --- a/p2p/src/identity/peer_id.rs +++ b/p2p/src/identity/peer_id.rs @@ -1,6 +1,7 @@ use std::{fmt, str::FromStr}; use binprot::{BinProtRead, BinProtWrite, Nat0}; +use libp2p_identity::DecodingError; use serde::{Deserialize, Serialize}; use super::PublicKey; @@ -12,26 +13,31 @@ impl PeerId { const BASE58_CHECK_VERSION: u8 = 0x2F; // 'p' pub fn from_bytes(bytes: [u8; 32]) -> Self { - let mut iter = bytes - .chunks(8) - .map(|v| <[u8; 8]>::try_from(v).unwrap()) - .map(u64::from_be_bytes); + let mut chunk0: [u8; 8] = [0; 8]; + let mut chunk1: [u8; 8] = [0; 8]; + let mut chunk2: [u8; 8] = [0; 8]; + let mut chunk3: [u8; 8] = [0; 8]; + + chunk0.copy_from_slice(&bytes[0..8]); + chunk1.copy_from_slice(&bytes[8..16]); + chunk2.copy_from_slice(&bytes[16..24]); + chunk3.copy_from_slice(&bytes[24..32]); + Self([ - iter.next().unwrap(), - iter.next().unwrap(), - iter.next().unwrap(), - iter.next().unwrap(), + u64::from_be_bytes(chunk0), + u64::from_be_bytes(chunk1), + u64::from_be_bytes(chunk2), + u64::from_be_bytes(chunk3), ]) } pub fn to_bytes(self) -> [u8; 32] { - // Not the most optimal way. 
- self.0 - .into_iter() - .flat_map(|v| v.to_be_bytes()) - .collect::>() - .try_into() - .unwrap() + let mut result: [u8; 32] = [0; 32]; + result[0..8].copy_from_slice(&self.0[0].to_be_bytes()); + result[8..16].copy_from_slice(&self.0[1].to_be_bytes()); + result[16..24].copy_from_slice(&self.0[2].to_be_bytes()); + result[24..32].copy_from_slice(&self.0[3].to_be_bytes()); + result } pub fn from_public_key(key: PublicKey) -> Self { @@ -44,7 +50,11 @@ impl PeerId { #[cfg(not(target_arch = "wasm32"))] pub fn to_libp2p_string(self) -> String { - libp2p_identity::PeerId::from(self).to_string() + if let Ok(peer_id) = libp2p_identity::PeerId::try_from(self) { + peer_id.to_string() + } else { + "INVALID PEER_ID".to_string() + } } } @@ -107,10 +117,10 @@ impl From for [u8; 32] { } } -impl TryFrom<&libp2p_identity::PeerId> for PeerId { +impl TryFrom for PeerId { type Error = PeerIdFromLibp2pPeerId; - fn try_from(value: &libp2p_identity::PeerId) -> Result { + fn try_from(value: libp2p_identity::PeerId) -> Result { let slice = value.as_ref().digest(); if value.as_ref().code() == 0x12 { return Err(PeerIdFromLibp2pPeerId::Code); @@ -121,25 +131,14 @@ impl TryFrom<&libp2p_identity::PeerId> for PeerId { } } -impl From for PeerId { - fn from(value: libp2p_identity::PeerId) -> Self { - let slice = value.as_ref().digest(); - if value.as_ref().code() == 0x12 { - todo!("store such kind of key in our `PeerId`"); - } else { - let key = libp2p_identity::PublicKey::try_decode_protobuf(slice).unwrap(); - let bytes = key.try_into_ed25519().unwrap().to_bytes(); - PeerId::from_bytes(bytes) - } - } -} +impl TryFrom for libp2p_identity::PeerId { + type Error = DecodingError; -impl From for libp2p_identity::PeerId { - fn from(value: PeerId) -> Self { - let key = libp2p_identity::ed25519::PublicKey::try_from_bytes(&value.to_bytes()).unwrap(); + fn try_from(value: PeerId) -> Result { + let key = libp2p_identity::ed25519::PublicKey::try_from_bytes(&value.to_bytes())?; #[allow(deprecated)] let key = 
libp2p_identity::PublicKey::from(key); - key.to_peer_id() + Ok(key.to_peer_id()) } } @@ -218,8 +217,8 @@ mod tests { fn test_libp2p_peer_id_conv() { let s = "12D3KooWEiGVAFC7curXWXiGZyMWnZK9h8BKr88U8D5PKV3dXciv"; let id: libp2p_identity::PeerId = s.parse().unwrap(); - let conv: PeerId = id.into(); - let id_conv: libp2p_identity::PeerId = conv.into(); + let conv: PeerId = id.try_into().unwrap(); + let id_conv: libp2p_identity::PeerId = conv.try_into().unwrap(); assert_eq!(id_conv, id); } @@ -228,8 +227,8 @@ mod tests { fn test_bare_base58btc_pk() { let s = "QmSXffHzFVSEoQCYBS1bPpCn4vgGEpQnCA9NLYuhamPBU3"; let id: libp2p_identity::PeerId = s.parse().unwrap(); - let conv: PeerId = id.into(); - let id_conv: libp2p_identity::PeerId = conv.into(); + let conv: PeerId = id.try_into().unwrap(); + let id_conv: libp2p_identity::PeerId = conv.try_into().unwrap(); assert_eq!(id_conv, id); } } diff --git a/p2p/src/network/identify/stream/p2p_network_identify_stream_effects.rs b/p2p/src/network/identify/stream/p2p_network_identify_stream_effects.rs index 21f2f37cc0..9b3f32dab0 100644 --- a/p2p/src/network/identify/stream/p2p_network_identify_stream_effects.rs +++ b/p2p/src/network/identify/stream/p2p_network_identify_stream_effects.rs @@ -1,7 +1,7 @@ use std::net::SocketAddr; use multiaddr::Multiaddr; -use openmina_core::{error, fuzzed_maybe, log::system_time, warn}; +use openmina_core::{bug_condition, error, fuzzed_maybe, log::system_time, warn}; use redux::ActionMeta; use super::{ @@ -64,101 +64,100 @@ impl P2pNetworkIdentifyStreamAction { incoming: true, stream_id, } => { - if let P2pNetworkIdentifyStreamState::SendIdentify = state { - let mut listen_addrs = Vec::new(); - for addr in store - .state() - .network - .scheduler - .listeners - .iter() - .cloned() - .collect::>() - { - listen_addrs.extend(get_addrs::, _>(&addr, store.service())) - } + let P2pNetworkIdentifyStreamState::SendIdentify = state else { + bug_condition!("Invalid state {:?} for action {:?}", state, self); + 
return Ok(()); + }; - let public_key = Some(store.state().config.identity_pub_key.clone()); - - let mut protocols = vec![ - // token::StreamKind::Broadcast(token::BroadcastAlgorithm::Floodsub1_0_0), - token::StreamKind::Identify(token::IdentifyAlgorithm::Identify1_0_0), - // token::StreamKind::Identify( - // token::IdentifyAlgorithm::IdentifyPush1_0_0, - // ), - // token::StreamKind::Broadcast(token::BroadcastAlgorithm::Meshsub1_0_0), - token::StreamKind::Broadcast(token::BroadcastAlgorithm::Meshsub1_1_0), - // token::StreamKind::Ping(token::PingAlgorithm::Ping1_0_0), - // token::StreamKind::Bitswap(token::BitswapAlgorithm::MinaBitswap), - // token::StreamKind::Bitswap(token::BitswapAlgorithm::MinaBitswap1_0_0), - // token::StreamKind::Bitswap(token::BitswapAlgorithm::MinaBitswap1_1_0), - // token::StreamKind::Bitswap(token::BitswapAlgorithm::MinaBitswap1_2_0), - // token::StreamKind::Status(token::StatusAlgorithm::MinaNodeStatus), - token::StreamKind::Rpc(token::RpcAlgorithm::Rpc0_0_1), - ]; - if store.state().network.scheduler.discovery_state.is_some() { - protocols.push(token::StreamKind::Discovery( - token::DiscoveryAlgorithm::Kademlia1_0_0, - )); - } - let identify_msg = P2pNetworkIdentify { - protocol_version: Some("ipfs/0.1.0".to_string()), - // TODO: include build info from GlobalConfig (?) - agent_version: Some("openmina".to_owned()), - public_key, - listen_addrs, - // TODO: other peers seem to report inaccurate information, should we implement this? 
- observed_addr: None, - protocols, - }; - - //println!("{:?}", identify_msg); - - let mut out = Vec::new(); - let identify_msg_proto: pb::Identify = (&identify_msg).into(); - - if let Err(err) = - prost::Message::encode_length_delimited(&identify_msg_proto, &mut out) - { - warn!(meta.time(); summary = "error serializing Identify message", error = err.to_string(), action = format!("{self:?}")); - return Ok(()); - } + let mut listen_addrs = Vec::new(); + for addr in store + .state() + .network + .scheduler + .listeners + .iter() + .cloned() + .collect::>() + { + listen_addrs.extend(get_addrs::, _>(&addr, store.service())) + } - let data = fuzzed_maybe!( - Data(out.into_boxed_slice()), - crate::fuzzer::mutate_identify_msg - ); + let public_key = Some(store.state().config.identity_pub_key.clone()); - let flags = - fuzzed_maybe!(Default::default(), crate::fuzzer::mutate_yamux_flags); + let mut protocols = vec![ + // token::StreamKind::Broadcast(token::BroadcastAlgorithm::Floodsub1_0_0), + token::StreamKind::Identify(token::IdentifyAlgorithm::Identify1_0_0), + // token::StreamKind::Identify( + // token::IdentifyAlgorithm::IdentifyPush1_0_0, + // ), + // token::StreamKind::Broadcast(token::BroadcastAlgorithm::Meshsub1_0_0), + token::StreamKind::Broadcast(token::BroadcastAlgorithm::Meshsub1_1_0), + // token::StreamKind::Ping(token::PingAlgorithm::Ping1_0_0), + // token::StreamKind::Bitswap(token::BitswapAlgorithm::MinaBitswap), + // token::StreamKind::Bitswap(token::BitswapAlgorithm::MinaBitswap1_0_0), + // token::StreamKind::Bitswap(token::BitswapAlgorithm::MinaBitswap1_1_0), + // token::StreamKind::Bitswap(token::BitswapAlgorithm::MinaBitswap1_2_0), + // token::StreamKind::Status(token::StatusAlgorithm::MinaNodeStatus), + token::StreamKind::Rpc(token::RpcAlgorithm::Rpc0_0_1), + ]; + if store.state().network.scheduler.discovery_state.is_some() { + protocols.push(token::StreamKind::Discovery( + token::DiscoveryAlgorithm::Kademlia1_0_0, + )); + } + let identify_msg = 
P2pNetworkIdentify { + protocol_version: Some("ipfs/0.1.0".to_string()), + // TODO: include build info from GlobalConfig (?) + agent_version: Some("openmina".to_owned()), + public_key, + listen_addrs, + // TODO: other peers seem to report inaccurate information, should we implement this? + observed_addr: None, + protocols, + }; - store.dispatch(P2pNetworkYamuxAction::OutgoingData { - addr, - stream_id, - data, - flags, - }); + //println!("{:?}", identify_msg); - store.dispatch(P2pNetworkIdentifyStreamAction::Close { - addr, - peer_id, - stream_id, - }); + let mut out = Vec::new(); + let identify_msg_proto: pb::Identify = (&identify_msg).into(); - Ok(()) - } else { - unreachable!() + if let Err(err) = + prost::Message::encode_length_delimited(&identify_msg_proto, &mut out) + { + warn!(meta.time(); summary = "error serializing Identify message", error = err.to_string(), action = format!("{self:?}")); + return Ok(()); } + + let data = fuzzed_maybe!( + Data(out.into_boxed_slice()), + crate::fuzzer::mutate_identify_msg + ); + + let flags = fuzzed_maybe!(Default::default(), crate::fuzzer::mutate_yamux_flags); + + store.dispatch(P2pNetworkYamuxAction::OutgoingData { + addr, + stream_id, + data, + flags, + }); + + store.dispatch(P2pNetworkIdentifyStreamAction::Close { + addr, + peer_id, + stream_id, + }); + + Ok(()) } P2pNetworkIdentifyStreamAction::New { incoming: false, .. } => { - if let P2pNetworkIdentifyStreamState::RecvIdentify = state { - Ok(()) - } else { - println!("STATE: {:?}", state); - unreachable!() + if !matches!(state, P2pNetworkIdentifyStreamState::RecvIdentify) { + bug_condition!("Invalid state {:?} for action {:?}", state, self); } + + Ok(()) } P2pNetworkIdentifyStreamAction::IncomingData { addr, @@ -249,7 +248,10 @@ impl P2pNetworkIdentifyStreamAction { _ => Err(format!("incorrect state {state:?} for action {self:?}")), } } - P2pNetworkIdentifyStreamAction::Prune { .. 
} => unreachable!(), // handled before match + P2pNetworkIdentifyStreamAction::Prune { .. } => { + bug_condition!("Invalid state {:?} for action {:?}", state, self); + Ok(()) + } } } } diff --git a/p2p/src/network/identify/stream/p2p_network_identify_stream_reducer.rs b/p2p/src/network/identify/stream/p2p_network_identify_stream_reducer.rs index 29c492d1b2..02fd9af4a3 100644 --- a/p2p/src/network/identify/stream/p2p_network_identify_stream_reducer.rs +++ b/p2p/src/network/identify/stream/p2p_network_identify_stream_reducer.rs @@ -5,6 +5,7 @@ use crate::{ network::identify::{pb::Identify, P2pNetworkIdentify}, P2pLimits, P2pNetworkStreamProtobufError, }; +use openmina_core::bug_condition; use prost::Message; use quick_protobuf::BytesReader; use redux::ActionWithMeta; @@ -15,48 +16,53 @@ impl P2pNetworkIdentifyStreamState { action: ActionWithMeta<&P2pNetworkIdentifyStreamAction>, limits: &P2pLimits, ) -> Result<(), String> { - use super::P2pNetworkIdentifyStreamAction as A; - use super::P2pNetworkIdentifyStreamState as S; let (action, _meta) = action.split(); match &self { - S::Default => { - if let A::New { incoming, .. } = action { - let kind = P2pNetworkIdentifyStreamKind::from(*incoming); - - *self = match kind { - // For incoming streams we prepare to send the Identify message - P2pNetworkIdentifyStreamKind::Incoming => S::SendIdentify, - // For outgoing streams we expect to get the Identify message from the remote peer - P2pNetworkIdentifyStreamKind::Outgoing => S::RecvIdentify, - }; - Ok(()) - } else { + P2pNetworkIdentifyStreamState::Default => { + let P2pNetworkIdentifyStreamAction::New { incoming, .. 
} = action else { // enabling conditions should prevent receiving other actions in Default state - unreachable!() - } + bug_condition!("Received action {:?} in Default state", action); + return Ok(()); + }; + + let kind = P2pNetworkIdentifyStreamKind::from(*incoming); + + *self = match kind { + // For incoming streams we prepare to send the Identify message + P2pNetworkIdentifyStreamKind::Incoming => { + P2pNetworkIdentifyStreamState::SendIdentify + } + // For outgoing streams we expect to get the Identify message from the remote peer + P2pNetworkIdentifyStreamKind::Outgoing => { + P2pNetworkIdentifyStreamState::RecvIdentify + } + }; + + Ok(()) } - S::RecvIdentify => match action { - A::IncomingData { data, .. } => { + P2pNetworkIdentifyStreamState::RecvIdentify => match action { + P2pNetworkIdentifyStreamAction::IncomingData { data, .. } => { let data = &data.0; let mut reader = BytesReader::from_bytes(data); let Ok(len) = reader.read_varint32(data).map(|v| v as usize) else { - *self = S::Error(P2pNetworkStreamProtobufError::MessageLength); + *self = P2pNetworkIdentifyStreamState::Error( + P2pNetworkStreamProtobufError::MessageLength, + ); return Ok(()); }; // TODO: implement as configuration option if len > limits.identify_message() { - *self = S::Error(P2pNetworkStreamProtobufError::Limit( - len, - limits.identify_message(), - )); + *self = P2pNetworkIdentifyStreamState::Error( + P2pNetworkStreamProtobufError::Limit(len, limits.identify_message()), + ); return Ok(()); } let data = &data[(data.len() - reader.len())..]; if len > reader.len() { - *self = S::IncomingPartialData { + *self = P2pNetworkIdentifyStreamState::IncomingPartialData { len, data: data.to_vec(), }; @@ -65,35 +71,44 @@ impl P2pNetworkIdentifyStreamState { self.handle_incoming_identify_message(len, data) } } - A::RemoteClose { .. } => Ok(()), - A::Close { .. } => todo!(), - _ => unreachable!(), + P2pNetworkIdentifyStreamAction::RemoteClose { .. 
} => Ok(()), + _ => { + // State and connection cleanup should be handled by timeout + bug_condition!("Received action {:?} in RecvIdentify state", action); + Ok(()) + } }, - S::IncomingPartialData { len, data } => match action { - A::IncomingData { data: new_data, .. } => { + P2pNetworkIdentifyStreamState::IncomingPartialData { len, data } => match action { + P2pNetworkIdentifyStreamAction::IncomingData { data: new_data, .. } => { let mut data = data.clone(); data.extend_from_slice(&new_data.0); if *len > data.len() { - *self = S::IncomingPartialData { len: *len, data }; + *self = + P2pNetworkIdentifyStreamState::IncomingPartialData { len: *len, data }; Ok(()) } else { self.handle_incoming_identify_message(*len, &data) } } - A::RemoteClose { .. } => Ok(()), - A::Close { .. } => todo!(), + P2pNetworkIdentifyStreamAction::RemoteClose { .. } => Ok(()), _ => { - unreachable!(); + // State and connection cleanup should be handled by timeout + bug_condition!("Received action {:?} in IncomingPartialData state", action); + Ok(()) } }, - S::SendIdentify => match action { - A::RemoteClose { .. } => Ok(()), - A::Close { .. } => Ok(()), - _ => unreachable!(), + P2pNetworkIdentifyStreamState::SendIdentify => match action { + P2pNetworkIdentifyStreamAction::RemoteClose { .. } => Ok(()), + P2pNetworkIdentifyStreamAction::Close { .. } => Ok(()), + _ => { + // State and connection cleanup should be handled by timeout + bug_condition!("Received action {:?} in SendIdentify state", action); + Ok(()) + } }, - S::IdentifyReceived { .. } => Ok(()), - S::Error(_) => { + P2pNetworkIdentifyStreamState::IdentifyReceived { .. 
} => Ok(()), + P2pNetworkIdentifyStreamState::Error(_) => { // TODO Ok(()) } diff --git a/p2p/src/network/kad/bootstrap/p2p_network_kad_bootstrap_state.rs b/p2p/src/network/kad/bootstrap/p2p_network_kad_bootstrap_state.rs index 3b98862e54..9ff75d0505 100644 --- a/p2p/src/network/kad/bootstrap/p2p_network_kad_bootstrap_state.rs +++ b/p2p/src/network/kad/bootstrap/p2p_network_kad_bootstrap_state.rs @@ -33,7 +33,7 @@ impl P2pNetworkKadBootstrapState { pub fn new(key: PeerId) -> Self { P2pNetworkKadBootstrapState { key, - kademlia_key: key.into(), + kademlia_key: key.try_into().expect("valid key"), // TODO: propagate error processed_peers: BTreeSet::new(), requests: BTreeMap::new(), successful_requests: 0, diff --git a/p2p/src/network/kad/p2p_network_kad_internals.rs b/p2p/src/network/kad/p2p_network_kad_internals.rs index 332316f293..5c48301cee 100644 --- a/p2p/src/network/kad/p2p_network_kad_internals.rs +++ b/p2p/src/network/kad/p2p_network_kad_internals.rs @@ -5,6 +5,7 @@ use std::{ use crypto_bigint::{ArrayEncoding, Encoding, U256}; use derive_more::From; +use libp2p_identity::DecodingError; use multiaddr::Multiaddr; use serde::{Deserialize, Serialize}; use sha2::{Digest, Sha256}; @@ -65,15 +66,27 @@ mod u256_serde { #[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)] pub struct P2pNetworkKadKey(#[serde(with = "u256_serde")] U256); -impl From<&PeerId> for P2pNetworkKadKey { - fn from(value: &PeerId) -> Self { - P2pNetworkKadKey::from(*value) +#[derive(Clone, Debug, Serialize, PartialEq, Deserialize, thiserror::Error)] +pub enum P2pNetworkKadKeyError { + #[error("decoding error")] + DecodingError, +} + +impl TryFrom<&PeerId> for P2pNetworkKadKey { + type Error = P2pNetworkKadKeyError; + + fn try_from(value: &PeerId) -> Result { + P2pNetworkKadKey::try_from(*value) } } -impl From for P2pNetworkKadKey { - fn from(value: PeerId) -> Self { - P2pNetworkKadKey::from(CID::from(value)) +impl TryFrom for P2pNetworkKadKey { + type Error = 
P2pNetworkKadKeyError; + + fn try_from(value: PeerId) -> Result { + Ok(P2pNetworkKadKey::from( + CID::try_from(value).map_err(|_| P2pNetworkKadKeyError::DecodingError)?, + )) } } @@ -365,14 +378,13 @@ pub struct P2pNetworkKadEntry { } impl P2pNetworkKadEntry { - pub fn new(peer_id: PeerId, addrs: Vec) -> Self { - let key = peer_id.into(); - P2pNetworkKadEntry { - key, + pub fn new(peer_id: PeerId, addrs: Vec) -> Result { + Ok(P2pNetworkKadEntry { + key: peer_id.try_into()?, peer_id, addrs, connection: ConnectionType::NotConnected, - } + }) } pub fn dist(&self, other: &P2pNetworkKadEntry) -> P2pNetworkKadDist { @@ -397,6 +409,8 @@ pub enum P2pNetworkKadEntryTryFromError { #[error(transparent)] PeerId(#[from] P2pNetworkKademliaPeerIdError), #[error(transparent)] + Key(#[from] P2pNetworkKadKeyError), + #[error(transparent)] Multiaddr(#[from] P2pNetworkKademliaMultiaddrError), } @@ -405,7 +419,7 @@ impl TryFrom> for P2pNetworkKadEntry { fn try_from(value: super::mod_Message::Peer) -> Result { let peer_id = super::peer_id_try_from_bytes(value.id)?; - let key = peer_id.into(); + let key = peer_id.try_into()?; let addrs = value .addrs .into_iter() @@ -421,17 +435,19 @@ impl TryFrom> for P2pNetworkKadEntry { } } -impl<'a> From<&'a P2pNetworkKadEntry> for super::mod_Message::Peer<'a> { - fn from(value: &'a P2pNetworkKadEntry) -> Self { - super::mod_Message::Peer { - id: (&value.peer_id).into(), +impl<'a> TryFrom<&'a P2pNetworkKadEntry> for super::mod_Message::Peer<'a> { + type Error = DecodingError; + + fn try_from(value: &'a P2pNetworkKadEntry) -> Result { + Ok(super::mod_Message::Peer { + id: (&value.peer_id).try_into()?, addrs: value .addrs .iter() .map(|addr| addr.as_ref().into()) .collect(), connection: value.connection.into(), - } + }) } } @@ -627,7 +643,7 @@ mod tests { } fn entry_with_peer_id(peer_id: PeerId) -> P2pNetworkKadEntry { - let key = peer_id.into(); + let key = peer_id.try_into().unwrap(); P2pNetworkKadEntry { key, peer_id, @@ -639,11 +655,11 @@ mod tests 
{ #[test] fn test_key_generation() { let random_peer_id = SecretKey::rand().public_key().peer_id(); - let libp2p_peer_id = libp2p_identity::PeerId::from(random_peer_id); + let libp2p_peer_id = libp2p_identity::PeerId::try_from(random_peer_id).unwrap(); let cid = CID::from(libp2p_peer_id); - let key0 = P2pNetworkKadKey::from(&random_peer_id); - let key1 = P2pNetworkKadKey::from(random_peer_id); + let key0 = P2pNetworkKadKey::try_from(&random_peer_id).unwrap(); + let key1 = P2pNetworkKadKey::try_from(random_peer_id).unwrap(); let key2 = P2pNetworkKadKey::from(cid); assert_eq!(key0, key1); diff --git a/p2p/src/network/kad/p2p_network_kad_protocol.rs b/p2p/src/network/kad/p2p_network_kad_protocol.rs index 13331aa011..c7cc0705bd 100644 --- a/p2p/src/network/kad/p2p_network_kad_protocol.rs +++ b/p2p/src/network/kad/p2p_network_kad_protocol.rs @@ -3,10 +3,11 @@ use std::{ net::{IpAddr, SocketAddr}, }; +use libp2p_identity::DecodingError; use multiaddr::Multiaddr; use serde::{Deserialize, Serialize}; -use super::{P2pNetworkKadEntry, P2pNetworkKadEntryTryFromError}; +use super::{P2pNetworkKadEntry, P2pNetworkKadEntryTryFromError, P2pNetworkKadKeyError}; use crate::{mod_Message::MessageType, PeerId}; #[derive(Debug, PartialEq, Eq, Clone, Copy, PartialOrd, Ord, Serialize, Deserialize)] @@ -51,11 +52,14 @@ impl CID { } } -impl From for CID { - fn from(value: PeerId) -> Self { - Self::from(libp2p_identity::PeerId::from(value)) +impl TryFrom for CID { + type Error = libp2p_identity::DecodingError; + + fn try_from(value: PeerId) -> Result { + Ok(Self::from(libp2p_identity::PeerId::try_from(value)?)) } } + impl From for CID { fn from(value: libp2p_identity::PeerId) -> Self { Self(value.to_bytes()) @@ -81,8 +85,12 @@ pub enum P2pNetworkKademliaRpcReply { } impl P2pNetworkKademliaRpcRequest { - pub fn find_node(key: PeerId) -> Self { - P2pNetworkKademliaRpcRequest::FindNode { key: key.into() } + pub fn find_node(key: PeerId) -> Result { + Ok(P2pNetworkKademliaRpcRequest::FindNode { 
+ key: key + .try_into() + .map_err(|_| P2pNetworkKadKeyError::DecodingError)?, + }) } } @@ -111,12 +119,14 @@ impl<'a> TryFrom> for PeerId { pub(super) fn peer_id_try_from_bytes( bytes: Cow<'_, [u8]>, ) -> Result { - Ok((&libp2p_identity::PeerId::from_bytes(bytes.as_ref())?).try_into()?) + Ok((libp2p_identity::PeerId::from_bytes(bytes.as_ref())?).try_into()?) } -impl<'a> From<&PeerId> for Cow<'a, [u8]> { - fn from(value: &PeerId) -> Self { - libp2p_identity::PeerId::from(*value).to_bytes().into() +impl<'a> TryFrom<&PeerId> for Cow<'a, [u8]> { + type Error = DecodingError; + + fn try_from(value: &PeerId) -> Result { + Ok(libp2p_identity::PeerId::try_from(*value)?.to_bytes().into()) } } @@ -209,15 +219,25 @@ impl<'a> From<&'a P2pNetworkKademliaRpcRequest> for super::Message<'a> { } } -impl<'a> From<&'a P2pNetworkKademliaRpcReply> for super::Message<'a> { - fn from(value: &'a P2pNetworkKademliaRpcReply) -> Self { +impl<'a> TryFrom<&'a P2pNetworkKademliaRpcReply> for super::Message<'a> { + type Error = DecodingError; + + fn try_from(value: &'a P2pNetworkKademliaRpcReply) -> Result { match value { - P2pNetworkKademliaRpcReply::FindNode { closer_peers } => super::Message { - type_pb: MessageType::FIND_NODE, - clusterLevelRaw: 10, - closerPeers: closer_peers.iter().map(Into::into).collect(), - ..Default::default() - }, + P2pNetworkKademliaRpcReply::FindNode { closer_peers } => { + let mut _closer_peers = Vec::new(); + + for peer in closer_peers.iter() { + _closer_peers.push(peer.try_into()?) 
+ } + + Ok(super::Message { + type_pb: MessageType::FIND_NODE, + clusterLevelRaw: 10, + closerPeers: _closer_peers, + ..Default::default() + }) + } } } } @@ -282,9 +302,9 @@ pub mod tests { #[test] fn cid_generation() { let random_peer_id = SecretKey::rand().public_key().peer_id(); - let libp2p_peer_id = libp2p_identity::PeerId::from(random_peer_id); + let libp2p_peer_id = libp2p_identity::PeerId::try_from(random_peer_id).unwrap(); - let cid0 = CID::from(random_peer_id); + let cid0 = CID::try_from(random_peer_id).unwrap(); let cid1 = CID::from(libp2p_peer_id); assert_eq!(cid0, cid1); @@ -301,7 +321,12 @@ pub mod tests { .parse::() .unwrap(); assert_eq!( - from_bytes(&libp2p_identity::PeerId::from(peer_id).to_bytes()).unwrap(), + from_bytes( + &libp2p_identity::PeerId::try_from(peer_id) + .unwrap() + .to_bytes() + ) + .unwrap(), peer_id ); } diff --git a/p2p/src/network/kad/p2p_network_kad_reducer.rs b/p2p/src/network/kad/p2p_network_kad_reducer.rs index e420c7a597..37583b5526 100644 --- a/p2p/src/network/kad/p2p_network_kad_reducer.rs +++ b/p2p/src/network/kad/p2p_network_kad_reducer.rs @@ -100,9 +100,9 @@ impl super::P2pNetworkKadState { Ok(()) } (_, UpdateRoutingTable { peer_id, addrs }) => { - let _ = self - .routing_table - .insert(P2pNetworkKadEntry::new(*peer_id, addrs.clone())); + let _ = self.routing_table.insert( + P2pNetworkKadEntry::new(*peer_id, addrs.clone()).map_err(|e| e.to_string())?, + ); Ok(()) } (state, action) => Err(format!("invalid action {action:?} for state {state:?}")), diff --git a/p2p/src/network/kad/request/p2p_network_kad_request_effects.rs b/p2p/src/network/kad/request/p2p_network_kad_request_effects.rs index 86bfef995d..6852b78da1 100644 --- a/p2p/src/network/kad/request/p2p_network_kad_request_effects.rs +++ b/p2p/src/network/kad/request/p2p_network_kad_request_effects.rs @@ -1,5 +1,6 @@ use std::net::SocketAddr; +use openmina_core::bug_condition; use redux::ActionMeta; use crate::{ @@ -137,7 +138,12 @@ impl 
P2pNetworkKadRequestAction { stream_id, addr, } => { - let data = crate::P2pNetworkKademliaRpcRequest::find_node(request_state.key); + let Ok(data) = crate::P2pNetworkKademliaRpcRequest::find_node(request_state.key) + else { + bug_condition!("P2pNetworkKadRequestAction::StreamReady invalid request key"); + return Ok(()); + }; + store.dispatch(P2pNetworkKademliaStreamAction::SendRequest { addr, peer_id, diff --git a/p2p/src/network/kad/request/p2p_network_kad_request_reducer.rs b/p2p/src/network/kad/request/p2p_network_kad_request_reducer.rs index 48899ca9d2..8d903ce8b4 100644 --- a/p2p/src/network/kad/request/p2p_network_kad_request_reducer.rs +++ b/p2p/src/network/kad/request/p2p_network_kad_request_reducer.rs @@ -21,7 +21,8 @@ impl P2pNetworkKadRequestState { self.status = super::P2pNetworkKadRequestStatus::WaitingForKadStream(*stream_id) } P2pNetworkKadRequestAction::StreamReady { .. } => { - let find_node = P2pNetworkKademliaRpcRequest::find_node(self.key); + let find_node = + P2pNetworkKademliaRpcRequest::find_node(self.key).map_err(|e| e.to_string())?; let message = super::super::Message::from(&find_node); self.status = quick_protobuf::serialize_into_vec(&message).map_or_else( |e| { diff --git a/p2p/src/network/kad/stream/p2p_network_kad_stream_reducer.rs b/p2p/src/network/kad/stream/p2p_network_kad_stream_reducer.rs index a4c11dc406..d0ea2bf00d 100644 --- a/p2p/src/network/kad/stream/p2p_network_kad_stream_reducer.rs +++ b/p2p/src/network/kad/stream/p2p_network_kad_stream_reducer.rs @@ -100,7 +100,7 @@ impl P2pNetworkKadIncomingStreamState { P2pNetworkKadIncomingStreamState::WaitingForReply, P2pNetworkKademliaStreamAction::SendResponse { data, .. 
}, ) => { - let message = Message::from(data); + let message = Message::try_from(data).map_err(|e| e.to_string())?; let bytes = serialize_into_vec(&message).map_err(|e| format!("{e}"))?; *self = P2pNetworkKadIncomingStreamState::ResponseBytesAreReady { bytes }; Ok(()) diff --git a/p2p/src/network/p2p_network_state.rs b/p2p/src/network/p2p_network_state.rs index 584603f287..3f2ca1a84f 100644 --- a/p2p/src/network/p2p_network_state.rs +++ b/p2p/src/network/p2p_network_state.rs @@ -22,13 +22,12 @@ impl P2pNetworkState { let peer_id = identity.peer_id(); let pnet_key = chain_id.preshared_key(); let discovery_state = discovery.then(|| { - let mut routing_table = - P2pNetworkKadRoutingTable::new(P2pNetworkKadEntry::new(peer_id, addrs)); - routing_table.extend( - known_peers - .into_iter() - .map(|(peer_id, maddr)| P2pNetworkKadEntry::new(peer_id, vec![maddr])), + let mut routing_table = P2pNetworkKadRoutingTable::new( + P2pNetworkKadEntry::new(peer_id, addrs).expect("valid peer_id"), ); + routing_table.extend(known_peers.into_iter().map(|(peer_id, maddr)| { + P2pNetworkKadEntry::new(peer_id, vec![maddr]).expect("valid known peer") + })); P2pNetworkKadState { routing_table, ..Default::default() diff --git a/p2p/src/network/pubsub/p2p_network_pubsub_reducer.rs b/p2p/src/network/pubsub/p2p_network_pubsub_reducer.rs index 593e9e807e..0dea83c3c2 100644 --- a/p2p/src/network/pubsub/p2p_network_pubsub_reducer.rs +++ b/p2p/src/network/pubsub/p2p_network_pubsub_reducer.rs @@ -279,11 +279,8 @@ impl P2pNetworkPubsubState { } => { self.seq += 1; - // let libp2p_pk = libp2p_identity::PublicKey::from( - // libp2p_identity::ed25519::PublicKey::try_from_bytes(&author.to_bytes()) - // .expect("msg"), - // ); - let libp2p_peer_id = libp2p_identity::PeerId::from(*author); + let libp2p_peer_id = + libp2p_identity::PeerId::try_from(*author).expect("valid peer_id"); // This can't happen unless something is broken in the configuration self.to_sign.push_back(pb::Message { from: 
Some(libp2p_peer_id.to_bytes()), data: Some(data.0.clone().into_vec()), diff --git a/p2p/src/network/select/p2p_network_select_effects.rs b/p2p/src/network/select/p2p_network_select_effects.rs index 177fe2ac92..99734e49db 100644 --- a/p2p/src/network/select/p2p_network_select_effects.rs +++ b/p2p/src/network/select/p2p_network_select_effects.rs @@ -1,9 +1,9 @@ use self::token::{ - AuthKind, DiscoveryAlgorithm, IdentifyAlgorithm, MuxKind, PingAlgorithm, Protocol, - RpcAlgorithm, StreamKind, Token, + AuthKind, DiscoveryAlgorithm, IdentifyAlgorithm, MuxKind, Protocol, RpcAlgorithm, StreamKind, + Token, }; -use openmina_core::{fuzz_maybe, fuzzed_maybe}; +use openmina_core::{bug_condition, fuzz_maybe, fuzzed_maybe}; use crate::{ fuzzer::{mutate_select_authentication, mutate_select_multiplexing, mutate_select_stream}, @@ -92,104 +92,117 @@ impl P2pNetworkSelectAction { } } P2pNetworkSelectAction::IncomingPayloadAuth { - addr, fin, data, .. + addr, + fin, + ref data, + .. } | P2pNetworkSelectAction::IncomingPayloadMux { - addr, fin, data, .. + addr, + fin, + ref data, + .. } | P2pNetworkSelectAction::IncomingPayload { - addr, fin, data, .. + addr, + fin, + ref data, + .. 
} => { - if let Some(Some(negotiated)) = &state.negotiated { - match negotiated { - Protocol::Auth(AuthKind::Noise) => { - store.dispatch(P2pNetworkNoiseAction::IncomingData { addr, data }); - } - Protocol::Mux(MuxKind::Yamux1_0_0 | MuxKind::YamuxNoNewLine1_0_0) => { - store.dispatch(P2pNetworkYamuxAction::IncomingData { addr, data }); - } - Protocol::Stream(kind) => match select_kind { - SelectKind::Stream(peer_id, stream_id) => { - match kind { - StreamKind::Discovery(DiscoveryAlgorithm::Kademlia1_0_0) => { - if !fin { - store.dispatch( - P2pNetworkKademliaStreamAction::IncomingData { - addr, - peer_id, - stream_id, - data, - }, - ); - } else { - store.dispatch( - P2pNetworkKademliaStreamAction::RemoteClose { - addr, - peer_id, - stream_id, - }, - ); - } - } - StreamKind::Identify(IdentifyAlgorithm::Identify1_0_0) => { - if !fin { - //println!("==== {}", hex::encode(&a.data.0)); - store.dispatch( - P2pNetworkIdentifyStreamAction::IncomingData { - addr, - peer_id, - stream_id, - data, - }, - ); - } else { - store.dispatch( - P2pNetworkIdentifyStreamAction::RemoteClose { - addr, - peer_id, - stream_id, - }, - ); - } - } - StreamKind::Identify(IdentifyAlgorithm::IdentifyPush1_0_0) => { - //unimplemented!() - } - StreamKind::Broadcast(_) => { - store.dispatch(P2pNetworkPubsubAction::IncomingData { - peer_id, - addr, - stream_id, - data, - seen_limit: store.state().config.meshsub.mcache_len, - }); - } - StreamKind::Ping(PingAlgorithm::Ping1_0_0) => { - //unimplemented!() - } - StreamKind::Bitswap(_) => { - //unimplemented!() - } - StreamKind::Status(_) => { - //unimplemented!() + let Some(Some(negotiated)) = &state.negotiated else { + bug_condition!( + "Invalid negotiation state {:?} for action {:?}", + state.negotiated, + self + ); + return; + }; + match negotiated { + Protocol::Auth(AuthKind::Noise) => { + store.dispatch(P2pNetworkNoiseAction::IncomingData { + addr, + data: data.clone(), + }); + } + Protocol::Mux(MuxKind::Yamux1_0_0 | MuxKind::YamuxNoNewLine1_0_0) 
=> { + store.dispatch(P2pNetworkYamuxAction::IncomingData { + addr, + data: data.clone(), + }); + } + Protocol::Stream(kind) => match select_kind { + SelectKind::Stream(peer_id, stream_id) => { + match kind { + StreamKind::Discovery(DiscoveryAlgorithm::Kademlia1_0_0) => { + if !fin { + store.dispatch( + P2pNetworkKademliaStreamAction::IncomingData { + addr, + peer_id, + stream_id, + data: data.clone(), + }, + ); + } else { + store.dispatch( + P2pNetworkKademliaStreamAction::RemoteClose { + addr, + peer_id, + stream_id, + }, + ); } - StreamKind::Rpc(RpcAlgorithm::Rpc0_0_1) => { - store.dispatch(P2pNetworkRpcAction::IncomingData { - addr, - peer_id, - stream_id, - data, - }); + } + StreamKind::Identify(IdentifyAlgorithm::Identify1_0_0) => { + if !fin { + //println!("==== {}", hex::encode(&a.data.0)); + store.dispatch( + P2pNetworkIdentifyStreamAction::IncomingData { + addr, + peer_id, + stream_id, + data: data.clone(), + }, + ); + } else { + store.dispatch( + P2pNetworkIdentifyStreamAction::RemoteClose { + addr, + peer_id, + stream_id, + }, + ); } } + StreamKind::Broadcast(_) => { + store.dispatch(P2pNetworkPubsubAction::IncomingData { + peer_id, + addr, + stream_id, + data: data.clone(), + seen_limit: store.state().config.meshsub.mcache_len, + }); + } + StreamKind::Rpc(RpcAlgorithm::Rpc0_0_1) => { + store.dispatch(P2pNetworkRpcAction::IncomingData { + addr, + peer_id, + stream_id, + data: data.clone(), + }); + } + _ => { + bug_condition!( + "trying to negotiate unimplemented stream kind {:?}", + kind + ); + } } - _ => { - openmina_core::error!(meta.time(); "invalid select protocol kind: {:?}", kind); - } - }, - } - } else { - unreachable!() + } + _ => { + openmina_core::error!(meta.time(); "invalid select protocol kind: {:?}", kind); + } + }, } } P2pNetworkSelectAction::IncomingToken { addr, kind, .. 
} => { diff --git a/p2p/src/p2p_effects.rs b/p2p/src/p2p_effects.rs index a0b8a8f826..46ea13ed38 100644 --- a/p2p/src/p2p_effects.rs +++ b/p2p/src/p2p_effects.rs @@ -1,3 +1,4 @@ +use openmina_core::bug_condition; use redux::{ActionMeta, ActionWithMeta}; use crate::{ @@ -252,14 +253,19 @@ where #[cfg(feature = "p2p-libp2p")] if let Some(discovery_state) = state.network.scheduler.discovery_state() { - let key = state.my_id(); - if discovery_state - .routing_table - .closest_peers(&P2pNetworkKadKey::from(&key)) - .any(|_| true) - && discovery_state.status.can_bootstrap(now, &config.timeouts) - { - store.dispatch(P2pNetworkKademliaAction::StartBootstrap { key }); + let my_id = state.my_id(); + match P2pNetworkKadKey::try_from(&my_id) { + Ok(key) => { + if discovery_state + .routing_table + .closest_peers(&key) + .any(|_| true) + && discovery_state.status.can_bootstrap(now, &config.timeouts) + { + store.dispatch(P2pNetworkKademliaAction::StartBootstrap { key: my_id }); + } + } + Err(e) => bug_condition!("p2p_discovery error {:?}", e), } } } diff --git a/p2p/testing/Cargo.toml b/p2p/testing/Cargo.toml index 44d5f01aa6..8d58d4a0ec 100644 --- a/p2p/testing/Cargo.toml +++ b/p2p/testing/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "p2p-testing" -version = "0.8.1" +version = "0.8.2" edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html diff --git a/p2p/testing/src/cluster.rs b/p2p/testing/src/cluster.rs index eed8c33fa7..753650c30a 100644 --- a/p2p/testing/src/cluster.rs +++ b/p2p/testing/src/cluster.rs @@ -438,14 +438,20 @@ impl Cluster { Listener::Rust(id) => Ok(self.rust_node(id).libp2p_dial_opts(self.ip)), Listener::Libp2p(id) => Ok(self.libp2p_node(id).libp2p_dial_opts(self.ip)), Listener::Multiaddr(maddr) => Ok(maddr), - Listener::SocketPeerId(socket, peer_id) => match socket { - SocketAddr::V4(ipv4) => { - Ok(multiaddr!(Ip4(*ipv4.ip()), Tcp(ipv4.port()), P2p(peer_id))) - } - SocketAddr::V6(ipv6) => { - 
Ok(multiaddr!(Ip6(*ipv6.ip()), Tcp(ipv6.port()), P2p(peer_id))) + Listener::SocketPeerId(socket, peer_id) => { + let peer_id: libp2p::PeerId = peer_id + .try_into() + .map_err(|_| Error::Other("Listener: invalid peer_id".to_string()))?; + + match socket { + SocketAddr::V4(ipv4) => { + Ok(multiaddr!(Ip4(*ipv4.ip()), Tcp(ipv4.port()), P2p(peer_id))) + } + SocketAddr::V6(ipv6) => { + Ok(multiaddr!(Ip6(*ipv6.ip()), Tcp(ipv6.port()), P2p(peer_id))) + } } - }, + } } } diff --git a/p2p/testing/src/libp2p_node.rs b/p2p/testing/src/libp2p_node.rs index a208537c6a..6903df463b 100644 --- a/p2p/testing/src/libp2p_node.rs +++ b/p2p/testing/src/libp2p_node.rs @@ -46,7 +46,7 @@ impl Libp2pNode { impl TestNode for Libp2pNode { fn peer_id(&self) -> PeerId { - (*self.swarm.local_peer_id()).into() + (*self.swarm.local_peer_id()).try_into().unwrap() } fn libp2p_port(&self) -> u16 { diff --git a/p2p/testing/src/predicates.rs b/p2p/testing/src/predicates.rs index d78fe83438..65c9317542 100644 --- a/p2p/testing/src/predicates.rs +++ b/p2p/testing/src/predicates.rs @@ -121,7 +121,10 @@ where ClusterEvent::Libp2p { id, event: Libp2pEvent::ConnectionEstablished { peer_id, .. 
}, - } => nodes_peers.remove(&(id.into(), peer_id.into())) && nodes_peers.is_empty(), + } => { + nodes_peers.remove(&(id.into(), peer_id.try_into().unwrap())) + && nodes_peers.is_empty() + } _ => false, }) } diff --git a/p2p/testing/src/test_node.rs b/p2p/testing/src/test_node.rs index 1c68628dd5..53d13d4e79 100644 --- a/p2p/testing/src/test_node.rs +++ b/p2p/testing/src/test_node.rs @@ -21,12 +21,14 @@ pub trait TestNode { } fn libp2p_dial_opts(&self, host: IpAddr) -> Multiaddr { + let peer_id: libp2p::PeerId = self.peer_id().try_into().unwrap(); + match host { IpAddr::V4(ip) => { - multiaddr!(Ip4(ip), Tcp(self.libp2p_port()), P2p(self.peer_id())) + multiaddr!(Ip4(ip), Tcp(self.libp2p_port()), P2p(peer_id)) } IpAddr::V6(ip) => { - multiaddr!(Ip6(ip), Tcp(self.libp2p_port()), P2p(self.peer_id())) + multiaddr!(Ip6(ip), Tcp(self.libp2p_port()), P2p(peer_id)) } } } diff --git a/p2p/tests/identify.rs b/p2p/tests/identify.rs index 9d08bcbaf0..1f19e47b5b 100644 --- a/p2p/tests/identify.rs +++ b/p2p/tests/identify.rs @@ -81,7 +81,9 @@ async fn rust_node_to_rust_node() -> anyhow::Result<()> { cluster .connect( node, - addr.clone().with_p2p(peer_id.into()).expect("no error"), + addr.clone() + .with_p2p(peer_id.try_into().unwrap()) + .expect("no error"), ) .expect("no error"); let connected = cluster @@ -137,7 +139,7 @@ async fn test_bad_node() -> anyhow::Result<()> { .routing_table; let bad_peer_entry = routing_table - .look_up(&bad_node_peer_id.into()) + .look_up(&bad_node_peer_id.try_into().unwrap()) .expect("Node not found"); let bad_peer_addresses = bad_peer_entry diff --git a/p2p/tests/rpc.rs b/p2p/tests/rpc.rs index 90232e0018..340d2c38bf 100644 --- a/p2p/tests/rpc.rs +++ b/p2p/tests/rpc.rs @@ -403,6 +403,7 @@ fn send_request( peer_id: receiver_id, id: sender_rpc_id, request: Box::new(request), + on_init: None, }), "dispatch rpc send query" ); diff --git a/producer-dashboard/Cargo.toml b/producer-dashboard/Cargo.toml index 19bafd4874..358b000be9 100644 --- 
a/producer-dashboard/Cargo.toml +++ b/producer-dashboard/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "openmina-producer-dashboard" -version = "0.8.1" +version = "0.8.2" edition = "2021" license = "Apache-2.0" @@ -19,7 +19,7 @@ thiserror = "1.0.44" num-traits = "0.2" serde_json = { version = "1.0.83" } vrf = { workspace = true } -openmina-node-account = { path = "../node/account" } +openmina-node-account = { workspace = true } mina-p2p-messages = { workspace = true } time = {version = "0.3", features = ["formatting", "parsing"]} clap = { version = "4.5", features = ["derive"]} diff --git a/producer-dashboard/src/evaluator/mod.rs b/producer-dashboard/src/evaluator/mod.rs index 4e7799188c..af73daaef2 100644 --- a/producer-dashboard/src/evaluator/mod.rs +++ b/producer-dashboard/src/evaluator/mod.rs @@ -36,7 +36,7 @@ impl Evaluator { println!("Evaluating slots: {start} - {end}"); let total_currency = init.ledger.total_currency(); - let pub_key = self.key.public_key().to_string(); + let pub_key = self.key.public_key(); let delegates = init .ledger diff --git a/producer-dashboard/src/main.rs b/producer-dashboard/src/main.rs index 697d2df509..2052dd1b11 100644 --- a/producer-dashboard/src/main.rs +++ b/producer-dashboard/src/main.rs @@ -41,7 +41,10 @@ async fn main() { let db = Database::open(config.database_path).expect("Failed to open Database"); println!("[main] DB opened"); - let key = AccountSecretKey::from_encrypted_file(config.private_key_path) + let password = std::env::var("MINA_PRIVKEY_PASS") + .expect("Expected password in the variable `MINA_PRIVKEY_PASS`"); + + let key = AccountSecretKey::from_encrypted_file(config.private_key_path, &password) .expect("failed to decrypt secret key file"); println!("[main] Producer key loaded"); diff --git a/snark/Cargo.toml b/snark/Cargo.toml index 42167795cc..a27d7a502d 100644 --- a/snark/Cargo.toml +++ b/snark/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "snark" -version = "0.8.1" +version = "0.8.2" edition = "2021" license = 
"Apache-2.0" diff --git a/tools/bootstrap-sandbox/Cargo.toml b/tools/bootstrap-sandbox/Cargo.toml index 531411642a..bb573c6c10 100644 --- a/tools/bootstrap-sandbox/Cargo.toml +++ b/tools/bootstrap-sandbox/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "openmina-bootstrap-sandbox" -version = "0.8.1" +version = "0.8.2" edition = "2021" [dependencies] diff --git a/tools/gossipsub-sandbox/Cargo.toml b/tools/gossipsub-sandbox/Cargo.toml index d7c539993b..b080836839 100644 --- a/tools/gossipsub-sandbox/Cargo.toml +++ b/tools/gossipsub-sandbox/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "openmina-gossipsub-sandbox" -version = "0.8.1" +version = "0.8.2" edition = "2021" [dependencies] diff --git a/tools/hash-tool/Cargo.toml b/tools/hash-tool/Cargo.toml index c906dc55a6..bdfda73104 100644 --- a/tools/hash-tool/Cargo.toml +++ b/tools/hash-tool/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "hash-tool" -version = "0.8.1" +version = "0.8.2" edition = "2021" [dependencies] diff --git a/tools/ledger-tool/Cargo.toml b/tools/ledger-tool/Cargo.toml index 74de8d9498..92498a6606 100644 --- a/tools/ledger-tool/Cargo.toml +++ b/tools/ledger-tool/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "ledger-tool" -version = "0.8.1" +version = "0.8.2" edition = "2021" [dependencies] diff --git a/tools/salsa-simple/Cargo.toml b/tools/salsa-simple/Cargo.toml index 0572e094da..7922f98529 100644 --- a/tools/salsa-simple/Cargo.toml +++ b/tools/salsa-simple/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "salsa-simple" -version = "0.8.1" +version = "0.8.2" edition = "2021" [dev-dependencies] diff --git a/tools/transport/Cargo.toml b/tools/transport/Cargo.toml index de8330509d..d2c1dfc895 100644 --- a/tools/transport/Cargo.toml +++ b/tools/transport/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "mina-transport" -version = "0.8.1" +version = "0.8.2" edition = "2021" [dependencies] diff --git a/vrf/Cargo.toml b/vrf/Cargo.toml index 3fa47798b0..b668a09ac8 100644 --- a/vrf/Cargo.toml +++ b/vrf/Cargo.toml @@ -1,6 +1,6 @@ 
[package] name = "vrf" -version = "0.8.1" +version = "0.8.2" edition = "2021" license = "Apache-2.0" @@ -17,6 +17,7 @@ mina-p2p-messages = { workspace = true } mina-curves = { workspace = true } o1-utils = { workspace = true } ledger = { workspace = true } +openmina-node-account = { workspace = true } ark-ec = { git = "https://github.com/openmina/algebra", branch = "openmina", features = [ "std" ] } ark-serialize = { git = "https://github.com/openmina/algebra", branch = "openmina", features = [ "std" ] } ark-ff = { git = "https://github.com/openmina/algebra", branch = "openmina", features = [ "parallel", "asm" ] } diff --git a/vrf/src/lib.rs b/vrf/src/lib.rs index 3883879d73..2d4f3571cf 100644 --- a/vrf/src/lib.rs +++ b/vrf/src/lib.rs @@ -4,6 +4,7 @@ use ledger::AccountIndex; use message::VrfMessage; use mina_p2p_messages::v2::EpochSeed; use num::{BigInt, ToPrimitive}; +use openmina_node_account::AccountPublicKey; use output::VrfOutput; use serde::{Deserialize, Serialize}; use thiserror::Error; @@ -53,8 +54,8 @@ pub enum VrfError { #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] pub struct VrfWonSlot { - pub producer: String, - pub winner_account: String, + pub producer: AccountPublicKey, + pub winner_account: AccountPublicKey, pub global_slot: u32, pub account_index: AccountIndex, pub vrf_output: Box, @@ -81,7 +82,7 @@ pub struct VrfEvaluationInput { producer_key: Keypair, global_slot: u32, epoch_seed: EpochSeed, - account_pub_key: String, + account_pub_key: AccountPublicKey, delegator_index: AccountIndex, delegated_stake: BigInt, total_currency: BigInt, @@ -91,7 +92,7 @@ impl VrfEvaluationInput { pub fn new( producer_key: Keypair, epoch_seed: EpochSeed, - account_pub_key: String, + account_pub_key: AccountPublicKey, global_slot: u32, delegator_index: AccountIndex, delegated_stake: BigInt, @@ -153,7 +154,7 @@ pub fn evaluate_vrf(vrf_input: VrfEvaluationInput) -> VrfResult