diff --git a/Cargo.lock b/Cargo.lock index c6b9a98a..185f000a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1276,6 +1276,12 @@ dependencies = [ "derive_arbitrary", ] +[[package]] +name = "arc-swap" +version = "1.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "69f7f8c3906b62b754cd5326047894316021dcfe5a194c8ea52bdd94934a3457" + [[package]] name = "ark-bls12-381" version = "0.5.0" @@ -1813,7 +1819,7 @@ dependencies = [ [[package]] name = "bindings" version = "2.0.0" -source = "git+https://github.com/taikoxyz/taiko-mono.git?rev=4a495188afbdbddf41457d440762b89023b32d84#4a495188afbdbddf41457d440762b89023b32d84" +source = "git+https://github.com/taikoxyz/taiko-mono.git?rev=d8741f16b36db30c200b1592310c3d85b625a16c#d8741f16b36db30c200b1592310c3d85b625a16c" dependencies = [ "alloy", "serde", @@ -2434,7 +2440,7 @@ dependencies = [ [[package]] name = "common" -version = "1.23.27" +version = "1.23.28" dependencies = [ "alloy", "alloy-json-rpc", @@ -3524,14 +3530,15 @@ dependencies = [ [[package]] name = "event-scanner" -version = "0.7.0-alpha" +version = "0.9.0-alpha" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "efaf4670dbb0005e178cbf019d732f9819c76bfb9144aadbd52f814bf3647b87" +checksum = "1f55759250f35ff46c155168da8020eeda48886eb754d4aaabe4b280214e5630" dependencies = [ "alloy", "alloy-node-bindings", "anyhow", "backon", + "futures", "thiserror 2.0.17", "tokio", "tokio-stream", @@ -5798,7 +5805,7 @@ dependencies = [ [[package]] name = "node" -version = "1.23.27" +version = "1.23.28" dependencies = [ "alloy", "alloy-json-rpc", @@ -6240,7 +6247,7 @@ dependencies = [ [[package]] name = "p2p-boot-node" -version = "1.23.27" +version = "1.23.28" dependencies = [ "anyhow", "discv5 0.9.1", @@ -6252,7 +6259,7 @@ dependencies = [ [[package]] name = "pacaya" -version = "1.23.27" +version = "1.23.28" dependencies = [ "alloy", "alloy-json-rpc", @@ -6396,7 +6403,7 @@ checksum = 
"9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220" [[package]] name = "permissionless" -version = "1.23.27" +version = "1.23.28" dependencies = [ "alloy", "anyhow", @@ -6770,21 +6777,29 @@ dependencies = [ [[package]] name = "protocol" version = "2.0.0" -source = "git+https://github.com/taikoxyz/taiko-mono.git?rev=4a495188afbdbddf41457d440762b89023b32d84#4a495188afbdbddf41457d440762b89023b32d84" +source = "git+https://github.com/taikoxyz/taiko-mono.git?rev=d8741f16b36db30c200b1592310c3d85b625a16c#d8741f16b36db30c200b1592310c3d85b625a16c" dependencies = [ "alloy", "alloy-consensus", + "alloy-contract", "alloy-eips", "alloy-hardforks 0.4.4", "alloy-primitives", "alloy-provider", "alloy-rlp", + "alloy-rpc-types", "anyhow", + "arc-swap", + "async-trait", "bindings", + "dashmap 6.1.0", "event-scanner", "flate2", "serde", "thiserror 2.0.17", + "tokio", + "tokio-retry", + "tokio-stream", "tracing", ] @@ -10080,7 +10095,7 @@ checksum = "afab94fb28594581f62d981211a9a4d53cc8130bbcbbb89a0440d9b8e81a7746" [[package]] name = "rpc" version = "2.0.0" -source = "git+https://github.com/taikoxyz/taiko-mono.git?rev=4a495188afbdbddf41457d440762b89023b32d84#4a495188afbdbddf41457d440762b89023b32d84" +source = "git+https://github.com/taikoxyz/taiko-mono.git?rev=d8741f16b36db30c200b1592310c3d85b625a16c#d8741f16b36db30c200b1592310c3d85b625a16c" dependencies = [ "alethia-reth-evm", "alethia-reth-primitives", @@ -10725,7 +10740,7 @@ dependencies = [ [[package]] name = "shasta" -version = "1.23.27" +version = "1.23.28" dependencies = [ "alethia-reth-consensus", "alloy", @@ -11558,6 +11573,17 @@ dependencies = [ "tokio", ] +[[package]] +name = "tokio-retry" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f57eb36ecbe0fc510036adff84824dd3c24bb781e21bfa67b69d556aa85214f" +dependencies = [ + "pin-project", + "rand 0.8.5", + "tokio", +] + [[package]] name = "tokio-rustls" version = "0.26.4" @@ -12136,7 +12162,7 @@ checksum = 
"8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" [[package]] name = "urc" -version = "1.23.27" +version = "1.23.28" dependencies = [ "alloy", "alloy-json-rpc", diff --git a/Cargo.toml b/Cargo.toml index f152240b..8201f9a7 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -12,7 +12,7 @@ resolver = "2" default-members = ["node"] [workspace.package] -version = "1.23.27" +version = "1.23.28" edition = "2024" repository = "https://github.com/NethermindEth/Catalyst" license = "MIT" @@ -80,9 +80,9 @@ shasta = { path = "shasta" } strum = { version = "0.27", features = ["derive"] } taiko_alethia_reth = { git = "https://github.com/taikoxyz/alethia-reth.git", rev = "e07c13fc011798adfa5436f24646b7305337cdf3", package = "alethia-reth-consensus" } -taiko_bindings = { git = "https://github.com/taikoxyz/taiko-mono.git", rev = "4a495188afbdbddf41457d440762b89023b32d84", package = "bindings" } -taiko_protocol = { git = "https://github.com/taikoxyz/taiko-mono.git", rev = "4a495188afbdbddf41457d440762b89023b32d84", package = "protocol" } -taiko_rpc = { git = "https://github.com/taikoxyz/taiko-mono.git", rev = "4a495188afbdbddf41457d440762b89023b32d84", package = "rpc" } +taiko_bindings = { git = "https://github.com/taikoxyz/taiko-mono.git", rev = "d8741f16b36db30c200b1592310c3d85b625a16c", package = "bindings" } +taiko_protocol = { git = "https://github.com/taikoxyz/taiko-mono.git", rev = "d8741f16b36db30c200b1592310c3d85b625a16c", package = "protocol" } +taiko_rpc = { git = "https://github.com/taikoxyz/taiko-mono.git", rev = "d8741f16b36db30c200b1592310c3d85b625a16c", package = "rpc" } tiny-keccak = { version = "2.0", default-features = false } tokio = { version = "1.45", default-features = false, features = ["full"] } tokio-util = { version = "0.7", default-features = false } diff --git a/e2e_tests/.env.example b/e2e_tests/.env.example index e10f97ad..44746ee2 100644 --- a/e2e_tests/.env.example +++ b/e2e_tests/.env.example @@ -25,4 +25,6 @@ 
L2_PRIVATE_KEY=53321db7c1e331d93a11a41d16f004d7ff63972ec8ec7c25db329728ceeb1710 FORCED_INCLUSION_STORE_ADDRESS=0xc5092f6c1f30C8970dc835F0F754057f79b89CC6 MAX_BLOCKS_PER_BATCH=10 -PRECONF_HEARTBEAT_MS=2000 \ No newline at end of file +PRECONF_HEARTBEAT_MS=2000 + +PROTOCOL=pacaya \ No newline at end of file diff --git a/e2e_tests/chain_info.py b/e2e_tests/chain_info.py index 96be0e9d..49026927 100644 --- a/e2e_tests/chain_info.py +++ b/e2e_tests/chain_info.py @@ -13,10 +13,10 @@ class ChainInfo: block_hash: HexBytes @classmethod - def from_chain(cls, fi_account_address, l2_client_node1, l1_client, taiko_inbox_address, beacon_client, verbose: bool = True): + def from_chain(cls, fi_account_address, l2_client_node1, l1_client, env_vars, beacon_client, verbose: bool = True): """Create ChainInfo instance from current chain state""" fi_sender_nonce = l2_client_node1.eth.get_transaction_count(fi_account_address) - batch_id = get_last_batch_id(l1_client, taiko_inbox_address) + batch_id = get_last_batch_id(l1_client, env_vars) block_number = l2_client_node1.eth.block_number block_hash = l2_client_node1.eth.get_block(block_number).hash diff --git a/e2e_tests/conftest.py b/e2e_tests/conftest.py index 4e20cfac..a3cfc8d1 100644 --- a/e2e_tests/conftest.py +++ b/e2e_tests/conftest.py @@ -4,8 +4,7 @@ from eth_account import Account import os from dotenv import load_dotenv -from utils import ensure_catalyst_node_running, spam_n_blocks -from forced_inclusion_store import forced_inclusion_store_is_empty, check_empty_forced_inclusion_store +from utils import ensure_catalyst_node_running, spam_n_blocks, forced_inclusion_store_is_empty, check_empty_forced_inclusion_store from dataclasses import dataclass from taiko_inbox import get_last_block_id @@ -23,6 +22,7 @@ class EnvVars: preconf_heartbeat_ms: int l2_private_key: str max_blocks_per_batch: int + protocol: str @classmethod def from_env(cls): @@ -64,6 +64,10 @@ def from_env(cls): if not max_blocks_per_batch: raise 
Exception("Environment variable MAX_BLOCKS_PER_BATCH not set") + protocol = os.getenv("PROTOCOL") + if not protocol: + raise Exception("Environment variable PROTOCOL not set") + return cls( l2_prefunded_priv_key=l2_prefunded_priv_key, l2_prefunded_priv_key_2=l2_prefunded_priv_key_2, @@ -74,8 +78,15 @@ def from_env(cls): preconf_heartbeat_ms=preconf_heartbeat_ms, l2_private_key=l2_private_key, max_blocks_per_batch=max_blocks_per_batch, + protocol=protocol, ) + def is_shasta(self): + return self.protocol == "shasta" + + def is_pacaya(self): + return self.protocol == "pacaya" + @pytest.fixture(scope="session") def env_vars(): """Centralized environment variables fixture""" @@ -124,13 +135,18 @@ def forced_inclusion_teardown(l1_client, l2_client_node1, env_vars): """Fixture to ensure forced inclusion store is empty after test""" yield None print("Test teardown: ensuring forced inclusion store is empty") - if not forced_inclusion_store_is_empty(l1_client, env_vars.forced_inclusion_store_address): + if not forced_inclusion_store_is_empty(l1_client, env_vars): print("Spamming blocks to ensure forced inclusion store is empty") spam_n_blocks(l2_client_node1, env_vars.l2_prefunded_priv_key, env_vars.max_blocks_per_batch, env_vars.preconf_min_txs) @pytest.fixture(scope="session", autouse=True) def global_setup(l1_client, l2_client_node1, l2_client_node2, env_vars): """Run once before all tests""" + + if env_vars.protocol == "shasta": + yield + return + print("Wait for Geth sync with TaikoInbox") block_number_contract = get_last_block_id(l1_client, env_vars.taiko_inbox_address) diff --git a/e2e_tests/forced_inclusion_store.py b/e2e_tests/forced_inclusion_store.py index 016b1515..140a43ea 100644 --- a/e2e_tests/forced_inclusion_store.py +++ b/e2e_tests/forced_inclusion_store.py @@ -1,7 +1,4 @@ -from web3 import Web3 -import json - -abi = [ +pacaya_fi_abi = [ { "inputs": [], "name": "head", @@ -29,18 +26,3 @@ "type": "function" } ] - -def 
get_forced_inclusion_store_head(l1_client, forced_inclusion_address): - contract = l1_client.eth.contract(address=forced_inclusion_address, abi=abi) - head = contract.functions.head().call() - return int(head) - -def forced_inclusion_store_is_empty(l1_client, forced_inclusion_address): - contract = l1_client.eth.contract(address=forced_inclusion_address, abi=abi) - head = contract.functions.head().call() - tail = contract.functions.tail().call() - print("Forced Inclusion head:", head, "tail: ", tail) - return head == tail - -def check_empty_forced_inclusion_store(l1_client, env_vars): - assert forced_inclusion_store_is_empty(l1_client, env_vars.forced_inclusion_store_address), "Forced inclusion store should be empty" diff --git a/e2e_tests/taiko_inbox.py b/e2e_tests/taiko_inbox.py index ceb59173..f7a5c033 100644 --- a/e2e_tests/taiko_inbox.py +++ b/e2e_tests/taiko_inbox.py @@ -1,18 +1,36 @@ from web3 import Web3 import json +from utils import get_shasta_inbox_abi with open("../pacaya/src/l1/abi/ITaikoInbox.json") as f: - abi = json.load(f) + pacaya_abi = json.load(f) -def get_last_batch_id(l1_client, taiko_inbox_address): - contract = l1_client.eth.contract(address=taiko_inbox_address, abi=abi) - result = contract.functions.getStats2().call() - last_batch_id = result[0] - return last_batch_id +def get_last_batch_id(l1_client, env_vars): + if env_vars.is_pacaya(): + contract = l1_client.eth.contract(address=env_vars.taiko_inbox_address, abi=pacaya_abi) + result = contract.functions.getStats2().call() + last_batch_id = result[0] + return last_batch_id + else: + core_state = get_core_state(l1_client, env_vars) + last_batch_id = core_state[0] - 1 + return last_batch_id -def get_last_block_id(l1_client, taiko_inbox_address): - batch_id = int(get_last_batch_id(l1_client, taiko_inbox_address)) - 1 - contract = l1_client.eth.contract(address=taiko_inbox_address, abi=abi) - result = contract.functions.getBatch(batch_id).call() - last_block_id = result[1] - return 
last_block_id +def get_last_block_id(l1_client, env_vars): + if env_vars.is_pacaya(): + batch_id = int(get_last_batch_id(l1_client, env_vars)) - 1 + contract = l1_client.eth.contract(address=env_vars.taiko_inbox_address, abi=pacaya_abi) + result = contract.functions.getBatch(batch_id).call() + last_block_id = result[1] + return last_block_id + else: + core_state = get_core_state(l1_client, env_vars) + last_block_id = core_state[1] + #print(f"Last L2 block id from core state: {last_block_id}, next proposal id: {core_state[0]}") + return last_block_id + +def get_core_state(l1_client, env_vars): + shasta_abi = get_shasta_inbox_abi() + contract = l1_client.eth.contract(address=env_vars.taiko_inbox_address, abi=shasta_abi) + result = contract.functions.getCoreState().call() + return result \ No newline at end of file diff --git a/e2e_tests/test_avs_node.py b/e2e_tests/test_avs_node.py index 4776563c..01cb2383 100644 --- a/e2e_tests/test_avs_node.py +++ b/e2e_tests/test_avs_node.py @@ -76,10 +76,21 @@ def test_propose_batch_to_l1_after_reaching_max_blocks_per_batch(l2_client_node1 current_block_timestamp = l1_client.eth.get_block(current_block).timestamp spam_n_txs(l2_client_node1, env_vars.l2_prefunded_priv_key, 11) - event = wait_for_batch_proposed_event(l1_client, env_vars.taiko_inbox_address, current_block) + event = wait_for_batch_proposed_event(l1_client, current_block, env_vars) + + if env_vars.is_pacaya(): + proposer = event['args']['meta']['proposer'] + proposed_at = event['args']['meta']['proposedAt'] + + else: + payload = decode_proposal_payload(l1_client, env_vars.taiko_inbox_address, event['args']['data']) + print(f"Payload[0]: {payload[0]}") + proposer = payload[0][3] + proposed_at = payload[0][1] + + assert proposer in [l1_client.eth.account.from_key(env_vars.l2_prefunded_priv_key).address, l1_client.eth.account.from_key(env_vars.l2_prefunded_priv_key_2).address], "Proposer should be L2 Node 1 or L2 Node 2" + assert proposed_at > current_block_timestamp, 
"Proposed at timestamp should be larger than current block timestamp" - assert event['args']['meta']['proposer'] in [l1_client.eth.account.from_key(env_vars.l2_prefunded_priv_key).address, l1_client.eth.account.from_key(env_vars.l2_prefunded_priv_key_2).address], "Proposer should be L2 Node 1 or L2 Node 2" - assert event['args']['meta']['proposedAt'] > current_block_timestamp, "Proposed at timestamp should be larger than current block timestamp" def test_proposing_other_operator_blocks(l2_client_node1, l1_client, beacon_client, catalyst_node_teardown, env_vars): catalyst_node_teardown @@ -92,7 +103,7 @@ def test_proposing_other_operator_blocks(l2_client_node1, l1_client, beacon_clie node_number = get_current_operator_number(l1_client, env_vars.l2_prefunded_priv_key, env_vars.preconf_whitelist_address) - spam_txs_until_new_batch_is_proposed(l1_client, l2_client_node1, env_vars.l2_prefunded_priv_key, env_vars.taiko_inbox_address, beacon_client, env_vars.preconf_min_txs) + spam_txs_until_new_batch_is_proposed(l1_client, l2_client_node1, beacon_client, env_vars) # should create new block in new batch tx_hash = spam_n_txs(l2_client_node1, env_vars.l2_prefunded_priv_key, 1) @@ -101,7 +112,7 @@ def test_proposing_other_operator_blocks(l2_client_node1, l1_client, beacon_clie stop_catalyst_node(node_number) wait_for_slot_beginning(beacon_client, 0) - wait_for_batch_proposed_event(l1_client, env_vars.taiko_inbox_address, l1_client.eth.block_number) + wait_for_batch_proposed_event(l1_client, l1_client.eth.block_number, env_vars) # sent tx should still be included, no reorg wait_for_tx_to_be_included(l2_client_node1, tx_hash) @@ -113,7 +124,7 @@ def test_verification_after_node_restart(l1_client, l2_client_node1, catalyst_no wait_for_slot_beginning(beacon_client, 5) - spam_txs_until_new_batch_is_proposed(l1_client, l2_client_node1, env_vars.l2_prefunded_priv_key, env_vars.taiko_inbox_address, beacon_client, env_vars.preconf_min_txs) + 
spam_txs_until_new_batch_is_proposed(l1_client, l2_client_node1, beacon_client, env_vars) current_block = l1_client.eth.block_number # spam additional block @@ -123,7 +134,7 @@ def test_verification_after_node_restart(l1_client, l2_client_node1, catalyst_no stop_catalyst_node(current_node) start_catalyst_node(current_node) - wait_for_batch_proposed_event(l1_client, env_vars.taiko_inbox_address, current_block) + wait_for_batch_proposed_event(l1_client, current_block, env_vars) def test_end_of_sequencing(l2_client_node1, beacon_client, l1_client, env_vars): wait_for_epoch_with_operator_switch_and_slot(beacon_client, l1_client, env_vars.preconf_whitelist_address, 25) # handover window @@ -131,4 +142,4 @@ def test_end_of_sequencing(l2_client_node1, beacon_client, l1_client, env_vars): l2_block_number = l2_client_node1.eth.block_number send_n_txs_without_waiting(l2_client_node1, env_vars.l2_prefunded_priv_key, env_vars.preconf_min_txs) time.sleep(2 * env_vars.preconf_heartbeat_ms / 1000) - assert l2_client_node1.eth.block_number == l2_block_number+1, "L2 Node 1 should have a new block after sending transactions, even in handover buffer" \ No newline at end of file + assert l2_client_node1.eth.block_number == l2_block_number+1, "L2 Node 1 should have a new block after sending transactions, even in handover buffer" diff --git a/e2e_tests/test_forced_inclusion.py b/e2e_tests/test_forced_inclusion.py index c63aef9e..1ee727ef 100644 --- a/e2e_tests/test_forced_inclusion.py +++ b/e2e_tests/test_forced_inclusion.py @@ -1,23 +1,22 @@ from unittest import skip -import pytest -import requests from web3 import Web3 -import os -import sys from utils import * import subprocess import re import time from eth_account import Account -from taiko_inbox import get_last_batch_id -from forced_inclusion_store import check_empty_forced_inclusion_store, get_forced_inclusion_store_head, forced_inclusion_store_is_empty from chain_info import ChainInfo +from taiko_inbox import get_last_block_id 
-def send_forced_inclusion(nonce_delta): +def send_forced_inclusion(nonce_delta, env_vars): + if env_vars.is_pacaya(): + image = "nethswitchboard/taiko-forced-inclusion-toolbox" + else: + image = "nethswitchboard/taiko-forced-inclusion-toolbox:shasta" cmd = [ "docker", "run", "--network", "host", "--env-file", ".env", "--rm", - "nethswitchboard/taiko-forced-inclusion-toolbox", "send", + image, "send", "--nonce-delta", str(nonce_delta) ] try: @@ -50,19 +49,21 @@ def test_forced_inclusion(l1_client, beacon_client, l2_client_node1, env_vars, f check_empty_forced_inclusion_store(l1_client, env_vars) fi_account = Account.from_key(env_vars.l2_private_key) - # print chain info - ChainInfo.from_chain(fi_account.address, l2_client_node1, l1_client, env_vars.taiko_inbox_address, beacon_client) + ChainInfo.from_chain(fi_account.address, l2_client_node1, l1_client, env_vars, beacon_client) + get_last_block_id(l1_client, env_vars) #send forced inclusion - forced_inclusion_tx_hash = send_forced_inclusion(0) + forced_inclusion_tx_hash = send_forced_inclusion(0, env_vars) print(f"Extracted forced inclusion tx hash: {forced_inclusion_tx_hash}") - # Spam 41 transactions to L2 Node to at least one batch which will include the forced inclusion tx delay = get_two_l2_slots_duration_sec(env_vars.preconf_heartbeat_ms) print("spam 41 transactions with delay", delay) # Synchronize transaction sending with L1 slot time wait_for_next_slot(beacon_client) spam_n_txs_wait_only_for_the_last(l2_client_node1, env_vars.l2_prefunded_priv_key, 41, delay) + wait_for_batch_proposed_event(l1_client, l1_client.eth.block_number, env_vars) + + get_last_block_id(l1_client, env_vars) assert wait_for_tx_to_be_included(l2_client_node1, forced_inclusion_tx_hash), "Forced inclusion tx should be included in L2 Node 1" @@ -84,9 +85,9 @@ def test_three_consecutive_forced_inclusion(l1_client, beacon_client, l2_client_ check_empty_forced_inclusion_store(l1_client, env_vars) # send 3 forced inclusion - tx_1 = 
send_forced_inclusion(0) - tx_2 = send_forced_inclusion(1) - tx_3 = send_forced_inclusion(2) + tx_1 = send_forced_inclusion(0, env_vars) + tx_2 = send_forced_inclusion(1, env_vars) + tx_3 = send_forced_inclusion(2, env_vars) # Synchronize transaction sending with slot time wait_for_next_slot(beacon_client) # spam transactions @@ -113,9 +114,9 @@ def test_end_of_sequencing_forced_inclusion(l1_client, beacon_client, l2_client_ wait_for_epoch_with_operator_switch_and_slot(beacon_client, l1_client, env_vars.preconf_whitelist_address, 19) # get chain info - chain_info = ChainInfo.from_chain(fi_account.address, l2_client_node1, l1_client, env_vars.taiko_inbox_address, beacon_client) + chain_info = ChainInfo.from_chain(fi_account.address, l2_client_node1, l1_client, env_vars, beacon_client) # send 1 forced inclusion - forced_inclusion_tx_hash = send_forced_inclusion(0) + forced_inclusion_tx_hash = send_forced_inclusion(0, env_vars) # wait for handower window wait_for_slot_beginning(beacon_client, 25) @@ -123,13 +124,13 @@ def test_end_of_sequencing_forced_inclusion(l1_client, beacon_client, l2_client_ wait_for_next_slot(beacon_client) # send transactions to create batch spam_n_txs_wait_only_for_the_last(l2_client_node1, env_vars.l2_prefunded_priv_key, env_vars.max_blocks_per_batch, delay) - after_spam_chain_info = ChainInfo.from_chain(fi_account.address, l2_client_node1, l1_client, env_vars.taiko_inbox_address, beacon_client) + after_spam_chain_info = ChainInfo.from_chain(fi_account.address, l2_client_node1, l1_client, env_vars, beacon_client) # wait for transactions to be included on L1 wait_for_slot_beginning(beacon_client, 3) # Verify reorg after L1 inclusion after_spam_chain_info.check_reorg(l2_client_node1) # check chain info - after_handover_chain_info = ChainInfo.from_chain(fi_account.address, l2_client_node1, l1_client, env_vars.taiko_inbox_address, beacon_client) + after_handover_chain_info = ChainInfo.from_chain(fi_account.address, l2_client_node1, l1_client, 
env_vars, beacon_client) # we should not have forced inclusions after handover assert chain_info.fi_sender_nonce == after_handover_chain_info.fi_sender_nonce, "Transaction not included after handover" # Synchronize transaction sending with L1 slot time @@ -169,10 +170,10 @@ def test_preconf_forced_inclusion_after_restart(l1_client, beacon_client, l2_cli time.sleep(slot_duration_sec * 3) # Validate chain info - chain_info = ChainInfo.from_chain(fi_account.address, l2_client_node1, l1_client, env_vars.taiko_inbox_address, beacon_client) + chain_info = ChainInfo.from_chain(fi_account.address, l2_client_node1, l1_client, env_vars, beacon_client) # Send forced inclusion - forced_inclusion_tx_hash = send_forced_inclusion(0) + forced_inclusion_tx_hash = send_forced_inclusion(0, env_vars) # Synchronize transaction sending with L1 slot time wait_for_next_slot(beacon_client) @@ -186,7 +187,7 @@ def test_preconf_forced_inclusion_after_restart(l1_client, beacon_client, l2_cli ) # Get chain info - before_l1_inclusion_chain_info = ChainInfo.from_chain(fi_account.address, l2_client_node1, l1_client, env_vars.taiko_inbox_address, beacon_client) + before_l1_inclusion_chain_info = ChainInfo.from_chain(fi_account.address, l2_client_node1, l1_client, env_vars, beacon_client) # Wait for transactions to be included on L1 time.sleep(slot_duration_sec * 3) @@ -207,10 +208,10 @@ def test_recover_forced_inclusion_after_restart(l1_client, beacon_client, l2_cli # wait_for_slot_beginning(beacon_client, 1) - start_chain_info = ChainInfo.from_chain(fi_account.address, l2_client_node1, l1_client, env_vars.taiko_inbox_address, beacon_client) + start_chain_info = ChainInfo.from_chain(fi_account.address, l2_client_node1, l1_client, env_vars, beacon_client) # start_block = l1_client.eth.block_number - forced_inclusion_tx_hash = send_forced_inclusion(0) + forced_inclusion_tx_hash = send_forced_inclusion(0, env_vars) wait_for_new_block(l2_client_node1, start_chain_info.block_number) @@ -219,7 +220,7 @@ 
def test_recover_forced_inclusion_after_restart(l1_client, beacon_client, l2_cli restart_catalyst_node(2) wait_for_forced_inclusion_store_to_be_empty(l1_client, env_vars) - chain_info = ChainInfo.from_chain(fi_account.address, l2_client_node1, l1_client, env_vars.taiko_inbox_address, beacon_client) + chain_info = ChainInfo.from_chain(fi_account.address, l2_client_node1, l1_client, env_vars, beacon_client) assert start_chain_info.fi_sender_nonce + 1 == chain_info.fi_sender_nonce, "FI transaction not included after restart" start_chain_info.check_reorg(l2_client_node1) @@ -240,10 +241,10 @@ def test_verify_forced_inclusion_after_previous_operator_stop(l1_client, beacon_ wait_for_epoch_with_operator_switch_and_slot(beacon_client, l1_client, env_vars.preconf_whitelist_address, 1) node_number = get_current_operator_number(l1_client, env_vars.l2_prefunded_priv_key, env_vars.preconf_whitelist_address) - op1_chain_info = ChainInfo.from_chain(fi_account.address, l2_client_node1, l1_client, env_vars.taiko_inbox_address, beacon_client) + op1_chain_info = ChainInfo.from_chain(fi_account.address, l2_client_node1, l1_client, env_vars, beacon_client) # Send 2 forced inclusions - send_forced_inclusion(0) - send_forced_inclusion(1) + send_forced_inclusion(0, env_vars) + send_forced_inclusion(1, env_vars) # Synchronize transaction sending with L1 slot time @@ -252,7 +253,7 @@ def test_verify_forced_inclusion_after_previous_operator_stop(l1_client, beacon_ # send transactions but don't create batch spam_n_txs_wait_only_for_the_last(l2_client_node1, env_vars.l2_prefunded_priv_key, env_vars.max_blocks_per_batch-1, delay) - op1_stop_chain_info = ChainInfo.from_chain(fi_account.address, l2_client_node1, l1_client, env_vars.taiko_inbox_address, beacon_client) + op1_stop_chain_info = ChainInfo.from_chain(fi_account.address, l2_client_node1, l1_client, env_vars, beacon_client) assert op1_chain_info.fi_sender_nonce + 1 == op1_stop_chain_info.fi_sender_nonce, "FI transaction not included" # Stop current operator @@ -271,13 +272,13
@@ def test_verify_forced_inclusion_after_previous_operator_stop(l1_client, beacon_ # send transactions to create batch spam_n_txs_wait_only_for_the_last(l2_client_node1, env_vars.l2_prefunded_priv_key, env_vars.max_blocks_per_batch, delay) - after_spam_chain_info = ChainInfo.from_chain(fi_account.address, l2_client_node1, l1_client, env_vars.taiko_inbox_address, beacon_client) + after_spam_chain_info = ChainInfo.from_chain(fi_account.address, l2_client_node1, l1_client, env_vars, beacon_client) # wait for new epoch wait_for_slot_beginning(beacon_client, 0) # we started verifier but result not ready yet - new_epoch_chain_info = ChainInfo.from_chain(fi_account.address, l2_client_node1, l1_client, env_vars.taiko_inbox_address, beacon_client) + new_epoch_chain_info = ChainInfo.from_chain(fi_account.address, l2_client_node1, l1_client, env_vars, beacon_client) # Validate chain info after_spam_chain_info.check_reorg(l2_client_node1) @@ -290,14 +291,14 @@ def test_verify_forced_inclusion_after_previous_operator_stop(l1_client, beacon_ op1_stop_chain_info.check_reorg(l2_client_node1) after_spam_chain_info.check_reorg(l2_client_node1) new_epoch_chain_info.check_reorg(l2_client_node1) - after_inclusion_chain_info = ChainInfo.from_chain(fi_account.address, l2_client_node1, l1_client, env_vars.taiko_inbox_address, beacon_client) + after_inclusion_chain_info = ChainInfo.from_chain(fi_account.address, l2_client_node1, l1_client, env_vars, beacon_client) assert new_epoch_chain_info.fi_sender_nonce == after_inclusion_chain_info.fi_sender_nonce, "FI transaction not included" # Synchronize transaction sending with L1 slot time wait_for_next_slot(beacon_client) # send transactions to create batch with FI spam_n_txs_wait_only_for_the_last(l2_client_node1, env_vars.l2_prefunded_priv_key, env_vars.max_blocks_per_batch, delay) - after_spam_chain_info = ChainInfo.from_chain(fi_account.address, l2_client_node1, l1_client, env_vars.taiko_inbox_address, beacon_client) + 
after_spam_chain_info = ChainInfo.from_chain(fi_account.address, l2_client_node1, l1_client, env_vars, beacon_client) assert after_inclusion_chain_info.fi_sender_nonce + 1 == after_spam_chain_info.fi_sender_nonce, "FI transaction not included" # wait for transactions to be included on L1 @@ -305,5 +306,5 @@ def test_verify_forced_inclusion_after_previous_operator_stop(l1_client, beacon_ # Validate chain info after_spam_chain_info.check_reorg(l2_client_node1) - chain_info = ChainInfo.from_chain(fi_account.address, l2_client_node1, l1_client, env_vars.taiko_inbox_address, beacon_client) + chain_info = ChainInfo.from_chain(fi_account.address, l2_client_node1, l1_client, env_vars, beacon_client) assert after_spam_chain_info.fi_sender_nonce == chain_info.fi_sender_nonce, "FI transaction not included" diff --git a/e2e_tests/test_preocnfirmation_after_restart.py b/e2e_tests/test_preocnfirmation_after_restart.py index c045a7f6..81badf06 100644 --- a/e2e_tests/test_preocnfirmation_after_restart.py +++ b/e2e_tests/test_preocnfirmation_after_restart.py @@ -27,20 +27,20 @@ def test_preocnfirmation_after_restart(l1_client, beacon_client, l2_client_node1 # get chain info block_number = l2_client_node1.eth.block_number print("Block number:", block_number) - batch_id = get_last_batch_id(l1_client, env_vars.taiko_inbox_address) + batch_id = get_last_batch_id(l1_client, env_vars) # send transactions to create 3 batches # produce 1 L2 block every 2 L2 slots delay = get_two_l2_slots_duration_sec(env_vars.preconf_heartbeat_ms) print("delay", delay) spam_n_txs_wait_only_for_the_last(l2_client_node1, env_vars.l2_prefunded_priv_key, 3 * env_vars.max_blocks_per_batch, delay) # wait for transactions to be included on L1 - wait_for_batch_proposed_event(l1_client, env_vars.taiko_inbox_address, l1_client.eth.block_number) + wait_for_batch_proposed_event(l1_client, l1_client.eth.block_number, env_vars) # verify slot = get_slot_in_epoch(beacon_client) print("Slot: ", slot) new_block_number = 
l2_client_node1.eth.block_number print("New block number:", new_block_number) - new_batch_id = get_last_batch_id(l1_client, env_vars.taiko_inbox_address) + new_batch_id = get_last_batch_id(l1_client, env_vars) print("New batch ID:", new_batch_id) assert new_block_number >= block_number + 3 * env_vars.max_blocks_per_batch, "Invalid block number" assert new_batch_id >= batch_id + 3 , "Invalid batch ID" diff --git a/e2e_tests/utils.py b/e2e_tests/utils.py index 26f88fb6..94e5c2a2 100644 --- a/e2e_tests/utils.py +++ b/e2e_tests/utils.py @@ -3,7 +3,9 @@ import subprocess import json import os -from forced_inclusion_store import forced_inclusion_store_is_empty +import requests +import re +from forced_inclusion_store import pacaya_fi_abi def send_transaction(nonce : int, account, amount, eth_client, private_key): base_fee = eth_client.eth.get_block('latest')['baseFeePerGas'] @@ -131,36 +133,49 @@ def send_n_txs_without_waiting(eth_client, private_key, n): for i in range(n): send_transaction(nonce+i, account, '0.00009', eth_client, private_key) -def wait_for_batch_proposed_event(eth_client, taiko_inbox_address, from_block): +def wait_for_batch_proposed_event(eth_client, from_block, env_vars): print(f"Waiting for BatchProposed event from block {from_block}") - with open("../pacaya/src/l1/abi/ITaikoInbox.json") as f: - abi = json.load(f) - - contract = eth_client.eth.contract(address=taiko_inbox_address, abi=abi) - - # Create an event filter for BatchProposed events - batch_proposed_filter = contract.events.BatchProposed.create_filter( - from_block=from_block - ) - - wait_time = 0; - while True: - if wait_time > 100: - assert False, "Warning waited 100 seconds for BatchProposed event without getting one" + proposed_filter = get_proposed_event_filter(eth_client, from_block, env_vars) - new_entries = batch_proposed_filter.get_all_entries() + WAIT_TIME = 100 + for i in range(WAIT_TIME): + new_entries = proposed_filter.get_all_entries() if len(new_entries) > 0: event = 
new_entries[-1] - print_batch_info(event) + if env_vars.is_pacaya(): + print_batch_info(event) return event time.sleep(1) - wait_time += 1 + assert False, "Warning waited {} seconds for BatchProposed event without getting one".format(WAIT_TIME) + +def get_proposed_event_filter(eth_client, from_block, env_vars): + if env_vars.is_pacaya(): + with open("../pacaya/src/l1/abi/ITaikoInbox.json") as f: + abi = json.load(f) + contract = eth_client.eth.contract(address=env_vars.taiko_inbox_address, abi=abi) + return contract.events.BatchProposed.create_filter( + from_block=from_block + ) + elif env_vars.is_shasta(): + proposed_event_abi = [{ + "type": "event", + "name": "Proposed", + "inputs": [{"name": "data", "type": "bytes", "indexed": False, "internalType": "bytes"}], + "anonymous": False + }] + + contract = eth_client.eth.contract(address=env_vars.taiko_inbox_address, abi=proposed_event_abi) + return contract.events.Proposed().create_filter( + from_block=from_block + ) + else: + raise Exception("Invalid protocol") def wait_for_forced_inclusion_store_to_be_empty(l1_client, env_vars): TIMEOUT = 300 i = 0 - while not forced_inclusion_store_is_empty(l1_client, env_vars.forced_inclusion_store_address): + while not forced_inclusion_store_is_empty(l1_client, env_vars): if i >= TIMEOUT: assert False, "Error: waited {} seconds for forced inclusion store to be empty".format(TIMEOUT) time.sleep(1) @@ -192,37 +207,32 @@ def get_next_operator(eth_client, l1_contract_address): contract = eth_client.eth.contract(address=l1_contract_address, abi=abi) return contract.functions.getOperatorForNextEpoch().call() -def spam_txs_until_new_batch_is_proposed(l1_eth_client, l2_eth_client, private_key, taiko_inbox_address, beacon_client, preconf_min_txs): +def spam_txs_until_new_batch_is_proposed(l1_eth_client, l2_eth_client, beacon_client, env_vars): current_block = l1_eth_client.eth.block_number l1_slot_duration = int(beacon_client.get_spec()['data']['SECONDS_PER_SLOT']) number_of_blocks = 10 
for i in range(number_of_blocks): - spam_n_blocks(l2_eth_client, private_key, 1, preconf_min_txs) + spam_n_blocks(l2_eth_client, env_vars.l2_prefunded_priv_key, 1, env_vars.preconf_min_txs) wait_till_next_l1_slot(beacon_client) - event = get_last_batch_proposed_event(l1_eth_client, taiko_inbox_address, current_block) + event = get_last_batch_proposed_event(l1_eth_client, current_block, env_vars) if event is not None: return event - wait_for_batch_proposed_event(l1_eth_client, taiko_inbox_address, current_block) + wait_for_batch_proposed_event(l1_eth_client, current_block, env_vars) def wait_till_next_l1_slot(beacon_client): l1_slot_duration = int(beacon_client.get_spec()['data']['SECONDS_PER_SLOT']) current_time = int(time.time()) % l1_slot_duration time.sleep(l1_slot_duration - current_time) -def get_last_batch_proposed_event(eth_client, taiko_inbox_address, from_block): - with open("../pacaya/src/l1/abi/ITaikoInbox.json") as f: - abi = json.load(f) - - contract = eth_client.eth.contract(address=taiko_inbox_address, abi=abi) - batch_proposed_filter = contract.events.BatchProposed.create_filter( - from_block=from_block - ) - new_entries = batch_proposed_filter.get_all_entries() +def get_last_batch_proposed_event(eth_client, from_block, env_vars): + proposed_filter = get_proposed_event_filter(eth_client, from_block, env_vars) + new_entries = proposed_filter.get_all_entries() if len(new_entries) > 0: event = new_entries[-1] - print_batch_info(event) + if env_vars.is_pacaya(): + print_batch_info(event) return event return None @@ -300,3 +310,83 @@ def wait_for_epoch_with_operator_switch_and_slot(beacon_client, l1_client, preco if current_operator != next_operator: break assert current_operator != next_operator, "Current operator should be different from next operator" + +def read_shasta_inbox_config(l1_client, shasta_inbox_address): + abi = get_shasta_inbox_abi() + contract = l1_client.eth.contract(address=shasta_inbox_address, abi=abi) + config = 
contract.functions.getConfig().call() + print(f"Shasta inbox config: {config}") + return config + +def get_shasta_inbox_abi(): + commit = get_taiko_bindings_commit() + url = f"https://raw.githubusercontent.com/taikoxyz/taiko-mono/{commit}/packages/taiko-client-rs/crates/bindings/src/inbox.rs" + return read_json_abi_from_rust_bindings(url) + +def get_taiko_bindings_commit(): + """Read the commit hash from Cargo.toml for taiko_bindings dependency""" + cargo_toml_path = os.path.join(os.path.dirname(os.path.dirname(__file__)), "Cargo.toml") + with open(cargo_toml_path, 'r') as f: + content = f.read() + + # Find the taiko_bindings dependency and extract the rev value + pattern = r'taiko_bindings\s*=\s*\{[^}]*rev\s*=\s*"([^"]+)"' + match = re.search(pattern, content) + + if not match: + raise ValueError("Could not find taiko_bindings rev in Cargo.toml") + + return match.group(1) + +def decode_proposal_payload(l1_client, shasta_inbox_address, payload): + commit = get_taiko_bindings_commit() + url = f"https://raw.githubusercontent.com/taikoxyz/taiko-mono/{commit}/packages/taiko-client-rs/crates/bindings/src/codec.rs" + abi = read_json_abi_from_rust_bindings(url) + config = read_shasta_inbox_config(l1_client, shasta_inbox_address) + codec = config[0] + codec_contract = l1_client.eth.contract(address=codec, abi=abi) + return codec_contract.functions.decodeProposedEvent(payload).call() + +def read_json_abi_from_rust_bindings(url): + response = requests.get(url) + response.raise_for_status() # Raise an exception for bad status codes + + content = response.text + + # Find the ```json code block + pattern = r'```json\s*\n(.*?)\n```' + match = re.search(pattern, content, re.DOTALL) + + if not match: + raise ValueError(f"Could not find ```json code block in the file at {url}") + + json_content = match.group(1).strip() + + # Parse and return the JSON + return json.loads(json_content) + +def get_forced_inclusion_store_head(l1_client, env_vars): + if env_vars.is_pacaya(): + contract 
= l1_client.eth.contract(address=env_vars.forced_inclusion_store_address, abi=pacaya_fi_abi) + head = contract.functions.head().call() + return int(head) + else: + shasta_abi = get_shasta_inbox_abi() + contract = l1_client.eth.contract(address=env_vars.forced_inclusion_store_address, abi=shasta_abi) + head, tail = contract.functions.getForcedInclusionState().call() + return int(head) + +def forced_inclusion_store_is_empty(l1_client, env_vars): + if env_vars.is_pacaya(): + contract = l1_client.eth.contract(address=env_vars.forced_inclusion_store_address, abi=pacaya_fi_abi) + head = contract.functions.head().call() + tail = contract.functions.tail().call() + else: + shasta_abi = get_shasta_inbox_abi() + contract = l1_client.eth.contract(address=env_vars.forced_inclusion_store_address, abi=shasta_abi) + head, tail = contract.functions.getForcedInclusionState().call() + print("Forced Inclusion head:", head, "tail: ", tail) + return head == tail + +def check_empty_forced_inclusion_store(l1_client, env_vars): + assert forced_inclusion_store_is_empty(l1_client, env_vars), "Forced inclusion store should be empty" diff --git a/shasta/src/chain_monitor/mod.rs b/shasta/src/chain_monitor/mod.rs index 0de2c532..600d8bd4 100644 --- a/shasta/src/chain_monitor/mod.rs +++ b/shasta/src/chain_monitor/mod.rs @@ -1,18 +1,18 @@ use alloy::{primitives::Address, providers::DynProvider}; use common::chain_monitor::{ChainMonitor, ChainMonitorEventHandler}; -use taiko_bindings::{codec_optimized::CodecOptimized::CodecOptimizedInstance, inbox::Inbox}; +use taiko_bindings::{codec::Codec::CodecInstance, inbox::Inbox}; use tracing::{info, warn}; pub type ShastaChainMonitor = ChainMonitor; #[derive(Clone)] pub struct ProposedHandler { - codec: CodecOptimizedInstance, + codec: CodecInstance, } impl ProposedHandler { pub fn new(codec_address: Address, provider: DynProvider) -> Self { - let codec = CodecOptimizedInstance::new(codec_address, provider); + let codec = CodecInstance::new(codec_address, 
provider); Self { codec } } } diff --git a/shasta/src/l1/bindings/inbox.rs b/shasta/src/l1/bindings/inbox.rs deleted file mode 100644 index 97736dbd..00000000 --- a/shasta/src/l1/bindings/inbox.rs +++ /dev/null @@ -1,93 +0,0 @@ -use alloy::sol; - -sol!( -#[sol(rpc)] - contract Inbox { - struct Config { - /// @notice The codec used for encoding and hashing - address codec; - /// @notice The token used for bonds - address bondToken; - /// @notice The signal service contract address - address signalService; - /// @notice The proof verifier contract - address proofVerifier; - /// @notice The proposer checker contract - address proposerChecker; - /// @notice The proving window in seconds - uint48 provingWindow; - /// @notice The extended proving window in seconds - uint48 extendedProvingWindow; - /// @notice The ring buffer size for storing proposal hashes - uint256 ringBufferSize; - /// @notice The percentage of basefee paid to coinbase - uint8 basefeeSharingPctg; - /// @notice The minimum number of forced inclusions that the proposer is forced to process - /// if they are due - uint256 minForcedInclusionCount; - /// @notice The delay for forced inclusions measured in seconds - uint16 forcedInclusionDelay; - /// @notice The base fee for forced inclusions in Gwei used in dynamic fee calculation - uint64 forcedInclusionFeeInGwei; - /// @notice Queue size at which the fee doubles - uint64 forcedInclusionFeeDoubleThreshold; - /// @notice The minimum delay between checkpoints in seconds - /// @dev Must be less than or equal to finalization grace period - uint16 minCheckpointDelay; - /// @notice The multiplier to determine when a forced inclusion is too old so that proposing - /// becomes permissionless - uint8 permissionlessInclusionMultiplier; - } - - struct BlobSlice { - /// @notice The blobs containing the proposal's content. - bytes32[] blobHashes; - /// @notice The byte offset of the proposal's content in the containing blobs. 
- uint24 offset; - /// @notice The timestamp when the frame was created. - uint48 timestamp; - } - - struct CoreState { - /// @notice The next proposal ID to be assigned. - uint48 nextProposalId; - /// @notice The last L1 block ID where a proposal was made. - uint48 lastProposalBlockId; - /// @notice The ID of the last finalized proposal. - uint48 lastFinalizedProposalId; - /// @notice The timestamp when the last proposal was finalized. - uint48 lastFinalizedTimestamp; - /// @notice The timestamp when the last checkpoint was saved. - /// @dev In genesis block, this is set to 0 to allow the first checkpoint to be saved. - uint48 lastCheckpointTimestamp; - /// @notice The hash of the last finalized transition. - bytes32 lastFinalizedTransitionHash; - } - - struct ForcedInclusion { - uint64 feeInGwei; - BlobSlice blobSlice; - } - - uint48 public activationTimestamp; - - function getForcedInclusionState() - external - view - returns (uint48 head_, uint48 tail_, uint48 lastProcessedAt_); - - function getForcedInclusions( - uint48 _start, - uint48 _maxCount - ) - external - view - returns (ForcedInclusion[] memory inclusions_); - - function propose(bytes calldata _lookahead, bytes calldata _data) external; - - function getConfig() external view returns (Config memory config_); - - function getState() external view returns (CoreState memory state_); -} -); diff --git a/shasta/src/l1/bindings/mod.rs b/shasta/src/l1/bindings/mod.rs index 20604d2f..36e8a755 100644 --- a/shasta/src/l1/bindings/mod.rs +++ b/shasta/src/l1/bindings/mod.rs @@ -1,5 +1,3 @@ -mod inbox; mod preconf_whitelist; -pub use inbox::Inbox; pub use preconf_whitelist::PreconfWhitelist; diff --git a/shasta/src/l1/execution_layer.rs b/shasta/src/l1/execution_layer.rs index 814ec771..7f25a245 100644 --- a/shasta/src/l1/execution_layer.rs +++ b/shasta/src/l1/execution_layer.rs @@ -1,7 +1,7 @@ // TODO remove allow dead_code when the module is used #![allow(dead_code)] -use super::bindings::{Inbox, 
PreconfWhitelist}; +use super::bindings::PreconfWhitelist; use super::config::EthereumL1Config; use super::proposal_tx_builder::ProposalTxBuilder; use super::protocol_config::ProtocolConfig; @@ -26,6 +26,9 @@ use common::{ }; use pacaya::l1::traits::{OperatorError, PreconfOperator, WhitelistProvider}; use std::sync::Arc; +use taiko_bindings::{ + inbox::IForcedInclusionStore::ForcedInclusion, inbox::IInbox::CoreState, inbox::Inbox, +}; use tokio::sync::mpsc::Sender; use tracing::info; @@ -254,7 +257,7 @@ impl ExecutionLayer { Ok(state.tail_.to::()) } - pub async fn get_forced_inclusion(&self, index: u64) -> Result { + pub async fn get_forced_inclusion(&self, index: u64) -> Result { let shasta_inbox = Inbox::new(self.contract_addresses.shasta_inbox, self.provider.clone()); let inclusions = shasta_inbox .getForcedInclusions(U48::from(index), U48::ONE) @@ -269,10 +272,10 @@ impl ExecutionLayer { Ok(inclusion.clone()) } - pub async fn get_inbox_state(&self) -> Result { + pub async fn get_inbox_state(&self) -> Result { let shasta_inbox = Inbox::new(self.contract_addresses.shasta_inbox, self.provider.clone()); let state = shasta_inbox - .getState() + .getCoreState() .call() .await .map_err(|e| anyhow::anyhow!("Failed to call getInboxState for Inbox: {e}"))?; diff --git a/shasta/src/l1/proposal_tx_builder.rs b/shasta/src/l1/proposal_tx_builder.rs index b715098e..0e4b35a6 100644 --- a/shasta/src/l1/proposal_tx_builder.rs +++ b/shasta/src/l1/proposal_tx_builder.rs @@ -1,4 +1,3 @@ -use super::bindings::Inbox; use alloy::{ consensus::SidecarBuilder, network::{TransactionBuilder, TransactionBuilder4844}, @@ -13,8 +12,9 @@ use alloy_json_rpc::RpcError; use anyhow::Error; use common::l1::{fees_per_gas::FeesPerGas, tools, transaction_error::TransactionError}; use common::shared::l2_block_v2::L2BlockV2; -use taiko_bindings::codec_optimized::{ - CodecOptimized::CodecOptimizedInstance, IInbox::ProposeInput, LibBlobs::BlobReference, +use taiko_bindings::{ + 
codec::{Codec::CodecInstance, IInbox::ProposeInput, LibBlobs::BlobReference}, + inbox::Inbox, }; use taiko_protocol::shasta::{ BlobCoder, @@ -135,7 +135,7 @@ impl ProposalTxBuilder { numForcedInclusions: num_forced_inclusion, }; - let codec = CodecOptimizedInstance::new(self.codec_address, self.provider.clone()); + let codec = CodecInstance::new(self.codec_address, self.provider.clone()); let encoded_proposal_input = codec.encodeProposeInput(input).call().await?; let tx = TransactionRequest::default() diff --git a/shasta/src/l1/protocol_config.rs b/shasta/src/l1/protocol_config.rs index 4ad12775..4ff10b11 100644 --- a/shasta/src/l1/protocol_config.rs +++ b/shasta/src/l1/protocol_config.rs @@ -1,5 +1,5 @@ -use super::bindings::Inbox::Config; use alloy::primitives::Address; +use taiko_bindings::inbox::IInbox::Config; #[derive(Clone, Default)] pub struct ProtocolConfig {