From 680b84cd3d6025df3e2ca2fa625b361611107838 Mon Sep 17 00:00:00 2001 From: felipe Date: Fri, 1 Aug 2025 22:51:34 +0000 Subject: [PATCH 1/2] feat(consume): Add consume sync tests Add a new type of test that is essentially `consume_engine` but gets another client to sync after payloads are executed. - These tests can be filled with: `uv run fill ... -m "blockchain_test_sync"` - Filled tests can be run against hive with `uv run consume sync` Squashes: - chore: fix formatting and lint errors - chore: lower some of the timeouts and sleeps for sync tests - chore(docs): Document consume sync and add entry to CHANGELOG.md - Add tests for the ``verify_sync`` marker to make sure it's behaving as expected. - chore: Don't convert state tests to sync tests - refactor: Make test ids better - Changes from comments on PR #2007 - Use `pytest.mark.verify_sync` instead of the previous `verify_sync` flag on the blockchain test. - Remove the syncing periodic checks as when the forkchoice_updated response is `VALID`, we can break. - Update all files according to changes above. 
--- docs/CHANGELOG.md | 2 + docs/running_tests/running.md | 20 + .../test_formats/blockchain_test_sync.md | 239 +++++++++ src/cli/pytest_commands/consume.py | 10 + src/cli/pytest_commands/processors.py | 2 + src/ethereum_test_fixtures/__init__.py | 2 + src/ethereum_test_fixtures/blockchain.py | 17 +- src/ethereum_test_rpc/__init__.py | 12 +- src/ethereum_test_rpc/rpc.py | 21 + src/ethereum_test_specs/blockchain.py | 66 ++- src/ethereum_test_specs/state.py | 5 + src/pytest_plugins/consume/consume.py | 7 +- src/pytest_plugins/consume/simulators/base.py | 2 +- .../simulator_logic/test_via_sync.py | 503 ++++++++++++++++++ .../consume/simulators/single_test_client.py | 6 +- .../consume/simulators/sync/__init__.py | 1 + .../consume/simulators/sync/conftest.py | 287 ++++++++++ src/pytest_plugins/filler/filler.py | 3 + .../filler/tests/test_verify_sync_marker.py | 124 +++++ .../test_max_block_rlp_size.py | 10 +- 20 files changed, 1297 insertions(+), 42 deletions(-) create mode 100644 docs/running_tests/test_formats/blockchain_test_sync.md create mode 100644 src/pytest_plugins/consume/simulators/simulator_logic/test_via_sync.py create mode 100644 src/pytest_plugins/consume/simulators/sync/__init__.py create mode 100644 src/pytest_plugins/consume/simulators/sync/conftest.py create mode 100644 src/pytest_plugins/filler/tests/test_verify_sync_marker.py diff --git a/docs/CHANGELOG.md b/docs/CHANGELOG.md index 1d419042a88..91e223f36a8 100644 --- a/docs/CHANGELOG.md +++ b/docs/CHANGELOG.md @@ -82,6 +82,7 @@ Users can select any of the artifacts depending on their testing needs for their - ✨ Added support for the `--benchmark-gas-values` flag in the `fill` command, allowing a single genesis file to be used across different gas limit settings when generating fixtures. ([#1895](https://github.com/ethereum/execution-spec-tests/pull/1895)). 
- ✨ Static tests can now specify a maximum fork where they should be filled for ([#1977](https://github.com/ethereum/execution-spec-tests/pull/1977)). - ✨ Static tests can now be filled in every format using `--generate-all-formats` ([#2006](https://github.com/ethereum/execution-spec-tests/pull/2006)). +- ✨ Add support for `BlockchainEngineSyncFixture` format for tests marked with `pytest.mark.verify_sync` to enable client synchronization testing via `consume sync` command ([#2007](https://github.com/ethereum/execution-spec-tests/pull/2007)). #### `consume` @@ -90,6 +91,7 @@ Users can select any of the artifacts depending on their testing needs for their - 🔀 `consume` now automatically avoids GitHub API calls when using direct release URLs (better for CI environments), while release specifiers like `stable@latest` continue to use the API for version resolution ([#1788](https://github.com/ethereum/execution-spec-tests/pull/1788)). - 🔀 Refactor consume simulator architecture to use explicit pytest plugin structure with forward-looking architecture ([#1801](https://github.com/ethereum/execution-spec-tests/pull/1801)). - 🔀 Add exponential retry logic to initial fcu within consume engine ([#1815](https://github.com/ethereum/execution-spec-tests/pull/1815)). +- ✨ Add `consume sync` command to test client synchronization capabilities by having one client sync from another via Engine API and P2P networking ([#2007](https://github.com/ethereum/execution-spec-tests/pull/2007)). 
#### `execute` diff --git a/docs/running_tests/running.md b/docs/running_tests/running.md index 7c5535d0fcc..94272d7d4e0 100644 --- a/docs/running_tests/running.md +++ b/docs/running_tests/running.md @@ -14,6 +14,7 @@ Both `consume` and `execute` provide sub-commands which correspond to different | [`consume direct`](#direct) | Client consume tests via a `statetest` interface | EVM | None | Module test | | [`consume direct`](#direct) | Client consume tests via a `blocktest` interface | EVM, block processing | None | Module test,
Integration test | | [`consume engine`](#engine) | Client imports blocks via Engine API `EngineNewPayload` in Hive | EVM, block processing, Engine API | Staging, Hive | System test | +| [`consume sync`](#sync) | Client syncs from another client using Engine API in Hive | EVM, block processing, Engine API, P2P sync | Staging, Hive | System test | | [`consume rlp`](#rlp) | Client imports RLP-encoded blocks upon start-up in Hive | EVM, block processing, RLP import (sync\*) | Staging, Hive | System test | | [`execute hive`](./execute/hive.md) | Tests executed against a client via JSON RPC `eth_sendRawTransaction` in Hive | EVM, JSON RPC, mempool | Staging, Hive | System test | | [`execute remote`](./execute/remote.md) | Tests executed against a client via JSON RPC `eth_sendRawTransaction` on a live network | EVM, JSON RPC, mempool, EL-EL/EL-CL interaction (indirectly) | Production | System Test | @@ -81,6 +82,25 @@ The `consume rlp` command: This method simulates how clients import blocks during historical sync, testing the complete block validation and state transition pipeline, see below for more details and a comparison to consumption via the Engine API. +## Sync + +| Nomenclature | | +| -------------- |------------------------| +| Command | `consume sync` | +| Simulator | None | +| Fixture format | `blockchain_test_sync` | + +The consume sync method tests execution client synchronization capabilities by having one client sync from another via the Engine API and P2P networking. This method validates that clients can correctly synchronize state and blocks from peers, testing both the Engine API, sync triggering, and P2P block propagation mechanisms. + +The `consume sync` command: + +1. **Initializes the client under test** with genesis state and executes all test payloads. +2. **Spins up a sync client** with the same genesis state. +3. **Establishes P2P connection** between the two clients, utilizing ``admin_addPeer`` with enode url. +4. 
**Triggers synchronization** by sending the target block to the sync client via `engine_newPayload` followed by `engine_forkchoiceUpdated` requests. +5. **Monitors sync progress** and validates that the sync client reaches the same state. +6. **Verifies final state** matches between both clients. + ## Engine vs RLP Simulator The RLP Simulator (`eest/consume-rlp`) and the Engine Simulator (`eest/consume-engine`) should be seen as complimentary to one another. Although they execute the same underlying EVM test cases, the block validation logic is executed via different client code paths (using different [fixture formats](./test_formats/index.md)). Therefore, ideally, **both simulators should be executed for full coverage**. diff --git a/docs/running_tests/test_formats/blockchain_test_sync.md b/docs/running_tests/test_formats/blockchain_test_sync.md new file mode 100644 index 00000000000..b1ff96e93d3 --- /dev/null +++ b/docs/running_tests/test_formats/blockchain_test_sync.md @@ -0,0 +1,239 @@ +# Blockchain Engine Sync Tests + +The Blockchain Engine Sync Test fixture format tests are included in the fixtures subdirectory `blockchain_tests_sync`, and use Engine API directives to test client synchronization capabilities after fixtures are executed with valid payloads. + +These are produced by the `BlockchainTest` test spec when `pytest.mark.verify_sync` is used as a test marker. + +## Description + +The Blockchain Engine Sync Test fixture format is used to test execution client synchronization between peers. It validates that clients can correctly sync state and blocks from another client using the Engine API and P2P networking. + +The test works by: + +1. Setting up a client under test, defining a pre-execution state, a series of `engine_newPayloadVX` directives, and a post-execution state, as in Blockchain Engine Test fixture formats. +2. Starting a sync client with the same genesis and pre-execution state. +3. 
Having the sync client synchronize from the client under test. +4. Verifying that both clients reach the same final state. + +A single JSON fixture file is composed of a JSON object where each key-value pair is a different [`SyncFixture`](#syncfixture) test object, with the key string representing the test name. + +The JSON file path plus the test name are used as the unique test identifier, as well as a `{client under test}::sync_{sync client}` identifier. + +## Consumption + +For each [`HiveFixture`](#hivefixture) test object in the JSON fixture file, perform the following steps: + +### Client Under Test Setup + +1. Start the client under test using: + - [`network`](#-network-fork) to configure the execution fork schedule according to the [`Fork`](./common_types.md#fork) type definition. + - [`pre`](#-pre-alloc) as the starting state allocation of the execution environment for the test and calculate the genesis state root. + - [`genesisBlockHeader`](#-genesisblockheader-fixtureheader) as the genesis block header. + +2. Verify the head of the chain is the genesis block, and the state root matches the one calculated on step 1, otherwise fail the test. + +3. Process all [`FixtureEngineNewPayload`](#fixtureenginenewpayload) objects in [`engineNewPayloads`](#-enginenewpayloads-listfixtureenginenewpayload) to build the complete chain on the client under test. + +### Sync Client Setup and Synchronization + +1. Start a sync client using the same genesis configuration: + - Use the same [`network`](#-network-fork), [`pre`](#-pre-alloc), and [`genesisBlockHeader`](#-genesisblockheader-fixtureheader). + +2. Establish P2P connection between the clients: + - Get the enode URL from the client under test + - Use `admin_addPeer` to connect the sync client to the client under test + +3. 
Trigger synchronization on the sync client: + - Send the [`syncPayload`](#-syncpayload-fixtureenginenewpayload) using `engine_newPayloadVX` + - Send `engine_forkchoiceUpdatedVX` pointing to the last block hash + +4. Monitor and verify synchronization: + - Wait for the sync client to reach the [`lastblockhash`](#-lastblockhash-hash) + - Verify the final state root matches between both clients + - If [`post`](#-post-alloc) is provided, verify the final state matches + +## Structures + +### `HiveFixture` + +#### - `network`: [`Fork`](./common_types.md#fork) + +##### TO BE DEPRECATED + +Fork configuration for the test. + +This field is going to be replaced by the value contained in `config.network`. + +#### - `genesisBlockHeader`: [`FixtureHeader`](./blockchain_test.md#fixtureheader) + +Genesis block header. + +#### - `engineNewPayloads`: [`List`](./common_types.md#list)`[`[`FixtureEngineNewPayload`](#fixtureenginenewpayload)`]` + +List of `engine_newPayloadVX` directives to be processed by the client under test to build the complete chain. + +#### - `syncPayload`: [`FixtureEngineNewPayload`](#fixtureenginenewpayload) + +The final payload to be sent to the sync client to trigger synchronization. This is typically an empty block built on top of the last test block. + +#### - `engineFcuVersion`: [`Number`](./common_types.md#number) + +Version of the `engine_forkchoiceUpdatedVX` directive to use to set the head of the chain. + +#### - `pre`: [`Alloc`](./common_types.md#alloc-mappingaddressaccount) + +Starting account allocation for the test. State root calculated from this allocation must match the one in the genesis block. + +#### - `lastblockhash`: [`Hash`](./common_types.md#hash) + +Hash of the last valid block that the sync client should reach after successful synchronization. + +#### - `post`: [`Alloc`](./common_types.md#alloc-mappingaddressaccount) + +Account allocation for verification after synchronization is complete. 
+ +#### - `postStateHash`: [`Optional`](./common_types.md#optional)`[`[`Hash`](./common_types.md#hash)`]` + +Optional state root hash for verification after synchronization is complete. Used when full post-state is not included. + +#### - `config`: [`FixtureConfig`](#fixtureconfig) + +Chain configuration object to be applied to both clients running the blockchain sync test. + +### `FixtureConfig` + +#### - `network`: [`Fork`](./common_types.md#fork) + +Fork configuration for the test. It is guaranteed that this field contains the same value as the root field `network`. + +#### - `chainId`: [`Number`](./common_types.md#number) + +Chain ID configuration for the test network. + +#### - `blobSchedule`: [`BlobSchedule`](./common_types.md#blobschedule-mappingforkforkblobschedule) + +Optional; present from Cancun on. Maps forks to their blob schedule configurations as defined by [EIP-7840](https://eips.ethereum.org/EIPS/eip-7840). + +### `FixtureEngineNewPayload` + +#### - `executionPayload`: [`FixtureExecutionPayload`](#fixtureexecutionpayload) + +Execution payload. + +#### - `blob_versioned_hashes`: [`Optional`](./common_types.md#optional)`[`[`List`](./common_types.md#list)`[`[`Hash`](./common_types.md#hash)`]]` `(fork: Cancun)` + +List of hashes of the versioned blobs that are part of the execution payload. + +#### - `parentBeaconBlockRoot`: [`Optional`](./common_types.md#optional)`[`[`Hash`](./common_types.md#hash)`]` `(fork: Cancun)` + +Hash of the parent beacon block root. + +#### - `validationError`: [`Optional`](./common_types.md#optional)`[`[`TransactionException`](../../library/ethereum_test_exceptions.md#ethereum_test_exceptions.TransactionException)` | `[`BlockException`](../../library/ethereum_test_exceptions.md#ethereum_test_exceptions.BlockException)`]` + +For sync tests, this field should not be present as sync tests only work with valid chains. Invalid blocks cannot be synced. 
+ +#### - `version`: [`Number`](./common_types.md#number) + +Version of the `engine_newPayloadVX` directive to use to deliver the payload. + +### `FixtureExecutionPayload` + +#### - `parentHash`: [`Hash`](./common_types.md#hash) + +Hash of the parent block. + +#### - `feeRecipient`: [`Address`](./common_types.md#address) + +Address of the account that will receive the rewards for building the block. + +#### - `stateRoot`: [`Hash`](./common_types.md#hash) + +Root hash of the state trie. + +#### - `receiptsRoot`: [`Hash`](./common_types.md#hash) + +Root hash of the receipts trie. + +#### - `logsBloom`: [`Bloom`](./common_types.md#bloom) + +Bloom filter composed of the logs of all the transactions in the block. + +#### - `blockNumber`: [`HexNumber`](./common_types.md#hexnumber) + +Number of the block. + +#### - `gasLimit`: [`HexNumber`](./common_types.md#hexnumber) + +Total gas limit of the block. + +#### - `gasUsed`: [`HexNumber`](./common_types.md#hexnumber) + +Total gas used by all the transactions in the block. + +#### - `timestamp`: [`HexNumber`](./common_types.md#hexnumber) + +Timestamp of the block. + +#### - `extraData`: [`Bytes`](./common_types.md#bytes) + +Extra data of the block. + +#### - `prevRandao`: [`Hash`](./common_types.md#hash) + +PrevRandao of the block. + +#### - `blockHash`: [`Hash`](./common_types.md#hash) + +Hash of the block. + +#### - `transactions`: [`List`](./common_types.md#list)`[`[`Bytes`](./common_types.md#bytes)`]` + +List of transactions in the block, in serialized format. + +#### - `withdrawals`: [`List`](./common_types.md#list)`[`[`FixtureWithdrawal`](#fixturewithdrawal)`]` + +List of withdrawals in the block. + +#### - `baseFeePerGas`: [`HexNumber`](./common_types.md#hexnumber) `(fork: London)` + +Base fee per gas of the block. + +#### - `blobGasUsed`: [`HexNumber`](./common_types.md#hexnumber) `(fork: Cancun)` + +Total blob gas used by all the transactions in the block. 
+ +#### - `excessBlobGas`: [`HexNumber`](./common_types.md#hexnumber) `(fork: Cancun)` + +Excess blob gas of the block used to calculate the blob fee per gas for this block. + +### `FixtureWithdrawal` + +#### - `index`: [`HexNumber`](./common_types.md#hexnumber) + +Index of the withdrawal + +#### - `validatorIndex`: [`HexNumber`](./common_types.md#hexnumber) + +Withdrawing validator index + +#### - `address`: [`Address`](./common_types.md#address) + +Address to withdraw to + +#### - `amount`: [`HexNumber`](./common_types.md#hexnumber) + +Amount of the withdrawal + +## Differences from Blockchain Engine Tests + +While the Blockchain Sync Test format is similar to the Blockchain Engine Test format, there are key differences: + +1. **`syncPayload` field**: Contains the final block used to trigger synchronization on the sync client. +2. **Multi-client testing**: Tests involve two clients (client under test and sync client) rather than a single client. +3. **P2P networking**: Tests require P2P connection establishment between clients. +4. **No invalid blocks**: Sync tests only work with valid chains as invalid blocks cannot be synced. +5. **`postStateHash` field**: Optional field for state verification when full post-state is not included. + +## Fork Support + +Blockchain Sync Tests are only supported for post-merge forks (Paris and later) as they rely on the Engine API for synchronization triggering. 
diff --git a/src/cli/pytest_commands/consume.py b/src/cli/pytest_commands/consume.py index 5e14413c80b..e3736ddf442 100644 --- a/src/cli/pytest_commands/consume.py +++ b/src/cli/pytest_commands/consume.py @@ -48,6 +48,10 @@ def get_command_logic_test_paths(command_name: str, is_hive: bool) -> List[Path] command_logic_test_paths = [ base_path / "simulators" / "simulator_logic" / f"test_via_{command_name}.py" ] + elif command_name == "sync": + command_logic_test_paths = [ + base_path / "simulators" / "simulator_logic" / "test_via_sync.py" + ] elif command_name == "direct": command_logic_test_paths = [base_path / "direct" / "test_via_direct.py"] else: @@ -107,6 +111,12 @@ def engine() -> None: pass +@consume_command(is_hive=True) +def sync() -> None: + """Client consumes via the Engine API with sync testing.""" + pass + + @consume_command(is_hive=True) def hive() -> None: """Client consumes via all available hive methods (rlp, engine).""" diff --git a/src/cli/pytest_commands/processors.py b/src/cli/pytest_commands/processors.py index e7948962704..5810d87f0c9 100644 --- a/src/cli/pytest_commands/processors.py +++ b/src/cli/pytest_commands/processors.py @@ -101,6 +101,8 @@ def process_args(self, args: List[str]) -> List[str]: if self.command_name == "engine": modified_args.extend(["-p", "pytest_plugins.consume.simulators.engine.conftest"]) + elif self.command_name == "sync": + modified_args.extend(["-p", "pytest_plugins.consume.simulators.sync.conftest"]) elif self.command_name == "rlp": modified_args.extend(["-p", "pytest_plugins.consume.simulators.rlp.conftest"]) else: diff --git a/src/ethereum_test_fixtures/__init__.py b/src/ethereum_test_fixtures/__init__.py index c42b94bcd81..ecfb385399a 100644 --- a/src/ethereum_test_fixtures/__init__.py +++ b/src/ethereum_test_fixtures/__init__.py @@ -4,6 +4,7 @@ from .blockchain import ( BlockchainEngineFixture, BlockchainEngineFixtureCommon, + BlockchainEngineSyncFixture, BlockchainEngineXFixture, BlockchainFixture, 
BlockchainFixtureCommon, @@ -19,6 +20,7 @@ "BaseFixture", "BlockchainEngineFixture", "BlockchainEngineFixtureCommon", + "BlockchainEngineSyncFixture", "BlockchainEngineXFixture", "BlockchainFixture", "BlockchainFixtureCommon", diff --git a/src/ethereum_test_fixtures/blockchain.py b/src/ethereum_test_fixtures/blockchain.py index 46279a71510..b9b915921b9 100644 --- a/src/ethereum_test_fixtures/blockchain.py +++ b/src/ethereum_test_fixtures/blockchain.py @@ -543,7 +543,6 @@ class BlockchainEngineFixture(BlockchainEngineFixtureCommon): genesis: FixtureHeader = Field(..., alias="genesisBlockHeader") post_state: Alloc | None = Field(None) payloads: List[FixtureEngineNewPayload] = Field(..., alias="engineNewPayloads") - sync_payload: FixtureEngineNewPayload | None = None @post_state_validator(alternate_field="post_state_diff") @@ -571,5 +570,19 @@ class BlockchainEngineXFixture(BlockchainEngineFixtureCommon): payloads: List[FixtureEngineNewPayload] = Field(..., alias="engineNewPayloads") """Engine API payloads for blockchain execution.""" + +class BlockchainEngineSyncFixture(BlockchainEngineFixture): + """ + Engine Sync specific test fixture information. + + This fixture format is specifically designed for sync testing where: + - The client under test receives all payloads + - A sync client attempts to sync from the client under test + - Both client types are parametrized from hive client config + """ + + format_name: ClassVar[str] = "blockchain_test_sync" + description: ClassVar[str] = ( + "Tests that generate a blockchain test fixture for Engine API testing with client sync." 
+ ) sync_payload: FixtureEngineNewPayload | None = None - """Optional sync payload for blockchain synchronization.""" diff --git a/src/ethereum_test_rpc/__init__.py b/src/ethereum_test_rpc/__init__.py index 4df2faa0c9d..5e5438a2cae 100644 --- a/src/ethereum_test_rpc/__init__.py +++ b/src/ethereum_test_rpc/__init__.py @@ -1,6 +1,14 @@ """JSON-RPC methods and helper functions for EEST consume based hive simulators.""" -from .rpc import BlockNumberType, DebugRPC, EngineRPC, EthRPC, SendTransactionExceptionError +from .rpc import ( + AdminRPC, + BlockNumberType, + DebugRPC, + EngineRPC, + EthRPC, + NetRPC, + SendTransactionExceptionError, +) from .types import ( BlobAndProofV1, BlobAndProofV2, @@ -10,6 +18,7 @@ ) __all__ = [ + "AdminRPC", "BlobAndProofV1", "BlobAndProofV2", "BlockNumberType", @@ -19,5 +28,6 @@ "EthRPC", "ForkConfig", "ForkConfigBlobSchedule", + "NetRPC", "SendTransactionExceptionError", ] diff --git a/src/ethereum_test_rpc/rpc.py b/src/ethereum_test_rpc/rpc.py index bf69927691d..b50e22239cb 100644 --- a/src/ethereum_test_rpc/rpc.py +++ b/src/ethereum_test_rpc/rpc.py @@ -152,6 +152,10 @@ def get_block_by_number(self, block_number: BlockNumberType = "latest", full_txs block = hex(block_number) if isinstance(block_number, int) else block_number return self.post_request("getBlockByNumber", block, full_txs) + def get_block_by_hash(self, block_hash: Hash, full_txs: bool = True): + """`eth_getBlockByHash`: Returns information about a block by hash.""" + return self.post_request("getBlockByHash", f"{block_hash}", full_txs) + def get_balance(self, address: Address, block_number: BlockNumberType = "latest") -> int: """`eth_getBalance`: Returns the balance of the account of given address.""" block = hex(block_number) if isinstance(block_number, int) else block_number @@ -378,3 +382,20 @@ def get_blobs( ), context=self.response_validation_context, ) + + +class NetRPC(BaseRPC): + """Represents a net RPC class for network-related RPC calls.""" + + def 
peer_count(self) -> int: + """`net_peerCount`: Get the number of peers connected to the client.""" + response = self.post_request("peerCount") + return int(response, 16) # hex -> int + + +class AdminRPC(BaseRPC): + """Represents an admin RPC class for administrative RPC calls.""" + + def add_peer(self, enode: str) -> bool: + """`admin_addPeer`: Add a peer by enode URL.""" + return self.post_request("addPeer", enode) diff --git a/src/ethereum_test_specs/blockchain.py b/src/ethereum_test_specs/blockchain.py index b80877d09a4..4a4919a2b4f 100644 --- a/src/ethereum_test_specs/blockchain.py +++ b/src/ethereum_test_specs/blockchain.py @@ -1,7 +1,7 @@ """Ethereum blockchain test spec definition and filler.""" from pprint import pprint -from typing import Any, Callable, ClassVar, Dict, Generator, List, Optional, Sequence, Tuple, Type +from typing import Any, Callable, ClassVar, Dict, Generator, List, Sequence, Tuple, Type import pytest from pydantic import ConfigDict, Field, field_validator @@ -33,6 +33,7 @@ from ethereum_test_fixtures import ( BaseFixture, BlockchainEngineFixture, + BlockchainEngineSyncFixture, BlockchainEngineXFixture, BlockchainFixture, FixtureFormat, @@ -403,7 +404,6 @@ class BlockchainTest(BaseTest): post: Alloc blocks: List[Block] genesis_environment: Environment = Field(default_factory=Environment) - verify_sync: bool = False chain_id: int = 1 exclude_full_post_state_in_output: bool = False """ @@ -415,6 +415,7 @@ class BlockchainTest(BaseTest): BlockchainFixture, BlockchainEngineFixture, BlockchainEngineXFixture, + BlockchainEngineSyncFixture, ] supported_execute_formats: ClassVar[Sequence[LabeledExecuteFormat]] = [ LabeledExecuteFormat( @@ -445,6 +446,8 @@ def discard_fixture_format_by_marks( and "blockchain_test_engine_only" in marker_names ): return True + if fixture_format == BlockchainEngineSyncFixture and "verify_sync" not in marker_names: + return True return False def get_genesis_environment(self, fork: Fork) -> Environment: @@ -714,7 
+717,7 @@ def make_hive_fixture( t8n: TransitionTool, fork: Fork, fixture_format: FixtureFormat = BlockchainEngineFixture, - ) -> BlockchainEngineFixture | BlockchainEngineXFixture: + ) -> BlockchainEngineFixture | BlockchainEngineXFixture | BlockchainEngineSyncFixture: """Create a hive fixture from the blocktest definition.""" fixture_payloads: List[FixtureEngineNewPayload] = [] @@ -758,30 +761,11 @@ def make_hive_fixture( self.verify_post_state(t8n, t8n_state=alloc) - sync_payload: Optional[FixtureEngineNewPayload] = None - if self.verify_sync: - # Test is marked for syncing verification. - assert genesis.header.block_hash != head_hash, ( - "Invalid payload tests negative test via sync is not supported yet." - ) - - # Most clients require the header to start the sync process, so we create an empty - # block on top of the last block of the test to send it as new payload and trigger the - # sync process. - sync_built_block = self.generate_block_data( - t8n=t8n, - fork=fork, - block=Block(), - previous_env=env, - previous_alloc=alloc, - last_block=False, - ) - sync_payload = sync_built_block.get_fixture_engine_new_payload() - - # Create base fixture data + # Create base fixture data, common to all fixture formats fixture_data = { "fork": fork, "genesis": genesis.header, + "payloads": fixture_payloads, "last_block_hash": head_hash, "post_state_hash": alloc.state_root() if self.exclude_full_post_state_in_output @@ -799,19 +783,39 @@ def make_hive_fixture( # and prepare for state diff optimization fixture_data.update( { - "payloads": fixture_payloads, - "sync_payload": sync_payload, "post_state": alloc if not self.exclude_full_post_state_in_output else None, "pre_hash": "", # Will be set by BaseTestWrapper } ) return BlockchainEngineXFixture(**fixture_data) + elif fixture_format == BlockchainEngineSyncFixture: + # Sync fixture format + assert genesis.header.block_hash != head_hash, ( + "Invalid payload tests negative test via sync is not supported yet." 
+ ) + # Most clients require the header to start the sync process, so we create an empty + # block on top of the last block of the test to send it as new payload and trigger the + # sync process. + sync_built_block = self.generate_block_data( + t8n=t8n, + fork=fork, + block=Block(), + previous_env=env, + previous_alloc=alloc, + last_block=False, + ) + fixture_data.update( + { + "sync_payload": sync_built_block.get_fixture_engine_new_payload(), + "pre": pre, + "post_state": alloc if not self.exclude_full_post_state_in_output else None, + } + ) + return BlockchainEngineSyncFixture(**fixture_data) else: # Standard engine fixture fixture_data.update( { - "payloads": fixture_payloads, - "sync_payload": sync_payload, "pre": pre, "post_state": alloc if not self.exclude_full_post_state_in_output else None, } @@ -826,7 +830,11 @@ def generate( ) -> BaseFixture: """Generate the BlockchainTest fixture.""" t8n.reset_traces() - if fixture_format in [BlockchainEngineFixture, BlockchainEngineXFixture]: + if fixture_format in [ + BlockchainEngineFixture, + BlockchainEngineXFixture, + BlockchainEngineSyncFixture, + ]: return self.make_hive_fixture(t8n, fork, fixture_format) elif fixture_format == BlockchainFixture: return self.make_fixture(t8n, fork) diff --git a/src/ethereum_test_specs/state.py b/src/ethereum_test_specs/state.py index 7eae27a89c5..f2962747da6 100644 --- a/src/ethereum_test_specs/state.py +++ b/src/ethereum_test_specs/state.py @@ -61,6 +61,11 @@ class StateTest(BaseTest): f"A {fixture_format.format_name} generated from a state_test", ) for fixture_format in BlockchainTest.supported_fixture_formats + # Exclude sync fixtures from state tests - they don't make sense for state tests + if not ( + (hasattr(fixture_format, "__name__") and "Sync" in fixture_format.__name__) + or (hasattr(fixture_format, "format") and "Sync" in fixture_format.format.__name__) + ) ] supported_execute_formats: ClassVar[Sequence[LabeledExecuteFormat]] = [ LabeledExecuteFormat( diff --git 
a/src/pytest_plugins/consume/consume.py b/src/pytest_plugins/consume/consume.py index ffe2cecb9e1..359ee805584 100644 --- a/src/pytest_plugins/consume/consume.py +++ b/src/pytest_plugins/consume/consume.py @@ -506,5 +506,8 @@ def pytest_generate_tests(metafunc): metafunc.parametrize("test_case", param_list) if "client_type" in metafunc.fixturenames: - client_ids = [client.name for client in metafunc.config.hive_execution_clients] - metafunc.parametrize("client_type", metafunc.config.hive_execution_clients, ids=client_ids) + metafunc.parametrize( + "client_type", + metafunc.config.hive_execution_clients, + ids=[client.name for client in metafunc.config.hive_execution_clients], + ) diff --git a/src/pytest_plugins/consume/simulators/base.py b/src/pytest_plugins/consume/simulators/base.py index 84bd11529ae..0b62ebb5873 100644 --- a/src/pytest_plugins/consume/simulators/base.py +++ b/src/pytest_plugins/consume/simulators/base.py @@ -27,7 +27,7 @@ def check_live_port(test_suite_name: str) -> Literal[8545, 8551]: """Port used by hive to check for liveness of the client.""" if test_suite_name == "eest/consume-rlp": return 8545 - elif test_suite_name == "eest/consume-engine": + elif test_suite_name in {"eest/consume-engine", "eest/consume-sync"}: return 8551 raise ValueError( f"Unexpected test suite name '{test_suite_name}' while setting HIVE_CHECK_LIVE_PORT." diff --git a/src/pytest_plugins/consume/simulators/simulator_logic/test_via_sync.py b/src/pytest_plugins/consume/simulators/simulator_logic/test_via_sync.py new file mode 100644 index 00000000000..adbbbb0f8a8 --- /dev/null +++ b/src/pytest_plugins/consume/simulators/simulator_logic/test_via_sync.py @@ -0,0 +1,503 @@ +""" +A hive based simulator that executes blocks against clients using the `engine_newPayloadV*` method +from the Engine API with sync testing. The simulator uses the `BlockchainEngineSyncFixtures` to +test against clients with client synchronization. + +This simulator: +1. 
Spins up two clients: one as the client under test and another as the sync client +2. Executes payloads on the client under test +3. Has the sync client synchronize from the client under test +4. Verifies that the sync was successful +""" + +import time + +import pytest + +from ethereum_test_base_types import Hash +from ethereum_test_exceptions import UndefinedException +from ethereum_test_fixtures import BlockchainEngineSyncFixture +from ethereum_test_rpc import AdminRPC, EngineRPC, EthRPC, NetRPC +from ethereum_test_rpc.types import ( + ForkchoiceState, + JSONRPCError, + PayloadStatusEnum, +) + +from ....logging import get_logger +from ..helpers.exceptions import GenesisBlockMismatchExceptionError +from ..helpers.timing import TimingData + +logger = get_logger(__name__) + + +class LoggedError(Exception): + """Exception that uses the logger to log the failure.""" + + def __init__(self, *args: object) -> None: + """Initialize the exception and log the failure.""" + super().__init__(*args) + logger.fail(str(self)) + + +def wait_for_sync( + sync_eth_rpc: EthRPC, + sync_engine_rpc: EngineRPC, + expected_block_hash: str | Hash, + timeout: int = 10, + poll_interval: float = 1.0, +) -> bool: + """Wait for the sync client to reach the expected block hash.""" + start_time = time.time() + last_block_number = 0 + no_progress_count = 0 + + while time.time() - start_time < timeout: + try: + # First check if we have the expected block + block = sync_eth_rpc.get_block_by_hash(Hash(expected_block_hash)) + if block is not None: + logger.info(f"Sync complete! 
Client has block {expected_block_hash}") + return True + + # Check current sync progress + current_block = sync_eth_rpc.get_block_by_number("latest") + if current_block: + current_number = int(current_block.get("number", "0x0"), 16) + current_hash = current_block.get("hash", "unknown") + if current_number > last_block_number: + logger.info(f"Sync progress: block {current_number} (hash: {current_hash})") + last_block_number = current_number + no_progress_count = 0 + else: + no_progress_count += 1 + if no_progress_count == 1: + logger.info( + f"Sync client is at block {current_number} (hash: {current_hash})" + ) + elif no_progress_count % 10 == 0: + logger.debug( + f"No sync progress for {no_progress_count} polls, " + f"still at block {current_number}" + ) + + except Exception as e: + logger.debug(f"Error checking sync status: {e}") + + time.sleep(poll_interval) + + # Log final state + try: + final_block = sync_eth_rpc.get_block_by_number("latest") + if final_block: + logger.warning( + f"Sync timeout! Final block: {final_block.get('number', 'unknown')} " + f"(hash: {final_block.get('hash', 'unknown')})" + ) + except Exception: + pass + + return False + + +def test_blockchain_via_sync( + timing_data: TimingData, + eth_rpc: EthRPC, + engine_rpc: EngineRPC, + net_rpc: NetRPC, + admin_rpc: AdminRPC, + sync_eth_rpc: EthRPC, + sync_engine_rpc: EngineRPC, + sync_net_rpc: NetRPC, + sync_admin_rpc: AdminRPC, + client_enode_url: str, + fixture: BlockchainEngineSyncFixture, + strict_exception_matching: bool, +): + """ + Test blockchain synchronization between two clients. + + 1. Initialize the client under test with the genesis block + 2. Execute all payloads on the client under test + 3. Initialize the sync client with the genesis block + 4. Send sync payload and forkchoice_updated to the sync client to trigger + synchronization + 5. 
Verify that the sync client successfully syncs to the same state + """ + # Initialize client under test + with timing_data.time("Initialize client under test"): + logger.info("Initializing client under test with genesis block...") + + # Send initial forkchoice update to client under test + delay = 0.5 + for attempt in range(3): + forkchoice_response = engine_rpc.forkchoice_updated( + forkchoice_state=ForkchoiceState( + head_block_hash=fixture.genesis.block_hash, + ), + payload_attributes=None, + version=fixture.payloads[0].forkchoice_updated_version, + ) + status = forkchoice_response.payload_status.status + logger.info(f"Initial forkchoice update response attempt {attempt + 1}: {status}") + if status != PayloadStatusEnum.SYNCING: + break + if attempt < 2: + time.sleep(delay) + delay *= 2 + + if forkchoice_response.payload_status.status != PayloadStatusEnum.VALID: + logger.error( + f"Client under test failed to initialize properly after 3 attempts, " + f"final status: {forkchoice_response.payload_status.status}" + ) + raise LoggedError( + f"unexpected status on forkchoice updated to genesis: {forkchoice_response}" + ) + + # Verify genesis block on client under test + with timing_data.time("Verify genesis on client under test"): + logger.info("Verifying genesis block on client under test...") + genesis_block = eth_rpc.get_block_by_number(0) + if genesis_block["hash"] != str(fixture.genesis.block_hash): + expected = fixture.genesis.block_hash + got = genesis_block["hash"] + logger.fail(f"Genesis block hash mismatch. 
Expected: {expected}, Got: {got}") + raise GenesisBlockMismatchExceptionError( + expected_header=fixture.genesis, + got_genesis_block=genesis_block, + ) + + # Execute all payloads on client under test + last_valid_block_hash = fixture.genesis.block_hash + with timing_data.time("Execute payloads on client under test") as total_payload_timing: + logger.info(f"Starting execution of {len(fixture.payloads)} payloads...") + for i, payload in enumerate(fixture.payloads): + logger.info(f"Processing payload {i + 1}/{len(fixture.payloads)}...") + with total_payload_timing.time(f"Payload {i + 1}") as payload_timing: + with payload_timing.time(f"engine_newPayloadV{payload.new_payload_version}"): + logger.info(f"Sending engine_newPayloadV{payload.new_payload_version}...") + # Note: This is similar to the logic in test_via_engine.py + try: + payload_response = engine_rpc.new_payload( + *payload.params, + version=payload.new_payload_version, + ) + logger.info(f"Payload response status: {payload_response.status}") + expected_validity = ( + PayloadStatusEnum.VALID + if payload.valid() + else PayloadStatusEnum.INVALID + ) + if payload_response.status != expected_validity: + raise LoggedError( + f"unexpected status: want {expected_validity}," + f" got {payload_response.status}" + ) + if payload.error_code is not None: + raise LoggedError( + f"Client failed to raise expected Engine API error code: " + f"{payload.error_code}" + ) + elif payload_response.status == PayloadStatusEnum.INVALID: + if payload_response.validation_error is None: + raise LoggedError( + "Client returned INVALID but no validation error was provided." 
+ ) + if isinstance(payload_response.validation_error, UndefinedException): + message = ( + "Undefined exception message: " + f'expected exception: "{payload.validation_error}", ' + f'returned exception: "{payload_response.validation_error}" ' + f'(mapper: "{payload_response.validation_error.mapper_name}")' + ) + if strict_exception_matching: + raise LoggedError(message) + else: + logger.warning(message) + else: + if ( + payload.validation_error + not in payload_response.validation_error + ): + message = ( + "Client returned unexpected validation error: " + f'got: "{payload_response.validation_error}" ' + f'expected: "{payload.validation_error}"' + ) + if strict_exception_matching: + raise LoggedError(message) + else: + logger.warning(message) + + except JSONRPCError as e: + logger.info(f"JSONRPC error encountered: {e.code} - {e.message}") + if payload.error_code is None: + raise LoggedError(f"Unexpected error: {e.code} - {e.message}") from e + if e.code != payload.error_code: + raise LoggedError( + f"Unexpected error code: {e.code}, expected: {payload.error_code}" + ) from e + + if payload.valid(): + with payload_timing.time( + f"engine_forkchoiceUpdatedV{payload.forkchoice_updated_version}" + ): + # Send a forkchoice update to the engine + version = payload.forkchoice_updated_version + logger.info(f"Sending engine_forkchoiceUpdatedV{version}...") + forkchoice_response = engine_rpc.forkchoice_updated( + forkchoice_state=ForkchoiceState( + head_block_hash=payload.params[0].block_hash, + ), + payload_attributes=None, + version=payload.forkchoice_updated_version, + ) + status = forkchoice_response.payload_status.status + logger.info(f"Forkchoice update response: {status}") + if forkchoice_response.payload_status.status != PayloadStatusEnum.VALID: + raise LoggedError( + f"unexpected status: want {PayloadStatusEnum.VALID}," + f" got {forkchoice_response.payload_status.status}" + ) + last_valid_block_hash = payload.params[0].block_hash + + logger.info("All payloads 
processed successfully on client under test.") + + # sync_payload creates the final block that the sync client will sync to + if not fixture.sync_payload: + pytest.fail("Sync tests require a syncPayload that is not present in this test.") + + with timing_data.time("Send sync payload to client under test"): + logger.info("Sending sync payload (empty block) to client under test...") + try: + sync_response = engine_rpc.new_payload( + *fixture.sync_payload.params, + version=fixture.sync_payload.new_payload_version, + ) + logger.info(f"Client sync payload response status: {sync_response.status}") + + if sync_response.status == PayloadStatusEnum.VALID: + # Update forkchoice on client under test to include sync block + forkchoice_response = engine_rpc.forkchoice_updated( + forkchoice_state=ForkchoiceState( + head_block_hash=fixture.sync_payload.params[0].block_hash, + ), + payload_attributes=None, + version=fixture.sync_payload.forkchoice_updated_version, + ) + status = forkchoice_response.payload_status.status + logger.info(f"Client forkchoice update to sync block: {status}") + last_valid_block_hash = fixture.sync_payload.params[0].block_hash + else: + logger.error(f"Sync payload was not valid: {sync_response.status}") + raise LoggedError(f"Sync payload validation failed: {sync_response.status}") + except JSONRPCError as e: + logger.error( + f"Error sending sync payload to client under test: {e.code} - {e.message}" + ) + raise + + # Initialize sync client + with timing_data.time("Initialize sync client"): + logger.info("Initializing sync client with genesis block...") + + # Send initial forkchoice update to sync client + delay = 0.5 + for attempt in range(3): + forkchoice_response = sync_engine_rpc.forkchoice_updated( + forkchoice_state=ForkchoiceState( + head_block_hash=fixture.genesis.block_hash, + ), + payload_attributes=None, + version=fixture.payloads[0].forkchoice_updated_version, + ) + status = forkchoice_response.payload_status.status + logger.info(f"Sync client 
forkchoice update response attempt {attempt + 1}: {status}") + if status != PayloadStatusEnum.SYNCING: + break + if attempt < 2: + time.sleep(delay) + delay *= 2 + + if forkchoice_response.payload_status.status != PayloadStatusEnum.VALID: + logger.error( + f"Sync client failed to initialize properly after 3 attempts, " + f"final status: {forkchoice_response.payload_status.status}" + ) + raise LoggedError( + f"Unexpected status on sync client forkchoice updated to genesis: " + f"{forkchoice_response}" + ) + + # Add peer using admin_addPeer + # This seems to be required... TODO: we can maybe improve flow here if not required + logger.info(f"Adding peer: {client_enode_url}") + assert sync_admin_rpc is not None, "sync_admin_rpc is required" + try: + add_result = sync_admin_rpc.add_peer(client_enode_url) + logger.info(f"admin_addPeer result: {add_result}") + except Exception as e: + raise LoggedError(f"admin_addPeer failed: {e}") from e + + time.sleep(1) # quick sleep to allow for connection - TODO: is this necessary? 
+ + try: + sync_peer_count = sync_net_rpc.peer_count() + client_peer_count = net_rpc.peer_count() + logger.info( + f"Peer count: sync_client={sync_peer_count}, client_under_test={client_peer_count}" + ) + + if sync_peer_count == 0 and client_peer_count == 0: + raise LoggedError("No P2P connection established between clients") + except Exception as e: + logger.warning(f"Could not verify peer connection: {e}") + + # Trigger sync by sending the target block via newPayload followed by forkchoice update + logger.info(f"Triggering sync to block {last_valid_block_hash}") + + # Find the last valid payload to send to sync client + last_valid_payload = None + if fixture.sync_payload and last_valid_block_hash == fixture.sync_payload.params[0].block_hash: + last_valid_payload = fixture.sync_payload + else: + # Find the payload that matches last_valid_block_hash + for payload in fixture.payloads: + if payload.params[0].block_hash == last_valid_block_hash and payload.valid(): + last_valid_payload = payload + break + + if last_valid_payload: + last_valid_block_forkchoice_state = ForkchoiceState( + head_block_hash=last_valid_block_hash, + safe_block_hash=last_valid_block_hash, + finalized_block_hash=fixture.genesis.block_hash, + ) + + try: + version = last_valid_payload.new_payload_version # log version used for debugging + logger.info(f"Sending target payload via engine_newPayloadV{version}") + + # send the payload to sync client + assert sync_engine_rpc is not None, "sync_engine_rpc is required" + sync_payload_response = sync_engine_rpc.new_payload( + *last_valid_payload.params, + version=last_valid_payload.new_payload_version, + ) + logger.info(f"Sync client newPayload response: {sync_payload_response.status}") + + # send forkchoice update pointing to latest block + logger.info("Sending forkchoice update with last valid block to trigger sync...") + sync_forkchoice_response = sync_engine_rpc.forkchoice_updated( + forkchoice_state=last_valid_block_forkchoice_state, + 
payload_attributes=None, + version=last_valid_payload.forkchoice_updated_version, + ) + status = sync_forkchoice_response.payload_status.status + logger.info(f"Sync trigger forkchoice response: {status}") + + if sync_forkchoice_response.payload_status.status == PayloadStatusEnum.SYNCING: + logger.info("Sync client is now syncing!") + elif sync_forkchoice_response.payload_status.status == PayloadStatusEnum.ACCEPTED: + logger.info("Sync client accepted the block, may start syncing ancestors") + + # Give a moment for P2P connections to establish after sync starts + time.sleep(1) + + # Check peer count after triggering sync + # Note: Reth does not actually raise the peer count but doesn't seem + # to need this to sync. + try: + assert sync_net_rpc is not None, "sync_net_rpc is required" + client_peer_count = net_rpc.peer_count() + sync_peer_count = sync_net_rpc.peer_count() + if sync_peer_count > 0 or client_peer_count > 0: + logger.debug( + f"Peers connected: client_under_test={client_peer_count}, " + f"sync_client={sync_peer_count}" + ) + except Exception as e: + logger.debug(f"Could not check peer count: {e}") + + except Exception as e: + logger.warning(f"Failed to trigger sync with newPayload/forkchoice update: {e}") + else: + logger.warning( + f"Could not find payload for block {last_valid_block_hash} to send to sync client" + ) + + # Wait for synchronization with continuous forkchoice updates + with timing_data.time("Wait for synchronization"): + # Get the target block number for logging + target_block = eth_rpc.get_block_by_hash(last_valid_block_hash) + target_block_number = int(target_block["number"], 16) if target_block else "unknown" + logger.info( + f"Waiting for sync client to reach block #{target_block_number} " + f"(hash: {last_valid_block_hash})" + ) + + # Start monitoring sync progress + sync_start_time = time.time() + last_forkchoice_time = time.time() + forkchoice_interval = 2.0 # Send forkchoice updates every 2 seconds + + while time.time() - 
sync_start_time < 10:  # 10 second timeout
+            # Send periodic forkchoice updates to keep sync alive
+            if time.time() - last_forkchoice_time >= forkchoice_interval:
+                try:
+                    # Send forkchoice update to sync client to trigger/maintain sync
+                    assert sync_engine_rpc is not None, "sync_engine_rpc is required"
+                    sync_fc_response = sync_engine_rpc.forkchoice_updated(
+                        forkchoice_state=last_valid_block_forkchoice_state,
+                        payload_attributes=None,
+                        version=fixture.sync_payload.forkchoice_updated_version
+                        if fixture.sync_payload
+                        else fixture.payloads[-1].forkchoice_updated_version,
+                    )
+                    status = sync_fc_response.payload_status.status
+                    logger.debug(f"Periodic forkchoice update status: {status}")
+                    if status == PayloadStatusEnum.VALID:
+                        break
+                    last_forkchoice_time = time.time()
+                except Exception as fc_err:
+                    logger.debug(f"Periodic forkchoice update failed: {fc_err}")
+            time.sleep(0.5)
+        else:
+            raise LoggedError(
+                f"Sync client failed to synchronize to block {last_valid_block_hash} "
+                f"within timeout"
+            )
+
+    # Final verification
+    assert sync_eth_rpc is not None, "sync_eth_rpc is required"
+    assert sync_engine_rpc is not None, "sync_engine_rpc is required"
+    if wait_for_sync(sync_eth_rpc, sync_engine_rpc, last_valid_block_hash, timeout=5):
+        logger.info("Sync verification successful!")
+
+        # Verify the final state
+        sync_block = sync_eth_rpc.get_block_by_hash(last_valid_block_hash)
+        client_block = eth_rpc.get_block_by_hash(last_valid_block_hash)
+
+        if sync_block["stateRoot"] != client_block["stateRoot"]:
+            raise LoggedError(
+                f"State root mismatch after sync. "
+                f"Sync client: {sync_block['stateRoot']}, "
+                f"Client under test: {client_block['stateRoot']}"
+            )
+
+        # Verify post state if available
+        if fixture.post_state_hash:
+            if sync_block["stateRoot"] != str(fixture.post_state_hash):
+                raise LoggedError(
+                    f"Final state root mismatch. 
" + f"Expected: {fixture.post_state_hash}, " + f"Got: {sync_block['stateRoot']}" + ) + else: + raise LoggedError( + f"Sync client failed to synchronize to block {last_valid_block_hash} " + f"within timeout" + ) + + logger.info("Sync test completed successfully!") diff --git a/src/pytest_plugins/consume/simulators/single_test_client.py b/src/pytest_plugins/consume/simulators/single_test_client.py index 6836c94a6fe..b6db7674c95 100644 --- a/src/pytest_plugins/consume/simulators/single_test_client.py +++ b/src/pytest_plugins/consume/simulators/single_test_client.py @@ -38,8 +38,10 @@ def environment( ) -> dict: """Define the environment that hive will start the client with.""" assert fixture.fork in ruleset, f"fork '{fixture.fork}' missing in hive ruleset" + chain_id = str(Number(fixture.config.chain_id)) return { - "HIVE_CHAIN_ID": str(Number(fixture.config.chain_id)), + "HIVE_CHAIN_ID": chain_id, + "HIVE_NETWORK_ID": chain_id, # Use same value for P2P network compatibility "HIVE_FORK_DAO_VOTE": "1", "HIVE_NODETYPE": "full", "HIVE_CHECK_LIVE_PORT": str(check_live_port), @@ -71,6 +73,8 @@ def client( ) -> Generator[Client, None, None]: """Initialize the client with the appropriate files and environment variables.""" logger.info(f"Starting client ({client_type.name})...") + logger.debug(f"Main client Network ID: {environment.get('HIVE_NETWORK_ID', 'NOT SET!')}") + logger.debug(f"Main client Chain ID: {environment.get('HIVE_CHAIN_ID', 'NOT SET!')}") with total_timing_data.time("Start client"): client = hive_test.start_client( client_type=client_type, environment=environment, files=client_files diff --git a/src/pytest_plugins/consume/simulators/sync/__init__.py b/src/pytest_plugins/consume/simulators/sync/__init__.py new file mode 100644 index 00000000000..c9e0ee4b0ba --- /dev/null +++ b/src/pytest_plugins/consume/simulators/sync/__init__.py @@ -0,0 +1 @@ +"""Consume Sync test functions.""" diff --git a/src/pytest_plugins/consume/simulators/sync/conftest.py 
b/src/pytest_plugins/consume/simulators/sync/conftest.py new file mode 100644 index 00000000000..10af1907c2b --- /dev/null +++ b/src/pytest_plugins/consume/simulators/sync/conftest.py @@ -0,0 +1,287 @@ +""" +Pytest fixtures for the `consume sync` simulator. + +Configures the hive back-end & EL clients for each individual test execution. +""" + +import io +import json +from typing import Generator, Mapping, cast + +import pytest +from hive.client import Client, ClientType +from hive.testing import HiveTest + +from ethereum_test_base_types import to_json +from ethereum_test_exceptions import ExceptionMapper +from ethereum_test_fixtures import BlockchainEngineSyncFixture +from ethereum_test_rpc import AdminRPC, EngineRPC, EthRPC, NetRPC + +pytest_plugins = ( + "pytest_plugins.pytest_hive.pytest_hive", + "pytest_plugins.consume.simulators.base", + "pytest_plugins.consume.simulators.single_test_client", + "pytest_plugins.consume.simulators.test_case_description", + "pytest_plugins.consume.simulators.timing_data", + "pytest_plugins.consume.simulators.exceptions", +) + + +def pytest_configure(config): + """Set the supported fixture formats for the engine sync simulator.""" + config._supported_fixture_formats = [BlockchainEngineSyncFixture.format_name] + + +def pytest_generate_tests(metafunc): + """Parametrize sync_client_type separately from client_type.""" + if "sync_client_type" in metafunc.fixturenames: + client_ids = [f"sync_{client.name}" for client in metafunc.config.hive_execution_clients] + metafunc.parametrize( + "sync_client_type", metafunc.config.hive_execution_clients, ids=client_ids + ) + + +@pytest.hookimpl(trylast=True) +def pytest_collection_modifyitems(session, config, items): + """Modify test IDs to show both client and sync client clearly.""" + for item in items: + # Check if this test has both client_type and sync_client_type + if ( + hasattr(item, "callspec") + and "client_type" in item.callspec.params + and "sync_client_type" in item.callspec.params 
+ ): + # Get the client names and remove fork suffix if present + client_name = item.callspec.params["client_type"].name.replace("-", "_") + sync_client_name = item.callspec.params["sync_client_type"].name.replace("-", "_") + + # Format: ``-{client}_sync_{sync_client}`` + new_suffix = f"-{client_name}::sync_{sync_client_name}" + + # client_param-tests/path/to/test.py::test_name[test_params]-sync_client_param + # 1. Remove the client prefix from the beginning + # 2. Replace the -client_param part at the end with our new format + nodeid = item.nodeid + prefix_index = item.nodeid.find("-tests/") + if prefix_index != -1: + nodeid = item.nodeid[prefix_index + 1 :] + + # Find the last hyphen followed by client name pattern and replace + if "-" in nodeid: + # Split by the last hyphen to separate the client suffix + parts = nodeid.rsplit("]-", 1) + assert len(parts) == 2, ( + # expect "..._end_of_test]-client_name" suffix... + f"Unexpected format to parse client name: {nodeid}" + ) + + base = parts[0] + if base.endswith("sync_test"): + # Insert suffix before the closing bracket + base = base + new_suffix + "]" + item._nodeid = base + else: + item._nodeid = base + new_suffix + + +@pytest.fixture(scope="function") +def engine_rpc(client: Client, client_exception_mapper: ExceptionMapper | None) -> EngineRPC: + """Initialize engine RPC client for the execution client under test.""" + if client_exception_mapper: + return EngineRPC( + f"http://{client.ip}:8551", + response_validation_context={ + "exception_mapper": client_exception_mapper, + }, + ) + return EngineRPC(f"http://{client.ip}:8551") + + +@pytest.fixture(scope="function") +def eth_rpc(client: Client) -> EthRPC: + """Initialize eth RPC client for the execution client under test.""" + return EthRPC(f"http://{client.ip}:8545") + + +@pytest.fixture(scope="function") +def net_rpc(client: Client) -> NetRPC: + """Initialize net RPC client for the execution client under test.""" + return NetRPC(f"http://{client.ip}:8545") + + 
+@pytest.fixture(scope="function") +def admin_rpc(client: Client) -> AdminRPC: + """Initialize admin RPC client for the execution client under test.""" + return AdminRPC(f"http://{client.ip}:8545") + + +@pytest.fixture(scope="function") +def sync_genesis(fixture: BlockchainEngineSyncFixture) -> dict: + """Convert the fixture genesis block header and pre-state to a sync client genesis state.""" + genesis = to_json(fixture.genesis) + alloc = to_json(fixture.pre) + # NOTE: nethermind requires account keys without '0x' prefix + genesis["alloc"] = {k.replace("0x", ""): v for k, v in alloc.items()} + return genesis + + +@pytest.fixture(scope="function") +def sync_buffered_genesis(sync_genesis: dict) -> io.BufferedReader: + """Create a buffered reader for the genesis block header of the sync client.""" + genesis_json = json.dumps(sync_genesis) + genesis_bytes = genesis_json.encode("utf-8") + return io.BufferedReader(cast(io.RawIOBase, io.BytesIO(genesis_bytes))) + + +@pytest.fixture(scope="function") +def sync_client_files(sync_buffered_genesis: io.BufferedReader) -> Mapping[str, io.BufferedReader]: + """Define the files that hive will start the sync client with.""" + files = {} + files["/genesis.json"] = sync_buffered_genesis + return files + + +@pytest.fixture(scope="function") +def client_enode_url(client: Client) -> str: + """Get the enode URL from the client under test.""" + import logging + + logger = logging.getLogger(__name__) + + enode = client.enode() + logger.info(f"Client enode object: {enode}") + + # Build the enode URL string with container IP + enode_url = f"enode://{enode.id}@{client.ip}:{enode.port}" + logger.info(f"Client enode URL: {enode_url}") + return enode_url + + +@pytest.fixture(scope="function") +def sync_client( + hive_test: HiveTest, + client: Client, # The main client under test + sync_client_files: dict, + environment: dict, + sync_client_type: ClientType, # Separate parametrization for sync client + client_enode_url: str, # Get the enode URL 
from fixture +) -> Generator[Client, None, None]: + """Start a sync client that will sync from the client under test.""" + import logging + + logger = logging.getLogger(__name__) + logger.info(f"Starting sync client setup for {sync_client_type.name}") + + # Start with the same environment as the main client + sync_environment = environment.copy() + + # Only override what's necessary for sync client + sync_environment["HIVE_MINER"] = "" # Disable mining on sync client + + # Set bootnode even though we also use admin_addPeer + # Some clients use this for initial P2P configuration + sync_environment["HIVE_BOOTNODE"] = client_enode_url + + # Ensure both network and chain IDs are properly set + if "HIVE_NETWORK_ID" not in sync_environment and "HIVE_CHAIN_ID" in sync_environment: + # Some clients need explicit HIVE_NETWORK_ID + sync_environment["HIVE_NETWORK_ID"] = sync_environment["HIVE_CHAIN_ID"] + + logger.info(f"Starting sync client ({sync_client_type.name})") + logger.info(f" Network ID: {sync_environment.get('HIVE_NETWORK_ID', 'NOT SET!')}") + logger.info(f" Chain ID: {sync_environment.get('HIVE_CHAIN_ID', 'NOT SET!')}") + + # Debug: log all HIVE_ variables + hive_vars = {k: v for k, v in sync_environment.items() if k.startswith("HIVE_")} + logger.debug(f"All HIVE_ environment variables: {hive_vars}") + + # Use the separately parametrized sync client type + sync_client = hive_test.start_client( + client_type=sync_client_type, + environment=sync_environment, + files=sync_client_files, + ) + + error_message = ( + f"Unable to start sync client ({sync_client_type.name}) via Hive. " + "Check the client or Hive server logs for more information." 
+ ) + assert sync_client is not None, error_message + + logger.info(f"Sync client ({sync_client_type.name}) started with IP: {sync_client.ip}") + + yield sync_client + + # Cleanup + sync_client.stop() + + +@pytest.fixture(scope="function") +def sync_client_exception_mapper( + sync_client_type: ClientType, client_exception_mapper_cache +) -> ExceptionMapper | None: + """Return the exception mapper for the sync client type, with caching.""" + if sync_client_type.name not in client_exception_mapper_cache: + from ..exceptions import EXCEPTION_MAPPERS + + for client in EXCEPTION_MAPPERS: + if client in sync_client_type.name: + client_exception_mapper_cache[sync_client_type.name] = EXCEPTION_MAPPERS[client] + break + else: + client_exception_mapper_cache[sync_client_type.name] = None + + return client_exception_mapper_cache[sync_client_type.name] + + +@pytest.fixture(scope="function") +def sync_engine_rpc( + sync_client: Client, sync_client_exception_mapper: ExceptionMapper | None +) -> EngineRPC: + """Initialize engine RPC client for the sync client.""" + if sync_client_exception_mapper: + return EngineRPC( + f"http://{sync_client.ip}:8551", + response_validation_context={ + "exception_mapper": sync_client_exception_mapper, + }, + ) + return EngineRPC(f"http://{sync_client.ip}:8551") + + +@pytest.fixture(scope="function") +def sync_eth_rpc(sync_client: Client) -> EthRPC: + """Initialize eth RPC client for the sync client.""" + return EthRPC(f"http://{sync_client.ip}:8545") + + +@pytest.fixture(scope="function") +def sync_net_rpc(sync_client: Client) -> NetRPC: + """Initialize net RPC client for the sync client.""" + return NetRPC(f"http://{sync_client.ip}:8545") + + +@pytest.fixture(scope="function") +def sync_admin_rpc(sync_client: Client) -> AdminRPC: + """Initialize admin RPC client for the sync client.""" + return AdminRPC(f"http://{sync_client.ip}:8545") + + +@pytest.fixture(scope="module") +def test_suite_name() -> str: + """The name of the hive test suite used in 
this simulator.""" + return "eest/consume-sync" + + +@pytest.fixture(scope="module") +def test_suite_description() -> str: + """The description of the hive test suite used in this simulator.""" + return "Execute blockchain sync tests against clients using the Engine API." + + +@pytest.fixture(scope="function") +def client_files(buffered_genesis: io.BufferedReader) -> Mapping[str, io.BufferedReader]: + """Define the files that hive will start the client with.""" + files = {} + files["/genesis.json"] = buffered_genesis + return files diff --git a/src/pytest_plugins/filler/filler.py b/src/pytest_plugins/filler/filler.py index 0414c833015..a9faa5b658d 100644 --- a/src/pytest_plugins/filler/filler.py +++ b/src/pytest_plugins/filler/filler.py @@ -586,6 +586,7 @@ def pytest_configure(config): called before the pytest-html plugin's pytest_configure to ensure that it uses the modified `htmlpath` option. """ + # Register custom markers # Modify the block gas limit if specified. if config.getoption("block_gas_limit"): EnvironmentDefaults.gas_limit = config.getoption("block_gas_limit") @@ -802,6 +803,7 @@ def pytest_runtest_makereport(item, call): "state_test", "blockchain_test", "blockchain_test_engine", + "blockchain_test_sync", ]: report.user_properties.append(("evm_dump_dir", item.config.evm_dump_dir)) else: @@ -1312,6 +1314,7 @@ def pytest_collection_modifyitems( if not fixture_format.supports_fork(fork): items_for_removal.append(i) continue + markers = list(item.iter_markers()) # Both the fixture format itself and the spec filling it have a chance to veto the # filling of a specific format. 
diff --git a/src/pytest_plugins/filler/tests/test_verify_sync_marker.py b/src/pytest_plugins/filler/tests/test_verify_sync_marker.py new file mode 100644 index 00000000000..2ecc167d621 --- /dev/null +++ b/src/pytest_plugins/filler/tests/test_verify_sync_marker.py @@ -0,0 +1,124 @@ +"""Test blockchain sync fixture generation with verify_sync pytest marker.""" + +import textwrap + +from ethereum_clis import TransitionTool + +test_module_with_verify_sync = textwrap.dedent( + """\ + import pytest + from ethereum_test_tools import ( + Account, + BlockException, + Block, + Environment, + Header, + TestAddress, + Transaction, + ) + + TEST_ADDRESS = Account(balance=1_000_000) + + @pytest.mark.valid_at("Cancun") + def test_verify_sync_default(blockchain_test): + blockchain_test( + pre={TestAddress: TEST_ADDRESS}, + post={}, + blocks=[Block(txs=[Transaction()])] + ) + + + @pytest.mark.valid_at("Cancun") + @pytest.mark.verify_sync + def test_verify_sync_with_marker(blockchain_test): + blockchain_test( + pre={TestAddress: TEST_ADDRESS}, + post={}, + blocks=[Block(txs=[Transaction()])] + ) + + @pytest.mark.valid_at("Cancun") + @pytest.mark.parametrize( + "has_exception", + [ + pytest.param(False, id="no_exception", marks=pytest.mark.verify_sync), + pytest.param( + True, id="with_exception", marks=pytest.mark.exception_test + ), + ] + ) + def test_verify_sync_with_param_marks(blockchain_test, has_exception): + blockchain_test( + pre={TestAddress: TEST_ADDRESS}, + post={}, + blocks=[ + Block( + txs=[Transaction()], + rlp_modifier=Header(gas_limit=0) if has_exception else None, + exception=BlockException.INCORRECT_BLOCK_FORMAT if has_exception else None, + ) + ], + ) + + """ +) + + +def test_verify_sync_marker( + pytester, + default_t8n: TransitionTool, +): + """ + Test blockchain sync fixture generation with verify_sync marker. 
+ + The test module has 3 test functions (4 test cases with parametrization): + - test_verify_sync_default: generates all formats except sync (no verify_sync marker) + - test_verify_sync_with_marker: generates all formats including sync (has verify_sync marker) + - test_verify_sync_with_param_marks: tests parametrized marks with verify_sync (2 cases) + + Each test generates fixture formats: + - BlockchainFixture (always) + - BlockchainEngineFixture (always) + - BlockchainEngineSyncFixture (only when marked with verify_sync marker) + + Expected outcomes: + - 4 test cases total + - Each generates BlockchainFixture (4) and BlockchainEngineFixture (4) = 8 fixtures + - Sync fixtures: + - test_verify_sync_with_marker: 1 sync fixture ✓ + - test_verify_sync_with_param_marks[no_exception]: 1 sync fixture ✓ + - Total sync fixtures: 2 + - Not generated (due to exception_test marker): + - test_verify_sync_with_param_marks[with_exception]: sync fixture not generated + + Final counts: + - Passed: 8 (base fixtures) + 2 (sync fixtures) = 10 passed + - Skipped: 0 skipped + - Failed: 0 failed + """ + # Create proper directory structure for tests + tests_dir = pytester.mkdir("tests") + cancun_tests_dir = tests_dir / "cancun" + cancun_tests_dir.mkdir() + verify_sync_test_dir = cancun_tests_dir / "verify_sync_test_module" + verify_sync_test_dir.mkdir() + test_module = verify_sync_test_dir / "test_verify_sync_marker.py" + test_module.write_text(test_module_with_verify_sync) + + pytester.copy_example(name="src/cli/pytest_commands/pytest_ini_files/pytest-fill.ini") + + # Add the test directory to the arguments + args = [ + "-c", + "pytest-fill.ini", + "-v", + "--no-html", + "--t8n-server-url", + default_t8n.server_url, + "tests/cancun/verify_sync_test_module/", + ] + + expected_outcomes = {"passed": 10, "failed": 0, "skipped": 0, "errors": 0} + + result = pytester.runpytest(*args) + result.assert_outcomes(**expected_outcomes) diff --git 
a/tests/osaka/eip7934_block_rlp_limit/test_max_block_rlp_size.py b/tests/osaka/eip7934_block_rlp_limit/test_max_block_rlp_size.py index 48b74f58d0a..fc90e59bde3 100644 --- a/tests/osaka/eip7934_block_rlp_limit/test_max_block_rlp_size.py +++ b/tests/osaka/eip7934_block_rlp_limit/test_max_block_rlp_size.py @@ -384,8 +384,8 @@ def _exact_size_transactions_impl( @pytest.mark.parametrize( "delta", [ - pytest.param(-1, id="max_rlp_size_minus_1_byte"), - pytest.param(0, id="max_rlp_size"), + pytest.param(-1, id="max_rlp_size_minus_1_byte", marks=pytest.mark.verify_sync), + pytest.param(0, id="max_rlp_size", marks=pytest.mark.verify_sync), pytest.param(1, id="max_rlp_size_plus_1_byte", marks=pytest.mark.exception_test), ], ) @@ -437,11 +437,11 @@ def test_block_at_rlp_size_limit_boundary( pre=pre, post=post, blocks=[block], - verify_sync=False if delta > 0 else True, ) @pytest.mark.with_all_typed_transactions +@pytest.mark.verify_sync def test_block_rlp_size_at_limit_with_all_typed_transactions( blockchain_test: BlockchainTestFiller, pre: Alloc, @@ -451,7 +451,6 @@ def test_block_rlp_size_at_limit_with_all_typed_transactions( block_size_limit: int, env: Environment, typed_transaction: Transaction, - request: pytest.FixtureRequest, ) -> None: """Test the block RLP size limit with all transaction types.""" transactions, gas_used = exact_size_transactions( @@ -477,10 +477,10 @@ def test_block_rlp_size_at_limit_with_all_typed_transactions( pre=pre, post=post, blocks=[block], - verify_sync=True, ) +@pytest.mark.verify_sync def test_block_at_rlp_limit_with_logs( blockchain_test: BlockchainTestFiller, pre: Alloc, @@ -515,5 +515,4 @@ def test_block_at_rlp_limit_with_logs( pre=pre, post=post, blocks=[block], - verify_sync=True, ) From 00889e592766d01772bfa64cdddb252fdc200d34 Mon Sep 17 00:00:00 2001 From: fselmo Date: Mon, 11 Aug 2025 16:23:45 -0600 Subject: [PATCH 2/2] Additional changes from last comments on PR #2007 - Add marker to `pytest_configure` as an official marker and to
silence warnings. - Define a `BlockchainEngineSyncFixture.discard_fixture_format_by_marks` and specify `verify_sync` marker presence only for `BlockchainEngineSyncFixture`. --- src/ethereum_test_fixtures/blockchain.py | 11 +++++++++++ src/ethereum_test_specs/blockchain.py | 2 -- src/pytest_plugins/shared/execute_fill.py | 5 +++++ 3 files changed, 16 insertions(+), 2 deletions(-) diff --git a/src/ethereum_test_fixtures/blockchain.py b/src/ethereum_test_fixtures/blockchain.py index b9b915921b9..3cd0b848d17 100644 --- a/src/ethereum_test_fixtures/blockchain.py +++ b/src/ethereum_test_fixtures/blockchain.py @@ -16,6 +16,7 @@ ) import ethereum_rlp as eth_rlp +import pytest from ethereum_types.numeric import Uint from pydantic import AliasChoices, Field, PlainSerializer, computed_field, model_validator @@ -586,3 +587,13 @@ class BlockchainEngineSyncFixture(BlockchainEngineFixture): "Tests that generate a blockchain test fixture for Engine API testing with client sync." ) sync_payload: FixtureEngineNewPayload | None = None + + @classmethod + def discard_fixture_format_by_marks( + cls, + fork: Fork, + markers: List[pytest.Mark], + ) -> bool: + """Discard the fixture format based on the provided markers.""" + marker_names = [m.name for m in markers] + return "verify_sync" not in marker_names diff --git a/src/ethereum_test_specs/blockchain.py b/src/ethereum_test_specs/blockchain.py index 4a4919a2b4f..b87528fb430 100644 --- a/src/ethereum_test_specs/blockchain.py +++ b/src/ethereum_test_specs/blockchain.py @@ -446,8 +446,6 @@ def discard_fixture_format_by_marks( and "blockchain_test_engine_only" in marker_names ): return True - if fixture_format == BlockchainEngineSyncFixture and "verify_sync" not in marker_names: - return True return False def get_genesis_environment(self, fork: Fork) -> Environment: diff --git a/src/pytest_plugins/shared/execute_fill.py b/src/pytest_plugins/shared/execute_fill.py index d928ffb5d6f..35c1a81a93e 100644 --- 
a/src/pytest_plugins/shared/execute_fill.py +++ b/src/pytest_plugins/shared/execute_fill.py @@ -132,6 +132,11 @@ def pytest_configure(config: pytest.Config): "addresses for static tests at fill time. Untagged tests are incompatible with " "dynamic address generation.", ) + config.addinivalue_line( + "markers", + "verify_sync: Marks a test to be run with `consume sync`, verifying blockchain " + "engine tests and having hive clients sync after payload execution.", + ) @pytest.fixture(scope="function")