From e0a2b67f04f1b8426195953eb9515c59338db603 Mon Sep 17 00:00:00 2001 From: Felix H Date: Thu, 17 Jul 2025 12:46:55 +0000 Subject: [PATCH 1/7] added bpo config + function to retrieve from it + unit test for that function --- .../composite_types.py | 38 ++++++++++-- src/ethereum_test_types/blob_bpo_config.json | 37 +++++++++++ src/ethereum_test_types/blob_types.py | 46 ++++++++++++++ .../tests/test_blob_types.py | 29 ++++++++- tests/osaka/eip7892_bpo/__init__.py | 1 + tests/osaka/eip7892_bpo/conftest.py | 1 + tests/osaka/eip7892_bpo/spec.py | 24 ++++++++ tests/osaka/eip7892_bpo/test_bpo.py | 61 +++++++++++++++++++ 8 files changed, 232 insertions(+), 5 deletions(-) create mode 100644 src/ethereum_test_types/blob_bpo_config.json create mode 100644 tests/osaka/eip7892_bpo/__init__.py create mode 100644 tests/osaka/eip7892_bpo/conftest.py create mode 100644 tests/osaka/eip7892_bpo/spec.py create mode 100644 tests/osaka/eip7892_bpo/test_bpo.py diff --git a/src/ethereum_test_base_types/composite_types.py b/src/ethereum_test_base_types/composite_types.py index 035bfb2a88e..2cc84d5b97c 100644 --- a/src/ethereum_test_base_types/composite_types.py +++ b/src/ethereum_test_base_types/composite_types.py @@ -3,7 +3,7 @@ from dataclasses import dataclass from typing import Any, ClassVar, Dict, List, SupportsBytes, Type, TypeAlias -from pydantic import Field, PrivateAttr, TypeAdapter +from pydantic import BaseModel, Field, PrivateAttr, TypeAdapter from .base_types import Address, Bytes, Hash, HashInt, HexNumber, ZeroPaddedHexNumber from .conversions import BytesConvertible, NumberConvertible @@ -487,7 +487,7 @@ class ForkBlobSchedule(CamelModel): class BlobSchedule(EthereumTestRootModel[Dict[str, ForkBlobSchedule]]): - """Blob schedule configuration dictionary.""" + """Blob schedule configuration dictionary. 
Key is fork name.""" root: Dict[str, ForkBlobSchedule] = Field(default_factory=dict, validate_default=True) @@ -503,6 +503,36 @@ def last(self) -> ForkBlobSchedule | None: return None return list(self.root.values())[-1] - def __getitem__(self, key: str) -> ForkBlobSchedule: + def __getitem__(self, key: str) -> ForkBlobSchedule | None: """Return the schedule for a given fork.""" - return self.root[key] + return self.root.get(key) + + +class TimestampBlobSchedule(BaseModel): + """ + Contains a list of dictionaries. Each dictionary is a scheduled BPO fork. + Each dictionary's key is the activation timestamp and the values are a ForkBlobSchedule + object with the fields max, target and base_fee_update_fraction. + """ + + root: List[Dict[int, ForkBlobSchedule]] = Field(default_factory=list, validate_default=True) + + @classmethod + def add_schedule(cls, activation_timestamp: int, schedule: ForkBlobSchedule): + """Add a schedule to the schedule list.""" + assert activation_timestamp > -1 + assert schedule.max_blobs_per_block > 0 + assert schedule.base_fee_update_fraction > 0 + assert schedule.target_blobs_per_block > 0 + + # ensure that the timestamp of each scheduled bpo fork is unique + existing_keys: set = set() + for d in cls.root: + existing_keys.update(d.keys()) + assert activation_timestamp not in existing_keys, ( + f"No duplicate activation forks allowed: Timestamp {activation_timestamp} already " + "exists in the current schedule." 
+ ) + + # add a scheduled bpo fork + cls.root.append({activation_timestamp: schedule}) diff --git a/src/ethereum_test_types/blob_bpo_config.json b/src/ethereum_test_types/blob_bpo_config.json new file mode 100644 index 00000000000..6d311a6bf3e --- /dev/null +++ b/src/ethereum_test_types/blob_bpo_config.json @@ -0,0 +1,37 @@ +{ + "blobSchedule": { + "cancun": { + "target": 3, + "max": 6, + "baseFeeUpdateFraction": 3338477 + }, + "prague": { + "target": 6, + "max": 9, + "baseFeeUpdateFraction": 5007716 + }, + "osaka": { + "target": 6, + "max": 9, + "maxBlobsPerTx": 6, + "baseFeeUpdateFraction": 5007716 + }, + "bpo1": { + "target": 12, + "max": 16, + "maxBlobsPerTx": 12, + "baseFeeUpdateFraction": 5007716 + }, + "bpo2": { + "target": 16, + "max": 24, + "maxBlobsPerTx": 12, + "baseFeeUpdateFraction": 5007716 + } + }, +"cancunTime": 0, +"pragueTime": 0, +"osakaTime": 1747387400, +"bpo1Time": 1757387400, +"bpo2Time": 1767387784 +} \ No newline at end of file diff --git a/src/ethereum_test_types/blob_types.py b/src/ethereum_test_types/blob_types.py index 4fb9f3adc1b..9ae2cc3730e 100644 --- a/src/ethereum_test_types/blob_types.py +++ b/src/ethereum_test_types/blob_types.py @@ -1,5 +1,6 @@ """Blob-related types for Ethereum tests.""" +import json import random from enum import Enum from hashlib import sha256 @@ -22,6 +23,51 @@ logger = get_logger(__name__) +class BPO_Parameters(Enum): # noqa: N801 + """Define BPO keys for IDE autocomplete.""" + + TARGET = "target" + MAX = "max" + BASE_FEE_UPDATE_FRACTION = "baseFeeUpdateFraction" + TIME = "Time" # actually it is: Time + + +def bpo_get_value(bpo_fork: str, bpo_parameter: BPO_Parameters) -> int: # noqa: D417 + """ + Retrieve BPO values from the JSON config. + + Arguments: + - bpo_fork: Any fork (e.g. cancun) or bpo forks (e.g. bpo1 or bpo2) + - bpo_parameter: Enum value that specifies what you want to access in the bpo config + + Returns the retrieved int. 
+ + """ + # ensure the bpo config exists and can be read + bpo_config_path = Path("src") / "ethereum_test_types" / "blob_bpo_config.json" + if not bpo_config_path.exists(): + raise FileNotFoundError(f"Failed to find BPO config json: {bpo_config_path}") + with open(bpo_config_path, "r") as file: + bpo_config = json.load(file) + + # force-lowercase the provided fork + bpo_fork = bpo_fork.lower() + + # retrieve requested value + if bpo_parameter == BPO_Parameters.TARGET: + return bpo_config["blobSchedule"][bpo_fork][BPO_Parameters.TARGET.value] + elif bpo_parameter == BPO_Parameters.MAX: + return bpo_config["blobSchedule"][bpo_fork][BPO_Parameters.MAX.value] + elif bpo_parameter == BPO_Parameters.BASE_FEE_UPDATE_FRACTION: + return bpo_config["blobSchedule"][bpo_fork][BPO_Parameters.BASE_FEE_UPDATE_FRACTION.value] + elif bpo_parameter == BPO_Parameters.TIME: + return bpo_config[bpo_fork + BPO_Parameters.TIME.value] + + raise NotImplementedError( + f"This function has not yet been updated to handle BPO Parameter: {bpo_parameter}" + ) + + def clear_blob_cache(cached_blobs_folder_path: Path): """Delete all cached blobs.""" if not cached_blobs_folder_path.is_dir(): diff --git a/src/ethereum_test_types/tests/test_blob_types.py b/src/ethereum_test_types/tests/test_blob_types.py index d3f1be5daa5..cb47add924d 100644 --- a/src/ethereum_test_types/tests/test_blob_types.py +++ b/src/ethereum_test_types/tests/test_blob_types.py @@ -15,7 +15,13 @@ ShanghaiToCancunAtTime15k, ) -from ..blob_types import CACHED_BLOBS_DIRECTORY, Blob, clear_blob_cache +from ..blob_types import ( + CACHED_BLOBS_DIRECTORY, + Blob, + BPO_Parameters, + bpo_get_value, + clear_blob_cache, +) @pytest.mark.parametrize("seed", [0, 10, 100]) @@ -109,3 +115,24 @@ def test_transition_fork_blobs( f"Transition fork failure! 
Fork {fork.name()} at timestamp: {timestamp} should have " f"transitioned to {post_transition_fork_at_15k.name()} but is still at {b.fork.name()}" ) + + +@pytest.mark.parametrize("bpo_fork", ["cancun", "prague", "osaka", "bpo1", "bpo2"]) +@pytest.mark.parametrize( + "bpo_parameter", + [ + BPO_Parameters.TARGET, + BPO_Parameters.MAX, + BPO_Parameters.BASE_FEE_UPDATE_FRACTION, + BPO_Parameters.TIME, + ], +) +def test_bpo_parameter_lookup(bpo_fork, bpo_parameter): + """Tries looking up different values from the BPO configuration json.""" + result = bpo_get_value(bpo_fork=bpo_fork, bpo_parameter=bpo_parameter) + print( + f"\nbpo_fork: {bpo_fork}\n" + f"bpo_parameter: {bpo_parameter}\n" + f"retrieved value from config: {result}\n" + ) + # TODO: when one day actual bpo_config is known assert correction of retrieved values diff --git a/tests/osaka/eip7892_bpo/__init__.py b/tests/osaka/eip7892_bpo/__init__.py new file mode 100644 index 00000000000..a2a1ca022b0 --- /dev/null +++ b/tests/osaka/eip7892_bpo/__init__.py @@ -0,0 +1 @@ +"""EIP-7892 Tests.""" diff --git a/tests/osaka/eip7892_bpo/conftest.py b/tests/osaka/eip7892_bpo/conftest.py new file mode 100644 index 00000000000..e65011432ec --- /dev/null +++ b/tests/osaka/eip7892_bpo/conftest.py @@ -0,0 +1 @@ +"""Pytest (plugin) definitions local to EIP-7892 tests.""" diff --git a/tests/osaka/eip7892_bpo/spec.py b/tests/osaka/eip7892_bpo/spec.py new file mode 100644 index 00000000000..fb650ba6d8c --- /dev/null +++ b/tests/osaka/eip7892_bpo/spec.py @@ -0,0 +1,24 @@ +"""Defines EIP-7892 specification constants and functions.""" + +from dataclasses import dataclass + +# Base the spec on EIP-4844 which EIP-7892 extends +from ...cancun.eip4844_blobs.spec import Spec as EIP4844Spec + + +@dataclass(frozen=True) +class ReferenceSpec: + """Defines the reference spec version and git path.""" + + git_path: str + version: str + + +ref_spec_7892 = ReferenceSpec("EIPS/eip-7892.md", "e42c14f83052bfaa8c38832dcbc46e357dd1a1d9") + + 
+@dataclass(frozen=True) +class Spec(EIP4844Spec): + """Parameters from the EIP-7892 specifications.""" + + pass diff --git a/tests/osaka/eip7892_bpo/test_bpo.py b/tests/osaka/eip7892_bpo/test_bpo.py new file mode 100644 index 00000000000..7adbe78a1ff --- /dev/null +++ b/tests/osaka/eip7892_bpo/test_bpo.py @@ -0,0 +1,61 @@ +"""abstract: Test [EIP-7892: Blob Parameter Only Hardforks](https://eips.ethereum.org/EIPS/eip-7892).""" + +import pytest + +from ethereum_test_base_types.composite_types import ForkBlobSchedule, TimestampBlobSchedule +from ethereum_test_forks import Fork +from ethereum_test_tools import ( + Alloc, + Block, + BlockchainTestFiller, +) +from ethereum_test_types import Environment + +from .spec import ref_spec_7892 # type: ignore + +REFERENCE_SPEC_GIT_PATH = ref_spec_7892.git_path +REFERENCE_SPEC_VERSION = ref_spec_7892.version + + +@pytest.mark.valid_from("Osaka") +def test_bpo_schedule( + blockchain_test: BlockchainTestFiller, + pre: Alloc, + post: Alloc, + env: Environment, + fork: Fork, +): + """Test whether clients correctly set provided BPO schedules.""" + bpo_schedule = TimestampBlobSchedule() + # below ensure that there is a timestamp difference of at least 3 between each scheduled fork + bpo_schedule.add_schedule( + 1234, ForkBlobSchedule(max=6, target=5, base_fee_update_fraction=5007716) + ) + bpo_schedule.add_schedule( + 2345, ForkBlobSchedule(max=4, target=3, base_fee_update_fraction=5007716) + ) + + blocks = [] + for schedule_dict in bpo_schedule.root: + for t in schedule_dict: + # add block before bpo + blocks.append(Block(timestamp=t - 1)) + # add block at bpo + blocks.append(Block(timestamp=t)) + # add block after bpo + blocks.append(Block(timestamp=t + 1)) + + # amount of created blocks = 3 * len(bpo_schedule.root) + assert len(blocks) == 3 * len(bpo_schedule.root) + + # TODO: + # for each block the client should report the current values of: max, target and base_fee_update_fraction # noqa: E501 + # we need to signal to the client 
that the expected response is according to the bpo_schedule defined above # noqa: E501 + + blockchain_test( + genesis_environment=env, + pre=pre, + post=post, + blocks=blocks, + bpo_schedule=bpo_schedule, + ) From c1fe28d9f772ab1732c339f1cd9895bb8e164cdc Mon Sep 17 00:00:00 2001 From: Felix H Date: Tue, 29 Jul 2025 08:43:37 +0000 Subject: [PATCH 2/7] added bpo test, still WIP --- .../composite_types.py | 10 +- src/ethereum_test_base_types/conversions.py | 6 +- src/pytest_plugins/eels_resolutions.json | 8 +- tests/osaka/eip7594_peerdas/spec.py | 3 +- tests/osaka/eip7892_bpo/test_bpo.py | 116 ++++++++++++++---- 5 files changed, 109 insertions(+), 34 deletions(-) diff --git a/src/ethereum_test_base_types/composite_types.py b/src/ethereum_test_base_types/composite_types.py index 2cc84d5b97c..76b343157b9 100644 --- a/src/ethereum_test_base_types/composite_types.py +++ b/src/ethereum_test_base_types/composite_types.py @@ -517,17 +517,19 @@ class TimestampBlobSchedule(BaseModel): root: List[Dict[int, ForkBlobSchedule]] = Field(default_factory=list, validate_default=True) - @classmethod - def add_schedule(cls, activation_timestamp: int, schedule: ForkBlobSchedule): + def add_schedule(self, activation_timestamp: int, schedule: ForkBlobSchedule): """Add a schedule to the schedule list.""" assert activation_timestamp > -1 assert schedule.max_blobs_per_block > 0 assert schedule.base_fee_update_fraction > 0 assert schedule.target_blobs_per_block > 0 + if self.root is None: + self.root = [] + # ensure that the timestamp of each scheduled bpo fork is unique existing_keys: set = set() - for d in cls.root: + for d in self.root: existing_keys.update(d.keys()) assert activation_timestamp not in existing_keys, ( f"No duplicate activation forks allowed: Timestamp {activation_timestamp} already " @@ -535,4 +537,4 @@ def add_schedule(cls, activation_timestamp: int, schedule: ForkBlobSchedule): ) # add a scheduled bpo fork - cls.root.append({activation_timestamp: schedule}) + 
self.root.append({activation_timestamp: schedule}) diff --git a/src/ethereum_test_base_types/conversions.py b/src/ethereum_test_base_types/conversions.py index e99743ba241..6c7c7008730 100644 --- a/src/ethereum_test_base_types/conversions.py +++ b/src/ethereum_test_base_types/conversions.py @@ -100,4 +100,8 @@ def to_number(input_number: NumberConvertible) -> int: return int(input_number, 0) if isinstance(input_number, bytes) or isinstance(input_number, SupportsBytes): return int.from_bytes(input_number, byteorder="big") - raise Exception("invalid type for `number`") + + raise Exception( + f"Invalid type for `number`. Got {type(input_number)} but expected int, str or bytes!\n" + f"Value of `number` you passed: {input_number}" + ) diff --git a/src/pytest_plugins/eels_resolutions.json b/src/pytest_plugins/eels_resolutions.json index 81b4f9208c4..53547608817 100644 --- a/src/pytest_plugins/eels_resolutions.json +++ b/src/pytest_plugins/eels_resolutions.json @@ -1,8 +1,8 @@ { "EELSMaster": { "git_url": "https://github.com/ethereum/execution-specs.git", - "branch": "master", - "commit": "e13d33ab21ef1fdd2f073c96a3346e23eb7727f6" + "branch": "forks/osaka", + "commit": "5a49b2f39a909be6a8c84bb70611febdc2b2fd98" }, "Frontier": { "same_as": "EELSMaster" @@ -38,8 +38,6 @@ "same_as": "EELSMaster" }, "Osaka": { - "git_url": "https://github.com/spencer-tb/execution-specs.git", - "branch": "forks/osaka", - "commit": "07699170182691533023fa5d83086258c3edcfd3" + "same_as": "EELSMaster" } } diff --git a/tests/osaka/eip7594_peerdas/spec.py b/tests/osaka/eip7594_peerdas/spec.py index 050da14ac62..65fdf66d8b5 100644 --- a/tests/osaka/eip7594_peerdas/spec.py +++ b/tests/osaka/eip7594_peerdas/spec.py @@ -21,4 +21,5 @@ class Spec: https://eips.ethereum.org/EIPS/eip-7594. 
""" - pass + MAX_BLOBS_PER_TX = 6 + BLOB_COMMITMENT_VERSION_KZG = 1 diff --git a/tests/osaka/eip7892_bpo/test_bpo.py b/tests/osaka/eip7892_bpo/test_bpo.py index 7adbe78a1ff..a8071bab239 100644 --- a/tests/osaka/eip7892_bpo/test_bpo.py +++ b/tests/osaka/eip7892_bpo/test_bpo.py @@ -2,6 +2,7 @@ import pytest +from ethereum_test_base_types.base_types import Address, Hash from ethereum_test_base_types.composite_types import ForkBlobSchedule, TimestampBlobSchedule from ethereum_test_forks import Fork from ethereum_test_tools import ( @@ -10,6 +11,9 @@ BlockchainTestFiller, ) from ethereum_test_types import Environment +from ethereum_test_types.helpers import add_kzg_version +from ethereum_test_types.transaction_types import Transaction +from tests.osaka.eip7594_peerdas.spec import Spec from .spec import ref_spec_7892 # type: ignore @@ -17,45 +21,111 @@ REFERENCE_SPEC_VERSION = ref_spec_7892.version +# def min_base_fee_per_blob_gas_bpo(block_number: int = 0, timestamp: int = 0) -> int: +# """Return the minimum base fee per blob gas for BPO fork.""" +# return 1 + + +# def blob_gas_price_calculator_bpo( +# fork_blob_schedule: ForkBlobSchedule, +# block_number: int = 0, +# timestamp: int = 0, +# ) -> BlobGasPriceCalculator: +# """Return a callable that calculates the blob gas price at Cancun.""" +# fake_exponential( +# factor=min_base_fee_per_blob_gas_bpo(block_number=block_number, timestamp=timestamp), +# numerator=excess_blob_gas, +# denominator=fork_blob_schedule.base_fee_update_fraction, +# ) + + +@pytest.fixture +def bpo_schedule() -> TimestampBlobSchedule: + bpo_schedule = TimestampBlobSchedule() + # below ensure that there is a timestamp difference of at least 3 between each scheduled fork + bpo_schedule.add_schedule( + 20000, ForkBlobSchedule(max=6, target_blobs_per_block=5, base_fee_update_fraction=5007716) + ) + bpo_schedule.add_schedule( + 21000, ForkBlobSchedule(max=8, target_blobs_per_block=7, base_fee_update_fraction=5555555) + ) + bpo_schedule.add_schedule( + 
22000, ForkBlobSchedule(max=4, target_blobs_per_block=3, base_fee_update_fraction=4444444) + ) + return bpo_schedule + + +def tx( + sender: Address, + fork_blob_schedule: ForkBlobSchedule, +) -> Transaction: + """Blob transaction fixture.""" + # calculator = blob_gas_price_calculator_bpo(fork_blob_schedule=fork_blob_schedule) + # max_fee_per_blob_gas = calculator() + + return Transaction( + ty=3, + sender=sender, + value=1, + gas_limit=21_000, + max_fee_per_gas=10, + max_priority_fee_per_gas=1, + max_fee_per_blob_gas=999_999_999_999, # idk how to use the excess_blob_gas fixtures with bpo which is not a fork in our modelling # noqa: E501 + access_list=[], + blob_versioned_hashes=add_kzg_version( + [Hash(i) for i in range(0, fork_blob_schedule.max_blobs_per_block)], + Spec.BLOB_COMMITMENT_VERSION_KZG, + ), + ) + + @pytest.mark.valid_from("Osaka") def test_bpo_schedule( blockchain_test: BlockchainTestFiller, pre: Alloc, - post: Alloc, env: Environment, fork: Fork, + sender: Address, + bpo_schedule: TimestampBlobSchedule, ): """Test whether clients correctly set provided BPO schedules.""" - bpo_schedule = TimestampBlobSchedule() - # below ensure that there is a timestamp difference of at least 3 between each scheduled fork - bpo_schedule.add_schedule( - 1234, ForkBlobSchedule(max=6, target=5, base_fee_update_fraction=5007716) - ) - bpo_schedule.add_schedule( - 2345, ForkBlobSchedule(max=4, target=3, base_fee_update_fraction=5007716) - ) - blocks = [] - for schedule_dict in bpo_schedule.root: - for t in schedule_dict: - # add block before bpo - blocks.append(Block(timestamp=t - 1)) - # add block at bpo - blocks.append(Block(timestamp=t)) - # add block after bpo - blocks.append(Block(timestamp=t + 1)) + for schedule_dict in bpo_schedule.root: # each schedule_dict is Dict[int, ForkBlobSchedule] where int is the timestamp # noqa: E501 + timestamp = next(iter(schedule_dict)) # first key is timestamp + fork_blob_schedule: ForkBlobSchedule | None = 
schedule_dict.get(timestamp) + assert fork_blob_schedule is not None + + # add block before bpo + blocks.append( + Block( + txs=[ + tx(sender=sender, fork_blob_schedule=fork_blob_schedule) + ], # tx should pick up fork_blob_schedule as input parameter + timestamp=timestamp - 1, + ) + ) + # add block at bpo + blocks.append( + Block( + txs=[tx(sender=sender, fork_blob_schedule=fork_blob_schedule)], + timestamp=timestamp, + ) + ) + # add block after bpo + blocks.append( + Block( + txs=[tx(sender=sender, fork_blob_schedule=fork_blob_schedule)], + timestamp=timestamp + 1, + ) + ) # amount of created blocks = 3 * len(bpo_schedule.root) assert len(blocks) == 3 * len(bpo_schedule.root) - # TODO: - # for each block the client should report the current values of: max, target and base_fee_update_fraction # noqa: E501 - # we need to signal to the client that the expected response is according to the bpo_schedule defined above # noqa: E501 - blockchain_test( genesis_environment=env, pre=pre, - post=post, + post={}, blocks=blocks, bpo_schedule=bpo_schedule, ) From 6da66a9d77ea347ca0f6028d74fb794482cb7c36 Mon Sep 17 00:00:00 2001 From: Felix H Date: Tue, 29 Jul 2025 08:46:08 +0000 Subject: [PATCH 3/7] fix --- .../tests/test_blob_types.py | 99 ++++++++++++++----- 1 file changed, 72 insertions(+), 27 deletions(-) diff --git a/src/ethereum_test_types/tests/test_blob_types.py b/src/ethereum_test_types/tests/test_blob_types.py index cb47add924d..4aefc61a912 100644 --- a/src/ethereum_test_types/tests/test_blob_types.py +++ b/src/ethereum_test_types/tests/test_blob_types.py @@ -1,8 +1,10 @@ """Test suite for blobs.""" import copy +import time import pytest +from filelock import FileLock from ethereum_test_forks import ( Cancun, @@ -15,13 +17,66 @@ ShanghaiToCancunAtTime15k, ) -from ..blob_types import ( - CACHED_BLOBS_DIRECTORY, - Blob, - BPO_Parameters, - bpo_get_value, - clear_blob_cache, -) +from ..blob_types import CACHED_BLOBS_DIRECTORY, Blob, clear_blob_cache + + +def 
increment_counter(timeout: float = 10): + """ + Increment counter in file, creating if doesn't exist. + + This is needed because we require the unit test 'test_transition_fork_blobs' to run + at the end without having to include another dependency for ordering tests. + That test has to run at the end because it assumes that no json blobs not created + by itself are created while it is running. + + The hardcoded counter value in the test above has to be updated if any new blob_related + unit tests that create json blobs are added in the future. + + """ + file_path = CACHED_BLOBS_DIRECTORY / "blob_unit_test_counter.txt" + lock_file = file_path.with_suffix(".lock") + + with FileLock(lock_file, timeout=timeout): + # Read current value or start at 0 + if file_path.exists(): + current_value = int(file_path.read_text().strip()) + else: + current_value = 0 + + # Increment and write back + new_value = current_value + 1 + file_path.write_text(str(new_value)) + + return new_value + + +def wait_until_counter_reached(target: int, poll_interval: float = 0.1): + """Wait until blob unit test counter reaches target value.""" + file_path = CACHED_BLOBS_DIRECTORY / "blob_unit_test_counter.txt" + lock_file = file_path.with_suffix(".lock") # Add lock file path + + while True: + # Use FileLock when reading! + with FileLock(lock_file, timeout=10): + if file_path.exists(): + try: + current_value = int(file_path.read_text().strip()) + if current_value == target: + # file_path.unlink() # get rid to effectively reset counter to 0 + return current_value + elif current_value > target: + pytest.fail( + f"The blob_unit_test lock counter is too high! " + f"Expected {target}, but got {current_value}. " + f"It probably reused an existing file that was not cleared. " + f"Delete {file_path} manually to fix this." 
+ ) + except Exception: + current_value = 0 + else: + current_value = 0 + + time.sleep(poll_interval) @pytest.mark.parametrize("seed", [0, 10, 100]) @@ -48,6 +103,8 @@ def test_blob_creation_and_writing_and_reading( # ensure file you read equals file you wrote assert b.model_dump() == restored.model_dump() + increment_counter() + @pytest.mark.parametrize( "corruption_mode", @@ -77,6 +134,8 @@ def test_blob_proof_corruption( "proof is unchanged!" ) + increment_counter() + @pytest.mark.parametrize("timestamp", [14999, 15000]) @pytest.mark.parametrize( @@ -87,6 +146,9 @@ def test_transition_fork_blobs( timestamp, ): """Generates blobs for transition forks (time 14999 is old fork, time 15000 is new fork).""" + # line below guarantees that this test runs only after the other blob unit tests are done + wait_until_counter_reached(21) + clear_blob_cache(CACHED_BLOBS_DIRECTORY) print(f"Original fork: {fork}, Timestamp: {timestamp}") @@ -116,23 +178,6 @@ def test_transition_fork_blobs( f"transitioned to {post_transition_fork_at_15k.name()} but is still at {b.fork.name()}" ) - -@pytest.mark.parametrize("bpo_fork", ["cancun", "prague", "osaka", "bpo1", "bpo2"]) -@pytest.mark.parametrize( - "bpo_parameter", - [ - BPO_Parameters.TARGET, - BPO_Parameters.MAX, - BPO_Parameters.BASE_FEE_UPDATE_FRACTION, - BPO_Parameters.TIME, - ], -) -def test_bpo_parameter_lookup(bpo_fork, bpo_parameter): - """Tries looking up different values from the BPO configuration json.""" - result = bpo_get_value(bpo_fork=bpo_fork, bpo_parameter=bpo_parameter) - print( - f"\nbpo_fork: {bpo_fork}\n" - f"bpo_parameter: {bpo_parameter}\n" - f"retrieved value from config: {result}\n" - ) - # TODO: when one day actual bpo_config is known assert correction of retrieved values + # delete counter at last iteration (otherwise re-running all unit tests will fail) + if timestamp == 15_000 and pre_transition_fork == Prague: + (CACHED_BLOBS_DIRECTORY / "blob_unit_test_counter.txt").unlink() From 
9ea546c3d9c6e0a54ca1e1dad6493de03f498ab8 Mon Sep 17 00:00:00 2001 From: Felix H Date: Tue, 29 Jul 2025 08:51:37 +0000 Subject: [PATCH 4/7] fix --- src/ethereum_test_types/blob_types.py | 88 ++++++++------------------- tests/osaka/eip7892_bpo/test_bpo.py | 1 + 2 files changed, 26 insertions(+), 63 deletions(-) diff --git a/src/ethereum_test_types/blob_types.py b/src/ethereum_test_types/blob_types.py index 9ae2cc3730e..ca7e13afc31 100644 --- a/src/ethereum_test_types/blob_types.py +++ b/src/ethereum_test_types/blob_types.py @@ -1,6 +1,5 @@ """Blob-related types for Ethereum tests.""" -import json import random from enum import Enum from hashlib import sha256 @@ -23,61 +22,23 @@ logger = get_logger(__name__) -class BPO_Parameters(Enum): # noqa: N801 - """Define BPO keys for IDE autocomplete.""" - - TARGET = "target" - MAX = "max" - BASE_FEE_UPDATE_FRACTION = "baseFeeUpdateFraction" - TIME = "Time" # actually it is: Time - - -def bpo_get_value(bpo_fork: str, bpo_parameter: BPO_Parameters) -> int: # noqa: D417 - """ - Retrieve BPO values from the JSON config. - - Arguments: - - bpo_fork: Any fork (e.g. cancun) or bpo forks (e.g. bpo1 or bpo2) - - bpo_parameter: Enum value that specifies what you want to access in the bpo config - - Returns the retrieved int. 
- - """ - # ensure the bpo config exists and can be read - bpo_config_path = Path("src") / "ethereum_test_types" / "blob_bpo_config.json" - if not bpo_config_path.exists(): - raise FileNotFoundError(f"Failed to find BPO config json: {bpo_config_path}") - with open(bpo_config_path, "r") as file: - bpo_config = json.load(file) - - # force-lowercase the provided fork - bpo_fork = bpo_fork.lower() - - # retrieve requested value - if bpo_parameter == BPO_Parameters.TARGET: - return bpo_config["blobSchedule"][bpo_fork][BPO_Parameters.TARGET.value] - elif bpo_parameter == BPO_Parameters.MAX: - return bpo_config["blobSchedule"][bpo_fork][BPO_Parameters.MAX.value] - elif bpo_parameter == BPO_Parameters.BASE_FEE_UPDATE_FRACTION: - return bpo_config["blobSchedule"][bpo_fork][BPO_Parameters.BASE_FEE_UPDATE_FRACTION.value] - elif bpo_parameter == BPO_Parameters.TIME: - return bpo_config[bpo_fork + BPO_Parameters.TIME.value] - - raise NotImplementedError( - f"This function has not yet been updated to handle BPO Parameter: {bpo_parameter}" - ) - - def clear_blob_cache(cached_blobs_folder_path: Path): """Delete all cached blobs.""" if not cached_blobs_folder_path.is_dir(): return - for f in cached_blobs_folder_path.glob("*.json"): # only delete .json files + + json_files = list(cached_blobs_folder_path.glob("*.json")) + + for f in json_files: + lock_file_path = f.with_suffix(".lock") + try: - f.unlink() # permanently delete this file + # get file lock for what you want to delete + with FileLock(lock_file_path): + f.unlink() except Exception as e: print( - f"Critical error while trying to delete file {f}:{e}.. " + f"Error while trying to delete file {f}:{e}. " "Aborting clearing of blob cache folder." 
) return @@ -232,11 +193,15 @@ def get_cells(fork: Fork, data: Bytes) -> List[Bytes] | None: # (blob related constants are needed and only available for normal forks) fork = fork.fork_at(timestamp=timestamp) - # if this blob already exists then load from file + # if this blob already exists then load from file. use lock blob_location: Path = Blob.get_filepath(fork, seed) - if blob_location.exists(): - logger.debug(f"Blob exists already, reading it from file {blob_location}") - return Blob.from_file(Blob.get_filename(fork, seed)) + + # use lock to avoid race conditions + lock_file_path = blob_location.with_suffix(".lock") + with FileLock(lock_file_path): + if blob_location.exists(): + logger.debug(f"Blob exists already, reading it from file {blob_location}") + return Blob.from_file(Blob.get_filename(fork, seed)) assert fork.supports_blobs(), f"Provided fork {fork.name()} does not support blobs!" @@ -283,17 +248,14 @@ def from_file(file_name: str) -> "Blob": # determine path where this blob would be stored if it existed blob_file_location = CACHED_BLOBS_DIRECTORY / file_name - # use lock to avoid race conditions - lock_file_path = blob_file_location.with_suffix(".lock") - with FileLock(lock_file_path): - # check whether blob exists - assert blob_file_location.exists(), ( - f"Tried to load blob from file but {blob_file_location} does not exist" - ) + # check whether blob exists + assert blob_file_location.exists(), ( + f"Tried to load blob from file but {blob_file_location} does not exist" + ) - # read blob from file - with open(blob_file_location, "r", encoding="utf-8") as f: - json_str: str = f.read() + # read blob from file + with open(blob_file_location, "r", encoding="utf-8") as f: + json_str: str = f.read() # reconstruct and return blob object return Blob.model_validate_json(json_str) diff --git a/tests/osaka/eip7892_bpo/test_bpo.py b/tests/osaka/eip7892_bpo/test_bpo.py index a8071bab239..ee0641a6618 100644 --- a/tests/osaka/eip7892_bpo/test_bpo.py +++ 
b/tests/osaka/eip7892_bpo/test_bpo.py @@ -41,6 +41,7 @@ @pytest.fixture def bpo_schedule() -> TimestampBlobSchedule: + """Create and return the BPO pseudo schedule used for the tests in this file.""" bpo_schedule = TimestampBlobSchedule() # below ensure that there is a timestamp difference of at least 3 between each scheduled fork bpo_schedule.add_schedule( From dcd23aa51cc375cb63bed29d8f1a49bd933f2b51 Mon Sep 17 00:00:00 2001 From: Felix H Date: Tue, 29 Jul 2025 10:20:16 +0000 Subject: [PATCH 5/7] started modifying BlockchainTest class to allow bpo schedule overwrites --- src/ethereum_test_specs/blockchain.py | 46 ++++++++++++++++++++------- 1 file changed, 35 insertions(+), 11 deletions(-) diff --git a/src/ethereum_test_specs/blockchain.py b/src/ethereum_test_specs/blockchain.py index d896a2509ac..5f081204443 100644 --- a/src/ethereum_test_specs/blockchain.py +++ b/src/ethereum_test_specs/blockchain.py @@ -1,7 +1,6 @@ """Ethereum blockchain test spec definition and filler.""" import warnings -from pprint import pprint from typing import Any, Callable, ClassVar, Dict, Generator, List, Optional, Sequence, Tuple, Type import pytest @@ -18,6 +17,7 @@ HexNumber, Number, ) +from ethereum_test_base_types.composite_types import BlobSchedule, TimestampBlobSchedule from ethereum_test_exceptions import ( BlockException, EngineAPIError, @@ -52,11 +52,14 @@ from ethereum_test_fixtures.common import FixtureBlobSchedule from ethereum_test_forks import Fork from ethereum_test_types import Alloc, Environment, Removable, Requests, Transaction, Withdrawal +from pytest_plugins.logging import get_logger from .base import BaseTest, verify_result from .debugging import print_traces from .helpers import verify_block, verify_transactions +logger = get_logger(__name__) + def environment_from_parent_header(parent: "FixtureHeader") -> "Environment": """Instantiate new environment with the provided header as parent.""" @@ -407,6 +410,7 @@ class BlockchainTest(BaseTest): verify_sync: bool = 
False chain_id: int = 1 exclude_full_post_state_in_output: bool = False + bpo_schedule: TimestampBlobSchedule = None # type: ignore[assignment] """ Exclude the post state from the fixture output. In this case, the state verification is only performed based on the state root. @@ -489,21 +493,24 @@ def generate_block_data( block: Block, previous_env: Environment, previous_alloc: Alloc, + bpo_schedule: TimestampBlobSchedule | None, ) -> BuiltBlock: """Generate common block data for both make_fixture and make_hive_fixture.""" env = block.set_environment(previous_env) env = env.set_fork_requirements(fork) txs: List[Transaction] = [] + + # check if any tests are gas-heavy for tx in block.txs: if not self.is_tx_gas_heavy_test() and tx.gas_limit >= Environment().gas_limit: warnings.warn( f"{self.node_id()} uses a high Transaction gas_limit: {tx.gas_limit}", stacklevel=2, ) - txs.append(tx.with_signature_and_sender()) + # ensure exception test comes at the end, if it exists at all if failing_tx_count := len([tx for tx in txs if tx.error]) > 0: if failing_tx_count > 1: raise Exception( @@ -515,6 +522,14 @@ def generate_block_data( + "must be the last transaction in the block" ) + # if a bpo schedule has been passed let it overwrite the blob_schedule + blob_schedule: BlobSchedule | None = fork.blob_schedule() + assert blob_schedule is not None + if bpo_schedule is not None: + print(f"GENERATE BLOCK DATE: I got this bpo schedule: {bpo_schedule}") + # TODO: implement + + # get transition tool response transition_tool_output = t8n.evaluate( transition_tool_data=TransitionTool.TransitionToolData( alloc=previous_alloc, @@ -523,7 +538,7 @@ def generate_block_data( fork=fork, chain_id=self.chain_id, reward=fork.get_reward(env.number, env.timestamp), - blob_schedule=fork.blob_schedule(), + blob_schedule=blob_schedule, ), debug_output_path=self.get_next_transition_tool_output_path(), slow_request=self.is_tx_gas_heavy_test(), @@ -617,9 +632,13 @@ def generate_block_data( 
verify_result(transition_tool_output.result, env) except Exception as e: print_traces(t8n.get_traces()) - pprint(transition_tool_output.result) - pprint(previous_alloc) - pprint(transition_tool_output.alloc) + + # only spam the cmd with t8n if debug logging is explicitly activated + logger.debug( + f"T8n output: {transition_tool_output.result}\n" + f"Previous alloc: {previous_alloc}\n" + f"T8n alloc: {transition_tool_output.alloc}" + ) raise e if len(rejected_txs) > 0 and block.exception is None: @@ -646,9 +665,7 @@ def verify_post_state(self, t8n, t8n_state: Alloc, expected_state: Alloc | None raise e def make_fixture( - self, - t8n: TransitionTool, - fork: Fork, + self, t8n: TransitionTool, fork: Fork, bpo_schedule: TimestampBlobSchedule | None ) -> BlockchainFixture: """Create a fixture from the blockchain test definition.""" fixture_blocks: List[FixtureBlock | InvalidFixtureBlock] = [] @@ -669,6 +686,7 @@ def make_fixture( block=block, previous_env=env, previous_alloc=alloc, + bpo_schedule=bpo_schedule, ) fixture_blocks.append(built_block.get_fixture_block()) if block.exception is None: @@ -705,6 +723,7 @@ def make_hive_fixture( self, t8n: TransitionTool, fork: Fork, + bpo_schedule: TimestampBlobSchedule | None, fixture_format: FixtureFormat = BlockchainEngineFixture, ) -> BlockchainEngineFixture | BlockchainEngineXFixture: """Create a hive fixture from the blocktest definition.""" @@ -725,6 +744,7 @@ def make_hive_fixture( block=block, previous_env=env, previous_alloc=alloc, + bpo_schedule=bpo_schedule, ) fixture_payloads.append(built_block.get_fixture_engine_new_payload()) if block.exception is None: @@ -765,6 +785,7 @@ def make_hive_fixture( block=Block(), previous_env=env, previous_alloc=alloc, + bpo_schedule=bpo_schedule, ) sync_payload = sync_built_block.get_fixture_engine_new_payload() @@ -813,13 +834,16 @@ def generate( t8n: TransitionTool, fork: Fork, fixture_format: FixtureFormat, + bpo_schedule: TimestampBlobSchedule | None = None, ) -> BaseFixture: 
"""Generate the BlockchainTest fixture.""" t8n.reset_traces() if fixture_format in [BlockchainEngineFixture, BlockchainEngineXFixture]: - return self.make_hive_fixture(t8n, fork, fixture_format) + return self.make_hive_fixture( + t8n=t8n, fork=fork, bpo_schedule=bpo_schedule, fixture_format=fixture_format + ) elif fixture_format == BlockchainFixture: - return self.make_fixture(t8n, fork) + return self.make_fixture(t8n=t8n, fork=fork, bpo_schedule=bpo_schedule) raise Exception(f"Unknown fixture format: {fixture_format}") From 645277c3c024c2b1658315bce364b30751504390 Mon Sep 17 00:00:00 2001 From: Felix H Date: Tue, 29 Jul 2025 12:29:47 +0000 Subject: [PATCH 6/7] wip --- src/ethereum_test_specs/blockchain.py | 32 ++++++++++++++++++++------- 1 file changed, 24 insertions(+), 8 deletions(-) diff --git a/src/ethereum_test_specs/blockchain.py b/src/ethereum_test_specs/blockchain.py index 5f081204443..927057da729 100644 --- a/src/ethereum_test_specs/blockchain.py +++ b/src/ethereum_test_specs/blockchain.py @@ -7,6 +7,7 @@ from pydantic import ConfigDict, Field, field_validator from ethereum_clis import BlockExceptionWithMessage, Result, TransitionTool +from ethereum_clis.types import TransitionToolOutput from ethereum_test_base_types import ( Address, Bloom, @@ -523,14 +524,15 @@ def generate_block_data( ) # if a bpo schedule has been passed let it overwrite the blob_schedule - blob_schedule: BlobSchedule | None = fork.blob_schedule() - assert blob_schedule is not None + fork_blob_schedule: BlobSchedule | None = fork.blob_schedule() + assert fork_blob_schedule is not None if bpo_schedule is not None: - print(f"GENERATE BLOCK DATE: I got this bpo schedule: {bpo_schedule}") - # TODO: implement + # fork_blob_schedule = bpo_schedule + pass + # TODO: # get transition tool response - transition_tool_output = t8n.evaluate( + transition_tool_output: TransitionToolOutput = t8n.evaluate( transition_tool_data=TransitionTool.TransitionToolData( alloc=previous_alloc, txs=txs, @@ 
-538,7 +540,7 @@ def generate_block_data( fork=fork, chain_id=self.chain_id, reward=fork.get_reward(env.number, env.timestamp), - blob_schedule=blob_schedule, + blob_schedule=fork_blob_schedule, ), debug_output_path=self.get_next_transition_tool_output_path(), slow_request=self.is_tx_gas_heavy_test(), @@ -668,6 +670,13 @@ def make_fixture( self, t8n: TransitionTool, fork: Fork, bpo_schedule: TimestampBlobSchedule | None ) -> BlockchainFixture: """Create a fixture from the blockchain test definition.""" + fork_blob_schedule: BlobSchedule | None = fork.blob_schedule() + assert fork_blob_schedule is not None + if bpo_schedule is not None: + # fork_blob_schedule = bpo_schedule + pass + # TODO: + fixture_blocks: List[FixtureBlock | InvalidFixtureBlock] = [] pre, genesis = self.make_genesis(fork=fork, apply_pre_allocation_blockchain=True) @@ -714,7 +723,7 @@ def make_fixture( post_state_hash=alloc.state_root() if self.exclude_full_post_state_in_output else None, config=FixtureConfig( fork=fork, - blob_schedule=FixtureBlobSchedule.from_blob_schedule(fork.blob_schedule()), + blob_schedule=FixtureBlobSchedule.from_blob_schedule(fork_blob_schedule), chain_id=self.chain_id, ), ) @@ -727,6 +736,13 @@ def make_hive_fixture( fixture_format: FixtureFormat = BlockchainEngineFixture, ) -> BlockchainEngineFixture | BlockchainEngineXFixture: """Create a hive fixture from the blocktest definition.""" + fork_blob_schedule: BlobSchedule | None = fork.blob_schedule() + assert fork_blob_schedule is not None + if bpo_schedule is not None: + # fork_blob_schedule = bpo_schedule + pass + # TODO: + fixture_payloads: List[FixtureEngineNewPayload] = [] pre, genesis = self.make_genesis( @@ -800,7 +816,7 @@ def make_hive_fixture( "config": FixtureConfig( fork=fork, chain_id=self.chain_id, - blob_schedule=FixtureBlobSchedule.from_blob_schedule(fork.blob_schedule()), + blob_schedule=FixtureBlobSchedule.from_blob_schedule(fork_blob_schedule), ), } From 09d451de42857173efb2dcf6f248dfcdc6162e4d Mon Sep 
17 00:00:00 2001
From: Felix H
Date: Tue, 29 Jul 2025 12:52:43 +0000
Subject: [PATCH 7/7] sanity checks added for TimestampBlobSchedule

---
 src/ethereum_test_base_types/composite_types.py | 16 ++++++++++++++++
 1 file changed, 16 insertions(+)

diff --git a/src/ethereum_test_base_types/composite_types.py b/src/ethereum_test_base_types/composite_types.py
index 76b343157b9..0a2a68a90ef 100644
--- a/src/ethereum_test_base_types/composite_types.py
+++ b/src/ethereum_test_base_types/composite_types.py
@@ -515,6 +515,7 @@ class TimestampBlobSchedule(BaseModel):
     object with the fields max, target and base_fee_update_fraction.
     """
 
+    # never directly modify root, instead use `add_schedule()`
     root: List[Dict[int, ForkBlobSchedule]] = Field(default_factory=list, validate_default=True)
 
     def add_schedule(self, activation_timestamp: int, schedule: ForkBlobSchedule):
@@ -538,3 +539,18 @@ def add_schedule(self, activation_timestamp: int, schedule: ForkBlobSchedule):
 
         # add a scheduled bpo fork
         self.root.append({activation_timestamp: schedule})
+
+        # sort list ascending by dict keys (timestamp)
+        self.root = sorted(self.root, key=lambda d: list(d.keys())[0])
+
+        # sanity check to ensure that timestamps are at least 3 time units apart (relevant for bpo test logic) # noqa: E501
+        prev_time: int = 0
+        for i in self.root:
+            cur_time = next(iter(i))  # get key of only key-value pair in dict
+            assert cur_time - prev_time >= 3, (
+                "The timestamp difference of the keys of scheduled bpo_forks needs to be at "
+                "least 3! But you tried to append the dictionary "
+                f"{dict({activation_timestamp: schedule})} which would lead to a "  # noqa: C418
+                f"timestamp difference of just {cur_time - prev_time} compared to the "
+                "previous element."
+            )
+            prev_time = cur_time