Commit 7832603

refactor(forks): move BAL tests to Amsterdam
- chore(cleanup): Remove reference to the old BlockAccessLists fork.
- refactor(fork): Move ``Amsterdam`` above the old EOF fork.
1 parent 9626bd8 commit 7832603

6 files changed: 61 additions, 118 deletions

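Taken together, the fork changes below mean that BAL (EIP-7928) tests now key off ``Amsterdam``, which inherits from ``Osaka``, and the standalone ``BlockAccessLists`` development fork is gone. A minimal sketch of the resulting hierarchy, assuming ``Amsterdam`` and ``Osaka`` are exported from ``ethereum_test_forks`` like the other fork classes (not part of this commit, illustration only):

# Hedged sketch, not part of this commit; assumes Amsterdam and Osaka are
# exported from ethereum_test_forks like the other fork classes.
from ethereum_test_forks import Amsterdam, Osaka

# Amsterdam subclasses Osaka directly (class Amsterdam(Osaka) in forks.py below).
assert issubclass(Amsterdam, Osaka)

# Amsterdam is still a development fork: is_deployed() returns False, so it is
# excluded from default "deployed forks" selections until it ships.
assert not Amsterdam.is_deployed()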

scripts/convert_addresses.py

Lines changed: 47 additions & 87 deletions
@@ -133,11 +133,9 @@ class Context(Enum):
 "storageCostsFiller.yml",
 "variedContextFiller.yml",
 "vitalikTransactionTestParisFiller.json",
-
 # stExample
 "add11_ymlFiller.yml",
 "add11Filler.json",
-
 # stExtCodeHash
 "extcodehashEmpty_ParisFiller.yml",
 "extCodeHashSelfInInitFiller.json",
@@ -164,7 +162,6 @@ class Context(Enum):
 "extCodeHashCreatedAndDeletedAccountCallFiller.json",
 "createEmptyThenExtcodehashFiller.json",
 "contractCreationOOGdontLeaveEmptyContractViaTransactionFiller.json",
-
 # Really only `ReturnTestFiller` and `ReturnTest2Filler` are compatible inside `stInitCodeTest`
 "CallContractToCreateContractAndCallItOOGFiller.json",
 "CallContractToCreateContractOOGBonusGasFiller.json",
@@ -179,8 +176,6 @@ class Context(Enum):
 "OutOfGasContractCreationFiller.json",
 "OutOfGasPrefundedContractCreationFiller.json",
 "TransactionCreateStopInInitcodeFiller.json",
-
-
 # stInitCodeTest
 "ReturnTestFiller.json",
 "ReturnTest2Filler.json",
@@ -189,7 +184,6 @@ class Context(Enum):
 "CREATE_BoundsFiller.json",
 "NonZeroValue_CALLCODEFiller.json",
 "NonZeroValue_DELEGATECALLFiller.json",
-
 "bufferFiller.yml",
 "bufferSrcOffsetFiller.yml",
 "callDataCopyOffsetFiller.json",
@@ -213,7 +207,6 @@ class Context(Enum):
 "Create1000ShnghaiFiller.json",
 "QuadraticComplexitySolidity_CallDataCopyFiller.json",
 "testRandomTestFiller.json",
-
 # uses coinbase address (0x41) as integer memory offsets. TODO: is this necessary for the test?
 # we could end up changing this for compatibility instead.
 "randomStatetest1Filler.json",
@@ -247,17 +240,14 @@ class Context(Enum):
 "Call50000_sha256Filler.json",
 "Create1000ByzantiumFiller.json",
 "Create1000Filler.json",
-
 # stRecursiveCreate
 "RecursiveCreateContractsCreate4ContractsFiller.json",
 "RecursiveCreateContractsFiller.json",
-
 # stStackTests
 "shallowStackFiller.json",
 "stackOverflowFiller.json",
 "stackOverflowDUPFiller.json",
 "stackOverflowPUSHFiller.json",
-
 "revertRetDataSizeFiller.yml",
 "returndatacopy_0_0_following_successful_createFiller.json",
 "RevertPrefoundFiller.json",
@@ -388,15 +378,12 @@ class Context(Enum):
 "createNameRegistratorValueTooHighFiller.json",
 "suicideCallerAddresTooBigLeftFiller.json",
 "ABAcallsSuicide1Filler.json",
-
-
 "/stCreate2/",
 "/stCreateTest/",
 "/stRecursiveCreate/",
 "/stWalletTest/",
 "/stZeroKnowledge/",
 "/stZeroKnowledge2/",
-
 # TODO: See if these can be turned on with fine tuning
 "/stTimeConsuming/",
 }
@@ -448,9 +435,7 @@ def is_incompatible_file(file_path: Path) -> bool:
 "ccccccccccccccccccccccccccccccccccccccc1",
 "ccccccccccccccccccccccccccccccccccccccc2",
 },
-"callToSuicideThenExtcodehashFiller.json": {
-"0000000000000000000000000000000000000025"
-},
+"callToSuicideThenExtcodehashFiller.json": {"0000000000000000000000000000000000000025"},
 "doubleSelfdestructTouch_ParisFiller.yml": {
 "0000000000000000000000000000000000e49701",
 "0000000000000000000000000000000000e49702",
@@ -466,7 +451,7 @@ def is_incompatible_file(file_path: Path) -> bool:
 "coinbaseT2Filler.yml",
 "doubleSelfdestructTouch_ParisFiller.yml",
 "tooLongReturnDataCopyFiller.yml",
-"coinbaseWarmAccountCallGasFailFiller.yml"
+"coinbaseWarmAccountCallGasFailFiller.yml",
 }
 
 # Fillers that should have precompile check disabled
@@ -576,9 +561,7 @@ def __init__(self, filename: str = ""):
 self.coinbase_addr: Optional[str] = None
 self.target_addr: Optional[str] = None
 self.is_json = filename.lower().endswith(".json")
-self.skip_precompile_check = any(
-kw in filename for kw in DISABLE_PRECOMPILE_CHECK_FILLERS
-)
+self.skip_precompile_check = any(kw in filename for kw in DISABLE_PRECOMPILE_CHECK_FILLERS)
 self.no_tags_in_code = any(kw in filename for kw in NO_TAGS_IN_CODE)
 self.validate_addr_entropy_in_code = any(
 kw in filename for kw in VALIDATE_ADDR_ENTROPY_IN_CODE
@@ -628,9 +611,24 @@ def detect_context_change(self, line: str) -> Optional[Context]:
 return None  # Don't change context for empty storage
 return Context.STORAGE
 # Known fields that indicate we're back in NORMAL context
-elif any(kw in stripped for kw in {"balance:", "nonce:", "secretKey:", "gasLimit:",
-"gasPrice:", "value:", "to:", "from:", "address:",
-"shouldnotexist:", "indexes:", "network:", "result:"}):
+elif any(
+kw in stripped
+for kw in {
+"balance:",
+"nonce:",
+"secretKey:",
+"gasLimit:",
+"gasPrice:",
+"value:",
+"to:",
+"from:",
+"address:",
+"shouldnotexist:",
+"indexes:",
+"network:",
+"result:",
+}
+):
 return Context.NORMAL
 return None
 
@@ -677,10 +675,7 @@ def collect_addresses(self, lines: List[str]) -> None:
 for line_num, line in enumerate(lines, 1):
 stripped = line.strip()
 stripped_no_spaces_or_quotes = (
-stripped.replace('"', "")
-.replace("'", "")
-.replace(" ", "")
-.replace(",", "")
+stripped.replace('"', "").replace("'", "").replace(" ", "").replace(",", "")
 )
 
 # Check for section changes
@@ -698,8 +693,9 @@ def collect_addresses(self, lines: List[str]) -> None:
 if current_section in [Section.PRE, Section.RESULT]:
 # Check if this line is an address key (40 hex chars followed by colon)
 # Also check for quoted addresses in JSON format
-if (re.match(r"^\s*(?:0x)?[a-fA-F0-9]{40}\s*:", line, re.IGNORECASE) or
-re.match(r'^\s*"(?:0x)?[a-fA-F0-9]{40}"\s*:', line, re.IGNORECASE)):
+if re.match(r"^\s*(?:0x)?[a-fA-F0-9]{40}\s*:", line, re.IGNORECASE) or re.match(
+r'^\s*"(?:0x)?[a-fA-F0-9]{40}"\s*:', line, re.IGNORECASE
+):
 current_context = Context.NORMAL
 
 # Don't reset context on closing braces - let field names determine context
@@ -777,9 +773,7 @@ def collect_addresses(self, lines: List[str]) -> None:
 if (
 code_content
 and code_content not in ["", "0x", "0X", "{}", "[]"]
-and (
-code_content in ["|", ">"] or len(code_content) > 2
-)
+and (code_content in ["|", ">"] or len(code_content) > 2)
 ):
 self.addresses_with_code.add(current_address)
 # Also check during CODE context for multi-line code
@@ -789,11 +783,7 @@ def collect_addresses(self, lines: List[str]) -> None:
 kw in stripped_no_spaces_or_quotes
 for kw in {"balance:", "nonce:", "storage:"}
 ):
-if (
-stripped
-and not stripped.startswith("#")
-and "{" not in stripped
-):
+if stripped and not stripped.startswith("#") and "{" not in stripped:
 # Any non-empty, non-comment, non-brace line in code section indicates there's code
 self.addresses_with_code.add(current_address)
 
@@ -807,10 +797,7 @@ def collect_addresses(self, lines: List[str]) -> None:
 if (
 code_content
 and code_content not in ["", "0x", "{}", "[]"]
-and (
-not code_content.startswith("0x")
-or len(code_content) > 3
-)
+and (not code_content.startswith("0x") or len(code_content) > 3)
 ):  # More than just "0x"
 self.addresses_with_code.add(current_address)
 
@@ -958,9 +945,7 @@ def convert_line(self, line: str, section: Section, context: Context) -> str:
 f"'{KNOWN_SECRET_KEY}'", f"'<eoa:sender:0x{KNOWN_SECRET_KEY}>'"
 )
 elif KNOWN_SECRET_KEY in line:
-line = line.replace(
-KNOWN_SECRET_KEY, f'"<eoa:sender:0x{KNOWN_SECRET_KEY}>"'
-)
+line = line.replace(KNOWN_SECRET_KEY, f'"<eoa:sender:0x{KNOWN_SECRET_KEY}>"')
 return line
 
 # Sort addresses by length (longest first) to avoid partial replacements
@@ -984,20 +969,19 @@ def convert_line(self, line: str, section: Section, context: Context) -> str:
 
 # Skip replacements in code/storage if no_tags_in_code is set
 # EXCEPT for address keys which should always be replaced
-if self.no_tags_in_code and context in [Context.CODE, Context.STORAGE] and not is_address_key:
+if (
+self.no_tags_in_code
+and context in [Context.CODE, Context.STORAGE]
+and not is_address_key
+):
 continue
 
 # Use regex to find and replace addresses (case-insensitive)
 # Pattern 1: Address followed by colon (as a key) - with or without 0x prefix
-pattern_key = re.compile(
-rf'(^|\s|"|\')(?:0x)?{re.escape(addr)}(?=:)', re.IGNORECASE
-)
+pattern_key = re.compile(rf'(^|\s|"|\')(?:0x)?{re.escape(addr)}(?=:)', re.IGNORECASE)
 if pattern_key.search(line):
 # Skip replacement if we're in storage context, depending on dont_replace_tags_in_storage_keys
-if (
-context != Context.STORAGE
-and not self.dont_replace_tags_in_storage_keys
-):
+if context != Context.STORAGE and not self.dont_replace_tags_in_storage_keys:
 # Replace while preserving the prefix (whitespace, quote, etc)
 line = pattern_key.sub(r"\1" + tag, line)
 
@@ -1022,38 +1006,26 @@ def protect_tag(match):
 # In storage context, handle keys and values separately
 if not self.dont_replace_tags_in_storage_keys:
 # Replace addresses everywhere in storage, including keys
-pattern_general = re.compile(
-rf"(?:0x)?{re.escape(addr)}", re.IGNORECASE
-)
-line_with_placeholders = pattern_general.sub(
-tag, line_with_placeholders
-)
+pattern_general = re.compile(rf"(?:0x)?{re.escape(addr)}", re.IGNORECASE)
+line_with_placeholders = pattern_general.sub(tag, line_with_placeholders)
 else:
 # Default behavior: only replace in values, not keys
 # Split by colon and only replace in values
 parts = line_with_placeholders.split(":")
 for i in range(1, len(parts)):  # Skip first part (the key)
 # Replace address in this part
-pattern_general = re.compile(
-rf"(?:0x)?{re.escape(addr)}", re.IGNORECASE
-)
+pattern_general = re.compile(rf"(?:0x)?{re.escape(addr)}", re.IGNORECASE)
 parts[i] = pattern_general.sub(tag, parts[i])
 line_with_placeholders = ":".join(parts)
 else:
 # Not in storage context - replace all occurrences
-pattern_general = re.compile(
-rf"(?:0x)?{re.escape(addr)}", re.IGNORECASE
-)
-line_with_placeholders = pattern_general.sub(
-tag, line_with_placeholders
-)
+pattern_general = re.compile(rf"(?:0x)?{re.escape(addr)}", re.IGNORECASE)
+line_with_placeholders = pattern_general.sub(tag, line_with_placeholders)
 
 # Restore protected tags
 for i, protected_tag in enumerate(protected_tags):
 placeholder = f"__TAG_PLACEHOLDER_{i}__"
-line_with_placeholders = line_with_placeholders.replace(
-placeholder, protected_tag
-)
+line_with_placeholders = line_with_placeholders.replace(placeholder, protected_tag)
 
 line = line_with_placeholders
 
@@ -1072,9 +1044,7 @@ def protect_tag(match):
 # Only replace short names in code/storage values, not in address keys
 if not is_address_key:
 # Extract the hex part without 0x prefix from short_name
-hex_part = (
-short_name[2:] if short_name.startswith("0x") else short_name
-)
+hex_part = short_name[2:] if short_name.startswith("0x") else short_name
 
 # Create a pattern that matches the hex part with any number of leading/trailing zeros
 # Pattern: optional 0x prefix, any number of zeros, the hex part, any number of zeros
@@ -1091,9 +1061,7 @@ def protect_tag(match):
 for match in reversed(matches):
 # Only replace if it's a valid hex number (starts with 0x or is all hex)
 match_text = match.group(0)
-if match_text.startswith("0x") or match_text.startswith(
-"0X"
-):
+if match_text.startswith("0x") or match_text.startswith("0X"):
 line = line[: match.start()] + tag + line[match.end() :]
 elif all(c in "0123456789abcdefABCDEF" for c in match_text):
 # Pure hex without 0x prefix - check context to ensure it's meant as an address
@@ -1102,16 +1070,10 @@ def protect_tag(match):
 after = line[match.end() :].lstrip()
 if (
 before.endswith(('"', "'", ":", " ", "(", "[", ","))
-or after.startswith(
-('"', "'", " ", ")", "]", ",", "\n")
-)
+or after.startswith(('"', "'", " ", ")", "]", ",", "\n"))
 or not before
 ):  # Start of line
-line = (
-line[: match.start()]
-+ tag
-+ line[match.end() :]
-)
+line = line[: match.start()] + tag + line[match.end() :]
 
 return line
 
@@ -1241,9 +1203,7 @@ def main():
 converted_json += 1
 print(f"Converted JSON: {file_path}")
 
-print(
-f"\nSummary: Converted {converted_yaml} YAML files and {converted_json} JSON files"
-)
+print(f"\nSummary: Converted {converted_yaml} YAML files and {converted_json} JSON files")
 
 
 if __name__ == "__main__":
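
Most of the scripts/convert_addresses.py hunks above are pure reformatting that joins wrapped calls back onto single lines; the regex patterns themselves are unchanged. As a standalone illustration of what the key pattern matches, here is a small sketch (the address, tag, and input line are invented for the example and are not taken from the script's data):

import re

# Illustrative values only; the real script derives the address list and tags
# from the filler files it converts.
addr = "a94f5374fce5edbc8e2a8697c15331677e6ebf0b"
tag = "<eoa:sender:1>"

# Same shape as pattern_key in the diff: optional 0x prefix, the address used
# as a key (lookahead for ":"), keeping the leading whitespace/quote via group 1.
pattern_key = re.compile(rf'(^|\s|"|\')(?:0x)?{re.escape(addr)}(?=:)', re.IGNORECASE)

line = '  0xA94F5374FCE5EDBC8E2A8697C15331677E6EBF0B: {balance: "0x0de0b6b3a7640000"}'
print(pattern_key.sub(r"\1" + tag, line))
# prints: '  <eoa:sender:1>: {balance: "0x0de0b6b3a7640000"}'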

src/ethereum_test_forks/forks/forks.py

Lines changed: 9 additions & 23 deletions
@@ -1437,11 +1437,6 @@ def engine_forkchoice_updated_version(
 class Osaka(Prague, solc_name="cancun"):
 """Osaka fork."""
 
-@classmethod
-def header_bal_hash_required(cls, block_number: int = 0, timestamp: int = 0) -> bool:
-"""Hash of the block access list is required starting from Osaka fork."""
-return True
-
 # update some blob constants
 BLOB_CONSTANTS = {
 **Prague.BLOB_CONSTANTS,  # same base constants as prague
@@ -1646,6 +1641,15 @@ def max_blobs_per_block(cls, block_number: int = 0, timestamp: int = 0) -> int:
 return 21
 
 
+class Amsterdam(Osaka):
+"""Amsterdam fork."""
+
+@classmethod
+def is_deployed(cls) -> bool:
+"""Return True if this fork is deployed."""
+return False
+
+
 class EOFv1(Prague, solc_name="cancun"):
 """EOF fork."""
 
@@ -1675,21 +1679,3 @@ def is_deployed(cls) -> bool:
 development.
 """
 return False
-
-
-class Amsterdam(Osaka):
-"""Amsterdam fork."""
-
-@classmethod
-def is_deployed(cls) -> bool:
-"""Return True if this fork is deployed."""
-return False
-
-
-class BlockAccessLists(Prague):
-"""A development fork for Block Access Lists."""
-
-@classmethod
-def header_bal_hash_required(cls, block_number: int = 0, timestamp: int = 0) -> bool:
-"""Hash of the block access list is required starting from this fork."""
-return True
File renamed without changes.

tests/osaka/eip7928_block_level_access_lists/spec.py renamed to tests/amsterdam/eip7928_block_level_access_lists/spec.py

Lines changed: 0 additions & 3 deletions
@@ -2,9 +2,6 @@
 
 from dataclasses import dataclass
 
-# ACTIVATION_FORK_NAME = "BlockAccessLists"
-# """The fork name for EIP-7928 activation."""
-
 
 @dataclass(frozen=True)
 class ReferenceSpec:
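
With the module now under tests/amsterdam/eip7928_block_level_access_lists/, BAL tests pin their activation to ``Amsterdam`` through the usual fork marker instead of a dedicated development fork. A rough, illustrative sketch of what a test in this directory could look like, assuming the repository's standard ``valid_from`` marker and ``blockchain_test``/``pre`` fixtures (the test name and body are hypothetical):

import pytest

from ethereum_test_tools import Alloc, Block, BlockchainTestFiller, Transaction

# Assumption: BAL tests activate at Amsterdam, matching the fork move in this commit.
pytestmark = pytest.mark.valid_from("Amsterdam")


def test_bal_records_simple_transfer(blockchain_test: BlockchainTestFiller, pre: Alloc):
    """Hypothetical example: a plain value transfer whose block access list
    should cover the sender and the recipient."""
    sender = pre.fund_eoa()
    recipient = pre.fund_eoa(amount=0)
    tx = Transaction(sender=sender, to=recipient, value=1, gas_limit=21_000)
    blockchain_test(pre=pre, post={}, blocks=[Block(txs=[tx])])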
