Skip to content

Commit e46317a

Browse files
spencer-tb and marioevz authored
chore(ci|cli): improve coverage ci and compare fixtures python script (#1970)
* chore(cli): improve compare fixtures script.
* chore(ci): add better logging for coverage ci step.
* chore(ci): add prague back for coverage ci.
* chore(ci): add summary test diff to coverage ci, use uv for python.
* Update src/cli/compare_fixtures.py

---------

Co-authored-by: Mario Vega <[email protected]>
1 parent 96e85f8 commit e46317a

File tree

2 files changed

+60
-30
lines changed

2 files changed

+60
-30
lines changed

.github/workflows/coverage.yaml

Lines changed: 7 additions & 7 deletions
Original file line number | Diff line number | Diff line change
@@ -4,7 +4,6 @@ on:
44
pull_request:
55
paths:
66
- "tests/**" # This triggers the workflow for any changes in the tests folder
7-
- "!tests/prague/**" # exclude changes in 'tests/prague'
87
- "!tests/osaka/**" # exclude changes in 'tests/osaka'
98
- "!tests/unscheduled/**" # exclude changes in 'tests/unscheduled'
109

@@ -141,14 +140,15 @@ jobs:
141140
run: |
142141
./.github/scripts/fill_prepatched_tests.sh "$MODIFIED_TEST_FILES $DELETED_TEST_FILES" "${{ github.workspace }}/evmtest_coverage/coverage/BASE_TESTS" "${{ github.workspace }}/evmtest_coverage/coverage/PATCH_TESTS" "${{ env.BLOCK_GAS_LIMIT }}" "${{ env.FILL_UNTIL }}"
143142
144-
- name: Print tests that will be covered
143+
- name: Print Unique Test IDs that will be covered
145144
if: ${{ steps.pre-patch-fill.outputs.any_modified_fixtures == 'true' || steps.ported-from.outputs.any_ported == 'true' }}
146145
run: |
147-
echo "Original BASE tests:"
148-
ls ${{ github.workspace }}/evmtest_coverage/coverage/BASE_TESTS
149-
echo "--------------------"
150-
echo "Ported PATCH tests:"
151-
ls ${{ github.workspace }}/evmtest_coverage/coverage/PATCH_TESTS
146+
echo "=== Original BASE (main) test IDs ==="
147+
uv run python -c "import json; data=json.load(open('${{ github.workspace }}/evmtest_coverage/coverage/BASE_TESTS/.meta/index.json')); [print(tc['id']) for tc in data['test_cases']]"
148+
echo "=== Ported PATCH test IDs ==="
149+
uv run python -c "import json; data=json.load(open('${{ github.workspace }}/evmtest_coverage/coverage/PATCH_TESTS/.meta/index.json')); [print(tc['id']) for tc in data['test_cases']]"
150+
echo "=== SUMMARY ==="
151+
uv run python -c "import json; base=json.load(open('${{ github.workspace }}/evmtest_coverage/coverage/BASE_TESTS/.meta/index.json')); patch=json.load(open('${{ github.workspace }}/evmtest_coverage/coverage/PATCH_TESTS/.meta/index.json')); print(f'BASE: {len(base[\"test_cases\"])} tests, PATCH: {len(patch[\"test_cases\"])} tests, Difference: {len(patch[\"test_cases\"]) - len(base[\"test_cases\"])}')"
152152
153153
- name: Run coverage of the BASE tests
154154
uses: addnab/docker-run-action@4f65fabd2431ebc8d299f8e5a018d79a769ae185

src/cli/compare_fixtures.py

Lines changed: 53 additions & 23 deletions
Original file line number | Diff line number | Diff line change
@@ -9,8 +9,9 @@
99
import json
1010
import shutil
1111
import sys
12+
from collections import defaultdict
1213
from pathlib import Path
13-
from typing import Set
14+
from typing import List, Set
1415

1516
import click
1617

@@ -49,12 +50,19 @@ def find_duplicates(base_hashes: Set[HexNumber], patch_hashes: Set[HexNumber]) -
4950
return base_hashes & patch_hashes
5051

5152

52-
def pop_by_hash(index: IndexFile, fixture_hash: HexNumber) -> TestCaseIndexFile:
53-
"""Pops a single test case from an index file by its hash."""
54-
for i in range(len(index.test_cases)):
55-
if index.test_cases[i].fixture_hash == fixture_hash:
56-
return index.test_cases.pop(i)
57-
raise Exception(f"Hash {fixture_hash} not found in index.")
53+
def pop_all_by_hash(index: IndexFile, fixture_hash: HexNumber) -> List[TestCaseIndexFile]:
54+
"""Pops all test cases from an index file by their hash."""
55+
test_cases = []
56+
remaining_cases = []
57+
for test_case in index.test_cases:
58+
if test_case.fixture_hash == fixture_hash:
59+
test_cases.append(test_case)
60+
else:
61+
remaining_cases.append(test_case)
62+
if not test_cases:
63+
raise Exception(f"Hash {fixture_hash} not found in index.")
64+
index.test_cases = remaining_cases
65+
return test_cases
5866

5967

6068
def remove_fixture_from_file(file: Path, test_case_id: str):
@@ -70,19 +78,19 @@ def remove_fixture_from_file(file: Path, test_case_id: str):
7078
raise KeyError(f"Test case {test_case_id} not found in {file}") from None
7179

7280

73-
def remove_fixture(
74-
folder: Path,
75-
index: IndexFile,
76-
fixture_hash: HexNumber,
77-
dry_run: bool,
78-
):
79-
"""Remove a single fixture from a folder that matches the given hash."""
80-
test_case = pop_by_hash(index, fixture_hash)
81-
test_case_file = folder / test_case.json_path
82-
if dry_run:
83-
print(f"Remove {test_case.id} from {test_case_file}")
84-
else:
85-
remove_fixture_from_file(test_case_file, test_case.id)
81+
def batch_remove_fixtures_from_files(removals_by_file):
82+
"""Batch process file removals to minimize I/O."""
83+
for file_path, test_case_ids in removals_by_file.items():
84+
try:
85+
full_file = json.loads(file_path.read_text())
86+
for test_case_id in test_case_ids:
87+
full_file.pop(test_case_id, None)
88+
if len(full_file) > 0:
89+
file_path.write_text(json.dumps(full_file, indent=2))
90+
else:
91+
file_path.unlink()
92+
except Exception as e:
93+
print(f"Error processing {file_path}: {e}")
8694

8795

8896
def rewrite_index(folder: Path, index: IndexFile, dry_run: bool):
@@ -144,10 +152,32 @@ def main(
144152
click.echo("Patch folder would be empty after fixture removal.")
145153
sys.exit(0)
146154

155+
# Collect removals by file for batching
156+
base_removals_by_file = defaultdict(list)
157+
patch_removals_by_file = defaultdict(list)
158+
147159
for duplicate_hash in duplicate_hashes:
148-
# Remove from both folders
149-
remove_fixture(base, base_index, duplicate_hash, dry_run)
150-
remove_fixture(patch, patch_index, duplicate_hash, dry_run)
160+
base_test_cases = pop_all_by_hash(base_index, duplicate_hash)
161+
patch_test_cases = pop_all_by_hash(patch_index, duplicate_hash)
162+
163+
for base_test_case in base_test_cases:
164+
base_file = base / base_test_case.json_path
165+
if dry_run:
166+
print(f"Remove {base_test_case.id} from {base_file}")
167+
else:
168+
base_removals_by_file[base_file].append(base_test_case.id)
169+
170+
for patch_test_case in patch_test_cases:
171+
patch_file = patch / patch_test_case.json_path
172+
if dry_run:
173+
print(f"Remove {patch_test_case.id} from {patch_file}")
174+
else:
175+
patch_removals_by_file[patch_file].append(patch_test_case.id)
176+
177+
# Batch process file operations
178+
if not dry_run:
179+
batch_remove_fixtures_from_files(base_removals_by_file)
180+
batch_remove_fixtures_from_files(patch_removals_by_file)
151181

152182
# Rewrite indices if necessary
153183
rewrite_index(base, base_index, dry_run)

0 commit comments

Comments (0)