"""Test blockchain sync fixture generation with the verify_sync parameter."""

import textwrap

from ethereum_clis import TransitionTool

test_module_with_sync = textwrap.dedent(
    """\
    import pytest
    from ethereum_test_tools import (
        Account,
        Block,
        BlockException,
        Header,
        TestAddress,
        Transaction,
    )

    TEST_ADDRESS = Account(balance=1_000_000)

    @pytest.mark.valid_at("Cancun")
    def test_sync_default(blockchain_test):
        # verify_sync defaults to False, so no sync fixture is generated.
        blockchain_test(
            pre={TestAddress: TEST_ADDRESS},
            post={},
            blocks=[Block(txs=[Transaction()])],
        )

    @pytest.mark.valid_at("Cancun")
    def test_sync_true(blockchain_test):
        # verify_sync=True additionally generates a sync fixture.
        blockchain_test(
            verify_sync=True,
            pre={TestAddress: TEST_ADDRESS},
            post={},
            blocks=[Block(txs=[Transaction()])],
        )

    @pytest.mark.valid_at("Cancun")
    def test_sync_false(blockchain_test):
        # An explicit verify_sync=False behaves like the default.
        blockchain_test(
            verify_sync=False,
            pre={TestAddress: TEST_ADDRESS},
            post={},
            blocks=[Block(txs=[Transaction()])],
        )

    @pytest.mark.valid_at("Cancun")
    @pytest.mark.parametrize("sync", [True, False])
    def test_sync_conditional(blockchain_test, sync):
        # verify_sync can be driven by a parametrized value.
        blockchain_test(
            pre={TestAddress: TEST_ADDRESS},
            post={},
            blocks=[Block(txs=[Transaction()])],
            verify_sync=sync,
        )

    @pytest.mark.valid_at("Cancun")
    @pytest.mark.parametrize(
        "has_exception",
        [
            pytest.param(False, id="no_exception"),
            pytest.param(
                True, id="with_exception", marks=pytest.mark.exception_test
            ),
        ],
    )
    def test_sync_with_exception(blockchain_test, has_exception):
        # The invalid case corrupts the block header via rlp_modifier and
        # declares the expected exception; the exception_test marker then
        # suppresses sync fixture generation for that case entirely.
        blockchain_test(
            pre={TestAddress: TEST_ADDRESS},
            post={},
            blocks=[
                Block(
                    txs=[Transaction()],
                    rlp_modifier=Header(gas_limit=0) if has_exception else None,
                    exception=(
                        BlockException.INCORRECT_BLOCK_FORMAT
                        if has_exception
                        else None
                    ),
                )
            ],
            verify_sync=not has_exception,
        )
    """
)
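

# Illustrative sketch only, not part of the plugin under test: the gating rule
# exercised by the module above, written out as a standalone helper so the
# expected outcome per test case is easy to read. The name
# `expected_sync_fixture_outcome` is hypothetical.
def expected_sync_fixture_outcome(verify_sync: bool, is_exception_test: bool) -> str:
    """Return the expected result for the BlockchainEngineSyncFixture format.

    - exception_test cases never parametrize a sync fixture ("not generated"),
    - verify_sync=True cases produce one ("passed"),
    - verify_sync=False cases collect but skip it ("skipped").
    """
    if is_exception_test:
        return "not generated"
    return "passed" if verify_sync else "skipped"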


def test_blockchain_sync_marker(
    pytester,
    default_t8n: TransitionTool,
):
    """
    Test blockchain sync fixture generation with the exception_test marker.

    The test module has 5 test functions (7 test cases with parametrization):
    - test_sync_default: generates all formats except sync (verify_sync defaults to False)
    - test_sync_true: generates all formats including sync (verify_sync=True)
    - test_sync_false: generates all formats except sync (verify_sync=False)
    - test_sync_conditional: generates formats based on the sync parameter (2 cases)
    - test_sync_with_exception: tests exception_test marker behavior (2 cases)

    Each test case generates the following fixture formats:
    - BlockchainFixture (always)
    - BlockchainEngineFixture (always)
    - BlockchainEngineSyncFixture (only when verify_sync=True AND the test is
      not marked with exception_test)

    Expected outcomes:
    - 7 test cases total
    - Each generates BlockchainFixture (7) and BlockchainEngineFixture (7) = 14 fixtures
    - Sync fixtures generated:
      - test_sync_true: 1 sync fixture ✓
      - test_sync_conditional[True]: 1 sync fixture ✓
      - test_sync_with_exception[no_exception]: 1 sync fixture ✓
      - Total sync fixtures: 3
    - Skipped sync fixtures:
      - test_sync_default: 1 skipped
      - test_sync_false: 1 skipped
      - test_sync_conditional[False]: 1 skipped
      - Total skipped: 3
    - Not generated (due to the exception_test marker):
      - test_sync_with_exception[with_exception]: sync fixture not generated at all

    Final counts:
    - Passed: 14 (base fixtures) + 3 (sync fixtures) = 17
    - Skipped: 3
    - Failed: 0
    """
    # Create the directory structure that fill expects for test modules.
    tests_dir = pytester.mkdir("tests")
    cancun_tests_dir = tests_dir / "cancun"
    cancun_tests_dir.mkdir()
    sync_test_dir = cancun_tests_dir / "sync_test_module"
    sync_test_dir.mkdir()
    test_module = sync_test_dir / "test_sync_marker.py"
    test_module.write_text(test_module_with_sync)

    pytester.copy_example(name="src/cli/pytest_commands/pytest_ini_files/pytest-fill.ini")

    # Build the fill invocation, pointing it at the generated test directory.
    args = [
        "-c",
        "pytest-fill.ini",
        "-v",
        "--no-html",
        "--t8n-server-url",
        default_t8n.server_url,
        "tests/cancun/sync_test_module/",
    ]
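
    # Note: assuming the repository's standard `fill` entry point wraps this
    # pytest configuration, an equivalent standalone invocation would be:
    #   fill -v --no-html --t8n-server-url <url> tests/cancun/sync_test_module/
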
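    # Derivation of the counts asserted below (see the docstring above):
    # 7 test cases x 2 always-generated formats = 14 passed, plus 3 sync
    # fixtures for the verify_sync=True cases = 17 passed. The 3 cases with
    # verify_sync=False skip their sync fixture (3 skipped), and the
    # exception_test case never collects a sync fixture at all.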
    expected_outcomes = {"passed": 17, "failed": 0, "skipped": 3, "errors": 0}

    result = pytester.runpytest(*args)
    result.assert_outcomes(**expected_outcomes)