@@ -919,6 +919,10 @@ async def suppress_value_error(coro: Coroutine[Any, Any, None]) -> None:


 @pytest.mark.anyio
+@pytest.mark.limit_consensus_modes(
+    allowed=[ConsensusMode.HARD_FORK_2_0, ConsensusMode.HARD_FORK_3_0],
+    reason="We can no longer (reliably) farm blocks from before the hard fork",
+)
 async def test_new_transaction_and_mempool(
     wallet_nodes: tuple[
         FullNodeSimulator, FullNodeSimulator, ChiaServer, ChiaServer, WalletTool, WalletTool, BlockTools
@@ -946,8 +950,10 @@ async def test_new_transaction_and_mempool(

     # Makes a bunch of coins
     conditions_dict: dict[ConditionOpcode, list[ConditionWithArgs]] = {ConditionOpcode.CREATE_COIN: []}
-    # This should fit in one transaction
-    for _ in range(100):
+    # This should fit in one transaction. The test constants have a max block cost of 400,000,000
+    # and the default max *transaction* cost is half that, so 200,000,000. CREATE_COIN has a cost of
+    # 1,800,000, so we create 80 coins
+    for _ in range(80):
         receiver_puzzlehash = wallet_receiver.get_new_puzzlehash()
         puzzle_hashes.append(receiver_puzzlehash)
         output = ConditionWithArgs(ConditionOpcode.CREATE_COIN, [receiver_puzzlehash, int_to_bytes(10000000000)])
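Note: the budget arithmetic in the new comment can be checked directly. The sketch below uses only the numbers quoted in the comment; the constant names are illustrative stand-ins, not the repository's actual identifiers.

```python
# Sanity check of the cost budget described in the diff's comment.
# Values come straight from the comment; names are illustrative only.
MAX_BLOCK_COST_CLVM = 400_000_000       # max block cost in the test constants
MAX_TX_COST = MAX_BLOCK_COST_CLVM // 2  # default per-transaction limit: 200,000,000
CREATE_COIN_COST = 1_800_000            # cost of a single CREATE_COIN condition

num_coins = 80
create_coin_cost = num_coins * CREATE_COIN_COST  # 144,000,000
# Fits within the per-transaction budget, leaving headroom for byte and signature costs
assert create_coin_cost < MAX_TX_COST
```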
@@ -1046,8 +1052,8 @@ async def test_new_transaction_and_mempool(
     # these numbers reflect the capacity of the mempool. In these
     # tests MEMPOOL_BLOCK_BUFFER is 1. The other factors are COST_PER_BYTE
     # and MAX_BLOCK_COST_CLVM
-    assert included_tx == 23
-    assert not_included_tx == 10
+    assert included_tx == 20
+    assert not_included_tx == 7
     assert seen_bigger_transaction_has_high_fee

     # Mempool is full
@@ -1882,7 +1888,9 @@ async def test_new_signage_point_caching(
 ) -> None:
     full_node_1, _full_node_2, server_1, server_2, _wallet_a, _wallet_receiver, bt = wallet_nodes
     blocks = await full_node_1.get_all_full_blocks()
-
+    assert full_node_1.full_node.full_node_store.get_signage_point_by_index_and_cc_output(
+        bytes32.zeros, full_node_1.full_node.constants.GENESIS_CHALLENGE, uint8(0)
+    ) == SignagePoint(None, None, None, None)
     peer = await connect_and_get_peer(server_1, server_2, self_hostname)
     blocks = bt.get_consecutive_blocks(3, block_list_input=blocks, skip_slots=2)
     await full_node_1.full_node.add_block(blocks[-3])
@@ -1951,10 +1959,6 @@ async def test_new_signage_point_caching(
         is not None
     )

-    assert full_node_1.full_node.full_node_store.get_signage_point_by_index_and_cc_output(
-        full_node_1.full_node.constants.GENESIS_CHALLENGE, bytes32.zeros, uint8(0)
-    ) == SignagePoint(None, None, None, None)
-

 @pytest.mark.anyio
 async def test_slot_catch_up_genesis(
@@ -3294,3 +3298,62 @@ def compare_unfinished_blocks(block1: UnfinishedBlock, block2: UnfinishedBlock)
     # Final assertion to check the entire block
     assert block1 == block2, "The entire block objects are not identical"
     return True
+
+
+@pytest.mark.anyio
+@pytest.mark.parametrize(
+    "condition, error",
+    [
+        (ConditionOpcode.ASSERT_HEIGHT_RELATIVE, "ASSERT_HEIGHT_RELATIVE_FAILED"),
+        (ConditionOpcode.ASSERT_HEIGHT_ABSOLUTE, "ASSERT_HEIGHT_ABSOLUTE_FAILED"),
+    ],
+)
+async def test_pending_tx_cache_retry_on_new_peak(
+    condition: ConditionOpcode, error: str, blockchain_constants: ConsensusConstants, caplog: pytest.LogCaptureFixture
+) -> None:
+    """
+    Covers PendingTXCache items that are placed there due to unmet relative or
+    absolute height conditions, to make sure those items get retried at peak
+    post processing when those conditions are met.
+    """
+    async with setup_simulators_and_wallets(1, 0, blockchain_constants) as new:
+        full_node_api = new.simulators[0].peer_api
+        bt = new.bt
+        wallet = WalletTool(test_constants)
+        ph = wallet.get_new_puzzlehash()
+        blocks = bt.get_consecutive_blocks(
+            3, guarantee_transaction_block=True, farmer_reward_puzzle_hash=ph, pool_reward_puzzle_hash=ph
+        )
+        for block in blocks:
+            await full_node_api.full_node.add_block(block)
+        peak = full_node_api.full_node.blockchain.get_peak()
+        assert peak is not None
+        current_height = peak.height
+        # Create a transaction with a height condition that makes it pending
+        coin = blocks[-1].get_included_reward_coins()[0]
+        if condition == ConditionOpcode.ASSERT_HEIGHT_RELATIVE:
+            condition_height = 1
+        else:
+            condition_height = current_height + 1
+        condition_dic = {condition: [ConditionWithArgs(condition, [int_to_bytes(condition_height)])]}
+        sb = wallet.generate_signed_transaction(uint64(42), ph, coin, condition_dic)
+        sb_name = sb.name()
+        # Send the transaction
+        res = await full_node_api.send_transaction(SendTransaction(sb))
+        assert res is not None
+        assert ProtocolMessageTypes(res.type) == ProtocolMessageTypes.transaction_ack
+        transaction_ack = TransactionAck.from_bytes(res.data)
+        assert transaction_ack.status == MempoolInclusionStatus.PENDING.value
+        assert transaction_ack.error == error
+        # Make sure it ends up in the pending cache, not the mempool
+        assert full_node_api.full_node.mempool_manager.get_mempool_item(sb_name, include_pending=False) is None
+        assert full_node_api.full_node.mempool_manager.get_mempool_item(sb_name, include_pending=True) is not None
+        # Advance peak to meet the asserted height condition
+        with caplog.at_level(logging.DEBUG):
+            blocks = bt.get_consecutive_blocks(2, block_list_input=blocks, guarantee_transaction_block=True)
+            for block in blocks:
+                await full_node_api.full_node.add_block(block)
+            # This should trigger peak post processing with the added transaction
+            assert f"Added transaction to mempool: {sb_name}\n" in caplog.text
+        # Make sure the transaction was retried and got added to the mempool
+        assert full_node_api.full_node.mempool_manager.get_mempool_item(sb_name, include_pending=False) is not None
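For readers unfamiliar with the mechanism the docstring describes, here is a minimal, self-contained sketch of the retry-on-new-peak pattern. It is not chia's MempoolManager or pending-cache implementation, just an illustration of the behaviour the test asserts: items parked because of an unmet height condition are re-validated once the peak advances far enough.

```python
# Toy illustration only; names and structure are hypothetical, not chia's code.
from dataclasses import dataclass, field


@dataclass
class ToyPendingCache:
    # maps item name -> minimum peak height at which the item becomes valid
    pending: dict[str, int] = field(default_factory=dict)
    mempool: set[str] = field(default_factory=set)

    def add(self, name: str, valid_at_height: int, peak: int) -> None:
        # accept immediately if the height condition is already met, else park it
        if peak >= valid_at_height:
            self.mempool.add(name)
        else:
            self.pending[name] = valid_at_height

    def new_peak(self, peak: int) -> None:
        # peak post-processing: retry every parked item whose condition is now met
        for name, height in list(self.pending.items()):
            if peak >= height:
                self.mempool.add(name)
                del self.pending[name]


cache = ToyPendingCache()
cache.add("sb", valid_at_height=4, peak=2)  # parked: peak 2 < required height 4
cache.new_peak(4)                           # retried once the peak reaches 4
assert "sb" in cache.mempool and "sb" not in cache.pending
```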