"""Test mempool update of transaction descendants/ancestors information (count, size)
when transactions have been re-added from a disconnected block to the mempool.
"""
+from decimal import Decimal
from math import ceil
import time

+from test_framework.blocktools import (
+    create_block,
+    create_coinbase,
+)
from test_framework.test_framework import BitcoinTestFramework
-from test_framework.util import assert_equal
+from test_framework.util import assert_equal, assert_raises_rpc_error
from test_framework.wallet import MiniWallet

+MAX_DISCONNECTED_TX_POOL_BYTES = 20_000_000
+
+CUSTOM_ANCESTOR_COUNT = 100
+CUSTOM_DESCENDANT_COUNT = CUSTOM_ANCESTOR_COUNT
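+# MAX_DISCONNECTED_TX_POOL_BYTES is the budget for transactions kept from disconnected blocks during a
+# reorg; the custom ancestor/descendant limits are passed to the node via -limitancestorcount and
+# -limitdescendantcount below.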

class MempoolUpdateFromBlockTest(BitcoinTestFramework):
    def set_test_params(self):
        self.num_nodes = 1
-        self.extra_args = [['-limitdescendantsize=1000', '-limitancestorsize=1000', '-limitancestorcount=100']]
+        # The ancestor and descendant limits are chosen to match the requirements of transaction_graph_test
+        self.extra_args = [['-limitdescendantsize=1000', '-limitancestorsize=1000', f'-limitancestorcount={CUSTOM_ANCESTOR_COUNT}', f'-limitdescendantcount={CUSTOM_DESCENDANT_COUNT}', '-datacarriersize=100000']]
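+        # Note: -datacarriersize is raised because MiniWallet pads large transactions to their target
+        # vsize with an OP_RETURN output, which must stay standard for test_max_disconnect_pool_bytes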
+
+    def create_empty_fork(self, fork_length):
+        '''
+        Create a fork of empty blocks, using the first node's chain tip as the starting point.
+        Returns a list of blocks to be submitted in order.
+        '''
+        tip = int(self.nodes[0].getbestblockhash(), 16)
+        height = self.nodes[0].getblockcount()
+        block_time = self.nodes[0].getblock(self.nodes[0].getbestblockhash())['time'] + 1
+
+        blocks = []
+        for _ in range(fork_length):
+            block = create_block(tip, create_coinbase(height + 1), block_time)
+            block.solve()
+            blocks.append(block)
+            tip = block.sha256
+            block_time += 1
+            height += 1
+
+        return blocks

-    def transaction_graph_test(self, size, n_tx_to_mine=None, fee=100_000):
+    def transaction_graph_test(self, size, *, n_tx_to_mine, fee=100_000):
        """Create an acyclic tournament (a type of directed graph) of transactions and use it for testing.

        Keyword arguments:
        size -- the order N of the tournament which is equal to the number of the created transactions
-        n_tx_to_mine -- the number of transaction that should be mined into a block
+        n_tx_to_mine -- the number of transactions that should be mined into a block

        If all of the N created transactions tx[0]..tx[N-1] reside in the mempool,
        the following holds:
@@ -36,7 +66,11 @@ def transaction_graph_test(self, size, n_tx_to_mine=None, fee=100_000):
        More details: https://en.wikipedia.org/wiki/Tournament_(graph_theory)
        """
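        # For example, with size=4: tx[3] spends one output of each of tx[0], tx[1] and tx[2], so it has
        # 4 ancestors and 1 descendant (counting itself), while tx[0] has 1 ancestor and 4 descendants.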
        wallet = MiniWallet(self.nodes[0])
-        first_block_hash = ''
+
+        # Prepare a fork of empty blocks in advance so that the reorg is triggered by submitting them,
+        # rather than by invalidateblock, which exercises a different code path.
+        fork_blocks = self.create_empty_fork(fork_length=7)
+
        tx_id = []
        tx_size = []
        self.log.info('Creating {} transactions...'.format(size))
@@ -73,17 +107,17 @@ def transaction_graph_test(self, size, n_tx_to_mine=None, fee=100_000):
            if tx_count in n_tx_to_mine:
                # The created transactions are mined into blocks by batches.
                self.log.info('The batch of {} transactions has been accepted into the mempool.'.format(len(self.nodes[0].getrawmempool())))
-                block_hash = self.generate(self.nodes[0], 1)[0]
-                if not first_block_hash:
-                    first_block_hash = block_hash
+                self.generate(self.nodes[0], 1)
                assert_equal(len(self.nodes[0].getrawmempool()), 0)
                self.log.info('All of the transactions from the current batch have been mined into a block.')
            elif tx_count == size:
-                # At the end all of the mined blocks are invalidated, and all of the created
+                # At the end the old fork is submitted to cause a reorg, and all of the created
                # transactions should be re-added from disconnected blocks to the mempool.
                self.log.info('The last batch of {} transactions has been accepted into the mempool.'.format(len(self.nodes[0].getrawmempool())))
                start = time.time()
-                self.nodes[0].invalidateblock(first_block_hash)
+                # Trigger the reorg by submitting the prepared fork of empty blocks
+                for block in fork_blocks:
+                    self.nodes[0].submitblock(block.serialize().hex())
                end = time.time()
                assert_equal(len(self.nodes[0].getrawmempool()), size)
                self.log.info('All of the recently mined transactions have been re-added into the mempool in {} seconds.'.format(end - start))
@@ -97,10 +131,100 @@ def transaction_graph_test(self, size, n_tx_to_mine=None, fee=100_000):
            assert_equal(entry['ancestorcount'], k + 1)
            assert_equal(entry['ancestorsize'], sum(tx_size[0:(k + 1)]))

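+        # Mine the remaining mempool contents and refresh MiniWallet's UTXO set so that the next
+        # sub-test starts from a clean state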
+        self.generate(self.nodes[0], 1)
+        assert_equal(self.nodes[0].getrawmempool(), [])
+        wallet.rescan_utxos()
+
+    def test_max_disconnect_pool_bytes(self):
+        self.log.info('Creating independent transactions to test MAX_DISCONNECTED_TX_POOL_BYTES limit during reorg')
+
+        # Generate coins for the hundreds of transactions we will make
+        parent_target_vsize = 100_000
+        wallet = MiniWallet(self.nodes[0])
+        self.generate(wallet, (MAX_DISCONNECTED_TX_POOL_BYTES // parent_target_vsize) + 100)
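+        # That is one coinbase output per planned parent transaction, plus 100 blocks so that the
+        # coinbases spent below are mature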
+
+        assert_equal(self.nodes[0].getrawmempool(), [])
+
+        # Set up the empty fork blocks ahead of time; the fork needs to be longer than the non-empty
+        # chain mined later
+        fork_blocks = self.create_empty_fork(fork_length=60)
+
+        large_std_txs = []
+        # Also create children, to check that they are recursively removed whenever disconnectpool
+        # trimming evicts their parent
+        small_child_txs = []
+        aggregate_serialized_size = 0
+        while aggregate_serialized_size < MAX_DISCONNECTED_TX_POOL_BYTES:
+            # Use descending fees so that the parents are mined in creation (FIFO) order
+            large_std_txs.append(wallet.create_self_transfer(target_vsize=parent_target_vsize, fee=Decimal("0.00400000") - (Decimal("0.00001000") * len(large_std_txs))))
+            small_child_txs.append(wallet.create_self_transfer(utxo_to_spend=large_std_txs[-1]['new_utxo']))
+            # Slight underestimate of the dynamic memory cost, so the limit will be exceeded during the reorg
+            aggregate_serialized_size += len(large_std_txs[-1]["tx"].serialize())
+
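+        # The loop above creates roughly MAX_DISCONNECTED_TX_POOL_BYTES / parent_target_vsize parent/child
+        # pairs; because each entry's in-memory cost exceeds its serialized size, the budget is exceeded
+        # during the reorg and the most recently confirmed parents are trimmed.
+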
+        for large_std_tx in large_std_txs:
+            self.nodes[0].sendrawtransaction(large_std_tx["hex"])
+
+        assert_equal(self.nodes[0].getmempoolinfo()["size"], len(large_std_txs))
+
+        # Mine a non-empty chain, one block shorter than the prepared empty fork, that will be reorged
+        # away shortly
+        self.generate(self.nodes[0], len(fork_blocks) - 1)
+        assert_equal(self.nodes[0].getrawmempool(), [])
+
+        # Put the children into the mempool; they may be evicted along with their parents
+        for small_child_tx in small_child_txs:
+            self.nodes[0].sendrawtransaction(small_child_tx["hex"])
+
+        assert_equal(self.nodes[0].getmempoolinfo()["size"], len(small_child_txs))
+
+        # Reorg back to before the first block in the series. This should drop some of the transactions,
+        # but not all of them, and whenever a parent is dropped its child must be removed as well.
+        for block in fork_blocks:
+            self.nodes[0].submitblock(block.serialize().hex())
+        mempool = self.nodes[0].getrawmempool()
+        expected_parent_count = len(large_std_txs) - 2
+        assert_equal(len(mempool), expected_parent_count * 2)
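+        # Each surviving parent still has its child in the mempool, hence the factor of two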
+
+        # The transactions at the end of the list, i.e. the most recently confirmed ones, should have been trimmed
+        assert_equal([tx["txid"] in mempool for tx in large_std_txs], [tx["txid"] in mempool for tx in small_child_txs])
+        assert_equal([tx["txid"] in mempool for tx in large_std_txs], [True] * expected_parent_count + [False] * 2)
+
+    def test_chainlimits_exceeded(self):
+        self.log.info('Check that too-long chains are handled on reorg')
+
+        wallet = MiniWallet(self.nodes[0])
+        self.generate(wallet, 101)
+
+        assert_equal(self.nodes[0].getrawmempool(), [])
+
+        # Prepare the fork
+        fork_blocks = self.create_empty_fork(fork_length=10)
+
+        # Create a chain two transactions longer than the descendant count limit
+        chain = wallet.create_self_transfer_chain(chain_length=CUSTOM_DESCENDANT_COUNT + 2)
+        for tx in chain[:-2]:
+            self.nodes[0].sendrawtransaction(tx["hex"])
+
+        assert_raises_rpc_error(-26, "too-long-mempool-chain, too many unconfirmed ancestors [limit: 100]", self.nodes[0].sendrawtransaction, chain[-2]["hex"])
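+        # chain[:-2] already puts CUSTOM_ANCESTOR_COUNT unconfirmed transactions in the mempool, so the
+        # next link in the chain is rejected for exceeding the ancestor limit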
+
+        # Mine a block with all but the last transaction, creating a non-standardly long chain
+        self.generateblock(self.nodes[0], output="raw(42)", transactions=[tx["hex"] for tx in chain[:-1]])
+        assert_equal(self.nodes[0].getrawmempool(), [])
+
+        # The last tx fits now
+        self.nodes[0].sendrawtransaction(chain[-1]["hex"])
+
+        # Finally, reorg to the chain of empty blocks to kick everything back into the mempool; the
+        # normal chain limits apply again, so the transactions beyond them are not re-accepted
+        for block in fork_blocks:
+            self.nodes[0].submitblock(block.serialize().hex())
+        mempool = self.nodes[0].getrawmempool()
+        assert_equal(set(mempool), set([tx["txid"] for tx in chain[:-2]]))
+
    def run_test(self):
-        # Use batch size limited by DEFAULT_ANCESTOR_LIMIT = 25 to not fire "too many unconfirmed parents" error.
-        self.transaction_graph_test(size=100, n_tx_to_mine=[25, 50, 75])
+        # Mine in batches of 25 to test multi-block reorg under chain limits
+        self.transaction_graph_test(size=CUSTOM_ANCESTOR_COUNT, n_tx_to_mine=[25, 50, 75])
+
+        self.test_max_disconnect_pool_bytes()

+        self.test_chainlimits_exceeded()

if __name__ == '__main__':
    MempoolUpdateFromBlockTest(__file__).main()