Skip to content

Commit 38975ec

Browse files
author
MarcoFalke
committed
Merge bitcoin/bitcoin#22229: test: consolidate to f-strings (part 1)
68faa87 test: use f-strings in mining_*.py tests (fanquake) c2a5d56 test: use f-strings in interface_*.py tests (fanquake) 86d9582 test: use f-strings in feature_proxy.py (fanquake) 31bdb33 test: use f-strings in feature_segwit.py (fanquake) b166d54 test: use f-strings in feature_versionbits_warning.py (fanquake) cf6d66b test: use f-strings in feature_settings.py (fanquake) 6651d77 test: use f-strings in feature_pruning.py (fanquake) 961f581 test: use f-strings in feature_notifications.py (fanquake) 1a546e6 test: use f-strings in feature_minchainwork.py (fanquake) 6679ece test: use f-strings in feature_logging.py (fanquake) fb63393 test: use f-strings in feature_loadblock.py (fanquake) e9ca8b2 test: use f-strings in feature_help.py (fanquake) ff7e330 test: use f-strings in feature_filelock.py (fanquake) d5a6adc test: use f-strings in feature_fee_estimation.py (fanquake) a2de33c test: use f-strings in feature_dersig.py (fanquake) a2502cc test: use f-strings in feature_dbcrash.py (fanquake) 3e2f84e test: use f-strings in feature_csv_activation.py (fanquake) e2f1fd8 test: use f-strings in feature_config_args.py (fanquake) 36d33d3 test: use f-strings in feature_cltv.py (fanquake) dca173c test: use f-strings in feature_blocksdir.py (fanquake) 5453e87 test: use f-strings in feature_backwards_compatibility.py (fanquake) 6f3d5ad test: use f-strings in feature_asmap.py (fanquake) Pull request description: Rather than using 3 different ways to build/format strings (sometimes all in the same test, i.e [`feature_config_args.py`](https://github.com/bitcoin/bitcoin/blob/master/test/functional/feature_config_args.py)), consolidate to using [f-strings (3.6+)](https://docs.python.org/3/reference/lexical_analysis.html#f-strings), which are generally more concise / readable, as well as more performant than existing methods. This deals with the `feature_*.py`, `interface_*.py` and `mining_*.py` tests. 
See also: [PEP 498](https://www.python.org/dev/peps/pep-0498/) ACKs for top commit: mjdietzx: reACK 68faa87 Zero-1729: crACK 68faa87 Tree-SHA512: d4e1a42e07d96d2c552387a46da1534223c4ce408703d7568ad2ef580797dd68d9695b8d19666b567af37f44de6e430e8be5db5d5404ba8fcecf9f5b026a6efb
2 parents 9049935 + 68faa87 commit 38975ec

26 files changed

+171
-175
lines changed

test/functional/feature_asmap.py

Lines changed: 7 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -31,8 +31,8 @@
3131
VERSION = 'fec61fa21a9f46f3b17bdcd660d7f4cd90b966aad3aec593c99b35f0aca15853'
3232

3333
def expected_messages(filename):
34-
return ['Opened asmap file "{}" (59 bytes) from disk'.format(filename),
35-
'Using asmap version {} for IP bucketing'.format(VERSION)]
34+
return [f'Opened asmap file "{filename}" (59 bytes) from disk',
35+
f'Using asmap version {VERSION} for IP bucketing']
3636

3737
class AsmapTest(BitcoinTestFramework):
3838
def set_test_params(self):
@@ -50,7 +50,7 @@ def test_asmap_with_absolute_path(self):
5050
filename = os.path.join(self.datadir, 'my-map-file.map')
5151
shutil.copyfile(self.asmap_raw, filename)
5252
with self.node.assert_debug_log(expected_messages(filename)):
53-
self.start_node(0, ['-asmap={}'.format(filename)])
53+
self.start_node(0, [f'-asmap={filename}'])
5454
os.remove(filename)
5555

5656
def test_asmap_with_relative_path(self):
@@ -60,13 +60,13 @@ def test_asmap_with_relative_path(self):
6060
filename = os.path.join(self.datadir, name)
6161
shutil.copyfile(self.asmap_raw, filename)
6262
with self.node.assert_debug_log(expected_messages(filename)):
63-
self.start_node(0, ['-asmap={}'.format(name)])
63+
self.start_node(0, [f'-asmap={name}'])
6464
os.remove(filename)
6565

6666
def test_default_asmap(self):
6767
shutil.copyfile(self.asmap_raw, self.default_asmap)
6868
for arg in ['-asmap', '-asmap=']:
69-
self.log.info('Test bitcoind {} (using default map file)'.format(arg))
69+
self.log.info(f'Test bitcoind {arg} (using default map file)')
7070
self.stop_node(0)
7171
with self.node.assert_debug_log(expected_messages(self.default_asmap)):
7272
self.start_node(0, [arg])
@@ -75,15 +75,15 @@ def test_default_asmap(self):
7575
def test_default_asmap_with_missing_file(self):
7676
self.log.info('Test bitcoind -asmap with missing default map file')
7777
self.stop_node(0)
78-
msg = "Error: Could not find asmap file \"{}\"".format(self.default_asmap)
78+
msg = f"Error: Could not find asmap file \"{self.default_asmap}\""
7979
self.node.assert_start_raises_init_error(extra_args=['-asmap'], expected_msg=msg)
8080

8181
def test_empty_asmap(self):
8282
self.log.info('Test bitcoind -asmap with empty map file')
8383
self.stop_node(0)
8484
with open(self.default_asmap, "w", encoding="utf-8") as f:
8585
f.write("")
86-
msg = "Error: Could not parse asmap file \"{}\"".format(self.default_asmap)
86+
msg = f"Error: Could not parse asmap file \"{self.default_asmap}\""
8787
self.node.assert_start_raises_init_error(extra_args=['-asmap'], expected_msg=msg)
8888
os.remove(self.default_asmap)
8989

test/functional/feature_backwards_compatibility.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -366,7 +366,7 @@ def run_test(self):
366366
assert_equal(load_res['warning'], '')
367367
wallet = node_master.get_wallet_rpc("u1_v16")
368368
info = wallet.getaddressinfo(v16_addr)
369-
descriptor = "wpkh([" + info["hdmasterfingerprint"] + hdkeypath[1:] + "]" + v16_pubkey + ")"
369+
descriptor = f"wpkh([{info['hdmasterfingerprint']}{hdkeypath[1:]}]{v16_pubkey})"
370370
assert_equal(info["desc"], descsum_create(descriptor))
371371

372372
# Now copy that same wallet back to 0.16 to make sure no automatic upgrade breaks it
@@ -389,7 +389,7 @@ def run_test(self):
389389
node_master.loadwallet("u1_v17")
390390
wallet = node_master.get_wallet_rpc("u1_v17")
391391
info = wallet.getaddressinfo(address)
392-
descriptor = "wpkh([" + info["hdmasterfingerprint"] + hdkeypath[1:] + "]" + pubkey + ")"
392+
descriptor = f"wpkh([{info['hdmasterfingerprint']}{hdkeypath[1:]}]{pubkey})"
393393
assert_equal(info["desc"], descsum_create(descriptor))
394394

395395
# Now copy that same wallet back to 0.17 to make sure no automatic upgrade breaks it

test/functional/feature_blocksdir.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -24,10 +24,10 @@ def run_test(self):
2424
initialize_datadir(self.options.tmpdir, 0, self.chain)
2525
self.log.info("Starting with nonexistent blocksdir ...")
2626
blocksdir_path = os.path.join(self.options.tmpdir, 'blocksdir')
27-
self.nodes[0].assert_start_raises_init_error(["-blocksdir=" + blocksdir_path], 'Error: Specified blocks directory "{}" does not exist.'.format(blocksdir_path))
27+
self.nodes[0].assert_start_raises_init_error([f"-blocksdir={blocksdir_path}"], f'Error: Specified blocks directory "{blocksdir_path}" does not exist.')
2828
os.mkdir(blocksdir_path)
2929
self.log.info("Starting with existing blocksdir ...")
30-
self.start_node(0, ["-blocksdir=" + blocksdir_path])
30+
self.start_node(0, [f"-blocksdir={blocksdir_path}"])
3131
self.log.info("mining blocks..")
3232
self.nodes[0].generatetoaddress(10, self.nodes[0].get_deterministic_priv_key().address)
3333
assert os.path.isfile(os.path.join(blocksdir_path, self.chain, "blocks", "blk00000.dat"))

test/functional/feature_cltv.py

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -135,7 +135,7 @@ def run_test(self):
135135
block.nVersion = 3
136136
block.solve()
137137

138-
with self.nodes[0].assert_debug_log(expected_msgs=['{}, bad-version(0x00000003)'.format(block.hash)]):
138+
with self.nodes[0].assert_debug_log(expected_msgs=[f'{block.hash}, bad-version(0x00000003)']):
139139
peer.send_and_ping(msg_block(block))
140140
assert_equal(int(self.nodes[0].getbestblockhash(), 16), tip)
141141
peer.sync_with_ping()
@@ -173,8 +173,7 @@ def run_test(self):
173173
block.hashMerkleRoot = block.calc_merkle_root()
174174
block.solve()
175175

176-
with self.nodes[0].assert_debug_log(expected_msgs=['CheckInputScripts on {} failed with {}'.format(
177-
block.vtx[-1].hash, expected_cltv_reject_reason)]):
176+
with self.nodes[0].assert_debug_log(expected_msgs=[f'CheckInputScripts on {block.vtx[-1].hash} failed with {expected_cltv_reject_reason}']):
178177
peer.send_and_ping(msg_block(block))
179178
assert_equal(int(self.nodes[0].getbestblockhash(), 16), tip)
180179
peer.sync_with_ping()

test/functional/feature_config_args.py

Lines changed: 13 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,7 @@ def test_config_file_parser(self):
2424

2525
inc_conf_file_path = os.path.join(self.nodes[0].datadir, 'include.conf')
2626
with open(os.path.join(self.nodes[0].datadir, 'bitcoin.conf'), 'a', encoding='utf-8') as conf:
27-
conf.write('includeconf={}\n'.format(inc_conf_file_path))
27+
conf.write(f'includeconf={inc_conf_file_path}\n')
2828

2929
self.nodes[0].assert_start_raises_init_error(
3030
expected_msg='Error: Error parsing command line arguments: Invalid parameter -dash_cli=1',
@@ -43,13 +43,13 @@ def test_config_file_parser(self):
4343
if self.is_wallet_compiled():
4444
with open(inc_conf_file_path, 'w', encoding='utf8') as conf:
4545
conf.write("wallet=foo\n")
46-
self.nodes[0].assert_start_raises_init_error(expected_msg='Error: Config setting for -wallet only applied on %s network when in [%s] section.' % (self.chain, self.chain))
46+
self.nodes[0].assert_start_raises_init_error(expected_msg=f'Error: Config setting for -wallet only applied on {self.chain} network when in [{self.chain}] section.')
4747

4848
main_conf_file_path = os.path.join(self.options.tmpdir, 'node0', 'bitcoin_main.conf')
49-
util.write_config(main_conf_file_path, n=0, chain='', extra_config='includeconf={}\n'.format(inc_conf_file_path))
49+
util.write_config(main_conf_file_path, n=0, chain='', extra_config=f'includeconf={inc_conf_file_path}\n')
5050
with open(inc_conf_file_path, 'w', encoding='utf-8') as conf:
5151
conf.write('acceptnonstdtxn=1\n')
52-
self.nodes[0].assert_start_raises_init_error(extra_args=["-conf={}".format(main_conf_file_path)], expected_msg='Error: acceptnonstdtxn is not currently supported for main chain')
52+
self.nodes[0].assert_start_raises_init_error(extra_args=[f"-conf={main_conf_file_path}"], expected_msg='Error: acceptnonstdtxn is not currently supported for main chain')
5353

5454
with open(inc_conf_file_path, 'w', encoding='utf-8') as conf:
5555
conf.write('nono\n')
@@ -69,14 +69,14 @@ def test_config_file_parser(self):
6969

7070
inc_conf_file2_path = os.path.join(self.nodes[0].datadir, 'include2.conf')
7171
with open(os.path.join(self.nodes[0].datadir, 'bitcoin.conf'), 'a', encoding='utf-8') as conf:
72-
conf.write('includeconf={}\n'.format(inc_conf_file2_path))
72+
conf.write(f'includeconf={inc_conf_file2_path}\n')
7373

7474
with open(inc_conf_file_path, 'w', encoding='utf-8') as conf:
7575
conf.write('testnot.datadir=1\n')
7676
with open(inc_conf_file2_path, 'w', encoding='utf-8') as conf:
7777
conf.write('[testnet]\n')
7878
self.restart_node(0)
79-
self.nodes[0].stop_node(expected_stderr='Warning: ' + inc_conf_file_path + ':1 Section [testnot] is not recognized.' + os.linesep + inc_conf_file2_path + ':1 Section [testnet] is not recognized.')
79+
self.nodes[0].stop_node(expected_stderr=f'Warning: {inc_conf_file_path}:1 Section [testnot] is not recognized.{os.linesep}{inc_conf_file2_path}:1 Section [testnet] is not recognized.')
8080

8181
with open(inc_conf_file_path, 'w', encoding='utf-8') as conf:
8282
conf.write('') # clear
@@ -105,8 +105,8 @@ def test_args_log(self):
105105
'Command-line arg: rpcpassword=****',
106106
'Command-line arg: rpcuser=****',
107107
'Command-line arg: torpassword=****',
108-
'Config file arg: %s="1"' % self.chain,
109-
'Config file arg: [%s] server="1"' % self.chain,
108+
f'Config file arg: {self.chain}="1"',
109+
f'Config file arg: [{self.chain}] server="1"',
110110
],
111111
unexpected_msgs=[
112112
'alice:f7efda5c189b999524f151318c0c86$d5b51b3beffbc0',
@@ -235,29 +235,29 @@ def run_test(self):
235235

236236
# Check that using -datadir argument on non-existent directory fails
237237
self.nodes[0].datadir = new_data_dir
238-
self.nodes[0].assert_start_raises_init_error(['-datadir=' + new_data_dir], 'Error: Specified data directory "' + new_data_dir + '" does not exist.')
238+
self.nodes[0].assert_start_raises_init_error([f'-datadir={new_data_dir}'], f'Error: Specified data directory "{new_data_dir}" does not exist.')
239239

240240
# Check that using non-existent datadir in conf file fails
241241
conf_file = os.path.join(default_data_dir, "bitcoin.conf")
242242

243243
# datadir needs to be set before [chain] section
244244
conf_file_contents = open(conf_file, encoding='utf8').read()
245245
with open(conf_file, 'w', encoding='utf8') as f:
246-
f.write("datadir=" + new_data_dir + "\n")
246+
f.write(f"datadir={new_data_dir}\n")
247247
f.write(conf_file_contents)
248248

249-
self.nodes[0].assert_start_raises_init_error(['-conf=' + conf_file], 'Error: Error reading configuration file: specified data directory "' + new_data_dir + '" does not exist.')
249+
self.nodes[0].assert_start_raises_init_error([f'-conf={conf_file}'], f'Error: Error reading configuration file: specified data directory "{new_data_dir}" does not exist.')
250250

251251
# Create the directory and ensure the config file now works
252252
os.mkdir(new_data_dir)
253-
self.start_node(0, ['-conf='+conf_file])
253+
self.start_node(0, [f'-conf={conf_file}'])
254254
self.stop_node(0)
255255
assert os.path.exists(os.path.join(new_data_dir, self.chain, 'blocks'))
256256

257257
# Ensure command line argument overrides datadir in conf
258258
os.mkdir(new_data_dir_2)
259259
self.nodes[0].datadir = new_data_dir_2
260-
self.start_node(0, ['-datadir='+new_data_dir_2, '-conf='+conf_file])
260+
self.start_node(0, [f'-datadir={new_data_dir_2}', f'-conf={conf_file}'])
261261
assert os.path.exists(os.path.join(new_data_dir_2, self.chain, 'blocks'))
262262

263263

test/functional/feature_csv_activation.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -247,7 +247,7 @@ def run_test(self):
247247
self.send_blocks(test_blocks)
248248

249249
assert_equal(self.tipheight, CSV_ACTIVATION_HEIGHT - 2)
250-
self.log.info("Height = {}, CSV not yet active (will activate for block {}, not {})".format(self.tipheight, CSV_ACTIVATION_HEIGHT, CSV_ACTIVATION_HEIGHT - 1))
250+
self.log.info(f"Height = {self.tipheight}, CSV not yet active (will activate for block {CSV_ACTIVATION_HEIGHT}, not {CSV_ACTIVATION_HEIGHT - 1})")
251251
assert not softfork_active(self.nodes[0], 'csv')
252252

253253
# Test both version 1 and version 2 transactions for all tests

test/functional/feature_dbcrash.py

Lines changed: 16 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -102,7 +102,7 @@ def restart_node(self, node_index, expected_tip):
102102
# perhaps we generated a test case that blew up our cache?
103103
# TODO: If this happens a lot, we should try to restart without -dbcrashratio
104104
# and make sure that recovery happens.
105-
raise AssertionError("Unable to successfully restart node %d in allotted time", node_index)
105+
raise AssertionError(f"Unable to successfully restart node {node_index} in allotted time")
106106

107107
def submit_block_catch_error(self, node_index, block):
108108
"""Try submitting a block to the given node.
@@ -114,10 +114,10 @@ def submit_block_catch_error(self, node_index, block):
114114
self.nodes[node_index].submitblock(block)
115115
return True
116116
except (http.client.CannotSendRequest, http.client.RemoteDisconnected) as e:
117-
self.log.debug("node %d submitblock raised exception: %s", node_index, e)
117+
self.log.debug(f"node {node_index} submitblock raised exception: {e}")
118118
return False
119119
except OSError as e:
120-
self.log.debug("node %d submitblock raised OSError exception: errno=%s", node_index, e.errno)
120+
self.log.debug(f"node {node_index} submitblock raised OSError exception: errno={e.errno}")
121121
if e.errno in [errno.EPIPE, errno.ECONNREFUSED, errno.ECONNRESET]:
122122
# The node has likely crashed
123123
return False
@@ -142,15 +142,15 @@ def sync_node3blocks(self, block_hashes):
142142
# Deliver each block to each other node
143143
for i in range(3):
144144
nodei_utxo_hash = None
145-
self.log.debug("Syncing blocks to node %d", i)
145+
self.log.debug(f"Syncing blocks to node {i}")
146146
for (block_hash, block) in blocks:
147147
# Get the block from node3, and submit to node_i
148-
self.log.debug("submitting block %s", block_hash)
148+
self.log.debug(f"submitting block {block_hash}")
149149
if not self.submit_block_catch_error(i, block):
150150
# TODO: more carefully check that the crash is due to -dbcrashratio
151151
# (change the exit code perhaps, and check that here?)
152152
self.wait_for_node_exit(i, timeout=30)
153-
self.log.debug("Restarting node %d after block hash %s", i, block_hash)
153+
self.log.debug(f"Restarting node {i} after block hash {block_hash}")
154154
nodei_utxo_hash = self.restart_node(i, block_hash)
155155
assert nodei_utxo_hash is not None
156156
self.restart_counts[i] += 1
@@ -167,7 +167,7 @@ def sync_node3blocks(self, block_hashes):
167167
# - we only update the utxo cache after a node restart, since flushing
168168
# the cache is a no-op at that point
169169
if nodei_utxo_hash is not None:
170-
self.log.debug("Checking txoutsetinfo matches for node %d", i)
170+
self.log.debug(f"Checking txoutsetinfo matches for node {i}")
171171
assert_equal(nodei_utxo_hash, node3_utxo_hash)
172172

173173
def verify_utxo_hash(self):
@@ -218,14 +218,14 @@ def run_test(self):
218218
# Start by creating a lot of utxos on node3
219219
initial_height = self.nodes[3].getblockcount()
220220
utxo_list = create_confirmed_utxos(self.nodes[3].getnetworkinfo()['relayfee'], self.nodes[3], 5000)
221-
self.log.info("Prepped %d utxo entries", len(utxo_list))
221+
self.log.info(f"Prepped {len(utxo_list)} utxo entries")
222222

223223
# Sync these blocks with the other nodes
224224
block_hashes_to_sync = []
225225
for height in range(initial_height + 1, self.nodes[3].getblockcount() + 1):
226226
block_hashes_to_sync.append(self.nodes[3].getblockhash(height))
227227

228-
self.log.debug("Syncing %d blocks with other nodes", len(block_hashes_to_sync))
228+
self.log.debug(f"Syncing {len(block_hashes_to_sync)} blocks with other nodes")
229229
# Syncing the blocks could cause nodes to crash, so the test begins here.
230230
self.sync_node3blocks(block_hashes_to_sync)
231231

@@ -235,18 +235,18 @@ def run_test(self):
235235
# each time through the loop, generate a bunch of transactions,
236236
# and then either mine a single new block on the tip, or some-sized reorg.
237237
for i in range(40):
238-
self.log.info("Iteration %d, generating 2500 transactions %s", i, self.restart_counts)
238+
self.log.info(f"Iteration {i}, generating 2500 transactions {self.restart_counts}")
239239
# Generate a bunch of small-ish transactions
240240
self.generate_small_transactions(self.nodes[3], 2500, utxo_list)
241241
# Pick a random block between current tip, and starting tip
242242
current_height = self.nodes[3].getblockcount()
243243
random_height = random.randint(starting_tip_height, current_height)
244-
self.log.debug("At height %d, considering height %d", current_height, random_height)
244+
self.log.debug(f"At height {current_height}, considering height {random_height}")
245245
if random_height > starting_tip_height:
246246
# Randomly reorg from this point with some probability (1/4 for
247247
# tip, 1/5 for tip-1, ...)
248248
if random.random() < 1.0 / (current_height + 4 - random_height):
249-
self.log.debug("Invalidating block at height %d", random_height)
249+
self.log.debug(f"Invalidating block at height {random_height}")
250250
self.nodes[3].invalidateblock(self.nodes[3].getblockhash(random_height))
251251

252252
# Now generate new blocks until we pass the old tip height
@@ -258,18 +258,18 @@ def run_test(self):
258258
# new address to avoid mining a block that has just been invalidated
259259
address=self.nodes[3].getnewaddress(),
260260
))
261-
self.log.debug("Syncing %d new blocks...", len(block_hashes))
261+
self.log.debug(f"Syncing {len(block_hashes)} new blocks...")
262262
self.sync_node3blocks(block_hashes)
263263
utxo_list = self.nodes[3].listunspent()
264-
self.log.debug("Node3 utxo count: %d", len(utxo_list))
264+
self.log.debug(f"Node3 utxo count: {len(utxo_list)}")
265265

266266
# Check that the utxo hashes agree with node3
267267
# Useful side effect: each utxo cache gets flushed here, so that we
268268
# won't get crashes on shutdown at the end of the test.
269269
self.verify_utxo_hash()
270270

271271
# Check the test coverage
272-
self.log.info("Restarted nodes: %s; crashes on restart: %d", self.restart_counts, self.crashed_on_restart)
272+
self.log.info(f"Restarted nodes: {self.restart_counts}; crashes on restart: {self.crashed_on_restart}")
273273

274274
# If no nodes were restarted, we didn't test anything.
275275
assert self.restart_counts != [0, 0, 0]
@@ -280,7 +280,7 @@ def run_test(self):
280280
# Warn if any of the nodes escaped restart.
281281
for i in range(3):
282282
if self.restart_counts[i] == 0:
283-
self.log.warning("Node %d never crashed during utxo flush!", i)
283+
self.log.warning(f"Node {i} never crashed during utxo flush!")
284284

285285

286286
if __name__ == "__main__":

0 commit comments

Comments
 (0)