diff --git a/contrib/pyln-client/pyln/client/lightning.py b/contrib/pyln-client/pyln/client/lightning.py index 1706a4727e3b..440e36bb5f17 100644 --- a/contrib/pyln-client/pyln/client/lightning.py +++ b/contrib/pyln-client/pyln/client/lightning.py @@ -254,9 +254,12 @@ def connect(self) -> None: # Open an fd to our home directory, that we can then find # through `/proc/self/fd` and access the contents. dirfd = os.open(dirname, os.O_DIRECTORY | os.O_RDONLY) - short_path = "/proc/self/fd/%d/%s" % (dirfd, basename) - self.sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) - self.sock.connect(short_path) + try: + short_path = "/proc/self/fd/%d/%s" % (dirfd, basename) + self.sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) + self.sock.connect(short_path) + finally: + os.close(dirfd) elif (e.args[0] == "AF_UNIX path too long" and os.uname()[0] == "Darwin"): temp_dir = tempfile.mkdtemp() temp_link = os.path.join(temp_dir, "socket_link") diff --git a/contrib/pyln-testing/pyln/testing/fixtures.py b/contrib/pyln-testing/pyln/testing/fixtures.py index 215c54022325..7bc45a25a2fa 100644 --- a/contrib/pyln-testing/pyln/testing/fixtures.py +++ b/contrib/pyln-testing/pyln/testing/fixtures.py @@ -512,6 +512,9 @@ def map_node_error(nodes, f, msg): if not ok: map_node_error(nf.nodes, prinErrlog, "some node failed unexpected, non-empty errlog file") + for n in nf.nodes: + n.daemon.cleanup_files() + def getErrlog(node): for error_file in os.listdir(node.daemon.lightning_dir): diff --git a/contrib/pyln-testing/pyln/testing/gossip.py b/contrib/pyln-testing/pyln/testing/gossip.py index eb7ce99f4480..5c642846aaea 100644 --- a/contrib/pyln-testing/pyln/testing/gossip.py +++ b/contrib/pyln-testing/pyln/testing/gossip.py @@ -16,7 +16,13 @@ def __init__(self, path: Path): self.path = path self.log = logging.getLogger("GossipStore") + def __del__(self): + if self.fd is not None: + self.fd.close() + def open(self): + if self.fd is not None: + self.fd.close() self.fd = self.path.open(mode="rb") self.version = ord(self.fd.read(1)) if self.version < 3: diff --git a/contrib/pyln-testing/pyln/testing/utils.py b/contrib/pyln-testing/pyln/testing/utils.py index 2317c96c315f..c9e473336bf8 100644 --- a/contrib/pyln-testing/pyln/testing/utils.py +++ b/contrib/pyln-testing/pyln/testing/utils.py @@ -60,7 +60,8 @@ def env(name, default=None): """ fname = 'config.vars' if os.path.exists(fname): - lines = open(fname, 'r').readlines() + with open(fname, 'r') as f: + lines = f.readlines() config = dict([(line.rstrip().split('=', 1)) for line in lines]) else: config = {} @@ -938,7 +939,7 @@ def grpc(self): import grpc p = Path(self.daemon.lightning_dir) / TEST_NETWORK - cert, key, ca = [f.open('rb').read() for f in [ + cert, key, ca = [f.read_bytes() for f in [ p / 'client.pem', p / 'client-key.pem', p / "ca.pem"]] @@ -1689,16 +1690,16 @@ def get_node(self, node_id=None, options=None, dbfile=None, self.nodes.append(node) self.reserved_ports.append(port) if dbfile: - out = open(os.path.join(node.daemon.lightning_dir, TEST_NETWORK, - 'lightningd.sqlite3'), 'xb') - with lzma.open(os.path.join('tests/data', dbfile), 'rb') as f: - out.write(f.read()) + with open(os.path.join(node.daemon.lightning_dir, TEST_NETWORK, + 'lightningd.sqlite3'), 'xb') as out: + with lzma.open(os.path.join('tests/data', dbfile), 'rb') as f: + out.write(f.read()) if bkpr_dbfile: - out = open(os.path.join(node.daemon.lightning_dir, TEST_NETWORK, - 'accounts.sqlite3'), 'xb') - with lzma.open(os.path.join('tests/data', bkpr_dbfile), 'rb') as f: - 
out.write(f.read()) + with open(os.path.join(node.daemon.lightning_dir, TEST_NETWORK, + 'accounts.sqlite3'), 'xb') as out: + with lzma.open(os.path.join('tests/data', bkpr_dbfile), 'rb') as f: + out.write(f.read()) if gossip_store_file: shutil.copy(gossip_store_file, os.path.join(node.daemon.lightning_dir, TEST_NETWORK, diff --git a/tests/fixtures.py b/tests/fixtures.py index 6cc91c12ffef..cde3a18d62f7 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -58,7 +58,8 @@ class CompatLevel(object): """ def __init__(self): makefile = os.path.join(os.path.dirname(__file__), "..", "Makefile") - lines = [l for l in open(makefile, 'r') if l.startswith('COMPAT_CFLAGS')] + with open(makefile, 'r') as f: + lines = [l for l in f if l.startswith('COMPAT_CFLAGS')] assert(len(lines) == 1) line = lines[0] flags = re.findall(r'COMPAT_V([0-9]+)=1', line) diff --git a/tests/test_bookkeeper.py b/tests/test_bookkeeper.py index edb57c0aa04f..5860ad92bb7d 100644 --- a/tests/test_bookkeeper.py +++ b/tests/test_bookkeeper.py @@ -690,7 +690,7 @@ def test_bookkeeping_descriptions(node_factory, bitcoind, chainparams): l1.rpc.bkpr_dumpincomecsv('koinly', 'koinly.csv') l2.rpc.bkpr_dumpincomecsv('koinly', 'koinly.csv') koinly_path = os.path.join(l1.daemon.lightning_dir, TEST_NETWORK, 'koinly.csv') - l1_koinly_csv = open(koinly_path, 'rb').read() + l1_koinly_csv = Path(koinly_path).read_bytes() bolt11_exp = bytes('invoice,"test \'bolt11\' description, 🥰🪢",', 'utf-8') bolt12_exp = bytes('invoice,"test \'bolt12\' description, 🥰🪢",', 'utf-8') @@ -698,7 +698,7 @@ def test_bookkeeping_descriptions(node_factory, bitcoind, chainparams): assert l1_koinly_csv.find(bolt12_exp) >= 0 koinly_path = os.path.join(l2.daemon.lightning_dir, TEST_NETWORK, 'koinly.csv') - l2_koinly_csv = open(koinly_path, 'rb').read() + l2_koinly_csv = Path(koinly_path).read_bytes() assert l2_koinly_csv.find(bolt11_exp) >= 0 assert l2_koinly_csv.find(bolt12_exp) >= 0 diff --git a/tests/test_cln_rs.py b/tests/test_cln_rs.py index 6363e7830458..d5f258b66c1d 100644 --- a/tests/test_cln_rs.py +++ b/tests/test_cln_rs.py @@ -129,9 +129,9 @@ def test_grpc_connect(node_factory): key_path = p / "client-key.pem" ca_cert_path = p / "ca.pem" creds = grpc.ssl_channel_credentials( - root_certificates=ca_cert_path.open('rb').read(), - private_key=key_path.open('rb').read(), - certificate_chain=cert_path.open('rb').read() + root_certificates=ca_cert_path.read_bytes(), + private_key=key_path.read_bytes(), + certificate_chain=cert_path.read_bytes() ) wait_for_grpc_start(l1) @@ -196,15 +196,15 @@ def test_grpc_generate_certificate(node_factory): assert [f.exists() for f in files] == [True] * len(files) # The files exist, restarting should not change them - contents = [f.open().read() for f in files] + contents = [f.read_bytes() for f in files] l1.restart() - assert contents == [f.open().read() for f in files] + assert contents == [f.read_bytes() for f in files] # Now we delete the last file, we should regenerate it as well as its key files[-1].unlink() l1.restart() - assert contents[-2] != files[-2].open().read() - assert contents[-1] != files[-1].open().read() + assert contents[-2] != files[-2].read_bytes() + assert contents[-1] != files[-1].read_bytes() keys = [f for f in files if f.name.endswith('-key.pem')] modes = [f.stat().st_mode for f in keys] @@ -241,7 +241,7 @@ def test_grpc_wrong_auth(node_factory): def connect(node): p = Path(node.daemon.lightning_dir) / TEST_NETWORK - cert, key, ca = [f.open('rb').read() for f in [ + cert, key, ca = [f.read_bytes() for f in 
[ p / 'client.pem', p / 'client-key.pem', p / "ca.pem"]] diff --git a/tests/test_clnrest.py b/tests/test_clnrest.py index f9c14a28a985..47206c046ac5 100644 --- a/tests/test_clnrest.py +++ b/tests/test_clnrest.py @@ -105,22 +105,22 @@ def test_generate_certificate(node_factory): wait_for(lambda: [f.exists() for f in files] == [True] * len(files)) # the files exist, restarting should not change them - contents = [f.open().read() for f in files] + contents = [f.read_bytes() for f in files] l1.restart() - assert contents == [f.open().read() for f in files] + assert contents == [f.read_bytes() for f in files] # remove client.pem file, so all certs are regenerated at restart files[2].unlink() l1.restart() wait_for(lambda: [f.exists() for f in files] == [True] * len(files)) - contents_1 = [f.open().read() for f in files] + contents_1 = [f.read_bytes() for f in files] assert [c[0] != c[1] for c in zip(contents, contents_1)] == [True] * len(files) # remove client-key.pem file, so all certs are regenerated at restart files[3].unlink() l1.restart() wait_for(lambda: [f.exists() for f in files] == [True] * len(files)) - contents_2 = [f.open().read() for f in files] + contents_2 = [f.read_bytes() for f in files] assert [c[0] != c[1] for c in zip(contents, contents_2)] == [True] * len(files) diff --git a/tests/test_connection.py b/tests/test_connection.py index ed81c75f52d4..8f10c2555cff 100644 --- a/tests/test_connection.py +++ b/tests/test_connection.py @@ -1,6 +1,7 @@ from fixtures import * # noqa: F401,F403 from fixtures import TEST_NETWORK from decimal import Decimal +from pathlib import Path from pyln.client import RpcError, Millisatoshi import pyln.proto.wire as wire from utils import ( @@ -3019,7 +3020,7 @@ def test_dataloss_protection(node_factory, bitcoind): # Save copy of the db. dbpath = os.path.join(l2.daemon.lightning_dir, TEST_NETWORK, "lightningd.sqlite3") - orig_db = open(dbpath, "rb").read() + orig_db = Path(dbpath).read_bytes() l2.start() # l1 should have sent WIRE_CHANNEL_REESTABLISH with extra fields. @@ -3063,7 +3064,7 @@ def test_dataloss_protection(node_factory, bitcoind): # Now, move l2 back in time. l2.stop() # Overwrite with OLD db. - open(dbpath, "wb").write(orig_db) + Path(dbpath).write_bytes(orig_db) l2.start() # l2 should freak out! @@ -3118,7 +3119,7 @@ def test_dataloss_protection_no_broadcast(node_factory, bitcoind): # Save copy of the db. dbpath = os.path.join(l2.daemon.lightning_dir, TEST_NETWORK, "lightningd.sqlite3") - orig_db = open(dbpath, "rb").read() + orig_db = Path(dbpath).read_bytes() l2.start() l1.rpc.connect(l2.info['id'], 'localhost', l2.port) @@ -3133,9 +3134,9 @@ def test_dataloss_protection_no_broadcast(node_factory, bitcoind): # Now, move l2 back in time. l2.stop() # Save new db - new_db = open(dbpath, "rb").read() + new_db = Path(dbpath).read_bytes() # Overwrite with OLD db. - open(dbpath, "wb").write(orig_db) + Path(dbpath).write_bytes(orig_db) l2.start() l1.rpc.connect(l2.info['id'], 'localhost', l2.port) @@ -3149,7 +3150,7 @@ def test_dataloss_protection_no_broadcast(node_factory, bitcoind): # fix up l2. 
l2.stop() - open(dbpath, "wb").write(new_db) + Path(dbpath).write_bytes(new_db) l2.start() # All is forgiven diff --git a/tests/test_misc.py b/tests/test_misc.py index 5832787aff20..5759bbeb0118 100644 --- a/tests/test_misc.py +++ b/tests/test_misc.py @@ -3,6 +3,7 @@ from decimal import Decimal from fixtures import * # noqa: F401,F403 from fixtures import LightningNode, TEST_NETWORK +from pathlib import Path from pyln.client import RpcError, Millisatoshi from threading import Event from pyln.testing.utils import ( @@ -1899,11 +1900,13 @@ def test_logging(node_factory): wait_for(lambda: os.path.exists(logpath_moved)) wait_for(lambda: os.path.exists(logpath)) - log1 = open(logpath_moved).readlines() + with open(logpath_moved) as f: + log1 = f.readlines() assert log1[-1].endswith("Ending log due to SIGHUP\n") def check_new_log(): - log2 = open(logpath).readlines() + with open(logpath) as f: + log2 = f.readlines() return len(log2) > 0 and log2[0].endswith("Started log due to SIGHUP\n") wait_for(check_new_log) @@ -3106,7 +3109,7 @@ def test_recover_plugin(node_factory, bitcoind): # Save copy of the db. dbpath = os.path.join(l2.daemon.lightning_dir, TEST_NETWORK, "lightningd.sqlite3") - orig_db = open(dbpath, "rb").read() + orig_db = Path(dbpath).read_bytes() l2.start() @@ -3121,7 +3124,7 @@ def test_recover_plugin(node_factory, bitcoind): l2.stop() # Overwrite with OLD db. - open(dbpath, "wb").write(orig_db) + Path(dbpath).write_bytes(orig_db) l2.start() @@ -4917,45 +4920,46 @@ def test_tracing(node_factory): traces = set() suspended = set() for fname in glob.glob(f"{trace_fnamebase}.*"): - for linenum, l in enumerate(open(fname, "rt").readlines(), 1): - # In case an assertion fails - print(f"Parsing {fname}:{linenum}") - parts = l.split(maxsplit=2) - cmd = parts[0] - spanid = parts[1] - if cmd == 'span_emit': - assert spanid in traces - assert spanid not in suspended - # Should be valid JSON - res = json.loads(parts[2]) - - # This is an array for some reason - assert len(res) == 1 - res = res[0] - assert res['id'] == spanid - assert res['localEndpoint'] == {"serviceName": "lightningd"} - expected_keys = ['id', 'name', 'timestamp', 'duration', 'tags', 'traceId', 'localEndpoint'] - if 'parentId' in res: - assert res['parentId'] in traces - expected_keys.append('parentId') - assert set(res.keys()) == set(expected_keys) - traces.remove(spanid) - elif cmd == 'span_end': - assert spanid in traces - elif cmd == 'span_start': - assert spanid not in traces - traces.add(spanid) - elif cmd == 'span_suspend': - assert spanid in traces - assert spanid not in suspended - suspended.add(spanid) - elif cmd == 'span_resume': - assert spanid in traces - suspended.remove(spanid) - elif cmd == 'destroying': - pass - else: - assert False, "Unknown trace line" + with open(fname, "rt") as f: + for linenum, l in enumerate(f.readlines(), 1): + # In case an assertion fails + print(f"Parsing {fname}:{linenum}") + parts = l.split(maxsplit=2) + cmd = parts[0] + spanid = parts[1] + if cmd == 'span_emit': + assert spanid in traces + assert spanid not in suspended + # Should be valid JSON + res = json.loads(parts[2]) + + # This is an array for some reason + assert len(res) == 1 + res = res[0] + assert res['id'] == spanid + assert res['localEndpoint'] == {"serviceName": "lightningd"} + expected_keys = ['id', 'name', 'timestamp', 'duration', 'tags', 'traceId', 'localEndpoint'] + if 'parentId' in res: + assert res['parentId'] in traces + expected_keys.append('parentId') + assert set(res.keys()) == set(expected_keys) + 
traces.remove(spanid) + elif cmd == 'span_end': + assert spanid in traces + elif cmd == 'span_start': + assert spanid not in traces + traces.add(spanid) + elif cmd == 'span_suspend': + assert spanid in traces + assert spanid not in suspended + suspended.add(spanid) + elif cmd == 'span_resume': + assert spanid in traces + suspended.remove(spanid) + elif cmd == 'destroying': + pass + else: + assert False, "Unknown trace line" assert suspended == set() assert traces == set() @@ -4969,20 +4973,21 @@ def test_tracing(node_factory): # The parent should set all the trace ids and span ids for fname in glob.glob(f"{trace_fnamebase}.*"): - for linenum, l in enumerate(open(fname, "rt").readlines(), 1): - # In case an assertion fails - print(f"Parsing {fname}:{linenum}") - parts = l.split(maxsplit=2) - cmd = parts[0] - spanid = parts[1] - # This span doesn't actually appear anywhere - assert spanid != '0123456789abcdef' - if cmd == 'span_emit': - # Should be valid JSON - res = json.loads(parts[2]) - assert res[0]['traceId'] == '00112233445566778899aabbccddeeff' - # Everyone has a parent! - assert 'parentId' in res[0] + with open(fname, "rt") as f: + for linenum, l in enumerate(f.readlines(), 1): + # In case an assertion fails + print(f"Parsing {fname}:{linenum}") + parts = l.split(maxsplit=2) + cmd = parts[0] + spanid = parts[1] + # This span doesn't actually appear anywhere + assert spanid != '0123456789abcdef' + if cmd == 'span_emit': + # Should be valid JSON + res = json.loads(parts[2]) + assert res[0]['traceId'] == '00112233445566778899aabbccddeeff' + # Everyone has a parent! + assert 'parentId' in res[0] def test_zero_locktime_blocks(node_factory, bitcoind): diff --git a/tests/test_pay.py b/tests/test_pay.py index 62420c2328b0..a86d5a6ea2c0 100644 --- a/tests/test_pay.py +++ b/tests/test_pay.py @@ -1241,7 +1241,7 @@ def test_forward(node_factory, bitcoind): l1.rpc.bkpr_dumpincomecsv('koinly', 'koinly.csv') koinly_path = os.path.join(l1.daemon.lightning_dir, TEST_NETWORK, 'koinly.csv') - koinly_csv = open(koinly_path, 'rb').read() + koinly_csv = Path(koinly_path).read_bytes() expected_line = r'0.00100000000,.*,,,0.00000001001,.*,invoice' assert only_one(re.findall(expected_line, str(koinly_csv))) diff --git a/tests/test_plugin.py b/tests/test_plugin.py index b01005479322..788942c6dcb0 100644 --- a/tests/test_plugin.py +++ b/tests/test_plugin.py @@ -1244,7 +1244,8 @@ def test_htlc_accepted_hook_forward_restart(node_factory, executor): # additional checks logline = l2.daemon.wait_for_log(r'Onion written to') fname = re.search(r'Onion written to (.*\.json)', logline).group(1) - onion = json.load(open(fname)) + with open(fname) as f: + onion = json.load(f) assert onion['type'] == 'tlv' assert re.match(r'^11020203e80401..0608................$', onion['payload']) assert len(onion['shared_secret']) == 64 @@ -2072,15 +2073,16 @@ def test_watchtower(node_factory, bitcoind, directory, chainparams): cheat_tx = bitcoind.rpc.decoderawtransaction(tx) lastcommitnum = 0 - for l in open(wt_file, 'r'): - txid, penalty, channel_id_hook, commitnum = l.strip().split(', ') - assert lastcommitnum == int(commitnum) - assert channel_id_hook == channel_id - lastcommitnum += 1 - if txid == cheat_tx['txid']: - # This one should succeed, since it is a response to the cheat_tx - bitcoind.rpc.sendrawtransaction(penalty) - break + with open(wt_file, 'r') as f: + for l in f: + txid, penalty, channel_id_hook, commitnum = l.strip().split(', ') + assert lastcommitnum == int(commitnum) + assert channel_id_hook == channel_id + 
lastcommitnum += 1 + if txid == cheat_tx['txid']: + # This one should succeed, since it is a response to the cheat_tx + bitcoind.rpc.sendrawtransaction(penalty) + break # Need this to check that l2 gets the funds penalty_meta = bitcoind.rpc.decoderawtransaction(penalty) @@ -2686,9 +2688,8 @@ def init(options, configuration, plugin): # write hello world plugin to lndir/plugins os.makedirs(os.path.join(lndir, 'plugins'), exist_ok=True) path = os.path.join(lndir, 'plugins', 'test_restart_on_update.py') - file = open(path, 'w+') - file.write(content % "1") - file.close() + with open(path, 'w+') as file: + file.write(content % "1") os.chmod(path, os.stat(path).st_mode | stat.S_IEXEC) # now fire up the node and wait for the plugin to print hello @@ -2701,9 +2702,8 @@ def init(options, configuration, plugin): assert not n.daemon.is_in_log(r"Plugin changed, needs restart.") # modify the file - file = open(path, 'w+') - file.write(content % "2") - file.close() + with open(path, 'w+') as file: + file.write(content % "2") os.chmod(path, os.stat(path).st_mode | stat.S_IEXEC) # rescan and check diff --git a/tests/test_wallet.py b/tests/test_wallet.py index 63ddba2f1abe..b0a774717395 100644 --- a/tests/test_wallet.py +++ b/tests/test_wallet.py @@ -2,6 +2,7 @@ from decimal import Decimal from fixtures import * # noqa: F401,F403 from fixtures import TEST_NETWORK +from pathlib import Path from pyln.client import RpcError, Millisatoshi from utils import ( only_one, wait_for, sync_blockheight, @@ -1527,7 +1528,7 @@ def test_hsmtool_generatehsm(node_factory): "generatehsm", hsm_path, "en", "abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon about"]) - assert open(hsm_path, "rb").read().hex() == "5eb00bbddcf069084889a8ab9155568165f5c453ccb85e70811aaed6f6da5fc1" + assert Path(hsm_path).read_bytes().hex() == "5eb00bbddcf069084889a8ab9155568165f5c453ccb85e70811aaed6f6da5fc1" # Including passphrase os.remove(hsm_path)
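
Note on the pattern applied throughout this patch: every change replaces a bare `open(...)` / `os.open(...)` whose handle was left to the garbage collector with an explicitly scoped handle — a `with` block, a `try`/`finally` around the raw descriptor, or a one-shot `pathlib.Path.read_bytes()`. A minimal sketch of the three variants follows; the helper names (`read_config_lines`, `connect_long_unix_path`, `read_db_snapshot`) are illustrative only and not part of the codebase:

    import os
    import socket
    from pathlib import Path

    def read_config_lines(fname: str) -> list:
        # The context manager closes the handle even if readlines() raises,
        # unlike the old `open(fname).readlines()` one-liner.
        with open(fname, 'r') as f:
            return f.readlines()

    def connect_long_unix_path(path: str) -> socket.socket:
        # /proc/self/fd workaround for the AF_UNIX path-length limit; the
        # directory descriptor is released whether or not connect() succeeds.
        dirname, basename = os.path.split(path)
        dirfd = os.open(dirname, os.O_DIRECTORY | os.O_RDONLY)
        try:
            sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
            sock.connect("/proc/self/fd/%d/%s" % (dirfd, basename))
            return sock
        finally:
            os.close(dirfd)

    def read_db_snapshot(dbpath: str) -> bytes:
        # Path.read_bytes() opens, reads and closes in one call, so no
        # file object outlives the expression.
        return Path(dbpath).read_bytes()

The tests' behaviour is unchanged; only the lifetime of the file handles is made deterministic, which avoids ResourceWarning noise and reduces the risk of exhausting file descriptors in long test runs.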