 import urllib.request
 import enum
 from hashlib import sha256
-from pathlib import Path
+from pathlib import Path, PurePath

 # The primary host; this will fail if we can't retrieve files from here.
 HOST1 = "https://bitcoincore.org"
@@ -440,11 +440,11 @@ def verify_shasums_signature(
     return (ReturnCode.SUCCESS, good_trusted, good_untrusted, unknown, bad)


-def parse_sums_file(sums_file_path: Path, filename_filter: str) -> t.List[t.List[str]]:
+def parse_sums_file(sums_file_path: str, filename_filter: t.List[str]) -> t.List[t.List[str]]:
     # extract hashes/filenames of binaries to verify from hash file;
     # each line has the following format: "<hash> <binary_filename>"
     with open(sums_file_path, 'r', encoding='utf8') as hash_file:
-        return [line.split()[:2] for line in hash_file if filename_filter in line]
+        return [line.split()[:2] for line in hash_file if len(filename_filter) == 0 or any(f in line for f in filename_filter)]


 def verify_binary_hashes(hashes_to_verify: t.List[t.List[str]]) -> t.Tuple[ReturnCode, t.Dict[str, str]]:
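As a minimal sketch of the new filter semantics (the hashes and filenames below are made up for illustration): an empty filter keeps every line of the sums file, otherwise a line is kept if it mentions any of the requested basenames.

```python
# Made-up SHA256SUMS contents and filter, purely for illustration.
lines = [
    "1111aaaa  bitcoin-x.y-x86_64-linux-gnu.tar.gz",
    "2222bbbb  bitcoin-x.y-arm-linux-gnueabihf.tar.gz",
]
filename_filter = ["bitcoin-x.y-x86_64-linux-gnu.tar.gz"]

# Same comprehension as in parse_sums_file: keep [hash, filename] pairs
# for lines that mention any requested basename (or all lines if no filter).
matches = [line.split()[:2] for line in lines
           if len(filename_filter) == 0 or any(f in line for f in filename_filter)]
print(matches)  # [['1111aaaa', 'bitcoin-x.y-x86_64-linux-gnu.tar.gz']]
```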
@@ -579,6 +579,73 @@ def cleanup():
     return ReturnCode.SUCCESS


+def verify_binaries_handler(args: argparse.Namespace) -> ReturnCode:
+    binary_to_basename = {}
+    for file in args.binary:
+        binary_to_basename[PurePath(file).name] = file
+
+    sums_sig_path = None
+    if args.sums_sig_file:
+        sums_sig_path = Path(args.sums_sig_file)
+    else:
+        log.info(f"No signature file specified, assuming it is {args.sums_file}.asc")
+        sums_sig_path = Path(args.sums_file).with_suffix(".asc")
+
+    # Verify the signature on the SHA256SUMS file
+    sigs_status, good_trusted, good_untrusted, unknown, bad = verify_shasums_signature(sums_sig_path, args.sums_file, args)
+    if sigs_status != ReturnCode.SUCCESS:
+        return sigs_status
+
+    # Extract hashes and filenames
+    hashes_to_verify = parse_sums_file(args.sums_file, list(binary_to_basename.keys()))
+    if not hashes_to_verify:
+        log.error(f"No files in {args.sums_file} match the specified binaries")
+        return ReturnCode.NO_BINARIES_MATCH
+
+    # Make sure all files are accounted for
+    sums_file_path = Path(args.sums_file)
+    missing_files = []
+    files_to_hash = []
+    if len(binary_to_basename) > 0:
+        for file_hash, file in hashes_to_verify:
+            files_to_hash.append([file_hash, binary_to_basename[file]])
+            del binary_to_basename[file]
+        if len(binary_to_basename) > 0:
+            log.error(f"Not all specified binaries are in {args.sums_file}")
+            return ReturnCode.NO_BINARIES_MATCH
+    else:
+        log.info(f"No binaries specified, assuming all files listed in {args.sums_file} are located relative to it")
+        for file_hash, file in hashes_to_verify:
+            file_path = Path(sums_file_path.parent.joinpath(file))
+            if file_path.exists():
+                files_to_hash.append([file_hash, str(file_path)])
+            else:
+                missing_files.append(file)
+
+    # Verify the hashes of the local binaries
+    hashes_status, files_to_hashes = verify_binary_hashes(files_to_hash)
+    if hashes_status != ReturnCode.SUCCESS:
+        return hashes_status
+
+    if args.json:
+        output = {
+            'good_trusted_sigs': [str(s) for s in good_trusted],
+            'good_untrusted_sigs': [str(s) for s in good_untrusted],
+            'unknown_sigs': [str(s) for s in unknown],
+            'bad_sigs': [str(s) for s in bad],
+            'verified_binaries': files_to_hashes,
+            'missing_binaries': missing_files,
+        }
+        print(json.dumps(output, indent=2))
+    else:
+        for filename in files_to_hashes:
+            print(f"VERIFIED: {filename}")
+        for filename in missing_files:
+            print(f"MISSING: {filename}")
+
+    return ReturnCode.SUCCESS
+
+
 def main():
     parser = argparse.ArgumentParser(description=__doc__)
     parser.add_argument(
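One point worth noting: the handler keys its lookup table by basename because SHA256SUMS entries list bare filenames, while callers may pass relative or absolute paths. A small illustration (the path below is hypothetical):

```python
from pathlib import PurePath

# Hypothetical command-line arguments; only the basename appears in SHA256SUMS.
binaries = ["./downloads/bitcoin-x.y-x86_64-linux-gnu.tar.gz"]

# Map each basename back to the path the user actually supplied.
binary_to_basename = {PurePath(p).name: p for p in binaries}
print(binary_to_basename)
# {'bitcoin-x.y-x86_64-linux-gnu.tar.gz': './downloads/bitcoin-x.y-x86_64-linux-gnu.tar.gz'}
```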
@@ -638,6 +705,15 @@ def main():
             '(Sometimes bitcoin.org lags behind bitcoincore.org.)')
     )

+    bin_parser = subparsers.add_parser("bin", help="Verify local binaries.")
+    bin_parser.set_defaults(func=verify_binaries_handler)
+    bin_parser.add_argument("--sums-sig-file", "-s", help="Path to the SHA256SUMS.asc file to verify")
+    bin_parser.add_argument("sums_file", help="Path to the SHA256SUMS file to verify")
+    bin_parser.add_argument(
+        "binary", nargs="*",
+        help="Path to a binary distribution file to verify. Can be specified multiple times to verify multiple files."
+    )
+
     args = parser.parse_args()
     if args.quiet:
         log.setLevel(logging.WARNING)
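Taken together with the handler above, the new `bin` mode takes the sums file as a positional argument followed by any number of local binaries, e.g. something like `bin ./SHA256SUMS ./bitcoin-x.y-x86_64-linux-gnu.tar.gz` (filenames illustrative). The `--sums-sig-file`/`-s` option is only needed when the signature file does not sit next to the sums file as `SHA256SUMS.asc`, and when no binaries are given, every file listed in the sums file is looked up relative to it.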