|
8 | 8 |
|
9 | 9 | import abc |
10 | 10 | import fnmatch |
| 11 | +import hashlib |
11 | 12 | import io |
12 | 13 | import logging |
| 14 | +import sys |
13 | 15 | from dataclasses import dataclass |
14 | 16 | from datetime import datetime, timezone |
15 | 17 | from typing import ( |
|
21 | 23 | ) |
22 | 24 |
|
23 | 25 | from securesystemslib import exceptions as sslib_exceptions |
24 | | -from securesystemslib import hash as sslib_hash |
25 | 26 | from securesystemslib.signer import Key, Signature |
26 | 27 |
|
27 | 28 | from tuf.api.exceptions import LengthOrHashMismatchError, UnsignedMetadataError |
|
34 | 35 | _TARGETS = "targets" |
35 | 36 | _TIMESTAMP = "timestamp" |
36 | 37 |
|
| 38 | +_DEFAULT_HASH_ALGORITHM = "sha256" |
| 39 | +_BLAKE_HASH_ALGORITHM = "blake2b-256" |
| 40 | + |
37 | 41 | # We aim to support SPECIFICATION_VERSION and require the input metadata |
38 | 42 | # files to have the same major version (the first number) as ours. |
39 | 43 | SPECIFICATION_VERSION = ["1", "0", "31"] |
|
45 | 49 | T = TypeVar("T", "Root", "Timestamp", "Snapshot", "Targets") |
46 | 50 |
|
47 | 51 |
|
| 52 | +def _hash(algo: str) -> Any: # noqa: ANN401 |
| 53 | + """Returns new hash object, supporting custom "blake2b-256" algo name.""" |
| 54 | + if algo == _BLAKE_HASH_ALGORITHM: |
| 55 | + return hashlib.blake2b(digest_size=32) |
| 56 | + |
| 57 | + return hashlib.new(algo) |
| 58 | + |
| 59 | + |
| 60 | +def _file_hash(f: IO[bytes], algo: str) -> Any: # noqa: ANN401 |
| 61 | + """Returns hashed file.""" |
| 62 | + f.seek(0) |
| 63 | + if sys.version_info >= (3, 11): |
| 64 | + digest = hashlib.file_digest(f, lambda: _hash(algo)) # type: ignore[arg-type] |
| 65 | + |
| 66 | + else: |
| 67 | + # Fallback for Python < 3.11. The 4096-byte chunk size is taken from |
| 68 | + # the previously used, now deprecated `securesystemslib.hash.digest_fileobject`. |
| 69 | + digest = _hash(algo) |
| 70 | + for chunk in iter(lambda: f.read(4096), b""): |
| 71 | + digest.update(chunk) |
| 72 | + |
| 73 | + return digest |
| 74 | + |
| 75 | + |
48 | 76 | class Signed(metaclass=abc.ABCMeta): |
49 | 77 | """A base class for the signed part of TUF metadata. |
50 | 78 |
|
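For reference, the "blake2b-256" name used in TUF metadata is BLAKE2b truncated to a 32-byte (256-bit) digest, which `hashlib` has no registered name for; that is the whole reason `_hash` exists. A minimal standalone sketch mirroring the helper above (`resolve_hash` is an illustrative name, not part of the diff):

```python
import hashlib

def resolve_hash(algo: str):
    # TUF's "blake2b-256" is BLAKE2b with a 32-byte digest; hashlib
    # has no registered name for it, hence the special case.
    if algo == "blake2b-256":
        return hashlib.blake2b(digest_size=32)
    # Everything else ("sha256", "sha512", ...) resolves via
    # hashlib.new, which raises ValueError for unknown names.
    return hashlib.new(algo)

assert resolve_hash("blake2b-256").digest_size == 32
assert resolve_hash("sha256").name == "sha256"
```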
@@ -664,19 +692,15 @@ def _verify_hashes( |
664 | 692 | data: bytes | IO[bytes], expected_hashes: dict[str, str] |
665 | 693 | ) -> None: |
666 | 694 | """Verify that the hash of ``data`` matches ``expected_hashes``.""" |
667 | | - is_bytes = isinstance(data, bytes) |
668 | 695 | for algo, exp_hash in expected_hashes.items(): |
669 | 696 | try: |
670 | | - if is_bytes: |
671 | | - digest_object = sslib_hash.digest(algo) |
| 697 | + if isinstance(data, bytes): |
| 698 | + digest_object = _hash(algo) |
672 | 699 | digest_object.update(data) |
673 | 700 | else: |
674 | 701 | # if data is not bytes, assume it is a file object |
675 | | - digest_object = sslib_hash.digest_fileobject(data, algo) |
676 | | - except ( |
677 | | - sslib_exceptions.UnsupportedAlgorithmError, |
678 | | - sslib_exceptions.FormatError, |
679 | | - ) as e: |
| 702 | + digest_object = _file_hash(data, algo) |
| 703 | + except (ValueError, TypeError) as e: |
680 | 704 | raise LengthOrHashMismatchError( |
681 | 705 | f"Unsupported algorithm '{algo}'" |
682 | 706 | ) from e |
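The narrower `except` clause works because `hashlib.new` raises `ValueError` for an unknown algorithm name (and `TypeError` for a non-string argument), where securesystemslib raised its own `UnsupportedAlgorithmError`/`FormatError`. A quick stdlib-only illustration:

```python
import hashlib

try:
    hashlib.new("md1024")  # not a real algorithm
except ValueError as e:
    # hashlib signals unsupported algorithms with ValueError, which
    # _verify_hashes re-raises as LengthOrHashMismatchError.
    print(f"Unsupported algorithm: {e}")
```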
@@ -731,21 +755,16 @@ def _get_length_and_hashes( |
731 | 755 | hashes = {} |
732 | 756 |
|
733 | 757 | if hash_algorithms is None: |
734 | | - hash_algorithms = [sslib_hash.DEFAULT_HASH_ALGORITHM] |
| 758 | + hash_algorithms = [_DEFAULT_HASH_ALGORITHM] |
735 | 759 |
|
736 | 760 | for algorithm in hash_algorithms: |
737 | 761 | try: |
738 | 762 | if isinstance(data, bytes): |
739 | | - digest_object = sslib_hash.digest(algorithm) |
| 763 | + digest_object = _hash(algorithm) |
740 | 764 | digest_object.update(data) |
741 | 765 | else: |
742 | | - digest_object = sslib_hash.digest_fileobject( |
743 | | - data, algorithm |
744 | | - ) |
745 | | - except ( |
746 | | - sslib_exceptions.UnsupportedAlgorithmError, |
747 | | - sslib_exceptions.FormatError, |
748 | | - ) as e: |
| 766 | + digest_object = _file_hash(data, algorithm) |
| 767 | + except (ValueError, TypeError) as e: |
749 | 768 | raise ValueError(f"Unsupported algorithm '{algorithm}'") from e |
750 | 769 |
|
751 | 770 | hashes[algorithm] = digest_object.hexdigest() |
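With `hash_algorithms=None`, `_get_length_and_hashes` now defaults to plain sha256 via the new module constant. A hedged sketch of the resulting computation for in-memory bytes (file objects go through `_file_hash` instead):

```python
import hashlib

data = b"hello world"
digest_object = hashlib.new("sha256")
digest_object.update(data)
hashes = {"sha256": digest_object.hexdigest()}

print(len(data), hashes["sha256"])
# 11 b94d27b9934d3e08a52e52d7da7dabfac484efe37a5380ee9088f7ace2efcde9
```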
@@ -1150,7 +1169,7 @@ def is_delegated_path(self, target_filepath: str) -> bool: |
1150 | 1169 | if self.path_hash_prefixes is not None: |
1151 | 1170 | # Calculate the hash of the filepath |
1152 | 1171 | # to determine in which bin to find the target. |
1153 | | - digest_object = sslib_hash.digest(algorithm="sha256") |
| 1172 | + digest_object = hashlib.new(name="sha256") |
1154 | 1173 | digest_object.update(target_filepath.encode("utf-8")) |
1155 | 1174 | target_filepath_hash = digest_object.hexdigest() |
1156 | 1175 |
|
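The `is_delegated_path` change only swaps the digest factory; the hashed-bin logic is unchanged: the sha256 hex digest of the target path is matched against the delegation's `path_hash_prefixes`. A standalone sketch (the path and prefix values are illustrative):

```python
import hashlib

target_filepath = "files/file1.txt"
digest_object = hashlib.new("sha256")
digest_object.update(target_filepath.encode("utf-8"))
target_filepath_hash = digest_object.hexdigest()

# Hypothetical bin prefixes; real values come from the delegation metadata.
path_hash_prefixes = ["0", "1", "2", "3"]
delegated = any(
    target_filepath_hash.startswith(prefix) for prefix in path_hash_prefixes
)
```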
@@ -1269,7 +1288,7 @@ def get_role_for_target(self, target_filepath: str) -> str: |
1269 | 1288 | target_filepath: URL path to a target file, relative to a base |
1270 | 1289 | targets URL. |
1271 | 1290 | """ |
1272 | | - hasher = sslib_hash.digest(algorithm="sha256") |
| 1291 | + hasher = hashlib.new(name="sha256") |
1273 | 1292 | hasher.update(target_filepath.encode("utf-8")) |
1274 | 1293 |
|
1275 | 1294 | # We can't ever need more than 4 bytes (32 bits). |
|
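The "4 bytes (32 bits)" comment refers to succinct hashed-bin delegation: the bin index is taken from the leading bits of the path hash, so no more than the first 32 bits of the digest are ever consumed. A sketch of that derivation under an assumed `bit_length` (the real value comes from the `SuccinctRoles` metadata):

```python
import hashlib

hasher = hashlib.new("sha256")
hasher.update("files/file1.txt".encode("utf-8"))

bit_length = 8  # assumed: 2**8 = 256 bins
# Only the first 4 bytes (32 bits) of the digest are needed,
# since bit_length never exceeds 32.
hash_bits = int.from_bytes(hasher.digest()[:4], byteorder="big")
bin_number = hash_bits >> (32 - bit_length)
print(f"bin index: {bin_number}")
```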