|
2 | 2 | # Licensed to PSF under a Contributor Agreement. |
3 | 3 | # |
4 | 4 |
|
5 | | -__doc__ = """hashlib module - A common interface to many hash functions. |
| 5 | +__doc__ = r"""hashlib module - A common interface to many hash functions. |
6 | 6 |
|
7 | 7 | new(name, data=b'', **kwargs) - returns a new hash object implementing the |
8 | 8 | given hash function; initializing the hash |
|
12 | 12 | than using new(name): |
13 | 13 |
|
14 | 14 | md5(), sha1(), sha224(), sha256(), sha384(), sha512(), blake2b(), blake2s(), |
15 | | -sha3_224, sha3_256, sha3_384, sha3_512, shake_128, and shake_256. |
| 15 | +sha3_224(), sha3_256(), sha3_384(), sha3_512(), shake_128(), and shake_256(). |
16 | 16 |
|
17 | 17 | More algorithms may be available on your platform, but the above are guaranteed |
18 | 18 | to exist. See the algorithms_guaranteed and algorithms_available attributes |
|
21 | 21 | NOTE: If you want the adler32 or crc32 hash functions, they are available in |
22 | 22 | the zlib module. |
23 | 23 |
|
24 | | -Choose your hash function wisely. Some have known collision weaknesses. |
25 | | -sha384 and sha512 will be slow on 32 bit platforms. |
| 24 | +Choose your hash function wisely. Some have known collision weaknesses, |
| 25 | +while others may be slower depending on the CPU architecture. |
26 | 26 |
|
27 | 27 | Hash objects have these methods: |
28 | 28 | - update(data): Update the hash object with the bytes in data. Repeated calls |
|
36 | 36 | efficiently compute the digests of data that share a common |
37 | 37 | initial substring. |
38 | 38 |
|
39 | | -For example, to obtain the digest of the byte string 'Nobody inspects the |
40 | | -spammish repetition': |
| 39 | +Assuming that Python has been built with MD5 support, the following computes |
| 40 | +the MD5 digest of the byte string b'Nobody inspects the spammish repetition': |
41 | 41 |
|
42 | 42 | >>> import hashlib |
43 | 43 | >>> m = hashlib.md5() |
44 | 44 | >>> m.update(b"Nobody inspects") |
45 | 45 | >>> m.update(b" the spammish repetition") |
46 | 46 | >>> m.digest() |
47 | | - b'\\xbbd\\x9c\\x83\\xdd\\x1e\\xa5\\xc9\\xd9\\xde\\xc9\\xa1\\x8d\\xf0\\xff\\xe9' |
| 47 | + b'\xbbd\x9c\x83\xdd\x1e\xa5\xc9\xd9\xde\xc9\xa1\x8d\xf0\xff\xe9' |
48 | 48 |
|
49 | 49 | More condensed: |
50 | 50 |
|
51 | | - >>> hashlib.sha224(b"Nobody inspects the spammish repetition").hexdigest() |
52 | | - 'a4337bc45a8fc544c03f52dc550cd6e1e87021bc896588bd79e901e2' |
| 51 | + >>> hashlib.md5(b"Nobody inspects the spammish repetition").hexdigest() |
| 52 | + 'bb649c83dd1ea5c9d9dec9a18df0ffe9' |
53 | 53 |
|
54 | 54 | """ |
55 | 55 |
|
@@ -204,7 +204,7 @@ def file_digest(fileobj, digest, /, *, _bufsize=2**18): |
204 | 204 | *digest* must either be a hash algorithm name as a *str*, a hash |
205 | 205 | constructor, or a callable that returns a hash object. |
206 | 206 | """ |
207 | | - # On Linux we could use AF_ALG sockets and sendfile() to archive zero-copy |
| 207 | + # On Linux we could use AF_ALG sockets and sendfile() to achieve zero-copy |
208 | 208 | # hashing with hardware acceleration. |
209 | 209 | if isinstance(digest, str): |
210 | 210 | digestobj = new(digest) |
|
0 commit comments