import copy
import struct
import sys
from operator import xor

import sha3  # pysha3; exposes Keccak as sha3.sha3_256 / sha3.sha3_512


if sys.version_info.major == 2:
    from repoze.lru import lru_cache
else:
    from functools import lru_cache
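

# The parameters below are referenced throughout this file but are not
# defined in this commit.  The values shown are the Ethash spec constants,
# reproduced here as a reference sketch.
WORD_BYTES = 4                # bytes in a word
DATASET_BYTES_INIT = 2**30    # bytes in dataset at genesis
DATASET_BYTES_GROWTH = 2**23  # dataset growth per epoch
CACHE_BYTES_INIT = 2**24      # bytes in cache at genesis
CACHE_BYTES_GROWTH = 2**17    # cache growth per epoch
EPOCH_LENGTH = 30000          # blocks per epoch
MIX_BYTES = 128               # width of mix
HASH_BYTES = 64               # hash length in bytes
DATASET_PARENTS = 256         # number of parents of each dataset element
CACHE_ROUNDS = 3              # number of rounds in cache production
ACCESSES = 64                 # number of accesses in hashimoto loop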


# Epoch 0 seed is 32 zero bytes; later epoch seeds are obtained by repeatedly
# hashing the previous one (see mkcache below).
cache_seeds = [b'\x00' * 32]
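

# sha3_512 and sha3_256 below are word-list wrappers (distinct from the raw
# sha3 module used for the seed chain), and are assumed to be defined
# alongside this file together with the integer codecs.  A Python 3 sketch,
# following the spec: hashes are handled as lists of unsigned 32-bit
# little-endian words rather than byte strings.

def encode_int(x):
    # minimal little-endian byte encoding of a non-negative integer
    return x.to_bytes((x.bit_length() + 7) // 8, 'little')


def decode_int(b):
    return int.from_bytes(b, 'little')


def zpad(b, length):
    return b + b'\x00' * max(0, length - len(b))


def serialize_hash(h):
    return b''.join(zpad(encode_int(w), WORD_BYTES) for w in h)


def deserialize_hash(b):
    return [decode_int(b[i:i + WORD_BYTES]) for i in range(0, len(b), WORD_BYTES)]


def hash_words(h, x):
    # accept either raw bytes or a word list; hash; return a word list
    if isinstance(x, list):
        x = serialize_hash(x)
    return deserialize_hash(h(x))


def sha3_512(x):
    return hash_words(lambda v: sha3.sha3_512(v).digest(), x)


def sha3_256(x):
    return hash_words(lambda v: sha3.sha3_256(v).digest(), x)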


def mkcache(block_number):
    # Extend the seed chain until it covers this block's epoch: the seed for
    # epoch i is sha3_256 applied i times to 32 zero bytes.
    while len(cache_seeds) <= block_number // EPOCH_LENGTH:
        cache_seeds.append(sha3.sha3_256(cache_seeds[-1]).digest())

    seed = cache_seeds[block_number // EPOCH_LENGTH]

    n = get_cache_size(block_number) // HASH_BYTES
    return _get_cache(seed, n)
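

# get_cache_size and get_full_size are assumed to be provided elsewhere; the
# spec computes them by starting from a linearly growing size and stepping
# down until the element count is prime, which keeps the pseudo-random access
# patterns in _get_cache and hashimoto from falling into short cycles.
# A sketch:

def isprime(x):
    return x > 1 and all(x % i for i in range(2, int(x**0.5) + 1))


def get_cache_size(block_number):
    sz = CACHE_BYTES_INIT + CACHE_BYTES_GROWTH * (block_number // EPOCH_LENGTH)
    sz -= HASH_BYTES
    while not isprime(sz // HASH_BYTES):
        sz -= 2 * HASH_BYTES
    return sz


def get_full_size(block_number):
    sz = DATASET_BYTES_INIT + DATASET_BYTES_GROWTH * (block_number // EPOCH_LENGTH)
    sz -= MIX_BYTES
    while not isprime(sz // MIX_BYTES):
        sz -= 2 * MIX_BYTES
    return sz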


@lru_cache(5)
def _get_cache(seed, n):
    # Sequentially produce the initial dataset
    o = [sha3_512(seed)]
    for i in range(1, n):
        o.append(sha3_512(o[-1]))

    # Low-round version of RandMemoHash: rehash each item from its left
    # neighbour XORed with a pseudo-randomly chosen item.
    for _ in range(CACHE_ROUNDS):
        for i in range(n):
            v = o[i][0] % n
            o[i] = sha3_512(list(map(xor, o[(i - 1 + n) % n], o[v])))

    return o


def calc_dataset_item(cache, i):
    n = len(cache)
    r = HASH_BYTES // WORD_BYTES  # words per hash
    # initialize the mix from one cache item, keyed by the item index
    mix = copy.copy(cache[i % n])
    mix[0] ^= i
    mix = sha3_512(mix)
    # fold in DATASET_PARENTS pseudo-randomly chosen cache parents with fnv
    # (sketched below)
    for j in range(DATASET_PARENTS):
        cache_index = fnv(i ^ j, mix[j % r])
        mix = list(map(fnv, mix, cache[cache_index % n]))
    return sha3_512(mix)
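

# fnv is the spec's 32-bit FNV-style mixing function, assumed defined
# elsewhere; reproduced here for reference.  Note it is FNV-like rather than
# literal FNV-1: the XOR is applied to a full word after the multiply.
FNV_PRIME = 0x01000193


def fnv(v1, v2):
    return ((v1 * FNV_PRIME) ^ v2) % 2**32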


def calc_dataset(full_size, cache):
    o = []
    # progress step; max() guards against division by zero on tiny test sizes
    percent = max((full_size // HASH_BYTES) // 100, 1)
    for i in range(full_size // HASH_BYTES):
        if i % percent == 0:
            sys.stderr.write("Completed %d items, %d percent\n" % (i, i // percent))
        o.append(calc_dataset_item(cache, i))
    return o
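

# Usage sketch for dataset generation (illustrative; pure Python is far too
# slow for real mining and this file exists as a specification):
#
#     cache = mkcache(block_number)
#     dataset = calc_dataset(get_full_size(block_number), cache)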


def hashimoto(header, nonce, full_size, dataset_lookup):
    n = full_size // HASH_BYTES
    w = MIX_BYTES // WORD_BYTES
    mixhashes = MIX_BYTES // HASH_BYTES
    # combine header and nonce into an initial 64-byte seed
    s = sha3_512(header + nonce[::-1])
    # start the mix with MIX_BYTES worth of replicated seed
    mix = []
    for _ in range(mixhashes):
        mix.extend(s)
    # each access fetches mixhashes consecutive dataset items (MIX_BYTES of
    # pseudo-random data) and folds them into the mix with fnv
    for i in range(ACCESSES):
        p = (fnv(i ^ s[0], mix[i % w]) % (n // mixhashes)) * mixhashes
        newdata = []
        for j in range(mixhashes):
            newdata.extend(dataset_lookup(p + j))
        mix = list(map(fnv, mix, newdata))
    # compress the mix 4:1 into the 32-byte mix digest
    cmix = []
    for i in range(0, len(mix), 4):
        cmix.append(fnv(fnv(fnv(mix[i], mix[i + 1]), mix[i + 2]), mix[i + 3]))
    return {
        "mix digest": serialize_hash(cmix),
        "result": serialize_hash(sha3_256(s + cmix))
    }


def hashimoto_light(block_number, cache, header, nonce):
    # verify without the full dataset: derive needed dataset items on the fly
    return hashimoto(header, nonce, get_full_size(block_number),
                     lambda x: calc_dataset_item(cache, x))


def hashimoto_full(dataset, header, nonce):
    # mine or verify against the precomputed full dataset
    return hashimoto(header, nonce, len(dataset) * HASH_BYTES,
                     lambda x: dataset[x])
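

# A minimal verification sketch (check_pow is a hypothetical helper, not part
# of this commit): a nonce is valid when the light-client recomputation
# reproduces the claimed mix digest and the result, read as an integer, is at
# most 2**256 // difficulty.
def check_pow(block_number, cache, header, nonce, mix_digest, difficulty):
    out = hashimoto_light(block_number, cache, header, nonce)
    return (out["mix digest"] == mix_digest and
            decode_int(out["result"]) <= 2**256 // difficulty)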


def mine(full_size, dataset, header, difficulty):
    # full_size is kept for interface parity; hashimoto_full derives it from
    # the dataset itself.  The nonce is hashed as 8 bytes (hashimoto reverses
    # it), and a hash counts as a solution when its result, read as an
    # integer, is at most 2**256 // difficulty.
    from random import randint
    nonce = randint(0, 2**64 - 1)
    target = 2**256 // difficulty
    while decode_int(hashimoto_full(dataset, header,
                                    struct.pack(">Q", nonce))["result"]) > target:
        nonce = (nonce + 1) % 2**64
    return nonce
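

# End-to-end usage sketch (illustrative only; `header` stands in for a
# hypothetical 32-byte header hash):
#
#     cache = mkcache(block_number)
#     dataset = calc_dataset(get_full_size(block_number), cache)
#     nonce = mine(len(dataset) * HASH_BYTES, dataset, header, difficulty)
#     out = hashimoto_light(block_number, cache, header,
#                           struct.pack(">Q", nonce))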