This repository was archived by the owner on May 23, 2023. It is now read-only.

Commit 0173c54

Author: Jan Xie

Merge pull request #751 from gsalgado/develop

Revert deletion of pow.ethash and pow.ethash_utils

2 parents: 4bf59c1 + 5710b51

6 files changed: +265 −8


Makefile

Lines changed: 1 addition & 1 deletion

@@ -40,7 +40,7 @@ lint:
 	flake8 ethereum tests --ignore=E501
 
 lint-minimal:
-	python -m flake8 --ignore=F401,F841,F811 --select=F --exclude=todo,experimental ethereum
+	python -m flake8 --ignore=F401,F841,F811 --select=F --exclude=todo,experimental,ethash.py,ethash_utils.py ethereum
 
 test:
 	py.test --tb=no ethereum/tests/

ethereum/pow/__init__.py

Lines changed: 1 addition & 1 deletion

@@ -1 +1 @@
-from ethereum.pow import chain, consensus, ethpow
+from ethereum.pow import chain, consensus, ethash, ethash_utils, ethpow
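With this line restored, the two reverted modules are reachable as package attributes again, which the consensus.py and ethpow.py changes below rely on. A trivial check of the re-export (the alias name is arbitrary):

    import ethereum.pow as pow_pkg

    # the restored modules are importable as package attributes again
    assert hasattr(pow_pkg, 'ethash')
    assert hasattr(pow_pkg, 'ethash_utils')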

ethereum/pow/consensus.py

Lines changed: 1 addition & 1 deletion

@@ -1,4 +1,4 @@
-from ethereum.pow import ethpow
+from ethereum.pow import ethash, ethash_utils, ethpow
 from ethereum import utils
 from ethereum.common import update_block_env_variables, calc_difficulty
 from ethereum.exceptions import VerificationFailed

ethereum/pow/ethash.py

Lines changed: 100 additions & 0 deletions (new file)

import copy
import sys

# Constants and word-level hash helpers (EPOCH_LENGTH, HASH_BYTES, fnv, xor,
# sha3_256, sha3_512, sha3_256_bytes, serialize_hash, decode_int, encode_int,
# zpad, get_cache_size, get_full_size) come from ethash_utils.
from ethereum.pow.ethash_utils import *

if sys.version_info.major == 2:
    from repoze.lru import lru_cache
else:
    from functools import lru_cache


cache_seeds = [b'\x00' * 32]


def mkcache(block_number):
    # Each epoch's seed is the keccak-256 of the previous epoch's seed
    while len(cache_seeds) <= block_number // EPOCH_LENGTH:
        cache_seeds.append(sha3_256_bytes(cache_seeds[-1]))

    seed = cache_seeds[block_number // EPOCH_LENGTH]

    n = get_cache_size(block_number) // HASH_BYTES
    return _get_cache(seed, n)


@lru_cache(5)
def _get_cache(seed, n):
    # Sequentially produce the initial dataset
    o = [sha3_512(seed)]
    for i in range(1, n):
        o.append(sha3_512(o[-1]))

    # Low-round variant of randmemohash to harden the cache
    for _ in range(CACHE_ROUNDS):
        for i in range(n):
            v = o[i][0] % n
            o[i] = sha3_512(list(map(xor, o[(i - 1 + n) % n], o[v])))

    return o


def calc_dataset_item(cache, i):
    n = len(cache)
    r = HASH_BYTES // WORD_BYTES
    # Initialize the mix from the i-th cache entry
    mix = copy.copy(cache[i % n])
    mix[0] ^= i
    mix = sha3_512(mix)
    # FNV-combine the mix with DATASET_PARENTS pseudorandom cache parents
    for j in range(DATASET_PARENTS):
        cache_index = fnv(i ^ j, mix[j % r])
        mix = list(map(fnv, mix, cache[cache_index % n]))
    return sha3_512(mix)


def calc_dataset(full_size, cache):
    o = []
    percent = (full_size // HASH_BYTES) // 100
    for i in range(full_size // HASH_BYTES):
        if i % percent == 0:
            sys.stderr.write("Completed %d items, %d percent\n" % (i, i // percent))
        o.append(calc_dataset_item(cache, i))
    return o


def hashimoto(header, nonce, full_size, dataset_lookup):
    n = full_size // HASH_BYTES
    w = MIX_BYTES // WORD_BYTES
    mixhashes = MIX_BYTES // HASH_BYTES
    # Combine header and reversed nonce into a 64-byte starting value
    s = sha3_512(header + nonce[::-1])
    mix = []
    for _ in range(MIX_BYTES // HASH_BYTES):
        mix.extend(s)
    # Mix in ACCESSES pseudorandomly selected dataset pages
    for i in range(ACCESSES):
        p = fnv(i ^ s[0], mix[i % w]) % (n // mixhashes) * mixhashes
        newdata = []
        for j in range(mixhashes):
            newdata.extend(dataset_lookup(p + j))
        mix = list(map(fnv, mix, newdata))
    # Compress the mix four words at a time
    cmix = []
    for i in range(0, len(mix), 4):
        cmix.append(fnv(fnv(fnv(mix[i], mix[i + 1]), mix[i + 2]), mix[i + 3]))
    # Bytes keys, matching the o[b"mix digest"] / o[b"result"] accesses in ethpow
    return {
        b"mix digest": serialize_hash(cmix),
        b"result": serialize_hash(sha3_256(s + cmix))
    }


def hashimoto_light(block_number, cache, header, nonce):
    return hashimoto(header, nonce, get_full_size(block_number),
                     lambda x: calc_dataset_item(cache, x))


def hashimoto_full(dataset, header, nonce):
    return hashimoto(header, nonce, len(dataset) * HASH_BYTES,
                     lambda x: dataset[x])


def mine(full_size, dataset, header, difficulty):
    # full_size is implied by len(dataset) in hashimoto_full above
    from random import randint
    target = 2**256 // difficulty
    nonce = randint(0, 2**64 - 1)
    while True:
        # 8-byte big-endian nonce, the same convention ethpow.mine uses
        bin_nonce = zpad(encode_int(nonce), 8)[::-1]
        out = hashimoto_full(dataset, header, bin_nonce)
        # compare the result, read as a big-endian integer, against the target
        if decode_int(out[b"result"][::-1]) <= target:
            return nonce
        nonce = (nonce + 1) % 2**64
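For orientation, a sketch of how the restored module would be driven for light verification. The block number, header hash, and nonce below are hypothetical placeholders; real callers go through ethpow.py rather than this module directly, and building even the genesis cache in pure Python is slow:

    from ethereum.pow import ethash

    block_number = 0                      # genesis epoch (hypothetical example)
    cache = ethash.mkcache(block_number)  # pure-Python cache, noticeably slow
    header_hash = b'\x12' * 32            # placeholder 32-byte mining hash
    nonce = b'\x00' * 8                   # placeholder 8-byte nonce

    out = ethash.hashimoto_light(block_number, cache, header_hash, nonce)
    assert len(out[b"mix digest"]) == 32 and len(out[b"result"]) == 32
    # out[b"result"], read as a big-endian integer, must be at most
    # 2**256 // difficulty for a valid proof-of-work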

ethereum/pow/ethash_utils.py

Lines changed: 136 additions & 0 deletions (new file)

# Keccak-256/512 primitives operating on byte strings; named *_bytes so the
# word-level sha3_256/sha3_512 wrappers below do not shadow them.
try:
    from Crypto.Hash import keccak
    sha3_256_bytes = lambda x: keccak.new(digest_bits=256, data=x).digest()
    sha3_512_bytes = lambda x: keccak.new(digest_bits=512, data=x).digest()
except ImportError:
    import sha3 as _sha3
    sha3_256_bytes = lambda x: _sha3.sha3_256(x).digest()
    sha3_512_bytes = lambda x: _sha3.sha3_512(x).digest()
from rlp.utils import decode_hex
from ethereum.utils import encode_hex
import sys

WORD_BYTES = 4                    # bytes in word
DATASET_BYTES_INIT = 2**30        # bytes in dataset at genesis
DATASET_BYTES_GROWTH = 2**23      # dataset growth per epoch (~7 GB per year)
CACHE_BYTES_INIT = 2**24          # bytes in cache at genesis
CACHE_BYTES_GROWTH = 2**17        # cache growth per epoch
EPOCH_LENGTH = 30000              # blocks per epoch
MIX_BYTES = 128                   # width of mix
HASH_BYTES = 64                   # hash length in bytes
DATASET_PARENTS = 256             # number of parents of each dataset element
CACHE_ROUNDS = 3                  # number of rounds in cache production
ACCESSES = 64                     # number of accesses in hashimoto loop


FNV_PRIME = 0x01000193


def fnv(v1, v2):
    return (v1 * FNV_PRIME ^ v2) % 2**32


# Assumes little endian bit ordering (same as Intel architectures)
def decode_int(s):
    return int(encode_hex(s[::-1]), 16) if s else 0


def encode_int(s):
    a = "%x" % s
    return b'' if s == 0 else decode_hex('0' * (len(a) % 2) + a)[::-1]


def zpad(s, length):
    return s + b'\x00' * max(0, length - len(s))


def serialize_hash(h):
    return b''.join([zpad(encode_int(x), 4) for x in h])


def deserialize_hash(h):
    return [decode_int(h[i:i + WORD_BYTES]) for i in range(0, len(h), WORD_BYTES)]


def hash_words(h, sz, x):
    # Hash a list of 32-bit words (or a byte string) and return a word list
    if isinstance(x, list):
        x = serialize_hash(x)
    y = h(x)
    return deserialize_hash(y)


def to_bytes(x):
    if sys.version_info.major > 2 and isinstance(x, str):
        x = bytes(x, 'utf-8')
    return x


# sha3 hash function, outputs 64 bytes as a list of 32-bit words
def sha3_512(x):
    return hash_words(lambda v: sha3_512_bytes(to_bytes(v)), 64, x)


def sha3_256(x):
    return hash_words(lambda v: sha3_256_bytes(to_bytes(v)), 32, x)


def xor(a, b):
    return a ^ b


# Works for dataset and cache
def serialize_cache(ds):
    return b''.join([serialize_hash(h) for h in ds])

serialize_dataset = serialize_cache


def deserialize_cache(ds):
    return [deserialize_hash(ds[i:i + HASH_BYTES])
            for i in range(0, len(ds), HASH_BYTES)]

deserialize_dataset = deserialize_cache


class ListWrapper(list):
    # Wraps a flat byte string so it can be indexed like a list of hashes
    def __init__(self, data):
        self.data = data
        self.len = len(data) // HASH_BYTES

    def __len__(self):
        return self.len

    def __getitem__(self, i):
        if i >= self.len:
            raise IndexError("listwrap access out of range")
        return deserialize_hash(self.data[i * HASH_BYTES:(i + 1) * HASH_BYTES])

    def __iter__(self):
        for i in range(self.len):
            yield self[i]

    def __repr__(self):
        return repr([x for x in self])


def isprime(x):
    # Trial division up to and including floor(sqrt(x))
    for i in range(2, int(x**0.5) + 1):
        if not x % i:
            return False
    return True


def get_cache_size(block_number):
    sz = CACHE_BYTES_INIT + CACHE_BYTES_GROWTH * (block_number // EPOCH_LENGTH)
    sz -= HASH_BYTES
    while not isprime(sz // HASH_BYTES):
        sz -= 2 * HASH_BYTES
    return sz


def get_full_size(block_number):
    sz = DATASET_BYTES_INIT + DATASET_BYTES_GROWTH * (block_number // EPOCH_LENGTH)
    sz -= MIX_BYTES
    while not isprime(sz // MIX_BYTES):
        sz -= 2 * MIX_BYTES
    return sz
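The helpers above all revolve around one representation choice: hashes travel as lists of 32-bit little-endian words rather than byte strings. A few hand-checkable examples of the conversions, using only functions defined in this file:

    from ethereum.pow.ethash_utils import (
        decode_int, encode_int, serialize_hash, deserialize_hash, fnv)

    # int <-> little-endian bytes round trip (258 == 0x0102)
    assert encode_int(258) == b'\x02\x01'
    assert decode_int(b'\x02\x01') == 258

    # a "hash" is a list of 32-bit words, serialized 4 bytes each, little-endian
    assert serialize_hash([1, 2]) == b'\x01\x00\x00\x00\x02\x00\x00\x00'
    assert deserialize_hash(b'\x01\x00\x00\x00\x02\x00\x00\x00') == [1, 2]

    # fnv is the 32-bit mixing primitive used by the dataset and hashimoto loops
    assert fnv(0, 1) == 1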

ethereum/pow/ethpow.py

Lines changed: 26 additions & 5 deletions

@@ -1,8 +1,12 @@
+from ethereum.pow import ethash
 from ethereum import utils
+import time
 import sys
+import warnings
 from collections import OrderedDict
+from ethereum import utils
 from ethereum.slogging import get_logger
-import pyethash
+import rlp
 
 log = get_logger('eth.pow')
 
@@ -11,10 +15,24 @@
 else:
     from functools import lru_cache
 
-mkcache = pyethash.mkcache_bytes
-EPOCH_LENGTH = 30000
-def hashimoto_light(s, c, h, n):
-    return pyethash.hashimoto_light(s, c, h, utils.big_endian_to_int(n))
+try:
+    import pyethash
+    ETHASH_LIB = 'pyethash'  # the C++ based implementation
+except ImportError:
+    ETHASH_LIB = 'ethash'
+    warnings.warn('using pure python implementation', ImportWarning)
+
+if ETHASH_LIB == 'ethash':
+    mkcache = ethash.mkcache
+    EPOCH_LENGTH = 30000
+    hashimoto_light = ethash.hashimoto_light
+elif ETHASH_LIB == 'pyethash':
+    mkcache = pyethash.mkcache_bytes
+    EPOCH_LENGTH = 30000
+    hashimoto_light = lambda s, c, h, n: \
+        pyethash.hashimoto_light(s, c, h, utils.big_endian_to_int(n))
+else:
+    raise Exception("invalid ethash library set")
 
 TT64M1 = 2**64 - 1
 cache_seeds = ['\x00' * 32]
@@ -23,6 +41,7 @@ def hashimoto_light(s, c, h, n):
 
 
 def get_cache(block_number):
+    import sha3
     while len(cache_seeds) <= block_number // EPOCH_LENGTH:
         cache_seeds.append(utils.sha3(cache_seeds[-1]))
     seed = cache_seeds[block_number // EPOCH_LENGTH]
@@ -105,3 +124,5 @@ def mine(block_number, difficulty, mining_hash, start_nonce=0, rounds=1000):
             assert len(o[b"mix digest"]) == 32
             return bin_nonce, o[b"mix digest"]
     return None, None
+
+
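The net effect of this last diff is a soft dependency: ethpow prefers the pyethash C extension and falls back to the pure-Python port restored by this commit, binding the same names either way. A minimal sketch of how a caller sees the selection, using only names introduced in the diff above:

    from ethereum.pow import ethpow

    # 'pyethash' if the C extension imported, else 'ethash' (the pure-Python
    # fallback, announced with an ImportWarning at import time)
    print(ethpow.ETHASH_LIB)

    # Both branches bind mkcache, EPOCH_LENGTH and hashimoto_light, so the
    # mine() and check_pow() code built on them is unchanged:
    #   hashimoto_light(block_number, cache, mining_hash, bin_nonce)
    # returns the dict with b"mix digest" and b"result" that mine() checks.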
