diff --git a/ethereum/abi.py b/ethereum/abi.py
index 4d3b9e8d0..ae8dae637 100644
--- a/ethereum/abi.py
+++ b/ethereum/abi.py
@@ -1,11 +1,11 @@
 import sys
 import re
 import yaml # use yaml instead of json to get non unicode (works with ascii only data)
-from ethereum import utils
+import utils
 from rlp.utils import decode_hex, encode_hex
-from ethereum.utils import encode_int, zpad, big_endian_to_int, is_numeric, is_string, ceil32
-from ethereum.utils import isnumeric
+from utils import encode_int, zpad, big_endian_to_int, is_numeric, is_string, ceil32, ADDR_BYTES, isnumeric
 import ast
+import copy
 
 
 def json_decode(x):
@@ -100,24 +100,24 @@ def decode(self, name, data):
     def is_unknown_type(self, name):
         return self.function_data[name]["is_unknown_type"]
 
-    def listen(self, log, noprint=False):
-        if not len(log.topics) or log.topics[0] not in self.event_data:
+    def listen(self, sender, topics, data, noprint=False):
+        if not len(topics) or topics[0] not in self.event_data:
             return
-        types = self.event_data[log.topics[0]]['types']
-        name = self.event_data[log.topics[0]]['name']
-        names = self.event_data[log.topics[0]]['names']
-        indexed = self.event_data[log.topics[0]]['indexed']
+        types = self.event_data[topics[0]]['types']
+        name = self.event_data[topics[0]]['name']
+        names = self.event_data[topics[0]]['names']
+        indexed = self.event_data[topics[0]]['indexed']
         indexed_types = [types[i] for i in range(len(types)) if indexed[i]]
         unindexed_types = [types[i] for i in range(len(types)) if not indexed[i]]
         # print('listen', log.data.encode('hex'), log.topics)
-        deserialized_args = decode_abi(unindexed_types, log.data)
+        deserialized_args = decode_abi(unindexed_types, data)
         o = {}
         c1, c2 = 0, 0
         for i in range(len(names)):
             if indexed[i]:
-                topic_bytes = utils.zpad(utils.encode_int(log.topics[c1 + 1]), 32)
+                topic_bytes = utils.zpad(utils.encode_int(topics[c1 + 1]), 32)
                 o[names[i]] = decode_single(process_type(indexed_types[c1]), topic_bytes)
                 c1 += 1
@@ -153,7 +153,7 @@ def decint(n):
        return n
     elif is_numeric(n):
         raise EncodingError("Number out of range: %r" % n)
-    elif is_string(n) and len(n) == 40:
+    elif is_string(n) and len(n) == ADDR_BYTES * 2:
         return big_endian_to_int(decode_hex(n))
     elif is_string(n) and len(n) <= 32:
         return big_endian_to_int(n)
@@ -232,18 +232,21 @@ def encode_single(typ, arg):
         assert sub == ''
         if isnumeric(arg):
             return zpad(encode_int(arg), 32)
-        elif len(arg) == 20:
+        elif len(arg) == ADDR_BYTES:
             return zpad(arg, 32)
-        elif len(arg) == 40:
+        elif len(arg) == ADDR_BYTES * 2:
             return zpad(decode_hex(arg), 32)
-        elif len(arg) == 42 and arg[2:] == '0x':
+        elif len(arg) == ADDR_BYTES * 2 + 2 and arg[2:] == '0x':
             return zpad(decode_hex(arg[2:]), 32)
         else:
             raise EncodingError("Could not parse address: %r" % arg)
     raise EncodingError("Unhandled type: %r %r" % (base, sub))
 
 
+proctype_cache = {}
 def process_type(typ):
+    if typ in proctype_cache:
+        return proctype_cache[typ]
     # Crazy reg expression to separate out base type component (eg. uint),
     # size (eg. 256, 128x128, none), array component (eg.
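The hunk above memoizes process_type in a module-level proctype_cache dict. A minimal stand-alone sketch of the same parse-and-cache behaviour, with the regex copied from the function above; parse_type and _parse_cache are hypothetical stand-ins that skip the validity assertions:

import ast
import re

_parse_cache = {}                      # same idea as proctype_cache above

def parse_type(typ):
    """Split an ABI type string into (base, sub, arrlist), caching the result."""
    if typ in _parse_cache:
        return _parse_cache[typ]
    base, sub, arr, _ = re.match(r'([a-z]*)([0-9]*x?[0-9]*)((\[[0-9]*\])*)', typ).groups()
    arrlist = re.findall(r'\[[0-9]*\]', arr)
    out = (base, sub, [ast.literal_eval(x) for x in arrlist])
    _parse_cache[typ] = out
    return out

assert parse_type('uint256') == ('uint', '256', [])
assert parse_type('bytes32[2][]') == ('bytes', '32', [[2], []])
assert parse_type('uint256') is _parse_cache['uint256']   # second lookup hits the cache

ABI calls tend to reuse a small set of type strings, so the dict lookup replaces repeated regex work; as in the hunk above, the cache is never evicted.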
[], [45], none) regexp = '([a-z]*)([0-9]*x?[0-9]*)((\[[0-9]*\])*)' @@ -251,12 +254,8 @@ def process_type(typ): arrlist = re.findall('\[[0-9]*\]', arr) assert len(''.join(arrlist)) == len(arr), \ "Unknown characters found in array declaration" - # Check validity of string type - if base == 'string' or base == 'bytes': - assert re.match('^[0-9]*$', sub), \ - "String type must have no suffix or numerical suffix" # Check validity of integer type - elif base == 'uint' or base == 'int': + if base == 'uint' or base == 'int': assert re.match('^[0-9]+$', sub), \ "Integer type must have numerical suffix" assert 8 <= int(sub) <= 256, \ @@ -264,7 +263,7 @@ def process_type(typ): assert int(sub) % 8 == 0, \ "Integer size must be multiple of 8" # Check validity of string type - if base == 'string' or base == 'bytes': + elif base == 'string' or base == 'bytes': assert re.match('^[0-9]*$', sub), \ "String type must have no suffix or numerical suffix" assert not sub or int(sub) <= 32, \ @@ -285,7 +284,9 @@ def process_type(typ): # Check validity of address type elif base == 'address': assert sub == '', "Address cannot have suffix" - return base, sub, [ast.literal_eval(x) for x in arrlist] + o = base, sub, [ast.literal_eval(x) for x in arrlist] + proctype_cache[typ] = o + return o # Returns the static size of a type, or None if dynamic @@ -374,7 +375,7 @@ def encode_abi(types, args): def decode_single(typ, data): base, sub, _ = typ if base == 'address': - return encode_hex(data[12:]) + return encode_hex(data[32-ADDR_BYTES:]) elif base == 'string' or base == 'bytes' or base == 'hash': return data[:int(sub)] if len(sub) else data elif base == 'uint': @@ -392,8 +393,13 @@ def decode_single(typ, data): return bool(int(data.encode('hex'), 16)) +decode_abi_cache = {} + # Decodes multiple arguments using the head/tail mechanism def decode_abi(types, data): + cache_key = str(types) + data + if cache_key in decode_abi_cache: + return copy.deepcopy(decode_abi_cache[cache_key]) # Process types proctypes = [process_type(typ) for typ in types] # Get sizes of everything @@ -431,7 +437,9 @@ def decode_abi(types, data): next_offset = start_positions[i + 1] outs[i] = data[offset:next_offset] # Recursively decode them all - return [dec(proctypes[i], outs[i]) for i in range(len(outs))] + o = [dec(proctypes[i], outs[i]) for i in range(len(outs))] + decode_abi_cache[cache_key] = copy.deepcopy(o) + return o # Decode a single value (static or dynamic) diff --git a/ethereum/blocks.py b/ethereum/blocks.py deleted file mode 100644 index b8fdae52d..000000000 --- a/ethereum/blocks.py +++ /dev/null @@ -1,1411 +0,0 @@ -# ####### dev hack flags ############### - -dump_block_on_failed_verification = False - -# ###################################### -import time -from itertools import count -import sys -import rlp -from rlp.sedes import big_endian_int, Binary, binary, CountableList -from rlp.utils import decode_hex, encode_hex -from ethereum import pruning_trie as trie -from ethereum.pruning_trie import Trie -from ethereum.securetrie import SecureTrie -from ethereum import utils -from ethereum.utils import address, int256, trie_root, hash32, to_string -from ethereum import processblock -from ethereum.transactions import Transaction -from ethereum import bloom -from ethereum.exceptions import UnknownParentException, VerificationFailed -from ethereum.slogging import get_logger -from ethereum.ethpow import check_pow -from ethereum.db import BaseDB -from ethereum.config import Env, default_config - -if sys.version_info.major == 2: - from 
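decode_abi above now caches decoded results keyed on the type list plus the raw payload and deep-copies on both store and load, so a caller that mutates a returned list cannot poison the cache. A generic version of that pattern, sketched as a decorator; cached_deepcopy and the toy decode below are illustrative, not part of the module:

import copy
import functools

def cached_deepcopy(func):
    """Memoize func, handing each caller an independent copy of the result."""
    cache = {}

    @functools.wraps(func)
    def wrapper(*args):
        key = repr(args)
        if key not in cache:
            cache[key] = copy.deepcopy(func(*args))
        return copy.deepcopy(cache[key])
    return wrapper

@cached_deepcopy
def decode(types, data):
    return [len(data)] * len(types)    # placeholder for the real head/tail decoder

first = decode(('uint256', 'bytes'), b'\x00' * 64)
first[0] = 999                         # mutating the returned copy...
assert decode(('uint256', 'bytes'), b'\x00' * 64) == [64, 64]   # ...leaves the cache intact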
repoze.lru import lru_cache -else: - from functools import lru_cache - - -log = get_logger('eth.block') -log_state = get_logger('eth.msg.state') -Log = processblock.Log - - - -# Difficulty adjustment algo -def calc_difficulty(parent, timestamp): - config = parent.config - offset = parent.difficulty // config['BLOCK_DIFF_FACTOR'] - if parent.number >= (config['HOMESTEAD_FORK_BLKNUM'] - 1): - sign = max(1 - 2 * ((timestamp - parent.timestamp) // config['HOMESTEAD_DIFF_ADJUSTMENT_CUTOFF']), -99) - else: - sign = 1 if timestamp - parent.timestamp < config['DIFF_ADJUSTMENT_CUTOFF'] else -1 - # If we enter a special mode where the genesis difficulty starts off below - # the minimal difficulty, we allow low-difficulty blocks (this will never - # happen in the official protocol) - o = int(max(parent.difficulty + offset * sign, min(parent.difficulty, config['MIN_DIFF']))) - period_count = (parent.number + 1) // config['EXPDIFF_PERIOD'] - if period_count >= config['EXPDIFF_FREE_PERIODS']: - o = max(o + 2**(period_count - config['EXPDIFF_FREE_PERIODS']), config['MIN_DIFF']) - return o - - - -class Account(rlp.Serializable): - - """An Ethereum account. - - :ivar nonce: the account's nonce (the number of transactions sent by the - account) - :ivar balance: the account's balance in wei - :ivar storage: the root of the account's storage trie - :ivar code_hash: the SHA3 hash of the code associated with the account - :ivar db: the database in which the account's code is stored - """ - - fields = [ - ('nonce', big_endian_int), - ('balance', big_endian_int), - ('storage', trie_root), - ('code_hash', hash32) - ] - - def __init__(self, nonce, balance, storage, code_hash, db): - assert isinstance(db, BaseDB) - self.db = db - super(Account, self).__init__(nonce, balance, storage, code_hash) - - @property - def code(self): - """The EVM code of the account. - - This property will be read from or written to the db at each access, - with :ivar:`code_hash` used as key. - """ - return self.db.get(self.code_hash) - - @code.setter - def code(self, value): - self.code_hash = utils.sha3(value) - # Technically a db storage leak, but doesn't really matter; the only - # thing that fails to get garbage collected is when code disappears due - # to a suicide - self.db.inc_refcount(self.code_hash, value) - - @classmethod - def blank_account(cls, db, initial_nonce=0): - """Create a blank account - - The returned account will have zero nonce and balance, a blank storage - trie and empty code. - - :param db: the db in which the account will store its code. - """ - code_hash = utils.sha3(b'') - db.put(code_hash, b'') - return cls(initial_nonce, 0, trie.BLANK_ROOT, code_hash, db) - - -class Receipt(rlp.Serializable): - - fields = [ - ('state_root', trie_root), - ('gas_used', big_endian_int), - ('bloom', int256), - ('logs', CountableList(processblock.Log)) - ] - - def __init__(self, state_root, gas_used, logs, bloom=None): - # does not call super.__init__ as bloom should not be an attribute but a property - self.state_root = state_root - self.gas_used = gas_used - self.logs = logs - if bloom is not None and bloom != self.bloom: - raise ValueError("Invalid bloom filter") - self._cached_rlp = None - self._mutable = True - - @property - def bloom(self): - bloomables = [x.bloomables() for x in self.logs] - return bloom.bloom_from_list(utils.flatten(bloomables)) - - -class BlockHeader(rlp.Serializable): - - """A block header. 
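calc_difficulty in the deleted blocks.py nudges difficulty by parent_difficulty // BLOCK_DIFF_FACTOR, picks the sign from the parent/child timestamp gap (the Homestead rule scales the penalty down to a floor of -99 for very late blocks), and adds the exponential component once enough EXPDIFF_PERIODs have passed. A self-contained rendering of that arithmetic; the config values are illustrative placeholders, not necessarily the defaults in config.py:

DIFF_CONFIG = {
    'BLOCK_DIFF_FACTOR': 2048,
    'HOMESTEAD_FORK_BLKNUM': 10,
    'HOMESTEAD_DIFF_ADJUSTMENT_CUTOFF': 10,
    'DIFF_ADJUSTMENT_CUTOFF': 13,
    'MIN_DIFF': 131072,
    'EXPDIFF_PERIOD': 100000,
    'EXPDIFF_FREE_PERIODS': 2,
}

def calc_difficulty(parent_number, parent_difficulty, parent_timestamp, timestamp, config=DIFF_CONFIG):
    offset = parent_difficulty // config['BLOCK_DIFF_FACTOR']
    if parent_number >= config['HOMESTEAD_FORK_BLKNUM'] - 1:
        # Homestead: the sign scales with how late the block is, floored at -99
        sign = max(1 - 2 * ((timestamp - parent_timestamp) // config['HOMESTEAD_DIFF_ADJUSTMENT_CUTOFF']), -99)
    else:
        sign = 1 if timestamp - parent_timestamp < config['DIFF_ADJUSTMENT_CUTOFF'] else -1
    o = int(max(parent_difficulty + offset * sign, min(parent_difficulty, config['MIN_DIFF'])))
    period_count = (parent_number + 1) // config['EXPDIFF_PERIOD']
    if period_count >= config['EXPDIFF_FREE_PERIODS']:
        o = max(o + 2 ** (period_count - config['EXPDIFF_FREE_PERIODS']), config['MIN_DIFF'])
    return o

# A block mined 5s after its parent raises difficulty; one mined 25s after lowers it.
assert calc_difficulty(20, 2 ** 20, 1000000, 1000005) > 2 ** 20
assert calc_difficulty(20, 2 ** 20, 1000000, 1000025) < 2 ** 20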
- - If the block with this header exists as an instance of :class:`Block`, the - connection can be made explicit by setting :attr:`BlockHeader.block`. Then, - :attr:`BlockHeader.state_root`, :attr:`BlockHeader.tx_list_root` and - :attr:`BlockHeader.receipts_root` always refer to the up-to-date value in - the block instance. - - :ivar block: an instance of :class:`Block` or `None` - :ivar prevhash: the 32 byte hash of the previous block - :ivar uncles_hash: the 32 byte hash of the RLP encoded list of uncle - headers - :ivar coinbase: the 20 byte coinbase address - :ivar state_root: the root of the block's state trie - :ivar tx_list_root: the root of the block's transaction trie - :ivar receipts_root: the root of the block's receipts trie - :ivar bloom: TODO - :ivar difficulty: the block's difficulty - :ivar number: the number of ancestors of this block (0 for the genesis - block) - :ivar gas_limit: the block's gas limit - :ivar gas_used: the total amount of gas used by all transactions in this - block - :ivar timestamp: a UNIX timestamp - :ivar extra_data: up to 1024 bytes of additional data - :ivar nonce: a 32 byte nonce constituting a proof-of-work, or the empty - string as a placeholder - """ - - fields = [ - ('prevhash', hash32), - ('uncles_hash', hash32), - ('coinbase', address), - ('state_root', trie_root), - ('tx_list_root', trie_root), - ('receipts_root', trie_root), - ('bloom', int256), - ('difficulty', big_endian_int), - ('number', big_endian_int), - ('gas_limit', big_endian_int), - ('gas_used', big_endian_int), - ('timestamp', big_endian_int), - ('extra_data', binary), - ('mixhash', binary), - ('nonce', Binary(8, allow_empty=True)) - ] - - def __init__(self, - prevhash=default_config['GENESIS_PREVHASH'], - uncles_hash=utils.sha3rlp([]), - coinbase=default_config['GENESIS_COINBASE'], - state_root=trie.BLANK_ROOT, - tx_list_root=trie.BLANK_ROOT, - receipts_root=trie.BLANK_ROOT, - bloom=0, - difficulty=default_config['GENESIS_DIFFICULTY'], - number=0, - gas_limit=default_config['GENESIS_GAS_LIMIT'], - gas_used=0, - timestamp=0, - extra_data='', - mixhash=default_config['GENESIS_MIXHASH'], - nonce=''): - # at the beginning of a method, locals() is a dict of all arguments - fields = {k: v for k, v in locals().items() if k != 'self'} - if len(fields['coinbase']) == 40: - fields['coinbase'] = decode_hex(fields['coinbase']) - assert len(fields['coinbase']) == 20 - self.block = None - super(BlockHeader, self).__init__(**fields) - - @classmethod - def from_block_rlp(self, rlp_data): - block_data = rlp.decode_lazy(rlp_data) - r = super(BlockHeader, self).deserialize(block_data[0]) - assert isinstance(r, BlockHeader) - return r - - @property - def state_root(self): - if self.block: - return self.block.state_root - else: - return self._state_root - - @state_root.setter - def state_root(self, value): - if self.block: - self.block.state_root = value - else: - self._state_root = value - - @property - def tx_list_root(self): - if self.block: - return self.block.tx_list_root - else: - return self._tx_list_root - - @tx_list_root.setter - def tx_list_root(self, value): - if self.block: - self.block.tx_list_root = value - else: - self._tx_list_root = value - - @property - def receipts_root(self): - if self.block: - return self.block.receipts_root - else: - return self._receipts_root - - @receipts_root.setter - def receipts_root(self, value): - if self.block: - self.block.receipts_root = value - else: - self._receipts_root = value - - _fimxe_hash = None - - @property - def hash(self): - """The binary 
block hash""" - return self._fimxe_hash or utils.sha3(rlp.encode(self)) - - def hex_hash(self): - """The hex encoded block hash""" - return encode_hex(self.hash) - - @property - def mining_hash(self): - return utils.sha3(rlp.encode(self, BlockHeader.exclude(['mixhash', 'nonce']))) - - def check_pow(self, nonce=None): - """Check if the proof-of-work of the block is valid. - - :param nonce: if given the proof of work function will be evaluated - with this nonce instead of the one already present in - the header - :returns: `True` or `False` - """ - log.debug('checking pow', block=self.hex_hash()[:8]) - return check_pow(self.number, self.mining_hash, self.mixhash, nonce or self.nonce, - self.difficulty) - - def to_dict(self): - """Serialize the header to a readable dictionary.""" - d = {} - for field in ('prevhash', 'uncles_hash', 'extra_data', 'nonce', - 'mixhash'): - d[field] = b'0x' + encode_hex(getattr(self, field)) - for field in ('state_root', 'tx_list_root', 'receipts_root', - 'coinbase'): - d[field] = encode_hex(getattr(self, field)) - for field in ('number', 'difficulty', 'gas_limit', 'gas_used', - 'timestamp'): - d[field] = to_string(getattr(self, field)) - d['bloom'] = encode_hex(int256.serialize(self.bloom)) - assert len(d) == len(BlockHeader.fields) - return d - - def __repr__(self): - return '<%s(#%d %s)>' % (self.__class__.__name__, self.number, - encode_hex(self.hash)[:8]) - - def __eq__(self, other): - """Two blockheader are equal iff they have the same hash.""" - return isinstance(other, BlockHeader) and self.hash == other.hash - - def __hash__(self): - return utils.big_endian_to_int(self.hash) - - def __ne__(self, other): - return not self.__eq__(other) - - -def mirror_from(source, attributes, only_getters=True): - """Decorator (factory) for classes that mirror some attributes from an - instance variable. - - :param source: the name of the instance variable to mirror from - :param attributes: list of attribute names to mirror - :param only_getters: if true only getters but not setters are created - """ - def decorator(cls): - for attribute in attributes: - def make_gs_etter(source, attribute): - def getter(self): - return getattr(getattr(self, source), attribute) - - def setter(self, value): - setattr(getattr(self, source), attribute, value) - return getter, setter - getter, setter = make_gs_etter(source, attribute) - if only_getters: - setattr(cls, attribute, property(getter)) - else: - setattr(cls, attribute, property(getter, setter)) - return cls - return decorator - - -@mirror_from('header', set(field for field, _ in BlockHeader.fields) - - set(['state_root', 'receipts_root', 'tx_list_root']), - only_getters=False) -class Block(rlp.Serializable): - - """A block. - - All attributes from the block header are accessible via properties - (i.e. ``block.prevhash`` is equivalent to ``block.header.prevhash``). It - is ensured that no discrepancies between header and block occur. - - :param header: the block header - :param transaction_list: a list of transactions which are replayed if the - state given by the header is not known. If the - state is known, `None` can be used instead of the - empty list. 
- :param uncles: a list of the headers of the uncles of this block - :param db: the database in which the block's state, transactions and - receipts are stored (required) - :param parent: optional parent which if not given may have to be loaded from - the database for replay - """ - - fields = [ - ('header', BlockHeader), - ('transaction_list', CountableList(Transaction)), - ('uncles', CountableList(BlockHeader)) - ] - - def __init__(self, header, transaction_list=[], uncles=[], env=None, - parent=None, making=False): - assert isinstance(env, Env), "No Env object given" - assert isinstance(env.db, BaseDB), "No database object given" - self.env = env # don't re-set after init - self.db = env.db - self.config = env.config - - self.header = header - self.uncles = uncles - - self.uncles = uncles - self.suicides = [] - self.logs = [] - self.log_listeners = [] - self.refunds = 0 - - self.ether_delta = 0 - self._get_transactions_cache = [] - - # Journaling cache for state tree updates - self.caches = { - 'balance': {}, - 'nonce': {}, - 'code': {}, - 'storage': {}, - 'all': {} - } - self.journal = [] - - if self.number > 0: - self.ancestor_hashes = [self.prevhash] - else: - self.ancestor_hashes = [None] * 256 - - # do some consistency checks on parent if given - if parent: - if hasattr(parent, 'db') and self.db != parent.db and self.db.db != parent.db: - raise ValueError("Parent lives in different database") - if self.prevhash != parent.header.hash: - raise ValueError("Block's prevhash and parent's hash do not match") - if self.number != parent.header.number + 1: - raise ValueError("Block's number is not the successor of its parent number") - if not check_gaslimit(parent, self.gas_limit): - raise ValueError("Block's gaslimit is inconsistent with its parent's gaslimit") - if self.difficulty != calc_difficulty(parent, self.timestamp): - raise ValueError("Block's difficulty is inconsistent with its parent's difficulty") - if self.gas_used > self.gas_limit: - raise ValueError("Gas used exceeds gas limit") - if self.timestamp <= parent.header.timestamp: - raise ValueError("Timestamp equal to or before parent") - if self.timestamp >= 2**256: - raise ValueError("Timestamp waaaaaaaaaaayy too large") - - for uncle in uncles: - assert isinstance(uncle, BlockHeader) - - original_values = { - 'gas_used': header.gas_used, - 'timestamp': header.timestamp, - 'difficulty': header.difficulty, - 'uncles_hash': header.uncles_hash, - 'bloom': header.bloom, - 'header_mutable': self.header._mutable, - } - assert self._mutable - self._cached_rlp = None - self.header._mutable = True - - self.transactions = Trie(self.db, trie.BLANK_ROOT) - self.receipts = Trie(self.db, trie.BLANK_ROOT) - # replay transactions if state is unknown - state_unknown = (header.prevhash != self.config['GENESIS_PREVHASH'] and - header.number != 0 and - header.state_root != trie.BLANK_ROOT and - (len(header.state_root) != 32 or - b'validated:' + self.hash not in self.db) and - not making) - if state_unknown: - assert transaction_list is not None - if not parent: - parent = self.get_parent_header() - self.state = SecureTrie(Trie(self.db, parent.state_root)) - self.transaction_count = 0 - self.gas_used = 0 - # replay - for tx in transaction_list: - success, output = processblock.apply_transaction(self, tx) - self.finalize() - else: - # trust the state root in the header - self.state = SecureTrie(Trie(self.db, header._state_root)) - self.transaction_count = 0 - if transaction_list: - for tx in transaction_list: - self.add_transaction_to_list(tx) - if 
self.transactions.root_hash != header.tx_list_root: - raise ValueError("Transaction list root hash does not match") - # receipts trie populated by add_transaction_to_list is incorrect - # (it doesn't know intermediate states), so reset it - self.receipts = Trie(self.db, header.receipts_root) - - # checks ############################## - - def must(what, f, symb, a, b): - if not f(a, b): - if dump_block_on_failed_verification: - sys.stderr.write('%r' % self.to_dict()) - raise VerificationFailed(what, a, symb, b) - - def must_equal(what, a, b): - return must(what, lambda x, y: x == y, "==", a, b) - - def must_ge(what, a, b): - return must(what, lambda x, y: x >= y, ">=", a, b) - - def must_le(what, a, b): - return must(what, lambda x, y: x <= y, "<=", a, b) - - if parent: - must_equal('prev_hash', self.prevhash, parent.hash) - must_equal('gas_used', original_values['gas_used'], self.gas_used) - must_equal('timestamp', self.timestamp, original_values['timestamp']) - must_equal('difficulty', self.difficulty, original_values['difficulty']) - must_equal('uncles_hash', utils.sha3(rlp.encode(uncles)), original_values['uncles_hash']) - assert header.block is None - must_equal('state_root', self.state.root_hash, header.state_root) - must_equal('tx_list_root', self.transactions.root_hash, header.tx_list_root) - must_equal('receipts_root', self.receipts.root_hash, header.receipts_root) - must_equal('bloom', self.bloom, original_values['bloom']) - - # from now on, trie roots refer to block instead of header - header.block = self - self.header._mutable = original_values['header_mutable'] - - # Basic consistency verifications - if not self.check_fields(): - raise ValueError("Block is invalid") - if len(to_string(self.header.extra_data)) > self.config['MAX_EXTRADATA_LENGTH']: - raise ValueError("Extra data cannot exceed %d bytes" \ - % default_config['MAX_EXTRADATA_LENGTH']) - if self.header.coinbase == '': - raise ValueError("Coinbase cannot be empty address") - if not self.state.root_hash_valid(): - raise ValueError("State Merkle root of block %r not found in " - "database" % self) - if (not self.is_genesis() and self.nonce and not self.header.check_pow()): - raise ValueError("PoW check failed") - if b'validated:' + self.hash not in self.db: - if self.number == 0: - self.db.put(b'validated:' + self.hash, '1') - else: - self.db.put_temporarily(b'validated:' + self.hash, '1') - - @classmethod - def init_from_header(cls, header_rlp, env): - """Create a block without specifying transactions or uncles. - - :param header_rlp: the RLP encoded block header - :param env: the database for the block - """ - header = rlp.decode(header_rlp, BlockHeader, env=env) - return cls(header, None, [], env=env) - - @classmethod - def init_from_parent(cls, parent, coinbase, nonce=b'', extra_data=b'', - timestamp=int(time.time()), uncles=[], env=None): - """Create a new block based on a parent block. - - The block will not include any transactions and will not be finalized. 
- """ - header = BlockHeader(prevhash=parent.hash, - uncles_hash=utils.sha3(rlp.encode(uncles)), - coinbase=coinbase, - state_root=parent.state_root, - tx_list_root=trie.BLANK_ROOT, - receipts_root=trie.BLANK_ROOT, - bloom=0, - difficulty=calc_difficulty(parent, timestamp), - mixhash='', - number=parent.number + 1, - gas_limit=calc_gaslimit(parent), - gas_used=0, - timestamp=timestamp, - extra_data=extra_data, - nonce=nonce) - block = Block(header, [], uncles, env=env or parent.env, - parent=parent, making=True) - block.ancestor_hashes = [parent.hash] + parent.ancestor_hashes - block.log_listeners = parent.log_listeners - return block - - def check_fields(self): - """Check that the values of all fields are well formed.""" - # serialize and deserialize and check that the values didn't change - l = Block.serialize(self) - return rlp.decode(rlp.encode(l)) == l - - @property - def hash(self): - """The binary block hash - - This is equivalent to ``header.hash``. - """ - return utils.sha3(rlp.encode(self.header)) - - def hex_hash(self): - """The hex encoded block hash. - - This is equivalent to ``header.hex_hash(). - """ - return encode_hex(self.hash) - - @property - def tx_list_root(self): - return self.transactions.root_hash - - @tx_list_root.setter - def tx_list_root(self, value): - self.transactions = Trie(self.db, value) - - @property - def receipts_root(self): - return self.receipts.root_hash - - @receipts_root.setter - def receipts_root(self, value): - self.receipts = Trie(self.db, value) - - @property - def state_root(self): - self.commit_state() - return self.state.root_hash - - @state_root.setter - def state_root(self, value): - self.state = SecureTrie(Trie(self.db, value)) - self.reset_cache() - - @property - def uncles_hash(self): - return utils.sha3(rlp.encode(self.uncles)) - - @property - def transaction_list(self): - txs = [] - for i in range(self.transaction_count): - txs.append(self.get_transaction(i)) - return txs - - def validate_uncles(self): - """Validate the uncles of this block.""" - if utils.sha3(rlp.encode(self.uncles)) != self.uncles_hash: - return False - if len(self.uncles) > self.config['MAX_UNCLES']: - return False - for uncle in self.uncles: - assert uncle.prevhash in self.db - if uncle.number == self.number: - log.error("uncle at same block height", block=self) - return False - - # Check uncle validity - MAX_UNCLE_DEPTH = self.config['MAX_UNCLE_DEPTH'] - ancestor_chain = [self] + [a for a in self.get_ancestor_list(MAX_UNCLE_DEPTH + 1) if a] - assert len(ancestor_chain) == min(self.header.number + 1, MAX_UNCLE_DEPTH + 2) - ineligible = [] - # Uncles of this block cannot be direct ancestors and cannot also - # be uncles included 1-6 blocks ago - for ancestor in ancestor_chain[1:]: - ineligible.extend(ancestor.uncles) - ineligible.extend([b.header for b in ancestor_chain]) - eligible_ancestor_hashes = [x.hash for x in ancestor_chain[2:]] - for uncle in self.uncles: - parent = get_block(self.env, uncle.prevhash) - if uncle.difficulty != calc_difficulty(parent, uncle.timestamp): - return False - if not uncle.check_pow(): - return False - if uncle.prevhash not in eligible_ancestor_hashes: - log.error("Uncle does not have a valid ancestor", block=self, - eligible=[x.encode('hex') for x in eligible_ancestor_hashes], - uncle_prevhash=uncle.prevhash.encode('hex')) - return False - if uncle in ineligible: - log.error("Duplicate uncle", block=self, - uncle=encode_hex(utils.sha3(rlp.encode(uncle)))) - return False - ineligible.append(uncle) - return True - - def 
get_ancestor_list(self, n): - """Return `n` ancestors of this block. - - :returns: a list [p(self), p(p(self)), ..., p^n(self)] - """ - if n == 0 or self.header.number == 0: - return [] - p = self.get_parent() - return [p] + p.get_ancestor_list(n-1) - - def get_ancestor_hash(self, n): - assert n > 0 - while len(self.ancestor_hashes) < n: - if self.number == len(self.ancestor_hashes) - 1: - self.ancestor_hashes.append(None) - else: - self.ancestor_hashes.append( - get_block(self.env, - self.ancestor_hashes[-1]).get_parent().hash) - return self.ancestor_hashes[n-1] - - # def get_ancestor(self, n): - # return self.get_block(self.get_ancestor_hash(n)) - - def is_genesis(self): - """`True` if this block is the genesis block, otherwise `False`.""" - return self.header.number == 0 - - def _get_acct(self, address): - """Get the account with the given address. - - Note that this method ignores cached account items. - """ - if len(address) == 40: - address = decode_hex(address) - assert len(address) == 20 or len(address) == 0 - rlpdata = self.state.get(address) - if rlpdata != trie.BLANK_NODE: - acct = rlp.decode(rlpdata, Account, db=self.db) - acct._mutable = True - acct._cached_rlp = None - else: - acct = Account.blank_account(self.db, self.config['ACCOUNT_INITIAL_NONCE']) - return acct - - def _get_acct_item(self, address, param): - """Get a specific parameter of a specific account. - - :param address: the address of the account (binary or hex string) - :param param: the requested parameter (`'nonce'`, `'balance'`, - `'storage'` or `'code'`) - """ - if len(address) == 40: - address = decode_hex(address) - assert len(address) == 20 or len(address) == 0 - if address in self.caches[param]: - return self.caches[param][address] - else: - account = self._get_acct(address) - o = getattr(account, param) - self.caches[param][address] = o - return o - - def _set_acct_item(self, address, param, value): - """Set a specific parameter of a specific account. - - :param address: the address of the account (binary or hex string) - :param param: the requested parameter (`'nonce'`, `'balance'`, - `'storage'` or `'code'`) - :param value: the new value - """ - if len(address) == 40: - address = decode_hex(address) - assert len(address) == 20 - self.set_and_journal(param, address, value) - self.set_and_journal('all', address, True) - - def set_and_journal(self, cache, index, value): - prev = self.caches[cache].get(index, None) - if prev != value: - self.journal.append([cache, index, prev, value]) - self.caches[cache][index] = value - - def _delta_item(self, address, param, value): - """Add a value to an account item. - - If the resulting value would be negative, it is left unchanged and - `False` is returned. - - :param address: the address of the account (binary or hex string) - :param param: the parameter to increase or decrease (`'nonce'`, - `'balance'`, `'storage'` or `'code'`) - :param value: can be positive or negative - :returns: `True` if the operation was successful, `False` if not - """ - new_value = self._get_acct_item(address, param) + value - if new_value < 0: - return False - self._set_acct_item(address, param, new_value % 2**256) - return True - - def mk_transaction_receipt(self, tx): - """Create a receipt for a transaction.""" - return Receipt(self.state_root, self.gas_used, self.logs) - - def add_transaction_to_list(self, tx): - """Add a transaction to the transaction trie. - - Note that this does not execute anything, i.e. the state is not - updated. 
- """ - k = rlp.encode(self.transaction_count) - self.transactions.update(k, rlp.encode(tx)) - r = self.mk_transaction_receipt(tx) - self.receipts.update(k, rlp.encode(r)) - self.bloom |= r.bloom # int - self.transaction_count += 1 - - def get_transaction(self, num): - """Get the `num`th transaction in this block. - - :raises: :exc:`IndexError` if the transaction does not exist - """ - index = rlp.encode(num) - tx = self.transactions.get(index) - if tx == trie.BLANK_NODE: - raise IndexError('Transaction does not exist') - else: - return rlp.decode(tx, Transaction) - - def num_transactions(self): - return self.transaction_count - - def get_transactions(self): - """Build a list of all transactions in this block.""" - num = self.transaction_count - if len(self._get_transactions_cache) != num: - txs = [] - for i in range(num): - txs.append(self.get_transaction(i)) - self._get_transactions_cache = txs - return self._get_transactions_cache - - def get_transaction_hashes(self): - "helper to check if blk contains a tx" - return [utils.sha3(self.transactions.get(rlp.encode(i))) - for i in range(self.transaction_count)] - - def includes_transaction(self, tx_hash): - assert isinstance(tx_hash, bytes) - #assert self.get_transaction_hashes() == [tx.hash for tx in self.get_transactions()] - return tx_hash in self.get_transaction_hashes() - - def get_receipt(self, num): - """Get the receipt of the `num`th transaction. - - :returns: an instance of :class:`Receipt` - """ - index = rlp.encode(num) - receipt = self.receipts.get(index) - if receipt == trie.BLANK_NODE: - raise IndexError('Receipt does not exist') - else: - return rlp.decode(receipt, Receipt) - - def get_receipts(self): - """Build a list of all receipts in this block.""" - receipts = [] - for i in count(): - try: - receipts.append(self.get_receipt(i)) - except IndexError: - return receipts - - def get_nonce(self, address): - """Get the nonce of an account. - - :param address: the address of the account (binary or hex string) - """ - return self._get_acct_item(address, 'nonce') - - def set_nonce(self, address, value): - """Set the nonce of an account. - - :param address: the address of the account (binary or hex string) - :param value: the new nonce - :returns: `True` if successful, otherwise `False` - """ - return self._set_acct_item(address, 'nonce', value) - - def increment_nonce(self, address): - """Increment the nonce of an account. - - :param address: the address of the account (binary or hex string) - :returns: `True` if successful, otherwise `False` - """ - if self.get_nonce(address) == 0: - return self._delta_item(address, 'nonce', self.config['ACCOUNT_INITIAL_NONCE'] + 1) - else: - return self._delta_item(address, 'nonce', 1) - - def get_balance(self, address): - """Get the balance of an account. - - :param address: the address of the account (binary or hex string) - """ - return self._get_acct_item(address, 'balance') - - def set_balance(self, address, value): - """Set the balance of an account. - - :param address: the address of the account (binary or hex string) - :param value: the new balance - :returns: `True` if successful, otherwise `False` - """ - self._set_acct_item(address, 'balance', value) - - def delta_balance(self, address, value): - """Increase the balance of an account. 
- - :param address: the address of the account (binary or hex string) - :param value: can be positive or negative - :returns: `True` if successful, otherwise `False` - """ - return self._delta_item(address, 'balance', value) - - def transfer_value(self, from_addr, to_addr, value): - """Transfer a value between two account balances. - - :param from_addr: the address of the sending account (binary or hex - string) - :param to_addr: the address of the receiving account (binary or hex - string) - :param value: the (positive) value to send - :returns: `True` if successful, otherwise `False` - """ - assert value >= 0 - if self.delta_balance(from_addr, -value): - return self.delta_balance(to_addr, value) - return False - - def get_code(self, address): - """Get the code of an account. - - :param address: the address of the account (binary or hex string) - """ - return self._get_acct_item(address, 'code') - - def set_code(self, address, value): - """Set the code of an account. - - :param address: the address of the account (binary or hex string) - :param value: the new code - :returns: `True` if successful, otherwise `False` - """ - self._set_acct_item(address, 'code', value) - - def get_storage(self, address): - """Get the trie holding an account's storage. - - :param address: the address of the account (binary or hex string) - :param value: the new code - """ - storage_root = self._get_acct_item(address, 'storage') - return SecureTrie(Trie(self.db, storage_root)) - - def reset_storage(self, address): - self._set_acct_item(address, 'storage', b'') - CACHE_KEY = b'storage:' + address - if CACHE_KEY in self.caches: - for k in self.caches[CACHE_KEY]: - self.set_and_journal(CACHE_KEY, k, 0) - - def get_storage_data(self, address, index): - """Get a specific item in the storage of an account. - - :param address: the address of the account (binary or hex string) - :param index: the index of the requested item in the storage - """ - if len(address) == 40: - address = decode_hex(address) - assert len(address) == 20 - CACHE_KEY = b'storage:' + address - if CACHE_KEY in self.caches: - if index in self.caches[CACHE_KEY]: - return self.caches[CACHE_KEY][index] - key = utils.zpad(utils.coerce_to_bytes(index), 32) - storage = self.get_storage(address).get(key) - if storage: - return rlp.decode(storage, big_endian_int) - else: - return 0 - - def set_storage_data(self, address, index, value): - """Set a specific item in the storage of an account. 
- - :param address: the address of the account (binary or hex string) - :param index: the index of the item in the storage - :param value: the new value of the item - """ - if len(address) == 40: - address = decode_hex(address) - assert len(address) == 20 - CACHE_KEY = b'storage:' + address - if CACHE_KEY not in self.caches: - self.caches[CACHE_KEY] = {} - self.set_and_journal('all', address, True) - self.set_and_journal(CACHE_KEY, index, value) - - def account_exists(self, address): - if len(address) == 40: - address = decode_hex(address) - assert len(address) == 20 - return len(self.state.get(address)) > 0 or address in self.caches['all'] - - def add_log(self, log): - self.logs.append(log) - for L in self.log_listeners: - L(log) - - def commit_state(self): - """Commit account caches""" - """Write the acount caches on the corresponding tries.""" - changes = [] - if len(self.journal) == 0: - # log_state.trace('delta', changes=[]) - return - addresses = sorted(list(self.caches['all'].keys())) - for addr in addresses: - acct = self._get_acct(addr) - - # storage - for field in ('balance', 'nonce', 'code', 'storage'): - if addr in self.caches[field]: - v = self.caches[field][addr] - changes.append([field, addr, v]) - setattr(acct, field, v) - - t = SecureTrie(Trie(self.db, acct.storage)) - for k, v in self.caches.get(b'storage:' + addr, {}).items(): - enckey = utils.zpad(utils.coerce_to_bytes(k), 32) - val = rlp.encode(v) - changes.append(['storage', addr, k, v]) - # if self.number > 18280 and False: - # try: - # self.db.logging = True - # except: - # pass - # sys.stderr.write("pre: %r\n" % self.account_to_dict(addr)['storage']) - # sys.stderr.write("pre: %r\n" % self.get_storage(addr).root_hash.encode('hex')) - # sys.stderr.write("changed: %s %s %s\n" % (encode_hex(addr), encode_hex(enckey), encode_hex(val))) - if v: - t.update(enckey, val) - else: - t.delete(enckey) - acct.storage = t.root_hash - self.state.update(addr, rlp.encode(acct)) - log_state.trace('delta', changes=changes) - self.reset_cache() - self.db.put_temporarily(b'validated:' + self.hash, '1') - - def del_account(self, address): - """Delete an account. - - :param address: the address of the account (binary or hex string) - """ - if len(address) == 40: - address = decode_hex(address) - assert len(address) == 20 - self.commit_state() - self.state.delete(address) - - def account_to_dict(self, address, with_storage_root=False, - with_storage=True): - """Serialize an account to a dictionary with human readable entries. 
- - :param address: the 20 bytes account address - :param with_storage_root: include the account's storage root - :param with_storage: include the whole account's storage - """ - if len(address) == 40: - address = decode_hex(address) - assert len(address) == 20 - - if with_storage_root: - # if there are uncommited account changes the current storage root - # is meaningless - assert len(self.journal) == 0 - med_dict = {} - - account = self._get_acct(address) - for field in ('balance', 'nonce'): - value = self.caches[field].get(address, getattr(account, field)) - med_dict[field] = to_string(value) - code = self.caches['code'].get(address, account.code) - med_dict['code'] = b'0x' + encode_hex(code) - - storage_trie = SecureTrie(Trie(self.db, account.storage)) - if with_storage_root: - med_dict['storage_root'] = encode_hex(storage_trie.get_root_hash()) - if with_storage: - med_dict['storage'] = {} - d = storage_trie.to_dict() - subcache = self.caches.get(b'storage:' + address, {}) - subkeys = [utils.zpad(utils.coerce_to_bytes(kk), 32) - for kk in list(subcache.keys())] - for k in list(d.keys()) + subkeys: - v = d.get(k, None) - v2 = subcache.get(utils.big_endian_to_int(k), None) - hexkey = b'0x' + encode_hex(utils.zunpad(k)) - if v2 is not None: - if v2 != 0: - med_dict['storage'][hexkey] = \ - b'0x' + encode_hex(utils.int_to_big_endian(v2)) - elif v is not None: - med_dict['storage'][hexkey] = b'0x' + encode_hex(rlp.decode(v)) - - return med_dict - - def reset_cache(self): - """Reset cache and journal without commiting any changes.""" - self.caches = { - 'all': {}, - 'balance': {}, - 'nonce': {}, - 'code': {}, - 'storage': {}, - } - self.journal = [] - - def snapshot(self): - """Make a snapshot of the current state to enable later reverting.""" - return { - 'state': self.state.root_hash, - 'gas': self.gas_used, - 'txs': self.transactions, - 'txcount': self.transaction_count, - 'suicides': self.suicides, - 'logs': self.logs, - 'refunds': self.refunds, - 'suicides_size': len(self.suicides), - 'logs_size': len(self.logs), - 'journal': self.journal, # pointer to reference, so is not static - 'journal_size': len(self.journal), - 'ether_delta': self.ether_delta - } - - def revert(self, mysnapshot): - """Revert to a previously made snapshot. - - Reverting is for example necessary when a contract runs out of gas - during execution. 
- """ - self.journal = mysnapshot['journal'] - log_state.trace('reverting') - while len(self.journal) > mysnapshot['journal_size']: - cache, index, prev, post = self.journal.pop() - log_state.trace('%r %r %r %r' % (cache, index, prev, post)) - if prev is not None: - self.caches[cache][index] = prev - else: - del self.caches[cache][index] - self.suicides = mysnapshot['suicides'] - while len(self.suicides) > mysnapshot['suicides_size']: - self.suicides.pop() - self.logs = mysnapshot['logs'] - while len(self.logs) > mysnapshot['logs_size']: - self.logs.pop() - self.refunds = mysnapshot['refunds'] - self.state.root_hash = mysnapshot['state'] - self.gas_used = mysnapshot['gas'] - self.transactions = mysnapshot['txs'] - self.transaction_count = mysnapshot['txcount'] - self._get_transactions_cache = [] - self.ether_delta = mysnapshot['ether_delta'] - - def finalize(self): - """Apply rewards and commit.""" - delta = int(self.config['BLOCK_REWARD'] + self.config['NEPHEW_REWARD'] * len(self.uncles)) - self.delta_balance(self.coinbase, delta) - self.ether_delta += delta - - for uncle in self.uncles: - r = self.config['BLOCK_REWARD'] * \ - (self.config['UNCLE_DEPTH_PENALTY_FACTOR'] + uncle.number - self.number) \ - / self.config['UNCLE_DEPTH_PENALTY_FACTOR'] - r = int(r) - self.delta_balance(uncle.coinbase, r) - self.ether_delta += r - self.commit_state() - - def to_dict(self, with_state=False, full_transactions=False, - with_storage_roots=False, with_uncles=False): - """Serialize the block to a readable dictionary. - - :param with_state: include state for all accounts - :param full_transactions: include serialized transactions (hashes - otherwise) - :param with_storage_roots: if account states are included also include - their storage roots - :param with_uncles: include uncle hashes - """ - b = {"header": self.header.to_dict()} - txlist = [] - for i, tx in enumerate(self.get_transactions()): - receipt_rlp = self.receipts.get(rlp.encode(i)) - receipt = rlp.decode(receipt_rlp, Receipt) - if full_transactions: - txjson = tx.to_dict() - else: - txjson = tx.hash - txlist.append({ - "tx": txjson, - "medstate": encode_hex(receipt.state_root), - "gas": to_string(receipt.gas_used), - "logs": [Log.serialize(log) for log in receipt.logs], - "bloom": utils.int256.serialize(receipt.bloom) - }) - b["transactions"] = txlist - if with_state: - state_dump = {} - for address, v in self.state.to_dict().items(): - state_dump[encode_hex(address)] = self.account_to_dict(address, with_storage_roots) - b['state'] = state_dump - if with_uncles: - b['uncles'] = [self.__class__.deserialize_header(u) - for u in self.uncles] - return b - - @property - def mining_hash(self): - return utils.sha3(rlp.encode(self.header, - BlockHeader.exclude(['nonce', 'mixhash']))) - - def get_parent(self): - """Get the parent of this block.""" - if self.number == 0: - raise UnknownParentException('Genesis block has no parent') - try: - parent = get_block(self.env, self.prevhash) - except KeyError: - raise UnknownParentException(encode_hex(self.prevhash)) - # assert parent.state.db.db == self.state.db.db - return parent - - def get_parent_header(self): - """Get the parent of this block.""" - if self.number == 0: - raise UnknownParentException('Genesis block has no parent') - try: - parent_header = get_block_header(self.db, self.prevhash) - except KeyError: - raise UnknownParentException(encode_hex(self.prevhash)) - # assert parent.state.db.db == self.state.db.db - return parent_header - - def has_parent(self): - """`True` if this block has a known 
parent, otherwise `False`.""" - try: - self.get_parent() - return True - except UnknownParentException: - return False - - def chain_difficulty(self): - """Get the summarized difficulty. - - If the summarized difficulty is not stored in the database, it will be - calculated recursively and put in the database. - """ - if self.is_genesis(): - return self.difficulty - elif b'difficulty:' + encode_hex(self.hash) in self.db: - encoded = self.db.get(b'difficulty:' + encode_hex(self.hash)) - return utils.decode_int(encoded) - else: - o = self.difficulty + self.get_parent().chain_difficulty() - # o += sum([uncle.difficulty for uncle in self.uncles]) - self.state.db.put_temporarily( - b'difficulty:' + encode_hex(self.hash), utils.encode_int(o)) - return o - - def __eq__(self, other): - """Two blocks are equal iff they have the same hash.""" - return isinstance(other, (Block, CachedBlock)) and self.hash == other.hash - - def __hash__(self): - return utils.big_endian_to_int(self.hash) - - def __ne__(self, other): - return not self.__eq__(other) - - def __gt__(self, other): - return self.number > other.number - - def __lt__(self, other): - return self.number < other.number - - def __repr__(self): - return '<%s(#%d %s)>' % (self.__class__.__name__, self.number, encode_hex(self.hash)[:8]) - - def __structlog__(self): - return encode_hex(self.hash) - - -# Gas limit adjustment algo -def calc_gaslimit(parent): - config = parent.config - decay = parent.gas_limit // config['GASLIMIT_EMA_FACTOR'] - new_contribution = ((parent.gas_used * config['BLKLIM_FACTOR_NOM']) // - config['BLKLIM_FACTOR_DEN'] // config['GASLIMIT_EMA_FACTOR']) - gl = max(parent.gas_limit - decay + new_contribution, config['MIN_GAS_LIMIT']) - if gl < config['GENESIS_GAS_LIMIT']: - gl2 = parent.gas_limit + decay - gl = min(config['GENESIS_GAS_LIMIT'], gl2) - assert check_gaslimit(parent, gl) - return gl - - -def check_gaslimit(parent, gas_limit): - config = parent.config - # block.gasLimit - parent.gasLimit <= parent.gasLimit / GasLimitBoundDivisor - gl = parent.gas_limit // config['GASLIMIT_ADJMAX_FACTOR'] - a = bool(abs(gas_limit - parent.gas_limit) <= gl) - b = bool(gas_limit >= config['MIN_GAS_LIMIT']) - return a and b - - -class CachedBlock(Block): - # note: immutable refers to: do not manipulate! 
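calc_gaslimit above moves the gas limit as an exponential moving average of parent usage: it decays by 1/GASLIMIT_EMA_FACTOR, adds back a fraction of the parent's gas_used, floors at MIN_GAS_LIMIT, and pulls the limit up toward GENESIS_GAS_LIMIT when it is below that target. A stand-alone rendering of that arithmetic with illustrative constants (not necessarily the library defaults), and without the check_gaslimit assertion:

GAS_CONFIG = {
    'GASLIMIT_EMA_FACTOR': 1024,
    'BLKLIM_FACTOR_NOM': 3,
    'BLKLIM_FACTOR_DEN': 2,
    'MIN_GAS_LIMIT': 5000,
    'GENESIS_GAS_LIMIT': 3141592,
}

def calc_gaslimit(parent_gas_limit, parent_gas_used, config=GAS_CONFIG):
    decay = parent_gas_limit // config['GASLIMIT_EMA_FACTOR']
    new_contribution = ((parent_gas_used * config['BLKLIM_FACTOR_NOM']) //
                        config['BLKLIM_FACTOR_DEN'] // config['GASLIMIT_EMA_FACTOR'])
    gl = max(parent_gas_limit - decay + new_contribution, config['MIN_GAS_LIMIT'])
    if gl < config['GENESIS_GAS_LIMIT']:
        gl = min(config['GENESIS_GAS_LIMIT'], parent_gas_limit + decay)
    return gl

# A full parent block (gas_used == gas_limit) nudges the limit upward...
assert calc_gaslimit(3141592, 3141592) > 3141592
# ...while an empty parent block lets it decay by one EMA step.
assert calc_gaslimit(4000000, 0) == 4000000 - 4000000 // 1024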
- _hash_cached = None - - def _set_acct_item(self): - raise NotImplementedError - - def set_state_root(self): - raise NotImplementedError - - def revert(self): - raise NotImplementedError - - def commit_state(self): - pass - - def __hash__(self): - return utils.big_endian_to_int(self.hash) - - @property - def hash(self): - if not self._hash_cached: - self._hash_cached = super(CachedBlock, self).hash - return self._hash_cached - - @classmethod - def create_cached(cls, blk): - blk.__class__ = CachedBlock - log.debug('created cached block', blk=blk) - return blk - - -def get_block_header(db, blockhash): - assert isinstance(db, BaseDB) - bh = BlockHeader.from_block_rlp(db.get(blockhash)) - if bh.hash != blockhash: - log.warn('BlockHeader.hash is broken') - assert bh.hash == blockhash - - return bh - - -@lru_cache(1024) -def get_block(env, blockhash): - """ - Assumption: blocks loaded from the db are not manipulated - -> can be cached including hash - """ - assert isinstance(env, Env) - blk = rlp.decode(env.db.get(blockhash), Block, env=env) - return CachedBlock.create_cached(blk) - - -# def has_block(blockhash): -# return blockhash in db.DB(utils.get_db_path()) - - -def genesis(env, **kwargs): - assert isinstance(env, Env) - """Build the genesis block.""" - allowed_args = set(['start_alloc', 'prevhash', 'coinbase', 'difficulty', 'gas_limit', - 'timestamp', 'extra_data', 'mixhash', 'nonce']) - assert set(kwargs.keys()).issubset(allowed_args) - - # https://ethereum.etherpad.mozilla.org/11 - start_alloc = kwargs.get('start_alloc', env.config['GENESIS_INITIAL_ALLOC']) - header = BlockHeader( - prevhash=kwargs.get('prevhash', env.config['GENESIS_PREVHASH']), - uncles_hash=utils.sha3(rlp.encode([])), - coinbase=kwargs.get('coinbase', env.config['GENESIS_COINBASE']), - state_root=trie.BLANK_ROOT, - tx_list_root=trie.BLANK_ROOT, - receipts_root=trie.BLANK_ROOT, - bloom=0, - difficulty=kwargs.get('difficulty', env.config['GENESIS_DIFFICULTY']), - number=0, - gas_limit=kwargs.get('gas_limit', env.config['GENESIS_GAS_LIMIT']), - gas_used=0, - timestamp=kwargs.get('timestamp', 0), - extra_data=kwargs.get('extra_data', env.config['GENESIS_EXTRA_DATA']), - mixhash=kwargs.get('mixhash', env.config['GENESIS_MIXHASH']), - nonce=kwargs.get('nonce', env.config['GENESIS_NONCE']), - ) - block = Block(header, [], [], env=env) - for addr, data in start_alloc.items(): - if len(addr) == 40: - addr = decode_hex(addr) - assert len(addr) == 20 - if 'wei' in data: - block.set_balance(addr, utils.parse_int_or_hex(data['wei'])) - if 'balance' in data: - block.set_balance(addr, utils.parse_int_or_hex(data['balance'])) - if 'code' in data: - block.set_code(addr, utils.scanners['bin'](data['code'])) - if 'nonce' in data: - block.set_nonce(addr, utils.parse_int_or_hex(data['nonce'])) - if 'storage' in data: - for k, v in data['storage'].items(): - block.set_storage_data(addr, utils.big_endian_to_int(decode_hex(k[2:])), - utils.big_endian_to_int(decode_hex(v[2:]))) - block.commit_state() - block.state.db.commit() - # genesis block has predefined state root (so no additional finalization - # necessary) - return block - - -def dump_genesis_block_tests_data(env): - assert isinstance(env, Env) - import json - g = genesis(env) - data = dict( - genesis_state_root=encode_hex(g.state_root), - genesis_hash=g.hex_hash(), - genesis_rlp_hex=encode_hex(g.serialize()), - initial_alloc=dict() - ) - for addr, balance in env.config['GENESIS_INITIAL_ALLOC'].items(): - data['initial_alloc'][addr] = to_string(balance) - - print(json.dumps(data, 
indent=1)) diff --git a/ethereum/casper.se.py b/ethereum/casper.se.py new file mode 100644 index 000000000..016df6693 --- /dev/null +++ b/ethereum/casper.se.py @@ -0,0 +1,541 @@ +data nextGuardianIndex # map to storage index 0 +data guardians[2**50](address, orig_deposit_size, induction_height, withdrawal_height, validationCode, blockhashes[2**50], stateroots[2**50], probs[2**50], profits[2**50], basicinfo, max_seq, counter) +data deletedGuardianIndices[2**50] +data nextDeletedGuardianIndex +data numActiveGuardians +data nextCounter +data slashed[] +data inclusionRewards[] + +macro MAX_ODDS: 2**29 + +# Interpret prob as odds in scientific notation: 5 bit exponent +# (-16….15), 3 bit mantissa (1….1.875). Convert to odds per billion +# This allows 3.125% granularity, with odds between 65536:1 against +# and 1:61440 for +macro logoddsToOdds($logodds): + 2**(($logodds) / 4) * (4 + ($logodds) % 4) * 99 / 1700 + +macro convertOddsToProb($odds): + $odds * 10**9 / (10**9 + $odds) + +macro convertProbToOdds($prob): + $prob * 10**9 / (10**9 - $prob) + +# This is a simple quadratic scoring rule. +macro scoreCorrect($logodds, $odds): + (($logodds - 128) * ($logodds > 128) * (3759880483 / 128 * 10**9) + $odds) / 10000 + +macro scoreIncorrect($odds): + (0 - ($odds - 10**9) * ($odds > 10**9) * (3759880483 / 128 * 10 / 7 * 4) - $odds * $odds / 2 / 10**9) / 10000 + +macro SEQ_POS: 0 +macro PREVHASH_POS: 1 +macro DEPOSIT_SIZE_POS: 2 +macro BET_MAXHEIGHT_POS: 3 +macro PROFITS_PROCESSED_TO_POS: 4 + +macro VALIDATOR_ROUNDS: 5 + +macro INCENTIVIZATION_EMA_COEFF: 300 + +macro INCLUSION_REWARD_EQUILIBRIUM_PPB: 20 + +# VALIDATOR_ROUNDS of maximum slashing = 100% loss +# Currently: 51.5 parts per billion theoretical maximum return per block, 24.19% theoretical maximum annual reward +macro SCORING_REWARD_DIVISOR: 15398906716575978534951 +macro MIN_BET_BYTE: 0 +macro MAX_BET_BYTE: 255 +macro PER_BLOCK_BASE_COST: 74 # parts per billion: 36.89% fixed annual penalty +# Net interest rate: 10% theoretical maximum + +macro MIN_DEPOSIT: 1250 * 10**18 + +macro MAX_DEPOSIT: 200000 * 10**18 + +macro MAX_VALIDATORS: 100 + +macro ENTER_EXIT_DELAY: 110 + +macro MAXBETLENGTH: 10000 + +macro WRAPLENGTH: 40320 + +macro ETHER: 50 + +macro RLPGETBYTES32: 8 + +macro RLPGETSTRING: 9 + +macro MAX_VALIDATION_DURATION: 4000000 # number of blocks + +macro EXCESS_VALIDATION_TAX: 100 # parts per billion per block + +macro WITHDRAWAL_WAITTIME: 20 + +macro PROFIT_PACKING_NUM: 32 +macro PPN: 32 + +macro PROFIT_PACKING_BYTES: 10 +macro PPB: 10 + +macro ADDRBYTES: 23 + +macro maskinclude($top, $bottom): + 256**$top - 256**$bottom + +macro maskexclude($top, $bottom): + ~not(256**$top - 256**$bottom) + + +macro newArrayChunk($bytesPerValue, $valuesPerChunk): + string($bytesPerValue * $valuesPerChunk) + +macro(80) loadArrayChunk($bytesPerValue, $valuesPerChunk, $storearray, $memaddr, $index): + ~sloadbytes(ref($storearray[div($index, $valuesPerChunk)]), $memaddr, $bytesPerValue * $valuesPerChunk) + +macro(80) loadArrayValue($bytesPerValue, $valuesPerChunk, $memaddr, $index): + mod(~mload($memaddr + $bytesPerValue * mod($index, $valuesPerChunk) - 32 + $bytesPerValue), 256**$bytesPerValue) + +macro(80) saveArrayValue($bytesPerValue, $valuesPerChunk, $memaddr, $index, $value): + mcopy_small2($memaddr + $bytesPerValue * mod($index, $valuesPerChunk) - 32 + $bytesPerValue, $value, $bytesPerValue) + +macro(80) mcopy_small2($to, $frm, $bytes): + ~mstore($to, (~mload($to) & sub(0, 256**$bytes)) + ($frm & (256**$bytes - 1))) + +macro(80) 
saveArrayChunk($bytesPerValue, $valuesPerChunk, $storearray, $memaddr, $index): + ~sstorebytes(ref($storearray[div($index, $valuesPerChunk)]), $memaddr, $bytesPerValue * $valuesPerChunk) + +macro mcopy_small($to, $frm, $bytes): + ~mstore($to, (~mload($to) & sub(0, 256**$bytes)) + ($frm & (256**$bytes - 1))) + +event Reward(blockNumber, totProfit, totLoss, bmh, blockdiff) +event ProcessingBet(bettor, seq, curBlock, prevBlock, maxHeightProcessed, max_height) +event RecordingTotProfit(bettor, block, totProfit) +event Joined(index) +event BetSlashed(index:uint256, bet1:str, bet2:str) +event BlockSlashed(index:uint256, bet1:str, bet2:str) + +def const getMinDeposit(): + return self.numActiveGuardians + +# Become a guardian +def join(validationCode:bytes): + min_deposit = MIN_DEPOSIT * MAX_VALIDATORS / (MAX_VALIDATORS - self.numActiveGuardians) + assert self.numActiveGuardians < MAX_VALIDATORS and msg.value >= min_deposit and msg.value <= MAX_DEPOSIT + if self.nextDeletedGuardianIndex: + index = self.deletedGuardianIndices[self.nextDeletedGuardianIndex - 1] + self.nextDeletedGuardianIndex -= 1 + else: + index = self.nextGuardianIndex + self.nextGuardianIndex = index + 1 + self.guardians[index].address = msg.sender + self.guardians[index].counter = self.nextCounter + self.nextCounter += 1 + ~sstorebytes(ref(self.guardians[index].validationCode), validationCode, len(validationCode)) + # log(20, ~ssize(self.guardians[index].validationCode)) + basicinfo = array(10) + basicinfo[DEPOSIT_SIZE_POS] = msg.value + self.guardians[index].orig_deposit_size = msg.value + self.guardians[index].induction_height = if(block.number, block.number + ENTER_EXIT_DELAY, 0) + self.guardians[index].withdrawal_height = 2**100 + basicinfo[PROFITS_PROCESSED_TO_POS] = self.guardians[index].induction_height + self.numActiveGuardians += 1 + ~sstorebytes(ref(self.guardians[index].basicinfo), basicinfo, 320) + return(index:uint256) + + +# Leave the guardian pool +def withdraw(index:uint256): + if self.guardians[index].withdrawal_height + WITHDRAWAL_WAITTIME <= block.number: + # Load the guardian's info + basicinfo = array(10) + ~sloadbytes(ref(self.guardians[index].basicinfo), basicinfo, 320) + send(self.guardians[index].address, basicinfo[DEPOSIT_SIZE_POS] + self.inclusionRewards[self.guardians[index].address]) + self.inclusionRewards[self.guardians[index].address] = 0 + self.guardians[index].address = 0 + basicinfo[DEPOSIT_SIZE_POS] = 0 + self.deletedGuardianIndices[self.nextDeletedGuardianIndex] = index + self.nextDeletedGuardianIndex += 1 + self.numActiveGuardians -= 1 + ~sstorebytes(ref(self.guardians[index].basicinfo), basicinfo, 320) + return(1:bool) + return(0:bool) + +event LogPre(hash:bytes32) +event LogPost(hash:bytes32) +event SubmitBet(seq, prevhash:bytes32, index, stateroot_prob_from:bytes1) +event ExcessRewardEvent(index, profit, blockdiff, totProfit, newBalance) +event EstProfit(profit) +event EstProfitComb(profit, profit2, lsr, stateInfo) +event ZeroSeq(index:uint256,progress:uint256) + +# Submit a bet +def submitBet(index:uint256, max_height:uint256, probs:bytes, blockhashes:bytes32[], stateroots:bytes32[], stateroot_probs:bytes, prevhash:bytes32, seqnum:uint256, sig:bytes): + # Load basic guardian information + basicinfo = array(10) + ~sloadbytes(ref(self.guardians[index].basicinfo), basicinfo, 320) + # log(type=SubmitBet, basicinfo[SEQ_POS], basicinfo[PREVHASH_POS], index, stateroot_prob_from) + # Compute the signature hash + _calldata = string(~calldatasize()) + ~calldatacopy(_calldata, 0, ~calldatasize()) + 
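The odds macros near the top of casper.se.py decode a one-byte log-odds value into odds expressed in fixed-point parts per billion and convert between odds and probabilities. A direct Python transcription for experimentation; Serpent's integer division and modulo become // and %, and the sample values are only illustrative:

def logodds_to_odds(logodds):
    return 2 ** (logodds // 4) * (4 + logodds % 4) * 99 // 1700

def odds_to_prob(odds):
    return odds * 10**9 // (10**9 + odds)

def prob_to_odds(prob):
    return prob * 10**9 // (10**9 - prob)

even = logodds_to_odds(128)                  # 128 decodes to roughly even odds
assert 9 * 10**8 < even < 11 * 10**8         # i.e. close to 10**9, meaning 1:1
assert odds_to_prob(prob_to_odds(500000000)) == 500000000   # p = 0.5 round-trips exactly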
signing_hash = ~sha3(_calldata, ~calldatasize() - 32 - ceil32(len(sig))) + # Check the sig against the guardian validation code + guardian_validation_code = string(~ssize(ref(self.guardians[index].validationCode))) + ~sloadbytes(ref(self.guardians[index].validationCode), guardian_validation_code, len(guardian_validation_code)) + sig_verified = 0 + with L = len(sig): + sig[-1] = signing_hash + ~callstatic(msg.gas - 20000, guardian_validation_code, len(guardian_validation_code), sig - 32, L + 32, ref(sig_verified), 32) + sig[-1] = L + assert sig_verified == 1 + # Check sequence number + if seqnum != basicinfo[SEQ_POS] or prevhash != basicinfo[PREVHASH_POS]: + # If someone submits a higher-seq bet, register that it has been + # submitted; we will later force the guardian to supply all bets + # up to and including this seq in order to withdraw + self.guardians[index].max_seq = max(self.guardians[index].max_seq, seqnum) + return(0:bool) + # Check basic validity + assert max_height <= block.number + assert self.guardians[index].withdrawal_height > block.number + assert len(probs) >= len(blockhashes) + assert len(probs) >= len(stateroots) + assert len(probs) >= len(stateroot_probs) + # Set seq and prevhash + basicinfo[PREVHASH_POS] = ~sha3(_calldata, ~calldatasize()) + basicinfo[SEQ_POS] = seqnum + 1 + # log(type=ProcessingBet, index, seqnum, 1, 2, 3, 4) + # Incentivize the validator that included the bet + reward = basicinfo[DEPOSIT_SIZE_POS] * (block.number - basicinfo[PROFITS_PROCESSED_TO_POS]) * INCLUSION_REWARD_EQUILIBRIUM_PPB / 10**9 + self.inclusionRewards[block.coinbase] += reward + # Process profits from last bet + guardianBalance = basicinfo[DEPOSIT_SIZE_POS] + prevProfit = 0 + with bmh = basicinfo[BET_MAXHEIGHT_POS]: + with CURPROFITBLOCK = newArrayChunk(PPB, PPN): + loadArrayChunk(PPB, PPN, self.guardians[index].profits, CURPROFITBLOCK, mod(bmh, WRAPLENGTH)) + with profit = ~signextend(PPB-1, loadArrayValue(PPB, PPN, CURPROFITBLOCK, mod(bmh, WRAPLENGTH))): + with blockdiff = block.number - basicinfo[PROFITS_PROCESSED_TO_POS]: + with totProfit = 0: + with i = 0: + while i < blockdiff: + totProfit += profit + profit = profit * (INCENTIVIZATION_EMA_COEFF - 1) / INCENTIVIZATION_EMA_COEFF + i += 1 + guardianBalance = max(0, guardianBalance + guardianBalance * totProfit / SCORING_REWARD_DIVISOR - guardianBalance * blockdiff * PER_BLOCK_BASE_COST / 10**9) + # log(type=Reward, block.number, guardianBalance * totProfit / SCORING_REWARD_DIVISOR, guardianBalance * blockdiff * PER_BLOCK_BASE_COST / 10**9, guardianBalance, blockdiff) + prevProfit = profit + # if guardianBalance > 3000 * 10**18: + # log(type=ExcessRewardEvent, i, profit, blockdiff, totProfit, guardianBalance) + # Update the maximum height of the previous bet, profits and the guardian deposit size + basicinfo[BET_MAXHEIGHT_POS] = max(bmh, max_height) + basicinfo[PROFITS_PROCESSED_TO_POS] = block.number + basicinfo[DEPOSIT_SIZE_POS] = guardianBalance + # Bet with max height 2**256 - 1 to start withdrawal + if max_height == ~sub(0, 1): + # Make sure that the guardian has submitted all bets + assert self.guardians[index].max_seq <= seqnum + # Register the guardian as having withdrawn + self.guardians[index].withdrawal_height = block.number + # Compute how many blocks the guardian has validated for + guardian_validated_for = block.number - self.guardians[index].induction_height + # Compute a tax for validating too long + if guardian_validated_for > MAX_VALIDATION_DURATION: + taxrate = (guardian_validated_for - MAX_VALIDATION_DURATION) * 
EXCESS_VALIDATION_TAX + basicinfo[DEPOSIT_SIZE_POS] = max(0, basicinfo[DEPOSIT_SIZE_POS] - basicinfo[DEPOSIT_SIZE_POS] * taxrate / 10**9) + # Store guardian data + ~sstorebytes(ref(self.guardians[index].basicinfo), basicinfo, 320) + return(1:bool) + # Update blockhashes, storing blockhash correctness info in groups of 32 + with i = 0: + with v = self.guardians[index].blockhashes[mod(max_height / 32, WRAPLENGTH)]: + while i < len(blockhashes): + with h = max_height - i: + byte = not(not(~blockhash(h))) * 2 + (blockhashes[i] == ~blockhash(h)) + with offset = h % 32: + v = (v & maskexclude(offset + 1, offset)) + byte * 256**offset + if offset == 0 or i == len(blockhashes) - 1: + self.guardians[index].blockhashes[mod(h / 32, WRAPLENGTH)] = v + v = self.guardians[index].blockhashes[mod((h / 32) - 1, WRAPLENGTH)] + i += 1 + # Update stateroots, storing stateroot correctness info in groups of 32 + with i = 0: + with v = self.guardians[index].stateroots[mod(max_height / 32, WRAPLENGTH)]: + while i < len(stateroots): + with h = max_height - i: + byte = not(not(stateroots[i])) * 2 + (stateroots[i] == ~stateroot(h)) + with offset = h % 32: + v = (v & maskexclude(offset + 1, offset)) + byte * 256**offset + if offset == 0 or i == len(stateroots) - 1: + self.guardians[index].stateroots[mod(h / 32, WRAPLENGTH)] = v + v = self.guardians[index].stateroots[mod((h / 32) - 1, WRAPLENGTH)] + i += 1 + # Update probabilities; paste the probs into the self.guardians[index].probs + # array at the correct positions, assuming the probs array stores probs + # in groups of 32 + # with h = max_height + 1: + # with i = 0: + # while i < len(probs): + # with top = (h % 32) or 32: + # with bottom = max(top - len(probs) + i, 0): + # x = (self.guardians[index].probs[mod((h - 1) / 32, WRAPLENGTH)] & maskexclude(top, bottom)) + (~mload(probs + i - 32 + top) & maskinclude(top, bottom)) + # self.guardians[index].probs[mod((h - 1) / 32, WRAPLENGTH)] = x + # h -= top + # i += top + + minChanged = max_height - max(max(len(blockhashes), len(stateroots)), len(probs)) + 1 + # Incentivization + with H = max(self.guardians[index].induction_height, minChanged): + # log(type=ProgressWithDataArray, 1, [minChanged, H, max_height, len(blockhashes), len(stateroots), len(probs)]) + # log(type=Progress, 50000 + logStaterootOdds) + with PROFITBLOCK = newArrayChunk(PPB, PPN): + loadArrayChunk(PPB, PPN, self.guardians[index].profits, PROFITBLOCK, mod(H - 1, WRAPLENGTH)) + CURPROFIT = loadArrayValue(PPB, PPN, PROFITBLOCK, mod(H - 1, WRAPLENGTH)) + if H % PROFIT_PACKING_NUM == 0: + loadArrayChunk(PPB, PPN, self.guardians[index].profits, PROFITBLOCK, mod(H, WRAPLENGTH)) + stateRootInfo = div(self.guardians[index].stateroots[mod(H / 32, WRAPLENGTH)], 256**(H % 32)) + # log(type=ProgressWithData, 2, H, max_height) + while H <= max_height: + # Determine the byte that was saved as the probability + # Convert the byte to odds * 1 billion + # logodds = min(MAX_BET_BYTE, max(MIN_BET_BYTE, getch(probs, max_height - H))) + with logBlockOdds = getch(probs, max_height - H): + with blockOdds = logoddsToOdds(logBlockOdds): # mod((self.guardians[index].probs[H / 32] / 256**(H % 32)), 256) or 128 + # log(type=Progress, 3) + blockHashInfo = mod(div(self.guardians[index].blockhashes[mod(H / 32, WRAPLENGTH)], 256**(H % 32)), 256) + with invBlockOdds = 10**18 / blockOdds: + if blockHashInfo >= 2 and blockHashInfo % 2: # block hashes match, and there is a block + profitFactor = scoreCorrect(logBlockOdds, blockOdds) + scoreIncorrect(invBlockOdds) + elif blockHashInfo < 
2: # there is no block + profitFactor = scoreCorrect(256 - logBlockOdds, invBlockOdds) + scoreIncorrect(blockOdds) + else: # block hashes do not match, there is a block + profitFactor = scoreIncorrect(blockOdds) + scoreIncorrect(invBlockOdds) + + # if profitFactor < 0 and (blockOdds < 10**8 or blockOdds > 10**10): + # log(type=BlockLoss, blockOdds, profitFactor, H, index, blockHashInfo) + # log(type=Progress, 1000000000 + logodds * 100000 + logStaterootOdds) + # Check if the state root bet that was made is correct. + # log(type=Progress, 4) + with logStaterootOdds = getch(stateroot_probs, max_height - H): + if (stateRootInfo & 2): + if stateRootInfo % 2: + profitFactor2 = scoreCorrect(logStaterootOdds, logoddsToOdds(logStaterootOdds)) + else: + profitFactor2 = scoreIncorrect(logoddsToOdds(logStaterootOdds)) + else: + profitFactor2 = 0 + if H % 32 == 31: + stateRootInfo = self.guardians[index].stateroots[mod((H + 1) / 32, WRAPLENGTH)] + else: + stateRootInfo = div(stateRootInfo, 256) + # log(type=EstProfitComb, profitFactor * basicinfo[DEPOSIT_SIZE_POS] / SCORING_REWARD_DIVISOR, profitFactor2 * basicinfo[DEPOSIT_SIZE_POS] / SCORING_REWARD_DIVISOR, logStaterootOdds, stateRootInfo) + # log(type=Progress, 80000 + logStaterootOdds) + # Update the profit counter + CURPROFIT = (CURPROFIT * (INCENTIVIZATION_EMA_COEFF - 1) + profitFactor + profitFactor2) / INCENTIVIZATION_EMA_COEFF + # log(type=EstProfitComb, profitFactor * basicinfo[DEPOSIT_SIZE_POS] / SCORING_REWARD_DIVISOR) + # log2(4, H, profitFactor2 + profitFactor, CURPROFIT) + saveArrayValue(PPB, PPN, PROFITBLOCK, H, CURPROFIT) + # log(type=DebugPBForBlock, PROFITBLOCK, H, CURPROFIT) + if (mod(H, PROFIT_PACKING_NUM) == (PROFIT_PACKING_NUM - 1) or H == max_height): + saveArrayChunk(PPB, PPN, self.guardians[index].profits, PROFITBLOCK, mod(H, WRAPLENGTH)) + loadArrayChunk(PPB, PPN, self.guardians[index].profits, PROFITBLOCK, mod(H + 1, WRAPLENGTH)) + H += 1 + # loadArrayChunk(PPB, PPN, self.guardians[index].profits, PROFITBLOCK, H - 1) + # log(type=DebugPB, PROFITBLOCK) + # log(type=RecordingTotProfit, index, H, loadArrayValue(PPB, PPN, PROFITBLOCK, H - 1)) + ~sstorebytes(ref(self.guardians[index].basicinfo), basicinfo, 320) + return(1:bool) + +event DebugPB(pblock:str) +event DebugPBForBlock(pblock:str, blocknum, curprofit) +event Progress(stage) +event ProgressWithData(stage, h, mh) +event ProgressWithDataArray(stage, data:arr) +event BlockLoss(odds, loss, height, index, blockHashInfo) +# event StateLoss(odds, loss, height, actualRoot:bytes32, index, stateRootCorrectness:bytes32, probs:bytes, maxHeight) +event StateLoss(odds, loss, height, index, stateRootInfo) + + +# Randomly select a guardian using a las vegas algorithm +def const sampleGuardian(orig_seedhash:bytes32, blknumber:uint256): + n = self.nextGuardianIndex + seedhash = sha3([orig_seedhash, blknumber]:arr) + while 1: + with index = mod(seedhash, n): + if (div(seedhash, 2**128) * MAX_DEPOSIT / 2**128 < self.guardians[index].orig_deposit_size): + if blknumber >= self.guardians[index].induction_height and blknumber <= self.guardians[index].withdrawal_height: + return(index) + seedhash = sha3(seedhash) + + +# Getter methods +def const getNextGuardianIndex(): + return(self.nextGuardianIndex:uint256) + +def const getGuardianStatus(index:uint256): + if not self.guardians[index].address: # inactive + return 0 + elif block.number < self.guardians[index].induction_height: # not yet inducted + return 1 + elif block.number < self.guardians[index].withdrawal_height: # now inducted + return 2 + 
else: # withdrawing + return 3 + +def const getGuardianAddress(index:uint256): + return(self.guardians[index].address:address) + +def const getGuardianInductionHeight(index:uint256): + return(self.guardians[index].induction_height:uint256) + +def const getGuardianWithdrawalHeight(index:uint256): + return(self.guardians[index].withdrawal_height:uint256) + +def const getGuardianCounter(index:uint256): + return(self.guardians[index].counter) + +def const getGuardianValidationCode(index:uint256): + a = string(~ssize(ref(self.guardians[index].validationCode))) + ~sloadbytes(ref(self.guardians[index].validationCode), a, len(a)) + return(a:str) + +def const getGuardianSeq(index:uint256): + basicinfo = array(10) + ~sloadbytes(ref(self.guardians[index].basicinfo), basicinfo, 320) + return(basicinfo[SEQ_POS]:uint256) + +def const getGuardianPrevhash(index:uint256): + basicinfo = array(10) + ~sloadbytes(ref(self.guardians[index].basicinfo), basicinfo, 320) + return(basicinfo[PREVHASH_POS]:bytes32) + +def const getGuardianSignups(): + return(self.nextGuardianIndex:uint256) + +def const getGuardianDeposit(index:uint256): + basicinfo = array(10) + ~sloadbytes(ref(self.guardians[index].basicinfo), basicinfo, 320) + return(basicinfo[DEPOSIT_SIZE_POS]:uint256) + +# Get information about a bet (internal method for slashing purposes) +def getBetInfo(index:uint256, max_height:uint256, probs:bytes, blockhashes:bytes32[], stateroots:bytes32[], stateroot_prob_from:bytes1, prevhash:bytes32, seqnum:uint256, sig:bytes): + _calldata = string(~calldatasize()) + ~calldatacopy(_calldata, 0, ~calldatasize()) + my_prefix = prefix(self.submitBet) + _calldata[0] = (_calldata[0] & ~sub(2**224, 1)) + my_prefix + signing_hash = ~sha3(_calldata, ~calldatasize() - 32 - ceil32(len(sig))) + # Check the sig against the guardian validation code + guardian_validation_code = string(~ssize(ref(self.guardians[index].validationCode))) + ~sloadbytes(ref(self.guardians[index].validationCode), guardian_validation_code, len(guardian_validation_code)) + sig_verified = 0 + with L = len(sig): + sig[-1] = signing_hash + ~callstatic(msg.gas - 20000, guardian_validation_code, len(guardian_validation_code), sig - 32, L + 32, ref(sig_verified), 32) + sig[-1] = L + assert sig_verified == 1 + return([signing_hash, index, seqnum]:arr) + +event Diagnostic(data:str) +event ListOfNumbers(foo:arr) +event TryingToSlashBets(bytes1:str, bytes2:str) +event TryingToSlashBlocks(bytes1:str, bytes2:str) + +# Slash two bets from the same guardian at the same height +def slashBets(bytes1:bytes, bytes2:bytes): + log(type=TryingToSlashBets, bytes1, bytes2) + assert len(bytes1) > 32 + assert len(bytes2) > 32 + my_prefix = prefix(self.getBetInfo) + my_old_prefix = prefix(self.submitBet) + bytes1[0] = (bytes1[0] & ~sub(2**224, 1)) + my_prefix + output1 = array(5) + ~call(msg.gas - 200000, self, 0, bytes1, len(bytes1), output1, 160) + bytes2[0] = (bytes2[0] & ~sub(2**224, 1)) + my_prefix + output2 = array(5) + ~call(msg.gas - 200000, self, 0, bytes2, len(bytes2), output2, 160) + assert output1[0] == 32 + assert output2[0] == 32 + assert output1[1] == 3 + assert output2[1] == 3 + assert not self.slashed[output1[2]] + assert not self.slashed[output2[2]] + # Two distinct signatures with the same index and seqnum... 
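+    # ...means the guardian equivocated at that seqnum: mark both signing hashes
+    # as slashed, halve the guardian's deposit, and pay a tenth of the pre-slash
+    # deposit to block.coinbase as a bounty for including the evidence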
+ if output1[3] == output2[3] and output1[4] == output2[4] and output1[2] != output2[2]: + self.slashed[output1[2]] = 1 + self.slashed[output2[2]] = 1 + basicinfo = array(10) + index = output1[3] + ~sloadbytes(ref(self.guardians[index].basicinfo), basicinfo, 320) + deposit = basicinfo[DEPOSIT_SIZE_POS] + basicinfo[DEPOSIT_SIZE_POS] /= 2 + ~sstorebytes(ref(self.guardians[index].basicinfo), basicinfo, 320) + ~mstore(0, block.coinbase) + ~mstore(32, deposit / 10) + ~call(12000, (self - self % 2**160) + ETHER, 0, 0, 64, 0, 0) + bytes1[0] = (bytes1[0] & ~sub(2**224, 1)) + my_old_prefix + bytes2[0] = (bytes2[0] & ~sub(2**224, 1)) + my_old_prefix + log(type=BetSlashed, output1[3], bytes1, bytes2) + +# Get information about a block (internal method for slashing purposes) +def getBlockInfo(block:bytes): + sz = len(block) + block[-1] = 0 + o = string(sz) + ~call(msg.gas - 20000, RLPGETBYTES32, 0, block - 32, sz + 32, o, sz) + blknumber = o[0] + assert blknumber <= block.number + block[-1] = 1 + ~call(msg.gas - 20000, RLPGETBYTES32, 0, block - 32, sz + 32, o, sz) + txroot = o[0] + block[-1] = 2 + ~call(msg.gas - 20000, RLPGETBYTES32, 0, block - 32, sz + 32, o, sz) + proposer = o[0] + if blknumber >= ENTER_EXIT_DELAY: + preseed = ~rngseed(blknumber - ENTER_EXIT_DELAY) + else: + preseed = ~rngseed(-1) + index = self.sampleGuardian(preseed, blknumber) + assert proposer == self.guardians[index].address + block[-1] = 3 + ~call(msg.gas - 20000, RLPGETSTRING, 0, block - 32, sz + 32, o, sz) + sig = o + 32 + # Check the sig against the guardian validation code + guardian_validation_code = string(~ssize(ref(self.guardians[index].validationCode))) + ~sloadbytes(ref(self.guardians[index].validationCode), guardian_validation_code, len(guardian_validation_code)) + sig_verified = 0 + signing_hash = sha3([blknumber, txroot]:arr) + with L = len(sig): + sig[-1] = signing_hash + ~callstatic(msg.gas - 20000, guardian_validation_code, len(guardian_validation_code), sig - 32, L + 32, ref(sig_verified), 32) + sig[-1] = L + assert sig_verified == 1 + return([signing_hash, index, blknumber]:arr) + +# Slash two blocks from the same guardian at the same height +def slashBlocks(bytes1:bytes, bytes2:bytes): + log(type=TryingToSlashBlocks, bytes1, bytes2) + assert len(bytes1) > 32 + assert len(bytes2) > 32 + output1 = self.getBlockInfo(bytes1, outitems=3) + output2 = self.getBlockInfo(bytes2, outitems=3) + assert not self.slashed[output1[0]] + assert not self.slashed[output2[0]] + # Two distinct signatures with the same index and seqnum... 
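+    # ...here output[1] is the guardian index and output[2] the block number from
+    # getBlockInfo, so two distinct signed blocks by the same guardian at the same
+    # height incur the same halve-and-bounty penalty as bet slashing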
+ if output1[1] == output2[1] and output1[2] == output2[2] and output1[0] != output2[0]: + self.slashed[output1[0]] = 1 + self.slashed[output2[0]] = 1 + basicinfo = array(10) + index = output1[1] + ~sloadbytes(ref(self.guardians[index].basicinfo), basicinfo, 320) + deposit = basicinfo[DEPOSIT_SIZE_POS] + basicinfo[DEPOSIT_SIZE_POS] /= 2 + ~sstorebytes(ref(self.guardians[index].basicinfo), basicinfo, 320) + ~mstore(0, block.coinbase) + ~mstore(32, deposit / 10) + ~call(12000, (self - self % 2**160) + ETHER, 0, 0, 64, 0, 0) + bytes1[0] = (bytes1[0] & ~sub(2**224, 1)) + my_old_prefix + bytes2[0] = (bytes2[0] & ~sub(2**224, 1)) + my_old_prefix + log(type=BlockSlashed, output1[1], bytes1, bytes2) + + diff --git a/ethereum/config.py b/ethereum/config.py index 020f2748b..923334637 100644 --- a/ethereum/config.py +++ b/ethereum/config.py @@ -1,60 +1,43 @@ -from ethereum import utils +import utils from ethereum.db import BaseDB +import time +from utils import address, int256, trie_root, hash32, to_string, \ + sha3, zpad, normalize_address, int_to_addr, big_endian_to_int, \ + int_to_big_endian -default_config = dict( - # Genesis block difficulty - GENESIS_DIFFICULTY=131072, - # Genesis block gas limit - GENESIS_GAS_LIMIT=3141592, - # Genesis block prevhash, coinbase, nonce - GENESIS_PREVHASH=b'\x00' * 32, - GENESIS_COINBASE=b'\x00' * 20, - GENESIS_NONCE=utils.zpad(utils.encode_int(42), 8), - GENESIS_MIXHASH=b'\x00' * 32, - GENESIS_TIMESTAMP=0, - GENESIS_EXTRA_DATA=b'', - GENESIS_INITIAL_ALLOC={}, - # Minimum gas limit - MIN_GAS_LIMIT=5000, - # Gas limit adjustment algo: - # block.gas_limit=block.parent.gas_limit * 1023/1024 + - # (block.gas_used * 6 / 5) / 1024 - GASLIMIT_EMA_FACTOR=1024, - GASLIMIT_ADJMAX_FACTOR=1024, - BLKLIM_FACTOR_NOM=3, - BLKLIM_FACTOR_DEN=2, - # Block reward - BLOCK_REWARD=5000 * utils.denoms.finney, - NEPHEW_REWARD=5000 * utils.denoms.finney // 32, # BLOCK_REWARD / 32 - # GHOST constants - UNCLE_DEPTH_PENALTY_FACTOR=8, - MAX_UNCLE_DEPTH=6, # max (block.number - uncle.number) - MAX_UNCLES=2, - # Difficulty adjustment constants - DIFF_ADJUSTMENT_CUTOFF=13, - BLOCK_DIFF_FACTOR=2048, - MIN_DIFF=131072, - # PoW info - POW_EPOCH_LENGTH=30000, - # Maximum extra data length - MAX_EXTRADATA_LENGTH=32, - # Exponential difficulty timebomb period - EXPDIFF_PERIOD=100000, - EXPDIFF_FREE_PERIODS=2, - # Blank account initial nonce - ACCOUNT_INITIAL_NONCE=0, - # Homestead fork (500k on livenet?) 
- HOMESTEAD_FORK_BLKNUM=2**100, - HOMESTEAD_DIFF_ADJUSTMENT_CUTOFF=16, -) -assert default_config['NEPHEW_REWARD'] == \ - default_config['BLOCK_REWARD'] // 32 - - -class Env(object): - - def __init__(self, db, config=None, global_config=None): - assert isinstance(db, BaseDB) - self.db = db - self.config = config or dict(default_config) - self.global_config = global_config or dict() +# List of system addresses and global parameters +STATEROOTS = int_to_addr(20) +BLKNUMBER = int_to_addr(30) +ETHER = int_to_addr(50) +CASPER = int_to_addr(60) +ECRECOVERACCT = int_to_addr(70) +PROPOSER = int_to_addr(80) +RNGSEEDS = int_to_addr(90) +BLOCKHASHES = int_to_addr(100) +GENESIS_TIME = int_to_addr(110) +LOG = int_to_addr(120) +BET_INCENTIVIZER = int_to_addr(150) +EXECUTION_STATE = int_to_addr(160) +CREATOR = int_to_addr(170) +GAS_DEPOSIT = int_to_addr(180) +BASICSENDER = int_to_addr(190) +SYS = int_to_addr(200) +TX_ENTRY_POINT = int_to_addr(2**160 - 1) +TXGAS = 1 +TXINDEX = 2 +GAS_REMAINING = 3 +BLOOM = 2**32 +GASLIMIT = 4712388 # Pau million +NULL_SENDER = int_to_addr(0) +CONST_CALL_SENDER = int_to_addr(31415) +BLKTIME = 3.75 +# Note that this parameter must be set in the Casper contract as well +ENTER_EXIT_DELAY = 110 +# Note that this parameter must be set in the Casper contract as well +VALIDATOR_ROUNDS = 5 +# Number of shards +MAXSHARDS = 65536 +SHARD_BYTES = len(int_to_big_endian(MAXSHARDS - 1)) +ADDR_BYTES = 20 + SHARD_BYTES +ADDR_BASE_BYTES = 20 +UNHASH_MAGIC_BYTES = 'unhash:' diff --git a/ethereum/default_betting_strategy.py b/ethereum/default_betting_strategy.py new file mode 100644 index 000000000..86c4c7d2c --- /dev/null +++ b/ethereum/default_betting_strategy.py @@ -0,0 +1,112 @@ +from config import BLKTIME +from utils import DEBUG +import random + +BRAVERY = 0.9375 + +# Takes as an argument a list of values and their associated weights +# and a fraction returns, returns the value such that the desired +# fraction of other values in the list, weighted by the given weights, +# is less than that value +def weighted_percentile(values, weights, frac): + zipvals = sorted(zip(values, weights)) + target = sum(weights) * frac + while target > zipvals[0][1]: + target -= zipvals[0][1] + zipvals.pop(0) + return zipvals[0][0] + + +# Make a default bet on a block based on just personal info +def mk_initial_bet(blk_number, blk_hash, tr, genesis_time, now): + scheduled_time = BLKTIME * blk_number + genesis_time + received_time = tr.get(blk_hash, None) + # If we already received a block... + if received_time: + time_delta = abs(received_time * 0.96 + now * 0.04 - scheduled_time) + prob = 1 if time_delta < BLKTIME * 2 else 3.0 / (3.0 + time_delta / BLKTIME) + DEBUG('Betting, block received', time_delta=time_delta, prob=prob) + return 0.7 if random.random() < prob else 0.3 + # If we have not yet received a block... + else: + time_delta = now - scheduled_time + prob = 1 if time_delta < BLKTIME * 2 else 3.0 / (3.0 + time_delta / BLKTIME) + DEBUG('Betting, block not received', time_delta=time_delta, prob=prob) + return 0.5 if random.random() < prob else 0.3 + + +# Make a bet on a specific block +def bet_on_block(opinions, blk_number, blk_hash, tr, genesis_time, now): + # Do we have the block? 
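+    # (tr maps block hash -> time we first received it; genesis_time and now are
+    # wall-clock timestamps used to judge how late the block is)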
+ have_block = blk_hash and blk_hash in tr + # The list of bet probabilities to use when producing one's own bet + probs = [] + # Weights for each validator + weights = [] + # My default opinion based on (i) whether or not I have the block, + # (ii) when I saw it first if I do, and (iii) the current time + default_bet = mk_initial_bet(blk_number, blk_hash, tr, genesis_time, now) + # Go through others' opinions, check if they (i) are eligible to + # bet, and (ii) have bet; if they have, add their bet to the + # list of bets; otherwise, add the default bet in their place + opinion_count = 0 + for i in opinions.keys(): + if opinions[i].induction_height <= blk_number < opinions[i].withdrawal_height and not opinions[i].withdrawn: + p = opinions[i].get_prob(blk_number) + # If this validator has not yet bet, then add our default bet + if p is None: + probs.append(default_bet) + # If they bet toward a different block as the block hash currently being processed, then: + # * if their probability is low, that means they are betting for the null case, so take their bet as is + # * if their probability is high, that means that they are betting for a different block, so for this + # block hash flip the bet as it's a bet against this particular block hash + elif opinions[i].blockhashes[blk_number] != blk_hash and blk_hash is not None: + probs.append(min(p, max(1 - p, 0.25))) + # If they bet for the same block as is currently being processed, then take their bet as is + else: + probs.append(p) + weights.append(opinions[i].deposit_size) + opinion_count += (1 if p is not None else 0) + # The algorithm for producing your own bet based on others' bets; + # the intention is to converge toward 0 or 1 + p33 = weighted_percentile(probs, weights, 1/3.) + p50 = weighted_percentile(probs, weights, 1/2.) + p67 = weighted_percentile(probs, weights, 2/3.) 
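+    # If even the weighted 33rd percentile is confident the block exists, push the
+    # bet toward 1 (scaled by BRAVERY); if even the 67th percentile is confident it
+    # does not, push toward 0; otherwise stay near the weighted median, shifted down
+    # when we have not seen the block ourselves and clamped to [0.15, 0.85]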
+ if p33 > 0.8: + o = BRAVERY + p33 * (1 - BRAVERY) + elif p67 < 0.2: + o = p67 * (1 - BRAVERY) + else: + o = min(0.85, max(0.15, p50 * 3 - (0.8 if have_block else 1.2))) + return o + +# Takes as input: (i) a list of other validators' opinions, +# (ii) a block height, (iii) a list of known blocks at that +# height, (iv) a hash -> time received map, (v) the genesis +# time, (vi) the current time +# Outputs a (block hash, probability, ask) combination where +# `ask` represents whether or not to send a network message +# asking for the block + +def bet_at_height(opinions, h, known, time_received, genesis_time, now): + # Determine candidate blocks + candidates = [o.blockhashes[h] for o in opinions.values() + if len(o.blockhashes) > h and o.blockhashes[h] not in (None, '\x00' * 32)] + for block in known: + candidates.append(block.hash) + if not len(candidates): + candidates.append('\x00' * 32) + candidates = list(set(candidates)) + # Locate highest probability + probs = [(bet_on_block(opinions, h, c, time_received, genesis_time, now), c) for c in candidates] + prob, new_block_hash = sorted(probs)[-1] + if len(probs) >= 2: + DEBUG('Voting on multiple candidates', + height=h, + options=[(a, b[:8].encode('hex')) for a, b in probs], + winner=(prob, new_block_hash[:8].encode('hex'))) + # If we don't have a block, then confidently ask + if prob > 0.7 and new_block_hash not in time_received: + return 0.7, new_block_hash, True + else: + return prob, new_block_hash, False diff --git a/ethereum/ecdsa_accounts.py b/ethereum/ecdsa_accounts.py new file mode 100644 index 000000000..91e9a0b83 --- /dev/null +++ b/ethereum/ecdsa_accounts.py @@ -0,0 +1,158 @@ +from config import BLOCKHASHES, STATEROOTS, BLKNUMBER, CASPER, GASLIMIT, NULL_SENDER, ETHER, ECRECOVERACCT, BASICSENDER, GAS_REMAINING, ADDR_BYTES +from utils import normalize_address, zpad, encode_int, big_endian_to_int, \ + encode_int32, sha3, shardify +from utils import privtoaddr as _privtoaddr +import bitcoin +from serenity_transactions import Transaction +from serenity_blocks import mk_contract_address, tx_state_transition, State, initialize_with_gas_limit, get_code +from mandatory_account_code import mandatory_account_code +import serpent +import db +import abi + +# This file provides helper methods for managing ECDSA-based accounts +# on top of Serenity + +# The "signature checker" code for ECDSA accounts +cc = """ +# We assume that data takes the following schema: +# bytes 0-31: v (ECDSA sig) +# bytes 32-63: r (ECDSA sig) +# bytes 64-95: s (ECDSA sig) +# bytes 96-127: gasprice +# bytes 128-159: sequence number (formerly called "nonce") +# bytes 172-191: to +# bytes 192-223: value +# bytes 224+: data +# ~calldatacopy(0, 0, ~calldatasize()) +# Prepare the transaction data for hashing: gas + non-sig data +~mstore(128, ~txexecgas()) +~calldatacopy(160, 96, ~calldatasize() - 96) +# Hash it +~mstore(0, ~sha3(128, ~calldatasize() - 64)) +~calldatacopy(32, 0, 96) +# Call ECRECOVER contract to get the sender +~call(5000, 1, 0, 0, 128, 0, 32) +# Check sender correctness; exception if not +if ~mload(0) != self.storage[2]: + # ~log1(0, 0, 51) + ~invalid() +# Check value sufficiency +if self.balance < ~calldataload(192) + ~calldataload(0) * ~txexecgas(): + # ~log1(0, 0, 52) + ~invalid() +# Sequence number operations +with minusone = ~sub(0, 1): + with curseq = self.storage[minusone]: + # Check sequence number correctness, exception if not + if ~calldataload(128) != curseq: + # ~log3(0, 0, 53, ~calldataload(128), curseq) + ~invalid() + # Increment sequence 
number + self.storage[minusone] = curseq + 1 + return(~calldataload(96)) +""" +constructor_code = serpent.compile(cc) +constructor_ct = abi.ContractTranslator(serpent.mk_full_signature(cc)) + +#The "runner" code for ECDSA accounts +rc = """ +# We assume that data takes the following schema: +# bytes 0-31: gasprice +# bytes 32-63: v (ECDSA sig) +# bytes 64-96: r (ECDSA sig) +# bytes 96-127: s (ECDSA sig) +# bytes 128-159: sequence number (formerly called "nonce") +# bytes 172-191: to +# bytes 192-223: value +# bytes 224+: data +~calldatacopy(0, 0, ~calldatasize()) +~call(msg.gas - 50000, ~calldataload(160), ~calldataload(192), 224, ~calldatasize() - 224, ~calldatasize(), 10000) +~return(~calldatasize(), ~msize() - ~calldatasize()) +""" + +runner_code = serpent.compile(rc) + +s = State('', db.EphemDB()) +initialize_with_gas_limit(s, 10**9) +tx_state_transition(s, Transaction(None, 1000000, data='', code=constructor_code)) +constructor_output_code = get_code(s, mk_contract_address(code=constructor_code)) + +# The init code for an ECDSA account. Calls the constructor storage contract to +# get the ECDSA account code, then uses mcopy to swap the default address for +# the user's pubkeyhash +account_code = serpent.compile((""" +def init(): + sstore(0, %d) + sstore(1, %d) + sstore(2, 0x82a978b3f5962a5b0957d9ee9eef472ee55b42f1) +""" % (big_endian_to_int(ECRECOVERACCT), big_endian_to_int(BASICSENDER))) + '\n' + mandatory_account_code) + +# Make the account code for a particular pubkey hash +def mk_code(pubkeyhash): + return account_code.replace('\x82\xa9x\xb3\xf5\x96*[\tW\xd9\xee\x9e\xefG.\xe5[B\xf1', pubkeyhash) + +# Provide the address corresponding to a particular public key +def privtoaddr(k, left_bound=0): + return mk_contract_address(code=mk_code(_privtoaddr(k)), left_bound=left_bound) + +# The code to validate bets made by an account in Casper +validation_code = serpent.compile(""" +# We assume that data takes the following schema: +# bytes 0-31: hash +# bytes 32-63: v (ECDSA sig) +# bytes 64-95: r (ECDSA sig) +# bytes 96-127: s (ECDSA sig) + +# Call ECRECOVER contract to get the sender +~calldatacopy(0, 0, 128) +~call(5000, 1, 0, 0, 128, 0, 32) +# Check sender correctness +return(~mload(0) == 0x82a978b3f5962a5b0957d9ee9eef472ee55b42f1) +""") + +# Make the validation code for a particular address, using a similar +# replacement technique as previously +def mk_validation_code(k): + pubkeyhash = _privtoaddr(k) + code3 = validation_code.replace('\x82\xa9x\xb3\xf5\x96*[\tW\xd9\xee\x9e\xefG.\xe5[B\xf1', pubkeyhash) + s = State('', db.EphemDB()) + initialize_with_gas_limit(s, 10**9) + tx_state_transition(s, Transaction(None, 1000000, data='', code=code3)) + return get_code(s, mk_contract_address(code=code3)) + +# Helper function for signing a block +def sign_block(block, key): + sigdata = sha3(encode_int32(block.number) + block.txroot) + v, r, s = bitcoin.ecdsa_raw_sign(sigdata, key) + block.sig = encode_int32(v) + encode_int32(r) + encode_int32(s) + return block + +# Helper function for signing a bet +def sign_bet(bet, key): + bet.sig = '' + sigdata = sha3(bet.serialize()[:-32]) + v, r, s = bitcoin.ecdsa_raw_sign(sigdata, key) + bet.sig = encode_int32(v) + encode_int32(r) + encode_int32(s) + s = bet.serialize() + bet._hash = sha3(s) + return bet + +# Creates data for a transaction with the given gasprice, to address, +# value and data +def mk_txdata(seq, gasprice, to, value, data): + return encode_int32(gasprice) + encode_int32(seq) + \ + '\x00' * (32 - ADDR_BYTES) + normalize_address(to) + 
encode_int32(value) + data + +# Signs data+startgas +def sign_txdata(data, gas, key): + v, r, s = bitcoin.ecdsa_raw_sign(sha3(encode_int32(gas) + data), key) + return encode_int32(v) + encode_int32(r) + encode_int32(s) + data + +# The equivalent of transactions.Transaction(nonce, gasprice, startgas, +# to, value, data).sign(key) in 1.0 +def mk_transaction(seq, gasprice, gas, to, value, data, key, create=False): + code = mk_code(_privtoaddr(key)) + addr = mk_contract_address(code=code) + data = sign_txdata(mk_txdata(seq, gasprice, to, value, data), gas, key) + return Transaction(addr, gas, data=data, code=code if create else b'') diff --git a/ethereum/fast_rlp.py b/ethereum/fast_rlp.py index 20afe888f..3db6e6384 100644 --- a/ethereum/fast_rlp.py +++ b/ethereum/fast_rlp.py @@ -1,6 +1,6 @@ import sys import rlp -from utils import int_to_big_endian +from utils import int_to_big_endian, big_endian_to_int, safe_ord import db @@ -29,11 +29,51 @@ def length_prefix(length, offset): length_string = int_to_big_endian(length) return chr(offset + 56 - 1 + len(length_string)) + length_string +def _decode_optimized(rlp): + o = [] + pos = 0 + _typ, _len, pos = consume_length_prefix(rlp, pos) + if _typ != list: + return rlp[pos: pos + _len] + while pos < len(rlp): + _, _l, _p = consume_length_prefix(rlp, pos) + o.append(_decode_optimized(rlp[pos: _l + _p])) + pos = _l + _p + return o + +def consume_length_prefix(rlp, start): + """Read a length prefix from an RLP string. + + :param rlp: the rlp string to read from + :param start: the position at which to start reading + :returns: a tuple ``(type, length, end)``, where ``type`` is either ``str`` + or ``list`` depending on the type of the following payload, + ``length`` is the length of the payload in bytes, and ``end`` is + the position of the first payload byte in the rlp string + """ + b0 = safe_ord(rlp[start]) + if b0 < 128: # single byte + return (str, 1, start) + elif b0 < 128 + 56: # short string + return (str, b0 - 128, start + 1) + elif b0 < 192: # long string + ll = b0 - 128 - 56 + 1 + l = big_endian_to_int(rlp[start + 1:start + 1 + ll]) + return (str, l, start + 1 + ll) + elif b0 < 192 + 56: # short list + return (list, b0 - 192, start + 1) + else: # long list + ll = b0 - 192 - 56 + 1 + l = big_endian_to_int(rlp[start + 1:start + 1 + ll]) + return (list, l, start + 1 + ll) + # if sys.version_info.major == 2: encode_optimized = _encode_optimized + decode_optimized = _decode_optimized else: encode_optimized = rlp.codec.encode_raw + decode_optimized = rlp.codec.decode_raw def main(): diff --git a/ethereum/fastvm.py b/ethereum/fastvm.py index 70efc3e73..acbd9235a 100644 --- a/ethereum/fastvm.py +++ b/ethereum/fastvm.py @@ -1,5 +1,5 @@ import copy -import ethereum.opcodes as opcodes +import opcodes # ####### dev hack flags ############### verify_stack_after_op = False @@ -7,16 +7,17 @@ # ###################################### import sys -from ethereum import utils from ethereum.abi import is_numeric import copy -from ethereum import opcodes import time from ethereum.slogging import get_logger from rlp.utils import encode_hex, ascii_chr -from ethereum.utils import to_string +from utils import to_string, shardify, int_to_addr, encode_int32, ADDR_BASE_BYTES +import utils import numpy +from config import BLOCKHASHES, STATEROOTS, BLKNUMBER, CASPER, GASLIMIT, NULL_SENDER, ETHER, PROPOSER, TXGAS, MAXSHARDS, EXECUTION_STATE, LOG, RNGSEEDS, CREATOR + log_log = get_logger('eth.vm.log') log_vm_exit = get_logger('eth.vm.exit') log_vm_op = get_logger('eth.vm.op') @@ 
-63,16 +64,21 @@ def extract_copy(self, mem, memstart, datastart, size): class Message(object): def __init__(self, sender, to, value, gas, data, - depth=0, code_address=None, is_create=False): + left_bound=0, right_bound=MAXSHARDS, + depth=0, code_address=None, is_create=False, + transfers_value=True): self.sender = sender self.to = to self.value = value self.gas = gas self.data = data self.depth = depth + self.left_bound = left_bound + self.right_bound = right_bound self.logs = [] self.code_address = code_address self.is_create = is_create + self.transfers_value = transfers_value def __repr__(self): return '' % self.to[:8] @@ -88,12 +94,18 @@ def __init__(self, **kwargs): for kw in kwargs: setattr(self, kw, kwargs[kw]) +def validate_and_get_address(addr_int, msg): + if msg.left_bound <= (addr_int // 2**160) % MAXSHARDS < msg.right_bound: + return int_to_addr(addr_int) + print 'FAIL, left: %d, at: %d, right: %d' % (msg.left_bound, (addr_int // 2**160), msg.right_bound) + return False + end_breakpoints = [ - 'JUMP', 'JUMPI', 'CALL', 'CALLCODE', 'CREATE', 'SUICIDE', 'STOP', 'RETURN', 'SUICIDE', 'INVALID', 'GAS', 'PC' + 'JUMP', 'JUMPI', 'CALL', 'CALLCODE', 'CALLSTATIC', 'CREATE', 'SUICIDE', 'STOP', 'RETURN', 'SUICIDE', 'INVALID', 'GAS', 'PC', 'BREAKPOINT' ] start_breakpoints = [ - 'JUMPDEST', 'GAS', 'PC' + 'JUMPDEST', 'GAS', 'PC', 'BREAKPOINT' ] @@ -141,7 +153,7 @@ def preprocess_code(code): cc_stack_change = 0 cc_min_req_stack = 0 cc_max_req_stack = 1024 - ops[i] = (0, 0, 1024, [0], 0) + ops[i] = [0, 0, 1024, i, 0] return ops @@ -177,6 +189,7 @@ def data_copy(compustate, size): def vm_exception(error, **kargs): + print 'EXCEPTION', error, kargs log_vm_exit.trace('EXCEPTION', cause=error, **kargs) return 0, 0, [] @@ -188,7 +201,7 @@ def peaceful_exit(cause, gas, data, **kargs): code_cache = {} -def vm_execute(ext, msg, code): +def vm_execute(ext, msg, code, breaking=False): # precompute trace flag # if we trace vm, we're in slow mode anyway trace_vm = log_vm_op.is_active('trace') @@ -208,11 +221,17 @@ def vm_execute(ext, msg, code): steps = 0 _prevop = None # for trace only + _EXSTATE = shardify(EXECUTION_STATE, msg.left_bound) + _LOG = shardify(LOG, msg.left_bound) + _CREATOR = shardify(CREATOR, msg.left_bound) + + # print 'starting to run vm', msg.to.encode('hex') while 1: # print 'op: ', op, time.time() - s # s = time.time() # stack size limit error if compustate.pc not in processed_code: + # print processed_code, map(ord, code), compustate.pc return vm_exception('INVALID START POINT') _data = processed_code[compustate.pc] @@ -253,10 +272,10 @@ def vm_execute(ext, msg, code): trace_data['sha3memory'] = \ encode_hex(utils.sha3(''.join([ascii_chr(x) for x in compustate.memory]))) - if _prevop in (op_SSTORE, op_SLOAD) or steps == 0: - trace_data['storage'] = ext.log_storage(msg.to) - # trace_data['gas'] = to_string(compustate.gas + fee) - trace_data['inst'] = op + # if _prevop in (op_SSTORE, op_SLOAD) or steps == 0: + # trace_data['storage'] = ext.log_storage(msg.to) + trace_data['approx_gas'] = to_string(compustate.gas) + trace_data['inst'] = op % 256 trace_data['pc'] = to_string(compustate.pc - 1) if steps == 0: trace_data['depth'] = msg.depth @@ -265,6 +284,7 @@ def vm_execute(ext, msg, code): trace_data['steps'] = steps # if op[:4] == 'PUSH': # trace_data['pushvalue'] = pushval + # print trace_data log_vm_op.trace('vm', **trace_data) steps += 1 _prevop = op @@ -366,10 +386,10 @@ def vm_execute(ext, msg, code): elif op == op_ADDRESS: stk.append(utils.coerce_to_int(msg.to)) elif op == 
op_BALANCE: - addr = utils.coerce_addr_to_hex(stk.pop() % 2**160) - stk.append(ext.get_balance(addr)) - elif op == op_ORIGIN: - stk.append(utils.coerce_to_int(ext.tx_origin)) + addr = validate_and_get_address(stk.pop(), msg) + if addr is False: + return vm_exception('OUT OF RANGE') + stk.append(utils.big_endian_to_int(ext.get_storage(ETHER, addr))) elif op == op_CALLER: stk.append(utils.coerce_to_int(msg.sender)) elif op == op_CALLVALUE: @@ -398,15 +418,17 @@ def vm_execute(ext, msg, code): mem[start + i] = utils.safe_ord(code[s1 + i]) else: mem[start + i] = 0 - elif op == op_GASPRICE: - stk.append(ext.tx_gasprice) elif op == op_EXTCODESIZE: - addr = utils.coerce_addr_to_hex(stk.pop() % 2**160) - stk.append(len(ext.get_code(addr) or b'')) + addr = validate_and_get_address(stk.pop(), msg) + if addr is False: + return vm_exception('OUT OF RANGE') + stk.append(len(ext.get_storage_at(addr, '') or b'')) elif op == op_EXTCODECOPY: - addr = utils.coerce_addr_to_hex(stk.pop() % 2**160) + addr = validate_and_get_address(stk.pop(), msg) + if addr is False: + return vm_exception('OUT OF RANGE') start, s2, size = stk.pop(), stk.pop(), stk.pop() - extcode = ext.get_code(addr) or b'' + extcode = ext.get_storage_at(addr, b'') or b'' assert utils.is_string(extcode) if not mem_extend(mem, compustate, op, start, size): return vm_exception('OOG EXTENDING MEMORY') @@ -417,19 +439,28 @@ def vm_execute(ext, msg, code): mem[start + i] = utils.safe_ord(extcode[s2 + i]) else: mem[start + i] = 0 + elif op == op_MCOPY: + to, frm, size = stk.pop(), stk.pop(), stk.pop() + if not mem_extend(mem, compustate, op, to, size): + return vm_exception('OOG EXTENDING MEMORY') + if not mem_extend(mem, compustate, op, frm, size): + return vm_exception('OOG EXTENDING MEMORY') + if not data_copy(compustate, size): + return vm_exception('OOG COPY DATA') + data = mem[frm: frm + size] + for i in range(size): + mem[to + i] = data[i] elif op < 0x50: if op == op_BLOCKHASH: - stk.append(utils.big_endian_to_int(ext.block_hash(stk.pop()))) + stk.append(utils.big_endian_to_int(ext.get_storage(BLOCKHASHES, stk.pop()))) elif op == op_COINBASE: - stk.append(utils.big_endian_to_int(ext.block_coinbase)) - elif op == op_TIMESTAMP: - stk.append(ext.block_timestamp) + stk.append(utils.big_endian_to_int(ext.get_storage(PROPOSER, '\x00' * 32))) elif op == op_NUMBER: - stk.append(ext.block_number) + stk.append(utils.big_endian_to_int(ext.get_storage(BLKNUMBER, '\x00' * 32))) elif op == op_DIFFICULTY: stk.append(ext.block_difficulty) elif op == op_GASLIMIT: - stk.append(ext.block_gas_limit) + stk.append(GASLIMIT) elif op < 0x60: if op == op_POP: stk.pop() @@ -455,20 +486,32 @@ def vm_execute(ext, msg, code): return vm_exception('OOG EXTENDING MEMORY') mem[s0] = s1 % 256 elif op == op_SLOAD: - stk.append(ext.get_storage_data(msg.to, stk.pop())) - elif op == op_SSTORE: + stk.append(utils.big_endian_to_int(ext.get_storage(msg.to, stk.pop()))) + elif op == op_SSTORE or op == op_SSTOREEXT: + if op == op_SSTOREEXT: + shard = stk.pop() + if not validate_and_get_address(256**ADDR_BYTES * shard): + return vm_exception('OUT OF RANGE') + toaddr = shardify(msg.to, shard) + else: + toaddr = msg.to s0, s1 = stk.pop(), stk.pop() - if ext.get_storage_data(msg.to, s0): + if ext.get_storage(msg.to, s0): gascost = opcodes.GSTORAGEMOD if s1 else opcodes.GSTORAGEKILL refund = 0 if s1 else opcodes.GSTORAGEREFUND else: gascost = opcodes.GSTORAGEADD if s1 else opcodes.GSTORAGEMOD refund = 0 + if toaddr == CASPER: + gascost /= 2 if compustate.gas < gascost: return 
vm_exception('OUT OF GAS') compustate.gas -= gascost - ext.add_refund(refund) # adds neg gascost as a refund if below zero - ext.set_storage_data(msg.to, s0, s1) + ext.set_storage(toaddr, s0, s1) + # Copy code to new shard + if op == op_SSTOREEXT: + if not ext.get_storage(toaddr, ''): + ext.set_storage(toaddr, ext.get_storage(msg.to)) elif op == op_JUMP: compustate.pc = stk.pop() opnew = processed_code[compustate.pc][4] if \ @@ -489,6 +532,14 @@ def vm_execute(ext, msg, code): stk.append(len(mem)) elif op == op_GAS: stk.append(compustate.gas) # AFTER subtracting cost 1 + elif op == op_SLOADEXT: + shard, key = stk.pop(), stk.pop() + if not validate_and_get_address(256**ADDR_BYTES * shard): + return vm_exception('OUT OF RANGE') + toaddr = shardify(msg.to, shard) + stk.append(utils.big_endian_to_int(ext.get_storage(toaddr, key))) + if not ext.get_storage(toaddr, ''): + ext.set_storage(toaddr, ext.get_storage(msg.to)) elif op_PUSH1 <= (op & 255) <= op_PUSH32: # Hide push value in high-order bytes of op stk.append(op >> 8) @@ -502,77 +553,79 @@ def vm_execute(ext, msg, code): stk[-1] = temp elif op_LOG0 <= op <= op_LOG4: - """ - 0xa0 ... 0xa4, 32/64/96/128/160 + len(data) gas - a. Opcodes LOG0...LOG4 are added, takes 2-6 stack arguments - MEMSTART MEMSZ (TOPIC1) (TOPIC2) (TOPIC3) (TOPIC4) - b. Logs are kept track of during tx execution exactly the same way as suicides - (except as an ordered list, not a set). - Each log is in the form [address, [topic1, ... ], data] where: - * address is what the ADDRESS opcode would output - * data is mem[MEMSTART: MEMSTART + MEMSZ] - * topics are as provided by the opcode - c. The ordered list of logs in the transaction are expressed as [log0, log1, ..., logN]. - """ depth = op - op_LOG0 mstart, msz = stk.pop(), stk.pop() - topics = [stk.pop() for x in range(depth)] - compustate.gas -= msz * opcodes.GLOGBYTE if not mem_extend(mem, compustate, op, mstart, msz): return vm_exception('OOG EXTENDING MEMORY') - data = b''.join(map(ascii_chr, mem[mstart: mstart + msz])) - ext.log(msg.to, topics, data) - log_log.trace('LOG', to=msg.to, topics=topics, data=list(map(utils.safe_ord, data))) - # print('LOG', msg.to, topics, list(map(ord, data))) + topics = [stk.pop() if i < depth else 0 for i in range(4)] + log_data = map(ord, ''.join(map(encode_int32, topics))) + mem[mstart: mstart + msz] + # print topics, mem[mstart: mstart + msz] + # print 'ld', log_data, msz + log_data = CallData(log_data, 0, len(log_data)) + log_gas = opcodes.GLOGBYTE * msz + opcodes.GLOGBASE + \ + len(topics) * opcodes.GLOGTOPIC + compustate.gas -= log_gas + if compustate.gas < log_gas: + return vm_exception('OUT OF GAS', needed=log_gas) + log_msg = Message(msg.to, _LOG, 0, log_gas, log_data, + depth=msg.depth + 1, code_address=_LOG) + result, gas, data = ext.msg(log_msg, '') + # print '###log###', mstart, msz, topics + if 3141592653589 in [mstart, msz] + topics: + raise Exception("Testing exception triggered!") elif op == op_CREATE: value, mstart, msz = stk.pop(), stk.pop(), stk.pop() if not mem_extend(mem, compustate, op, mstart, msz): return vm_exception('OOG EXTENDING MEMORY') - if ext.get_balance(msg.to) >= value and msg.depth < 1024: - cd = CallData(mem, mstart, msz) - create_msg = Message(msg.to, b'', value, compustate.gas, cd, msg.depth + 1) - o, gas, addr = ext.create(create_msg) - if o: - stk.append(utils.coerce_to_int(addr)) - compustate.gas = gas - else: - stk.append(0) - compustate.gas = 0 + code = mem[mstart:msz] + crate_msg = Message(msg.to, _CREATOR, value, msg.gas - 20000, code, + 
depth=msg.depth + 1, code_address=_CREATOR) + result, gas, data = ext.msg(create_msg, '') + if result: + addr = shardify(sha3(msg.to[-ADDR_BASE_BYTES:] + code)[32-ADDR_BASE_BYTES:], left_bound) + stk.append(big_endian_to_int(addr)) else: stk.append(0) elif op == op_CALL: gas, to, value, meminstart, meminsz, memoutstart, memoutsz = \ stk.pop(), stk.pop(), stk.pop(), stk.pop(), stk.pop(), stk.pop(), stk.pop() - if not mem_extend(mem, compustate, op, meminstart, meminsz) or \ - not mem_extend(mem, compustate, op, memoutstart, memoutsz): + if not mem_extend(mem, compustate, op, meminstart, meminsz): return vm_exception('OOG EXTENDING MEMORY') - to = utils.encode_int(to) - to = ((b'\x00' * (32 - len(to))) + to)[12:] - extra_gas = (not ext.account_exists(to)) * opcodes.GCALLNEWACCOUNT + \ - (value > 0) * opcodes.GCALLVALUETRANSFER + to = validate_and_get_address(to, msg) + if to is False: + return vm_exception('OUT OF RANGE') + extra_gas = (value > 0) * opcodes.GCALLVALUETRANSFER submsg_gas = gas + opcodes.GSTIPEND * (value > 0) if compustate.gas < gas + extra_gas: return vm_exception('OUT OF GAS', needed=gas+extra_gas) - if ext.get_balance(msg.to) >= value and msg.depth < 1024: + if utils.big_endian_to_int(ext.get_storage(ETHER, msg.to)) >= value and msg.depth < 1024: compustate.gas -= (gas + extra_gas) cd = CallData(mem, meminstart, meminsz) call_msg = Message(msg.to, to, value, submsg_gas, cd, - msg.depth + 1, code_address=to) - result, gas, data = ext.msg(call_msg) + depth=msg.depth + 1, code_address=to) + codehash = ext.get_storage(to, '') + result, gas, data = ext.msg(call_msg, ext.unhash(codehash) if codehash else '') if result == 0: stk.append(0) else: stk.append(1) + if not mem_extend(mem, compustate, op, memoutstart, min(len(data), memoutsz)): + return vm_exception('OOG EXTENDING MEMORY') compustate.gas += gas for i in range(min(len(data), memoutsz)): mem[memoutstart + i] = data[i] else: compustate.gas -= (gas + extra_gas - submsg_gas) stk.append(0) - elif op == op_CALLCODE: - gas, to, value, meminstart, meminsz, memoutstart, memoutsz = \ - stk.pop(), stk.pop(), stk.pop(), stk.pop(), stk.pop(), stk.pop(), stk.pop() + elif op == op_CALLCODE or op == op_DELEGATECALL: + if op == op_CALLCODE: + gas, to, value, meminstart, meminsz, memoutstart, memoutsz = \ + stk.pop(), stk.pop(), stk.pop(), stk.pop(), stk.pop(), stk.pop(), stk.pop() + else: + gas, to, meminstart, meminsz, memoutstart, memoutsz = \ + stk.pop(), stk.pop(), stk.pop(), stk.pop(), stk.pop(), stk.pop() + value = msg.value if not mem_extend(mem, compustate, op, meminstart, meminsz) or \ not mem_extend(mem, compustate, op, memoutstart, memoutsz): return vm_exception('OOG EXTENDING MEMORY') @@ -580,14 +633,20 @@ def vm_execute(ext, msg, code): submsg_gas = gas + opcodes.GSTIPEND * (value > 0) if compustate.gas < gas + extra_gas: return vm_exception('OUT OF GAS', needed=gas+extra_gas) - if ext.get_balance(msg.to) >= value and msg.depth < 1024: + if utils.big_endian_to_int(ext.get_storage(ETHER, msg.to)) and msg.depth < 1024: compustate.gas -= (gas + extra_gas) - to = utils.encode_int(to) - to = ((b'\x00' * (32 - len(to))) + to)[12:] + to = validate_and_get_address(to, msg) + if to is False: + return vm_exception('OUT OF RANGE') cd = CallData(mem, meminstart, meminsz) - call_msg = Message(msg.to, msg.to, value, submsg_gas, cd, - msg.depth + 1, code_address=to) - result, gas, data = ext.msg(call_msg) + if op == op_CALLCODE: + call_msg = Message(msg.to, msg.to, value, submsg_gas, cd, + depth=msg.depth + 1, code_address=to) + elif op == 
op_DELEGATECALL: + call_msg = Message(msg.sender, msg.to, value, submsg_gas, cd, + depth=msg.depth + 1, code_address=to, transfers_value=False) + codehash = ext.get_storage(to, '') + result, gas, data = ext.msg(call_msg, ext.unhash(codehash) if codehash else '') if result == 0: stk.append(0) else: @@ -598,18 +657,95 @@ def vm_execute(ext, msg, code): else: compustate.gas -= (gas + extra_gas - submsg_gas) stk.append(0) + elif op == op_CALLSTATIC: + submsg_gas, codestart, codesz, datastart, datasz, outstart, outsz = [stk.pop() for i in range(7)] + if not mem_extend(mem, compustate, op, codestart, codesz) or \ + not mem_extend(mem, compustate, op, datastart, datasz): + return vm_exception('OOG EXTENDING MEMORY') + if compustate.gas < submsg_gas: + return vm_exception('OUT OF GAS', needed=submsg_gas) + compustate.gas -= submsg_gas + cd = CallData(mem, datastart, datasz) + call_msg = Message(msg.sender, msg.to, 0, submsg_gas, cd, depth=msg.depth + 1) + result, gas, data = ext.static_msg(call_msg, ''.join([chr(x) for x in mem[codestart:codestart + codesz]])) + if result == 0: + stk.append(0) + else: + stk.append(1) + compustate.gas += gas + if not mem_extend(mem, compustate, op, outstart, outsz): + return vm_exception('OOG EXTENDING MEMORY') + for i in range(min(len(data), outsz)): + mem[outstart + i] = data[i] elif op == op_RETURN: s0, s1 = stk.pop(), stk.pop() if not mem_extend(mem, compustate, op, s0, s1): return vm_exception('OOG EXTENDING MEMORY') return peaceful_exit('RETURN', compustate.gas, mem[s0: s0 + s1]) + elif op == op_SLOADBYTES or op == op_SLOADEXTBYTES: + if op == op_SLOADEXTBYTES: + shard = stk.pop() + if not validate_and_get_address(256**ADDR_BYTES * shard): + return vm_exception('OUT OF RANGE') + toaddr = shardify(msg.to, shard) + else: + toaddr = msg.to + s0, s1, s2 = stk.pop(), stk.pop(), stk.pop() + data = map(ord, ext.get_storage(toaddr, s0)) + if not mem_extend(mem, compustate, op, s1, min(len(data), s2)): + return vm_exception('OOG EXTENDING MEMORY') + for i in range(min(len(data), s2)): + mem[s1 + i] = data[i] + # Copy code to new shard + if op == op_SLOADEXTBYTES: + if not ext.get_storage(toaddr, ''): + ext.set_storage(toaddr, ext.get_storage(msg.to)) + elif op == op_BREAKPOINT: + if breaking: + return peaceful_exit('RETURN', compustate.gas, mem) + else: + pass + elif op == op_RNGSEED: + stk.append(utils.big_endian_to_int(ext.get_storage(RNGSEEDS, stk.pop()))) + elif op == op_SSIZEEXT: + shard, key = stk.pop(), stk.pop() + if not validate_and_get_address(256**ADDR_BYTES * shard): + return vm_exception('OUT OF RANGE') + toaddr = shardify(msg.to, shard) + stk.append(len(ext.get_storage(toaddr, key))) + if not ext.get_storage(toaddr, ''): + ext.set_storage(toaddr, ext.get_storage(msg.to)) + elif op == op_SSTOREBYTES or op == op_SSTOREEXTBYTES: + if op == op_SSTOREEXTBYTES: + shard = stk.pop() + if not validate_and_get_address(256**ADDR_BYTES * shard): + return vm_exception('OUT OF RANGE') + toaddr = shardify(msg.to, shard) + else: + toaddr = msg.to + s0, s1, s2 = stk.pop(), stk.pop(), stk.pop() + if not mem_extend(mem, compustate, op, s1, s2): + return vm_exception('OOG EXTENDING MEMORY') + data = ''.join(map(chr, mem[s1: s1 + s2])) + ext.set_storage(toaddr, s0, data) + # Copy code to new shard + if op == op_SSTOREEXTBYTES: + if not ext.get_storage(toaddr, ''): + ext.set_storage(toaddr, ext.get_storage(msg.to)) + elif op == op_SSIZE: + stk.append(len(ext.get_storage(msg.to, stk.pop()))) + elif op == op_STATEROOT: + 
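+            # STATEROOT: look up the saved state root for the given block number
+            # in the STATEROOTS system address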
stk.append(utils.big_endian_to_int(ext.get_storage(STATEROOTS, stk.pop()))) + elif op == op_TXGAS: + stk.append(utils.big_endian_to_int(ext.get_storage(_EXSTATE, TXGAS))) elif op == op_SUICIDE: - to = utils.encode_int(stk.pop()) - to = ((b'\x00' * (32 - len(to))) + to)[12:] - xfer = ext.get_balance(msg.to) - ext.set_balance(to, ext.get_balance(to) + xfer) - ext.set_balance(msg.to, 0) - ext.add_suicide(msg.to) + to = validate_and_get_address(stk.pop(), msg) + if to is False: + return vm_exception('OUT OF RANGE') + xfer = utils.big_endian_to_int(ext.get_storage(ETHER, msg.to)) + ext.set_storage(ETHER, to, utils.big_endian_to_int(ext.get_storage(ETHER, to)) + xfer) + ext.set_storage(ETHER, msg.to, 0) + ext.set_storage(msg.to, '', '') # print('suiciding %s %s %d' % (msg.to, to, xfer)) return 1, compustate.gas, [] diff --git a/ethereum/gas_depositor.se.py b/ethereum/gas_depositor.se.py new file mode 100644 index 000000000..8b1378917 --- /dev/null +++ b/ethereum/gas_depositor.se.py @@ -0,0 +1 @@ + diff --git a/ethereum/guardian.py b/ethereum/guardian.py new file mode 100644 index 000000000..e8eb580b8 --- /dev/null +++ b/ethereum/guardian.py @@ -0,0 +1,920 @@ +import time +from abi import ContractTranslator, decode_abi +from utils import address, int256, trie_root, hash32, to_string, \ + sha3, zpad, normalize_address, int_to_addr, big_endian_to_int, \ + encode_int32, safe_ord, encode_int, shardify, DEBUG, rlp_decode, \ + mkid +from rlp.sedes import big_endian_int, Binary, binary, CountableList +from serenity_blocks import tx_state_transition, BLKNUMBER, \ + block_state_transition, Block, apply_msg, EmptyVMExt, State, VMExt, \ + get_code +from serenity_transactions import Transaction +from ecdsa_accounts import sign_block, privtoaddr, sign_bet, mk_transaction +from config import CASPER, BLKTIME, RNGSEEDS, NULL_SENDER, GENESIS_TIME, ENTER_EXIT_DELAY, GASLIMIT, LOG, ETHER, VALIDATOR_ROUNDS, EXECUTION_STATE, TXINDEX, BLKNUMBER, ADDR_BYTES, GAS_DEPOSIT, CONST_CALL_SENDER +from mandatory_account_code import mandatory_account_evm +from default_betting_strategy import bet_at_height +from db import OverlayDB +import fastvm as vm +import serpent +import rlp +import sys +import random +import math +import copy + +MAX_RECALC = 9 +MAX_LONG_RECALC = 14 + +NM_LIST = 0 +NM_BLOCK = 1 +NM_BET = 2 +NM_BET_REQUEST = 3 +NM_TRANSACTION = 4 +NM_GETBLOCK = 5 +NM_GETBLOCKS = 6 +NM_BLOCKS = 7 + +# Network message object +class NetworkMessage(rlp.Serializable): + fields = [ + ('typ', big_endian_int), + ('args', CountableList(binary)) + ] + + def __init__(self, typ, args): + self.typ = typ + self.args = args + +# Call a method of a function with no effect +def call_method(state, addr, ct, fun, args, gas=1000000): + data = ct.encode(fun, args) + message_data = vm.CallData([safe_ord(x) for x in data], 0, len(data)) + message = vm.Message(CONST_CALL_SENDER, addr, 0, gas, message_data) + result, gas_remained, data = apply_msg(VMExt(state.clone()), message, get_code(state, addr)) + output = ''.join(map(chr, data)) + return ct.decode(fun, output)[0] + +# Convert probability from a number to one-byte encoded form +# using scientific notation on odds with a 3-bit mantissa; +# 0 = 65536:1 odds = 0.0015%, 128 = 1:1 odds = 50%, 255 = +# 1:61440 = 99.9984% +def encode_prob(p): + lastv = '\x00' + while 1: + q = p / (1.0 - p) + exp = 0 + while q < 1: + q *= 2.0 + exp -= 1 + while q >= 2: + q /= 2.0 + exp += 1 + mantissa = int(q * 4 - 3.9999) + v = chr(max(0, min(255, exp * 4 + 128 + mantissa))) + return v + + +# Convert probability from 
one-byte encoded form to a number +def decode_prob(c): + c = ord(c) + q = 2.0**((c - 128) // 4) * (1 + 0.25 * ((c - 128) % 4)) + return q / (1.0 + q) + +FINALITY_LOW, FINALITY_HIGH = decode_prob('\x00'), decode_prob('\xff') + +# Be VERY careful about updating the above algorithms; if the assert below +# fails (ie. encode and decode are not inverses) then bet serialization will +# break and so casper will break +assert map(encode_prob, map(decode_prob, map(chr, range(256)))) == map(chr, range(256)), map(encode_prob, map(decode_prob, map(chr, range(256)))) + + +invhash = {} + + +# An object that stores a bet made by a guardian +class Bet(): + def __init__(self, index, max_height, probs, blockhashes, stateroots, stateroot_probs, prevhash, seq, sig): + self.index = index + self.max_height = max_height + self.probs = probs + self.blockhashes = blockhashes + self.stateroots = stateroots + self.stateroot_probs = stateroot_probs + self.prevhash = prevhash + self.seq = seq + self.sig = sig + self._hash = None + + # Serializes the bet into the function message which can be directly submitted + # to the casper contract + def serialize(self): + o = casper_ct.encode('submitBet', + [self.index, self.max_height, ''.join(map(encode_prob, self.probs)), + self.blockhashes, self.stateroots, ''.join(map(encode_prob, self.stateroot_probs)), + self.prevhash, self.seq, self.sig] + ) + self._hash = sha3(o) + return o + + # Inverse of serialization + @classmethod + def deserialize(self, betdata): + params = decode_abi(casper_ct.function_data['submitBet']['encode_types'], + betdata[4:]) + o = Bet(params[0], params[1], map(decode_prob, params[2]), params[3], + params[4], map(decode_prob, params[5]), params[6], params[7], params[8]) + o._hash = sha3(betdata) + return o + + # Warning: edit bets very carefully! Make sure hash is always correct + @property + def hash(self, recompute=False): + if not self._hash or recompute: + self._hash = sha3(self.serialize()) + return self._hash + + +# An object that stores the "current opinion" of a guardian, as computed +# from their chain of bets +class Opinion(): + def __init__(self, validation_code, index, prevhash, seq, induction_height): + self.validation_code = validation_code + self.index = index + self.blockhashes = [] + self.stateroots = [] + self.probs = [] + self.stateroot_probs = [] + self.prevhash = prevhash + self.seq = seq + self.induction_height = induction_height + self.withdrawal_height = 2**100 + self.withdrawn = False + + def process_bet(self, bet): + # TODO: check crypto + if bet.seq != self.seq: + sys.stderr.write('Bet sequence number does not match expectation: actual %d desired %d\n' % (bet.seq, self.seq)) + return False + if bet.prevhash != self.prevhash: + sys.stderr.write('Bet hash does not match prevhash: actual %s desired %s. Seq: %d \n' % + (bet.prevhash.encode('hex'), self.prevhash.encode('hex'), bet.seq)) + if self.withdrawn: + raise Exception("Bet made after withdrawal! 
Slashing condition triggered!") + # Update seq and hash + self.seq = bet.seq + 1 + self.prevhash = bet.hash + # A bet with max height 2**256 - 1 signals withdrawal + if bet.max_height == 2**256 - 1: + self.withdrawn = True + self.withdrawal_height = self.max_height + DEBUG("Guardian leaving!", index=bet.index) + return True + # Extend probs, blockhashes and state roots arrays as needed + while len(self.probs) <= bet.max_height: + self.probs.append(None) + self.blockhashes.append(None) + self.stateroots.append(None) + self.stateroot_probs.append(None) + # Update probabilities, blockhashes and stateroots + for i in range(len(bet.probs)): + self.probs[bet.max_height - i] = bet.probs[i] + for i in range(len(bet.blockhashes)): + self.blockhashes[bet.max_height - i] = bet.blockhashes[i] + for i in range(len(bet.stateroots)): + self.stateroots[bet.max_height - i] = bet.stateroots[i] + for i in range(len(bet.stateroot_probs)): + self.stateroot_probs[bet.max_height - i] = bet.stateroot_probs[i] + return True + + def get_prob(self, h): + return self.probs[h] if h < len(self.probs) else None + + def get_blockhash(self, h): + return self.blockhashes[h] if h < len(self.probs) else None + + def get_stateroot(self, h): + return self.stateroots[h] if h < len(self.probs) else None + + @property + def max_height(self): + return len(self.probs) - 1 + +# Helper method for calling Casper +casper_ct = ContractTranslator(serpent.mk_full_signature('ethereum/casper.se.py')) + +def call_casper(state, fun, args, gas=1000000): + return call_method(state, CASPER, casper_ct, fun, args, gas) + +# Accepts any state less than ENTER_EXIT_DELAY blocks old +def is_block_valid(state, block): + # Determine the desired proposer address and the validation code + guardian_index = get_guardian_index(state, block.number) + guardian_address = call_casper(state, 'getGuardianAddress', [guardian_index]) + guardian_code = call_casper(state, 'getGuardianValidationCode', [guardian_index]) + assert isinstance(guardian_code, (str, bytes)) + # Check block proposer correctness + if block.proposer != normalize_address(guardian_address): + sys.stderr.write('Block proposer check for %d failed: actual %s desired %s\n' % + (block.number, block.proposer.encode('hex'), guardian_address)) + return False + # Check signature correctness + message_data = vm.CallData([safe_ord(x) for x in (sha3(encode_int32(block.number) + block.txroot) + block.sig)], 0, 32 + len(block.sig)) + message = vm.Message(NULL_SENDER, '\x00' * 20, 0, 1000000, message_data) + _, _, signature_check_result = apply_msg(EmptyVMExt, message, guardian_code) + if signature_check_result != [0] * 31 + [1]: + sys.stderr.write('Block signature check failed. 
Actual result: %s\n' % str(signature_check_result)) + return False + return True + +# Helper method for getting the guardian index for a particular block number +gvi_cache = {} + +def get_guardian_index(state, blknumber): + if blknumber not in gvi_cache: + preseed = state.get_storage(RNGSEEDS, blknumber - ENTER_EXIT_DELAY if blknumber >= ENTER_EXIT_DELAY else 2**256 - 1) + gvi_cache[blknumber] = call_casper(state, 'sampleGuardian', [preseed, blknumber], gas=3000000) + return gvi_cache[blknumber] + +# The default betting strategy; initialize with the genesis block and a privkey +class defaultBetStrategy(): + def __init__(self, genesis_state, key, clockwrong=False, bravery=0.92, + crazy_bet=False, double_block_suicide=2**200, + double_bet_suicide=2**200, min_gas_price=10**9): + DEBUG("Initializing betting strategy") + # An ID for purposes of the network simulator + self.id = mkid() + # Guardian's private key + self.key = key + # Guardian's address on the network + self.addr = privtoaddr(key) + # The bet strategy's database + self.db = genesis_state.db + # This counter is incremented every time a guardian joins; + # it allows us to re-process the guardian set and refresh + # the guardians that we have + self.guardian_signups = call_casper(genesis_state, 'getGuardianSignups', []) + # A dict of opinion objects containing the current opinions of all + # guardians + self.opinions = {} + # A dict of lists of bets received from guardians + self.bets = {} + # The probabilities that you are betting + self.probs = [] + # Your finalized block hashes + self.finalized_hashes = [] + # Your state roots + self.stateroots = [] + # Which counters have been processed + self.counters = {} + # A dict containing the highest-sequence-number bet processed for + # each guardian + self.highest_bet_processed = {} + # The time when you received an object + self.time_received = {} + # Hash lookup map; used mainly to check whether or not something has + # already been received and processed + self.objects = {} + # Blocks selected for each height + self.blocks = [] + # When you last explicitly requested to ask for a block; stored to + # prevent excessively frequent lookups + self.last_asked_for_block = {} + # When you last explicitly requested to ask for bets from a given + # guardian; stored to prevent excessively frequent lookups + self.last_asked_for_bets = {} + # Pool of transactions worth including + self.txpool = {} + # Map of hash -> (tx, [(blknum, index), ...]) for transactions that + # are in blocks that are not fully confirmed + self.unconfirmed_txindex = {} + # Map of hash -> (tx, [(blknum, index), ...]) for transactions that + # are in blocks that are fully confirmed + self.finalized_txindex = {} + # Counter for number of times a transaction entered an exceptional + # condition + self.tx_exceptions = {} + # Last time you made a bet; stored to prevent excessively + # frequent betting + self.last_bet_made = 0 + # Last time sent a getblocks message; stored to prevent excessively + # frequent getting + self.last_time_sent_getblocks = 0 + # Your guardian index + self.index = -1 + self.former_index = None + # Store the genesis block state here + self.genesis_state_root = genesis_state.root + # Store the timestamp of the genesis block + self.genesis_time = big_endian_to_int(genesis_state.get_storage(GENESIS_TIME, '\x00' * 32)) + # Last block that you produced + self.last_block_produced = -1 + # Next height at which you are eligible to produce (could be None) + self.next_block_to_produce = -1 + # Deliberately sabotage 
my clock? (for testing purposes) + self.clockwrong = clockwrong + # How quickly to converge toward finalization? + self.bravery = bravery + assert 0 < self.bravery <= 1 + # Am I making crazy bets? (for testing purposes) + self.crazy_bet = crazy_bet + # What block number to create two blocks at, destroying my guardian + # slot (for testing purposes; for non-byzantine nodes set to some really + # high number) + self.double_block_suicide = double_block_suicide + # What seq to create two bets at (also destructively, for testing purposes) + self.double_bet_suicide = double_bet_suicide + # Next submission delay (should be 0 on livenet; nonzero for testing purposes) + self.next_submission_delay = random.randrange(-BLKTIME * 2, BLKTIME * 6) if self.clockwrong else 0 + # List of proposers for blocks; calculated into the future just-in-time + self.proposers = [] + # Prevhash (for betting) + self.prevhash = '\x00' * 32 + # Sequence number (for betting) + self.seq = 0 + # Transactions I want to track + self.tracked_tx_hashes = [] + # If we only partially calculate state roots, store the index at which + # to start calculating next time you make a bet + self.calc_state_roots_from = 0 + # Minimum gas price that I accept + self.min_gas_price = min_gas_price + # Create my guardian set + self.update_guardian_set(genesis_state) + DEBUG('Found %d guardians in genesis' % len(self.opinions)) + # The height at which this guardian is added + self.induction_height = call_casper(genesis_state, 'getGuardianInductionHeight', [self.index]) if self.index >= 0 else 2**100 + DEBUG("Initialized guardian", + address=self.addr.encode('hex'), + index=self.index, + induction_height=self.induction_height) + self.withdrawn = False + # Max height which is finalized from your point of view + self.max_finalized_height = -1 + # Recently discovered blocks + self.recently_discovered_blocks = [] + # When will I suicide? + if self.double_block_suicide < 2**40: + if self.double_block_suicide < self.next_block_to_produce: + DEBUG("Suiciding at block %d" % self.next_block_to_produce) + else: + DEBUG("Suiciding at some block after %d" % self.double_block_suicide) + DEBUG('List of', proposers=self.proposers) + # Am I byzantine? 
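# [Editor's sketch, not part of the patch] The fault-injection knobs above are
# all constructor arguments, so a simulation can mix honest and byzantine
# guardians simply by how it instantiates them; the key names below are
# placeholders for illustration only.
#
#   honest      = defaultBetStrategy(genesis_state, honest_key)
#   equivocator = defaultBetStrategy(genesis_state, evil_key,
#                                    double_block_suicide=5,   # two blocks at height 5
#                                    double_bet_suicide=10)    # two bets at seq 10
#
# The flag computed below simply records whether any such knob is set to a
# realistically reachable value.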
+ self.byzantine = self.crazy_bet or self.double_block_suicide < 2**80 or self.double_bet_suicide < 2**80 + + # Compute as many future block proposers as possible + def add_proposers(self): + h = len(self.finalized_hashes) - 1 + while h >= 0 and self.stateroots[h] in (None, '\x00' * 32): + h -= 1 + state = State(self.stateroots[h] if h >= 0 else self.genesis_state_root, self.db) + maxh = h + ENTER_EXIT_DELAY - 1 + for h in range(len(self.proposers), maxh): + self.proposers.append(get_guardian_index(state, h)) + if self.proposers[-1] == self.index: + self.next_block_to_produce = h + return + self.next_block_to_produce = None + + def receive_block(self, block): + # If you already processed the block, return + if block.hash in self.objects: + return + DEBUG('Received block', + number=block.number, + hash=block.hash.encode('hex')[:16], + recipient=self.index) + # Update the lengths of our main lists to make sure they can store + # the data we will be calculating + while len(self.blocks) <= block.number: + self.blocks.append(None) + self.stateroots.append(None) + self.finalized_hashes.append(None) + self.probs.append(0.5) + # If we are not sufficiently synced, try to sync previous blocks first + if block.number >= self.calc_state_roots_from + ENTER_EXIT_DELAY - 1: + sys.stderr.write('Not sufficiently synced to receive this block (%d)\n' % block.number) + if self.last_time_sent_getblocks < self.now - 5: + DEBUG('asking for blocks', index=self.index) + self.network.broadcast(self, rlp.encode(NetworkMessage(NM_GETBLOCKS, [encode_int(self.max_finalized_height+1)]))) + self.last_time_sent_getblocks = self.now + return + # If the block is invalid, return + check_state = self.get_state_at_height(block.number - ENTER_EXIT_DELAY + 1) + if not is_block_valid(check_state, block): + sys.stderr.write("ERR: Received invalid block: %d %s\n" % (block.number, block.hash.encode('hex')[:16])) + return + check_state2 = self.get_state_at_height(min(self.max_finalized_height, self.calc_state_roots_from - 1)) + # Try to update the set of guardians + vs = call_casper(check_state2, 'getGuardianSignups', []) + if vs > self.guardian_signups: + DEBUG('updating guardian signups', shouldbe=vs, lastcached=self.guardian_signups) + self.guardian_signups = vs + self.update_guardian_set(check_state2) + # Add the block to our list of blocks + if not self.blocks[block.number]: + self.blocks[block.number] = block + else: + DEBUG('Caught a double block!') + bytes1 = rlp.encode(self.blocks[block.number].header) + bytes2 = rlp.encode(block.header) + new_tx = Transaction(CASPER, 500000 + 1000 * len(bytes1) + 1000 * len(bytes2), + data=casper_ct.encode('slashBlocks', [bytes1, bytes2])) + self.add_transaction(new_tx, track=True) + # Store the block as having been received + self.objects[block.hash] = block + self.time_received[block.hash] = self.now + self.recently_discovered_blocks.append(block.number) + time_delay = self.now - (self.genesis_time + BLKTIME * block.number) + DEBUG("Received good block", + height=block.number, + hash=block.hash.encode('hex')[:16], + time_delay=time_delay) + # Add transactions to the unconfirmed transaction index + for i, g in enumerate(block.transaction_groups): + for j, tx in enumerate(g): + if tx.hash not in self.finalized_txindex: + if tx.hash not in self.unconfirmed_txindex: + self.unconfirmed_txindex[tx.hash] = (tx, []) + self.unconfirmed_txindex[tx.hash][1].append((block.number, block.hash, i, j)) + # Re-broadcast the block + self.network.broadcast(self, rlp.encode(NetworkMessage(NM_BLOCK, 
[rlp.encode(block)]))) + # Bet + if (self.index % VALIDATOR_ROUNDS) == (block.number % VALIDATOR_ROUNDS): + DEBUG("betting", index=self.index, height=block.number) + self.mkbet() + + # Try to update the set of guardians + def update_guardian_set(self, check_state): + for i in range(call_casper(check_state, 'getNextGuardianIndex', [])): + ctr = call_casper(check_state, 'getGuardianCounter', [i]) + # Ooh, we found a new guardian + if ctr not in self.counters: + self.counters[ctr] = 1 + ih = call_casper(check_state, 'getGuardianInductionHeight', [i]) + valaddr = call_casper(check_state, 'getGuardianAddress', [i]) + valcode = call_casper(check_state, 'getGuardianValidationCode', [i]) + self.opinions[i] = Opinion(valcode, i, '\x00' * 32, 0, ih) + self.opinions[i].deposit_size = call_casper(check_state, 'getGuardianDeposit', [i]) + DEBUG('Guardian inducted', index=i, address=valaddr, my_index=self.index) + self.bets[i] = {} + self.highest_bet_processed[i] = -1 + # Is the new guardian me? + if valaddr == self.addr.encode('hex'): + self.index = i + self.add_proposers() + self.induction_height = ih + DEBUG('I have been inducted!', index=self.index) + DEBUG('Tracking %d opinions' % len(self.opinions)) + + def receive_bet(self, bet): + # Do not process the bet if (i) we already processed it, or (ii) it + # comes from a guardian not in the current guardian set + if bet.hash in self.objects or bet.index not in self.opinions: + return + # Record when the bet came and that it came + self.objects[bet.hash] = bet + self.time_received[bet.hash] = self.now + # Re-broadcast it + self.network.broadcast(self, rlp.encode(NetworkMessage(NM_BET, [bet.serialize()]))) + # Do we have a duplicate? If so, slash it + if bet.seq in self.bets[bet.index]: + DEBUG('Caught a double bet!') + bytes1 = self.bets[bet.index][bet.seq].serialize() + bytes2 = bet.serialize() + new_tx = Transaction(CASPER, 500000 + 1000 * len(bytes1) + 1000 * len(bytes2), + data=casper_ct.encode('slashBets', [bytes1, bytes2])) + self.add_transaction(new_tx, track=True) + # Record it + self.bets[bet.index][bet.seq] = bet + # If we have an unbroken chain of bets from 0 to N, and last round + # we had an unbroken chain only from 0 to M, then process bets + # M+1...N. For example, if we had bets 0, 1, 2, 4, 5, 7, now we + # receive 3, then we assume bets 0, 1, 2 were already processed + # but now process 3, 4, 5 (but NOT 7) + DEBUG('receiving a bet', seq=bet.seq, index=bet.index, recipient=self.index) + proc = 0 + while (self.highest_bet_processed[bet.index] + 1) in self.bets[bet.index]: + assert self.opinions[bet.index].process_bet(self.bets[bet.index][self.highest_bet_processed[bet.index] + 1]) + self.highest_bet_processed[bet.index] += 1 + proc += 1 + # Sanity check + for i in range(0, self.highest_bet_processed[bet.index] + 1): + assert i in self.bets[bet.index] + assert self.opinions[bet.index].seq == self.highest_bet_processed[bet.index] + 1 + # If we did not process any bets after receiving a bet, that + # implies that we are missing some bets. Ask for them. 
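# [Editor's illustrative sketch, not part of the patch] The contiguous-prefix
# rule described above, reduced to plain data: `bets` maps seq -> bet and
# `highest` is the last contiguously processed seq. With bets {0,1,2,3,4,5,7}
# and highest == 2 (i.e. seq 3 just arrived), it advances to 5 and leaves 7
# pending, mirroring the while-loop a few lines above. When no progress is
# made at all (proc stays 0), the guardian asks the network for the missing
# range, which is what the code directly below does.
def _process_contiguous(bets, highest):
    processed = []
    while highest + 1 in bets:
        highest += 1
        processed.append(highest)
    return highest, processed

# _process_contiguous({0: 'a', 1: 'b', 2: 'c', 3: 'd', 4: 'e', 5: 'f', 7: 'h'}, 2)
# == (5, [3, 4, 5])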
+ if not proc and self.last_asked_for_bets.get(bet.index, 0) < self.now + 10: + self.network.send_to_one(self, rlp.encode(NetworkMessage(NM_BET_REQUEST, map(encode_int, [bet.index, self.highest_bet_processed[bet.index] + 1])))) + self.last_asked_for_bets[bet.index] = self.now + + # Make a bet that signifies that we do not want to make any more bets + def withdraw(self): + o = sign_bet(Bet(self.index, 2**256 - 1, [], [], [], [], self.prevhash, self.seq, ''), self.key) + payload = rlp.encode(NetworkMessage(NM_BET, [o.serialize()])) + self.prevhash = o.hash + self.seq += 1 + self.network.broadcast(self, payload) + self.receive_bet(o) + self.former_index = self.index + self.index = -1 + self.withdrawn = True + + # Take one's ether out + def finalizeWithdrawal(self): + txdata = casper_ct.encode('withdraw', [self.former_index]) + tx = mk_transaction(0, 1, 1000000, CASPER, 0, txdata, k, True) + v = tx_state_transition(genesis, tx) + + # Compute as many state roots as possible + def recalc_state_roots(self): + recalc_limit = MAX_RECALC if self.calc_state_roots_from > len(self.blocks) - 20 else MAX_LONG_RECALC + frm = self.calc_state_roots_from + DEBUG('recalculating', limit=recalc_limit, want=len(self.blocks)-frm) + run_state = self.get_state_at_height(frm - 1) + for h in range(frm, len(self.blocks))[:recalc_limit]: + prevblknum = big_endian_to_int(run_state.get_storage(BLKNUMBER, '\x00' * 32)) + assert prevblknum == h + prob = self.probs[h] or 0.5 + block_state_transition(run_state, self.blocks[h] if prob >= 0.5 else None) + self.stateroots[h] = run_state.root + blknum = big_endian_to_int(run_state.get_storage(BLKNUMBER, '\x00' * 32)) + assert blknum == h + 1 + # If there are some state roots that we have not calculated, just leave them empty + for h in range(frm + recalc_limit, len(self.blocks)): + self.stateroots[h] = '\x00' * 32 + # Where to calculate state roots from next time + self.calc_state_roots_from = min(frm + recalc_limit, len(self.blocks)) + # Check integrity + for i in range(self.calc_state_roots_from): + assert self.stateroots[i] not in ('\x00' * 32, None) + + # Get a state object that we run functions or process blocks against + + # finalized version (safer) + def get_finalized_state(self): + h = min(self.calc_state_roots_from - 1, self.max_finalized_height) + return State(self.stateroots[h] if h >= 0 else self.genesis_state_root, self.db) + + # optimistic version (more up-to-date) + def get_optimistic_state(self): + h = self.calc_state_roots_from - 1 + return State(self.stateroots[h] if h >= 0 else self.genesis_state_root, self.db) + + # Get a state object at a given height + def get_state_at_height(self, h): + return State(self.stateroots[h] if h >= 0 else self.genesis_state_root, self.db) + + # Construct a bet + def mkbet(self): + # Bet at most once every two seconds to save on computational costs + if self.now < self.last_bet_made + 2: + return + self.last_bet_made = self.now + # Height at which to start signing + sign_from = max(0, self.max_finalized_height) + # Keep track of the lowest state root that we should change + DEBUG('Making probs', frm=sign_from, to=len(self.blocks) - 1) + # State root probs + srp = [] + srp_accum = FINALITY_HIGH + # Bet on each height independently using our betting strategy + for h in range(sign_from, len(self.blocks)): + # Get the probability that we should bet + prob, new_block_hash, ask = \ + bet_at_height(self.opinions, + h, + [self.blocks[h]] if self.blocks[h] else [], + self.time_received, + self.genesis_time, + self.now) + # Do we need to 
ask for a block from the network? + if ask and (new_block_hash not in self.last_asked_for_block or self.last_asked_for_block[new_block_hash] < self.now + 12): + DEBUG('Suspiciously missing a block, asking for it explicitly.', + number=h, hash=new_block_hash.encode('hex')[:16]) + self.network.broadcast(self, rlp.encode(NetworkMessage(NM_GETBLOCK, [new_block_hash]))) + self.last_asked_for_block[h] = self.now + # Did our preferred block hash change? + if self.blocks[h] and new_block_hash != self.blocks[h].hash: + if new_block_hash not in (None, '\x00' * 32): + DEBUG('Changing block selection', height=h, + pre=self.blocks[h].hash[:8].encode('hex'), + post=new_block_hash[:8].encode('hex')) + assert self.objects[new_block_hash].number == h + self.blocks[h] = self.objects[new_block_hash] + self.recently_discovered_blocks.append(h) + # If the probability of a block flips to the other side of 0.5, + # that means that we should recalculate the state root at least + # from that point (and possibly earlier) + if ((prob - 0.5) * (self.probs[h] - 0.5) <= 0 or (self.probs[h] >= 0.5 and \ + h in self.recently_discovered_blocks)) and h < self.calc_state_roots_from: + DEBUG('Rewinding', num_blocks=self.calc_state_roots_from - h) + self.calc_state_roots_from = h + self.probs[h] = prob + # Compute the state root probabilities + if srp_accum == FINALITY_HIGH and prob >= FINALITY_HIGH: + srp.append(FINALITY_HIGH) + else: + srp_accum *= prob + srp.append(max(srp_accum, FINALITY_LOW)) + # Finalized! + if prob < FINALITY_LOW or prob > FINALITY_HIGH: + DEBUG('Finalizing', height=h, my_index=self.index) + # Set the finalized hash + self.finalized_hashes[h] = self.blocks[h].hash if prob > FINALITY_HIGH else '\x00' * 32 + # Try to increase the max finalized height + while h == self.max_finalized_height + 1: + self.max_finalized_height = h + DEBUG('Increasing max finalized height', new_height=h) + if not h % 10: + for i in self.opinions.keys(): + self.opinions[i].deposit_size = call_casper(self.get_optimistic_state(), 'getGuardianDeposit', [i]) + # Recalculate state roots + rootstart = max(self.calc_state_roots_from, self.induction_height) + self.recalc_state_roots() + # Sanity check + assert len(self.probs) == len(self.blocks) == len(self.stateroots) + # If we are supposed to actually make a bet... (if not, all the code + # above is simply for personal information, ie. 
for a listening node + # to determine its opinion on what the correct chain is) + if self.index >= 0 and len(self.blocks) > self.induction_height and not self.withdrawn and len(self.recently_discovered_blocks): + # Create and sign the bet + blockstart = max(min(self.recently_discovered_blocks), self.induction_height) + probstart = min(max(sign_from, self.induction_height), blockstart, rootstart) + srprobstart = max(sign_from, self.induction_height) - sign_from + assert len(srp[srprobstart:]) <= len(self.probs[probstart:]) + assert srprobstart + sign_from >= probstart + o = sign_bet(Bet(self.index, + len(self.blocks) - 1, + self.probs[probstart:][::-1], + [x.hash if x else '\x00' * 32 for x in self.blocks[blockstart:]][::-1], + self.stateroots[rootstart:][::-1], + [x if (self.stateroots[i] != '\x00' * 32) else FINALITY_LOW for i, x in enumerate(srp)][srprobstart:][::-1], + self.prevhash, + self.seq, ''), + self.key) + # Reset the recently discovered blocks array, so that we do not needlessly resubmit hashes + self.recently_discovered_blocks = [] + # Update my prevhash and seq + self.prevhash = o.hash + self.seq += 1 + # Send the bet over the network + payload = rlp.encode(NetworkMessage(NM_BET, [o.serialize()])) + self.network.broadcast(self, payload) + # Process it myself + self.receive_bet(o) + # Create two bets of the same seq (for testing purposes) + if self.seq > self.double_bet_suicide and len(o.probs): + DEBUG('MOO HA HA DOUBLE BETTING') + o.probs[0] *= 0.9 + o = sign_bet(o, self.key) + payload = rlp.encode(NetworkMessage(NM_BET, [o.serialize()])) + self.network.broadcast(self, payload) + + # Upon receiving any kind of network message + # Arguments: payload, ID of the node sending the message (used to + # direct-send replies back) + def on_receive(self, objdata, sender_id): + obj = rlp_decode(objdata, NetworkMessage) + if obj.typ == NM_BLOCK: + blk = rlp_decode(obj.args[0], Block) + self.receive_block(blk) + elif obj.typ == NM_BET: + bet = Bet.deserialize(obj.args[0]) + self.receive_bet(bet) + elif obj.typ == NM_BET_REQUEST: + index = big_endian_to_int(obj.args[0]) + seq = big_endian_to_int(obj.args[1]) + if index not in self.bets: + return + bets = [self.bets[index][x] for x in range(seq, self.highest_bet_processed[index] + 1)] + if len(bets): + messages = [rlp.encode(NetworkMessage(NM_BET, [bet.serialize()])) for bet in bets] + self.network.direct_send(self, sender_id, rlp.encode(NetworkMessage(NM_LIST, messages))) + elif obj.typ == NM_TRANSACTION: + tx = rlp_decode(obj.args[0], Transaction) + if self.should_i_include_transaction(tx): + self.add_transaction(tx) + elif obj.typ == NM_GETBLOCK: + # Asking for block by number: + if len(obj.args[0]) < 32: + blknum = big_endian_to_int(obj.args[0]) + if blknum < len(self.blocks) and self.blocks[blknum]: + self.network.direct_send(self, sender_id, rlp.encode(NetworkMessage(NM_BLOCK, [rlp.encode(self.blocks[blknum])]))) + # Asking for block by hash + else: + o = self.objects.get(obj.args[0], None) + if isinstance(o, Block): + self.network.direct_send(self, sender_id, rlp.encode(NetworkMessage(NM_BLOCK, [rlp.encode(o)]))) + elif obj.typ == NM_GETBLOCKS: + blknum = big_endian_to_int(obj.args[0]) + messages = [] + for h in range(blknum, len(self.blocks))[:30]: + if self.blocks[h]: + messages.append(rlp.encode(NetworkMessage(NM_BLOCK, [rlp.encode(self.blocks[h])]))) + self.network.direct_send(self, sender_id, rlp.encode(NetworkMessage(NM_LIST, messages))) + if blknum < len(self.blocks) and self.blocks[blknum]: + self.network.direct_send(self, 
sender_id, rlp.encode(NetworkMessage(NM_BLOCK, [rlp.encode(self.blocks[blknum])]))) + elif obj.typ == NM_LIST: + for x in obj.args: + self.on_receive(x, sender_id) + + def should_i_include_transaction(self, tx): + check_state = self.get_optimistic_state() + o = tx_state_transition(check_state, tx, override_gas=250000+tx.intrinsic_gas, breaking=True) + if not o: + DEBUG('No output from running transaction', + hash=tx.hash.encode('hex')[:16]) + return False + output = ''.join(map(chr, o)) + # Make sure that the account code matches + if get_code(check_state, tx.addr).rstrip('\x00') != mandatory_account_evm: + DEBUG('Account EVM mismatch', + hash=tx.hash.encode('hex')[:16], + shouldbe=mandatory_account_evm, + reallyis=get_code(check_state, tx.addr)) + return False + # Make sure that the right gas price is in memory (and implicitly that the tx succeeded) + if len(output) < 32: + DEBUG('Min gas price not found in output, not including transaction', + hash=tx.hash.encode('hex')[:16]) + return False + # Make sure that the gas price is sufficient + if big_endian_to_int(output[:32]) < self.min_gas_price: + DEBUG('Gas price too low', + shouldbe=self.min_gas_price, + reallyis=big_endian_to_int(output[:32]), + hash=tx.hash.encode('hex')[:16]) + return False + DEBUG('Transaction passes, should be included', + hash=tx.hash.encode('hex')[:16]) + return True + + def add_transaction(self, tx, track=False): + if tx.hash not in self.objects or self.time_received.get(tx.hash, 0) < self.now - 15: + DEBUG('Received transaction', hash=tx.hash.encode('hex')[:16]) + self.objects[tx.hash] = tx + self.time_received[tx.hash] = self.now + self.txpool[tx.hash] = tx + if track: + self.tracked_tx_hashes.append(tx.hash) + self.network.broadcast(self, rlp.encode(NetworkMessage(NM_TRANSACTION, [rlp.encode(tx)]))) + + def make_block(self): + # Transaction inclusion algorithm + gas = GASLIMIT + txs = [] + # Try to include transactions in txpool + for h, tx in self.txpool.items(): + # If a transaction is not in the unconfirmed index AND not in the + # finalized index, then add it + if h not in self.unconfirmed_txindex and h not in self.finalized_txindex: + DEBUG('Adding transaction', + hash=tx.hash.encode('hex')[:16], + blknum=self.next_block_to_produce) + if tx.gas > gas: + break + txs.append(tx) + gas -= tx.gas + # Publish most recent bets to the blockchain + h = 0 + while h < len(self.stateroots) and self.stateroots[h] not in (None, '\x00' * 32): + h += 1 + latest_state_root = self.stateroots[h-1] if h else self.genesis_state_root + assert latest_state_root not in ('\x00' * 32, None) + latest_state = State(latest_state_root, self.db) + ops = self.opinions.items() + random.shuffle(ops) + DEBUG('Producing block', + number=self.next_block_to_produce, + known=len(self.blocks), + check_root_height=h-1) + for i, o in ops: + latest_bet = call_casper(latest_state, 'getGuardianSeq', [i]) + bet_height = latest_bet + while bet_height in self.bets[i]: + DEBUG('Inserting bet', seq=latest_bet, index=i) + bet = self.bets[i][bet_height] + new_tx = Transaction(CASPER, 200000 + 6600 * len(bet.probs) + 10000 * len(bet.blockhashes + bet.stateroots), + data=bet.serialize()) + if bet.max_height == 2**256 - 1: + self.tracked_tx_hashes.append(new_tx.hash) + if new_tx.gas > gas: + break + txs.append(new_tx) + gas -= new_tx.gas + bet_height += 1 + if o.seq < latest_bet: + self.network.send_to_one(self, rlp.encode(NetworkMessage(NM_BET_REQUEST, map(encode_int, [i, o.seq + 1])))) + self.last_asked_for_bets[i] = self.now + # Process the unconfirmed 
index for the transaction. Note that a + # transaction could theoretically get included in the chain + # multiple times even within the same block, though if the account + # used to process the transaction is sane the transaction should + # fail all but one time + for h, (tx, positions) in self.unconfirmed_txindex.items(): + i = 0 + while i < len(positions): + # We see this transaction at index `index` of block number `blknum` + blknum, blkhash, groupindex, txindex = positions[i] + if self.stateroots[blknum] in (None, '\x00' * 32): + i += 1 + continue + # Probability of the block being included + p = self.probs[blknum] + # Try running it + if p > 0.95: + grp_shard = self.blocks[blknum].summaries[groupindex].left_bound + logdata = State(self.stateroots[blknum], self.db).get_storage(shardify(LOG, grp_shard), txindex) + logresult = big_endian_to_int(rlp.decode(rlp.descend(logdata, 0))) + # If the transaction passed and the block is finalized... + if p > 0.9999 and logresult == 2: + DEBUG('Transaction finalized', + hash=tx.hash.encode('hex')[:16], + blknum=blknum, + blkhash=blkhash.encode('hex')[:16], + grpindex=groupindex, + txindex=txindex) + # Remove it from the txpool + if h in self.txpool: + del self.txpool[h] + # Add it to the finalized index + if h not in self.finalized_txindex: + self.finalized_txindex[h] = (tx, []) + self.finalized_txindex[h][1].append((blknum, blkhash, groupindex, txindex, rlp.decode(logdata))) + positions.pop(i) + # If the transaction was included but exited with an error (eg. due to a sequence number mismatch) + elif p > 0.95 and logresult == 1: + positions.pop(i) + self.tx_exceptions[h] = self.tx_exceptions.get(h, 0) + 1 + DEBUG('Transaction inclusion finalized but transaction failed for the %dth time' % self.tx_exceptions[h], + hash=tx.hash.encode('hex')[:16]) + # 10 strikes and we're out + if self.tx_exceptions[h] >= 10: + if h in self.txpool: + del self.txpool[h] + # If the transaction failed (eg. 
due to OOG from block gaslimit), + # remove it from the unconfirmed index, but not the expool, so + # that we can try to add it again + elif logresult == 0: + DEBUG('Transaction finalization attempt failed', hash=tx.hash.encode('hex')[:16]) + positions.pop(i) + else: + i += 1 + # If the block that the transaction was in didn't pass through, + # remove it from the unconfirmed index, but not the expool, so + # that we can try to add it again + elif p < 0.05: + DEBUG('Transaction finalization attempt failed', hash=tx.hash.encode('hex')[:16]) + positions.pop(i) + # Otherwise keep the transaction in the unconfirmed index + else: + i += 1 + if len(positions) == 0: + del self.unconfirmed_txindex[h] + # Produce the block + b = sign_block(Block(transactions=txs, number=self.next_block_to_produce, proposer=self.addr), self.key) + # Broadcast it + self.network.broadcast(self, rlp.encode(NetworkMessage(NM_BLOCK, [rlp.encode(b)]))) + self.receive_block(b) + # If byzantine, produce two blocks + if b.number >= self.double_block_suicide: + DEBUG('## Being evil and making two blocks!!\n\n') + new_tx = mk_transaction(1, 1, 1000000, '\x33' * ADDR_BYTES, 1, '', self.key, True) + txs2 = [tx for tx in txs] + [new_tx] + b2 = sign_block(Block(transactions=txs2, number=self.next_block_to_produce, proposer=self.addr), self.key) + self.network.broadcast(self, rlp.encode(NetworkMessage(NM_BLOCK, [rlp.encode(b2)]))) + # Extend the list of block proposers + self.last_block_produced = self.next_block_to_produce + self.add_proposers() + # Log it + time_delay = self.now - (self.genesis_time + BLKTIME * b.number) + DEBUG('Making block', my_index=self.index, number=b.number, + hash=b.hash.encode('hex')[:16], time_delay=time_delay) + return b + + # Run every tick + def tick(self): + # DEBUG('bet tick called', at=self.now, id=self.id, index=self.index) + mytime = self.now + # If (i) we should be making blocks, and (ii) the time has come to + # produce a block, then produce a block + if self.index >= 0 and self.next_block_to_produce is not None: + target_time = self.genesis_time + BLKTIME * self.next_block_to_produce + # DEBUG('maybe I should make a block', at=self.now, target_time=target_time ) + if mytime >= target_time + self.next_submission_delay: + DEBUG('making a block') + self.recalc_state_roots() + self.make_block() + self.next_submission_delay = random.randrange(-BLKTIME * 2, BLKTIME * 6) if self.clockwrong else 0 + elif self.next_block_to_produce is None: + # DEBUG('add_prop', at=self.now, id=self.id) + self.add_proposers() + if self.last_bet_made < self.now - BLKTIME * VALIDATOR_ROUNDS * 1.5: + # DEBUG('mk bet', at=self.now, id=self.id) + self.mkbet() + + + @property + def now(self): + return self.network.now diff --git a/ethereum/mandatory_account_code.py b/ethereum/mandatory_account_code.py new file mode 100644 index 000000000..3a6be948f --- /dev/null +++ b/ethereum/mandatory_account_code.py @@ -0,0 +1,41 @@ +from utils import big_endian_to_int +from config import GAS_DEPOSIT, NULL_SENDER +import serpent +from abi import ContractTranslator + +# The code that every account must have in order for miners to accept +# transactions going to it +mandatory_account_code = """ +# Copy the calldata to bytes 32...x+32 +~calldatacopy(64, 0, ~calldatasize()) +# If we are getting a message NOT from the origin object, then just +# pass it along to the runner code +if msg.sender != %d: + ~mstore(0, 0) + ~delegatecall(msg.gas - 50000, self.storage[1], 64, ~calldatasize(), 64 + ~calldatasize(), 10000) + ~return(64 + 
~calldatasize(), ~msize() - 64 - ~calldatasize()) +# Run the sig checker code; self.storage[0] = sig checker +# sig checker should return gas price +if not ~delegatecall(250000, self.storage[0], 64, ~calldatasize(), 32, 32): + ~invalid() +# Compute the gas payment deposit +~mstore(0, ~mload(32) * ~txexecgas()) +# Send the gas payment into the deposit contract +if self.balance < ~mload(0): + ~invalid() +~call(2000, %d, ~mload(0), 0, 0, 0, 32) +# Do the main call; self.storage[1] = main running code +~breakpoint() +x = ~delegatecall(msg.gas - 50000, self.storage[1], 64, ~calldatasize(), 64, 10000) +# Call the deposit contract to refund +~mstore(0, ~mload(32) * msg.gas) +~call(2000, %d, ~mload(0), 0, 32, 0, 32) +~return(64, ~msize() - 64) +""" % (big_endian_to_int(NULL_SENDER), big_endian_to_int(GAS_DEPOSIT), big_endian_to_int(GAS_DEPOSIT)) + +mandatory_account_evm = serpent.compile(mandatory_account_code) +# Strip off the initiation wrapper +mandatory_account_evm = mandatory_account_evm[mandatory_account_evm.find('\x56')+1:] +mandatory_account_evm = mandatory_account_evm[:mandatory_account_evm[:-1].rfind('\xf3')+1] + +mandatory_account_ct = ContractTranslator(serpent.mk_full_signature(mandatory_account_code)) diff --git a/ethereum/network.py b/ethereum/network.py new file mode 100644 index 000000000..4ba502624 --- /dev/null +++ b/ethereum/network.py @@ -0,0 +1,227 @@ +import random +import sys +import time +from utils import DEBUG + +# A network simulator + + +class NetworkSimulatorBase(): + + start_time = time.time() + + def __init__(self, latency=50, agents=[], reliability=0.9, broadcast_success_rate=1.0): + self.agents = agents + self.latency_distribution_sample = transform( + normal_distribution(latency, (latency * 2) // 5), lambda x: max(x, 0)) + self.time = 0 + self.objqueue = {} + self.peers = {} + self.reliability = reliability + self.broadcast_success_rate = broadcast_success_rate + self.time_sleeping = 0 + self.time_running = 0 + self.sleepdebt = 0 + + def generate_peers(self, num_peers=5): + self.peers = {} + for a in self.agents: + p = [] + while len(p) <= num_peers // 2: + p.append(random.choice(self.agents)) + if p[-1] == a: + p.pop() + self.peers[a.id] = list(set(self.peers.get(a.id, []) + p)) + for peer in p: + self.peers[peer.id] = list(set(self.peers.get(peer.id, []) + [a])) + + def tick(self): + if self.time in self.objqueue: + for sender_id, recipient, obj in self.objqueue[self.time]: + if random.random() < self.reliability: + recipient.on_receive(obj, sender_id) + del self.objqueue[self.time] + for a in self.agents: + a.tick() + self.time += 1 + + def run(self, seconds, sleep=0): + t = 0 + while 1: + a = time.time() + self.tick() + timedelta = time.time() - a + if sleep > timedelta: + tsleep = sleep - timedelta + sleepdebt_repayment = min(self.sleepdebt, tsleep * 0.5) + time.sleep(tsleep - sleepdebt_repayment) + self.time_sleeping += tsleep - sleepdebt_repayment + self.sleepdebt -= sleepdebt_repayment + else: + self.sleepdebt += timedelta - sleep + self.time_running += timedelta + print 'Tick finished in: %.2f. 
Total sleep %.2f, running %.2f' % (timedelta, self.time_sleeping, self.time_running) + if self.sleepdebt > 0: + print 'Sleep debt: %.2f' % self.sleepdebt + t += time.time() - a + if t >= seconds: + return + + def broadcast(self, sender, obj): + assert isinstance(obj, (str, bytes)) + if random.random() < self.broadcast_success_rate: + for p in self.peers[sender.id]: + recv_time = self.time + self.latency_distribution_sample() + if recv_time not in self.objqueue: + self.objqueue[recv_time] = [] + self.objqueue[recv_time].append((sender.id, p, obj)) + + def send_to_one(self, sender, obj): + assert isinstance(obj, (str, bytes)) + if random.random() < self.broadcast_success_rate: + p = random.choice(self.peers[sender.id]) + recv_time = self.time + self.latency_distribution_sample() + if recv_time not in self.objqueue: + self.objqueue[recv_time] = [] + self.objqueue[recv_time].append((sender.id, p, obj)) + + def direct_send(self, sender, to_id, obj): + if random.random() < self.broadcast_success_rate * self.reliability: + for a in self.agents: + if a.id == to_id: + recv_time = self.time + self.latency_distribution_sample() + if recv_time not in self.objqueue: + self.objqueue[recv_time] = [] + self.objqueue[recv_time].append((sender.id, a, obj)) + + def knock_offline_random(self, n): + ko = {} + while len(ko) < n: + c = random.choice(self.agents) + ko[c.id] = c + for c in ko.values(): + self.peers[c.id] = [] + for a in self.agents: + self.peers[a.id] = [x for x in self.peers[a.id] if x.id not in ko] + + def partition(self): + a = {} + while len(a) < len(self.agents) / 2: + c = random.choice(self.agents) + a[c.id] = c + for c in self.agents: + if c.id in a: + self.peers[c.id] = [x for x in self.peers[c.id] if x.id in a] + else: + self.peers[c.id] = [x for x in self.peers[c.id] if x.id not in a] + + @property + def now(self): + return time.time() + + +def normal_distribution(mean, standev): + def f(): + return int(random.normalvariate(mean, standev)) + + return f + + +def exponential_distribution(mean): + def f(): + total = 0 + while 1: + total += 1 + if not random.randrange(32): + break + return int(total * 0.03125 * mean) + + return f + + +def convolve(*args): + def f(): + total = 0 + for arg in args: + total += arg() + return total + + return f + + +def transform(dist, xformer): + def f(): + return xformer(dist()) + + return f + + +class SimPyNetworkSimulator(NetworkSimulatorBase): + + start_time = 0 + + def __init__(self, latency=50, agents=[], reliability=0.9, broadcast_success_rate=1.0): + import simpy + NetworkSimulatorBase.__init__(self, latency, agents, reliability, broadcast_success_rate) + self.simenv = simpy.Environment() + + @property + def now(self): + return self.simenv.now + + def tick_loop(self, agent, tick_delay): + ASYNC_CLOCKS = True + if ASYNC_CLOCKS: + deviation = id(self) % 1000 # ms + yield self.simenv.timeout(deviation / 1000.) 
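# [Editor's note] The one-off timeout above delays an agent's first tick by
# (id(self) % 1000) milliseconds of simulated time. Note that `self` here is
# the simulator rather than the agent, so as written every agent registered
# with the same simulator receives the same initial offset; a per-agent
# stagger would presumably key off the agent (e.g. id(agent) % 1000) instead.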
+ + while True: + yield self.simenv.timeout(tick_delay) + # DEBUG('ticking agent', at=self.now, id=agent.id) + agent.tick() + + def run(self, seconds, sleep=0): + self.simenv._queue = [] + assert len(self.agents) < 20 + for a in self.agents: + self.simenv.process(self.tick_loop(a, sleep)) + self.simenv.run(until=self.now + seconds) + + def moo(self): + print 7 + + def receive_later(self, sender_id, recipient, obj): + print 4 + delay = self.latency_distribution_sample() + print 5, delay + yield self.simenv.timeout(delay) + # raise Exception("cow") + # DEBUG('receiving message', at=self.now, id=agent.id) + # recipient.on_receive(obj, sender_id) + + def broadcast(self, sender, obj): + assert isinstance(obj, (str, bytes)) + print 1 + if random.random() < self.broadcast_success_rate: + print 2 + for p in self.peers[sender.id]: + print 3 + self.moo() + self.receive_later(sender.id, p, obj) + print 3.1 + + def send_to_one(self, sender, obj): + assert isinstance(obj, (str, bytes)) + if random.random() < self.broadcast_success_rate: + p = random.choice(self.peers[sender.id]) + self.receive_later(sender.id, p, obj) + + def direct_send(self, sender, to_id, obj): + if random.random() < self.broadcast_success_rate * self.reliability: + for p in self.agents: + if p.id == to_id: + self.receive_later(sender.id, p, obj) + + +NetworkSimulator = NetworkSimulatorBase +#NetworkSimulator = SimPyNetworkSimulator diff --git a/ethereum/opcodes.py b/ethereum/opcodes.py index 6e4dd2810..8f6b50ebe 100644 --- a/ethereum/opcodes.py +++ b/ethereum/opcodes.py @@ -38,6 +38,7 @@ 0x3a: ['GASPRICE', 0, 1, 2], 0x3b: ['EXTCODESIZE', 1, 1, 20], 0x3c: ['EXTCODECOPY', 4, 0, 20], + 0x3d: ['MCOPY', 3, 0, 3], 0x40: ['BLOCKHASH', 1, 1, 20], 0x41: ['COINBASE', 0, 1, 2], 0x42: ['TIMESTAMP', 0, 1, 2], @@ -56,16 +57,29 @@ 0x59: ['MSIZE', 0, 1, 2], 0x5a: ['GAS', 0, 1, 2], 0x5b: ['JUMPDEST', 0, 0, 1], - 0xa0: ['LOG0', 2, 0, 375], - 0xa1: ['LOG1', 3, 0, 750], - 0xa2: ['LOG2', 4, 0, 1125], - 0xa3: ['LOG3', 5, 0, 1500], - 0xa4: ['LOG4', 6, 0, 1875], + 0x5c: ['SLOADEXT', 2, 1, 50], + 0x5d: ['SSTOREEXT', 3, 0, 0], + 0x5e: ['SLOADEXTBYTES', 3, 0, 50], + 0x5f: ['SSTOREEXTBYTES', 2, 0, 2500], + 0xa0: ['LOG0', 2, 0, 0], + 0xa1: ['LOG1', 3, 0, 0], + 0xa2: ['LOG2', 4, 0, 0], + 0xa3: ['LOG3', 5, 0, 0], + 0xa4: ['LOG4', 6, 0, 0], 0xf0: ['CREATE', 3, 1, 32000], 0xf1: ['CALL', 7, 1, 40], 0xf2: ['CALLCODE', 7, 1, 40], 0xf3: ['RETURN', 2, 0, 0], - 0xf4: ['DELEGATECALL', 2, 0, 0], + 0xf4: ['DELEGATECALL', 6, 1, 40], + 0xf5: ['BREAKPOINT', 0, 0, 1], + 0xf6: ['RNGSEED', 1, 1, 50], + 0xf7: ['SSIZEEXT', 2, 1, 50], + 0xf8: ['SLOADBYTES', 3, 0, 50], + 0xf9: ['SSTOREBYTES', 2, 0, 2500], + 0xfa: ['SSIZE', 1, 1, 50], + 0xfb: ['STATEROOT', 1, 1, 50], + 0xfc: ['TXGAS', 0, 1, 50], + 0xfd: ['CALLSTATIC', 7, 1, 50], 0xff: ['SUICIDE', 1, 0, 0], } @@ -94,6 +108,10 @@ GCONTRACTBYTE = 200 # one byte of code in contract creation GCALLVALUETRANSFER = 9000 # non-zero-valued call GLOGBYTE = 8 # cost of a byte of logdata +GLOGBASE = 375 # basic log +GLOGTOPIC = 375 # log topic +GCREATE = 32000 # contract creation base cost +GGASDEPOSIT = 2000 # cost of using gas deposit process GTXCOST = 21000 # TX BASE GAS COST GTXDATAZERO = 4 # TX DATA ZERO BYTE GAS COST @@ -106,6 +124,11 @@ GIDENTITYBASE = 15 # Base cost of indentity GIDENTITYWORD = 3 # Cost of identity per word GECRECOVER = 3000 # Cost of ecrecover op +GECADD = 200 # Cost of ecadd op +GECMUL = 1000 # Cost of ecmul op +GMODEXP = 200 # Cost of modexp op +GRLPBASE = 30 # Base cost of RLP decoding +GRLPWORD = 6 # Cost of RLP 
decoding per word GSTIPEND = 2300 diff --git a/ethereum/processblock.py b/ethereum/processblock.py deleted file mode 100644 index 01748abd2..000000000 --- a/ethereum/processblock.py +++ /dev/null @@ -1,311 +0,0 @@ -import sys -import rlp -from rlp.sedes import CountableList, binary -from rlp.utils import decode_hex, encode_hex, ascii_chr, str_to_bytes -from ethereum import opcodes -from ethereum import utils -from ethereum import specials -from ethereum import bloom -from ethereum import vm as vm -from ethereum.exceptions import * -from ethereum.utils import safe_ord, normalize_address - -sys.setrecursionlimit(100000) - -from ethereum.slogging import get_logger -log_tx = get_logger('eth.pb.tx') -log_msg = get_logger('eth.pb.msg') -log_state = get_logger('eth.pb.msg.state') - -TT255 = 2 ** 255 -TT256 = 2 ** 256 -TT256M1 = 2 ** 256 - 1 - -OUT_OF_GAS = -1 - -# contract creating transactions send to an empty address -CREATE_CONTRACT_ADDRESS = b'' - - -def mk_contract_address(sender, nonce): - return utils.sha3(rlp.encode([normalize_address(sender), nonce]))[12:] - - -def verify(block, parent): - from ethereum import blocks - try: - block2 = rlp.decode(rlp.encode(block), blocks.Block, - env=parent.env, parent=parent) - assert block == block2 - return True - except blocks.VerificationFailed: - return False - - -class Log(rlp.Serializable): - - # TODO: original version used zpad (here replaced by int32.serialize); had - # comment "why zpad"? - fields = [ - ('address', utils.address), - ('topics', CountableList(utils.int32)), - ('data', binary) - ] - - def __init__(self, address, topics, data): - if len(address) == 40: - address = decode_hex(address) - assert len(address) == 20 - super(Log, self).__init__(address, topics, data) - - def bloomables(self): - return [self.address] + [utils.int32.serialize(x) for x in self.topics] - - def to_dict(self): - return { - "bloom": encode_hex(bloom.b64(bloom.bloom_from_list(self.bloomables()))), - "address": encode_hex(self.address), - "data": b'0x' + encode_hex(self.data), - "topics": [encode_hex(utils.int32.serialize(t)) - for t in self.topics] - } - - def __repr__(self): - return '' % \ - (encode_hex(self.address), self.topics, self.data) - - -def intrinsic_gas_used(tx): - num_zero_bytes = str_to_bytes(tx.data).count(ascii_chr(0)) - num_non_zero_bytes = len(tx.data) - num_zero_bytes - return (opcodes.GTXCOST - + opcodes.GTXDATAZERO * num_zero_bytes - + opcodes.GTXDATANONZERO * num_non_zero_bytes) - - -def validate_transaction(block, tx): - - def rp(what, actual, target): - return '%r: %r actual:%r target:%r' % (tx, what, actual, target) - - # (1) The transaction signature is valid; - if not tx.sender: # sender is set and validated on Transaction initialization - raise UnsignedTransaction(tx) - - # (2) the transaction nonce is valid (equivalent to the - # sender account's current nonce); - acctnonce = block.get_nonce(tx.sender) - if acctnonce != tx.nonce: - raise InvalidNonce(rp('nonce', tx.nonce, acctnonce)) - - # (3) the gas limit is no smaller than the intrinsic gas, - # g0, used by the transaction; - if tx.startgas < intrinsic_gas_used(tx): - raise InsufficientStartGas(rp('startgas', tx.startgas, intrinsic_gas_used)) - - # (4) the sender account balance contains at least the - # cost, v0, required in up-front payment. 
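# [Editor's worked example] For a transaction with value 10**18 wei,
# gasprice 20 * 10**9 wei and startgas 21000, the up-front cost checked
# below is v0 = 10**18 + 20 * 10**9 * 21000 = 1.00042 * 10**18 wei; the
# sender's balance must cover all of it, even though any unused gas is
# refunded later in apply_transaction.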
- total_cost = tx.value + tx.gasprice * tx.startgas - if block.get_balance(tx.sender) < total_cost: - raise InsufficientBalance(rp('balance', block.get_balance(tx.sender), total_cost)) - - # check block gas limit - if block.gas_used + tx.startgas > block.gas_limit: - raise BlockGasLimitReached(rp('gaslimit', block.gas_used + tx.startgas, block.gas_limit)) - - return True - - -def apply_transaction(block, tx): - validate_transaction(block, tx) - - log_tx.debug('TX NEW', tx_dict=tx.log_dict()) - # start transacting ################# - block.increment_nonce(tx.sender) - # print block.get_nonce(tx.sender), '@@@' - - intrinsic_gas = intrinsic_gas_used(tx) - if block.number >= block.config['HOMESTEAD_FORK_BLKNUM']: - assert tx.s * 2 < transactions.secpk1n - if not tx.to or tx.to == CREATE_CONTRACT_ADDRESS: - intrinsic_gas += opcodes.CREATE[3] - if tx.startgas < intrinsic_gas: - raise InsufficientStartGas(rp('startgas', tx.startgas, intrinsic_gas)) - - # buy startgas - assert block.get_balance(tx.sender) >= tx.startgas * tx.gasprice - block.delta_balance(tx.sender, -tx.startgas * tx.gasprice) - message_gas = tx.startgas - intrinsic_gas - message_data = vm.CallData([safe_ord(x) for x in tx.data], 0, len(tx.data)) - message = vm.Message(tx.sender, tx.to, tx.value, message_gas, message_data, code_address=tx.to) - - # MESSAGE - ext = VMExt(block, tx) - if tx.to and tx.to != CREATE_CONTRACT_ADDRESS: - result, gas_remained, data = apply_msg(ext, message) - log_tx.debug('_res_', result=result, gas_remained=gas_remained, data=data) - else: # CREATE - result, gas_remained, data = create_contract(ext, message) - assert utils.is_numeric(gas_remained) - log_tx.debug('_create_', result=result, gas_remained=gas_remained, data=data) - - assert gas_remained >= 0 - - log_tx.debug("TX APPLIED", result=result, gas_remained=gas_remained, - data=data) - - if not result: # 0 = OOG failure in both cases - log_tx.debug('TX FAILED', reason='out of gas', - startgas=tx.startgas, gas_remained=gas_remained) - block.gas_used += tx.startgas - block.delta_balance(block.coinbase, tx.gasprice * tx.startgas) - output = b'' - success = 0 - else: - log_tx.debug('TX SUCCESS', data=data) - gas_used = tx.startgas - gas_remained - block.refunds += len(set(block.suicides)) * opcodes.GSUICIDEREFUND - if block.refunds > 0: - log_tx.debug('Refunding', gas_refunded=min(block.refunds, gas_used // 2)) - gas_remained += min(block.refunds, gas_used // 2) - gas_used -= min(block.refunds, gas_used // 2) - block.refunds = 0 - # sell remaining gas - block.delta_balance(tx.sender, tx.gasprice * gas_remained) - block.delta_balance(block.coinbase, tx.gasprice * gas_used) - block.gas_used += gas_used - if tx.to: - output = b''.join(map(ascii_chr, data)) - else: - output = data - success = 1 - block.commit_state() - suicides = block.suicides - block.suicides = [] - for s in suicides: - block.ether_delta -= block.get_balance(s) - block.set_balance(s, 0) - block.del_account(s) - block.add_transaction_to_list(tx) - block.logs = [] - return success, output - - -# External calls that can be made from inside the VM. 
To use the EVM with a -# different blockchain system, database, set parameters for testing, just -# swap out the functions here -class VMExt(): - - def __init__(self, block, tx): - self._block = block - self.get_code = block.get_code - self.get_balance = block.get_balance - self.set_balance = block.set_balance - self.set_storage_data = block.set_storage_data - self.get_storage_data = block.get_storage_data - self.log_storage = lambda x: block.account_to_dict(x)['storage'] - self.add_suicide = lambda x: block.suicides.append(x) - self.add_refund = lambda x: \ - setattr(block, 'refunds', block.refunds + x) - self.block_hash = lambda x: block.get_ancestor_hash(block.number - x) \ - if (1 <= block.number - x <= 256 and x <= block.number) else b'' - self.block_coinbase = block.coinbase - self.block_timestamp = block.timestamp - self.block_number = block.number - self.block_difficulty = block.difficulty - self.block_gas_limit = block.gas_limit - self.log = lambda addr, topics, data: \ - block.add_log(Log(addr, topics, data)) - self.tx_origin = tx.sender - self.tx_gasprice = tx.gasprice - self.create = lambda msg: create_contract(self, msg) - self.msg = lambda msg: _apply_msg(self, msg, self.get_code(msg.code_address)) - self.account_exists = block.account_exists - self.post_homestead_hardfork = lambda: block.number >= block.config['HOMESTEAD_FORK_BLKNUM'] - - -def apply_msg(ext, msg): - return _apply_msg(ext, msg, ext.get_code(msg.code_address)) - - -def _apply_msg(ext, msg, code): - trace_msg = log_msg.is_active('trace') - if trace_msg: - log_msg.debug("MSG APPLY", sender=encode_hex(msg.sender), to=encode_hex(msg.to), - gas=msg.gas, value=msg.value, - data=encode_hex(msg.data.extract_all())) - if log_state.is_active('trace'): - log_state.trace('MSG PRE STATE SENDER', account=msg.sender, - bal=ext.get_balance(msg.sender), - state=ext.log_storage(msg.sender)) - log_state.trace('MSG PRE STATE RECIPIENT', account=msg.to, - bal=ext.get_balance(msg.to), - state=ext.log_storage(msg.to)) - # log_state.trace('CODE', code=code) - # Transfer value, instaquit if not enough - snapshot = ext._block.snapshot() - if not ext._block.transfer_value(msg.sender, msg.to, msg.value): - log_msg.debug('MSG TRANSFER FAILED', have=ext.get_balance(msg.to), - want=msg.value) - return 1, msg.gas, [] - # Main loop - if msg.code_address in specials.specials: - res, gas, dat = specials.specials[msg.code_address](ext, msg) - else: - res, gas, dat = vm.vm_execute(ext, msg, code) - # gas = int(gas) - # assert utils.is_numeric(gas) - if trace_msg: - log_msg.debug('MSG APPLIED', gas_remained=gas, - sender=msg.sender, to=msg.to, data=dat) - if log_state.is_active('trace'): - log_state.trace('MSG PRE STATE SENDER', account=msg.sender, - bal=ext.get_balance(msg.sender), - state=ext.log_storage(msg.sender)) - log_state.trace('MSG PRE STATE RECIPIENT', account=msg.to, - bal=ext.get_balance(msg.to), - state=ext.log_storage(msg.to)) - - if res == 0: - log_msg.debug('REVERTING') - ext._block.revert(snapshot) - - return res, gas, dat - - -def create_contract(ext, msg): - #print('CREATING WITH GAS', msg.gas) - sender = decode_hex(msg.sender) if len(msg.sender) == 40 else msg.sender - if ext.tx_origin != msg.sender: - ext._block.increment_nonce(msg.sender) - nonce = utils.encode_int(ext._block.get_nonce(msg.sender) - 1) - msg.to = mk_contract_address(sender, nonce) - b = ext.get_balance(msg.to) - if b > 0: - ext.set_balance(msg.to, b) - ext._block.set_nonce(msg.to, 0) - ext._block.set_code(msg.to, b'') - ext._block.reset_storage(msg.to) - 
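# [Editor's note] At this point in the (now deleted) legacy CREATE path the
# target address has been derived as sha3(rlp([sender, nonce]))[12:] and its
# nonce, code and storage reset; the lines below then treat the message
# payload as init code, run it via _apply_msg, and store its return data as
# the contract body, charging GCONTRACTBYTE per byte (falling back to an
# empty body, or failing outright post-Homestead, if gas runs short).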
msg.is_create = True - # assert not ext.get_code(msg.to) - code = msg.data.extract_all() - msg.data = vm.CallData([], 0, 0) - res, gas, dat = _apply_msg(ext, msg, code) - assert utils.is_numeric(gas) - - if res: - if not len(dat): - return 1, gas, msg.to - gcost = len(dat) * opcodes.GCONTRACTBYTE - if gas >= gcost: - gas -= gcost - else: - dat = [] - #print('CONTRACT CREATION OOG', 'have', gas, 'want', gcost) - if ext._block.number >= ext._block.config['HOMESTEAD_FORK_BLKNUM']: - return 0, 0, b'' - log_msg.debug('CONTRACT CREATION OOG', have=gas, want=gcost) - ext._block.set_code(msg.to, b''.join(map(ascii_chr, dat))) - return 1, gas, msg.to - else: - return 0, gas, b'' diff --git a/ethereum/ringsig.se.py b/ethereum/ringsig.se.py new file mode 100644 index 000000000..2176fac0e --- /dev/null +++ b/ethereum/ringsig.se.py @@ -0,0 +1,151 @@ +# TOTALLY NOT TESTED AND LIKELY BROKEN AT THIS POINT; AWAITING A TEST SUITE +data dummy[2**50] +data participants[2**40](x, y) +data participantsCount +data spendsCount +data IValuesConsumed[] + +PARTICIPANTS_PER_BUCKET = 5 +DENOMINATION = 10**17 + +Gx = 55066263022277343669578718895168534326250603453777594175500187360389116729240 +Gy = 32670510020758816978083085130507043184471273380659243275938904335757337482424 +P = -4294968273 +ECADD = 5 +ECMUL = 6 +MODEXP = 7 + +# Macro to do elliptic curve multiplication; ecmul([x, y], n) -> [x', y'] +macro ecmul($p, $n): + with $x = array(2): + ~call(msg.gas - 20000, ECMUL, 0, [$p[0], $p[1], $n], 96, $x, 64) + $x + +# Macro to do elliptic curve addition; ecadd([x1, y1], [x2, y2]) -> [x', y'] +macro ecadd($a, $b): + with $x = array(2): + ~call(msg.gas - 20000, ECADD, 0, [$a[0], $a[1], $b[0], $b[1]], 128, $x, 64) + $x + +# Macro to do elliptic curve subtraction; ecadd([x1, y1], [x2, y2]) -> [x', y'] +macro ecsubtract($a, $b): + with $x = array(2): + ~call(msg.gas - 20000, ECADD, 0, [$a[0], $a[1], $b[0], P - $b[1]], 128, $x, 64) + $x + +event ValueLogEvent(i:uint256) + +event PubkeyLogEvent(x:uint256, y:uint256) + +event PubkeyTripleLogEvent(x:uint256, y:uint256, z:uint256) + +event Deposit(x:uint256, y:uint256, bucketId:uint256) + +event Withdrawal(toaddr:address, bucketId:uint256) +event Progress(index:uint256:indexed) +event Gas(gas:uint256:indexed) +event BadSignature() +event HashResult(x:uint256:indexed, y:uint256:indexed) + +# Hash a public key to get a public key +def hash_pubkey_to_pubkey(pub:arr): + with x = sha3(pub:arr): + while 1: + xcubed = mulmod(mulmod(x, x, P), x, P) + beta = 0 + ~call(msg.gas - 6000, MODEXP, 0, [addmod(xcubed, 7, P), div(P + 1, 4), P], 96, ref(beta), 32) + y = beta * mod(beta, 2) + (P - beta) * (1 - mod(beta, 2)) + # Return if the result is not a quadratic residue + if addmod(xcubed, 7, P) == mulmod(y, y, P): + return([x, y]:arr) + x = mod(x + 1, P) + +# Get the list of public keys for a given bucket ID +def const getPubs(bucketId:uint256): + pubs = array(2 * PARTICIPANTS_PER_BUCKET) + i = 0 + while i < PARTICIPANTS_PER_BUCKET: + pubs[2 * i] = self.participants[bucketId * 5 + i].x + pubs[2 * i + 1] = self.participants[bucketId * 5 + i].y + i += 1 + return(pubs:arr) + +def const getNextIndex(): + return(self.participantsCount) + +def const getSpendsCount(): + return(self.spendsCount) + +# Submit ETH into the mixer +def submit(x:uint256, y:uint256): + if msg.value != DENOMINATION: + send(msg.sender, msg.value) + stop + self.participants[self.participantsCount].x = x + self.participants[self.participantsCount].y = y + self.participantsCount += 1 + log(type=Deposit, x, y, bucketId) + 
return((self.participantsCount - 1) / PARTICIPANTS_PER_BUCKET) + +event EValues(left:uint256:indexed, right:uint256:indexed) +event Pub(x:uint256:indexed, y:uint256:indexed) +event Sub(x:uint256:indexed, y:uint256:indexed) + +# Withdraw ETH from the mixer by submitting a ring signature +def withdraw(to:address, x0:uint256, s:uint256[], Ix:uint256, Iy:uint256, bucketId:uint256): + # Ensure that the bucket is full + assert self.participantsCount >= (bucketId + 1) * PARTICIPANTS_PER_BUCKET + # Ensure that this user has not yet withdrawn + assert not self.IValuesConsumed[sha3([Ix, Iy]:arr)] + # Number of pubkeys + n = PARTICIPANTS_PER_BUCKET + # Decompress the provided I value + # Iy = self.recover_y(Ix, Iy) + # Store the list of intermediate values in the "ring" + e = array(n + 1) + # Set the first value in the ring to that provided in the signature + e[0] = [x0, sha3(x0)] + G = [Gx, Gy] + i = 1 + while i < n + 1: + # log(type=Progress, 100 + i) + prev_i = (i - 1) % n + # Decompress the public key + pub_xi = self.participants[bucketId * 5 + (i % n)].x + pub_yi = self.participants[bucketId * 5 + (i % n)].y + # Create the point objects + pub = [pub_xi, pub_yi] + # log(type=Pub, pub_xi, pub_yi) + I = [Ix, Iy] + # Create the next values in the ring based on the provided s value + k1 = ecmul(G, s[prev_i]) + k2 = ecmul(pub, e[prev_i][1]) + pub1 = ecsubtract(k1, k2) + # log(type=Pub, pub1[0], pub1[1]) + # log(type=Gas, msg.gas) + k3 = self.hash_pubkey_to_pubkey(pub, outitems=2) + # log(type=Sub, k3[0], k3[1]) + k4 = ecmul(k3, s[prev_i]) + k5 = ecmul(I, e[prev_i][1]) + pub2 = ecsubtract(k4, k5) + # log(type=Pub, pub2[0], pub2[1]) + left = sha3([to, pub1[0], pub1[1], pub2[0], pub2[1]]:arr) + right = sha3(left) + e[i] = [left, right] + # log(type=EValues, left, right) + # log(type=Gas, msg.gas) + i += 1 + log(type=Progress, 6) + # Check that the ring is consistent + if e[n][0] == e[0][0] and e[n][1] == e[0][1]: + # Check that this I value has not yet been used + self.IValuesConsumed[sha3([Ix, Iy]:arr)] = 1 + # Send, taking a 1% fee to pay for gas + send(to, DENOMINATION * 99 / 100) + log(type=Withdrawal, to, bucketId) + self.spendsCount += 1 + # Lazy shim for now: hardcode 25 shannon gas price + return(25 * 10**9) + log(type=Progress, 8) + log(type=BadSignature) + return(0) diff --git a/ethereum/ringsig_tester.py b/ethereum/ringsig_tester.py new file mode 100644 index 000000000..1f98d1e2f --- /dev/null +++ b/ethereum/ringsig_tester.py @@ -0,0 +1,122 @@ +import bitcoin as b +import utils + +def hash_array(arr): + o = '' + for x in arr: + if isinstance(x, (int, long)): + x = utils.zpad(utils.encode_int(x), 32) + o += x + return utils.big_endian_to_int(utils.sha3(o)) + +def hash_value(x): + if isinstance(x, (int, long)): + x = utils.zpad(utils.encode_int(x), 32) + return utils.big_endian_to_int(utils.sha3(x)) + +def hash_to_pubkey(x): + from bitcoin import A, B, P, encode_pubkey + x = hash_array(x) if isinstance(x, list) else hash_value(x) + while 1: + xcubedaxb = (x*x*x+A*x+B) % P + beta = pow(xcubedaxb, (P+1)//4, P) + y = beta if beta % 2 else (P - beta) + # Return if the result is not a quadratic residue + if (xcubedaxb - y*y) % P == 0: + return (x, y) + x = x + 1 + +def ringsig_sign_substitute(msg, priv, pubs): + # Number of pubkeys + n = len(pubs) + # My pubkey + my_pub = b.decode_pubkey(b.privtopub(priv)) + # Compute my index in the pubkey list + my_index = 0 + while my_index < n: + if pubs[my_index] == my_pub: + break + my_index += 1 + assert my_index < n + # Compute the signer's I value + I = 
b.multiply(hash_to_pubkey(list(my_pub)), priv) + # Select a random ephemeral key + k = b.hash_to_int(b.random_key()) + # Store the list of intermediate values in the "ring" + e = [None] * n + # Compute the entry in the ring corresponding to the signer's index + kpub = b.privtopub(k) + kmulpub = b.multiply(hash_to_pubkey(list(my_pub)), k) + orig_left = hash_array([msg, kpub[0], kpub[1], kmulpub[0], kmulpub[1]]) + orig_right = hash_value(orig_left) + e[my_index] = {"left": orig_left, "right": orig_right} + # Map of intermediate s values (part of the signature) + s = [None] * n + for i in list(range(my_index + 1, n)) + list(range(my_index + 1)): + prev_i = (i - 1) % n + # In your position in the ring, set the s value based on your private + # knowledge of k; this lets you "invert" the hash function in order to + # ensure a consistent ring. At all other positions, select a random s + if i == my_index: + s[prev_i] = b.add_privkeys(k, b.mul_privkeys(e[prev_i]["right"], priv)) + else: + s[prev_i] = b.hash_to_int(b.random_key()) + # Create the next values in the ring based on the chosen s value + pub1 = b.subtract_pubkeys(b.privtopub(s[prev_i]), + b.multiply(pubs[i], e[prev_i]["right"])) + pub2 = b.subtract_pubkeys(b.multiply(hash_to_pubkey(list(pubs[i])), s[prev_i]), + b.multiply(I, e[prev_i]["right"])) + left = hash_array([msg, pub1[0], pub1[1], pub2[0], pub2[1]]) + right = hash_value(left) + e[i] = {"left": left, "right": right} + # Check that the ring is consistent + assert (left, right) == (orig_left, orig_right) + # Return the first value in the ring, the s values, and the signer's + # I value in compressed form + return (e[0]["left"], s, I[0], I[1]) + + +def ringsig_verify_substitute(msghash, x0, s, Ix, Iy, pubs): + # Number of pubkeys + n = len(pubs) + # Create list of pubkeys as (x, y) points + # Decompress the provided I value + I = Ix, Iy + # Store the list of intermediate values in the "ring" + e = [None] * (n + 1) + # Set the first value in the ring to that provided in the signature + e[0] = [x0, hash_value(x0)] + i = 1 + while i < n + 1: + # print 'pub', pubs[i % n][0], pubs[i % n][1] + prev_i = (i - 1) % n + # Create the next values in the ring based on the provided s value + pub1 = b.subtract_pubkeys(b.privtopub(s[prev_i]), + b.multiply(pubs[i % n], e[prev_i][1])) + # print 'pub', pub1[0], pub1[1] + pub2 = b.subtract_pubkeys(b.multiply(hash_to_pubkey(list(pubs[i % n])), s[prev_i]), + b.multiply(I, e[prev_i][1])) + # print 'pub', pub2[0], pub2[1] + left = hash_array([msghash, pub1[0], pub1[1], pub2[0], pub2[1]]) + right = hash_value(left) + # FOR DEBUGGING + # if i >= 1: + # print 'pre', pubs[i % n] + # print 'pub1', pub1 + # print 'pub2', pub2 + # print 'left', left + # print 'right', right + e[i] = [left, right] + # print 'evalues', left, right + i += 1 + # Check that the ring is consistent + return(e[n][0] == e[0][0] and e[n][1] == e[0][1]) + +# Testing +print 'Ringsig python implementation sanity checking' +privs = [b.sha256(str(i)) for i in range(5)] +pubs = [b.decode_pubkey(b.privtopub(k)) for k in privs] +sigs = [ringsig_sign_substitute('\x35' * 32, k, pubs) for k in privs] +vers = [ringsig_verify_substitute('\x35' * 32, x0, s, Ix, Iy, pubs) for x0, s, Ix, Iy in sigs] +assert vers == [True] * len(vers) +print 'Ringsig python implementation sanity check passed' diff --git a/ethereum/serenity_blocks.py b/ethereum/serenity_blocks.py new file mode 100644 index 000000000..41b946a69 --- /dev/null +++ b/ethereum/serenity_blocks.py @@ -0,0 +1,445 @@ +from rlp.sedes import 
big_endian_int, Binary, binary, CountableList +from utils import address, int256, trie_root, hash32, to_string, \ + sha3, zpad, normalize_address, int_to_addr, big_endian_to_int, \ + encode_int, safe_ord, encode_int32, encode_hex, shardify, \ + get_shard, match_shard, mk_contract_address +from db import EphemDB, OverlayDB +from serenity_transactions import Transaction +import fastvm as vm +from config import BLOCKHASHES, STATEROOTS, BLKNUMBER, CASPER, GAS_REMAINING, GASLIMIT, NULL_SENDER, ETHER, PROPOSER, RNGSEEDS, TXGAS, TXINDEX, LOG, MAXSHARDS, UNHASH_MAGIC_BYTES, EXECUTION_STATE, ADDR_BYTES, ADDR_BASE_BYTES +import rlp +import trie +import specials +TT255 = 2 ** 255 +TT256 = 2 ** 256 +TT256M1 = 2 ** 256 - 1 + + +# Block header (~150 bytes in the normal case); light clients download these +class BlockHeader(rlp.Serializable): + fields = [ + ('number', big_endian_int), + ('txroot', trie_root), + ('proposer', address), + ('sig', binary) + ] + + def __init__(self, number=0, txroot=trie.BLANK_ROOT, proposer='\x00'*20, sig=b''): + fields = {k: v for k, v in locals().items() if k != 'self'} + super(BlockHeader, self).__init__(**fields) + + @property + def hash(self): + return sha3(rlp.encode(self)) + + +class TransactionGroupSummary(rlp.Serializable): + fields = [ + ('gas_limit', big_endian_int), + ('left_bound', big_endian_int), + ('right_bound', big_endian_int), + ('transaction_hash', binary) + ] + + def __init__(self, gas_limit=GASLIMIT, left_bound=0, right_bound=2**160, txgroup=[], transaction_hash=None): + self.gas_limit = gas_limit + self.left_bound = left_bound + self.right_bound = right_bound + self.transaction_hash = transaction_hash or sha3(rlp.encode(txgroup)) + + +# The entire block, including the transactions. Note that the concept of +# extra data is non-existent; if a proposer wants extra data they should +# just make the first transaction a dummy containing that data +class Block(rlp.Serializable): + fields = [ + ('header', BlockHeader), + ('summaries', CountableList(TransactionGroupSummary)), + ('transaction_groups', CountableList(CountableList(Transaction))) + ] + + def __init__(self, header=None, transactions=[], transaction_groups=None, summaries=None, number=None, proposer='\x00' * 20, sig=b''): + if transaction_groups is None or summaries is None or header is None: + if transaction_groups is not None or summaries is not None or header is not None: + raise Exception("If you supply one of txgroups/summaries/header you must supply all of them!") + # TODO: Later, create a smarter algorithm for this + # For now, we just create a big super-group with a global range + # containing all of the desired transactions + self.transaction_groups = [transactions] + for tx in transactions: + assert tx.left_bound % (tx.right_bound - tx.left_bound) == 0 + assert 0 <= tx.left_bound < tx.right_bound <= MAXSHARDS + self.summaries = [TransactionGroupSummary(GASLIMIT, 0, MAXSHARDS, transactions)] + self.summaries[0].intrinsic_gas = sum([tx.intrinsic_gas for tx in transactions]) + assert self.summaries[0].intrinsic_gas < GASLIMIT + self.header = BlockHeader(number, sha3(rlp.encode(self.summaries)), proposer, sig) + else: + prevright = 0 + for s, g in zip(summaries, transaction_groups): + # Check tx hash matches + assert s.transaction_hash == sha3(rlp.encode(g)) + # Bounds must reflect a node in the binary tree (eg. 
12-14 is valid, + # so is 13-14 or 14-15, but 13-15 is not) + assert s.left_bound % (s.right_bound - s.left_bound) == 0 + # Summaries must be disjoint and in sorted order with bounds valid and + # within the global bounds + assert 0 <= prevright <= s.left_bound < s.right_bound <= MAXSHARDS + # Check that all transaction bounds are a subset of the summary + for tx in g: + assert s.left_bound <= tx.left_bound < tx.right_bound <= s.right_bound + s.intrinsic_gas = sum([tx.intrinsic_gas for tx in g]) + prevright = s.right_bound + # Check gas limit condition + assert sum([s.intrinsic_gas for s in summaries]) < GASLIMIT + # Check header transaction root matches + assert header.txroot == sha3(rlp.encode(summaries)) + self.summaries, self.transaction_groups, self.header = summaries, transaction_groups, header + + def add_transaction(tx, group_id=0): + self.transaction_groups[group_id].append(tx) + self.summaries[group_id].transaction_hash = sha3(rlp.encode(self.transaction_groups[group_id])) + self.header.txroot = sha3(rlp.encode(self.summaries[group_id])) + + @property + def hash(self): return self.header.hash + + @property + def number(self): return self.header.number + @number.setter + def number(self, number): self.header.number = number + + @property + def sig(self): return self.header.sig + @sig.setter + def sig(self, sig): self.header.sig = sig + + @property + def proposer(self): return self.header.proposer + @proposer.setter + def proposer(self, proposer): self.header.proposer = proposer + + @property + def txroot(self): return self.header.txroot + + +# An object representing the state. In Serenity, the state will be just a +# trie of accounts with storage; _all_ intermediate state, including gas +# used, logs, transaction index, etc, is placed into contracts. This greatly +# simplifies a large amount of handling code +class State(): + def __init__(self, state_root, db): + self.state = trie.Trie(db) + self.state.root_hash = state_root + self.db = self.state.db + # The state uses a journaling cache data structure in order to + # facilitate revert operations while maintaining very high efficiency + # for updates. Note that the cache is designed to handle commits + # happening at any time; commits can be reverted too. 
Committing is + # done automatically whenever a root is requested; for this reason, + # use the State.root method to get the root instead of poking into + # State.state.root_hash directly + self.journal = [] + self.cache = {} + self.modified = {} + + def set_storage(self, addr, k, v): + if isinstance(k, (int, long)): + k = encode_int32(k) + if isinstance(v, (int, long)): + v = encode_int32(v) + addr = normalize_address(addr) + self.journal.append((addr, k, self.get_storage(addr, k))) + self.cache[addr][k] = v + if addr not in self.modified: + self.modified[addr] = {} + self.modified[addr][k] = True + + def commit(self): + rt = self.state.root_hash + for addr, subcache in self.cache.items(): + t = trie.Trie(self.state.db) + t.root_hash = self.state.get(addr) + modified = False + # updates = [] + for key, value in subcache.items(): + if key in self.modified.get(addr, {}) and value != t.get(key): + # updates.append((key, value)) + t.update(key, value) + modified = True + # if len(updates) > 10: + # print 'Saving %d key/value pairs in address %s' % (len(updates), addr.encode('hex')) + # for update in updates: + # print update + if modified: + self.state.update(addr, t.root_hash) + self.journal.append(('~root', (self.cache, self.modified), rt)) + self.cache = {} + self.modified = {} + + def get_storage(self, addr, k): + if isinstance(k, (int, long)): + k = encode_int32(k) + addr = normalize_address(addr) + if addr not in self.cache: + self.cache[addr] = {} + elif k in self.cache[addr]: + return self.cache[addr][k] + t = trie.Trie(self.state.db) + t.root_hash = self.state.get(addr) + v = t.get(k) + self.cache[addr][k] = v + return v + + @property + def root(self): + self.commit() + return self.state.root_hash + + # Creates a new state using an overlay of the existing state. Updates to + # the cloned state will NOT affect the parent state. + def clone(self): + self.commit() + return State(self.root, OverlayDB(self.state.db)) + + # Converts the state to a dictionary + def to_dict(self): + state_dump = {} + for address, v in self.state.to_dict().items(): + acct_dump = {} + acct_trie = trie.Trie(self.state.db) + acct_trie.root_hash = v + for key, v in acct_trie.to_dict().items(): + acct_dump[encode_hex(key)] = encode_hex(v) + state_dump[encode_hex(address)] = acct_dump + for address, v in self.cache.items(): + if address not in state_dump: + state_dump[encode_hex(address)] = {} + for key, val in v.items(): + if val: + state_dump[encode_hex(address)][encode_hex(key)] = encode_hex(val) + if not state_dump[encode_hex(address)]: + del state_dump[encode_hex(address)] + return state_dump + + def account_to_dict(self, account): + acct_trie = trie.Trie(self.state.db) + acct_trie.root_hash = self.state.get(normalize_address(account)) + print 'rt', repr(acct_trie.root_hash) + acct_dump = {} + for key, val in acct_trie.to_dict().items(): + acct_dump[encode_hex(key)] = encode_hex(val) + if account in self.cache: + for key, val in self.cache[account].items(): + if val: + acct_dump[key] = val + elif key in acct_dump: + del acct_dump[key] + return acct_dump + + # Returns a value x, where State.revert(x) at any later point will return + # you to the point at which the snapshot was made. 
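+    # A minimal usage sketch of the journal (illustrative only; `acct` is a
+    # hypothetical ADDR_BYTES-long address, and the empty-root/EphemDB
+    # construction mirrors how the genesis state is built in test.py):
+    #
+    #   state = State('', EphemDB())
+    #   snap = state.snapshot()
+    #   acct = '\x01' * ADDR_BYTES
+    #   state.set_storage(acct, 0, 42)      # journaled write; int keys/values are zpadded to 32 bytes
+    #   assert big_endian_to_int(state.get_storage(acct, 0)) == 42
+    #   state.revert(snap)                  # pops the journal back to the snapshot
+    #   assert state.get_storage(acct, 0) == ''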
+ def snapshot(self): + return len(self.journal) + + # Reverts to the provided snapshot + def revert(self, snapshot): + while len(self.journal) > snapshot: + addr, key, preval = self.journal.pop() + if addr == '~root': + self.state.root_hash = preval + self.cache, self.modified = key + else: + self.cache[addr][key] = preval + +def initialize_with_gas_limit(state, gas_limit, left_bound=0): + state.set_storage(shardify(EXECUTION_STATE, left_bound), GAS_REMAINING, gas_limit) + + +transition_cache_map = {} + +# Processes a block on top of a state to reach a new state +def block_state_transition(state, block, listeners=[]): + pre = state.root + # Determine the current block number, block proposer and block hash + blknumber = big_endian_to_int(state.get_storage(BLKNUMBER, 0)) + blkproposer = block.proposer if block else '\x00' * ADDR_BYTES + blkhash = block.hash if block else '\x00' * 32 + # Put the state root in storage + if blknumber: + state.set_storage(STATEROOTS, encode_int32(blknumber - 1), state.root) + # Put the proposer in storage + state.set_storage(PROPOSER, 0, blkproposer) + # If the block exists (ie. is not NONE), process every transaction + if block: + assert block.number == blknumber, (block.number, blknumber) + # Initialize the GAS_CONSUMED variable to _just_ the sum of + # intrinsic gas of each transaction (ie. tx data consumption + # only, not computation) + for s, g in zip(block.summaries, block.transaction_groups): + _EXSTATE = shardify(EXECUTION_STATE, s.left_bound) + _LOG = shardify(LOG, s.left_bound) + # Set the txindex to 0 to start off + state.set_storage(_EXSTATE, TXINDEX, 0) + # Initialize the gas remaining variable + initialize_with_gas_limit(state, s.gas_limit - s.intrinsic_gas, s.left_bound) + # Apply transactions sequentially + print 'Block %d contains %d transactions and %d intrinsic gas' % (blknumber, sum([len(g) for g in block.transaction_groups]), sum([summ.intrinsic_gas for summ in block.summaries])) + for tx in g: + tx_state_transition(state, tx, s.left_bound, s.right_bound, listeners=listeners) + assert big_endian_to_int(state.get_storage(_EXSTATE, TXINDEX)) == len(g) + for i in range(len(g)): + assert state.get_storage(_LOG, i) + # Put the block hash in storage + state.set_storage(BLOCKHASHES, encode_int32(blknumber), blkhash) + # Put the next block number in storage + state.set_storage(BLKNUMBER, 0, encode_int32(blknumber + 1)) + # Update the RNG seed (the lower 64 bits contains the number of validators, + # the upper 192 bits are pseudorandom) + prevseed = state.get_storage(RNGSEEDS, encode_int32(blknumber - 1)) if blknumber else '\x00' * 32 + newseed = big_endian_to_int(sha3(prevseed + blkproposer)) + newseed = newseed - (newseed % 2**64) + big_endian_to_int(state.get_storage(CASPER, 0)) + state.set_storage(RNGSEEDS, encode_int32(blknumber), newseed) + # Consistency checking + check_key = pre+(block.hash if block else 'NONE') + # print '(block', blknumber, ') pre', repr(pre), 'block', repr(block.hash if block else None), 'post', state.root + if check_key not in transition_cache_map: + transition_cache_map[check_key] = state.root + else: + assert transition_cache_map[check_key] == state.root + + +RLPEMPTYLIST = rlp.encode([]) + +def tx_state_transition(state, tx, left_bound=0, right_bound=MAXSHARDS, listeners=[], breaking=False, override_gas=2**255): + _EXSTATE = shardify(EXECUTION_STATE, left_bound) + _LOG = shardify(LOG, left_bound) + # Get index + txindex = big_endian_to_int(state.get_storage(_EXSTATE, TXINDEX)) + # Get prior gas used + gas_remaining = 
big_endian_to_int(state.get_storage(_EXSTATE, GAS_REMAINING)) + # If there is not enough gas left for this transaction, it's a no-op + if gas_remaining - tx.exec_gas < 0: + print 'UNABLE TO EXECUTE transaction due to gas limits: %d have, %d required' % \ + (gas_remaining, tx.exec_gas) + state.set_storage(_LOG, txindex, rlp.encode([encode_int(0)])) + state.set_storage(_EXSTATE, TXINDEX, txindex + 1) + return None + # If the recipient is out of range, it's a no-op + if not (left_bound <= get_shard(tx.addr) < right_bound): + print 'UNABLE TO EXECUTE transaction due to out-of-range' + state.set_storage(_LOG, txindex, rlp.encode([encode_int(0)])) + state.set_storage(_EXSTATE, TXINDEX, txindex + 1) + return None + # Set an object in the state for tx gas + state.set_storage(_EXSTATE, TXGAS, encode_int32(tx.gas)) + ext = VMExt(state, listeners=listeners) + # Empty the log store + state.set_storage(_LOG, txindex, RLPEMPTYLIST) + # Create the account if it does not yet exist + if tx.code and not state.get_storage(tx.addr, b''): + message = vm.Message(NULL_SENDER, tx.addr, 0, tx.exec_gas, vm.CallData([], 0, 0), left_bound, right_bound) + message.gas = min(message.gas, override_gas) + result, execution_start_gas, data = apply_msg(ext, message, tx.code, breaking=breaking) + if not result: + state.set_storage(_LOG, txindex, rlp.encode([encode_int(1)])) + state.set_storage(_EXSTATE, TXINDEX, txindex + 1) + return None + code = ''.join([chr(x) for x in data]) + put_code(state, tx.addr, code) + else: + execution_start_gas = min(tx.exec_gas, override_gas) + # Process VM execution + message_data = vm.CallData([safe_ord(x) for x in tx.data], 0, len(tx.data)) + message = vm.Message(NULL_SENDER, tx.addr, 0, execution_start_gas, message_data) + assert state.get_storage(_LOG, txindex) == RLPEMPTYLIST + result, msg_gas_remained, data = \ + apply_msg(ext, message, get_code(state, tx.addr), breaking=breaking) + assert 0 <= msg_gas_remained <= execution_start_gas <= tx.exec_gas + # Set gas used + state.set_storage(_EXSTATE, GAS_REMAINING, gas_remaining - tx.exec_gas + msg_gas_remained) + # Places a log in storage + logs = state.get_storage(_LOG, txindex) + state.set_storage(_LOG, txindex, rlp.insert(logs, 0, encode_int(2 if result else 1))) + # Increments the txindex + state.set_storage(_EXSTATE, TXINDEX, txindex + 1) + return data + +def get_code(state, address): + codehash = state.get_storage(address, '') + return state.db.get(UNHASH_MAGIC_BYTES + codehash) if codehash else '' + +def put_code(state, address, code): + codehash = sha3(code) + state.db.put(UNHASH_MAGIC_BYTES + codehash, code) + state.set_storage(address, '', codehash) + +# External calls that can be made from inside the VM. To use the EVM with a +# different blockchain system, database, set parameters for testing, just +# swap out the functions here +class VMExt(): + + def __init__(self, state, listeners=[]): + self._state = state + self._listeners = listeners + self.set_storage = state.set_storage + self.get_storage = state.get_storage + self.log_storage = state.account_to_dict + self.unhash = lambda x: state.db.get(UNHASH_MAGIC_BYTES + x) + self.puthashdata = lambda d: state.db.put(UNHASH_MAGIC_BYTES + sha3(d), d) + self.msg = lambda msg, code: apply_msg(self, msg, code) + self.static_msg = lambda msg, code: apply_msg(EmptyVMExt, msg, code) + + +# An empty VMExt instance that can be used to employ the EVM "purely" +# without accessing state. 
This is used for Casper signature verifications +class _EmptyVMExt(): + + def __init__(self): + self._state = State('', EphemDB()) + self.set_storage = lambda addr, k, v: None + self.get_storage = lambda addr, k: '' + self.log = lambda topics, mem: None + self.log_storage = lambda addr: None + self.unhash = lambda x: '' + self.msg = lambda msg, code: apply_msg(self, msg, code) + self.static_msg = lambda msg, code: apply_msg(EmptyVMExt, msg, code) + +EmptyVMExt = _EmptyVMExt() + +eve_cache = {} + +# Processes a message +def apply_msg(ext, msg, code, breaking=False): + _SENDER_ETHER = match_shard(ETHER, msg.sender) + _RECIPIENT_ETHER = match_shard(ETHER, msg.to) + cache_key = msg.sender + msg.to + str(msg.value) + msg.data.extract_all() + code + if ext is EmptyVMExt and cache_key in eve_cache: + return eve_cache[cache_key] + # Transfer value, instaquit if not enough + snapshot = ext._state.snapshot() + if msg.transfers_value: + if big_endian_to_int(ext.get_storage(_SENDER_ETHER, msg.sender)) < msg.value: + print 'MSG TRANSFER FAILED' + return 1, msg.gas, [] + elif msg.value: + ext.set_storage(_SENDER_ETHER, msg.sender, big_endian_to_int(ext.get_storage(_SENDER_ETHER, msg.sender)) - msg.value) + ext.set_storage(_RECIPIENT_ETHER, msg.to, big_endian_to_int(ext.get_storage(_RECIPIENT_ETHER, msg.to)) + msg.value) + # Main loop + msg_to_raw = big_endian_to_int(msg.to) + if msg_to_raw in specials.specials: + res, gas, dat = specials.specials[msg_to_raw](ext, msg) + else: + res, gas, dat = vm.vm_execute(ext, msg, code, breaking=breaking) + # If the message failed, revert execution + if res == 0: + print 'REVERTING %d gas from account 0x%s to account 0x%s with data 0x%s' % \ + (msg.gas, msg.sender.encode('hex'), msg.to.encode('hex'), msg.data.extract_all().encode('hex')) + # if 200000 < msg.gas < 500000: + # raise Exception("123") + ext._state.revert(snapshot) + # Otherwise, all good + else: + pass # print 'MSG APPLY SUCCESSFUL' + + eve_cache[cache_key] = (res, gas if res else 0, dat) + return res, gas if res else 0, dat diff --git a/ethereum/serenity_transactions.py b/ethereum/serenity_transactions.py new file mode 100644 index 000000000..14fc765f5 --- /dev/null +++ b/ethereum/serenity_transactions.py @@ -0,0 +1,47 @@ +from rlp.sedes import big_endian_int, Binary, binary, CountableList +from rlp.utils import decode_hex, encode_hex, ascii_chr, str_to_bytes +from utils import address, int256, trie_root, hash32, to_string, sha3, \ + zpad, normalize_address, int_to_addr, big_endian_to_int, shardify +from db import EphemDB +from config import MAXSHARDS, SHARD_BYTES +import opcodes +import rlp + +class Transaction(rlp.Serializable): + fields = [ + ('addr', address), + ('gas', big_endian_int), + ('left_bound', big_endian_int), + ('right_bound', big_endian_int), + ('data', binary), + ('code', binary) + ] + + def __init__(self, addr, gas, left_bound=0, right_bound=MAXSHARDS, data='', code=b''): + self.addr = addr or shardify(sha3('\x00' * 20 + code)[12:], left_bound) + self.gas = gas + self.left_bound = left_bound + self.right_bound = right_bound + self.data = data + self.code = code + assert len(self.addr) == 20 + SHARD_BYTES and (self.code == b'' or shardify(sha3('\x00' * 20 + self.code)[12:], left_bound) == self.addr) + assert self.exec_gas >= 0 + assert isinstance(self.left_bound, int) + assert isinstance(self.right_bound, int) + + @property + def hash(self): + return sha3(rlp.encode(self)) + + @property + def intrinsic_gas(self): + num_zero_bytes = str_to_bytes(self.data).count(ascii_chr(0)) + 
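+        # Worked example (assuming the usual opcodes.py constants -- GTXCOST=21000,
+        # GTXDATAZERO=4, GTXDATANONZERO=68, GCONTRACTBYTE=200; check opcodes.py for
+        # the values actually used on this branch): a transaction carrying 100 data
+        # bytes of which 60 are zero, and no code, has intrinsic gas
+        # 21000 + 60*4 + 40*68 = 23960.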
num_non_zero_bytes = len(self.data) - num_zero_bytes + return opcodes.GTXCOST + \ + num_zero_bytes * opcodes.GTXDATAZERO + \ + num_non_zero_bytes * opcodes.GTXDATANONZERO + \ + len(self.code) * opcodes.GCONTRACTBYTE + + @property + def exec_gas(self): + return self.gas - self.intrinsic_gas diff --git a/ethereum/slogging.py b/ethereum/slogging.py index 9c69627a6..7f323e158 100644 --- a/ethereum/slogging.py +++ b/ethereum/slogging.py @@ -245,7 +245,8 @@ def get_logger(name=None): def DEBUG(msg, *args, **kwargs): """temporary logger during development that is always on""" logger = getLogger("DEBUG") - logger.addHandler(StreamHandler()) + if not logger.handlers: + logger.addHandler(StreamHandler()) logger.propagate = False logger.setLevel(logging.DEBUG) logger.DEV(msg, *args, **kwargs) diff --git a/ethereum/specials.py b/ethereum/specials.py index 0b02689c4..cc3320c08 100644 --- a/ethereum/specials.py +++ b/ethereum/specials.py @@ -1,13 +1,19 @@ import bitcoin -from ethereum import utils, opcodes -from ethereum.utils import safe_ord, decode_hex +import utils +import opcodes +from utils import safe_ord, decode_hex, big_endian_to_int, \ + encode_int32, match_shard, shardify, sha3, zpad, ADDR_BASE_BYTES, \ + mk_contract_address from rlp.utils import ascii_chr +from config import ETHER, BLOOM, LOG, EXECUTION_STATE, TXINDEX, CREATOR, \ + NULL_SENDER, GAS_DEPOSIT, ADDR_BYTES +import rlp ZERO_PRIVKEY_ADDR = decode_hex('3f17f1962b36e491b30a40b2405849e597ba5fb5') def proc_ecrecover(ext, msg): - # print('ecrecover proc', msg.gas) + # print('ecrecover proc', msg.gas, msg.data.extract_all()) OP_GAS = opcodes.GECRECOVER gas_cost = OP_GAS if msg.gas < gas_cost: @@ -24,10 +30,63 @@ def proc_ecrecover(ext, msg): if recovered_addr in (False, (0, 0)): return 1, msg.gas - gas_cost, [] pub = bitcoin.encode_pubkey(recovered_addr, 'bin') - o = [0] * 12 + [safe_ord(x) for x in utils.sha3(pub[1:])[-20:]] + o = [0] * (32 - ADDR_BASE_BYTES) + [safe_ord(x) for x in utils.sha3(pub[1:])[-ADDR_BASE_BYTES:]] return 1, msg.gas - gas_cost, o +def proc_ecadd(ext, msg): + # print 'proc ecadd' + OP_GAS = opcodes.GECADD + gas_cost = OP_GAS + if msg.gas < gas_cost: + return 0, 0, [] + x1 = msg.data.extract32(0) + y1 = msg.data.extract32(32) + x2 = msg.data.extract32(64) + y2 = msg.data.extract32(96) + # point not on curve + if (x1*x1*x1+7-y1*y1) % bitcoin.P != 0: + return 0, 0, [] + # point not on curve + if (x2*x2*x2+7-y2*y2) % bitcoin.P != 0: + return 0, 0, [] + c, d = bitcoin.fast_add((x1, y1), (x2, y2)) + c2, d2 = encode_int32(c), encode_int32(d) + return 1, msg.gas - gas_cost, map(ord, c2 + d2) + + +def proc_ecmul(ext, msg): + # print 'proc ecmul' + OP_GAS = opcodes.GECMUL + gas_cost = OP_GAS + if msg.gas < gas_cost: + # print 'insufficient gas' + return 0, 0, [] + x1 = msg.data.extract32(0) + y1 = msg.data.extract32(32) + n = msg.data.extract32(64) + # point not on curve + if (x1*x1*x1+7-y1*y1) % bitcoin.P != 0: + # print 'bad point', x1, y1 + return 0, 0, [] + c, d = bitcoin.fast_multiply((x1, y1), n) + c2, d2 = encode_int32(c), encode_int32(d) + # print 'returning from ecmul' + return 1, msg.gas - gas_cost, map(ord, c2 + d2) + + +def proc_modexp(ext, msg): + # print 'proc modexp' + OP_GAS = opcodes.GMODEXP + gas_cost = OP_GAS + if msg.gas < gas_cost: + return 0, 0, [] + b = msg.data.extract32(0) + e = msg.data.extract32(32) + m = msg.data.extract32(64) + return 1, msg.gas - gas_cost, map(ord, encode_int32(pow(b, e, m))) + + def proc_sha256(ext, msg): # print('sha256 proc', msg.gas) OP_GAS = opcodes.GSHA256BASE + \ @@ 
-63,14 +122,136 @@ def proc_identity(ext, msg): msg.data.extract_copy(o, 0, 0, len(o)) return 1, msg.gas - gas_cost, o +def proc_send_ether(ext, msg): + SENDER_ETHER = match_shard(ETHER, msg.sender) + TO_ETHER = match_shard(ETHER, msg.to) + OP_GAS = opcodes.GCALLVALUETRANSFER + gas_cost = OP_GAS + if msg.gas < gas_cost: + return 0, 0, [] + to = utils.int_to_addr(msg.data.extract32(0) % 2**160) + value = msg.data.extract32(32) + prebal = utils.big_endian_to_int(ext.get_storage(SENDER_ETHER, msg.sender)) + if prebal >= value: + # print 'xferring %d wei from %s to %s' % (value, msg.sender.encode('hex'), to.encode('hex')) + ext.set_storage(TO_ETHER, to, utils.big_endian_to_int(ext.get_storage(TO_ETHER, to)) + value) + ext.set_storage(SENDER_ETHER, msg.sender, prebal - value) + return 1, msg.gas - gas_cost, [0] * 31 + [1] + else: + return 1, msg.gas - gas_cost, [0] * 32 + +def proc_log(ext, msg): + _LOG = shardify(LOG, msg.left_bound) + _EXSTATE = shardify(EXECUTION_STATE, msg.left_bound) + data = msg.data.extract_all() + topics = [data[i*32:i*32+32] for i in range(0, 128, 32)] + OP_GAS = opcodes.GLOGBYTE * max(len(data) - 128, 0) + \ + opcodes.GLOGBASE + len([t for t in topics if t]) * opcodes.GLOGTOPIC + gas_cost = OP_GAS + if msg.gas < gas_cost: + return 0, 0, [] + bloom = big_endian_to_int(ext.get_storage(_LOG, BLOOM)) or 0 + for t in topics: + if t: + t += '\x00' * (32 - len(t)) + h = sha3(t) + for i in range(5): + bloom |= 2**ord(h[i]) + ext.set_storage(_LOG, BLOOM, encode_int32(bloom)) + # print big_endian_to_int(state.get_storage(TXINDEX, 0)), state.get_storage(LOG, state.get_storage(TXINDEX, 0)).encode('hex') + old_storage = ext.get_storage(_LOG, zpad(ext.get_storage(_EXSTATE, TXINDEX), 32)) + new_storage = rlp.append(old_storage, data) + ext.set_storage(_LOG, ext.get_storage(_EXSTATE, TXINDEX), new_storage) + for listener in ext._listeners: + listener(msg.sender, map(big_endian_to_int, topics), data[128:]) + return 1, msg.gas - gas_cost, [0] * 32 + +def proc_rlp_get(ext, msg, output_string=0): + # print('rlpget proc', msg.gas) + OP_GAS = opcodes.GRLPBASE + \ + (utils.ceil32(msg.data.size) // 32) * opcodes.GRLPWORD + gas_cost = OP_GAS + if msg.gas < gas_cost: + return 0, 0, [] + try: + data = msg.data.extract_all() + rlpdata = rlp.decode(data[32:]) + index = big_endian_to_int(data[:32]) + assert isinstance(rlpdata[index], str) + if output_string: + return 1, msg.gas - gas_cost, map(ord, encode_int32(len(rlpdata[index])) + rlpdata[index]) + else: + assert len(rlpdata[index]) <= 32 + return 1, msg.gas - gas_cost, [0] * (32 - len(rlpdata[index])) + map(ord, rlpdata[index]) + except: + return 0, 0, [] + +def proc_rlp_get_bytes32(ext, msg): + return proc_rlp_get(ext, msg, False) + +def proc_rlp_get_string(ext, msg): + return proc_rlp_get(ext, msg, True) + +def proc_create(ext, msg): + import fastvm as vm + OP_GAS = opcodes.GCREATE + gas_cost = OP_GAS + if msg.gas < gas_cost: + return 0, 0, [] + code = msg.data.extract_all() + addr = mk_contract_address(sender=msg.sender, code=code, left_bound=msg.left_bound) + exec_gas = msg.gas - gas_cost + # Create the account if it does not yet exist + if not ext.get_storage(addr, b''): + message = vm.Message(NULL_SENDER, addr, msg.value, exec_gas, vm.CallData([], 0, 0), msg.left_bound, msg.right_bound) + result, execution_start_gas, data = ext.msg(message, code) + if not result: + return 0, 0, [] + code = ''.join([chr(x) for x in data]) + ext.puthashdata(code) + ext.set_storage(addr, '', sha3(code)) + return 1, execution_start_gas, map(ord, zpad(addr, 
32)) + else: + # Contract already exists + return 0, 0, [] + +def proc_gas_deposit(ext, msg): + OP_GAS = opcodes.GGASDEPOSIT + gas_cost = OP_GAS + if msg.gas < gas_cost: + return 0, 0, [] + if msg.value > 0: + ext.set_storage(msg.to, msg.sender, big_endian_to_int(ext.get_storage(msg.to, msg.sender)) + msg.value) + return 1, 0, [] + else: + refund = msg.data.extract32(0) + curbal = big_endian_to_int(ext.get_storage(msg.to, msg.sender)) + if refund <= curbal: + return 0, 0, [] + msg1 = vm.Message(msg.to, msg.sender, refund, 0, 0, 0, 0) + result, _, _ = ext.msg(message, code) + assert result # This should never fail + coinbase = ext.get_storage(PROPOSER, '\x00' * 32) + msg2 = vm.Message(msg.to, coinbase, curbal - refund, 0, 0, 0, 0) + result, _, _ = ext.msg(message, code) + assert result # This should never fail + ext.set_storage(msg.to, msg.sender, 0) + return 1, 0, [] + specials = { - decode_hex(k): v for k, v in - { - '0000000000000000000000000000000000000001': proc_ecrecover, - '0000000000000000000000000000000000000002': proc_sha256, - '0000000000000000000000000000000000000003': proc_ripemd160, - '0000000000000000000000000000000000000004': proc_identity, - }.items() + 1: proc_ecrecover, + 2: proc_sha256, + 3: proc_ripemd160, + 4: proc_identity, + 5: proc_ecadd, + 6: proc_ecmul, + 7: proc_modexp, + 8: proc_rlp_get_bytes32, + 9: proc_rlp_get_string, + big_endian_to_int(ETHER): proc_send_ether, + big_endian_to_int(LOG): proc_log, + big_endian_to_int(CREATOR): proc_create, + big_endian_to_int(GAS_DEPOSIT): proc_gas_deposit, } if __name__ == '__main__': diff --git a/ethereum/test.py b/ethereum/test.py new file mode 100644 index 000000000..19c789fd7 --- /dev/null +++ b/ethereum/test.py @@ -0,0 +1,426 @@ +from serenity_blocks import State, tx_state_transition, mk_contract_address, \ + block_state_transition, initialize_with_gas_limit, get_code, put_code +from serenity_transactions import Transaction +from db import EphemDB, OverlayDB +import serpent +import ringsig_tester +from config import BLOCKHASHES, STATEROOTS, BLKNUMBER, CASPER, GASLIMIT, NULL_SENDER, ETHER, ECRECOVERACCT, BASICSENDER, RNGSEEDS, GENESIS_TIME, ENTER_EXIT_DELAY, BET_INCENTIVIZER, GAS_REMAINING, CREATOR, GAS_DEPOSIT +from utils import privtoaddr, normalize_address, zpad, encode_int, \ + big_endian_to_int, encode_int32, shardify, sha3, int_to_addr +import ecdsa_accounts +import abi +import sys +import guardian +from guardian import call_method, casper_ct, defaultBetStrategy, Bet, encode_prob +from mandatory_account_code import mandatory_account_ct, mandatory_account_evm, mandatory_account_code +import time +import network +import os +import bitcoin + +# Maybe add logging +# from ethereum.slogging import LogRecorder, configure_logging, set_level +# config_string = ':info,eth.vm.log:trace,eth.vm.op:trace,eth.vm.stack:trace,eth.vm.exit:trace' +# configure_logging(config_string=config_string) + +# Listener; prints out logs in json format +def my_listen(sender, topics, data): + jsondata = casper_ct.listen(sender, topics, data) + if jsondata and jsondata["_event_type"] in ('BlockLoss', 'StateLoss'): + if not bets[jsondata['index']].byzantine: + if jsondata['loss'] < 0: + if jsondata['odds'] < 10**7 and jsondata["_event_type"] == 'BlockLoss': + index = jsondata['index'] + height = jsondata['height'] + print 'bettor current probs', bets[index].probs[:height] + raise Exception("Odds waaaay too low! 
%r" % jsondata) + if jsondata['odds'] > 10**11: + index = jsondata['index'] + height = jsondata['height'] + print 'bettor stateroots:', bets[index].stateroots + print 'bettor opinion:', bets[index].opinions[index].stateroots + if len(bets[0].stateroots) < height: + print 'in bettor 0 stateroots:', repr(bets[0].stateroots[height]) + raise Exception("Odds waaaay too high! %r" % jsondata) + if jsondata and jsondata["_event_type"] == 'ExcessRewardEvent': + raise Exception("Excess reward event: %r" % jsondata) + ecdsa_accounts.constructor_ct.listen(sender, topics, data) + mandatory_account_ct.listen(sender, topics, data) + jsondata = ringsig_ct.listen(sender, topics, data) + +# Get command line parameters +def get_arg(flag, typ, default): + if flag in sys.argv: + return typ(sys.argv[sys.argv.index(flag) + 1]) + else: + return default + +MAX_NODES = get_arg('--maxnodes', int, 12) +assert MAX_NODES >= 5, "Need at least 5 max nodes" +CLOCKWRONG = get_arg('--clockwrong', int, 0) +CLOCKWRONG_CUMUL = CLOCKWRONG + 1 +BRAVE = get_arg('--brave', int, 0) +BRAVE_CUMUL = CLOCKWRONG_CUMUL + BRAVE +CRAZYBET = get_arg('--crazybet', int, 0) +CRAZYBET_CUMUL = BRAVE_CUMUL + CRAZYBET +DBL_BLK_SUICIDE = get_arg('--dblblk', int, 0) +DBL_BLK_SUICIDE_CUMUL = CRAZYBET_CUMUL + DBL_BLK_SUICIDE +DBL_BET_SUICIDE = get_arg('--dblbet', int, 0) +DBL_BET_SUICIDE_CUMUL = DBL_BLK_SUICIDE_CUMUL + DBL_BET_SUICIDE +assert 0 <= CLOCKWRONG_CUMUL <= BRAVE_CUMUL <= CRAZYBET_CUMUL <= DBL_BLK_SUICIDE_CUMUL <= DBL_BET_SUICIDE_CUMUL <= MAX_NODES, \ + "Negative numbers or too many nodes with special properties" + +print 'Running with %d maximum nodes: %d with wonky clocks, %d brave, %d crazy-betting, %d double-block suiciding, %d double-bet suiciding' % (MAX_NODES, CLOCKWRONG, BRAVE, CRAZYBET, DBL_BLK_SUICIDE, DBL_BET_SUICIDE) + +def mk_bet_strategy(state, index, key): + return defaultBetStrategy(state.clone(), key, + clockwrong=(1 <= index < CLOCKWRONG_CUMUL), + bravery=(0.997 if CLOCKWRONG_CUMUL <= index < BRAVE_CUMUL else 0.92), + crazy_bet=(BRAVE_CUMUL <= index < CRAZYBET_CUMUL), + double_block_suicide=(5 if CRAZYBET_CUMUL <= index < DBL_BLK_SUICIDE_CUMUL else 2**80), + double_bet_suicide=(1 if DBL_BLK_SUICIDE_CUMUL <= index < DBL_BET_SUICIDE_CUMUL else 2**80)) + +# Create the genesis +genesis = State('', EphemDB()) +initialize_with_gas_limit(genesis, 10**9) +gc = genesis.clone() +# Unleash the kraken....err, I mean casper +casper_file = os.path.join(os.path.split(__file__)[0], 'casper.se.py') +casper_hash_file = os.path.join(os.path.split(__file__)[0], '_casper.hash') +casper_evm_file = os.path.join(os.path.split(__file__)[0], '_casper.evm') +# Cache compilation of Casper to save time +try: + h = sha3(open(casper_file).read()).encode('hex') + assert h == open(casper_hash_file).read() + code = open(casper_evm_file).read() +except: + h = sha3(open(casper_file).read()).encode('hex') + code = serpent.compile(casper_file) + open(casper_evm_file, 'w').write(code) + open(casper_hash_file, 'w').write(h) +# Add Casper contract to blockchain +tx_state_transition(gc, Transaction(None, 4000000, data='', code=code)) +put_code(genesis, CASPER, get_code(gc, mk_contract_address(code=code))) +print 'Casper added' +casper_ct = abi.ContractTranslator(serpent.mk_full_signature(casper_file)) +# Ringsig file and ct +ringsig_file = os.path.join(os.path.split(__file__)[0], 'ringsig.se.py') +ringsig_code = serpent.compile(open(ringsig_file).read()) +ringsig_ct = abi.ContractTranslator(serpent.mk_full_signature(open(ringsig_file).read())) + +# Get the code for 
the basic ecrecover account +code2 = ecdsa_accounts.constructor_code +tx_state_transition(gc, Transaction(None, 1000000, data='', code=code2)) +put_code(genesis, ECRECOVERACCT, get_code(gc, mk_contract_address(code=code2))) +print 'ECRECOVER account added' + +# Get the code for the basic EC sender account +code2 = ecdsa_accounts.runner_code +tx_state_transition(gc, Transaction(None, 1000000, data='', code=code2)) +put_code(genesis, BASICSENDER, get_code(gc, mk_contract_address(code=code2))) +print 'Basic sender account added' + +# Generate the initial set of keys +keys = [zpad(encode_int(x+1), 32) for x in range(0, MAX_NODES - 2)] +# Create a second set of 4 keys +secondkeys = [zpad(encode_int(x+1), 32) for x in range(MAX_NODES - 2, MAX_NODES)] +# Initialize the first keys +for i, k in enumerate(keys): + # Generate the address + a = ecdsa_accounts.privtoaddr(k) + assert big_endian_to_int(genesis.get_storage(a, 2**256 - 1)) == 0 + # Give them 1600 ether + genesis.set_storage(ETHER, a, 1600 * 10**18) + # Make their validation code + vcode = ecdsa_accounts.mk_validation_code(k) + print 'Length of validation code:', len(vcode) + # Make the transaction to join as a Casper guardian + txdata = casper_ct.encode('join', [vcode]) + tx = ecdsa_accounts.mk_transaction(0, 25 * 10**9, 1000000, CASPER, 1500 * 10**18, txdata, k, True) + print 'Joining' + v = tx_state_transition(genesis, tx, listeners=[my_listen]) + index = casper_ct.decode('join', ''.join(map(chr, v)))[0] + print 'Joined with index', index + print 'Length of account code:', len(get_code(genesis, a)) + # Check that the EVM that each account must have at the end + # to get transactions included by default is there + assert mandatory_account_evm == get_code(genesis, a).rstrip('\x00') + # Check sequence number + assert big_endian_to_int(genesis.get_storage(a, 2**256 - 1)) == 1 + # Check that we actually joined Casper with the right + # validation code + vcode2 = call_method(genesis, CASPER, casper_ct, 'getGuardianValidationCode', [index]) + assert vcode2 == vcode + +# Give the secondary keys some ether as well +for i, k in enumerate(secondkeys): + # Generate the address + a = ecdsa_accounts.privtoaddr(k) + assert big_endian_to_int(genesis.get_storage(a, 2**256 - 1)) == 0 + # Give them 1600 ether + genesis.set_storage(ETHER, a, 1600 * 10**18) + +# Set the starting RNG seed to equal to the number of casper guardians +# in genesis +genesis.set_storage(RNGSEEDS, encode_int32(2**256 - 1), genesis.get_storage(CASPER, 0)) +# Set the genesis timestamp +genesis.set_storage(GENESIS_TIME, encode_int32(0), int(network.NetworkSimulator.start_time + 5)) +print 'genesis time', int(network.NetworkSimulator.start_time + 5), '\n' * 10 +# Create betting strategy objects for every guardian +bets = [mk_bet_strategy(genesis, i, k) for i, k in enumerate(keys)] +# Minimum max finalized height +min_mfh = -1 + +# Transactions to status report on +check_txs = [] + +# Function to check consistency between everything +def check_correctness(bets): + global min_mfh + print '#'*80 + # Max finalized heights for each bettor strategy + mfhs = [bet.max_finalized_height for bet in bets if not bet.byzantine] + mchs = [bet.calc_state_roots_from for bet in bets if not bet.byzantine] + mfchs = [min(bet.max_finalized_height, bet.calc_state_roots_from) for bet in bets if not bet.byzantine] + new_min_mfh = min(mfchs) + print 'Max finalized heights: %r' % [bet.max_finalized_height for bet in bets] + print 'Max calculated stateroots: %r' % [bet.calc_state_roots_from for bet in bets] + 
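+    # (new_min_mfh, computed above, is roughly the highest height that every
+    # non-byzantine guardian has both finalized and computed a state root for;
+    # the cross-checks below only go up to that height.)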
print 'Max height received: %r' % [len(bet.blocks) for bet in bets] + # Induction heights of each guardian + print 'Registered induction heights: %r' % [[op.induction_height for op in bet.opinions.values()] for bet in bets] + # Withdrawn? + print 'Withdrawn?: %r' % [(bet.withdrawn, bet.seq) for bet in bets] + # Probabilities + # print 'Probs: %r' % {i: [bet.probs[i] if i < len(bet.probs) else None for bet in bets] for i in range(new_min_mfh, max([len(bet.blocks) for bet in bets]))} + # Data about bets from each guardian according to every other guardian + print 'Now: %.2f' % n.now + print 'According to each guardian...' + for bet in bets: + print ('(%d) Bets received: %r, blocks received: %s. Last bet made: %.2f.' % (bet.index, [((str(op.seq) + ' (withdrawn)') if op.withdrawn else op.seq) for op in bet.opinions.values()], ''.join(['1' if b else '0' for b in bet.blocks]), bet.last_bet_made)) + print 'Probs (in 0-255 repr, from %d):' % (new_min_mfh + 1), map(lambda x: ord(encode_prob(x)), bet.probs[new_min_mfh + 1:]) + # Indices of guardians + print 'Indices: %r' % [bet.index for bet in bets] + # Number of blocks received by each guardian + print 'Blocks received: %r' % [len(bet.blocks) for bet in bets] + # Number of blocks received by each guardian + print 'Blocks missing: %r' % [[h for h in range(len(bet.blocks)) if not bet.blocks[h]] for bet in bets] + # Makes sure all block hashes for all heights up to the minimum finalized + # height are the same + print 'Verifying finalized block hash equivalence' + for j in range(1, len(bets)): + if not bets[j].byzantine and not bets[j-1].byzantine: + j_hashes = bets[j].finalized_hashes[:(new_min_mfh+1)] + jm1_hashes = bets[j-1].finalized_hashes[:(new_min_mfh+1)] + assert j_hashes == jm1_hashes, (j_hashes, jm1_hashes) + # Checks state roots for finalized heights and makes sure that they are + # consistent + print 'Verifying finalized state root correctness' + state = State(genesis.root if min_mfh < 0 else bets[0].stateroots[min_mfh], OverlayDB(bets[0].db)) + for b in bets: + if not b.byzantine: + for i in range(new_min_mfh): + assert b.stateroots[i] not in ('\x00' * 32, None) + print 'Executing blocks %d to %d' % (min_mfh + 1, max(min_mfh, new_min_mfh) + 1) + for i in range(min_mfh + 1, max(min_mfh, new_min_mfh) + 1): + assert state.root == bets[0].stateroots[i-1] if i > 0 else genesis.root + block = bets[j].objects[bets[0].finalized_hashes[i]] if bets[0].finalized_hashes[i] != '\x00' * 32 else None + block0 = bets[0].objects[bets[0].finalized_hashes[i]] if bets[0].finalized_hashes[i] != '\x00' * 32 else None + assert block0 == block + block_state_transition(state, block, listeners=[my_listen]) + if state.root != bets[0].stateroots[i] and i != max(min_mfh, new_min_mfh): + print bets[0].calc_state_roots_from, bets[j].calc_state_roots_from + print bets[0].max_finalized_height, bets[j].max_finalized_height + print 'my state', state.to_dict() + print 'given state', State(bets[0].stateroots[i], bets[0].db).to_dict() + import rlp + print 'block', repr(rlp.encode(block)) + sys.stderr.write('State root mismatch at block %d!\n' % i) + sys.stderr.write('state.root: %s\n' % state.root.encode('hex')) + sys.stderr.write('bet: %s\n' % bets[0].stateroots[i].encode('hex')) + raise Exception(" ") + min_mfh = new_min_mfh + print 'Min common finalized height: %d, integrity checks passed' % new_min_mfh + # Last and next blocks to propose by each guardian + print 'Last block created: %r' % [bet.last_block_produced for bet in bets] + print 'Next blocks to create: %r' % 
[bet.next_block_to_produce for bet in bets] + # Assert equivalence of proposer lists + min_proposer_length = min([len(bet.proposers) for bet in bets]) + for i in range(1, len(bets)): + assert bets[i].proposers[:min_proposer_length] == bets[0].proposers[:min_proposer_length] + # Guardian sequence numbers as seen by themselves + print 'Guardian seqs online: %r' % [bet.seq for bet in bets] + # Guardian sequence numbers as recorded in the chain + print 'Guardian seqs on finalized chain (%d): %r' % (new_min_mfh, [call_method(state, CASPER, casper_ct, 'getGuardianSeq', [bet.index if bet.index >= 0 else bet.former_index]) for bet in bets]) + h = 0 + while h < len(bets[3].stateroots) and bets[3].stateroots[h] not in (None, '\x00' * 32): + h += 1 + speculative_state = State(bets[3].stateroots[h-1] if h else genesis.root, OverlayDB(bets[3].db)) + print 'Guardian seqs on speculative chain (%d): %r' % (h-1, [call_method(speculative_state, CASPER, casper_ct, 'getGuardianSeq', [bet.index if bet.index >= 0 else bet.former_index]) for bet in bets]) + # Guardian deposit sizes (over 1500 * 10**18 means profit) + print 'Guardian deposit sizes: %r' % [call_method(state, CASPER, casper_ct, 'getGuardianDeposit', [bet.index]) for bet in bets if bet.index >= 0] + print 'Estimated guardian excess gains: %r' % [call_method(state, CASPER, casper_ct, 'getGuardianDeposit', [bet.index]) - 1500 * 10**18 + 47 / 10**9. * 1500 * 10**18 * min_mfh for bet in bets if bet.index >= 0] + for bet in bets: + if bet.index >= 0 and big_endian_to_int(state.get_storage(BLKNUMBER, '\x00' * 32)) >= bet.induction_height: + assert (call_method(state, CASPER, casper_ct, 'getGuardianDeposit', [bet.index]) >= 1499 * 10**18) or bet.byzantine, (bet.double_bet_suicide, bet.byzantine) + # Account signing nonces + print 'Account signing nonces: %r' % [big_endian_to_int(state.get_storage(bet.addr, encode_int32(2**256 - 1))) for bet in bets] + # Transaction status + print 'Transaction status in unconfirmed_txindex: %r' % [bets[0].unconfirmed_txindex.get(tx.hash, None) for tx in check_txs] + print 'Transaction status in finalized_txindex: %r' % [bets[0].finalized_txindex.get(tx.hash, None) for tx in check_txs] + print 'Transaction exceptions: %r' % [bets[0].tx_exceptions.get(tx.hash, 0) for tx in check_txs] + +# Simulate a network +n = network.NetworkSimulator(latency=4, agents=bets, broadcast_success_rate=0.9) +n.generate_peers(5) +for _bet in bets: + _bet.network = n + +# Submitting ring sig contract as a transaction +print 'Submitting ring sig contract\n\n' +ringsig_addr = mk_contract_address(sender=bets[0].addr, code=ringsig_code) +print 'Ringsig address', ringsig_addr.encode('hex') +tx3 = ecdsa_accounts.mk_transaction(1, 25 * 10**9, 2000000, CREATOR, 0, ringsig_code, bets[0].key) +bets[0].add_transaction(tx3) +check_txs.extend([tx3]) +ringsig_account_code = serpent.compile((""" +def init(): + sstore(0, %d) + sstore(1, %d) +""" % (big_endian_to_int(ringsig_addr), big_endian_to_int(ringsig_addr))) + '\n' + mandatory_account_code) +ringsig_account_addr = mk_contract_address(sender=bets[0].addr, code=ringsig_account_code) +tx4 = ecdsa_accounts.mk_transaction(2, 25 * 10**9, 2000000, CREATOR, 0, ringsig_account_code, bets[0].key) +bets[0].add_transaction(tx4) +check_txs.extend([tx4]) +print 'Ringsig account address', ringsig_account_addr.encode('hex') +# Keep running until the min finalized height reaches 20 +while 1: + n.run(25, sleep=0.25) + check_correctness(bets) + if min_mfh >= 36: + print 'Reached breakpoint' + break + print 'Min mfh:', min_mfh 
+ print 'Peer lists:', [[p.id for p in n.peers[bet.id]] for bet in bets] + +recent_state = State(bets[0].stateroots[min_mfh], bets[0].db) +assert get_code(recent_state, ringsig_addr) +assert get_code(recent_state, ringsig_account_addr) +print 'Length of ringsig contract: %d' % len(get_code(recent_state, ringsig_addr)) + +# Create transactions for a few new guardians to join +print '#' * 80 + '\n' + '#' * 80 +print 'Generating transactions to include new guardians' +for i, k in enumerate(secondkeys): + index = len(keys) + i + # Make their validation code + vcode = ecdsa_accounts.mk_validation_code(k) + # Make the transaction to join as a Casper guardian + txdata = casper_ct.encode('join', [vcode]) + tx = ecdsa_accounts.mk_transaction(0, 25 * 10**9, 1000000, CASPER, 1500 * 10**18, txdata, k, create=True) + print 'Making transaction: ', tx.hash.encode('hex') + bets[0].add_transaction(tx) + check_txs.extend([tx]) + +THRESHOLD1 = 115 + 10 * (CLOCKWRONG + CRAZYBET + BRAVE) +THRESHOLD2 = THRESHOLD1 + ENTER_EXIT_DELAY + +orig_ring_pubs = [] +# Publish submits to ringsig contract +print 'Sending to ringsig contract\n\n' +for bet in bets[1:6]: + x, y = bitcoin.privtopub(bitcoin.decode_privkey(bet.key)) + orig_ring_pubs.append((x, y)) + data = ringsig_ct.encode('submit', [x, y]) + tx = ecdsa_accounts.mk_transaction(1, 25 * 10**9, 750000, ringsig_account_addr, 10**17, data, bet.key) + assert bet.should_i_include_transaction(tx) + bet.add_transaction(tx, True) + check_txs.extend([tx]) +# Keep running until the min finalized height reaches 75. We expect that by +# this time all transactions from the previous phase have been included +while 1: + n.run(25, sleep=0.25) + check_correctness(bets) + if min_mfh > THRESHOLD1: + print 'Reached breakpoint' + break + print 'Min mfh:', min_mfh + + +recent_state = State(bets[0].stateroots[min_mfh], bets[0].db) +next_index = call_method(recent_state, ringsig_account_addr, ringsig_ct, 'getNextIndex', []) +assert next_index == 5, ("Next index: %d, should be 5" % next_index) +ring_pub_data = call_method(recent_state, ringsig_account_addr, ringsig_ct, 'getPubs', [0]) +ring_pubs = [(ring_pub_data[i] % 2**256, ring_pub_data[i+1] % 2**256) for i in range(0, len(ring_pub_data), 2)] +print sorted(ring_pubs), sorted(orig_ring_pubs) +assert sorted(ring_pubs) == sorted(orig_ring_pubs) +print 'Submitted public keys:', ring_pubs + +# Create ringsig withdrawal transactions +for i, bet in enumerate(bets[1:6]): + x, y = bitcoin.privtopub(bitcoin.decode_privkey(bet.key)) + target_addr = 2000 + i + x0, s, Ix, Iy = ringsig_tester.ringsig_sign_substitute(encode_int32(target_addr), bitcoin.decode_privkey(bet.key), ring_pubs) + print 'Verifying ring signature using python code' + assert ringsig_tester.ringsig_verify_substitute(encode_int32(target_addr), x0, s, Ix, Iy, ring_pubs) + data = ringsig_ct.encode('withdraw', [int_to_addr(target_addr), x0, s, Ix, Iy, 0]) + tx = Transaction(ringsig_account_addr, 1000000, data=data, code=b'') + print 'Verifying tx includability' + assert bet.should_i_include_transaction(tx) + bet.add_transaction(tx) + check_txs.extend([tx]) + +# Create bet objects for the new guardians +state = State(genesis.root, bets[0].db) +secondbets = [mk_bet_strategy(state, len(bets) + i, k) for i, k in enumerate(secondkeys)] +for bet in secondbets: + bet.network = n +n.agents.extend(secondbets) +n.generate_peers(5) +print 'Increasing number of peers in the network to %d!' 
% MAX_NODES +recent_state = State(bets[0].stateroots[min_mfh], bets[0].db) +# Check that all signups are successful +signups = call_method(recent_state, CASPER, casper_ct, 'getGuardianSignups', []) +print 'Guardians signed up: %d' % signups +assert signups == MAX_NODES +print 'All new guardians inducted' +print 'Induction heights: %r' % [call_method(recent_state, CASPER, casper_ct, 'getGuardianInductionHeight', [i]) for i in range(len(keys + secondkeys))] + +# Keep running until the min finalized height reaches ~175. We expect that by +# this time all guardians will be actively betting off of each other's bets +while 1: + n.run(25, sleep=0.25) + check_correctness(bets) + print 'Min mfh:', min_mfh + print 'Induction heights: %r' % [call_method(recent_state, CASPER, casper_ct, 'getGuardianInductionHeight', [i]) for i in range(len(keys + secondkeys))] + if min_mfh > THRESHOLD2: + print 'Reached breakpoint' + break + +# Create transactions for old guardians to leave +print '#' * 80 + '\n' + '#' * 80 +print 'Generating transactions to withdraw some guardians' +for bet in bets[:3]: + bet.withdraw() + +BLK_DISTANCE = len(bet.blocks) - min_mfh + + +# Keep running until the min finalized height reaches ~290. +while 1: + n.run(25, sleep=0.25) + check_correctness(bets) + print 'Min mfh:', min_mfh + print 'Withdrawal heights: %r' % [call_method(recent_state, CASPER, casper_ct, 'getGuardianWithdrawalHeight', [i]) for i in range(len(keys + secondkeys))] + if min_mfh > 200 + BLK_DISTANCE + ENTER_EXIT_DELAY: + print 'Reached breakpoint' + break + # Exit early if the withdrawal step already completed + recent_state = bets[0].get_finalized_state() + if len([i for i in range(50) if call_method(recent_state, CASPER, casper_ct, 'getGuardianStatus', [i]) == 2]) == MAX_NODES - 3: + break + +recent_state = bets[0].get_optimistic_state() +# Check that the only remaining active guardians are the ones that have not +# yet signed out. 
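+# (Status code 2 appears to denote an actively-betting guardian -- see the
+# early-exit check in the previous loop -- and the 3 excluded entries are
+# bets[:3], which submitted withdraw() requests above.)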
+print 'Guardian statuses: %r' % [call_method(recent_state, CASPER, casper_ct, 'getGuardianStatus', [i]) for i in range(MAX_NODES)] +assert len([i for i in range(50) if call_method(recent_state, CASPER, casper_ct, 'getGuardianStatus', [i]) == 2]) == MAX_NODES - 3 diff --git a/ethereum/test_scoring_rule.py b/ethereum/test_scoring_rule.py new file mode 100644 index 000000000..23edddb39 --- /dev/null +++ b/ethereum/test_scoring_rule.py @@ -0,0 +1,39 @@ +INCENTIVIZATION_EMA_COEFF = 300 +VALIDATOR_ROUNDS = 6 +BLKTIME = 7.5 +LOWCAP, HIGHCAP = 0, 255 +DESIRED_ANNUAL_MAXRETURN = 0.1 + +def logoddsToOdds(logodds): + return 2**(logodds // 4) * (4 + (logodds) % 4) * 99 / 1700 + +MAXODDS = logoddsToOdds(HIGHCAP) / 10**9 + +def scoreCorrect(logodds, odds): + return (max(logodds - 128, 0) * MAXODDS / 128 * 10**9 + odds) / 10000 + +def scoreIncorrect(odds): + return (0 - max(odds - 10**9, 0) * MAXODDS / 128 * 10 / 7 * 4 - odds * odds / 2 / 10**9) / 10000 + +s = [0] * 256 +f = [0] * 256 +for i in range(LOWCAP, HIGHCAP + 1): + s[i] = scoreCorrect(i, logoddsToOdds(i)) + f[i] = scoreIncorrect(logoddsToOdds(i)) + if i > 12: + rat = (f[i] - f[i-1]) * 1.0 / (s[i] - s[i-1] + 0.0000000001) + print 'Logodds', i, 'sdiff', s[i], 'fdiff', f[i], 'odds', logoddsToOdds(i) * 10**-9, 'ratio', rat + + +def annualPercent(i): + return (1 + i)**(31556926./BLKTIME)*100-100 + +print 'maxodds', MAXODDS +maxdivisor = -scoreIncorrect(logoddsToOdds(HIGHCAP)) * VALIDATOR_ROUNDS / INCENTIVIZATION_EMA_COEFF +print 'maxdivisor', maxdivisor +interest_per_block = (scoreCorrect(HIGHCAP, logoddsToOdds(HIGHCAP)) * 1.0 / maxdivisor) +print 'earnings per block: %.2f ppb, %.2f%% annualized, for 1500 eth: %d' % (interest_per_block*10**9, annualPercent(interest_per_block), interest_per_block*1500*10**18) +interest_per_block_cum = interest_per_block * 2 +print 'earnings per block incl stateroots: %.2f ppb, %.2f%% annualized, for 1500 eth: %d' % (interest_per_block_cum*10**9, annualPercent(interest_per_block_cum), interest_per_block_cum*1500*10**18) +demurrage = interest_per_block_cum - (1.1**(BLKTIME/31556926.) - 1) +print 'recommended demurrage ppb: %.2f, %.2f%% annualized' % (demurrage * 10**9, annualPercent(demurrage)) diff --git a/ethereum/transactions.py b/ethereum/transactions.py deleted file mode 100644 index bde421065..000000000 --- a/ethereum/transactions.py +++ /dev/null @@ -1,165 +0,0 @@ -from bitcoin import encode_pubkey, N, P -try: - from c_secp256k1 import ecdsa_raw_sign, ecdsa_raw_recover -except ImportError: - from bitcoin import ecdsa_raw_sign, ecdsa_raw_recover -import rlp -from rlp.sedes import big_endian_int, binary -from rlp.utils import decode_hex, encode_hex -from ethereum import bloom -from ethereum import utils -from ethereum.processblock import mk_contract_address, intrinsic_gas_used -from ethereum.utils import TT256 -from ethereum.exceptions import InvalidTransaction -from ethereum.slogging import get_logger -log = get_logger('eth.chain.tx') - -# in the yellow paper it is specified that s should be smaller than secpk1n (eq.205) -secpk1n = 115792089237316195423570985008687907852837564279074904382605163141518161494337 - - -class Transaction(rlp.Serializable): - - """ - A transaction is stored as: - [nonce, gasprice, startgas, to, value, data, v, r, s] - - nonce is the number of transactions already sent by that account, encoded - in binary form (eg. 0 -> '', 7 -> '\x07', 1000 -> '\x03\xd8'). 
- - (v,r,s) is the raw Electrum-style signature of the transaction without the - signature made with the private key corresponding to the sending account, - with 0 <= v <= 3. From an Electrum-style signature (65 bytes) it is - possible to extract the public key, and thereby the address, directly. - - A valid transaction is one where: - (i) the signature is well-formed (ie. 0 <= v <= 3, 0 <= r < P, 0 <= s < N, - 0 <= r < P - N if v >= 2), and - (ii) the sending account has enough funds to pay the fee and the value. - """ - - fields = [ - ('nonce', big_endian_int), - ('gasprice', big_endian_int), - ('startgas', big_endian_int), - ('to', utils.address), - ('value', big_endian_int), - ('data', binary), - ('v', big_endian_int), - ('r', big_endian_int), - ('s', big_endian_int), - ] - - _sender = None - - def __init__(self, nonce, gasprice, startgas, to, value, data, v=0, r=0, s=0): - to = utils.normalize_address(to, allow_blank=True) - assert len(to) == 20 or len(to) == 0 - super(Transaction, self).__init__(nonce, gasprice, startgas, to, value, data, v, r, s) - self.logs = [] - - if self.gasprice >= TT256 or self.startgas >= TT256 or \ - self.value >= TT256 or self.nonce >= TT256: - raise InvalidTransaction("Values way too high!") - if self.startgas < intrinsic_gas_used(self): - raise InvalidTransaction("Startgas too low") - - log.debug('deserialized tx', tx=encode_hex(self.hash)[:8]) - - @property - def sender(self): - - if not self._sender: - # Determine sender - if self.v: - if self.r >= N or self.s >= N or self.v < 27 or self.v > 28 \ - or self.r == 0 or self.s == 0: - raise InvalidTransaction("Invalid signature values!") - log.debug('recovering sender') - rlpdata = rlp.encode(self, UnsignedTransaction) - rawhash = utils.sha3(rlpdata) - pub = ecdsa_raw_recover(rawhash, (self.v, self.r, self.s)) - if pub is False: - raise InvalidTransaction("Invalid signature values (x^3+7 is non-residue)") - if pub == (0, 0): - raise InvalidTransaction("Invalid signature (zero privkey cannot sign)") - pub = encode_pubkey(pub, 'bin') - self._sender = utils.sha3(pub[1:])[-20:] - assert self.sender == self._sender - else: - self._sender = 0 - return self._sender - - @sender.setter - def sender(self, value): - self._sender = value - - def sign(self, key): - """Sign this transaction with a private key. - - A potentially already existing signature would be overridden. 
- """ - if key in (0, '', '\x00' * 32): - raise InvalidTransaction("Zero privkey cannot sign") - rawhash = utils.sha3(rlp.encode(self, UnsignedTransaction)) - self.v, self.r, self.s = ecdsa_raw_sign(rawhash, key) - self.sender = utils.privtoaddr(key) - return self - - @property - def hash(self): - return utils.sha3(rlp.encode(self)) - - def log_bloom(self): - "returns int" - bloomables = [x.bloomables() for x in self.logs] - return bloom.bloom_from_list(utils.flatten(bloomables)) - - def log_bloom_b64(self): - return bloom.b64(self.log_bloom()) - - def to_dict(self): - # TODO: previous version used printers - d = {} - for name, _ in self.__class__.fields: - d[name] = getattr(self, name) - d['sender'] = self.sender - d['hash'] = encode_hex(self.hash) - return d - - def log_dict(self): - d = self.to_dict() - d['sender'] = encode_hex(d['sender'] or '') - d['to'] = encode_hex(d['to']) - return d - - @property - def creates(self): - "returns the address of a contract created by this tx" - if self.to == '': - return mk_contract_address(self.sender, self.nonce) - - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.hash == other.hash - - def __hash__(self): - return utils.big_endian_to_int(self.hash) - - def __ne__(self, other): - return not self.__eq__(other) - - def __repr__(self): - return '' % encode_hex(self.hash)[:4] - - def __structlog__(self): - return encode_hex(self.hash) - - -UnsignedTransaction = Transaction.exclude(['v', 'r', 's']) - - -def contract(nonce, gasprice, startgas, endowment, code, v=0, r=0, s=0): - """A contract is a special transaction without the `to` argument.""" - tx = Transaction(nonce, gasprice, startgas, '', endowment, code, v, r, s) - return tx diff --git a/ethereum/utils.py b/ethereum/utils.py index bbe85571b..2e2e41fa3 100644 --- a/ethereum/utils.py +++ b/ethereum/utils.py @@ -9,15 +9,23 @@ from rlp.sedes import big_endian_int, BigEndianInt, Binary from rlp.utils import decode_hex, encode_hex, ascii_chr, str_to_bytes import random +import copy big_endian_to_int = lambda x: big_endian_int.deserialize(str_to_bytes(x).lstrip(b'\x00')) int_to_big_endian = lambda x: big_endian_int.serialize(x) + TT256 = 2 ** 256 TT256M1 = 2 ** 256 - 1 TT255 = 2 ** 255 +# Number of shards +MAXSHARDS = 65536 +SHARD_BYTES = len(int_to_big_endian(MAXSHARDS - 1)) +ADDR_BYTES = 20 + SHARD_BYTES +ADDR_BASE_BYTES = 20 + if sys.version_info.major == 2: is_numeric = lambda x: isinstance(x, (int, long)) is_string = lambda x: isinstance(x, (str, unicode)) @@ -113,32 +121,15 @@ def privtoaddr(x, extended=False): return add_checksum(o) if extended else o -def add_checksum(x): - if len(x) in (40, 48): - x = decode_hex(x) - if len(x) == 24: - return x - return x + sha3(x)[:4] - - -def check_and_strip_checksum(x): - if len(x) in (40, 48): - x = decode_hex(x) - assert len(x) == 24 and sha3(x[:20])[:4] == x[-4:] - return x[:20] - - def normalize_address(x, allow_blank=False): if allow_blank and x == '': return '' - if len(x) in (42, 50) and x[:2] == '0x': + if x[:2] == '0x': x = x[2:] - if len(x) in (40, 48): + if len(x) == 2 * ADDR_BYTES: x = decode_hex(x) - if len(x) == 24: - assert len(x) == 24 and sha3(x[:20])[:4] == x[-4:] - x = x[:20] - if len(x) != 20: + if len(x) != ADDR_BYTES: + print x.encode('hex') raise Exception("Invalid address format!") return x @@ -155,35 +146,35 @@ def zunpad(x): def int_to_addr(x): - o = [''] * 20 - for i in range(20): - o[19 - i] = ascii_chr(x & 0xff) + o = [''] * ADDR_BYTES + for i in range(ADDR_BYTES): + o[ADDR_BYTES - 1 - i] = 
ascii_chr(x & 0xff) x >>= 8 return b''.join(o) def coerce_addr_to_bin(x): if is_numeric(x): - return encode_hex(zpad(big_endian_int.serialize(x), 20)) - elif len(x) == 40 or len(x) == 0: + return encode_hex(zpad(big_endian_int.serialize(x), ADDR_BYTES)) + elif len(x) == ADDR_BYTES * 2 or len(x) == 0: return decode_hex(x) else: - return zpad(x, 20)[-20:] + return zpad(x, ADDR_BYTES)[-ADDR_BYTES:] def coerce_addr_to_hex(x): if is_numeric(x): - return encode_hex(zpad(big_endian_int.serialize(x), 20)) - elif len(x) == 40 or len(x) == 0: + return encode_hex(zpad(big_endian_int.serialize(x), ADDR_BYTES)) + elif len(x) == ADDR_BYTES * 2 or len(x) == 0: return x else: - return encode_hex(zpad(x, 20)[-20:]) + return encode_hex(zpad(x, ADDR_BYTES)[-ADDR_BYTES:]) def coerce_to_int(x): if is_numeric(x): return x - elif len(x) == 40: + elif len(x) == ADDR_BYTES * 2: return big_endian_to_int(decode_hex(x)) else: return big_endian_to_int(x) @@ -192,7 +183,7 @@ def coerce_to_int(x): def coerce_to_bytes(x): if is_numeric(x): return big_endian_int.serialize(x) - elif len(x) == 40: + elif len(x) == ADDR_BYTES * 2: return decode_hex(x) else: return x @@ -233,8 +224,8 @@ def decode_bin(v): def decode_addr(v): '''decodes an address from serialization''' - if len(v) not in [0, 20]: - raise Exception("Serialized addresses must be empty or 20 bytes long!") + if len(v) not in [0, ADDR_BYTES]: + raise Exception("Serialized addresses must be empty or %d bytes long!" % ADDR_BYTES) return encode_hex(v) @@ -270,6 +261,10 @@ def encode_int256(v): return zpad(int_to_big_endian(v), 256) +def encode_int32(v): + return zpad(int_to_big_endian(v), 32) + + def scan_bin(v): if v[:2] in ('0x', b'0x'): return decode_hex(v[2:]) @@ -386,6 +381,17 @@ def dump_state(trie): res += '%r:%r\n' % (encode_hex(k), encode_hex(v)) return res +def shardify(address, shard): + assert len(address) in (ADDR_BYTES - SHARD_BYTES, ADDR_BYTES) + return encode_int32(shard)[-SHARD_BYTES:] + address[-ADDR_BASE_BYTES:] + +def get_shard(address): + assert len(address) == ADDR_BYTES + return big_endian_to_int(address[:SHARD_BYTES]) + +def match_shard(addr, shard_source): + return shard_source[:SHARD_BYTES] + addr[SHARD_BYTES:] + class Denoms(): @@ -402,13 +408,22 @@ def __init__(self): denoms = Denoms() -address = Binary.fixed_length(20, allow_empty=True) +address = Binary.fixed_length(ADDR_BYTES, allow_empty=True) int20 = BigEndianInt(20) int32 = BigEndianInt(32) int256 = BigEndianInt(256) hash32 = Binary.fixed_length(32) trie_root = Binary.fixed_length(32, allow_empty=True) +# Cached RLP decoding +rlp_dict = {} + +def rlp_decode(*args): + cache_key = str(args) + if cache_key not in rlp_dict: + rlp_dict[cache_key] = rlp.decode(*args) + return copy.deepcopy(rlp_dict[cache_key]) + class bcolors: HEADER = '\033[95m' @@ -420,8 +435,23 @@ class bcolors: BOLD = '\033[1m' UNDERLINE = '\033[4m' +import slogging def DEBUG(msg, *args, **kwargs): - from ethereum import slogging - - slogging.DEBUG(msg, *args, **kwargs) + o = msg + ' ' + for k, v in kwargs.items(): + o += '%s=%s, ' % (str(k), str(v)) + print o[:-2] + # slogging.DEBUG(msg, *args, **kwargs) + + +# Determines the contract address for a piece of code and a given creator +# address (contracts created from outside get creator '\x00' * 20) +def mk_contract_address(sender='\x00'*ADDR_BASE_BYTES, left_bound=0, code=''): + return shardify(sha3(sender + code)[32-ADDR_BASE_BYTES:], left_bound) + +# Helper for making an ID +next_id = [4000] +def mkid(): + next_id[0] += 1 + return next_id[0] - 1 diff --git 
a/ethereum/vm.py b/ethereum/vm.py index dfcaf3615..78fc1a2a2 100644 --- a/ethereum/vm.py +++ b/ethereum/vm.py @@ -5,10 +5,10 @@ # ###################################### import sys -from ethereum import utils +import utils from ethereum.abi import is_numeric import copy -from ethereum import opcodes +import opcodes import time from ethereum.slogging import get_logger from rlp.utils import encode_hex, ascii_chr @@ -25,6 +25,8 @@ TT256M1 = 2 ** 256 - 1 TT255 = 2 ** 255 +from config import BLOCKHASHES, STATEROOTS, BLKNUMBER, CASPER, GAS_CONSUMED, GASLIMIT, NULL_SENDER, ETHER, PROPOSER, TXGAS + class CallData(object): @@ -100,6 +102,7 @@ def preprocess_code(code): if i < len(code): ops.append(['INVALID', 0, 0, 0, byte, 0]) i += 1 + import serpent return ops @@ -330,10 +333,8 @@ def vm_execute(ext, msg, code): elif op == 'ADDRESS': stk.append(utils.coerce_to_int(msg.to)) elif op == 'BALANCE': - addr = utils.coerce_addr_to_hex(stk.pop() % 2**160) - stk.append(ext.get_balance(addr)) - elif op == 'ORIGIN': - stk.append(utils.coerce_to_int(ext.tx_origin)) + addr = utils.int_to_addr(stk.pop() % 2**160) + stk.append(ext.get_storage_data(ETHER, addr)) elif op == 'CALLER': stk.append(utils.coerce_to_int(msg.sender)) elif op == 'CALLVALUE': @@ -362,15 +363,13 @@ def vm_execute(ext, msg, code): mem[start + i] = processed_code[s1 + i][4] else: mem[start + i] = 0 - elif op == 'GASPRICE': - stk.append(ext.tx_gasprice) elif op == 'EXTCODESIZE': addr = utils.coerce_addr_to_hex(stk.pop() % 2**160) - stk.append(len(ext.get_code(addr) or b'')) + stk.append(len(ext.get_storage_at(addr, b'') or b'')) elif op == 'EXTCODECOPY': addr = utils.coerce_addr_to_hex(stk.pop() % 2**160) start, s2, size = stk.pop(), stk.pop(), stk.pop() - extcode = ext.get_code(addr) or b'' + extcode = ext.get_storage_at(addr, b'') or b'' assert utils.is_string(extcode) if not mem_extend(mem, compustate, op, start, size): return vm_exception('OOG EXTENDING MEMORY') @@ -381,19 +380,28 @@ def vm_execute(ext, msg, code): mem[start + i] = utils.safe_ord(extcode[s2 + i]) else: mem[start + i] = 0 + elif op == 'MCOPY': + to, frm, size = stk.pop(), stk.pop(), stk.pop() + if not mem_extend(mem, compustate, op, to, size): + return vm_exception('OOG EXTENDING MEMORY') + if not mem_extend(mem, compustate, op, frm, size): + return vm_exception('OOG EXTENDING MEMORY') + if not data_copy(compustate, size): + return vm_exception('OOG COPY DATA') + data = mem[frm: frm + size] + for i in range(size): + mem[to + i] = data[i] elif opcode < 0x50: - if op == 'BLOCKHASH': - stk.append(utils.big_endian_to_int(ext.block_hash(stk.pop()))) - elif op == 'COINBASE': - stk.append(utils.big_endian_to_int(ext.block_coinbase)) - elif op == 'TIMESTAMP': - stk.append(ext.block_timestamp) + if op == 'COINBASE': + stk.append(utils.big_endian_to_int(ext.get_storage(PROPOSER, '\x00' * 32))) + elif op == 'BLOCKHASH': + stk.append(utils.big_endian_to_int(ext.get_storage(BLOCKHASHES, stk.pop()))) elif op == 'NUMBER': - stk.append(ext.block_number) + stk.append(utils.big_endian_to_int(ext.get_storage(BLKNUMBER, '\x00' * 32))) elif op == 'DIFFICULTY': stk.append(ext.block_difficulty) elif op == 'GASLIMIT': - stk.append(ext.block_gas_limit) + stk.append(GASLIMIT) elif opcode < 0x60: if op == 'POP': stk.pop() @@ -417,20 +425,21 @@ def vm_execute(ext, msg, code): return vm_exception('OOG EXTENDING MEMORY') mem[s0] = s1 % 256 elif op == 'SLOAD': - stk.append(ext.get_storage_data(msg.to, stk.pop())) + stk.append(utils.big_endian_to_int(ext.get_storage(msg.to, stk.pop())[:32])) elif op == 
'SSTORE': s0, s1 = stk.pop(), stk.pop() - if ext.get_storage_data(msg.to, s0): + if ext.get_storage(msg.to, s0): gascost = opcodes.GSTORAGEMOD if s1 else opcodes.GSTORAGEKILL refund = 0 if s1 else opcodes.GSTORAGEREFUND else: gascost = opcodes.GSTORAGEADD if s1 else opcodes.GSTORAGEMOD refund = 0 + if msg.to == CASPER: + gascost /= 2 if compustate.gas < gascost: return vm_exception('OUT OF GAS') compustate.gas -= gascost - ext.add_refund(refund) # adds neg gascost as a refund if below zero - ext.set_storage_data(msg.to, s0, s1) + ext.set_storage(msg.to, s0, s1) elif op == 'JUMP': compustate.pc = stk.pop() opnew = processed_code[compustate.pc][0] if \ @@ -463,31 +472,11 @@ def vm_execute(ext, msg, code): temp = stk[-depth - 1] stk[-depth - 1] = stk[-1] stk[-1] = temp - elif op[:3] == 'LOG': - """ - 0xa0 ... 0xa4, 32/64/96/128/160 + len(data) gas - a. Opcodes LOG0...LOG4 are added, takes 2-6 stack arguments - MEMSTART MEMSZ (TOPIC1) (TOPIC2) (TOPIC3) (TOPIC4) - b. Logs are kept track of during tx execution exactly the same way as suicides - (except as an ordered list, not a set). - Each log is in the form [address, [topic1, ... ], data] where: - * address is what the ADDRESS opcode would output - * data is mem[MEMSTART: MEMSTART + MEMSZ] - * topics are as provided by the opcode - c. The ordered list of logs in the transaction are expressed as [log0, log1, ..., logN]. - """ depth = int(op[3:]) mstart, msz = stk.pop(), stk.pop() topics = [stk.pop() for x in range(depth)] - compustate.gas -= msz * opcodes.GLOGBYTE - if not mem_extend(mem, compustate, op, mstart, msz): - return vm_exception('OOG EXTENDING MEMORY') - data = b''.join(map(ascii_chr, mem[mstart: mstart + msz])) - ext.log(msg.to, topics, data) - log_log.trace('LOG', to=msg.to, topics=topics, data=list(map(utils.safe_ord, data))) - # print('LOG', msg.to, topics, list(map(ord, data))) - + print '###log###', mstart, msz, topics elif op == 'CREATE': value, mstart, msz = stk.pop(), stk.pop(), stk.pop() if not mem_extend(mem, compustate, op, mstart, msz): @@ -507,26 +496,29 @@ def vm_execute(ext, msg, code): elif op == 'CALL': gas, to, value, meminstart, meminsz, memoutstart, memoutsz = \ stk.pop(), stk.pop(), stk.pop(), stk.pop(), stk.pop(), stk.pop(), stk.pop() - if not mem_extend(mem, compustate, op, meminstart, meminsz) or \ - not mem_extend(mem, compustate, op, memoutstart, memoutsz): + if not mem_extend(mem, compustate, op, meminstart, meminsz): return vm_exception('OOG EXTENDING MEMORY') to = utils.encode_int(to) to = ((b'\x00' * (32 - len(to))) + to)[12:] - extra_gas = (not ext.account_exists(to)) * opcodes.GCALLNEWACCOUNT + \ - (value > 0) * opcodes.GCALLVALUETRANSFER + extra_gas = (value > 0) * opcodes.GCALLVALUETRANSFER submsg_gas = gas + opcodes.GSTIPEND * (value > 0) if compustate.gas < gas + extra_gas: return vm_exception('OUT OF GAS', needed=gas+extra_gas) - if ext.get_balance(msg.to) >= value and msg.depth < 1024: + if ext.get_storage(ETHER, msg.to) >= value and msg.depth < 1024: compustate.gas -= (gas + extra_gas) cd = CallData(mem, meminstart, meminsz) call_msg = Message(msg.to, to, value, submsg_gas, cd, - msg.depth + 1, code_address=to) - result, gas, data = ext.msg(call_msg) + msg.depth + 1) + # print 'CALLING', to.encode('hex'), map(ord, cd.extract_all()) + assert isinstance(ext.get_storage(to, ''), (str, bytes)), ext.get_storage(to, '') + result, gas, data = ext.msg(call_msg, ext.get_storage(to, '')) + # print 'CALLRESULT', result, data if result == 0: stk.append(0) else: stk.append(1) + if not mem_extend(mem, 
compustate, op, memoutstart, min(len(data), memoutsz)): + return vm_exception('OOG EXTENDING MEMORY') compustate.gas += gas for i in range(min(len(data), memoutsz)): mem[memoutstart + i] = data[i] @@ -548,15 +540,15 @@ def vm_execute(ext, msg, code): to = utils.encode_int(to) to = ((b'\x00' * (32 - len(to))) + to)[12:] cd = CallData(mem, meminstart, meminsz) - if ext.post_homestead_hardfork() and op == 'DELEGATECALL': + if op == 'DELEGATECALL': call_msg = Message(msg.sender, msg.to, msg.value, submsg_gas, cd, - msg.depth + 1, code_address=to) + msg.depth + 1) elif op == 'DELEGATECALL': return vm_exception('OPCODE INACTIVE') else: call_msg = Message(msg.to, msg.to, value, submsg_gas, cd, - msg.depth + 1, code_address=to) - result, gas, data = ext.msg(call_msg) + msg.depth + 1) + result, gas, data = ext.msg(call_msg, ext.get_storage(msg.to, '')) if result == 0: stk.append(0) else: @@ -567,17 +559,57 @@ def vm_execute(ext, msg, code): else: compustate.gas -= (gas + extra_gas - submsg_gas) stk.append(0) + elif op == 'CALLSTATIC': + submsg_gas, codestart, codesz, datastart, datasz, outstart, outsz = [stk.pop() for i in range(7)] + if not mem_extend(mem, compustate, op, codestart, codesz) or \ + not mem_extend(mem, compustate, op, datastart, datasz): + return vm_exception('OOG EXTENDING MEMORY') + if compustate.gas < submsg_gas: + return vm_exception('OUT OF GAS', needed=submsg_gas) + compustate.gas -= submsg_gas + cd = CallData(mem, datastart, datasz) + call_msg = Message(msg.sender, msg.to, 0, submsg_gas, cd, msg.depth + 1) + result, gas, data = ext.static_msg(call_msg, ''.join([chr(x) for x in mem[codestart:codestart + codesz]])) + if result == 0: + stk.append(0) + else: + stk.append(1) + compustate.gas += gas + if not mem_extend(mem, compustate, op, outstart, outsz): + return vm_exception('OOG EXTENDING MEMORY') + for i in range(min(len(data), outsz)): + mem[outstart + i] = data[i] elif op == 'RETURN': s0, s1 = stk.pop(), stk.pop() if not mem_extend(mem, compustate, op, s0, s1): return vm_exception('OOG EXTENDING MEMORY') + # print 'RETURNING', mem[s0: s0 + s1] return peaceful_exit('RETURN', compustate.gas, mem[s0: s0 + s1]) + elif op == 'SLOADBYTES': + s0, s1, s2 = stk.pop(), stk.pop(), stk.pop() + data = map(ord, ext.get_storage(msg.to, s0)) + if not mem_extend(mem, compustate, op, s1, min(len(data), s2)): + return vm_exception('OOG EXTENDING MEMORY') + for i in range(min(len(data), s2)): + mem[s1 + i] = data[i] + elif op == 'SSTOREBYTES': + s0, s1, s2 = stk.pop(), stk.pop(), stk.pop() + if not mem_extend(mem, compustate, op, s1, s2): + return vm_exception('OOG EXTENDING MEMORY') + data = ''.join(map(chr, mem[s1: s1 + s2])) + ext.set_storage(msg.to, s0, data) + elif op == 'SSIZE': + stk.append(len(ext.get_storage(msg.to, stk.pop()))) + elif op == 'STATEROOT': + stk.append(utils.big_endian_to_int(ext.get_storage(STATEROOTS, stk.pop()))) + elif op == 'TXGAS': + stk.append(utils.big_endian_to_int(ext.get_storage(TXGAS, '\x00' * 32))) elif op == 'SUICIDE': to = utils.encode_int(stk.pop()) to = ((b'\x00' * (32 - len(to))) + to)[12:] - xfer = ext.get_balance(msg.to) - ext.set_balance(to, ext.get_balance(to) + xfer) - ext.set_balance(msg.to, 0) + xfer = big_endian_to_int(ext.get_storage(ETHER, msg.to)) + ext.set_storage(ETHER, to, big_endian_to_int(ext.get_balance(to)) + xfer) + ext.set_storage(ETHER, msg.to, 0) ext.add_suicide(msg.to) # print('suiciding %s %s %d' % (msg.to, to, xfer)) return 1, compustate.gas, [] @@ -586,28 +618,3 @@ def vm_execute(ext, msg, code): # for a in stk: # assert 
is_numeric(a), (op, stk) # assert a >= 0 and a < 2**256, (a, op, stk) - - -class VmExtBase(): - - def __init__(self): - self.get_code = lambda addr: b'' - self.get_balance = lambda addr: 0 - self.set_balance = lambda addr, balance: 0 - self.set_storage_data = lambda addr, key, value: 0 - self.get_storage_data = lambda addr, key: 0 - self.log_storage = lambda addr: 0 - self.add_suicide = lambda addr: 0 - self.add_refund = lambda x: 0 - self.block_prevhash = 0 - self.block_coinbase = 0 - self.block_timestamp = 0 - self.block_number = 0 - self.block_difficulty = 0 - self.block_gas_limit = 0 - self.log = lambda addr, topics, data: 0 - self.tx_origin = b'0' * 40 - self.tx_gasprice = 0 - self.create = lambda msg: 0, 0, 0 - self.call = lambda msg: 0, 0, 0 - self.sendmsg = lambda msg: 0, 0, 0
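
Note (not part of the patch): the utils.py hunk above introduces a sharded address format (a SHARD_BYTES-long shard id prefixed to the classic 20-byte account id) via MAXSHARDS, SHARD_BYTES, ADDR_BYTES, shardify, get_shard and match_shard. The standalone sketch below mirrors that logic for illustration only; it inlines simplified equivalents of those helpers (valid for SHARD_BYTES == 2) instead of importing the repo's utils, and the example addresses are made up.

# sketch of the sharded-address helpers added to utils.py in this patch
MAXSHARDS = 65536                            # number of shards, as in the patch
SHARD_BYTES = 2                              # len(int_to_big_endian(MAXSHARDS - 1))
ADDR_BASE_BYTES = 20                         # classic 20-byte account identifier
ADDR_BYTES = ADDR_BASE_BYTES + SHARD_BYTES   # 22-byte sharded address

def shardify(address, shard):
    # prefix a 20-byte (or already 22-byte) address with the 2-byte shard id;
    # mirrors encode_int32(shard)[-SHARD_BYTES:] + address[-ADDR_BASE_BYTES:]
    assert len(address) in (ADDR_BASE_BYTES, ADDR_BYTES)
    prefix = chr((shard >> 8) & 0xff) + chr(shard & 0xff)
    return prefix + address[-ADDR_BASE_BYTES:]

def get_shard(address):
    # recover the shard id from the 2-byte prefix
    assert len(address) == ADDR_BYTES
    return (ord(address[0]) << 8) | ord(address[1])

def match_shard(addr, shard_source):
    # move addr into whatever shard shard_source lives in
    return shard_source[:SHARD_BYTES] + addr[SHARD_BYTES:]

if __name__ == '__main__':
    base = '\x11' * ADDR_BASE_BYTES          # hypothetical 20-byte account
    sharded = shardify(base, 5)
    assert len(sharded) == ADDR_BYTES and get_shard(sharded) == 5
    other = shardify(base, 9)
    assert get_shard(match_shard(sharded, other)) == 9
    print 'sharded address:', sharded.encode('hex')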