42 commits
e44ea31
EIP 101 implementation plus casper contract registration tested
Nov 29, 2015
6165909
Made bet strategy initialization work
Nov 30, 2015
7a223fc
Validators are producing blocks now!
Dec 1, 2015
96e26bb
Validators making bets (though not yet converging)
Dec 1, 2015
9d90620
Removed account_exists opcode
Dec 2, 2015
b8fc4c4
Removed a few extraneous files
Dec 2, 2015
da01969
Bets seem to be partially converging!
Dec 2, 2015
8f35c1c
Fixed small bug with stateprobs
Dec 2, 2015
7fda1f6
Got basic consensus working!
Dec 3, 2015
962329a
Halfway to supporting addition and removal of validators, need some r…
Dec 4, 2015
38f6032
Further progress on validator induction
Dec 5, 2015
1ef213a
Added method to ask for blocks if we failed to receive them the first…
Dec 5, 2015
c197ab2
Validator induction works
Dec 6, 2015
535a349
Validator induction works
Dec 7, 2015
c53f8bf
Code stabilizing, much more comments...
Dec 8, 2015
e5b8e3d
Test fully passes!
Dec 14, 2015
42a448b
Added better state caching and fixed the fastvm for greater efficiency
Dec 18, 2015
4c36d13
Expanding upon the sample-per-block based approach to increase scalabi…
Dec 21, 2015
40fe2e3
Yay, works!
Dec 23, 2015
1cb1671
Validators increased from 12 to 13
Dec 23, 2015
8992958
Another ~11% speedup to casper incentivization
Dec 25, 2015
fe2ab88
16 nodes work!
Dec 26, 2015
9a14719
Switched incentivization order, and implemented sharding scaffolding
Jan 29, 2016
737effb
Added sharding-related opcodes
Jan 29, 2016
5a19312
Added extra ec opcodes; also test passes fully at least sometimes
Jan 29, 2016
29d1530
Test runs with 23 nodes
Jan 31, 2016
7f0405c
Some efficiency and correctness improvements
Feb 2, 2016
6aa343c
Added slashing rules for double-signing, and 33rd percentile grabbing…
Feb 9, 2016
2b82606
Switch to simpy
heikoheiko Feb 12, 2016
5b2c73c
Added double-block slashing
Feb 13, 2016
e29ea30
Fixed a bug preventing downloading old blocks
Feb 14, 2016
bf0bf0f
Changed gas inclusion algorithm
Feb 20, 2016
952bb70
Changed gas inclusion algorithm
Feb 21, 2016
774609c
Added ring signature test
Feb 27, 2016
1eb8eb1
Added previously missing files
Feb 27, 2016
9cf2c4f
A few comments added, and constants changed
Feb 28, 2016
8abc2e8
A few more bugfixes
Feb 29, 2016
8e1f89f
Some refactoring and renaming
Mar 1, 2016
998f981
Added new state root voting algorithm, plus script to compute some co…
Mar 3, 2016
cd0e2ce
Parameter changes to allow 12 nodes simultaneously running
Mar 3, 2016
e4ed6f6
Upped maximum number of blocks you can validate to 4m ~= 1 year
Mar 3, 2016
0933826
whitespace and file path fix
pipermerriam Mar 15, 2016
56 changes: 32 additions & 24 deletions ethereum/abi.py
@@ -1,11 +1,11 @@
 import sys
 import re
 import yaml  # use yaml instead of json to get non unicode (works with ascii only data)
-from ethereum import utils
+import utils
 from rlp.utils import decode_hex, encode_hex
-from ethereum.utils import encode_int, zpad, big_endian_to_int, is_numeric, is_string, ceil32
-from ethereum.utils import isnumeric
+from utils import encode_int, zpad, big_endian_to_int, is_numeric, is_string, ceil32, ADDR_BYTES, isnumeric
 import ast
+import copy


 def json_decode(x):
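
Note: besides switching abi.py to plain sibling imports, this hunk pulls in ADDR_BYTES, which parameterizes the address width that the old code hard-coded as 20 bytes (40 hex characters). The constant is defined in utils and not shown in this diff; a minimal sketch of the pattern, with an illustrative value and a hypothetical helper name:

ADDR_BYTES = 20  # illustrative only; the real value comes from ethereum/utils.py

def classify_address(arg):  # hypothetical helper mirroring the length checks below
    if len(arg) == ADDR_BYTES:                # raw bytes
        return 'binary'
    elif len(arg) == ADDR_BYTES * 2:          # bare hex
        return 'hex'
    elif len(arg) == ADDR_BYTES * 2 + 2 and arg[:2] == '0x':
        return 'prefixed hex'
    return 'unknown'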
@@ -100,24 +100,24 @@ def decode(self, name, data):
     def is_unknown_type(self, name):
         return self.function_data[name]["is_unknown_type"]

-    def listen(self, log, noprint=False):
-        if not len(log.topics) or log.topics[0] not in self.event_data:
+    def listen(self, sender, topics, data, noprint=False):
+        if not len(topics) or topics[0] not in self.event_data:
             return
-        types = self.event_data[log.topics[0]]['types']
-        name = self.event_data[log.topics[0]]['name']
-        names = self.event_data[log.topics[0]]['names']
-        indexed = self.event_data[log.topics[0]]['indexed']
+        types = self.event_data[topics[0]]['types']
+        name = self.event_data[topics[0]]['name']
+        names = self.event_data[topics[0]]['names']
+        indexed = self.event_data[topics[0]]['indexed']
         indexed_types = [types[i] for i in range(len(types))
                          if indexed[i]]
         unindexed_types = [types[i] for i in range(len(types))
                            if not indexed[i]]
         # print('listen', log.data.encode('hex'), log.topics)
-        deserialized_args = decode_abi(unindexed_types, log.data)
+        deserialized_args = decode_abi(unindexed_types, data)
         o = {}
         c1, c2 = 0, 0
         for i in range(len(names)):
             if indexed[i]:
-                topic_bytes = utils.zpad(utils.encode_int(log.topics[c1 + 1]), 32)
+                topic_bytes = utils.zpad(utils.encode_int(topics[c1 + 1]), 32)
                 o[names[i]] = decode_single(process_type(indexed_types[c1]),
                                             topic_bytes)
                 c1 += 1
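
The new listen signature takes the raw event fields instead of a log object, so callers holding only (sender, topics, data) can decode events without building a Log. A rough usage sketch, assuming a ContractTranslator built from the contract's JSON ABI and a pyethereum-style log with address, topics, and data attributes:

translator = ContractTranslator(contract_abi)
# previously: translator.listen(log)
translator.listen(log.address, log.topics, log.data)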
@@ -153,7 +153,7 @@ def decint(n):
         return n
     elif is_numeric(n):
         raise EncodingError("Number out of range: %r" % n)
-    elif is_string(n) and len(n) == 40:
+    elif is_string(n) and len(n) == ADDR_BYTES * 2:
         return big_endian_to_int(decode_hex(n))
     elif is_string(n) and len(n) <= 32:
         return big_endian_to_int(n)
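
With ADDR_BYTES equal to 20 the behaviour is unchanged: a 40-character hex string is still treated as an address and decoded with decode_hex; only the magic number moves into the constant. For example:

decint('0' * 39 + '1')  # 40 hex chars -> big_endian_to_int(decode_hex(...)) == 1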
@@ -232,39 +232,38 @@ def encode_single(typ, arg):
         assert sub == ''
         if isnumeric(arg):
             return zpad(encode_int(arg), 32)
-        elif len(arg) == 20:
+        elif len(arg) == ADDR_BYTES:
             return zpad(arg, 32)
-        elif len(arg) == 40:
+        elif len(arg) == ADDR_BYTES * 2:
             return zpad(decode_hex(arg), 32)
-        elif len(arg) == 42 and arg[:2] == '0x':
+        elif len(arg) == ADDR_BYTES * 2 + 2 and arg[:2] == '0x':
             return zpad(decode_hex(arg[2:]), 32)
         else:
             raise EncodingError("Could not parse address: %r" % arg)
     raise EncodingError("Unhandled type: %r %r" % (base, sub))

+proctype_cache = {}

 def process_type(typ):
+    if typ in proctype_cache:
+        return proctype_cache[typ]
     # Crazy reg expression to separate out base type component (eg. uint),
     # size (eg. 256, 128x128, none), array component (eg. [], [45], none)
     regexp = '([a-z]*)([0-9]*x?[0-9]*)((\[[0-9]*\])*)'
     base, sub, arr, _ = re.match(regexp, utils.to_string_for_regexp(typ)).groups()
     arrlist = re.findall('\[[0-9]*\]', arr)
     assert len(''.join(arrlist)) == len(arr), \
         "Unknown characters found in array declaration"
-    # Check validity of string type
-    if base == 'string' or base == 'bytes':
-        assert re.match('^[0-9]*$', sub), \
-            "String type must have no suffix or numerical suffix"
     # Check validity of integer type
-    elif base == 'uint' or base == 'int':
+    if base == 'uint' or base == 'int':
         assert re.match('^[0-9]+$', sub), \
             "Integer type must have numerical suffix"
         assert 8 <= int(sub) <= 256, \
             "Integer size out of bounds"
         assert int(sub) % 8 == 0, \
             "Integer size must be multiple of 8"
     # Check validity of string type
-    if base == 'string' or base == 'bytes':
+    elif base == 'string' or base == 'bytes':
         assert re.match('^[0-9]*$', sub), \
             "String type must have no suffix or numerical suffix"
         assert not sub or int(sub) <= 32, \
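
For reference, process_type splits a canonical type name into a (base, sub, array-dimensions) triple using the regexp above, and the validity checks then run on base and sub. Expected outputs, derived from the code shown here:

process_type('uint256')       # ('uint', '256', [])
process_type('bytes32')       # ('bytes', '32', [])
process_type('address[2][]')  # ('address', '', [[2], []])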
@@ -285,7 +284,9 @@ def process_type(typ):
     # Check validity of address type
     elif base == 'address':
         assert sub == '', "Address cannot have suffix"
-    return base, sub, [ast.literal_eval(x) for x in arrlist]
+    o = base, sub, [ast.literal_eval(x) for x in arrlist]
+    proctype_cache[typ] = o
+    return o


 # Returns the static size of a type, or None if dynamic
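
Since the result is stored before it is returned, every later call with the same type string gets the identical tuple back. That is harmless for base and sub, which are immutable strings, but the third element is a list, so a caller that mutated it in place would poison proctype_cache. A quick illustration of the memoization:

a = process_type('uint256')
b = process_type('uint256')
assert a is b  # the second call is served from proctype_cache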
@@ -374,7 +375,7 @@ def encode_abi(types, args):
 def decode_single(typ, data):
     base, sub, _ = typ
     if base == 'address':
-        return encode_hex(data[12:])
+        return encode_hex(data[32-ADDR_BYTES:])
     elif base == 'string' or base == 'bytes' or base == 'hash':
         return data[:int(sub)] if len(sub) else data
     elif base == 'uint':
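
ABI encoding right-aligns an address within its 32-byte word, so the decoder takes the last ADDR_BYTES bytes; with 20-byte addresses, 32 - ADDR_BYTES yields the old data[12:] slice. A small round-trip sketch, assuming addr holds ADDR_BYTES raw bytes:

word = b'\x00' * (32 - ADDR_BYTES) + addr  # how encode_single lays out an address word
assert decode_single(('address', '', []), word) == encode_hex(addr)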
@@ -392,8 +393,13 @@ def decode_single(typ, data):
         return bool(int(data.encode('hex'), 16))


+decode_abi_cache = {}
+
 # Decodes multiple arguments using the head/tail mechanism
 def decode_abi(types, data):
+    cache_key = str(types) + data
+    if cache_key in decode_abi_cache:
+        return copy.deepcopy(decode_abi_cache[cache_key])
     # Process types
     proctypes = [process_type(typ) for typ in types]
     # Get sizes of everything
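
Unlike proctype_cache, decode_abi_cache stores and serves deep copies: decoded results contain mutable lists, so handing back the cached object itself would let one caller's mutation corrupt every later lookup. The key simply concatenates str(types) with the raw data, which works because this is Python 2-era code where data is a byte string. Expected behaviour, with payload standing in for 32 bytes of encoded data:

first = decode_abi(['uint256'], payload)
second = decode_abi(['uint256'], payload)       # cache hit
assert first == second and first is not second  # equal values, distinct objects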
@@ -431,7 +437,9 @@ def decode_abi(types, data):
             next_offset = start_positions[i + 1]
             outs[i] = data[offset:next_offset]
     # Recursively decode them all
-    return [dec(proctypes[i], outs[i]) for i in range(len(outs))]
+    o = [dec(proctypes[i], outs[i]) for i in range(len(outs))]
+    decode_abi_cache[cache_key] = copy.deepcopy(o)
+    return o


 # Decode a single value (static or dynamic)
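
For context on the head/tail mechanism named in the comment (the standard contract ABI layout): static arguments are encoded in place in the head, while each dynamic argument leaves a 32-byte offset in the head and appends its length-prefixed payload to the tail. Roughly, for an illustrative call:

# encode_abi(['uint256', 'bytes'], [1, 'ab']) produces four 32-byte words:
#   word 0 (head): 1      the static uint256, in place
#   word 1 (head): 64     byte offset of the dynamic payload (2 * 32)
#   word 2 (tail): 2      length of the bytes value
#   word 3 (tail): 'ab'   right-padded with zeros to 32 bytes
# decode_abi walks the head, then slices the tail at those offsets.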