Skip to content

Commit 5430e63

Browse files
committed
v0.1.18: made w3.eth.get_logs more robust and added unit tests for it.
1 parent f9e9962 commit 5430e63

File tree

4 files changed

+275
-14
lines changed

4 files changed

+275
-14
lines changed

IceCreamSwapWeb3/EthAdvanced.py

Lines changed: 50 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -136,11 +136,36 @@ def get_logs(
136136
assert "fromBlock" not in filter_params and "toBlock" not in filter_params
137137
return self.get_logs_inner(filter_params, no_retry=no_retry)
138138

139+
from_block_original: BlockIdentifier = filter_params.get("fromBlock", "earliest")
140+
to_block_original: BlockIdentifier = filter_params.get("toBlock", "latest")
141+
139142
# sanitizing block numbers, could be strings like "latest"
140-
filter_params["fromBlock"] = from_block = self.get_block_number_from_identifier(filter_params.get("fromBlock", "earliest"))
141-
filter_params["toBlock"] = to_block = self.get_block_number_from_identifier(filter_params.get("toBlock", "latest"))
143+
if isinstance(from_block_original, int):
144+
from_block_body = None
145+
from_block = from_block_original
146+
else:
147+
from_block_body = self.get_block(from_block_original)
148+
from_block = from_block_body["number"]
149+
filter_params = {**filter_params, "fromBlock": from_block}
150+
151+
if isinstance(to_block_original, int):
152+
to_block_body = None
153+
to_block = to_block_original
154+
else:
155+
to_block_body = self.get_block(to_block_original)
156+
to_block = to_block_body["number"]
157+
filter_params = {**filter_params, "toBlock": to_block}
158+
142159
assert to_block >= from_block, f"{from_block=}, {to_block=}"
143160

161+
# if logs for a single block are queried, and we know the block hash, query by it
162+
if from_block == to_block and (from_block_body or to_block_body):
163+
block_body = from_block_body if from_block_body else to_block_body
164+
single_hash_filter = {**filter_params, "blockHash": block_body["hash"]}
165+
del single_hash_filter["fromBlock"]
166+
del single_hash_filter["toBlock"]
167+
return self.get_logs_inner(single_hash_filter, no_retry=no_retry)
168+
144169
# note: fromBlock and toBlock are both inclusive. e.g. 5 to 6 are 2 blocks
145170
num_blocks = to_block - from_block + 1
146171

@@ -162,18 +187,31 @@ def get_logs(
162187
# simply ignores logs from the missing block
163188
# to prevent this, we get the latest blocks individually by their hashes
164189
unstable_blocks = self.w3.unstable_blocks
165-
if to_block > self.w3.latest_seen_block - unstable_blocks and to_block > self.block_number - unstable_blocks:
190+
if to_block > self.w3.latest_seen_block - unstable_blocks and to_block > (last_stable_block := (self.get_block_number() - unstable_blocks)):
166191
results = []
167-
while to_block > self.w3.latest_seen_block - unstable_blocks and to_block >= from_block:
168-
single_hash_filter = {**filter_params, "blockHash": self.get_block(to_block)["hash"]}
169-
del single_hash_filter["fromBlock"]
170-
del single_hash_filter["toBlock"]
171-
results += self.get_logs_inner(single_hash_filter, no_retry=no_retry)
172-
to_block -= 1
192+
if from_block <= last_stable_block:
193+
results += self.get_logs({**filter_params, "toBlock": last_stable_block}, **kwargs)
194+
195+
# get all block hashes and ensure they build upon each other
196+
block_hashes = []
197+
for block_number in range(max(last_stable_block + 1, from_block), to_block + 1):
198+
block = self.get_block(block_number, no_retry=no_retry)
199+
if block_hashes:
200+
# make sure chain of blocks is consistent with each block building on the previous one
201+
assert block["parentHash"] == block_hashes[-1], f"{block_hashes[-1]=}, {block['parentHash']=}"
202+
if block_number == from_block and from_block_body is not None:
203+
assert block["hash"] == from_block_body["hash"], f"{from_block_body['hash']=}, {block['hash']=}"
204+
if block_number == to_block and to_block_body is not None:
205+
assert block["hash"] == to_block_body["hash"], f"{to_block_body['hash']=}, {block['hash']=}"
206+
block_hashes.append(block["hash"])
207+
208+
single_hash_filter = filter_params.copy()
209+
del single_hash_filter["fromBlock"]
210+
del single_hash_filter["toBlock"]
211+
for block_hash in block_hashes:
212+
results += self.get_logs_inner({**single_hash_filter, "blockHash": block_hash}, no_retry=no_retry)
173213
if p_bar is not None:
174214
p_bar.update(1)
175-
if to_block >= from_block:
176-
results += self.get_logs({**filter_params, "toBlock": to_block}, **kwargs)
177215
return results
178216

179217
# getting logs for a single block, which is not at the chain head. No drama
@@ -192,7 +230,7 @@ def get_logs(
192230
)
193231
if till_block >= to_block:
194232
return results
195-
return results + self.get_logs({**filter_params, "fromBlock": till_block+1}, **kwargs)
233+
return results + self.get_logs({**filter_params, "fromBlock": till_block + 1}, **kwargs)
196234
except Exception as e:
197235
print(f"Getting logs from SubSquid threw exception {repr(e)}, falling back to RPC")
198236

IceCreamSwapWeb3/FilterTest.py

Lines changed: 223 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,223 @@
1+
import unittest
2+
from unittest.mock import MagicMock, patch
3+
from . import Web3Advanced
4+
5+
6+
class TestWeb3AdvancedGetLogs(unittest.TestCase):
    """Tests for the robust ``get_logs`` behavior of ``Web3Advanced().eth``.

    All RPC interaction is replaced by deterministic mocks in ``setUp``:
    blocks are synthesized as ``hash_<n>`` chained to ``hash_<n-1>``, and
    logs are served from ``self.logs_storage`` (one dict entry per block).
    """

    @classmethod
    def setUpClass(cls):
        # Build the (expensive) wrapper a single time for the whole suite.
        cls.eth_advanced = Web3Advanced(node_url="https://rpc-core.icecreamswap.com").eth

    def setUp(self):
        # Swap the surrounding Web3Advanced object for a mock so no real RPC is hit.
        self.eth_advanced.w3 = MagicMock()
        self.eth_advanced.w3.filter_block_range = 1000  # default max blocks per eth_getLogs call
        self.eth_advanced.w3.unstable_blocks = 10       # head blocks treated as re-org prone
        self.eth_advanced.w3.latest_seen_block = 1000

        self.eth_advanced.get_block_number = MagicMock(return_value=1000)

        def fake_get_block(block_identifier, no_retry=False):
            # Synthesize a deterministic block: hash_<n> whose parent is hash_<n-1>.
            if isinstance(block_identifier, int):
                number = block_identifier
            elif block_identifier == 'latest':
                number = 1000
            else:
                # Handle other block identifiers as needed
                number = None
            if number is None:
                raise Exception(f"Invalid block identifier: {block_identifier}")
            parent = f"hash_{number - 1}" if number > 0 else None
            return {'number': number, 'hash': f"hash_{number}", 'parentHash': parent}

        self.eth_advanced.get_block = MagicMock(side_effect=fake_get_block)

        # Per-block log storage the fake RPC answers from.
        self.logs_storage = {}

        def fake__get_logs(filter_params):
            if 'blockHash' in filter_params:
                # Single-block query by hash.
                number = int(filter_params['blockHash'].split('_')[1])
                return self.logs_storage.get(number, [])
            start = filter_params.get('fromBlock', 0)
            end = filter_params.get('toBlock', 0)
            collected = []
            for number in range(start, end + 1):
                collected.extend(self.logs_storage.get(number, []))
            return collected

        self.eth_advanced._get_logs = MagicMock(side_effect=fake__get_logs)

    def _seed_logs(self, first, last):
        # One synthetic log per block in the inclusive range [first, last].
        for number in range(first, last + 1):
            self.logs_storage[number] = [{'blockNumber': number, 'logIndex': 0}]

    def _assert_complete_and_ordered(self, logs, first, last):
        # Logs must cover exactly [first, last], once each, in ascending order.
        got = [log['blockNumber'] for log in logs]
        want = list(range(first, last + 1))
        self.assertEqual(len(got), len(set(got)), "Duplicate logs found")
        self.assertEqual(sorted(got), want, "Missing or extra logs found")
        self.assertEqual(got, want, "Logs are not in correct order")

    def test_get_logs_no_duplicates_no_missing_blocks_correct_order(self):
        first, last = 900, 950
        self._seed_logs(first, last)

        logs = self.eth_advanced.get_logs({'fromBlock': first, 'toBlock': last}, use_subsquid=False)

        self._assert_complete_and_ordered(logs, first, last)

    def test_get_logs_range_exceeds_filter_block_range(self):
        # Shrink the allowed range so get_logs must split the query internally.
        self.eth_advanced.w3.filter_block_range = 10

        first, last = 50, 100
        self._seed_logs(first, last)

        logs = self.eth_advanced.get_logs({'fromBlock': first, 'toBlock': last}, use_subsquid=False)

        self._assert_complete_and_ordered(logs, first, last)

    def test_get_logs_splits_on_error(self):
        first, last = 50, 100
        self._seed_logs(first, last)

        def failing__get_logs(filter_params):
            # The fake RPC rejects any query spanning more than 10 blocks,
            # forcing get_logs to recursively split the range.
            start = filter_params.get('fromBlock', 0)
            end = filter_params.get('toBlock', 0)
            if end - start + 1 > 10:
                raise Exception("Simulated RPC error")
            collected = []
            for number in range(start, end + 1):
                collected.extend(self.logs_storage.get(number, []))
            return collected

        self.eth_advanced._get_logs.side_effect = failing__get_logs

        logs = self.eth_advanced.get_logs({'fromBlock': first, 'toBlock': last}, use_subsquid=False)

        self._assert_complete_and_ordered(logs, first, last)

    @patch('IceCreamSwapWeb3.EthAdvanced.get_filter')
    def test_get_logs_uses_subsquid(self, mock_get_filter):
        first, last = 800, 850
        # Only the top 11 blocks have RPC-served logs; the rest come from SubSquid.
        for number in range(last - 10, last + 1):
            self.logs_storage[number] = [{'blockNumber': number, 'logIndex': 0}]

        def fake_get_filter(chain_id, filter_params, partial_allowed, p_bar):
            # SubSquid serves everything up to 10 blocks below the requested end.
            till_block = last - 10
            logs = [{'blockNumber': number, 'logIndex': 0} for number in range(first, till_block + 1)]
            return till_block, logs

        mock_get_filter.side_effect = fake_get_filter

        logs = self.eth_advanced.get_logs({'fromBlock': first, 'toBlock': last}, use_subsquid=True)

        self._assert_complete_and_ordered(logs, first, last)

    def test_get_logs_unstable_blocks_handling(self):
        # The queried range ends at the chain head, i.e. inside the unstable window.
        unstable = self.eth_advanced.w3.unstable_blocks
        head = self.eth_advanced.get_block_number()
        first = head - unstable - 5
        last = head  # within the unstable blocks

        self._seed_logs(first, last)

        def fake_get_block(block_identifier, no_retry=False):
            # Consistent hash/parentHash chain anchored at the mocked head.
            if isinstance(block_identifier, int):
                number = block_identifier
            elif block_identifier == 'latest':
                number = head
            else:
                # Handle other block identifiers as needed
                number = None
            if number is None:
                raise Exception(f"Invalid block identifier: {block_identifier}")
            parent = f"hash_{number - 1}" if number > 0 else None
            return {'number': number, 'hash': f"hash_{number}", 'parentHash': parent}

        self.eth_advanced.get_block.side_effect = fake_get_block

        def fake_get_logs_inner(filter_params, no_retry=False):
            # Serve logs only for by-hash queries, mirroring the unstable-head path.
            block_hash = filter_params.get('blockHash')
            if block_hash:
                number = int(block_hash.split('_')[1])
                return self.logs_storage.get(number, [])
            return []

        self.eth_advanced.get_logs_inner = MagicMock(side_effect=fake_get_logs_inner)

        logs = self.eth_advanced.get_logs({'fromBlock': first, 'toBlock': last}, use_subsquid=False)

        self._assert_complete_and_ordered(logs, first, last)
# Allow running this test module directly from the command line.
if __name__ == '__main__':
    unittest.main()

IceCreamSwapWeb3/Web3Advanced.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -36,7 +36,7 @@ def __init__(
3636
self,
3737
node_url: str,
3838
should_retry: bool = True,
39-
unstable_blocks: int = int(os.getenv("UNSTABLE_BLOCKS", 3)), # not all nodes might have latest n blocks, these are seen as unstable
39+
unstable_blocks: int = int(os.getenv("UNSTABLE_BLOCKS", 5)), # not all nodes might have latest n blocks, these are seen as unstable
4040
):
4141
patch_error_formatters()
4242
self.node_url = node_url

setup.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
from setuptools import setup, find_packages
22

3-
VERSION = '0.1.17'
3+
VERSION = '0.1.18'
44
DESCRIPTION = 'IceCreamSwap Web3.py wrapper'
55
LONG_DESCRIPTION = 'IceCreamSwap Web3.py wrapper with automatic retries, multicall and other advanced functionality'
66

0 commit comments

Comments
 (0)