1 file changed: +36 −0

@@ -322,6 +322,9 @@ def main(args):
     if args.justblocks:
         return
 
+    scan_state(gethdb, leveldb)
+    return
+
     state_root = canonical_head.state_root
     logger.info(f'starting state trie import: {humanize_hash(state_root)}')
@@ -357,6 +360,39 @@ def main(args):
     logger.info('successfully imported state trie and all storage tries')
 
 
+def scan_state(gethdb: GethDatabase, trinitydb: LevelDB):
+    """
+    Imports state by indiscriminately copying over everything which might be part
+    of the state trie. This copies more data than necessary, but is likely to be
+    much faster than iterating all state.
+    """
+    logger.debug('scan_state: bulk-importing state entries')
+
+    iterator = gethdb.db.iterator(
+        start=b'\x00' * 32,
+        stop=b'\xff' * 32,
+        include_start=True,
+        include_stop=True,
+    )
+
+    imported_entries = 0
+    skipped_keys = 0
+    bucket = b'\x00' * 2
+    for key, value in iterator:
+        if len(key) != 32:
+            skipped_keys += 1
+            continue
+        trinitydb[key] = value
+        imported_entries += 1
+
+        if key >= bucket:
+            logger.debug(f'imported: {bucket.hex()} skipped={skipped_keys}')
+            if bucket == b'\xff' * 2:
+                break
+            bucket = (int.from_bytes(bucket, 'big') + 1).to_bytes(2, 'big')
+
+    logger.info(f'scan_state: successfully imported {imported_entries} state entries')
+
 
 if __name__ == "__main__":
     logging.basicConfig(
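
A note on the `len(key) != 32` filter: in geth's hash-based storage schema, state-trie nodes are keyed by their raw 32-byte keccak hashes, while most other records live under prefixed keys of other lengths, which is why a pure key-length filter over-approximates the state (consistent with the docstring's "copies more data than necessary"). Below is a minimal sketch for sanity-checking that assumption against a real database before running the bulk copy. It assumes the underlying binding is plyvel, whose `iterator(start=..., stop=..., include_start=..., include_stop=...)` signature matches the call in the diff; `CHAINDATA_PATH` and `survey_key_lengths` are hypothetical, invented here for illustration, not part of the PR.

```python
from collections import Counter

import plyvel  # assumed LevelDB binding; the PR's GethDatabase/LevelDB wrappers are not shown

CHAINDATA_PATH = '/tmp/geth/chaindata'  # hypothetical path, for illustration only


def survey_key_lengths(db: plyvel.DB, limit: int = 1_000_000) -> Counter:
    """Count how many keys of each length appear among the first `limit` records."""
    lengths: Counter = Counter()
    for i, (key, _value) in enumerate(db.iterator()):
        if i >= limit:
            break
        lengths[len(key)] += 1
    return lengths


if __name__ == '__main__':
    db = plyvel.DB(CHAINDATA_PATH)
    for length, count in sorted(survey_key_lengths(db).items()):
        print(f'key length {length}: {count} records')
```

If key lengths other than 32 dominate the survey, expect the `skipped_keys` counter in `scan_state` to roughly match their total.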
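
The progress logging in `scan_state` advances a 2-byte `bucket` prefix, giving up to 65,536 log ticks across the keyspace; note that the bucket moves only one step per imported key, so it can lag behind sparse data. A self-contained sketch of the same pattern over an in-memory sorted list (`bucket_progress` is an illustrative helper, not from the PR):

```python
def bucket_progress(keys):
    """Yield (bucket_hex, keys_seen) each time iteration reaches the current 2-byte bucket.

    Mirrors scan_state's logging: the bucket advances only one step per
    key, so with sparse keys it ticks once per key until it catches up.
    """
    bucket = b'\x00' * 2
    seen = 0
    for key in keys:  # sorted 32-byte keys, as a LevelDB iterator would yield them
        seen += 1
        if key >= bucket:
            yield bucket.hex(), seen
            if bucket == b'\xff' * 2:
                break
            bucket = (int.from_bytes(bucket, 'big') + 1).to_bytes(2, 'big')


# Three keys spread across the keyspace: the tick count shows the lag.
for tick in bucket_progress(bytes([b]) * 32 for b in (0x00, 0x7f, 0xff)):
    print(tick)  # ('0000', 1), ('0001', 2), ('0002', 3)
```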