@@ -3,8 +3,11 @@ use std::{collections::HashMap, sync::Arc};
 use async_trait::async_trait;
 use derive_more::AsRef;
 use futures::future::try_join_all;
-use hyperlane_core::{Delivery, HyperlaneDomain, HyperlaneMessage, InterchainGasPayment, H512};
-use tokio::{sync::mpsc::Receiver as MpscReceiver, task::JoinHandle};
+use hyperlane_core::{
+    rpc_clients::RPC_RETRY_SLEEP_DURATION, Delivery, HyperlaneDomain, HyperlaneMessage,
+    InterchainGasPayment, H512,
+};
+use tokio::{sync::mpsc::Receiver as MpscReceiver, task::JoinHandle, time::sleep};
 use tracing::{info, info_span, instrument, trace, Instrument};
 
 use hyperlane_base::{
@@ -15,6 +18,8 @@ use hyperlane_base::{
 
 use crate::{db::ScraperDb, settings::ScraperSettings, store::HyperlaneDbStore};
 
+const CURSOR_INSTANTIATION_ATTEMPTS: usize = 10;
+
 /// A message explorer scraper agent
 #[derive(Debug, AsRef)]
 #[allow(unused)]
@@ -125,17 +130,17 @@ impl BaseAgent for Scraper {
                 }
             };
 
-            match self.scrape(scraper).await {
-                Ok(scraper_task) => {
-                    tasks.push(scraper_task);
-                }
+            let scraper_task = match self
+                .try_n_times_to_scrape(scraper, CURSOR_INSTANTIATION_ATTEMPTS)
+                .await
+            {
+                Ok(s) => s,
                 Err(err) => {
-                    tracing::error!(?err, ?scraper.domain, "Failed to scrape domain");
-                    self.chain_metrics
-                        .set_critical_error(scraper.domain.name(), true);
+                    tracing::error!(?err, ?scraper.domain, "Failed to scrape chain");
                     continue;
                 }
-            }
+            };
+            tasks.push(scraper_task);
             tasks.push(metrics_updater.spawn());
         }
         tasks.push(self.runtime_metrics.spawn());
@@ -146,6 +151,31 @@ impl BaseAgent for Scraper {
 }
 
 impl Scraper {
+    /// Try to scrape up to `attempts` times before giving up.
+    async fn try_n_times_to_scrape(
+        &self,
+        scraper: &ChainScraper,
+        attempts: usize,
+    ) -> eyre::Result<JoinHandle<()>> {
+        for i in 0..attempts {
+            let scraper_task = match self.scrape(scraper).await {
+                Ok(s) => s,
+                Err(err) => {
+                    tracing::error!(?err, ?scraper.domain, attempt_count=i, "Failed to scrape chain");
+                    sleep(RPC_RETRY_SLEEP_DURATION).await;
+                    continue;
+                }
+            };
+
+            self.chain_metrics
+                .set_critical_error(scraper.domain.name(), false);
+            return Ok(scraper_task);
+        }
+        self.chain_metrics
+            .set_critical_error(scraper.domain.name(), true);
+        Err(eyre::eyre!("Failed to scrape chain"))
+    }
+
     /// Sync contract data and other blockchain with the current chain state.
     /// This will spawn long-running contract sync tasks
     #[instrument(fields(domain=%scraper.domain.name()), skip_all)]