
Commit 8bc7030

Merge pull request #55 from hnez/rauc-bundle-polling-wait
rauc: perform retries with exponential backoff
2 parents: e1e6ef5 + 9d0e394

1 file changed: 21 additions, 2 deletions

src/dbus/rauc.rs

@@ -74,6 +74,8 @@ mod imports {
 }
 
 const RELOAD_RATE_LIMIT: Duration = Duration::from_secs(10 * 60);
+const RETRY_INTERVAL_MIN: Duration = Duration::from_secs(60);
+const RETRY_INTERVAL_MAX: Duration = Duration::from_secs(60 * 60);
 
 use imports::*;
 

@@ -188,6 +190,8 @@ async fn channel_polling_task(
 ) {
     let proxy = InstallerProxy::new(&conn).await.unwrap();
 
+    let mut retry_interval = RETRY_INTERVAL_MIN;
+
     while let Some(mut channel) = channels
         .try_get()
         .and_then(|chs| chs.into_iter().find(|ch| ch.name == name))

@@ -201,11 +205,26 @@ async fn channel_polling_task(
 
         if let Err(e) = channel.poll(&proxy, slot_status.as_deref()).await {
             warn!(
-                "Failed to fetch update for update channel \"{}\": {}",
-                channel.name, e
+                "Failed to fetch update for update channel \"{}\": {}. Retrying in {}s.",
+                channel.name,
+                e,
+                retry_interval.as_secs()
             );
+
+            if retry_interval < RETRY_INTERVAL_MAX {
+                sleep(retry_interval).await;
+
+                // Perform a (limited) exponential backoff on the retry interval to recover
+                // fast from short-term issues while also preventing the update server from
+                // being DDOSed by excessive retries.
+                retry_interval *= 2;
+
+                continue;
+            }
         }
 
+        retry_interval = RETRY_INTERVAL_MIN;
+
         channels.modify(|chs| {
             let mut chs = chs?;
             let channel_prev = chs.iter_mut().find(|ch| ch.name == name)?;
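
For reference, the change boils down to the common capped exponential backoff pattern. The sketch below is illustrative only, not the committed code: fetch_update and poll_with_backoff are hypothetical stand-ins for channel.poll() and the surrounding polling loop, and the cap is applied here by clamping the interval, whereas the loop in the diff stops retrying early once the one-hour maximum is reached and falls back to its regular polling cadence.

// Minimal sketch of a capped exponential backoff, assuming tokio for sleeping.
// fetch_update is a hypothetical placeholder for the real poll against the
// update server; the two constants mirror the ones added in this commit.

use std::time::Duration;
use tokio::time::sleep;

const RETRY_INTERVAL_MIN: Duration = Duration::from_secs(60);
const RETRY_INTERVAL_MAX: Duration = Duration::from_secs(60 * 60);

async fn fetch_update() -> Result<(), String> {
    // Placeholder for the actual poll (e.g. channel.poll(..)).
    Err("server unreachable".into())
}

async fn poll_with_backoff() {
    let mut retry_interval = RETRY_INTERVAL_MIN;

    loop {
        match fetch_update().await {
            Ok(()) => {
                // A successful poll resets the interval to its minimum.
                retry_interval = RETRY_INTERVAL_MIN;
                break;
            }
            Err(e) => {
                eprintln!("Poll failed: {e}. Retrying in {}s.", retry_interval.as_secs());
                sleep(retry_interval).await;

                // Double the interval, but never wait longer than the cap, so
                // short outages recover quickly without hammering the server.
                retry_interval = (retry_interval * 2).min(RETRY_INTERVAL_MAX);
            }
        }
    }
}

Doubling from one minute up to one hour means a brief outage costs at most a few extra minutes of delay, while a prolonged outage settles at roughly one retry per hour.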
