
Commit ef4f634

ACME: use increasing intervals with timeout when polling.
Previously, we limited polling for challenge and order status to a fixed number of tries. The updated algorithm will use increasing intervals and give up when the total wait timeout has elapsed.
1 parent 5a27f0c commit ef4f634
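
To illustrate the new policy, here is a standalone sketch of the `backoff` helper introduced by this commit, with `std::time::Instant` standing in for the module's `Time` type and a `main` that prints the head of the sequence instead of sleeping; the 8-second cap and 60-second budget mirror `MAX_RETRY_INTERVAL` and the new default timeouts.

use std::time::{Duration, Instant};

// Sketch of the backoff iterator added in this commit, with std::time::Instant
// standing in for the module's Time type. Successive pairs grow like a
// Fibonacci sequence; each emitted interval is capped at `max`, and the
// iterator ends once `timeout` has elapsed since it was created.
fn backoff(max: Duration, timeout: Duration) -> impl Iterator<Item = Duration> {
    let first = (Duration::ZERO, Duration::from_secs(1));
    let stop = Instant::now() + timeout;

    core::iter::successors(Some(first), move |prev: &(Duration, Duration)| {
        if Instant::now() >= stop {
            return None;
        }
        Some((prev.1, prev.0.saturating_add(prev.1)))
    })
    .map(move |(_, x)| x.min(max))
}

fn main() {
    // With the commit's defaults (8s cap, 60s timeout) the intervals are
    // 1, 1, 2, 3, 5, 8, 8, ... seconds. take(7) only prints the head of the
    // sequence; in the real helper, the time spent sleeping between polls
    // eventually exhausts the budget and the iterator stops.
    for interval in backoff(Duration::from_secs(8), Duration::from_secs(60)).take(7) {
        println!("{interval:?}");
    }
}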

src/acme.rs

Lines changed: 56 additions & 31 deletions
@@ -32,6 +32,7 @@ pub mod solvers;
 pub mod types;
 
 const DEFAULT_RETRY_INTERVAL: Duration = Duration::from_secs(1);
+const MAX_RETRY_INTERVAL: Duration = Duration::from_secs(8);
 static REPLAY_NONCE: http::HeaderName = http::HeaderName::from_static("replay-nonce");
 
 pub struct NewCertificateOutput {
@@ -55,6 +56,9 @@ where
     nonce: NoncePool,
     directory: types::Directory,
     solvers: Vec<Box<dyn solvers::ChallengeSolver + Send + 'a>>,
+    authorization_timeout: Duration,
+    finalize_timeout: Duration,
+    network_error_retries: usize,
 }
 
 #[derive(Default)]
@@ -106,6 +110,9 @@ where
             nonce: Default::default(),
             directory: Default::default(),
             solvers: Vec::new(),
+            authorization_timeout: Duration::from_secs(60),
+            finalize_timeout: Duration::from_secs(60),
+            network_error_retries: 3,
         })
     }
 
@@ -152,14 +159,14 @@ where
         url: &Uri,
         payload: P,
     ) -> Result<http::Response<Bytes>> {
-        let mut fails = 0;
-
         let mut nonce = if let Some(nonce) = self.nonce.get() {
             nonce
         } else {
             self.get_nonce().await?
         };
 
+        let mut tries = core::iter::repeat(DEFAULT_RETRY_INTERVAL).take(self.network_error_retries);
+
         ngx_log_debug!(self.log.as_ptr(), "sending request to {url:?}");
         let res = loop {
             let body = crate::jws::sign_jws(
@@ -183,13 +190,15 @@ where
 
             let res = match self.http.request(req).await {
                 Ok(res) => res,
-                Err(e) if fails >= 3 => return Err(e.into()),
-                // TODO: limit retries to connection errors
-                Err(_) => {
-                    fails += 1;
-                    sleep(DEFAULT_RETRY_INTERVAL).await;
-                    ngx_log_debug!(self.log.as_ptr(), "retrying: {} of 3", fails + 1);
-                    continue;
+                Err(err) => {
+                    // TODO: limit retries to connection errors
+                    if let Some(tm) = tries.next() {
+                        sleep(tm).await;
+                        ngx_log_debug!(self.log.as_ptr(), "retrying failed request ({err})");
+                        continue;
+                    } else {
+                        return Err(err.into());
+                    }
                 }
             };
 
@@ -210,15 +219,13 @@ where
                 types::ErrorKind::BadNonce | types::ErrorKind::RateLimited
             );
 
-            if !retriable || fails >= 3 {
-                self.nonce.add(nonce);
-                return Err(err.into());
+            if retriable && wait_for_retry(&res, &mut tries).await {
+                ngx_log_debug!(self.log.as_ptr(), "retrying failed request ({err})");
+                continue;
             }
 
-            fails += 1;
-
-            wait_for_retry(&res).await;
-            ngx_log_debug!(self.log.as_ptr(), "retrying: {} of 3", fails + 1);
+            self.nonce.add(nonce);
+            return Err(err.into());
         };
 
         self.nonce.add_from_response(&res);
@@ -381,12 +388,9 @@ where
             }
         };
 
-        let mut tries = 10;
-
-        while order.status == OrderStatus::Processing && tries > 0 {
-            tries -= 1;
-            wait_for_retry(&res).await;
+        let mut tries = backoff(MAX_RETRY_INTERVAL, self.finalize_timeout);
 
+        while order.status == OrderStatus::Processing && wait_for_retry(&res, &mut tries).await {
             drop(order);
             res = self.post(&order_url, b"").await?;
             order = serde_json::from_slice(res.body())?;
@@ -432,20 +436,18 @@ where
             return Err(anyhow!("unexpected challenge status {:?}", result.status));
         }
 
-        wait_for_retry(&res).await;
-
-        let mut tries = 10;
+        let mut tries = backoff(MAX_RETRY_INTERVAL, self.authorization_timeout);
+        wait_for_retry(&res, &mut tries).await;
 
         let result = loop {
            let res = self.post(&url, b"").await?;
            let result: types::Authorization = serde_json::from_slice(res.body())?;
 
-            if result.status != AuthorizationStatus::Pending || tries == 0 {
+            if result.status != AuthorizationStatus::Pending
+                || !wait_for_retry(&res, &mut tries).await
+            {
                break result;
            }
-
-            tries -= 1;
-            wait_for_retry(&res).await;
        };
 
        ngx_log_debug!(
@@ -499,13 +501,36 @@ pub fn make_certificate_request(
 }
 
 /// Waits until the next retry attempt is allowed.
-async fn wait_for_retry<B>(res: &http::Response<B>) {
+async fn wait_for_retry<B>(
+    res: &http::Response<B>,
+    policy: &mut impl Iterator<Item = Duration>,
+) -> bool {
+    let Some(interval) = policy.next() else {
+        return false;
+    };
+
     let retry_after = res
         .headers()
         .get(http::header::RETRY_AFTER)
         .and_then(parse_retry_after)
-        .unwrap_or(DEFAULT_RETRY_INTERVAL);
-    sleep(retry_after).await
+        .unwrap_or(interval);
+
+    sleep(retry_after).await;
+    true
+}
+
+/// Generate increasing intervals saturated at `max` until `timeout` has passed.
+fn backoff(max: Duration, timeout: Duration) -> impl Iterator<Item = Duration> {
+    let first = (Duration::ZERO, Duration::from_secs(1));
+    let stop = Time::now() + timeout;
+
+    core::iter::successors(Some(first), move |prev: &(Duration, Duration)| {
+        if Time::now() >= stop {
+            return None;
+        }
+        Some((prev.1, prev.0.saturating_add(prev.1)))
+    })
+    .map(move |(_, x)| x.min(max))
 }
 
 fn parse_retry_after(val: &http::HeaderValue) -> Option<Duration> {
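
For reference, the reworked `wait_for_retry` accepts any `Iterator<Item = Duration>` as its retry policy: it takes the next interval (returning `false` once the policy is exhausted), prefers a server-supplied Retry-After value when one parses, sleeps, and returns `true`. Below is a rough synchronous model of that contract; `wait_for_retry_model` is hypothetical and a thread sleep stands in for the async `sleep`.

use std::time::Duration;

// Hypothetical synchronous model of the new wait_for_retry contract: take the
// next interval from the policy (reporting false when it is exhausted), prefer
// a server-supplied Retry-After value, then sleep and report true.
fn wait_for_retry_model(
    retry_after: Option<Duration>,
    policy: &mut impl Iterator<Item = Duration>,
) -> bool {
    let Some(interval) = policy.next() else {
        return false;
    };
    std::thread::sleep(retry_after.unwrap_or(interval));
    true
}

fn main() {
    // A fixed policy like the network-error path in post(): a bounded number
    // of identical intervals (shortened here so the example runs quickly).
    let mut policy = std::iter::repeat(Duration::from_millis(10)).take(3);
    let mut attempts = 0;
    while wait_for_retry_model(None, &mut policy) {
        attempts += 1;
    }
    assert_eq!(attempts, 3);
}

The same shape drives both policies introduced here: the fixed `repeat(DEFAULT_RETRY_INTERVAL).take(network_error_retries)` policy for transport errors and the time-bounded `backoff(...)` policy for order and authorization polling.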
