 #' highlight the download destination. Workflow:
 #' * User executes, e.g., `use_course("bit.ly/xxx-yyy-zzz")`.
 #' * User is asked to notice and confirm the location of the new folder. Specify
-#'   `destdir` to prevent this.
+#'   `destdir` or configure the `"usethis.destdir"` option to prevent this.
 #' * User is asked if they'd like to delete the ZIP file.
 #' * If new folder contains an `.Rproj` file, a new instance of RStudio is
 #'   launched. Otherwise, the folder is opened in the file manager, e.g. Finder
@@ -130,6 +130,10 @@ use_zip <- function(url,
 #' filename is generated from the input URL. In either case, the filename is
 #' sanitized. Returns the path to the downloaded ZIP file, invisibly.
 #'
+#' `tidy_download()` is set up to retry after a download failure. In an
+#' interactive session, it asks for the user's consent. All retries use a longer
+#' connect timeout.
+#'
 #' ## DropBox
 #'
 #' To make a folder available for ZIP download, create a shared link for it:
@@ -236,21 +240,22 @@ use_zip <- function(url,
 NULL
 
 # 1. downloads from `url`
-# 2. determines filename from content-description header (with fallbacks)
-# 3. returned path has content-type and content-description as attributes
+# 2. calls a retry-capable helper to download the ZIP file
+# 3. determines filename from content-description header (with fallbacks)
+# 4. returned path has content-type and content-description as attributes
 tidy_download <- function(url, destdir = getwd()) {
   check_path_is_directory(destdir)
   tmp <- file_temp("tidy-download-")
-  h <- curl::new_handle(noprogress = FALSE, progressfunction = progress_fun)
-  curl::curl_download(url, tmp, quiet = FALSE, mode = "wb", handle = h)
+
+  h <- download_url(url, destfile = tmp)
   ui_line()
 
   cd <- content_disposition(h)
   base_name <- make_filename(cd, fallback = path_file(url))
   full_path <- path(destdir, base_name)
 
   if (!can_overwrite(full_path)) {
-    ui_stop("Aborting.")
+    ui_stop("Aborting to avoid overwriting {ui_path(full_path)}")
   }
   attr(full_path, "content-type") <- content_type(h)
   attr(full_path, "content-disposition") <- cd
@@ -259,6 +264,81 @@ tidy_download <- function(url, destdir = getwd()) {
   invisible(full_path)
 }
 
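+# makes up to `n_tries` attempts to download `url` to `destfile`; in an
+# interactive session, asks the user before retrying, and retries use a longer
+# connect timeout; returns the curl handle, invisibly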
+download_url <- function(url,
+                         destfile,
+                         handle = curl::new_handle(),
+                         n_tries = 3,
+                         retry_connecttimeout = 40L) {
+  handle_options <- list(noprogress = FALSE, progressfunction = progress_fun)
+  curl::handle_setopt(handle, .list = handle_options)
+
+  we_should_retry <- function(i, n_tries, status) {
+    if (i >= n_tries) {
+      FALSE
+    } else if (inherits(status, "error")) {
+      # TODO: find a way to detect a (connect) timeout more specifically?
+      # https://github.com/jeroen/curl/issues/154
+      # https://ec.haxx.se/usingcurl/usingcurl-timeouts
+      # "Failing to connect within the given time will cause curl to exit with a
+      # timeout exit code (28)."
+      # (however, note that all timeouts lead to this same exit code)
+      # https://ec.haxx.se/usingcurl/usingcurl-returns
+      # "28. Operation timeout. The specified time-out period was reached
+      # according to the conditions. curl offers several timeouts, and this exit
+      # code tells one of those timeout limits were reached."
+      # https://github.com/curl/curl/blob/272282a05416e42d2cc4a847a31fd457bc6cc827/lib/strerror.c#L143-L144
+      # "Timeout was reached" <-- actual message we could potentially match
+      TRUE
+    } else {
+      FALSE
+    }
+  }
+
+  status <- try_download(url, destfile, handle = handle)
+  if (inherits(status, "error") && interactive()) {
+    ui_oops(status$message)
+    if (ui_nope("
+      Download failed :(
+      See above for everything we know about why it failed.
+      Shall we try a couple more times, with a longer timeout?
+      ")) {
+      n_tries <- 1
+    }
+  }
+
+  i <- 1
+  # invariant: we have made i download attempts
+  while (we_should_retry(i, n_tries, status)) {
+    if (i == 1) {
+      curl::handle_setopt(
+        handle,
+        .list = c(connecttimeout = retry_connecttimeout))
+    }
+    i <- i + 1
+    ui_info("Retrying download ... attempt {i}")
+    status <- try_download(url, destfile, handle = handle)
+  }
+
+  if (inherits(status, "error")) {
+    stop(status)
+  }
+
+  invisible(handle)
+}
+
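+# single download attempt: wraps curl::curl_download() in tryCatch(), returning
+# any error as a condition object instead of throwing it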
+try_download <- function(url, destfile, quiet = FALSE, mode = "wb", handle) {
+  tryCatch(
+    curl::curl_download(
+      url = url,
+      destfile = destfile,
+      quiet = quiet,
+      mode = mode,
+      handle = handle
+    ),
+    error = function(e) e
+  )
+}
+
 tidy_unzip <- function(zipfile, cleanup = FALSE) {
   base_path <- path_dir(zipfile)
 
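As a usage sketch tying the documented workflow to this change (not taken from the commit; the URL and destination path are placeholders), the `destdir` argument and the new `"usethis.destdir"` option skip the destination prompt, and a failed download in an interactive session now triggers the retry offer implemented in `download_url()`:

library(usethis)

# default behavior: the user is asked to confirm the download destination
use_course("bit.ly/xxx-yyy-zzz")

# skip the confirmation prompt by fixing the destination, either per call ...
use_course("bit.ly/xxx-yyy-zzz", destdir = "~/courses")

# ... or once per session via the option added in this change
options(usethis.destdir = "~/courses")
use_course("bit.ly/xxx-yyy-zzz")

# if the download times out in an interactive session, tidy_download() now
# offers to retry a couple more times with a longer connect timeout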