diff --git a/README.md b/README.md
index 1ef1105..531833b 100644
--- a/README.md
+++ b/README.md
@@ -42,8 +42,9 @@ sudo mandb
 ```text
 wcurl <URL>...
-wcurl [--curl-options <CURL_OPTIONS>]... [--no-decode-filename] [-o|-O|--output <PATH>] [--dry-run] [--] <URL>...
-wcurl [--curl-options=<CURL_OPTIONS>]... [--no-decode-filename] [--output=<PATH>] [--dry-run] [--] <URL>...
+wcurl -i <file>...
+wcurl [--curl-options <CURL_OPTIONS>]... [--no-decode-filename] [-o|-O|--output <PATH>] [-i|--input-file <file>]... [--dry-run] [--] [<URL>]...
+wcurl [--curl-options=<CURL_OPTIONS>]... [--no-decode-filename] [--output=<PATH>] [--input-file=<file>]... [--dry-run] [--] [<URL>]...
 wcurl -V|--version
 wcurl -h|--help
 ```
 
@@ -85,6 +86,12 @@ should be using curl directly if your use case is not covered.
   the end (curl >= 7.83.0). If this option is provided multiple times, only
   the last value is considered.
 
+* `-i, --input-file=<file>`
+
+  Download all URLs listed in the input file. Can be used multiple times and
+  mixed with URLs as parameters. This is equivalent to setting `@<file>` as an
+  URL argument. Lines starting with `#` are ignored.
+
 * `--no-decode-filename`
 
   Don't percent-decode the output filename, even if the percent-encoding in the
@@ -112,6 +119,8 @@ instead forwarded to the curl invocation.
 URL to be downloaded. Anything that is not a parameter is considered an URL.
 Whitespaces are percent-encoded and the URL is passed to curl, which then
 performs the parsing. May be specified more than once.
+An argument starting with `@` is considered a file containing multiple URLs to
+be downloaded; `@<file>` is equivalent to using `--input-file <file>`.
 
 # Examples
 
diff --git a/wcurl b/wcurl
index 1b9b659..aba5bb0 100755
--- a/wcurl
+++ b/wcurl
@@ -49,8 +49,9 @@ usage()
 ${PROGRAM_NAME} -- a simple wrapper around curl to easily download files.
 
 Usage: ${PROGRAM_NAME} <URL>...
-       ${PROGRAM_NAME} [--curl-options <CURL_OPTIONS>]... [--no-decode-filename] [-o|-O|--output <PATH>] [--dry-run] [--] <URL>...
-       ${PROGRAM_NAME} [--curl-options=<CURL_OPTIONS>]... [--no-decode-filename] [--output=<PATH>] [--dry-run] [--] <URL>...
+       ${PROGRAM_NAME} -i <file>...
+       ${PROGRAM_NAME} [--curl-options <CURL_OPTIONS>]... [--no-decode-filename] [-o|-O|--output <PATH>] [-i|--input-file <file>]... [--dry-run] [--] [<URL>]...
+       ${PROGRAM_NAME} [--curl-options=<CURL_OPTIONS>]... [--no-decode-filename] [--output=<PATH>] [--input-file=<file>]... [--dry-run] [--] [<URL>]...
        ${PROGRAM_NAME} -h|--help
        ${PROGRAM_NAME} -V|--version
 
@@ -64,6 +65,10 @@ Options:
                            number appended to the end (curl >= 7.83.0). If this option is provided multiple
                            times, only the last value is considered.
 
+  -i, --input-file <file>: Download all URLs listed in the input file. Can be used multiple times
+                           and mixed with URLs as parameters. This is equivalent to setting
+                           "@<file>" as an URL argument. Lines starting with "#" are ignored.
+
   --no-decode-filename: Don't percent-decode the output filename, even if the
                         percent-encoding in the URL was done by wcurl, e.g.: The
                         URL contained whitespaces.
@@ -79,6 +84,8 @@ Options:
   <URL>: URL to be downloaded. Anything that is not a parameter is considered
          an URL. Whitespaces are percent-encoded and the URL is passed to
          curl, which then performs the parsing. May be specified more than once.
+         An argument starting with "@" is considered a file containing multiple URLs to be
+         downloaded; "@<file>" is equivalent to using "--input-file <file>".
 
 _EOF_
 }
 
@@ -116,6 +123,34 @@ readonly PER_URL_PARAMETERS="\
 # Whether to invoke curl or not.
 DRY_RUN="false"
 
+# Add URLs to the list of URLs to be downloaded.
+# If the argument starts with "@", then it's a file containing the URLs
+# to be downloaded (an "input file").
+# When parsing an input file, ignore lines starting with "#".
+# This function also percent-encodes the whitespaces in URLs.
+add_urls()
+{
+    case "$1" in
+        @*)
+            while read -r url; do
+                case "$url" in
+                    \#*) : ;;
+                    *)
+                        # Percent-encode whitespaces into %20, since wget supports those URLs.
+                        newurl=$(printf "%s\n" "${url}" | sed 's/ /%20/g')
+                        URLS="${URLS} ${newurl}"
+                        ;;
+                esac
+            done < "${1#@}"
+            ;;
+        *)
+            # Percent-encode whitespaces into %20, since wget supports those URLs.
+            newurl=$(printf "%s\n" "${1}" | sed 's/ /%20/g')
+            URLS="${URLS} ${newurl}"
+            ;;
+    esac
+}
+
 # Sanitize parameters.
 sanitize()
 {
@@ -279,6 +314,19 @@ while [ -n "${1-}" ]; do
            OUTPUT_PATH="${opt}"
            ;;
 
+        --input-file=*)
+            add_urls "@$(printf "%s\n" "${1}" | sed 's/^--input-file=//')"
+            ;;
+
+        -i | --input-file)
+            shift
+            add_urls "@${1}"
+            ;;
+
+        -i*)
+            add_urls "@$(printf "%s\n" "${1}" | sed 's/^-i//')"
+            ;;
+
         --no-decode-filename)
             DECODE_FILENAME="false"
             ;;
@@ -296,10 +344,8 @@
         --) # This is the start of the list of URLs.
             shift
-            for url in "$@"; do
-                # Encode whitespaces into %20, since wget supports those URLs.
-                newurl=$(printf "%s\n" "${url}" | sed 's/ /%20/g')
-                URLS="${URLS} ${newurl}"
+            for arg in "$@"; do
+                add_urls "${arg}"
             done
             break
             ;;
 
@@ -310,9 +356,7 @@
 
         *)
             # This must be a URL.
-            # Encode whitespaces into %20, since wget supports those URLs.
-            newurl=$(printf "%s\n" "${1}" | sed 's/ /%20/g')
-            URLS="${URLS} ${newurl}"
+            add_urls "${1}"
             ;;
     esac
     shift
diff --git a/wcurl.md b/wcurl.md
index 4111af5..9a9c86e 100644
--- a/wcurl.md
+++ b/wcurl.md
@@ -18,9 +18,11 @@ Added-in: n/a
 
 **wcurl \<URL\>...**
 
-**wcurl [--curl-options \<CURL_OPTIONS\>]... [--dry-run] [--no-decode-filename] [-o|-O|--output \<PATH\>] [--] \<URL\>...**
+**wcurl -i \<file\>...**
 
-**wcurl [--curl-options=\<CURL_OPTIONS\>]... [--dry-run] [--no-decode-filename] [--output=\<PATH\>] [--] \<URL\>...**
+**wcurl [--curl-options \<CURL_OPTIONS\>]... [--dry-run] [--no-decode-filename] [-o|-O|--output \<PATH\>] [-i|--input-file \<file\>]... [--] [\<URL\>]...**
+
+**wcurl [--curl-options=\<CURL_OPTIONS\>]... [--dry-run] [--no-decode-filename] [--output=\<PATH\>] [--input-file=\<file\>]... [--] [\<URL\>]...**
 
 **wcurl -V|--version**
 
@@ -82,6 +84,12 @@ URLs are provided, resulting files share the same name with a number appended
 to the end (curl \>= 7.83.0). If this option is provided multiple times, only
 the last value is considered.
 
+## -i, --input-file=\<file\>
+
+Download all URLs listed in the input file. Can be used multiple times and
+mixed with URLs as parameters. This is equivalent to setting `@\<file\>` as an
+URL argument. Lines starting with `#` are ignored.
+
 ## --no-decode-filename
 
 Don't percent-decode the output filename, even if the percent-encoding in the
@@ -109,6 +117,8 @@ is instead forwarded to the curl invocation.
 URL to be downloaded. Anything that is not a parameter is considered an URL.
 Whitespaces are percent-encoded and the URL is passed to curl, which then
 performs the parsing. May be specified more than once.
+An argument starting with `@` is considered a file containing multiple URLs to be
+downloaded; `@\<file\>` is equivalent to using `--input-file \<file\>`.
 
 # EXAMPLES
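A quick way to exercise the patch is the smoke test below. This is a minimal sketch, assuming the patched `wcurl` is on `PATH`; the file name `urls.txt` and the example URLs are placeholders, and `--dry-run` (an existing wcurl option) keeps curl from actually downloading anything.

```sh
# Build a throwaway input file; the "#" line should be skipped by add_urls().
cat > urls.txt <<'EOF'
# comment lines are ignored
https://example.com/a.txt
https://example.com/some file.txt
EOF

# The three forms below should be equivalent under this patch:
wcurl --dry-run -i urls.txt
wcurl --dry-run --input-file=urls.txt
wcurl --dry-run @urls.txt

# Input files can be repeated and mixed with plain URL arguments;
# the whitespace in "some file.txt" should surface as %20 in the output.
wcurl --dry-run -i urls.txt https://example.com/b.txt
```

One behavior worth noting in review: `add_urls()` reads the input file with `while read -r url; do ... done < file`, so a final URL on a line without a trailing newline is silently dropped. The heredoc above terminates every line with a newline, which sidesteps that edge case.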