|
| 1 | +#!/bin/bash |
| 2 | + |
| 3 | +## |
| 4 | +## Downloads package SDKs from bincache and loads them with |
| 5 | +## docker. Downloads package listings from bincache. Version can be |
| 6 | +## taken either from the latest nightly tag in the passed scripts |
| 7 | +## directory (with the -s option) or from specified version ID and |
| 8 | +## build ID (with -v and -b options). The results are written to the |
| 9 | +## passed downloads directory. |
| 10 | +## |
| 11 | +## Parameters: |
| 12 | +## -b <ID>: build ID, conflicts with -s |
| 13 | +## -h: this help |
| 14 | +## -s <DIR>: scripts repo directory, conflicts with -v and -b |
| 15 | +## -v <ID>: version ID, conflicts with -s |
| 16 | +## -nd: skip downloading of docker images |
| 17 | +## -p: download packages SDK images instead of the standard one (valid |
| 18 | +## only when downloading docker images) |
| 19 | +## -nl: skip downloading of listings |
| 20 | +## -x <FILE>: cleanup file |
| 21 | +## |
| 22 | +## Positional: |
| 23 | +## 1: downloads directory |
| 24 | +## |
| 25 | + |
set -euo pipefail

# util.sh is expected to provide the helpers used throughout this script
# (fail, info, dirname_out, get_valid_arches) and to set PKG_AUTO_IMPL_DIR.
# NOTE(review): inferred from usage below - confirm in impl/util.sh.
source "$(dirname "${BASH_SOURCE[0]}")/impl/util.sh"
# cleanups.sh presumably provides setup_cleanups and add_cleanup - confirm.
source "${PKG_AUTO_IMPL_DIR}/cleanups.sh"

# Option state, populated by the flag-parsing loop below. All default to
# empty, which means "not passed".
CLEANUP_FILE=   # -x: file to record cleanup commands in
SCRIPTS=        # -s: scripts repo directory to take the version from
VERSION_ID=     # -v: explicit version ID
BUILD_ID=       # -b: explicit build ID
SKIP_DOCKER=    # -nd: non-empty to skip downloading docker images
SKIP_LISTINGS=  # -nl: non-empty to skip downloading listings
PKGS_SDK=       # -p: non-empty to download packages SDK images instead
| 38 | + |
# Parse command-line flags; stop at '--' or at the first non-flag
# argument. Value-taking flags consume the following argument.
while [[ ${#} -gt 0 ]]; do
    case ${1} in
        -b)
            [[ -z ${SCRIPTS} ]] || fail '-b cannot be used at the same time with -s'
            [[ -n ${2:-} ]] || fail 'missing value for -b'
            BUILD_ID=${2}
            shift 2
            ;;
        -h)
            print_help
            exit 0
            ;;
        -p)
            PKGS_SDK=x
            shift
            ;;
        -s)
            [[ -z ${VERSION_ID} && -z ${BUILD_ID} ]] || fail '-s cannot be used at the same time with -v or -b'
            [[ -n ${2:-} ]] || fail 'missing value for -s'
            SCRIPTS=${2}
            shift 2
            ;;
        -v)
            [[ -z ${SCRIPTS} ]] || fail '-v cannot be used at the same time with -s'
            [[ -n ${2:-} ]] || fail 'missing value for -v'
            VERSION_ID=${2}
            shift 2
            ;;
        -x)
            [[ -n ${2:-} ]] || fail 'missing value for -x'
            CLEANUP_FILE=${2}
            shift 2
            ;;
        -nd)
            SKIP_DOCKER=x
            shift
            ;;
        -nl)
            SKIP_LISTINGS=x
            shift
            ;;
        --)
            shift
            break
            ;;
        -*)
            fail "unknown flag '${1}'"
            ;;
        *)
            break
            ;;
    esac
done
| 106 | + |
# Exactly one positional argument is expected: the downloads directory.
(( ${#} == 1 )) || fail 'Expected one positional parameter: a downloads directory'

DOWNLOADS_DIR=$(realpath "${1}")
shift

# A version source is mandatory: either -s, or -v (optionally with -b).
if [[ -z ${SCRIPTS} && -z ${VERSION_ID} ]]; then
    fail 'need to pass either -s or -v (latter with the optional -b too)'
fi

if [[ -n ${CLEANUP_FILE} ]]; then
    # Make sure the directory holding the cleanup file exists.
    dirname_out "${CLEANUP_FILE}" cleanup_dir
    # shellcheck disable=SC2154 # cleanup_dir is assigned in dirname_out
    mkdir -p "${cleanup_dir}"
    unset cleanup_dir
    setup_cleanups file "${CLEANUP_FILE}"
else
    setup_cleanups ignore
fi

# Create the downloads directory if missing and schedule its removal.
if [[ ! -d ${DOWNLOADS_DIR} ]]; then
    add_cleanup "rmdir ${DOWNLOADS_DIR@Q}"
    mkdir "${DOWNLOADS_DIR}"
fi
| 131 | + |
# Download a single URL into the given output file, retrying transient
# failures for up to a minute.
#
# 1: URL to fetch
# 2: path of the file to write the response body to
function download() {
    local url=${1} output=${2}
    shift 2

    info "Downloading ${url}"
    curl \
        --fail \
        --show-error \
        --location \
        --connect-timeout 20 \
        --retry 60 \
        --retry-delay 1 \
        --retry-max-time 60 \
        --retry-connrefused \
        "${url}" >"${output}"
}
| 149 | + |
# With -s, take the version and build IDs from the scripts repo. Read
# both fields in a single sourcing of the generated version file instead
# of spawning a separate subshell per field.
if [[ -n ${SCRIPTS} ]]; then
    # shellcheck disable=SC1091 # sourcing generated file
    mapfile -t dsal_version_info < <(source "${SCRIPTS}/sdk_container/.repo/manifests/version.txt"; printf '%s\n' "${FLATCAR_VERSION_ID}" "${FLATCAR_BUILD_ID}")
    # Guard against the sourcing failing inside the process substitution,
    # which mapfile would otherwise silently turn into an empty array.
    if [[ ${#dsal_version_info[@]} -ne 2 ]]; then
        fail "Failed to read version information from ${SCRIPTS@Q}"
    fi
    VERSION_ID=${dsal_version_info[0]}
    BUILD_ID=${dsal_version_info[1]}
    unset dsal_version_info
fi
| 156 | + |
# Two renderings of the full version: bincache image paths join version
# and build ID with '+', container names/tags with '-'. No separator is
# added when there is no build ID.
if [[ -n ${BUILD_ID} ]]; then
    ver_plus="${VERSION_ID}+${BUILD_ID}"
    ver_dash="${VERSION_ID}-${BUILD_ID}"
else
    ver_plus=${VERSION_ID}
    ver_dash=${VERSION_ID}
fi

# Tarball compression formats to try, in order of preference.
exts=(zst bz2 gz)

# Per-format decompressors to try, in order of preference.
# shellcheck disable=SC2034 # used indirectly as cmds_name and cmds
zst_cmds=(zstd)

# shellcheck disable=SC2034 # used indirectly as cmds_name and cmds
bz2_cmds=(lbunzip2 pbunzip2 bunzip2)

# shellcheck disable=SC2034 # used indirectly as cmds_name and cmds
gz_cmds=(unpigz gunzip)
| 179 | + |
# Ensure the given SDK image is loaded in docker: if it is not already
# there, download its tarball from the passed URL directory (trying the
# compression formats from exts in order) and "docker load" it.
#
# 1: image name (repo:tag) as listed by "docker images"
# 2: tarball name without the .tar.<ext> suffix
# 3: URL directory to download the tarball from
function download_sdk() {
    local image_name=${1}; shift
    local tarball_name=${1}; shift
    local url_dir=${1}; shift

    # Nothing to do if the image is already available locally.
    if docker images --format '{{.Repository}}:{{.Tag}}' | grep -q -x -F "${image_name}"; then
        return 0
    fi

    info "No ${image_name} available in docker, pulling it from bincache"
    local ext full_tarball_name tb found=
    for ext in "${exts[@]}"; do
        full_tarball_name="${tarball_name}.tar.${ext}"
        tb="${DOWNLOADS_DIR}/${full_tarball_name}"
        # Reuse a previously-downloaded, non-empty tarball if present.
        if [[ -s ${tb} ]]; then
            found=x
            break
        fi
        add_cleanup "rm -f ${tb@Q}"
        if download "${url_dir}/${full_tarball_name}" "${tb}"; then
            found=x
            break
        fi
    done
    # Fail with a clear message if no format could be downloaded, instead
    # of falling through and dying on extracting a nonexistent tarball.
    if [[ -z ${found} ]]; then
        fail "Failed to download a tarball for ${image_name@Q} from ${url_dir@Q}"
    fi
    info "Loading ${image_name} into docker"
    # Pick the decompressor list matching the chosen extension; all the
    # loop-scoped helpers are local so nothing leaks into global scope.
    local cmds_name="${ext}_cmds"
    if ! declare -p "${cmds_name}" >/dev/null 2>/dev/null; then
        fail "Failed to extract ${tb@Q} - no tools to extract ${ext@Q} files"
    fi
    local -n cmds="${cmds_name}"
    local cmd loaded=
    for cmd in "${cmds[@]}"; do
        if ! command -v "${cmd}" >/dev/null; then
            info "${cmd@Q} is not available"
            continue
        fi
        info "Using ${cmd@Q} to extract the tarball"
        "${cmd}" -d -c "${tb}" | docker load
        add_cleanup "docker rmi ${image_name@Q}"
        loaded=x
        break
    done
    if [[ -z ${loaded} ]]; then
        fail "Failed to extract ${tb@Q} - no known available tool to extract it"
    fi
    unset -n cmds
}
| 226 | + |
URL_DIR="https://bincache.flatcar-linux.net/containers/${ver_dash}"

# The standard all-arch SDK image, unless -nd or -p was passed.
if [[ -z ${SKIP_DOCKER} ]] && [[ -z ${PKGS_SDK} ]]; then
    download_sdk "ghcr.io/flatcar/flatcar-sdk-all:${ver_dash}" "flatcar-sdk-all-${ver_dash}" "${URL_DIR}"
fi

declare -a dsal_arches
get_valid_arches dsal_arches

for arch in "${dsal_arches[@]}"; do
    # Per-arch packages SDK images, only with -p and without -nd.
    if [[ -z ${SKIP_DOCKER} ]] && [[ -n ${PKGS_SDK} ]]; then
        # Pass the tarball name without a suffix: download_sdk appends
        # .tar.<ext> itself. The previous call also referenced ${ext},
        # which is local to download_sdk and thus unbound here (would
        # abort under set -u).
        download_sdk "flatcar-packages-${arch}:${ver_dash}" "flatcar-packages-${arch}-${ver_dash}" "${URL_DIR}"
    fi

    if [[ -z ${SKIP_LISTINGS} ]]; then
        listing_dir="${DOWNLOADS_DIR}/${arch}"
        add_cleanup "rmdir ${listing_dir@Q}"
        mkdir "${listing_dir}"
        base_url="https://bincache.flatcar-linux.net/images/${arch}/${ver_plus}"

        # Fetch the plain listings and the rootfs-included-sysexts ones.
        for infix in '' 'rootfs-included-sysexts'; do
            index_html="${listing_dir}/${infix}${infix:+-}index.html"
            url="${base_url}${infix:+/}${infix}"
            add_cleanup "rm -f ${index_html@Q}"
            download "${url}/" "${index_html}"

            # get names of all files ending with _packages.txt
            mapfile -t listing_files < <(grep -F '_packages.txt"' "${index_html}" | sed -e 's#.*"\(\./\)\?\([^"]*\)".*#\2#')

            for listing in "${listing_files[@]}"; do
                info "Downloading ${listing} for ${arch}"
                listing_path="${listing_dir}/${listing}"
                add_cleanup "rm -f ${listing_path@Q}"
                download "${url}/${listing}" "${listing_path}"
            done
        done
    fi
done
info 'Done'
0 commit comments