|
| 1 | +#!/usr/bin/env python3 |
| 2 | +# SPDX-License-Identifier: Apache-2.0 |
| 3 | + |
| 4 | +""" |
| 5 | +Update gardenlinux.yml with new versions from GitHub releases. |
| 6 | +
|
| 7 | +This script checks the GitHub releases feed for gardenlinux/gardenlinux |
| 8 | +and adds new OpenStack image versions to etc/images/gardenlinux.yml. |
| 9 | +""" |
| 10 | + |
| 11 | +import hashlib |
| 12 | +import os |
| 13 | +import re |
| 14 | +import shutil |
| 15 | +import sys |
| 16 | +from datetime import datetime |
| 17 | +from typing import Optional |
| 18 | + |
| 19 | +import patoolib |
| 20 | +import requests |
| 21 | +import ruamel.yaml |
| 22 | +import typer |
| 23 | +from loguru import logger |
| 24 | + |
# Typer CLI application; commands are registered below via @app.command().
app = typer.Typer()

# GitHub REST endpoint listing releases of the gardenlinux/gardenlinux repository.
GITHUB_API_URL = "https://api.github.com/repos/gardenlinux/gardenlinux/releases"
# Path (relative to the repository root) of the YAML file this script updates.
GARDENLINUX_YML_PATH = "etc/images/gardenlinux.yml"
# Object-storage endpoint and bucket used to build mirror URLs for the
# extracted qcow2 images (see extract_openstack_image).
MINIO_SERVER = "nbg1.your-objectstorage.com"
MINIO_BUCKET = "osism/openstack-images"
| 32 | + |
def get_latest_releases(max_releases: int = 10) -> list:
    """
    Fetch latest releases from GitHub API.

    Args:
        max_releases: Maximum number of releases to fetch

    Returns:
        List of release objects; empty list when the request fails
    """
    query = {"per_page": max_releases}
    try:
        # Any HTTP failure (connect error, timeout, non-2xx status, bad JSON)
        # is reported and mapped to an empty result.
        resp = requests.get(GITHUB_API_URL, params=query, timeout=30)
        resp.raise_for_status()
        releases = resp.json()
    except requests.RequestException as e:
        logger.error(f"Failed to fetch releases from GitHub: {e}")
        return []
    return releases
| 53 | + |
def extract_openstack_image(release: dict) -> Optional[dict]:
    """
    Extract OpenStack gardener_prod image information from a release.

    Args:
        release: GitHub release object

    Returns:
        Dictionary with version, url, and build_date, or None if not found
        Note: checksum is NOT included here - it must be calculated separately
    """
    version = release["tag_name"]
    published_at = release.get("published_at")

    if not published_at:
        logger.warning(f"Release {version} has no published_at date")
        return None

    # Parse the published date (GitHub timestamps end in a literal "Z").
    try:
        build_date = datetime.strptime(published_at, "%Y-%m-%dT%H:%M:%SZ").date()
    except ValueError as e:
        logger.warning(f"Failed to parse date for {version}: {e}")
        return None

    # Asset naming: openstack-gardener_prod-amd64-VERSION-HASH.tar.xz
    asset_re = re.compile(r"^openstack-gardener_prod-amd64-.*\.tar\.xz$")
    commit_re = re.compile(r"-([a-f0-9]{8})\.tar\.xz$")

    for asset in release.get("assets", []):
        filename = asset["name"]
        if not asset_re.match(filename):
            continue

        # Pull the 8-hex-digit commit hash out of the asset filename.
        commit_match = commit_re.search(filename)
        commit_hash = commit_match.group(1) if commit_match else "unknown"

        # Mirror URL points at the extracted qcow2 in our object storage.
        mirror_url = (
            f"https://{MINIO_SERVER}/{MINIO_BUCKET}/"
            f"gardenlinux/{version}/openstack-gardener_prod-amd64-{version}-{commit_hash}.qcow2"
        )

        return {
            "version": version,
            "url": asset["browser_download_url"],
            "mirror_url": mirror_url,
            "build_date": build_date,
            "archive_filename": filename,
        }

    logger.warning(f"No OpenStack gardener_prod-amd64 image found in release {version}")
    return None
| 110 | + |
def calculate_qcow2_checksum(archive_url: str, archive_filename: str) -> Optional[str]:
    """
    Download tar.xz archive, extract qcow2 file, calculate SHA256 checksum.

    The archive is downloaded to the current directory under
    ``archive_filename`` and unpacked into a per-version temporary
    directory; both are always removed afterwards.

    Args:
        archive_url: URL to download the tar.xz archive from
        archive_filename: Name of the archive file (also the local download target)

    Returns:
        SHA256 checksum in format "sha256:HEXDIGEST" or None if failed
    """
    logger.info(f"Downloading archive from {archive_url}")

    # Create a unique temporary directory for extraction.
    # Extract version from archive_filename for a unique temp dir name.
    version_match = re.search(r"amd64-(.*?)\.tar\.xz$", archive_filename)
    version_part = version_match.group(1) if version_match else "unknown"
    temp_dir = f"temp_extract_{version_part}"

    try:
        # Download the tar.xz archive in streamed chunks (files are large).
        response = requests.get(archive_url, stream=True, timeout=300)
        response.raise_for_status()

        with open(archive_filename, "wb") as fp:
            for chunk in response.iter_content(chunk_size=8192):
                fp.write(chunk)

        logger.info(f"Downloaded {archive_filename}")

        # Create temporary directory for extraction
        os.makedirs(temp_dir, exist_ok=True)
        logger.info(f"Created temporary extraction directory: {temp_dir}")

        # Extract the archive to the temporary directory
        logger.info(f"Extracting {archive_filename} to {temp_dir}")
        patoolib.extract_archive(archive_filename, outdir=temp_dir)

        # Find the extracted qcow2 file
        # Expected format: openstack-gardener_prod-amd64-VERSION-HASH.qcow2
        qcow2_pattern = re.compile(r"^openstack-gardener_prod-amd64-.*\.qcow2$")
        qcow2_file = None

        for file in os.listdir(temp_dir):
            if qcow2_pattern.match(file):
                qcow2_file = os.path.join(temp_dir, file)
                break

        if not qcow2_file:
            logger.error("No qcow2 file found after extraction")
            return None

        logger.info(f"Found extracted file: {qcow2_file}")

        # Calculate SHA256 checksum of the qcow2 file, reading in chunks
        # so multi-GB images do not have to fit in memory.
        logger.info(f"Calculating SHA256 checksum of {qcow2_file}")
        sha256_hash = hashlib.sha256()

        with open(qcow2_file, "rb") as fp:
            for chunk in iter(lambda: fp.read(8192), b""):
                sha256_hash.update(chunk)

        checksum = f"sha256:{sha256_hash.hexdigest()}"
        logger.info(f"Calculated checksum: {checksum}")
        return checksum

    except requests.RequestException as e:
        logger.error(f"Failed to download archive: {e}")
        return None
    except Exception as e:
        logger.error(f"Failed to calculate checksum: {e}")
        return None
    finally:
        # Single cleanup path for every outcome (success, handled error,
        # or an exception the handlers above do not catch) — previously
        # this code was duplicated in three places and leaked temp files
        # on e.g. KeyboardInterrupt.
        logger.info("Cleaning up temporary files")
        if os.path.exists(temp_dir):
            shutil.rmtree(temp_dir)
            logger.info(f"Removed temporary directory: {temp_dir}")
        if os.path.exists(archive_filename):
            os.remove(archive_filename)
            logger.info(f"Removed archive file: {archive_filename}")
| 210 | + |
def version_exists(versions: list, version_number: str) -> bool:
    """
    Check if a version already exists in the versions list.

    Args:
        versions: List of version dictionaries
        version_number: Version string to check

    Returns:
        True if version exists, False otherwise
    """
    # Entries without a "version" key are simply skipped.
    for entry in versions:
        if entry.get("version") == version_number:
            return True
    return False
| 224 | + |
@app.command()
def main(
    debug: bool = typer.Option(False, "--debug", help="Enable debug logging"),
    dry_run: bool = typer.Option(
        False, "--dry-run", help="Do not write changes to file"
    ),
    max_releases: int = typer.Option(
        10, "--max-releases", help="Maximum number of releases to check"
    ),
):
    """
    Update gardenlinux.yml with new versions from GitHub releases.
    """
    # Reconfigure loguru: drop the default sink and install one on stderr
    # whose level honors --debug.
    if debug:
        level = "DEBUG"
    else:
        level = "INFO"

    logger.remove()
    log_fmt = (
        "<green>{time:YYYY-MM-DD HH:mm:ss}</green> | <level>{level: <8}</level> | "
        "<cyan>{function}</cyan>:<cyan>{line}</cyan> - <level>{message}</level>"
    )
    logger.add(sys.stderr, format=log_fmt, level=level, colorize=True)

    logger.info("Checking for new Garden Linux releases")

    # Fetch latest releases from GitHub
    releases = get_latest_releases(max_releases)
    if not releases:
        logger.error("No releases found")
        return

    logger.info(f"Found {len(releases)} releases")

    # Load current gardenlinux.yml with round-trip settings so existing
    # quoting/flow style is preserved when the file is written back.
    logger.info(f"Loading {GARDENLINUX_YML_PATH}")
    ryaml = ruamel.yaml.YAML()
    ryaml.preserve_quotes = True
    ryaml.default_flow_style = False
    ryaml.width = 4096  # avoid wrapping long URL lines

    try:
        with open(GARDENLINUX_YML_PATH) as fp:
            data = ryaml.load(fp)
    except FileNotFoundError:
        logger.error(f"File {GARDENLINUX_YML_PATH} not found")
        return
    except Exception as e:
        logger.error(f"Failed to load {GARDENLINUX_YML_PATH}: {e}")
        return

    # Find the Garden Linux image entry
    if "images" not in data or not data["images"]:
        logger.error("No images found in YAML file")
        return

    # Assuming there's only one image definition for Garden Linux
    image = data["images"][0]
    current_versions = image.get("versions", [])

    logger.info(f"Current versions in file: {len(current_versions)}")
    if current_versions:
        # NOTE(review): this reads the FIRST list entry as "latest", while new
        # versions are appended at the END below — confirm which ordering the
        # file actually uses.
        latest = current_versions[0].get("version", "unknown")
        logger.info(f"Latest version in file: {latest}")

    # Process only the latest release and append to the end if new
    latest_release = releases[0]
    image_info = extract_openstack_image(latest_release)

    if not image_info:
        logger.info("No OpenStack image found in latest release")
        return

    version_number = image_info["version"]

    if version_exists(current_versions, version_number):
        logger.info(f"Latest version {version_number} already exists")
        return

    logger.info(f"Found new version: {version_number}")
    logger.info(f"  URL: {image_info['url']}")
    logger.info(f"  Build date: {image_info['build_date']}")

    # Calculate the checksum by downloading and extracting the qcow2 file
    # (slow: downloads and unpacks the full image archive).
    logger.info("Calculating checksum of qcow2 file inside archive")
    checksum = calculate_qcow2_checksum(
        image_info["url"], image_info["archive_filename"]
    )

    if not checksum:
        logger.error("Failed to calculate checksum, aborting")
        return

    logger.info(f"  Checksum: {checksum}")

    # Add checksum to image_info and remove archive_filename, which is an
    # internal helper field that must not end up in the YAML output.
    image_info["checksum"] = checksum
    del image_info["archive_filename"]

    logger.info("Adding new version to the end of versions list")

    # Append the new version to the end of the list
    current_versions.append(image_info)

    if dry_run:
        logger.info("Dry-run mode: not writing changes to file")
        logger.info(f"Would add version {version_number}")
    else:
        # Write updated YAML
        logger.info(f"Writing updated file to {GARDENLINUX_YML_PATH}")
        try:
            with open(GARDENLINUX_YML_PATH, "w") as fp:
                # Emit an explicit "---" document start and indent sequences
                # to match the repository's YAML style.
                ryaml.explicit_start = True
                ryaml.default_flow_style = False
                ryaml.width = 4096
                ryaml.indent(sequence=4, offset=2)
                ryaml.dump(data, fp)
            logger.info("File updated successfully")
        except Exception as e:
            logger.error(f"Failed to write file: {e}")
            return
| 348 | + |
if __name__ == "__main__":
    # Dispatch to the Typer CLI when executed as a script.
    app()
0 commit comments