diff --git a/relenv/build/__init__.py b/relenv/build/__init__.py index 080a4511..14613727 100644 --- a/relenv/build/__init__.py +++ b/relenv/build/__init__.py @@ -111,6 +111,12 @@ def setup_parser( action="store_true", help="Log build output to stdout instead of displaying a simplified status.", ) + build_subparser.add_argument( + "--compact-pretty", + default=False, + action="store_true", + help="Use compact UI without progress bars (simpler, less detailed).", + ) build_subparser.add_argument( "--log-level", default="warning", @@ -180,8 +186,11 @@ def main(args: argparse.Namespace) -> None: steps = [_.strip() for _ in args.steps] if args.no_pretty: show_ui = False + expanded_ui = False else: show_ui = True + # Expanded UI is default, --compact-pretty disables it + expanded_ui = not args.compact_pretty def signal_handler(_signal: int, frame: FrameType | None) -> None: sys.exit(1) @@ -197,4 +206,5 @@ def signal_handler(_signal: int, frame: FrameType | None) -> None: download_only=args.download_only, show_ui=show_ui, log_level=args.log_level.upper(), + expanded_ui=expanded_ui, ) diff --git a/relenv/build/common.py b/relenv/build/common.py deleted file mode 100644 index 4d71b211..00000000 --- a/relenv/build/common.py +++ /dev/null @@ -1,1799 +0,0 @@ -# Copyright 2022-2025 Broadcom. -# SPDX-License-Identifier: Apache-2.0 -""" -Build process common methods. -""" -from __future__ import annotations - -import fnmatch -import hashlib -import io -import logging -import multiprocessing -import os -import os.path -import pathlib -import pprint -import random -import re -import shutil -import subprocess -import sys -import tempfile -import time -import tarfile -from types import ModuleType -from typing import ( - Any, - Callable, - Dict, - IO, - List, - MutableMapping, - Optional, - Sequence, - Tuple, - Union, - cast, -) - -from typing import TYPE_CHECKING, TypedDict - -if TYPE_CHECKING: - from multiprocessing.synchronize import Event as SyncEvent -else: - SyncEvent = Any - -from relenv.common import ( - DATA_DIR, - LINUX, - MODULE_DIR, - RelenvException, - build_arch, - download_url, - extract_archive, - format_shebang, - get_download_location, - get_toolchain, - get_triplet, - runcmd, - work_dirs, - Version, - WorkDirs, -) -import relenv.relocate - - -PathLike = Union[str, os.PathLike[str]] - -log = logging.getLogger(__name__) - - -GREEN = "\033[0;32m" -YELLOW = "\033[1;33m" -RED = "\033[0;31m" -END = "\033[0m" -MOVEUP = "\033[F" - - -CICD = "CI" in os.environ -NODOWLOAD = False - - -RELENV_PTH = ( - "import os; " - "import sys; " - "from importlib import util; " - "from pathlib import Path; " - "spec = util.spec_from_file_location(" - "'relenv.runtime', str(Path(__file__).parent / 'site-packages' / 'relenv' / 'runtime.py')" - "); " - "mod = util.module_from_spec(spec); " - "sys.modules['relenv.runtime'] = mod; " - "spec.loader.exec_module(mod); mod.bootstrap();" -) - - -SYSCONFIGDATA = """ -import pathlib, sys, platform, os, logging - -log = logging.getLogger(__name__) - -def build_arch(): - machine = platform.machine() - return machine.lower() - -def get_triplet(machine=None, plat=None): - if not plat: - plat = sys.platform - if not machine: - machine = build_arch() - if plat == "darwin": - return f"{machine}-macos" - elif plat == "win32": - return f"{machine}-win" - elif plat == "linux": - return f"{machine}-linux-gnu" - else: - raise RelenvException("Unknown platform {}".format(platform)) - - - -pydir = pathlib.Path(__file__).resolve().parent -if sys.platform == "win32": - DEFAULT_DATA_DIR = 
pathlib.Path.home() / "AppData" / "Local" / "relenv" -else: - DEFAULT_DATA_DIR = pathlib.Path.home() / ".local" / "relenv" - -if "RELENV_DATA" in os.environ: - DATA_DIR = pathlib.Path(os.environ["RELENV_DATA"]).resolve() -else: - DATA_DIR = DEFAULT_DATA_DIR - -buildroot = pydir.parent.parent - -toolchain = DATA_DIR / "toolchain" / get_triplet() - -build_time_vars = {} -for key in _build_time_vars: - val = _build_time_vars[key] - orig = val - if isinstance(val, str): - val = val.format( - BUILDROOT=buildroot, - TOOLCHAIN=toolchain, - ) - build_time_vars[key] = val -""" - - -def print_ui( - events: MutableMapping[str, "multiprocessing.synchronize.Event"], - processes: MutableMapping[str, multiprocessing.Process], - fails: Sequence[str], - flipstat: Optional[Dict[str, Tuple[int, float]]] = None, -) -> None: - """ - Prints the UI during the relenv building process. - - :param events: A dictionary of events that are updated during the build process - :type events: dict - :param processes: A dictionary of build processes - :type processes: dict - :param fails: A list of processes that have failed - :type fails: list - :param flipstat: A dictionary of process statuses, defaults to {} - :type flipstat: dict, optional - """ - if flipstat is None: - flipstat = {} - if CICD: - sys.stdout.flush() - return - uiline = [] - for name in events: - if not events[name].is_set(): - status = " {}.".format(YELLOW) - elif name in processes: - now = time.time() - if name not in flipstat: - flipstat[name] = (0, now) - if flipstat[name][1] < now: - flipstat[name] = (1 - flipstat[name][0], now + random.random()) - status = " {}{}".format(GREEN, " " if flipstat[name][0] == 1 else ".") - elif name in fails: - status = " {}\u2718".format(RED) - else: - status = " {}\u2718".format(GREEN) - uiline.append(status) - uiline.append(" " + END) - sys.stdout.write("\r") - sys.stdout.write("".join(uiline)) - sys.stdout.flush() - - -def verify_checksum(file: PathLike, checksum: Optional[str]) -> bool: - """ - Verify the checksum of a file. - - Supports both SHA-1 (40 hex chars) and SHA-256 (64 hex chars) checksums. - The hash algorithm is auto-detected based on checksum length. - - :param file: The path to the file to check. - :type file: str - :param checksum: The checksum to verify against (SHA-1 or SHA-256) - :type checksum: str - - :raises RelenvException: If the checksum verification failed - - :return: True if it succeeded, or False if the checksum was None - :rtype: bool - """ - if checksum is None: - log.error("Can't verify checksum because none was given") - return False - - # Auto-detect hash type based on length - # SHA-1: 40 hex chars, SHA-256: 64 hex chars - if len(checksum) == 64: - hash_algo = hashlib.sha256() - hash_name = "sha256" - elif len(checksum) == 40: - hash_algo = hashlib.sha1() - hash_name = "sha1" - else: - raise RelenvException( - f"Invalid checksum length {len(checksum)}. Expected 40 (SHA-1) or 64 (SHA-256)" - ) - - with open(file, "rb") as fp: - hash_algo.update(fp.read()) - file_checksum = hash_algo.hexdigest() - if checksum != file_checksum: - raise RelenvException( - f"{hash_name} checksum verification failed. expected={checksum} found={file_checksum}" - ) - return True - - -def all_dirs(root: PathLike, recurse: bool = True) -> List[str]: - """ - Get all directories under and including the given root. 
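
# A standalone sketch of the length-based algorithm detection that
# verify_checksum performs above. hashlib.sha1/hashlib.sha256 and the digest
# lengths (40 and 64 hex characters) are standard-library facts; the helper
# names here are illustrative, not part of relenv.
import hashlib

def detect_hash(checksum: str):
    if len(checksum) == 64:
        return hashlib.sha256()  # SHA-256 digests are 64 hex characters
    if len(checksum) == 40:
        return hashlib.sha1()  # SHA-1 digests are 40 hex characters
    raise ValueError(f"unsupported checksum length: {len(checksum)}")

def file_digest(path: str, checksum: str) -> str:
    algo = detect_hash(checksum)
    with open(path, "rb") as fp:
        algo.update(fp.read())
    return algo.hexdigest()
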
- - :param root: The root directory to traverse - :type root: str - :param recurse: Whether to recursively search for directories, defaults to True - :type recurse: bool, optional - - :return: A list of directories found - :rtype: list - """ - root_str = os.fspath(root) - paths: List[str] = [root_str] - for current_root, dirs, _files in os.walk(root_str): - if not recurse and current_root != root_str: - continue - for name in dirs: - paths.append(os.path.join(current_root, name)) - return paths - - -def populate_env(env: MutableMapping[str, str], dirs: "Dirs") -> None: - """Populate environment variables for a build step. - - This default implementation intentionally does nothing; specific steps may - provide their own implementation via the ``populate_env`` hook. - """ - _ = env - _ = dirs - - -def build_default(env: MutableMapping[str, str], dirs: "Dirs", logfp: IO[str]) -> None: - """ - The default build function if none is given during the build process. - - :param env: The environment dictionary - :type env: dict - :param dirs: The working directories - :type dirs: ``relenv.build.common.Dirs`` - :param logfp: A handle for the log file - :type logfp: file - """ - cmd = [ - "./configure", - "--prefix={}".format(dirs.prefix), - ] - if env["RELENV_HOST"].find("linux") > -1: - cmd += [ - "--build={}".format(env["RELENV_BUILD"]), - "--host={}".format(env["RELENV_HOST"]), - ] - runcmd(cmd, env=env, stderr=logfp, stdout=logfp) - runcmd(["make", "-j8"], env=env, stderr=logfp, stdout=logfp) - runcmd(["make", "install"], env=env, stderr=logfp, stdout=logfp) - - -def build_openssl_fips( - env: MutableMapping[str, str], dirs: "Dirs", logfp: IO[str] -) -> None: - return build_openssl(env, dirs, logfp, fips=True) - - -def build_openssl( - env: MutableMapping[str, str], - dirs: "Dirs", - logfp: IO[str], - fips: bool = False, -) -> None: - """ - Build openssl. - - :param env: The environment dictionary - :type env: dict - :param dirs: The working directories - :type dirs: ``relenv.build.common.Dirs`` - :param logfp: A handle for the log file - :type logfp: file - """ - arch = "aarch64" - if sys.platform == "darwin": - plat = "darwin64" - if env["RELENV_HOST_ARCH"] == "x86_64": - arch = "x86_64-cc" - elif env["RELENV_HOST_ARCH"] == "arm64": - arch = "arm64-cc" - else: - raise RelenvException(f"Unable to build {env['RELENV_HOST_ARCH']}") - extended_cmd = [] - else: - plat = "linux" - if env["RELENV_HOST_ARCH"] == "x86_64": - arch = "x86_64" - elif env["RELENV_HOST_ARCH"] == "aarch64": - arch = "aarch64" - else: - raise RelenvException(f"Unable to build {env['RELENV_HOST_ARCH']}") - extended_cmd = [ - "-Wl,-z,noexecstack", - ] - if fips: - extended_cmd.append("enable-fips") - cmd = [ - "./Configure", - f"{plat}-{arch}", - f"--prefix={dirs.prefix}", - "--openssldir=/etc/ssl", - "--libdir=lib", - "--api=1.1.1", - "--shared", - "--with-rand-seed=os,egd", - "enable-md2", - "enable-egd", - "no-idea", - ] - cmd.extend(extended_cmd) - runcmd( - cmd, - env=env, - stderr=logfp, - stdout=logfp, - ) - runcmd(["make", "-j8"], env=env, stderr=logfp, stdout=logfp) - if fips: - shutil.copy( - pathlib.Path("providers") / "fips.so", - pathlib.Path(dirs.prefix) / "lib" / "ossl-modules", - ) - else: - runcmd(["make", "install_sw"], env=env, stderr=logfp, stdout=logfp) - - -def build_sqlite(env: MutableMapping[str, str], dirs: "Dirs", logfp: IO[str]) -> None: - """ - Build sqlite. 
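
# The configure/make/make install sequence that build_default runs, reduced
# to a minimal subprocess sketch. The prefix and job count are placeholders,
# and the cross-compile --build/--host flags are omitted here.
import os
import subprocess

def autotools_build(prefix: str, jobs: int = 8) -> None:
    env = dict(os.environ)
    subprocess.run(["./configure", f"--prefix={prefix}"], check=True, env=env)
    subprocess.run(["make", f"-j{jobs}"], check=True, env=env)
    subprocess.run(["make", "install"], check=True, env=env)
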
- - :param env: The environment dictionary - :type env: dict - :param dirs: The working directories - :type dirs: ``relenv.build.common.Dirs`` - :param logfp: A handle for the log file - :type logfp: file - """ - # extra_cflags=('-Os ' - # '-DSQLITE_ENABLE_FTS5 ' - # '-DSQLITE_ENABLE_FTS4 ' - # '-DSQLITE_ENABLE_FTS3_PARENTHESIS ' - # '-DSQLITE_ENABLE_JSON1 ' - # '-DSQLITE_ENABLE_RTREE ' - # '-DSQLITE_TCL=0 ' - # ) - # configure_pre=[ - # '--enable-threadsafe', - # '--enable-shared=no', - # '--enable-static=yes', - # '--disable-readline', - # '--disable-dependency-tracking', - # ] - cmd = [ - "./configure", - # "--with-shared", - # "--without-static", - "--enable-threadsafe", - "--disable-readline", - "--disable-dependency-tracking", - "--prefix={}".format(dirs.prefix), - # "--enable-add-ons=nptl,ports", - ] - if env["RELENV_HOST"].find("linux") > -1: - cmd += [ - "--build={}".format(env["RELENV_BUILD_ARCH"]), - "--host={}".format(env["RELENV_HOST"]), - ] - runcmd(cmd, env=env, stderr=logfp, stdout=logfp) - runcmd(["make", "-j8"], env=env, stderr=logfp, stdout=logfp) - runcmd(["make", "install"], env=env, stderr=logfp, stdout=logfp) - - -def update_ensurepip(directory: pathlib.Path) -> None: - """ - Update bundled dependencies for ensurepip (pip & setuptools). - """ - # ensurepip bundle location - bundle_dir = directory / "ensurepip" / "_bundled" - - # Make sure the destination directory exists - bundle_dir.mkdir(parents=True, exist_ok=True) - - # Detect existing whl. Later versions of python don't include setuptools. We - # only want to update whl files that python expects to be there - pip_version = "25.2" - setuptools_version = "80.9.0" - update_pip = False - update_setuptools = False - for file in bundle_dir.glob("*.whl"): - - log.debug("Checking whl: %s", str(file)) - if file.name.startswith("pip-"): - found_version = file.name.split("-")[1] - log.debug("Found version %s", found_version) - if Version(found_version) >= Version(pip_version): - log.debug("Found correct pip version or newer: %s", found_version) - else: - file.unlink() - update_pip = True - if file.name.startswith("setuptools-"): - found_version = file.name.split("-")[1] - log.debug("Found version %s", found_version) - if Version(found_version) >= Version(setuptools_version): - log.debug( - "Found correct setuptools version or newer: %s", found_version - ) - else: - file.unlink() - update_setuptools = True - - # Download whl files and update __init__.py - init_file = directory / "ensurepip" / "__init__.py" - if update_pip: - whl = f"pip-{pip_version}-py3-none-any.whl" - whl_path = "b7/3f/945ef7ab14dc4f9d7f40288d2df998d1837ee0888ec3659c813487572faa" - url = f"https://files.pythonhosted.org/packages/{whl_path}/{whl}" - download_url(url=url, dest=bundle_dir) - assert (bundle_dir / whl).exists() - - # Update __init__.py - old = "^_PIP_VERSION.*" - new = f'_PIP_VERSION = "{pip_version}"' - patch_file(path=init_file, old=old, new=new) - - # setuptools - if update_setuptools: - whl = f"setuptools-{setuptools_version}-py3-none-any.whl" - whl_path = "a3/dc/17031897dae0efacfea57dfd3a82fdd2a2aeb58e0ff71b77b87e44edc772" - url = f"https://files.pythonhosted.org/packages/{whl_path}/{whl}" - download_url(url=url, dest=bundle_dir) - assert (bundle_dir / whl).exists() - - # setuptools - old = "^_SETUPTOOLS_VERSION.*" - new = f'_SETUPTOOLS_VERSION = "{setuptools_version}"' - patch_file(path=init_file, old=old, new=new) - - log.debug("ensurepip __init__.py contents:") - log.debug(init_file.read_text()) - - -def patch_file(path: PathLike, old: 
str, new: str) -> None: - """ - Search a file line by line for a string to replace. - - :param path: Location of the file to search - :type path: str - :param old: The value that will be replaced - :type path: str - :param new: The value that will replace the 'old' value. - :type path: str - """ - log.debug("Patching file: %s", path) - with open(path, "r") as fp: - content = fp.read() - new_content = "" - for line in content.splitlines(): - line = re.sub(old, new, line) - new_content += line + "\n" - with open(path, "w") as fp: - fp.write(new_content) - - -def get_dependency_version(name: str, platform: str) -> Optional[Dict[str, str]]: - """ - Get dependency version and metadata from python-versions.json. - - Returns dict with keys: version, url, sha256, and any extra fields (e.g., sqliteversion) - Returns None if dependency not found. - - :param name: Dependency name (openssl, sqlite, xz) - :param platform: Platform name (linux, darwin, win32) - :return: Dict with version, url, sha256, and extra fields, or None - """ - versions_file = MODULE_DIR / "python-versions.json" - if not versions_file.exists(): - return None - - import json - - data = json.loads(versions_file.read_text()) - dependencies = data.get("dependencies", {}) - - if name not in dependencies: - return None - - # Get the latest version for this dependency that supports the platform - dep_versions = dependencies[name] - for version, info in sorted( - dep_versions.items(), - key=lambda x: [int(n) for n in x[0].split(".")], - reverse=True, - ): - if platform in info.get("platforms", []): - # Build result dict with version, url, sha256, and any extra fields - result = { - "version": version, - "url": info["url"], - "sha256": info.get("sha256", ""), - } - # Add any extra fields (like sqliteversion for SQLite) - for key, value in info.items(): - if key not in ["url", "sha256", "platforms"]: - result[key] = value - return result - - return None - - -class Download: - """ - A utility that holds information about content to be downloaded. 
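
# The selection rule inside get_dependency_version, isolated: sort versions
# numerically rather than lexically and return the newest one that supports
# the target platform. The version table below is made up for illustration.
deps = {
    "1.1.1": {"platforms": ["linux"]},
    "3.0.12": {"platforms": ["linux", "darwin"]},
    "3.2.1": {"platforms": ["darwin"]},
}

def newest_for(platform: str):
    for version, info in sorted(
        deps.items(),
        key=lambda kv: [int(n) for n in kv[0].split(".")],
        reverse=True,
    ):
        if platform in info["platforms"]:
            return version
    return None

assert newest_for("linux") == "3.0.12"  # 3.2.1 is newer but darwin-only
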
- - :param name: The name of the download - :type name: str - :param url: The url of the download - :type url: str - :param signature: The signature of the download, defaults to None - :type signature: str - :param destination: The path to download the file to - :type destination: str - :param version: The version of the content to download - :type version: str - :param sha1: The sha1 sum of the download - :type sha1: str - - """ - - def __init__( - self, - name: str, - url: str, - fallback_url: Optional[str] = None, - signature: Optional[str] = None, - destination: PathLike = "", - version: str = "", - checksum: Optional[str] = None, - ) -> None: - self.name = name - self.url_tpl = url - self.fallback_url_tpl = fallback_url - self.signature_tpl = signature - self._destination: pathlib.Path = pathlib.Path() - if destination: - self._destination = pathlib.Path(destination) - self.version = version - self.checksum = checksum - - def copy(self) -> "Download": - return Download( - self.name, - self.url_tpl, - self.fallback_url_tpl, - self.signature_tpl, - self.destination, - self.version, - self.checksum, - ) - - @property - def destination(self) -> pathlib.Path: - return self._destination - - @destination.setter - def destination(self, value: Optional[PathLike]) -> None: - if value: - self._destination = pathlib.Path(value) - else: - self._destination = pathlib.Path() - - @property - def url(self) -> str: - return self.url_tpl.format(version=self.version) - - @property - def fallback_url(self) -> Optional[str]: - if self.fallback_url_tpl: - return self.fallback_url_tpl.format(version=self.version) - return None - - @property - def signature_url(self) -> str: - if self.signature_tpl is None: - raise RelenvException("Signature template not configured") - return self.signature_tpl.format(version=self.version) - - @property - def filepath(self) -> pathlib.Path: - _, name = self.url.rsplit("/", 1) - return self.destination / name - - @property - def formatted_url(self) -> str: - return self.url_tpl.format(version=self.version) - - def fetch_file(self) -> Tuple[str, bool]: - """ - Download the file. - - :return: The path to the downloaded content, and whether it was downloaded. - :rtype: tuple(str, bool) - """ - try: - return download_url(self.url, self.destination, CICD), True - except Exception as exc: - fallback = self.fallback_url - if fallback: - print(f"Download failed {self.url} ({exc}); trying fallback url") - return download_url(fallback, self.destination, CICD), True - raise - - def fetch_signature(self, version: Optional[str] = None) -> Tuple[str, bool]: - """ - Download the file signature. - - :return: The path to the downloaded signature. - :rtype: str - """ - return download_url(self.signature_url, self.destination, CICD), True - - def exists(self) -> bool: - """ - True when the artifact already exists on disk. - - :return: True when the artifact already exists on disk - :rtype: bool - """ - return self.filepath.exists() - - def valid_hash(self) -> None: - pass - - @staticmethod - def validate_signature(archive: PathLike, signature: Optional[PathLike]) -> bool: - """ - True when the archive's signature is valid. 
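
# The primary/fallback pattern Download.fetch_file implements above, sketched
# with plain urllib rather than relenv's download_url helper; the url and
# fallback arguments stand in for Download's url and fallback_url pair.
import urllib.request
from typing import Optional

def fetch_with_fallback(url: str, fallback: Optional[str], dest: str) -> str:
    try:
        urllib.request.urlretrieve(url, dest)
        return dest
    except Exception as exc:
        if fallback is None:
            raise
        print(f"Download failed {url} ({exc}); trying fallback url")
        urllib.request.urlretrieve(fallback, dest)
        return dest
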
- - :param archive: The path to the archive to validate - :type archive: str - :param signature: The path to the signature to validate against - :type signature: str - - :return: True if it validated properly, else False - :rtype: bool - """ - if signature is None: - log.error("Can't check signature because none was given") - return False - try: - runcmd( - ["gpg", "--verify", signature, archive], - stderr=subprocess.PIPE, - stdout=subprocess.PIPE, - ) - return True - except RelenvException as exc: - log.error("Signature validation failed on %s: %s", archive, exc) - return False - - @staticmethod - def validate_checksum(archive: PathLike, checksum: Optional[str]) -> bool: - """ - True when when the archive matches the sha1 hash. - - :param archive: The path to the archive to validate - :type archive: str - :param checksum: The sha1 sum to validate against - :type checksum: str - :return: True if the sums matched, else False - :rtype: bool - """ - try: - verify_checksum(archive, checksum) - return True - except RelenvException as exc: - log.error("sha1 validation failed on %s: %s", archive, exc) - return False - - def __call__( - self, - force_download: bool = False, - show_ui: bool = False, - exit_on_failure: bool = False, - ) -> bool: - """ - Downloads the url and validates the signature and sha1 sum. - - :return: Whether or not validation succeeded - :rtype: bool - """ - os.makedirs(self.filepath.parent, exist_ok=True) - - downloaded = False - if force_download: - _, downloaded = self.fetch_file() - else: - file_is_valid = False - dest = get_download_location(self.url, self.destination) - if self.checksum and os.path.exists(dest): - file_is_valid = self.validate_checksum(dest, self.checksum) - if file_is_valid: - log.debug("%s already downloaded, skipping.", self.url) - else: - _, downloaded = self.fetch_file() - valid = True - if downloaded: - if self.signature_tpl is not None: - sig, _ = self.fetch_signature() - valid_sig = self.validate_signature(self.filepath, sig) - valid = valid and valid_sig - if self.checksum is not None: - valid_checksum = self.validate_checksum(self.filepath, self.checksum) - valid = valid and valid_checksum - - if not valid: - log.warning("Checksum did not match %s: %s", self.name, self.checksum) - if show_ui: - sys.stderr.write( - f"\nChecksum did not match {self.name}: {self.checksum}\n" - ) - sys.stderr.flush() - if exit_on_failure and not valid: - sys.exit(1) - return valid - - -class Recipe(TypedDict): - """Typed description of a build recipe entry.""" - - build_func: Callable[[MutableMapping[str, str], "Dirs", IO[str]], None] - wait_on: List[str] - download: Optional[Download] - - -class Dirs: - """ - A container for directories during build time. 
- - :param dirs: A collection of working directories - :type dirs: ``relenv.common.WorkDirs`` - :param name: The name of this collection - :type name: str - :param arch: The architecture being worked with - :type arch: str - """ - - def __init__(self, dirs: WorkDirs, name: str, arch: str, version: str) -> None: - # XXX name is the specific to a step where as everything - # else here is generalized to the entire build - self.name = name - self.version = version - self.arch = arch - self.root = dirs.root - self.build = dirs.build - self.downloads = dirs.download - self.logs = dirs.logs - self.sources = dirs.src - self.tmpbuild = tempfile.mkdtemp(prefix="{}_build".format(name)) - self.source: Optional[pathlib.Path] = None - - @property - def toolchain(self) -> Optional[pathlib.Path]: - if sys.platform == "darwin": - return get_toolchain(root=self.root) - elif sys.platform == "win32": - return get_toolchain(root=self.root) - else: - return get_toolchain(self.arch, self.root) - - @property - def _triplet(self) -> str: - if sys.platform == "darwin": - return "{}-macos".format(self.arch) - elif sys.platform == "win32": - return "{}-win".format(self.arch) - else: - return "{}-linux-gnu".format(self.arch) - - @property - def prefix(self) -> pathlib.Path: - return self.build / f"{self.version}-{self._triplet}" - - def __getstate__(self) -> Dict[str, Any]: - """ - Return an object used for pickling. - - :return: The picklable state - """ - return { - "name": self.name, - "arch": self.arch, - "root": self.root, - "build": self.build, - "downloads": self.downloads, - "logs": self.logs, - "sources": self.sources, - "tmpbuild": self.tmpbuild, - } - - def __setstate__(self, state: Dict[str, Any]) -> None: - """ - Unwrap the object returned from unpickling. - - :param state: The state to unpickle - :type state: dict - """ - self.name = state["name"] - self.arch = state["arch"] - self.root = state["root"] - self.downloads = state["downloads"] - self.logs = state["logs"] - self.sources = state["sources"] - self.build = state["build"] - self.tmpbuild = state["tmpbuild"] - - def to_dict(self) -> Dict[str, Any]: - """ - Get a dictionary representation of the directories in this collection. - - :return: A dictionary of all the directories - :rtype: dict - """ - return { - x: getattr(self, x) - for x in [ - "root", - "prefix", - "downloads", - "logs", - "sources", - "build", - "toolchain", - ] - } - - -class Builder: - """ - Utility that handles the build process. 
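
# Why Dirs defines __getstate__/__setstate__: build steps run in
# multiprocessing workers, so the object must survive pickling. A toy class
# showing the same protocol; Step and its fields are hypothetical.
import os
import pickle

class Step:
    def __init__(self, name: str) -> None:
        self.name = name
        self.logfp = open(os.devnull, "w")  # file handles do not pickle

    def __getstate__(self):
        return {"name": self.name}  # keep only the picklable state

    def __setstate__(self, state):
        self.name = state["name"]
        self.logfp = open(os.devnull, "w")  # recreate the handle afterwards

clone = pickle.loads(pickle.dumps(Step("openssl")))
assert clone.name == "openssl"
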
- - :param root: The root of the working directories for this build - :type root: str - :param recipies: The instructions for the build steps - :type recipes: list - :param build_default: The default build function, defaults to ``build_default`` - :type build_default: types.FunctionType - :param populate_env: The default function to populate the build environment, defaults to ``populate_env`` - :type populate_env: types.FunctionType - :param force_download: If True, forces downloading the archives even if they exist, defaults to False - :type force_download: bool - :param arch: The architecture being built - :type arch: str - """ - - def __init__( - self, - root: Optional[PathLike] = None, - recipies: Optional[Dict[str, Recipe]] = None, - build_default: Callable[ - [MutableMapping[str, str], "Dirs", IO[str]], None - ] = build_default, - populate_env: Callable[[MutableMapping[str, str], "Dirs"], None] = populate_env, - arch: str = "x86_64", - version: str = "", - ) -> None: - self.root = root - self.dirs: WorkDirs = work_dirs(root) - self.build_arch = build_arch() - self.build_triplet = get_triplet(self.build_arch) - self.arch = arch - self.sources = self.dirs.src - self.downloads = self.dirs.download - - if recipies is None: - self.recipies: Dict[str, Recipe] = {} - else: - self.recipies = recipies - - self.build_default = build_default - self.populate_env = populate_env - self.version = version - self.set_arch(self.arch) - - def copy(self, version: str, checksum: Optional[str]) -> "Builder": - recipies: Dict[str, Recipe] = {} - for name in self.recipies: - recipe = self.recipies[name] - recipies[name] = { - "build_func": recipe["build_func"], - "wait_on": list(recipe["wait_on"]), - "download": recipe["download"].copy() if recipe["download"] else None, - } - build = Builder( - self.root, - recipies, - self.build_default, - self.populate_env, - self.arch, - version, - ) - python_download = build.recipies["python"].get("download") - if python_download is None: - raise RelenvException("Python recipe is missing a download entry") - python_download.version = version - python_download.checksum = checksum - return build - - def set_arch(self, arch: str) -> None: - """ - Set the architecture for the build. - - :param arch: The arch to build - :type arch: str - """ - self.arch = arch - self._toolchain: Optional[pathlib.Path] = None - - @property - def toolchain(self) -> Optional[pathlib.Path]: - """Lazily fetch toolchain only when needed.""" - if self._toolchain is None and sys.platform == "linux": - self._toolchain = get_toolchain(self.arch, self.dirs.root) - return self._toolchain - - @property - def triplet(self) -> str: - return get_triplet(self.arch) - - @property - def prefix(self) -> pathlib.Path: - return self.dirs.build / f"{self.version}-{self.triplet}" - - @property - def _triplet(self) -> str: - if sys.platform == "darwin": - return "{}-macos".format(self.arch) - elif sys.platform == "win32": - return "{}-win".format(self.arch) - else: - return "{}-linux-gnu".format(self.arch) - - def add( - self, - name: str, - build_func: Optional[Callable[..., Any]] = None, - wait_on: Optional[Sequence[str]] = None, - download: Optional[Dict[str, Any]] = None, - ) -> None: - """ - Add a step to the build process. 
- - :param name: The name of the step - :type name: str - :param build_func: The function that builds this step, defaults to None - :type build_func: types.FunctionType, optional - :param wait_on: Processes to wait on before running this step, defaults to None - :type wait_on: list, optional - :param download: A dictionary of download information, defaults to None - :type download: dict, optional - """ - if wait_on is None: - wait_on_list: List[str] = [] - else: - wait_on_list = list(wait_on) - if build_func is None: - build_func = self.build_default - download_obj: Optional[Download] = None - if download is not None: - download_obj = Download(name, destination=self.downloads, **download) - self.recipies[name] = { - "build_func": build_func, - "wait_on": wait_on_list, - "download": download_obj, - } - - def run( - self, - name: str, - event: "multiprocessing.synchronize.Event", - build_func: Callable[..., Any], - download: Optional[Download], - show_ui: bool = False, - log_level: str = "WARNING", - ) -> Any: - """ - Run a build step. - - :param name: The name of the step to run - :type name: str - :param event: An event to track this process' status and alert waiting steps - :type event: ``multiprocessing.Event`` - :param build_func: The function to use to build this step - :type build_func: types.FunctionType - :param download: The ``Download`` instance for this step - :type download: ``Download`` - - :return: The output of the build function - """ - root_log = logging.getLogger(None) - if sys.platform == "win32": - if not show_ui: - handler = logging.StreamHandler() - handler.setLevel(logging.getLevelName(log_level)) - root_log.addHandler(handler) - - for handler in root_log.handlers: - if isinstance(handler, logging.StreamHandler): - handler.setFormatter( - logging.Formatter(f"%(asctime)s {name} %(message)s") - ) - - if not self.dirs.build.exists(): - os.makedirs(self.dirs.build, exist_ok=True) - - dirs = Dirs(self.dirs, name, self.arch, self.version) - os.makedirs(dirs.sources, exist_ok=True) - os.makedirs(dirs.logs, exist_ok=True) - os.makedirs(dirs.prefix, exist_ok=True) - - while event.is_set() is False: - time.sleep(0.3) - - logfp = io.open(os.path.join(dirs.logs, "{}.log".format(name)), "w") - handler = logging.FileHandler(dirs.logs / f"{name}.log") - root_log.addHandler(handler) - root_log.setLevel(logging.NOTSET) - - # DEBUG: Uncomment to debug - # logfp = sys.stdout - - cwd = os.getcwd() - if download: - extract_archive(dirs.sources, str(download.filepath)) - dirs.source = dirs.sources / download.filepath.name.split(".tar")[0] - os.chdir(dirs.source) - else: - os.chdir(dirs.prefix) - - if sys.platform == "win32": - env = os.environ.copy() - else: - env = { - "PATH": os.environ["PATH"], - } - env["RELENV_DEBUG"] = "1" - env["RELENV_BUILDENV"] = "1" - env["RELENV_HOST"] = self.triplet - env["RELENV_HOST_ARCH"] = self.arch - env["RELENV_BUILD"] = self.build_triplet - env["RELENV_BUILD_ARCH"] = self.build_arch - python_download = self.recipies["python"].get("download") - if python_download is None: - raise RelenvException("Python recipe is missing download configuration") - env["RELENV_PY_VERSION"] = python_download.version - env["RELENV_PY_MAJOR_VERSION"] = env["RELENV_PY_VERSION"].rsplit(".", 1)[0] - if "RELENV_DATA" in os.environ: - env["RELENV_DATA"] = os.environ["RELENV_DATA"] - if self.build_arch != self.arch: - native_root = DATA_DIR / "native" - env["RELENV_NATIVE_PY"] = str(native_root / "bin" / "python3") - - self.populate_env(env, dirs) - - _ = dirs.to_dict() - for k in 
_: - log.info("Directory %s %s", k, _[k]) - for k in env: - log.info("Environment %s %s", k, env[k]) - try: - return build_func(env, dirs, logfp) - except Exception: - log.exception("Build failure") - sys.exit(1) - finally: - os.chdir(cwd) - log.removeHandler(handler) - logfp.close() - - def cleanup(self) -> None: - """ - Clean up the build directories. - """ - shutil.rmtree(self.prefix) - - def clean(self) -> None: - """ - Completely clean up the remnants of a relenv build. - """ - # Clean directories - for _ in [self.prefix, self.sources]: - try: - shutil.rmtree(_) - except PermissionError: - sys.stderr.write(f"Unable to remove directory: {_}") - except FileNotFoundError: - pass - # Clean files - archive = f"{self.prefix}.tar.xz" - for _ in [archive]: - try: - os.remove(_) - except FileNotFoundError: - pass - - def download_files( - self, - steps: Optional[Sequence[str]] = None, - force_download: bool = False, - show_ui: bool = False, - ) -> None: - """ - Download all of the needed archives. - - :param steps: The steps to download archives for, defaults to None - :type steps: list, optional - """ - step_names = list(steps) if steps is not None else list(self.recipies) - - fails: List[str] = [] - processes: Dict[str, multiprocessing.Process] = {} - events: Dict[str, SyncEvent] = {} - if show_ui: - sys.stdout.write("Starting downloads \n") - log.info("Starting downloads") - if show_ui: - print_ui(events, processes, fails) - for name in step_names: - download = self.recipies[name]["download"] - if download is None: - continue - event = multiprocessing.Event() - event.set() - events[name] = event - proc = multiprocessing.Process( - name=name, - target=download, - kwargs={ - "force_download": force_download, - "show_ui": show_ui, - "exit_on_failure": True, - }, - ) - proc.start() - processes[name] = proc - - while processes: - for proc in list(processes.values()): - proc.join(0.3) - # DEBUG: Comment to debug - if show_ui: - print_ui(events, processes, fails) - if proc.exitcode is None: - continue - processes.pop(proc.name) - if proc.exitcode != 0: - fails.append(proc.name) - if show_ui: - print_ui(events, processes, fails) - sys.stdout.write("\n") - if fails and False: - if show_ui: - print_ui(events, processes, fails) - sys.stderr.write("The following failures were reported\n") - for fail in fails: - sys.stderr.write(fail + "\n") - sys.stderr.flush() - sys.exit(1) - - def build( - self, - steps: Optional[Sequence[str]] = None, - cleanup: bool = True, - show_ui: bool = False, - log_level: str = "WARNING", - ) -> None: - """ - Build! - - :param steps: The steps to run, defaults to None - :type steps: list, optional - :param cleanup: Whether to clean up or not, defaults to True - :type cleanup: bool, optional - """ # noqa: D400 - fails: List[str] = [] - events: Dict[str, SyncEvent] = {} - waits: Dict[str, List[str]] = {} - processes: Dict[str, multiprocessing.Process] = {} - - if show_ui: - sys.stdout.write("Starting builds\n") - # DEBUG: Comment to debug - print_ui(events, processes, fails) - log.info("Starting builds") - - step_names = list(steps) if steps is not None else list(self.recipies) - - for name in step_names: - event = multiprocessing.Event() - events[name] = event - recipe = self.recipies[name] - kwargs = dict(recipe) - kwargs["show_ui"] = show_ui - kwargs["log_level"] = log_level - - # Determine needed dependency recipies. 
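
# The wait_on bookkeeping used below, modeled with plain dicts and no
# processes: a step becomes runnable once its prerequisite set empties.
# Step names are examples only.
waits = {"python": {"openssl", "sqlite"}, "openssl": set(), "sqlite": set()}
done = set()

def mark_done(finished):
    done.add(finished)
    for deps in waits.values():
        deps.discard(finished)
    return [name for name, deps in waits.items() if not deps and name not in done]

assert mark_done("openssl") == ["sqlite"]
assert mark_done("sqlite") == ["python"]
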
- wait_on_seq = cast(List[str], kwargs.pop("wait_on", [])) - wait_on_list = list(wait_on_seq) - for dependency in wait_on_list[:]: - if dependency not in step_names: - wait_on_list.remove(dependency) - - waits[name] = wait_on_list - if not waits[name]: - event.set() - - proc = multiprocessing.Process( - name=name, target=self.run, args=(name, event), kwargs=kwargs - ) - proc.start() - processes[name] = proc - - # Wait for the processes to finish and check if we should send any - # dependency events. - while processes: - for proc in list(processes.values()): - proc.join(0.3) - if show_ui: - # DEBUG: Comment to debug - print_ui(events, processes, fails) - if proc.exitcode is None: - continue - processes.pop(proc.name) - if proc.exitcode != 0: - fails.append(proc.name) - is_failure = True - else: - is_failure = False - for name in waits: - if proc.name in waits[name]: - if is_failure: - if name in processes: - processes[name].terminate() - time.sleep(0.1) - waits[name].remove(proc.name) - if not waits[name] and not events[name].is_set(): - events[name].set() - - if fails: - sys.stderr.write("The following failures were reported\n") - last_outs = {} - for fail in fails: - log_file = self.dirs.logs / f"{fail}.log" - try: - with io.open(log_file) as fp: - fp.seek(0, 2) - end = fp.tell() - ind = end - 4096 - if ind > 0: - fp.seek(ind) - else: - fp.seek(0) - last_out = fp.read() - if show_ui: - sys.stderr.write("=" * 20 + f" {fail} " + "=" * 20 + "\n") - sys.stderr.write(fp.read() + "\n\n") - except FileNotFoundError: - last_outs[fail] = f"Log file not found: {log_file}" - log.error("Build step %s has failed", fail) - log.error(last_out) - if show_ui: - sys.stderr.flush() - if cleanup: - log.debug("Performing cleanup.") - self.cleanup() - sys.exit(1) - if show_ui: - time.sleep(0.3) - print_ui(events, processes, fails) - sys.stdout.write("\n") - sys.stdout.flush() - if cleanup: - log.debug("Performing cleanup.") - self.cleanup() - - def check_prereqs(self) -> List[str]: - """ - Check pre-requsists for build. - - This method verifies all requrements for a successful build are satisfied. - - :return: Returns a list of string describing failed checks - :rtype: list - """ - fail: List[str] = [] - if sys.platform == "linux": - if not self.toolchain or not self.toolchain.exists(): - fail.append( - f"Toolchain for {self.arch} does not exist. Please pip install ppbt." - ) - return fail - - def __call__( - self, - steps: Optional[Sequence[str]] = None, - arch: Optional[str] = None, - clean: bool = True, - cleanup: bool = True, - force_download: bool = False, - download_only: bool = False, - show_ui: bool = False, - log_level: str = "WARNING", - ) -> None: - """ - Set the architecture, define the steps, clean if needed, download what is needed, and build. 
- - :param steps: The steps to run, defaults to None - :type steps: list, optional - :param arch: The architecture to build, defaults to None - :type arch: str, optional - :param clean: If true, cleans the directories first, defaults to True - :type clean: bool, optional - :param cleanup: Cleans up after build if true, defaults to True - :type cleanup: bool, optional - :param force_download: Whether or not to download the content if it already exists, defaults to True - :type force_download: bool, optional - """ - log = logging.getLogger(None) - log.setLevel(logging.NOTSET) - - stream_handler: Optional[logging.Handler] = None - if not show_ui: - stream_handler = logging.StreamHandler() - stream_handler.setLevel(logging.getLevelName(log_level)) - log.addHandler(stream_handler) - - os.makedirs(self.dirs.logs, exist_ok=True) - file_handler = logging.FileHandler(self.dirs.logs / "build.log") - file_handler.setLevel(logging.INFO) - log.addHandler(file_handler) - - if arch: - self.set_arch(arch) - - step_names = list(steps) if steps is not None else list(self.recipies) - - failures = self.check_prereqs() - if not download_only and failures: - for _ in failures: - sys.stderr.write(f"{_}\n") - sys.stderr.flush() - sys.exit(1) - - if clean: - self.clean() - - if self.build_arch != self.arch: - native_root = DATA_DIR / "native" - if not native_root.exists(): - if "RELENV_NATIVE_PY_VERSION" in os.environ: - version = os.environ["RELENV_NATIVE_PY_VERSION"] - else: - version = self.version - from relenv.create import create - - create("native", DATA_DIR, version=version) - - # Start a process for each build passing it an event used to notify each - # process if it's dependencies have finished. - try: - self.download_files( - step_names, force_download=force_download, show_ui=show_ui - ) - if download_only: - return - self.build(step_names, cleanup, show_ui=show_ui, log_level=log_level) - finally: - log.removeHandler(file_handler) - if stream_handler is not None: - log.removeHandler(stream_handler) - - -class Builds: - """Collection of platform-specific builders.""" - - def __init__(self) -> None: - self.builds: Dict[str, Builder] = {} - - def add(self, platform: str, *args: Any, **kwargs: Any) -> Builder: - if "builder" in kwargs: - build_candidate = kwargs.pop("builder") - if args or kwargs: - raise RuntimeError( - "builder keyword can not be used with other kwargs or args" - ) - build = cast(Builder, build_candidate) - else: - build = Builder(*args, **kwargs) - self.builds[platform] = build - return build - - -builds = Builds() - - -def patch_shebang(path: PathLike, old: str, new: str) -> bool: - """ - Replace a file's shebang. - - :param path: The path of the file to patch - :type path: str - :param old: The old shebang, will only patch when this is found - :type old: str - :param name: The new shebang to be written - :type name: str - """ - with open(path, "rb") as fp: - try: - data = fp.read(len(old.encode())).decode() - except UnicodeError: - return False - except Exception as exc: - log.warning("Unhandled exception: %r", exc) - return False - if data != old: - log.warning("Shebang doesn't match: %s %r != %r", path, old, data) - return False - data = fp.read().decode() - with open(path, "w") as fp: - fp.write(new) - fp.write(data) - with open(path, "r") as fp: - data = fp.read() - log.info("Patched shebang of %s => %r", path, data) - return True - - -def patch_shebangs(path: PathLike, old: str, new: str) -> None: - """ - Traverse directory and patch shebangs. 
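
# patch_shebang's guarded rewrite in miniature: replace the interpreter line
# only when the file starts with the exact expected shebang. The path/old/new
# arguments are whatever the caller supplies; nothing here is relenv-specific.
def rewrite_shebang(path: str, old: str, new: str) -> bool:
    with open(path, "rb") as fp:
        head = fp.read(len(old.encode()))
        if head.decode(errors="replace") != old:
            return False  # unexpected interpreter, leave the file alone
        rest = fp.read().decode()
    with open(path, "w") as fp:
        fp.write(new + rest)
    return True
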
- - :param path: The of the directory to traverse - :type path: str - :param old: The old shebang, will only patch when this is found - :type old: str - :param name: The new shebang to be written - :type name: str - """ - for root, _dirs, files in os.walk(str(path)): - for file in files: - patch_shebang(os.path.join(root, file), old, new) - - -def install_sysdata( - mod: ModuleType, - destfile: PathLike, - buildroot: PathLike, - toolchain: Optional[PathLike], -) -> None: - """ - Create a Relenv Python environment's sysconfigdata. - - Helper method used by the `finalize` build method to create a Relenv - Python environment's sysconfigdata. - - :param mod: The module to operate on - :type mod: ``types.ModuleType`` - :param destfile: Path to the file to write the data to - :type destfile: str - :param buildroot: Path to the root of the build - :type buildroot: str - :param toolchain: Path to the root of the toolchain - :type toolchain: str - """ - data = {} - - def fbuildroot(s: str) -> str: - return s.replace(str(buildroot), "{BUILDROOT}") - - def ftoolchain(s: str) -> str: - return s.replace(str(toolchain), "{TOOLCHAIN}") - - # XXX: keymap is not used, remove it? - # keymap = { - # "BINDIR": (fbuildroot,), - # "BINLIBDEST": (fbuildroot,), - # "CFLAGS": (fbuildroot, ftoolchain), - # "CPPLAGS": (fbuildroot, ftoolchain), - # "CXXFLAGS": (fbuildroot, ftoolchain), - # "datarootdir": (fbuildroot,), - # "exec_prefix": (fbuildroot,), - # "LDFLAGS": (fbuildroot, ftoolchain), - # "LDSHARED": (fbuildroot, ftoolchain), - # "LIBDEST": (fbuildroot,), - # "prefix": (fbuildroot,), - # "SCRIPTDIR": (fbuildroot,), - # } - for key in sorted(mod.build_time_vars): - val = mod.build_time_vars[key] - if isinstance(val, str): - for _ in (fbuildroot, ftoolchain): - val = _(val) - log.info("SYSCONFIG [%s] %s => %s", key, mod.build_time_vars[key], val) - data[key] = val - - with open(destfile, "w", encoding="utf8") as f: - f.write( - "# system configuration generated and used by" " the relenv at runtime\n" - ) - f.write("_build_time_vars = ") - pprint.pprint(data, stream=f) - f.write(SYSCONFIGDATA) - - -def find_sysconfigdata(pymodules: PathLike) -> str: - """ - Find sysconfigdata directory for python installation. - - :param pymodules: Path to python modules (e.g. lib/python3.10) - :type pymodules: str - - :return: The name of the sysconig data module - :rtype: str - """ - for root, dirs, files in os.walk(pymodules): - for file in files: - if file.find("sysconfigdata") > -1 and file.endswith(".py"): - return file[:-3] - raise RelenvException("Unable to locate sysconfigdata module") - - -def install_runtime(sitepackages: PathLike) -> None: - """ - Install a base relenv runtime. - """ - site_dir = pathlib.Path(sitepackages) - relenv_pth = site_dir / "relenv.pth" - with io.open(str(relenv_pth), "w") as fp: - fp.write(RELENV_PTH) - - # Lay down relenv.runtime, we'll pip install the rest later - relenv = site_dir / "relenv" - os.makedirs(relenv, exist_ok=True) - - for name in [ - "runtime.py", - "relocate.py", - "common.py", - "buildenv.py", - "__init__.py", - ]: - src = MODULE_DIR / name - dest = relenv / name - with io.open(src, "r") as rfp: - with io.open(dest, "w") as wfp: - wfp.write(rfp.read()) - - -def finalize( - env: MutableMapping[str, str], - dirs: Dirs, - logfp: IO[str], -) -> None: - """ - Run after we've fully built python. - - This method enhances the newly created python with Relenv's runtime hacks. 
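
# The substitution install_sysdata performs, in isolation: absolute build
# paths in sysconfig values become placeholders that the generated
# sysconfigdata module re-expands at runtime. Paths and keys are examples.
buildroot = "/home/ci/relenv/build/3.12.4-x86_64-linux-gnu"
toolchain = "/home/ci/relenv/toolchain/x86_64-linux-gnu"

build_time_vars = {
    "CC": f"{toolchain}/bin/gcc",
    "prefix": buildroot,
    "VERSION": "3.12",
}

relocatable = {
    key: (
        val.replace(buildroot, "{BUILDROOT}").replace(toolchain, "{TOOLCHAIN}")
        if isinstance(val, str)
        else val
    )
    for key, val in build_time_vars.items()
}
assert relocatable["CC"] == "{TOOLCHAIN}/bin/gcc"
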
- - :param env: The environment dictionary - :type env: dict - :param dirs: The working directories - :type dirs: ``relenv.build.common.Dirs`` - :param logfp: A handle for the log file - :type logfp: file - """ - # Run relok8 to make sure the rpaths are relocatable. - relenv.relocate.main(dirs.prefix, log_file_name=str(dirs.logs / "relocate.py.log")) - # Install relenv-sysconfigdata module - libdir = pathlib.Path(dirs.prefix) / "lib" - - def find_pythonlib(libdir: pathlib.Path) -> Optional[str]: - for _root, dirs, _files in os.walk(libdir): - for entry in dirs: - if entry.startswith("python"): - return entry - return None - - python_lib = find_pythonlib(libdir) - if python_lib is None: - raise RelenvException("Unable to locate python library directory") - - pymodules = libdir / python_lib - - # update ensurepip - update_ensurepip(pymodules) - - cwd = os.getcwd() - modname = find_sysconfigdata(pymodules) - path = sys.path - sys.path = [str(pymodules)] - try: - mod = __import__(str(modname)) - finally: - os.chdir(cwd) - sys.path = path - - dest = pymodules / f"{modname}.py" - install_sysdata(mod, dest, dirs.prefix, dirs.toolchain) - - # Lay down site customize - bindir = pathlib.Path(dirs.prefix) / "bin" - sitepackages = pymodules / "site-packages" - install_runtime(sitepackages) - - # Install pip - python_exe = str(dirs.prefix / "bin" / "python3") - if env["RELENV_HOST_ARCH"] != env["RELENV_BUILD_ARCH"]: - env["RELENV_CROSS"] = str(dirs.prefix) - python_exe = env["RELENV_NATIVE_PY"] - logfp.write("\nRUN ENSURE PIP\n") - - env.pop("RELENV_BUILDENV") - - runcmd( - [python_exe, "-m", "ensurepip"], - env=env, - stderr=logfp, - stdout=logfp, - ) - - # Fix the shebangs in the scripts python layed down. Order matters. - shebangs = [ - "#!{}".format(bindir / f"python{env['RELENV_PY_MAJOR_VERSION']}"), - "#!{}".format( - bindir / f"python{env['RELENV_PY_MAJOR_VERSION'].split('.', 1)[0]}" - ), - ] - newshebang = format_shebang("/python3") - for shebang in shebangs: - log.info("Patch shebang %r with %r", shebang, newshebang) - patch_shebangs( - str(pathlib.Path(dirs.prefix) / "bin"), - shebang, - newshebang, - ) - - if sys.platform == "linux": - pyconf = f"config-{env['RELENV_PY_MAJOR_VERSION']}-{env['RELENV_HOST']}" - patch_shebang( - str(pymodules / pyconf / "python-config.py"), - "#!{}".format(str(bindir / f"python{env['RELENV_PY_MAJOR_VERSION']}")), - format_shebang("../../../bin/python3"), - ) - - toolchain_path = dirs.toolchain - if toolchain_path is None: - raise RelenvException("Toolchain path is required for linux builds") - shutil.copy( - pathlib.Path(toolchain_path) - / env["RELENV_HOST"] - / "sysroot" - / "lib" - / "libstdc++.so.6", - libdir, - ) - - # Moved in python 3.13 or removed? - if (pymodules / "cgi.py").exists(): - patch_shebang( - str(pymodules / "cgi.py"), - "#! 
/usr/local/bin/python", - format_shebang("../../bin/python3"), - ) - - def runpip(pkg: Union[str, os.PathLike[str]], upgrade: bool = False) -> None: - logfp.write(f"\nRUN PIP {pkg} {upgrade}\n") - target: Optional[pathlib.Path] = None - python_exe = str(dirs.prefix / "bin" / "python3") - if sys.platform == LINUX: - if env["RELENV_HOST_ARCH"] != env["RELENV_BUILD_ARCH"]: - target = pymodules / "site-packages" - python_exe = env["RELENV_NATIVE_PY"] - cmd = [ - python_exe, - "-m", - "pip", - "install", - str(pkg), - ] - if upgrade: - cmd.append("--upgrade") - if target: - cmd.append("--target={}".format(target)) - runcmd(cmd, env=env, stderr=logfp, stdout=logfp) - - runpip("wheel") - # This needs to handle running from the root of the git repo and also from - # an installed Relenv - if (MODULE_DIR.parent / ".git").exists(): - runpip(MODULE_DIR.parent, upgrade=True) - else: - runpip("relenv", upgrade=True) - globs = [ - "/bin/python*", - "/bin/pip*", - "/bin/relenv", - "/lib/python*/ensurepip/*", - "/lib/python*/site-packages/*", - "/include/*", - "*.so", - "/lib/*.so.*", - "*.py", - # Mac specific, factor this out - "*.dylib", - ] - archive = f"{ dirs.prefix }.tar.xz" - log.info("Archive is %s", archive) - with tarfile.open(archive, mode="w:xz") as fp: - create_archive(fp, dirs.prefix, globs, logfp) - - -def create_archive( - tarfp: tarfile.TarFile, - toarchive: PathLike, - globs: Sequence[str], - logfp: Optional[IO[str]] = None, -) -> None: - """ - Create an archive. - - :param tarfp: A pointer to the archive to be created - :type tarfp: file - :param toarchive: The path to the directory to archive - :type toarchive: str - :param globs: A list of filtering patterns to match against files to be added - :type globs: list - :param logfp: A pointer to the log file - :type logfp: file - """ - log.debug("Current directory %s", os.getcwd()) - log.debug("Creating archive %s", tarfp.name) - for root, _dirs, files in os.walk(toarchive): - relroot = pathlib.Path(root).relative_to(toarchive) - for f in files: - relpath = relroot / f - matches = False - for g in globs: - candidate = pathlib.Path("/") / relpath - if fnmatch.fnmatch(str(candidate), g): - matches = True - break - if matches: - log.debug("Adding %s", relpath) - tarfp.add(relpath, arcname=str(relpath), recursive=False) - else: - log.debug("Skipping %s", relpath) diff --git a/relenv/build/common/__init__.py b/relenv/build/common/__init__.py new file mode 100644 index 00000000..4bfc0d1c --- /dev/null +++ b/relenv/build/common/__init__.py @@ -0,0 +1,48 @@ +# Copyright 2022-2025 Broadcom. +# SPDX-License-Identifier: Apache-2.0 +""" +Build process common methods. + +This module has been split into focused submodules for better organization. +All public APIs are re-exported here for backward compatibility. 
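
# The include filter create_archive applies above, isolated: a file is added
# to the tarball when its path, rooted at "/", matches any of the globs. The
# patterns mirror the list passed by finalize; the sample paths are made up.
import fnmatch
import pathlib

globs = ["/bin/python*", "/lib/python*/site-packages/*", "*.so"]

def included(relpath: str) -> bool:
    candidate = str(pathlib.PurePosixPath("/") / relpath)
    return any(fnmatch.fnmatch(candidate, g) for g in globs)

assert included("bin/python3")
assert included("lib/python3.12/site-packages/pip/__init__.py")
assert not included("share/man/man1/python3.1")
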
+""" +from __future__ import annotations + +from .builders import ( + build_openssl, + build_openssl_fips, + build_sqlite, +) + +from .install import ( + update_ensurepip, + install_runtime, + finalize, + create_archive, + patch_file, +) + +from .builder import ( + Dirs, + builds, + get_dependency_version, +) + + +__all__ = [ + # Builder classes and instances + "Dirs", + "builds", + # Dependency version management + "get_dependency_version", + # Install functions + "finalize", + "install_runtime", + "create_archive", + "update_ensurepip", + "patch_file", + # Builders (specific build functions) + "build_openssl", + "build_openssl_fips", + "build_sqlite", +] diff --git a/relenv/build/common/_sysconfigdata_template.py b/relenv/build/common/_sysconfigdata_template.py new file mode 100644 index 00000000..d499d391 --- /dev/null +++ b/relenv/build/common/_sysconfigdata_template.py @@ -0,0 +1,72 @@ +# Copyright 2022-2025 Broadcom. +# SPDX-License-Identifier: Apache-2.0 +# mypy: ignore-errors +# flake8: noqa +""" +Template for sysconfigdata module generated at build time. + +This file is used as a template to generate the _sysconfigdata module +that CPython uses at runtime. It is copied verbatim (after the header comments) +into the generated sysconfigdata file. + +The _build_time_vars dictionary is written before this content. + +Note: mypy and flake8 errors are ignored for this template file as it contains +code that is valid only in the context of the generated sysconfigdata module +(e.g., _build_time_vars is injected, RelenvException is in generated context). +""" + +import pathlib +import sys +import platform +import os +import logging + +log = logging.getLogger(__name__) + + +def build_arch(): + machine = platform.machine() + return machine.lower() + + +def get_triplet(machine=None, plat=None): + if not plat: + plat = sys.platform + if not machine: + machine = build_arch() + if plat == "darwin": + return f"{machine}-macos" + elif plat == "win32": + return f"{machine}-win" + elif plat == "linux": + return f"{machine}-linux-gnu" + else: + raise RelenvException("Unknown platform {}".format(platform)) + + +pydir = pathlib.Path(__file__).resolve().parent +if sys.platform == "win32": + DEFAULT_DATA_DIR = pathlib.Path.home() / "AppData" / "Local" / "relenv" +else: + DEFAULT_DATA_DIR = pathlib.Path.home() / ".local" / "relenv" + +if "RELENV_DATA" in os.environ: + DATA_DIR = pathlib.Path(os.environ["RELENV_DATA"]).resolve() +else: + DATA_DIR = DEFAULT_DATA_DIR + +buildroot = pydir.parent.parent + +toolchain = DATA_DIR / "toolchain" / get_triplet() + +build_time_vars = {} +for key in _build_time_vars: + val = _build_time_vars[key] + orig = val + if isinstance(val, str): + val = val.format( + BUILDROOT=buildroot, + TOOLCHAIN=toolchain, + ) + build_time_vars[key] = val diff --git a/relenv/build/common/builder.py b/relenv/build/common/builder.py new file mode 100644 index 00000000..5a36a110 --- /dev/null +++ b/relenv/build/common/builder.py @@ -0,0 +1,907 @@ +# Copyright 2022-2025 Broadcom. +# SPDX-License-Identifier: Apache-2.0 +""" +Builder and Builds classes for managing the build process. 
+""" +from __future__ import annotations + +import io +import json +import logging +import multiprocessing +import os +import pathlib +import shutil +import sys +import time +from typing import ( + Any, + Callable, + Dict, + IO, + List, + MutableMapping, + Optional, + Sequence, + TypedDict, + Union, + cast, +) +import tempfile + +from relenv.common import ( + DATA_DIR, + MODULE_DIR, + ConfigurationError, + build_arch, + extract_archive, + get_toolchain, + get_triplet, + work_dirs, + WorkDirs, +) + +from .download import Download +from .ui import ( + LineCountHandler, + load_build_stats, + print_ui, + print_ui_expanded, + update_build_stats, + BuildStats, +) +from .builders import build_default as _default_build_func + +# Type alias for path-like objects +PathLike = Union[str, os.PathLike[str]] + +log = logging.getLogger(__name__) + + +def _default_populate_env(env: MutableMapping[str, str], dirs: "Dirs") -> None: + """Default populate_env implementation (does nothing). + + This default implementation intentionally does nothing; specific steps may + provide their own implementation via the ``populate_env`` hook. + """ + _ = env + _ = dirs + + +def get_dependency_version(name: str, platform: str) -> Optional[Dict[str, str]]: + """ + Get dependency version and metadata from python-versions.json. + + Returns dict with keys: version, url, sha256, and any extra fields (e.g., sqliteversion) + Returns None if dependency not found. + + :param name: Dependency name (openssl, sqlite, xz) + :param platform: Platform name (linux, darwin, win32) + :return: Dict with version, url, sha256, and extra fields, or None + """ + versions_file = MODULE_DIR / "python-versions.json" + if not versions_file.exists(): + return None + + data = json.loads(versions_file.read_text()) + dependencies = data.get("dependencies", {}) + + if name not in dependencies: + return None + + # Get the latest version for this dependency that supports the platform + dep_versions = dependencies[name] + for version, info in sorted( + dep_versions.items(), + key=lambda x: [int(n) for n in x[0].split(".")], + reverse=True, + ): + if platform in info.get("platforms", []): + # Build result dict with version, url, sha256, and any extra fields + result = { + "version": version, + "url": info["url"], + "sha256": info.get("sha256", ""), + } + # Add any extra fields (like sqliteversion for SQLite) + for key, value in info.items(): + if key not in ["url", "sha256", "platforms"]: + result[key] = value + return result + + return None + + +# Public alias for _default_populate_env for backward compatibility +populate_env = _default_populate_env + + +class Dirs: + """ + A container for directories during build time. 
+ + :param dirs: A collection of working directories + :type dirs: ``relenv.common.WorkDirs`` + :param name: The name of this collection + :type name: str + :param arch: The architecture being worked with + :type arch: str + """ + + def __init__(self, dirs: WorkDirs, name: str, arch: str, version: str) -> None: + # XXX name is the specific to a step where as everything + # else here is generalized to the entire build + self.name = name + self.version = version + self.arch = arch + self.root = dirs.root + self.build = dirs.build + self.downloads = dirs.download + self.logs = dirs.logs + self.sources = dirs.src + self.tmpbuild = tempfile.mkdtemp(prefix="{}_build".format(name)) + self.source: Optional[pathlib.Path] = None + + @property + def toolchain(self) -> Optional[pathlib.Path]: + """Get the toolchain directory path for the current platform.""" + if sys.platform == "darwin": + return get_toolchain(root=self.root) + elif sys.platform == "win32": + return get_toolchain(root=self.root) + else: + return get_toolchain(self.arch, self.root) + + @property + def _triplet(self) -> str: + if sys.platform == "darwin": + return "{}-macos".format(self.arch) + elif sys.platform == "win32": + return "{}-win".format(self.arch) + else: + return "{}-linux-gnu".format(self.arch) + + @property + def prefix(self) -> pathlib.Path: + """Get the build prefix directory path.""" + return self.build / f"{self.version}-{self._triplet}" + + def __getstate__(self) -> Dict[str, Any]: + """ + Return an object used for pickling. + + :return: The picklable state + """ + return { + "name": self.name, + "arch": self.arch, + "root": self.root, + "build": self.build, + "downloads": self.downloads, + "logs": self.logs, + "sources": self.sources, + "tmpbuild": self.tmpbuild, + } + + def __setstate__(self, state: Dict[str, Any]) -> None: + """ + Unwrap the object returned from unpickling. + + :param state: The state to unpickle + :type state: dict + """ + self.name = state["name"] + self.arch = state["arch"] + self.root = state["root"] + self.downloads = state["downloads"] + self.logs = state["logs"] + self.sources = state["sources"] + self.build = state["build"] + self.tmpbuild = state["tmpbuild"] + + def to_dict(self) -> Dict[str, Any]: + """ + Get a dictionary representation of the directories in this collection. + + :return: A dictionary of all the directories + :rtype: dict + """ + return { + x: getattr(self, x) + for x in [ + "root", + "prefix", + "downloads", + "logs", + "sources", + "build", + "toolchain", + ] + } + + +class Recipe(TypedDict): + """Typed description of a build recipe entry.""" + + build_func: Callable[[MutableMapping[str, str], Dirs, IO[str]], None] + wait_on: List[str] + download: Optional[Download] + + +class Builder: + """ + Utility that handles the build process. 
+
+    :param root: The root of the working directories for this build
+    :type root: str
+    :param recipies: The instructions for the build steps
+    :type recipies: list
+    :param build_default: The default build function, defaults to ``build_default``
+    :type build_default: types.FunctionType
+    :param populate_env: The default function to populate the build environment, defaults to ``populate_env``
+    :type populate_env: types.FunctionType
+    :param arch: The architecture being built
+    :type arch: str
+    :param version: The Python version being built
+    :type version: str
+    """
+
+    def __init__(
+        self,
+        root: Optional[PathLike] = None,
+        recipies: Optional[Dict[str, Recipe]] = None,
+        build_default: Optional[
+            Callable[[MutableMapping[str, str], Dirs, IO[str]], None]
+        ] = None,
+        populate_env: Optional[Callable[[MutableMapping[str, str], Dirs], None]] = None,
+        arch: str = "x86_64",
+        version: str = "",
+    ) -> None:
+        self.root = root
+        self.dirs: WorkDirs = work_dirs(root)
+        self.build_arch = build_arch()
+        self.build_triplet = get_triplet(self.build_arch)
+        self.arch = arch
+        self.sources = self.dirs.src
+        self.downloads = self.dirs.download
+
+        if recipies is None:
+            self.recipies: Dict[str, Recipe] = {}
+        else:
+            self.recipies = recipies
+
+        # Use dependency injection with sensible defaults
+        self.build_default: Callable[
+            [MutableMapping[str, str], Dirs, IO[str]], None
+        ] = (build_default if build_default is not None else _default_build_func)
+
+        # Use the default populate_env if none provided
+        self.populate_env: Callable[[MutableMapping[str, str], Dirs], None] = (
+            populate_env if populate_env is not None else _default_populate_env
+        )
+
+        self.version = version
+        self.set_arch(self.arch)
+
+    def copy(self, version: str, checksum: Optional[str]) -> "Builder":
+        """Create a copy of this Builder with a different version."""
+        recipies: Dict[str, Recipe] = {}
+        for name in self.recipies:
+            recipe = self.recipies[name]
+            recipies[name] = {
+                "build_func": recipe["build_func"],
+                "wait_on": list(recipe["wait_on"]),
+                "download": recipe["download"].copy() if recipe["download"] else None,
+            }
+        build = Builder(
+            self.root,
+            recipies,
+            self.build_default,
+            self.populate_env,
+            self.arch,
+            version,
+        )
+        python_download = build.recipies["python"].get("download")
+        if python_download is None:
+            raise ConfigurationError("Python recipe is missing a download entry")
+        python_download.version = version
+        python_download.checksum = checksum
+        return build
+
+    def set_arch(self, arch: str) -> None:
+        """
+        Set the architecture for the build. 
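+
+        This also clears the cached toolchain path, so the next access to
+        ``toolchain`` resolves it for the new architecture.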
+ + :param arch: The arch to build + :type arch: str + """ + self.arch = arch + self._toolchain: Optional[pathlib.Path] = None + + @property + def toolchain(self) -> Optional[pathlib.Path]: + """Lazily fetch toolchain only when needed.""" + if self._toolchain is None and sys.platform == "linux": + from relenv.common import get_toolchain + + self._toolchain = get_toolchain(self.arch, self.dirs.root) + return self._toolchain + + @property + def triplet(self) -> str: + """Get the target triplet for the current architecture.""" + return get_triplet(self.arch) + + @property + def prefix(self) -> pathlib.Path: + """Get the build prefix directory path.""" + return self.dirs.build / f"{self.version}-{self.triplet}" + + @property + def _triplet(self) -> str: + if sys.platform == "darwin": + return "{}-macos".format(self.arch) + elif sys.platform == "win32": + return "{}-win".format(self.arch) + else: + return "{}-linux-gnu".format(self.arch) + + def add( + self, + name: str, + build_func: Optional[Callable[..., Any]] = None, + wait_on: Optional[Sequence[str]] = None, + download: Optional[Dict[str, Any]] = None, + ) -> None: + """ + Add a step to the build process. + + :param name: The name of the step + :type name: str + :param build_func: The function that builds this step, defaults to None + :type build_func: types.FunctionType, optional + :param wait_on: Processes to wait on before running this step, defaults to None + :type wait_on: list, optional + :param download: A dictionary of download information, defaults to None + :type download: dict, optional + """ + if wait_on is None: + wait_on_list: List[str] = [] + else: + wait_on_list = list(wait_on) + if build_func is None: + build_func = self.build_default + download_obj: Optional[Download] = None + if download is not None: + download_obj = Download(name, destination=self.downloads, **download) + self.recipies[name] = { + "build_func": build_func, + "wait_on": wait_on_list, + "download": download_obj, + } + + def run( + self, + name: str, + event: "multiprocessing.synchronize.Event", + build_func: Callable[..., Any], + download: Optional[Download], + show_ui: bool = False, + log_level: str = "WARNING", + line_counts: Optional[MutableMapping[str, int]] = None, + ) -> Any: + """ + Run a build step. 
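+
+        Intended to be the target of a ``multiprocessing.Process``: the step
+        blocks until ``event`` is set by the steps it waits on, then runs
+        ``build_func`` inside the step's source or prefix directory.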
+ + :param name: The name of the step to run + :type name: str + :param event: An event to track this process' status and alert waiting steps + :type event: ``multiprocessing.Event`` + :param build_func: The function to use to build this step + :type build_func: types.FunctionType + :param download: The ``Download`` instance for this step + :type download: ``Download`` + :param line_counts: Optional shared dict for tracking log line counts + :type line_counts: Optional[MutableMapping[str, int]] + + :return: The output of the build function + """ + root_log = logging.getLogger(None) + if sys.platform == "win32": + if not show_ui: + handler = logging.StreamHandler() + handler.setLevel(logging.getLevelName(log_level)) + root_log.addHandler(handler) + + for handler in root_log.handlers: + if isinstance(handler, logging.StreamHandler): + handler.setFormatter( + logging.Formatter(f"%(asctime)s {name} %(message)s") + ) + + if not self.dirs.build.exists(): + os.makedirs(self.dirs.build, exist_ok=True) + + dirs = Dirs(self.dirs, name, self.arch, self.version) + os.makedirs(dirs.sources, exist_ok=True) + os.makedirs(dirs.logs, exist_ok=True) + os.makedirs(dirs.prefix, exist_ok=True) + + while event.is_set() is False: + time.sleep(0.3) + + logfp = io.open(os.path.join(dirs.logs, "{}.log".format(name)), "w") + handler = logging.FileHandler(dirs.logs / f"{name}.log") + root_log.addHandler(handler) + root_log.setLevel(logging.NOTSET) + + # Add line count handler if tracking is enabled + line_count_handler: Optional[LineCountHandler] = None + if line_counts is not None: + line_count_handler = LineCountHandler(name, line_counts) + root_log.addHandler(line_count_handler) + + # DEBUG: Uncomment to debug + # logfp = sys.stdout + + cwd = os.getcwd() + if download: + extract_archive(dirs.sources, str(download.filepath)) + dirs.source = dirs.sources / download.filepath.name.split(".tar")[0] + os.chdir(dirs.source) + else: + os.chdir(dirs.prefix) + + if sys.platform == "win32": + env = os.environ.copy() + else: + env = { + "PATH": os.environ["PATH"], + } + env["RELENV_DEBUG"] = "1" + env["RELENV_BUILDENV"] = "1" + env["RELENV_HOST"] = self.triplet + env["RELENV_HOST_ARCH"] = self.arch + env["RELENV_BUILD"] = self.build_triplet + env["RELENV_BUILD_ARCH"] = self.build_arch + python_download = self.recipies["python"].get("download") + if python_download is None: + raise ConfigurationError("Python recipe is missing download configuration") + env["RELENV_PY_VERSION"] = python_download.version + env["RELENV_PY_MAJOR_VERSION"] = env["RELENV_PY_VERSION"].rsplit(".", 1)[0] + if "RELENV_DATA" in os.environ: + env["RELENV_DATA"] = os.environ["RELENV_DATA"] + if self.build_arch != self.arch: + native_root = DATA_DIR / "native" + env["RELENV_NATIVE_PY"] = str(native_root / "bin" / "python3") + + self.populate_env(env, dirs) + + _ = dirs.to_dict() + for k in _: + log.info("Directory %s %s", k, _[k]) + for k in env: + log.info("Environment %s %s", k, env[k]) + try: + result = build_func(env, dirs, logfp) + # Update build stats with final line count on success + if line_count_handler is not None and line_counts is not None: + if name in line_counts: + final_count = line_counts[name] + update_build_stats(name, final_count) + return result + except Exception: + log.exception("Build failure") + sys.exit(1) + finally: + os.chdir(cwd) + if line_count_handler is not None: + root_log.removeHandler(line_count_handler) + root_log.removeHandler(handler) + logfp.close() + + def cleanup(self) -> None: + """ + Clean up the build 
 directories.
+        """
+        shutil.rmtree(self.prefix)
+
+    def clean(self) -> None:
+        """
+        Completely clean up the remnants of a relenv build.
+        """
+        # Clean directories
+        for _ in [self.prefix, self.sources]:
+            try:
+                shutil.rmtree(_)
+            except PermissionError:
+                sys.stderr.write(f"Unable to remove directory: {_}")
+            except FileNotFoundError:
+                pass
+        # Clean files
+        archive = f"{self.prefix}.tar.xz"
+        for _ in [archive]:
+            try:
+                os.remove(_)
+            except FileNotFoundError:
+                pass
+
+    def download_files(
+        self,
+        steps: Optional[Sequence[str]] = None,
+        force_download: bool = False,
+        show_ui: bool = False,
+        expanded_ui: bool = False,
+    ) -> None:
+        """
+        Download all of the needed archives.
+
+        :param steps: The steps to download archives for, defaults to None
+        :type steps: list, optional
+        :param force_download: If True, re-download archives even when they already exist
+        :type force_download: bool, optional
+        :param show_ui: Whether to render download status to stdout
+        :type show_ui: bool, optional
+        :param expanded_ui: Whether to use expanded UI with progress bars
+        :type expanded_ui: bool, optional
+        """
+        step_names = list(steps) if steps is not None else list(self.recipies)
+
+        fails: List[str] = []
+        processes: Dict[str, multiprocessing.Process] = {}
+        events: Dict[str, Any] = {}
+
+        # For downloads, we don't track line counts but can still use expanded UI format
+        manager = multiprocessing.Manager()
+        line_counts: MutableMapping[str, int] = manager.dict()
+        build_stats: Dict[str, BuildStats] = {}
+
+        if show_ui:
+            if not expanded_ui:
+                sys.stdout.write("Starting downloads\n")
+        log.info("Starting downloads")
+        if show_ui and not expanded_ui:
+            print_ui(events, processes, fails)
+        for name in step_names:
+            download = self.recipies[name]["download"]
+            if download is None:
+                continue
+            event = multiprocessing.Event()
+            event.set()
+            events[name] = event
+
+            # Create progress callback if using expanded UI
+            def make_progress_callback(
+                step_name: str, shared_dict: MutableMapping[str, int]
+            ) -> Callable[[int, int], None]:
+                def progress_callback(downloaded: int, total: int) -> None:
+                    shared_dict[step_name] = downloaded
+                    shared_dict[f"{step_name}_total"] = total
+
+                return progress_callback
+
+            download_kwargs: Dict[str, Any] = {
+                "force_download": force_download,
+                "show_ui": show_ui,
+                "exit_on_failure": True,
+            }
+            if expanded_ui:
+                download_kwargs["progress_callback"] = make_progress_callback(
+                    name, line_counts
+                )
+
+            proc = multiprocessing.Process(
+                name=name,
+                target=download,
+                kwargs=download_kwargs,
+            )
+            proc.start()
+            processes[name] = proc
+
+        while processes:
+            for proc in list(processes.values()):
+                proc.join(0.3)
+                # DEBUG: Comment to debug
+                if show_ui:
+                    if expanded_ui:
+                        print_ui_expanded(
+                            events,
+                            processes,
+                            fails,
+                            line_counts,
+                            build_stats,
+                            "download",
+                        )
+                    else:
+                        print_ui(events, processes, fails)
+                if proc.exitcode is None:
+                    continue
+                processes.pop(proc.name)
+                if proc.exitcode != 0:
+                    fails.append(proc.name)
+        if show_ui:
+            if expanded_ui:
+                print_ui_expanded(
+                    events, processes, fails, line_counts, build_stats, "download"
+                )
+            else:
+                print_ui(events, processes, fails)
+            sys.stdout.write("\n")
+        if fails:
+            if show_ui:
+                print_ui(events, processes, fails)
+            sys.stderr.write("The following failures were reported\n")
+            for fail in fails:
+                sys.stderr.write(fail + "\n")
+            sys.stderr.flush()
+            sys.exit(1)
+
+    def build(
+        self,
+        steps: Optional[Sequence[str]] = None,
+        cleanup: bool = True,
+        show_ui: bool = False,
+        log_level: str = "WARNING",
+        expanded_ui: bool = False,
+    ) -> None:
+        """
+        Build! 
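+
+        Every step's process starts immediately, but each one blocks on its
+        event, which is only set once all of the step's ``wait_on``
+        dependencies have finished; failed dependencies terminate their
+        dependents.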
+
+        :param steps: The steps to run, defaults to None
+        :type steps: list, optional
+        :param cleanup: Whether to clean up or not, defaults to True
+        :type cleanup: bool, optional
+        :param expanded_ui: Whether to use expanded UI with progress bars
+        :type expanded_ui: bool, optional
+        """  # noqa: D400
+        fails: List[str] = []
+        events: Dict[str, Any] = {}
+        waits: Dict[str, List[str]] = {}
+        processes: Dict[str, multiprocessing.Process] = {}
+
+        # Set up shared line counts and load build stats for expanded UI
+        manager = multiprocessing.Manager()
+        line_counts: MutableMapping[str, int] = manager.dict()
+        build_stats: Dict[str, BuildStats] = {}
+        if expanded_ui:
+            build_stats = load_build_stats()
+
+        if show_ui:
+            if expanded_ui:
+                # Expanded UI will print its own header
+                pass
+            else:
+                sys.stdout.write("Starting builds\n")
+                # DEBUG: Comment to debug
+                print_ui(events, processes, fails)
+        log.info("Starting builds")
+
+        step_names = list(steps) if steps is not None else list(self.recipies)
+
+        for name in step_names:
+            event = multiprocessing.Event()
+            events[name] = event
+            recipe = self.recipies[name]
+            kwargs = dict(recipe)
+            kwargs["show_ui"] = show_ui
+            kwargs["log_level"] = log_level
+            kwargs["line_counts"] = line_counts
+
+            # Determine needed dependency recipies.
+            wait_on_seq = cast(List[str], kwargs.pop("wait_on", []))
+            wait_on_list = list(wait_on_seq)
+            for dependency in wait_on_list[:]:
+                if dependency not in step_names:
+                    wait_on_list.remove(dependency)
+
+            waits[name] = wait_on_list
+            if not waits[name]:
+                event.set()
+
+            proc = multiprocessing.Process(
+                name=name, target=self.run, args=(name, event), kwargs=kwargs
+            )
+            proc.start()
+            processes[name] = proc
+
+        # Wait for the processes to finish and check if we should send any
+        # dependency events.
+        while processes:
+            for proc in list(processes.values()):
+                proc.join(0.3)
+                if show_ui:
+                    # DEBUG: Comment to debug
+                    if expanded_ui:
+                        print_ui_expanded(
+                            events, processes, fails, line_counts, build_stats, "build"
+                        )
+                    else:
+                        print_ui(events, processes, fails)
+                if proc.exitcode is None:
+                    continue
+                processes.pop(proc.name)
+                if proc.exitcode != 0:
+                    fails.append(proc.name)
+                    is_failure = True
+                else:
+                    is_failure = False
+                for name in waits:
+                    if proc.name in waits[name]:
+                        if is_failure:
+                            if name in processes:
+                                processes[name].terminate()
+                                time.sleep(0.1)
+                        waits[name].remove(proc.name)
+                    if not waits[name] and not events[name].is_set():
+                        events[name].set()
+
+        if fails:
+            sys.stderr.write("The following failures were reported\n")
+            for fail in fails:
+                log_file = self.dirs.logs / f"{fail}.log"
+                try:
+                    with io.open(log_file) as fp:
+                        fp.seek(0, 2)
+                        end = fp.tell()
+                        ind = end - 4096
+                        if ind > 0:
+                            fp.seek(ind)
+                        else:
+                            fp.seek(0)
+                        last_out = fp.read()
+                        if show_ui:
+                            sys.stderr.write("=" * 20 + f" {fail} " + "=" * 20 + "\n")
+                            sys.stderr.write(last_out + "\n\n")
+                except FileNotFoundError:
+                    last_out = f"Log file not found: {log_file}"
+                log.error("Build step %s has failed", fail)
+                log.error(last_out)
+            if show_ui:
+                sys.stderr.flush()
+            if cleanup:
+                log.debug("Performing cleanup.")
+                self.cleanup()
+            sys.exit(1)
+        if show_ui:
+            time.sleep(0.3)
+            if expanded_ui:
+                print_ui_expanded(
+                    events, processes, fails, line_counts, build_stats, "build"
+                )
+            else:
+                print_ui(events, processes, fails)
+            sys.stdout.write("\n")
+            sys.stdout.flush()
+        if cleanup:
+            log.debug("Performing cleanup.")
+            self.cleanup()
+
+    def check_prereqs(self) -> List[str]:
+        """
+        Check prerequisites for build. 
+
+        This method verifies all requirements for a successful build are satisfied.
+
+        :return: Returns a list of strings describing failed checks
+        :rtype: list
+        """
+        fail: List[str] = []
+        if sys.platform == "linux":
+            if not self.toolchain or not self.toolchain.exists():
+                fail.append(
+                    f"Toolchain for {self.arch} does not exist. Please pip install ppbt."
+                )
+        return fail
+
+    def __call__(
+        self,
+        steps: Optional[Sequence[str]] = None,
+        arch: Optional[str] = None,
+        clean: bool = True,
+        cleanup: bool = True,
+        force_download: bool = False,
+        download_only: bool = False,
+        show_ui: bool = False,
+        log_level: str = "WARNING",
+        expanded_ui: bool = False,
+    ) -> None:
+        """
+        Set the architecture, define the steps, clean if needed, download what is needed, and build.
+
+        :param steps: The steps to run, defaults to None
+        :type steps: list, optional
+        :param arch: The architecture to build, defaults to None
+        :type arch: str, optional
+        :param clean: If true, cleans the directories first, defaults to True
+        :type clean: bool, optional
+        :param cleanup: Cleans up after build if true, defaults to True
+        :type cleanup: bool, optional
+        :param force_download: Whether or not to download the content even if it already exists, defaults to False
+        :type force_download: bool, optional
+        :param expanded_ui: Whether to use expanded UI with progress bars
+        :type expanded_ui: bool, optional
+        """
+        log = logging.getLogger(None)
+        log.setLevel(logging.NOTSET)
+
+        stream_handler: Optional[logging.Handler] = None
+        if not show_ui:
+            stream_handler = logging.StreamHandler()
+            stream_handler.setLevel(logging.getLevelName(log_level))
+            log.addHandler(stream_handler)
+
+        os.makedirs(self.dirs.logs, exist_ok=True)
+        file_handler = logging.FileHandler(self.dirs.logs / "build.log")
+        file_handler.setLevel(logging.INFO)
+        log.addHandler(file_handler)
+
+        if arch:
+            self.set_arch(arch)
+
+        step_names = list(steps) if steps is not None else list(self.recipies)
+
+        failures = self.check_prereqs()
+        if not download_only and failures:
+            for _ in failures:
+                sys.stderr.write(f"{_}\n")
+            sys.stderr.flush()
+            sys.exit(1)
+
+        if clean:
+            self.clean()
+
+        if self.build_arch != self.arch:
+            native_root = DATA_DIR / "native"
+            if not native_root.exists():
+                if "RELENV_NATIVE_PY_VERSION" in os.environ:
+                    version = os.environ["RELENV_NATIVE_PY_VERSION"]
+                else:
+                    version = self.version
+                from relenv.create import create
+
+                create("native", DATA_DIR, version=version)
+
+        # Start a process for each build, passing it an event used to notify the
+        # process when its dependencies have finished. 
+ try: + self.download_files( + step_names, + force_download=force_download, + show_ui=show_ui, + expanded_ui=expanded_ui, + ) + if download_only: + return + self.build( + step_names, + cleanup, + show_ui=show_ui, + log_level=log_level, + expanded_ui=expanded_ui, + ) + finally: + log.removeHandler(file_handler) + if stream_handler is not None: + log.removeHandler(stream_handler) + + +class Builds: + """Collection of platform-specific builders.""" + + def __init__(self) -> None: + """Initialize an empty collection of builders.""" + self.builds: Dict[str, Builder] = {} + + def add(self, platform: str, *args: Any, **kwargs: Any) -> Builder: + """Add a builder for a specific platform.""" + if "builder" in kwargs: + build_candidate = kwargs.pop("builder") + if args or kwargs: + raise RuntimeError( + "builder keyword can not be used with other kwargs or args" + ) + build = cast(Builder, build_candidate) + else: + build = Builder(*args, **kwargs) + self.builds[platform] = build + return build + + +builds = Builds() diff --git a/relenv/build/common/builders.py b/relenv/build/common/builders.py new file mode 100644 index 00000000..896d87eb --- /dev/null +++ b/relenv/build/common/builders.py @@ -0,0 +1,163 @@ +# Copyright 2022-2025 Broadcom. +# SPDX-License-Identifier: Apache-2.0 +""" +Build functions for specific dependencies. +""" +from __future__ import annotations + +import pathlib +import shutil +import sys +from typing import IO, MutableMapping, TYPE_CHECKING + +from relenv.common import PlatformError, runcmd + +if TYPE_CHECKING: + from .builder import Dirs + + +def build_default(env: MutableMapping[str, str], dirs: Dirs, logfp: IO[str]) -> None: + """ + The default build function if none is given during the build process. + + :param env: The environment dictionary + :type env: dict + :param dirs: The working directories + :type dirs: ``relenv.build.common.Dirs`` + :param logfp: A handle for the log file + :type logfp: file + """ + cmd = [ + "./configure", + "--prefix={}".format(dirs.prefix), + ] + if env["RELENV_HOST"].find("linux") > -1: + cmd += [ + "--build={}".format(env["RELENV_BUILD"]), + "--host={}".format(env["RELENV_HOST"]), + ] + runcmd(cmd, env=env, stderr=logfp, stdout=logfp) + runcmd(["make", "-j8"], env=env, stderr=logfp, stdout=logfp) + runcmd(["make", "install"], env=env, stderr=logfp, stdout=logfp) + + +def build_openssl_fips( + env: MutableMapping[str, str], dirs: Dirs, logfp: IO[str] +) -> None: + """Build OpenSSL with FIPS module.""" + return build_openssl(env, dirs, logfp, fips=True) + + +def build_openssl( + env: MutableMapping[str, str], + dirs: Dirs, + logfp: IO[str], + fips: bool = False, +) -> None: + """ + Build openssl. 
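+
+    When ``fips`` is true, the tree is configured with ``enable-fips`` and,
+    after ``make``, only the resulting ``fips.so`` is copied into
+    ``lib/ossl-modules`` instead of running ``make install_sw``.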
+ + :param env: The environment dictionary + :type env: dict + :param dirs: The working directories + :type dirs: ``relenv.build.common.Dirs`` + :param logfp: A handle for the log file + :type logfp: file + """ + arch = "aarch64" + if sys.platform == "darwin": + plat = "darwin64" + if env["RELENV_HOST_ARCH"] == "x86_64": + arch = "x86_64-cc" + elif env["RELENV_HOST_ARCH"] == "arm64": + arch = "arm64-cc" + else: + raise PlatformError(f"Unable to build {env['RELENV_HOST_ARCH']}") + extended_cmd = [] + else: + plat = "linux" + if env["RELENV_HOST_ARCH"] == "x86_64": + arch = "x86_64" + elif env["RELENV_HOST_ARCH"] == "aarch64": + arch = "aarch64" + else: + raise PlatformError(f"Unable to build {env['RELENV_HOST_ARCH']}") + extended_cmd = [ + "-Wl,-z,noexecstack", + ] + if fips: + extended_cmd.append("enable-fips") + cmd = [ + "./Configure", + f"{plat}-{arch}", + f"--prefix={dirs.prefix}", + "--openssldir=/etc/ssl", + "--libdir=lib", + "--api=1.1.1", + "--shared", + "--with-rand-seed=os,egd", + "enable-md2", + "enable-egd", + "no-idea", + ] + cmd.extend(extended_cmd) + runcmd( + cmd, + env=env, + stderr=logfp, + stdout=logfp, + ) + runcmd(["make", "-j8"], env=env, stderr=logfp, stdout=logfp) + if fips: + shutil.copy( + pathlib.Path("providers") / "fips.so", + pathlib.Path(dirs.prefix) / "lib" / "ossl-modules", + ) + else: + runcmd(["make", "install_sw"], env=env, stderr=logfp, stdout=logfp) + + +def build_sqlite(env: MutableMapping[str, str], dirs: Dirs, logfp: IO[str]) -> None: + """ + Build sqlite. + + :param env: The environment dictionary + :type env: dict + :param dirs: The working directories + :type dirs: ``relenv.build.common.Dirs`` + :param logfp: A handle for the log file + :type logfp: file + """ + # extra_cflags=('-Os ' + # '-DSQLITE_ENABLE_FTS5 ' + # '-DSQLITE_ENABLE_FTS4 ' + # '-DSQLITE_ENABLE_FTS3_PARENTHESIS ' + # '-DSQLITE_ENABLE_JSON1 ' + # '-DSQLITE_ENABLE_RTREE ' + # '-DSQLITE_TCL=0 ' + # ) + # configure_pre=[ + # '--enable-threadsafe', + # '--enable-shared=no', + # '--enable-static=yes', + # '--disable-readline', + # '--disable-dependency-tracking', + # ] + cmd = [ + "./configure", + # "--with-shared", + # "--without-static", + "--enable-threadsafe", + "--disable-readline", + "--disable-dependency-tracking", + "--prefix={}".format(dirs.prefix), + # "--enable-add-ons=nptl,ports", + ] + if env["RELENV_HOST"].find("linux") > -1: + cmd += [ + "--build={}".format(env["RELENV_BUILD_ARCH"]), + "--host={}".format(env["RELENV_HOST"]), + ] + runcmd(cmd, env=env, stderr=logfp, stdout=logfp) + runcmd(["make", "-j8"], env=env, stderr=logfp, stdout=logfp) + runcmd(["make", "install"], env=env, stderr=logfp, stdout=logfp) diff --git a/relenv/build/common/download.py b/relenv/build/common/download.py new file mode 100644 index 00000000..5c2a8fb0 --- /dev/null +++ b/relenv/build/common/download.py @@ -0,0 +1,324 @@ +# Copyright 2022-2025 Broadcom. +# SPDX-License-Identifier: Apache-2.0 +""" +Download utility class for fetching build dependencies. 
+""" +from __future__ import annotations + +import hashlib +import logging +import os +import pathlib +import subprocess +import sys +from typing import Callable, Optional, Tuple, Union + +from relenv.common import ( + RelenvException, + ConfigurationError, + ChecksumValidationError, + download_url, + get_download_location, + runcmd, +) + +# Type alias for path-like objects +PathLike = Union[str, os.PathLike[str]] + +# Environment flag for CI/CD detection +CICD = "CI" in os.environ + +log = logging.getLogger(__name__) + + +def verify_checksum(file: PathLike, checksum: Optional[str]) -> bool: + """ + Verify the checksum of a file. + + Supports both SHA-1 (40 hex chars) and SHA-256 (64 hex chars) checksums. + The hash algorithm is auto-detected based on checksum length. + + :param file: The path to the file to check. + :type file: str + :param checksum: The checksum to verify against (SHA-1 or SHA-256) + :type checksum: str + + :raises RelenvException: If the checksum verification failed + + :return: True if it succeeded, or False if the checksum was None + :rtype: bool + """ + if checksum is None: + log.error("Can't verify checksum because none was given") + return False + + # Auto-detect hash type based on length + # SHA-1: 40 hex chars, SHA-256: 64 hex chars + if len(checksum) == 64: + hash_algo = hashlib.sha256() + hash_name = "sha256" + elif len(checksum) == 40: + hash_algo = hashlib.sha1() + hash_name = "sha1" + else: + raise ChecksumValidationError( + f"Invalid checksum length {len(checksum)}. Expected 40 (SHA-1) or 64 (SHA-256)" + ) + + with open(file, "rb") as fp: + hash_algo.update(fp.read()) + file_checksum = hash_algo.hexdigest() + if checksum != file_checksum: + raise ChecksumValidationError( + f"{hash_name} checksum verification failed. expected={checksum} found={file_checksum}" + ) + return True + + +class Download: + """ + A utility that holds information about content to be downloaded. 
+
+    :param name: The name of the download
+    :type name: str
+    :param url: The url of the download
+    :type url: str
+    :param fallback_url: An alternate url to try when the primary download fails
+    :type fallback_url: str
+    :param signature: The signature of the download, defaults to None
+    :type signature: str
+    :param destination: The path to download the file to
+    :type destination: str
+    :param version: The version of the content to download
+    :type version: str
+    :param checksum: The expected checksum of the download (SHA-1 or SHA-256)
+    :type checksum: str
+
+    """
+
+    def __init__(
+        self,
+        name: str,
+        url: str,
+        fallback_url: Optional[str] = None,
+        signature: Optional[str] = None,
+        destination: PathLike = "",
+        version: str = "",
+        checksum: Optional[str] = None,
+    ) -> None:
+        self.name = name
+        self.url_tpl = url
+        self.fallback_url_tpl = fallback_url
+        self.signature_tpl = signature
+        self._destination: pathlib.Path = pathlib.Path()
+        if destination:
+            self._destination = pathlib.Path(destination)
+        self.version = version
+        self.checksum = checksum
+
+    def copy(self) -> "Download":
+        """Create a copy of this Download instance."""
+        return Download(
+            self.name,
+            self.url_tpl,
+            self.fallback_url_tpl,
+            self.signature_tpl,
+            self.destination,
+            self.version,
+            self.checksum,
+        )
+
+    @property
+    def destination(self) -> pathlib.Path:
+        """Get the destination directory path."""
+        return self._destination
+
+    @destination.setter
+    def destination(self, value: Optional[PathLike]) -> None:
+        """Set the destination directory path."""
+        if value:
+            self._destination = pathlib.Path(value)
+        else:
+            self._destination = pathlib.Path()
+
+    @property
+    def url(self) -> str:
+        """Get the formatted download URL."""
+        return self.url_tpl.format(version=self.version)
+
+    @property
+    def fallback_url(self) -> Optional[str]:
+        """Get the formatted fallback URL if configured."""
+        if self.fallback_url_tpl:
+            return self.fallback_url_tpl.format(version=self.version)
+        return None
+
+    @property
+    def signature_url(self) -> str:
+        """Get the formatted signature URL."""
+        if self.signature_tpl is None:
+            raise ConfigurationError("Signature template not configured")
+        return self.signature_tpl.format(version=self.version)
+
+    @property
+    def filepath(self) -> pathlib.Path:
+        """Get the full file path where the download will be saved."""
+        _, name = self.url.rsplit("/", 1)
+        return self.destination / name
+
+    @property
+    def formatted_url(self) -> str:
+        """Get the formatted URL (alias for url property)."""
+        return self.url_tpl.format(version=self.version)
+
+    def fetch_file(
+        self, progress_callback: Optional[Callable[[int, int], None]] = None
+    ) -> Tuple[str, bool]:
+        """
+        Download the file.
+
+        :param progress_callback: Optional callback(downloaded_bytes, total_bytes)
+        :type progress_callback: Optional[Callable[[int, int], None]]
+        :return: The path to the downloaded content, and whether it was downloaded.
+        :rtype: tuple(str, bool)
+        """
+        try:
+            return (
+                download_url(
+                    self.url,
+                    self.destination,
+                    CICD,
+                    progress_callback=progress_callback,
+                ),
+                True,
+            )
+        except Exception as exc:
+            fallback = self.fallback_url
+            if fallback:
+                print(f"Download failed {self.url} ({exc}); trying fallback url")
+                return (
+                    download_url(
+                        fallback,
+                        self.destination,
+                        CICD,
+                        progress_callback=progress_callback,
+                    ),
+                    True,
+                )
+            raise
+
+    def fetch_signature(self, version: Optional[str] = None) -> Tuple[str, bool]:
+        """
+        Download the file signature.
+
+        :return: The path to the downloaded signature and whether it was fetched. 
+        :rtype: tuple(str, bool)
+        """
+        return download_url(self.signature_url, self.destination, CICD), True
+
+    def exists(self) -> bool:
+        """
+        True when the artifact already exists on disk.
+
+        :return: True when the artifact already exists on disk
+        :rtype: bool
+        """
+        return self.filepath.exists()
+
+    def valid_hash(self) -> None:
+        """Validate the hash of the downloaded file (placeholder method)."""
+        pass
+
+    @staticmethod
+    def validate_signature(archive: PathLike, signature: Optional[PathLike]) -> bool:
+        """
+        True when the archive's signature is valid.
+
+        :param archive: The path to the archive to validate
+        :type archive: str
+        :param signature: The path to the signature to validate against
+        :type signature: str
+
+        :return: True if it validated properly, else False
+        :rtype: bool
+        """
+        if signature is None:
+            log.error("Can't check signature because none was given")
+            return False
+        try:
+            runcmd(
+                ["gpg", "--verify", signature, archive],
+                stderr=subprocess.PIPE,
+                stdout=subprocess.PIPE,
+            )
+            return True
+        except RelenvException as exc:
+            log.error("Signature validation failed on %s: %s", archive, exc)
+            return False
+
+    @staticmethod
+    def validate_checksum(archive: PathLike, checksum: Optional[str]) -> bool:
+        """
+        True when the archive matches the expected checksum.
+
+        :param archive: The path to the archive to validate
+        :type archive: str
+        :param checksum: The checksum to validate against (SHA-1 or SHA-256)
+        :type checksum: str
+        :return: True if the sums matched, else False
+        :rtype: bool
+        """
+        try:
+            verify_checksum(archive, checksum)
+            return True
+        except RelenvException as exc:
+            log.error("Checksum validation failed on %s: %s", archive, exc)
+            return False
+
+    def __call__(
+        self,
+        force_download: bool = False,
+        show_ui: bool = False,
+        exit_on_failure: bool = False,
+        progress_callback: Optional[Callable[[int, int], None]] = None,
+    ) -> bool:
+        """
+        Downloads the url and validates the signature and checksum.
+
+        :param progress_callback: Optional callback(downloaded_bytes, total_bytes)
+        :type progress_callback: Optional[Callable[[int, int], None]]
+        :return: Whether or not validation succeeded
+        :rtype: bool
+        """
+        os.makedirs(self.filepath.parent, exist_ok=True)
+
+        downloaded = False
+        if force_download:
+            _, downloaded = self.fetch_file(progress_callback)
+        else:
+            file_is_valid = False
+            dest = get_download_location(self.url, self.destination)
+            if self.checksum and os.path.exists(dest):
+                file_is_valid = self.validate_checksum(dest, self.checksum)
+            if file_is_valid:
+                log.debug("%s already downloaded, skipping.", self.url)
+            else:
+                _, downloaded = self.fetch_file(progress_callback)
+        valid = True
+        if downloaded:
+            if self.signature_tpl is not None:
+                sig, _ = self.fetch_signature()
+                valid_sig = self.validate_signature(self.filepath, sig)
+                valid = valid and valid_sig
+            if self.checksum is not None:
+                valid_checksum = self.validate_checksum(self.filepath, self.checksum)
+                valid = valid and valid_checksum
+
+        if not valid:
+            log.warning("Checksum did not match %s: %s", self.name, self.checksum)
+            if show_ui:
+                sys.stderr.write(
+                    f"\nChecksum did not match {self.name}: {self.checksum}\n"
+                )
+                sys.stderr.flush()
+        if exit_on_failure and not valid:
+            sys.exit(1)
+        return valid
diff --git a/relenv/build/common/install.py b/relenv/build/common/install.py
new file mode 100644
index 00000000..04ab379b
--- /dev/null
+++ b/relenv/build/common/install.py
@@ -0,0 +1,531 @@
+# Copyright 2022-2025 Broadcom. 
+# SPDX-License-Identifier: Apache-2.0
+"""
+Installation and finalization functions for the build process.
+"""
+from __future__ import annotations
+
+import fnmatch
+import io
+import logging
+import os
+import os.path
+import pathlib
+import pprint
+import re
+import shutil
+import sys
+import tarfile
+from types import ModuleType
+from typing import IO, MutableMapping, Optional, Sequence, Union, TYPE_CHECKING
+
+from relenv.common import (
+    LINUX,
+    MODULE_DIR,
+    MissingDependencyError,
+    Version,
+    download_url,
+    format_shebang,
+    runcmd,
+)
+import relenv.relocate
+
+if TYPE_CHECKING:
+    from .builder import Dirs
+
+# Type alias for path-like objects
+PathLike = Union[str, os.PathLike[str]]
+
+# Relenv PTH file content for bootstrapping
+RELENV_PTH = (
+    "import os; "
+    "import sys; "
+    "from importlib import util; "
+    "from pathlib import Path; "
+    "spec = util.spec_from_file_location("
+    "'relenv.runtime', str(Path(__file__).parent / 'site-packages' / 'relenv' / 'runtime.py')"
+    "); "
+    "mod = util.module_from_spec(spec); "
+    "sys.modules['relenv.runtime'] = mod; "
+    "spec.loader.exec_module(mod); mod.bootstrap();"
+)
+
+log = logging.getLogger(__name__)
+
+
+def patch_file(path: PathLike, old: str, new: str) -> None:
+    """
+    Search a file line by line for a string to replace.
+
+    :param path: Location of the file to search
+    :type path: str
+    :param old: The value that will be replaced
+    :type old: str
+    :param new: The value that will replace the 'old' value.
+    :type new: str
+    """
+    log.debug("Patching file: %s", path)
+    with open(path, "r") as fp:
+        content = fp.read()
+    new_content = ""
+    for line in content.splitlines():
+        line = re.sub(old, new, line)
+        new_content += line + "\n"
+    with open(path, "w") as fp:
+        fp.write(new_content)
+
+
+def patch_shebang(path: PathLike, old: str, new: str) -> bool:
+    """
+    Replace a file's shebang.
+
+    :param path: The path of the file to patch
+    :type path: str
+    :param old: The old shebang, will only patch when this is found
+    :type old: str
+    :param new: The new shebang to be written
+    :type new: str
+    """
+    with open(path, "rb") as fp:
+        try:
+            data = fp.read(len(old.encode())).decode()
+        except UnicodeError:
+            return False
+        except Exception as exc:
+            log.warning("Unhandled exception: %r", exc)
+            return False
+        if data != old:
+            log.warning("Shebang doesn't match: %s %r != %r", path, old, data)
+            return False
+        data = fp.read().decode()
+    with open(path, "w") as fp:
+        fp.write(new)
+        fp.write(data)
+    with open(path, "r") as fp:
+        data = fp.read()
+    log.info("Patched shebang of %s => %r", path, data)
+    return True
+
+
+def patch_shebangs(path: PathLike, old: str, new: str) -> None:
+    """
+    Traverse a directory and patch shebangs.
+
+    :param path: The path of the directory to traverse
+    :type path: str
+    :param old: The old shebang, will only patch when this is found
+    :type old: str
+    :param new: The new shebang to be written
+    :type new: str
+    """
+    for root, _dirs, files in os.walk(str(path)):
+        for file in files:
+            patch_shebang(os.path.join(root, file), old, new)
+
+
+def _load_sysconfigdata_template() -> str:
+    """Load the sysconfigdata template from disk.
+
+    Returns:
+        The Python code template for sysconfigdata module.
+
+    Note:
+        This is loaded from a .py file rather than embedded as a string
+        to enable syntax checking, IDE support, and easier maintenance.
+        Follows CPython convention of separating data from code. 
+ """ + template_path = pathlib.Path(__file__).parent / "_sysconfigdata_template.py" + template_content = template_path.read_text(encoding="utf-8") + + # Extract only the code after the docstring + # Skip the copyright header and module docstring + lines = template_content.split("\n") + code_lines = [] + found_code = False + + for line in lines: + # Skip until we find the first import statement + if not found_code: + if line.startswith("import ") or line.startswith("from "): + found_code = True + else: + continue + + code_lines.append(line) + + return "\n".join(code_lines) + + +def update_ensurepip(directory: pathlib.Path) -> None: + """ + Update bundled dependencies for ensurepip (pip & setuptools). + """ + # ensurepip bundle location + bundle_dir = directory / "ensurepip" / "_bundled" + + # Make sure the destination directory exists + bundle_dir.mkdir(parents=True, exist_ok=True) + + # Detect existing whl. Later versions of python don't include setuptools. We + # only want to update whl files that python expects to be there + pip_version = "25.2" + setuptools_version = "80.9.0" + update_pip = False + update_setuptools = False + for file in bundle_dir.glob("*.whl"): + + log.debug("Checking whl: %s", str(file)) + if file.name.startswith("pip-"): + found_version = file.name.split("-")[1] + log.debug("Found version %s", found_version) + if Version(found_version) >= Version(pip_version): + log.debug("Found correct pip version or newer: %s", found_version) + else: + file.unlink() + update_pip = True + if file.name.startswith("setuptools-"): + found_version = file.name.split("-")[1] + log.debug("Found version %s", found_version) + if Version(found_version) >= Version(setuptools_version): + log.debug( + "Found correct setuptools version or newer: %s", found_version + ) + else: + file.unlink() + update_setuptools = True + + # Download whl files and update __init__.py + init_file = directory / "ensurepip" / "__init__.py" + if update_pip: + whl = f"pip-{pip_version}-py3-none-any.whl" + whl_path = "b7/3f/945ef7ab14dc4f9d7f40288d2df998d1837ee0888ec3659c813487572faa" + url = f"https://files.pythonhosted.org/packages/{whl_path}/{whl}" + download_url(url=url, dest=bundle_dir) + assert (bundle_dir / whl).exists() + + # Update __init__.py + old = "^_PIP_VERSION.*" + new = f'_PIP_VERSION = "{pip_version}"' + patch_file(path=init_file, old=old, new=new) + + # setuptools + if update_setuptools: + whl = f"setuptools-{setuptools_version}-py3-none-any.whl" + whl_path = "a3/dc/17031897dae0efacfea57dfd3a82fdd2a2aeb58e0ff71b77b87e44edc772" + url = f"https://files.pythonhosted.org/packages/{whl_path}/{whl}" + download_url(url=url, dest=bundle_dir) + assert (bundle_dir / whl).exists() + + # setuptools + old = "^_SETUPTOOLS_VERSION.*" + new = f'_SETUPTOOLS_VERSION = "{setuptools_version}"' + patch_file(path=init_file, old=old, new=new) + + log.debug("ensurepip __init__.py contents:") + log.debug(init_file.read_text()) + + +def install_sysdata( + mod: ModuleType, + destfile: PathLike, + buildroot: PathLike, + toolchain: Optional[PathLike], +) -> None: + """ + Create a Relenv Python environment's sysconfigdata. + + Helper method used by the `finalize` build method to create a Relenv + Python environment's sysconfigdata. 
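+
+    Occurrences of the build root and toolchain paths are rewritten to
+    ``{BUILDROOT}`` and ``{TOOLCHAIN}`` placeholders so the generated module
+    can expand them for the environment's actual location at runtime.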
+ + :param mod: The module to operate on + :type mod: ``types.ModuleType`` + :param destfile: Path to the file to write the data to + :type destfile: str + :param buildroot: Path to the root of the build + :type buildroot: str + :param toolchain: Path to the root of the toolchain + :type toolchain: str + """ + data = {} + + def fbuildroot(s: str) -> str: + return s.replace(str(buildroot), "{BUILDROOT}") + + def ftoolchain(s: str) -> str: + return s.replace(str(toolchain), "{TOOLCHAIN}") + + # XXX: keymap is not used, remove it? + # keymap = { + # "BINDIR": (fbuildroot,), + # "BINLIBDEST": (fbuildroot,), + # "CFLAGS": (fbuildroot, ftoolchain), + # "CPPLAGS": (fbuildroot, ftoolchain), + # "CXXFLAGS": (fbuildroot, ftoolchain), + # "datarootdir": (fbuildroot,), + # "exec_prefix": (fbuildroot,), + # "LDFLAGS": (fbuildroot, ftoolchain), + # "LDSHARED": (fbuildroot, ftoolchain), + # "LIBDEST": (fbuildroot,), + # "prefix": (fbuildroot,), + # "SCRIPTDIR": (fbuildroot,), + # } + for key in sorted(mod.build_time_vars): + val = mod.build_time_vars[key] + if isinstance(val, str): + for _ in (fbuildroot, ftoolchain): + val = _(val) + log.info("SYSCONFIG [%s] %s => %s", key, mod.build_time_vars[key], val) + data[key] = val + + sysconfigdata_code = _load_sysconfigdata_template() + with open(destfile, "w", encoding="utf8") as f: + f.write( + "# system configuration generated and used by" " the relenv at runtime\n" + ) + f.write("_build_time_vars = ") + pprint.pprint(data, stream=f) + f.write(sysconfigdata_code) + + +def find_sysconfigdata(pymodules: PathLike) -> str: + """ + Find sysconfigdata directory for python installation. + + :param pymodules: Path to python modules (e.g. lib/python3.10) + :type pymodules: str + + :return: The name of the sysconig data module + :rtype: str + """ + for root, dirs, files in os.walk(pymodules): + for file in files: + if file.find("sysconfigdata") > -1 and file.endswith(".py"): + return file[:-3] + raise MissingDependencyError("Unable to locate sysconfigdata module") + + +def install_runtime(sitepackages: PathLike) -> None: + """ + Install a base relenv runtime. + """ + site_dir = pathlib.Path(sitepackages) + relenv_pth = site_dir / "relenv.pth" + with io.open(str(relenv_pth), "w") as fp: + fp.write(RELENV_PTH) + + # Lay down relenv.runtime, we'll pip install the rest later + relenv = site_dir / "relenv" + os.makedirs(relenv, exist_ok=True) + + for name in [ + "runtime.py", + "relocate.py", + "common.py", + "buildenv.py", + "__init__.py", + ]: + src = MODULE_DIR / name + dest = relenv / name + with io.open(src, "r") as rfp: + with io.open(dest, "w") as wfp: + wfp.write(rfp.read()) + + +def finalize( + env: MutableMapping[str, str], + dirs: Dirs, + logfp: IO[str], +) -> None: + """ + Run after we've fully built python. + + This method enhances the newly created python with Relenv's runtime hacks. + + :param env: The environment dictionary + :type env: dict + :param dirs: The working directories + :type dirs: ``relenv.build.common.Dirs`` + :param logfp: A handle for the log file + :type logfp: file + """ + # Run relok8 to make sure the rpaths are relocatable. 
+ relenv.relocate.main(dirs.prefix, log_file_name=str(dirs.logs / "relocate.py.log")) + # Install relenv-sysconfigdata module + libdir = pathlib.Path(dirs.prefix) / "lib" + + def find_pythonlib(libdir: pathlib.Path) -> Optional[str]: + for _root, dirs, _files in os.walk(libdir): + for entry in dirs: + if entry.startswith("python"): + return entry + return None + + python_lib = find_pythonlib(libdir) + if python_lib is None: + raise MissingDependencyError("Unable to locate python library directory") + + pymodules = libdir / python_lib + + # update ensurepip + update_ensurepip(pymodules) + + cwd = os.getcwd() + modname = find_sysconfigdata(pymodules) + path = sys.path + sys.path = [str(pymodules)] + try: + mod = __import__(str(modname)) + finally: + os.chdir(cwd) + sys.path = path + + dest = pymodules / f"{modname}.py" + install_sysdata(mod, dest, dirs.prefix, dirs.toolchain) + + # Lay down site customize + bindir = pathlib.Path(dirs.prefix) / "bin" + sitepackages = pymodules / "site-packages" + install_runtime(sitepackages) + + # Install pip + python_exe = str(dirs.prefix / "bin" / "python3") + if env["RELENV_HOST_ARCH"] != env["RELENV_BUILD_ARCH"]: + env["RELENV_CROSS"] = str(dirs.prefix) + python_exe = env["RELENV_NATIVE_PY"] + logfp.write("\nRUN ENSURE PIP\n") + + env.pop("RELENV_BUILDENV") + + runcmd( + [python_exe, "-m", "ensurepip"], + env=env, + stderr=logfp, + stdout=logfp, + ) + + # Fix the shebangs in the scripts python layed down. Order matters. + shebangs = [ + "#!{}".format(bindir / f"python{env['RELENV_PY_MAJOR_VERSION']}"), + "#!{}".format( + bindir / f"python{env['RELENV_PY_MAJOR_VERSION'].split('.', 1)[0]}" + ), + ] + newshebang = format_shebang("/python3") + for shebang in shebangs: + log.info("Patch shebang %r with %r", shebang, newshebang) + patch_shebangs( + str(pathlib.Path(dirs.prefix) / "bin"), + shebang, + newshebang, + ) + + if sys.platform == "linux": + pyconf = f"config-{env['RELENV_PY_MAJOR_VERSION']}-{env['RELENV_HOST']}" + patch_shebang( + str(pymodules / pyconf / "python-config.py"), + "#!{}".format(str(bindir / f"python{env['RELENV_PY_MAJOR_VERSION']}")), + format_shebang("../../../bin/python3"), + ) + + toolchain_path = dirs.toolchain + if toolchain_path is None: + raise MissingDependencyError("Toolchain path is required for linux builds") + shutil.copy( + pathlib.Path(toolchain_path) + / env["RELENV_HOST"] + / "sysroot" + / "lib" + / "libstdc++.so.6", + libdir, + ) + + # Moved in python 3.13 or removed? + if (pymodules / "cgi.py").exists(): + patch_shebang( + str(pymodules / "cgi.py"), + "#! 
/usr/local/bin/python", + format_shebang("../../bin/python3"), + ) + + def runpip(pkg: Union[str, os.PathLike[str]], upgrade: bool = False) -> None: + logfp.write(f"\nRUN PIP {pkg} {upgrade}\n") + target: Optional[pathlib.Path] = None + python_exe = str(dirs.prefix / "bin" / "python3") + if sys.platform == LINUX: + if env["RELENV_HOST_ARCH"] != env["RELENV_BUILD_ARCH"]: + target = pymodules / "site-packages" + python_exe = env["RELENV_NATIVE_PY"] + cmd = [ + python_exe, + "-m", + "pip", + "install", + str(pkg), + ] + if upgrade: + cmd.append("--upgrade") + if target: + cmd.append("--target={}".format(target)) + runcmd(cmd, env=env, stderr=logfp, stdout=logfp) + + runpip("wheel") + # This needs to handle running from the root of the git repo and also from + # an installed Relenv + if (MODULE_DIR.parent / ".git").exists(): + runpip(MODULE_DIR.parent, upgrade=True) + else: + runpip("relenv", upgrade=True) + globs = [ + "/bin/python*", + "/bin/pip*", + "/bin/relenv", + "/lib/python*/ensurepip/*", + "/lib/python*/site-packages/*", + "/include/*", + "*.so", + "/lib/*.so.*", + "*.py", + # Mac specific, factor this out + "*.dylib", + ] + archive = f"{ dirs.prefix }.tar.xz" + log.info("Archive is %s", archive) + with tarfile.open(archive, mode="w:xz") as fp: + create_archive(fp, dirs.prefix, globs, logfp) + + +def create_archive( + tarfp: tarfile.TarFile, + toarchive: PathLike, + globs: Sequence[str], + logfp: Optional[IO[str]] = None, +) -> None: + """ + Create an archive. + + :param tarfp: A pointer to the archive to be created + :type tarfp: file + :param toarchive: The path to the directory to archive + :type toarchive: str + :param globs: A list of filtering patterns to match against files to be added + :type globs: list + :param logfp: A pointer to the log file + :type logfp: file + """ + log.debug("Current directory %s", os.getcwd()) + log.debug("Creating archive %s", tarfp.name) + for root, _dirs, files in os.walk(toarchive): + relroot = pathlib.Path(root).relative_to(toarchive) + for f in files: + relpath = relroot / f + matches = False + for g in globs: + candidate = pathlib.Path("/") / relpath + if fnmatch.fnmatch(str(candidate), g): + matches = True + break + if matches: + log.debug("Adding %s", relpath) + tarfp.add(relpath, arcname=str(relpath), recursive=False) + else: + log.debug("Skipping %s", relpath) diff --git a/relenv/build/common/ui.py b/relenv/build/common/ui.py new file mode 100644 index 00000000..281e1158 --- /dev/null +++ b/relenv/build/common/ui.py @@ -0,0 +1,432 @@ +# Copyright 2022-2025 Broadcom. +# SPDX-License-Identifier: Apache-2.0 +""" +UI and build statistics utilities. 
+""" +from __future__ import annotations + +import logging +import os +import pathlib +import sys +import threading +from typing import Dict, MutableMapping, Optional, Sequence, cast + +import multiprocessing + +from typing import TYPE_CHECKING, TypedDict + +if TYPE_CHECKING: + from multiprocessing.synchronize import Event as SyncEvent +else: + SyncEvent = None + +from relenv.common import DATA_DIR + +from .download import CICD + + +log = logging.getLogger(__name__) + + +# ANSI color codes for terminal output +GREEN = "\033[0;32m" +YELLOW = "\033[1;33m" +RED = "\033[0;31m" +END = "\033[0m" +MOVEUP = "\033[F" + + +# Detect terminal capabilities for Unicode vs ASCII output +USE_UNICODE = True + +# Allow forcing ASCII mode via environment variable (useful for testing/debugging) +if os.environ.get("RELENV_ASCII"): + USE_UNICODE = False +elif sys.platform == "win32": + # Check if we're in a modern terminal that supports Unicode + # Windows Terminal and modern PowerShell support Unicode + wt_session = os.environ.get("WT_SESSION") + term_program = os.environ.get("TERM_PROGRAM") + if not wt_session and not term_program: + # Likely cmd.exe or old PowerShell, use ASCII + USE_UNICODE = False + + +# Spinner frames for in-progress builds +if USE_UNICODE: + # Modern Unicode spinner (looks great in most terminals) + SPINNER_FRAMES = ["⠋", "⠙", "⠹", "⠸", "⠼", "⠴", "⠦", "⠧", "⠇", "⠏"] + SYMBOL_PENDING = "◯" + SYMBOL_RUNNING = None # Will use spinner + SYMBOL_SUCCESS = "✓" + SYMBOL_FAILED = "✗" +else: + # ASCII fallback for Windows cmd.exe + SPINNER_FRAMES = ["|", "/", "-", "\\"] + SYMBOL_PENDING = "o" + SYMBOL_RUNNING = None # Will use spinner + SYMBOL_SUCCESS = "+" + SYMBOL_FAILED = "X" + + +# Build statistics filename (path computed at runtime) +BUILD_STATS_FILENAME = "build_stats.json" + + +def _get_build_stats_file() -> pathlib.Path: + """Get the path to the build statistics file. + + Returns: + Path to build_stats.json in the relenv data directory. + + Note: + This is a function rather than a module-level constant to avoid + import-time dependencies on DATA_DIR, following CPython conventions. + """ + return DATA_DIR / BUILD_STATS_FILENAME + + +class SpinnerState: + """Thread-safe spinner state management. + + Tracks the animation frame index for each named spinner to ensure + smooth, consistent animation across multiple UI updates. + """ + + def __init__(self) -> None: + """Initialize empty spinner state with thread safety.""" + self._state: Dict[str, int] = {} + self._lock = threading.Lock() + + def get(self, name: str) -> int: + """Get the current frame index for a named spinner. + + Args: + name: The spinner identifier + + Returns: + The current frame index (0 if spinner hasn't been used yet) + """ + with self._lock: + return self._state.get(name, 0) + + def increment(self, name: str) -> None: + """Increment the frame index for a named spinner. + + Args: + name: The spinner identifier + """ + with self._lock: + self._state[name] = self._state.get(name, 0) + 1 + + def reset(self, name: Optional[str] = None) -> None: + """Reset spinner state. 
+ + Args: + name: The spinner to reset, or None to reset all spinners + """ + with self._lock: + if name is None: + self._state.clear() + elif name in self._state: + del self._state[name] + + +# Module-level spinner state instance +_spinner_state = SpinnerState() + + +class BuildStats(TypedDict): + """Structure for tracking build step statistics.""" + + avg_lines: int + samples: int + last_lines: int + + +def print_ui( + events: MutableMapping[str, "multiprocessing.synchronize.Event"], + processes: MutableMapping[str, multiprocessing.Process], + fails: Sequence[str], + flipstat: Optional[Dict[str, tuple[int, float]]] = None, +) -> None: + """ + Prints the UI during the relenv building process. + + :param events: A dictionary of events that are updated during the build process + :type events: dict + :param processes: A dictionary of build processes + :type processes: dict + :param fails: A list of processes that have failed + :type fails: list + :param flipstat: Deprecated parameter, no longer used + :type flipstat: dict, optional + """ + if CICD: + sys.stdout.flush() + return + uiline = [] + for name in events: + if not events[name].is_set(): + # Pending: event not yet started + status = " {}{}".format(YELLOW, SYMBOL_PENDING) + elif name in processes: + # Running: show animated spinner + frame_idx = _spinner_state.get(name) % len(SPINNER_FRAMES) + spinner = SPINNER_FRAMES[frame_idx] + _spinner_state.increment(name) + status = " {}{}".format(GREEN, spinner) + elif name in fails: + # Failed: show error symbol + status = " {}{}".format(RED, SYMBOL_FAILED) + else: + # Success: show success symbol + status = " {}{}".format(GREEN, SYMBOL_SUCCESS) + uiline.append(status) + uiline.append(" " + END) + sys.stdout.write("\r") + sys.stdout.write("".join(uiline)) + sys.stdout.flush() + + +def print_ui_expanded( + events: MutableMapping[str, "multiprocessing.synchronize.Event"], + processes: MutableMapping[str, multiprocessing.Process], + fails: Sequence[str], + line_counts: MutableMapping[str, int], + build_stats: Dict[str, BuildStats], + phase: str = "build", +) -> None: + """ + Prints an expanded UI with progress bars during the relenv building process. 
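+
+    Build progress is an estimate: a step's live log line count is compared
+    against its historical average from ``build_stats``. Download progress
+    uses real byte counts when the total size is known.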
+ + :param events: A dictionary of events that are updated during the build process + :type events: dict + :param processes: A dictionary of build processes + :type processes: dict + :param fails: A list of processes that have failed + :type fails: list + :param line_counts: Current line counts for each step + :type line_counts: MutableMapping[str, int] + :param build_stats: Historical build statistics + :type build_stats: dict + :param phase: The current phase ("download" or "build") + :type phase: str + """ + if CICD: + sys.stdout.flush() + return + + # Track state per phase to handle download->build transitions + if not hasattr(print_ui_expanded, "_phase_state"): + print_ui_expanded._phase_state = {} # type: ignore + + phase_state = print_ui_expanded._phase_state # type: ignore + + # Number of lines = number of steps + 2 (header + separator) + num_lines = len(events) + 2 + + # If this phase has been called before, move up to overwrite previous output + if phase in phase_state: + prev_lines = phase_state[phase] + # Move up by previous line count to overwrite + sys.stdout.write(MOVEUP * prev_lines) + else: + # First call for this phase - if we're starting builds after downloads, + # add a newline to separate them + if phase == "build" and "download" in phase_state: + sys.stdout.write("\n") + + # Store line count for this phase + phase_state[phase] = num_lines + + # Clear line and print header + phase_name = "Downloads" if phase == "download" else "Builds" + sys.stdout.write("\r\033[K") # Clear line + sys.stdout.write(f"{phase_name}\n") + sys.stdout.write("─" * 70 + "\n") + + # Print each step + for name in events: + # Determine status + if not events[name].is_set(): + # Pending + status_symbol = f"{YELLOW}{SYMBOL_PENDING}{END}" + status_text = "Pending" + progress_bar = "" + elif name in processes: + # Running - show spinner and progress + frame_idx = _spinner_state.get(name) % len(SPINNER_FRAMES) + spinner = SPINNER_FRAMES[frame_idx] + _spinner_state.increment(name) + status_symbol = f"{GREEN}{spinner}{END}" + + # Determine if this is download or build phase + phase_action = "Downloading" if phase == "download" else "Building" + + # Calculate progress if we have historical data + current_lines = line_counts.get(name, 0) + if phase == "download": + # For downloads, line_counts stores bytes downloaded and total bytes + # Format: line_counts[name] = downloaded, line_counts[f"{name}_total"] = total + downloaded = current_lines + total = line_counts.get(f"{name}_total", 0) + if total > 0: + progress = min(100, int((downloaded / total) * 100)) + status_text = f"{phase_action} {progress:3d}%" + # Progress bar (20 chars wide) + filled = int(progress / 5) # 20 segments = 100% / 5 + bar = ( + "█" * filled + "░" * (20 - filled) + if USE_UNICODE + else ("#" * filled + "-" * (20 - filled)) + ) + progress_bar = f" [{bar}]" + else: + status_text = phase_action + progress_bar = "" + else: + # For builds, use historical line count data + if name in build_stats and build_stats[name]["avg_lines"] > 0: + avg_lines = build_stats[name]["avg_lines"] + progress = min(100, int((current_lines / avg_lines) * 100)) + status_text = f"{phase_action} {progress:3d}%" + + # Progress bar (20 chars wide) + filled = int(progress / 5) # 20 segments = 100% / 5 + bar = ( + "█" * filled + "░" * (20 - filled) + if USE_UNICODE + else ("#" * filled + "-" * (20 - filled)) + ) + progress_bar = f" [{bar}]" + else: + status_text = phase_action + progress_bar = "" + elif name in fails: + # Failed + status_symbol = 
f"{RED}{SYMBOL_FAILED}{END}" + status_text = "Failed" + progress_bar = "" + else: + # Success + status_symbol = f"{GREEN}{SYMBOL_SUCCESS}{END}" + status_text = "Done" + progress_bar = "" + + # Format step name (truncate/pad to 20 chars) + name_display = f"{name:<20}"[:20] + status_display = f"{status_text:<12}" + + # Clear line before writing to prevent leftover text + sys.stdout.write("\r\033[K") + sys.stdout.write( + f"{status_symbol} {name_display} {status_display}{progress_bar}\n" + ) + + sys.stdout.flush() + + +def load_build_stats() -> Dict[str, BuildStats]: + """ + Load historical build statistics from disk. + + :return: Dictionary mapping step names to their statistics + :rtype: dict + """ + stats_file = _get_build_stats_file() + if not stats_file.exists(): + return {} + try: + import json + + with open(stats_file, "r") as f: + data = json.load(f) + return cast(Dict[str, BuildStats], data) + except (json.JSONDecodeError, IOError): + log.warning("Failed to load build stats, starting fresh") + return {} + + +def save_build_stats(stats: Dict[str, BuildStats]) -> None: + """ + Save build statistics to disk. + + :param stats: Dictionary mapping step names to their statistics + :type stats: dict + """ + try: + import json + + stats_file = _get_build_stats_file() + stats_file.parent.mkdir(parents=True, exist_ok=True) + with open(stats_file, "w") as f: + json.dump(stats, f, indent=2) + except IOError: + log.warning("Failed to save build stats") + + +def update_build_stats(step_name: str, line_count: int) -> None: + """ + Update statistics for a build step with a new sample. + + Uses exponential moving average with weight 0.7 for new samples. + + :param step_name: Name of the build step + :type step_name: str + :param line_count: Number of log lines for this build + :type line_count: int + """ + stats = load_build_stats() + if step_name not in stats: + stats[step_name] = BuildStats( + avg_lines=line_count, samples=1, last_lines=line_count + ) + else: + old_avg = stats[step_name]["avg_lines"] + # Exponential moving average: 70% new value, 30% old average + new_avg = int(0.7 * line_count + 0.3 * old_avg) + stats[step_name] = BuildStats( + avg_lines=new_avg, + samples=stats[step_name]["samples"] + 1, + last_lines=line_count, + ) + save_build_stats(stats) + + +class LineCountHandler(logging.Handler): + """ + Custom logging handler that counts log lines for progress tracking. + + This handler increments a counter in a shared multiprocessing dict + for each log message emitted, enabling real-time progress estimation. + """ + + def __init__(self, step_name: str, shared_dict: MutableMapping[str, int]) -> None: + """ + Initialize the line count handler. + + :param step_name: Name of the build step being tracked + :type step_name: str + :param shared_dict: Multiprocessing-safe dict for sharing counts + :type shared_dict: MutableMapping[str, int] + """ + super().__init__() + self.step_name = step_name + self.shared_dict = shared_dict + + def emit(self, record: logging.LogRecord) -> None: + """ + Count each log record as a line. 
+
+        :param record: The log record to process
+        :type record: logging.LogRecord
+        """
+        try:
+            # Increment line count in shared memory
+            current = self.shared_dict.get(self.step_name, 0)
+            self.shared_dict[self.step_name] = current + 1
+        except Exception:
+            # Silently ignore errors in the handler to avoid breaking builds
+            pass
diff --git a/relenv/build/darwin.py b/relenv/build/darwin.py
index 264c5632..f9979c85 100644
--- a/relenv/build/darwin.py
+++ b/relenv/build/darwin.py
@@ -9,7 +9,7 @@
 import io
 from typing import IO, MutableMapping
 
-from ..common import DARWIN, MACOS_DEVELOPMENT_TARGET, arches
+from ..common import DARWIN, MACOS_DEVELOPMENT_TARGET, arches, runcmd
 from .common import (
     Dirs,
     build_openssl,
@@ -17,7 +17,6 @@
     builds,
     finalize,
     get_dependency_version,
-    runcmd,
 )
 
 ARCHES = arches[DARWIN]
diff --git a/relenv/build/linux.py b/relenv/build/linux.py
index 8dadb3d2..85939101 100644
--- a/relenv/build/linux.py
+++ b/relenv/build/linux.py
@@ -21,9 +21,8 @@
     builds,
     finalize,
     get_dependency_version,
-    runcmd,
 )
-from ..common import LINUX, Version, arches
+from ..common import LINUX, Version, arches, runcmd
 
 ARCHES = arches[LINUX]
@@ -570,10 +569,12 @@ def build_python(env: EnvMapping, dirs: Dirs, logfp: IO[str]) -> None:
     },
 )
 
+# If openssl-fips-module runs before openssl we get an error installing openssl
+# because /lib/ossl-modules already exists.
 build.add(
     "openssl-fips-module",
-    build_func=build_openssl_fips,
     wait_on=["openssl"],
+    build_func=build_openssl_fips,
     download={
         "url": "https://www.openssl.org/source/openssl-{version}.tar.gz",
         "version": "3.1.2",
diff --git a/relenv/build/windows.py b/relenv/build/windows.py
index c280ce56..1a4c80af 100644
--- a/relenv/build/windows.py
+++ b/relenv/build/windows.py
@@ -18,18 +18,22 @@
 
 from .common import (
     Dirs,
-    MODULE_DIR,
     builds,
     create_archive,
-    download_url,
-    extract_archive,
     get_dependency_version,
     install_runtime,
     patch_file,
-    runcmd,
     update_ensurepip,
 )
-from ..common import WIN32, Version, arches
+from ..common import (
+    WIN32,
+    Version,
+    arches,
+    MODULE_DIR,
+    download_url,
+    extract_archive,
+    runcmd,
+)
 
 log = logging.getLogger(__name__)
diff --git a/relenv/buildenv.py b/relenv/buildenv.py
index 8eb0902f..2dcf35cb 100644
--- a/relenv/buildenv.py
+++ b/relenv/buildenv.py
@@ -14,7 +14,8 @@
 
 from .common import (
     MACOS_DEVELOPMENT_TARGET,
-    RelenvException,
+    PlatformError,
+    RelenvEnvironmentError,
     get_toolchain,
     get_triplet,
 )
@@ -58,15 +59,15 @@ def buildenv(
     """
     if not relenv_path:
         if not is_relenv():
-            raise RelenvException("Not in a relenv environment")
+            raise RelenvEnvironmentError("Not in a relenv environment")
         relenv_path = cast(str | os.PathLike[str], cast(Any, sys).RELENV)
 
     if sys.platform != "linux":
-        raise RelenvException("buildenv is only supported on Linux")
+        raise PlatformError("buildenv is only supported on Linux")
 
     toolchain = get_toolchain()
     if not toolchain:
-        raise RelenvException("buildenv is only supported on Linux")
+        raise PlatformError("buildenv is only supported on Linux")
 
     triplet = get_triplet()
     sysroot = f"{toolchain}/{triplet}/sysroot"
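The buildenv hunk above narrows the generic RelenvException into PlatformError and RelenvEnvironmentError, both defined in the relenv/common.py changes below. A minimal sketch of what that buys a caller, assuming buildenv takes the relenv path as its first, optional argument:

```python
# Sketch only: handling the narrowed buildenv exceptions at a call site.
import sys

from relenv.buildenv import buildenv
from relenv.common import PlatformError, RelenvEnvironmentError

try:
    env = buildenv(None)  # None: detect the surrounding relenv environment
except RelenvEnvironmentError:
    # We are not running inside a relenv environment.
    sys.exit("run this from inside a relenv environment")
except PlatformError:
    # buildenv is only supported on Linux.
    sys.exit("buildenv requires Linux")

print(env["CC"])  # e.g. the toolchain's cross gcc
```

Before this change both failure modes raised the same type, so callers could not tell "wrong environment" from "wrong platform" without string matching.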
f"{toolchain}", +@@ -77,45 +76,25 @@ def buildenv( + "RELENV_PATH": f"{relenv_path}", + "CC": f"{toolchain}/bin/{triplet}-gcc", + "CXX": f"{toolchain}/bin/{triplet}-g++", +- "CFLAGS": ( +- f"--sysroot={sysroot} " +- f"-fPIC " +- f"-I{relenv_path}/include " +- f"-I{sysroot}/usr/include" +- ), ++ "CFLAGS": f"-I{relenv_path}/include -I{toolchain}/sysroot/usr/include", + "CXXFLAGS": ( +- f"--sysroot={sysroot} " +- f"-fPIC " + f"-I{relenv_path}/include " +- f"-I{sysroot}/usr/include " +- f"-L{relenv_path}/lib -L{sysroot}/lib " ++ f"-I{toolchain}/{triplet}/sysroot/usr/include " ++ f"-L{relenv_path}/lib -L{toolchain}/{triplet}/sysroot/lib " + f"-Wl,-rpath,{relenv_path}/lib" + ), + "CPPFLAGS": ( +- f"--sysroot={sysroot} " +- f"-fPIC " +- f"-I{relenv_path}/include " +- f"-I{sysroot}/usr/include" ++ f"-I{relenv_path}/include " f"-I{toolchain}/{triplet}/sysroot/usr/include" + ), + "CMAKE_CFLAGS": ( +- f"--sysroot={sysroot} " +- f"-fPIC " +- f"-I{relenv_path}/include " +- f"-I{sysroot}/usr/include" ++ f"-I{relenv_path}/include " f"-I{toolchain}/{triplet}/sysroot/usr/include" + ), + "LDFLAGS": ( +- f"--sysroot={sysroot} " +- f"-L{relenv_path}/lib -L{sysroot}/lib " ++ f"-L{relenv_path}/lib -L{toolchain}/{triplet}/sysroot/lib " + f"-Wl,-rpath,{relenv_path}/lib" + ), +- "CRATE_CC_NO_DEFAULTS": "1", +- "OPENSSL_DIR": f"{relenv_path}", +- "OPENSSL_INCLUDE_DIR": f"{relenv_path}/include", +- "OPENSSL_LIB_DIR": f"{relenv_path}/lib", +- "PKG_CONFIG_PATH": f"{relenv_path}/lib/pkgconfig", +- "RUSTFLAGS": f"-L {relenv_path}/lib -C link-arg=-Wl,-rpath,{relenv_path}/lib", + } +- if sys.platform == "darwin": ++ if sys.platform == "dawin": + env["MACOS_DEVELOPMENT_TARGET"] = MACOS_DEVELOPMENT_TARGET + return env + diff --git a/relenv/common.py b/relenv/common.py index 7515aca6..5110f201 100644 --- a/relenv/common.py +++ b/relenv/common.py @@ -20,7 +20,18 @@ import textwrap import threading import time -from typing import IO, Any, BinaryIO, Iterable, Literal, Mapping, Optional, Union, cast +from typing import ( + IO, + Any, + BinaryIO, + Callable, + Iterable, + Literal, + Mapping, + Optional, + Union, + cast, +) # relenv package version __version__ = "0.21.2" @@ -185,6 +196,124 @@ class RelenvException(Exception): """ +# Validation Errors +class ValidationError(RelenvException): + """Base class for validation-related errors. + + Raised when data validation fails (checksums, signatures, etc.). + This follows CPython's convention of having intermediate base classes + for related exception types. + """ + + +class ChecksumValidationError(ValidationError): + """Raised when file checksum verification fails. + + This typically indicates file corruption or tampering. The error message + should include the expected and actual checksums when available. + + Example: + raise ChecksumValidationError( + f"Checksum mismatch for {filename}: " + f"expected {expected}, got {actual}" + ) + """ + + +class SignatureValidationError(ValidationError): + """Raised when GPG signature verification fails. + + This indicates that a downloaded file's cryptographic signature + does not match the expected signature, suggesting tampering or + an incomplete download. + + Example: + raise SignatureValidationError( + f"GPG signature verification failed for {filename}" + ) + """ + + +# Download Errors +class DownloadError(RelenvException): + """Raised when downloading a file from a URL fails. + + This encompasses network errors, HTTP errors, and other issues + that prevent successfully retrieving a remote resource. 
+
+    Example:
+        raise DownloadError(f"Failed to download {url}: {reason}")
+    """
+
+
+# Configuration Errors
+class ConfigurationError(RelenvException):
+    """Raised when required configuration is missing or invalid.
+
+    This typically occurs during build setup when recipes are incomplete
+    or environment variables are not properly set.
+
+    Example:
+        raise ConfigurationError(
+            "Python recipe is missing download configuration"
+        )
+    """
+
+
+# Platform Errors
+class PlatformError(RelenvException):
+    """Raised when operating on an unsupported platform.
+
+    Relenv supports Linux, macOS, and Windows. This exception is raised
+    when attempting operations that are platform-specific or when running
+    on an unsupported platform.
+
+    Example:
+        raise PlatformError(f"Unsupported platform: {sys.platform}")
+    """
+
+
+# Build Errors
+class BuildCommandError(RelenvException):
+    """Raised when a build command execution fails.
+
+    This indicates that a subprocess (compiler, linker, etc.) returned
+    a non-zero exit code during the build process.
+
+    Example:
+        raise BuildCommandError(
+            f"Build command failed: {' '.join(cmd)}"
+        )
+    """
+
+
+class MissingDependencyError(RelenvException):
+    """Raised when a required build dependency is not found.
+
+    This typically occurs when expected files, directories, or system
+    packages are missing from the build environment.
+
+    Example:
+        raise MissingDependencyError(
+            f"Unable to locate {dependency_name}"
+        )
+    """
+
+
+# Environment Errors
+class RelenvEnvironmentError(RelenvException):
+    """Raised when there are issues with the relenv environment.
+
+    This occurs when operations require being inside a relenv environment
+    but the current environment is not properly configured.
+
+    Example:
+        raise RelenvEnvironmentError(
+            "Not in a relenv environment"
+        )
+    """
+
+
 def format_shebang(python: str, tpl: str = SHEBANG_TPL) -> str:
     """
     Return a formatted shebang.
@@ -405,7 +534,7 @@ def get_triplet(machine: Optional[str] = None, plat: Optional[str] = None) -> st
     elif plat == "linux":
         return f"{machine}-linux-gnu"
     else:
-        raise RelenvException(f"Unknown platform {plat}")
+        raise PlatformError(f"Unknown platform {plat}")
 
 
 def plat_from_triplet(plat: str) -> str:
@@ -418,7 +547,7 @@
         return "darwin"
     elif plat == "win":
         return "win32"
-    raise RelenvException(f"Unkown platform {plat}")
+    raise PlatformError(f"Unknown platform {plat}")
 
 
 def list_archived_builds() -> list[tuple[str, str, str]]:
@@ -535,11 +664,20 @@
     return True
 
 
-def fetch_url(url: str, fp: BinaryIO, backoff: int = 3, timeout: float = 30) -> None:
+def fetch_url(
+    url: str,
+    fp: BinaryIO,
+    backoff: int = 3,
+    timeout: float = 30,
+    progress_callback: Optional[Callable[[int, int], None]] = None,
+) -> None:
     """
     Fetch the contents of a url.
 
     This method will store the contents in the given file like object.
+
+    :param progress_callback: Optional callback(downloaded_bytes, total_bytes)
+    :type progress_callback: Optional[Callable[[int, int], None]]
     """
     # Late import so we do not import hashlib before runtime.bootstrap is called.
import urllib.error @@ -564,16 +702,34 @@ def fetch_url(url: str, fp: BinaryIO, backoff: int = 3, timeout: float = 30) -> if response is None: raise RelenvException(f"Unable to open url {url}") log.info("url opened %s", url) + + # Get content length from headers + content_length = 0 + try: + content_length_str = response.headers.get("Content-Length") + if content_length_str: + content_length = int(content_length_str) + log.info("Content-Length: %d bytes", content_length) + # Report initial state to callback + if progress_callback: + progress_callback(0, content_length) + except (ValueError, TypeError): + log.debug("Could not parse Content-Length header") + try: - total = 0 + downloaded = 0 size = 1024 * 300 block = response.read(size) while block: - total += size + block_size = len(block) + downloaded += block_size if time.time() - last > 10: - log.info("%s > %d", url, total) + log.info("%s > %d", url, downloaded) last = time.time() fp.write(block) + # Report progress + if progress_callback and content_length > 0: + progress_callback(downloaded, content_length) block = response.read(10240) finally: response.close() @@ -627,6 +783,7 @@ def download_url( verbose: bool = True, backoff: int = 3, timeout: float = 60, + progress_callback: Optional[Callable[[int, int], None]] = None, ) -> str: """ Download the url to the provided destination. @@ -639,6 +796,8 @@ def download_url( :type dest: str :param verbose: Print download url and destination to stdout :type verbose: bool + :param progress_callback: Optional callback(downloaded_bytes, total_bytes) + :type progress_callback: Optional[Callable[[int, int], None]] :raises urllib.error.HTTPError: If the url was unable to be downloaded @@ -650,7 +809,7 @@ def download_url( log.debug(f"Downloading {url} -> {local}") try: with open(local, "wb") as fout: - fetch_url(url, fout, backoff, timeout) + fetch_url(url, fout, backoff, timeout, progress_callback) except Exception as exc: if verbose: log.error("Unable to download: %s\n%s", url, exc) @@ -766,7 +925,7 @@ def enqueue_process( p.wait() if p.returncode != 0: - raise RelenvException("Build cmd '{}' failed".format(" ".join(args[0]))) + raise BuildCommandError("Build cmd '{}' failed".format(" ".join(args[0]))) return p diff --git a/tests/test_build.py b/tests/test_build.py index ab190a86..055c8e18 100644 --- a/tests/test_build.py +++ b/tests/test_build.py @@ -1,12 +1,22 @@ # Copyright 2022-2025 Broadcom. 
# SPDX-License-Identifier: Apache-2.0 import hashlib +import logging import pathlib import pytest -from relenv.build.common import Builder, get_dependency_version, verify_checksum -from relenv.common import DATA_DIR, RelenvException, toolchain_root_dir +from relenv.build.common import Dirs, get_dependency_version +from relenv.build.common.builder import Builder +from relenv.build.common.download import Download, verify_checksum +from relenv.build.common.ui import ( + BuildStats, + LineCountHandler, + load_build_stats, + save_build_stats, + update_build_stats, +) +from relenv.common import DATA_DIR, RelenvException, toolchain_root_dir, work_dirs # mypy: ignore-errors @@ -23,6 +33,11 @@ def fake_download_md5(fake_download: pathlib.Path) -> str: return hashlib.sha1(fake_download.read_bytes()).hexdigest() +@pytest.fixture +def fake_download_sha256(fake_download: pathlib.Path) -> str: + return hashlib.sha256(fake_download.read_bytes()).hexdigest() + + @pytest.mark.skip_unless_on_linux def test_builder_defaults_linux() -> None: builder = Builder(version="3.10.10") @@ -41,7 +56,7 @@ def test_builder_defaults_linux() -> None: @pytest.mark.skip_unless_on_linux def test_builder_toolchain_lazy_loading(monkeypatch: pytest.MonkeyPatch) -> None: """Test that toolchain is only fetched when accessed (lazy loading).""" - import relenv.build.common + import relenv.common call_count = {"count": 0} @@ -50,7 +65,8 @@ def mock_get_toolchain(arch=None, root=None): # Return a fake path instead of actually extracting return pathlib.Path(f"/fake/toolchain/{arch or 'default'}") - monkeypatch.setattr(relenv.build.common, "get_toolchain", mock_get_toolchain) + # Patch where get_toolchain is actually imported and used (in relenv.common) + monkeypatch.setattr(relenv.common, "get_toolchain", mock_get_toolchain) # Create builder - should NOT call get_toolchain yet builder = Builder(version="3.10.10", arch="aarch64") @@ -84,10 +100,28 @@ def test_verify_checksum(fake_download: pathlib.Path, fake_download_md5: str) -> assert verify_checksum(fake_download, fake_download_md5) is True +def test_verify_checksum_sha256( + fake_download: pathlib.Path, fake_download_sha256: str +) -> None: + """Test SHA-256 checksum validation.""" + assert verify_checksum(fake_download, fake_download_sha256) is True + + def test_verify_checksum_failed(fake_download: pathlib.Path) -> None: pytest.raises(RelenvException, verify_checksum, fake_download, "no") +def test_verify_checksum_none(fake_download: pathlib.Path) -> None: + """Test that verify_checksum returns False when checksum is None.""" + assert verify_checksum(fake_download, None) is False + + +def test_verify_checksum_invalid_length(fake_download: pathlib.Path) -> None: + """Test that invalid checksum length raises error.""" + with pytest.raises(RelenvException, match="Invalid checksum length"): + verify_checksum(fake_download, "abc123") # 6 chars, not 40 or 64 + + def test_get_dependency_version_openssl_linux() -> None: """Test getting OpenSSL version for Linux platform.""" result = get_dependency_version("openssl", "linux") @@ -148,3 +182,279 @@ def test_get_dependency_version_wrong_platform() -> None: # Try to get OpenSSL for a platform that doesn't exist result = get_dependency_version("openssl", "nonexistent-platform") assert result is None + + +# Build stats tests + + +def test_build_stats_save_load( + tmp_path: pathlib.Path, monkeypatch: pytest.MonkeyPatch +) -> None: + """Test saving and loading build statistics.""" + monkeypatch.setattr("relenv.build.common.ui.DATA_DIR", 
tmp_path) + + # Save some stats + stats = { + "python": BuildStats(avg_lines=100, samples=1, last_lines=100), + "openssl": BuildStats(avg_lines=200, samples=2, last_lines=180), + } + save_build_stats(stats) + + # Load them back + loaded = load_build_stats() + assert loaded["python"]["avg_lines"] == 100 + assert loaded["python"]["samples"] == 1 + assert loaded["python"]["last_lines"] == 100 + assert loaded["openssl"]["avg_lines"] == 200 + assert loaded["openssl"]["samples"] == 2 + + +def test_build_stats_load_nonexistent( + tmp_path: pathlib.Path, monkeypatch: pytest.MonkeyPatch +) -> None: + """Test loading stats when file doesn't exist returns empty dict.""" + monkeypatch.setattr("relenv.build.common.ui.DATA_DIR", tmp_path) + loaded = load_build_stats() + assert loaded == {} + + +def test_build_stats_update_new_step( + tmp_path: pathlib.Path, monkeypatch: pytest.MonkeyPatch +) -> None: + """Test updating stats for a new build step.""" + monkeypatch.setattr("relenv.build.common.ui.DATA_DIR", tmp_path) + + # Update a new step + update_build_stats("python", 100) + + # Load and verify + stats = load_build_stats() + assert stats["python"]["avg_lines"] == 100 + assert stats["python"]["samples"] == 1 + assert stats["python"]["last_lines"] == 100 + + +def test_build_stats_update_existing_step( + tmp_path: pathlib.Path, monkeypatch: pytest.MonkeyPatch +) -> None: + """Test updating stats for an existing step uses exponential moving average.""" + monkeypatch.setattr("relenv.build.common.ui.DATA_DIR", tmp_path) + + # Initial value + update_build_stats("python", 100) + + # Update with new value + update_build_stats("python", 200) + + # Load and verify exponential moving average: 0.7 * 200 + 0.3 * 100 = 170 + stats = load_build_stats() + assert stats["python"]["avg_lines"] == 170 + assert stats["python"]["samples"] == 2 + assert stats["python"]["last_lines"] == 200 + + +# LineCountHandler tests + + +def test_line_count_handler() -> None: + """Test LineCountHandler increments shared dict correctly.""" + shared_dict = {} + handler = LineCountHandler("test", shared_dict) + + # Create a log record + record = logging.LogRecord( + name="test", + level=logging.INFO, + pathname="", + lineno=0, + msg="test message", + args=(), + exc_info=None, + ) + + # Emit first record + handler.emit(record) + assert shared_dict["test"] == 1 + + # Emit second record + handler.emit(record) + assert shared_dict["test"] == 2 + + # Emit third record + handler.emit(record) + assert shared_dict["test"] == 3 + + +def test_line_count_handler_multiple_steps() -> None: + """Test LineCountHandler tracks multiple steps independently.""" + shared_dict = {} + handler1 = LineCountHandler("step1", shared_dict) + handler2 = LineCountHandler("step2", shared_dict) + + record = logging.LogRecord( + name="test", + level=logging.INFO, + pathname="", + lineno=0, + msg="test", + args=(), + exc_info=None, + ) + + handler1.emit(record) + handler1.emit(record) + handler2.emit(record) + + assert shared_dict["step1"] == 2 + assert shared_dict["step2"] == 1 + + +# Dirs class tests + + +@pytest.mark.skip_unless_on_linux +def test_dirs_initialization() -> None: + """Test Dirs class initialization.""" + dirs = Dirs(work_dirs(), "python", "x86_64", "3.10.0") + assert dirs.name == "python" + assert dirs.arch == "x86_64" + assert dirs.version == "3.10.0" + assert "python_build" in dirs.tmpbuild + + +def test_dirs_triplet_darwin(monkeypatch: pytest.MonkeyPatch) -> None: + """Test Dirs._triplet property for darwin platform.""" + 
monkeypatch.setattr("sys.platform", "darwin") + dirs = Dirs(work_dirs(), "test", "arm64", "3.10.0") + assert dirs._triplet == "arm64-macos" + + +def test_dirs_triplet_win32(monkeypatch: pytest.MonkeyPatch) -> None: + """Test Dirs._triplet property for win32 platform.""" + monkeypatch.setattr("sys.platform", "win32") + dirs = Dirs(work_dirs(), "test", "amd64", "3.10.0") + assert dirs._triplet == "amd64-win" + + +@pytest.mark.skip_unless_on_linux +def test_dirs_triplet_linux() -> None: + """Test Dirs._triplet property for linux platform.""" + dirs = Dirs(work_dirs(), "test", "x86_64", "3.10.0") + assert dirs._triplet == "x86_64-linux-gnu" + + +@pytest.mark.skip_unless_on_linux +def test_dirs_prefix() -> None: + """Test Dirs.prefix property.""" + dirs = Dirs(work_dirs(), "test", "x86_64", "3.10.0") + assert "3.10.0-x86_64-linux-gnu" in str(dirs.prefix) + + +@pytest.mark.skip_unless_on_linux +def test_dirs_to_dict() -> None: + """Test Dirs.to_dict() method.""" + dirs = Dirs(work_dirs(), "test", "x86_64", "3.10.0") + d = dirs.to_dict() + assert "root" in d + assert "prefix" in d + assert "downloads" in d + assert "logs" in d + assert "sources" in d + assert "build" in d + assert "toolchain" in d + + +@pytest.mark.skip_unless_on_linux +def test_dirs_pickle() -> None: + """Test Dirs serialization/deserialization.""" + dirs = Dirs(work_dirs(), "python", "x86_64", "3.10.0") + + # Get state + state = dirs.__getstate__() + assert state["name"] == "python" + assert state["arch"] == "x86_64" + + # Create new instance and restore state + dirs2 = Dirs.__new__(Dirs) + dirs2.__setstate__(state) + assert dirs2.name == "python" + assert dirs2.arch == "x86_64" + assert dirs2.tmpbuild == dirs.tmpbuild + + +# Download class tests + + +def test_download_copy() -> None: + """Test Download.copy() creates independent copy.""" + d1 = Download( + "test", + "http://example.com/{version}/test.tar.gz", + version="1.0.0", + checksum="abc123", + ) + d2 = d1.copy() + + # Verify copy has same values + assert d2.name == d1.name + assert d2.url_tpl == d1.url_tpl + assert d2.version == d1.version + assert d2.checksum == d1.checksum + + # Verify it's a different object + assert d2 is not d1 + + # Verify modifying copy doesn't affect original + d2.version = "2.0.0" + assert d1.version == "1.0.0" + assert d2.version == "2.0.0" + + +def test_download_fallback_url() -> None: + """Test Download.fallback_url property.""" + d = Download( + "test", + "http://main.com/{version}/test.tar.gz", + fallback_url="http://backup.com/{version}/test.tar.gz", + version="1.0.0", + ) + assert d.fallback_url == "http://backup.com/1.0.0/test.tar.gz" + + +def test_download_no_fallback() -> None: + """Test Download.fallback_url returns None when not configured.""" + d = Download("test", "http://example.com/{version}/test.tar.gz", version="1.0.0") + assert d.fallback_url is None + + +def test_download_signature_url() -> None: + """Test Download.signature_url property.""" + d = Download( + "test", + "http://example.com/{version}/test.tar.gz", + signature="http://example.com/{version}/test.tar.gz.asc", + version="1.0.0", + ) + assert d.signature_url == "http://example.com/1.0.0/test.tar.gz.asc" + + +def test_download_signature_url_error() -> None: + """Test Download.signature_url raises error when not configured.""" + from relenv.common import ConfigurationError + + d = Download("test", "http://example.com/test.tar.gz") + with pytest.raises(ConfigurationError, match="Signature template not configured"): + _ = d.signature_url + + +def 
test_download_destination_setter() -> None: + """Test Download.destination setter with None value.""" + d = Download("test", "http://example.com/test.tar.gz") + + # Set to a path + d.destination = "/tmp/downloads" + assert d.destination == pathlib.Path("/tmp/downloads") + + # Set to None + d.destination = None + assert d.destination == pathlib.Path() diff --git a/tests/test_common.py b/tests/test_common.py index 2e05c2f5..217b3e38 100644 --- a/tests/test_common.py +++ b/tests/test_common.py @@ -11,7 +11,7 @@ import sys import tarfile from types import ModuleType -from typing import BinaryIO, Literal +from typing import BinaryIO, Callable, Literal, Optional from unittest.mock import patch import pytest @@ -243,7 +243,13 @@ def test_download_url_writes_file(tmp_path: pathlib.Path) -> None: dest.mkdir() data = b"payload" - def fake_fetch(url: str, fp: BinaryIO, backoff: int, timeout: float) -> None: + def fake_fetch( + url: str, + fp: BinaryIO, + backoff: int, + timeout: float, + progress_callback: Optional[Callable[[int, int], None]] = None, + ) -> None: fp.write(data) with patch("relenv.common.fetch_url", side_effect=fake_fetch): @@ -257,7 +263,13 @@ def test_download_url_failure_cleans_up(tmp_path: pathlib.Path) -> None: dest.mkdir() created = dest / "a.txt" - def fake_fetch(url: str, fp: BinaryIO, backoff: int, timeout: float) -> None: + def fake_fetch( + url: str, + fp: BinaryIO, + backoff: int, + timeout: float, + progress_callback: Optional[Callable[[int, int], None]] = None, + ) -> None: raise RelenvException("fail") with patch("relenv.common.get_download_location", return_value=str(created)), patch( diff --git a/tests/test_downloads.py b/tests/test_downloads.py index c7a87bef..641661da 100644 --- a/tests/test_downloads.py +++ b/tests/test_downloads.py @@ -7,7 +7,7 @@ # mypy: ignore-errors from unittest.mock import patch -from relenv.build.common import Download +from relenv.build.common.download import Download from relenv.common import RelenvException @@ -69,7 +69,7 @@ def test_download_exists(tmp_path: pathlib.Path) -> None: def test_validate_md5sum(tmp_path: pathlib.Path) -> None: fake_md5 = "fakemd5" - with patch("relenv.build.common.verify_checksum") as run_mock: + with patch("relenv.build.common.download.verify_checksum") as run_mock: assert Download.validate_checksum(str(tmp_path), fake_md5) is True run_mock.assert_called_with(str(tmp_path), fake_md5) @@ -77,7 +77,7 @@ def test_validate_md5sum(tmp_path: pathlib.Path) -> None: def test_validate_md5sum_failed(tmp_path: pathlib.Path) -> None: fake_md5 = "fakemd5" with patch( - "relenv.build.common.verify_checksum", side_effect=RelenvException + "relenv.build.common.download.verify_checksum", side_effect=RelenvException ) as run_mock: assert Download.validate_checksum(str(tmp_path), fake_md5) is False run_mock.assert_called_with(str(tmp_path), fake_md5) @@ -85,7 +85,7 @@ def test_validate_md5sum_failed(tmp_path: pathlib.Path) -> None: def test_validate_signature(tmp_path: pathlib.Path) -> None: sig = "fakesig" - with patch("relenv.build.common.runcmd") as run_mock: + with patch("relenv.build.common.download.runcmd") as run_mock: assert Download.validate_signature(str(tmp_path), sig) is True run_mock.assert_called_with( ["gpg", "--verify", sig, str(tmp_path)], @@ -96,7 +96,9 @@ def test_validate_signature(tmp_path: pathlib.Path) -> None: def test_validate_signature_failed(tmp_path: pathlib.Path) -> None: sig = "fakesig" - with patch("relenv.build.common.runcmd", side_effect=RelenvException) as run_mock: + with patch( + 
"relenv.build.common.download.runcmd", side_effect=RelenvException + ) as run_mock: assert Download.validate_signature(str(tmp_path), sig) is False run_mock.assert_called_with( ["gpg", "--verify", sig, str(tmp_path)],