diff --git a/lib/rift/Config.py b/lib/rift/Config.py index 523a2822..33a78daf 100644 --- a/lib/rift/Config.py +++ b/lib/rift/Config.py @@ -86,6 +86,7 @@ class RiftDeprecatedConfWarning(FutureWarning): _DEFAULT_SYNC_METHOD = 'dnf' _DEFAULT_SYNC_INCLUDE = [] _DEFAULT_SYNC_EXCLUDE = [] +_DEFAULT_DEPENDENCY_TRACKING = False class Config(): """ @@ -301,6 +302,10 @@ class Config(): 'values': ['9p', 'virtiofs'], }, 'sync_output': {}, + 'dependency_tracking': { + 'check': 'bool', + 'default': _DEFAULT_DEPENDENCY_TRACKING, + }, # XXX?: 'mock.name' ? # XXX?: 'mock.template' ? } diff --git a/lib/rift/Controller.py b/lib/rift/Controller.py index f97cd3df..0eff910b 100644 --- a/lib/rift/Controller.py +++ b/lib/rift/Controller.py @@ -52,6 +52,7 @@ from rift.Mock import Mock from rift.Package import Package, Test from rift.Repository import LocalRepository, ProjectArchRepositories +from rift.graph import PackagesDependencyGraph from rift.RPM import RPM, Spec, RPMLINT_CONFIG_V1, RPMLINT_CONFIG_V2 from rift.TempDir import TempDir from rift.TestResults import TestCase, TestResults @@ -140,6 +141,8 @@ def make_parser(): subprs.add_argument('-s', '--sign', action='store_true', help='sign built packages with GPG key ' '(implies -p, --publish)') + subprs.add_argument('-S', '--skip-deps', action='store_true', + help='Skip automatic rebuild of reverse dependencies') subprs.add_argument('--junit', metavar='FILENAME', help='write junit result file') subprs.add_argument('--dont-update-repo', dest='updaterepo', action='store_false', @@ -178,6 +181,8 @@ def make_parser(): help='write junit result file') subprs.add_argument('-p', '--publish', action='store_true', help='publish build RPMS to repository') + subprs.add_argument('-S', '--skip-deps', action='store_true', + help='Skip automatic validation of reverse dependencies') # Validate diff subprs = subparsers.add_parser('validdiff') @@ -443,15 +448,19 @@ def __init__(self, pkg, config=None): Test.__init__(self, cmd, "basic_install") self.local 
= False
 
-def build_pkg(config, args, pkg, arch):
+def build_pkg(config, args, pkg, arch, staging):
     """
     Build a package for a specific architecture
     - config: rift configuration
+    - args: command line arguments
     - pkg: package to build
-    - repo: rpm repositories to use
-    - suppl_repos: optional additional repositories
+    - arch: CPU architecture
+    - staging: temporary staging rpm repositories to hold dependencies when
+      testing builds of reverse dependencies recursively.
     """
-    repos = ProjectArchRepositories(config, arch)
+    repos = ProjectArchRepositories(config, arch,
+                                    extra=staging.consumables[arch]
+                                    if staging is not None else None)
 
     if args.publish and not repos.can_publish():
         raise RiftError("Cannot publish if 'working_repo' is undefined")
@@ -468,6 +477,14 @@ def build_pkg(config, args, pkg, arch):
         logging.info('Built: %s', rpm.filepath)
     message("RPMS successfully built")
 
+    # If defined, publish in staging repository
+    if staging:
+        message("Publishing RPMS in staging repository...")
+        mock.publish(staging)
+
+        message("Updating staging repository...")
+        staging.update()
+
     # Publish
     if args.publish:
         message("Publishing RPMS...")
@@ -576,7 +593,11 @@ def validate_pkgs(config, args, pkgs, arch):
     - launch tests
     """
-    repos = ProjectArchRepositories(config, arch)
+    # Create staging repository for all packages and add it to the project
+    # supplementary repositories.
+ (staging, stagedir) = create_staging_repo(config) + repos = ProjectArchRepositories(config, arch, + extra=staging.consumables[arch]) if args.publish and not repos.can_publish(): raise RiftError("Cannot publish if 'working_repo' is undefined") @@ -615,10 +636,12 @@ def validate_pkgs(config, args, pkgs, arch): message('Validate specfile...') spec.check(pkg) - (staging, stagedir) = create_staging_repo(config) - message('Preparing Mock environment...') mock = Mock(config, arch, config.get('version')) + + for repo in repos.all: + logging.debug("Mock with repo %s: %s", repo.name, repo.url) + mock.init(repos.all) try: @@ -665,7 +688,8 @@ def validate_pkgs(config, args, pkgs, arch): message("Keep environment, VM is running. Use: rift vm connect") else: mock.clean() - stagedir.delete() + + stagedir.delete() banner(f"All packages checked on architecture {arch}") @@ -747,7 +771,7 @@ def action_vm(args, config): ret = vm_build(vm, args, config) return ret -def build_pkgs(config, args, pkgs, arch): +def build_pkgs(config, args, pkgs, arch, staging): """ Build a list of packages on a given architecture and return results. 
""" @@ -776,7 +800,7 @@ def build_pkgs(config, args, pkgs, arch): now = time.time() try: pkg.load() - build_pkg(config, args, pkg, arch) + build_pkg(config, args, pkg, arch, staging) except RiftError as ex: logging.error("Build failure: %s", str(ex)) results.add_failure(case, time.time() - now, err=str(ex)) @@ -800,17 +824,30 @@ def action_build(args, config): results = TestResults('build') staff, modules = staff_modules(config) + pkgs = get_packages_to_build(config, staff, modules, args) + logging.info( + "Ordered list of packages to build: %s", + str([pkg.name for pkg in pkgs]) + ) # Build all packages for all project supported architectures for arch in config.get('arch'): - pkgs = Package.list(config, staff, modules, args.packages) - results.extend(build_pkgs(config, args, pkgs, arch)) + # Create temporary staging repository to hold dependencies unless + # dependency tracking is disabled in project configuration or user set + # --skip-deps argument. + staging = stagedir = None + if config.get('dependency_tracking') and not args.skip_deps: + (staging, stagedir) = create_staging_repo(config) + + results.extend(build_pkgs(config, args, pkgs, arch, staging)) if getattr(args, 'junit', False): logging.info('Writing test results in %s', args.junit) results.junit(args.junit) + if stagedir: + stagedir.delete() banner(f"All packages processed for architecture {arch}") banner('All architectures processed') @@ -866,13 +903,18 @@ def action_validate(args, config): staff, modules = staff_modules(config) results = TestResults('validate') + pkgs = get_packages_to_build(config, staff, modules, args) + logging.info( + "Ordered list of packages to validate: %s", + str([pkg.name for pkg in pkgs]) + ) # Validate packages on all project supported architectures for arch in config.get('arch'): results.extend( validate_pkgs( config, args, - Package.list(config, staff, modules, args.packages), + pkgs, arch ) ) @@ -1047,6 +1089,54 @@ def get_packages_from_patch(patch, config, modules, 
staff):
     return updated, removed
 
+def get_packages_to_build(config, staff, modules, args):
+    """
+    Return ordered list of Packages to build. If dependency_tracking is disabled
+    in project configuration or --skip-deps argument is set, only the list of
+    packages in arguments is selected. Else, this function builds a dependency
+    graph of all packages in the project to determine the list of packages that
+    reverse-depend on the list of packages in arguments, recursively.
+    """
+    if not config.get('dependency_tracking') or args.skip_deps:
+        return list(Package.list(config, staff, modules, args.packages))
+
+    # Build dependency graph with all project packages.
+    graph = PackagesDependencyGraph.from_project(config, staff, modules)
+
+    result = []
+
+    def result_position(new_build_requirements):
+        """
+        Return the first index in result of packages in provided build
+        requirements list.
+        """
+        for build_requirement in new_build_requirements:
+            for index, package in enumerate(result):
+                if build_requirement.package == package:
+                    return index
+        return -1
+
+    for pkg in Package.list(config, staff, modules, args.packages):
+        required_builds = graph.solve(pkg)
+        for index, required_build in enumerate(required_builds):
+            if required_build.package in result:
+                continue
+            # Search the position in result before all its own reverse
+            # dependencies.
+            position = result_position(required_builds[index+1:])
+            logging.info(
+                "Package %s must be built: %s",
+                required_build.package.name,
+                required_build.reasons,
+            )
+            # No position constraint in result, just append the package at the
+            # end. Else insert at the right position.
+ if position == -1: + result.append(required_build.package) + else: + result.insert(position, required_build.package) + return result + def create_staging_repo(config): """ Create and return staging temporary repository with a 2-tuple containing diff --git a/lib/rift/Package.py b/lib/rift/Package.py index 808b239c..44b4eba8 100644 --- a/lib/rift/Package.py +++ b/lib/rift/Package.py @@ -69,6 +69,7 @@ def __init__(self, name, config, staff, modules): self.origin = None self.ignore_rpms = None self.rpmnames = None + self.depends = None # Static paths pkgdir = os.path.join(self._config.get('packages_dir'), self.name) @@ -164,6 +165,13 @@ def load(self, infopath=None): else: self.ignore_rpms = data.get('ignore_rpms', []) + depends = data.get('depends') + if depends is not None: + if isinstance(depends, str): + self.depends = [depends] + else: + self.depends = depends + self.check_info() if os.path.exists(self.sourcesdir): diff --git a/lib/rift/RPM.py b/lib/rift/RPM.py index 7cfa3eae..c5c9e46c 100644 --- a/lib/rift/RPM.py +++ b/lib/rift/RPM.py @@ -40,6 +40,7 @@ import shutil from subprocess import Popen, PIPE, STDOUT, run, CalledProcessError import time +import itertools import rpm @@ -213,6 +214,7 @@ def __init__(self, filepath=None, config=None): self.filepath = filepath self.srpmname = None self.pkgnames = [] + self.provides = [] self.sources = [] self.basename = None self.version = None @@ -265,6 +267,12 @@ def load(self): except ValueError as exp: raise RiftError(f"{self.filepath}: {exp}") from exp self.pkgnames = [_header_values(pkg.header['name']) for pkg in spec.packages] + # Global unique list of provides. Here dict.fromkeys() is used to remove + # duplicates as an alternative to set() for the sake of preserving order. 
+ self.provides = list(dict.fromkeys( + itertools.chain( + *[_header_values(pkg.header['provides']) + for pkg in spec.packages]))) hdr = spec.sourceHeader self.srpmname = hdr.sprintf('%{NAME}-%{VERSION}-%{RELEASE}.src.rpm') self.basename = hdr.sprintf('%{NAME}') diff --git a/lib/rift/Repository.py b/lib/rift/Repository.py index 3a685c97..ba6d98a0 100644 --- a/lib/rift/Repository.py +++ b/lib/rift/Repository.py @@ -263,7 +263,8 @@ class ProjectArchRepositories: """ Manipulate repositories defined in a project for a particular architecture. """ - def __init__(self, config, arch): + def __init__(self, config, arch, extra=None): + self.working = None self.arch = arch if config.get('working_repo'): @@ -275,6 +276,8 @@ def __init__(self, config, arch): ) self.working.create() self.supplementaries = [] + if extra: + self.supplementaries.append(extra) repos = config.get('repos', arch=arch) if repos: for name, data in repos.items(): diff --git a/lib/rift/graph.py b/lib/rift/graph.py new file mode 100644 index 00000000..1d9d9287 --- /dev/null +++ b/lib/rift/graph.py @@ -0,0 +1,256 @@ +# +# Copyright (C) 2024 CEA +# +# This file is part of Rift project. +# +# This software is governed by the CeCILL license under French law and +# abiding by the rules of distribution of free software. You can use, +# modify and/ or redistribute the software under the terms of the CeCILL +# license as circulated by CEA, CNRS and INRIA at the following URL +# "http://www.cecill.info". +# +# As a counterpart to the access to the source code and rights to copy, +# modify and redistribute granted by the license, users are provided only +# with a limited warranty and the software's author, the holder of the +# economic rights, and the successive licensors have only limited +# liability. 
+# +# In this respect, the user's attention is drawn to the risks associated +# with loading, using, modifying and/or developing or reproducing the +# software by the user in light of its specific status of free software, +# that may mean that it is complicated to manipulate, and that also +# therefore means that it is reserved for developers and experienced +# professionals having in-depth computer knowledge. Users are therefore +# encouraged to load and test the software's suitability as regards their +# requirements in conditions enabling the security of their systems and/or +# data to be ensured and, more generally, to use and operate it in the +# same conditions as regards security. +# +# The fact that you are presently reading this means that you have had +# knowledge of the CeCILL license and that you accept its terms. +# + +""" +Module to track dependencies between packages in Rift projects in a graph and +solve recursive build requirements. +""" +import time +from collections import namedtuple +import logging +import re + +from rift.Package import Package +from rift.RPM import Spec + +BuildRequirement = namedtuple("BuildRequirement", ["package", "reasons"]) + + +class PackageDependencyNode: + """Node in PackagesDependencyGraph.""" + def __init__(self, package): + self.package = package + # parse spec file subpackages and build requires + spec = Spec(package.specfile) + self.subpackages = spec.provides + # Parse buildrequires string in spec file to discard explicit versions + # enforcement. + self.build_requires = [ + value.group(1) + for value + in re.finditer(r"(\S+)( (>|>=|=|<=|<) \S+)?", spec.buildrequires) + ] + self.rdeps = [] + + def depends_on(self, node): + """ + Return True if the package of the current node depends on the package of + the given node, ie. when current node source package has build + requirement on any of the subpackages produced by the given node. 
+ """ + # Check depends in info.yaml + if self.package.depends is not None: + return node.package.name in self.package.depends + # If dependencies are not defined in info.yaml, look at build requires + # and produced subpackages found in spec file. + return any( + build_require in node.subpackages + for build_require in self.build_requires + ) + + def required_subpackages(self, rdep): + """ + Return the list of current node subpackages that are build requirements + of the given reverse dependency. + """ + return [ + subpkg + for subpkg in self.subpackages + if subpkg in rdep.build_requires + ] + + +class PackagesDependencyGraph: + """Graph of dependencies between packages in Rift project.""" + def __init__(self): + self.nodes = [] + self.path = None + + def dump(self): + """Dump graph in its current state with logging message.""" + for node in self.nodes: + logging.info("→ %s", node.package.name) + logging.info(" requires: %s", node.build_requires) + logging.info(" subpackages: %s", str(node.subpackages)) + logging.info( + " rdeps: %s", str([rdep.package.name for rdep in node.rdeps]) + ) + + def solve(self, package): + """ + Return list of recursive build requirements for the provided package. + """ + self.path = [] # Start with empty path + for node in self.nodes: + if node.package.name == package.name: + return self._solve(node, "User request") + + # Package not found in graph, return empty list. + return [] + + def _dep_index(self, new, result): + """ + The new and results arguments are list of build requirements. The result + contains the current list of build requirements. The first item of the + new list is the build requirement to insert in result list followed by + all its own build requirements. + + If the first item in new is already present in result, return True and + the index of this item in result. Else, it returns False and the first + index of its build requirements in result. If none of its build + requirements is found in result, return index -1. 
+ """ + # Search first item of new in result. If found, return True and its + # index. + for index, build_requirement in enumerate(result): + if build_requirement.package == new[0].package: + return True, index + + # First item not found in result, Return false and the first index of + # of its build requirement. + for index, build_requirement in enumerate(result): + for new_build_requirement in new[1:]: + if new_build_requirement.package == build_requirement.package: + return False, index + + # No build requirement found in result, return false and -1. + return False, -1 + + def _solve(self, node, reason, depth=0): + """ + Return list of recursive build requirements for the provided package + dependency node. The reason argument is a string to textually justify + the build requirement of the given node. The depth argument is used to + track the depth of recursive path in the dependency graph. + """ + + result = [] + logging.debug( + "%s→ Source package %s must be rebuilt", + ' '*depth, + node.package.name + ) + result.append( + BuildRequirement(node.package, [reason]) + ) + + # Remove the end of the processing path after the current node + del self.path[max(0, depth-1):-1] + # Add current node to the processing path + self.path.append(node) + + for rdep in node.rdeps: + # Determine the reason to justify the build requirement on the + # reverse dependency. If depends are defined in info.yaml, just + # indicate this dependency. Otherwise, resolve build requirements + # to indicate the subpackages that explain the dependency. + if rdep.package.depends is not None: + reason = f"depends on {node.package.name}" + else: + reason = "build requires on " + ", ".join( + node.required_subpackages(rdep) + ) + # If reverse dependency has already been processed in the processing + # path to the current node, add it to resulting list and stop + # processing to avoid endless loop. 
+ if rdep in self.path[0:depth]: + logging.debug( + "%s ⥀ Loop detected on node %s at depth %d", + ' '*depth, + rdep.package.name, + depth + ) + result.append(BuildRequirement(rdep.package, [reason])) + continue + logging.debug( + "%s Exploring reverse dependency %s", + ' '*depth, + rdep.package.name + ) + # Iterate over all recursively solve build requirements for this + # reverse dependency. + build_requirements = self._solve(rdep, reason, depth+1) + for idx, build_requirement in enumerate(build_requirements): + found, position = self._dep_index(build_requirements[idx:], result) + if found: + # Build requirement already present in result, just extend + # the build reasons. + result[position].reasons.extend( + build_requirement.reasons + ) + elif position == -1: + # The recursive build requirements of the new build + # requirement are not present in the list, just append the + # new build requirement in result. + result.append(build_requirement) + else: + # Insert the new build requirement before its first + # recursive build requirements in result. + result.insert(position, build_requirement) + return result + + def build(self, packages): + """Build graph with the provided packages.""" + tic = time.perf_counter() + for package in packages: + # Load info.yaml to check for potential explicit dependencies. Skip + # package with warning if unable to load. 
+            try:
+                package.load()
+            except FileNotFoundError as err:
+                logging.warning("Skipping package %s unable to load: %s",
+                                package.name, err)
+                continue
+            self._insert(package)
+        toc = time.perf_counter()
+        logging.debug("Graph built in %0.4f seconds", toc - tic)
+        logging.debug("Graph size: %d", len(self.nodes))
+
+    def _insert(self, package):
+        """Insert package in the graph."""
+        node = PackageDependencyNode(package)
+        for _node in self.nodes:
+            if _node.depends_on(node):
+                node.rdeps.append(_node)
+            if node.depends_on(_node):
+                _node.rdeps.append(node)
+        self.nodes.append(node)
+
+    @classmethod
+    def from_project(cls, config, staff, modules):
+        """
+        Build and return the dependency graph of all packages in the
+        project.
+        """
+        graph = cls()
+        graph.build(Package.list(config, staff, modules))
+        return graph
diff --git a/template/project.conf b/template/project.conf
index 8dead750..8d3c648d 100644
--- a/template/project.conf
+++ b/template/project.conf
@@ -129,3 +129,10 @@ repos:
 # defined with:
 #
 # sync_output: /path/to/local/mirrors
+
+# Automatic dependency tracking. If enabled, when Rift builds or validates
+# packages, it also searches for reverse build dependencies of these packages
+# recursively and automatically rebuilds/revalidates them to ensure absence of
+# regressions.
+# +# dependency_tracking: false diff --git a/tests/Config.py b/tests/Config.py index ef2335c4..dfc1ce00 100644 --- a/tests/Config.py +++ b/tests/Config.py @@ -17,7 +17,7 @@ _DEFAULT_QEMU_CMD, _DEFAULT_REPO_CMD, \ _DEFAULT_SHARED_FS_TYPE, _DEFAULT_VIRTIOFSD, \ _DEFAULT_SYNC_METHOD, _DEFAULT_SYNC_INCLUDE, \ - _DEFAULT_SYNC_EXCLUDE, \ + _DEFAULT_SYNC_EXCLUDE, _DEFAULT_DEPENDENCY_TRACKING, \ RiftDeprecatedConfWarning class ConfigTest(RiftTestCase): @@ -49,6 +49,8 @@ def test_get(self): # Default external tools path self.assertEqual(config.get('qemu'), _DEFAULT_QEMU_CMD) self.assertEqual(config.get('createrepo'), _DEFAULT_REPO_CMD) + self.assertEqual(config.get('dependency_tracking'), + _DEFAULT_DEPENDENCY_TRACKING) # Default gpg settings self.assertEqual(config.get('gpg'), None) diff --git a/tests/Controller.py b/tests/Controller.py index ac952b68..a3d2c0f1 100644 --- a/tests/Controller.py +++ b/tests/Controller.py @@ -11,13 +11,14 @@ from unidiff import parse_unidiff from TestUtils import ( - make_temp_file, make_temp_dir, RiftTestCase, RiftProjectTestCase + make_temp_file, make_temp_dir, RiftTestCase, RiftProjectTestCase, SubPackage ) from VM import GLOBAL_CACHE, VALID_IMAGE_URL, PROXY from rift.Controller import ( main, get_packages_from_patch, + get_packages_to_build, remove_packages, make_parser, ) @@ -982,6 +983,149 @@ def test_action_sync_missing_output_parent(self): ): main(['sync']) + @patch('rift.Controller.PackagesDependencyGraph') + def test_build_graph(self, mock_graph_class): + """ Test build generates graph of packages dependencies with dependency tracking enabled. 
""" + # Enable dependency tracking in configuration + self.config.set('dependency_tracking', True) + self.update_project_conf() + main(['build', 'pkg']) + mock_graph_class.from_project.assert_called_once() + + @patch('rift.Controller.Package') + @patch('rift.Controller.PackagesDependencyGraph') + def test_build_graph_tracking_disabled(self, mock_graph_class, mock_package_class): + """ Test build does not build graph of packages dependencies with dependency tracking disabled. """ + # Return empty list of packages with Package.list() to avoid actual + # build iterations. + mock_package_class.list.return_value = [] + # By default, dependency tracking is disabled + main(['build', 'pkg']) + mock_graph_class.from_project.assert_not_called() + + @patch('rift.Controller.Package') + @patch('rift.Controller.PackagesDependencyGraph') + def test_build_graph_skip_deps(self, mock_graph_class, mock_package_class): + """ Test build --skip-deps does not build graph of packages dependencies. """ + # Return empty list of packages with Package.list() to avoid actual + # build iterations. + mock_package_class.list.return_value = [] + # Enable dependency tracking in configuration + self.config.set('dependency_tracking', True) + self.update_project_conf() + main(['build', '--skip-deps', 'pkg']) + mock_graph_class.from_project.assert_not_called() + + @patch('rift.Controller.PackagesDependencyGraph') + def test_validate_graph(self, mock_graph_class): + """ Test validate generates graph of packages dependencies with dependency tracking enabled. 
""" + # Enable dependency tracking in configuration + self.config.set('dependency_tracking', True) + self.update_project_conf() + main(['validate', 'pkg']) + mock_graph_class.from_project.assert_called_once() + + @patch('rift.Controller.Package') + @patch('rift.Controller.PackagesDependencyGraph') + def test_validate_graph_tracking_disabled(self, mock_graph_class, mock_package_class): + """ Test validate does not build graph of packages dependencies with dependency tracking disabled. """ + # Return empty list of packages with Package.list() to avoid actual + # build iterations. + mock_package_class.list.return_value = [] + # By default, dependency tracking is disabled + main(['validate', 'pkg']) + mock_graph_class.from_project.assert_not_called() + + @patch('rift.Controller.Package') + @patch('rift.Controller.PackagesDependencyGraph') + def test_validate_graph_skip_deps(self, mock_graph_class, mock_package_class): + """ Test validate --skip-deps does not build graph of packages dependencies. """ + # Return empty list of packages with Package.list() to avoid actual + # build iterations. + mock_package_class.list.return_value = [] + self.config.set('dependency_tracking', True) + self.update_project_conf() + main(['validate', '--skip-deps', 'pkg']) + mock_graph_class.from_project.assert_not_called() + + def test_get_packages_to_build_tracking_disabled(self): + """ Test get_packages_to_build() with tracking disabled (by default) returns user provided packages. """ + args = Mock() + args.skip_deps = False + args.packages = ['pkg'] + pkgs = get_packages_to_build( + self.config, self.staff, self.modules, args + ) + self.assertEqual([pkg.name for pkg in pkgs], ['pkg']) + + def test_get_packages_to_build_skip_deps(self): + """ Test get_packages_to_build() with skip deps (tracking enabled) returns user provided packages. 
""" + self.config.set('dependency_tracking', True) + args = Mock() + args.skip_deps = True + args.packages = ['pkg'] + pkgs = get_packages_to_build( + self.config, self.staff, self.modules, args + ) + self.assertEqual([pkg.name for pkg in pkgs], ['pkg']) + + def test_get_packages_to_build_no_package(self): + """ Test get_packages_to_build() (tracking enabled, w/o skip deps) returns empty with unexisting package. """ + self.config.set('dependency_tracking', True) + args = Mock() + args.skip_deps = False + args.packages = ['pkg'] + pkgs = get_packages_to_build( + self.config, self.staff, self.modules, args + ) + # Package 'pkg' does not exist in project directory, graph solving must + # return an empty list. + self.assertEqual(pkgs, []) + + def test_get_packages_to_build_package_order(self): + """ Test get_packages_to_build() returns correctly ordered list of reverse dependencies. """ + self.make_pkg( + name='libone', + build_requires=['libtwo-devel'], + subpackages=[ + SubPackage('libone-bin'), + SubPackage('libone-devel') + ] + ) + self.make_pkg( + name='libtwo', + subpackages=[ + SubPackage('libtwo-bin'), + SubPackage('libtwo-devel') + ] + ) + self.make_pkg( + name='my-software', + build_requires=['libone-devel, libtwo-devel'] + ) + # Enable tracking, disable --skip-deps + self.config.set('dependency_tracking', True) + args = Mock() + args.skip_deps = False + args.packages = ['libone'] + pkgs = get_packages_to_build( + self.config, self.staff, self.modules, args + ) + self.assertEqual( + [pkg.name for pkg in pkgs], ['libone', 'my-software'] + ) + args.skip_deps = False + args.packages = ['libone', 'libtwo'] + pkgs = get_packages_to_build( + self.config, self.staff, self.modules, args + ) + # Package libone must be present after libtwo and my-software must be + # present after both libtwo and libone in the order list of build + # requirements. 
+ self.assertEqual( + [pkg.name for pkg in pkgs], ['libtwo', 'libone', 'my-software'] + ) + class ControllerArgumentsTest(RiftTestCase): """ Arguments parsing tests for Controller module""" diff --git a/tests/RPM.py b/tests/RPM.py index bbb50022..0c92b407 100644 --- a/tests/RPM.py +++ b/tests/RPM.py @@ -61,11 +61,14 @@ def tearDown(self): def test_init(self): """ Test Spec instanciation """ spec = Spec(self.spec) - self.assertTrue(self.name in spec.pkgnames) + self.assertIn(self.name, spec.pkgnames) self.assertEqual(len(spec.pkgnames), 1) + self.assertIn(self.name, spec.provides) + self.assertIn(f"{self.name}-provide", spec.provides) + self.assertEqual(len(spec.provides), 2) self.assertEqual(spec.exclusive_archs, []) self.assertEqual(spec.arch, self.arch) - self.assertTrue("{0}-{1}.tar.gz".format(self.name, self.version) in spec.sources) + self.assertIn("{0}-{1}.tar.gz".format(self.name, self.version), spec.sources) self.assertEqual(len(spec.lines), 36) def test_init_fails(self): diff --git a/tests/Repository.py b/tests/Repository.py index b538e796..2789a364 100644 --- a/tests/Repository.py +++ b/tests/Repository.py @@ -397,6 +397,18 @@ def test_working_and_supplementaries(self): self.assertEqual(repos.all[1], repos.supplementaries[0]) shutil.rmtree(working_repo_path) + def test_extra(self): + """Test extra repository""" + repos = ProjectArchRepositories( + self.config, + 'x86_64', + extra=ConsumableRepository('/nowhere', name='hello'), + ) + self.assertEqual(len(repos.supplementaries), 1) + self.assertEqual(len(repos.all), 1) + self.assertEqual(repos.supplementaries[0].name, 'hello') + self.assertEqual(repos.supplementaries[0].url, '/nowhere') + def test_can_publish(self): """Test ProjectArchRepositories.can_publish() method""" working_repo_path = make_temp_dir() diff --git a/tests/TestUtils.py b/tests/TestUtils.py index facc714b..d75045cc 100644 --- a/tests/TestUtils.py +++ b/tests/TestUtils.py @@ -194,7 +194,9 @@ def tearDown(self): for src in 
self.pkgsrc.values(): os.unlink(src) for pkgdir in self.pkgdirs.values(): - os.unlink(os.path.join(pkgdir, 'info.yaml')) + info_path = os.path.join(pkgdir, 'info.yaml') + if os.path.exists(info_path): + os.unlink(info_path) os.rmdir(os.path.join(pkgdir, 'sources')) os.rmdir(pkgdir) # Remove potentially generated files for VM related tests @@ -259,6 +261,8 @@ def make_pkg( metadata.get('reason', 'Missing feature') ) ) + if 'depends' in metadata: + nfo.write(" depends: {}\n".format(metadata.get('depends'))) # ./packages/pkg/pkg.spec self.pkgspecs[name] = os.path.join(self.pkgdirs[name], diff --git a/tests/graph.py b/tests/graph.py new file mode 100644 index 00000000..b2dbd94a --- /dev/null +++ b/tests/graph.py @@ -0,0 +1,350 @@ +# +# Copyright (C) 2020 CEA +# +import os +import shutil + +from rift.graph import PackagesDependencyGraph +from rift.Package import Package +from TestUtils import RiftProjectTestCase, SubPackage + +class GraphTest(RiftProjectTestCase): + """ + Tests class for PackageDependencyGraph + """ + def test_one_package(self): + """ Test graph with one package """ + pkg_name = 'fake' + self.make_pkg(name=pkg_name) + package = Package(pkg_name, self.config, self.staff, self.modules) + graph = PackagesDependencyGraph.from_project( + self.config, + self.staff, + self.modules + ) + self.assertEqual(len(graph.nodes), 1) + build_requirements = graph.solve(package) + self.assertEqual(len(build_requirements), 1) + self.assertEqual(build_requirements[0].package.name, package.name) + self.assertEqual(build_requirements[0].reasons, ["User request"]) + + def test_packages_unable_load(self): + """ Test graph build with package unable to load """ + pkgs_names = [ 'success', 'failed'] + packages = {} + for pkg_name in pkgs_names: + self.make_pkg(name=pkg_name) + packages[pkg_name] = Package(pkg_name, self.config, self.staff, self.modules) + # Remove info.yaml in packages failed to generate error + os.unlink(packages['failed'].metafile) + # Build packages graph + 
with self.assertLogs(level='WARNING') as cm: + graph = PackagesDependencyGraph.from_project( + self.config, + self.staff, + self.modules + ) + # Check warning message has been emitted + self.assertEqual( + cm.output, + [ "WARNING:root:Skipping package failed unable to load: [Errno 2] " + "No such file or directory: " + f"'{self.projdir}/packages/failed/info.yaml'" ] + ) + # Check success package is successfully loaded anyway. + self.assertEqual(len(graph.nodes), 1) + self.assertEqual(graph.nodes[0].package.name, 'success') + + def test_dump(self): + """ Test graph dump """ + pkg_name = 'fake' + self.make_pkg(name=pkg_name) + package = Package(pkg_name, self.config, self.staff, self.modules) + graph = PackagesDependencyGraph.from_project( + self.config, + self.staff, + self.modules + ) + with self.assertLogs(level='INFO') as cm: + graph.dump() + self.assertEqual( + cm.output, + [ + 'INFO:root:→ fake', + "INFO:root: requires: ['br-package']", + "INFO:root: subpackages: ['fake', 'fake-provide']", + 'INFO:root: rdeps: []' + ] + ) + + def test_empty_solve(self): + """ Test solve with package not in graph """ + pkg_name = 'one' + self.make_pkg(name=pkg_name) + graph = PackagesDependencyGraph.from_project( + self.config, + self.staff, + self.modules + ) + package = Package('another', self.config, self.staff, self.modules) + build_requirements = graph.solve(package) + self.assertEqual(len(build_requirements), 0) + + def test_multiple_packages(self): + """ Test graph with multiple packages and dependencies in info.yaml """ + # Define 3 packages with depends in info.yaml, in both string and list + # formats. 
+ self.make_pkg( + name='libone', + metadata={ + 'depends': 'libtwo' + } + ) + self.make_pkg( + name='libtwo', + ) + self.make_pkg( + name='my-software', + metadata={ + 'depends': ['libone'] + } + ) + + # Load graph + graph = PackagesDependencyGraph.from_project( + self.config, + self.staff, + self.modules + ) + self.assertEqual(len(graph.nodes), 3) + + # Rebuild of my-software does not trigger rebuild of other packages. + build_requirements = graph.solve( + Package('my-software', self.config, self.staff, self.modules) + ) + self.assertEqual(len(build_requirements), 1) + self.assertEqual(build_requirements[0].package.name, 'my-software') + self.assertEqual(build_requirements[0].reasons, ["User request"]) + + # Rebuild of libone triggers rebuild of my-software because it depends + # on libone. + build_requirements = graph.solve( + Package('libone', self.config, self.staff, self.modules) + ) + self.assertEqual(len(build_requirements), 2) + self.assertEqual(build_requirements[0].package.name, 'libone') + self.assertEqual(build_requirements[0].reasons, ["User request"]) + self.assertEqual(build_requirements[1].package.name, 'my-software') + self.assertEqual( + build_requirements[1].reasons, + ["depends on libone"], + ) + + # Rebuild of libtwo triggers rebuild of: + # - libone because it depends on libtwo + # - my-software because it depends on libone + build_requirements = graph.solve( + Package('libtwo', self.config, self.staff, self.modules) + ) + self.assertEqual(len(build_requirements), 3) + self.assertEqual(build_requirements[0].package.name, 'libtwo') + self.assertEqual(build_requirements[0].reasons, ["User request"]) + self.assertEqual(build_requirements[1].package.name, 'libone') + self.assertEqual( + build_requirements[1].reasons, + ["depends on libtwo"], + ) + self.assertEqual(build_requirements[2].package.name, 'my-software') + self.assertEqual( + build_requirements[2].reasons, + ["depends on libone"], + ) + + def test_multiple_packages_spec_fallback(self): 
+ """ Test graph with multiple packages and dependencies in RPM spec files """ + # Define 3 packages without depends in info.yaml but with build requires + # on others subpackages. + self.make_pkg( + name='libone', + build_requires=['libtwo-devel >= 3.5'], + subpackages=[ + SubPackage('libone-bin'), + SubPackage('libone-devel') + ] + ) + self.make_pkg( + name='libtwo', + subpackages=[ + SubPackage('libtwo-bin'), + SubPackage('libtwo-devel') + ] + ) + self.make_pkg( + name='my-software', + build_requires=['libone-devel = 3, libtwo-devel'], + ) + + def load_graph(): + graph = PackagesDependencyGraph.from_project( + self.config, + self.staff, + self.modules + ) + self.assertEqual(len(graph.nodes), 3) + return graph + + graph = load_graph() + + # Rebuild of my-software does not trigger rebuild of other packages. + build_requirements = graph.solve( + Package('my-software', self.config, self.staff, self.modules) + ) + self.assertEqual(len(build_requirements), 1) + self.assertEqual(build_requirements[0].package.name, 'my-software') + self.assertEqual(build_requirements[0].reasons, ["User request"]) + + + # Rebuild of libone triggers rebuild of my-software because my-software + # build requires on one of libone subpackage. + build_requirements = graph.solve( + Package('libone', self.config, self.staff, self.modules) + ) + self.assertEqual(len(build_requirements), 2) + self.assertEqual(build_requirements[0].package.name, 'libone') + self.assertEqual(build_requirements[0].reasons, ["User request"]) + self.assertEqual(build_requirements[1].package.name, 'my-software') + self.assertEqual( + build_requirements[1].reasons, + ["build requires on libone-devel"] + ) + + # Rebuild of libtwo triggers rebuild of libone and my-software because + # - libone build requires on one of libtwo subpackage + # - my-software build requires on one of libtwo subpackage and on one + # of libone subpackage. 
+ build_requirements = graph.solve( + Package('libtwo', self.config, self.staff, self.modules) + ) + self.assertEqual(len(build_requirements), 3) + self.assertEqual(build_requirements[0].package.name, 'libtwo') + self.assertEqual(build_requirements[0].reasons, ["User request"]) + self.assertEqual(build_requirements[1].package.name, 'libone') + self.assertEqual( + build_requirements[1].reasons, + ["build requires on libtwo-devel"] + ) + self.assertEqual(build_requirements[2].package.name, 'my-software') + self.assertCountEqual( + build_requirements[2].reasons, + [ + "build requires on libone-devel", + "build requires on libtwo-devel" + ] + ) + + # Remove my-software package directory, redefine my-software package + # with dependencies in info.yaml and reload the graph. + shutil.rmtree(self.pkgdirs['my-software']) + self.make_pkg( + name='my-software', + build_requires=['libone-devel, libtwo-devel'], + metadata={ + 'depends': ['libtwo'] + } + ) + graph = load_graph() + + # Rebuild of libone MUST NOT trigger rebuild of my-software anymore + # because my-software dependencies defined in info.yaml now override + # build requires in RPM spec file. + self.assertEqual( + len( + graph.solve( + Package('libone', self.config, self.staff, self.modules) + ) + ), + 1 + ) + + def test_multiple_packages_with_provides(self): + """ Test graph with multiple packages and dependencies on provides in RPM spec files """ + # Define 2 packages without depends in info.yaml but with build requires + # on other subpackages' provides. 
+ self.make_pkg( + name='libone', + subpackages=[ + SubPackage('libone-bin'), + SubPackage('libone-devel') + ] + ) + self.make_pkg( + name='my-software', + build_requires=['libone-provide = 3'], + ) + + def load_graph(): + graph = PackagesDependencyGraph.from_project( + self.config, + self.staff, + self.modules + ) + self.assertEqual(len(graph.nodes), 2) + return graph + + graph = load_graph() + + # Rebuild of libone triggers rebuild of my-software because my-software + # build requires on one of libone's subpackage provides. + build_requirements = graph.solve( + Package('libone', self.config, self.staff, self.modules) + ) + self.assertEqual(len(build_requirements), 2) + self.assertEqual(build_requirements[0].package.name, 'libone') + self.assertEqual(build_requirements[0].reasons, ["User request"]) + self.assertEqual(build_requirements[1].package.name, 'my-software') + self.assertEqual( + build_requirements[1].reasons, + ["build requires on libone-provide"] + ) + + def test_loop(self): + """ Test graph solve with dependency loop """ + # Define 3 packages with a dependency loop. + self.make_pkg( + name='libone', + metadata={ + 'depends': 'libtwo' + } + ) + self.make_pkg( + name='libtwo', + metadata={ + 'depends': 'libthree' + } + ) + self.make_pkg( + name='libthree', + metadata={ + 'depends': 'libone' + } + ) + + # Load graph + graph = PackagesDependencyGraph.from_project( + self.config, + self.staff, + self.modules + ) + self.assertEqual(len(graph.nodes), 3) + + # For all three packages, the resolution should return all three + # build requirements. + for package in ['libone', 'libtwo', 'libthree']: + self.assertEqual( + len( + graph.solve( + Package(package, self.config, self.staff, self.modules) + ) + ), + 3 + )