5 changes: 5 additions & 0 deletions lib/rift/Config.py
@@ -86,6 +86,7 @@ class RiftDeprecatedConfWarning(FutureWarning):
_DEFAULT_SYNC_METHOD = 'dnf'
_DEFAULT_SYNC_INCLUDE = []
_DEFAULT_SYNC_EXCLUDE = []
_DEFAULT_DEPENDENCY_TRACKING = False

class Config():
"""
@@ -301,6 +302,10 @@ class Config():
'values': ['9p', 'virtiofs'],
},
'sync_output': {},
'dependency_tracking': {
'check': 'bool',
'default': _DEFAULT_DEPENDENCY_TRACKING,
},
# XXX?: 'mock.name' ?
# XXX?: 'mock.template' ?
}
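A small self-contained sketch of the fallback semantics this table entry gives the new boolean option, with a hypothetical get_option() helper standing in for the real Config.get(); projects that never set dependency_tracking keep the current behaviour:

_DEFAULT_DEPENDENCY_TRACKING = False

def get_option(project_conf, name, defaults):
    """Mimic the options-table fallback performed when an option is absent."""
    return project_conf.get(name, defaults[name])

defaults = {'dependency_tracking': _DEFAULT_DEPENDENCY_TRACKING}
print(get_option({}, 'dependency_tracking', defaults))  # False (option absent)
print(get_option({'dependency_tracking': True}, 'dependency_tracking', defaults))  # True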
116 changes: 103 additions & 13 deletions lib/rift/Controller.py
@@ -52,6 +52,7 @@
from rift.Mock import Mock
from rift.Package import Package, Test
from rift.Repository import LocalRepository, ProjectArchRepositories
from rift.graph import PackagesDependencyGraph
from rift.RPM import RPM, Spec, RPMLINT_CONFIG_V1, RPMLINT_CONFIG_V2
from rift.TempDir import TempDir
from rift.TestResults import TestCase, TestResults
@@ -140,6 +141,8 @@ def make_parser():
subprs.add_argument('-s', '--sign', action='store_true',
help='sign built packages with GPG key '
'(implies -p, --publish)')
subprs.add_argument('-S', '--skip-deps', action='store_true',
help='skip automatic rebuild of reverse dependencies')
subprs.add_argument('--junit', metavar='FILENAME',
help='write junit result file')
subprs.add_argument('--dont-update-repo', dest='updaterepo', action='store_false',
@@ -178,6 +181,8 @@ def make_parser():
help='write junit result file')
subprs.add_argument('-p', '--publish', action='store_true',
help='publish build RPMS to repository')
subprs.add_argument('-S', '--skip-deps', action='store_true',
help='skip automatic validation of reverse dependencies')

# Validate diff
subprs = subparsers.add_parser('validdiff')
@@ -443,15 +448,19 @@ def __init__(self, pkg, config=None):
Test.__init__(self, cmd, "basic_install")
self.local = False

def build_pkg(config, args, pkg, arch):
def build_pkg(config, args, pkg, arch, staging):
"""
Build a package for a specific architecture
- config: rift configuration
- args: command line arguments
- pkg: package to build
- arch: CPU architecture
- staging: temporary staging rpm repository to hold dependencies when
testing builds of reverse dependencies recursively.
"""
repos = ProjectArchRepositories(config, arch)
repos = ProjectArchRepositories(config, arch,
extra=staging.consumables[arch]
if staging is not None else None)
if args.publish and not repos.can_publish():
raise RiftError("Cannot publish if 'working_repo' is undefined")

@@ -468,6 +477,14 @@ def build_pkg(config, args, pkg, arch):
logging.info('Built: %s', rpm.filepath)
message("RPMS successfully built")

# If a staging repository is defined, publish the built RPMS into it
if staging:
message("Publishing RPMS in staging repository...")
mock.publish(staging)

message("Updating staging repository...")
staging.update()

# Publish
if args.publish:
message("Publishing RPMS...")
@@ -576,7 +593,11 @@ def validate_pkgs(config, args, pkgs, arch):
- launch tests
"""

repos = ProjectArchRepositories(config, arch)
# Create a staging repository for all packages and add it to the project's
# supplementary repositories.
(staging, stagedir) = create_staging_repo(config)
repos = ProjectArchRepositories(config, arch,
extra=staging.consumables[arch])

if args.publish and not repos.can_publish():
raise RiftError("Cannot publish if 'working_repo' is undefined")
@@ -615,10 +636,12 @@ def validate_pkgs(config, args, pkgs, arch):
message('Validate specfile...')
spec.check(pkg)

(staging, stagedir) = create_staging_repo(config)

message('Preparing Mock environment...')
mock = Mock(config, arch, config.get('version'))

for repo in repos.all:
logging.debug("Mock with repo %s: %s", repo.name, repo.url)

mock.init(repos.all)

try:
@@ -665,7 +688,8 @@ def validate_pkgs(config, args, pkgs, arch):
message("Keep environment, VM is running. Use: rift vm connect")
else:
mock.clean()
stagedir.delete()

stagedir.delete()

banner(f"All packages checked on architecture {arch}")

@@ -747,7 +771,7 @@ def action_vm(args, config):
ret = vm_build(vm, args, config)
return ret

def build_pkgs(config, args, pkgs, arch):
def build_pkgs(config, args, pkgs, arch, staging):
"""
Build a list of packages on a given architecture and return results.
"""
@@ -776,7 +800,7 @@ def build_pkgs(config, args, pkgs, arch):
now = time.time()
try:
pkg.load()
build_pkg(config, args, pkg, arch)
build_pkg(config, args, pkg, arch, staging)
except RiftError as ex:
logging.error("Build failure: %s", str(ex))
results.add_failure(case, time.time() - now, err=str(ex))
@@ -800,17 +824,30 @@ def action_build(args, config):
results = TestResults('build')

staff, modules = staff_modules(config)
pkgs = get_packages_to_build(config, staff, modules, args)
logging.info(
"Ordered list of packages to build: %s",
str([pkg.name for pkg in pkgs])
)

# Build all packages for all project supported architectures
for arch in config.get('arch'):

pkgs = Package.list(config, staff, modules, args.packages)
results.extend(build_pkgs(config, args, pkgs, arch))
# Create a temporary staging repository to hold dependencies, unless
# dependency tracking is disabled in the project configuration or the user
# passed the --skip-deps argument.
staging = stagedir = None
if config.get('dependency_tracking') and not args.skip_deps:
(staging, stagedir) = create_staging_repo(config)

results.extend(build_pkgs(config, args, pkgs, arch, staging))

if getattr(args, 'junit', False):
logging.info('Writing test results in %s', args.junit)
results.junit(args.junit)

if stagedir:
stagedir.delete()
banner(f"All packages processed for architecture {arch}")

banner('All architectures processed')
@@ -866,13 +903,18 @@ def action_validate(args, config):

staff, modules = staff_modules(config)
results = TestResults('validate')
pkgs = get_packages_to_build(config, staff, modules, args)
logging.info(
"Ordered list of packages to validate: %s",
str([pkg.name for pkg in pkgs])
)
# Validate packages on all project supported architectures
for arch in config.get('arch'):
results.extend(
validate_pkgs(
config,
args,
Package.list(config, staff, modules, args.packages),
pkgs,
arch
)
)
@@ -1047,6 +1089,54 @@ def get_packages_from_patch(patch, config, modules, staff):

return updated, removed

def get_packages_to_build(config, staff, modules, args):
"""
Return the ordered list of Packages to build. If dependency_tracking is
disabled in the project configuration or the --skip-deps argument is set, only
the packages given in arguments are selected. Otherwise, this function builds
a dependency graph of all packages in the project to determine, recursively,
the packages that reverse depend on the packages given in arguments.
"""
if not config.get('dependency_tracking') or args.skip_deps:
return list(Package.list(config, staff, modules, args.packages))

# Build the dependency graph with all of the project's packages.
graph = PackagesDependencyGraph.from_project(config, staff, modules)

result = []

def result_position(new_build_requirements):
"""
Return the index in result of the first package from the provided build
requirements list that is already present, or -1 if none of them is.
"""
for build_requirement in new_build_requirements:
for index, package in enumerate(result):
if build_requirement.package == package:
return index
return -1

for pkg in Package.list(config, staff, modules, args.packages):
required_builds = graph.solve(pkg)
for index, required_build in enumerate(required_builds):
if required_build.package in result:
continue
# Find the position in result that keeps this package before all of its
# own reverse dependencies.
position = result_position(required_builds[index+1:])
logging.info(
"Package %s must be built: %s",
required_build.package.name,
required_build.reasons,
)
# Without a position constraint in result, just append the package at
# the end. Otherwise, insert it at the right position.
if position == -1:
result.append(required_build.package)
else:
result.insert(position, required_build.package)
return result

def create_staging_repo(config):
"""
Create a temporary staging repository and return it as a 2-tuple containing
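To illustrate the ordering performed by get_packages_to_build() above, here is a small self-contained sketch of the same insertion logic with toy data; ordered_insert() and the package names are hypothetical stand-ins, not the real PackagesDependencyGraph API:

# Each build must land in the result list before any of the builds that
# reverse depend on it and are already present, mirroring result_position().
def ordered_insert(result, build, reverse_deps):
    """Insert build before the first of its reverse dependencies in result."""
    for later in reverse_deps:
        if later in result:
            result.insert(result.index(later), build)
            return
    result.append(build)

result = []
# 'lib' is required by 'tool', which is in turn required by 'meta'.
for build, reverse_deps in (('meta', []), ('tool', ['meta']), ('lib', ['tool', 'meta'])):
    if build not in result:
        ordered_insert(result, build, reverse_deps)
print(result)  # ['lib', 'tool', 'meta']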
8 changes: 8 additions & 0 deletions lib/rift/Package.py
@@ -69,6 +69,7 @@ def __init__(self, name, config, staff, modules):
self.origin = None
self.ignore_rpms = None
self.rpmnames = None
self.depends = None

# Static paths
pkgdir = os.path.join(self._config.get('packages_dir'), self.name)
@@ -164,6 +165,13 @@ def load(self, infopath=None):
else:
self.ignore_rpms = data.get('ignore_rpms', [])

depends = data.get('depends')
if depends is not None:
if isinstance(depends, str):
self.depends = [depends]
else:
self.depends = depends

self.check_info()

if os.path.exists(self.sourcesdir):
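A short standalone illustration of the normalization done in Package.load() above: whatever shape 'depends' takes in the package info file (the values below are made up), self.depends ends up as a list, or stays None when the key is absent.

for raw in ('other-package', ['libfoo', 'libbar'], None):
    depends = None
    if raw is not None:
        depends = [raw] if isinstance(raw, str) else raw
    print(depends)
# ['other-package']
# ['libfoo', 'libbar']
# None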
8 changes: 8 additions & 0 deletions lib/rift/RPM.py
@@ -40,6 +40,7 @@
import shutil
from subprocess import Popen, PIPE, STDOUT, run, CalledProcessError
import time
import itertools

import rpm

@@ -213,6 +214,7 @@ def __init__(self, filepath=None, config=None):
self.filepath = filepath
self.srpmname = None
self.pkgnames = []
self.provides = []
self.sources = []
self.basename = None
self.version = None
@@ -265,6 +267,12 @@ def load(self):
except ValueError as exp:
raise RiftError(f"{self.filepath}: {exp}") from exp
self.pkgnames = [_header_values(pkg.header['name']) for pkg in spec.packages]
# Global unique list of provides. Here dict.fromkeys() is used to remove
# duplicates as an alternative to set() for the sake of preserving order.
self.provides = list(dict.fromkeys(
itertools.chain(
*[_header_values(pkg.header['provides'])
for pkg in spec.packages])))
hdr = spec.sourceHeader
self.srpmname = hdr.sprintf('%{NAME}-%{VERSION}-%{RELEASE}.src.rpm')
self.basename = hdr.sprintf('%{NAME}')
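For readers unfamiliar with the idiom used in Spec.load() above, a tiny standalone example of order-preserving de-duplication with itertools.chain() and dict.fromkeys(), using made-up provides values:

import itertools

# chain() flattens the per-subpackage provides lists and dict.fromkeys()
# drops duplicates while keeping first-seen order, unlike set().
per_package_provides = [['foo', 'libfoo.so.1'], ['foo-devel', 'libfoo.so.1']]
provides = list(dict.fromkeys(itertools.chain(*per_package_provides)))
print(provides)  # ['foo', 'libfoo.so.1', 'foo-devel']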
5 changes: 4 additions & 1 deletion lib/rift/Repository.py
@@ -263,7 +263,8 @@ class ProjectArchRepositories:
"""
Manipulate repositories defined in a project for a particular architecture.
"""
def __init__(self, config, arch):
def __init__(self, config, arch, extra=None):

self.working = None
self.arch = arch
if config.get('working_repo'):
@@ -275,6 +276,8 @@ def __init__(self, config, arch):
)
self.working.create()
self.supplementaries = []
if extra:
self.supplementaries.append(extra)
repos = config.get('repos', arch=arch)
if repos:
for name, data in repos.items():
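A toy stand-in (not the real ProjectArchRepositories class) showing the effect of the new optional extra argument: when a staging repository consumable is passed, it is simply placed first among the supplementary repositories.

class ArchRepos:
    """Hypothetical simplification of the constructor above."""
    def __init__(self, configured_repos, extra=None):
        self.supplementaries = []
        if extra:
            self.supplementaries.append(extra)
        self.supplementaries.extend(configured_repos)

print(ArchRepos(['os', 'updates'], extra='staging-x86_64').supplementaries)
# ['staging-x86_64', 'os', 'updates']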