diff --git a/Jenkinsfile b/Jenkinsfile index 756892b901d..0a62c64433d 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -5,7 +5,7 @@ /* groovylint-disable ParameterName, VariableName */ /* Copyright 2019-2024 Intel Corporation /* Copyright 2025 Google LLC - * Copyright 2025 Hewlett Packard Enterprise Development LP + * Copyright 2025-2026 Hewlett Packard Enterprise Development LP * All rights reserved. * * This file is part of the DAOS Project. It is subject to the license terms @@ -19,6 +19,7 @@ // To use a test branch (i.e. PR) until it lands to master // I.e. for testing library changes //@Library(value='pipeline-lib@your_branch') _ +@Library(value='pipeline-lib@hendersp/DAOS-18348') _ /* groovylint-disable-next-line CompileStatic */ job_status_internal = [:] @@ -90,9 +91,6 @@ String next_version() { // Don't define this as a type or it loses it's global scope target_branch = env.CHANGE_TARGET ? env.CHANGE_TARGET : env.BRANCH_NAME -String sanitized_JOB_NAME() { - return JOB_NAME.toLowerCase().replaceAll('/', '-').replaceAll('%2f', '-') -} // bail out of branch builds that are not on a whitelist if (!env.CHANGE_ID && @@ -189,10 +187,11 @@ Boolean skip_pragma_set(String name, String def_val='false') { } Boolean skip_build_stage(String distro='', String compiler='gcc') { - // Skip the stage if the CI__NOBUILD parameter is set + // Skip the stage if the CI_BUILD_ parameter is not set if (distro) { - if (startedByUser() && paramsValue("CI_${distro}_NOBUILD", false)) { - println("[${env.STAGE_NAME}] Skipping build stage due to CI_${distro}_NOBUILD") + String param_name = "CI_BUILD_${distro.toUpperCase()}" + if (startedByUser() && !paramsValue(param_name, false)) { + println("[${env.STAGE_NAME}] Skipping build stage due to ${param_name} parameter") return true } } @@ -202,6 +201,9 @@ Boolean skip_build_stage(String distro='', String compiler='gcc') { if (distro && compiler) { pragma_names << "build-${distro}-${compiler}" } + else if (distro) { + pragma_names << 
"build-${distro}" + } Boolean any_pragma_skip = pragma_names.any { name -> skip_pragma_set(name) } if (any_pragma_skip) { println("[${env.STAGE_NAME}] Skipping build stage for due to Skip-[${pragma_names}] pragma") @@ -218,10 +220,369 @@ Boolean skip_build_stage(String distro='', String compiler='gcc') { return false } +Boolean skip_unit_test_stage(String name) { + // Skip the unit test stage if the CI_UNIT_TEST_ parameter is not set + if (startedByUser() && !paramsValue("CI_UNIT_TEST_${name.toUpperCase()}", true)) { + println("[${env.STAGE_NAME}] Skipping unit test stage due to CI_UNIT_TEST_${name.toUpperCase()} parameter") + return true + } + + // Skip the unit test stage if any Skip-unit-test- pragmas are true + if (skip_pragma_set("unit-test-${name.toLowerCase()}")) { + println("[${env.STAGE_NAME}] Skipping unit test stage due to Skip-unit-test-${name.toLowerCase()} pragma") + return true + } + + // Otherwise run the unit test stage + return false +} + +Boolean code_coverage_enabled() { + // Determine if code coverage is enabled for the build + return !skip_build_stage('bullseye', 'covc') + // if (paramsValue('CI_CODE_COVERAGE', true) == true) { + // env.COVFN_DISABLED = 'false' + // } + // return env.COVFN_DISABLED == 'false' +} + +String code_coverage_build_args() { + // Get additional build args for code coverage, if enabled + if (!code_coverage_enabled()) { + return '' + } + return " --build-arg COMPILER=covc --build-arg CODE_COVERAGE=true" +} + +String code_coverage_scons_args() { + // Get additional scons args for code coverage, if enabled + if (!code_coverage_enabled()) { + return '' + } + return ' COMPILER=covc' +} + +String add_daos_pkgs() { + // Get the additional daos package names to install in functional test stages + // if (code_coverage_enabled()) { + // return 'tests-internal,-code-coverage' + // } + return 'tests-internal' +} + +Map unit_test_post_args(String name) { + // Get the arguments for unitTestPost + Map args = [artifacts: 
["${name}_logs/"]] + if (code_coverage_enabled()) { + // args['artifacts'].add("covc_${name}_logs/") + // args['artifacts'].add('covc_vm_test/**') + args['ignore_failure'] = true + args['code_coverage'] = true + } + return args +} + +Map nlt_post_args() { + Map args = [ + artifacts: ['nlt_logs/'], + testResults: 'nlt-junit.xml', + always_script: 'ci/unit/test_nlt_post.sh', + valgrind_stash: 'el8-gcc-nlt-memcheck' + ] + if (code_coverage_enabled()) { + // args['artifacts'].add('covc_nlt_logs/') + args['ignore_failure'] = true + args['code_coverage'] = true + } + return args +} + +String getScriptOutput(String script, String args='') { + return sh(script: "${script} ${args}", returnStdout: true).trim() +} + +/** + * runStage + * + * Determine if the stage should be run. + * + * @param Map of parameter names and expected values to check + * @pragmas Map of commit pragma names and expected values to check + * @otherCondition Additional condition to consider + */ +Boolean runStage(Map params=[:], Map pragmas=[:], Boolean otherCondition=true) { + // Run stage w/o any conditionals + if (params.isEmpty() && pragmas.isEmpty()) { + return true + } + if (!otherCondition) { + println("[${env.STAGE_NAME}] Skipping stage due to otherCondition=false") + return false + } + + String skip_pragma_msg = '' + for(entry in params) { + if (paramsValue(entry.key, entry.value) == entry.value) { + skip_pragma_msg = "Skipping stage due to ${entry.key} parameter (${entry.value})" + break + } + } + + String skip_param_msg = '' + for(entry in pragmas) { + String expected = entry.value.toString().toLowerCase() + if (cachedCommitPragma(entry.key, expected).toLowerCase() == expected) { + skip_pragma_msg = "Skipping stage due to ${entry.key} parameter (${entry.value})" + break + } + } + + for(name in truePragmas) { + if (cachedCommitPragma(name, 'true').toLowerCase() == 'true') { + skip_param_msg = "Skipping stage due to ${name} commit pragma (true)" + break + } + } + if (!skip_param_msg) { + 
for(name in falsePragmas) { + if (cachedCommitPragma(name, 'false').toLowerCase() == 'false') { + skip_param_msg = "Skipping stage due to ${name} commit pragma (false)" + break + } + } + } + + if (startedByUser()) { + // Manual build: check parameters first + if (skip_pragma_msg) { + println("[${env.STAGE_NAME}] ${skip_pragma_msg}") + return false + } + // Manual build: check commit pragmas second + if (skip_param_msg) { + println("[${env.STAGE_NAME}] ${skip_param_msg}") + return false + } + } else { + // Normal build: check commit pragmas first + if (skip_param_msg) { + println("[${env.STAGE_NAME}] ${skip_param_msg}") + return false + } + // Normal build: check parameters second + if (skip_pragma_msg) { + println("[${env.STAGE_NAME}] ${skip_pragma_msg}") + return false + } + } + + // Otherwise run the stage + return true +} + +/** + * scriptedBuildStage + * + * Get a build stage in scripted syntax. + * + * @param kwargs Map containing the following optional arguments (empty strings yield defaults): + * name the build stage name + * distro the shorthand distro name; defaults to 'el8' + * rpmDistro the distro to use for rpm building; defaults to distro + * compiler the compiler to use; defaults to 'gcc' + * runCondition optional condition to determine if the stage should run; defaults + * to !skip_build_stage(distro, compiler) + * buildRpms whether or not to build rpms; defaults to true + * release the DAOS RPM release value to use; defaults to env.DAOS_RELVAL + * dockerBuildArgs optional docker build arguments + * sconsBuildArgs optional scons build arguments + * artifacts optional artifacts name to archive; defaults to + * "config.log-${distro}-${compiler}" + * uploadTarget the distro to use when uploading rpms; defaults to distro + * @return a scripted stage to run in a pipeline + */ +def scriptedBuildStage(Map kwargs = [:]) { + String name = kwargs.get('name', 'Unknown Build Stage') + String distro = kwargs.get('distro', 'el8') + String rpmDistro = 
kwargs.get('rpmDistro', distro) + String compiler = kwargs.get('compiler', 'gcc') + Boolean runCondition = kwargs.get('runCondition', !skip_build_stage(distro, compiler)) + Boolean buildRpms = kwargs.get('buildRpms', true) + String release = kwargs.get('release', env.DAOS_RELVAL) + String dockerBuildArgs = kwargs.get('dockerBuildArgs', '') + Map sconsBuildArgs = kwargs.get('sconsBuildArgs', [:]) + String artifacts = kwargs.get('artifacts', "config.log-${distro}-${compiler}") + String uploadTarget = kwargs.get('uploadTarget', distro) + String dockerTag = jobStatusKey("${name}-${distro}-${compiler}").toLowerCase() + String bullseye = 'false' + if (compiler == 'covc') { + bullseye = 'true' + } + return { + stage("${name}") { + if (runCondition) { + node('docker_runner') { + def dockerImage = docker.build(dockerTag, dockerBuildArgs) + try { + dockerImage.inside() { + if (buildRpms) { + sh label: 'Install RPMs', + script: "./ci/rpm/install_deps.sh ${rpmDistro} ${release} ${bullseye}" + sh label: 'Build deps', + script: "./ci/rpm/build_deps.sh ${bullseye} ${env.BULLSEYE_KEY}" + } + job_step_update(sconsBuild(sconsBuildArgs)) + if (buildRpms) { + sh label: 'Generate RPMs', + script: "./ci/rpm/gen_rpms.sh ${rpmDistro} ${release} ${bullseye}" + // Success actions + uploadNewRPMs(uploadTarget, 'success') + } + } + } catch (Exception e) { + // Unsuccessful actions + sh """if [ -f config.log ]; then + mv config.log ${artifacts} + fi""" + archiveArtifacts artifacts: "${artifacts}", allowEmptyArchive: true + throw e + } finally { + // Cleanup actions + if (buildRpms) { + uploadNewRPMs(uploadTarget, 'cleanup') + } + jobStatusUpdate(job_status_internal, name) + } + } + } + else { + println("[${name}] Skipping build stage") + Utils.markStageSkippedForConditional("${name}") + } + println("[${name}] Finished with ${job_status_internal}") + } + } +} + +/** + * scriptedUnitTestStage + * + * Get a unit test stage in scripted syntax. 
+ * + * @param kwargs Map containing the following optional arguments (empty strings yield defaults): + * name the unit test stage name + * runCondition Optional additional condition to determine if the stage should run + * nodeLabel the node label to use + * unitTestArgs Map of arguments to pass to unitTest() + * stashArgs Map of arguments to pass to optional stash() call + * unitTestPostArgs Map of arguments to pass to unitTestPost() + * recordIssuesArgs Map of arguments to pass to recordIssues() + * @return a scripted stage to run in a pipeline + */ +def scriptedUnitTestStage(Map kwargs = [:]) { + String name = kwargs.get('name', 'Unknown Unit Test') + Boolean runCondition = kwargs.get('runCondition', !skip_unit_test_stage(name)) + String nodeLabel = kwargs.get('nodeLabel', params.CI_UNIT_VM1_LABEL) + Map unitTestArgs = kwargs.get('unitTestArgs', [:]) + Map stashArgs = kwargs.get('stashArgs', [:]) + Map unitTestPostArgs = kwargs.get('unitTestPostArgs', [:]) + Map recordIssuesArgs = kwargs.get('recordIssuesArgs', [:]) + + return { + stage("${name}") { + if (runCondition) { + node(nodeLabel) { + try { + checkoutScm(pruneStaleBranch: true) + // Execute the unit test + job_step_update(unitTest(unitTestArgs)) + if (stashArgs) { + stash(stashArgs) + } + } finally { + // Always execute post actions + if (unitTestPostArgs) { + unitTestPost(unitTestPostArgs) + } + if (recordIssuesArgs) { + recordIssues(recordIssuesArgs) + } + jobStatusUpdate(job_status_internal, name) + } + } + } + else { + println("[${name}] Skipping unit test stage") + Utils.markStageSkippedForConditional("${name}") + } + println("[${name}] Finished with ${job_status_internal}") + } + } +} + +/** + * scriptedSummaryStage + * + * Get a summary stage in scripted syntax. 
+ * + * @param kwargs Map containing the following optional arguments (empty strings yield defaults): + * name the summary stage name + * distro the shorthand distro name; defaults to 'el8' + * compiler the compiler to use; defaults to 'gcc' + * runCondition Optional additional condition to determine if the stage should run + * dockerBuildArgs optional docker build arguments + * installScript optional script to install RPMs + * runScriptArgs Map of arguments to pass to runScriptWithStashes() + * archiveArtifactsArgs Map of arguments to pass to archiveArtifacts() + * @return a scripted stage to run in a pipeline + */ +def scriptedSummaryStage(Map kwargs = [:]) { + String name = kwargs.get('name', 'Unknown Summary Stage') + String distro = kwargs.get('distro', 'el8') + String compiler = kwargs.get('compiler', 'gcc') + Boolean runCondition = kwargs.get('runCondition', true) + String dockerBuildArgs = kwargs.get('dockerBuildArgs', '') + String installScript = kwargs.get('installScript', '') + Map runScriptArgs = kwargs.get('runScriptArgs', [:]) + Map archiveArtifactsArgs = kwargs.get('archiveArtifactsArgs', [:]) + String dockerTag = jobStatusKey("${name}-${distro}-${compiler}").toLowerCase() + + return { + stage("${name}") { + if (runCondition) { + node('docker_runner') { + def dockerImage = docker.build(dockerTag, dockerBuildArgs) + try { + dockerImage.inside() { + if (installScript) { + sh label: 'Install RPMs', + script: installScript + } + job_step_update(runScriptWithStashes(runScriptArgs)) + } + } finally { + // Cleanup actions + if (archiveArtifactsArgs) { + archiveArtifacts(archiveArtifactsArgs) + } + jobStatusUpdate(job_status_internal, name) + } + } + } + else { + println("[${name}] Skipping summary stage") + Utils.markStageSkippedForConditional("${name}") + } + println("[${name}] Finished with ${job_status_internal}") + } + } +} + pipeline { agent { label 'lightweight' } environment { + BULLSEYE_KEY = credentials('bullseye_license_key') GITHUB_USER = 
credentials('daos-jenkins-review-posting') SSH_KEY_ARGS = '-ici_key' CLUSH_ARGS = "-o$SSH_KEY_ARGS" @@ -296,15 +657,21 @@ pipeline { string(name: 'CI_UBUNTU20.04_TARGET', defaultValue: '', description: 'Image to used for Ubuntu 20 CI tests. I.e. ubuntu20.04, etc.') - booleanParam(name: 'CI_el8_NOBUILD', - defaultValue: false, - description: 'Do not build sources and RPMs on EL 8') - booleanParam(name: 'CI_el9_NOBUILD', - defaultValue: false, - description: 'Do not build sources and RPMs on EL 9') - booleanParam(name: 'CI_leap15_NOBUILD', - defaultValue: false, - description: 'Do not build sources and RPMs on Leap 15') + booleanParam(name: 'CI_BUILD_EL8', + defaultValue: true, + description: 'Build sources and RPMs on EL 8') + booleanParam(name: 'CI_BUILD_EL9', + defaultValue: true, + description: 'Build sources and RPMs on EL 9') + booleanParam(name: 'CI_BUILD_LEAP15', + defaultValue: true, + description: 'Build sources and RPMs on Leap 15') + booleanParam(name: 'CI_BUILD_LEAP15_ICC', + defaultValue: true, + description: 'Build sources on Leap 15 with Intel-C') + booleanParam(name: 'CI_BUILD_BULLSEYE', + defaultValue: true, + description: 'Build sources and RPMs with Bullseye code coverage') booleanParam(name: 'CI_ALLOW_UNSTABLE_TEST', defaultValue: false, description: 'Continue testing if a previous stage is Unstable') @@ -317,6 +684,12 @@ pipeline { booleanParam(name: 'CI_UNIT_TEST_MEMCHECK', defaultValue: true, description: 'Run the Unit Test with memcheck on EL 8 test stage') + booleanParam(name: 'CI_UNIT_TEST_BULLSEYE', + defaultValue: true, + description: 'Run the Unit Test with Bullseye code coverage test stage') + booleanParam(name: 'CI_NLT_TEST_BULLSEYE', + defaultValue: true, + description: 'Run the NLT test with Bullseye code coverage test stage') booleanParam(name: 'CI_FI_el8_TEST', defaultValue: true, description: 'Run the Fault injection testing on EL 8 test stage') @@ -521,205 +894,128 @@ pipeline { beforeAgent true expression { !skip_build_stage() } 
} - parallel { - stage('Build on EL 8.8') { - when { - beforeAgent true - expression { !skip_build_stage('el8') } - } - agent { - dockerfile { - filename 'utils/docker/Dockerfile.el.8' - label 'docker_runner' - additionalBuildArgs dockerBuildArgs(repo_type: 'stable', - deps_build: false, - parallel_build: true) + - " -t ${sanitized_JOB_NAME()}-el8 " + - ' --build-arg DAOS_PACKAGES_BUILD=no ' + - ' --build-arg DAOS_KEEP_SRC=yes ' + - ' --build-arg REPOS="' + prRepos() + '"' - } - } - steps { - script { - sh label: 'Install RPMs', - script: './ci/rpm/install_deps.sh el8 "' + env.DAOS_RELVAL + '"' - sh label: 'Build deps', - script: './ci/rpm/build_deps.sh' - job_step_update( - sconsBuild(parallel_build: true, - stash_files: 'ci/test_files_to_stash.txt', - build_deps: 'no', - stash_opt: true, - scons_args: sconsArgs() + - ' PREFIX=/opt/daos TARGET_TYPE=release')) - sh label: 'Generate RPMs', - script: './ci/rpm/gen_rpms.sh el8 "' + env.DAOS_RELVAL + '"' - } - } - post { - success { - uploadNewRPMs('el8', 'success') - } - unsuccessful { - sh '''if [ -f config.log ]; then - mv config.log config.log-el8-gcc - fi''' - archiveArtifacts artifacts: 'config.log-el8-gcc', - allowEmptyArchive: true - } - cleanup { - uploadNewRPMs('el8', 'cleanup') - job_status_update() - } - } - } - stage('Build on EL 9.6') { - when { - beforeAgent true - expression { !skip_build_stage('el9') } - } - agent { - dockerfile { - filename 'utils/docker/Dockerfile.el.9' - label 'docker_runner' - additionalBuildArgs dockerBuildArgs(repo_type: 'stable', - deps_build: false, - parallel_build: true) + - " -t ${sanitized_JOB_NAME()}-el9 " + - ' --build-arg DAOS_PACKAGES_BUILD=no ' + - ' --build-arg DAOS_KEEP_SRC=yes ' + - ' --build-arg REPOS="' + prRepos() + '"' + - ' --build-arg POINT_RELEASE=.6 ' - - } - } - steps { - script { - sh label: 'Install RPMs', - script: './ci/rpm/install_deps.sh el9 "' + env.DAOS_RELVAL + '"' - sh label: 'Build deps', - script: './ci/rpm/build_deps.sh' - job_step_update( - 
sconsBuild(parallel_build: true, - stash_files: 'ci/test_files_to_stash.txt', - build_deps: 'no', - stash_opt: true, - scons_args: sconsArgs() + - ' PREFIX=/opt/daos TARGET_TYPE=release')) - sh label: 'Generate RPMs', - script: './ci/rpm/gen_rpms.sh el9 "' + env.DAOS_RELVAL + '"' - } - } - post { - success { - uploadNewRPMs('el9', 'success') - } - unsuccessful { - sh '''if [ -f config.log ]; then - mv config.log config.log-el9-gcc - fi''' - archiveArtifacts artifacts: 'config.log-el9-gcc', - allowEmptyArchive: true - } - cleanup { - uploadNewRPMs('el9', 'cleanup') - job_status_update() - } - } - } - stage('Build on Leap 15.5') { - when { - beforeAgent true - expression { !skip_build_stage('leap15') } - } - agent { - dockerfile { - filename 'utils/docker/Dockerfile.leap.15' - label 'docker_runner' - additionalBuildArgs dockerBuildArgs(repo_type: 'stable', - parallel_build: true, - deps_build: false) + - ' --build-arg DAOS_PACKAGES_BUILD=no ' + - ' --build-arg DAOS_KEEP_SRC=yes ' + - " -t ${sanitized_JOB_NAME()}-leap15" + - ' --build-arg POINT_RELEASE=.5 ' - - } - } - steps { - script { - sh label: 'Install RPMs', - script: './ci/rpm/install_deps.sh suse.lp155 "' + env.DAOS_RELVAL + '"' - sh label: 'Build deps', - script: './ci/rpm/build_deps.sh' - job_step_update( - sconsBuild(parallel_build: true, - scons_args: sconsFaultsArgs() + - ' PREFIX=/opt/daos TARGET_TYPE=release', - build_deps: 'yes')) - sh label: 'Generate RPMs', - script: './ci/rpm/gen_rpms.sh suse.lp155 "' + env.DAOS_RELVAL + '"' - } - } - post { - success { - uploadNewRPMs('leap15', 'success') - } - unsuccessful { - sh '''if [ -f config.log ]; then - mv config.log config.log-leap15-gcc - fi''' - archiveArtifacts artifacts: 'config.log-leap15-gcc', - allowEmptyArchive: true - } - cleanup { - uploadNewRPMs('leap15', 'cleanup') - job_status_update() - } - } - } - stage('Build on Leap 15.5 with Intel-C and TARGET_PREFIX') { - when { - beforeAgent true - expression { !skip_build_stage('leap15', 'icc') } - } 
- agent { - dockerfile { - filename 'utils/docker/Dockerfile.leap.15' - label 'docker_runner' - additionalBuildArgs dockerBuildArgs(repo_type: 'stable', - parallel_build: true, - deps_build: true) + - " -t ${sanitized_JOB_NAME()}-leap15-icc" + - ' --build-arg DAOS_PACKAGES_BUILD=no ' + - ' --build-arg COMPILER=icc' + - ' --build-arg POINT_RELEASE=.5 ' - - } - } - steps { - job_step_update( - sconsBuild(parallel_build: true, - scons_args: sconsFaultsArgs() + - ' PREFIX=/opt/daos TARGET_TYPE=release', - build_deps: 'no')) - } - post { - unsuccessful { - sh '''if [ -f config.log ]; then - mv config.log config.log-leap15-intelc - fi''' - archiveArtifacts artifacts: 'config.log-leap15-intelc', - allowEmptyArchive: true - } - cleanup { - job_status_update() - } - } - } - } - } + steps { + script { + parallel( + 'Build on EL 8.8': scriptedBuildStage( + name: 'Build on EL 8.8', + distro:'el8', + compiler: 'gcc', + buildRpms: true, + release: env.DAOS_RELVAL, + dockerBuildArgs: dockerBuildArgs(repo_type: 'stable', + deps_build: false, + parallel_build: true) + + ' --build-arg DAOS_PACKAGES_BUILD=no' + + ' --build-arg DAOS_KEEP_SRC=yes' + + ' --build-arg REPOS="' + prRepos('el8') + '"' + + ' -f utils/docker/Dockerfile.el.8 .', + sconsBuildArgs: [ + parallel_build: true, + stash_files: 'ci/test_files_to_stash.txt', + build_deps: 'no', + stash_opt: true, + scons_args: sconsArgs() + ' PREFIX=/opt/daos TARGET_TYPE=release' + ], + artifacts: "config.log-el8-gcc" + ), + 'Build on EL 9.6': scriptedBuildStage( + name: 'Build on EL 9.6', + distro:'el9', + compiler: 'gcc', + buildRpms: true, + release: env.DAOS_RELVAL, + dockerBuildArgs: dockerBuildArgs(repo_type: 'stable', + deps_build: false, + parallel_build: true) + + ' --build-arg DAOS_PACKAGES_BUILD=no' + + ' --build-arg DAOS_KEEP_SRC=yes' + + ' --build-arg REPOS="' + prRepos('el9') + '"' + + ' --build-arg POINT_RELEASE=.6' + + ' -f utils/docker/Dockerfile.el.9 .', + sconsBuildArgs: [ + parallel_build: true, + stash_files: 
'ci/test_files_to_stash.txt', + build_deps: 'no', + stash_opt: true, + scons_args: sconsArgs() + ' PREFIX=/opt/daos TARGET_TYPE=release' + ], + artifacts: "config.log-el9-gcc" + ), + 'Build on Leap 15.5': scriptedBuildStage( + name: 'Build on Leap 15.5', + distro:'leap15', + rpmDistro: 'suse.lp155', + compiler: 'gcc', + buildRpms: true, + release: env.DAOS_RELVAL, + dockerBuildArgs: dockerBuildArgs(repo_type: 'stable', + deps_build: false, + parallel_build: true) + + ' --build-arg DAOS_PACKAGES_BUILD=no' + + ' --build-arg DAOS_KEEP_SRC=yes' + + ' --build-arg POINT_RELEASE=.5' + + ' -f utils/docker/Dockerfile.leap.15 .', + sconsBuildArgs: [ + parallel_build: true, + build_deps: 'yes', + scons_args: sconsArgs() + ' PREFIX=/opt/daos TARGET_TYPE=release' + ], + artifacts: "config.log-leap15-gcc" + ), + 'Build on Leap 15.5 with Intel-C and TARGET_PREFIX': scriptedBuildStage( + name: 'Build on Leap 15.5 with Intel-C and TARGET_PREFIX', + distro:'leap15', + compiler: 'icc', + runCondition: !skip_build_stage('leap15_icc'), + buildRpms: false, + release: env.DAOS_RELVAL, + dockerBuildArgs: dockerBuildArgs(repo_type: 'stable', + deps_build: true, + parallel_build: true) + + ' --build-arg DAOS_PACKAGES_BUILD=no' + + ' --build-arg DAOS_KEEP_SRC=yes' + + ' --build-arg POINT_RELEASE=.5' + + ' --build-arg COMPILER=icc' + + ' -f utils/docker/Dockerfile.leap.15 .', + sconsBuildArgs: [ + parallel_build: true, + build_deps: 'yes', + scons_args: sconsArgs() + ' PREFIX=/opt/daos TARGET_TYPE=release' + ], + artifacts: "config.log-leap15-intelc" + ), + 'Build on EL 8.8 with Bullseye': scriptedBuildStage( + name: 'Build on EL 8.8 with Bullseye', + distro:'el8', + compiler: 'covc', + runCondition: !skip_build_stage('bullseye', 'covc') && code_coverage_enabled(), + buildRpms: true, + release: "${env.DAOS_RELVAL}.bullseye", + dockerBuildArgs: dockerBuildArgs(repo_type: 'stable', + deps_build: false, + parallel_build: true) + + ' --build-arg DAOS_PACKAGES_BUILD=no' + + ' --build-arg 
DAOS_KEEP_SRC=yes' + + ' --build-arg REPOS="' + prRepos('el8') + '"' + + ' --build-arg COMPILER=covc' + + ' --build-arg CODE_COVERAGE=true' + + ' -f utils/docker/Dockerfile.el.8 .', + sconsBuildArgs: [ + parallel_build: true, + stash_files: 'ci/test_files_to_stash.txt', + build_deps: 'no', + stash_opt: true, + scons_args: sconsArgs() + ' PREFIX=/opt/daos TARGET_TYPE=release' + + ' COMPILER=covc' + ], + artifacts: "config.log-el8-covc", + uploadTarget: 'el8' + ) + ) // parallel + } // script + } // steps + } // stage('Build') stage('Unit Tests') { when { beforeAgent true @@ -739,7 +1035,8 @@ pipeline { unitTest(timeout_time: 60, unstash_opt: true, inst_repos: daosRepos(), - inst_rpms: unitPackages())) + inst_rpms: getScriptOutput('ci/unit/required_packages.sh el8'), + compiler: 'gcc')) } post { always { @@ -761,7 +1058,8 @@ pipeline { unitTest(timeout_time: 60, unstash_opt: true, inst_repos: daosRepos(), - inst_rpms: unitPackages())) + inst_rpms: getScriptOutput('ci/unit/required_packages.sh el8'), + compiler: 'gcc')) } post { always { @@ -782,10 +1080,12 @@ pipeline { job_step_update( unitTest(timeout_time: 60, inst_repos: daosRepos(), + inst_rpms: unitPackages(), + compiler: 'gcc', test_script: 'ci/unit/test_nlt.sh', unstash_opt: true, unstash_tests: false, - inst_rpms: unitPackages())) + with_valgrind: 'memcheck')) // recordCoverage(tools: [[parser: 'COBERTURA', pattern:'nltir.xml']], // skipPublishingChecks: true, // id: 'tlc', name: 'Fault Injection Interim Report') @@ -824,7 +1124,8 @@ pipeline { unstash_opt: true, ignore_failure: true, inst_repos: daosRepos(), - inst_rpms: unitPackages())) + inst_rpms: getScriptOutput('ci/unit/required_packages.sh el8'), + compiler: 'gcc')) } post { always { @@ -849,7 +1150,8 @@ pipeline { unstash_opt: true, ignore_failure: true, inst_repos: daosRepos(), - inst_rpms: unitPackages())) + inst_rpms: getScriptOutput('ci/unit/required_packages.sh el8'), + compiler: 'gcc')) } post { always { @@ -860,8 +1162,270 @@ pipeline { } } } 
// stage('Unit Test bdev with memcheck on EL 8') + stage('Unit Test with Bullseye on EL 8.8') { + when { + beforeAgent true + expression { + runStage(['CI_BUILD_BULLSEYE': true, + 'CI_UNIT_TEST_BULLSEYE': true, + 'CI_BUILD_PACKAGES_ONLY': false], + ['Skip-unit-tests': false, + 'Skip-unit-test-bullseye': false]) + } + } + agent { + label cachedCommitPragma(pragma: 'VM1-label', def_val: params.CI_UNIT_VM1_LABEL) + } + steps { + job_step_update( + unitTest(timeout_time: 120, + unstash_opt: true, + inst_repos: daosRepos(), + inst_rpms: getScriptOutput('ci/unit/required_packages.sh el8 true'), + compiler: 'covc', + ignore_failure: true, + coverage_stash: 'unit_test_bullseye')) + } + post { + always { + unitTestPost artifacts: ['unit_test_bullseye_logs/'], + ignore_failure: true, + compiler: 'covc' + job_status_update() + } + } + } + stage('NLT with Bullseye on EL 8.8') { + when { + beforeAgent true + expression { + runStage(['CI_BUILD_BULLSEYE': true, + 'CI_NLT_TEST_BULLSEYE': true, + 'CI_BUILD_PACKAGES_ONLY': false], + ['Skip-unit-tests': false, + 'Skip-nlt-bullseye': false]) + } + } + agent { + label params.CI_NLT_1_LABEL + } + steps { + job_step_update( + unitTest(timeout_time: 120, + unstash_opt: true, + inst_repos: daosRepos(), + inst_rpms: getScriptOutput('ci/unit/required_packages.sh el8 true'), + compiler: 'covc', + test_script: 'ci/unit/test_nlt.sh', + unstash_tests: false, + ignore_failure: true, + coverage_stash: 'nlt-bullseye')) + stash(name:'nltr-bullseye', includes:'nltr-bullseye.json', allowEmpty: true) + } + post { + always { + unitTestPost artifacts: ['nlt_logs/'], + testResults: 'nlt-junit.xml', + always_script: 'ci/unit/test_nlt_post.sh', + compiler: 'covc', + NLT: true + recordIssues enabledForFailure: true, + failOnError: false, + ignoreQualityGate: true, + name: 'NLT server leaks', + qualityGates: [[threshold: 1, type: 'TOTAL', unstable: true]], + tool: issues(pattern: 'nlt-server-leaks.json', + name: 'NLT 
server results', + id: 'NLT_server'), + scm: 'daos-stack/daos' + job_status_update() + } + } + } } - } + // steps { + // script { + // parallel( + // 'Unit Test on EL 8.8': scriptedUnitTestStage( + // name: 'Unit Test on EL 8.8', + // runCondition: params.CI_UNIT_TEST, + // nodeLabel: cachedCommitPragma(pragma: 'VM1-label', def_val: params.CI_UNIT_VM1_LABEL), + // unitTestArgs: [ + // timeout_time: 60, + // unstash_opt: true, + // inst_repos: daosRepos(), + // inst_rpms: getScriptOutput('ci/unit/required_packages.sh el8'), + // testResults: 'test_results/*.xml', + // always_script: 'ci/unit/test_post_always.sh' + // ], + // unitTestPostArgs: [ + // artifacts: ['unit_test_logs/'] + // ] + // ), + // 'Unit Test bdev on EL 8.8': scriptedUnitTestStage( + // name: 'Unit Test bdev on EL 8.8', + // runCondition: params.CI_UNIT_TEST, + // nodeLabel: cachedCommitPragma(pragma: 'VM1-label', def_val: params.CI_UNIT_VM1_LABEL), + // unitTestArgs: [ + // timeout_time: 60, + // unstash_opt: true, + // inst_repos: daosRepos(), + // inst_rpms: getScriptOutput('ci/unit/required_packages.sh el8'), + // testResults: 'test_results/*.xml', + // always_script: 'ci/unit/test_post_always.sh' + // ], + // unitTestPostArgs: [ + // artifacts: ['unit_test_bdev_logs/'] + // ] + // ), + // 'NLT on EL 8.8': scriptedUnitTestStage( + // name: 'NLT on EL 8.8', + // runCondition: params.CI_NLT_TEST, + // nodeLabel: params.CI_NLT_1_LABEL, + // unitTestArgs: [ + // timeout_time: 60, + // unstash_opt: true, + // inst_repos: daosRepos(), + // inst_rpms: getScriptOutput('ci/unit/required_packages.sh el8'), + // testResults: 'nlt-junit.xml', + // always_script: 'ci/unit/test_nlt_post.sh', + // with_valgrind: 'memcheck', + // valgrind_pattern: '*memcheck.xml', + // test_script: 'ci/unit/test_nlt.sh', + // unstash_tests: false + // ], + // stashArgs: [ + // name: 'nltr', + // includes: 'nltr.json', + // allowEmpty: true + // ], + // unitTestPostArgs: [ + // artifacts: ['nlt_logs/'], + // testResults: 
'nlt-junit.xml', + // always_script: 'ci/unit/test_nlt_post.sh', + // with_valgrind: 'memcheck', + // valgrind_stash: 'el8-gcc-nlt-memcheck', + // NLT: true + // ], + // recordIssuesArgs: [ + // enabledForFailure: true, + // failOnError: false, + // ignoreQualityGate: true, + // name: 'NLT server leaks', + // qualityGates: [[threshold: 1, type: 'TOTAL', unstable: true]], + // tool: issues(pattern: 'nlt-server-leaks.json', + // name: 'NLT server results', + // id: 'NLT_server'), + // scm: 'daos-stack/daos' + // ] + // ), + // 'Unit Test with memcheck on EL 8.8': scriptedUnitTestStage( + // name: 'Unit Test with memcheck on EL 8.8', + // runCondition: params.CI_UNIT_TEST_MEMCHECK, + // nodeLabel: cachedCommitPragma(pragma: 'VM1-label', def_val: params.CI_UNIT_VM1_LABEL), + // unitTestArgs: [ + // timeout_time: 160, + // unstash_opt: true, + // inst_repos: daosRepos(), + // inst_rpms: getScriptOutput('ci/unit/required_packages.sh el8'), + // testResults: 'test_results/*.xml', + // always_script: 'ci/unit/test_post_always.sh', + // with_valgrind: 'memcheck', + // ignore_failure: true, + // ], + // unitTestPostArgs: [ + // artifacts: ['unit_test_memcheck_logs.tar.gz', + // 'unit_test_memcheck_logs/**/*.log'], + // with_valgrind: 'memcheck', + // valgrind_stash: 'el8-gcc-unit-memcheck' + // ] + // ), + // 'Unit Test bdev with memcheck on EL 8.8': scriptedUnitTestStage( + // name: 'Unit Test bdev with memcheck on EL 8.8', + // runCondition: params.CI_UNIT_TEST_MEMCHECK, + // nodeLabel: cachedCommitPragma(pragma: 'VM1-label', def_val: params.CI_UNIT_VM1_LABEL), + // unitTestArgs: [ + // timeout_time: 180, + // unstash_opt: true, + // inst_repos: daosRepos(), + // inst_rpms: getScriptOutput('ci/unit/required_packages.sh el8'), + // testResults: 'test_results/*.xml', + // always_script: 'ci/unit/test_post_always.sh', + // with_valgrind: 'memcheck', + // ignore_failure: true, + // ], + // unitTestPostArgs: [ + // artifacts: ['unit_test_memcheck_bdev_logs.tar.gz', + // 
'unit_test_memcheck_bdev_logs/**/*.log'], + // with_valgrind: 'memcheck', + // valgrind_stash: 'el8-gcc-unit-memcheck-bdev' + // ] + // ), + // 'Unit Test with Bullseye on EL 8.8': scriptedUnitTestStage( + // name: 'Unit Test with Bullseye on EL 8.8', + // runCondition: params.CI_UNIT_TEST_BULLSEYE && code_coverage_enabled(), + // nodeLabel: cachedCommitPragma(pragma: 'VM1-label', def_val: params.CI_UNIT_VM1_LABEL), + // unitTestArgs: [ + // timeout_time: 120, + // unstash_opt: true, + // inst_repos: daosRepos(), + // inst_rpms: getScriptOutput('ci/unit/required_packages.sh el8 true'), + // testResults: 'test_results/*.xml', + // always_script: 'ci/unit/test_post_always.sh', + // ignore_failure: true, + // coverage_stash: 'unit_test_bullseye' + // ], + // unitTestPostArgs: [ + // artifacts: ['unit_test_bullseye_logs/'], + // ignore_failure: true, + // code_coverage: true + // ] + // ), + // 'NLT with Bullseye on EL 8.8': scriptedUnitTestStage( + // name: 'NLT with Bullseye on EL 8.8', + // runCondition: params.CI_NLT_TEST && code_coverage_enabled(), + // nodeLabel: params.CI_NLT_1_LABEL, + // unitTestArgs: [ + // timeout_time: 120, + // unstash_opt: true, + // inst_repos: daosRepos(), + // inst_rpms: getScriptOutput('ci/unit/required_packages.sh el8 true'), + // testResults: 'nlt-junit.xml', + // always_script: 'ci/unit/test_nlt_post.sh', + // test_script: 'ci/unit/test_nlt.sh', + // unstash_tests: false, + // ignore_failure: true, + // code_coverage: true, + // coverage_stash: 'nlt-bullseye' + // ], + // stashArgs: [ + // name: 'nltr-bullseye', + // includes: 'nltr-bullseye.json', + // allowEmpty: true + // ], + // unitTestPostArgs: [ + // artifacts: ['nlt_logs/'], + // testResults: 'nlt-junit.xml', + // always_script: 'ci/unit/test_nlt_post.sh', + // code_coverage: true, + // NLT: true + // ], + // recordIssuesArgs: [ + // enabledForFailure: true, + // failOnError: false, + // ignoreQualityGate: true, + // name: 'NLT server leaks', + // qualityGates: [[threshold: 
1, type: 'TOTAL', unstable: true]], + // tool: issues(pattern: 'nlt-server-leaks.json', + // name: 'NLT server results', + // id: 'NLT_server'), + // scm: 'daos-stack/daos' + // ] + // ) + // ) // parallel + // } // script + // } // steps + } // stage('Unit Tests') stage('Test') { when { beforeAgent true @@ -904,7 +1468,7 @@ pipeline { job_step_update( functionalTest( inst_repos: daosRepos(), - inst_rpms: functionalPackages(1, next_version(), 'tests-internal'), + inst_rpms: functionalPackages(1, next_version(), add_daos_pkgs()), test_function: 'runTestFunctionalV2')) } post { @@ -1144,6 +1708,7 @@ pipeline { pragma_suffix: '-hw-medium', label: params.FUNCTIONAL_HARDWARE_MEDIUM_LABEL, next_version: next_version(), + other_daos_packages: add_daos_pkgs(), stage_tags: 'hw,medium,-provider', default_tags: startedByTimer() ? 'pr daily_regression' : 'pr', nvme: 'auto', @@ -1156,6 +1721,7 @@ pipeline { pragma_suffix: '-hw-medium-md-on-ssd', label: params.FUNCTIONAL_HARDWARE_MEDIUM_LABEL, next_version: next_version(), + other_daos_packages: add_daos_pkgs(), stage_tags: 'hw,medium,-provider', default_tags: startedByTimer() ? 'pr daily_regression' : 'pr', nvme: 'auto_md_on_ssd', @@ -1168,6 +1734,7 @@ pipeline { pragma_suffix: '-hw-medium-vmd', label: params.FUNCTIONAL_HARDWARE_MEDIUM_VMD_LABEL, next_version: next_version(), + other_daos_packages: add_daos_pkgs(), stage_tags: 'hw_vmd,medium', /* groovylint-disable-next-line UnnecessaryGetter */ default_tags: startedByTimer() ? 'pr daily_regression' : 'pr', @@ -1181,6 +1748,7 @@ pipeline { pragma_suffix: '-hw-medium-verbs-provider', label: params.FUNCTIONAL_HARDWARE_MEDIUM_VERBS_PROVIDER_LABEL, next_version: next_version(), + other_daos_packages: add_daos_pkgs(), stage_tags: 'hw,medium,provider', default_tags: startedByTimer() ? 
'pr daily_regression' : 'pr', default_nvme: 'auto', @@ -1194,6 +1762,7 @@ pipeline { pragma_suffix: '-hw-medium-verbs-provider-md-on-ssd', label: params.FUNCTIONAL_HARDWARE_MEDIUM_VERBS_PROVIDER_LABEL, next_version: next_version(), + other_daos_packages: add_daos_pkgs(), stage_tags: 'hw,medium,provider', default_tags: startedByTimer() ? 'pr daily_regression' : 'pr', default_nvme: 'auto_md_on_ssd', @@ -1207,6 +1776,7 @@ pipeline { pragma_suffix: '-hw-medium-ucx-provider', label: params.FUNCTIONAL_HARDWARE_MEDIUM_UCX_PROVIDER_LABEL, next_version: next_version(), + other_daos_packages: add_daos_pkgs(), stage_tags: 'hw,medium,provider', default_tags: startedByTimer() ? 'pr daily_regression' : 'pr', default_nvme: 'auto', @@ -1220,6 +1790,7 @@ pipeline { pragma_suffix: '-hw-large', label: params.FUNCTIONAL_HARDWARE_LARGE_LABEL, next_version: next_version(), + other_daos_packages: add_daos_pkgs(), stage_tags: 'hw,large', default_tags: startedByTimer() ? 'pr daily_regression' : 'pr', default_nvme: 'auto', @@ -1232,6 +1803,7 @@ pipeline { pragma_suffix: '-hw-large-md-on-ssd', label: params.FUNCTIONAL_HARDWARE_LARGE_LABEL, next_version: next_version(), + other_daos_packages: add_daos_pkgs(), stage_tags: 'hw,large', default_tags: startedByTimer() ? 
'pr daily_regression' : 'pr', default_nvme: 'auto_md_on_ssd', @@ -1243,6 +1815,84 @@ pipeline { } } } // stage('Test Hardware') + stage('Test Summary') { + when { + beforeAgent true + expression { true } + } + steps { + script { + parallel( + 'Bullseye Report': scriptedSummaryStage( + name: 'Bullseye Report', + distro: 'el8', + compiler: 'covc', + runCondition: code_coverage_enabled(), + nodeLabel: 'docker_runner', + dockerBuildArgs: dockerBuildArgs( + repo_type: 'stable', + deps_build: false, + parallel_build: true) + + ' --build-arg DAOS_PACKAGES_BUILD=no ' + + ' --build-arg REPOS="' + daosRepos() + '"' + + code_coverage_build_args(), + installScript: './ci/summary/install_pkgs.sh', + runScriptArgs: [ + label: 'Generate Bullseye Report', + script: 'ci/summary/bullseye_report.sh', + stashes: ['unit_test_bullseye', + 'unit_test_bdev_bullseye', + 'nlt_bullseye'] + ], + archiveArtifactsArgs: [ + artifacts: 'bullseye_report/*', + allowEmptyArchive: false + ] + ) + ) // parallel + } // script + } // steps + // parallel { + // stage('Bullseye Report') { + // when { + // beforeAgent true + // expression { code_coverage_enabled() } + // } + // agent { + // dockerfile { + // filename 'utils/docker/Dockerfile.el.8' + // label 'docker_runner' + // additionalBuildArgs dockerBuildArgs(repo_type: 'stable', + // deps_build: false, + // parallel_build: true) + + // ' --build-arg DAOS_PACKAGES_BUILD=no ' + + // ' --build-arg REPOS="' + daosRepos() + '"' + + // code_coverage_build_args() + // } + // } + // steps { + // script { + // sh label: 'Install packages', + // script: './ci/summary/install_pkgs.sh' + // job_step_update( + // runScriptWithStashes( + // label: 'Generate Bullseye Report', + // script: 'ci/summary/bullseye_report.sh', + // stashes: ['unit_test_bullseye', + // 'unit_test_bdev_bullseye', + // 'nlt_bullseye'])) + // } + // } + // post { + // always { + // archiveArtifacts artifacts: 'bullseye_report/*', + // allowEmptyArchive: false + // job_status_update() + // } 
+ // } + // } // stage('Code Coverage Report') + // } // parallel + } // stage('Test Summary') } // stages post { always { diff --git a/ci/bullseye_generate_report.sh b/ci/bullseye_generate_report.sh deleted file mode 100755 index d8e7421e90d..00000000000 --- a/ci/bullseye_generate_report.sh +++ /dev/null @@ -1,23 +0,0 @@ -#!/bin/bash - -set -eux - -if [ ! -d '/opt/BullseyeCoverage/bin' ]; then - echo 'Bullseye not found.' - exit 1 -fi -export COVFILE="$WORKSPACE/test.cov" -export PATH="/opt/BullseyeCoverage/bin:$PATH" - -mv "$WORKSPACE/test.cov_1" "$COVFILE" -if [ -e "$WORKSPACE/test.cov_2" ]; then - covmerge --no-banner --file "$COVFILE" "$WORKSPACE"/test.cov_* -fi - -if [ ! -e "$COVFILE" ]; then - echo "Coverage file $COVFILE is missing" -else - ls -l "$COVFILE" -fi - -java -jar bullshtml.jar test_coverage diff --git a/ci/provisioning/post_provision_config_common_functions.sh b/ci/provisioning/post_provision_config_common_functions.sh index c3b11439d47..d820c543ff4 100755 --- a/ci/provisioning/post_provision_config_common_functions.sh +++ b/ci/provisioning/post_provision_config_common_functions.sh @@ -2,7 +2,7 @@ # # Copyright 2022-2023 Intel Corporation. 
# Copyright 2025 Google LLC -# Copyright 2025 Hewlett Packard Enterprise Development LP +# Copyright 2025-2026 Hewlett Packard Enterprise Development LP # # SPDX-License-Identifier: BSD-2-Clause-Patent # @@ -60,6 +60,18 @@ add_repo() { fi } +add_inst_repo() { + local repo="$1" + local branch="$2" + local build_number="$3" + local repo_url="${ARTIFACTS_URL:-${JENKINS_URL}job/}"daos-stack/job/"$repo"/job/"${branch//\//%252F}"/"$build_number"/artifact/artifacts/$DISTRO_NAME/ + dnf -y config-manager --add-repo="$repo_url" + repo="$(url_to_repo "$repo_url")" + # PR-repos: should always be able to upgrade modular packages + dnf -y config-manager --save --setopt "$repo.module_hotfixes=true" "$repo" + disable_gpg_check "$repo_url" +} + disable_gpg_check() { local url="$1" @@ -377,16 +389,7 @@ post_provision_config_nodes() { branch="${branch%:*}" fi fi - local subdir - if ! $COVFN_DISABLED; then - subdir="bullseye/" - fi - local repo_url="${ARTIFACTS_URL:-${JENKINS_URL}job/}"daos-stack/job/"$repo"/job/"${branch//\//%252F}"/"$build_number"/artifact/artifacts/"${subdir:-}"$DISTRO_NAME/ - dnf -y config-manager --add-repo="$repo_url" - repo="$(url_to_repo "$repo_url")" - # PR-repos: should always be able to upgrade modular packages - dnf -y config-manager --save --setopt "$repo.module_hotfixes=true" "$repo" - disable_gpg_check "$repo_url" + add_inst_repo "${repo}" "${branch}" "${build_number}" done # start with everything fully up-to-date diff --git a/ci/rpm/build_deps.sh b/ci/rpm/build_deps.sh index f989a96332a..1b31b88d385 100755 --- a/ci/rpm/build_deps.sh +++ b/ci/rpm/build_deps.sh @@ -1,3 +1,16 @@ #!/bin/bash +# +# Copyright 2025-2026 Hewlett Packard Enterprise Development LP +# +# Build DAOS dependencies +code_coverage="${1:-false}" +bullseye_key="${2:-}" +mydir="$(cd "$(dirname "${BASH_SOURCE[0]}")" >/dev/null 2>&1 && pwd)" + cd /home/daos/pre || exit 1 scons install --build-deps=only USE_INSTALLED=all PREFIX=/opt/daos TARGET_TYPE=release -j 32 + +if [[ "${code_coverage}" 
== "true" ]] ; then + pushd "${mydir}/../.." || exit 1 + utils/rpms/bullseye_build.sh "${bullseye_key}" +fi diff --git a/ci/rpm/gen_rpms.sh b/ci/rpm/gen_rpms.sh index 30a4e248952..aef6e2544df 100755 --- a/ci/rpm/gen_rpms.sh +++ b/ci/rpm/gen_rpms.sh @@ -16,20 +16,21 @@ if [ -e "${ci_envs}" ]; then source "${ci_envs}" fi -env +env | sort -n pushd "${mydir}/../.." || exit 1 export DISTRO="${1}" export DAOS_RELVAL="${2}" +code_coverage="${3:-false}" rm -f ./*.rpm rm -rf /home/daos/rpms/* -utils/rpms/build_packages.sh deps +utils/rpms/build_packages.sh deps "${code_coverage}" if ls -1 ./*.rpm; then mkdir -p /home/daos/rpms/deps cp ./*.rpm /home/daos/rpms/deps rm -f ./*.rpm fi -utils/rpms/build_packages.sh daos +utils/rpms/build_packages.sh daos "${code_coverage}" mkdir -p /home/daos/rpms/daos cp ./*.rpm /home/daos/rpms/daos popd || exit 1 diff --git a/ci/rpm/install_deps.sh b/ci/rpm/install_deps.sh index 7ac5ff26622..02806a845c3 100755 --- a/ci/rpm/install_deps.sh +++ b/ci/rpm/install_deps.sh @@ -21,6 +21,7 @@ env pushd "${mydir}/../.." 
|| exit 1 export DISTRO="${1}" export DAOS_RELVAL="${2}" +code_coverage="${3:-false}" libfabric_pkg="$(utils/rpms/package_version.sh libfabric dev)" mercury_pkg="$(utils/rpms/package_version.sh mercury dev)" argobots_pkg="$(utils/rpms/package_version.sh argobots dev)" @@ -38,4 +39,10 @@ sudo dnf install --allowerasing -y "${fused_pkg}" || echo "${fused_pkg} not avai sudo dnf install --allowerasing -y "${pmdk_pkg}" || echo "${pmdk_pkg} not available" sudo dnf install --allowerasing -y "${isal_pkg}" || echo "${isal_pkg} not available" sudo dnf install --allowerasing -y "${isal_crypto_pkg}" || echo "${isal_crypto_pkg} not available" + +if [[ "${code_coverage}" == "true" ]] ; then + bullseye_pkg="$(utils/rpms/package_version.sh bullseye normal)" + sudo dnf install --allowerasing -y "${bullseye_pkg}" || echo "${bullseye_pkg} not available" +fi + popd || exit 1 diff --git a/ci/summary/bullseye_report.sh b/ci/summary/bullseye_report.sh new file mode 100755 index 00000000000..7e4374c8877 --- /dev/null +++ b/ci/summary/bullseye_report.sh @@ -0,0 +1,42 @@ +#!/bin/bash +# +# Copyright 2026 Hewlett Packard Enterprise Development LP +# +# Script for generating a bullseye code coverage report summary +set -uex + +if [ ! -d '/opt/BullseyeCoverage/bin' ]; then + echo 'Bullseye not found.' + exit 1 +fi +export COVFILE="${WORKSPACE:-/tmp}/test.cov" +export PATH="/opt/BullseyeCoverage/bin:$PATH" + +# Merge all coverage files +cp /opt/BullseyeCoverage/daos/test.cov "${COVFILE}" +readarray -t cov_files < <(find "${WORKSPACE}" -name test.cov) +if [ ${#cov_files[@]} -gt 0 ]; then + covmerge --no-banner --file "${COVFILE}" "${cov_files[@]}" +fi + +if [ ! -e "$COVFILE" ]; then + echo "Coverage file ${COVFILE} is missing" + exit 1 +else + ls -al "${COVFILE}" + # covdir -m +fi + +# Generate the html report +rm -fr bullseye_code_coverage_report || true +mkdir bullseye_code_coverage_report +cp /opt/BullseyeCoverage/bin/bullseye_sources.tar.gz . 
+tar -xf bullseye_sources.tar.gz +covhtml --srcdir . --file test.cov bullseye_code_coverage_report +ls -al bullseye_code_coverage_report + +# rm -fr bullseye_code_coverage_report || true +# mkdir bullseye_code_coverage_report +# java -jar bullshtml.jar bullseye_code_coverage_report +# ls -al bullseye_code_coverage_report + diff --git a/ci/summary/install_pkgs.sh b/ci/summary/install_pkgs.sh new file mode 100755 index 00000000000..b174fe94b7d --- /dev/null +++ b/ci/summary/install_pkgs.sh @@ -0,0 +1,45 @@ +#!/bin/bash +# +# Copyright 2026 Hewlett Packard Enterprise Development LP +# +# Script for installing packages used for CI summary steps +set -uex + +id +if [ "$(id -u)" = "0" ]; then + echo "Should not be run as root" + exit 1 +fi + +mydir="$(cd "$(dirname "${BASH_SOURCE[0]}")" >/dev/null 2>&1 && pwd)" +files=("$mydir/../parse_ci_envs.sh") +files+=("$mydir/../provision/post_provision_config_common_functions.sh") +for src_file in "${files[@]}"; do + if [ -e "${src_file}" ]; then + # shellcheck source=parse_ci_envs.sh disable=SC1091 + source "${src_file}" + fi +done + +env | sort -n + +# Add a repo for this build +add_inst_repo "daos" "${BRANCH_NAME}" "${BUILD_NUMBER}" + +# Install bullseye +bullseye_pkg="$(utils/rpms/package_version.sh bullseye normal)" +sudo dnf install --allowerasing -y "${bullseye_pkg}" || echo "${bullseye_pkg} not available" + +# # Install bullshtml +# bullshtml_vers=1.0.5 +# bullshtml_src=https://storage.googleapis.com/google-code-archive-downloads/v2/code.google.com/bullshtml +# bullshtml_tar="bullshtml_${bullshtml_vers}.tar.gz" +# if [ -n "${DAOS_HTTPS_PROXY:-}" ]; then +# export https_proxy="${DAOS_HTTPS_PROXY}" +# fi +# sudo dnf install -y wget +# wget "${bullshtml_src}/${bullshtml_tar}" +# tar --strip-components=1 -xf "${bullshtml_tar}" + +# bullshtml_pkg="$(utils/rpms/package_version.sh bullshtml normal)" +# sudo dnf install --allowerasing -y "${bullshtml_pkg}" || echo "${bullshtml_pkg} not available" diff --git 
a/ci/unit/required_packages.sh b/ci/unit/required_packages.sh index bbd3313155a..990545a2b4f 100755 --- a/ci/unit/required_packages.sh +++ b/ci/unit/required_packages.sh @@ -2,45 +2,63 @@ set -eu -# No longer used but provided by pipeline-lib -# distro="$1" -# quick_build="${2:-false}" +distro="${1:-el8}" +code_coverage="${2:-false}" OPENMPI_VER="" PY_MINOR_VER="" -export DISTRO="el8" # should also work for el9 -pkgs="$(utils/rpms/package_version.sh argobots lib) \ - boost-python3$PY_MINOR_VER-devel \ - capstone \ - $(utils/rpms/package_version.sh argobots lib) \ - $(utils/rpms/package_version.sh argobots debug) \ - $(utils/rpms/package_version.sh daos_spdk dev) \ - $(utils/rpms/package_version.sh daos_spdk debug) \ - $(utils/rpms/package_version.sh isal dev) \ - $(utils/rpms/package_version.sh isal_crypto lib) \ - $(utils/rpms/package_version.sh isal_crypto debug) \ - $(utils/rpms/package_version.sh libfabric dev) \ - $(utils/rpms/package_version.sh libfabric debug) \ - $(utils/rpms/package_version.sh mercury dev) \ - $(utils/rpms/package_version.sh mercury debug) \ - $(utils/rpms/package_version.sh pmdk lib pmemobj) \ - $(utils/rpms/package_version.sh pmdk debug pmemobj) \ - $(utils/rpms/package_version.sh pmdk debug pmem) \ - fuse3 \ - gotestsum \ - hwloc-devel \ - libasan \ - libipmctl-devel \ - libyaml-devel \ - numactl \ - numactl-devel \ - openmpi$OPENMPI_VER \ - patchelf \ - pciutils-devel \ - protobuf-c \ - valgrind-devel" +export DISTRO="${distro}" +pkgs=("$(utils/rpms/package_version.sh argobots lib)") +pkgs+=("boost-python3${PY_MINOR_VER}-devel") +pkgs+=("capstone") +pkgs+=("$(utils/rpms/package_version.sh argobots lib)") +pkgs+=("$(utils/rpms/package_version.sh argobots debug)") +pkgs+=("$(utils/rpms/package_version.sh daos_spdk dev)") +pkgs+=("$(utils/rpms/package_version.sh daos_spdk debug)") +pkgs+=("$(utils/rpms/package_version.sh isal dev)") +pkgs+=("$(utils/rpms/package_version.sh isal_crypto lib)") +pkgs+=("$(utils/rpms/package_version.sh 
isal_crypto debug)") +pkgs+=("$(utils/rpms/package_version.sh libfabric dev)") +pkgs+=("$(utils/rpms/package_version.sh libfabric debug)") +pkgs+=("$(utils/rpms/package_version.sh mercury dev)") +pkgs+=("$(utils/rpms/package_version.sh mercury debug)") +pkgs+=("$(utils/rpms/package_version.sh pmdk lib pmemobj)") +pkgs+=("$(utils/rpms/package_version.sh pmdk debug pmemobj)") +pkgs+=("$(utils/rpms/package_version.sh pmdk debug pmem)") +pkgs+=("fuse3") +pkgs+=("gotestsum") +pkgs+=("hwloc-devel") +pkgs+=("libasan") +pkgs+=("libipmctl-devel") +pkgs+=("libyaml-devel") +pkgs+=("numactl") +pkgs+=("numactl-devel") +pkgs+=("openmpi${OPENMPI_VER}") +pkgs+=("patchelf") +pkgs+=("pciutils-devel") +pkgs+=("protobuf-c") +pkgs+=("valgrind-devel") + +if [ "${code_coverage}" == "true" ] ; then + pkgs+=("$(utils/rpms/package_version.sh bullseye normal)") + case "${distro}" in + el*|centos*|rocky*|rhel*|alma*) + pkgs+=("java-1.8.0-openjdk") + ;; + ubuntu*) + pkgs+=("openjdk-8-jdk") + ;; + leap*|sles*) + pkgs+=("java-1_8_0-openjdk-devel") + ;; + *) + echo "Unknown java package for ${distro} with code coverage" + exit 1 + ;; + esac +fi # output with trailing newline suppressed -echo -e "$pkgs\c" +printf "${pkgs[*]}" exit 0 diff --git a/ci/unit/test_main.sh b/ci/unit/test_main.sh index d7bbdb5358a..8097d7341b8 100755 --- a/ci/unit/test_main.sh +++ b/ci/unit/test_main.sh @@ -1,7 +1,7 @@ #!/bin/bash # # Copyright 2020-2023 Intel Corporation. 
-# Copyright 2025 Hewlett Packard Enterprise Development LP +# Copyright 2025-2026 Hewlett Packard Enterprise Development LP # # SPDX-License-Identifier: BSD-2-Clause-Patent # @@ -19,24 +19,14 @@ rm -rf test_results mkdir test_results chmod 777 test_results -# Check if this is a Bulleye stage -USE_BULLSEYE=false +# Check if this is a bdev stage BDEV_TEST=false case $STAGE_NAME in - *Bullseye**) - USE_BULLSEYE=true - ;; *bdev**) BDEV_TEST=true ;; esac -if $USE_BULLSEYE; then - rm -rf bullseye - mkdir -p bullseye - tar -C bullseye --strip-components=1 -xf bullseye.tar -fi - NODE=${NODELIST%%,*} # Copy over the install tree and some of the build tree. diff --git a/ci/unit/test_main_node.sh b/ci/unit/test_main_node.sh index 0afbf26fea6..09ab4bfb37f 100755 --- a/ci/unit/test_main_node.sh +++ b/ci/unit/test_main_node.sh @@ -1,7 +1,7 @@ #!/bin/bash # # Copyright 2020-2023 Intel Corporation. -# Copyright 2025 Hewlett Packard Enterprise Development LP +# Copyright 2025-2026 Hewlett Packard Enterprise Development LP # # SPDX-License-Identifier: BSD-2-Clause-Patent # @@ -26,18 +26,12 @@ sudo mount --bind build "${SL_SRC_DIR}" log_prefix="unit_test" -: "${BULLSEYE:=}" -if [ -n "$BULLSEYE" ]; then - pushd "${SL_SRC_DIR}/bullseye" - set +x - echo + sudo ./install --quiet --key "**********" --prefix /opt/BullseyeCoverage - sudo ./install --quiet --key "${BULLSEYE}" --prefix /opt/BullseyeCoverage - set -x - popd - rm -rf bullseye +: "${BULLSEYE_DIR:=/opt/BullseyeCoverage}" +if [[ -d "${BULLSEYE_DIR}" ]]; then export COVFILE="${SL_SRC_DIR}/test.cov" - export PATH="/opt/BullseyeCoverage/bin:$PATH" - log_prefix="covc_test" + export PATH="${BULLSEYE_DIR}/bin:$PATH" + cp "${BULLSEYE_DIR}/daos/test.cov" "${COVFILE}" + ls -al "${COVFILE}" fi cd "${SL_SRC_DIR}" @@ -93,5 +87,19 @@ pip install --requirement requirements-utest.txt pip install /opt/daos/lib/daos/python/ +if [[ -n "${COVFILE:-}" ]]; then + echo "Code coverage before running unit tests:" + /opt/BullseyeCoverage/bin/covdir 
--file "${COVFILE}" || true +fi + HTTPS_PROXY="${DAOS_HTTPS_PROXY:-}" utils/run_utest.py $RUN_TEST_VALGRIND \ --no-fail-on-error $VDB_ARG --log_dir="$test_log_dir" $SUDO_ARG + +if [[ -n "${COVFILE:-}" ]]; then + echo "Code coverage after running unit tests:" + /opt/BullseyeCoverage/bin/covdir --file "${COVFILE}" || true + + # Copy bullseye file to expected location for stashing + cp "${COVFILE}" /tmp/test.cov + ls -al /tmp/test.cov || true +fi diff --git a/ci/unit/test_nlt_node.sh b/ci/unit/test_nlt_node.sh index fa422586ad9..7f4db9f41c8 100755 --- a/ci/unit/test_nlt_node.sh +++ b/ci/unit/test_nlt_node.sh @@ -41,5 +41,35 @@ pip install /opt/daos/lib/daos/python/ sudo prlimit --nofile=1024:262144 --pid $$ prlimit -n -HTTPS_PROXY="${DAOS_HTTPS_PROXY:-}" ./utils/node_local_test.py --max-log-size 1950MiB \ - --dfuse-dir /localhome/jenkins/ --log-usage-save nltir.xml --log-usage-export nltr.json all +nlt_args=() +nlt_args+=(--max-log-size 1950MiB) +nlt_args+=(--dfuse-dir /localhome/jenkins/) +nlt_args+=(--log-usage-save nltir.xml) +nlt_args+=(--log-usage-export nltr.json) + +echo "[DEBUG] BULLSEYE_DIR: ${BULLSEYE_DIR:-}" +echo "[DEBUG] COVFILE: ${COVFILE:-}" + +: "${BULLSEYE_DIR:=/opt/BullseyeCoverage}" +if [ -d "${BULLSEYE_DIR}" ]; then + export COVFILE="/tmp/test.cov" + export PATH="${BULLSEYE_DIR}/bin:$PATH" + cp "${BULLSEYE_DIR}/daos/test.cov" "${COVFILE}" + ls -al "${COVFILE}" + nlt_args+=(--memcheck no) +fi + +echo "[DEBUG] BULLSEYE_DIR: ${BULLSEYE_DIR:-}" +echo "[DEBUG] COVFILE: ${COVFILE:-}" + +if [ -e "${COVFILE}" ]; then + echo "Code coverage before running unit tests:" + /opt/BullseyeCoverage/bin/covdir --file "${COVFILE}" || true +fi + +HTTPS_PROXY="${DAOS_HTTPS_PROXY:-}" ./utils/node_local_test.py "${nlt_args[@]}" all + +if [ -e "${COVFILE}" ]; then + echo "Code coverage after running unit tests:" + /opt/BullseyeCoverage/bin/covdir --file "${COVFILE}" || true +fi diff --git a/ci/unit/test_nlt_post.sh b/ci/unit/test_nlt_post.sh index c46a63dac2f..9068613d037 
100755 --- a/ci/unit/test_nlt_post.sh +++ b/ci/unit/test_nlt_post.sh @@ -15,7 +15,7 @@ mkdir nlt_logs # standard wildcards. rsync -v -dprt -e "ssh $SSH_KEY_ARGS" jenkins@"$NODE":/tmp/ \ --filter="include dnt*.log" --filter="include dnt*.log.bz2" \ - --filter="include dnt_fi_*_logs" \ + --filter="include dnt_fi_*_logs" --filter="include test.cov" \ --filter="exclude *" nlt_logs/ rsync -v -dpt -z -e "ssh $SSH_KEY_ARGS" jenkins@"$NODE":build/ \ diff --git a/ci/unit/test_post_always.sh b/ci/unit/test_post_always.sh index 5d48ab5482d..1d94f178bbe 100755 --- a/ci/unit/test_post_always.sh +++ b/ci/unit/test_post_always.sh @@ -26,15 +26,24 @@ ssh "$SSH_KEY_ARGS" jenkins@"$NODE" \ $(cat "$mydir/test_post_always_node.sh")" case $STAGE_NAME in - *Bullseye*) - test_log_dir="covc_test_logs" - ;; - *memcheck*) - test_log_dir="unit_test_memcheck_logs" - ;; - *Unit*) - test_log_dir="unit_test_logs" - ;; + "Unit Test on "*) + test_log_dir="unit_test_logs" + ;; + "Unit Test bdev on "*) + test_log_dir="unit_test_bdev_logs" + ;; + "NLT on "*) + test_log_dir="nlt_logs" + ;; + "Unit Test with memcheck on "*) + test_log_dir="unit_test_memcheck_logs" + ;; + "Unit Test bdev with memcheck on "*) + test_log_dir="unit_test_memcheck_bdev_logs" + ;; + *) + test_log_dir="unkown_test_logs" + ;; esac mkdir -p "$test_log_dir" diff --git a/site_scons/prereq_tools/base.py b/site_scons/prereq_tools/base.py index a1cd84fab2a..4bda64e7161 100644 --- a/site_scons/prereq_tools/base.py +++ b/site_scons/prereq_tools/base.py @@ -1,6 +1,6 @@ # Copyright 2016-2024 Intel Corporation # Copyright 2025 Google LLC -# Copyright 2025 Hewlett Packard Enterprise Development LP +# Copyright 2025-2026 Hewlett Packard Enterprise Development LP # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal @@ -712,33 +712,31 @@ def _setup_compiler(self): os.remove(covfile) commands = [['$COV01', '-1'], ['$COV01', '-s'], - 
['$CVS', '--add', '!**/src/cart/test/utest/'], + ['$CVS', '--add', '!**/src/tests/'], + ['$CVS', '--add', '!**/src/bio/smd/tests/'], + ['$CVS', '--add', '!**/src/cart/crt_self_test.h'], + ['$CVS', '--add', '!**/src/cart/crt_self_test_client.c'], + ['$CVS', '--add', '!**/src/cart/crt_self_test_service.c'], + ['$CVS', '--add', '!**/src/client/api/tests/'], ['$CVS', '--add', '!**/src/common/tests/'], + ['$CVS', '--add', '!**/src/common/tests_dmg_helpers.c'], + ['$CVS', '--add', '!**/src/common/tests_lib.c'], + ['$CVS', '--add', '!**/src/dtx/tests/'], + ['$CVS', '--add', '!**/src/engine/tests/'], + ['$CVS', '--add', '!**/src/gurt/examples/'], ['$CVS', '--add', '!**/src/gurt/tests/'], - ['$CVS', '--add', '!**/src/iosrv/tests/'], ['$CVS', '--add', '!**/src/mgmt/tests/'], ['$CVS', '--add', '!**/src/object/tests/'], + ['$CVS', '--add', '!**/src/placement/ring_map.c'], ['$CVS', '--add', '!**/src/placement/tests/'], ['$CVS', '--add', '!**/src/rdb/tests/'], ['$CVS', '--add', '!**/src/security/tests/'], - ['$CVS', '--add', '!**/src/utils/self_test/'], + ['$CVS', '--add', '!**/src/utils/daos_autotest.c'], + ['$CVS', '--add', '!**/src/utils/crt_launch/'], ['$CVS', '--add', '!**/src/utils/ctl/'], + ['$CVS', '--add', '!**/src/utils/self_test/'], ['$CVS', '--add', '!**/src/vea/tests/'], - ['$CVS', '--add', '!**/src/vos/tests/'], - ['$CVS', '--add', '!**/src/engine/tests/'], - ['$CVS', '--add', '!**/src/tests/'], - ['$CVS', '--add', '!**/src/bio/smd/tests/'], - ['$CVS', '--add', '!**/src/cart/crt_self_test.h'], - ['$CVS', '--add', '!**/src/cart/crt_self_test_client.c'], - ['$CVS', '--add', '!**/src/cart/crt_self_test_service.c'], - ['$CVS', '--add', '!**/src/client/api/tests/'], - ['$CVS', '--add', '!**/src/client/dfuse/test/'], - ['$CVS', '--add', '!**/src/gurt/examples/'], - ['$CVS', '--add', '!**/src/utils/crt_launch/'], - ['$CVS', '--add', '!**/src/utils/daos_autotest.c'], - ['$CVS', '--add', '!**/src/placement/ring_map.c'], - ['$CVS', '--add', 
'!**/src/common/tests_dmg_helpers.c'], - ['$CVS', '--add', '!**/src/common/tests_lib.c']] + ['$CVS', '--add', '!**/src/vos/tests/']] if not RUNNER.run_commands(commands): raise BuildFailure("cov01") diff --git a/src/tests/ftest/container/list.yaml b/src/tests/ftest/container/list.yaml index 77ca02a2ca3..24576c2878a 100644 --- a/src/tests/ftest/container/list.yaml +++ b/src/tests/ftest/container/list.yaml @@ -2,7 +2,7 @@ hosts: test_servers: 1 test_clients: 1 -timeout: 360 +timeout: 460 server_config: name: daos_server diff --git a/src/tests/ftest/control/version.py b/src/tests/ftest/control/version.py index 47ddd1f6752..c0ddeadc91f 100644 --- a/src/tests/ftest/control/version.py +++ b/src/tests/ftest/control/version.py @@ -1,16 +1,18 @@ ''' (C) Copyright 2018-2023 Intel Corporation. - (C) Copyright 2025 Hewlett Packard Enterprise Development LP + (C) Copyright 2025-2026 Hewlett Packard Enterprise Development LP SPDX-License-Identifier: BSD-2-Clause-Patent ''' import json import re +from getpass import getuser from apricot import TestWithServers from ClusterShell.NodeSet import NodeSet +from command_utils_base import EnvironmentVariables from general_utils import append_error, report_errors -from run_utils import run_remote +from run_utils import command_as_user, run_remote from server_utils_base import DaosServerCommandRunner @@ -63,7 +65,9 @@ def test_version(self): # Get daos_agent version. 
daos_agent_version = None - daos_agent_cmd = "daos_agent --json version" + env = EnvironmentVariables() + env["COVFILE"] = self.test_env.bullseye_file + daos_agent_cmd = command_as_user("daos_agent --json version", getuser(), env) result = run_remote(self.log, NodeSet(self.hostlist_servers[0]), daos_agent_cmd) if not result.passed: self.fail("Failed to get daos_agent version") diff --git a/src/tests/ftest/util/code_coverage_utils.py b/src/tests/ftest/util/code_coverage_utils.py index 3a4042872b2..8743209f3e5 100644 --- a/src/tests/ftest/util/code_coverage_utils.py +++ b/src/tests/ftest/util/code_coverage_utils.py @@ -1,13 +1,16 @@ """ (C) Copyright 2022-2023 Intel Corporation. + (C) Copyright 2026 Hewlett Packard Enterprise Development LP SPDX-License-Identifier: BSD-2-Clause-Patent """ +import glob import os +import shutil # pylint: disable=import-error,no-name-in-module from util.collection_utils import archive_files -from util.run_utils import run_remote +from util.run_utils import run_local, run_remote class CodeCoverage(): @@ -73,7 +76,7 @@ def setup(self, logger, result): logger.debug( "Updating %s bullseye code coverage file permissions", self.__test_env.bullseye_file) - command = ["chmod", "777", self.__test_env.bullseye_file] + command = ["chmod", "666", self.__test_env.bullseye_file] if not run_remote(logger, self.__hosts, " ".join(command)).passed: message = "Error updating bullseye code coverage file on at least one host" result.fail_test(logger, "Run", message, None) @@ -97,28 +100,29 @@ def finalize(self, logger, job_results_dir, result): return True logger.debug("-" * 80) - logger.debug("Collecting bullseye code coverage information on %s:", self.__hosts) + logger.debug("Collecting bullseye code coverage information from %s:", self.__hosts) bullseye_path, bullseye_file = os.path.split(self.__test_env.bullseye_file) bullseye_dir = os.path.join(job_results_dir, "bullseye_coverage_logs") status = archive_files( logger, "bullseye coverage log files", 
self.__hosts, bullseye_path, - "".join([bullseye_file, "*"]), bullseye_dir, 1, None, 900, result) - - # Rename bullseye_coverage_logs.host/test.cov.* to bullseye_coverage_logs/test.host.cov.* - for item in os.listdir(job_results_dir): - item_full = os.path.join(job_results_dir, item) - if os.path.isdir(item_full) and "bullseye_coverage_logs" in item: - host_ext = os.path.splitext(item) - if len(host_ext) > 1: - os.makedirs(bullseye_dir, exist_ok=True) - for name in os.listdir(item_full): - old_file = os.path.join(item_full, name) - if os.path.isfile(old_file): - new_name = name.split(".") - new_name.insert(1, host_ext[-1][1:]) - new_file_name = ".".join(new_name) - new_file = os.path.join(bullseye_dir, new_file_name) - logger.debug("Renaming %s to %s", old_file, new_file) - os.rename(old_file, new_file) - return status == 0 + "".join([bullseye_file, "*"]), bullseye_dir, 1, None, 900, result, compress=False) + if status != 0: + message = "Error retrieving bullseye code coverage files from at least one host" + result.fail_test(logger, "Run", message, None) + return False + + # Combine the bullseye code coverage files from each host into one file + logger.debug("Merging bullseye code coverage files") + os.makedirs(bullseye_dir, exist_ok=True) + shutil.copy(self.__test_env.bullseye_src, bullseye_dir) + command = ("/opt/BullseyeCoverage/bin/covmerge --no-banner --file " + f"{os.path.join(bullseye_dir, 'test.cov')} {bullseye_dir}.*/test.cov") + if not run_local(logger, command).passed: + message = "Error merging bullseye code coverage files" + result.fail_test(logger, "Run", message, None) + return False + for directory in glob.glob(f"{bullseye_dir}.*"): + if os.path.isdir(directory): + shutil.rmtree(directory, ignore_errors=True) + return True diff --git a/src/tests/ftest/util/collection_utils.py b/src/tests/ftest/util/collection_utils.py index 7c0d3ccf08d..ec38c6c3c07 100644 --- a/src/tests/ftest/util/collection_utils.py +++ b/src/tests/ftest/util/collection_utils.py 
@@ -1,6 +1,6 @@ """ (C) Copyright 2022-2024 Intel Corporation. - (C) Copyright 2025 Hewlett Packard Enterprise Development LP + (C) Copyright 2025-2026 Hewlett Packard Enterprise Development LP SPDX-License-Identifier: BSD-2-Clause-Patent """ @@ -225,7 +225,7 @@ def check_server_storage(logger, test, test_result, stage): def archive_files(logger, summary, hosts, source, pattern, destination, depth, threshold, timeout, - test_result, test=None): + test_result, test=None, compress=True): # pylint: disable=too-many-arguments """Archive the files from the source to the destination. @@ -241,6 +241,7 @@ def archive_files(logger, summary, hosts, source, pattern, destination, depth, t timeout (int): number of seconds to wait for the command to complete. test_result (TestResult): the test result used to update the status of the test test (TestInfo, optional): the test information. Defaults to None. + compress (bool, optional): compress files before transfer. Defaults to True Returns: int: status code: 0 = success, 16 = failure @@ -276,8 +277,9 @@ def archive_files(logger, summary, hosts, source, pattern, destination, depth, t # Remove any empty files return_code |= remove_empty_files(logger, file_hosts, source, pattern, depth, test_result) - # Compress any files larger than 1 MB - return_code |= compress_files(logger, file_hosts, source, pattern, depth, test_result) + if compress: + # Compress any files larger than 1 MB + return_code |= compress_files(logger, file_hosts, source, pattern, depth, test_result) # Move the test files to the test-results directory on this host return_code |= move_files( diff --git a/src/tests/ftest/util/environment_utils.py b/src/tests/ftest/util/environment_utils.py index 8835fd90eb6..e6b78d2bdd4 100644 --- a/src/tests/ftest/util/environment_utils.py +++ b/src/tests/ftest/util/environment_utils.py @@ -1,6 +1,6 @@ """ (C) Copyright 2018-2024 Intel Corporation. 
- (C) Copyright 2025 Hewlett Packard Enterprise Development LP + (C) Copyright 2025-2026 Hewlett Packard Enterprise Development LP SPDX-License-Identifier: BSD-2-Clause-Patent """ @@ -177,8 +177,9 @@ def set_defaults(self, logger, servers=None, clients=None, provider=None, insecu if self.insecure_mode is None: self.insecure_mode = "True" if self.bullseye_src is None: - self.bullseye_src = os.path.join( - os.path.dirname(os.path.abspath(__file__)), "..", "test.cov") + self.bullseye_src = "/opt/BullseyeCoverage/daos/test.cov" + # self.bullseye_src = os.path.join( + # os.path.dirname(os.path.abspath(__file__)), "..", "test.cov") if self.bullseye_file is None: self.bullseye_file = os.path.join(os.sep, "tmp", "test.cov") if self.daos_prefix is None: diff --git a/utils/docker/Dockerfile.el.8 b/utils/docker/Dockerfile.el.8 index bf80f3fc122..aa2709c7514 100644 --- a/utils/docker/Dockerfile.el.8 +++ b/utils/docker/Dockerfile.el.8 @@ -1,6 +1,6 @@ # Copyright 2018-2024 Intel Corporation # Copyright 2025 Google LLC -# Copyright 2025 Hewlett Packard Enterprise Development LP +# Copyright 2025-2026 Hewlett Packard Enterprise Development LP # All rights reserved. 
# # 'recipe' for Docker to build an image of EL 8 based @@ -69,6 +69,20 @@ RUN mkdir -p /opt/daos /mnt/daos /var/run/daos_server /var/run/daos_agent /home/ USER daos_server:daos_server +# # Install bullseye for code coverage reporting +# ARG BULLSEYE_KEY +# RUN [ "x$BULLSEYE_KEY" != "x" ] || { \ +# dnf install xz && dnf clean all && \ +# curl ${JENKINS_URL}job/daos-stack/job/tools/job/master/lastSuccessfulBuild/artifact/bullseyecoverage-linux.tar \ +# --retry 10 --retry-max-time 60 --silent --show-error -o bullseye.tar.xz && \ +# mkdir -p bullseye && \ +# tar -C bullseye --strip-components=1 -xf bullseye.tar.xz && \ +# pushd bullseye && \ +# ./install --quiet --key "${BULLSEYE_KEY}" --prefix /opt/BullseyeCoverage && \ +# ls -aR && ls -aR /opt/BullseyeCoverage && \ +# popd && rm -rf bullseye.tar.xz bullseye; \ +# } + # Setup a python venv so that python packages can be installed locally. RUN python3 -m venv /home/daos/venv ENV PATH=/home/daos/venv/bin:$PATH @@ -96,6 +110,7 @@ ARG DAOS_DEPS_BUILD=yes ARG DAOS_KEEP_BUILD=no ARG DAOS_TARGET_TYPE=release ARG DAOS_PACKAGES_BUILD=yes +ARG CODE_COVERAGE=false # Now do an update to ensure software is up to date for the deps build. 
If the # src hasn't changed then this won't do anything, but if it has then we want to @@ -121,7 +136,7 @@ COPY --chown=daos_server:daos_server utils/sl utils/sl # Build third party RPMs RUN [ "$DAOS_PACKAGES_BUILD" != "yes" ] || [ "$DAOS_DEPS_BUILD" != "yes" ] || { \ export DISTRO="el8" && \ - utils/rpms/build_packages.sh deps && \ + utils/rpms/build_packages.sh deps "$CODE_COVERAGE" && \ mkdir -p /home/daos/rpms && \ mv *.rpm /home/daos/rpms; \ } @@ -167,7 +182,7 @@ COPY --chown=daos_server:daos_server utils utils # Build DAOS RPMs RUN [ "$DAOS_PACKAGES_BUILD" != "yes" ] || [ "$DAOS_BUILD" != "yes" ] || { \ - utils/rpms/build_packages.sh daos && \ + utils/rpms/build_packages.sh daos "$CODE_COVERAGE" && \ mkdir -p /home/daos/rpms && \ cp *.rpm /home/daos/rpms; \ } diff --git a/utils/rpms/build_packages.sh b/utils/rpms/build_packages.sh index e696c48fee8..4661776aa7b 100755 --- a/utils/rpms/build_packages.sh +++ b/utils/rpms/build_packages.sh @@ -1,6 +1,7 @@ #!/bin/bash set -eEuo pipefail build_type="${1:-all}" +code_coverage="${2:-false}" source utils/sl/setup_local.sh if [[ "${build_type}" =~ deps|all ]]; then utils/rpms/argobots.sh @@ -11,7 +12,10 @@ if [[ "${build_type}" =~ deps|all ]]; then utils/rpms/mercury.sh utils/rpms/pmdk.sh utils/rpms/spdk.sh + if [[ "${code_coverage}" != "false" ]]; then + utils/rpms/bullseye.sh + fi fi if [[ "${build_type}" =~ daos|all ]]; then - utils/rpms/daos.sh + utils/rpms/daos.sh "${code_coverage}" fi diff --git a/utils/rpms/bullseye.changelog b/utils/rpms/bullseye.changelog new file mode 100644 index 00000000000..14f914e5f5d --- /dev/null +++ b/utils/rpms/bullseye.changelog @@ -0,0 +1,9 @@ +%changelog +* Fri Dec 12 2025 Phillip Henderson 9.23.7-1 +- Update to 9.23.7 + +* Fri Mar 31 2023 Brian J. Murrell - 9.1.1-1 +- Update to 9.1.1 + +* Fri Feb 19 2021 Brian J. 
Murrell - 8.21.4-1 +- First packaged version diff --git a/utils/rpms/bullseye.sh b/utils/rpms/bullseye.sh new file mode 100755 index 00000000000..efdfe975818 --- /dev/null +++ b/utils/rpms/bullseye.sh @@ -0,0 +1,82 @@ +#!/bin/bash +set -uex + +root="$(realpath "$(dirname "${BASH_SOURCE[0]}")")" +. "${root}/fpm_common.sh" + +: "${SL_BULLSEYE_PREFIX:=/opt/BullseyeCoverage}" +if [ ! -d "${SL_BULLSEYE_PREFIX}" ]; then + echo "bullseye must be installed or built in ${SL_BULLSEYE_PREFIX}" + exit 0 +fi + +VERSION="${bullseye_version}" +RELEASE="${bullseye_release}" +LICENSE="Proprietary" +ARCH="${isa}" +DESCRIPTION="The BullseyeCoverage compiler" +URL="https://www.bullseye.com/index.html" +RPM_CHANGELOG="bullseye.changelog" +PACKAGE_TYPE="dir" +files=() + +# Add bullseye files +FILTER_LIST=("${SL_BULLSEYE_PREFIX}/sample") +readarray -t dir_list < <(find "${SL_BULLSEYE_PREFIX}" -mindepth 1 -maxdepth 1 -type d) +for dir in "${dir_list[@]}"; do + if filter_file "${dir}"; then + continue + fi + readarray -t dir_file_list < <(find "${dir}" -mindepth 1 -maxdepth 1 -type f) + TARGET_PATH="${dir}" + for dir_file in "${dir_file_list[@]}"; do + list_files files "${dir_file}" + append_install_list "${files[@]}" + done +done + +# Add test.cov file +TARGET_PATH="${SL_BULLSEYE_PREFIX}/daos" +list_files files "test.cov" +append_install_list "${files[@]}" + +# Create tar file containing all source files for the covhtml command +readarray -t src_file_list < <("${SL_BULLSEYE_PREFIX}/bin/covmgr" -l --file test.cov) +if [ ${#src_file_list[@]} -gt 0 ]; then + tar -czf "${tmp}/bullseye_sources.tar.gz" "${src_file_list[@]}" 2>/dev/null || { + echo "Warning: Some source files may not exist, creating tar with existing files only" + existing_files=() + for src_file in "${src_file_list[@]}"; do + if [ -f "${src_file}" ]; then + existing_files+=("${src_file}") + fi + done + if [ ${#existing_files[@]} -gt 0 ]; then + tar -czf "${tmp}/bullseye_sources.tar.gz" "${existing_files[@]}" + echo "Created tar 
file with ${#existing_files[@]} existing source files" + else + echo "No source files found to archive" + fi + } +else + echo "No source files found in src_file_list" +fi +list_files files "${tmp}/bullseye_sources.tar.gz" +append_install_list "${files[@]}" + +# # Add sources for covhtml command +# for src_file in "${src_file_list[@]}"; do +# dir_name=$(dirname "${src_file}") +# TARGET_PATH="${SL_BULLSEYE_PREFIX}/daos/${dir_name}" +# list_files files "${src_file}" +# append_install_list "${files[@]}" +# done + +# Fix file permissions +cat << EOF > "${tmp}/post_install_bullseye" +chmod 666 ${SL_BULLSEYE_PREFIX}/daos/test.cov +chmod 666 ${SL_BULLSEYE_PREFIX}/daos/bullseye_sources.tar.gz +EOF +EXTRA_OPTS+=("--after-install" "${tmp}/post_install_bullseye") + +build_package "bullseye" diff --git a/utils/rpms/bullseye_build.sh b/utils/rpms/bullseye_build.sh new file mode 100755 index 00000000000..4ea81c86bf7 --- /dev/null +++ b/utils/rpms/bullseye_build.sh @@ -0,0 +1,32 @@ +#!/bin/bash +set -uex + +bullseye_key="${1:-}" + +# Get the bullseye version +mydir="$(cd "$(dirname "${BASH_SOURCE[0]}")" >/dev/null 2>&1 && pwd)" +pushd "${mydir}/../.." 
|| exit 1 +source utils/rpms/package_info.sh +popd + +: "${SL_BULLSEYE_PREFIX:=/opt/BullseyeCoverage}" +bullseye_url="https://www.bullseye.com/download" +bullseye_src="${bullseye_url}/BullseyeCoverage-${bullseye_version}-Linux-x64.tar.xz" +bullseye_out="bullseye.tar.xz" + +if [ -n "${DAOS_HTTPS_PROXY:-}" ]; then + curl --proxy "${DAOS_HTTPS_PROXY}" "${bullseye_src}" --retry 10 --retry-max-time 60 --silent --show-error -o "${bullseye_out}" +else + curl "${bullseye_src}" --retry 10 --retry-max-time 60 --silent --show-error -o "${bullseye_out}" +fi + +mkdir -p bullseye +tar -C bullseye --strip-components=1 -xf "${bullseye_out}" +pushd bullseye +set +x +echo + sudo ./install --quiet --key "**********" --prefix "${SL_BULLSEYE_PREFIX}" +sudo ./install --quiet --key "${bullseye_key}" --prefix "${SL_BULLSEYE_PREFIX}" +set -x +popd +# rm -rf bullseye.tar.xz bullseye +# ls -alR "${SL_BULLSEYE_PREFIX}" diff --git a/utils/rpms/daos.sh b/utils/rpms/daos.sh index 8a28b2a187d..e71f1f5da79 100755 --- a/utils/rpms/daos.sh +++ b/utils/rpms/daos.sh @@ -10,6 +10,7 @@ if [ -z "${SL_PREFIX:-}" ]; then exit 1 fi +code_coverage="${1:-false}" daoshome="${prefix}/lib/daos" server_svc_name="daos_server.service" agent_svc_name="daos_agent.service" @@ -423,6 +424,21 @@ append_install_list "${files[@]}" # Don't do autoreq, we know we need OpenMPI so add it explicitly build_package "daos-client-tests-openmpi" "noautoreq" +# Code coverage +if [[ "${code_coverage}" != "false" ]]; then + code_coverage_prefix="/opt/BullseyeCoverage" + EXTERNAL_DEPENDS=("${bullseye_normal}") + TARGET_PATH="${code_coverage_prefix}/daos" + list_files files "test.cov" + append_install_list "${files[@]}" + + cat << EOF > "${tmp}/post_install_bullseye" +chmod 666 ${code_coverage_prefix}/daos/test.cov +EOF + EXTRA_OPTS+=("--after-install" "${tmp}/post_install_bullseye") + build_package "daos-code-coverage" +fi + #shim packages PACKAGE_TYPE="empty" ARCH="noarch" diff --git a/utils/rpms/package_info.sh 
b/utils/rpms/package_info.sh index cc3be377607..6da184ebe3b 100644 --- a/utils/rpms/package_info.sh +++ b/utils/rpms/package_info.sh @@ -59,6 +59,9 @@ export daos_spdk_full="${daos_spdk_version}-${daos_spdk_release}" export fused_version="1.0.0" export fused_release="3${distro_name}" export fused_full="${fused_version}-${fused_release}" +export bullseye_version="9.23.7" +export bullseye_release="1${distro_name}" +export bullseye_full="${bullseye_version}-${bullseye_release}" set_lib_name openmpi lib openmpi openmpi3 openmpi export openmpi_lib @@ -124,6 +127,9 @@ export uuid_lib set_lib_name hdf5 lib hdf5 hdf5 hdf5 export hdf5_lib +set_lib_name bullseye normal bullseye bullseye bullseye +export bullseye_normal + lmod="Lmod" if [[ "${DISTRO:-el8}" =~ suse ]]; then lmod="lua-lmod" diff --git a/utils/run_utest.py b/utils/run_utest.py index 39261be2a51..d9a059f8885 100755 --- a/utils/run_utest.py +++ b/utils/run_utest.py @@ -14,7 +14,6 @@ # pylint: disable=broad-except import os import re -import shutil import subprocess # nosec import sys import tempfile @@ -609,21 +608,6 @@ def run_suites(args, suites, results, aio): results.merge(suite.run_suite(args, aio)) -def move_codecov(base): - """Move any code coverage results""" - try: - target = "/tmp/test.cov" - if os.path.isfile(target): - os.unlink(target) - src = os.path.join(base, "test.cov") - if os.path.isfile(src): - print(f"Moving {src} to {target}") - shutil.move(src, target) - except Exception: - print("Exception trying to copy test.cov") - traceback.print_exc() - - def get_args(): """Parse the arguments""" parser = argparse.ArgumentParser(description='Run DAOS unit tests') @@ -706,8 +690,6 @@ def main(): results.create_junit() - move_codecov(path_info["DAOS_BASE"]) - if args.no_fail_on_error: return