diff --git a/doc/develop/test/twister.rst b/doc/develop/test/twister.rst
index a362c79861a89..2ba3b5db3960d 100644
--- a/doc/develop/test/twister.rst
+++ b/doc/develop/test/twister.rst
@@ -793,6 +793,34 @@ filter:
   Would match it.
 
+required_images:
+  A list of test scenarios that must be built before this test can run.
+  If the test does not have a ``CMakeLists.txt`` file or sets ``no_own_image``
+  to True, the first image from this list is used as the test
+  scenario image.
+  All required test scenarios must be available in the testsuite roots given
+  with the ``-T`` option. If a required test scenario is not available or its
+  build fails, the test will be skipped.
+  If a required scenario is prefixed with a platform name, it will be
+  built for that platform only.
+  The following is an example YAML with two required images.
+
+  .. code-block:: yaml
+
+     tests:
+       shared.app.example:
+         required_images:
+           - test.example.first
+           - native_sim:test.example.second
+         no_own_image: True
+
+  Not supported with the ``--subset`` or ``--runtime-artifact-cleanup`` options.
+
+no_own_image: (default False)
+  If true, the test scenario will not be built. Instead, it
+  will use the first test scenario from the ``required_images`` list.
+  Not supported on QEMU platforms.
+
 required_snippets:
   :ref:`Snippets <snippets>` are supported in twister for test scenarios that
   require them. As with normal applications, twister supports using the base
diff --git a/samples/subsys/testsuite/shared_app/CMakeLists.txt b/samples/subsys/testsuite/shared_app/CMakeLists.txt
new file mode 100644
index 0000000000000..4e4e27a2998bf
--- /dev/null
+++ b/samples/subsys/testsuite/shared_app/CMakeLists.txt
@@ -0,0 +1,8 @@
+# SPDX-License-Identifier: Apache-2.0
+
+cmake_minimum_required(VERSION 3.20.0)
+find_package(Zephyr REQUIRED HINTS $ENV{ZEPHYR_BASE})
+project(shared_app_with_own_image)
+
+FILE(GLOB app_sources src/*.c)
+target_sources(app PRIVATE ${app_sources})
diff --git a/samples/subsys/testsuite/shared_app/prj.conf b/samples/subsys/testsuite/shared_app/prj.conf
new file mode 100644
index 0000000000000..377883810656c
--- /dev/null
+++ b/samples/subsys/testsuite/shared_app/prj.conf
@@ -0,0 +1,5 @@
+CONFIG_PRINTK=y
+CONFIG_SHELL=y
+CONFIG_LOG=y
+CONFIG_SHELL_BACKEND_SERIAL=y
+CONFIG_KERNEL_SHELL=y
diff --git a/samples/subsys/testsuite/shared_app/pytest/test_shared_app.py b/samples/subsys/testsuite/shared_app/pytest/test_shared_app.py
new file mode 100644
index 0000000000000..8c7129e13be74
--- /dev/null
+++ b/samples/subsys/testsuite/shared_app/pytest/test_shared_app.py
@@ -0,0 +1,23 @@
+# Copyright (c) 2024 Nordic Semiconductor ASA
+#
+# SPDX-License-Identifier: Apache-2.0
+from __future__ import annotations
+
+import logging
+from pathlib import Path
+
+from twister_harness import DeviceAdapter, Shell
+
+logger = logging.getLogger(__name__)
+
+
+def test_shell_is_ready(shell: Shell):
+    # wait_for_prompt is called from the shell fixture, so if it passed, the shell is ready to use
+    assert True
+
+
+def test_second_app_is_found(dut: DeviceAdapter, shell: Shell, required_images):
+    logger.info(f"Required images: {required_images}")
+    assert required_images
+    assert Path(required_images[0]).is_dir()
+    assert Path(Path(required_images[0]) / 'build.log').exists()
diff --git a/samples/subsys/testsuite/shared_app/src/main.c b/samples/subsys/testsuite/shared_app/src/main.c
new file mode 100644
index 0000000000000..a05ee59143061
--- /dev/null
+++ b/samples/subsys/testsuite/shared_app/src/main.c
@@ -0,0 +1,10 @@
+/*
+ * Copyright (c) 2024
Nordic Semiconductor ASA + * + * SPDX-License-Identifier: Apache-2.0 + */ + +int main(void) +{ + return 0; +} diff --git a/samples/subsys/testsuite/shared_app/testcase.yaml b/samples/subsys/testsuite/shared_app/testcase.yaml new file mode 100644 index 0000000000000..561c8c90d218b --- /dev/null +++ b/samples/subsys/testsuite/shared_app/testcase.yaml @@ -0,0 +1,36 @@ +common: + filter: CONFIG_SERIAL and dt_chosen_enabled("zephyr,shell-uart") + min_ram: 40 + extra_configs: + - arch:posix:CONFIG_NATIVE_UART_0_ON_STDINOUT=y + integration_platforms: + - native_sim +tests: + sample.shared.build_only: + build_only: true + sample.shared.with_own_image.pytest: + required_images: + - sample.shared.build_only + harness: pytest + harness_config: + pytest_dut_scope: module + tags: pytest shared + sample.shared.no_own_image.pytest: + no_own_image: true + required_images: + - sample.shared.with_own_image.pytest + - sample.shared.build_only + harness: pytest + harness_config: + pytest_dut_scope: module + tags: pytest shared + sample.shared.no_own_image.helloworld: + no_own_image: true + required_images: + - sample.basic.helloworld + harness: console + harness_config: + type: one_line + regex: + - "Hello World! (.*)" + tags: shared diff --git a/scripts/pylib/pytest-twister-harness/src/twister_harness/fixtures.py b/scripts/pylib/pytest-twister-harness/src/twister_harness/fixtures.py index ddafadc883cf4..a4c45e5271024 100644 --- a/scripts/pylib/pytest-twister-harness/src/twister_harness/fixtures.py +++ b/scripts/pylib/pytest-twister-harness/src/twister_harness/fixtures.py @@ -92,3 +92,8 @@ def is_mcumgr_available() -> None: @pytest.fixture() def mcumgr(is_mcumgr_available: None, dut: DeviceAdapter) -> Generator[MCUmgr, None, None]: yield MCUmgr.create_for_serial(dut.device_config.serial) + + +@pytest.fixture(scope='session') +def required_images(request): + return request.config.getoption('--required-image') diff --git a/scripts/pylib/pytest-twister-harness/src/twister_harness/plugin.py b/scripts/pylib/pytest-twister-harness/src/twister_harness/plugin.py index ef8a1fb6dc197..c301b43a2126c 100644 --- a/scripts/pylib/pytest-twister-harness/src/twister_harness/plugin.py +++ b/scripts/pylib/pytest-twister-harness/src/twister_harness/plugin.py @@ -126,6 +126,10 @@ def pytest_addoption(parser: pytest.Parser): '--twister-fixture', action='append', dest='fixtures', metavar='FIXTURE', default=[], help='Twister fixture supported by this platform. May be given multiple times.' ) + twister_harness_group.addoption( + '--required-image', action='append', dest='required_images', default=[], + help='Paths to build_dir of required images. May be given multiple times.' 
+ ) def pytest_configure(config: pytest.Config): diff --git a/scripts/pylib/twister/twisterlib/config_parser.py b/scripts/pylib/twister/twisterlib/config_parser.py index 10a31613459e2..fce716149311e 100644 --- a/scripts/pylib/twister/twisterlib/config_parser.py +++ b/scripts/pylib/twister/twisterlib/config_parser.py @@ -49,6 +49,8 @@ class TwisterConfigParser: "extra_conf_files": {"type": "list", "default": []}, "extra_overlay_confs" : {"type": "list", "default": []}, "extra_dtc_overlay_files": {"type": "list", "default": []}, + "required_images": {"type": "list"}, + "no_own_image": {"type": "bool", "default": False }, "required_snippets": {"type": "list"}, "build_only": {"type": "bool", "default": False}, "build_on_all": {"type": "bool", "default": False}, diff --git a/scripts/pylib/twister/twisterlib/handlers.py b/scripts/pylib/twister/twisterlib/handlers.py index 0ebab95434bf6..32064ab3a4249 100755 --- a/scripts/pylib/twister/twisterlib/handlers.py +++ b/scripts/pylib/twister/twisterlib/handlers.py @@ -168,7 +168,7 @@ def get_default_domain_build_dir(self): domain_path = os.path.join(self.build_dir, "domains.yaml") domains = Domains.from_file(domain_path) logger.debug("Loaded sysbuild domain data from %s" % domain_path) - build_dir = domains.get_default_domain().build_dir + build_dir = os.path.join(self.build_dir, domains.get_default_domain().name) else: build_dir = self.build_dir return build_dir diff --git a/scripts/pylib/twister/twisterlib/harness.py b/scripts/pylib/twister/twisterlib/harness.py index 2629cdfb83faf..a1972d9b4ab34 100644 --- a/scripts/pylib/twister/twisterlib/harness.py +++ b/scripts/pylib/twister/twisterlib/harness.py @@ -413,6 +413,12 @@ def generate_command(self): else: raise PytestHarnessException(f'Support for handler {handler.type_str} not implemented yet') + if self.instance.required_build_dirs: + # skip first image, it is used as main app when no_own_image is set + start_index = 1 if self.instance.no_own_image else 0 + for req_image in self.instance.required_build_dirs[start_index:]: + command.append(f'--required-image={req_image}') + if handler.type_str != 'device': for fixture in handler.options.fixture: command.append(f'--twister-fixture={fixture}') diff --git a/scripts/pylib/twister/twisterlib/runner.py b/scripts/pylib/twister/twisterlib/runner.py index 430e43846ea4e..7ca8135b7b684 100644 --- a/scripts/pylib/twister/twisterlib/runner.py +++ b/scripts/pylib/twister/twisterlib/runner.py @@ -8,7 +8,6 @@ import multiprocessing import os import pickle -import queue import re import shutil import subprocess @@ -18,7 +17,8 @@ import yaml from multiprocessing import Lock, Process, Value from multiprocessing.managers import BaseManager -from typing import List +from collections import deque +from typing import List, Dict from packaging import version from colorama import Fore @@ -538,7 +538,7 @@ def __init__(self, instance: TestInstance, env: TwisterEnv, jobserver, **kwargs) super().__init__(instance.testsuite, instance.platform, instance.testsuite.source_dir, instance.build_dir, jobserver) self.log = "build.log" - self.instance = instance + self.instance: TestInstance = instance self.filtered_tests = 0 self.options = env.options self.env = env @@ -601,20 +601,19 @@ def log_info_file(self, inline_logs): else: self.log_info("{}".format(b_log), inline_logs) - - def _add_to_pipeline(self, pipeline, op: str, additionals: dict={}): + def _add_to_processing_queue(self, processing_queue: deque, op: str, additionals: dict={}): try: if op: task = dict({'op': op, 'test': 
self.instance}, **additionals) - pipeline.put(task) - # Only possible RuntimeError source here is a mutation of the pipeline during iteration. - # If that happens, we ought to consider the whole pipeline corrupted. + processing_queue.append(task) + # Only possible RuntimeError source here is a mutation of the processing_queue during iteration. + # If that happens, we ought to consider the whole processing_queue corrupted. except RuntimeError as e: logger.error(f"RuntimeError: {e}") traceback.print_exc() - - def process(self, pipeline, done, message, lock, results): + def process(self, processing_queue: deque, ready_instances: Dict[str, TestInstance], + message, lock: Lock, results: ExecutionCounter): next_op = None additionals = {} @@ -646,7 +645,7 @@ def process(self, pipeline, done, message, lock, results): self.instance.add_missing_case_status(TwisterStatus.BLOCK, reason) next_op = 'report' finally: - self._add_to_pipeline(pipeline, next_op) + self._add_to_processing_queue(processing_queue, next_op) # The build process, call cmake and build with configured generator elif op == "cmake": @@ -677,7 +676,7 @@ def process(self, pipeline, done, message, lock, results): self.instance.add_missing_case_status(TwisterStatus.BLOCK, reason) next_op = 'report' finally: - self._add_to_pipeline(pipeline, next_op) + self._add_to_processing_queue(processing_queue, next_op) elif op == "build": try: @@ -718,7 +717,7 @@ def process(self, pipeline, done, message, lock, results): self.instance.add_missing_case_status(TwisterStatus.BLOCK, reason) next_op = 'report' finally: - self._add_to_pipeline(pipeline, next_op) + self._add_to_processing_queue(processing_queue, next_op) elif op == "gather_metrics": try: @@ -739,7 +738,7 @@ def process(self, pipeline, done, message, lock, results): self.instance.add_missing_case_status(TwisterStatus.BLOCK, reason) next_op = 'report' finally: - self._add_to_pipeline(pipeline, next_op) + self._add_to_processing_queue(processing_queue, next_op) # Run the generated binary using one of the supported handlers elif op == "run": @@ -766,13 +765,13 @@ def process(self, pipeline, done, message, lock, results): next_op = 'report' additionals = {} finally: - self._add_to_pipeline(pipeline, next_op, additionals) + self._add_to_processing_queue(processing_queue, next_op, additionals) # Report results and output progress to screen elif op == "report": try: with lock: - done.put(self.instance) + ready_instances.update({self.instance.name: self.instance}) self.report_out(results) if not self.options.coverage: @@ -794,7 +793,7 @@ def process(self, pipeline, done, message, lock, results): next_op = None additionals = {} finally: - self._add_to_pipeline(pipeline, next_op, additionals) + self._add_to_processing_queue(processing_queue, next_op, additionals) elif op == "cleanup": try: @@ -1279,11 +1278,10 @@ def calc_size(instance: TestInstance, from_buildlog: bool): class TwisterRunner: def __init__(self, instances, suites, env=None) -> None: - self.pipeline = None self.options = env.options self.env = env - self.instances = instances - self.suites = suites + self.instances: Dict[str, TestInstance] = instances + self.suites: Dict[str, TestSuite] = suites self.duts = None self.jobs = 1 self.results = None @@ -1293,14 +1291,15 @@ def run(self): retries = self.options.retry_failed + 1 - BaseManager.register('LifoQueue', queue.LifoQueue) + BaseManager.register('deque', deque, exposed=['append', 'appendleft', 'pop']) + BaseManager.register('get_dict', dict) manager = BaseManager() manager.start() 
self.results = ExecutionCounter(total=len(self.instances)) self.iteration = 0 - pipeline = manager.LifoQueue() - done_queue = manager.LifoQueue() + processing_queue: deque = manager.deque() + ready_instances: Dict[str, TestInstance] = manager.get_dict() # Set number of jobs if self.options.jobs: @@ -1338,18 +1337,13 @@ def run(self): else: self.results.done = self.results.skipped_filter - self.execute(pipeline, done_queue) + self.execute(processing_queue, ready_instances) - while True: - try: - inst = done_queue.get_nowait() - except queue.Empty: - break - else: - inst.metrics.update(self.instances[inst.name].metrics) - inst.metrics["handler_time"] = inst.execution_time - inst.metrics["unrecognized"] = [] - self.instances[inst.name] = inst + for inst in ready_instances.values(): + inst.metrics.update(self.instances[inst.name].metrics) + inst.metrics["handler_time"] = inst.execution_time + inst.metrics["unrecognized"] = [] + self.instances[inst.name] = inst print("") @@ -1386,7 +1380,8 @@ def show_brief(self): self.results.skipped_filter, self.results.skipped_configs - self.results.skipped_filter)) - def add_tasks_to_queue(self, pipeline, build_only=False, test_only=False, retry_build_errors=False): + def add_tasks_to_queue(self, processing_queue: deque, build_only=False, + test_only=False, retry_build_errors=False): for instance in self.instances.values(): if build_only: instance.run = False @@ -1406,61 +1401,115 @@ def add_tasks_to_queue(self, pipeline, build_only=False, test_only=False, retry_ if instance.testsuite.filter: instance.filter_stages = self.get_cmake_filter_stages(instance.testsuite.filter, expr_parser.reserved.keys()) - if test_only and instance.run: - pipeline.put({"op": "run", "test": instance}) + if (test_only and instance.run) or instance.no_own_image: + processing_queue.append({"op": "run", "test": instance}) elif instance.filter_stages and "full" not in instance.filter_stages: - pipeline.put({"op": "filter", "test": instance}) + processing_queue.append({"op": "filter", "test": instance}) else: cache_file = os.path.join(instance.build_dir, "CMakeCache.txt") if os.path.exists(cache_file) and self.env.options.aggressive_no_clean: - pipeline.put({"op": "build", "test": instance}) + processing_queue.append({"op": "build", "test": instance}) else: - pipeline.put({"op": "cmake", "test": instance}) + processing_queue.append({"op": "cmake", "test": instance}) + + def _required_images_are_ready(self, instance: TestInstance, ready_instances: Dict[str, TestInstance]): + return all([required_image in ready_instances.keys() for required_image in instance.required_images]) + + def _required_images_failed(self, instance: TestInstance, ready_instances: Dict[str, TestInstance]): + # Verify that all required images were successfully built + found_failed_image = False + for required_image in instance.required_images: + inst = ready_instances.get(required_image) + if inst.status != TwisterStatus.PASS: + logger.debug(f"{required_image}: Required image failed: {inst.reason}") + found_failed_image = True + return found_failed_image + + def required_images_processed(self, instance: TestInstance, processing_queue: deque, + ready_instances: Dict[str, TestInstance], task): + if not instance.required_images: + return True + + if not self._required_images_are_ready(instance, ready_instances): + # required image not ready yet, + # add the task back to the pipeline to process it later + if self.jobs > 1: + # to avoid busy waiting + time.sleep(1) + processing_queue.appendleft(task) + return False + + 
if self._required_images_failed(instance, ready_instances): + instance.status = TwisterStatus.SKIP + for tc in instance.testcases: + tc.status = TwisterStatus.SKIP + instance.reason = "Required image failed" + instance.required_images = [] + processing_queue.append({"op": "report", "test": instance}) + return False + + if instance.no_own_image: + # copy build_dir from the first required image to the current instance + origin_build_dir = self.instances[instance.required_images[0]].build_dir + shutil.copytree(origin_build_dir, instance.build_dir, dirs_exist_ok=True) + logger.debug(f"Copied build_dir from {origin_build_dir}") + if not instance.run or self.options.build_only: + instance.status = TwisterStatus.SKIP + instance.reason = "not run" + instance.required_images = [] + processing_queue.append({"op": "report", "test": instance}) + return False + + # keep paths to required build dirs for further processing + for required_image in instance.required_images: + instance.required_build_dirs.append(self.instances[required_image].build_dir) + + # required images are ready, clear to not process them later + instance.required_images = [] + return True + + def _pipeline_mgr(self, processing_queue: deque, ready_instances: Dict[str, TestInstance], + lock: Lock, results: ExecutionCounter): + while True: + try: + task = processing_queue.pop() + except IndexError: + break + else: + instance: TestInstance = task['test'] + + if not self.required_images_processed(instance, processing_queue, ready_instances, task): + # postpone processing task if required images are not ready + continue + pb = ProjectBuilder(instance, self.env, self.jobserver) + pb.duts = self.duts + pb.process(processing_queue, ready_instances, task, lock, results) + return True - def pipeline_mgr(self, pipeline, done_queue, lock, results): + def pipeline_mgr(self, processing_queue: deque, ready_instances: Dict[str, TestInstance], + lock: Lock, results: ExecutionCounter): try: if sys.platform == 'linux': with self.jobserver.get_job(): - while True: - try: - task = pipeline.get_nowait() - except queue.Empty: - break - else: - instance = task['test'] - pb = ProjectBuilder(instance, self.env, self.jobserver) - pb.duts = self.duts - pb.process(pipeline, done_queue, task, lock, results) - - return True + return self._pipeline_mgr(processing_queue, ready_instances, lock, results) else: - while True: - try: - task = pipeline.get_nowait() - except queue.Empty: - break - else: - instance = task['test'] - pb = ProjectBuilder(instance, self.env, self.jobserver) - pb.duts = self.duts - pb.process(pipeline, done_queue, task, lock, results) - return True + return self._pipeline_mgr(processing_queue, ready_instances, lock, results) except Exception as e: - logger.error(f"General exception: {e}") + logger.exception(f"General exception: {e}") sys.exit(1) - def execute(self, pipeline, done): + def execute(self, processing_queue: deque, ready_instances: Dict[str, TestInstance]): lock = Lock() logger.info("Adding tasks to the queue...") - self.add_tasks_to_queue(pipeline, self.options.build_only, self.options.test_only, + self.add_tasks_to_queue(processing_queue, self.options.build_only, self.options.test_only, retry_build_errors=self.options.retry_build_errors) logger.info("Added initial list of jobs to queue") processes = [] for _ in range(self.jobs): - p = Process(target=self.pipeline_mgr, args=(pipeline, done, lock, self.results, )) + p = Process(target=self.pipeline_mgr, args=(processing_queue, ready_instances, lock, self.results, )) processes.append(p) 
p.start() logger.debug(f"Launched {self.jobs} jobs") diff --git a/scripts/pylib/twister/twisterlib/testinstance.py b/scripts/pylib/twister/twisterlib/testinstance.py index 16586a7a20e15..b0b0d4cb757c5 100644 --- a/scripts/pylib/twister/twisterlib/testinstance.py +++ b/scripts/pylib/twister/twisterlib/testinstance.py @@ -46,7 +46,7 @@ class TestInstance: __test__ = False - def __init__(self, testsuite, platform, outdir): + def __init__(self, testsuite: TestSuite, platform: Platform, outdir): self.testsuite: TestSuite = testsuite self.platform: Platform = platform @@ -80,6 +80,9 @@ def __init__(self, testsuite, platform, outdir): self.init_cases() self.filters = [] self.filter_type = None + self.required_images = [] + self.required_build_dirs = [] + self.no_own_image = False def record(self, recording, fname_csv="recording.csv"): if recording: diff --git a/scripts/pylib/twister/twisterlib/testplan.py b/scripts/pylib/twister/twisterlib/testplan.py index 360040490de54..03049548a6159 100755 --- a/scripts/pylib/twister/twisterlib/testplan.py +++ b/scripts/pylib/twister/twisterlib/testplan.py @@ -19,6 +19,7 @@ import snippets from pathlib import Path from argparse import Namespace +from typing import Dict logger = logging.getLogger('twister') logger.setLevel(logging.DEBUG) @@ -70,6 +71,8 @@ class Filters: TOOLCHAIN = 'Toolchain filter' # in case an optional module is not available MODULE = 'Module filter' + # in case of missing required image + REQUIRED_IMAGE = 'Required image filter' class TestLevel: @@ -109,7 +112,7 @@ def __init__(self, env=None): self.filtered_platforms = [] self.default_platforms = [] self.load_errors = 0 - self.instances = dict() + self.instances: Dict[str, TestInstance] = dict() self.instance_fail_count = 0 self.warnings = 0 @@ -667,6 +670,7 @@ def load_from_file(self, file, filter_platform=[]): except FileNotFoundError as e: logger.error(f"{e}") return 1 + self.apply_changes_for_required_images(loaded_from_file=True) def apply_filters(self, **kwargs): @@ -1010,6 +1014,8 @@ def apply_filters(self, **kwargs): else: self.add_instances(instance_list) + self.apply_changes_for_required_images() + for _, case in self.instances.items(): case.create_overlay(case.platform, self.options.enable_asan, self.options.enable_ubsan, self.options.enable_coverage, self.options.coverage_platform) @@ -1024,11 +1030,96 @@ def apply_filters(self, **kwargs): self.filtered_platforms = set(p.platform.name for p in self.instances.values() if p.status != TwisterStatus.SKIP ) + def _get_required_platform_and_id(self, required_image, instance: TestInstance): + # required_image can be in the form of platform:test_id or just test_id + split_req_image = required_image.split(':') + req_platform = instance.platform.name + if len(split_req_image) == 2: + req_platform = split_req_image[0] + req_test_id = split_req_image[-1] + return req_platform, req_test_id + + def _find_required_instance(self, required_image, instance: TestInstance): + req_platform, req_test_id = self._get_required_platform_and_id(required_image, instance) + for inst in self.instances.values(): + if req_test_id == inst.testsuite.id and req_platform == inst.platform.name: + return inst + return None + + def apply_changes_for_required_images(self, loaded_from_file=False): + # check if required images are in scope + for instance in self.instances.values(): + if not instance.testsuite.required_images: + continue + if instance.status == TwisterStatus.FILTER: + # do not proceed if the test is already filtered + continue + + if 
self.options.subset:
+                reason = "Required images are not supported with --subset"
+                instance.add_filter(reason, Filters.REQUIRED_IMAGE)
+                logger.debug(f"{instance.name}: {reason}")
+                continue
+
+            if self.options.runtime_artifact_cleanup:
+                reason = "Required images are not supported with --runtime-artifact-cleanup"
+                instance.add_filter(reason, Filters.REQUIRED_IMAGE)
+                logger.debug(f"{instance.name}: {reason}")
+                continue
+
+            if instance.testsuite.no_own_image or not Path(
+                    Path(instance.testsuite.source_dir) / 'CMakeLists.txt').exists():
+                instance.no_own_image = True
+                # Filter out QEMU platforms: a test without its own image would have to
+                # reuse the original QEMU_PIPE path. This could be supported, but
+                # QemuHandler should be refactored first.
+                if instance.platform.simulation == 'qemu':
+                    instance.add_filter("QEMU with no own image not supported", Filters.REQUIRED_IMAGE)
+                    logger.debug(f"{instance.name}: QEMU with no own image not supported")
+                    continue
+
+            # check that the platform of the first required image matches this test
+            first_image = instance.testsuite.required_images[0]
+            req_platform, _ = self._get_required_platform_and_id(first_image, instance)
+            if req_platform != instance.platform.name:
+                instance.add_filter(f"Wrong platform in required image: {first_image}", Filters.REQUIRED_IMAGE)
+                logger.warning(f"{instance.name}: If there is no own image, the first required image"
+                               " should be for the same platform as the test")
+                continue
+
+            for required_image in instance.testsuite.required_images:
+                req_instance = self._find_required_instance(required_image, instance)
+
+                if not req_instance:
+                    instance.add_filter(f"Missing required image {required_image}", Filters.REQUIRED_IMAGE)
+                    logger.debug(f"{instance.name}: Required image '{required_image}' was not found."
+                                 " Please verify that the required image is provided with --testsuite-root")
+                    break
+
+                if req_instance.status == TwisterStatus.FILTER:
+                    # check whether the required image was filtered only because it is not runnable
+                    if loaded_from_file or (
+                            self.options.device_testing and not req_instance.run
+                            and len(req_instance.filters) == 1
+                            and req_instance.reason == "Not runnable on device"):
+                        # clear the status flag so that the required image gets built
+                        self.instances[req_instance.name].status = TwisterStatus.NONE
+                    else:
+                        instance.add_filter(f"Required image {required_image} is filtered", Filters.REQUIRED_IMAGE)
+                        logger.debug(f"{instance.name}: Required image '{required_image}' is filtered")
+                        break
+
+                if instance.testsuite.id in req_instance.testsuite.required_images:
+                    instance.add_filter("Circular dependency in required images", Filters.REQUIRED_IMAGE)
+                    logger.warning(f"{instance.name}: Circular dependency, current app also required by "
+                                   f"{required_image}")
+                    break
+
+                instance.required_images.append(req_instance.name)
+
     def add_instances(self, instance_list):
         for instance in instance_list:
             self.instances[instance.name] = instance
 
-
     def get_testsuite(self, identifier):
         results = []
         for _, ts in self.testsuites.items():
@@ -1100,7 +1191,7 @@ def change_skip_to_error_if_integration(options, instance):
     filters = {t['type'] for t in instance.filters}
     ignore_filters ={Filters.CMD_LINE, Filters.SKIP, Filters.PLATFORM_KEY,
                      Filters.TOOLCHAIN, Filters.MODULE, Filters.TESTPLAN,
-                     Filters.QUARANTINE}
+                     Filters.QUARANTINE, Filters.REQUIRED_IMAGE}
     if filters.intersection(ignore_filters):
         return
     instance.status = TwisterStatus.ERROR
diff --git a/scripts/schemas/twister/testsuite-schema.yaml b/scripts/schemas/twister/testsuite-schema.yaml
index 4cb81431d86b8..b70e95599149d 100644
---
a/scripts/schemas/twister/testsuite-schema.yaml +++ b/scripts/schemas/twister/testsuite-schema.yaml @@ -60,6 +60,14 @@ schema;scenario-schema: "extra_sections": type: any required: false + "required_images": + type: seq + required: false + sequence: + - type: str + "no_own_image": + type: bool + required: false "required_snippets": type: seq required: false diff --git a/scripts/tests/twister/test_handlers.py b/scripts/tests/twister/test_handlers.py index 3475c65197e34..f4505e6297d32 100644 --- a/scripts/tests/twister/test_handlers.py +++ b/scripts/tests/twister/test_handlers.py @@ -1540,19 +1540,17 @@ def mock_process(pid): ( True, os.path.join('self', 'dummy_dir', '1'), - mock.PropertyMock(return_value=os.path.join('dummy_dir', '1')), - os.path.join('dummy_dir', '1') + os.path.join('self', 'dummy_dir', '1', 'domain_name') ), ( False, os.path.join('self', 'dummy_dir', '2'), - mock.PropertyMock(return_value=os.path.join('dummy_dir', '2')), os.path.join('self', 'dummy_dir', '2') ), ] @pytest.mark.parametrize( - 'self_sysbuild, self_build_dir, build_dir, expected', + 'self_sysbuild, self_build_dir, expected', TESTDATA_19, ids=['domains build dir', 'self build dir'] ) @@ -1560,11 +1558,10 @@ def test_qemuhandler_get_default_domain_build_dir( mocked_instance, self_sysbuild, self_build_dir, - build_dir, expected ): get_default_domain_mock = mock.Mock() - type(get_default_domain_mock()).build_dir = build_dir + type(get_default_domain_mock()).name = 'domain_name' domains_mock = mock.Mock(get_default_domain=get_default_domain_mock) from_file_mock = mock.Mock(return_value=domains_mock) diff --git a/scripts/tests/twister/test_runner.py b/scripts/tests/twister/test_runner.py index 8d7973f5e0554..d99f30f12d7ac 100644 --- a/scripts/tests/twister/test_runner.py +++ b/scripts/tests/twister/test_runner.py @@ -11,12 +11,12 @@ import os import pathlib import pytest -import queue import re import subprocess import sys import yaml +from collections import deque from contextlib import nullcontext from elftools.elf.sections import SymbolTableSection from typing import List @@ -859,7 +859,7 @@ def mock_getsize(filename, *args, **kwargs): TESTDATA_6 = [ - ( + ( # filter, failed {'op': 'filter'}, TwisterStatus.FAIL, 'Failed', @@ -881,7 +881,7 @@ def mock_getsize(filename, *args, **kwargs): 0, None ), - ( + ( # filter, cmake res {'op': 'filter'}, TwisterStatus.PASS, mock.ANY, @@ -903,7 +903,7 @@ def mock_getsize(filename, *args, **kwargs): 1, (TwisterStatus.SKIP,) ), - ( + ( # filter, no cmake res {'op': 'filter'}, TwisterStatus.PASS, mock.ANY, @@ -925,7 +925,7 @@ def mock_getsize(filename, *args, **kwargs): 0, None ), - ( + ( # cmake, failed {'op': 'cmake'}, TwisterStatus.ERROR, 'dummy error', @@ -947,7 +947,7 @@ def mock_getsize(filename, *args, **kwargs): 0, None ), - ( + ( # cmake, cmake_only, no status {'op': 'cmake'}, TwisterStatus.NONE, mock.ANY, @@ -969,7 +969,7 @@ def mock_getsize(filename, *args, **kwargs): 0, None ), - ( + ( # cmake, cmake_only {'op': 'cmake'}, 'success', mock.ANY, @@ -991,7 +991,7 @@ def mock_getsize(filename, *args, **kwargs): 0, None ), - ( + ( # cmake, no cmake_only, cmake res {'op': 'cmake'}, 'success', mock.ANY, @@ -1013,7 +1013,7 @@ def mock_getsize(filename, *args, **kwargs): 1, (TwisterStatus.SKIP,) ), - ( + ( # cmake, no cmake_only, no cmake res {'op': 'cmake'}, 'success', mock.ANY, @@ -1035,7 +1035,7 @@ def mock_getsize(filename, *args, **kwargs): 0, None ), - ( + ( # build, no build res {'op': 'build'}, mock.ANY, None, @@ -1057,7 +1057,7 @@ def mock_getsize(filename, *args, 
**kwargs): 0, None ), - ( + ( # build, skipped {'op': 'build'}, TwisterStatus.SKIP, mock.ANY, @@ -1080,7 +1080,7 @@ def mock_getsize(filename, *args, **kwargs): 1, (TwisterStatus.SKIP, mock.ANY) ), - ( + ( # build, blocked {'op': 'build'}, TwisterStatus.PASS, mock.ANY, @@ -1102,7 +1102,7 @@ def mock_getsize(filename, *args, **kwargs): 0, (TwisterStatus.BLOCK, mock.ANY) ), - ( + ( # build, determine testcases {'op': 'build'}, 'success', mock.ANY, @@ -1125,7 +1125,7 @@ def mock_getsize(filename, *args, **kwargs): 0, None ), - ( + ( # build, determine testcases Error {'op': 'build'}, 'success', mock.ANY, @@ -1148,7 +1148,7 @@ def mock_getsize(filename, *args, **kwargs): 0, None ), - ( + ( # gather metrics, run and ready handler {'op': 'gather_metrics'}, mock.ANY, mock.ANY, @@ -1169,8 +1169,8 @@ def mock_getsize(filename, *args, **kwargs): mock.ANY, 0, None - ), # 'gather metrics, run and ready handler' - ( + ), + ( # gather metrics {'op': 'gather_metrics'}, mock.ANY, mock.ANY, @@ -1191,8 +1191,8 @@ def mock_getsize(filename, *args, **kwargs): mock.ANY, 0, None - ), # 'gather metrics' - ( + ), + ( # build ok, gather metrics fail {'op': 'gather_metrics'}, mock.ANY, mock.ANY, @@ -1213,8 +1213,8 @@ def mock_getsize(filename, *args, **kwargs): 'Build Failure at gather_metrics.', 0, None - ), # 'build ok, gather metrics fail', - ( + ), + ( # run {'op': 'run'}, 'success', 'OK', @@ -1237,7 +1237,7 @@ def mock_getsize(filename, *args, **kwargs): 0, None ), - ( + ( # run, Pipeline Runtime Error {'op': 'run'}, TwisterStatus.FAIL, mock.ANY, @@ -1261,7 +1261,7 @@ def mock_getsize(filename, *args, **kwargs): 0, None ), - ( + ( # report, prep artifacts for testing {'op': 'report'}, mock.ANY, mock.ANY, @@ -1283,7 +1283,7 @@ def mock_getsize(filename, *args, **kwargs): 0, None ), - ( + ( # report, runtime artifact cleanup pass, status passed {'op': 'report'}, TwisterStatus.PASS, mock.ANY, @@ -1305,7 +1305,7 @@ def mock_getsize(filename, *args, **kwargs): 0, None ), - ( + ( # report, runtime artifact cleanup all {'op': 'report'}, mock.ANY, mock.ANY, @@ -1327,7 +1327,7 @@ def mock_getsize(filename, *args, **kwargs): 0, None ), - ( + ( # report, no message put {'op': 'report'}, mock.ANY, mock.ANY, @@ -1349,7 +1349,7 @@ def mock_getsize(filename, *args, **kwargs): 0, None ), - ( + ( # cleanup, device {'op': 'cleanup', 'mode': 'device'}, mock.ANY, mock.ANY, @@ -1371,7 +1371,7 @@ def mock_getsize(filename, *args, **kwargs): 0, None ), - ( + ( # cleanup, mode passed {'op': 'cleanup', 'mode': 'passed'}, mock.ANY, mock.ANY, @@ -1393,7 +1393,7 @@ def mock_getsize(filename, *args, **kwargs): 0, None ), - ( + ( # cleanup, mode all {'op': 'cleanup', 'mode': 'all'}, mock.ANY, 'Valgrind error', @@ -1415,7 +1415,7 @@ def mock_getsize(filename, *args, **kwargs): 0, None ), - ( + ( # cleanup, mode all, cmake build failure {'op': 'cleanup', 'mode': 'all'}, mock.ANY, 'Cmake build failure', @@ -1489,7 +1489,7 @@ def test_projectbuilder_process( expected_skipped, expected_missing ): - def mock_pipeline_put(msg): + def mock_pipeline_append(msg): if isinstance(pipeline_runtime_error, type) and \ issubclass(pipeline_runtime_error, Exception): raise RuntimeError('Pipeline Error!') @@ -1526,7 +1526,7 @@ def mock_determine_testcases(res): pb.run = mock.Mock() pb.gather_metrics = mock.Mock(return_value=metrics_res) - pipeline_mock = mock.Mock(put=mock.Mock(side_effect=mock_pipeline_put)) + pipeline_mock = mock.Mock(append=mock.Mock(side_effect=mock_pipeline_append)) done_mock = mock.Mock() lock_mock = mock.Mock( 
__enter__=mock.Mock(return_value=(mock.Mock(), mock.Mock())), @@ -1540,7 +1540,7 @@ def mock_determine_testcases(res): assert all([log in caplog.text for log in expected_logs]) if resulting_message: - pipeline_mock.put.assert_called_with(resulting_message) + pipeline_mock.append.assert_called_with(resulting_message) assert pb.instance.status == expected_status assert pb.instance.reason == expected_reason @@ -2422,18 +2422,17 @@ def mock_client_from_environ(jobs): jobclient_mock = mock.Mock() jobclient_mock().name='JobClient' - pipeline_q = queue.LifoQueue() - done_q = queue.LifoQueue() + pipeline_q = deque() + done_q = dict() done_instance = mock.Mock( metrics={'k2': 'v2'}, execution_time=30 ) done_instance.name='dummy instance' - done_q.put(done_instance) + done_q[done_instance.name] = done_instance manager_mock = mock.Mock() - manager_mock().LifoQueue = mock.Mock( - side_effect=iter([pipeline_q, done_q]) - ) + manager_mock().deque = mock.Mock(return_value=pipeline_q) + manager_mock().get_dict = mock.Mock(return_value=done_q) results_mock = mock.Mock() results_mock().error = 1 @@ -2586,11 +2585,11 @@ def mock_get_cmake_filter_stages(filter, keys): return [filter] instances = { - 'dummy1': mock.Mock(run=True, retries=0, status=TwisterStatus.PASS, build_dir="/tmp"), - 'dummy2': mock.Mock(run=True, retries=0, status=TwisterStatus.SKIP, build_dir="/tmp"), - 'dummy3': mock.Mock(run=True, retries=0, status=TwisterStatus.FILTER, build_dir="/tmp"), - 'dummy4': mock.Mock(run=True, retries=0, status=TwisterStatus.ERROR, build_dir="/tmp"), - 'dummy5': mock.Mock(run=True, retries=0, status=TwisterStatus.FAIL, build_dir="/tmp") + 'dummy1': mock.Mock(run=True, retries=0, status=TwisterStatus.PASS, build_dir="/tmp", no_own_image=False), + 'dummy2': mock.Mock(run=True, retries=0, status=TwisterStatus.SKIP, build_dir="/tmp", no_own_image=False), + 'dummy3': mock.Mock(run=True, retries=0, status=TwisterStatus.FILTER, build_dir="/tmp", no_own_image=False), + 'dummy4': mock.Mock(run=True, retries=0, status=TwisterStatus.ERROR, build_dir="/tmp", no_own_image=False), + 'dummy5': mock.Mock(run=True, retries=0, status=TwisterStatus.FAIL, build_dir="/tmp", no_own_image=False) } instances['dummy4'].testsuite.filter = 'some' instances['dummy5'].testsuite.filter = 'full' @@ -2619,10 +2618,10 @@ def mock_get_cmake_filter_stages(filter, keys): if retry_build_errors: tr.get_cmake_filter_stages.assert_any_call('some', mock.ANY) - print(pipeline_mock.put.call_args_list) + print(pipeline_mock.append.call_args_list) print([mock.call(el) for el in expected_pipeline_elements]) - assert pipeline_mock.put.call_args_list == \ + assert pipeline_mock.append.call_args_list == \ [mock.call(el) for el in expected_pipeline_elements] @@ -2641,8 +2640,8 @@ def mock_get_nowait(): nonlocal counter counter += 1 if counter > 5: - raise queue.Empty() - return {'test': 'dummy'} + raise IndexError + return {'test': mock.Mock(required_images=False)} instances = {} suites = [] @@ -2656,7 +2655,7 @@ def mock_get_nowait(): ) pipeline_mock = mock.Mock() - pipeline_mock.get_nowait = mock.Mock(side_effect=mock_get_nowait) + pipeline_mock.pop = mock.Mock(side_effect=mock_get_nowait) done_queue_mock = mock.Mock() lock_mock = mock.Mock() results_mock = mock.Mock()
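
As an illustration of the ``required_images`` pytest fixture introduced above: it
returns the build-directory paths that twister passes via ``--required-image``. A
minimal sketch of a consuming test follows; the test name is hypothetical and the
``zephyr/zephyr.elf`` location assumes a default, non-sysbuild Zephyr build layout.

.. code-block:: python

   # Illustrative sketch only: relies on the 'required_images' fixture from this
   # patch, which yields a list of build-directory paths of the required images.
   from pathlib import Path


   def test_companion_image_artifacts(required_images):
       assert required_images, "expected at least one required image build directory"
       build_dir = Path(required_images[0])
       # In a plain (non-sysbuild) build, the companion firmware should be
       # available under zephyr/zephyr.elf inside its build directory.
       assert (build_dir / 'zephyr' / 'zephyr.elf').exists()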