14 changes: 6 additions & 8 deletions scripts/tests/twister/conftest.py
@@ -47,21 +47,20 @@ def tesenv_obj(test_data, testsuites_dir, tmpdir_factory):
     options.detailed_test_id = True
     env = TwisterEnv(options)
     env.board_roots = [os.path.join(test_data, "board_config", "1_level", "2_level")]
-    env.test_roots = [os.path.join(testsuites_dir, 'tests', testsuites_dir, 'samples')]
+    env.test_roots = [os.path.join(testsuites_dir, 'tests'),
+                      os.path.join(testsuites_dir, 'samples')]
     env.test_config = os.path.join(test_data, "test_config.yaml")
     env.outdir = tmpdir_factory.mktemp("sanity_out_demo")
     return env


 @pytest.fixture(name='class_testplan')
-def testplan_obj(test_data, class_env, testsuites_dir, tmpdir_factory):
+def testplan_obj(class_env):
     """ Pytest fixture to initialize and return the class TestPlan object"""
     env = class_env
-    env.board_roots = [test_data +"board_config/1_level/2_level/"]
-    env.test_roots = [testsuites_dir + '/tests', testsuites_dir + '/samples']
-    env.outdir = tmpdir_factory.mktemp("sanity_out_demo")
     plan = TestPlan(env)
+    plan.test_config = TestConfiguration(config_file=env.test_config)
     plan.options.outdir = env.outdir
     return plan

 @pytest.fixture(name='all_testsuites_dict')
@@ -84,15 +83,14 @@ def all_platforms_list(test_data, class_testplan):
     return plan.platforms

 @pytest.fixture
-def instances_fixture(class_testplan, platforms_list, all_testsuites_dict, tmpdir_factory):
+def instances_fixture(class_testplan, platforms_list, all_testsuites_dict):
     """ Pytest fixture to call add_instances function of Testsuite class
     and return the instances dictionary"""
-    class_testplan.outdir = tmpdir_factory.mktemp("sanity_out_demo")
     class_testplan.platforms = platforms_list
     platform = class_testplan.get_platform("demo_board_2")
     instance_list = []
     for _, testcase in all_testsuites_dict.items():
-        instance = TestInstance(testcase, platform, 'zephyr', class_testplan.outdir)
+        instance = TestInstance(testcase, platform, 'zephyr', class_testplan.env.outdir)
         instance_list.append(instance)
     class_testplan.add_instances(instance_list)
     return class_testplan.instances
11 changes: 11 additions & 0 deletions scripts/tests/twister/pytest_integration/test_harness_pytest.py
@@ -97,6 +97,17 @@ def test_pytest_command_extra_args_in_options(testinstance: TestInstance):
    assert command.index(pytest_args_from_yaml) < command.index(pytest_args_from_cmd[1])


def test_pytest_command_required_build_args(testinstance: TestInstance):
    """ Test that required build dirs are passed to pytest harness """
    pytest_harness = Pytest()
    required_builds = ['/req/build/dir', 'another/req/dir']
    testinstance.required_build_dirs = required_builds
    pytest_harness.configure(testinstance)
    command = pytest_harness.generate_command()
    for req_dir in required_builds:
        assert f'--required-build={req_dir}' in command


@pytest.mark.parametrize(
    ('pytest_root', 'expected'),
    [
131 changes: 131 additions & 0 deletions scripts/tests/twister/test_runner.py
@@ -2853,3 +2853,134 @@ def test_twisterrunner_get_cmake_filter_stages(filter, expected_result):
    result = TwisterRunner.get_cmake_filter_stages(filter, ['not', 'and'])

    assert sorted(result) == sorted(expected_result)


@pytest.mark.parametrize(
    'required_apps, processing_ready_keys, expected_result',
    [
        (['app1', 'app2'], ['app1', 'app2'], True),  # all apps ready
        (['app1', 'app2', 'app3'], ['app1', 'app2'], False),  # some apps missing
        ([], [], True),  # no required apps
        (['app1'], [], False),  # single app missing
    ],
    ids=['all_ready', 'some_missing', 'no_apps', 'single_missing']
)
def test_twisterrunner_are_required_apps_ready(required_apps, processing_ready_keys, expected_result):
    """Test _are_required_apps_ready method with various scenarios"""
    instances = {}
    suites = []
    env_mock = mock.Mock()
    tr = TwisterRunner(instances, suites, env=env_mock)

    instance_mock = mock.Mock()
    instance_mock.required_applications = required_apps

    processing_ready = {key: mock.Mock() for key in processing_ready_keys}

    result = tr._are_required_apps_ready(instance_mock, processing_ready)

    assert result is expected_result

@pytest.mark.parametrize(
    'app_statuses, expected_result',
    [
        ([TwisterStatus.PASS, TwisterStatus.PASS], True),  # all passed
        ([TwisterStatus.NOTRUN, TwisterStatus.NOTRUN], True),  # all notrun
        ([TwisterStatus.PASS, TwisterStatus.NOTRUN], True),  # mixed pass/notrun
        ([TwisterStatus.PASS, TwisterStatus.FAIL], False),  # one failed
        ([TwisterStatus.ERROR], False),  # single error
    ],
    ids=['all_pass', 'all_notrun', 'mixed_pass_notrun', 'one_fail', 'single_error']
)
def test_twisterrunner_are_all_required_apps_success(app_statuses, expected_result):
    """Test _are_all_required_apps_success method with various app statuses"""
    instances = {}
    suites = []
    env_mock = mock.Mock()
    tr = TwisterRunner(instances, suites, env=env_mock)

    instance_mock = mock.Mock()
    required_apps = [f'app{i + 1}' for i in range(len(app_statuses))]
    instance_mock.required_applications = required_apps

    processing_ready = {}
    for i, status in enumerate(app_statuses):
        app_instance = mock.Mock()
        app_instance.status = status
        app_instance.reason = f"Reason for app{i + 1}"
        processing_ready[f'app{i + 1}'] = app_instance

    result = tr._are_all_required_apps_success(instance_mock, processing_ready)
    assert result is expected_result

@pytest.mark.parametrize(
    'required_apps, ready_apps, expected_result, expected_actions',
    [
        ([], {}, True,
         {'requeue': False, 'skip': False, 'build_dirs': 0}),
        (['app1'], {}, False,
         {'requeue': True, 'skip': False, 'build_dirs': 0}),
        (['app1', 'app2'], {'app1': TwisterStatus.PASS}, False,
         {'requeue': True, 'skip': False, 'build_dirs': 0}),
        (['app1'], {'app1': TwisterStatus.FAIL}, False,
         {'requeue': False, 'skip': True, 'build_dirs': 0}),
        (['app1', 'app2'], {'app1': TwisterStatus.PASS, 'app2': TwisterStatus.NOTRUN}, True,
         {'requeue': False, 'skip': False, 'build_dirs': 2}),
    ],
    ids=['no_apps', 'not_ready_single_job', 'not_ready_multi_job',
         'apps_failed', 'apps_success']
)
def test_twisterrunner_are_required_apps_processed(required_apps, ready_apps,
                                                   expected_result, expected_actions):
    """Test are_required_apps_processed method with various scenarios"""
    # Setup TwisterRunner instances dict
    tr_instances = {}
    for app_name in required_apps:
        tr_instances[app_name] = mock.Mock(build_dir=f'/path/to/{app_name}')

    env_mock = mock.Mock()
    tr = TwisterRunner(tr_instances, [], env=env_mock)
    tr.jobs = 1

    instance_mock = mock.Mock()
    instance_mock.required_applications = required_apps[:]
    instance_mock.required_build_dirs = []

    # Setup testcases for skip scenarios
    if expected_actions['skip']:
        testcase_mock = mock.Mock()
        instance_mock.testcases = [testcase_mock]

    # Setup processing_ready with app instances
    processing_ready = {}
    for app_name, status in ready_apps.items():
        app_instance = mock.Mock()
        app_instance.status = status
        app_instance.reason = f"Reason for {app_name}"
        app_instance.build_dir = f'/path/to/{app_name}'
        processing_ready[app_name] = app_instance

    processing_queue = deque()
    task = {'test': instance_mock}

    result = tr.are_required_apps_processed(instance_mock, processing_queue, processing_ready, task)

    assert result is expected_result

    if expected_actions['requeue']:
        assert len(processing_queue) == 1
        assert processing_queue[0] == task

    if expected_actions['skip']:
        assert instance_mock.status == TwisterStatus.SKIP
        assert instance_mock.reason == "Required application failed"
        assert instance_mock.required_applications == []
        assert instance_mock.testcases[0].status == TwisterStatus.SKIP
        # Check for report task in queue
        assert any(item.get('op') == 'report' for item in processing_queue)

    assert len(instance_mock.required_build_dirs) == expected_actions['build_dirs']
    if expected_actions['build_dirs'] > 0:
        assert instance_mock.required_applications == []
100 changes: 100 additions & 0 deletions scripts/tests/twister/test_testplan.py
@@ -12,6 +12,7 @@
 import pytest

 from contextlib import nullcontext
+from pathlib import Path

 ZEPHYR_BASE = os.getenv("ZEPHYR_BASE")
 sys.path.insert(0, os.path.join(ZEPHYR_BASE, "scripts/pylib/twister"))
@@ -252,6 +253,105 @@ def test_apply_filters_part3(class_testplan, all_testsuites_dict, platforms_list
    filtered_instances = list(filter(lambda item: item.status == TwisterStatus.FILTER, class_testplan.instances.values()))
    assert not filtered_instances


def get_testsuite_for_given_test(plan: TestPlan, testname: str) -> TestSuite | None:
    """ Helper function to get testsuite object for a given testname"""
    for _, testsuite in plan.testsuites.items():
        if testname in testsuite.name:
            return testsuite
    return None


@pytest.fixture()
def testplan_with_one_instance(
    class_testplan: TestPlan, platforms_list, all_testsuites_dict
) -> TestPlan:
    """ Pytest fixture to initialize and return the class TestPlan object
    with one instance for 'sample_test.app' test on 'demo_board_1' platform"""
    class_testplan.platforms = platforms_list
    class_testplan.platform_names = [p.name for p in platforms_list]
    class_testplan.testsuites = all_testsuites_dict
    platform = class_testplan.get_platform("demo_board_1")
    testsuite = get_testsuite_for_given_test(class_testplan, 'sample_test.app')
    testinstance = TestInstance(testsuite, platform, 'zephyr', class_testplan.env.outdir)
    class_testplan.add_instances([testinstance])
    return class_testplan


def test_apply_changes_for_required_applications(testplan_with_one_instance: TestPlan):
    """ Testing apply_changes_for_required_applications function of TestPlan class in Twister """
    plan = testplan_with_one_instance
    testinstance_req = next(iter(plan.instances.values()))

    testsuite = get_testsuite_for_given_test(plan, 'test_a.check_1')
    testsuite.required_applications = [{'name': 'sample_test.app'}]
    platform = plan.get_platform("demo_board_1")
    testinstance = TestInstance(testsuite, platform, 'zephyr', plan.env.outdir)
    plan.add_instances([testinstance])

    plan.apply_changes_for_required_applications()
    # Check that the required application was added to the instance
    assert testinstance.required_applications[0] == testinstance_req.name


def test_apply_changes_for_required_applications_missing_app(testplan_with_one_instance: TestPlan):
    """ Test apply_changes_for_required_applications when required application is missing """
    plan = testplan_with_one_instance
    testsuite = get_testsuite_for_given_test(plan, 'test_a.check_1')
    # Set a required application that does not exist
    testsuite.required_applications = [{'name': 'nonexistent_app'}]
    platform = plan.get_platform("demo_board_1")
    testinstance = TestInstance(testsuite, platform, 'zephyr', plan.env.outdir)
    plan.add_instances([testinstance])

    plan.apply_changes_for_required_applications()
    # Check that the instance was filtered
    assert testinstance.status == TwisterStatus.FILTER
    assert "Missing required application" in testinstance.reason
    assert len(testinstance.required_applications) == 0


def test_apply_changes_for_required_applications_wrong_platform(testplan_with_one_instance: TestPlan):
    """ Test apply_changes_for_required_applications with not matched platform """
    plan = testplan_with_one_instance
    testsuite = get_testsuite_for_given_test(plan, 'test_a.check_1')
    testsuite.required_applications = [{'name': 'sample_test.app', 'platform': 'demo_board_2'}]
    platform = plan.get_platform("demo_board_2")
    testinstance = TestInstance(testsuite, platform, 'zephyr', plan.env.outdir)
    plan.add_instances([testinstance])

    plan.apply_changes_for_required_applications()
    # Check that the instance was filtered
    assert testinstance.status == TwisterStatus.FILTER
    assert "Missing required application" in testinstance.reason
    assert len(testinstance.required_applications) == 0


def test_apply_changes_for_required_applications_in_outdir(testplan_with_one_instance: TestPlan):
    """ Testing apply_changes_for_required_applications when required application is already in outdir
    and --no-clean option is used """
    plan = testplan_with_one_instance
    plan.options.no_clean = True
    req_app_in_outdir = "prebuilt_sample_test.app"

    testsuite = get_testsuite_for_given_test(plan, 'test_a.check_1')
    testsuite.required_applications = [{'name': req_app_in_outdir}]
    platform = plan.get_platform("demo_board_1")
    testinstance = TestInstance(testsuite, platform, 'zephyr', plan.env.outdir)
    plan.add_instances([testinstance])

    # create the required application directory in outdir to simulate prebuilt app
    req_app_dir = Path(plan.env.outdir) / platform.normalized_name / "test_dir" / req_app_in_outdir
    (req_app_dir / "zephyr").mkdir(parents=True, exist_ok=True)

    plan.apply_changes_for_required_applications()
    # Check that the required application was not added to the instance,
    # but the required build dir was added
    assert len(testinstance.required_applications) == 0
    assert len(testinstance.required_build_dirs) == 1
    assert str(req_app_dir) in testinstance.required_build_dirs


def test_add_instances_short(tmp_path, class_env, all_testsuites_dict, platforms_list):
    """ Testing add_instances() function of TestPlan class in Twister
    Test 1: instances dictionary keys have expected values (Platform Name + Testcase Name)