Commit 5492e55

twister: Add unit tests for Pytest harness

Added unit tests for parsing of the JUnitXml report by the Pytest harness.

Signed-off-by: Lukasz Fundakowski <[email protected]>

1 parent 0520dfe · commit 5492e55
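
The new tests use pytest's pytester plugin (the testdir fixture) to generate a real JUnitXml report on the fly instead of checking in a static XML file. Purely for orientation, and separate from the commit itself, a minimal standalone sketch of that pattern (hypothetical test name; assumes the pytester plugin is enabled, e.g. pytest_plugins = "pytester" in conftest.py):

    # Sketch of the pytester pattern used by the new harness tests:
    # "testdir" writes a throwaway test file and runs pytest on it,
    # producing a real JUnitXml report that a parser can then consume.
    def test_generates_junitxml_report(testdir, tmp_path):
        report_xml = tmp_path / "results.xml"
        testdir.makepyfile("def test_ok():\n    assert True\n")
        result = testdir.runpytest("--junitxml", str(report_xml))
        result.assert_outcomes(passed=1)
        assert report_xml.exists()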

File tree

1 file changed: +144 -3 lines changed


scripts/tests/twister/test_harness.py

Lines changed: 144 additions & 3 deletions
@@ -1,5 +1,4 @@
 #!/usr/bin/env python3
-
 # Copyright(c) 2023 Google LLC
 # SPDX-License-Identifier: Apache-2.0
 
@@ -12,6 +11,7 @@
 import pytest
 import re
 import logging as logger
+import textwrap
 
 # ZEPHYR_BASE = os.getenv("ZEPHYR_BASE")
 from conftest import ZEPHYR_BASE
@@ -30,7 +30,7 @@
     Test,
 )
 from twisterlib.statuses import TwisterStatus
-from twisterlib.testsuite import TestSuite
+from twisterlib.testsuite import TestSuite, TestCase
 from twisterlib.testinstance import TestInstance
 
 GTEST_START_STATE = " RUN "
@@ -608,7 +608,7 @@ def test_pytest__generate_parameters_for_hardware(tmp_path, pty_value, hardware_
     assert "--twister-fixture=fixture2" in command
 
 
-def test__update_command_with_env_dependencies():
+def test_pytest__update_command_with_env_dependencies():
     cmd = ["cmd"]
     pytest_test = Pytest()
     mock.patch.object(Pytest, "PYTEST_PLUGIN_INSTALLED", False)
@@ -662,6 +662,147 @@ def test_pytest_run(tmp_path, caplog):
     assert exp_out in caplog.text
 
 
+
+class FakeTestInstance:
+
+    def __init__(self):
+        self.testcases = []
+        self.reason = ""
+
+    def add_testcase(self, name):
+        tc = TestCase(name)
+        self.testcases.append(tc)
+        return tc
+
+
+def get_test_case_by_name(testcases, name):
+    for tc in testcases:
+        if tc.name == name:
+            return tc
+
+
+@pytest.fixture
+def pytest_harness():
+    py_harness = Pytest()
+    py_harness.id = "tests.test_foobar"
+    py_harness.instance = FakeTestInstance()
+    return py_harness
+
+
+EXAMPLE_TESTS = textwrap.dedent("""\
+    import pytest
+
+    @pytest.fixture
+    def raise_exception():
+        raise Exception("Something went wrong")
+
+    def test_pass():
+        assert 1
+
+    def test_fail():
+        assert 0, "Not True"
+
+    def test_error(raise_exception):
+        assert 1
+
+    @pytest.mark.skip("WIP")
+    def test_skip():
+        assert 1
+""")
+
+
+def test_if_pytest_harness_parses_report_with_all_kinds_of_statuses(tmp_path, testdir, pytest_harness):
+    # Create JunitXml report
+    report_xml = tmp_path / "results.xml"
+    testdir.makepyfile(EXAMPLE_TESTS)
+    testdir.runpytest("--junitxml", str(report_xml))
+
+    pytest_harness._parse_report_file(report_xml)
+
+    assert pytest_harness.status == "failed"
+    assert pytest_harness.instance.reason == "1/4 pytest scenario(s) failed"
+    assert len(pytest_harness.instance.testcases) == 4
+    assert {tc.name for tc in pytest_harness.instance.testcases} == {
+        "tests.test_foobar.test_pass",
+        "tests.test_foobar.test_fail",
+        "tests.test_foobar.test_error",
+        "tests.test_foobar.test_skip"
+    }
+
+    passed_tc = get_test_case_by_name(pytest_harness.instance.testcases, "tests.test_foobar.test_pass")
+    assert passed_tc.status == "passed"
+    assert passed_tc.reason is None
+    assert passed_tc.output == ""
+    assert isinstance(passed_tc.duration, float)
+
+    failed_tc = get_test_case_by_name(pytest_harness.instance.testcases, "tests.test_foobar.test_fail")
+    assert failed_tc.status == "failed"
+    assert failed_tc.reason == "AssertionError: Not True\nassert 0"
+    assert failed_tc.output != ""
+    assert isinstance(failed_tc.duration, float)
+
+    error_tc = get_test_case_by_name(pytest_harness.instance.testcases, "tests.test_foobar.test_error")
+    assert error_tc.status == "error"
+    assert error_tc.reason == 'failed on setup with "Exception: Something went wrong"'
+    assert error_tc.output != ""
+    assert isinstance(error_tc.duration, float)
+
+    skipped_tc = get_test_case_by_name(pytest_harness.instance.testcases, "tests.test_foobar.test_skip")
+    assert skipped_tc.status == "skipped"
+    assert skipped_tc.reason == 'WIP'
+    assert skipped_tc.output != ""
+    assert isinstance(skipped_tc.duration, float)
+
+
+def test_if_pytest_harness_parses_report_with_passed_and_skipped_tests(tmp_path, testdir, pytest_harness):
+    # Create JunitXml report
+    report_xml = tmp_path / "results.xml"
+    testdir.makepyfile(EXAMPLE_TESTS)
+    testdir.runpytest("-k", "(test_pass or test_skip)", "--junitxml", str(report_xml))
+
+    pytest_harness._parse_report_file(report_xml)
+
+    assert pytest_harness.status == "passed"
+    assert pytest_harness.instance.reason == ""
+    assert len(pytest_harness.instance.testcases) == 2
+    assert {tc.name for tc in pytest_harness.instance.testcases} == {
+        "tests.test_foobar.test_pass",
+        "tests.test_foobar.test_skip"
+    }
+
+
+def test_if_pytest_harness_parses_report_with_passed_and_error_tests(tmp_path, testdir, pytest_harness):
+    # Create JunitXml report
+    report_xml = tmp_path / "results.xml"
+    testdir.makepyfile(EXAMPLE_TESTS)
+    testdir.runpytest("-k", "(test_pass or test_error)", "--junitxml", str(report_xml))
+
+    pytest_harness._parse_report_file(report_xml)
+
+    assert pytest_harness.status == "error"
+    assert pytest_harness.instance.reason == "Error during pytest execution"
+    assert len(pytest_harness.instance.testcases) == 2
+    assert {tc.name for tc in pytest_harness.instance.testcases} == {
+        "tests.test_foobar.test_pass",
+        "tests.test_foobar.test_error"
+    }
+
+def test_if_pytest_harness_parses_report_with_skipped_tests_only(tmp_path, testdir, pytest_harness):
+    # Create JunitXml report
+    report_xml = tmp_path / "results.xml"
+    testdir.makepyfile(EXAMPLE_TESTS)
+    testdir.runpytest("-k", "test_skip", "--junitxml", str(report_xml))
+
+    pytest_harness._parse_report_file(report_xml)
+
+    assert pytest_harness.status == "skipped"
+    assert pytest_harness.instance.reason == ""
+    assert len(pytest_harness.instance.testcases) == 1
+    assert {tc.name for tc in pytest_harness.instance.testcases} == {
+        "tests.test_foobar.test_skip"
+    }
+
+
 TEST_DATA_6 = [(None), ("Test")]
 
 
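Each new test above feeds a freshly generated JUnitXml report into Pytest._parse_report_file() and asserts on the harness status, the instance reason, and the per-testcase statuses. The harness's actual parser is not part of this diff; purely as a mental model of what the tests exercise, classifying testcases in a pytest-generated JUnitXml report can be sketched with the standard library as follows (illustrative helper, not Twister code):

    # Illustrative only: map each <testcase> in a pytest JUnitXml report
    # to passed/failed/error/skipped. pytest marks the outcome with a
    # child <failure>, <error> or <skipped> element; no child means pass.
    import xml.etree.ElementTree as ET

    def classify_junit_report(path):
        statuses = {}
        for case in ET.parse(path).getroot().iter("testcase"):
            if case.find("failure") is not None:
                statuses[case.get("name")] = "failed"
            elif case.find("error") is not None:
                statuses[case.get("name")] = "error"
            elif case.find("skipped") is not None:
                statuses[case.get("name")] = "skipped"
            else:
                statuses[case.get("name")] = "passed"
        return statuses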