Commit cb3e447

Dzmitry Humianiuk authored

Merge pull request #96 from bigbZik/issue-info

Adding issue info to test result in RP

2 parents 8c4ddad + 4adf02e commit cb3e447

File tree

4 files changed: +108 additions, -6 deletions

README.rst

Lines changed: 23 additions & 0 deletions
@@ -68,6 +68,8 @@ The following parameters are optional:
 - :code:`rp_hierarchy_class = True` - Enables hierarchy for class, default `True`. Doesn't support 'xdist' plugin.
 - :code:`rp_hierarchy_parametrize = True` - Enables hierarchy parametrized tests, default `False`. Doesn't support 'xdist' plugin.
 - :code:`rp_hierarchy_dirs_level = 0` - Directory starting hierarchy level (from pytest.ini level) (default `0`)
+- :code:`rp_issue_marks = 'xfail' 'issue'` - Pytest marks that could be used to get issue information (id, type, reason)
+- :code:`rp_issue_system_url = https://bugzilla.olympus.f5net.com/show_bug.cgi?id=` - URL to get issue description (issue id from pytest mark will be added to this URL)
 - :code:`rp_verify_ssl = True` - Verify SSL when connecting to the server

 If you like to override the above parameters from command line, or from CI environment based on your build, then pass
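
For reference, the two new options added above sit alongside the other Report Portal settings in the pytest ini file. A minimal, illustrative sketch (the mark names and the URL are copied from the bullets above; they are examples, not required values):

.. code-block:: ini

    [pytest]
    rp_issue_marks = 'xfail' 'issue'
    rp_issue_system_url = https://bugzilla.olympus.f5net.com/show_bug.cgi?id=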
@@ -158,6 +160,27 @@ To run test with Report Portal you must provide '--reportportal' flag:
     py.test ./tests --reportportal


+Test issue info
+~~~~~~~~~~~~~~~
+
+Some pytest marks can be used to attach issue information to a skipped or failed test result.
+The list of these marks should be specified in the pytest ini file (see :code:`rp_issue_marks`).
+
+The following mark fields are used to get information about the test issue:
+
+- :code:`issue_id` - issue id (or list of ids) in the tracking system. The id will be added as a comment to the failed test result. If a URL is specified in the pytest ini file (see :code:`rp_issue_system_url`), the id will be added as a link to the tracking system.
+- :code:`reason` - a comment that will be added to the test failure description.
+- :code:`issue_type` - short name of the RP issue type that should be assigned to the failed or skipped test.
+
+Example:
+
+.. code-block:: python
+
+    @pytest.mark.issue(issue_id="111111", reason="Some bug", issue_type="PB")
+    def test():
+        assert False
+
+
 Troubleshooting
 ~~~~~~~~~

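
Since :code:`issue_id` may also be a list (see the bullet above and the list handling in listener.py below), here is a further hedged sketch. The mark name :code:`issue` matches the README example, while the ids and the :code:`AB` short name are purely illustrative and must correspond to an issue type defined in the Report Portal project:

.. code-block:: python

    import pytest

    # Illustrative only: several tracked issues attached to one failing test.
    # "AB" is assumed to be a short name defined in the RP project settings.
    @pytest.mark.issue(issue_id=["111111", "222222"], reason="Two related bugs", issue_type="AB")
    def test_known_failures():
        assert False

Each id is rendered into the result comment, and turned into a link when :code:`rp_issue_system_url` is configured.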

pytest_reportportal/listener.py

Lines changed: 46 additions & 2 deletions
@@ -41,7 +41,7 @@ def pytest_runtest_protocol(self, item):
         self.PyTestService.finish_pytest_item(item, self.result or 'SKIPPED', self.issue or None)

     @pytest.hookimpl(hookwrapper=True)
-    def pytest_runtest_makereport(self):
+    def pytest_runtest_makereport(self, item):
         report = (yield).get_result()

         if report.longrepr:
@@ -58,17 +58,61 @@ def pytest_runtest_makereport(self):
                 # This happens for example when a fixture fails to run
                 # causing the test to error
                 self.result = 'FAILED'
+                self._add_issue_info(item, report)
             elif report.skipped:
                 # This happens when a testcase is marked "skip". It will
                 # show in reportportal as not requiring investigation.
                 self.result = 'SKIPPED'
-                self.issue['issue_type'] = 'NOT_ISSUE'
+                self._add_issue_info(item, report)

         if report.when == 'call':
             if report.passed:
                 item_result = 'PASSED'
             elif report.skipped:
                 item_result = 'SKIPPED'
+                self._add_issue_info(item, report)
             else:
                 item_result = 'FAILED'
+                self._add_issue_info(item, report)
             self.result = item_result
+
+
+    def _add_issue_info(self, item, report):
+
+        issue_type = None
+        comment = ""
+        url = item.session.config.getini('rp_issue_system_url')
+        issue_marks = item.session.config.getini('rp_issue_marks')
+
+        for mark_name in issue_marks:
+            try:
+                mark = item.get_closest_marker(mark_name)
+            except AttributeError:
+                # pytest < 3.6
+                mark = item.get_marker(mark_name)
+
+            if mark:
+                if "reason" in mark.kwargs:
+                    comment += "\n" if comment else ""
+                    comment += mark.kwargs["reason"]
+                if "issue_id" in mark.kwargs:
+                    issue_ids = mark.kwargs["issue_id"]
+                    if not isinstance(issue_ids, list):
+                        issue_ids = [issue_ids]
+                    comment += "\n" if comment else ""
+                    comment += "Issues:"
+
+                    for issue_id in issue_ids:
+                        comment += " [{}]({}{})".format(issue_id, url, issue_id) if url else " {}".format(issue_id)
+
+                if "issue_type" in mark.kwargs:
+                    issue_type = mark.kwargs["issue_type"]
+
+        if comment:
+            self.issue['comment'] = comment
+
+        if issue_type and self.PyTestService.issue_types and (issue_type in self.PyTestService.issue_types):
+            self.issue['issue_type'] = self.PyTestService.issue_types[issue_type]
+            # self.issue['ignoreAnalyzer'] = True ???
+        elif (report.when == 'setup') and report.skipped:
+            self.issue['issue_type'] = 'NOT_ISSUE'
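
To make the effect of :code:`_add_issue_info` concrete, a rough standalone sketch of the comment it builds for the README example above (the values are copied from that example, and the link format mirrors the loop in the added code):

.. code-block:: python

    # Standalone illustration of the comment string built by _add_issue_info
    # for @pytest.mark.issue(issue_id="111111", reason="Some bug", issue_type="PB").
    url = "https://bugzilla.olympus.f5net.com/show_bug.cgi?id="
    issue_ids = ["111111"]

    comment = "Some bug"        # taken from the mark's `reason`
    comment += "\nIssues:"      # header added because `issue_id` is present
    for issue_id in issue_ids:
        comment += " [{}]({}{})".format(issue_id, url, issue_id) if url else " {}".format(issue_id)

    print(comment)
    # Some bug
    # Issues: [111111](https://bugzilla.olympus.f5net.com/show_bug.cgi?id=111111)

The resulting string is stored in :code:`self.issue['comment']`, and the :code:`PB` short name is translated to an issue-type locator via :code:`PyTestService.issue_types`.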

pytest_reportportal/plugin.py

Lines changed: 11 additions & 0 deletions
@@ -281,6 +281,17 @@ def pytest_addoption(parser):
         type='bool',
         help='Enables hierarchy for parametrized tests')

+    parser.addini(
+        'rp_issue_marks',
+        type='args',
+        default='',
+        help='Pytest marks to get issue information')
+
+    parser.addini(
+        'rp_issue_system_url',
+        default='',
+        help='URL to get issue description. Issue id from pytest mark will be added to this URL')
+
     parser.addini(
         'rp_verify_ssl',
         default=True,

pytest_reportportal/service.py

Lines changed: 28 additions & 4 deletions
@@ -95,6 +95,8 @@ def init_service(self, endpoint, project, uuid, log_batch_size,
                 log_batch_size=log_batch_size,
                 verify_ssl=verify_ssl
             )
+            self.project_settiings = self.RP.rp_client.get_project_settings() if self.RP else None
+            self.issue_types = self.get_issue_types()
         else:
             log.debug('The pytest is already initialized')
         return self.RP
@@ -289,6 +291,16 @@ def _stop_if_necessary(self):
         except queue.Empty:
             pass

+    def get_issue_types(self):
+        issue_types = {}
+        if not self.project_settiings:
+            return issue_types
+
+        for item_type in ("AUTOMATION_BUG", "PRODUCT_BUG", "SYSTEM_ISSUE", "NO_DEFECT", "TO_INVESTIGATE"):
+            for item in self.project_settiings["subTypes"][item_type]:
+                issue_types[item["shortName"]] = item["locator"]
+
+        return issue_types

     @staticmethod
     def _add_item_hier_parts_dirs(item, hier_flag, dirs_level, report_parts, dirs_parts, rp_name=""):
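
:code:`get_issue_types()` flattens the project settings returned by the server into a short-name-to-locator map. A hedged sketch of the assumed response shape (only :code:`subTypes`, :code:`shortName` and :code:`locator` are read by the code above; the concrete names and locators are illustrative):

.. code-block:: python

    # Illustrative project settings payload; only the keys used above matter.
    project_settings = {
        "subTypes": {
            "PRODUCT_BUG": [{"shortName": "PB", "locator": "pb001"}],
            "AUTOMATION_BUG": [{"shortName": "AB", "locator": "ab001"}],
            "SYSTEM_ISSUE": [{"shortName": "SI", "locator": "si001"}],
            "NO_DEFECT": [{"shortName": "ND", "locator": "nd001"}],
            "TO_INVESTIGATE": [{"shortName": "TI", "locator": "ti001"}],
        }
    }

    issue_types = {}
    for item_type in ("AUTOMATION_BUG", "PRODUCT_BUG", "SYSTEM_ISSUE", "NO_DEFECT", "TO_INVESTIGATE"):
        for item in project_settings["subTypes"][item_type]:
            issue_types[item["shortName"]] = item["locator"]

    # issue_types == {'PB': 'pb001', 'AB': 'ab001', 'SI': 'si001', 'ND': 'nd001', 'TI': 'ti001'}

Custom defect sub-types defined in the project appear under the same groups, so their short names can be used as :code:`issue_type` values in test marks as well.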
@@ -408,13 +420,25 @@ def _get_item_tags(self, item):
         # Try to extract names of @pytest.mark.* decorators used for test item
         # and exclude those which present in rp_ignore_tags parameter
         def get_marker_value(item, keyword):
-            marker = item.keywords.get(keyword)
+            try:
+                marker = item.get_closest_marker(keyword)
+            except AttributeError:
+                # pytest < 3.6
+                marker = item.keywords.get(keyword)
+
             return "{}:{}".format(keyword, marker.args[0]) \
                 if marker and marker.args else keyword

-        tags = [get_marker_value(item, k) for k in item.keywords
-                if item.get_marker(k) is not None
-                and k not in self.ignored_tags]
+        try:
+            tags = [get_marker_value(item, k) for k in item.keywords
+                    if item.get_closest_marker(k) is not None
+                    and k not in self.ignored_tags]
+        except AttributeError:
+            # pytest < 3.6
+            tags = [get_marker_value(item, k) for k in item.keywords
+                    if item.get_marker(k) is not None
+                    and k not in self.ignored_tags]
+
         tags.extend(item.session.config.getini('rp_tests_tags'))

         return tags
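
The try/except pattern used in the hunks above (and in listener.py) is a compatibility shim for the marker API change in pytest 3.6. A hedged sketch of the same idea as a reusable helper (the helper name is illustrative, not part of this commit):

.. code-block:: python

    def get_marker_compat(item, name):
        # Return the closest marker `name` on `item`: uses get_closest_marker
        # on pytest >= 3.6 and falls back to get_marker on older releases.
        try:
            return item.get_closest_marker(name)
        except AttributeError:
            # pytest < 3.6
            return item.get_marker(name)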
