Skip to content

Commit 9da8721

Browse files
committed
feat(logfiles): add function for checking expected messages
Refactored the `expect_errors` function by extracting the log message checking logic into a separate `_check_msgs_presence_in_logs` function. Added a new `expect_messages` context manager for checking expected messages in logs. This improves code readability and reusability.
1 parent 7ed3393 commit 9da8721

File tree

1 file changed

+71
-28
lines changed

1 file changed

+71
-28
lines changed

cardano_node_tests/utils/logfiles.py

Lines changed: 71 additions & 28 deletions
Original file line numberDiff line numberDiff line change
@@ -270,33 +270,13 @@ def add_ignore_rule(
270270
infile.write(f"{files_glob};;{skip_after};;{regex}\n")
271271

272272

273-
@contextlib.contextmanager
274-
def expect_errors(regex_pairs: tp.List[tp.Tuple[str, str]], worker_id: str) -> tp.Iterator[None]:
275-
"""Make sure the expected errors are present in logs.
276-
277-
Args:
278-
regex_pairs: [(glob, regex)] - A list of regexes that need to be present in files
279-
described by the glob.
280-
worker_id: The id of the pytest-xdist worker (the `worker_id` fixture) that the test
281-
is running on.
282-
"""
283-
state_dir = cluster_nodes.get_cluster_env().state_dir
284-
285-
glob_list = []
286-
for files_glob, regex in regex_pairs:
287-
add_ignore_rule(files_glob=files_glob, regex=regex, ignore_file_id=worker_id)
288-
glob_list.append(files_glob)
289-
# Resolve the globs
290-
_expanded_paths = [list(state_dir.glob(glob_item)) for glob_item in glob_list]
291-
# Flatten the list
292-
expanded_paths = list(itertools.chain.from_iterable(_expanded_paths))
293-
# Record each end-of-file as a starting offset for searching the log file
294-
seek_offsets = {str(p): helpers.get_eof_offset(p) for p in expanded_paths}
295-
296-
timestamp = time.time()
297-
298-
yield
299-
273+
def _check_msgs_presence_in_logs(
274+
regex_pairs: tp.List[tp.Tuple[str, str]],
275+
seek_offsets: tp.Dict[str, int],
276+
state_dir: pl.Path,
277+
timestamp: float,
278+
) -> None:
279+
"""Make sure the expected messages are present in logs."""
300280
errors = []
301281
for files_glob, regex in regex_pairs:
302282
regex_comp = re.compile(regex)
@@ -307,7 +287,7 @@ def expect_errors(regex_pairs: tp.List[tp.Tuple[str, str]], worker_id: str) -> t
307287
if ROTATED_RE.match(logfile):
308288
continue
309289

310-
# Search for the expected error
290+
# Search for the expected string
311291
seek = seek_offsets.get(logfile) or 0
312292
line_found = False
313293
for logfile_rec in _get_rotated_logs(
@@ -329,6 +309,69 @@ def expect_errors(regex_pairs: tp.List[tp.Tuple[str, str]], worker_id: str) -> t
329309
raise AssertionError(errors_joined) from None
330310

331311

312+
@contextlib.contextmanager
def expect_errors(regex_pairs: tp.List[tp.Tuple[str, str]], worker_id: str) -> tp.Iterator[None]:
    """Make sure the expected errors are present in logs.

    Context manager.

    Args:
        regex_pairs: [(glob, regex)] - A list of regexes that need to be present in files
            described by the glob.
        worker_id: The id of the pytest-xdist worker (the `worker_id` fixture) that the test
            is running on.
    """
    state_dir = cluster_nodes.get_cluster_env().state_dir

    # Register an ignore rule for every expected error so it is not reported as an
    # unexpected one elsewhere, and remember the corresponding file glob.
    globs = []
    for files_glob, regex in regex_pairs:
        add_ignore_rule(files_glob=files_glob, regex=regex, ignore_file_id=worker_id)
        globs.append(files_glob)

    # Expand all globs and flatten the matches into a single list of log files.
    matched_logs = [logfile for glob_item in globs for logfile in state_dir.glob(glob_item)]
    # Searching starts at the current end-of-file of each log.
    seek_offsets = {str(logfile): helpers.get_eof_offset(logfile) for logfile in matched_logs}

    timestamp = time.time()

    yield

    _check_msgs_presence_in_logs(
        regex_pairs=regex_pairs, seek_offsets=seek_offsets, state_dir=state_dir, timestamp=timestamp
    )
344+
345+
346+
@contextlib.contextmanager
def expect_messages(regex_pairs: tp.List[tp.Tuple[str, str]]) -> tp.Iterator[None]:
    """Make sure the expected messages are present in logs.

    Context manager.

    Args:
        regex_pairs: [(glob, regex)] - A list of regexes that need to be present in files
            described by the glob.
    """
    state_dir = cluster_nodes.get_cluster_env().state_dir

    # Expand the glob of every pair and flatten the matches into one list of log files.
    matched_logs = [
        logfile for files_glob, _regex in regex_pairs for logfile in state_dir.glob(files_glob)
    ]
    # Searching starts at the current end-of-file of each log.
    seek_offsets = {str(logfile): helpers.get_eof_offset(logfile) for logfile in matched_logs}

    timestamp = time.time()

    yield

    _check_msgs_presence_in_logs(
        regex_pairs=regex_pairs, seek_offsets=seek_offsets, state_dir=state_dir, timestamp=timestamp
    )
373+
374+
332375
def search_cluster_logs() -> tp.List[tp.Tuple[pl.Path, str]]:
333376
"""Search cluster logs for errors."""
334377
cluster_env = cluster_nodes.get_cluster_env()

0 commit comments

Comments
 (0)