Commit 23931fd
Add a hack to ignore logs unrelated to the tests
Some resampling tests started getting log errors about gRPC tasks not being properly closed, which are clearly artefacts from other tests and shouldn't appear there. For example:

    assert config.initial_buffer_len == init_len
>   assert caplog.record_tuples == [
        (
            "frequenz.sdk.timeseries._resampling",
            logging.WARNING,
            f"initial_buffer_len ({init_len}) is bigger than "
            f"warn_buffer_len ({DEFAULT_BUFFER_LEN_WARN})",
        )
    ]
E   assert [('asyncio', 40, "Task was destroyed but it is pending!\ntask: <Task pending name='raw-component-data-8' coro=<MicrogridGrpcClient._component_data_task() done, defined at /home/luca/devel/sdk/src/frequenz/sdk/microgrid/client/_client.py:298> wait_for=<Future pending cb=[<TaskWakeupMethWrapper object at 0x7f9224472ca0>()]>>"), ('asyncio', 40, "Task was destroyed but it is pending!\ntask: <Task pending name='raw-component-data-18' coro=<MicrogridGrpcClient._component_data_task() done, defined at /home/luca/devel/sdk/src/frequenz/sdk/microgrid/client/_client.py:298> wait_for=<Future pending cb=[<TaskWakeupMethWrapper object at 0x7f9224472c10>()]>>"), ('frequenz.sdk.timeseries._resampling', 30, 'initial_buffer_len (257) is bigger than warn_buffer_len (128)')] == [('frequenz.sdk.timeseries._resampling', 30, 'initial_buffer_len (257) is bigger than warn_buffer_len (128)')]
E   At index 0 diff: ('asyncio', 40, "Task was destroyed but it is pending!\ntask: <Task pending name='raw-component-data-8' coro=<MicrogridGrpcClient._component_data_task() done, defined at /home/luca/devel/sdk/src/frequenz/sdk/microgrid/client/_client.py:298> wait_for=<Future pending cb=[<TaskWakeupMethWrapper object at 0x7f9224472ca0>()]>>") != ('frequenz.sdk.timeseries._resampling', 30, 'initial_buffer_len (257) is bigger than warn_buffer_len (128)')
E   Left contains 2 more items, first extra item: ('asyncio', 40, "Task was destroyed but it is pending!\ntask: <Task pending name='raw-component-data-18' coro=<Microgr...k/microgrid/client/_client.py:298> wait_for=<Future pending cb=[<TaskWakeupMethWrapper object at 0x7f9224472c10>()]>>")
E   Full diff:
E     [
E   +  ('asyncio',
E   +   40,
E   +   'Task was destroyed but it is pending!\n'
E   +   "task: <Task pending name='raw-component-data-8' "
E   +   'coro=<MicrogridGrpcClient._component_data_task() done, defined at '
E   +   '/home/luca/devel/sdk/src/frequenz/sdk/microgrid/client/_client.py:298> '
E   +   'wait_for=<Future pending cb=[<TaskWakeupMethWrapper object at '
E   +   '0x7f9224472ca0>()]>>'),
E   +  ('asyncio',
E   +   40,
E   +   'Task was destroyed but it is pending!\n'
E   +   "task: <Task pending name='raw-component-data-18' "
E   +   'coro=<MicrogridGrpcClient._component_data_task() done, defined at '
E   +   '/home/luca/devel/sdk/src/frequenz/sdk/microgrid/client/_client.py:298> '
E   +   'wait_for=<Future pending cb=[<TaskWakeupMethWrapper object at '
E   +   '0x7f9224472c10>()]>>'),
E       ('frequenz.sdk.timeseries._resampling',
E        30,
E        'initial_buffer_len (257) is bigger than warn_buffer_len (128)'),
E     ]

tests/timeseries/test_resampling.py:113: AssertionError

Signed-off-by: Leandro Lucarella <[email protected]>
1 parent 8ce069a commit 23931fd
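
The failure above is the classic asyncio "Task was destroyed but it is pending!" error: a task that is still pending when it is garbage-collected is reported on the `asyncio` logger, and pytest's caplog then captures that record in whatever test happens to be running. A minimal, self-contained sketch of that mechanism (not code from this repository; the task name merely mirrors the one in the failure output):

import asyncio
import gc


async def _never_finishes() -> None:
    # Stands in for a long-running streaming task that nobody cancels.
    await asyncio.sleep(3600)


loop = asyncio.new_event_loop()
task = loop.create_task(_never_finishes(), name="raw-component-data-8")
loop.run_until_complete(asyncio.sleep(0))  # let the task run up to its first await
loop.close()  # the loop goes away while the task is still pending

# When the pending task is garbage-collected, Task.__del__ reports
# "Task was destroyed but it is pending!" on the `asyncio` logger.
del task
gc.collect()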

File tree

1 file changed: +20 −9 lines changed

tests/timeseries/test_resampling.py

Lines changed: 20 additions & 9 deletions
@@ -5,6 +5,8 @@
 Tests for the `TimeSeriesResampler`
 """
 
+from __future__ import annotations
+
 import logging
 from datetime import datetime, timedelta, timezone
 from typing import AsyncIterator, Iterator
@@ -94,7 +96,8 @@ async def test_resampler_config_len_ok(
         initial_buffer_len=init_len,
     )
     assert config.initial_buffer_len == init_len
-    assert caplog.records == []
+    # Ignore errors produced by wrongly finalized gRPC server in unrelated tests
+    assert _filter_logs(caplog.record_tuples, logger_name="") == []
 
 
 @pytest.mark.parametrize(
@@ -110,13 +113,11 @@ async def test_resampler_config_len_warn(
         initial_buffer_len=init_len,
     )
     assert config.initial_buffer_len == init_len
-    for record in caplog.records:
-        assert record.levelname == "WARNING"
-    assert caplog.text.startswith("")
-    assert (
-        caplog.text
-        == f"initial_buffer_len ({init_len}) is bigger than {DEFAULT_BUFFER_LEN_WARN}"
-    assert caplog.record_tuples == [
+    # Ignore errors produced by wrongly finalized gRPC server in unrelated tests
+    assert _filter_logs(
+        caplog.record_tuples,
+        logger_name="frequenz.sdk.timeseries._resampling",
+    ) == [
         (
             "frequenz.sdk.timeseries._resampling",
             logging.WARNING,
@@ -156,7 +157,11 @@ async def test_helper_buffer_too_big(
         fake_time.shift(1)
 
     _ = helper.resample(datetime.now(timezone.utc))
-    assert caplog.record_tuples == [
+    # Ignore errors produced by wrongly finalized gRPC server in unrelated tests
+    assert _filter_logs(
+        caplog.record_tuples,
+        logger_name="frequenz.sdk.timeseries._resampling",
+    ) == [
        (
            "frequenz.sdk.timeseries._resampling",
            logging.ERROR,
@@ -698,3 +703,9 @@ def _get_buffer_len(resampler: Resampler, source_recvr: Source) -> int:
     blen = resampler._resamplers[source_recvr]._helper._buffer.maxlen
     assert blen is not None
     return blen
+
+
+def _filter_logs(
+    record_tuples: list[tuple[str, int, str]], *, logger_name: str
+) -> list[tuple[str, int, str]]:
+    return [t for t in record_tuples if t[0] == logger_name]
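
Taken on its own, the new `_filter_logs` helper simply keeps the records emitted by one logger and drops everything else before the test asserts on them. A small standalone sketch of how it behaves (the sample records are taken from the failure output in the commit message; the module-level assert is only for illustration, not how the tests call it):

from __future__ import annotations

import logging


def _filter_logs(
    record_tuples: list[tuple[str, int, str]], *, logger_name: str
) -> list[tuple[str, int, str]]:
    # Keep only the (logger, level, message) tuples emitted by `logger_name`.
    return [t for t in record_tuples if t[0] == logger_name]


# Records shaped like pytest's `caplog.record_tuples`.
records = [
    ("asyncio", logging.ERROR, "Task was destroyed but it is pending!"),
    (
        "frequenz.sdk.timeseries._resampling",
        logging.WARNING,
        "initial_buffer_len (257) is bigger than warn_buffer_len (128)",
    ),
]

# The stray `asyncio` error is dropped, so the assertion only sees the
# resampling warning the test actually cares about.
assert _filter_logs(records, logger_name="frequenz.sdk.timeseries._resampling") == [
    (
        "frequenz.sdk.timeseries._resampling",
        logging.WARNING,
        "initial_buffer_len (257) is bigger than warn_buffer_len (128)",
    )
]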
