Skip to content

Commit 8b2b775

Browse files
author
bosd
committed
✅ Export threaded Improve coverage unhappy paths
1 parent 4fb083b commit 8b2b775

File tree

1 file changed

+117
-0
lines changed

1 file changed

+117
-0
lines changed

tests/test_export_threaded.py

Lines changed: 117 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -257,3 +257,120 @@ def test_export_handles_no_records_found(
257257

258258
with open(output_file) as f:
259259
assert f.read().strip() == "id,name"
260+
261+
def test_export_handles_memory_error_fallback(
    self, mock_conf_lib: MagicMock, tmp_path: Path
) -> None:
    """Tests that the batch is split and retried on server MemoryError."""
    # --- Arrange ---
    csv_path = tmp_path / "output.csv"
    model_mock = mock_conf_lib.return_value.get_model.return_value
    model_mock.search.return_value = [1, 2, 3, 4]

    # The first read blows up with a server-side MemoryError payload;
    # the two follow-up reads (the split retries) each return half the rows.
    server_memory_error = Exception(
        {
            "code": 200,
            "message": "Odoo Server Error",
            "data": {"name": "builtins.MemoryError", "debug": "..."},
        }
    )
    first_retry_rows = [{"id": 1, "name": "A"}, {"id": 2, "name": "B"}]
    second_retry_rows = [{"id": 3, "name": "C"}, {"id": 4, "name": "D"}]
    model_mock.read.side_effect = [
        server_memory_error,
        first_retry_rows,
        second_retry_rows,
    ]
    model_mock.fields_get.return_value = {
        "id": {"type": "integer"},
        "name": {"type": "char"},
    }

    # --- Act ---
    result_df = export_data(
        config_file="dummy.conf",
        model="res.partner",
        domain=[],
        header=["id", "name"],
        output=str(csv_path),
        technical_names=True,
        streaming=True,
        batch_size=4,
    )

    # --- Assert ---
    # Streaming mode writes to disk and returns no DataFrame.
    assert result_df is None
    assert csv_path.exists()
    # One failed oversized read plus the two successful split retries.
    assert model_mock.read.call_count == 3

    # Every record from the retries must have landed on disk.
    on_disk_df = pl.read_csv(csv_path, separator=";")
    expected_df = pl.DataFrame({"id": [1, 2, 3, 4], "name": ["A", "B", "C", "D"]})
    assert_frame_equal(on_disk_df.sort("id"), expected_df.sort("id"))
311+
def test_export_handles_empty_batch_result(
    self, mock_conf_lib: MagicMock, tmp_path: Path
) -> None:
    """Tests that an empty result from a batch is handled gracefully."""
    # --- Arrange ---
    csv_path = tmp_path / "output.csv"
    model_mock = mock_conf_lib.return_value.get_model.return_value
    model_mock.search.return_value = [1, 2]
    # First batch yields one record; the second comes back empty.
    model_mock.read.side_effect = [[{"id": 1, "name": "A"}], []]
    model_mock.fields_get.return_value = {
        "id": {"type": "integer"},
        "name": {"type": "char"},
    }

    # --- Act ---
    export_data(
        config_file="dummy.conf",
        model="res.partner",
        domain=[],
        header=["id", "name"],
        output=str(csv_path),
        technical_names=True,
        batch_size=1,
    )

    # --- Assert ---
    # Only the record from the non-empty batch should be on disk.
    on_disk_df = pl.read_csv(csv_path, separator=";")
    assert len(on_disk_df) == 1
    assert on_disk_df["id"][0] == 1
343+
def test_export_handles_permanent_worker_failure(
    self, mock_conf_lib: MagicMock, tmp_path: Path
) -> None:
    """Tests that a non-MemoryError exception in a worker is survivable."""
    # --- Arrange ---
    csv_path = tmp_path / "output.csv"
    model_mock = mock_conf_lib.return_value.get_model.return_value
    model_mock.search.return_value = [1, 2]
    # One batch succeeds; the other raises an error that must NOT trigger
    # the MemoryError split-and-retry path.
    model_mock.read.side_effect = [
        [{"id": 1, "name": "A"}],
        ValueError("A permanent error"),
    ]
    model_mock.fields_get.return_value = {
        "id": {"type": "integer"},
        "name": {"type": "char"},
    }

    # --- Act ---
    export_data(
        config_file="dummy.conf",
        model="res.partner",
        domain=[],
        header=["id", "name"],
        output=str(csv_path),
        technical_names=True,
        batch_size=1,
    )

    # --- Assert ---
    # The export survives the failed worker and keeps the good batch.
    assert csv_path.exists()
    on_disk_df = pl.read_csv(csv_path, separator=";")
    assert len(on_disk_df) == 1

0 commit comments

Comments
 (0)