-"""Tests for the TUI module."""
+"""Tests for the TUI module.
+
+These are smoke tests to ensure the UI doesn't crash.
+We don't test specific text/formatting as that's brittle and changes often.
+"""

 from pathlib import Path
-from typing import Any, TypedDict

 import pytest
-from rich.console import Console
-from textual.containers import Container, VerticalScroll

 from datanomy.reader import ParquetReader
 from datanomy.tui import DatanomyApp


-class FileDataFixture(TypedDict):
-    """Type definition for test file data."""
-
-    file_size: str
-    num_rows: int
-    num_row_groups: int
-    schema: dict[str, str]
-
-
-@pytest.fixture
-def file(request: pytest.FixtureRequest) -> Any:
-    """Indirect fixture to get other fixtures by name."""
-    return request.getfixturevalue(request.param)
-
-
-test_data_fixtures: dict[str, FileDataFixture] = {
-    "simple.parquet": {
-        "file_size": "0.00",
-        "num_rows": 5,
-        "num_row_groups": 1,
-        "schema": {
-            "id": "int64",
-            "name": "string",
-            "age": "int64",
-            "score": "double",
-        },
-    },
-    "multi_row_group.parquet": {
-        "file_size": "0.11",
-        "num_rows": 10000,
-        "num_row_groups": 5,
-        "schema": {
-            "id": "int64",
-            "category": "string",
-            "value": "int64",
-        },
-    },
-    "complex.parquet": {
-        "file_size": "0.00",
-        "num_rows": 3,
-        "num_row_groups": 1,
-        "schema": {
-            "id": "int64",
-            "data": "struct<x: int64, y: int64>",
-            "tags": "list<element: string>",
-        },
-    },
-    "empty.parquet": {
-        "file_size": "0.00",
-        "num_rows": 0,
-        "num_row_groups": 1,
-        "schema": {
-            "id": "int64",
-            "name": "string",
-        },
-    },
-    "large_schema.parquet": {
-        "file_size": "0.01",
-        "num_rows": 3,
-        "num_row_groups": 1,
-        "schema": {f"col_{i}": "int64" for i in range(50)},
-    },
-}
-
-
-async def check_app_for_file(filename: Path) -> None:
-    reader = ParquetReader(filename)
+@pytest.mark.asyncio
+async def test_app_launches_without_crash(simple_parquet: Path) -> None:
+    """Test that app launches and runs without crashing."""
+    reader = ParquetReader(simple_parquet)
+    app = DatanomyApp(reader)
+
+    async with app.run_test():
+        # If we get here, app launched successfully
+        assert app is not None
+
+
+@pytest.mark.asyncio
+async def test_app_has_required_widgets(simple_parquet: Path) -> None:
+    """Test that all expected widgets are present."""
+    reader = ParquetReader(simple_parquet)
+    app = DatanomyApp(reader)
+
+    async with app.run_test():
+        # Verify core widgets exist
+        assert app.query_one("#file-info") is not None
+        assert app.query_one("#schema") is not None
+        assert app.query_one("#row-groups") is not None
+
+
+@pytest.mark.asyncio
+async def test_widgets_render_without_error(simple_parquet: Path) -> None:
+    """Test that all widgets can render without throwing exceptions."""
+    reader = ParquetReader(simple_parquet)
+    app = DatanomyApp(reader)
+
+    async with app.run_test():
+        # Call render on each widget - will raise if there's an error
+        file_info = app.query_one("#file-info")
+        file_info.render()
+
+        schema = app.query_one("#schema")
+        schema.render()
+
+        row_groups = app.query_one("#row-groups")
+        row_groups.render()
+
+
+@pytest.mark.asyncio
+async def test_app_with_empty_file(empty_parquet: Path) -> None:
+    """Test that app handles empty Parquet files."""
+    reader = ParquetReader(empty_parquet)
+    app = DatanomyApp(reader)
+
+    async with app.run_test():
+        # Should not crash with empty file
+        app.query_one("#file-info").render()
+        app.query_one("#schema").render()
+        app.query_one("#row-groups").render()
+
+
+@pytest.mark.asyncio
+async def test_app_with_complex_schema(complex_schema_parquet: Path) -> None:
+    """Test that app handles complex nested schemas."""
+    reader = ParquetReader(complex_schema_parquet)
     app = DatanomyApp(reader)
+
     async with app.run_test():
-        assert app.title == "DatanomyApp"
-        console = Console()
-        file_info_widget = (
-            app.query_one(VerticalScroll).query_one(Container).query_one("#file-info")
-        )
-        with console.capture() as capture:
-            console.print(file_info_widget.render())
-        file_info = capture.get()
-        file_data = test_data_fixtures[filename.name]
-        assert (
-            f"File: {filename.name}\nSize: {file_data['file_size']} MB\nRows: {file_data['num_rows']:,}\nRow Groups: {file_data['num_row_groups']}"
-            in file_info
-        )
-
-        schema_widget = (
-            app.query_one(VerticalScroll).query_one(Container).query_one("#schema")
-        )
-        with console.capture() as capture:
-            console.print(schema_widget.render())
-        schema_info = capture.get()
-        for field, dtype in file_data["schema"].items():
-            assert f"{field}: {dtype}" in schema_info
-
-        row_groups_widget = (
-            app.query_one(VerticalScroll).query_one(Container).query_one("#row-groups")
-        )
-        with console.capture() as capture:
-            console.print(row_groups_widget.render())
-        row_groups_info = capture.get()
-        for i in range(file_data["num_row_groups"]):
-            assert (
-                f"Row Group {i}: {int(file_data['num_rows']) // int(file_data['num_row_groups']):,} rows"
-                in row_groups_info
-            )
+        # Should handle nested types without crashing
+        app.query_one("#schema").render()


 @pytest.mark.asyncio
-@pytest.mark.parametrize(
-    "file",
-    [
-        "simple_parquet",
-        "multi_row_group_parquet",
-        "complex_schema_parquet",
-        "empty_parquet",
-        "large_schema_parquet",
-    ],
-    indirect=True,
-)
-async def test_containers_with_files(
-    file: Path,
-) -> None:
-    await check_app_for_file(file)
+async def test_app_with_many_columns(large_schema_parquet: Path) -> None:
+    """Test that app handles files with many columns."""
+    reader = ParquetReader(large_schema_parquet)
+    app = DatanomyApp(reader)
+
+    async with app.run_test():
+        # Should handle large schema without crashing
+        app.query_one("#schema").render()
+
+
+@pytest.mark.asyncio
+async def test_app_with_multiple_row_groups(multi_row_group_parquet: Path) -> None:
+    """Test that app handles multiple row groups."""
+    reader = ParquetReader(multi_row_group_parquet)
+    app = DatanomyApp(reader)
+
+    async with app.run_test():
+        # Should handle multiple row groups without crashing
+        app.query_one("#row-groups").render()
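
The rewritten tests depend on pytest fixtures (simple_parquet, empty_parquet, complex_schema_parquet, large_schema_parquet, multi_row_group_parquet) that are defined elsewhere in the suite, presumably in a conftest.py, and the @pytest.mark.asyncio marker additionally requires the pytest-asyncio plugin. Below is a minimal sketch of what such a conftest.py could look like, built with pyarrow; the fixture names and file shapes come from the removed test_data_fixtures dict, but the construction code itself is an assumption, not the project's actual fixture implementation.

# conftest.py - illustrative sketch only; assumes pyarrow is available.
from pathlib import Path

import pyarrow as pa
import pyarrow.parquet as pq
import pytest


@pytest.fixture
def simple_parquet(tmp_path: Path) -> Path:
    # 5 rows, 1 row group: id/name/age/score, per the removed fixture data.
    table = pa.table(
        {
            "id": pa.array([1, 2, 3, 4, 5], type=pa.int64()),
            "name": pa.array(["a", "b", "c", "d", "e"], type=pa.string()),
            "age": pa.array([20, 25, 30, 35, 40], type=pa.int64()),
            "score": pa.array([1.0, 2.5, 3.0, 4.5, 5.0], type=pa.float64()),
        }
    )
    path = tmp_path / "simple.parquet"
    pq.write_table(table, path)
    return path


@pytest.fixture
def empty_parquet(tmp_path: Path) -> Path:
    # Zero rows but a real schema, so the metadata widgets still have input.
    schema = pa.schema([("id", pa.int64()), ("name", pa.string())])
    path = tmp_path / "empty.parquet"
    pq.write_table(schema.empty_table(), path)
    return path


@pytest.fixture
def complex_schema_parquet(tmp_path: Path) -> Path:
    # Nested struct and list columns, matching the removed schema description.
    table = pa.table(
        {
            "id": pa.array([1, 2, 3], type=pa.int64()),
            "data": pa.array(
                [{"x": i, "y": i * 2} for i in range(3)],
                type=pa.struct([("x", pa.int64()), ("y", pa.int64())]),
            ),
            "tags": pa.array([["a"], ["b", "c"], []], type=pa.list_(pa.string())),
        }
    )
    path = tmp_path / "complex.parquet"
    pq.write_table(table, path)
    return path


@pytest.fixture
def large_schema_parquet(tmp_path: Path) -> Path:
    # 50 int64 columns, mirroring the removed {f"col_{i}": "int64"} schema.
    table = pa.table(
        {f"col_{i}": pa.array([1, 2, 3], type=pa.int64()) for i in range(50)}
    )
    path = tmp_path / "large_schema.parquet"
    pq.write_table(table, path)
    return path


@pytest.fixture
def multi_row_group_parquet(tmp_path: Path) -> Path:
    # 10,000 rows forced into 5 row groups of 2,000 via row_group_size.
    table = pa.table(
        {
            "id": pa.array(range(10_000), type=pa.int64()),
            "category": pa.array(["a", "b"] * 5_000, type=pa.string()),
            "value": pa.array(range(10_000), type=pa.int64()),
        }
    )
    path = tmp_path / "multi_row_group.parquet"
    pq.write_table(table, path, row_group_size=2_000)
    return path

Building the files in tmp_path keeps every test hermetic, and row_group_size=2_000 is what would yield the five row groups that test_app_with_multiple_row_groups exercises.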