Skip to content

Commit 1b2a9b2

Browse files
committed
tests: Use testcontainers for all Redis and PSQL tests, filter warnings
Signed-off-by: Ford <[email protected]>
1 parent 6673078 commit 1b2a9b2

File tree

3 files changed

+47
-17
lines changed

3 files changed

+47
-17
lines changed

pyproject.toml

Lines changed: 14 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -53,7 +53,8 @@ delta_lake = [
5353
]
5454

5555
iceberg = [
56-
"pyiceberg[sql-sqlite]>=0.9.1",
56+
"pyiceberg[sql-sqlite]>=0.10.0",
57+
"pydantic>=2.0,<2.12", # PyIceberg 0.10.0 has issues with Pydantic 2.12+
5758
]
5859

5960
snowflake = [
@@ -68,7 +69,8 @@ all_loaders = [
6869
"psycopg2-binary>=2.9.0", # PostgreSQL
6970
"redis>=4.5.0", # Redis
7071
"deltalake>=1.0.2", # Delta Lake (consistent version)
71-
"pyiceberg[sql-sqlite]>=0.9.1", # Apache Iceberg
72+
"pyiceberg[sql-sqlite]>=0.10.0", # Apache Iceberg
73+
"pydantic>=2.0,<2.12", # PyIceberg 0.10.0 compatibility
7274
"snowflake-connector-python>=3.5.0", # Snowflake
7375
"lmdb>=1.4.0", # LMDB
7476
]
@@ -78,11 +80,11 @@ test = [
7880
"pytest-asyncio>=0.21.0",
7981
"pytest-mock>=3.10.0",
8082
"pytest-cov>=4.0.0",
81-
"pytest-xdist>=3.0.0", # Parallel test execution
82-
"pytest-benchmark>=4.0.0", # Performance benchmarking
83-
"testcontainers>=3.7.0", # Database containers for integration tests
84-
"docker>=6.0.0", # Required by testcontainers
85-
"psutil>=5.9.0", # Memory usage monitoring
83+
"pytest-xdist>=3.0.0", # Parallel test execution
84+
"pytest-benchmark>=4.0.0", # Performance benchmarking
85+
"testcontainers>=4.0.0", # Database containers for integration tests
86+
"docker>=6.0.0", # Required by testcontainers
87+
"psutil>=5.9.0", # Memory usage monitoring
8688
]
8789

8890
[build-system]
@@ -98,6 +100,11 @@ addopts = [
98100
"--tb=short",
99101
"--strict-markers",
100102
]
103+
filterwarnings = [
104+
# Ignore testcontainers deprecation warnings from the library itself
105+
"ignore:The @wait_container_is_ready decorator is deprecated:DeprecationWarning:testcontainers",
106+
"ignore:The wait_for_logs function with string or callable predicates is deprecated:DeprecationWarning",
107+
]
101108

102109
markers = [
103110
"unit: Unit tests (fast, no external dependencies)",

tests/conftest.py

Lines changed: 21 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -23,6 +23,12 @@
2323
if 'TESTCONTAINERS_RYUK_DISABLED' not in os.environ:
2424
os.environ['TESTCONTAINERS_RYUK_DISABLED'] = 'true'
2525

26+
# Set Docker host for Colima if not already set
27+
if 'DOCKER_HOST' not in os.environ:
28+
colima_socket = Path.home() / '.colima' / 'default' / 'docker.sock'
29+
if colima_socket.exists():
30+
os.environ['DOCKER_HOST'] = f'unix://{colima_socket}'
31+
2632
# Import testcontainers conditionally
2733
if USE_TESTCONTAINERS:
2834
try:
@@ -118,9 +124,19 @@ def postgres_container():
118124
if not TESTCONTAINERS_AVAILABLE:
119125
pytest.skip('Testcontainers not available')
120126

127+
import time
128+
129+
from testcontainers.core.waiting_utils import wait_for_logs
130+
121131
container = PostgresContainer(image='postgres:13', username='test_user', password='test_pass', dbname='test_db')
122132
container.start()
123133

134+
# Wait for PostgreSQL to be ready using log message
135+
wait_for_logs(container, 'database system is ready to accept connections', timeout=30)
136+
137+
# PostgreSQL logs "ready" twice - wait a bit more to ensure fully ready
138+
time.sleep(2)
139+
124140
yield container
125141

126142
container.stop()
@@ -132,9 +148,14 @@ def redis_container():
132148
if not TESTCONTAINERS_AVAILABLE:
133149
pytest.skip('Testcontainers not available')
134150

151+
from testcontainers.core.waiting_utils import wait_for_logs
152+
135153
container = RedisContainer(image='redis:7-alpine')
136154
container.start()
137155

156+
# Wait for Redis to be ready using log message
157+
wait_for_logs(container, 'Ready to accept connections', timeout=30)
158+
138159
yield container
139160

140161
container.stop()

tests/performance/test_loader_performance.py

Lines changed: 12 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -23,9 +23,9 @@
2323
class TestPostgreSQLPerformance:
2424
"""Performance tests for PostgreSQL loader"""
2525

26-
def test_large_table_loading_performance(self, postgresql_config, performance_test_data, memory_monitor):
26+
def test_large_table_loading_performance(self, postgresql_test_config, performance_test_data, memory_monitor):
2727
"""Test loading large datasets with performance monitoring"""
28-
loader = PostgreSQLLoader(postgresql_config)
28+
loader = PostgreSQLLoader(postgresql_test_config)
2929

3030
with loader:
3131
start_time = time.time()
@@ -59,7 +59,7 @@ def test_large_table_loading_performance(self, postgresql_config, performance_te
5959
finally:
6060
loader.pool.putconn(conn)
6161

62-
def test_batch_performance_scaling(self, postgresql_config, performance_test_data):
62+
def test_batch_performance_scaling(self, postgresql_test_config, performance_test_data):
6363
"""Test performance scaling with different batch processing approaches"""
6464
from src.amp.loaders.base import LoadMode
6565

@@ -72,7 +72,7 @@ def test_batch_performance_scaling(self, postgresql_config, performance_test_dat
7272
results = {}
7373

7474
for approach_name, batch_size in batch_approaches.items():
75-
loader = PostgreSQLLoader(postgresql_config)
75+
loader = PostgreSQLLoader(postgresql_test_config)
7676
table_name = f'perf_batch_{approach_name}'
7777

7878
with loader:
@@ -123,9 +123,9 @@ def test_batch_performance_scaling(self, postgresql_config, performance_test_dat
123123
for approach, throughput in results.items():
124124
assert throughput > 500, f'{approach} too slow: {throughput:.0f} rows/sec'
125125

126-
def test_connection_pool_performance(self, postgresql_config, small_test_table):
126+
def test_connection_pool_performance(self, postgresql_test_config, small_test_table):
127127
"""Test connection pool efficiency under load"""
128-
config = {**postgresql_config, 'max_connections': 5}
128+
config = {**postgresql_test_config, 'max_connections': 5}
129129
loader = PostgreSQLLoader(config)
130130

131131
with loader:
@@ -1016,7 +1016,7 @@ class TestCrossLoaderPerformance:
10161016

10171017
def test_throughput_comparison(
10181018
self,
1019-
postgresql_config,
1019+
postgresql_test_config,
10201020
redis_test_config,
10211021
snowflake_config,
10221022
delta_basic_config,
@@ -1027,7 +1027,7 @@ def test_throughput_comparison(
10271027
results = {}
10281028

10291029
# Test PostgreSQL
1030-
pg_loader = PostgreSQLLoader(postgresql_config)
1030+
pg_loader = PostgreSQLLoader(postgresql_test_config)
10311031
with pg_loader:
10321032
start_time = time.time()
10331033
result = pg_loader.load_table(medium_test_table, 'throughput_test')
@@ -1120,7 +1120,9 @@ def test_throughput_comparison(
11201120
if throughput > 0:
11211121
print(f' {loader_name}: {throughput:.0f}')
11221122

1123-
def test_memory_usage_comparison(self, postgresql_config, redis_test_config, snowflake_config, small_test_table):
1123+
def test_memory_usage_comparison(
1124+
self, postgresql_test_config, redis_test_config, snowflake_config, small_test_table
1125+
):
11241126
"""Compare memory usage patterns across loaders"""
11251127
try:
11261128
import psutil
@@ -1132,7 +1134,7 @@ def test_memory_usage_comparison(self, postgresql_config, redis_test_config, sno
11321134

11331135
# Test PostgreSQL memory usage
11341136
initial_memory = process.memory_info().rss
1135-
pg_loader = PostgreSQLLoader(postgresql_config)
1137+
pg_loader = PostgreSQLLoader(postgresql_test_config)
11361138
with pg_loader:
11371139
pg_loader.load_table(small_test_table, 'memory_test')
11381140
peak_memory = process.memory_info().rss

0 commit comments

Comments
 (0)