Skip to content

Commit 4b8d0a6

Browse files
committed
Increase code coverage to 98%
1 parent d20843a commit 4b8d0a6

File tree

1 file changed

+292
-0
lines changed

1 file changed

+292
-0
lines changed

tests/test_store/test_caching_store.py

Lines changed: 292 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -299,3 +299,295 @@ async def test_clear_cache(self, cached_store: CacheStore) -> None:
299299
# Verify data still exists in source store
300300
assert await cached_store._store.exists("clear_test_1")
301301
assert await cached_store._store.exists("clear_test_2")
302+
303+
async def test_max_age_infinity(self) -> None:
    """Keys must never go stale when max_age_seconds is 'infinity'."""
    backing = MemoryStore()
    cache = MemoryStore()
    store = CacheStore(
        backing,
        cache_store=cache,
        max_age_seconds="infinity"
    )

    # Write a value; with an infinite max age it should stay fresh forever.
    payload = CPUBuffer.from_bytes(b"test data")
    await store.set("test_key", payload)

    # Regardless of elapsed time, the key is reported fresh.
    assert store._is_key_fresh("test_key")
async def test_max_age_numeric(self) -> None:
    """A numeric max_age_seconds marks keys stale once it elapses."""
    backing = MemoryStore()
    cache = MemoryStore()
    store = CacheStore(
        backing,
        cache_store=cache,
        max_age_seconds=1  # one-second TTL
    )

    payload = CPUBuffer.from_bytes(b"test data")
    await store.set("test_key", payload)

    # A freshly written key is within its TTL.
    assert store._is_key_fresh("test_key")

    # Backdate the insert timestamp beyond the TTL to force expiry.
    store.key_insert_times["test_key"] = time.monotonic() - 2

    # The key now reads as stale.
    assert not store._is_key_fresh("test_key")
async def test_cache_set_data_disabled(self) -> None:
    """With cache_set_data=False, writes bypass the cache entirely."""
    backing = MemoryStore()
    cache = MemoryStore()
    store = CacheStore(
        backing,
        cache_store=cache,
        cache_set_data=False
    )

    payload = CPUBuffer.from_bytes(b"test data")
    await store.set("test_key", payload)

    # The write lands in the source store only, never in the cache.
    assert await backing.exists("test_key")
    assert not await cache.exists("test_key")

    # cache_info reflects the disabled setting and an empty cache.
    stats = store.cache_info()
    assert stats["cache_set_data"] is False
    assert stats["cached_keys"] == 0
async def test_eviction_with_max_size(self) -> None:
    """Exceeding max_size triggers LRU eviction of older entries."""
    backing = MemoryStore()
    cache = MemoryStore()
    store = CacheStore(
        backing,
        cache_store=cache,
        max_size=100  # deliberately tiny cache
    )

    # Three 40-byte values; the third pushes the total past 100 bytes.
    first = CPUBuffer.from_bytes(b"a" * 40)
    second = CPUBuffer.from_bytes(b"b" * 40)
    third = CPUBuffer.from_bytes(b"c" * 40)

    await store.set("key1", first)
    await store.set("key2", second)

    # Both values fit: two cached keys totalling 80 bytes.
    stats = store.cache_info()
    assert stats["cached_keys"] == 2
    assert stats["current_size"] == 80

    # Storing the third value must evict so the cache stays within bounds.
    await store.set("key3", third)

    stats = store.cache_info()
    assert stats["current_size"] <= 100
async def test_value_exceeds_max_size(self) -> None:
    """A single value larger than max_size is stored but never cached."""
    backing = MemoryStore()
    cache = MemoryStore()
    store = CacheStore(
        backing,
        cache_store=cache,
        max_size=50  # smaller than the value below
    )

    # 100 bytes cannot fit in a 50-byte cache.
    oversized = CPUBuffer.from_bytes(b"x" * 100)
    await store.set("large_key", oversized)

    # The source store keeps the value; the cache stays empty.
    assert await backing.exists("large_key")
    stats = store.cache_info()
    assert stats["cached_keys"] == 0
    assert stats["current_size"] == 0
async def test_get_nonexistent_key(self) -> None:
    """Fetching a missing key returns None and leaves the cache untouched."""
    backing = MemoryStore()
    cache = MemoryStore()
    store = CacheStore(backing, cache_store=cache)

    # The key exists in neither store, so the lookup yields None.
    assert await store.get("nonexistent", default_buffer_prototype()) is None

    # A miss must not create any cache bookkeeping.
    stats = store.cache_info()
    assert stats["cached_keys"] == 0
async def test_delete_both_stores(self) -> None:
    """delete() removes a key from the source, the cache, and tracking."""
    backing = MemoryStore()
    cache = MemoryStore()
    store = CacheStore(backing, cache_store=cache)

    payload = CPUBuffer.from_bytes(b"test data")
    await store.set("test_key", payload)

    # Sanity check: the write populated both stores.
    assert await backing.exists("test_key")
    assert await cache.exists("test_key")

    await store.delete("test_key")

    # Both stores are now empty...
    assert not await backing.exists("test_key")
    assert not await cache.exists("test_key")

    # ...and the internal accounting agrees.
    stats = store.cache_info()
    assert stats["cached_keys"] == 0
async def test_invalid_max_age_seconds(self) -> None:
    """Any string other than 'infinity' for max_age_seconds is rejected."""
    backing = MemoryStore()
    cache = MemoryStore()

    # Constructing with an unrecognized string must raise immediately.
    with pytest.raises(ValueError, match="max_age_seconds string value must be 'infinity'"):
        CacheStore(
            backing,
            cache_store=cache,
            max_age_seconds="invalid"
        )
async def test_buffer_size_function_coverage(self) -> None:
    """Exercise every input-type branch of the buffer_size helper."""
    from zarr.storage._caching_store import buffer_size

    # Buffer objects are sized via their nbytes attribute.
    assert buffer_size(CPUBuffer.from_bytes(b"test data")) > 0

    # Raw bytes report their own length.
    raw = b"test bytes"
    assert buffer_size(raw) == len(raw)

    # bytearray behaves the same way.
    mutable = bytearray(b"test bytearray")
    assert buffer_size(mutable) == len(mutable)

    # As does memoryview.
    view = memoryview(b"test memoryview")
    assert buffer_size(view) == len(view)

    # Any other type falls through to the numpy/string-encoding fallback.
    assert buffer_size("test string") > 0
async def test_unlimited_cache_size(self) -> None:
    """With max_size=None every value is cached, regardless of volume."""
    backing = MemoryStore()
    cache = MemoryStore()
    store = CacheStore(
        backing,
        cache_store=cache,
        max_size=None  # no eviction limit
    )

    # Write ten 1 KB values.
    for i in range(10):
        await store.set(f"large_key_{i}", CPUBuffer.from_bytes(b"x" * 1000))

    # Nothing is evicted, so all ten entries remain cached.
    stats = store.cache_info()
    assert stats["cached_keys"] == 10
    assert stats["current_size"] == 10000  # 10 entries x 1000 bytes
async def test_evict_key_exception_handling(self) -> None:
    """_evict_key must tolerate inconsistent internal tracking state."""
    backing = MemoryStore()
    cache = MemoryStore()
    store = CacheStore(
        backing,
        cache_store=cache,
        max_size=100
    )

    payload = CPUBuffer.from_bytes(b"test data")
    await store.set("test_key", payload)

    # Desynchronize the tracking structures to provoke the KeyError path:
    # the key stays in other bookkeeping but not in the LRU order map.
    del store._cache_order["test_key"]

    # Eviction should absorb the inconsistency rather than raise.
    store._evict_key("test_key")

    # The store remains usable afterwards.
    assert isinstance(store.cache_info(), dict)
async def test_get_no_cache_delete_tracking(self) -> None:
    """_get_no_cache purges tracking for keys missing from the source."""
    backing = MemoryStore()
    cache = MemoryStore()
    store = CacheStore(backing, cache_store=cache)

    # Register a key in the cache and its tracking, but never in the source.
    payload = CPUBuffer.from_bytes(b"test data")
    await cache.set("phantom_key", payload)
    store._cache_value("phantom_key", payload)

    # The phantom key is tracked on both structures.
    assert "phantom_key" in store._cache_order
    assert "phantom_key" in store.key_insert_times

    # A source miss should return None and clean up the stale tracking.
    assert await store._get_no_cache("phantom_key", default_buffer_prototype()) is None

    assert "phantom_key" not in store._cache_order
    assert "phantom_key" not in store.key_insert_times
async def test_buffer_size_import_error_fallback(self) -> None:
    """buffer_size falls back to encoded-string length when numpy is unavailable.

    Placing ``None`` in ``sys.modules['numpy']`` makes any ``import numpy``
    inside ``buffer_size`` raise ``ImportError`` (the import system treats a
    ``None`` entry as "import halted"). The previous version additionally
    patched ``builtins.__import__`` to raise unconditionally, which was both
    redundant and fragile: it broke *every* import executed inside the
    ``with`` block, not just numpy's, so any unrelated lazy import would
    have failed the test for the wrong reason.
    """
    from unittest.mock import patch

    from zarr.storage._caching_store import buffer_size

    with patch.dict('sys.modules', {'numpy': None}):
        # With numpy unimportable, the fallback encodes the string.
        size = buffer_size("test string")
        assert size == len(b"test string")
async def test_accommodate_value_no_max_size(self) -> None:
    """_accommodate_value is a no-op when the cache has no size limit."""
    backing = MemoryStore()
    cache = MemoryStore()
    store = CacheStore(
        backing,
        cache_store=cache,
        max_size=None  # unlimited cache
    )

    # With no limit configured, even a huge request returns immediately.
    store._accommodate_value(1000000)

    # Nothing was evicted or recorded.
    stats = store.cache_info()
    assert stats["current_size"] == 0
0 commit comments

Comments
 (0)