Skip to content

Commit 84a87e2

Browse files
committed
Fix linting errors
1 parent 4b8d0a6 commit 84a87e2

File tree

1 file changed

+39
-55
lines changed

1 file changed

+39
-55
lines changed

tests/test_store/test_caching_store.py

Lines changed: 39 additions & 55 deletions
Original file line numberDiff line numberDiff line change
@@ -304,16 +304,12 @@ async def test_max_age_infinity(self) -> None:
304304
"""Test cache with infinite max age."""
305305
source_store = MemoryStore()
306306
cache_store = MemoryStore()
307-
cached_store = CacheStore(
308-
source_store,
309-
cache_store=cache_store,
310-
max_age_seconds="infinity"
311-
)
312-
307+
cached_store = CacheStore(source_store, cache_store=cache_store, max_age_seconds="infinity")
308+
313309
# Add data and verify it never expires
314310
test_data = CPUBuffer.from_bytes(b"test data")
315311
await cached_store.set("test_key", test_data)
316-
312+
317313
# Even after time passes, key should be fresh
318314
assert cached_store._is_key_fresh("test_key")
319315

@@ -324,31 +320,27 @@ async def test_max_age_numeric(self) -> None:
324320
cached_store = CacheStore(
325321
source_store,
326322
cache_store=cache_store,
327-
max_age_seconds=1 # 1 second
323+
max_age_seconds=1, # 1 second
328324
)
329-
325+
330326
# Add data
331327
test_data = CPUBuffer.from_bytes(b"test data")
332328
await cached_store.set("test_key", test_data)
333-
329+
334330
# Key should be fresh initially
335331
assert cached_store._is_key_fresh("test_key")
336-
332+
337333
# Manually set old timestamp to test expiration
338334
cached_store.key_insert_times["test_key"] = time.monotonic() - 2 # 2 seconds ago
339-
335+
340336
# Key should now be stale
341337
assert not cached_store._is_key_fresh("test_key")
342338

343339
async def test_cache_set_data_disabled(self) -> None:
344340
"""Test cache behavior when cache_set_data is False."""
345341
source_store = MemoryStore()
346342
cache_store = MemoryStore()
347-
cached_store = CacheStore(
348-
source_store,
349-
cache_store=cache_store,
350-
cache_set_data=False
351-
)
343+
cached_store = CacheStore(source_store, cache_store=cache_store, cache_set_data=False)
352344

353345
# Set data
354346
test_data = CPUBuffer.from_bytes(b"test data")
@@ -370,7 +362,7 @@ async def test_eviction_with_max_size(self) -> None:
370362
cached_store = CacheStore(
371363
source_store,
372364
cache_store=cache_store,
373-
max_size=100 # Small cache size
365+
max_size=100, # Small cache size
374366
)
375367

376368
# Add data that exceeds cache size
@@ -401,7 +393,7 @@ async def test_value_exceeds_max_size(self) -> None:
401393
cached_store = CacheStore(
402394
source_store,
403395
cache_store=cache_store,
404-
max_size=50 # Small cache size
396+
max_size=50, # Small cache size
405397
)
406398

407399
# Try to cache data larger than max_size
@@ -459,56 +451,52 @@ async def test_invalid_max_age_seconds(self) -> None:
459451
cache_store = MemoryStore()
460452

461453
with pytest.raises(ValueError, match="max_age_seconds string value must be 'infinity'"):
462-
CacheStore(
463-
source_store,
464-
cache_store=cache_store,
465-
max_age_seconds="invalid"
466-
)
467-
454+
CacheStore(source_store, cache_store=cache_store, max_age_seconds="invalid")
455+
468456
async def test_buffer_size_function_coverage(self) -> None:
469457
"""Test different branches of the buffer_size function."""
470458
from zarr.storage._caching_store import buffer_size
471-
459+
472460
# Test with Buffer object (nbytes attribute)
473461
buffer_data = CPUBuffer.from_bytes(b"test data")
474462
size = buffer_size(buffer_data)
475463
assert size > 0
476-
464+
477465
# Test with bytes
478466
bytes_data = b"test bytes"
479467
size = buffer_size(bytes_data)
480468
assert size == len(bytes_data)
481-
469+
482470
# Test with bytearray
483471
bytearray_data = bytearray(b"test bytearray")
484472
size = buffer_size(bytearray_data)
485473
assert size == len(bytearray_data)
486-
474+
487475
# Test with memoryview
488476
memoryview_data = memoryview(b"test memoryview")
489477
size = buffer_size(memoryview_data)
490478
assert size == len(memoryview_data)
491-
479+
492480
# Test fallback for other types - use a simple object
493481
# This will go through the numpy fallback or string encoding
494482
size = buffer_size("test string")
495483
assert size > 0
496-
484+
497485
async def test_unlimited_cache_size(self) -> None:
498486
"""Test behavior when max_size is None (unlimited)."""
499487
source_store = MemoryStore()
500488
cache_store = MemoryStore()
501489
cached_store = CacheStore(
502490
source_store,
503491
cache_store=cache_store,
504-
max_size=None # Unlimited cache
492+
max_size=None, # Unlimited cache
505493
)
506-
494+
507495
# Add large amounts of data
508496
for i in range(10):
509497
large_data = CPUBuffer.from_bytes(b"x" * 1000) # 1KB each
510498
await cached_store.set(f"large_key_{i}", large_data)
511-
499+
512500
# All should be cached since there's no size limit
513501
info = cached_store.cache_info()
514502
assert info["cached_keys"] == 10
@@ -518,76 +506,72 @@ async def test_evict_key_exception_handling(self) -> None:
518506
"""Test exception handling in _evict_key method."""
519507
source_store = MemoryStore()
520508
cache_store = MemoryStore()
521-
cached_store = CacheStore(
522-
source_store,
523-
cache_store=cache_store,
524-
max_size=100
525-
)
526-
509+
cached_store = CacheStore(source_store, cache_store=cache_store, max_size=100)
510+
527511
# Add some data
528512
test_data = CPUBuffer.from_bytes(b"test data")
529513
await cached_store.set("test_key", test_data)
530-
514+
531515
# Manually corrupt the tracking to trigger exception
532516
# Remove from one structure but not others to create inconsistency
533517
del cached_store._cache_order["test_key"]
534-
518+
535519
# Try to evict - should handle the KeyError gracefully
536520
cached_store._evict_key("test_key")
537-
521+
538522
# Should still work and not crash
539523
info = cached_store.cache_info()
540524
assert isinstance(info, dict)
541-
525+
542526
async def test_get_no_cache_delete_tracking(self) -> None:
543527
"""Test _get_no_cache when key doesn't exist and needs cleanup."""
544528
source_store = MemoryStore()
545529
cache_store = MemoryStore()
546530
cached_store = CacheStore(source_store, cache_store=cache_store)
547-
531+
548532
# First, add key to cache tracking but not to source
549533
test_data = CPUBuffer.from_bytes(b"test data")
550534
await cache_store.set("phantom_key", test_data)
551535
cached_store._cache_value("phantom_key", test_data)
552-
536+
553537
# Verify it's in tracking
554538
assert "phantom_key" in cached_store._cache_order
555539
assert "phantom_key" in cached_store.key_insert_times
556-
540+
557541
# Now try to get it - since it's not in source, should clean up tracking
558542
result = await cached_store._get_no_cache("phantom_key", default_buffer_prototype())
559543
assert result is None
560-
544+
561545
# Should have cleaned up tracking
562546
assert "phantom_key" not in cached_store._cache_order
563547
assert "phantom_key" not in cached_store.key_insert_times
564-
548+
565549
async def test_buffer_size_import_error_fallback(self) -> None:
566550
"""Test buffer_size ImportError fallback."""
567551
from unittest.mock import patch
568552

569553
from zarr.storage._caching_store import buffer_size
570554

571555
# Mock numpy import to raise ImportError
572-
with patch.dict('sys.modules', {'numpy': None}):
573-
with patch('builtins.__import__', side_effect=ImportError("No module named 'numpy'")):
556+
with patch.dict("sys.modules", {"numpy": None}):
557+
with patch("builtins.__import__", side_effect=ImportError("No module named 'numpy'")):
574558
# This should trigger the ImportError fallback
575559
size = buffer_size("test string")
576560
assert size == len(b"test string")
577-
561+
578562
async def test_accommodate_value_no_max_size(self) -> None:
579563
"""Test _accommodate_value early return when max_size is None."""
580564
source_store = MemoryStore()
581565
cache_store = MemoryStore()
582566
cached_store = CacheStore(
583567
source_store,
584568
cache_store=cache_store,
585-
max_size=None # No size limit
569+
max_size=None, # No size limit
586570
)
587-
571+
588572
# This should return early without doing anything
589573
cached_store._accommodate_value(1000000) # Large value
590-
574+
591575
# Should not affect anything since max_size is None
592576
info = cached_store.cache_info()
593577
assert info["current_size"] == 0

0 commit comments

Comments (0)