@@ -304,16 +304,12 @@ async def test_max_age_infinity(self) -> None:
         """Test cache with infinite max age."""
         source_store = MemoryStore()
         cache_store = MemoryStore()
-        cached_store = CacheStore(
-            source_store,
-            cache_store=cache_store,
-            max_age_seconds="infinity"
-        )
-
+        cached_store = CacheStore(source_store, cache_store=cache_store, max_age_seconds="infinity")
+
         # Add data and verify it never expires
         test_data = CPUBuffer.from_bytes(b"test data")
         await cached_store.set("test_key", test_data)
-
+
         # Even after time passes, key should be fresh
         assert cached_store._is_key_fresh("test_key")
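For reference while reading this hunk: `max_age_seconds` accepts either a number of seconds or the literal string `"infinity"`, and any other string raises the `ValueError` matched by `test_invalid_max_age_seconds` further down. A minimal sketch of that validation, with the hypothetical helper name `validate_max_age` (the real constructor logic is not shown in this diff):

```python
def validate_max_age(max_age_seconds: int | float | str) -> int | float | str:
    # Hypothetical sketch: the only accepted string is "infinity" (never expire);
    # any other string raises the ValueError matched in test_invalid_max_age_seconds.
    if isinstance(max_age_seconds, str) and max_age_seconds != "infinity":
        raise ValueError("max_age_seconds string value must be 'infinity'")
    return max_age_seconds
```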
@@ -324,31 +320,27 @@ async def test_max_age_numeric(self) -> None:
         cached_store = CacheStore(
             source_store,
             cache_store=cache_store,
-            max_age_seconds=1  # 1 second
+            max_age_seconds=1,  # 1 second
         )
-
+
         # Add data
         test_data = CPUBuffer.from_bytes(b"test data")
         await cached_store.set("test_key", test_data)
-
+
         # Key should be fresh initially
         assert cached_store._is_key_fresh("test_key")
-
+
         # Manually set old timestamp to test expiration
         cached_store.key_insert_times["test_key"] = time.monotonic() - 2  # 2 seconds ago
-
+
         # Key should now be stale
         assert not cached_store._is_key_fresh("test_key")

     async def test_cache_set_data_disabled(self) -> None:
         """Test cache behavior when cache_set_data is False."""
         source_store = MemoryStore()
         cache_store = MemoryStore()
-        cached_store = CacheStore(
-            source_store,
-            cache_store=cache_store,
-            cache_set_data=False
-        )
+        cached_store = CacheStore(source_store, cache_store=cache_store, cache_set_data=False)

         # Set data
         test_data = CPUBuffer.from_bytes(b"test data")
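The two max-age tests above pin down the observable contract of `_is_key_fresh`: with `max_age_seconds="infinity"` a key is always fresh, and with a numeric limit a key goes stale once `time.monotonic()` advances past its entry in `key_insert_times`. A standalone sketch of that logic, reconstructed from the assertions rather than taken from the implementation:

```python
import time


def is_key_fresh(
    key: str,
    key_insert_times: dict[str, float],
    max_age_seconds: float | str,
) -> bool:
    # "infinity" disables expiration entirely (test_max_age_infinity).
    if max_age_seconds == "infinity":
        return True
    inserted = key_insert_times.get(key)
    if inserted is None:
        return False  # never cached, so nothing can be fresh
    # Must use the same monotonic clock the tests backdate against.
    return time.monotonic() - inserted < max_age_seconds
```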
@@ -370,7 +362,7 @@ async def test_eviction_with_max_size(self) -> None:
         cached_store = CacheStore(
             source_store,
             cache_store=cache_store,
-            max_size=100  # Small cache size
+            max_size=100,  # Small cache size
         )

         # Add data that exceeds cache size
@@ -401,7 +393,7 @@ async def test_value_exceeds_max_size(self) -> None:
         cached_store = CacheStore(
             source_store,
             cache_store=cache_store,
-            max_size=50  # Small cache size
+            max_size=50,  # Small cache size
         )

         # Try to cache data larger than max_size
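Both `max_size` tests rely on the same accommodation behavior: before caching a value, old entries are evicted (oldest first, via the `_cache_order` mapping seen later in this diff) until the new value fits, and a value larger than `max_size` is never cached at all. A self-contained sketch of that policy, with hypothetical names and under those assumptions:

```python
from collections import OrderedDict


def accommodate_value(
    value_size: int,
    max_size: int | None,
    current_size: int,
    cache_order: OrderedDict[str, int],  # key -> cached size, oldest entry first
) -> int:
    """Evict oldest entries until value_size fits; return the new current_size."""
    if max_size is None:
        return current_size  # unlimited cache: nothing to do
    if value_size > max_size:
        return current_size  # value can never fit, so it is not cached
    while current_size + value_size > max_size and cache_order:
        _key, size = cache_order.popitem(last=False)  # drop the oldest entry
        current_size -= size
    return current_size
```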
@@ -459,56 +451,52 @@ async def test_invalid_max_age_seconds(self) -> None:
         cache_store = MemoryStore()

         with pytest.raises(ValueError, match="max_age_seconds string value must be 'infinity'"):
-            CacheStore(
-                source_store,
-                cache_store=cache_store,
-                max_age_seconds="invalid"
-            )
-
+            CacheStore(source_store, cache_store=cache_store, max_age_seconds="invalid")
+
     async def test_buffer_size_function_coverage(self) -> None:
         """Test different branches of the buffer_size function."""
         from zarr.storage._caching_store import buffer_size
-
+
         # Test with Buffer object (nbytes attribute)
         buffer_data = CPUBuffer.from_bytes(b"test data")
         size = buffer_size(buffer_data)
         assert size > 0
-
+
         # Test with bytes
         bytes_data = b"test bytes"
         size = buffer_size(bytes_data)
         assert size == len(bytes_data)
-
+
         # Test with bytearray
         bytearray_data = bytearray(b"test bytearray")
         size = buffer_size(bytearray_data)
         assert size == len(bytearray_data)
-
+
         # Test with memoryview
         memoryview_data = memoryview(b"test memoryview")
         size = buffer_size(memoryview_data)
         assert size == len(memoryview_data)
-
+
         # Test fallback for other types - use a simple object
         # This will go through the numpy fallback or string encoding
         size = buffer_size("test string")
         assert size > 0
-
+
     async def test_unlimited_cache_size(self) -> None:
         """Test behavior when max_size is None (unlimited)."""
         source_store = MemoryStore()
         cache_store = MemoryStore()
         cached_store = CacheStore(
             source_store,
             cache_store=cache_store,
-            max_size=None  # Unlimited cache
+            max_size=None,  # Unlimited cache
         )
-
+
         # Add large amounts of data
         for i in range(10):
             large_data = CPUBuffer.from_bytes(b"x" * 1000)  # 1KB each
             await cached_store.set(f"large_key_{i}", large_data)
-
+
         # All should be cached since there's no size limit
         info = cached_store.cache_info()
         assert info["cached_keys"] == 10
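`test_buffer_size_function_coverage` above and `test_buffer_size_import_error_fallback` in the next hunk together outline the branch structure of `buffer_size`: an `nbytes` attribute wins, bytes-like objects use `len()`, and anything else falls back to numpy or, when numpy is unavailable, to the encoded string representation. A sketch consistent with those assertions, not the actual implementation from `zarr.storage._caching_store`:

```python
from typing import Any


def buffer_size(value: Any) -> int:
    if hasattr(value, "nbytes"):  # Buffer objects (and memoryviews) report bytes directly
        return int(value.nbytes)
    if isinstance(value, (bytes, bytearray)):
        return len(value)  # len() is already the byte count
    try:
        import numpy as np

        return int(np.asarray(value).nbytes)  # generic array-like fallback
    except ImportError:
        # Matches test_buffer_size_import_error_fallback: without numpy,
        # size is the length of the UTF-8 encoded string representation.
        return len(str(value).encode())
```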
@@ -518,76 +506,72 @@ async def test_evict_key_exception_handling(self) -> None:
         """Test exception handling in _evict_key method."""
         source_store = MemoryStore()
         cache_store = MemoryStore()
-        cached_store = CacheStore(
-            source_store,
-            cache_store=cache_store,
-            max_size=100
-        )
-
+        cached_store = CacheStore(source_store, cache_store=cache_store, max_size=100)
+
         # Add some data
         test_data = CPUBuffer.from_bytes(b"test data")
         await cached_store.set("test_key", test_data)
-
+
         # Manually corrupt the tracking to trigger exception
         # Remove from one structure but not others to create inconsistency
         del cached_store._cache_order["test_key"]
-
+
         # Try to evict - should handle the KeyError gracefully
         cached_store._evict_key("test_key")
-
+
         # Should still work and not crash
         info = cached_store.cache_info()
         assert isinstance(info, dict)
-
+
     async def test_get_no_cache_delete_tracking(self) -> None:
         """Test _get_no_cache when key doesn't exist and needs cleanup."""
         source_store = MemoryStore()
         cache_store = MemoryStore()
         cached_store = CacheStore(source_store, cache_store=cache_store)
-
+
         # First, add key to cache tracking but not to source
         test_data = CPUBuffer.from_bytes(b"test data")
         await cache_store.set("phantom_key", test_data)
         cached_store._cache_value("phantom_key", test_data)
-
+
         # Verify it's in tracking
         assert "phantom_key" in cached_store._cache_order
         assert "phantom_key" in cached_store.key_insert_times
-
+
         # Now try to get it - since it's not in source, should clean up tracking
         result = await cached_store._get_no_cache("phantom_key", default_buffer_prototype())
         assert result is None
-
+
         # Should have cleaned up tracking
         assert "phantom_key" not in cached_store._cache_order
         assert "phantom_key" not in cached_store.key_insert_times
-
+
     async def test_buffer_size_import_error_fallback(self) -> None:
         """Test buffer_size ImportError fallback."""
         from unittest.mock import patch

         from zarr.storage._caching_store import buffer_size

         # Mock numpy import to raise ImportError
-        with patch.dict('sys.modules', {'numpy': None}):
-            with patch('builtins.__import__', side_effect=ImportError("No module named 'numpy'")):
+        with patch.dict("sys.modules", {"numpy": None}):
+            with patch("builtins.__import__", side_effect=ImportError("No module named 'numpy'")):
                 # This should trigger the ImportError fallback
                 size = buffer_size("test string")
                 assert size == len(b"test string")
-
+
     async def test_accommodate_value_no_max_size(self) -> None:
         """Test _accommodate_value early return when max_size is None."""
         source_store = MemoryStore()
         cache_store = MemoryStore()
         cached_store = CacheStore(
             source_store,
             cache_store=cache_store,
-            max_size=None  # No size limit
+            max_size=None,  # No size limit
         )
-
+
         # This should return early without doing anything
         cached_store._accommodate_value(1000000)  # Large value
-
+
         # Should not affect anything since max_size is None
         info = cached_store.cache_info()
         assert info["current_size"] == 0
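Taken together, the tests suggest the following end-to-end usage. The import paths are inferred from the module path the tests themselves reference (`zarr.storage._caching_store`) plus zarr's buffer and store locations, so treat them as assumptions until checked against the full PR:

```python
import asyncio

from zarr.core.buffer import default_buffer_prototype
from zarr.core.buffer.cpu import Buffer as CPUBuffer
from zarr.storage import MemoryStore
from zarr.storage._caching_store import CacheStore


async def main() -> None:
    # Wrap a source store with a bounded, age-limited cache.
    cached = CacheStore(
        MemoryStore(),
        cache_store=MemoryStore(),
        max_size=1024,  # bytes; None would mean unlimited
        max_age_seconds=60,  # or "infinity" to never expire
    )
    await cached.set("key", CPUBuffer.from_bytes(b"payload"))
    buf = await cached.get("key", default_buffer_prototype())
    assert buf is not None
    # cache_info() exposes at least the keys asserted in the tests above.
    info = cached.cache_info()
    print(info["cached_keys"], info["current_size"])


asyncio.run(main())
```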