Commit f51fdb8

Fix cachingstore.rst errors
1 parent 62b739f

1 file changed

docs/user-guide/cachingstore.rst

Lines changed: 19 additions & 19 deletions
@@ -36,8 +36,8 @@ can be any Store implementation, providing flexibility in cache persistence:
 >>> source_store = zarr.storage.LocalStore('test.zarr')
 >>> cache_store = zarr.storage.MemoryStore() # In-memory cache
 >>> cached_store = zarr.storage.CacheStore(
-... store=source_store,
-... cache_store=cache_store,
+... store=source_store,
+... cache_store=cache_store,
 ... max_size=256*1024*1024 # 256MB cache
 ... )
 >>>
@@ -83,21 +83,21 @@ is a significant factor. You can use different store types for source and cache:
 
 >>> from zarr.storage import FsspecStore, LocalStore
 >>>
->>> # Create a remote store (S3 example)
->>> remote_store = FsspecStore.from_url('s3://bucket/data.zarr', storage_options={'anon': True})
->>>
->>> # Use a local store for persistent caching
->>> local_cache_store = LocalStore('cache_data')
->>>
+>>> # Create a remote store (S3 example) - for demonstration only
+>>> remote_store = FsspecStore.from_url('s3://bucket/data.zarr', storage_options={'anon': True}) # doctest: +SKIP
+>>>
+>>> # Use a local store for persistent caching
+>>> local_cache_store = LocalStore('cache_data') # doctest: +SKIP
+>>>
 >>> # Create cached store with persistent local cache
->>> cached_store = zarr.storage.CacheStore(
+>>> cached_store = zarr.storage.CacheStore( # doctest: +SKIP
 ... store=remote_store,
 ... cache_store=local_cache_store,
 ... max_size=512*1024*1024 # 512MB cache
 ... )
 >>>
->>> # Open array through cached store
->>> z = zarr.open(cached_store)
+>>> # Open array through cached store
+>>> z = zarr.open(cached_store) # doctest: +SKIP
 
 The first access to any chunk will be slow (network retrieval), but subsequent accesses
 to the same chunk will be served from the local cache, providing dramatic speedup.
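
A network-free sketch of the speedup described in the hunk above, kept outside the diff for reference. It assumes the CacheStore parameters shown in this file plus zarr-python's create_array/open entry points, with a MemoryStore standing in for the remote store; exact creation calls may differ between zarr versions, so treat it as illustrative rather than as the documented example.

    # Sketch: time a cache miss vs. a cache hit through CacheStore.
    # Assumptions: zarr-python >= 3 with the CacheStore documented in
    # cachingstore.rst; MemoryStore stands in for a slow remote store.
    import time
    import numpy as np
    import zarr

    source_store = zarr.storage.MemoryStore()      # stand-in for the remote store
    cache_store = zarr.storage.MemoryStore()       # cache layer
    cached_store = zarr.storage.CacheStore(
        store=source_store,
        cache_store=cache_store,
        max_size=64 * 1024 * 1024,                 # 64 MB cache
    )

    # Write an array into the source store, then read it back through the cache.
    z = zarr.create_array(store=source_store, shape=(100, 100), chunks=(10, 10), dtype="float64")
    z[:] = np.random.random((100, 100))
    zarr_array = zarr.open(cached_store)

    start = time.time()
    _ = zarr_array[20:30, 20:30]                   # first access: cache miss
    first_access = time.time() - start

    start = time.time()
    _ = zarr_array[20:30, 20:30]                   # second access: served from cache
    second_access = time.time() - start

    print(f"miss: {first_access:.6f}s  hit: {second_access:.6f}s")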
@@ -177,7 +177,7 @@ The CacheStore provides statistics to monitor cache performance and state:
 True
 >>> info['tracked_keys'] >= 0
 True
->>> info['cached_keys'] >= 0
+>>> info['cached_keys'] >= 0
 True
 >>> isinstance(info['cache_set_data'], bool)
 True
@@ -193,14 +193,14 @@ The CacheStore provides methods for manual cache management:
 >>> import asyncio
 >>> asyncio.run(cached_store.clear_cache()) # doctest: +SKIP
 >>>
->>> # Check cache info after clearing
+>>> # Check cache info after clearing
 >>> info = cached_store.cache_info() # doctest: +SKIP
 >>> info['tracked_keys'] == 0 # doctest: +SKIP
 True
->>> info['current_size'] == 0 # doctest: +SKIP
+>>> info['current_size'] == 0 # doctest: +SKIP
 True
 
-The `clear_cache()` method is an async method that clears both the cache store
+The `clear_cache()` method is an async method that clears both the cache store
 (if it supports the `clear` method) and all internal tracking data.
 
 Best Practices
@@ -235,9 +235,9 @@ Remote Store with Local Cache
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
 >>> from zarr.storage import FsspecStore, LocalStore
->>> remote_store = FsspecStore.from_url('s3://bucket/data.zarr', storage_options={'anon': True})
->>> local_cache = LocalStore('local_cache')
->>> cached_store = zarr.storage.CacheStore(
+>>> remote_store = FsspecStore.from_url('s3://bucket/data.zarr', storage_options={'anon': True}) # doctest: +SKIP
+>>> local_cache = LocalStore('local_cache') # doctest: +SKIP
+>>> cached_store = zarr.storage.CacheStore( # doctest: +SKIP
 ... store=remote_store,
 ... cache_store=local_cache,
 ... max_size=1024*1024*1024,
@@ -286,7 +286,7 @@ Here's a complete example demonstrating cache effectiveness:
 >>> first_access = time.time() - start
 >>>
 >>> start = time.time()
->>> data = zarr_array[20:30, 20:30] # Second access (cache hit)
+>>> data = zarr_array[20:30, 20:30] # Second access (cache hit)
 >>> second_access = time.time() - start
 >>>
 >>> # Check cache statistics
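
Since the substance of this commit is adding "# doctest: +SKIP" directives, the following standard-library snippet (not part of the diff; the S3-style path is made up) illustrates why the directive keeps network-dependent examples from failing documentation tests:

    # The skipped example is parsed but never executed, so a statement that
    # would fail at runtime does not break the doctest run.
    import doctest

    def demo():
        """
        >>> 1 + 1                          # runs normally
        2
        >>> open('s3://bucket/data.zarr')  # doctest: +SKIP
        """

    # verbose=True prints each example as it is tried; only the first one runs.
    doctest.run_docstring_examples(demo, globs={}, verbose=True)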
