Commit 943b873

feature(store): add LoggingStore wrapper

1 parent 5e57f75 commit 943b873

2 files changed: +181 -0 lines changed

src/zarr/store/logging.py

Lines changed: 146 additions & 0 deletions
@@ -0,0 +1,146 @@
from __future__ import annotations

import inspect
import logging
import time
from contextlib import contextmanager
from typing import TYPE_CHECKING

from zarr.abc.store import Store

if TYPE_CHECKING:
    from collections.abc import AsyncGenerator, Generator

    from zarr.core.buffer import Buffer, BufferPrototype


class LoggingStore(Store):
    _store: Store

    def __init__(
        self,
        store: Store,
        log_level: str = "DEBUG",
        log_handler: logging.Handler | None = None,
    ):
        self._store = store

        self._configure_logger(log_level, log_handler)

    def _configure_logger(
        self, log_level: str = "DEBUG", log_handler: logging.Handler | None = None
    ) -> None:
        self.log_level = log_level
        self.logger = logging.getLogger(f"LoggingStore({self._store!s})")
        self.logger.setLevel(log_level)

        if not self.logger.hasHandlers():
            if not log_handler:
                log_handler = self._default_handler()
            # Add handler to logger
            self.logger.addHandler(log_handler)

    def _default_handler(self) -> logging.Handler:
        """Define a default log handler"""
        handler = logging.StreamHandler()
        handler.setLevel(self.log_level)
        formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        return handler

    @contextmanager
    def log(self) -> Generator[None, None, None]:
        op = f"{type(self._store).__name__}.{inspect.stack()[2].function}"
        self.logger.info(f"Calling {op}")
        start_time = time.time()
        try:
            yield
        finally:
            end_time = time.time()
            self.logger.info(f"Finished {op} in {end_time - start_time:.2f} seconds")

    @property
    def supports_writes(self) -> bool:
        with self.log():
            return self._store.supports_writes

    @property
    def supports_deletes(self) -> bool:
        with self.log():
            return self._store.supports_deletes

    @property
    def supports_partial_writes(self) -> bool:
        with self.log():
            return self._store.supports_partial_writes

    @property
    def supports_listing(self) -> bool:
        with self.log():
            return self._store.supports_listing

    async def empty(self) -> bool:
        with self.log():
            return await self._store.empty()

    async def clear(self) -> None:
        with self.log():
            return await self._store.clear()

    def __str__(self) -> str:
        return f"logging-{self._store!s}"

    def __repr__(self) -> str:
        return f"LoggingStore({self._store!r})"

    def __eq__(self, other: object) -> bool:
        with self.log():
            return self._store == other

    async def get(
        self,
        key: str,
        prototype: BufferPrototype,
        byte_range: tuple[int | None, int | None] | None = None,
    ) -> Buffer | None:
        with self.log():
            return await self._store.get(key=key, prototype=prototype, byte_range=byte_range)

    async def get_partial_values(
        self,
        prototype: BufferPrototype,
        key_ranges: list[tuple[str, tuple[int | None, int | None]]],
    ) -> list[Buffer | None]:
        with self.log():
            return await self._store.get_partial_values(prototype=prototype, key_ranges=key_ranges)

    async def exists(self, key: str) -> bool:
        with self.log():
            return await self._store.exists(key)

    async def set(self, key: str, value: Buffer) -> None:
        with self.log():
            return await self._store.set(key=key, value=value)

    async def delete(self, key: str) -> None:
        with self.log():
            return await self._store.delete(key=key)

    async def set_partial_values(self, key_start_values: list[tuple[str, int, bytes]]) -> None:
        with self.log():
            return await self._store.set_partial_values(key_start_values=key_start_values)

    async def list(self) -> AsyncGenerator[str, None]:
        with self.log():
            async for key in self._store.list():
                yield key

    async def list_prefix(self, prefix: str) -> AsyncGenerator[str, None]:
        with self.log():
            async for key in self._store.list_prefix(prefix=prefix):
                yield key

    async def list_dir(self, prefix: str) -> AsyncGenerator[str, None]:
        with self.log():
            async for key in self._store.list_dir(prefix=prefix):
                yield key
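
Typical usage is to wrap an existing store: every call made through the wrapper is forwarded to the underlying store and logged with timing, and the logged operation name combines the wrapped store's class with the calling method (via inspect.stack()), e.g. "Calling MemoryStore.set". A minimal usage sketch follows; the MemoryStore import path and its mode="w" argument are assumptions for illustration, and only LoggingStore and default_buffer_prototype are defined in this commit:

    import asyncio

    from zarr.core.buffer import default_buffer_prototype
    from zarr.store.logging import LoggingStore
    from zarr.store.memory import MemoryStore  # assumed import path for the in-memory store

    async def main() -> None:
        # Wrap the concrete store; mode="w" is an assumed constructor argument.
        store = LoggingStore(MemoryStore(mode="w"), log_level="INFO")
        buf = default_buffer_prototype().buffer.from_bytes(b"\x01\x02\x03\x04")
        await store.set("foo/bar/c/0", buf)  # logs "Calling MemoryStore.set" / "Finished MemoryStore.set in ..."
        assert await store.exists("foo/bar/c/0")
        print([key async for key in store.list()])  # ["foo/bar/c/0"]

    asyncio.run(main())
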
Lines changed: 35 additions & 0 deletions
@@ -0,0 +1,35 @@
from __future__ import annotations

from typing import TYPE_CHECKING

import pytest

from zarr.core.buffer import default_buffer_prototype
from zarr.store.logging import LoggingStore

if TYPE_CHECKING:
    from zarr.abc.store import Store


@pytest.mark.parametrize("store", ("local", "memory", "zip"), indirect=["store"])
async def test_logging_store(store: Store, caplog) -> None:
    wrapped = LoggingStore(store=store, log_level="DEBUG")
    buffer = default_buffer_prototype().buffer

    caplog.clear()
    res = await wrapped.set("foo/bar/c/0", buffer.from_bytes(b"\x01\x02\x03\x04"))
    assert res is None
    assert len(caplog.record_tuples) == 2
    for tup in caplog.record_tuples:
        assert str(store) in tup[0]
    assert f"Calling {type(store).__name__}.set" in caplog.record_tuples[0][2]
    assert f"Finished {type(store).__name__}.set" in caplog.record_tuples[1][2]

    caplog.clear()
    keys = [k async for k in wrapped.list()]
    assert keys == ["foo/bar/c/0"]
    assert len(caplog.record_tuples) == 2
    for tup in caplog.record_tuples:
        assert str(store) in tup[0]
    assert f"Calling {type(store).__name__}.list" in caplog.record_tuples[0][2]
    assert f"Finished {type(store).__name__}.list" in caplog.record_tuples[1][2]
