Skip to content

Commit d1a7a58

Browse files
First commit - Adding Buffer and Config classes
1 parent 0ac6d98 commit d1a7a58

File tree

6 files changed

+445
-0
lines changed

6 files changed

+445
-0
lines changed
Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,4 @@
1+
from aws_lambda_powertools.logging.buffer.cache import LoggerBufferCache
2+
from aws_lambda_powertools.logging.buffer.config import LoggerBufferConfig
3+
4+
__all__ = ["LoggerBufferCache", "LoggerBufferConfig"]
Lines changed: 91 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,91 @@
1+
from __future__ import annotations
2+
3+
import warnings
4+
from collections import deque
5+
from typing import Any
6+
7+
from aws_lambda_powertools.warnings import PowertoolsUserWarning
8+
9+
10+
class LoggerBufferCache:
    """
    Fixed-capacity, per-key FIFO cache for buffered log entries.

    Each key owns its own deque plus a running size counter. When adding an
    item would push a key's tracked size past ``max_size_bytes``, the oldest
    items for that key are evicted until the new item fits.
    """

    def __init__(self, max_size_bytes: int):
        """
        Initialize the LoggerBufferCache.

        Parameters
        ----------
        max_size_bytes : int
            Maximum size of the cache in bytes.
        """
        self.max_size_bytes: int = max_size_bytes
        # One FIFO deque of items per key.
        self.cache: dict[str, deque] = {}
        # Running size total per key, kept in lock-step with `cache`.
        self.current_size: dict[str, int] = {}

    def add(self, key: str, item: Any) -> None:
        """
        Add an item to the cache for a specific key.

        Parameters
        ----------
        key : str
            The key to store the item under.
        item : Any
            The item to be stored in the cache.

        Notes
        -----
        If the item size exceeds the maximum cache size, it will not be added.
        Size is approximated via ``len(str(item))`` — characters of the string
        representation, not encoded bytes.
        """
        item_size = len(str(item))

        # An item bigger than the entire budget can never fit: warn and drop.
        if item_size > self.max_size_bytes:
            warnings.warn(
                message=f"Item size {item_size} bytes exceeds total cache size {self.max_size_bytes} bytes",
                category=PowertoolsUserWarning,
                stacklevel=2,
            )
            return

        if key not in self.cache:
            self.cache[key] = deque()
            self.current_size[key] = 0

        # Evict oldest entries (FIFO) until the new item fits within the limit.
        while self.current_size[key] + item_size > self.max_size_bytes and self.cache[key]:
            removed_item = self.cache[key].popleft()
            self.current_size[key] -= len(str(removed_item))

        self.cache[key].append(item)
        self.current_size[key] += item_size

    def get(self, key: str) -> list:
        """
        Retrieve items for a specific key.

        Parameters
        ----------
        key : str
            The key to retrieve items for.

        Returns
        -------
        list
            List of items for the given key, or an empty list if the key doesn't exist.
        """
        return list(self.cache.get(key, deque()))

    def clear(self, key: str | None = None) -> None:
        """
        Clear the cache, either for a specific key or entirely.

        Parameters
        ----------
        key : str, optional
            The key to clear. If None, clears the entire cache.
        """
        # Compare against None explicitly: a falsy-but-valid key such as ""
        # must clear only that key, not wipe the whole cache.
        if key is not None:
            if key in self.cache:
                del self.cache[key]
                del self.current_size[key]
        else:
            self.cache.clear()
            self.current_size.clear()
Lines changed: 93 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,93 @@
1+
from __future__ import annotations
2+
3+
from typing import Literal
4+
5+
6+
class LoggerBufferConfig:
    """
    Configuration for log buffering behavior.
    """

    # Type alias for the log levels this config accepts.
    LogLevelType = Literal["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"]

    # Class-level constant listing every accepted log level (uppercase).
    VALID_LOG_LEVELS: list[str] = ["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"]

    def __init__(
        self,
        max_size: int = 10240,
        minimum_log_level: LogLevelType = "DEBUG",
        flush_on_error: bool = True,
        compress: bool = False,
    ):
        """
        Initialize logger buffer configuration.

        Parameters
        ----------
        max_size : int, optional
            Maximum size of the buffer in bytes
        minimum_log_level : str, optional
            Minimum log level to buffer; case-insensitive, stored uppercased
        flush_on_error : bool, optional
            Whether to flush the buffer when an error occurs
        compress : bool, optional
            Whether to compress buffered logs

        Raises
        ------
        ValueError
            If any argument fails validation.
        """
        self._validate_inputs(max_size, minimum_log_level, flush_on_error, compress)

        self._max_size = max_size
        self._minimum_log_level = minimum_log_level.upper()
        self._flush_on_error = flush_on_error
        self._compress = compress

    def _validate_inputs(
        self,
        max_size: int,
        minimum_log_level: str,
        flush_on_error: bool,
        compress: bool,
    ) -> None:
        """
        Validate configuration inputs.

        Parameters
        ----------
        Same as __init__ method parameters

        Raises
        ------
        ValueError
            If any input is of the wrong type or outside the allowed values.
        """
        # bool is a subclass of int, so reject it explicitly; otherwise
        # LoggerBufferConfig(max_size=True) would silently pass validation.
        if isinstance(max_size, bool) or not isinstance(max_size, int) or max_size <= 0:
            raise ValueError("Max size must be a positive integer")

        if not isinstance(minimum_log_level, str):
            raise ValueError("Log level must be a string")

        # Validate log level (case-insensitive)
        if minimum_log_level.upper() not in self.VALID_LOG_LEVELS:
            raise ValueError(f"Invalid log level. Must be one of {self.VALID_LOG_LEVELS}")

        if not isinstance(flush_on_error, bool):
            raise ValueError("flush_on_error must be a boolean")

        if not isinstance(compress, bool):
            raise ValueError("compress must be a boolean")

    @property
    def max_size(self) -> int:
        """Maximum buffer size in bytes."""
        return self._max_size

    @property
    def minimum_log_level(self) -> str:
        """Minimum log level to buffer."""
        return self._minimum_log_level

    @property
    def flush_on_error(self) -> bool:
        """Flag to flush buffer on error."""
        return self._flush_on_error

    @property
    def compress(self) -> bool:
        """Flag to compress buffered logs."""
        return self._compress

tests/unit/logger/__init__.py

Whitespace-only changes.
Lines changed: 166 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,166 @@
1+
import pytest
2+
3+
from aws_lambda_powertools.logging.buffer import LoggerBufferCache
4+
from aws_lambda_powertools.warnings import PowertoolsUserWarning
5+
6+
7+
def test_initialization():
    # GIVEN a freshly constructed cache with a 1000-byte budget
    buffer_cache = LoggerBufferCache(1000)

    # THEN the budget is recorded and both internal maps start empty
    assert buffer_cache.max_size_bytes == 1000
    assert buffer_cache.cache == {}
    assert buffer_cache.current_size == {}
16+
17+
18+
def test_add_single_item():
    # GIVEN an empty cache with plenty of room
    cache = LoggerBufferCache(1024)

    # WHEN exactly one item is stored under a key
    cache.add("key1", "test_item")

    # THEN the item is retrievable and its size is tracked
    stored = cache.get("key1")
    assert stored == ["test_item"]
    assert len(stored) == 1
    assert cache.current_size["key1"] == len("test_item")
29+
30+
31+
def test_add_multiple_items_same_key():
    # GIVEN an empty cache with plenty of room
    cache = LoggerBufferCache(1024)

    # WHEN two items are appended under the same key
    for entry in ("item1", "item2"):
        cache.add("key1", entry)

    # THEN both come back, in insertion order
    assert len(cache.get("key1")) == 2
    assert cache.get("key1") == ["item1", "item2"]
42+
43+
44+
def test_cache_size_limit_single_key():
    # GIVEN a cache that only holds 10 bytes
    cache = LoggerBufferCache(10)

    # WHEN several items at the size limit are added under one key
    for entry in ("long_item1", "long_item2", "long_item3"):
        cache.add("key1", entry)

    # THEN the key still has content and its tracked size stays in budget
    assert len(cache.get("key1")) > 0
    assert cache.current_size["key1"] <= 10
56+
57+
58+
def test_item_larger_than_cache():
    # GIVEN a cache smaller than the item about to be stored
    cache = LoggerBufferCache(5)

    # WHEN the oversized item is added
    # THEN a PowertoolsUserWarning is emitted
    with pytest.warns(expected_warning=PowertoolsUserWarning, match="Item size *"):
        cache.add("key1", "very_long_item")

    # THEN nothing was stored under that key
    assert "key1" not in cache.cache
69+
70+
71+
def test_get_existing_key():
    # GIVEN a cache populated with two items under one key
    cache = LoggerBufferCache(1024)
    cache.add("key1", "item1")
    cache.add("key1", "item2")

    # WHEN that key is fetched
    # THEN every stored item is returned in order
    assert cache.get("key1") == ["item1", "item2"]
81+
82+
83+
def test_get_non_existing_key():
    # GIVEN a cache with nothing stored in it
    cache = LoggerBufferCache(1000)

    # WHEN an unknown key is requested
    fetched = cache.get("non_existing")

    # THEN the result is an empty list, not an error
    assert fetched == []
92+
93+
94+
def test_clear_all():
    # GIVEN a cache populated under two distinct keys
    cache = LoggerBufferCache(1024)
    cache.add("key1", "item1")
    cache.add("key2", "item2")

    # WHEN the whole cache is cleared with no key argument
    cache.clear()

    # THEN both the items and the size counters are gone
    assert cache.cache == {}
    assert cache.current_size == {}
108+
109+
110+
def test_clear_specific_key():
    # GIVEN a cache populated under two distinct keys
    cache = LoggerBufferCache(1024)
    cache.add("key1", "item1")
    cache.add("key2", "item2")

    # WHEN only one of the keys is cleared
    cache.clear("key1")

    # THEN that key is removed while the other survives intact
    assert "key1" not in cache.cache
    assert "key2" in cache.cache
    assert cache.get("key1") == []
125+
126+
127+
def test_multiple_keys_with_size_limits():
    # GIVEN a cache with a tight 20-byte budget
    cache = LoggerBufferCache(20)

    # WHEN items are spread across two keys
    cache.add("key1", "item1")
    cache.add("key1", "item2")
    cache.add("key2", "long_item")

    # THEN both keys hold data and their combined size stays bounded
    assert len(cache.cache["key1"]) > 0
    assert len(cache.cache["key2"]) > 0
    assert cache.current_size["key1"] + cache.current_size["key2"] <= 20
140+
141+
142+
def test_add_different_types():
    # GIVEN a cache with plenty of room
    cache = LoggerBufferCache(1024)

    # WHEN values of several different types are stored under one key
    for value in (123, [1, 2, 3], {"a": 1}):
        cache.add("key1", value)

    # THEN every one of them is kept
    assert len(cache.get("key1")) == 3
154+
155+
156+
def test_cache_size_tracking():
    # GIVEN a cache with a 30-byte budget
    cache = LoggerBufferCache(30)

    # WHEN two strings are stored under the same key
    cache.add("key1", "small")
    cache.add("key1", "another_item")

    # THEN the tracked size is exactly the sum of the string lengths
    assert cache.current_size["key1"] == len("small") + len("another_item")
    assert cache.current_size["key1"] <= 30

0 commit comments

Comments
 (0)