@@ -78,6 +78,7 @@ class CacheStore(WrapperStore[Store]):
     cache_set_data: bool
     _cache_order: OrderedDict[str, None]  # Track access order for LRU
     _current_size: int  # Track current cache size
+    _key_sizes: dict[str, int]  # Track size of each cached key
 
     def __init__(
         self,
@@ -106,6 +107,7 @@ def __init__(
         self.cache_set_data = cache_set_data
         self._cache_order = OrderedDict()
         self._current_size = 0
+        self._key_sizes = {}
 
     def _is_key_fresh(self, key: str) -> bool:
         """Check if a cached key is still fresh based on max_age_seconds."""
@@ -135,14 +137,20 @@ def _accommodate_value(self, value_size: int) -> None:
     def _evict_key(self, key: str) -> None:
         """Remove a key from cache and update size tracking."""
         try:
-            # Remove from cache store (async operation, but we'll handle it)
-            # For now, we'll mark it for removal and actual removal happens in async methods
+            # Get the size of the key being evicted
+            key_size = self._key_sizes.get(key, 0)
+
+            # Remove from tracking structures
             if key in self._cache_order:
                 del self._cache_order[key]
             if key in self.key_insert_times:
                 del self.key_insert_times[key]
-            # Note: Actual size reduction will happen when we get the item size
-            logger.info("_evict_key: evicted key %s from cache", key)
+            if key in self._key_sizes:
+                del self._key_sizes[key]
+
+            # Update current size
+            self._current_size = max(0, self._current_size - key_size)
+            logger.info("_evict_key: evicted key %s from cache, size %d", key, key_size)
         except Exception as e:
             logger.warning("_evict_key: failed to evict key %s: %s", key, e)
 
@@ -165,6 +173,7 @@ def _cache_value(self, key: str, value: Any) -> None:
         # Update tracking
         self._cache_order[key] = None  # OrderedDict to track access order
         self._current_size += value_size
+        self._key_sizes[key] = value_size
         self.key_insert_times[key] = time.monotonic()
 
         logger.info("_cache_value: cached key %s with size %d bytes", key, value_size)
@@ -181,6 +190,8 @@ def _remove_from_tracking(self, key: str) -> None:
             del self._cache_order[key]
         if key in self.key_insert_times:
             del self.key_insert_times[key]
+        if key in self._key_sizes:
+            del self._key_sizes[key]
 
     async def _get_try_cache(
         self, key: str, prototype: BufferPrototype, byte_range: ByteRequest | None = None
@@ -318,5 +329,6 @@ async def clear_cache(self) -> None:
         # Reset tracking
         self.key_insert_times.clear()
         self._cache_order.clear()
+        self._key_sizes.clear()
         self._current_size = 0
         logger.info("clear_cache: cleared all cache data")
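
For context, here is a minimal standalone sketch of the bookkeeping pattern this diff introduces: an OrderedDict for access order, a plain dict of per-key byte sizes, and a running total that eviction subtracts from. The `SizedLRUTracker` class and its method names are hypothetical, for illustration only, and are not part of the actual `CacheStore` implementation.

```python
from collections import OrderedDict


class SizedLRUTracker:
    """Illustrative sketch of size-aware LRU tracking (not the CacheStore API)."""

    def __init__(self, max_size: int) -> None:
        self.max_size = max_size
        self._order: OrderedDict[str, None] = OrderedDict()  # access order
        self._key_sizes: dict[str, int] = {}                 # per-key byte sizes
        self._current_size = 0                               # running byte total

    def add(self, key: str, size: int) -> None:
        """Record a cached key, evicting least-recently-used keys to stay under max_size."""
        while self._order and self._current_size + size > self.max_size:
            oldest, _ = self._order.popitem(last=False)
            self._current_size -= self._key_sizes.pop(oldest, 0)
        self._order[key] = None
        self._key_sizes[key] = size
        self._current_size += size

    def evict(self, key: str) -> None:
        """Drop a single key and subtract its recorded size from the running total."""
        self._order.pop(key, None)
        self._current_size = max(0, self._current_size - self._key_sizes.pop(key, 0))


tracker = SizedLRUTracker(max_size=100)
tracker.add("chunk/0.0", 60)
tracker.add("chunk/0.1", 60)  # evicts "chunk/0.0" to make room
assert tracker._current_size == 60
```

Keeping the per-key sizes in a separate dict is what lets `_evict_key` subtract the exact byte count immediately, instead of deferring size accounting until the value is re-read, as the removed comments previously described.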