@@ -812,7 +812,7 @@ def read_response(self, disable_decoding=False, *, disconnect_on_error=True, pus
                 self._current_command_hash in self._cache
                 and self._cache[self._current_command_hash] != "caching-in-progress"
             ):
-                return self._cache[self._current_command_hash]
+                return copy.deepcopy(self._cache[self._current_command_hash])
 
         response = self._conn.read_response(
             disable_decoding=disable_decoding,
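Returning the cached object directly hands the caller a reference into the shared cache, so any in-place mutation of the response (appending to a returned list, for example) silently corrupts the entry for every later hit; copy.deepcopy isolates callers from the cache. A minimal standalone sketch of that failure mode and the fix, using a plain dict and made-up command strings rather than the real CacheProxyConnection:

import copy

cache = {}

def cached_read(command, raw_response, safe=True):
    # Populate the cache on first use, then serve repeat reads from it.
    if command not in cache:
        cache[command] = raw_response
    entry = cache[command]
    # safe=True returns an independent copy, as in the change above;
    # safe=False returns a reference straight into the cache.
    return copy.deepcopy(entry) if safe else entry

# Unsafe path: the caller's in-place mutation leaks into the cached entry.
resp = cached_read("LRANGE mylist 0 -1", [b"a", b"b", b"c"], safe=False)
resp.append(b"mutated")
print(cache["LRANGE mylist 0 -1"])  # [b'a', b'b', b'c', b'mutated'] -- corrupted

# Safe path: deepcopy keeps the shared entry intact.
cache.clear()
resp = cached_read("LRANGE mylist 0 -1", [b"a", b"b", b"c"], safe=True)
resp.append(b"mutated")
print(cache["LRANGE mylist 0 -1"])  # [b'a', b'b', b'c'] -- intact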
@@ -821,7 +821,7 @@ def read_response(self, disable_decoding=False, *, disconnect_on_error=True, pus
         )
 
         with self._cache_lock:
-            # If response is None prevent from caching and remove temporary cache entry.
+            # If the response is None, prevent it from being cached.
             if response is None:
                 self._cache.pop(self._current_command_hash)
                 return response
@@ -839,7 +839,7 @@ def read_response(self, disable_decoding=False, *, disconnect_on_error=True, pus
 
             cache_entry = self._cache.get(self._current_command_hash, None)
 
-            # Cache only responses that still valid and wasn't invalidated by another connection in meantime
+            # Cache only responses that are still valid and weren't invalidated by another connection in the meantime.
             if cache_entry is not None:
                 self._cache[self._current_command_hash] = response
 
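The response is read from the socket between the two cache_lock sections, so another connection can invalidate (and delete) the temporary entry while the read is in flight; the cache_entry is not None check above keeps such a stale response from being written back. A self-contained sketch of that guard with a plain dict and a simulated invalidation (names are illustrative, not the redis-py internals):

import threading

cache = {}
cache_lock = threading.Lock()

def invalidate(command_hash):
    # Another connection drops the entry when one of its keys changes.
    with cache_lock:
        cache.pop(command_hash, None)

def read_and_cache(command_hash, fetch):
    with cache_lock:
        cache[command_hash] = "caching-in-progress"  # temporary placeholder

    response = fetch()  # the network read happens outside the lock

    with cache_lock:
        # Keep the response only if the placeholder survived; if an
        # invalidation removed it in the meantime, the response is stale.
        if cache.get(command_hash, None) is not None:
            cache[command_hash] = response
    return response

h = "GET foo"
# Simulate an invalidation arriving while the response is being read.
read_and_cache(h, lambda: (invalidate(h), b"stale-value")[1])
print(cache.get(h))  # None -- the stale value was not written back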
@@ -1244,7 +1244,7 @@ def __init__(
         self.connection_class = connection_class
         self.connection_kwargs = connection_kwargs
         self.max_connections = max_connections
-        self._cache = None
+        self.cache = None
         self._cache_conf = None
         self.cache_lock = None
         self.scheduler = None
@@ -1258,9 +1258,9 @@ def __init__(
 
             cache = self.connection_kwargs.get("cache")
             if cache is not None:
-                self._cache = cache
+                self.cache = cache
             else:
-                self._cache = TTLCache(self.connection_kwargs["cache_size"], self.connection_kwargs["cache_ttl"])
+                self.cache = TTLCache(self.connection_kwargs["cache_size"], self.connection_kwargs["cache_ttl"])
 
             self.scheduler = BackgroundScheduler()
             self.scheduler.add_job(self._perform_health_check, "interval", seconds=2, id="cache_health_check")
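This hunk shows the pool either adopting a caller-supplied cache object or building one from cache_size/cache_ttl, then scheduling a periodic health check. The sketch below reproduces that fallback-and-scheduling pattern; it assumes TTLCache is cachetools' TTLCache(maxsize, ttl) and BackgroundScheduler is APScheduler's, and the health-check body is a stand-in because _perform_health_check is not shown in this diff:

from cachetools import TTLCache  # assumed source of TTLCache(maxsize, ttl)
from apscheduler.schedulers.background import BackgroundScheduler  # assumed source

def build_cache(connection_kwargs):
    # Prefer a caller-supplied cache object; otherwise fall back to a
    # bounded TTL cache sized from the connection kwargs.
    cache = connection_kwargs.get("cache")
    if cache is not None:
        return cache
    return TTLCache(connection_kwargs["cache_size"], connection_kwargs["cache_ttl"])

kwargs = {"cache_size": 10_000, "cache_ttl": 30}  # illustrative values
cache = build_cache(kwargs)

def perform_health_check():
    # Stand-in for the pool's _perform_health_check: here it just evicts
    # entries whose TTL has elapsed.
    cache.expire()

scheduler = BackgroundScheduler()
scheduler.add_job(perform_health_check, "interval", seconds=2, id="cache_health_check")
scheduler.start()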
@@ -1378,7 +1378,7 @@ def get_connection(self, command_name: str, *keys, **options) -> "Connection":
             # pool before all data has been read or the socket has been
             # closed. either way, reconnect and verify everything is good.
             try:
-                if connection.can_read() and self._cache is None:
+                if connection.can_read() and self.cache is None:
                     raise ConnectionError("Connection has data")
             except (ConnectionError, OSError):
                 connection.disconnect()
@@ -1408,10 +1408,10 @@ def make_connection(self) -> "ConnectionInterface":
             raise ConnectionError("Too many connections")
         self._created_connections += 1
 
-        if self._cache is not None and self._cache_conf is not None:
+        if self.cache is not None and self._cache_conf is not None:
             return CacheProxyConnection(
                 self.connection_class(**self.connection_kwargs),
-                self._cache,
+                self.cache,
                 self._cache_conf,
                 self._cache_lock
             )
@@ -1558,10 +1558,10 @@ def reset(self):
 
     def make_connection(self):
         "Make a fresh connection."
-        if self._cache is not None and self._cache_conf is not None:
+        if self.cache is not None and self._cache_conf is not None:
             connection = CacheProxyConnection(
                 self.connection_class(**self.connection_kwargs),
-                self._cache,
+                self.cache,
                 self._cache_conf,
                 self._cache_lock
             )
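In both pool variants the raw connection is wrapped in a CacheProxyConnection that shares the pool-level cache, cache config and lock. The real class is not shown in this diff; the sketch below only illustrates the wrapping/delegation idea, with made-up class names:

import threading

class CachingConnectionSketch:
    """Illustrative proxy: hold the shared cache state and delegate
    everything else to the wrapped connection."""

    def __init__(self, conn, cache, cache_conf, cache_lock):
        self._conn = conn
        self._cache = cache
        self._cache_conf = cache_conf
        self._cache_lock = cache_lock

    def __getattr__(self, name):
        # Attributes not defined on the proxy fall through to the real
        # connection, so the pool can treat both objects the same way.
        return getattr(self._conn, name)

class FakeConnection:
    def send_command(self, *args):
        print("sending", args)

proxy = CachingConnectionSketch(
    FakeConnection(), cache={}, cache_conf=None, cache_lock=threading.Lock()
)
proxy.send_command("GET", "foo")  # delegated to the wrapped connection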