Skip to content

Commit 2d8cf27

Browse files
committed
Removed the background healthcheck; use a connection-reference approach instead
1 parent 20f3851 commit 2d8cf27

File tree

8 files changed

+70
-285
lines changed

8 files changed

+70
-285
lines changed

docs/resp3_features.rst

Lines changed: 0 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -98,10 +98,4 @@ Enable caching with custom cache implementation:
9898
9999
CacheImpl should implement the `CacheInterface` specified in the `redis.cache` package.
100100

101-
Explicit disconnect
102-
103-
It's important to call `close()` to properly close the connection to server.
104-
For caching purposes, we're using a separate thread that performs health checks with configurable interval and it relies on
105-
`close()` to be called before the shutdown.
106-
107101
More robust documentation will soon be available at the `official Redis documentation <https://redis.io/docs/latest/>`_.

redis/cache.py

Lines changed: 11 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,5 @@
11
import copy
2+
import weakref
23
from abc import ABC, abstractmethod
34
from collections import OrderedDict
45
from enum import Enum
@@ -32,14 +33,21 @@ def __eq__(self, other):
3233

3334
class CacheEntry:
3435
def __init__(
35-
self, cache_key: CacheKey, cache_value: bytes, status: CacheEntryStatus
36+
self,
37+
cache_key: CacheKey,
38+
cache_value: bytes,
39+
status: CacheEntryStatus,
40+
connection_ref,
3641
):
3742
self.cache_key = cache_key
3843
self.cache_value = cache_value
3944
self.status = status
45+
self.connection_ref = connection_ref
4046

4147
def __hash__(self):
42-
return hash((self.cache_key, self.cache_value, self.status))
48+
return hash(
49+
(self.cache_key, self.cache_value, self.status, self.connection_ref)
50+
)
4351

4452
def __eq__(self, other):
4553
return hash(self) == hash(other)
@@ -86,10 +94,6 @@ def get_max_size(self) -> int:
8694
def get_eviction_policy(self):
8795
pass
8896

89-
@abstractmethod
90-
def get_health_check_interval(self) -> float:
91-
pass
92-
9397
@abstractmethod
9498
def is_exceeds_max_size(self, count: int) -> bool:
9599
pass
@@ -182,7 +186,7 @@ def get(self, key: CacheKey) -> Union[CacheEntry, None]:
182186
return None
183187

184188
self._eviction_policy.touch(key)
185-
return copy.deepcopy(entry)
189+
return entry
186190

187191
def delete_by_cache_keys(self, cache_keys: List[CacheKey]) -> List[bool]:
188192
response = []
@@ -360,12 +364,10 @@ def __init__(
360364
max_size: int = DEFAULT_MAX_SIZE,
361365
cache_class: Any = DEFAULT_CACHE_CLASS,
362366
eviction_policy: EvictionPolicy = DEFAULT_EVICTION_POLICY,
363-
health_check_interval: float = 2.0,
364367
):
365368
self._cache_class = cache_class
366369
self._max_size = max_size
367370
self._eviction_policy = eviction_policy
368-
self._health_check_interval = health_check_interval
369371

370372
def get_cache_class(self):
371373
return self._cache_class
@@ -376,9 +378,6 @@ def get_max_size(self) -> int:
376378
def get_eviction_policy(self) -> EvictionPolicy:
377379
return self._eviction_policy
378380

379-
def get_health_check_interval(self) -> float:
380-
return self._health_check_interval
381-
382381
def is_exceeds_max_size(self, count: int) -> bool:
383382
return count > self._max_size
384383

redis/connection.py

Lines changed: 26 additions & 51 deletions
Original file line numberDiff line numberDiff line change
@@ -35,7 +35,6 @@
3535
TimeoutError,
3636
)
3737
from .retry import Retry
38-
from .scheduler import Scheduler
3938
from .utils import (
4039
CRYPTOGRAPHY_AVAILABLE,
4140
HIREDIS_AVAILABLE,
@@ -741,12 +740,18 @@ class CacheProxyConnection(ConnectionInterface):
741740
MIN_ALLOWED_VERSION = "7.4.0"
742741
DEFAULT_SERVER_NAME = "redis"
743742

744-
def __init__(self, conn: ConnectionInterface, cache: CacheInterface):
743+
def __init__(
744+
self,
745+
conn: ConnectionInterface,
746+
cache: CacheInterface,
747+
pool_lock: threading.Lock,
748+
):
745749
self.pid = os.getpid()
746750
self._conn = conn
747751
self.retry = self._conn.retry
748752
self.host = self._conn.host
749753
self.port = self._conn.port
754+
self._pool_lock = pool_lock
750755
self._cache = cache
751756
self._cache_lock = threading.Lock()
752757
self._current_command_cache_key = None
@@ -824,9 +829,17 @@ def send_command(self, *args, **kwargs):
824829
)
825830

826831
with self._cache_lock:
827-
# If current command reply already cached
828-
# prevent sending data over socket.
832+
# We have to trigger invalidation processing in case
833+
# it was cached by another connection to avoid
834+
# queueing invalidations in stale connections.
829835
if self._cache.get(self._current_command_cache_key):
836+
entry = self._cache.get(self._current_command_cache_key)
837+
838+
if entry.connection_ref != self._conn:
839+
with self._pool_lock:
840+
while entry.connection_ref.can_read():
841+
entry.connection_ref.read_response(push_request=True)
842+
830843
return
831844

832845
# Set temporary entry value to prevent
@@ -836,6 +849,7 @@ def send_command(self, *args, **kwargs):
836849
cache_key=self._current_command_cache_key,
837850
cache_value=self.DUMMY_CACHE_VALUE,
838851
status=CacheEntryStatus.IN_PROGRESS,
852+
connection_ref=self._conn,
839853
)
840854
)
841855

@@ -857,7 +871,9 @@ def read_response(
857871
and self._cache.get(self._current_command_cache_key).status
858872
!= CacheEntryStatus.IN_PROGRESS
859873
):
860-
return self._cache.get(self._current_command_cache_key).cache_value
874+
return copy.deepcopy(
875+
self._cache.get(self._current_command_cache_key).cache_value
876+
)
861877

862878
response = self._conn.read_response(
863879
disable_decoding=disable_decoding,
@@ -879,13 +895,9 @@ def read_response(
879895
# Cache only responses that are still valid
881897
# and weren't invalidated by another connection in the meantime.
881897
if cache_entry is not None:
882-
self._cache.set(
883-
CacheEntry(
884-
cache_key=self._current_command_cache_key,
885-
cache_value=response,
886-
status=CacheEntryStatus.VALID,
887-
)
888-
)
898+
cache_entry.status = CacheEntryStatus.VALID
899+
cache_entry.cache_value = response
900+
self._cache.set(cache_entry)
889901

890902
return response
891903

@@ -1284,9 +1296,6 @@ def __init__(
12841296
self.max_connections = max_connections
12851297
self.cache = None
12861298
self._cache_factory = cache_factory
1287-
self._scheduler = None
1288-
self._hc_cancel_event = None
1289-
self._hc_thread = None
12901299

12911300
if connection_kwargs.get("cache_config") or connection_kwargs.get("cache"):
12921301
if connection_kwargs.get("protocol") not in [3, "3"]:
@@ -1307,8 +1316,6 @@ def __init__(
13071316
self.connection_kwargs.get("cache_config")
13081317
).get_cache()
13091318

1310-
self._scheduler = Scheduler()
1311-
13121319
connection_kwargs.pop("cache", None)
13131320
connection_kwargs.pop("cache_config", None)
13141321

@@ -1322,7 +1329,6 @@ def __init__(
13221329
# release the lock.
13231330
self._fork_lock = threading.Lock()
13241331
self.reset()
1325-
self.run_scheduled_healthcheck()
13261332

13271333
def __repr__(self) -> (str, str):
13281334
return (
@@ -1452,7 +1458,7 @@ def make_connection(self) -> "ConnectionInterface":
14521458

14531459
if self.cache is not None:
14541460
return CacheProxyConnection(
1455-
self.connection_class(**self.connection_kwargs), self.cache
1461+
self.connection_class(**self.connection_kwargs), self.cache, self._lock
14561462
)
14571463

14581464
return self.connection_class(**self.connection_kwargs)
@@ -1501,8 +1507,6 @@ def disconnect(self, inuse_connections: bool = True) -> None:
15011507
for connection in connections:
15021508
connection.disconnect()
15031509

1504-
self.stop_scheduled_healthcheck()
1505-
15061510
def close(self) -> None:
15071511
"""Close the pool, disconnecting all connections"""
15081512
self.disconnect()
@@ -1514,33 +1518,6 @@ def set_retry(self, retry: "Retry") -> None:
15141518
for conn in self._in_use_connections:
15151519
conn.retry = retry
15161520

1517-
def run_scheduled_healthcheck(self) -> None:
1518-
# Run scheduled healthcheck to avoid stale invalidations in idle connections.
1519-
if self.cache is not None and self._scheduler is not None:
1520-
self._hc_cancel_event = threading.Event()
1521-
hc_interval = self.cache.get_config().get_health_check_interval()
1522-
self._hc_thread = self._scheduler.run_with_interval(
1523-
self._perform_health_check, hc_interval, self._hc_cancel_event
1524-
)
1525-
1526-
def stop_scheduled_healthcheck(self) -> None:
1527-
# Send an event to stop scheduled healthcheck execution.
1528-
if self._hc_cancel_event is not None and not self._hc_cancel_event.is_set():
1529-
self._hc_cancel_event.set()
1530-
1531-
# Joins healthcheck thread on disconnect.
1532-
if self._hc_thread is not None and not self._hc_thread.is_alive():
1533-
self._hc_thread.join()
1534-
1535-
def _perform_health_check(self, done: threading.Event) -> None:
1536-
self._checkpid()
1537-
with self._lock:
1538-
while self._available_connections:
1539-
conn = self._available_connections.pop()
1540-
conn.send_command("PING")
1541-
conn.read_response()
1542-
done.set()
1543-
15441521

15451522
class BlockingConnectionPool(ConnectionPool):
15461523
"""
@@ -1620,7 +1597,7 @@ def make_connection(self):
16201597
"Make a fresh connection."
16211598
if self.cache is not None:
16221599
connection = CacheProxyConnection(
1623-
self.connection_class(**self.connection_kwargs), self.cache
1600+
self.connection_class(**self.connection_kwargs), self.cache, self._lock
16241601
)
16251602
else:
16261603
connection = self.connection_class(**self.connection_kwargs)
@@ -1705,5 +1682,3 @@ def disconnect(self):
17051682
self._checkpid()
17061683
for connection in self._connections:
17071684
connection.disconnect()
1708-
1709-
self.stop_scheduled_healthcheck()

redis/scheduler.py

Lines changed: 0 additions & 60 deletions
This file was deleted.

redis/sentinel.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -115,7 +115,6 @@ def get_master_address(self):
115115
connection_pool = self.connection_pool_ref()
116116
if connection_pool is not None:
117117
connection_pool.disconnect(inuse_connections=False)
118-
connection_pool.run_scheduled_healthcheck()
119118
return master_address
120119

121120
def rotate_slaves(self):

0 commit comments

Comments
 (0)