
Commit d6d268e

Yuri Zmytrakov authored and committed
feat(navigation): implement Redis caching for navigation
- Configure a Redis image for the navigation caching tests
- Update the Makefile with Redis test targets for ES and OS
- Integrate a Redis/Redis Sentinel client to cache navigation links
- Add Redis and Sentinel helper functions for navigation caching
1 parent 9feecfe commit d6d268e
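At a high level, the caching stores each page's own URL in Redis under the token that leads to the next page; when a client later requests that next page, the stored URL can be returned as a rel="prev" link. Below is a minimal sketch of that flow, assuming the connect_redis, save_self_link, and get_prev_link helpers added in stac_fastapi/core/redis_utils.py in this commit and the existing get_bool_env utility from stac_fastapi.core.utilities; the build_pagination_links wrapper and its arguments are illustrative only, not part of the diff.

import logging

from stac_fastapi.core.redis_utils import connect_redis, get_prev_link, save_self_link
from stac_fastapi.core.utilities import get_bool_env

logger = logging.getLogger(__name__)


async def build_pagination_links(current_url, token, next_token, links):
    """Illustrative wrapper: cache the current URL under next_token and
    surface a 'prev' link if the incoming token was cached earlier."""
    if not get_bool_env("REDIS_ENABLE", default=False):
        return links  # caching disabled; pagination behaves as before

    try:
        redis = await connect_redis()
    except Exception as e:
        logger.warning(f"Redis connection failed, continuing without Redis: {e}")
        return links

    if redis is None:  # connect_redis returns None when REDIS_HOST is unset
        return links

    # The page reached via next_token should link back to the current page.
    await save_self_link(redis, next_token, current_url)

    # If this request's token was stored by an earlier page, expose it as 'prev'.
    prev_href = await get_prev_link(redis, token)
    if prev_href:
        links.insert(
            0,
            {
                "rel": "prev",
                "type": "application/json",
                "method": "GET",
                "href": prev_href,
            },
        )
    return links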

8 files changed: +245, -3 lines changed


.github/workflows/cicd.yml

Lines changed: 13 additions & 0 deletions
@@ -66,6 +66,16 @@ jobs:
         ports:
           - 9202:9202
 
+      redis:
+        image: redis:7-alpine
+        options: >-
+          --health-cmd "redis-cli ping"
+          --health-interval 10s
+          --health-timeout 5s
+          --health-retries 5
+        ports:
+          - 6379:6379
+
     strategy:
       matrix:
         python-version: [ "3.9", "3.10", "3.11", "3.12", "3.13"]
@@ -126,3 +136,6 @@ jobs:
           DATABASE_REFRESH: true
           ES_VERIFY_CERTS: false
           BACKEND: ${{ matrix.backend == 'elasticsearch7' && 'elasticsearch' || matrix.backend == 'elasticsearch8' && 'elasticsearch' || 'opensearch' }}
+          REDIS_ENABLE: true
+          REDIS_HOST: localhost
+          REDIS_PORT: 6379

Makefile

Lines changed: 14 additions & 2 deletions
@@ -82,7 +82,7 @@ test-datetime-filtering-os:
     docker compose down
 
 .PHONY: test
-test: test-elasticsearch test-datetime-filtering-es test-opensearch test-datetime-filtering-os
+test: test-elasticsearch test-datetime-filtering-es test-opensearch test-datetime-filtering-os test-redis-es test-redis-os
 
 .PHONY: run-database-es
 run-database-es:
@@ -117,4 +117,16 @@ docs-image:
 .PHONY: docs
 docs: docs-image
     docker compose -f compose.docs.yml \
-        run docs
+        run docs
+
+.PHONY: test-redis-es
+test-redis-es:
+    docker compose -f compose-redis.yml up -d
+    -$(run_es) /bin/bash -c 'export REDIS_ENABLE=true REDIS_HOST=redis REDIS_PORT=6379 && ./scripts/wait-for-it-es.sh elasticsearch:9200 && cd stac_fastapi/tests/ && pytest redis/ -v'
+    docker compose -f compose-redis.yml down
+
+.PHONY: test-redis-os
+test-redis-os:
+    docker compose -f compose-redis.yml up -d
+    -$(run_os) /bin/bash -c 'export REDIS_ENABLE=true REDIS_HOST=redis REDIS_PORT=6379 && ./scripts/wait-for-it-es.sh opensearch:9202 && cd stac_fastapi/tests/ && pytest redis/ -v'
+    docker compose -f compose-redis.yml down

compose-redis.yml

Lines changed: 13 additions & 0 deletions
@@ -0,0 +1,13 @@
+version: '3.8'
+
+services:
+  redis:
+    image: redis:7-alpine
+    ports:
+      - "6379:6379"
+    volumes:
+      - redis_test_data:/data
+    command: redis-server --appendonly yes
+
+volumes:
+  redis_test_data:

mypy.ini

Lines changed: 3 additions & 0 deletions
@@ -0,0 +1,3 @@
+[mypy]
+[mypy-redis.*]
+ignore_missing_imports = True

stac_fastapi/core/stac_fastapi/core/core.py

Lines changed: 76 additions & 1 deletion
@@ -24,9 +24,10 @@
 from stac_fastapi.core.base_settings import ApiBaseSettings
 from stac_fastapi.core.datetime_utils import format_datetime_range
 from stac_fastapi.core.models.links import PagingLinks
+from stac_fastapi.core.redis_utils import connect_redis, get_prev_link, save_self_link
 from stac_fastapi.core.serializers import CollectionSerializer, ItemSerializer
 from stac_fastapi.core.session import Session
-from stac_fastapi.core.utilities import filter_fields
+from stac_fastapi.core.utilities import filter_fields, get_bool_env
 from stac_fastapi.extensions.core.transaction import AsyncBaseTransactionsClient
 from stac_fastapi.extensions.core.transaction.request import (
     PartialCollection,
@@ -328,6 +329,20 @@ async def all_collections(
         if parsed_sort:
             sort = parsed_sort
 
+        current_url = str(request.url)
+        redis_enable = get_bool_env("REDIS_ENABLE", default=False)
+
+        redis = None
+        if redis_enable:
+            try:
+                redis = await connect_redis()
+                logger.info("Redis connection established successfully")
+            except Exception as e:
+                redis = None
+                logger.warning(
+                    f"Redis connection failed, continuing without Redis: {e}"
+                )
+
         # Convert q to a list if it's a string
         q_list = None
         if q is not None:
@@ -426,6 +441,22 @@ async def all_collections(
             },
         ]
 
+        if redis_enable and redis:
+            if next_token:
+                await save_self_link(redis, next_token, current_url)
+
+            prev_link = await get_prev_link(redis, token)
+            if prev_link:
+                links.insert(
+                    0,
+                    {
+                        "rel": "prev",
+                        "type": "application/json",
+                        "method": "GET",
+                        "href": prev_link,
+                    },
+                )
+
         if next_token:
             next_link = PagingLinks(next=next_token, request=request).link_next()
             links.append(next_link)
@@ -744,6 +775,7 @@ async def post_search(
             HTTPException: If there is an error with the cql2_json filter.
         """
         base_url = str(request.base_url)
+        redis_enable = get_bool_env("REDIS_ENABLE", default=False)
 
         search = self.database.make_search()
 
@@ -850,6 +882,49 @@ async def post_search(
         ]
         links = await PagingLinks(request=request, next=next_token).get_links()
 
+        collection_links = []
+        if search_request.collections:
+            for collection_id in search_request.collections:
+                collection_links.extend(
+                    [
+                        {
+                            "rel": "collection",
+                            "type": "application/json",
+                            "href": urljoin(base_url, f"collections/{collection_id}"),
+                        },
+                        {
+                            "rel": "parent",
+                            "type": "application/json",
+                            "href": urljoin(base_url, f"collections/{collection_id}"),
+                        },
+                    ]
+                )
+            links.extend(collection_links)
+
+        if redis_enable:
+            redis = None
+            try:
+                redis = await connect_redis()
+                logger.info("Redis connection established successfully")
+                self_link = str(request.url)
+                await save_self_link(redis, next_token, self_link)
+
+                prev_link = await get_prev_link(redis, token_param)
+                if prev_link:
+                    links.insert(
+                        0,
+                        {
+                            "rel": "prev",
+                            "type": "application/json",
+                            "method": "GET",
+                            "href": prev_link,
+                        },
+                    )
+            except Exception as e:
+                logger.warning(
+                    f"Redis connection failed, continuing without Redis: {e}"
+                )
+
         return stac_types.ItemCollection(
             type="FeatureCollection",
             features=items,
stac_fastapi/core/stac_fastapi/core/redis_utils.py

Lines changed: 124 additions & 0 deletions
@@ -0,0 +1,124 @@
+"""Utilities for connecting to and managing Redis connections."""
+
+from typing import Optional
+
+from pydantic_settings import BaseSettings
+from redis import asyncio as aioredis
+from redis.asyncio.sentinel import Sentinel
+
+redis_pool: Optional[aioredis.Redis] = None
+
+
+class RedisSentinelSettings(BaseSettings):
+    """Configuration for connecting to Redis Sentinel."""
+
+    REDIS_SENTINEL_HOSTS: str = ""
+    REDIS_SENTINEL_PORTS: str = "26379"
+    REDIS_SENTINEL_MASTER_NAME: str = "master"
+    REDIS_DB: int = 15
+
+    REDIS_MAX_CONNECTIONS: int = 10
+    REDIS_RETRY_TIMEOUT: bool = True
+    REDIS_DECODE_RESPONSES: bool = True
+    REDIS_CLIENT_NAME: str = "stac-fastapi-app"
+    REDIS_HEALTH_CHECK_INTERVAL: int = 30
+
+
+class RedisSettings(BaseSettings):
+    """Configuration for connecting to Redis."""
+
+    REDIS_HOST: str = ""
+    REDIS_PORT: int = 6379
+    REDIS_DB: int = 0
+
+    REDIS_MAX_CONNECTIONS: int = 10
+    REDIS_RETRY_TIMEOUT: bool = True
+    REDIS_DECODE_RESPONSES: bool = True
+    REDIS_CLIENT_NAME: str = "stac-fastapi-app"
+    REDIS_HEALTH_CHECK_INTERVAL: int = 30
+
+
+# Select the Redis or Redis Sentinel configuration
+redis_settings: BaseSettings = RedisSettings()
+
+
+async def connect_redis(settings: Optional[RedisSettings] = None) -> aioredis.Redis:
+    """Return a Redis connection."""
+    global redis_pool
+    settings = settings or redis_settings
+
+    if not settings.REDIS_HOST or not settings.REDIS_PORT:
+        return None
+
+    if redis_pool is None:
+        pool = aioredis.ConnectionPool(
+            host=settings.REDIS_HOST,
+            port=settings.REDIS_PORT,
+            db=settings.REDIS_DB,
+            max_connections=settings.REDIS_MAX_CONNECTIONS,
+            decode_responses=settings.REDIS_DECODE_RESPONSES,
+            retry_on_timeout=settings.REDIS_RETRY_TIMEOUT,
+            health_check_interval=settings.REDIS_HEALTH_CHECK_INTERVAL,
+        )
+        redis_pool = aioredis.Redis(
+            connection_pool=pool, client_name=settings.REDIS_CLIENT_NAME
+        )
+    return redis_pool
+
+
+async def connect_redis_sentinel(
+    settings: Optional[RedisSentinelSettings] = None,
+) -> Optional[aioredis.Redis]:
+    """Return a Redis Sentinel connection."""
+    global redis_pool
+
+    settings = settings or redis_settings
+
+    if (
+        not settings.REDIS_SENTINEL_HOSTS
+        or not settings.REDIS_SENTINEL_PORTS
+        or not settings.REDIS_SENTINEL_MASTER_NAME
+    ):
+        return None
+
+    hosts = [h.strip() for h in settings.REDIS_SENTINEL_HOSTS.split(",") if h.strip()]
+    ports = [
+        int(p.strip()) for p in settings.REDIS_SENTINEL_PORTS.split(",") if p.strip()
+    ]
+
+    if redis_pool is None:
+        try:
+            sentinel = Sentinel(
+                [(h, p) for h, p in zip(hosts, ports)],
+                decode_responses=settings.REDIS_DECODE_RESPONSES,
+            )
+            master = sentinel.master_for(
+                service_name=settings.REDIS_SENTINEL_MASTER_NAME,
+                db=settings.REDIS_DB,
+                decode_responses=settings.REDIS_DECODE_RESPONSES,
+                retry_on_timeout=settings.REDIS_RETRY_TIMEOUT,
+                client_name=settings.REDIS_CLIENT_NAME,
+                max_connections=settings.REDIS_MAX_CONNECTIONS,
+                health_check_interval=settings.REDIS_HEALTH_CHECK_INTERVAL,
+            )
+            redis_pool = master
+
+        except Exception:
+            return None
+
+    return redis_pool
+
+
+async def save_self_link(
+    redis: aioredis.Redis, token: Optional[str], self_href: str
+) -> None:
+    """Save the self link for the current token with 30 min TTL."""
+    if token:
+        await redis.setex(f"nav:self:{token}", 1800, self_href)
+
+
+async def get_prev_link(redis: aioredis.Redis, token: Optional[str]) -> Optional[str]:
+    """Get the previous page link for the current token (if exists)."""
+    if not token:
+        return None
+    return await redis.get(f"nav:self:{token}")
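The module above defaults to the plain Redis configuration (redis_settings: BaseSettings = RedisSettings()). The following is a hedged sketch of how a deployment might use the Sentinel path instead, constructing RedisSentinelSettings explicitly and calling connect_redis_sentinel; the host names, ports, and master name are placeholder values, not part of this commit.

import asyncio

from stac_fastapi.core.redis_utils import RedisSentinelSettings, connect_redis_sentinel


async def main() -> None:
    # Placeholder values: one port per comma-separated host, matching how
    # connect_redis_sentinel splits REDIS_SENTINEL_HOSTS and REDIS_SENTINEL_PORTS.
    settings = RedisSentinelSettings(
        REDIS_SENTINEL_HOSTS="sentinel-1,sentinel-2,sentinel-3",
        REDIS_SENTINEL_PORTS="26379,26379,26379",
        REDIS_SENTINEL_MASTER_NAME="mymaster",
    )
    redis = await connect_redis_sentinel(settings)
    if redis is None:
        print("Sentinel is not configured or could not be reached")
        return
    print(await redis.ping())  # basic connectivity check against the elected master


asyncio.run(main())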

stac_fastapi/elasticsearch/setup.py

Lines changed: 1 addition & 0 deletions
@@ -21,6 +21,7 @@
         "pre-commit~=3.0.0",
         "ciso8601~=2.3.0",
         "httpx>=0.24.0,<0.28.0",
+        "redis==6.4.0",
     ],
     "docs": ["mkdocs~=1.4.0", "mkdocs-material~=9.0.0", "pdocs~=1.2.0"],
     "server": ["uvicorn[standard]~=0.23.0"],

stac_fastapi/opensearch/setup.py

Lines changed: 1 addition & 0 deletions
@@ -22,6 +22,7 @@
         "pre-commit~=3.0.0",
         "ciso8601~=2.3.0",
         "httpx>=0.24.0,<0.28.0",
+        "redis==6.4.0",
     ],
     "docs": ["mkdocs~=1.4.0", "mkdocs-material~=9.0.0", "pdocs~=1.2.0"],
     "server": ["uvicorn[standard]~=0.23.0"],
