Skip to content

Commit 9ef5617

Browse files
Yuri Zmytrakov
authored and committed
feat: add redis cache for navigation
1 parent 9feecfe commit 9ef5617

File tree

6 files changed

+237
-3
lines changed

6 files changed

+237
-3
lines changed

.github/workflows/cicd.yml

Lines changed: 13 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -66,6 +66,16 @@ jobs:
6666
ports:
6767
- 9202:9202
6868

69+
redis:
70+
image: redis:7-alpine
71+
options: >-
72+
--health-cmd "redis-cli ping"
73+
--health-interval 10s
74+
--health-timeout 5s
75+
--health-retries 5
76+
ports:
77+
- 6379:6379
78+
6979
strategy:
7080
matrix:
7181
python-version: [ "3.9", "3.10", "3.11", "3.12", "3.13"]
@@ -126,3 +136,6 @@ jobs:
126136
DATABASE_REFRESH: true
127137
ES_VERIFY_CERTS: false
128138
BACKEND: ${{ matrix.backend == 'elasticsearch7' && 'elasticsearch' || matrix.backend == 'elasticsearch8' && 'elasticsearch' || 'opensearch' }}
139+
REDIS_ENABLE: true
140+
REDIS_HOST: localhost
141+
REDIS_PORT: 6379

Makefile

Lines changed: 14 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -82,7 +82,7 @@ test-datetime-filtering-os:
8282
docker compose down
8383

8484
.PHONY: test
85-
test: test-elasticsearch test-datetime-filtering-es test-opensearch test-datetime-filtering-os
85+
test: test-elasticsearch test-datetime-filtering-es test-opensearch test-datetime-filtering-os test-redis-es test-redis-os
8686

8787
.PHONY: run-database-es
8888
run-database-es:
@@ -117,4 +117,16 @@ docs-image:
117117
.PHONY: docs
118118
docs: docs-image
119119
docker compose -f compose.docs.yml \
120-
run docs
120+
run docs
121+
122+
# Run the Redis navigation-cache tests against the Elasticsearch backend.
# The leading "-" on the test recipe tells make to ignore a failing exit
# status, so the compose stack is still torn down afterwards.
.PHONY: test-redis-es
test-redis-es:
	docker compose -f compose-redis.yml up -d
	-$(run_es) /bin/bash -c 'export REDIS_ENABLE=true REDIS_HOST=redis REDIS_PORT=6379 && ./scripts/wait-for-it-es.sh elasticsearch:9200 && cd stac_fastapi/tests/ && pytest redis/ -v'
	docker compose -f compose-redis.yml down

# Same Redis test suite, but against the OpenSearch backend (port 9202).
.PHONY: test-redis-os
test-redis-os:
	docker compose -f compose-redis.yml up -d
	-$(run_os) /bin/bash -c 'export REDIS_ENABLE=true REDIS_HOST=redis REDIS_PORT=6379 && ./scripts/wait-for-it-es.sh opensearch:9202 && cd stac_fastapi/tests/ && pytest redis/ -v'
	docker compose -f compose-redis.yml down

compose-redis.yml

Lines changed: 13 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,13 @@
1+
# Standalone Redis instance used by the Makefile test targets
# (test-redis-es / test-redis-os).
# NOTE(review): the top-level `version` key is obsolete in the Compose
# Specification and is ignored by recent docker compose releases — confirm
# whether it can be dropped.
version: '3.8'

services:
  redis:
    image: redis:7-alpine
    ports:
      - "6379:6379"
    volumes:
      # Persist the append-only file across container restarts.
      - redis_test_data:/data
    command: redis-server --appendonly yes

volumes:
  redis_test_data:

mypy.ini

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,3 @@
1+
# mypy configuration: suppress missing-import errors for the redis package
# (no complete type stubs available in this environment).
[mypy]

[mypy-redis.*]
ignore_missing_imports = True

stac_fastapi/core/stac_fastapi/core/core.py

Lines changed: 70 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -24,9 +24,10 @@
2424
from stac_fastapi.core.base_settings import ApiBaseSettings
2525
from stac_fastapi.core.datetime_utils import format_datetime_range
2626
from stac_fastapi.core.models.links import PagingLinks
27+
from stac_fastapi.core.redis_utils import connect_redis, get_prev_link, save_self_link
2728
from stac_fastapi.core.serializers import CollectionSerializer, ItemSerializer
2829
from stac_fastapi.core.session import Session
29-
from stac_fastapi.core.utilities import filter_fields
30+
from stac_fastapi.core.utilities import filter_fields, get_bool_env
3031
from stac_fastapi.extensions.core.transaction import AsyncBaseTransactionsClient
3132
from stac_fastapi.extensions.core.transaction.request import (
3233
PartialCollection,
@@ -328,6 +329,15 @@ async def all_collections(
328329
if parsed_sort:
329330
sort = parsed_sort
330331

332+
current_url = str(request.url)
333+
redis_enable = get_bool_env("REDIS_ENABLE", default=False)
334+
335+
if redis_enable:
336+
try:
337+
redis = await connect_redis()
338+
except Exception:
339+
redis = None
340+
331341
# Convert q to a list if it's a string
332342
q_list = None
333343
if q is not None:
@@ -426,6 +436,22 @@ async def all_collections(
426436
},
427437
]
428438

439+
if redis_enable and redis:
440+
if next_token:
441+
await save_self_link(redis, next_token, current_url)
442+
443+
prev_link = await get_prev_link(redis, token)
444+
if prev_link:
445+
links.insert(
446+
0,
447+
{
448+
"rel": "prev",
449+
"type": "application/json",
450+
"method": "GET",
451+
"href": prev_link,
452+
},
453+
)
454+
429455
if next_token:
430456
next_link = PagingLinks(next=next_token, request=request).link_next()
431457
links.append(next_link)
@@ -744,6 +770,14 @@ async def post_search(
744770
HTTPException: If there is an error with the cql2_json filter.
745771
"""
746772
base_url = str(request.base_url)
773+
redis_enable = get_bool_env("REDIS_ENABLE", default=False)
774+
775+
redis = None
776+
if redis_enable:
777+
try:
778+
redis = await connect_redis()
779+
except Exception:
780+
redis = None
747781

748782
search = self.database.make_search()
749783

@@ -850,6 +884,41 @@ async def post_search(
850884
]
851885
links = await PagingLinks(request=request, next=next_token).get_links()
852886

887+
collection_links = []
888+
if search_request.collections:
889+
for collection_id in search_request.collections:
890+
collection_links.extend(
891+
[
892+
{
893+
"rel": "collection",
894+
"type": "application/json",
895+
"href": urljoin(base_url, f"collections/{collection_id}"),
896+
},
897+
{
898+
"rel": "parent",
899+
"type": "application/json",
900+
"href": urljoin(base_url, f"collections/{collection_id}"),
901+
},
902+
]
903+
)
904+
links.extend(collection_links)
905+
906+
if redis_enable and redis:
907+
self_link = str(request.url)
908+
await save_self_link(redis, next_token, self_link)
909+
910+
prev_link = await get_prev_link(redis, token_param)
911+
if prev_link:
912+
links.insert(
913+
0,
914+
{
915+
"rel": "prev",
916+
"type": "application/json",
917+
"method": "GET",
918+
"href": prev_link,
919+
},
920+
)
921+
853922
return stac_types.ItemCollection(
854923
type="FeatureCollection",
855924
features=items,
Lines changed: 124 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,124 @@
1+
"""Utilities for connecting to and managing Redis connections."""
2+
3+
from typing import Optional
4+
5+
from pydantic_settings import BaseSettings
6+
from redis import asyncio as aioredis
7+
from redis.asyncio.sentinel import Sentinel
8+
9+
# Module-level cache for the shared Redis client; populated lazily by
# connect_redis() / connect_redis_sentinel() and reused on later calls.
redis_pool: Optional[aioredis.Redis] = None
10+
11+
12+
class RedisSentinelSettings(BaseSettings):
    """Configuration for connecting to Redis Sentinel.

    NOTE(review): as a pydantic ``BaseSettings`` subclass these fields are
    presumably populated from same-named environment variables — confirm.
    """

    # Comma-separated hostnames/ports; paired positionally by
    # connect_redis_sentinel() via zip().
    REDIS_SENTINEL_HOSTS: str = ""
    REDIS_SENTINEL_PORTS: str = "26379"
    REDIS_SENTINEL_MASTER_NAME: str = "master"
    REDIS_DB: int = 15

    # Client/pool tuning shared with the standalone RedisSettings.
    REDIS_MAX_CONNECTIONS: int = 10
    REDIS_RETRY_TIMEOUT: bool = True
    REDIS_DECODE_RESPONSES: bool = True
    REDIS_CLIENT_NAME: str = "stac-fastapi-app"
    REDIS_HEALTH_CHECK_INTERVAL: int = 30
25+
26+
27+
class RedisSettings(BaseSettings):
    """Configuration for connecting to a standalone Redis instance.

    NOTE(review): as a pydantic ``BaseSettings`` subclass these fields are
    presumably populated from same-named environment variables — confirm.
    """

    # Empty REDIS_HOST means "Redis disabled"; connect_redis() returns None.
    REDIS_HOST: str = ""
    REDIS_PORT: int = 6379
    REDIS_DB: int = 0

    # Client/pool tuning shared with RedisSentinelSettings.
    REDIS_MAX_CONNECTIONS: int = 10
    REDIS_RETRY_TIMEOUT: bool = True
    REDIS_DECODE_RESPONSES: bool = True
    REDIS_CLIENT_NAME: str = "stac-fastapi-app"
    REDIS_HEALTH_CHECK_INTERVAL: int = 30
39+
40+
41+
# Default configuration used when callers pass no explicit settings.
# Annotated as RedisSettings (not BaseSettings) so attribute access such as
# settings.REDIS_HOST in connect_redis() type-checks.
redis_settings: RedisSettings = RedisSettings()
43+
44+
45+
async def connect_redis(settings: Optional[RedisSettings] = None) -> Optional[aioredis.Redis]:
    """Return a shared Redis client, or ``None`` when Redis is not configured.

    Args:
        settings: Connection settings; defaults to the module-level
            ``redis_settings``.

    Returns:
        A cached ``aioredis.Redis`` backed by a connection pool, or ``None``
        when ``REDIS_HOST``/``REDIS_PORT`` are unset.  (The return annotation
        was previously ``aioredis.Redis``, hiding the ``None`` path.)
    """
    global redis_pool
    settings = settings or redis_settings

    # Missing host/port means "Redis disabled" rather than an error.
    if not settings.REDIS_HOST or not settings.REDIS_PORT:
        return None

    if redis_pool is None:
        # NOTE(review): redis_pool is shared with connect_redis_sentinel();
        # whichever connects first wins for the process lifetime — confirm
        # this is intended.
        pool = aioredis.ConnectionPool(
            host=settings.REDIS_HOST,
            port=settings.REDIS_PORT,
            db=settings.REDIS_DB,
            max_connections=settings.REDIS_MAX_CONNECTIONS,
            decode_responses=settings.REDIS_DECODE_RESPONSES,
            retry_on_timeout=settings.REDIS_RETRY_TIMEOUT,
            health_check_interval=settings.REDIS_HEALTH_CHECK_INTERVAL,
        )
        redis_pool = aioredis.Redis(
            connection_pool=pool, client_name=settings.REDIS_CLIENT_NAME
        )
    return redis_pool
67+
68+
69+
async def connect_redis_sentinel(
    settings: Optional[RedisSentinelSettings] = None,
) -> Optional[aioredis.Redis]:
    """Return a shared Redis client discovered via Redis Sentinel, or ``None``.

    Args:
        settings: Sentinel settings; defaults to a freshly constructed
            ``RedisSentinelSettings``.  (The previous code fell back to the
            module-level ``redis_settings`` — a plain ``RedisSettings`` with
            no ``REDIS_SENTINEL_*`` fields — so a no-argument call raised
            ``AttributeError``.)

    Returns:
        The cached master client, or ``None`` when Sentinel is not configured
        or the connection attempt fails.
    """
    global redis_pool

    # Bug fix: default must be Sentinel settings, not redis_settings.
    settings = settings or RedisSentinelSettings()

    # Unconfigured Sentinel means "disabled" rather than an error.
    if (
        not settings.REDIS_SENTINEL_HOSTS
        or not settings.REDIS_SENTINEL_PORTS
        or not settings.REDIS_SENTINEL_MASTER_NAME
    ):
        return None

    # Hosts and ports are comma-separated lists paired positionally.
    hosts = [h.strip() for h in settings.REDIS_SENTINEL_HOSTS.split(",") if h.strip()]
    ports = [
        int(p.strip()) for p in settings.REDIS_SENTINEL_PORTS.split(",") if p.strip()
    ]

    if redis_pool is None:
        try:
            sentinel = Sentinel(
                list(zip(hosts, ports)),
                decode_responses=settings.REDIS_DECODE_RESPONSES,
            )
            redis_pool = sentinel.master_for(
                service_name=settings.REDIS_SENTINEL_MASTER_NAME,
                db=settings.REDIS_DB,
                decode_responses=settings.REDIS_DECODE_RESPONSES,
                retry_on_timeout=settings.REDIS_RETRY_TIMEOUT,
                client_name=settings.REDIS_CLIENT_NAME,
                max_connections=settings.REDIS_MAX_CONNECTIONS,
                health_check_interval=settings.REDIS_HEALTH_CHECK_INTERVAL,
            )
        except Exception:
            # Best-effort: any Sentinel failure is treated as "unavailable".
            return None

    return redis_pool
110+
111+
112+
async def save_self_link(
    redis: aioredis.Redis, token: Optional[str], self_href: str
) -> None:
    """Store *self_href* under the pagination *token*, expiring in 30 minutes.

    A falsy token is a no-op (first page has no token to key on).
    """
    if not token:
        return
    key = "nav:self:" + token
    await redis.setex(key, 1800, self_href)
118+
119+
120+
async def get_prev_link(redis: aioredis.Redis, token: Optional[str]) -> Optional[str]:
    """Return the self link previously saved for *token*, or ``None``.

    A falsy token (first page) short-circuits without touching Redis.
    """
    return await redis.get(f"nav:self:{token}") if token else None

0 commit comments

Comments
 (0)