
Commit 3facf11

Yuri Zmytrakov authored and committed
add redis
1 parent 041b729 commit 3facf11

File tree

Makefile
compose.yml
docker-compose.redis.yml
stac_fastapi/core/setup.py
stac_fastapi/core/stac_fastapi/core/core.py
stac_fastapi/core/stac_fastapi/core/redis_utils.py

6 files changed: +166 -22 lines changed

Makefile

Lines changed: 1 addition & 1 deletion
@@ -63,7 +63,7 @@ docker-shell-os:
 
 .PHONY: test-elasticsearch
 test-elasticsearch:
-	-$(run_es) /bin/bash -c 'export && ./scripts/wait-for-it-es.sh elasticsearch:9200 && cd stac_fastapi/tests/ && pytest'
+	-$(run_es) /bin/bash -c 'pip install redis export && ./scripts/wait-for-it-es.sh elasticsearch:9200 && cd stac_fastapi/tests/ && pytest'
 	docker compose down
 
 .PHONY: test-opensearch

compose.yml

Lines changed: 10 additions & 0 deletions
@@ -22,6 +22,8 @@ services:
       - ES_VERIFY_CERTS=false
       - BACKEND=elasticsearch
       - DATABASE_REFRESH=true
+      - REDIS_HOST=redis
+      - REDIS_PORT=6379
     ports:
       - "8080:8080"
     volumes:
@@ -30,6 +32,7 @@ services:
       - ./esdata:/usr/share/elasticsearch/data
     depends_on:
       - elasticsearch
+      - redis
     command:
       bash -c "./scripts/wait-for-it-es.sh es-container:9200 && python -m stac_fastapi.elasticsearch.app"
 
@@ -94,3 +97,10 @@ services:
       - ./opensearch/snapshots:/usr/share/opensearch/snapshots
     ports:
       - "9202:9202"
+
+  redis:
+    container_name: stac-redis
+    image: redis:7.2-alpine
+    restart: always
+    ports:
+      - "6379:6379"

docker-compose.redis.yml

Lines changed: 24 additions & 0 deletions
@@ -0,0 +1,24 @@
+version: '3.8'
+
+services:
+  redis:
+    image: redis:7-alpine
+    container_name: stac-app-redis
+    ports:
+      - "6379:6379"
+    volumes:
+      - redis_data:/data
+    command: >
+      redis-server
+      --appendonly yes
+      --maxmemory 256mb
+      --maxmemory-policy allkeys-lru
+    healthcheck:
+      test: ["CMD", "redis-cli", "ping"]
+      interval: 10s
+      timeout: 5s
+      retries: 3
+    restart: unless-stopped
+
+volumes:
+  redis_data:
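This standalone compose file can be brought up on its own, for example with docker compose -f docker-compose.redis.yml up -d. The healthcheck simply runs redis-cli ping inside the container, --appendonly yes persists writes to the redis_data volume, and --maxmemory 256mb with --maxmemory-policy allkeys-lru lets Redis evict the least recently used keys once the memory cap is reached.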

stac_fastapi/core/setup.py

Lines changed: 1 addition & 0 deletions
@@ -19,6 +19,7 @@
     "pygeofilter~=0.3.1",
     "jsonschema~=4.0.0",
     "slowapi~=0.1.9",
+    "redis==6.4.0",
 ]
 
 setup(
stac_fastapi/core/stac_fastapi/core/core.py

Lines changed: 90 additions & 21 deletions
@@ -44,6 +44,7 @@
 from stac_fastapi.types.extension import ApiExtension
 from stac_fastapi.types.requests import get_base_url
 from stac_fastapi.types.search import BaseSearchPostRequest
+from stac_fastapi.core.redis_utils import connect_redis, close_redis
 
 logger = logging.getLogger(__name__)
 
@@ -237,6 +238,18 @@ async def all_collections(self, **kwargs) -> stac_types.Collections:
         base_url = str(request.base_url)
         limit = int(request.query_params.get("limit", os.getenv("STAC_ITEM_LIMIT", 10)))
         token = request.query_params.get("token")
+        current_url = str(request.url)
+        redis = None
+        try:
+            redis = await connect_redis()
+
+            if redis:
+                current_key = "current:collections"
+                await redis.setex(current_key, 600, current_url)
+
+        except Exception as e:
+            logger.error(f"Redis connection error: {e}")
+            redis = None
 
         collections, next_token = await self.database.get_all_collections(
             token=token, limit=limit, request=request
@@ -252,10 +265,27 @@ async def all_collections(self, **kwargs) -> stac_types.Collections:
             },
         ]
 
-        if next_token:
+        if token and redis:
+            prev_key = "prev:collections"
+            previous_url = await redis.get(prev_key)
+
+            if previous_url:
+                if previous_url != current_url:
+                    links.append({
+                        "rel": "previous",
+                        "type": MimeTypes.json,
+                        "href": previous_url,
+                    })
+
+        if next_token and redis:
+            prev_key = "prev:collections"
+            await redis.setex(prev_key, 600, current_url)
+
             next_link = PagingLinks(next=next_token, request=request).link_next()
             links.append(next_link)
 
+        if redis:
+            await redis.close()
         return stac_types.Collections(collections=collections, links=links)
 
     async def get_collection(
@@ -310,20 +340,23 @@ async def item_collection(
         """
         request: Request = kwargs["request"]
         token = request.query_params.get("token")
-        if not hasattr(self, '_prev_links'):
-            self._prev_links = {}
-
-        session_id = request.cookies.get('stac_session', 'default_session')
-        current_self_link = str(request.url)
-
-        if session_id not in self._prev_links:
-            self._prev_links[session_id] = []
-
-        history = self._prev_links[session_id]
-        if not history or current_self_link != history[-1]:
-            history.append(current_self_link)
         base_url = str(request.base_url)
 
+        current_url = str(request.url)
+
+        try:
+            redis = await connect_redis()
+        except Exception as e:
+            logger.error(f"Redis connection error: {e}")
+            redis = None
+
+        if redis:
+            try:
+                current_key = f"current:{collection_id}"
+                await redis.setex(current_key, 600, current_url)
+            except Exception as e:
+                logger.error(f"Redis error: {e}")
+
         collection = await self.get_collection(
             collection_id=collection_id, request=request
         )
@@ -381,16 +414,28 @@ async def item_collection(
         ]
 
         paging_links = await PagingLinks(request=request, next=next_token).get_links()
-        history = self._prev_links.get(session_id, [])
-        if len(history) > 1:
-            previous_self_link = history[-2]
-            paging_links.append({
-                "rel": "previous",
-                "type": "application/json",
-                "href": previous_self_link,
-            })
+
+        if token and redis:
+            prev_key = f"prev:{collection_id}"
+            previous_url = await redis.get(prev_key)
+
+            if previous_url:
+                # prevent looped navigation
+                if previous_url != current_url:
+                    paging_links.append({
+                        "rel": "previous",
+                        "type": "application/json",
+                        "href": previous_url,
+                    })
+        if redis and next_token:
+            prev_key = f"prev:{collection_id}"
+            await redis.setex(prev_key, 600, current_url)
+
         links = collection_links + paging_links
 
+        if redis:
+            await redis.close()
+
         return stac_types.ItemCollection(
             type="FeatureCollection",
             features=items,
@@ -529,6 +574,12 @@ async def post_search(
             HTTPException: If there is an error with the cql2_json filter.
         """
         base_url = str(request.base_url)
+        current_url = str(request.url)
+        try:
+            redis = await connect_redis()
+        except Exception as e:
+            logger.error(f"Redis connection error: {e}")
+            redis = None
 
         search = self.database.make_search()
 
@@ -628,6 +679,24 @@ async def post_search(
         ]
         links = await PagingLinks(request=request, next=next_token).get_links()
 
+        if search_request.token and redis:
+            prev_key = f"prev:search_result"
+            previous_url = await redis.get(prev_key)
+
+            if previous_url and previous_url != current_url:
+                links.append({
+                    "rel": "previous",
+                    "type": "application/json",
+                    "href": previous_url,
+                })
+
+        if redis and next_token:
+            prev_key = f"prev:search_result"
+            await redis.setex(prev_key, 600, current_url)
+
+        if redis:
+            await redis.close()
+
         return stac_types.ItemCollection(
             type="FeatureCollection",
             features=items,
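In all three handlers the pattern is the same: whenever a page hands out a next token, the current URL is cached under a prev:* key with a 600-second TTL, and a later request that carries a token reads that key back to emit a rel="previous" link. A minimal self-contained sketch of that flow, assuming the connect_redis helper added in redis_utils.py; the build_prev_link name and its arguments are illustrative, not part of the commit:

from typing import Optional

from stac_fastapi.core.redis_utils import connect_redis


async def build_prev_link(
    scope_key: str,
    current_url: str,
    token: Optional[str],
    next_token: Optional[str],
) -> Optional[dict]:
    """Sketch of the prev-link caching used by all_collections, item_collection and post_search.

    scope_key is e.g. "collections", a collection id, or "search_result": the handler
    stores the current URL under prev:<scope_key> whenever it hands out a next token,
    and reads it back on tokenized requests to build a rel="previous" link.
    """
    redis = await connect_redis()
    prev_link = None
    try:
        if token:  # this request was reached via a next link
            previous_url = await redis.get(f"prev:{scope_key}")
            if previous_url and previous_url != current_url:
                prev_link = {"rel": "previous", "type": "application/json", "href": previous_url}
        if next_token:  # remember where the next page came from, for 10 minutes
            await redis.setex(f"prev:{scope_key}", 600, current_url)
    finally:
        await redis.close()
    return prev_link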
stac_fastapi/core/stac_fastapi/core/redis_utils.py

Lines changed: 40 additions & 0 deletions

@@ -0,0 +1,40 @@
+import os
+from redis import asyncio as aioredis
+from pydantic_settings import BaseSettings
+from stac_fastapi.core.utilities import get_bool_env
+from typing import Optional
+
+redis_pool = None
+
+class RedisSettings(BaseSettings):
+    url: str = os.getenv("REDIS_URL", "redis://redis:6379")
+    max_connections: int = int(os.getenv("REDIS_MAX_CONNECTIONS", 10))
+    retry_on_timeout: bool = get_bool_env("REDIS_RETRY_TIMEOUT", True)
+    decode_responses: bool = get_bool_env("REDIS_DECODE_RESPONSES", True)
+    client_name: str = os.getenv("REDIS_CLIENT_NAME", "stac-fastapi-app")
+    health_check_interval: int = int(os.getenv("REDIS_HEALTH_CHECK_INTERVAL", 30))
+
+redis_settings = RedisSettings()
+
+async def connect_redis(settings: Optional[RedisSettings] = None):
+    """Get Redis connection with optional custom settings"""
+    global redis_pool
+    settings = settings or redis_settings
+
+    if redis_pool is None:
+        redis_pool = aioredis.ConnectionPool.from_url(
+            settings.url,
+            max_connections=settings.max_connections,
+            retry_on_timeout=settings.retry_on_timeout,
+            decode_responses=settings.decode_responses,
+            client_name=settings.client_name,
+            health_check_interval=settings.health_check_interval,
+        )
+    return aioredis.Redis(connection_pool=redis_pool)
+
+async def close_redis():
+    """Close Redis connection pool"""
+    global redis_pool
+    if redis_pool:
+        await redis_pool.disconnect()
+        redis_pool = None
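connect_redis lazily creates one module-level connection pool and returns a client bound to it, so repeated calls are cheap; close_redis releases the pool, e.g. at application shutdown. A short usage sketch based only on the calls shown above:

import asyncio

from stac_fastapi.core.redis_utils import connect_redis, close_redis


async def main():
    # Each call reuses the shared pool created on first use.
    redis = await connect_redis()
    await redis.setex("prev:collections", 600, "http://localhost:8080/collections?limit=10")
    print(await redis.get("prev:collections"))  # decode_responses=True, so a str comes back
    await redis.close()

    # Release the shared pool itself (the commit imports this alongside connect_redis).
    await close_redis()


if __name__ == "__main__":
    asyncio.run(main())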
