
Commit 218df70

Yuri Zmytrakov authored and committed
fix: implement recommendations
1 parent cb7ee7c commit 218df70


8 files changed: +124 −144 lines changed


Makefile

Lines changed: 1 addition & 13 deletions
@@ -82,7 +82,7 @@ test-datetime-filtering-os:
 	docker compose down
 
 .PHONY: test
-test: test-elasticsearch test-datetime-filtering-es test-opensearch test-datetime-filtering-os test-redis-es test-redis-os
+test: test-elasticsearch test-datetime-filtering-es test-opensearch test-datetime-filtering-os
 
 .PHONY: run-database-es
 run-database-es:
@@ -118,15 +118,3 @@ docs-image:
 docs: docs-image
 	docker compose -f compose.docs.yml \
 		run docs
-
-.PHONY: test-redis-es
-test-redis-es:
-	docker compose -f compose-redis.yml up -d
-	-$(run_es) /bin/bash -c 'export REDIS_ENABLE=true REDIS_HOST=redis REDIS_PORT=6379 && ./scripts/wait-for-it-es.sh elasticsearch:9200 && cd stac_fastapi/tests/ && pytest redis/ -v'
-	docker compose -f compose-redis.yml down
-
-.PHONY: test-redis-os
-test-redis-os:
-	docker compose -f compose-redis.yml up -d
-	-$(run_os) /bin/bash -c 'export REDIS_ENABLE=true REDIS_HOST=redis REDIS_PORT=6379 && ./scripts/wait-for-it-es.sh opensearch:9202 && cd stac_fastapi/tests/ && pytest redis/ -v'
-	docker compose -f compose-redis.yml down

compose-redis.yml

Lines changed: 0 additions & 13 deletions
This file was deleted.

compose.yml

Lines changed: 19 additions & 0 deletions
@@ -23,6 +23,9 @@ services:
       - BACKEND=elasticsearch
       - DATABASE_REFRESH=true
       - ENABLE_COLLECTIONS_SEARCH_ROUTE=true
+      - REDIS_ENABLE=true
+      - REDIS_HOST=redis
+      - REDIS_PORT=6379
     ports:
       - "8080:8080"
     volumes:
@@ -31,6 +34,7 @@ services:
       - ./esdata:/usr/share/elasticsearch/data
     depends_on:
       - elasticsearch
+      - redis
     command:
       bash -c "./scripts/wait-for-it-es.sh es-container:9200 && python -m stac_fastapi.elasticsearch.app"
 
@@ -58,6 +62,9 @@ services:
       - BACKEND=opensearch
       - STAC_FASTAPI_RATE_LIMIT=200/minute
       - ENABLE_COLLECTIONS_SEARCH_ROUTE=true
+      - REDIS_ENABLE=true
+      - REDIS_HOST=redis
+      - REDIS_PORT=6379
     ports:
       - "8082:8082"
     volumes:
@@ -66,6 +73,7 @@ services:
       - ./osdata:/usr/share/opensearch/data
     depends_on:
       - opensearch
+      - redis
     command:
       bash -c "./scripts/wait-for-it-es.sh os-container:9202 && python -m stac_fastapi.opensearch.app"
 
@@ -96,3 +104,14 @@ services:
       - ./opensearch/snapshots:/usr/share/opensearch/snapshots
     ports:
       - "9202:9202"
+
+  redis:
+    image: redis:7-alpine
+    hostname: redis
+    ports:
+      - "6379:6379"
+    volumes:
+      - redis_test_data:/data
+    command: redis-server
+
+volumes:
+  redis_test_data:
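
As a quick sanity check (a sketch, not part of this commit), the redis service added above can be pinged from the host with the async client that the setup.py change below pins; the host and port assume the default "6379:6379" mapping shown here.

    # Sanity-check sketch only: ping the redis container started by compose.yml.
    # Host/port are assumptions based on the "6379:6379" mapping above.
    import asyncio

    from redis import asyncio as aioredis

    async def ping() -> None:
        client = aioredis.Redis(host="localhost", port=6379, decode_responses=True)
        print(await client.ping())  # True when the container is reachable
        await client.aclose()

    asyncio.run(ping())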

stac_fastapi/core/setup.py

Lines changed: 1 addition & 0 deletions
@@ -19,6 +19,7 @@
     "pygeofilter~=0.3.1",
     "jsonschema~=4.0.0",
     "slowapi~=0.1.9",
+    "redis==6.4.0",
 ]
 
 setup(

stac_fastapi/core/stac_fastapi/core/core.py

Lines changed: 16 additions & 54 deletions
@@ -24,7 +24,7 @@
 from stac_fastapi.core.base_settings import ApiBaseSettings
 from stac_fastapi.core.datetime_utils import format_datetime_range
 from stac_fastapi.core.models.links import PagingLinks
-from stac_fastapi.core.redis_utils import connect_redis, get_prev_link, save_self_link
+from stac_fastapi.core.redis_utils import redis_pagination_links
 from stac_fastapi.core.serializers import CollectionSerializer, ItemSerializer
 from stac_fastapi.core.session import Session
 from stac_fastapi.core.utilities import filter_fields, get_bool_env
@@ -270,6 +270,7 @@ async def all_collections(
             A Collections object containing all the collections in the database and links to various resources.
         """
         base_url = str(request.base_url)
+        redis_enable = get_bool_env("REDIS_ENABLE", default=False)
 
         # Get the global limit from environment variable
         global_limit = None
@@ -329,20 +330,6 @@ async def all_collections(
             if parsed_sort:
                 sort = parsed_sort
 
-        current_url = str(request.url)
-        redis_enable = get_bool_env("REDIS_ENABLE", default=False)
-
-        redis = None
-        if redis_enable:
-            try:
-                redis = await connect_redis()
-                logger.info("Redis connection established successfully")
-            except Exception as e:
-                redis = None
-                logger.warning(
-                    f"Redis connection failed, continuing without Redis: {e}"
-                )
-
         # Convert q to a list if it's a string
         q_list = None
         if q is not None:
@@ -441,21 +428,13 @@ async def all_collections(
             },
         ]
 
-        if redis_enable and redis:
-            if next_token:
-                await save_self_link(redis, next_token, current_url)
-
-            prev_link = await get_prev_link(redis, token)
-            if prev_link:
-                links.insert(
-                    0,
-                    {
-                        "rel": "prev",
-                        "type": "application/json",
-                        "method": "GET",
-                        "href": prev_link,
-                    },
-                )
+        if redis_enable:
+            await redis_pagination_links(
+                current_url=str(request.url),
+                token=token,
+                next_token=next_token,
+                links=links,
+            )
 
         if next_token:
             next_link = PagingLinks(next=next_token, request=request).link_next()
@@ -775,9 +754,8 @@ async def post_search(
             HTTPException: If there is an error with the cql2_json filter.
         """
         base_url = str(request.base_url)
-        redis_enable = get_bool_env("REDIS_ENABLE", default=False)
-
         search = self.database.make_search()
+        redis_enable = get_bool_env("REDIS_ENABLE", default=False)
 
         if search_request.ids:
             search = self.database.apply_ids_filter(
@@ -902,28 +880,12 @@ async def post_search(
         links.extend(collection_links)
 
         if redis_enable:
-            redis = None
-            try:
-                redis = await connect_redis()
-                logger.info("Redis connection established successfully")
-                self_link = str(request.url)
-                await save_self_link(redis, next_token, self_link)
-
-                prev_link = await get_prev_link(redis, token_param)
-                if prev_link:
-                    links.insert(
-                        0,
-                        {
-                            "rel": "prev",
-                            "type": "application/json",
-                            "method": "GET",
-                            "href": prev_link,
-                        },
-                    )
-            except Exception as e:
-                logger.warning(
-                    f"Redis connection failed, continuing without Redis: {e}"
-                )
+            await redis_pagination_links(
+                current_url=str(request.url),
+                token=token_param,
+                next_token=next_token,
+                links=links,
+            )
 
         return stac_types.ItemCollection(
             type="FeatureCollection",
stac_fastapi/core/stac_fastapi/core/redis_utils.py

Lines changed: 87 additions & 59 deletions
@@ -1,5 +1,6 @@
 """Utilities for connecting to and managing Redis connections."""
 
+import logging
 from typing import Optional
 
 from pydantic_settings import BaseSettings
@@ -8,6 +9,8 @@
 
 redis_pool: Optional[aioredis.Redis] = None
 
+logger = logging.getLogger(__name__)
+
 
 class RedisSentinelSettings(BaseSettings):
     """Configuration for connecting to Redis Sentinel."""
2528

2629

2730
class RedisSettings(BaseSettings):
28-
"""Configuration for connecting Redis Sentinel."""
31+
"""Configuration for connecting Redis."""
2932

3033
REDIS_HOST: str = ""
3134
REDIS_PORT: int = 6379
@@ -38,75 +41,71 @@ class RedisSettings(BaseSettings):
3841
REDIS_HEALTH_CHECK_INTERVAL: int = 30
3942

4043

41-
# Select the Redis or Redis Sentinel configuration
42-
redis_settings: BaseSettings = RedisSettings()
43-
44+
# Configure only one Redis configuration
45+
sentinel_settings = RedisSentinelSettings()
46+
standalone_settings = RedisSettings()
4447

45-
async def connect_redis(settings: Optional[RedisSettings] = None) -> aioredis.Redis:
46-
"""Return a Redis connection."""
47-
global redis_pool
48-
settings = settings or redis_settings
49-
50-
if not settings.REDIS_HOST or not settings.REDIS_PORT:
51-
return None
5248

53-
if redis_pool is None:
54-
pool = aioredis.ConnectionPool(
55-
host=settings.REDIS_HOST,
56-
port=settings.REDIS_PORT,
57-
db=settings.REDIS_DB,
58-
max_connections=settings.REDIS_MAX_CONNECTIONS,
59-
decode_responses=settings.REDIS_DECODE_RESPONSES,
60-
retry_on_timeout=settings.REDIS_RETRY_TIMEOUT,
61-
health_check_interval=settings.REDIS_HEALTH_CHECK_INTERVAL,
62-
)
63-
redis_pool = aioredis.Redis(
64-
connection_pool=pool, client_name=settings.REDIS_CLIENT_NAME
65-
)
66-
return redis_pool
67-
68-
69-
async def connect_redis_sentinel(
70-
settings: Optional[RedisSentinelSettings] = None,
71-
) -> Optional[aioredis.Redis]:
72-
"""Return a Redis Sentinel connection."""
49+
async def connect_redis() -> Optional[aioredis.Redis]:
50+
"""Return a Redis connection Redis or Redis Sentinel."""
7351
global redis_pool
7452

75-
settings = settings or redis_settings
53+
if redis_pool is not None:
54+
return redis_pool
55+
56+
try:
57+
if sentinel_settings.REDIS_SENTINEL_HOSTS:
58+
hosts = [
59+
h.strip()
60+
for h in sentinel_settings.REDIS_SENTINEL_HOSTS.split(",")
61+
if h.strip()
62+
]
63+
ports = [
64+
int(p.strip())
65+
for p in sentinel_settings.REDIS_SENTINEL_PORTS.split(",")
66+
if p.strip()
67+
]
7668

77-
if (
78-
not settings.REDIS_SENTINEL_HOSTS
79-
or not settings.REDIS_SENTINEL_PORTS
80-
or not settings.REDIS_SENTINEL_MASTER_NAME
81-
):
82-
return None
83-
84-
hosts = [h.strip() for h in settings.REDIS_SENTINEL_HOSTS.split(",") if h.strip()]
85-
ports = [
86-
int(p.strip()) for p in settings.REDIS_SENTINEL_PORTS.split(",") if p.strip()
87-
]
88-
89-
if redis_pool is None:
90-
try:
9169
sentinel = Sentinel(
9270
[(h, p) for h, p in zip(hosts, ports)],
93-
decode_responses=settings.REDIS_DECODE_RESPONSES,
94-
)
95-
master = sentinel.master_for(
96-
service_name=settings.REDIS_SENTINEL_MASTER_NAME,
97-
db=settings.REDIS_DB,
98-
decode_responses=settings.REDIS_DECODE_RESPONSES,
99-
retry_on_timeout=settings.REDIS_RETRY_TIMEOUT,
100-
client_name=settings.REDIS_CLIENT_NAME,
101-
max_connections=settings.REDIS_MAX_CONNECTIONS,
102-
health_check_interval=settings.REDIS_HEALTH_CHECK_INTERVAL,
71+
decode_responses=sentinel_settings.REDIS_DECODE_RESPONSES,
10372
)
104-
redis_pool = master
10573

106-
except Exception:
74+
redis_pool = sentinel.master_for(
75+
service_name=sentinel_settings.REDIS_SENTINEL_MASTER_NAME,
76+
db=sentinel_settings.REDIS_DB,
77+
decode_responses=sentinel_settings.REDIS_DECODE_RESPONSES,
78+
retry_on_timeout=sentinel_settings.REDIS_RETRY_TIMEOUT,
79+
client_name=sentinel_settings.REDIS_CLIENT_NAME,
80+
max_connections=sentinel_settings.REDIS_MAX_CONNECTIONS,
81+
health_check_interval=sentinel_settings.REDIS_HEALTH_CHECK_INTERVAL,
82+
)
83+
logger.info("Connected to Redis Sentinel")
84+
85+
elif standalone_settings.REDIS_HOST:
86+
pool = aioredis.ConnectionPool(
87+
host=standalone_settings.REDIS_HOST,
88+
port=standalone_settings.REDIS_PORT,
89+
db=standalone_settings.REDIS_DB,
90+
max_connections=standalone_settings.REDIS_MAX_CONNECTIONS,
91+
decode_responses=standalone_settings.REDIS_DECODE_RESPONSES,
92+
retry_on_timeout=standalone_settings.REDIS_RETRY_TIMEOUT,
93+
health_check_interval=standalone_settings.REDIS_HEALTH_CHECK_INTERVAL,
94+
)
95+
redis_pool = aioredis.Redis(
96+
connection_pool=pool, client_name=standalone_settings.REDIS_CLIENT_NAME
97+
)
98+
logger.info("Connected to Redis")
99+
else:
100+
logger.warning("No Redis configuration found")
107101
return None
108102

109-
return redis_pool
103+
return redis_pool
104+
105+
except Exception as e:
106+
logger.error(f"Failed to connect to Redis: {e}")
107+
redis_pool = None
108+
return None
110109

111110

112111
async def save_self_link(
@@ -122,3 +121,32 @@ async def get_prev_link(redis: aioredis.Redis, token: Optional[str]) -> Optional
122121
if not token:
123122
return None
124123
return await redis.get(f"nav:self:{token}")
124+
125+
126+
async def redis_pagination_links(
127+
current_url: str, token: str, next_token: str, links: list
128+
) -> None:
129+
"""Handle Redis pagination."""
130+
redis = None
131+
try:
132+
redis = await connect_redis()
133+
logger.info("Redis connection established successfully")
134+
except Exception as e:
135+
redis = None
136+
logger.warning(f"Redis connection failed: {e}")
137+
138+
if redis:
139+
if next_token:
140+
await save_self_link(redis, next_token, current_url)
141+
142+
prev_link = await get_prev_link(redis, token)
143+
if prev_link:
144+
links.insert(
145+
0,
146+
{
147+
"rel": "prev",
148+
"type": "application/json",
149+
"method": "GET",
150+
"href": prev_link,
151+
},
152+
)
