Commit a0d4037

Yuri Zmytrakov authored and committed
add redis
1 parent 041b729 commit a0d4037

11 files changed: +295 -24 lines changed

.pre-commit-config.yaml

Lines changed: 2 additions & 1 deletion
@@ -31,7 +31,8 @@ repos:
         ]
         additional_dependencies: [
           "types-attrs",
-          "types-requests"
+          "types-requests",
+          "types-redis"
         ]
   - repo: https://github.com/PyCQA/pydocstyle
     rev: 6.1.1
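
The new "types-redis" entry gives the mypy pre-commit hook type stubs for the redis package that this commit adds as a dependency. A minimal, hypothetical illustration of the kind of call the stubs let mypy check (not part of the commit):

    from redis.asyncio import Redis


    async def redis_is_up(client: Redis) -> bool:
        # With the redis stubs installed, mypy can verify this call and its return type.
        return await client.ping()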

Dockerfile

Lines changed: 26 additions & 0 deletions
@@ -0,0 +1,26 @@
+FROM python:3.13-slim
+
+RUN apt-get update && apt-get install -y \
+    build-essential \
+    && apt-get clean \
+    && rm -rf /var/lib/apt/lists/*
+
+WORKDIR /app
+
+COPY README.md .
+COPY stac_fastapi/opensearch/setup.py stac_fastapi/opensearch/
+COPY stac_fastapi/core/setup.py stac_fastapi/core/
+COPY stac_fastapi/sfeos_helpers/setup.py stac_fastapi/sfeos_helpers/
+
+
+RUN pip install --no-cache-dir --upgrade pip setuptools wheel
+
+COPY stac_fastapi/ stac_fastapi/
+
+RUN pip install --no-cache-dir ./stac_fastapi/core
+RUN pip install --no-cache-dir ./stac_fastapi/sfeos_helpers
+RUN pip install --no-cache-dir ./stac_fastapi/opensearch[server]
+
+EXPOSE 8080
+
+CMD ["uvicorn", "stac_fastapi.opensearch.app:app", "--host", "0.0.0.0", "--port", "8080"]

Makefile

Lines changed: 1 addition & 1 deletion
@@ -63,7 +63,7 @@ docker-shell-os:

 .PHONY: test-elasticsearch
 test-elasticsearch:
-	-$(run_es) /bin/bash -c 'export && ./scripts/wait-for-it-es.sh elasticsearch:9200 && cd stac_fastapi/tests/ && pytest'
+	-$(run_es) /bin/bash -c 'pip install redis==6.4.0 export && ./scripts/wait-for-it-es.sh elasticsearch:9200 && cd stac_fastapi/tests/ && pytest'
 	docker compose down

 .PHONY: test-opensearch

compose.yml

Lines changed: 10 additions & 0 deletions
@@ -22,6 +22,8 @@ services:
       - ES_VERIFY_CERTS=false
       - BACKEND=elasticsearch
       - DATABASE_REFRESH=true
+      - REDIS_HOST=redis
+      - REDIS_PORT=6379
     ports:
       - "8080:8080"
     volumes:
@@ -30,6 +32,7 @@ services:
       - ./esdata:/usr/share/elasticsearch/data
     depends_on:
       - elasticsearch
+      - redis
     command:
       bash -c "./scripts/wait-for-it-es.sh es-container:9200 && python -m stac_fastapi.elasticsearch.app"

@@ -94,3 +97,10 @@ services:
       - ./opensearch/snapshots:/usr/share/opensearch/snapshots
     ports:
       - "9202:9202"
+
+  redis:
+    container_name: stac-redis
+    image: redis:7.2-alpine
+    restart: always
+    ports:
+      - "6379:6379"

stac_fastapi/core/setup.py

Lines changed: 1 addition & 0 deletions
@@ -19,6 +19,7 @@
     "pygeofilter~=0.3.1",
     "jsonschema~=4.0.0",
     "slowapi~=0.1.9",
+    "redis==6.4.0",
 ]

 setup(

stac_fastapi/core/stac_fastapi/core/core.py

Lines changed: 55 additions & 22 deletions
@@ -24,6 +24,13 @@
 from stac_fastapi.core.base_settings import ApiBaseSettings
 from stac_fastapi.core.datetime_utils import format_datetime_range
 from stac_fastapi.core.models.links import PagingLinks
+from stac_fastapi.core.redis_utils import (
+    add_previous_link,
+    cache_current_url,
+    cache_previous_url,
+    connect_redis_sentinel,
+    connect_redis,
+)
 from stac_fastapi.core.serializers import CollectionSerializer, ItemSerializer
 from stac_fastapi.core.session import Session
 from stac_fastapi.core.utilities import filter_fields
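
The redis_utils module imported here is added by this commit, but its source is not part of this excerpt. Judging only from the call sites in the hunks below, the caching helpers could plausibly look like the sketch that follows; the names match the imports, while the bodies and key layout are assumptions (connect_redis and connect_redis_sentinel are omitted):

    from typing import Any, Dict, List, Optional

    from redis.asyncio import Redis


    async def cache_current_url(redis: Redis, current_url: str, key: str) -> None:
        # Assumed behaviour: remember the URL currently being served for this key.
        await redis.set(f"current:{key}", current_url)


    async def cache_previous_url(redis: Redis, current_url: str, key: str) -> None:
        # Assumed behaviour: store the URL that the next page can link back to.
        await redis.set(f"previous:{key}", current_url)


    async def add_previous_link(
        redis: Optional[Redis],
        links: List[Dict[str, Any]],
        key: str,
        current_url: str,
        token: Optional[str],
    ) -> None:
        # Assumed behaviour: append a "previous" link when a cached URL exists and
        # the request carries a pagination token (i.e. it is not the first page).
        if not redis or not token:
            return
        previous_url = await redis.get(f"previous:{key}")
        if previous_url:
            links.append(
                {"rel": "previous", "type": "application/json", "href": previous_url}
            )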
@@ -237,6 +244,12 @@ async def all_collections(self, **kwargs) -> stac_types.Collections:
         base_url = str(request.base_url)
         limit = int(request.query_params.get("limit", os.getenv("STAC_ITEM_LIMIT", 10)))
         token = request.query_params.get("token")
+        current_url = str(request.url)
+        redis = None
+        try:
+            redis = await connect_redis()
+        except Exception as e:
+            redis = None

         collections, next_token = await self.database.get_all_collections(
             token=token, limit=limit, request=request
@@ -252,6 +265,12 @@ async def all_collections(self, **kwargs) -> stac_types.Collections:
             },
         ]

+        await add_previous_link(
+            redis, links, "collections", current_url, token
+        )
+        if redis:
+            await cache_previous_url(redis, current_url, "collections")
+
         if next_token:
             next_link = PagingLinks(next=next_token, request=request).link_next()
             links.append(next_link)
@@ -310,20 +329,18 @@ async def item_collection(
         """
         request: Request = kwargs["request"]
         token = request.query_params.get("token")
-        if not hasattr(self, '_prev_links'):
-            self._prev_links = {}
-
-        session_id = request.cookies.get('stac_session', 'default_session')
-        current_self_link = str(request.url)
-
-        if session_id not in self._prev_links:
-            self._prev_links[session_id] = []
-
-        history = self._prev_links[session_id]
-        if not history or current_self_link != history[-1]:
-            history.append(current_self_link)
         base_url = str(request.base_url)

+        current_url = str(request.url)
+
+        try:
+            redis = await connect_redis()
+        except Exception as e:
+            redis = None
+
+        if redis:
+            await cache_current_url(redis, current_url, collection_id)
+
         collection = await self.get_collection(
             collection_id=collection_id, request=request
         )
@@ -374,21 +391,22 @@ async def item_collection(
                 "href": urljoin(str(request.base_url), f"collections/{collection_id}"),
             },
             {
-                "rel": "parent",
+                "rel": "parent",
                 "type": "application/json",
                 "href": urljoin(str(request.base_url), f"collections/{collection_id}"),
-            }
+            },
         ]

         paging_links = await PagingLinks(request=request, next=next_token).get_links()
-        history = self._prev_links.get(session_id, [])
-        if len(history) > 1:
-            previous_self_link = history[-2]
-            paging_links.append({
-                "rel": "previous",
-                "type": "application/json",
-                "href": previous_self_link,
-            })
+
+        if redis:
+            await add_previous_link(
+                redis, paging_links, collection_id, current_url, token
+            )
+
+        if redis:
+            await cache_previous_url(redis, current_url, collection_id)
+
         links = collection_links + paging_links

         return stac_types.ItemCollection(
@@ -529,7 +547,14 @@ async def post_search(
             HTTPException: If there is an error with the cql2_json filter.
         """
         base_url = str(request.base_url)
+        current_url = str(request.url)
+        try:
+            redis = await connect_redis()
+        except Exception as e:
+            redis = None

+        if redis:
+            await cache_current_url(redis, current_url, "search_result")
         search = self.database.make_search()

         if search_request.ids:
@@ -628,6 +653,14 @@ async def post_search(
         ]
         links = await PagingLinks(request=request, next=next_token).get_links()

+        if redis:
+            await add_previous_link(
+                redis, links, "search_result", current_url, search_request.token
+            )
+
+        if redis:
+            await cache_previous_url(redis, current_url, "search_result")
+
         return stac_types.ItemCollection(
             type="FeatureCollection",
             features=items,
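
Taken together, the changes above cache each page's URL in Redis and use it to add a "previous" link alongside the usual "next" paging link in the /collections, item-collection, and /search responses. A small client-side illustration of the intended effect, assuming a locally running instance on port 8080 and that the Redis cache is reachable:

    import requests

    # Hypothetical walkthrough: fetch the first page, follow "next", expect "previous".
    first = requests.get("http://localhost:8080/search", params={"limit": 1}).json()
    next_link = next(link for link in first["links"] if link["rel"] == "next")

    second = requests.get(next_link["href"]).json()
    previous = [link for link in second["links"] if link["rel"] == "previous"]
    print(previous)  # with the Redis cache available, this should not be empty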
