From bacac103f1052f5bb4a0f5aabcf05f59741769aa Mon Sep 17 00:00:00 2001 From: Yuri Zmytrakov Date: Wed, 10 Sep 2025 15:03:41 +0200 Subject: [PATCH 1/3] add collection and parent rel to collection/{col_id}items --- stac_fastapi/core/stac_fastapi/core/core.py | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/stac_fastapi/core/stac_fastapi/core/core.py b/stac_fastapi/core/stac_fastapi/core/core.py index 8e7da91b..ca78bbd8 100644 --- a/stac_fastapi/core/stac_fastapi/core/core.py +++ b/stac_fastapi/core/stac_fastapi/core/core.py @@ -356,7 +356,21 @@ async def item_collection( self.item_serializer.db_to_stac(item, base_url=base_url) for item in items ] - links = await PagingLinks(request=request, next=next_token).get_links() + collection_links = [ + { + "rel": "collection", + "type": "application/json", + "href": urljoin(str(request.base_url), f"collections/{collection_id}"), + }, + { + "rel": "parent", + "type": "application/json", + "href": urljoin(str(request.base_url), f"collections/{collection_id}"), + } + ] + + paging_links = await PagingLinks(request=request, next=next_token).get_links() + links = collection_links + paging_links return stac_types.ItemCollection( type="FeatureCollection", From 041b7290cce6131e53e290516d69c97c284537ab Mon Sep 17 00:00:00 2001 From: Yuri Zmytrakov Date: Wed, 17 Sep 2025 11:20:05 +0200 Subject: [PATCH 2/3] add prev link navigation for items --- stac_fastapi/core/stac_fastapi/core/core.py | 21 ++++++++++++++++++++- 1 file changed, 20 insertions(+), 1 deletion(-) diff --git a/stac_fastapi/core/stac_fastapi/core/core.py b/stac_fastapi/core/stac_fastapi/core/core.py index ca78bbd8..111417dc 100644 --- a/stac_fastapi/core/stac_fastapi/core/core.py +++ b/stac_fastapi/core/stac_fastapi/core/core.py @@ -310,7 +310,18 @@ async def item_collection( """ request: Request = kwargs["request"] token = request.query_params.get("token") - + if not hasattr(self, '_prev_links'): + self._prev_links = {} + + session_id = 
request.cookies.get('stac_session', 'default_session') + current_self_link = str(request.url) + + if session_id not in self._prev_links: + self._prev_links[session_id] = [] + + history = self._prev_links[session_id] + if not history or current_self_link != history[-1]: + history.append(current_self_link) base_url = str(request.base_url) collection = await self.get_collection( @@ -370,6 +381,14 @@ async def item_collection( ] paging_links = await PagingLinks(request=request, next=next_token).get_links() + history = self._prev_links.get(session_id, []) + if len(history) > 1: + previous_self_link = history[-2] + paging_links.append({ + "rel": "previous", + "type": "application/json", + "href": previous_self_link, + }) links = collection_links + paging_links return stac_types.ItemCollection( From 62cf20cdbf2200d1cfbf4bf9f010f54b041acad2 Mon Sep 17 00:00:00 2001 From: Yuri Zmytrakov Date: Thu, 18 Sep 2025 14:20:31 +0200 Subject: [PATCH 3/3] add redis --- .pre-commit-config.yaml | 3 +- Dockerfile | 26 +++ Makefile | 2 +- compose.yml | 10 + mypy.ini | 2 + stac_fastapi/core/setup.py | 1 + stac_fastapi/core/stac_fastapi/core/core.py | 74 +++++-- .../core/stac_fastapi/core/redis_utils.py | 199 ++++++++++++++++++ stac_fastapi/elasticsearch/setup.py | 5 +- stac_fastapi/opensearch/setup.py | 5 +- stac_fastapi/sfeos_helpers/setup.py | 3 +- stac_fastapi/tests/conftest.py | 1 + tox.ini | 2 + 13 files changed, 304 insertions(+), 29 deletions(-) create mode 100644 Dockerfile create mode 100644 mypy.ini create mode 100644 stac_fastapi/core/stac_fastapi/core/redis_utils.py diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index a542b4c4..91319654 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -31,7 +31,8 @@ repos: ] additional_dependencies: [ "types-attrs", - "types-requests" + "types-requests", + "types-redis" ] - repo: https://github.com/PyCQA/pydocstyle rev: 6.1.1 diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 00000000..7e6bfac5 
--- /dev/null +++ b/Dockerfile @@ -0,0 +1,26 @@ +FROM python:3.13-slim + +RUN apt-get update && apt-get install -y \ + build-essential \ + && apt-get clean \ + && rm -rf /var/lib/apt/lists/* + +WORKDIR /app + +COPY README.md . +COPY stac_fastapi/opensearch/setup.py stac_fastapi/opensearch/ +COPY stac_fastapi/core/setup.py stac_fastapi/core/ +COPY stac_fastapi/sfeos_helpers/setup.py stac_fastapi/sfeos_helpers/ + + +RUN pip install --no-cache-dir --upgrade pip setuptools wheel + +COPY stac_fastapi/ stac_fastapi/ + +RUN pip install --no-cache-dir ./stac_fastapi/core +RUN pip install --no-cache-dir ./stac_fastapi/sfeos_helpers +RUN pip install --no-cache-dir ./stac_fastapi/opensearch[server] + +EXPOSE 8080 + +CMD ["uvicorn", "stac_fastapi.opensearch.app:app", "--host", "0.0.0.0", "--port", "8080"] diff --git a/Makefile b/Makefile index 204b31a1..3218e8cf 100644 --- a/Makefile +++ b/Makefile @@ -63,7 +63,7 @@ docker-shell-os: .PHONY: test-elasticsearch test-elasticsearch: - -$(run_es) /bin/bash -c 'export && ./scripts/wait-for-it-es.sh elasticsearch:9200 && cd stac_fastapi/tests/ && pytest' + -$(run_es) /bin/bash -c 'pip install redis==6.4.0 && export && ./scripts/wait-for-it-es.sh elasticsearch:9200 && cd stac_fastapi/tests/ && pytest' docker compose down .PHONY: test-opensearch diff --git a/compose.yml b/compose.yml index bfeb0bb1..d50b6060 100644 --- a/compose.yml +++ b/compose.yml @@ -22,6 +22,8 @@ services: - ES_VERIFY_CERTS=false - BACKEND=elasticsearch - DATABASE_REFRESH=true + - REDIS_HOST=redis + - REDIS_PORT=6379 ports: - "8080:8080" volumes: @@ -30,6 +32,7 @@ - ./esdata:/usr/share/elasticsearch/data depends_on: - elasticsearch + - redis command: bash -c "./scripts/wait-for-it-es.sh es-container:9200 && python -m stac_fastapi.elasticsearch.app" @@ -94,3 +97,10 @@ - ./opensearch/snapshots:/usr/share/opensearch/snapshots ports: - "9202:9202" + + redis: + container_name: stac-redis + image: redis:7.2-alpine + restart: always + ports: + - 
"6379:6379" \ No newline at end of file diff --git a/mypy.ini b/mypy.ini new file mode 100644 index 00000000..e3549d45 --- /dev/null +++ b/mypy.ini @@ -0,0 +1,2 @@ +[mypy-stac_fastapi.core.stac_fastapi.core.core] +ignore_errors = True \ No newline at end of file diff --git a/stac_fastapi/core/setup.py b/stac_fastapi/core/setup.py index 92442997..b055eecd 100644 --- a/stac_fastapi/core/setup.py +++ b/stac_fastapi/core/setup.py @@ -19,6 +19,7 @@ "pygeofilter~=0.3.1", "jsonschema~=4.0.0", "slowapi~=0.1.9", + "redis==6.4.0", ] setup( diff --git a/stac_fastapi/core/stac_fastapi/core/core.py b/stac_fastapi/core/stac_fastapi/core/core.py index 111417dc..7befa5ae 100644 --- a/stac_fastapi/core/stac_fastapi/core/core.py +++ b/stac_fastapi/core/stac_fastapi/core/core.py @@ -24,6 +24,12 @@ from stac_fastapi.core.base_settings import ApiBaseSettings from stac_fastapi.core.datetime_utils import format_datetime_range from stac_fastapi.core.models.links import PagingLinks +from stac_fastapi.core.redis_utils import ( + add_previous_link, + cache_current_url, + cache_previous_url, + connect_redis, +) from stac_fastapi.core.serializers import CollectionSerializer, ItemSerializer from stac_fastapi.core.session import Session from stac_fastapi.core.utilities import filter_fields @@ -237,6 +243,12 @@ async def all_collections(self, **kwargs) -> stac_types.Collections: base_url = str(request.base_url) limit = int(request.query_params.get("limit", os.getenv("STAC_ITEM_LIMIT", 10))) token = request.query_params.get("token") + current_url = str(request.url) + redis = None + try: + redis = await connect_redis() + except Exception: + redis = None collections, next_token = await self.database.get_all_collections( token=token, limit=limit, request=request @@ -252,6 +264,10 @@ async def all_collections(self, **kwargs) -> stac_types.Collections: }, ] + await add_previous_link(redis, links, "collections", current_url, token) + if redis: + await cache_previous_url(redis, current_url, 
"collections") + if next_token: next_link = PagingLinks(next=next_token, request=request).link_next() links.append(next_link) @@ -310,20 +326,18 @@ async def item_collection( """ request: Request = kwargs["request"] token = request.query_params.get("token") - if not hasattr(self, '_prev_links'): - self._prev_links = {} - - session_id = request.cookies.get('stac_session', 'default_session') - current_self_link = str(request.url) - - if session_id not in self._prev_links: - self._prev_links[session_id] = [] - - history = self._prev_links[session_id] - if not history or current_self_link != history[-1]: - history.append(current_self_link) base_url = str(request.base_url) + current_url = str(request.url) + + try: + redis = await connect_redis() + except Exception: + redis = None + + if redis: + await cache_current_url(redis, current_url, collection_id) + collection = await self.get_collection( collection_id=collection_id, request=request ) @@ -374,21 +388,22 @@ async def item_collection( "href": urljoin(str(request.base_url), f"collections/{collection_id}"), }, { - "rel": "parent", + "rel": "parent", "type": "application/json", "href": urljoin(str(request.base_url), f"collections/{collection_id}"), - } + }, ] paging_links = await PagingLinks(request=request, next=next_token).get_links() - history = self._prev_links.get(session_id, []) - if len(history) > 1: - previous_self_link = history[-2] - paging_links.append({ - "rel": "previous", - "type": "application/json", - "href": previous_self_link, - }) + + if redis: + await add_previous_link( + redis, paging_links, collection_id, current_url, token + ) + + if redis: + await cache_previous_url(redis, current_url, collection_id) + links = collection_links + paging_links return stac_types.ItemCollection( @@ -529,7 +544,14 @@ async def post_search( HTTPException: If there is an error with the cql2_json filter. 
""" base_url = str(request.base_url) + current_url = str(request.url) + try: + redis = await connect_redis() + except Exception: + redis = None + if redis: + await cache_current_url(redis, current_url, "search_result") search = self.database.make_search() if search_request.ids: @@ -628,6 +650,14 @@ async def post_search( ] links = await PagingLinks(request=request, next=next_token).get_links() + if redis: + await add_previous_link( + redis, links, "search_result", current_url, search_request.token + ) + + if redis: + await cache_previous_url(redis, current_url, "search_result") + return stac_types.ItemCollection( type="FeatureCollection", features=items, diff --git a/stac_fastapi/core/stac_fastapi/core/redis_utils.py b/stac_fastapi/core/stac_fastapi/core/redis_utils.py new file mode 100644 index 00000000..315bac70 --- /dev/null +++ b/stac_fastapi/core/stac_fastapi/core/redis_utils.py @@ -0,0 +1,199 @@ +"""Utilities for connecting to and managing Redis connections.""" + +import logging +import os +from typing import Dict, List, Optional + +from pydantic_settings import BaseSettings +from redis import asyncio as aioredis +from stac_pydantic.shared import MimeTypes + +from stac_fastapi.core.utilities import get_bool_env + +redis_pool = None + +logger = logging.getLogger(__name__) + + +class RedisSentinelSettings(BaseSettings): + """Configuration settings for connecting to a Redis Sentinel server.""" + + sentinel_hosts: List[str] = os.getenv("REDIS_SENTINEL_HOSTS", "").split(",") + sentinel_ports: List[int] = [ + int(port) + for port in os.getenv("REDIS_SENTINEL_PORTS", "").split(",") + if port.strip() + ] + sentinel_master_name: str = os.getenv("REDIS_SENTINEL_MASTER_NAME", "") + redis_db: int = int(os.getenv("REDIS_DB", "0")) + + max_connections: int = int(os.getenv("REDIS_MAX_CONNECTIONS", "5")) + retry_on_timeout: bool = get_bool_env("REDIS_RETRY_TIMEOUT", True) + decode_responses: bool = get_bool_env("REDIS_DECODE_RESPONSES", True) + client_name: str = 
os.getenv("REDIS_CLIENT_NAME", "stac-fastapi-app") + health_check_interval: int = int(os.getenv("REDIS_HEALTH_CHECK_INTERVAL", "30")) + + +class RedisSettings(BaseSettings): + """Configuration settings for connecting to a Redis server.""" + + redis_host: str = os.getenv("REDIS_HOST", "localhost") + redis_port: int = int(os.getenv("REDIS_PORT", "6379")) + redis_db: int = int(os.getenv("REDIS_DB", "0")) + + max_connections: int = int(os.getenv("REDIS_MAX_CONNECTIONS", "5")) + retry_on_timeout: bool = get_bool_env("REDIS_RETRY_TIMEOUT", True) + decode_responses: bool = get_bool_env("REDIS_DECODE_RESPONSES", True) + client_name: str = os.getenv("REDIS_CLIENT_NAME", "stac-fastapi-app") + health_check_interval: int = int(os.getenv("REDIS_HEALTH_CHECK_INTERVAL", "30")) + + +# select which configuration to be used RedisSettings or RedisSentinelSettings +redis_settings = RedisSettings() + + +async def connect_redis_sentinel( + settings: Optional[RedisSentinelSettings] = None, +) -> Optional[aioredis.Redis]: + """Return a Redis Sentinel connection.""" + global redis_pool + settings = settings or RedisSentinelSettings() + + if ( + not settings.sentinel_hosts + or not settings.sentinel_hosts[0] + or not settings.sentinel_master_name + ): + return None + + if redis_pool is None: + try: + sentinel = aioredis.Sentinel( + [ + (host, port) + for host, port in zip( + settings.sentinel_hosts, settings.sentinel_ports + ) + ], + decode_responses=settings.decode_responses, + retry_on_timeout=settings.retry_on_timeout, + client_name=f"{settings.client_name}-sentinel", + ) + + master = sentinel.master_for( + settings.sentinel_master_name, + db=settings.redis_db, + decode_responses=settings.decode_responses, + retry_on_timeout=settings.retry_on_timeout, + client_name=settings.client_name, + max_connections=settings.max_connections, + ) + + redis_pool = master + + except Exception: + return None + + return redis_pool + + +async def connect_redis( + settings: Optional[RedisSettings] = None, +) -> 
Optional[aioredis.Redis]: + """Return a Redis connection for regular Redis server.""" + global redis_pool + settings = settings or redis_settings + + if not settings.redis_host: + return None + + if redis_pool is None: + try: + pool = aioredis.ConnectionPool( + host=settings.redis_host, + port=settings.redis_port, + db=settings.redis_db, + max_connections=settings.max_connections, + decode_responses=settings.decode_responses, + retry_on_timeout=settings.retry_on_timeout, + health_check_interval=settings.health_check_interval, + ) + redis_pool = aioredis.Redis( + connection_pool=pool, + client_name=settings.client_name, + ) + except Exception as e: + logger.error(f"Redis connection failed: {e}") + return None + + return redis_pool + + +async def close_redis() -> None: + """Close the Redis connection pool if it exists.""" + global redis_pool + if redis_pool: + await redis_pool.close() + redis_pool = None + + +async def cache_current_url(redis, current_url: str, key: str) -> None: + """Add to Redis cache the current URL for navigation.""" + if not redis: + return + + try: + current_key = f"current:{key}" + await redis.setex(current_key, 600, current_url) + except Exception as e: + logger.error(f"Redis cache error for {key}: {e}") + + +async def get_previous_url(redis, key: str) -> Optional[str]: + """Get previous URL from Redis cache if it exists.""" + if redis is None: + return None + + try: + prev_key = f"prev:{key}" + previous_url = await redis.get(prev_key) + if previous_url: + return previous_url + except Exception as e: + logger.error(f"Redis get previous error for {key}: {e}") + + return None + + +async def cache_previous_url(redis, current_url: str, key: str) -> None: + """Cache the current URL as previous for previous links in next page.""" + if not redis: + return + + try: + prev_key = f"prev:{key}" + await redis.setex(prev_key, 600, current_url) + except Exception as e: + logger.error(f"Redis cache previous error for {key}: {e}") + + +async def add_previous_link( + 
redis, + links: List[Dict], + key: str, + current_url: str, + token: Optional[str] = None, +) -> None: + """Add previous link into navigation.""" + if not redis or not token: + return + + previous_url = await get_previous_url(redis, key) + if previous_url: + links.append( + { + "rel": "previous", + "type": MimeTypes.json, + "href": previous_url, + } + ) diff --git a/stac_fastapi/elasticsearch/setup.py b/stac_fastapi/elasticsearch/setup.py index 83a259cb..a3dbda03 100644 --- a/stac_fastapi/elasticsearch/setup.py +++ b/stac_fastapi/elasticsearch/setup.py @@ -6,11 +6,12 @@ desc = f.read() install_requires = [ - "stac-fastapi-core==6.2.0", - "sfeos-helpers==6.2.0", + "stac-fastapi-core==6.3.0", + "sfeos-helpers==6.3.0", "elasticsearch[async]~=8.18.0", "uvicorn~=0.23.0", "starlette>=0.35.0,<0.36.0", + "redis==6.4.0", ] extra_reqs = { diff --git a/stac_fastapi/opensearch/setup.py b/stac_fastapi/opensearch/setup.py index 387a9e94..30ca4fca 100644 --- a/stac_fastapi/opensearch/setup.py +++ b/stac_fastapi/opensearch/setup.py @@ -6,12 +6,13 @@ desc = f.read() install_requires = [ - "stac-fastapi-core==6.2.0", - "sfeos-helpers==6.2.0", + "stac-fastapi-core==6.3.0", + "sfeos-helpers==6.3.0", "opensearch-py~=2.8.0", "opensearch-py[async]~=2.8.0", "uvicorn~=0.23.0", "starlette>=0.35.0,<0.36.0", + "redis==6.4.0", ] extra_reqs = { diff --git a/stac_fastapi/sfeos_helpers/setup.py b/stac_fastapi/sfeos_helpers/setup.py index 18c8ef64..486501bb 100644 --- a/stac_fastapi/sfeos_helpers/setup.py +++ b/stac_fastapi/sfeos_helpers/setup.py @@ -6,7 +6,8 @@ desc = f.read() install_requires = [ - "stac-fastapi.core==6.2.0", + "stac-fastapi.core==6.3.0", + "redis==6.4.0", ] setup( diff --git a/stac_fastapi/tests/conftest.py b/stac_fastapi/tests/conftest.py index 23da2668..01893286 100644 --- a/stac_fastapi/tests/conftest.py +++ b/stac_fastapi/tests/conftest.py @@ -6,6 +6,7 @@ import pytest import pytest_asyncio +import redis # noqa: F401 from fastapi import Depends, HTTPException, security, 
status from httpx import ASGITransport, AsyncClient from pydantic import ConfigDict diff --git a/tox.ini b/tox.ini index 546c7767..6aa4bc78 100644 --- a/tox.ini +++ b/tox.ini @@ -1,5 +1,7 @@ # Linter configs [flake8] +per-file-ignores = + stac_fastapi/core/stac_fastapi/core/core.py: F821 ignore = D203 exclude = .git,__pycache__,docs/source/conf.py,old,build,dist max-complexity = 12