From d6d268ef6deb1e24a2b78b6f7907457d14bdc641 Mon Sep 17 00:00:00 2001 From: Yuri Zmytrakov Date: Wed, 8 Oct 2025 13:15:14 +0200 Subject: [PATCH 1/5] feat(navigation): implement Redis caching for navigation - Configure Redis image for tests of caching navigation - Update Make file Redis with test targets for ES and OS - Integrate Redis/Redis Sentinel client to cache navigation - Add Redis funcs Sentinel for navigation caching --- .github/workflows/cicd.yml | 13 ++ Makefile | 16 ++- compose-redis.yml | 13 ++ mypy.ini | 3 + stac_fastapi/core/stac_fastapi/core/core.py | 77 ++++++++++- .../core/stac_fastapi/core/redis_utils.py | 124 ++++++++++++++++++ stac_fastapi/elasticsearch/setup.py | 1 + stac_fastapi/opensearch/setup.py | 1 + 8 files changed, 245 insertions(+), 3 deletions(-) create mode 100644 compose-redis.yml create mode 100644 mypy.ini create mode 100644 stac_fastapi/core/stac_fastapi/core/redis_utils.py diff --git a/.github/workflows/cicd.yml b/.github/workflows/cicd.yml index 06615a7e..b639133d 100644 --- a/.github/workflows/cicd.yml +++ b/.github/workflows/cicd.yml @@ -66,6 +66,16 @@ jobs: ports: - 9202:9202 + redis: + image: redis:7-alpine + options: >- + --health-cmd "redis-cli ping" + --health-interval 10s + --health-timeout 5s + --health-retries 5 + ports: + - 6379:6379 + strategy: matrix: python-version: [ "3.9", "3.10", "3.11", "3.12", "3.13"] @@ -126,3 +136,6 @@ jobs: DATABASE_REFRESH: true ES_VERIFY_CERTS: false BACKEND: ${{ matrix.backend == 'elasticsearch7' && 'elasticsearch' || matrix.backend == 'elasticsearch8' && 'elasticsearch' || 'opensearch' }} + REDIS_ENABLE: true + REDIS_HOST: localhost + REDIS_PORT: 6379 diff --git a/Makefile b/Makefile index 204b31a1..bde31064 100644 --- a/Makefile +++ b/Makefile @@ -82,7 +82,7 @@ test-datetime-filtering-os: docker compose down .PHONY: test -test: test-elasticsearch test-datetime-filtering-es test-opensearch test-datetime-filtering-os +test: test-elasticsearch test-datetime-filtering-es test-opensearch 
test-datetime-filtering-os test-redis-es test-redis-os .PHONY: run-database-es run-database-es: @@ -117,4 +117,16 @@ docs-image: .PHONY: docs docs: docs-image docker compose -f compose.docs.yml \ - run docs \ No newline at end of file + run docs + +.PHONY: test-redis-es +test-redis-es: + docker compose -f compose-redis.yml up -d + -$(run_es) /bin/bash -c 'export REDIS_ENABLE=true REDIS_HOST=redis REDIS_PORT=6379 && ./scripts/wait-for-it-es.sh elasticsearch:9200 && cd stac_fastapi/tests/ && pytest redis/ -v' + docker compose -f compose-redis.yml down + +.PHONY: test-redis-os +test-redis-os: + docker compose -f compose-redis.yml up -d + -$(run_os) /bin/bash -c 'export REDIS_ENABLE=true REDIS_HOST=redis REDIS_PORT=6379 && ./scripts/wait-for-it-es.sh opensearch:9202 && cd stac_fastapi/tests/ && pytest redis/ -v' + docker compose -f compose-redis.yml down diff --git a/compose-redis.yml b/compose-redis.yml new file mode 100644 index 00000000..b572e731 --- /dev/null +++ b/compose-redis.yml @@ -0,0 +1,13 @@ +version: '3.8' + +services: + redis: + image: redis:7-alpine + ports: + - "6379:6379" + volumes: + - redis_test_data:/data + command: redis-server --appendonly yes + +volumes: + redis_test_data: diff --git a/mypy.ini b/mypy.ini new file mode 100644 index 00000000..1ddba638 --- /dev/null +++ b/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +[mypy-redis.*] +ignore_missing_imports = True diff --git a/stac_fastapi/core/stac_fastapi/core/core.py b/stac_fastapi/core/stac_fastapi/core/core.py index 143b4d5a..c0548c1a 100644 --- a/stac_fastapi/core/stac_fastapi/core/core.py +++ b/stac_fastapi/core/stac_fastapi/core/core.py @@ -24,9 +24,10 @@ from stac_fastapi.core.base_settings import ApiBaseSettings from stac_fastapi.core.datetime_utils import format_datetime_range from stac_fastapi.core.models.links import PagingLinks +from stac_fastapi.core.redis_utils import connect_redis, get_prev_link, save_self_link from stac_fastapi.core.serializers import CollectionSerializer, ItemSerializer from 
stac_fastapi.core.session import Session -from stac_fastapi.core.utilities import filter_fields +from stac_fastapi.core.utilities import filter_fields, get_bool_env from stac_fastapi.extensions.core.transaction import AsyncBaseTransactionsClient from stac_fastapi.extensions.core.transaction.request import ( PartialCollection, @@ -328,6 +329,20 @@ async def all_collections( if parsed_sort: sort = parsed_sort + current_url = str(request.url) + redis_enable = get_bool_env("REDIS_ENABLE", default=False) + + redis = None + if redis_enable: + try: + redis = await connect_redis() + logger.info("Redis connection established successfully") + except Exception as e: + redis = None + logger.warning( + f"Redis connection failed, continuing without Redis: {e}" + ) + # Convert q to a list if it's a string q_list = None if q is not None: @@ -426,6 +441,22 @@ async def all_collections( }, ] + if redis_enable and redis: + if next_token: + await save_self_link(redis, next_token, current_url) + + prev_link = await get_prev_link(redis, token) + if prev_link: + links.insert( + 0, + { + "rel": "prev", + "type": "application/json", + "method": "GET", + "href": prev_link, + }, + ) + if next_token: next_link = PagingLinks(next=next_token, request=request).link_next() links.append(next_link) @@ -744,6 +775,7 @@ async def post_search( HTTPException: If there is an error with the cql2_json filter. 
""" base_url = str(request.base_url) + redis_enable = get_bool_env("REDIS_ENABLE", default=False) search = self.database.make_search() @@ -850,6 +882,49 @@ async def post_search( ] links = await PagingLinks(request=request, next=next_token).get_links() + collection_links = [] + if search_request.collections: + for collection_id in search_request.collections: + collection_links.extend( + [ + { + "rel": "collection", + "type": "application/json", + "href": urljoin(base_url, f"collections/{collection_id}"), + }, + { + "rel": "parent", + "type": "application/json", + "href": urljoin(base_url, f"collections/{collection_id}"), + }, + ] + ) + links.extend(collection_links) + + if redis_enable: + redis = None + try: + redis = await connect_redis() + logger.info("Redis connection established successfully") + self_link = str(request.url) + await save_self_link(redis, next_token, self_link) + + prev_link = await get_prev_link(redis, token_param) + if prev_link: + links.insert( + 0, + { + "rel": "prev", + "type": "application/json", + "method": "GET", + "href": prev_link, + }, + ) + except Exception as e: + logger.warning( + f"Redis connection failed, continuing without Redis: {e}" + ) + return stac_types.ItemCollection( type="FeatureCollection", features=items, diff --git a/stac_fastapi/core/stac_fastapi/core/redis_utils.py b/stac_fastapi/core/stac_fastapi/core/redis_utils.py new file mode 100644 index 00000000..b7923daa --- /dev/null +++ b/stac_fastapi/core/stac_fastapi/core/redis_utils.py @@ -0,0 +1,124 @@ +"""Utilities for connecting to and managing Redis connections.""" + +from typing import Optional + +from pydantic_settings import BaseSettings +from redis import asyncio as aioredis +from redis.asyncio.sentinel import Sentinel + +redis_pool: Optional[aioredis.Redis] = None + + +class RedisSentinelSettings(BaseSettings): + """Configuration for connecting to Redis Sentinel.""" + + REDIS_SENTINEL_HOSTS: str = "" + REDIS_SENTINEL_PORTS: str = "26379" + 
REDIS_SENTINEL_MASTER_NAME: str = "master" + REDIS_DB: int = 15 + + REDIS_MAX_CONNECTIONS: int = 10 + REDIS_RETRY_TIMEOUT: bool = True + REDIS_DECODE_RESPONSES: bool = True + REDIS_CLIENT_NAME: str = "stac-fastapi-app" + REDIS_HEALTH_CHECK_INTERVAL: int = 30 + + +class RedisSettings(BaseSettings): + """Configuration for connecting Redis Sentinel.""" + + REDIS_HOST: str = "" + REDIS_PORT: int = 6379 + REDIS_DB: int = 0 + + REDIS_MAX_CONNECTIONS: int = 10 + REDIS_RETRY_TIMEOUT: bool = True + REDIS_DECODE_RESPONSES: bool = True + REDIS_CLIENT_NAME: str = "stac-fastapi-app" + REDIS_HEALTH_CHECK_INTERVAL: int = 30 + + +# Select the Redis or Redis Sentinel configuration +redis_settings: BaseSettings = RedisSettings() + + +async def connect_redis(settings: Optional[RedisSettings] = None) -> aioredis.Redis: + """Return a Redis connection.""" + global redis_pool + settings = settings or redis_settings + + if not settings.REDIS_HOST or not settings.REDIS_PORT: + return None + + if redis_pool is None: + pool = aioredis.ConnectionPool( + host=settings.REDIS_HOST, + port=settings.REDIS_PORT, + db=settings.REDIS_DB, + max_connections=settings.REDIS_MAX_CONNECTIONS, + decode_responses=settings.REDIS_DECODE_RESPONSES, + retry_on_timeout=settings.REDIS_RETRY_TIMEOUT, + health_check_interval=settings.REDIS_HEALTH_CHECK_INTERVAL, + ) + redis_pool = aioredis.Redis( + connection_pool=pool, client_name=settings.REDIS_CLIENT_NAME + ) + return redis_pool + + +async def connect_redis_sentinel( + settings: Optional[RedisSentinelSettings] = None, +) -> Optional[aioredis.Redis]: + """Return a Redis Sentinel connection.""" + global redis_pool + + settings = settings or redis_settings + + if ( + not settings.REDIS_SENTINEL_HOSTS + or not settings.REDIS_SENTINEL_PORTS + or not settings.REDIS_SENTINEL_MASTER_NAME + ): + return None + + hosts = [h.strip() for h in settings.REDIS_SENTINEL_HOSTS.split(",") if h.strip()] + ports = [ + int(p.strip()) for p in settings.REDIS_SENTINEL_PORTS.split(",") 
if p.strip() + ] + + if redis_pool is None: + try: + sentinel = Sentinel( + [(h, p) for h, p in zip(hosts, ports)], + decode_responses=settings.REDIS_DECODE_RESPONSES, + ) + master = sentinel.master_for( + service_name=settings.REDIS_SENTINEL_MASTER_NAME, + db=settings.REDIS_DB, + decode_responses=settings.REDIS_DECODE_RESPONSES, + retry_on_timeout=settings.REDIS_RETRY_TIMEOUT, + client_name=settings.REDIS_CLIENT_NAME, + max_connections=settings.REDIS_MAX_CONNECTIONS, + health_check_interval=settings.REDIS_HEALTH_CHECK_INTERVAL, + ) + redis_pool = master + + except Exception: + return None + + return redis_pool + + +async def save_self_link( + redis: aioredis.Redis, token: Optional[str], self_href: str +) -> None: + """Save the self link for the current token with 30 min TTL.""" + if token: + await redis.setex(f"nav:self:{token}", 1800, self_href) + + +async def get_prev_link(redis: aioredis.Redis, token: Optional[str]) -> Optional[str]: + """Get the previous page link for the current token (if exists).""" + if not token: + return None + return await redis.get(f"nav:self:{token}") diff --git a/stac_fastapi/elasticsearch/setup.py b/stac_fastapi/elasticsearch/setup.py index 1751df78..612c7587 100644 --- a/stac_fastapi/elasticsearch/setup.py +++ b/stac_fastapi/elasticsearch/setup.py @@ -21,6 +21,7 @@ "pre-commit~=3.0.0", "ciso8601~=2.3.0", "httpx>=0.24.0,<0.28.0", + "redis==6.4.0", ], "docs": ["mkdocs~=1.4.0", "mkdocs-material~=9.0.0", "pdocs~=1.2.0"], "server": ["uvicorn[standard]~=0.23.0"], diff --git a/stac_fastapi/opensearch/setup.py b/stac_fastapi/opensearch/setup.py index d7727267..f7d17575 100644 --- a/stac_fastapi/opensearch/setup.py +++ b/stac_fastapi/opensearch/setup.py @@ -22,6 +22,7 @@ "pre-commit~=3.0.0", "ciso8601~=2.3.0", "httpx>=0.24.0,<0.28.0", + "redis==6.4.0", ], "docs": ["mkdocs~=1.4.0", "mkdocs-material~=9.0.0", "pdocs~=1.2.0"], "server": ["uvicorn[standard]~=0.23.0"], From 52a57ffffba1fd6374be2f9fd6a642e20e275436 Mon Sep 17 00:00:00 2001 From: 
Yuri Zmytrakov Date: Wed, 8 Oct 2025 14:22:45 +0200 Subject: [PATCH 2/5] test: add Redis cache for navigation tests Add tests for Redis pagination caching in search and collections endpoints, plus utility function tests. --- stac_fastapi/tests/redis/__init__.py | 0 .../tests/redis/test_redis_pagination.py | 80 +++++++++++++++++++ stac_fastapi/tests/redis/test_redis_utils.py | 44 ++++++++++ 3 files changed, 124 insertions(+) create mode 100644 stac_fastapi/tests/redis/__init__.py create mode 100644 stac_fastapi/tests/redis/test_redis_pagination.py create mode 100644 stac_fastapi/tests/redis/test_redis_utils.py diff --git a/stac_fastapi/tests/redis/__init__.py b/stac_fastapi/tests/redis/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/stac_fastapi/tests/redis/test_redis_pagination.py b/stac_fastapi/tests/redis/test_redis_pagination.py new file mode 100644 index 00000000..0fa30c46 --- /dev/null +++ b/stac_fastapi/tests/redis/test_redis_pagination.py @@ -0,0 +1,80 @@ +import uuid + +import pytest + +from ..conftest import create_collection, create_item + + +@pytest.mark.asyncio +async def test_search_pagination_uses_redis_cache( + app_client, txn_client, load_test_data +): + """Test Redis caching and navigation for the /search endpoint.""" + + collection = load_test_data("test_collection.json") + collection_id = f"test-pagination-collection-{uuid.uuid4()}" + collection["id"] = collection_id + await create_collection(txn_client, collection) + + for i in range(5): + item = load_test_data("test_item.json") + item["id"] = f"test-pagination-item-{uuid.uuid4()}" + item["collection"] = collection_id + await create_item(txn_client, item) + + resp = await app_client.post( + "/search", json={"collections": [collection_id], "limit": 1} + ) + resp_json = resp.json() + + next_link = next( + (link for link in resp_json["links"] if link["rel"] == "next"), None + ) + next_token = next_link["body"]["token"] + + # Expect the previous link on the second page to be 
retrieved from Redis cache + resp2 = await app_client.post( + "/search", + json={"collections": [collection_id], "limit": 1, "token": next_token}, + ) + resp2_json = resp2.json() + + prev_link = next( + (link for link in resp2_json["links"] if link["rel"] == "prev"), None + ) + assert prev_link is not None + + +@pytest.mark.asyncio +async def test_collections_pagination_uses_redis_cache( + app_client, txn_client, load_test_data +): + """Test Redis caching and navigation for the /collection endpoint.""" + + collection_data = load_test_data("test_collection.json") + for i in range(5): + collection = collection_data.copy() + collection["id"] = f"test-collection-pagination-{uuid.uuid4()}" + collection["title"] = f"Test Collection Pagination {i}" + await create_collection(txn_client, collection) + + resp = await app_client.get("/collections", params={"limit": 1}) + assert resp.status_code == 200 + resp1_json = resp.json() + + next_link = next( + (link for link in resp1_json["links"] if link["rel"] == "next"), None + ) + next_token = next_link["href"].split("token=")[1] + + # Expect the previous link on the second page to be retrieved from Redis cache + resp2 = await app_client.get( + "/collections", params={"limit": 1, "token": next_token} + ) + assert resp2.status_code == 200 + resp2_json = resp2.json() + + prev_link = next( + (link for link in resp2_json["links"] if link["rel"] == "prev"), None + ) + assert prev_link is not None diff --git a/stac_fastapi/tests/redis/test_redis_utils.py b/stac_fastapi/tests/redis/test_redis_utils.py new file mode 100644 index 00000000..d4e80ce5 --- /dev/null +++ b/stac_fastapi/tests/redis/test_redis_utils.py @@ -0,0 +1,44 @@ +import pytest + +from stac_fastapi.core.redis_utils import connect_redis, get_prev_link, save_self_link + + +@pytest.mark.asyncio +async def test_redis_connection(): + """Test Redis connection.""" + redis = await connect_redis() + assert redis is not None + + # Test set/get + await redis.set("string_key", 
"string_value") + string_value = await redis.get("string_key") + assert string_value == "string_value" + + # Test key retrieval operation + exists = await redis.exists("string_key") + assert exists == 1 + + # Test key deletion + await redis.delete("string_key") + deleted_value = await redis.get("string_key") + assert deleted_value is None + + +@pytest.mark.asyncio +async def test_redis_utils_functions(): + redis = await connect_redis() + assert redis is not None + + token = "test_token_123" + self_link = "http://mywebsite.com/search?token=test_token_123" + + await save_self_link(redis, token, self_link) + retrieved_link = await get_prev_link(redis, token) + assert retrieved_link == self_link + + await save_self_link(redis, None, "should_not_save") + null_result = await get_prev_link(redis, None) + assert null_result is None + + non_existent = await get_prev_link(redis, "non_existent_token") + assert non_existent is None From cb7ee7cacb501d166c481cf82defc9cfd9129609 Mon Sep 17 00:00:00 2001 From: Yuri Zmytrakov Date: Wed, 8 Oct 2025 14:36:26 +0200 Subject: [PATCH 3/5] docs: add setup instructions to readme and update changelog --- CHANGELOG.md | 1 + README.md | 25 +++++++++++++++++++++++++ 2 files changed, 26 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index f0f028bc..ef1d9c50 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -11,6 +11,7 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0. - CloudFerro logo to sponsors and supporters list [#485](https://github.com/stac-utils/stac-fastapi-elasticsearch-opensearch/pull/485) - Latest news section to README [#485](https://github.com/stac-utils/stac-fastapi-elasticsearch-opensearch/pull/485) +- Added Redis caching configuration for navigation pagination support, enabling proper `prev` and `next` links in paginated responses. 
[#488](https://github.com/stac-utils/stac-fastapi-elasticsearch-opensearch/pull/488) ### Changed diff --git a/README.md b/README.md index b87bd21b..670b8788 100644 --- a/README.md +++ b/README.md @@ -317,6 +317,31 @@ You can customize additional settings in your `.env` file: > [!NOTE] > The variables `ES_HOST`, `ES_PORT`, `ES_USE_SSL`, `ES_VERIFY_CERTS` and `ES_TIMEOUT` apply to both Elasticsearch and OpenSearch backends, so there is no need to rename the key names to `OS_` even if you're using OpenSearch. +**Redis for Navigation:** +These Redis configuration variables enable proper navigation functionality in STAC FastAPI. The Redis cache stores navigation state for paginated results, allowing the system to maintain previous page links using tokens. The configuration supports either Redis Sentinel or standalone Redis setups. + +| Variable | Description | Default | Required | +|-------------------------------|----------------------------------------------------------------------------------------------|--------------------------|---------------------------------------------------------------------------------------------| +| **General** | | | | +| `REDIS_ENABLE` | Enables or disables Redis caching for navigation. Set to `true` to use Redis, or `false` to disable. | `false` | **Required** (determines whether Redis is used at all) | +| **Redis Sentinel** | | | | +| `REDIS_SENTINEL_HOSTS` | Comma-separated list of Redis Sentinel hostnames/IP addresses. | `""` | Conditional (required if using Sentinel) | +| `REDIS_SENTINEL_PORTS` | Comma-separated list of Redis Sentinel ports (must match order). | `"26379"` | Conditional (required if using Sentinel) | +| `REDIS_SENTINEL_MASTER_NAME` | Name of the Redis master node in Sentinel configuration. | `"master"` | Conditional (required if using Sentinel) | +| **Redis** | | | | +| `REDIS_HOST` | Redis server hostname or IP address for Redis configuration. 
| `""` | Conditional (required for standalone Redis) | +| `REDIS_PORT` | Redis server port for Redis configuration. | `6379` | Conditional (required for standalone Redis) | +| **Both** | | | | +| `REDIS_DB` | Redis database number to use for caching. | `0` (Sentinel) / `0` (Standalone) | Optional | +| `REDIS_MAX_CONNECTIONS` | Maximum number of connections in the Redis connection pool. | `10` | Optional | +| `REDIS_RETRY_TIMEOUT` | Enable retry on timeout for Redis operations. | `true` | Optional | +| `REDIS_DECODE_RESPONSES` | Automatically decode Redis responses to strings. | `true` | Optional | +| `REDIS_CLIENT_NAME` | Client name identifier for Redis connections. | `"stac-fastapi-app"` | Optional | +| `REDIS_HEALTH_CHECK_INTERVAL` | Interval in seconds for Redis health checks. | `30` | Optional | + +> [!NOTE] +> Use either the Sentinel configuration (`REDIS_SENTINEL_HOSTS`, `REDIS_SENTINEL_PORTS`, `REDIS_SENTINEL_MASTER_NAME`) OR the Redis configuration (`REDIS_HOST`, `REDIS_PORT`), but not both. 
+ ## Datetime-Based Index Management ### Overview From ac1d2e4da91f55bf5daab4e2d44a3d8d15571d31 Mon Sep 17 00:00:00 2001 From: Yuri Zmytrakov Date: Mon, 13 Oct 2025 11:45:52 +0200 Subject: [PATCH 4/5] fix: implement recommendations --- .pre-commit-config.yaml | 3 +- Makefile | 14 +- README.md | 2 + compose-redis.yml | 13 - compose.yml | 19 ++ mypy.ini | 3 - stac_fastapi/core/setup.py | 2 + stac_fastapi/core/stac_fastapi/core/core.py | 70 ++--- .../core/stac_fastapi/core/redis_utils.py | 269 ++++++++++++++---- stac_fastapi/elasticsearch/setup.py | 3 +- stac_fastapi/opensearch/setup.py | 3 +- .../tests/redis/test_redis_pagination.py | 2 - stac_fastapi/tests/redis/test_redis_utils.py | 10 +- 13 files changed, 257 insertions(+), 156 deletions(-) delete mode 100644 compose-redis.yml delete mode 100644 mypy.ini diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index e867050b..f550c8cb 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -31,7 +31,8 @@ repos: ] additional_dependencies: [ "types-attrs", - "types-requests" + "types-requests", + "types-redis" ] - repo: https://github.com/PyCQA/pydocstyle rev: 6.1.1 diff --git a/Makefile b/Makefile index bde31064..34b13815 100644 --- a/Makefile +++ b/Makefile @@ -82,7 +82,7 @@ test-datetime-filtering-os: docker compose down .PHONY: test -test: test-elasticsearch test-datetime-filtering-es test-opensearch test-datetime-filtering-os test-redis-es test-redis-os +test: test-elasticsearch test-datetime-filtering-es test-opensearch test-datetime-filtering-os .PHONY: run-database-es run-database-es: @@ -118,15 +118,3 @@ docs-image: docs: docs-image docker compose -f compose.docs.yml \ run docs - -.PHONY: test-redis-es -test-redis-es: - docker compose -f compose-redis.yml up -d - -$(run_es) /bin/bash -c 'export REDIS_ENABLE=true REDIS_HOST=redis REDIS_PORT=6379 && ./scripts/wait-for-it-es.sh elasticsearch:9200 && cd stac_fastapi/tests/ && pytest redis/ -v' - docker compose -f compose-redis.yml down - 
-.PHONY: test-redis-os -test-redis-os: - docker compose -f compose-redis.yml up -d - -$(run_os) /bin/bash -c 'export REDIS_ENABLE=true REDIS_HOST=redis REDIS_PORT=6379 && ./scripts/wait-for-it-es.sh opensearch:9202 && cd stac_fastapi/tests/ && pytest redis/ -v' - docker compose -f compose-redis.yml down diff --git a/README.md b/README.md index 670b8788..5ae980ff 100644 --- a/README.md +++ b/README.md @@ -338,6 +338,8 @@ These Redis configuration variables enable proper navigation functionality in ST | `REDIS_DECODE_RESPONSES` | Automatically decode Redis responses to strings. | `true` | Optional | | `REDIS_CLIENT_NAME` | Client name identifier for Redis connections. | `"stac-fastapi-app"` | Optional | | `REDIS_HEALTH_CHECK_INTERVAL` | Interval in seconds for Redis health checks. | `30` | Optional | +| `REDIS_SELF_LINK_TTL` | Time-to-live (TTL) in seconds for storing self-links in Redis, used for pagination caching. | 1800 | Optional | + > [!NOTE] > Use either the Sentinel configuration (`REDIS_SENTINEL_HOSTS`, `REDIS_SENTINEL_PORTS`, `REDIS_SENTINEL_MASTER_NAME`) OR the Redis configuration (`REDIS_HOST`, `REDIS_PORT`), but not both. 
diff --git a/compose-redis.yml b/compose-redis.yml deleted file mode 100644 index b572e731..00000000 --- a/compose-redis.yml +++ /dev/null @@ -1,13 +0,0 @@ -version: '3.8' - -services: - redis: - image: redis:7-alpine - ports: - - "6379:6379" - volumes: - - redis_test_data:/data - command: redis-server --appendonly yes - -volumes: - redis_test_data: diff --git a/compose.yml b/compose.yml index 8c83ae12..82cf9fca 100644 --- a/compose.yml +++ b/compose.yml @@ -23,6 +23,9 @@ services: - BACKEND=elasticsearch - DATABASE_REFRESH=true - ENABLE_COLLECTIONS_SEARCH_ROUTE=true + - REDIS_ENABLE=true + - REDIS_HOST=redis + - REDIS_PORT=6379 ports: - "8080:8080" volumes: @@ -31,6 +34,7 @@ services: - ./esdata:/usr/share/elasticsearch/data depends_on: - elasticsearch + - redis command: bash -c "./scripts/wait-for-it-es.sh es-container:9200 && python -m stac_fastapi.elasticsearch.app" @@ -58,6 +62,9 @@ services: - BACKEND=opensearch - STAC_FASTAPI_RATE_LIMIT=200/minute - ENABLE_COLLECTIONS_SEARCH_ROUTE=true + - REDIS_ENABLE=true + - REDIS_HOST=redis + - REDIS_PORT=6379 ports: - "8082:8082" volumes: @@ -66,6 +73,7 @@ services: - ./osdata:/usr/share/opensearch/data depends_on: - opensearch + - redis command: bash -c "./scripts/wait-for-it-es.sh os-container:9202 && python -m stac_fastapi.opensearch.app" @@ -96,3 +104,14 @@ services: - ./opensearch/snapshots:/usr/share/opensearch/snapshots ports: - "9202:9202" + + redis: + image: redis:7-alpine + hostname: redis + ports: + - "6379:6379" + volumes: + - redis_test_data:/data + command: redis-server +volumes: + redis_test_data: diff --git a/mypy.ini b/mypy.ini deleted file mode 100644 index 1ddba638..00000000 --- a/mypy.ini +++ /dev/null @@ -1,3 +0,0 @@ -[mypy] -[mypy-redis.*] -ignore_missing_imports = True diff --git a/stac_fastapi/core/setup.py b/stac_fastapi/core/setup.py index 92442997..8bd2c495 100644 --- a/stac_fastapi/core/setup.py +++ b/stac_fastapi/core/setup.py @@ -20,6 +20,7 @@ "jsonschema~=4.0.0", "slowapi~=0.1.9", ] 
+extra_reqs = {"redis": ["redis~=6.4.0"]} setup( name="stac_fastapi_core", @@ -43,4 +44,5 @@ packages=find_namespace_packages(), zip_safe=False, install_requires=install_requires, + extras_require=extra_reqs, ) diff --git a/stac_fastapi/core/stac_fastapi/core/core.py b/stac_fastapi/core/stac_fastapi/core/core.py index c0548c1a..329e0982 100644 --- a/stac_fastapi/core/stac_fastapi/core/core.py +++ b/stac_fastapi/core/stac_fastapi/core/core.py @@ -24,7 +24,7 @@ from stac_fastapi.core.base_settings import ApiBaseSettings from stac_fastapi.core.datetime_utils import format_datetime_range from stac_fastapi.core.models.links import PagingLinks -from stac_fastapi.core.redis_utils import connect_redis, get_prev_link, save_self_link +from stac_fastapi.core.redis_utils import redis_pagination_links from stac_fastapi.core.serializers import CollectionSerializer, ItemSerializer from stac_fastapi.core.session import Session from stac_fastapi.core.utilities import filter_fields, get_bool_env @@ -270,6 +270,7 @@ async def all_collections( A Collections object containing all the collections in the database and links to various resources. 
""" base_url = str(request.base_url) + redis_enable = get_bool_env("REDIS_ENABLE", default=False) # Get the global limit from environment variable global_limit = None @@ -329,20 +330,6 @@ async def all_collections( if parsed_sort: sort = parsed_sort - current_url = str(request.url) - redis_enable = get_bool_env("REDIS_ENABLE", default=False) - - redis = None - if redis_enable: - try: - redis = await connect_redis() - logger.info("Redis connection established successfully") - except Exception as e: - redis = None - logger.warning( - f"Redis connection failed, continuing without Redis: {e}" - ) - # Convert q to a list if it's a string q_list = None if q is not None: @@ -441,21 +428,13 @@ async def all_collections( }, ] - if redis_enable and redis: - if next_token: - await save_self_link(redis, next_token, current_url) - - prev_link = await get_prev_link(redis, token) - if prev_link: - links.insert( - 0, - { - "rel": "prev", - "type": "application/json", - "method": "GET", - "href": prev_link, - }, - ) + if redis_enable: + await redis_pagination_links( + current_url=str(request.url), + token=token, + next_token=next_token, + links=links, + ) if next_token: next_link = PagingLinks(next=next_token, request=request).link_next() @@ -775,9 +754,8 @@ async def post_search( HTTPException: If there is an error with the cql2_json filter. 
""" base_url = str(request.base_url) - redis_enable = get_bool_env("REDIS_ENABLE", default=False) - search = self.database.make_search() + redis_enable = get_bool_env("REDIS_ENABLE", default=False) if search_request.ids: search = self.database.apply_ids_filter( @@ -902,28 +880,12 @@ async def post_search( links.extend(collection_links) if redis_enable: - redis = None - try: - redis = await connect_redis() - logger.info("Redis connection established successfully") - self_link = str(request.url) - await save_self_link(redis, next_token, self_link) - - prev_link = await get_prev_link(redis, token_param) - if prev_link: - links.insert( - 0, - { - "rel": "prev", - "type": "application/json", - "method": "GET", - "href": prev_link, - }, - ) - except Exception as e: - logger.warning( - f"Redis connection failed, continuing without Redis: {e}" - ) + await redis_pagination_links( + current_url=str(request.url), + token=token_param, + next_token=next_token, + links=links, + ) return stac_types.ItemCollection( type="FeatureCollection", diff --git a/stac_fastapi/core/stac_fastapi/core/redis_utils.py b/stac_fastapi/core/stac_fastapi/core/redis_utils.py index b7923daa..484b7910 100644 --- a/stac_fastapi/core/stac_fastapi/core/redis_utils.py +++ b/stac_fastapi/core/stac_fastapi/core/redis_utils.py @@ -1,12 +1,15 @@ """Utilities for connecting to and managing Redis connections.""" -from typing import Optional +import json +import logging +from typing import List, Optional, Tuple +from pydantic import field_validator from pydantic_settings import BaseSettings from redis import asyncio as aioredis from redis.asyncio.sentinel import Sentinel -redis_pool: Optional[aioredis.Redis] = None +logger = logging.getLogger(__name__) class RedisSentinelSettings(BaseSettings): @@ -22,10 +25,86 @@ class RedisSentinelSettings(BaseSettings): REDIS_DECODE_RESPONSES: bool = True REDIS_CLIENT_NAME: str = "stac-fastapi-app" REDIS_HEALTH_CHECK_INTERVAL: int = 30 + REDIS_SELF_LINK_TTL: int = 1800 + + 
@field_validator("REDIS_DB") + @classmethod + def validate_db_sentinel(cls, v: int) -> int: + """Validate REDIS_DB is not negative integer.""" + if v < 0: + raise ValueError("REDIS_DB must be a positive integer") + return v + + @field_validator("REDIS_MAX_CONNECTIONS") + @classmethod + def validate_max_connections_sentinel(cls, v: int) -> int: + """Validate REDIS_MAX_CONNECTIONS is at least 1.""" + if v < 1: + raise ValueError("REDIS_MAX_CONNECTIONS must be at least 1") + return v + + @field_validator("REDIS_HEALTH_CHECK_INTERVAL") + @classmethod + def validate_health_check_interval_sentinel(cls, v: int) -> int: + """Validate REDIS_HEALTH_CHECK_INTERVAL is not negative integer.""" + if v < 0: + raise ValueError("REDIS_HEALTH_CHECK_INTERVAL must be a positive integer") + return v + + @field_validator("REDIS_SELF_LINK_TTL") + @classmethod + def validate_self_link_ttl_sentinel(cls, v: int) -> int: + """Validate REDIS_SELF_LINK_TTL is not a negative integer.""" + if v < 0: + raise ValueError("REDIS_SELF_LINK_TTL must be a positive integer") + return v + + def get_sentinel_hosts(self) -> List[str]: + """Parse Redis Sentinel hosts from string to list.""" + if not self.REDIS_SENTINEL_HOSTS: + return [] + + if self.REDIS_SENTINEL_HOSTS.strip().startswith("["): + return json.loads(self.REDIS_SENTINEL_HOSTS) + else: + return [ + h.strip() for h in self.REDIS_SENTINEL_HOSTS.split(",") if h.strip() + ] + + def get_sentinel_ports(self) -> List[int]: + """Parse Redis Sentinel ports from string to list of integers.""" + if not self.REDIS_SENTINEL_PORTS: + return [26379] + + if self.REDIS_SENTINEL_PORTS.strip().startswith("["): + return json.loads(self.REDIS_SENTINEL_PORTS) + else: + ports_str_list = [ + p.strip() for p in self.REDIS_SENTINEL_PORTS.split(",") if p.strip() + ] + return [int(port) for port in ports_str_list] + + def get_sentinel_nodes(self) -> List[Tuple[str, int]]: + """Get list of (host, port) tuples for Sentinel connection.""" + hosts = self.get_sentinel_hosts() 
+ ports = self.get_sentinel_ports() + + if not hosts: + return [] + + if len(ports) == 1 and len(hosts) > 1: + ports = ports * len(hosts) + + if len(hosts) != len(ports): + raise ValueError( + f"Mismatch between hosts ({len(hosts)}) and ports ({len(ports)})" + ) + + return [(str(host), int(port)) for host, port in zip(hosts, ports)] class RedisSettings(BaseSettings): - """Configuration for connecting Redis Sentinel.""" + """Configuration for connecting Redis.""" REDIS_HOST: str = "" REDIS_PORT: int = 6379 @@ -36,89 +115,153 @@ class RedisSettings(BaseSettings): REDIS_DECODE_RESPONSES: bool = True REDIS_CLIENT_NAME: str = "stac-fastapi-app" REDIS_HEALTH_CHECK_INTERVAL: int = 30 + REDIS_SELF_LINK_TTL: int = 1800 + @field_validator("REDIS_PORT") + @classmethod + def validate_port_standalone(cls, v: int) -> int: + """Validate REDIS_PORT is not a negative integer.""" + if v < 0: + raise ValueError("REDIS_PORT must be a positive integer") + return v -# Select the Redis or Redis Sentinel configuration -redis_settings: BaseSettings = RedisSettings() + @field_validator("REDIS_DB") + @classmethod + def validate_db_standalone(cls, v: int) -> int: + """Validate REDIS_DB is not a negative integer.""" + if v < 0: + raise ValueError("REDIS_DB must be a positive integer") + return v + @field_validator("REDIS_MAX_CONNECTIONS") + @classmethod + def validate_max_connections_standalone(cls, v: int) -> int: + """Validate REDIS_MAX_CONNECTIONS is at least 1.""" + if v < 1: + raise ValueError("REDIS_MAX_CONNECTIONS must be at least 1") + return v -async def connect_redis(settings: Optional[RedisSettings] = None) -> aioredis.Redis: - """Return a Redis connection.""" - global redis_pool - settings = settings or redis_settings + @field_validator("REDIS_HEALTH_CHECK_INTERVAL") + @classmethod + def validate_health_check_interval_standalone(cls, v: int) -> int: + """Validate REDIS_HEALTH_CHECK_INTERVAL is not a negative.""" + if v < 0: + raise ValueError("REDIS_HEALTH_CHECK_INTERVAL must be a 
positive integer") + return v - if not settings.REDIS_HOST or not settings.REDIS_PORT: - return None + @field_validator("REDIS_SELF_LINK_TTL") + @classmethod + def validate_self_link_ttl_standalone(cls, v: int) -> int: + """Validate REDIS_SELF_LINK_TTL is not negative.""" + if v < 0: + raise ValueError("REDIS_SELF_LINK_TTL must be a positive integer") + return v - if redis_pool is None: - pool = aioredis.ConnectionPool( - host=settings.REDIS_HOST, - port=settings.REDIS_PORT, - db=settings.REDIS_DB, - max_connections=settings.REDIS_MAX_CONNECTIONS, - decode_responses=settings.REDIS_DECODE_RESPONSES, - retry_on_timeout=settings.REDIS_RETRY_TIMEOUT, - health_check_interval=settings.REDIS_HEALTH_CHECK_INTERVAL, - ) - redis_pool = aioredis.Redis( - connection_pool=pool, client_name=settings.REDIS_CLIENT_NAME - ) - return redis_pool - - -async def connect_redis_sentinel( - settings: Optional[RedisSentinelSettings] = None, -) -> Optional[aioredis.Redis]: - """Return a Redis Sentinel connection.""" - global redis_pool - - settings = settings or redis_settings - - if ( - not settings.REDIS_SENTINEL_HOSTS - or not settings.REDIS_SENTINEL_PORTS - or not settings.REDIS_SENTINEL_MASTER_NAME - ): - return None - hosts = [h.strip() for h in settings.REDIS_SENTINEL_HOSTS.split(",") if h.strip()] - ports = [ - int(p.strip()) for p in settings.REDIS_SENTINEL_PORTS.split(",") if p.strip() - ] +# Configure only one Redis configuration +sentinel_settings = RedisSentinelSettings() +standalone_settings = RedisSettings() - if redis_pool is None: - try: + +async def connect_redis() -> Optional[aioredis.Redis]: + """Return a Redis connection Redis or Redis Sentinel.""" + try: + if sentinel_settings.REDIS_SENTINEL_HOSTS: + sentinel_nodes = sentinel_settings.get_sentinel_nodes() sentinel = Sentinel( - [(h, p) for h, p in zip(hosts, ports)], - decode_responses=settings.REDIS_DECODE_RESPONSES, + sentinel_nodes, + decode_responses=sentinel_settings.REDIS_DECODE_RESPONSES, ) - master = 
sentinel.master_for( - service_name=settings.REDIS_SENTINEL_MASTER_NAME, - db=settings.REDIS_DB, - decode_responses=settings.REDIS_DECODE_RESPONSES, - retry_on_timeout=settings.REDIS_RETRY_TIMEOUT, - client_name=settings.REDIS_CLIENT_NAME, - max_connections=settings.REDIS_MAX_CONNECTIONS, - health_check_interval=settings.REDIS_HEALTH_CHECK_INTERVAL, + + redis = sentinel.master_for( + service_name=sentinel_settings.REDIS_SENTINEL_MASTER_NAME, + db=sentinel_settings.REDIS_DB, + decode_responses=sentinel_settings.REDIS_DECODE_RESPONSES, + retry_on_timeout=sentinel_settings.REDIS_RETRY_TIMEOUT, + client_name=sentinel_settings.REDIS_CLIENT_NAME, + max_connections=sentinel_settings.REDIS_MAX_CONNECTIONS, + health_check_interval=sentinel_settings.REDIS_HEALTH_CHECK_INTERVAL, ) - redis_pool = master + logger.info("Connected to Redis Sentinel") - except Exception: + elif standalone_settings.REDIS_HOST: + pool = aioredis.ConnectionPool( + host=standalone_settings.REDIS_HOST, + port=standalone_settings.REDIS_PORT, + db=standalone_settings.REDIS_DB, + max_connections=standalone_settings.REDIS_MAX_CONNECTIONS, + decode_responses=standalone_settings.REDIS_DECODE_RESPONSES, + retry_on_timeout=standalone_settings.REDIS_RETRY_TIMEOUT, + health_check_interval=standalone_settings.REDIS_HEALTH_CHECK_INTERVAL, + ) + redis = aioredis.Redis( + connection_pool=pool, client_name=standalone_settings.REDIS_CLIENT_NAME + ) + logger.info("Connected to Redis") + else: + logger.warning("No Redis configuration found") return None - return redis_pool + return redis + + except aioredis.ConnectionError as e: + logger.error(f"Redis connection error: {e}") + return None + except aioredis.AuthenticationError as e: + logger.error(f"Redis authentication error: {e}") + return None + except aioredis.TimeoutError as e: + logger.error(f"Redis timeout error: {e}") + return None + except Exception as e: + logger.error(f"Failed to connect to Redis: {e}") + return None async def save_self_link( redis: 
aioredis.Redis, token: Optional[str], self_href: str ) -> None: - """Save the self link for the current token with 30 min TTL.""" + """Save the self link for the current token.""" if token: - await redis.setex(f"nav:self:{token}", 1800, self_href) + if sentinel_settings.REDIS_SENTINEL_HOSTS: + ttl_seconds = sentinel_settings.REDIS_SELF_LINK_TTL + elif standalone_settings.REDIS_HOST: + ttl_seconds = standalone_settings.REDIS_SELF_LINK_TTL + await redis.setex(f"nav:self:{token}", ttl_seconds, self_href) async def get_prev_link(redis: aioredis.Redis, token: Optional[str]) -> Optional[str]: - """Get the previous page link for the current token (if exists).""" + """Get the previous page link for the current token.""" if not token: return None return await redis.get(f"nav:self:{token}") + + +async def redis_pagination_links( + current_url: str, token: str, next_token: str, links: list +) -> None: + """Handle Redis pagination.""" + redis = await connect_redis() + if not redis: + logger.warning("Redis connection failed.") + return + + try: + if next_token: + await save_self_link(redis, next_token, current_url) + + prev_link = await get_prev_link(redis, token) + if prev_link: + links.insert( + 0, + { + "rel": "prev", + "type": "application/json", + "method": "GET", + "href": prev_link, + }, + ) + except Exception as e: + logger.warning(f"Redis pagination operation failed: {e}") + finally: + await redis.close() diff --git a/stac_fastapi/elasticsearch/setup.py b/stac_fastapi/elasticsearch/setup.py index 612c7587..5d95fcdb 100644 --- a/stac_fastapi/elasticsearch/setup.py +++ b/stac_fastapi/elasticsearch/setup.py @@ -21,10 +21,11 @@ "pre-commit~=3.0.0", "ciso8601~=2.3.0", "httpx>=0.24.0,<0.28.0", - "redis==6.4.0", + "stac-fastapi-core[redis]==6.5.1", ], "docs": ["mkdocs~=1.4.0", "mkdocs-material~=9.0.0", "pdocs~=1.2.0"], "server": ["uvicorn[standard]~=0.23.0"], + "redis": ["stac-fastapi-core[redis]==6.5.1"], } setup( diff --git a/stac_fastapi/opensearch/setup.py 
b/stac_fastapi/opensearch/setup.py index f7d17575..2522c581 100644 --- a/stac_fastapi/opensearch/setup.py +++ b/stac_fastapi/opensearch/setup.py @@ -22,10 +22,11 @@ "pre-commit~=3.0.0", "ciso8601~=2.3.0", "httpx>=0.24.0,<0.28.0", - "redis==6.4.0", + "stac-fastapi-core[redis]==6.5.1", ], "docs": ["mkdocs~=1.4.0", "mkdocs-material~=9.0.0", "pdocs~=1.2.0"], "server": ["uvicorn[standard]~=0.23.0"], + "redis": ["stac-fastapi-core[redis]==6.5.1"], } setup( diff --git a/stac_fastapi/tests/redis/test_redis_pagination.py b/stac_fastapi/tests/redis/test_redis_pagination.py index 0fa30c46..2f4c93e1 100644 --- a/stac_fastapi/tests/redis/test_redis_pagination.py +++ b/stac_fastapi/tests/redis/test_redis_pagination.py @@ -32,7 +32,6 @@ async def test_search_pagination_uses_redis_cache( ) next_token = next_link["body"]["token"] - # Expect the previous link on the second page to be retrieved from Redis cache resp2 = await app_client.post( "/search", json={"collections": [collection_id], "limit": 1, "token": next_token}, @@ -67,7 +66,6 @@ async def test_collections_pagination_uses_redis_cache( ) next_token = next_link["href"].split("token=")[1] - # Expect the previous link on the second page to be retrieved from Redis cache resp2 = await app_client.get( "/collections", params={"limit": 1, "token": next_token} ) diff --git a/stac_fastapi/tests/redis/test_redis_utils.py b/stac_fastapi/tests/redis/test_redis_utils.py index d4e80ce5..a7dc8338 100644 --- a/stac_fastapi/tests/redis/test_redis_utils.py +++ b/stac_fastapi/tests/redis/test_redis_utils.py @@ -7,18 +7,17 @@ async def test_redis_connection(): """Test Redis connection.""" redis = await connect_redis() - assert redis is not None - # Test set/get + if redis is None: + pytest.skip("Redis not configured") + await redis.set("string_key", "string_value") string_value = await redis.get("string_key") assert string_value == "string_value" - # Test key retrieval operation exists = await redis.exists("string_key") assert exists == 1 - # 
Test key deletion await redis.delete("string_key") deleted_value = await redis.get("string_key") assert deleted_value is None @@ -27,7 +26,8 @@ async def test_redis_connection(): @pytest.mark.asyncio async def test_redis_utils_functions(): redis = await connect_redis() - assert redis is not None + if redis is None: + pytest.skip("Redis not configured") token = "test_token_123" self_link = "http://mywebsite.com/search?token=test_token_123" From 0a022e917a12003302ef3b8372cf76d43c7fc66f Mon Sep 17 00:00:00 2001 From: Jonathan Healy Date: Thu, 23 Oct 2025 17:24:31 +0800 Subject: [PATCH 5/5] Move to Unreleased section --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 351b9b7b..6e385bce 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -10,6 +10,7 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0. ### Added - Environment variable `EXCLUDED_FROM_QUERYABLES` to exclude specific fields from queryables endpoint and filtering. Supports comma-separated list of fully qualified field names (e.g., `properties.auth:schemes,properties.storage:schemes`) [#489](https://github.com/stac-utils/stac-fastapi-elasticsearch-opensearch/pull/489) +- Added Redis caching configuration for navigation pagination support, enabling proper `prev` and `next` links in paginated responses. [#488](https://github.com/stac-utils/stac-fastapi-elasticsearch-opensearch/pull/488) ### Changed @@ -30,7 +31,6 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0. - Moved SFEOS Tools to its own repository at [Healy-Hyperspatial/sfeos-tools](https://github.com/Healy-Hyperspatial/sfeos-tools). The CLI package is now maintained separately. 
- CloudFerro logo to sponsors and supporters list [#485](https://github.com/stac-utils/stac-fastapi-elasticsearch-opensearch/pull/485) - Latest news section to README [#485](https://github.com/stac-utils/stac-fastapi-elasticsearch-opensearch/pull/485) -- Added Redis caching configuration for navigation pagination support, enabling proper `prev` and `next` links in paginated responses. [#488](https://github.com/stac-utils/stac-fastapi-elasticsearch-opensearch/pull/488) - Python 3.14 support [#500](https://github.com/stac-utils/stac-fastapi-elasticsearch-opensearch/pull/500) ### Changed