diff --git a/.github/workflows/cicd.yml b/.github/workflows/cicd.yml index 5a58a87a..e9f7d08f 100644 --- a/.github/workflows/cicd.yml +++ b/.github/workflows/cicd.yml @@ -56,6 +56,16 @@ jobs: --ulimit nofile=65536:65536 --ulimit memlock=-1:-1 + redis: + image: redis:7-alpine + options: >- + --health-cmd "redis-cli ping" + --health-interval 10s + --health-timeout 5s + --health-retries 5 + ports: + - 6379:6379 + strategy: matrix: python-version: [ "3.9", "3.10", "3.11", "3.12", "3.13", "3.14"] @@ -115,4 +125,7 @@ jobs: ES_USE_SSL: false DATABASE_REFRESH: true ES_VERIFY_CERTS: false - BACKEND: ${{ matrix.backend == 'elasticsearch8' && 'elasticsearch' || 'opensearch' }} \ No newline at end of file + REDIS_ENABLE: true + REDIS_HOST: localhost + REDIS_PORT: 6379 + BACKEND: ${{ matrix.backend == 'elasticsearch8' && 'elasticsearch' || 'opensearch' }} diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index e867050b..f550c8cb 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -31,7 +31,8 @@ repos: ] additional_dependencies: [ "types-attrs", - "types-requests" + "types-requests", + "types-redis" ] - repo: https://github.com/PyCQA/pydocstyle rev: 6.1.1 diff --git a/CHANGELOG.md b/CHANGELOG.md index 3db5c350..6e385bce 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -10,6 +10,7 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0. ### Added - Environment variable `EXCLUDED_FROM_QUERYABLES` to exclude specific fields from queryables endpoint and filtering. Supports comma-separated list of fully qualified field names (e.g., `properties.auth:schemes,properties.storage:schemes`) [#489](https://github.com/stac-utils/stac-fastapi-elasticsearch-opensearch/pull/489) +- Added Redis caching configuration for navigation pagination support, enabling proper `prev` and `next` links in paginated responses. 
[#488](https://github.com/stac-utils/stac-fastapi-elasticsearch-opensearch/pull/488) ### Changed diff --git a/Makefile b/Makefile index 204b31a1..34b13815 100644 --- a/Makefile +++ b/Makefile @@ -117,4 +117,4 @@ docs-image: .PHONY: docs docs: docs-image docker compose -f compose.docs.yml \ - run docs \ No newline at end of file + run docs diff --git a/README.md b/README.md index cdfa8815..75e88ea6 100644 --- a/README.md +++ b/README.md @@ -118,6 +118,7 @@ This project is built on the following technologies: STAC, stac-fastapi, FastAPI - [Collection Pagination](#collection-pagination) - [SFEOS Tools CLI](#sfeos-tools-cli) - [Ingesting Sample Data CLI Tool](#ingesting-sample-data-cli-tool) + - [Redis for navigation](#redis-for-navigation) - [Elasticsearch Mappings](#elasticsearch-mappings) - [Managing Elasticsearch Indices](#managing-elasticsearch-indices) - [Snapshots](#snapshots) @@ -344,6 +345,32 @@ You can customize additional settings in your `.env` file: > [!NOTE] > The variables `ES_HOST`, `ES_PORT`, `ES_USE_SSL`, `ES_VERIFY_CERTS` and `ES_TIMEOUT` apply to both Elasticsearch and OpenSearch backends, so there is no need to rename the key names to `OS_` even if you're using OpenSearch. +## Redis for Navigation environment variables: +These Redis configuration variables to enable proper navigation functionality in STAC FastAPI. + +| Variable | Description | Default | Required | +|-------------------------------|----------------------------------------------------------------------------------------------|--------------------------|---------------------------------------------------------------------------------------------| +| `REDIS_ENABLE` | Enables or disables Redis caching for navigation. Set to `true` to use Redis, or `false` to disable. | `false` | **Required** (determines whether Redis is used at all) | +| **Redis Sentinel** | | | | +| `REDIS_SENTINEL_HOSTS` | Comma-separated list of Redis Sentinel hostnames/IP addresses. 
| `""` | Conditional (required if using Sentinel) | +| `REDIS_SENTINEL_PORTS` | Comma-separated list of Redis Sentinel ports (must match order). | `"26379"` | Conditional (required if using Sentinel) | +| `REDIS_SENTINEL_MASTER_NAME` | Name of the Redis master node in Sentinel configuration. | `"master"` | Conditional (required if using Sentinel) | +| **Redis** | | | | +| `REDIS_HOST` | Redis server hostname or IP address for Redis configuration. | `""` | Conditional (required for standalone Redis) | +| `REDIS_PORT` | Redis server port for Redis configuration. | `6379` | Conditional (required for standalone Redis) | +| **Both** | | | | +| `REDIS_DB` | Redis database number to use for caching. | `0` (Sentinel) / `15` (Standalone) | Optional | +| `REDIS_MAX_CONNECTIONS` | Maximum number of connections in the Redis connection pool. | `10` | Optional | +| `REDIS_RETRY_TIMEOUT` | Enable retry on timeout for Redis operations. | `true` | Optional | +| `REDIS_DECODE_RESPONSES` | Automatically decode Redis responses to strings. | `true` | Optional | +| `REDIS_CLIENT_NAME` | Client name identifier for Redis connections. | `"stac-fastapi-app"` | Optional | +| `REDIS_HEALTH_CHECK_INTERVAL` | Interval in seconds for Redis health checks. | `30` | Optional | +| `REDIS_SELF_LINK_TTL` | Time-to-live (TTL) in seconds for storing self-links in Redis, used for pagination caching. | 1800 | Optional | + + +> [!NOTE] +> Use either the Sentinel configuration (`REDIS_SENTINEL_HOSTS`, `REDIS_SENTINEL_PORTS`, `REDIS_SENTINEL_MASTER_NAME`) OR the Redis configuration (`REDIS_HOST`, `REDIS_PORT`), but not both. + ## Excluding Fields from Queryables You can exclude specific fields from being exposed in the queryables endpoint and from filtering by setting the `EXCLUDED_FROM_QUERYABLES` environment variable. This is useful for hiding sensitive or internal fields that should not be queryable by API users. 
@@ -615,6 +642,19 @@ The system uses a precise naming convention: python3 data_loader.py --base-url http://localhost:8080 --use-bulk ``` +## Redis for Navigation + +The Redis cache stores navigation state for paginated results, allowing the system to maintain previous page links using tokens. The configuration supports both Redis Sentinel and standalone Redis setups. + +Steps to configure: +1. Ensure that a Redis instance is available, either a standalone server or a Sentinel-managed cluster. +2. Establish a connection between STAC FastAPI and the Redis instance by setting the appropriate [**environment variables**](#redis-for-navigation-environment-variables). These define the Redis host, port, authentication, and optional Sentinel settings. +3. Control whether Redis caching is activated by setting the `REDIS_ENABLE` environment variable to `True` or `False`. +4. Ensure the appropriate version of `Redis` is installed: +``` +pip install stac-fastapi-elasticsearch[redis] +``` + ## Elasticsearch Mappings - **Overview**: Mappings apply to search index, not source data. They define how documents and their fields are stored and indexed. 
diff --git a/compose.yml b/compose.yml index 5dfa92e9..2c1d7be3 100644 --- a/compose.yml +++ b/compose.yml @@ -23,6 +23,9 @@ services: - BACKEND=elasticsearch - DATABASE_REFRESH=true - ENABLE_COLLECTIONS_SEARCH_ROUTE=true + - REDIS_ENABLE=true + - REDIS_HOST=redis + - REDIS_PORT=6379 ports: - "8080:8080" volumes: @@ -31,6 +34,7 @@ services: - ./esdata:/usr/share/elasticsearch/data depends_on: - elasticsearch + - redis command: bash -c "./scripts/wait-for-it-es.sh es-container:9200 && python -m stac_fastapi.elasticsearch.app" @@ -58,6 +62,9 @@ services: - BACKEND=opensearch - STAC_FASTAPI_RATE_LIMIT=200/minute - ENABLE_COLLECTIONS_SEARCH_ROUTE=true + - REDIS_ENABLE=true + - REDIS_HOST=redis + - REDIS_PORT=6379 ports: - "8082:8082" volumes: @@ -66,6 +73,7 @@ services: - ./osdata:/usr/share/opensearch/data depends_on: - opensearch + - redis command: bash -c "./scripts/wait-for-it-es.sh os-container:9202 && python -m stac_fastapi.opensearch.app" @@ -96,3 +104,14 @@ services: - ./opensearch/snapshots:/usr/share/opensearch/snapshots ports: - "9202:9202" + + redis: + image: redis:7-alpine + hostname: redis + ports: + - "6379:6379" + volumes: + - redis_test_data:/data + command: redis-server +volumes: + redis_test_data: diff --git a/stac_fastapi/core/pyproject.toml b/stac_fastapi/core/pyproject.toml index 6b79c582..5498956c 100644 --- a/stac_fastapi/core/pyproject.toml +++ b/stac_fastapi/core/pyproject.toml @@ -44,6 +44,7 @@ dependencies = [ "pygeofilter~=0.3.1", "jsonschema~=4.0.0", "slowapi~=0.1.9", + "redis==6.4.0", ] [project.urls] diff --git a/stac_fastapi/core/stac_fastapi/core/core.py b/stac_fastapi/core/stac_fastapi/core/core.py index 87671822..0c3beedc 100644 --- a/stac_fastapi/core/stac_fastapi/core/core.py +++ b/stac_fastapi/core/stac_fastapi/core/core.py @@ -24,9 +24,10 @@ from stac_fastapi.core.base_settings import ApiBaseSettings from stac_fastapi.core.datetime_utils import format_datetime_range from stac_fastapi.core.models.links import PagingLinks +from 
stac_fastapi.core.redis_utils import redis_pagination_links from stac_fastapi.core.serializers import CollectionSerializer, ItemSerializer from stac_fastapi.core.session import Session -from stac_fastapi.core.utilities import filter_fields +from stac_fastapi.core.utilities import filter_fields, get_bool_env from stac_fastapi.extensions.core.transaction import AsyncBaseTransactionsClient from stac_fastapi.extensions.core.transaction.request import ( PartialCollection, @@ -262,6 +263,7 @@ async def all_collections( A Collections object containing all the collections in the database and links to various resources. """ base_url = str(request.base_url) + redis_enable = get_bool_env("REDIS_ENABLE", default=False) global_max_limit = ( int(os.getenv("STAC_GLOBAL_COLLECTION_MAX_LIMIT")) @@ -417,6 +419,14 @@ async def all_collections( }, ] + if redis_enable: + await redis_pagination_links( + current_url=str(request.url), + token=token, + next_token=next_token, + links=links, + ) + if next_token: next_link = PagingLinks(next=next_token, request=request).link_next() links.append(next_link) @@ -761,8 +771,8 @@ async def post_search( search_request.limit = limit base_url = str(request.base_url) - search = self.database.make_search() + redis_enable = get_bool_env("REDIS_ENABLE", default=False) if search_request.ids: search = self.database.apply_ids_filter( @@ -866,6 +876,34 @@ async def post_search( ] links = await PagingLinks(request=request, next=next_token).get_links() + collection_links = [] + # Add "collection" and "parent" rels only for /collections/{collection_id}/items + if search_request.collections and "/items" in str(request.url): + for collection_id in search_request.collections: + collection_links.extend( + [ + { + "rel": "collection", + "type": "application/json", + "href": urljoin(base_url, f"collections/{collection_id}"), + }, + { + "rel": "parent", + "type": "application/json", + "href": urljoin(base_url, f"collections/{collection_id}"), + }, + ] + ) + 
links.extend(collection_links) + + if redis_enable: + await redis_pagination_links( + current_url=str(request.url), + token=token_param, + next_token=next_token, + links=links, + ) + return stac_types.ItemCollection( type="FeatureCollection", features=items, diff --git a/stac_fastapi/core/stac_fastapi/core/redis_utils.py b/stac_fastapi/core/stac_fastapi/core/redis_utils.py new file mode 100644 index 00000000..105d854e --- /dev/null +++ b/stac_fastapi/core/stac_fastapi/core/redis_utils.py @@ -0,0 +1,301 @@ +"""Utilities for connecting to and managing Redis connections.""" + +import json +import logging +from typing import List, Optional, Tuple +from urllib.parse import parse_qs, urlencode, urlparse, urlunparse + +from pydantic import field_validator +from pydantic_settings import BaseSettings +from redis import asyncio as aioredis +from redis.asyncio.sentinel import Sentinel + +logger = logging.getLogger(__name__) + + +class RedisSentinelSettings(BaseSettings): + """Configuration for connecting to Redis Sentinel.""" + + REDIS_SENTINEL_HOSTS: str = "" + REDIS_SENTINEL_PORTS: str = "26379" + REDIS_SENTINEL_MASTER_NAME: str = "master" + REDIS_DB: int = 15 + + REDIS_MAX_CONNECTIONS: int = 10 + REDIS_RETRY_TIMEOUT: bool = True + REDIS_DECODE_RESPONSES: bool = True + REDIS_CLIENT_NAME: str = "stac-fastapi-app" + REDIS_HEALTH_CHECK_INTERVAL: int = 30 + REDIS_SELF_LINK_TTL: int = 1800 + + @field_validator("REDIS_DB") + @classmethod + def validate_db_sentinel(cls, v: int) -> int: + """Validate REDIS_DB is not negative integer.""" + if v < 0: + raise ValueError("REDIS_DB must be a positive integer") + return v + + @field_validator("REDIS_MAX_CONNECTIONS") + @classmethod + def validate_max_connections_sentinel(cls, v: int) -> int: + """Validate REDIS_MAX_CONNECTIONS is at least 1.""" + if v < 1: + raise ValueError("REDIS_MAX_CONNECTIONS must be at least 1") + return v + + @field_validator("REDIS_HEALTH_CHECK_INTERVAL") + @classmethod + def 
validate_health_check_interval_sentinel(cls, v: int) -> int: + """Validate REDIS_HEALTH_CHECK_INTERVAL is not negative integer.""" + if v < 0: + raise ValueError("REDIS_HEALTH_CHECK_INTERVAL must be a positive integer") + return v + + @field_validator("REDIS_SELF_LINK_TTL") + @classmethod + def validate_self_link_ttl_sentinel(cls, v: int) -> int: + """Validate REDIS_SELF_LINK_TTL is not a negative integer.""" + if v < 0: + raise ValueError("REDIS_SELF_LINK_TTL must be a positive integer") + return v + + def get_sentinel_hosts(self) -> List[str]: + """Parse Redis Sentinel hosts from string to list.""" + if not self.REDIS_SENTINEL_HOSTS: + return [] + + if self.REDIS_SENTINEL_HOSTS.strip().startswith("["): + return json.loads(self.REDIS_SENTINEL_HOSTS) + else: + return [ + h.strip() for h in self.REDIS_SENTINEL_HOSTS.split(",") if h.strip() + ] + + def get_sentinel_ports(self) -> List[int]: + """Parse Redis Sentinel ports from string to list of integers.""" + if not self.REDIS_SENTINEL_PORTS: + return [26379] + + if self.REDIS_SENTINEL_PORTS.strip().startswith("["): + return json.loads(self.REDIS_SENTINEL_PORTS) + else: + ports_str_list = [ + p.strip() for p in self.REDIS_SENTINEL_PORTS.split(",") if p.strip() + ] + return [int(port) for port in ports_str_list] + + def get_sentinel_nodes(self) -> List[Tuple[str, int]]: + """Get list of (host, port) tuples for Sentinel connection.""" + hosts = self.get_sentinel_hosts() + ports = self.get_sentinel_ports() + + if not hosts: + return [] + + if len(ports) == 1 and len(hosts) > 1: + ports = ports * len(hosts) + + if len(hosts) != len(ports): + raise ValueError( + f"Mismatch between hosts ({len(hosts)}) and ports ({len(ports)})" + ) + + return [(str(host), int(port)) for host, port in zip(hosts, ports)] + + +class RedisSettings(BaseSettings): + """Configuration for connecting Redis.""" + + REDIS_HOST: str = "" + REDIS_PORT: int = 6379 + REDIS_DB: int = 15 + + REDIS_MAX_CONNECTIONS: int = 10 + REDIS_RETRY_TIMEOUT: bool = 
True + REDIS_DECODE_RESPONSES: bool = True + REDIS_CLIENT_NAME: str = "stac-fastapi-app" + REDIS_HEALTH_CHECK_INTERVAL: int = 30 + REDIS_SELF_LINK_TTL: int = 1800 + + @field_validator("REDIS_PORT") + @classmethod + def validate_port_standalone(cls, v: int) -> int: + """Validate REDIS_PORT is not a negative integer.""" + if v < 0: + raise ValueError("REDIS_PORT must be a positive integer") + return v + + @field_validator("REDIS_DB") + @classmethod + def validate_db_standalone(cls, v: int) -> int: + """Validate REDIS_DB is not a negative integer.""" + if v < 0: + raise ValueError("REDIS_DB must be a positive integer") + return v + + @field_validator("REDIS_MAX_CONNECTIONS") + @classmethod + def validate_max_connections_standalone(cls, v: int) -> int: + """Validate REDIS_MAX_CONNECTIONS is at least 1.""" + if v < 1: + raise ValueError("REDIS_MAX_CONNECTIONS must be at least 1") + return v + + @field_validator("REDIS_HEALTH_CHECK_INTERVAL") + @classmethod + def validate_health_check_interval_standalone(cls, v: int) -> int: + """Validate REDIS_HEALTH_CHECK_INTERVAL is not a negative.""" + if v < 0: + raise ValueError("REDIS_HEALTH_CHECK_INTERVAL must be a positive integer") + return v + + @field_validator("REDIS_SELF_LINK_TTL") + @classmethod + def validate_self_link_ttl_standalone(cls, v: int) -> int: + """Validate REDIS_SELF_LINK_TTL is negative.""" + if v < 0: + raise ValueError("REDIS_SELF_LINK_TTL must be a positive integer") + return v + + +# Configure only one Redis configuration +sentinel_settings = RedisSentinelSettings() +standalone_settings = RedisSettings() + + +async def connect_redis() -> Optional[aioredis.Redis]: + """Return a Redis connection Redis or Redis Sentinel.""" + try: + if sentinel_settings.REDIS_SENTINEL_HOSTS: + sentinel_nodes = sentinel_settings.get_sentinel_nodes() + sentinel = Sentinel( + sentinel_nodes, + decode_responses=sentinel_settings.REDIS_DECODE_RESPONSES, + ) + + redis = sentinel.master_for( + 
service_name=sentinel_settings.REDIS_SENTINEL_MASTER_NAME, + db=sentinel_settings.REDIS_DB, + decode_responses=sentinel_settings.REDIS_DECODE_RESPONSES, + retry_on_timeout=sentinel_settings.REDIS_RETRY_TIMEOUT, + client_name=sentinel_settings.REDIS_CLIENT_NAME, + max_connections=sentinel_settings.REDIS_MAX_CONNECTIONS, + health_check_interval=sentinel_settings.REDIS_HEALTH_CHECK_INTERVAL, + ) + logger.info("Connected to Redis Sentinel") + + elif standalone_settings.REDIS_HOST: + pool = aioredis.ConnectionPool( + host=standalone_settings.REDIS_HOST, + port=standalone_settings.REDIS_PORT, + db=standalone_settings.REDIS_DB, + max_connections=standalone_settings.REDIS_MAX_CONNECTIONS, + decode_responses=standalone_settings.REDIS_DECODE_RESPONSES, + retry_on_timeout=standalone_settings.REDIS_RETRY_TIMEOUT, + health_check_interval=standalone_settings.REDIS_HEALTH_CHECK_INTERVAL, + ) + redis = aioredis.Redis( + connection_pool=pool, client_name=standalone_settings.REDIS_CLIENT_NAME + ) + logger.info("Connected to Redis") + else: + logger.warning("No Redis configuration found") + return None + + return redis + + except aioredis.ConnectionError as e: + logger.error(f"Redis connection error: {e}") + return None + except aioredis.AuthenticationError as e: + logger.error(f"Redis authentication error: {e}") + return None + except aioredis.TimeoutError as e: + logger.error(f"Redis timeout error: {e}") + return None + except Exception as e: + logger.error(f"Failed to connect to Redis: {e}") + return None + + +def get_redis_key(url: str, token: str) -> str: + """Create Redis key using URL path and token.""" + parsed = urlparse(url) + return f"nav:{parsed.path}:{token}" + + +def build_url_with_token(base_url: str, token: str) -> str: + """Build URL with token parameter.""" + parsed = urlparse(base_url) + query_params = parse_qs(parsed.query) + + query_params["token"] = [token] + + new_query = urlencode(query_params, doseq=True) + + return urlunparse( + ( + parsed.scheme, + 
parsed.netloc, + parsed.path, + parsed.params, + new_query, + parsed.fragment, + ) + ) + + +async def save_prev_link( + redis: aioredis.Redis, next_url: str, current_url: str, next_token: str +) -> None: + """Save the current page as the previous link for the next URL.""" + if next_url and next_token: + if sentinel_settings.REDIS_SENTINEL_HOSTS: + ttl_seconds = sentinel_settings.REDIS_SELF_LINK_TTL + elif standalone_settings.REDIS_HOST: + ttl_seconds = standalone_settings.REDIS_SELF_LINK_TTL + key = get_redis_key(next_url, next_token) + await redis.setex(key, ttl_seconds, current_url) + + +async def get_prev_link( + redis: aioredis.Redis, current_url: str, current_token: str +) -> Optional[str]: + """Get the previous page link for the current token.""" + if not current_url or not current_token: + return None + key = get_redis_key(current_url, current_token) + return await redis.get(key) + + +async def redis_pagination_links( + current_url: str, token: str, next_token: str, links: list +) -> None: + """Handle Redis pagination.""" + redis = await connect_redis() + if not redis: + logger.warning("Redis connection failed.") + return + + try: + if next_token: + next_url = build_url_with_token(current_url, next_token) + await save_prev_link(redis, next_url, current_url, next_token) + + if token: + prev_link = await get_prev_link(redis, current_url, token) + if prev_link: + links.insert( + 0, + { + "rel": "previous", + "type": "application/json", + "method": "GET", + "href": prev_link, + }, + ) + except Exception as e: + logger.warning(f"Redis pagination operation failed: {e}") + finally: + await redis.close() diff --git a/stac_fastapi/tests/redis/__init__.py b/stac_fastapi/tests/redis/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/stac_fastapi/tests/redis/test_redis_pagination.py b/stac_fastapi/tests/redis/test_redis_pagination.py new file mode 100644 index 00000000..64e869bc --- /dev/null +++ b/stac_fastapi/tests/redis/test_redis_pagination.py @@ 
-0,0 +1,71 @@ +import uuid + +import pytest + +from ..conftest import create_collection, create_item + + +@pytest.mark.asyncio +async def test_search_pagination_uses_redis_cache( + app_client, txn_client, load_test_data +): + """Test Redis caching and navigation for the /search endpoint.""" + + collection = load_test_data("test_collection.json") + collection_id = f"test-pagination-collection-{uuid.uuid4()}" + collection["id"] = collection_id + await create_collection(txn_client, collection) + + for i in range(5): + item = load_test_data("test_item.json") + item["id"] = f"test-pagination-item-{uuid.uuid4()}" + item["collection"] = collection_id + await create_item(txn_client, item) + + resp = await app_client.get(f"/collections/{collection_id}/items?limit=1") + resp_json = resp.json() + + next_link = next(link for link in resp_json["links"] if link["rel"] == "next") + next_url = next_link["href"] + + resp2 = await app_client.get(next_url) + resp2_json = resp2.json() + + prev_link = next( + (link for link in resp2_json["links"] if link["rel"] == "previous"), None + ) + assert prev_link is not None + + +@pytest.mark.asyncio +async def test_collections_pagination_uses_redis_cache( + app_client, txn_client, load_test_data +): + """Test Redis caching and navigation for the /collection endpoint.""" + + collection_data = load_test_data("test_collection.json") + for i in range(5): + collection = collection_data.copy() + collection["id"] = f"test-collection-pagination-{uuid.uuid4()}" + collection["title"] = f"Test Collection Pagination {i}" + await create_collection(txn_client, collection) + + resp = await app_client.get("/collections", params={"limit": 1}) + assert resp.status_code == 200 + resp1_json = resp.json() + + next_link = next( + (link for link in resp1_json["links"] if link["rel"] == "next"), None + ) + next_token = next_link["href"].split("token=")[1] + + resp2 = await app_client.get( + "/collections", params={"limit": 1, "token": next_token} + ) + assert 
resp2.status_code == 200 + resp2_json = resp2.json() + + prev_link = next( + (link for link in resp2_json["links"] if link["rel"] == "previous"), None + ) + assert prev_link is not None diff --git a/stac_fastapi/tests/redis/test_redis_utils.py b/stac_fastapi/tests/redis/test_redis_utils.py new file mode 100644 index 00000000..404f59a2 --- /dev/null +++ b/stac_fastapi/tests/redis/test_redis_utils.py @@ -0,0 +1,48 @@ +import pytest + +from stac_fastapi.core.redis_utils import connect_redis, get_prev_link, save_prev_link + + +@pytest.mark.asyncio +async def test_redis_connection(): + """Test Redis connection.""" + redis = await connect_redis() + + if redis is None: + pytest.skip("Redis not configured") + + await redis.set("string_key", "string_value") + string_value = await redis.get("string_key") + assert string_value == "string_value" + + exists = await redis.exists("string_key") + assert exists == 1 + + await redis.delete("string_key") + deleted_value = await redis.get("string_key") + assert deleted_value is None + + +@pytest.mark.asyncio +async def test_redis_utils_functions(): + redis = await connect_redis() + if redis is None: + pytest.skip("Redis not configured") + + token = "test_token_123" + current_url = "http://mywebsite.com/search" + next_url = "http://mywebsite.com/search?token=test_token_123" + + await save_prev_link(redis, next_url, current_url, token) + + retrieved_link = await get_prev_link(redis, next_url, token) + assert retrieved_link == current_url + + await save_prev_link(redis, None, "should_not_save", None) + null_result = await get_prev_link(redis, None, None) + assert null_result is None + + non_existent = await get_prev_link( + redis, "http://mywebsite.com/search", "non_existent_token" + ) + assert non_existent is None