diff --git a/.gitignore b/.gitignore index 3c73b414..addaa789 100644 --- a/.gitignore +++ b/.gitignore @@ -168,4 +168,5 @@ CLAUDE.local.md *.local.mdc # Exception: AI tool configs that should be tracked -!.gemini/settings.json \ No newline at end of file +!.gemini/settings.json +logs diff --git a/backend/AGENTS.md b/backend/AGENTS.md index 1aa0dbfe..bbdf1e38 100644 --- a/backend/AGENTS.md +++ b/backend/AGENTS.md @@ -230,6 +230,42 @@ uv run pytest -v --cov=app Use `uv add ` to add new dependencies (automatically updates pyproject.toml, lockfile, and venv). Run `uv run ruff check . --fix && uv run ruff format .` after making changes. +## Observability + +OpenTelemetry-based tracing and metrics. Auto-instruments HTTP requests, database queries, Redis, httpx, and Celery tasks. + +### Configuration + +```bash +OTEL_ENABLED=true # Enable/disable all observability +OTEL_EXPORTER_ENDPOINT=localhost:4317 # OTLP collector endpoint +OTEL_SERVICE_NAME=open-wearables-api # Service name in traces +``` + +### Running the Stack + +```bash +# Start observability services (Grafana, Tempo, Prometheus, Loki, OTEL Collector) +docker compose -f docker-compose.yml -f docker-compose.observability.yml up -d + +# View traces, metrics, and logs at http://localhost:3001 (Grafana) +``` + +### Recording Metrics + +```python +from app.integrations.observability import record_metric, record_histogram + +# Counter metrics (increment by 1 or custom value) +record_metric("oauth_attempts", labels={"provider": "garmin"}) +record_metric("workouts_synced", 5, {"provider": "polar"}) + +# Histogram metrics (durations, sizes, etc.) +record_histogram("provider_sync_duration", 2.5, {"provider": "garmin"}) +``` + +All helpers are no-ops when `OTEL_ENABLED=false`. 
+ ## Detailed Layer Rules ### Models Layer (`app/models/`) diff --git a/backend/app/api/routes/v1/oauth.py b/backend/app/api/routes/v1/oauth.py index 000a5ff8..5440b668 100644 --- a/backend/app/api/routes/v1/oauth.py +++ b/backend/app/api/routes/v1/oauth.py @@ -1,3 +1,4 @@ +from logging import getLogger from typing import Annotated from uuid import UUID @@ -6,6 +7,7 @@ from app.database import DbSession from app.integrations.celery.tasks import sync_vendor_data +from app.integrations.observability import record_metric from app.schemas import ( AuthorizationURLResponse, BulkProviderSettingsUpdate, @@ -18,6 +20,8 @@ from app.services.providers.base_strategy import BaseProviderStrategy from app.services.providers.factory import ProviderFactory +logger = getLogger(__name__) + router = APIRouter() factory = ProviderFactory() settings_service = ProviderSettingsService() @@ -46,6 +50,8 @@ async def authorize_provider( Returns authorization URL where user should be redirected to log in. """ + record_metric("oauth_attempts", labels={"provider": provider.value}) + strategy = get_oauth_strategy(provider) assert strategy.oauth @@ -68,12 +74,14 @@ async def oauth_callback( Provider redirects here after user authorizes. Exchanges code for tokens. 
""" if error: + record_metric("oauth_failures", labels={"provider": provider.value, "error": error}) return RedirectResponse( url=f"/api/v1/oauth/error?message={error}:+{error_description or 'Unknown+error'}", status_code=303, ) if not code or not state: + record_metric("oauth_failures", labels={"provider": provider.value, "error": "missing_params"}) return RedirectResponse( url="/api/v1/oauth/error?message=Missing+OAuth+parameters", status_code=303, @@ -81,26 +89,37 @@ async def oauth_callback( strategy = get_oauth_strategy(provider) - assert strategy.oauth - oauth_state = strategy.oauth.handle_callback(db, code, state) - - # schedule sync task - sync_vendor_data.delay( - user_id=str(oauth_state.user_id), - start_date=None, - end_date=None, - providers=[provider.value], - ) - - # If a specific redirect_uri was requested (e.g. by frontend), redirect there - if oauth_state.redirect_uri: - return RedirectResponse(url=oauth_state.redirect_uri, status_code=303) - - # Otherwise, redirect to internal success page - return RedirectResponse( - url=f"/api/v1/oauth/success?provider={provider.value}&user_id={oauth_state.user_id}", - status_code=303, - ) + try: + assert strategy.oauth + oauth_state = strategy.oauth.handle_callback(db, code, state) + + record_metric("oauth_successes", labels={"provider": provider.value}) + record_metric("provider_connections", labels={"provider": provider.value}) + + # schedule sync task + sync_vendor_data.delay( + user_id=str(oauth_state.user_id), + start_date=None, + end_date=None, + providers=[provider.value], + ) + + # If a specific redirect_uri was requested (e.g. 
by frontend), redirect there + if oauth_state.redirect_uri: + return RedirectResponse(url=oauth_state.redirect_uri, status_code=303) + + # Otherwise, redirect to internal success page + return RedirectResponse( + url=f"/api/v1/oauth/success?provider={provider.value}&user_id={oauth_state.user_id}", + status_code=303, + ) + except Exception as e: + logger.exception("OAuth callback failed", extra={"provider": provider.value}) + record_metric("oauth_failures", labels={"provider": provider.value, "error": type(e).__name__}) + return RedirectResponse( + url=f"/api/v1/oauth/error?message=OAuth+callback+failed:+{type(e).__name__}", + status_code=303, + ) @router.get("/success") diff --git a/backend/app/config.py b/backend/app/config.py index 6c9c5818..b0b9fde6 100644 --- a/backend/app/config.py +++ b/backend/app/config.py @@ -46,6 +46,13 @@ class Settings(BaseSettings): SENTRY_SAMPLES_RATE: float = 0.5 SENTRY_ENV: str | None = None + # OpenTelemetry + otel_enabled: bool = False + otel_service_name: str = "open-wearables-api" + otel_service_version: str = "1.0.0" + otel_exporter_endpoint: str = "otel-collector:4317" + otel_log_level: str = "INFO" + # AUTH SETTINGS secret_key: str algorithm: str = "HS256" diff --git a/backend/app/integrations/celery/core.py b/backend/app/integrations/celery/core.py index e6160daf..ade42450 100644 --- a/backend/app/integrations/celery/core.py +++ b/backend/app/integrations/celery/core.py @@ -1,41 +1,55 @@ -import logging -import sys -from logging import Formatter, StreamHandler, getLogger +import time +from logging import getLogger +from typing import Any from app.config import settings +from app.integrations.observability import ( + configure_logging, + record_task_completed, + record_task_failed, + record_task_started, +) +from app.integrations.observability.tracing import init_celery_tracing from celery import Celery, signals from celery import current_app as current_celery_app +logger = getLogger(__name__) + +_task_start_times: dict[str, 
float] = {} + @signals.setup_logging.connect -def setup_celery_logging(**kwargs) -> None: - """ - Configure Celery logging to use stdout instead of stderr. - - Some platforms convert stderr logs to level.error automatically, so we must use stdout - to ensure platforms correctly identify log levels from JSON structured logs. - - This signal is called when Celery sets up its logging configuration. - """ - # Get Celery's logger - celery_logger = getLogger("celery") - - # Remove existing handlers that might use stderr - celery_logger.handlers.clear() - - # Create a handler that uses stdout - stdout_handler = StreamHandler(sys.stdout) - stdout_handler.setFormatter( - Formatter( - "[%(asctime)s - %(name)s] (%(levelname)s) %(message)s", - datefmt="%Y-%m-%d %H:%M:%S", - ) - ) +def setup_celery_logging(**kwargs: Any) -> None: + """Configure Celery to use the application's structured logging.""" + configure_logging() + + +@signals.celeryd_after_setup.connect +def init_worker_tracing(sender: Any, instance: Any, **kwargs: Any) -> None: + """Initialize OpenTelemetry tracing in Celery workers.""" + init_celery_tracing() + + +@signals.task_prerun.connect +def task_prerun_handler(task_id: str, task: Any, **kwargs: Any) -> None: + """Record task start time for duration metrics.""" + _task_start_times[task_id] = time.time() + record_task_started(getattr(task, "name", "unknown")) + + +@signals.task_postrun.connect +def task_postrun_handler(task_id: str, task: Any, retval: Any, state: str, **kwargs: Any) -> None: + """Record task completion and duration metrics.""" + if task_id in _task_start_times: + duration = time.time() - _task_start_times.pop(task_id) + record_task_completed(getattr(task, "name", "unknown"), state, duration) + - # Add stdout handler to Celery logger - celery_logger.addHandler(stdout_handler) - celery_logger.setLevel(logging.INFO) - celery_logger.propagate = False +@signals.task_failure.connect +def task_failure_handler(task_id: str, task: Any, exception: 
Any = None, exception: Exception | None = None, sender: Any = None, **kwargs: Any) -> None: + """Record task failures in metrics (task_failure sends sender/exception, not task).""" + _task_start_times.pop(task_id, None) + record_task_failed(getattr(task if task is not None else sender, "name", "unknown"), type(exception).__name__ if exception is not None else "unknown") def create_celery() -> Celery: diff --git a/backend/app/integrations/celery/tasks/sync_vendor_data_task.py b/backend/app/integrations/celery/tasks/sync_vendor_data_task.py index 72ac039b..6511750e 100644 --- a/backend/app/integrations/celery/tasks/sync_vendor_data_task.py +++ b/backend/app/integrations/celery/tasks/sync_vendor_data_task.py @@ -1,10 +1,14 @@ +import time from contextlib import suppress from datetime import datetime, timedelta from logging import getLogger from typing import Any, cast from uuid import UUID +from opentelemetry import trace + from app.database import SessionLocal +from app.integrations.observability import record_histogram, record_metric from app.repositories.user_connection_repository import UserConnectionRepository from app.schemas import ProviderSyncResult, SyncVendorDataResult from app.services.providers.factory import ProviderFactory @@ -12,6 +16,7 @@ from celery import shared_task logger = getLogger(__name__) +tracer = trace.get_tracer(__name__) @shared_task @@ -33,130 +38,227 @@ def sync_vendor_data( Returns: dict with sync results per provider """ - factory = ProviderFactory() - user_connection_repo = UserConnectionRepository() - - try: - user_uuid = UUID(user_id) - except ValueError as e: - logger.error(f"[sync_vendor_data] Invalid user_id format: {user_id}") - return SyncVendorDataResult( - user_id=user_id, + with tracer.start_as_current_span("sync_vendor_data") as span: + span.set_attribute("user.id", user_id) + if start_date: + span.set_attribute("sync.start_date", start_date) + if end_date: + span.set_attribute("sync.end_date", end_date) + + factory = ProviderFactory() + user_connection_repo = UserConnectionRepository() + + try: + user_uuid = UUID(user_id) + except ValueError as e: + logger.error( + "Invalid user_id format", + 
extra={"user_id": user_id, "error": str(e)}, + ) + span.set_attribute("error", True) + span.set_attribute("error.type", "invalid_user_id") + return SyncVendorDataResult( + user_id=user_id, + start_date=start_date, + end_date=end_date, + errors={"user_id": f"Invalid UUID format: {str(e)}"}, + ).model_dump() + + result = SyncVendorDataResult( + user_id=user_uuid, start_date=start_date, end_date=end_date, - errors={"user_id": f"Invalid UUID format: {str(e)}"}, - ).model_dump() + ) - result = SyncVendorDataResult( - user_id=user_uuid, - start_date=start_date, - end_date=end_date, - ) + with SessionLocal() as db: + try: + connections = user_connection_repo.get_all_active_by_user(db, user_uuid) - with SessionLocal() as db: - try: - connections = user_connection_repo.get_all_active_by_user(db, user_uuid) + if providers: + connections = [c for c in connections if c.provider in providers] - if providers: - connections = [c for c in connections if c.provider in providers] + if not connections: + logger.info( + "No active connections found for user", + extra={"user_id": user_id}, + ) + result.message = "No active provider connections found" + span.set_attribute("sync.connections_count", 0) + return result.model_dump() + + span.set_attribute("sync.connections_count", len(connections)) + logger.info( + "Found active connections for user", + extra={"user_id": user_id, "connections_count": len(connections)}, + ) + + for connection in connections: + provider_name = connection.provider + _sync_single_provider( + span, + db, + user_uuid, + user_id, + provider_name, + connection, + factory, + user_connection_repo, + start_date, + end_date, + result, + ) - if not connections: - logger.info(f"[sync_vendor_data] No active connections found for user {user_id}") - result.message = "No active provider connections found" + span.set_attribute("sync.providers_synced", len(result.providers_synced)) + span.set_attribute("sync.errors_count", len(result.errors)) return result.model_dump() - 
logger.info( - f"[sync_vendor_data] Found {len(connections)} active connections for user {user_id}", - ) + except Exception as e: + log_and_capture_error( + e, + logger, + f"Error processing user {user_id}: {str(e)}", + extra={"user_id": user_id}, + ) + span.set_attribute("error", True) + span.record_exception(e) + result.errors["general"] = str(e) + return result.model_dump() - for connection in connections: - provider_name = connection.provider - logger.info(f"[sync_vendor_data] Syncing data from {provider_name} for user {user_id}") - - try: - strategy = factory.get_provider(provider_name) - provider_result = ProviderSyncResult(success=True, params={}) - - # Sync workouts - if strategy.workouts: - params = _build_sync_params(provider_name, start_date, end_date) - try: - success = strategy.workouts.load_data(db, user_uuid, **params) - provider_result.params["workouts"] = {"success": success, **params} - except Exception as e: - logger.warning(f"[sync_vendor_data] Workouts sync failed for {provider_name}: {e}") - provider_result.params["workouts"] = {"success": False, "error": str(e)} - - # Sync 247 data (sleep, recovery, activity) and SAVE to database - if hasattr(strategy, "data_247") and strategy.data_247: - # Determine if this is first sync (max timeframe) or subsequent sync - is_first_sync = connection.last_synced_at is None - - # Parse dates - start_dt = datetime.now() - timedelta(days=30) - end_dt = datetime.now() - - if start_date: - with suppress(ValueError): - start_dt = datetime.fromisoformat(start_date.replace("Z", "+00:00")) - if end_date: - with suppress(ValueError): - end_dt = datetime.fromisoformat(end_date.replace("Z", "+00:00")) - - try: - # Use load_and_save_all if available (saves data to DB) - # Otherwise fallback to load_all_247_data (just returns data) - provider_any = cast(Any, strategy.data_247) - if hasattr(provider_any, "load_and_save_all"): - results_247 = provider_any.load_and_save_all( - db, - user_uuid, - start_time=start_dt, - 
end_time=end_dt, - is_first_sync=is_first_sync, - ) - provider_result.params["data_247"] = {"success": True, "saved": True, **results_247} - else: - results_247 = strategy.data_247.load_all_247_data( - db, - user_uuid, - start_time=start_dt, - end_time=end_dt, - ) - provider_result.params["data_247"] = {"success": True, "saved": False, **results_247} - logger.info(f"[sync_vendor_data] 247 data synced for {provider_name}: {results_247}") - except Exception as e: - logger.warning(f"[sync_vendor_data] 247 data sync failed for {provider_name}: {e}") - provider_result.params["data_247"] = {"success": False, "error": str(e)} - - user_connection_repo.update_last_synced_at(db, connection) - - result.providers_synced[provider_name] = provider_result - logger.info( - f"[sync_vendor_data] Successfully synced {provider_name} for user {user_id}", - ) - except Exception as e: - log_and_capture_error( - e, - logger, - f"[sync_vendor_data] Error syncing {provider_name} for user {user_id}: {str(e)}", - extra={"user_id": user_id, "provider": provider_name}, - ) - result.errors[provider_name] = str(e) - continue +def _sync_single_provider( + parent_span: trace.Span, + db: Any, + user_uuid: UUID, + user_id: str, + provider_name: str, + connection: Any, + factory: ProviderFactory, + user_connection_repo: UserConnectionRepository, + start_date: str | None, + end_date: str | None, + result: SyncVendorDataResult, +) -> None: + """Sync data from a single provider with tracing and metrics.""" + with tracer.start_as_current_span(f"sync_provider.{provider_name}") as span: + span.set_attribute("provider.name", provider_name) + span.set_attribute("user.id", user_id) + + sync_start_time = time.time() + + logger.info( + "Syncing data from provider", + extra={"provider": provider_name, "user_id": user_id}, + ) - return result.model_dump() + try: + strategy = factory.get_provider(provider_name) + provider_result = ProviderSyncResult(success=True, params={}) + + # Sync workouts + if 
strategy.workouts: + with tracer.start_as_current_span("sync_workouts") as workout_span: + workout_span.set_attribute("provider.name", provider_name) + params = _build_sync_params(provider_name, start_date, end_date) + try: + success = strategy.workouts.load_data(db, user_uuid, **params) + provider_result.params["workouts"] = {"success": success, **params} + workout_span.set_attribute("sync.success", success) + if success: + record_metric("workouts_synced", labels={"provider": provider_name}) + except Exception as e: + logger.warning( + "Workouts sync failed", + extra={"provider": provider_name, "error": str(e)}, + ) + provider_result.params["workouts"] = {"success": False, "error": str(e)} + workout_span.set_attribute("error", True) + workout_span.record_exception(e) + + # Sync 247 data (sleep, recovery, activity) and SAVE to database + if hasattr(strategy, "data_247") and strategy.data_247: + with tracer.start_as_current_span("sync_247_data") as data_span: + data_span.set_attribute("provider.name", provider_name) + is_first_sync = connection.last_synced_at is None + data_span.set_attribute("sync.is_first_sync", is_first_sync) + + start_dt = datetime.now() - timedelta(days=30) + end_dt = datetime.now() + + if start_date: + with suppress(ValueError): + start_dt = datetime.fromisoformat(start_date.replace("Z", "+00:00")) + if end_date: + with suppress(ValueError): + end_dt = datetime.fromisoformat(end_date.replace("Z", "+00:00")) + + try: + provider_any = cast(Any, strategy.data_247) + if hasattr(provider_any, "load_and_save_all"): + results_247 = provider_any.load_and_save_all( + db, + user_uuid, + start_time=start_dt, + end_time=end_dt, + is_first_sync=is_first_sync, + ) + provider_result.params["data_247"] = {"success": True, "saved": True, **results_247} + data_span.set_attribute("sync.saved", True) + else: + results_247 = strategy.data_247.load_all_247_data( + db, + user_uuid, + start_time=start_dt, + end_time=end_dt, + ) + provider_result.params["data_247"] = 
{"success": True, "saved": False, **results_247} + data_span.set_attribute("sync.saved", False) + + logger.info( + "247 data synced", + extra={"provider": provider_name, "results": results_247}, + ) + record_metric("activities_synced", labels={"provider": provider_name}) + except Exception as e: + logger.warning( + "247 data sync failed", + extra={"provider": provider_name, "error": str(e)}, + ) + provider_result.params["data_247"] = {"success": False, "error": str(e)} + data_span.set_attribute("error", True) + data_span.record_exception(e) + + user_connection_repo.update_last_synced_at(db, connection) + + result.providers_synced[provider_name] = provider_result + span.set_attribute("sync.success", True) + + sync_duration = time.time() - sync_start_time + record_metric("provider_syncs", labels={"provider": provider_name, "status": "success"}) + record_histogram("provider_sync_duration", sync_duration, {"provider": provider_name}) + + logger.info( + "Successfully synced provider", + extra={ + "provider": provider_name, + "user_id": user_id, + "duration_seconds": round(sync_duration, 2), + }, + ) except Exception as e: log_and_capture_error( e, logger, - f"[sync_vendor_data] Error processing user {user_id}: {str(e)}", - extra={"user_id": user_id}, + f"Error syncing {provider_name} for user {user_id}: {str(e)}", + extra={"user_id": user_id, "provider": provider_name}, + ) + span.set_attribute("error", True) + span.record_exception(e) + result.errors[provider_name] = str(e) + record_metric( + "provider_sync_errors", + labels={"provider": provider_name, "error_type": type(e).__name__}, ) - result.errors["general"] = str(e) - return result.model_dump() def _build_sync_params(provider_name: str, start_date: str | None, end_date: str | None) -> dict[str, Any]: diff --git a/backend/app/integrations/observability/__init__.py b/backend/app/integrations/observability/__init__.py new file mode 100644 index 00000000..f2415a54 --- /dev/null +++ 
b/backend/app/integrations/observability/__init__.py @@ -0,0 +1,74 @@ +"""OpenTelemetry observability integration for traces, logs, and metrics. + +QUICK START: + from app.integrations.observability import ( + ensure_providers_initialized, + add_observability_middleware, + create_observed_lifespan, + ) + + ensure_providers_initialized() + api = FastAPI(lifespan=create_observed_lifespan(engine)) + add_observability_middleware(api) + +RECORDING METRICS: + from app.integrations.observability import record_metric, record_histogram + + record_metric("oauth_attempts", labels={"provider": "garmin"}) + record_histogram("sync_duration", 2.5, {"provider": "polar"}) + +INITIALIZATION ORDER (if not using simplified API): + 1. Call init_providers() BEFORE creating FastAPI app + 2. Call init_observability() during FastAPI lifespan startup +""" + +from app.integrations.observability.decorators import ( + record_histogram, + record_metric, + record_task_completed, + record_task_failed, + record_task_started, +) +from app.integrations.observability.logging import configure_logging +from app.integrations.observability.metrics import get_app_metrics, init_metrics +from app.integrations.observability.setup import ( + add_observability_middleware, + create_observed_lifespan, + ensure_providers_initialized, +) +from app.integrations.observability.tracing import get_tracer, init_providers, init_tracing + +__all__ = [ + # Simplified API (recommended) + "ensure_providers_initialized", + "add_observability_middleware", + "create_observed_lifespan", + # Metric helpers + "record_metric", + "record_histogram", + "record_task_started", + "record_task_completed", + "record_task_failed", + # Lower-level API + "configure_logging", + "init_tracing", + "init_metrics", + "init_providers", + "get_tracer", + "get_app_metrics", +] + + +def init_observability(fastapi_app: object, db_engine: object) -> None: + """Initialize observability instrumentations during app startup. 
+ + This should be called during the FastAPI lifespan startup. + Note: Providers should already be initialized via init_providers() or + ensure_providers_initialized() before the FastAPI app was created. + + Args: + fastapi_app: FastAPI application instance + db_engine: SQLAlchemy engine for database instrumentation + """ + configure_logging() + init_tracing(fastapi_app, db_engine) diff --git a/backend/app/integrations/observability/decorators.py b/backend/app/integrations/observability/decorators.py new file mode 100644 index 00000000..3c3b4b16 --- /dev/null +++ b/backend/app/integrations/observability/decorators.py @@ -0,0 +1,100 @@ +"""Helper functions for recording metrics. + +All functions are no-ops when OTEL_ENABLED=false. +""" + +from app.config import settings +from app.integrations.observability.metrics import get_app_metrics + + +def record_task_started(task_name: str) -> None: + """Record that a Celery task has started. No-op when OTEL disabled.""" + if not settings.otel_enabled: + return + metrics = get_app_metrics() + if metrics: + metrics.celery_tasks_started.add(1, {"task": task_name}) + + +def record_task_completed(task_name: str, state: str, duration: float) -> None: + """Record Celery task completion with duration. No-op when OTEL disabled.""" + if not settings.otel_enabled: + return + metrics = get_app_metrics() + if metrics: + metrics.celery_task_duration.record(duration, {"task": task_name}) + metrics.celery_tasks_completed.add(1, {"task": task_name, "state": state}) + + +def record_task_failed(task_name: str, error_type: str) -> None: + """Record a Celery task failure. No-op when OTEL disabled.""" + if not settings.otel_enabled: + return + metrics = get_app_metrics() + if metrics: + metrics.celery_tasks_failed.add(1, {"task": task_name, "error_type": error_type}) + + +def record_metric( + metric_name: str, + value: int | float = 1, + labels: dict[str, str] | None = None, +) -> None: + """Record a counter metric. No-op when OTEL disabled. 
+ + Args: + metric_name: Attribute name on AppMetrics (e.g., "oauth_attempts") + value: Value to add (default 1) + labels: Labels/attributes dict + + Example: + record_metric("oauth_attempts", labels={"provider": "garmin"}) + record_metric("workouts_synced", 5, {"provider": "polar"}) + """ + if not settings.otel_enabled: + return + + metrics = get_app_metrics() + if not metrics: + return + + metric = getattr(metrics, metric_name, None) + if metric is None: + return + + if labels: + metric.add(value, labels) + else: + metric.add(value) + + +def record_histogram( + metric_name: str, + value: float, + labels: dict[str, str] | None = None, +) -> None: + """Record a histogram metric (e.g., duration). No-op when OTEL disabled. + + Args: + metric_name: Attribute name on AppMetrics (e.g., "provider_sync_duration") + value: Value to record + labels: Labels/attributes dict + + Example: + record_histogram("provider_sync_duration", 2.5, {"provider": "garmin"}) + """ + if not settings.otel_enabled: + return + + metrics = get_app_metrics() + if not metrics: + return + + metric = getattr(metrics, metric_name, None) + if metric is None: + return + + if labels: + metric.record(value, labels) + else: + metric.record(value) diff --git a/backend/app/integrations/observability/logging.py b/backend/app/integrations/observability/logging.py new file mode 100644 index 00000000..ac0c7a02 --- /dev/null +++ b/backend/app/integrations/observability/logging.py @@ -0,0 +1,190 @@ +"""OpenTelemetry-compliant structured logging with trace correlation.""" + +import logging +import sys +import traceback +from typing import Any + +from opentelemetry import trace +from opentelemetry._logs import set_logger_provider +from opentelemetry.exporter.otlp.proto.grpc._log_exporter import OTLPLogExporter +from opentelemetry.sdk._logs import LoggerProvider, LoggingHandler +from opentelemetry.sdk._logs.export import BatchLogRecordProcessor +from opentelemetry.sdk.resources import Resource +from pythonjsonlogger 
import jsonlogger + +from app.config import settings + +# Mapping Python log levels to OTel severity numbers (1-24 scale) +# See: https://opentelemetry.io/docs/specs/otel/logs/data-model/#severity-fields +SEVERITY_NUMBER_MAP = { + logging.DEBUG: 5, # DEBUG + logging.INFO: 9, # INFO + logging.WARNING: 13, # WARN + logging.ERROR: 17, # ERROR + logging.CRITICAL: 21, # FATAL +} + + +class OTelStructuredFormatter(jsonlogger.JsonFormatter): + """JSON formatter following OpenTelemetry semantic conventions. + + Produces logs compatible with: + - OpenTelemetry Log Data Model + - Loki/Grafana ingestion + - Trace correlation (trace_id, span_id) + """ + + def add_fields( + self, + log_record: dict[str, Any], + record: logging.LogRecord, + message_dict: dict[str, Any], + ) -> None: + super().add_fields(log_record, record, message_dict) + + # Timestamp (ISO 8601 format) + log_record["timestamp"] = self.formatTime(record, self.datefmt) + + # Severity per OTel spec (both number and text) + log_record["severity_number"] = SEVERITY_NUMBER_MAP.get(record.levelno, 9) + log_record["severity_text"] = record.levelname + + # Body (the actual log message) + log_record["body"] = record.getMessage() + + # Resource attributes (service identification) + log_record["resource"] = { + "service.name": settings.otel_service_name, + "service.version": settings.otel_service_version, + "deployment.environment": settings.environment.value, + } + + # Code location attributes (OTel semantic conventions) + log_record["attributes"] = { + "code.filepath": record.pathname, + "code.function": record.funcName, + "code.lineno": record.lineno, + "code.namespace": record.name, + } + + # Trace context correlation (if available) + span = trace.get_current_span() + if span and span.is_recording(): + ctx = span.get_span_context() + if ctx.is_valid: + log_record["trace_id"] = format(ctx.trace_id, "032x") + log_record["span_id"] = format(ctx.span_id, "016x") + log_record["trace_flags"] = ctx.trace_flags + + # Exception 
attributes (OTel semantic conventions) + if record.exc_info and record.exc_info[0] is not None: + exc_type, exc_value, exc_tb = record.exc_info + log_record["attributes"].update( + { + "exception.type": exc_type.__name__ if exc_type else "Unknown", + "exception.message": str(exc_value) if exc_value else "", + "exception.stacktrace": "".join(traceback.format_exception(exc_type, exc_value, exc_tb)), + "exception.escaped": False, + } + ) + + # Merge extra attributes from log call (e.g., logger.info("msg", extra={...})) + reserved_keys = { + "message", + "asctime", + "args", + "msg", + "exc_info", + "exc_text", + "levelname", + "levelno", + "name", + "pathname", + "lineno", + "funcName", + "created", + "msecs", + "relativeCreated", + "thread", + "threadName", + "processName", + "process", + "stack_info", + "taskName", + } + for key, value in record.__dict__.items(): + if key not in reserved_keys and not key.startswith("_"): + log_record["attributes"][key] = value + + # Clean up redundant fields from base formatter + for key in ["levelname", "levelno", "name", "pathname", "lineno", "funcName", "message", "asctime"]: + log_record.pop(key, None) + + +def configure_logging() -> None: + """Configure structured logging with OpenTelemetry correlation. 
+ + This sets up: + - JSON-formatted logs to stdout (for container environments) + - Trace ID/Span ID correlation in every log entry + - OTel semantic conventions for log attributes + - OTLP log export (when otel_enabled=True) + """ + # Determine log level + log_level = getattr(logging, settings.otel_log_level.upper(), logging.INFO) + + # Create JSON formatter + formatter = OTelStructuredFormatter( + fmt="%(timestamp)s %(severity_text)s %(name)s %(body)s", + datefmt="%Y-%m-%dT%H:%M:%S.%fZ", + ) + + # Console handler (stdout for container log aggregation) + console_handler = logging.StreamHandler(sys.stdout) + console_handler.setFormatter(formatter) + console_handler.setLevel(log_level) + + # Configure root logger + root_logger = logging.getLogger() + root_logger.setLevel(log_level) + root_logger.handlers.clear() + root_logger.addHandler(console_handler) + + # Reduce noise from verbose libraries + logging.getLogger("httpx").setLevel(logging.WARNING) + logging.getLogger("httpcore").setLevel(logging.WARNING) + logging.getLogger("sqlalchemy.engine").setLevel(logging.WARNING) + logging.getLogger("celery").setLevel(logging.INFO) + logging.getLogger("opentelemetry").setLevel(logging.WARNING) + + # OTLP log export (when enabled) + if settings.otel_enabled: + resource = Resource.create( + { + "service.name": settings.otel_service_name, + "service.version": settings.otel_service_version, + "deployment.environment": settings.environment.value, + } + ) + + logger_provider = LoggerProvider(resource=resource) + logger_provider.add_log_record_processor( + BatchLogRecordProcessor( + OTLPLogExporter( + endpoint=settings.otel_exporter_endpoint, + insecure=True, + ) + ) + ) + set_logger_provider(logger_provider) + + # Add OTLP handler + otel_handler = LoggingHandler(logger_provider=logger_provider) + otel_handler.setLevel(log_level) + root_logger.addHandler(otel_handler) + + logging.getLogger(__name__).info( + "Structured logging configured", + extra={"otel_enabled": 
settings.otel_enabled}, + ) diff --git a/backend/app/integrations/observability/metrics.py b/backend/app/integrations/observability/metrics.py new file mode 100644 index 00000000..310eb214 --- /dev/null +++ b/backend/app/integrations/observability/metrics.py @@ -0,0 +1,186 @@ +"""OpenTelemetry metrics for application monitoring.""" + +from logging import getLogger + +from opentelemetry import metrics +from opentelemetry.exporter.otlp.proto.grpc.metric_exporter import OTLPMetricExporter +from opentelemetry.sdk.metrics import MeterProvider +from opentelemetry.sdk.metrics.export import PeriodicExportingMetricReader +from opentelemetry.sdk.resources import Resource + +from app.config import settings + +logger = getLogger(__name__) + + +def init_metrics() -> None: + """Initialize OpenTelemetry metrics with OTLP export.""" + global _app_metrics + + if not settings.otel_enabled: + logger.info("OpenTelemetry metrics disabled") + return + + logger.info(f"Initializing OpenTelemetry metrics for {settings.otel_service_name}") + + resource = Resource.create( + { + "service.name": settings.otel_service_name, + "service.version": settings.otel_service_version, + "deployment.environment": settings.environment.value, + } + ) + + reader = PeriodicExportingMetricReader( + OTLPMetricExporter( + endpoint=settings.otel_exporter_endpoint, + insecure=True, + ), + export_interval_millis=15000, # Export every 15 seconds + ) + + provider = MeterProvider(resource=resource, metric_readers=[reader]) + metrics.set_meter_provider(provider) + + # Initialize AppMetrics after provider is set + _app_metrics = AppMetrics() + + logger.info("OpenTelemetry metrics initialized") + + +class AppMetrics: + """Application-specific metrics for Open Wearables. 
+ + Usage: + from app.integrations.observability import app_metrics + + # Count user creation + app_metrics.users_created.add(1) + + # Record sync duration + with app_metrics.record_sync_duration("garmin"): + perform_sync() + + # Track provider errors + app_metrics.provider_sync_errors.add(1, {"provider": "garmin", "error_type": "api_error"}) + """ + + def __init__(self) -> None: + meter = metrics.get_meter(__name__) + + # User metrics + self.users_created = meter.create_counter( + name="app.users.created", + description="Total number of users created", + unit="1", + ) + + # Provider sync metrics + self.provider_syncs = meter.create_counter( + name="app.provider.syncs", + description="Total provider sync operations", + unit="1", + ) + + self.provider_sync_errors = meter.create_counter( + name="app.provider.sync_errors", + description="Provider sync failures", + unit="1", + ) + + self.provider_sync_duration = meter.create_histogram( + name="app.provider.sync_duration", + description="Time to sync provider data", + unit="s", + ) + + # Data metrics + self.workouts_synced = meter.create_counter( + name="app.workouts.synced", + description="Total workouts synced from providers", + unit="1", + ) + + self.activities_synced = meter.create_counter( + name="app.activities.synced", + description="Total activities synced from providers", + unit="1", + ) + + # Connection metrics + self.provider_connections = meter.create_up_down_counter( + name="app.provider.connections", + description="Active provider connections", + unit="1", + ) + + # OAuth metrics + self.oauth_attempts = meter.create_counter( + name="app.oauth.attempts", + description="OAuth flow attempts", + unit="1", + ) + + self.oauth_successes = meter.create_counter( + name="app.oauth.successes", + description="Successful OAuth completions", + unit="1", + ) + + self.oauth_failures = meter.create_counter( + name="app.oauth.failures", + description="Failed OAuth attempts", + unit="1", + ) + + # API metrics + 
self.api_requests = meter.create_counter( + name="app.api.requests", + description="Total API requests", + unit="1", + ) + + # Celery task metrics + self.celery_tasks_started = meter.create_counter( + name="app.celery.tasks_started", + description="Celery tasks started", + unit="1", + ) + + self.celery_tasks_completed = meter.create_counter( + name="app.celery.tasks_completed", + description="Celery tasks completed successfully", + unit="1", + ) + + self.celery_tasks_failed = meter.create_counter( + name="app.celery.tasks_failed", + description="Celery tasks that failed", + unit="1", + ) + + self.celery_task_duration = meter.create_histogram( + name="app.celery.task_duration", + description="Celery task execution duration", + unit="s", + ) + + +# Global metrics instance - intentionally a singleton to avoid creating duplicate meters. +# Initialized by init_metrics() when OTEL is enabled, otherwise remains None. +_app_metrics: AppMetrics | None = None + + +def get_app_metrics() -> AppMetrics | None: + """Get the global AppMetrics instance. + + Returns None when OTEL_ENABLED=false or before initialization. + This allows callers to gracefully skip metric recording: + + metrics = get_app_metrics() + if metrics: + metrics.oauth_attempts.add(1, {"provider": "garmin"}) + + For simpler inline usage, prefer record_metric() from decorators module. + """ + return _app_metrics diff --git a/backend/app/integrations/observability/setup.py b/backend/app/integrations/observability/setup.py new file mode 100644 index 00000000..e73220b3 --- /dev/null +++ b/backend/app/integrations/observability/setup.py @@ -0,0 +1,93 @@ +"""Unified observability setup for FastAPI applications. + +This module provides simplified APIs for setting up observability, +hiding the complexity of initialization order and provider configuration. 
+""" + +from collections.abc import Callable +from contextlib import asynccontextmanager +from typing import AsyncIterator + +from fastapi import FastAPI + +from app.config import settings +from app.integrations.observability.logging import configure_logging +from app.integrations.observability.tracing import init_providers, init_tracing + + +def ensure_providers_initialized() -> None: + """Ensure OpenTelemetry providers are initialized. + + Safe to call multiple times - will only initialize once. + Must be called BEFORE FastAPI app creation to ensure middleware + can access the correct providers. + + Example: + ensure_providers_initialized() + api = FastAPI(...) + """ + init_providers() + + +def add_observability_middleware(app: FastAPI) -> None: + """Add OpenTelemetry ASGI middleware to trace HTTP requests. + + Must be called AFTER app creation but BEFORE the app starts. + Typically called at module level right after FastAPI() instantiation. + + Args: + app: FastAPI application instance + """ + if not settings.otel_enabled: + return + + from opentelemetry import metrics, trace + from opentelemetry.instrumentation.asgi import OpenTelemetryMiddleware + + app.add_middleware( + OpenTelemetryMiddleware, + tracer_provider=trace.get_tracer_provider(), + meter_provider=metrics.get_meter_provider(), + ) + + +def create_observed_lifespan( + engine: object, + additional_startup: Callable[[], None] | None = None, + additional_shutdown: Callable[[], None] | None = None, +) -> Callable[[FastAPI], AsyncIterator[None]]: + """Create a lifespan context manager with observability auto-instrumentation. + + Initializes SQLAlchemy, Redis, httpx, and Celery instrumentation on startup. 
+ + Args: + engine: SQLAlchemy engine for database instrumentation + additional_startup: Optional callback to run after observability setup + additional_shutdown: Optional callback to run before shutdown + + Returns: + Async context manager suitable for FastAPI lifespan parameter + + Example: + from app.integrations.observability import ( + ensure_providers_initialized, + add_observability_middleware, + create_observed_lifespan, + ) + + ensure_providers_initialized() + api = FastAPI(lifespan=create_observed_lifespan(engine, init_sentry)) + add_observability_middleware(api) + """ + + @asynccontextmanager + async def lifespan(app: FastAPI) -> AsyncIterator[None]: + configure_logging() + init_tracing(app, engine) + if additional_startup: + additional_startup() + yield + if additional_shutdown: + additional_shutdown() + + return lifespan diff --git a/backend/app/integrations/observability/tracing.py b/backend/app/integrations/observability/tracing.py new file mode 100644 index 00000000..8485128b --- /dev/null +++ b/backend/app/integrations/observability/tracing.py @@ -0,0 +1,169 @@ +"""OpenTelemetry tracing configuration and instrumentation.""" + +from logging import getLogger + +from opentelemetry import metrics, trace +from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import OTLPSpanExporter +from opentelemetry.instrumentation.celery import CeleryInstrumentor +from opentelemetry.instrumentation.httpx import HTTPXClientInstrumentor +from opentelemetry.instrumentation.redis import RedisInstrumentor +from opentelemetry.instrumentation.sqlalchemy import SQLAlchemyInstrumentor +from opentelemetry.sdk.resources import Resource +from opentelemetry.sdk.trace import TracerProvider +from opentelemetry.sdk.trace.export import BatchSpanProcessor + +from app.config import settings + +logger = getLogger(__name__) + +_tracer: trace.Tracer | None = None +_providers_initialized = False + + +def init_providers() -> None: + """Initialize OpenTelemetry trace and metric 
providers. + + TIMING: Must be called BEFORE FastAPI app creation. + + The reason is that FastAPI's add_middleware() captures provider references + at call time, not when middleware is instantiated. If providers aren't set + up yet, the ASGI middleware receives None/proxy references and won't create + traces or metrics for HTTP requests. + + Safe to call multiple times - will only initialize once. + """ + global _providers_initialized + + if _providers_initialized: + return + + if not settings.otel_enabled: + logger.info("OpenTelemetry disabled") + return + + logger.info(f"Initializing OpenTelemetry providers for {settings.otel_service_name}") + + # Create resource with service information + resource = Resource.create( + { + "service.name": settings.otel_service_name, + "service.version": settings.otel_service_version, + "deployment.environment": settings.environment.value, + } + ) + + # Configure trace provider with OTLP exporter + tracer_provider = TracerProvider(resource=resource) + processor = BatchSpanProcessor( + OTLPSpanExporter( + endpoint=settings.otel_exporter_endpoint, + insecure=True, + ) + ) + tracer_provider.add_span_processor(processor) + trace.set_tracer_provider(tracer_provider) + + # Initialize metrics (this sets the global MeterProvider) + from app.integrations.observability.metrics import init_metrics + + init_metrics() + + _providers_initialized = True + logger.info("OpenTelemetry providers initialized") + + +def get_tracer(name: str | None = None) -> trace.Tracer: + """Get a tracer instance for creating custom spans. + + Args: + name: Optional tracer name, defaults to module name + + Returns: + OpenTelemetry Tracer instance + """ + return trace.get_tracer(name or __name__) + + +def init_tracing(fastapi_app: object, db_engine: object) -> None: + """Initialize OpenTelemetry auto-instrumentations. + + Note: Providers should already be initialized via init_providers() before + the FastAPI app is created. 
This function sets up the library instrumentations. + + Args: + fastapi_app: FastAPI application instance + db_engine: SQLAlchemy engine for database instrumentation + """ + if not settings.otel_enabled: + logger.info("OpenTelemetry tracing disabled") + return + + logger.info("Initializing OpenTelemetry library instrumentations") + + # Auto-instrument SQLAlchemy + SQLAlchemyInstrumentor().instrument(engine=db_engine) # type: ignore[arg-type] + logger.debug("SQLAlchemy instrumentation enabled") + + # Auto-instrument Redis + RedisInstrumentor().instrument() + logger.debug("Redis instrumentation enabled") + + # Auto-instrument httpx (used for provider API calls) + HTTPXClientInstrumentor().instrument() + logger.debug("HTTPX instrumentation enabled") + + # Celery instrumentation is handled separately in celery/core.py + # to ensure it's initialized in worker processes + + logger.info("OpenTelemetry tracing initialized successfully") + + +def init_celery_tracing() -> None: + """Initialize Celery-specific tracing and metrics for worker processes.""" + if not settings.otel_enabled: + return + + from opentelemetry.exporter.otlp.proto.grpc.metric_exporter import OTLPMetricExporter + from opentelemetry.sdk.metrics import MeterProvider + from opentelemetry.sdk.metrics.export import PeriodicExportingMetricReader + + import app.integrations.observability.metrics as metrics_module + from app.integrations.observability.metrics import AppMetrics + + # Create resource with service information for workers + resource = Resource.create( + { + "service.name": f"{settings.otel_service_name}-worker", + "service.version": settings.otel_service_version, + "deployment.environment": settings.environment.value, + } + ) + + # Configure trace provider + provider = TracerProvider(resource=resource) + processor = BatchSpanProcessor( + OTLPSpanExporter( + endpoint=settings.otel_exporter_endpoint, + insecure=True, + ) + ) + provider.add_span_processor(processor) + 
trace.set_tracer_provider(provider) + + # Configure metrics provider for workers + metric_reader = PeriodicExportingMetricReader( + OTLPMetricExporter( + endpoint=settings.otel_exporter_endpoint, + insecure=True, + ), + export_interval_millis=15000, + ) + metric_provider = MeterProvider(resource=resource, metric_readers=[metric_reader]) + metrics.set_meter_provider(metric_provider) + + # Initialize AppMetrics in the worker + metrics_module._app_metrics = AppMetrics() + + # Instrument Celery + CeleryInstrumentor().instrument() + logger.info("Celery tracing and metrics instrumentation enabled") diff --git a/backend/app/main.py b/backend/app/main.py index 2dadbbdb..d78735b9 100644 --- a/backend/app/main.py +++ b/backend/app/main.py @@ -1,5 +1,3 @@ -import sys -from logging import INFO, StreamHandler, basicConfig from pathlib import Path from fastapi import FastAPI, Request @@ -8,24 +6,23 @@ from app.api import head_router from app.config import settings +from app.database import engine from app.integrations.celery import create_celery +from app.integrations.observability import ( + add_observability_middleware, + create_observed_lifespan, + ensure_providers_initialized, +) from app.integrations.sentry import init_sentry from app.middlewares import add_cors_middleware from app.utils.exceptions import DatetimeParseError, handle_exception -# Configure logging to use stdout instead of stderr -# Some platforms convert stderr logs to level.error automatically, so we must use stdout -# This ensures platforms correctly identify log levels from JSON structured logs -basicConfig( - level=INFO, - format="[%(asctime)s - %(name)s] (%(levelname)s) %(message)s", - handlers=[StreamHandler(sys.stdout)], -) +ensure_providers_initialized() -api = FastAPI(title=settings.api_name) +api = FastAPI(title=settings.api_name, lifespan=create_observed_lifespan(engine, init_sentry)) celery_app = create_celery() -init_sentry() +add_observability_middleware(api) add_cors_middleware(api) # Mount 
static files for provider icons diff --git a/backend/app/services/user_service.py b/backend/app/services/user_service.py index 5d051287..49039098 100644 --- a/backend/app/services/user_service.py +++ b/backend/app/services/user_service.py @@ -27,9 +27,15 @@ def get_count_in_range(self, db_session: DbSession, start_date: datetime, end_da def create(self, db_session: DbSession, creator: UserCreate) -> User: """Create a user with server-generated id and created_at.""" + from app.integrations.observability import record_metric + creation_data = creator.model_dump() internal_creator = UserCreateInternal(**creation_data) - return super().create(db_session, internal_creator) + user = super().create(db_session, internal_creator) + + record_metric("users_created") + + return user def update( self, diff --git a/backend/config/.env.example b/backend/config/.env.example index dce047f7..30b6adff 100644 --- a/backend/config/.env.example +++ b/backend/config/.env.example @@ -31,6 +31,13 @@ SENTRY_DSN="" SENTRY_ENV=production SENTRY_SAMPLES_RATE=0.5 +#--- OPENTELEMETRY ---# +OTEL_ENABLED=False +OTEL_SERVICE_NAME=open-wearables-api +OTEL_SERVICE_VERSION=1.0.0 +OTEL_EXPORTER_ENDPOINT=otel-collector:4317 +OTEL_LOG_LEVEL=INFO + #--- AUTH ---# # python3 -c "import secrets; print(secrets.token_urlsafe(64))" SECRET_KEY=secret-key-str diff --git a/backend/pyproject.toml b/backend/pyproject.toml index 4d3b2d8b..e2cbab65 100644 --- a/backend/pyproject.toml +++ b/backend/pyproject.toml @@ -25,6 +25,17 @@ dependencies = [ "isodate>=0.7.2", "resend>=2.0.0", "pyyaml>=6.0.3", + # OpenTelemetry + "opentelemetry-api>=1.25.0", + "opentelemetry-sdk>=1.25.0", + "opentelemetry-exporter-otlp>=1.25.0", + "opentelemetry-instrumentation-fastapi>=0.46b0", + "opentelemetry-instrumentation-sqlalchemy>=0.46b0", + "opentelemetry-instrumentation-celery>=0.46b0", + "opentelemetry-instrumentation-redis>=0.46b0", + "opentelemetry-instrumentation-httpx>=0.46b0", + "opentelemetry-instrumentation-logging>=0.46b0", + 
"python-json-logger>=2.0.7", ] [dependency-groups] diff --git a/backend/scripts/start/app.sh b/backend/scripts/start/app.sh index c0c06a74..ad2c7033 100755 --- a/backend/scripts/start/app.sh +++ b/backend/scripts/start/app.sh @@ -12,7 +12,9 @@ uv run python scripts/init_provider_settings.py # Init app echo "Starting the FastAPI application..." if [ "$ENVIRONMENT" = "local" ]; then - uv run fastapi dev app/main.py --host 0.0.0.0 --port 8000 + # Use uvicorn directly without auto-reload to avoid issues with + # OpenTelemetry metrics export in forked processes + uv run uvicorn app.main:api --host 0.0.0.0 --port 8000 else uv run fastapi run app/main.py --host 0.0.0.0 --port 8000 fi diff --git a/backend/uv.lock b/backend/uv.lock index 669eba59..6d8d62e0 100644 --- a/backend/uv.lock +++ b/backend/uv.lock @@ -58,6 +58,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/7f/9c/36c5c37947ebfb8c7f22e0eb6e4d188ee2d53aa3880f3f2744fb894f0cb1/anyio-4.12.0-py3-none-any.whl", hash = "sha256:dad2376a628f98eeca4881fc56cd06affd18f659b17a747d3ff0307ced94b1bb", size = 113362, upload-time = "2025-11-28T23:36:57.897Z" }, ] +[[package]] +name = "asgiref" +version = "3.11.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/76/b9/4db2509eabd14b4a8c71d1b24c8d5734c52b8560a7b1e1a8b56c8d25568b/asgiref-3.11.0.tar.gz", hash = "sha256:13acff32519542a1736223fb79a715acdebe24286d98e8b164a73085f40da2c4", size = 37969, upload-time = "2025-11-19T15:32:20.106Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/91/be/317c2c55b8bbec407257d45f5c8d1b6867abc76d12043f2d3d58c538a4ea/asgiref-3.11.0-py3-none-any.whl", hash = "sha256:1db9021efadb0d9512ce8ffaf72fcef601c7b73a8807a1bb2ef143dc6b14846d", size = 24096, upload-time = "2025-11-19T15:32:19.004Z" }, +] + [[package]] name = "bcrypt" version = "5.0.0" @@ -603,6 +612,18 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/5e/2e/b41d8a1a917d6581fc27a35d05561037b048e47df50f27f8ac9c7e27a710/freezegun-1.5.5-py3-none-any.whl", hash = "sha256:cd557f4a75cf074e84bc374249b9dd491eaeacd61376b9eb3c423282211619d2", size = 19266, upload-time = "2025-08-09T10:39:06.636Z" }, ] +[[package]] +name = "googleapis-common-protos" +version = "1.72.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "protobuf" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e5/7b/adfd75544c415c487b33061fe7ae526165241c1ea133f9a9125a56b39fd8/googleapis_common_protos-1.72.0.tar.gz", hash = "sha256:e55a601c1b32b52d7a3e65f43563e2aa61bcd737998ee672ac9b951cd49319f5", size = 147433, upload-time = "2025-11-06T18:29:24.087Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c4/ab/09169d5a4612a5f92490806649ac8d41e3ec9129c636754575b3553f4ea4/googleapis_common_protos-1.72.0-py3-none-any.whl", hash = "sha256:4299c5a82d5ae1a9702ada957347726b167f9f8d1fc352477702a1e851ff4038", size = 297515, upload-time = "2025-11-06T18:29:13.14Z" }, +] + [[package]] name = "greenlet" version = "3.2.4" @@ -631,6 +652,37 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/e3/a5/6ddab2b4c112be95601c13428db1d8b6608a8b6039816f2ba09c346c08fc/greenlet-3.2.4-cp314-cp314-win_amd64.whl", hash = "sha256:e37ab26028f12dbb0ff65f29a8d3d44a765c61e729647bf2ddfbbed621726f01", size = 303425, upload-time = "2025-08-07T13:32:27.59Z" }, ] +[[package]] +name = "grpcio" +version = "1.76.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b6/e0/318c1ce3ae5a17894d5791e87aea147587c9e702f24122cc7a5c8bbaeeb1/grpcio-1.76.0.tar.gz", hash = "sha256:7be78388d6da1a25c0d5ec506523db58b18be22d9c37d8d3a32c08be4987bd73", size = 12785182, upload-time = "2025-10-21T16:23:12.106Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/fc/ed/71467ab770effc9e8cef5f2e7388beb2be26ed642d567697bb103a790c72/grpcio-1.76.0-cp313-cp313-linux_armv7l.whl", hash = "sha256:26ef06c73eb53267c2b319f43e6634c7556ea37672029241a056629af27c10e2", size = 5807716, upload-time = "2025-10-21T16:21:48.475Z" }, + { url = "https://files.pythonhosted.org/packages/2c/85/c6ed56f9817fab03fa8a111ca91469941fb514e3e3ce6d793cb8f1e1347b/grpcio-1.76.0-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:45e0111e73f43f735d70786557dc38141185072d7ff8dc1829d6a77ac1471468", size = 11821522, upload-time = "2025-10-21T16:21:51.142Z" }, + { url = "https://files.pythonhosted.org/packages/ac/31/2b8a235ab40c39cbc141ef647f8a6eb7b0028f023015a4842933bc0d6831/grpcio-1.76.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:83d57312a58dcfe2a3a0f9d1389b299438909a02db60e2f2ea2ae2d8034909d3", size = 6362558, upload-time = "2025-10-21T16:21:54.213Z" }, + { url = "https://files.pythonhosted.org/packages/bd/64/9784eab483358e08847498ee56faf8ff6ea8e0a4592568d9f68edc97e9e9/grpcio-1.76.0-cp313-cp313-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:3e2a27c89eb9ac3d81ec8835e12414d73536c6e620355d65102503064a4ed6eb", size = 7049990, upload-time = "2025-10-21T16:21:56.476Z" }, + { url = "https://files.pythonhosted.org/packages/2b/94/8c12319a6369434e7a184b987e8e9f3b49a114c489b8315f029e24de4837/grpcio-1.76.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:61f69297cba3950a524f61c7c8ee12e55c486cb5f7db47ff9dcee33da6f0d3ae", size = 6575387, upload-time = "2025-10-21T16:21:59.051Z" }, + { url = "https://files.pythonhosted.org/packages/15/0f/f12c32b03f731f4a6242f771f63039df182c8b8e2cf8075b245b409259d4/grpcio-1.76.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6a15c17af8839b6801d554263c546c69c4d7718ad4321e3166175b37eaacca77", size = 7166668, upload-time = "2025-10-21T16:22:02.049Z" }, + { url = 
"https://files.pythonhosted.org/packages/ff/2d/3ec9ce0c2b1d92dd59d1c3264aaec9f0f7c817d6e8ac683b97198a36ed5a/grpcio-1.76.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:25a18e9810fbc7e7f03ec2516addc116a957f8cbb8cbc95ccc80faa072743d03", size = 8124928, upload-time = "2025-10-21T16:22:04.984Z" }, + { url = "https://files.pythonhosted.org/packages/1a/74/fd3317be5672f4856bcdd1a9e7b5e17554692d3db9a3b273879dc02d657d/grpcio-1.76.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:931091142fd8cc14edccc0845a79248bc155425eee9a98b2db2ea4f00a235a42", size = 7589983, upload-time = "2025-10-21T16:22:07.881Z" }, + { url = "https://files.pythonhosted.org/packages/45/bb/ca038cf420f405971f19821c8c15bcbc875505f6ffadafe9ffd77871dc4c/grpcio-1.76.0-cp313-cp313-win32.whl", hash = "sha256:5e8571632780e08526f118f74170ad8d50fb0a48c23a746bef2a6ebade3abd6f", size = 3984727, upload-time = "2025-10-21T16:22:10.032Z" }, + { url = "https://files.pythonhosted.org/packages/41/80/84087dc56437ced7cdd4b13d7875e7439a52a261e3ab4e06488ba6173b0a/grpcio-1.76.0-cp313-cp313-win_amd64.whl", hash = "sha256:f9f7bd5faab55f47231ad8dba7787866b69f5e93bc306e3915606779bbfb4ba8", size = 4702799, upload-time = "2025-10-21T16:22:12.709Z" }, + { url = "https://files.pythonhosted.org/packages/b4/46/39adac80de49d678e6e073b70204091e76631e03e94928b9ea4ecf0f6e0e/grpcio-1.76.0-cp314-cp314-linux_armv7l.whl", hash = "sha256:ff8a59ea85a1f2191a0ffcc61298c571bc566332f82e5f5be1b83c9d8e668a62", size = 5808417, upload-time = "2025-10-21T16:22:15.02Z" }, + { url = "https://files.pythonhosted.org/packages/9c/f5/a4531f7fb8b4e2a60b94e39d5d924469b7a6988176b3422487be61fe2998/grpcio-1.76.0-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:06c3d6b076e7b593905d04fdba6a0525711b3466f43b3400266f04ff735de0cd", size = 11828219, upload-time = "2025-10-21T16:22:17.954Z" }, + { url = 
"https://files.pythonhosted.org/packages/4b/1c/de55d868ed7a8bd6acc6b1d6ddc4aa36d07a9f31d33c912c804adb1b971b/grpcio-1.76.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:fd5ef5932f6475c436c4a55e4336ebbe47bd3272be04964a03d316bbf4afbcbc", size = 6367826, upload-time = "2025-10-21T16:22:20.721Z" }, + { url = "https://files.pythonhosted.org/packages/59/64/99e44c02b5adb0ad13ab3adc89cb33cb54bfa90c74770f2607eea629b86f/grpcio-1.76.0-cp314-cp314-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:b331680e46239e090f5b3cead313cc772f6caa7d0fc8de349337563125361a4a", size = 7049550, upload-time = "2025-10-21T16:22:23.637Z" }, + { url = "https://files.pythonhosted.org/packages/43/28/40a5be3f9a86949b83e7d6a2ad6011d993cbe9b6bd27bea881f61c7788b6/grpcio-1.76.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2229ae655ec4e8999599469559e97630185fdd53ae1e8997d147b7c9b2b72cba", size = 6575564, upload-time = "2025-10-21T16:22:26.016Z" }, + { url = "https://files.pythonhosted.org/packages/4b/a9/1be18e6055b64467440208a8559afac243c66a8b904213af6f392dc2212f/grpcio-1.76.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:490fa6d203992c47c7b9e4a9d39003a0c2bcc1c9aa3c058730884bbbb0ee9f09", size = 7176236, upload-time = "2025-10-21T16:22:28.362Z" }, + { url = "https://files.pythonhosted.org/packages/0f/55/dba05d3fcc151ce6e81327541d2cc8394f442f6b350fead67401661bf041/grpcio-1.76.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:479496325ce554792dba6548fae3df31a72cef7bad71ca2e12b0e58f9b336bfc", size = 8125795, upload-time = "2025-10-21T16:22:31.075Z" }, + { url = "https://files.pythonhosted.org/packages/4a/45/122df922d05655f63930cf42c9e3f72ba20aadb26c100ee105cad4ce4257/grpcio-1.76.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:1c9b93f79f48b03ada57ea24725d83a30284a012ec27eab2cf7e50a550cbbbcc", size = 7592214, upload-time = "2025-10-21T16:22:33.831Z" }, + { url = 
"https://files.pythonhosted.org/packages/4a/6e/0b899b7f6b66e5af39e377055fb4a6675c9ee28431df5708139df2e93233/grpcio-1.76.0-cp314-cp314-win32.whl", hash = "sha256:747fa73efa9b8b1488a95d0ba1039c8e2dca0f741612d80415b1e1c560febf4e", size = 4062961, upload-time = "2025-10-21T16:22:36.468Z" }, + { url = "https://files.pythonhosted.org/packages/19/41/0b430b01a2eb38ee887f88c1f07644a1df8e289353b78e82b37ef988fb64/grpcio-1.76.0-cp314-cp314-win_amd64.whl", hash = "sha256:922fa70ba549fce362d2e2871ab542082d66e2aaf0c19480ea453905b01f384e", size = 4834462, upload-time = "2025-10-21T16:22:39.772Z" }, +] + [[package]] name = "h11" version = "0.16.0" @@ -717,6 +769,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" }, ] +[[package]] +name = "importlib-metadata" +version = "8.7.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "zipp" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f3/49/3b30cad09e7771a4982d9975a8cbf64f00d4a1ececb53297f1d9a7be1b10/importlib_metadata-8.7.1.tar.gz", hash = "sha256:49fef1ae6440c182052f407c8d34a68f72efc36db9ca90dc0113398f2fdde8bb", size = 57107, upload-time = "2025-12-21T10:00:19.278Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fa/5e/f8e9a1d23b9c20a551a8a02ea3637b4642e22c2626e3a13a9a29cdea99eb/importlib_metadata-8.7.1-py3-none-any.whl", hash = "sha256:5a1f80bf1daa489495071efbb095d75a634cf28a8bc299581244063b53176151", size = 27865, upload-time = "2025-12-21T10:00:18.329Z" }, +] + [[package]] name = "iniconfig" version = "2.3.0" @@ -869,9 +933,19 @@ dependencies = [ { name = "flower" }, { name = "httpx" }, { name = "isodate" }, + { name = "opentelemetry-api" }, + { name = "opentelemetry-exporter-otlp" }, + { name = 
"opentelemetry-instrumentation-celery" }, + { name = "opentelemetry-instrumentation-fastapi" }, + { name = "opentelemetry-instrumentation-httpx" }, + { name = "opentelemetry-instrumentation-logging" }, + { name = "opentelemetry-instrumentation-redis" }, + { name = "opentelemetry-instrumentation-sqlalchemy" }, + { name = "opentelemetry-sdk" }, { name = "psycopg" }, { name = "pydantic-settings" }, { name = "python-jose", extra = ["cryptography"] }, + { name = "python-json-logger" }, { name = "python-multipart" }, { name = "pyyaml" }, { name = "redis" }, @@ -912,9 +986,19 @@ requires-dist = [ { name = "flower", specifier = ">=2.0.1" }, { name = "httpx", specifier = ">=0.28.1" }, { name = "isodate", specifier = ">=0.7.2" }, + { name = "opentelemetry-api", specifier = ">=1.25.0" }, + { name = "opentelemetry-exporter-otlp", specifier = ">=1.25.0" }, + { name = "opentelemetry-instrumentation-celery", specifier = ">=0.46b0" }, + { name = "opentelemetry-instrumentation-fastapi", specifier = ">=0.46b0" }, + { name = "opentelemetry-instrumentation-httpx", specifier = ">=0.46b0" }, + { name = "opentelemetry-instrumentation-logging", specifier = ">=0.46b0" }, + { name = "opentelemetry-instrumentation-redis", specifier = ">=0.46b0" }, + { name = "opentelemetry-instrumentation-sqlalchemy", specifier = ">=0.46b0" }, + { name = "opentelemetry-sdk", specifier = ">=1.25.0" }, { name = "psycopg", specifier = ">=3.2.9" }, { name = "pydantic-settings", specifier = ">=2.10.1" }, { name = "python-jose", extras = ["cryptography"], specifier = ">=3.5.0" }, + { name = "python-json-logger", specifier = ">=2.0.7" }, { name = "python-multipart", specifier = ">=0.0.20" }, { name = "pyyaml", specifier = ">=6.0.3" }, { name = "redis", specifier = ">=7.0.1" }, @@ -942,6 +1026,249 @@ dev = [ { name = "setuptools", specifier = ">=75.0.0" }, ] +[[package]] +name = "opentelemetry-api" +version = "1.39.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = 
"importlib-metadata" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/97/b9/3161be15bb8e3ad01be8be5a968a9237c3027c5be504362ff800fca3e442/opentelemetry_api-1.39.1.tar.gz", hash = "sha256:fbde8c80e1b937a2c61f20347e91c0c18a1940cecf012d62e65a7caf08967c9c", size = 65767, upload-time = "2025-12-11T13:32:39.182Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cf/df/d3f1ddf4bb4cb50ed9b1139cc7b1c54c34a1e7ce8fd1b9a37c0d1551a6bd/opentelemetry_api-1.39.1-py3-none-any.whl", hash = "sha256:2edd8463432a7f8443edce90972169b195e7d6a05500cd29e6d13898187c9950", size = 66356, upload-time = "2025-12-11T13:32:17.304Z" }, +] + +[[package]] +name = "opentelemetry-exporter-otlp" +version = "1.39.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "opentelemetry-exporter-otlp-proto-grpc" }, + { name = "opentelemetry-exporter-otlp-proto-http" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/30/9c/3ab1db90f32da200dba332658f2bbe602369e3d19f6aba394031a42635be/opentelemetry_exporter_otlp-1.39.1.tar.gz", hash = "sha256:7cf7470e9fd0060c8a38a23e4f695ac686c06a48ad97f8d4867bc9b420180b9c", size = 6147, upload-time = "2025-12-11T13:32:40.309Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/00/6c/bdc82a066e6fb1dcf9e8cc8d4e026358fe0f8690700cc6369a6bf9bd17a7/opentelemetry_exporter_otlp-1.39.1-py3-none-any.whl", hash = "sha256:68ae69775291f04f000eb4b698ff16ff685fdebe5cb52871bc4e87938a7b00fe", size = 7019, upload-time = "2025-12-11T13:32:19.387Z" }, +] + +[[package]] +name = "opentelemetry-exporter-otlp-proto-common" +version = "1.39.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "opentelemetry-proto" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e9/9d/22d241b66f7bbde88a3bfa6847a351d2c46b84de23e71222c6aae25c7050/opentelemetry_exporter_otlp_proto_common-1.39.1.tar.gz", hash = 
"sha256:763370d4737a59741c89a67b50f9e39271639ee4afc999dadfe768541c027464", size = 20409, upload-time = "2025-12-11T13:32:40.885Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8c/02/ffc3e143d89a27ac21fd557365b98bd0653b98de8a101151d5805b5d4c33/opentelemetry_exporter_otlp_proto_common-1.39.1-py3-none-any.whl", hash = "sha256:08f8a5862d64cc3435105686d0216c1365dc5701f86844a8cd56597d0c764fde", size = 18366, upload-time = "2025-12-11T13:32:20.2Z" }, +] + +[[package]] +name = "opentelemetry-exporter-otlp-proto-grpc" +version = "1.39.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "googleapis-common-protos" }, + { name = "grpcio" }, + { name = "opentelemetry-api" }, + { name = "opentelemetry-exporter-otlp-proto-common" }, + { name = "opentelemetry-proto" }, + { name = "opentelemetry-sdk" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/53/48/b329fed2c610c2c32c9366d9dc597202c9d1e58e631c137ba15248d8850f/opentelemetry_exporter_otlp_proto_grpc-1.39.1.tar.gz", hash = "sha256:772eb1c9287485d625e4dbe9c879898e5253fea111d9181140f51291b5fec3ad", size = 24650, upload-time = "2025-12-11T13:32:41.429Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/81/a3/cc9b66575bd6597b98b886a2067eea2693408d2d5f39dad9ab7fc264f5f3/opentelemetry_exporter_otlp_proto_grpc-1.39.1-py3-none-any.whl", hash = "sha256:fa1c136a05c7e9b4c09f739469cbdb927ea20b34088ab1d959a849b5cc589c18", size = 19766, upload-time = "2025-12-11T13:32:21.027Z" }, +] + +[[package]] +name = "opentelemetry-exporter-otlp-proto-http" +version = "1.39.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "googleapis-common-protos" }, + { name = "opentelemetry-api" }, + { name = "opentelemetry-exporter-otlp-proto-common" }, + { name = "opentelemetry-proto" }, + { name = "opentelemetry-sdk" }, + { name = "requests" }, + { name = "typing-extensions" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/80/04/2a08fa9c0214ae38880df01e8bfae12b067ec0793446578575e5080d6545/opentelemetry_exporter_otlp_proto_http-1.39.1.tar.gz", hash = "sha256:31bdab9745c709ce90a49a0624c2bd445d31a28ba34275951a6a362d16a0b9cb", size = 17288, upload-time = "2025-12-11T13:32:42.029Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/95/f1/b27d3e2e003cd9a3592c43d099d2ed8d0a947c15281bf8463a256db0b46c/opentelemetry_exporter_otlp_proto_http-1.39.1-py3-none-any.whl", hash = "sha256:d9f5207183dd752a412c4cd564ca8875ececba13be6e9c6c370ffb752fd59985", size = 19641, upload-time = "2025-12-11T13:32:22.248Z" }, +] + +[[package]] +name = "opentelemetry-instrumentation" +version = "0.60b1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "opentelemetry-api" }, + { name = "opentelemetry-semantic-conventions" }, + { name = "packaging" }, + { name = "wrapt" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/41/0f/7e6b713ac117c1f5e4e3300748af699b9902a2e5e34c9cf443dde25a01fa/opentelemetry_instrumentation-0.60b1.tar.gz", hash = "sha256:57ddc7974c6eb35865af0426d1a17132b88b2ed8586897fee187fd5b8944bd6a", size = 31706, upload-time = "2025-12-11T13:36:42.515Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/77/d2/6788e83c5c86a2690101681aeef27eeb2a6bf22df52d3f263a22cee20915/opentelemetry_instrumentation-0.60b1-py3-none-any.whl", hash = "sha256:04480db952b48fb1ed0073f822f0ee26012b7be7c3eac1a3793122737c78632d", size = 33096, upload-time = "2025-12-11T13:35:33.067Z" }, +] + +[[package]] +name = "opentelemetry-instrumentation-asgi" +version = "0.60b1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "asgiref" }, + { name = "opentelemetry-api" }, + { name = "opentelemetry-instrumentation" }, + { name = "opentelemetry-semantic-conventions" }, + { name = "opentelemetry-util-http" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/77/db/851fa88db7441da82d50bd80f2de5ee55213782e25dc858e04d0c9961d60/opentelemetry_instrumentation_asgi-0.60b1.tar.gz", hash = "sha256:16bfbe595cd24cda309a957456d0fc2523f41bc7b076d1f2d7e98a1ad9876d6f", size = 26107, upload-time = "2025-12-11T13:36:47.015Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/76/76/1fb94367cef64420d2171157a6b9509582873bd09a6afe08a78a8d1f59d9/opentelemetry_instrumentation_asgi-0.60b1-py3-none-any.whl", hash = "sha256:d48def2dbed10294c99cfcf41ebbd0c414d390a11773a41f472d20000fcddc25", size = 16933, upload-time = "2025-12-11T13:35:40.462Z" }, +] + +[[package]] +name = "opentelemetry-instrumentation-celery" +version = "0.60b1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "opentelemetry-api" }, + { name = "opentelemetry-instrumentation" }, + { name = "opentelemetry-semantic-conventions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/88/b3/eb0f83e5ef774fc1d65a9ed1b3dd8fbd8d47ec204029794074b76a116d85/opentelemetry_instrumentation_celery-0.60b1.tar.gz", hash = "sha256:896bb9eda2d7c4a39bbc5bee2caae9c06a3a41ba283bafc414b224bc8a0f04c8", size = 14768, upload-time = "2025-12-11T13:36:52.916Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/34/ae/1b868805cf9a9b72450fc5ff6cb36a15735d68bc71c1dc1ffaf2a5ffdabe/opentelemetry_instrumentation_celery-0.60b1-py3-none-any.whl", hash = "sha256:ee946f85a3e6893d8edf09402c2c773cacc09854dcea35ae2a694320f85403cf", size = 13805, upload-time = "2025-12-11T13:35:53.223Z" }, +] + +[[package]] +name = "opentelemetry-instrumentation-fastapi" +version = "0.60b1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "opentelemetry-api" }, + { name = "opentelemetry-instrumentation" }, + { name = "opentelemetry-instrumentation-asgi" }, + { name = "opentelemetry-semantic-conventions" }, + { name = "opentelemetry-util-http" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/9c/e7/e7e5e50218cf488377209d85666b182fa2d4928bf52389411ceeee1b2b60/opentelemetry_instrumentation_fastapi-0.60b1.tar.gz", hash = "sha256:de608955f7ff8eecf35d056578346a5365015fd7d8623df9b1f08d1c74769c01", size = 24958, upload-time = "2025-12-11T13:36:59.35Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7d/cc/6e808328ba54662e50babdcab21138eae4250bc0fddf67d55526a615a2ca/opentelemetry_instrumentation_fastapi-0.60b1-py3-none-any.whl", hash = "sha256:af94b7a239ad1085fc3a820ecf069f67f579d7faf4c085aaa7bd9b64eafc8eaf", size = 13478, upload-time = "2025-12-11T13:36:00.811Z" }, +] + +[[package]] +name = "opentelemetry-instrumentation-httpx" +version = "0.60b1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "opentelemetry-api" }, + { name = "opentelemetry-instrumentation" }, + { name = "opentelemetry-semantic-conventions" }, + { name = "opentelemetry-util-http" }, + { name = "wrapt" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/86/08/11208bcfcab4fc2023252c3f322aa397fd9ad948355fea60f5fc98648603/opentelemetry_instrumentation_httpx-0.60b1.tar.gz", hash = "sha256:a506ebaf28c60112cbe70ad4f0338f8603f148938cb7b6794ce1051cd2b270ae", size = 20611, upload-time = "2025-12-11T13:37:01.661Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/43/59/b98e84eebf745ffc75397eaad4763795bff8a30cbf2373a50ed4e70646c5/opentelemetry_instrumentation_httpx-0.60b1-py3-none-any.whl", hash = "sha256:f37636dd742ad2af83d896ba69601ed28da51fa4e25d1ab62fde89ce413e275b", size = 15701, upload-time = "2025-12-11T13:36:04.56Z" }, +] + +[[package]] +name = "opentelemetry-instrumentation-logging" +version = "0.60b1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "opentelemetry-api" }, + { name = "opentelemetry-instrumentation" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/60/a6/4515895b383113677fd2ad21813df5e56108a2df14ebb7916c962c9a0234/opentelemetry_instrumentation_logging-0.60b1.tar.gz", hash = "sha256:98f4b9c7aeb9314a30feee7c002c7ea9abea07c90df5f97fb058b850bc45b89a", size = 9968, upload-time = "2025-12-11T13:37:03.974Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f1/f9/8a4ce3901bc52277794e4b18c4ac43dc5929806eff01d22812364132f45f/opentelemetry_instrumentation_logging-0.60b1-py3-none-any.whl", hash = "sha256:f2e18cbc7e1dd3628c80e30d243897fdc93c5b7e0c8ae60abd2b9b6a99f82343", size = 12577, upload-time = "2025-12-11T13:36:08.123Z" }, +] + +[[package]] +name = "opentelemetry-instrumentation-redis" +version = "0.60b1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "opentelemetry-api" }, + { name = "opentelemetry-instrumentation" }, + { name = "opentelemetry-semantic-conventions" }, + { name = "wrapt" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/6a/1e/225364fab4db793f6f5024ed9f3dd53774fd7c7c21fa242460234dcdf8d9/opentelemetry_instrumentation_redis-0.60b1.tar.gz", hash = "sha256:ecafa8f81c88917b59f0d842fb3d157f3a8edc71fb4b85bebca3bc19432ce7b8", size = 14774, upload-time = "2025-12-11T13:37:11.201Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/05/bd/d55d3b34fd49df08d9d9fa3701dff0051b216e2c7e9adaaa4ff6aa1de8d7/opentelemetry_instrumentation_redis-0.60b1-py3-none-any.whl", hash = "sha256:33bef0ff9af6f2d88de90c1cd7e25675c10a16d4f9ee5ae7592b28bb08b78139", size = 15502, upload-time = "2025-12-11T13:36:21.481Z" }, +] + +[[package]] +name = "opentelemetry-instrumentation-sqlalchemy" +version = "0.60b1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "opentelemetry-api" }, + { name = "opentelemetry-instrumentation" }, + { name = "opentelemetry-semantic-conventions" }, + { name = "packaging" }, + { name = "wrapt" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/92/16/6a4cbff1b7cd86d1e58ffd100255f6da781a88f4a2affdcc3721880191c9/opentelemetry_instrumentation_sqlalchemy-0.60b1.tar.gz", hash = "sha256:b614e874a7c0a692838a0da613d1654e81a0612867836a1f0765e40e9c8cc49b", size = 15317, upload-time = "2025-12-11T13:37:13.089Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d0/b7/2234bc761c197c7f099f30cad5d50efd8286c59b5b8f45cfd6ba6ebe7d5e/opentelemetry_instrumentation_sqlalchemy-0.60b1-py3-none-any.whl", hash = "sha256:486a5f264d264c44e07e0320e33fd19d09cecd2fd4b99c1064046e77a27d9f9f", size = 14529, upload-time = "2025-12-11T13:36:24.964Z" }, +] + +[[package]] +name = "opentelemetry-proto" +version = "1.39.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "protobuf" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/49/1d/f25d76d8260c156c40c97c9ed4511ec0f9ce353f8108ca6e7561f82a06b2/opentelemetry_proto-1.39.1.tar.gz", hash = "sha256:6c8e05144fc0d3ed4d22c2289c6b126e03bcd0e6a7da0f16cedd2e1c2772e2c8", size = 46152, upload-time = "2025-12-11T13:32:48.681Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/51/95/b40c96a7b5203005a0b03d8ce8cd212ff23f1793d5ba289c87a097571b18/opentelemetry_proto-1.39.1-py3-none-any.whl", hash = "sha256:22cdc78efd3b3765d09e68bfbd010d4fc254c9818afd0b6b423387d9dee46007", size = 72535, upload-time = "2025-12-11T13:32:33.866Z" }, +] + +[[package]] +name = "opentelemetry-sdk" +version = "1.39.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "opentelemetry-api" }, + { name = "opentelemetry-semantic-conventions" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/eb/fb/c76080c9ba07e1e8235d24cdcc4d125ef7aa3edf23eb4e497c2e50889adc/opentelemetry_sdk-1.39.1.tar.gz", hash = "sha256:cf4d4563caf7bff906c9f7967e2be22d0d6b349b908be0d90fb21c8e9c995cc6", size = 171460, upload-time = "2025-12-11T13:32:49.369Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/7c/98/e91cf858f203d86f4eccdf763dcf01cf03f1dae80c3750f7e635bfa206b6/opentelemetry_sdk-1.39.1-py3-none-any.whl", hash = "sha256:4d5482c478513ecb0a5d938dcc61394e647066e0cc2676bee9f3af3f3f45f01c", size = 132565, upload-time = "2025-12-11T13:32:35.069Z" }, +] + +[[package]] +name = "opentelemetry-semantic-conventions" +version = "0.60b1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "opentelemetry-api" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/91/df/553f93ed38bf22f4b999d9be9c185adb558982214f33eae539d3b5cd0858/opentelemetry_semantic_conventions-0.60b1.tar.gz", hash = "sha256:87c228b5a0669b748c76d76df6c364c369c28f1c465e50f661e39737e84bc953", size = 137935, upload-time = "2025-12-11T13:32:50.487Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7a/5e/5958555e09635d09b75de3c4f8b9cae7335ca545d77392ffe7331534c402/opentelemetry_semantic_conventions-0.60b1-py3-none-any.whl", hash = "sha256:9fa8c8b0c110da289809292b0591220d3a7b53c1526a23021e977d68597893fb", size = 219982, upload-time = "2025-12-11T13:32:36.955Z" }, +] + +[[package]] +name = "opentelemetry-util-http" +version = "0.60b1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/50/fc/c47bb04a1d8a941a4061307e1eddfa331ed4d0ab13d8a9781e6db256940a/opentelemetry_util_http-0.60b1.tar.gz", hash = "sha256:0d97152ca8c8a41ced7172d29d3622a219317f74ae6bb3027cfbdcf22c3cc0d6", size = 11053, upload-time = "2025-12-11T13:37:25.115Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/16/5c/d3f1733665f7cd582ef0842fb1d2ed0bc1fba10875160593342d22bba375/opentelemetry_util_http-0.60b1-py3-none-any.whl", hash = "sha256:66381ba28550c91bee14dcba8979ace443444af1ed609226634596b4b0faf199", size = 8947, upload-time = "2025-12-11T13:36:37.151Z" }, +] + [[package]] name = "packaging" version = "25.0" @@ -1006,6 +1333,21 @@ wheels = [ { 
url = "https://files.pythonhosted.org/packages/84/03/0d3ce49e2505ae70cf43bc5bb3033955d2fc9f932163e84dc0779cc47f48/prompt_toolkit-3.0.52-py3-none-any.whl", hash = "sha256:9aac639a3bbd33284347de5ad8d68ecc044b91a762dc39b7c21095fcd6a19955", size = 391431, upload-time = "2025-08-27T15:23:59.498Z" }, ] +[[package]] +name = "protobuf" +version = "6.33.5" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ba/25/7c72c307aafc96fa87062aa6291d9f7c94836e43214d43722e86037aac02/protobuf-6.33.5.tar.gz", hash = "sha256:6ddcac2a081f8b7b9642c09406bc6a4290128fce5f471cddd165960bb9119e5c", size = 444465, upload-time = "2026-01-29T21:51:33.494Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b1/79/af92d0a8369732b027e6d6084251dd8e782c685c72da161bd4a2e00fbabb/protobuf-6.33.5-cp310-abi3-win32.whl", hash = "sha256:d71b040839446bac0f4d162e758bea99c8251161dae9d0983a3b88dee345153b", size = 425769, upload-time = "2026-01-29T21:51:21.751Z" }, + { url = "https://files.pythonhosted.org/packages/55/75/bb9bc917d10e9ee13dee8607eb9ab963b7cf8be607c46e7862c748aa2af7/protobuf-6.33.5-cp310-abi3-win_amd64.whl", hash = "sha256:3093804752167bcab3998bec9f1048baae6e29505adaf1afd14a37bddede533c", size = 437118, upload-time = "2026-01-29T21:51:24.022Z" }, + { url = "https://files.pythonhosted.org/packages/a2/6b/e48dfc1191bc5b52950246275bf4089773e91cb5ba3592621723cdddca62/protobuf-6.33.5-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:a5cb85982d95d906df1e2210e58f8e4f1e3cdc088e52c921a041f9c9a0386de5", size = 427766, upload-time = "2026-01-29T21:51:25.413Z" }, + { url = "https://files.pythonhosted.org/packages/4e/b1/c79468184310de09d75095ed1314b839eb2f72df71097db9d1404a1b2717/protobuf-6.33.5-cp39-abi3-manylinux2014_aarch64.whl", hash = "sha256:9b71e0281f36f179d00cbcb119cb19dec4d14a81393e5ea220f64b286173e190", size = 324638, upload-time = "2026-01-29T21:51:26.423Z" }, + { url = 
"https://files.pythonhosted.org/packages/c5/f5/65d838092fd01c44d16037953fd4c2cc851e783de9b8f02b27ec4ffd906f/protobuf-6.33.5-cp39-abi3-manylinux2014_s390x.whl", hash = "sha256:8afa18e1d6d20af15b417e728e9f60f3aa108ee76f23c3b2c07a2c3b546d3afd", size = 339411, upload-time = "2026-01-29T21:51:27.446Z" }, + { url = "https://files.pythonhosted.org/packages/9b/53/a9443aa3ca9ba8724fdfa02dd1887c1bcd8e89556b715cfbacca6b63dbec/protobuf-6.33.5-cp39-abi3-manylinux2014_x86_64.whl", hash = "sha256:cbf16ba3350fb7b889fca858fb215967792dc125b35c7976ca4818bee3521cf0", size = 323465, upload-time = "2026-01-29T21:51:28.925Z" }, + { url = "https://files.pythonhosted.org/packages/57/bf/2086963c69bdac3d7cff1cc7ff79b8ce5ea0bec6797a017e1be338a46248/protobuf-6.33.5-py3-none-any.whl", hash = "sha256:69915a973dd0f60f31a08b8318b73eab2bd6a392c79184b3612226b0a3f8ec02", size = 170687, upload-time = "2026-01-29T21:51:32.557Z" }, +] + [[package]] name = "psycopg" version = "3.2.13" @@ -1238,6 +1580,15 @@ cryptography = [ { name = "cryptography" }, ] +[[package]] +name = "python-json-logger" +version = "4.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/29/bf/eca6a3d43db1dae7070f70e160ab20b807627ba953663ba07928cdd3dc58/python_json_logger-4.0.0.tar.gz", hash = "sha256:f58e68eb46e1faed27e0f574a55a0455eecd7b8a5b88b85a784519ba3cff047f", size = 17683, upload-time = "2025-10-06T04:15:18.984Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/51/e5/fecf13f06e5e5f67e8837d777d1bc43fac0ed2b77a676804df5c34744727/python_json_logger-4.0.0-py3-none-any.whl", hash = "sha256:af09c9daf6a813aa4cc7180395f50f2a9e5fa056034c9953aec92e381c5ba1e2", size = 15548, upload-time = "2025-10-06T04:15:17.553Z" }, +] + [[package]] name = "python-multipart" version = "0.0.20" @@ -1752,3 +2103,51 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/1b/6c/c65773d6cab416a64d191d6ee8a8b1c68a09970ea6909d16965d26bfed1e/websockets-15.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:e09473f095a819042ecb2ab9465aee615bd9c2028e4ef7d933600a8401c79561", size = 176837, upload-time = "2025-03-05T20:02:55.237Z" }, { url = "https://files.pythonhosted.org/packages/fa/a8/5b41e0da817d64113292ab1f8247140aac61cbf6cfd085d6a0fa77f4984f/websockets-15.0.1-py3-none-any.whl", hash = "sha256:f7a866fbc1e97b5c617ee4116daaa09b722101d4a3c170c787450ba409f9736f", size = 169743, upload-time = "2025-03-05T20:03:39.41Z" }, ] + +[[package]] +name = "wrapt" +version = "1.17.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/95/8f/aeb76c5b46e273670962298c23e7ddde79916cb74db802131d49a85e4b7d/wrapt-1.17.3.tar.gz", hash = "sha256:f66eb08feaa410fe4eebd17f2a2c8e2e46d3476e9f8c783daa8e09e0faa666d0", size = 55547, upload-time = "2025-08-12T05:53:21.714Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fc/f6/759ece88472157acb55fc195e5b116e06730f1b651b5b314c66291729193/wrapt-1.17.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a47681378a0439215912ef542c45a783484d4dd82bac412b71e59cf9c0e1cea0", size = 54003, upload-time = "2025-08-12T05:51:48.627Z" }, + { url = "https://files.pythonhosted.org/packages/4f/a9/49940b9dc6d47027dc850c116d79b4155f15c08547d04db0f07121499347/wrapt-1.17.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:54a30837587c6ee3cd1a4d1c2ec5d24e77984d44e2f34547e2323ddb4e22eb77", size = 39025, upload-time = "2025-08-12T05:51:37.156Z" }, + { url = "https://files.pythonhosted.org/packages/45/35/6a08de0f2c96dcdd7fe464d7420ddb9a7655a6561150e5fc4da9356aeaab/wrapt-1.17.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:16ecf15d6af39246fe33e507105d67e4b81d8f8d2c6598ff7e3ca1b8a37213f7", size = 39108, upload-time = "2025-08-12T05:51:58.425Z" }, + { url = 
"https://files.pythonhosted.org/packages/0c/37/6faf15cfa41bf1f3dba80cd3f5ccc6622dfccb660ab26ed79f0178c7497f/wrapt-1.17.3-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:6fd1ad24dc235e4ab88cda009e19bf347aabb975e44fd5c2fb22a3f6e4141277", size = 88072, upload-time = "2025-08-12T05:52:37.53Z" }, + { url = "https://files.pythonhosted.org/packages/78/f2/efe19ada4a38e4e15b6dff39c3e3f3f73f5decf901f66e6f72fe79623a06/wrapt-1.17.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0ed61b7c2d49cee3c027372df5809a59d60cf1b6c2f81ee980a091f3afed6a2d", size = 88214, upload-time = "2025-08-12T05:52:15.886Z" }, + { url = "https://files.pythonhosted.org/packages/40/90/ca86701e9de1622b16e09689fc24b76f69b06bb0150990f6f4e8b0eeb576/wrapt-1.17.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:423ed5420ad5f5529db9ce89eac09c8a2f97da18eb1c870237e84c5a5c2d60aa", size = 87105, upload-time = "2025-08-12T05:52:17.914Z" }, + { url = "https://files.pythonhosted.org/packages/fd/e0/d10bd257c9a3e15cbf5523025252cc14d77468e8ed644aafb2d6f54cb95d/wrapt-1.17.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e01375f275f010fcbf7f643b4279896d04e571889b8a5b3f848423d91bf07050", size = 87766, upload-time = "2025-08-12T05:52:39.243Z" }, + { url = "https://files.pythonhosted.org/packages/e8/cf/7d848740203c7b4b27eb55dbfede11aca974a51c3d894f6cc4b865f42f58/wrapt-1.17.3-cp313-cp313-win32.whl", hash = "sha256:53e5e39ff71b3fc484df8a522c933ea2b7cdd0d5d15ae82e5b23fde87d44cbd8", size = 36711, upload-time = "2025-08-12T05:53:10.074Z" }, + { url = "https://files.pythonhosted.org/packages/57/54/35a84d0a4d23ea675994104e667ceff49227ce473ba6a59ba2c84f250b74/wrapt-1.17.3-cp313-cp313-win_amd64.whl", hash = "sha256:1f0b2f40cf341ee8cc1a97d51ff50dddb9fcc73241b9143ec74b30fc4f44f6cb", size = 38885, upload-time = "2025-08-12T05:53:08.695Z" }, + { url = 
"https://files.pythonhosted.org/packages/01/77/66e54407c59d7b02a3c4e0af3783168fff8e5d61def52cda8728439d86bc/wrapt-1.17.3-cp313-cp313-win_arm64.whl", hash = "sha256:7425ac3c54430f5fc5e7b6f41d41e704db073309acfc09305816bc6a0b26bb16", size = 36896, upload-time = "2025-08-12T05:52:55.34Z" }, + { url = "https://files.pythonhosted.org/packages/02/a2/cd864b2a14f20d14f4c496fab97802001560f9f41554eef6df201cd7f76c/wrapt-1.17.3-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:cf30f6e3c077c8e6a9a7809c94551203c8843e74ba0c960f4a98cd80d4665d39", size = 54132, upload-time = "2025-08-12T05:51:49.864Z" }, + { url = "https://files.pythonhosted.org/packages/d5/46/d011725b0c89e853dc44cceb738a307cde5d240d023d6d40a82d1b4e1182/wrapt-1.17.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:e228514a06843cae89621384cfe3a80418f3c04aadf8a3b14e46a7be704e4235", size = 39091, upload-time = "2025-08-12T05:51:38.935Z" }, + { url = "https://files.pythonhosted.org/packages/2e/9e/3ad852d77c35aae7ddebdbc3b6d35ec8013af7d7dddad0ad911f3d891dae/wrapt-1.17.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:5ea5eb3c0c071862997d6f3e02af1d055f381b1d25b286b9d6644b79db77657c", size = 39172, upload-time = "2025-08-12T05:51:59.365Z" }, + { url = "https://files.pythonhosted.org/packages/c3/f7/c983d2762bcce2326c317c26a6a1e7016f7eb039c27cdf5c4e30f4160f31/wrapt-1.17.3-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:281262213373b6d5e4bb4353bc36d1ba4084e6d6b5d242863721ef2bf2c2930b", size = 87163, upload-time = "2025-08-12T05:52:40.965Z" }, + { url = "https://files.pythonhosted.org/packages/e4/0f/f673f75d489c7f22d17fe0193e84b41540d962f75fce579cf6873167c29b/wrapt-1.17.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:dc4a8d2b25efb6681ecacad42fca8859f88092d8732b170de6a5dddd80a1c8fa", size = 87963, upload-time = "2025-08-12T05:52:20.326Z" }, + { url = 
"https://files.pythonhosted.org/packages/df/61/515ad6caca68995da2fac7a6af97faab8f78ebe3bf4f761e1b77efbc47b5/wrapt-1.17.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:373342dd05b1d07d752cecbec0c41817231f29f3a89aa8b8843f7b95992ed0c7", size = 86945, upload-time = "2025-08-12T05:52:21.581Z" }, + { url = "https://files.pythonhosted.org/packages/d3/bd/4e70162ce398462a467bc09e768bee112f1412e563620adc353de9055d33/wrapt-1.17.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d40770d7c0fd5cbed9d84b2c3f2e156431a12c9a37dc6284060fb4bec0b7ffd4", size = 86857, upload-time = "2025-08-12T05:52:43.043Z" }, + { url = "https://files.pythonhosted.org/packages/2b/b8/da8560695e9284810b8d3df8a19396a6e40e7518059584a1a394a2b35e0a/wrapt-1.17.3-cp314-cp314-win32.whl", hash = "sha256:fbd3c8319de8e1dc79d346929cd71d523622da527cca14e0c1d257e31c2b8b10", size = 37178, upload-time = "2025-08-12T05:53:12.605Z" }, + { url = "https://files.pythonhosted.org/packages/db/c8/b71eeb192c440d67a5a0449aaee2310a1a1e8eca41676046f99ed2487e9f/wrapt-1.17.3-cp314-cp314-win_amd64.whl", hash = "sha256:e1a4120ae5705f673727d3253de3ed0e016f7cd78dc463db1b31e2463e1f3cf6", size = 39310, upload-time = "2025-08-12T05:53:11.106Z" }, + { url = "https://files.pythonhosted.org/packages/45/20/2cda20fd4865fa40f86f6c46ed37a2a8356a7a2fde0773269311f2af56c7/wrapt-1.17.3-cp314-cp314-win_arm64.whl", hash = "sha256:507553480670cab08a800b9463bdb881b2edeed77dc677b0a5915e6106e91a58", size = 37266, upload-time = "2025-08-12T05:52:56.531Z" }, + { url = "https://files.pythonhosted.org/packages/77/ed/dd5cf21aec36c80443c6f900449260b80e2a65cf963668eaef3b9accce36/wrapt-1.17.3-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:ed7c635ae45cfbc1a7371f708727bf74690daedc49b4dba310590ca0bd28aa8a", size = 56544, upload-time = "2025-08-12T05:51:51.109Z" }, + { url = "https://files.pythonhosted.org/packages/8d/96/450c651cc753877ad100c7949ab4d2e2ecc4d97157e00fa8f45df682456a/wrapt-1.17.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = 
"sha256:249f88ed15503f6492a71f01442abddd73856a0032ae860de6d75ca62eed8067", size = 40283, upload-time = "2025-08-12T05:51:39.912Z" }, + { url = "https://files.pythonhosted.org/packages/d1/86/2fcad95994d9b572db57632acb6f900695a648c3e063f2cd344b3f5c5a37/wrapt-1.17.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:5a03a38adec8066d5a37bea22f2ba6bbf39fcdefbe2d91419ab864c3fb515454", size = 40366, upload-time = "2025-08-12T05:52:00.693Z" }, + { url = "https://files.pythonhosted.org/packages/64/0e/f4472f2fdde2d4617975144311f8800ef73677a159be7fe61fa50997d6c0/wrapt-1.17.3-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:5d4478d72eb61c36e5b446e375bbc49ed002430d17cdec3cecb36993398e1a9e", size = 108571, upload-time = "2025-08-12T05:52:44.521Z" }, + { url = "https://files.pythonhosted.org/packages/cc/01/9b85a99996b0a97c8a17484684f206cbb6ba73c1ce6890ac668bcf3838fb/wrapt-1.17.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:223db574bb38637e8230eb14b185565023ab624474df94d2af18f1cdb625216f", size = 113094, upload-time = "2025-08-12T05:52:22.618Z" }, + { url = "https://files.pythonhosted.org/packages/25/02/78926c1efddcc7b3aa0bc3d6b33a822f7d898059f7cd9ace8c8318e559ef/wrapt-1.17.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e405adefb53a435f01efa7ccdec012c016b5a1d3f35459990afc39b6be4d5056", size = 110659, upload-time = "2025-08-12T05:52:24.057Z" }, + { url = "https://files.pythonhosted.org/packages/dc/ee/c414501ad518ac3e6fe184753632fe5e5ecacdcf0effc23f31c1e4f7bfcf/wrapt-1.17.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:88547535b787a6c9ce4086917b6e1d291aa8ed914fdd3a838b3539dc95c12804", size = 106946, upload-time = "2025-08-12T05:52:45.976Z" }, + { url = "https://files.pythonhosted.org/packages/be/44/a1bd64b723d13bb151d6cc91b986146a1952385e0392a78567e12149c7b4/wrapt-1.17.3-cp314-cp314t-win32.whl", hash = 
"sha256:41b1d2bc74c2cac6f9074df52b2efbef2b30bdfe5f40cb78f8ca22963bc62977", size = 38717, upload-time = "2025-08-12T05:53:15.214Z" }, + { url = "https://files.pythonhosted.org/packages/79/d9/7cfd5a312760ac4dd8bf0184a6ee9e43c33e47f3dadc303032ce012b8fa3/wrapt-1.17.3-cp314-cp314t-win_amd64.whl", hash = "sha256:73d496de46cd2cdbdbcce4ae4bcdb4afb6a11234a1df9c085249d55166b95116", size = 41334, upload-time = "2025-08-12T05:53:14.178Z" }, + { url = "https://files.pythonhosted.org/packages/46/78/10ad9781128ed2f99dbc474f43283b13fea8ba58723e98844367531c18e9/wrapt-1.17.3-cp314-cp314t-win_arm64.whl", hash = "sha256:f38e60678850c42461d4202739f9bf1e3a737c7ad283638251e79cc49effb6b6", size = 38471, upload-time = "2025-08-12T05:52:57.784Z" }, + { url = "https://files.pythonhosted.org/packages/1f/f6/a933bd70f98e9cf3e08167fc5cd7aaaca49147e48411c0bd5ae701bb2194/wrapt-1.17.3-py3-none-any.whl", hash = "sha256:7171ae35d2c33d326ac19dd8facb1e82e5fd04ef8c6c0e394d7af55a55051c22", size = 23591, upload-time = "2025-08-12T05:53:20.674Z" }, +] + +[[package]] +name = "zipp" +version = "3.23.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e3/02/0f2892c661036d50ede074e376733dca2ae7c6eb617489437771209d4180/zipp-3.23.0.tar.gz", hash = "sha256:a07157588a12518c9d4034df3fbbee09c814741a33ff63c05fa29d26a2404166", size = 25547, upload-time = "2025-06-08T17:06:39.4Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2e/54/647ade08bf0db230bfea292f893923872fd20be6ac6f53b2b936ba839d75/zipp-3.23.0-py3-none-any.whl", hash = "sha256:071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e", size = 10276, upload-time = "2025-06-08T17:06:38.034Z" }, +] diff --git a/docker-compose.observability.yml b/docker-compose.observability.yml new file mode 100644 index 00000000..aa479b67 --- /dev/null +++ b/docker-compose.observability.yml @@ -0,0 +1,76 @@ +# Observability stack for traces, metrics, and logs. 
+# Usage: docker compose -f docker-compose.yml -f docker-compose.observability.yml up -d +# Grafana UI: http://localhost:3001 (admin/admin) + +services: + otel-collector: + image: otel/opentelemetry-collector-contrib:0.96.0 + container_name: otel-collector__open-wearables + command: ["--config=/etc/otel-collector-config.yaml"] + volumes: + - ./infra/otel-collector-config.yaml:/etc/otel-collector-config.yaml:ro + ports: + - "4317:4317" # OTLP gRPC + - "4318:4318" # OTLP HTTP + - "8888:8888" # Prometheus metrics + depends_on: + - tempo + - loki + + tempo: + image: grafana/tempo:2.4.1 + container_name: tempo__open-wearables + command: ["-config.file=/etc/tempo.yaml"] + volumes: + - ./infra/tempo.yaml:/etc/tempo.yaml:ro + - tempo_data:/var/tempo + ports: + - "3200:3200" # Tempo API + + loki: + image: grafana/loki:2.9.6 + container_name: loki__open-wearables + command: ["-config.file=/etc/loki/local-config.yaml"] + volumes: + - ./infra/loki.yaml:/etc/loki/local-config.yaml:ro + - loki_data:/loki + ports: + - "3100:3100" + + prometheus: + image: prom/prometheus:v2.51.0 + container_name: prometheus__open-wearables + command: + - "--config.file=/etc/prometheus/prometheus.yml" + - "--storage.tsdb.path=/prometheus" + - "--web.enable-remote-write-receiver" + volumes: + - ./infra/prometheus.yml:/etc/prometheus/prometheus.yml:ro + - prometheus_data:/prometheus + ports: + - "9090:9090" + + grafana: + image: grafana/grafana:10.4.1 + container_name: grafana__open-wearables + environment: + - GF_SECURITY_ADMIN_USER=admin + - GF_SECURITY_ADMIN_PASSWORD=admin + - GF_USERS_ALLOW_SIGN_UP=false + - GF_AUTH_ANONYMOUS_ENABLED=true + - GF_AUTH_ANONYMOUS_ORG_ROLE=Viewer + volumes: + - ./infra/grafana/provisioning:/etc/grafana/provisioning:ro + - grafana_data:/var/lib/grafana + ports: + - "3001:3000" + depends_on: + - prometheus + - loki + - tempo + +volumes: + tempo_data: + loki_data: + prometheus_data: + grafana_data: diff --git a/docker-compose.yml b/docker-compose.yml index 
f5a1817d..82250742 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -28,6 +28,7 @@ services: environment: - DB_HOST=db - REDIS_HOST=redis + - OTEL_EXPORTER_ENDPOINT=otel-collector:4317 ports: - "8000:8000" depends_on: @@ -65,6 +66,7 @@ services: environment: - DB_HOST=db - REDIS_HOST=redis + - OTEL_EXPORTER_ENDPOINT=otel-collector:4317 depends_on: - redis - db @@ -87,6 +89,7 @@ services: environment: - DB_HOST=db - REDIS_HOST=redis + - OTEL_EXPORTER_ENDPOINT=otel-collector:4317 depends_on: - redis - db @@ -157,6 +160,9 @@ services: path: ./frontend/vite.config.ts restart: on-failure + # Observability stack defined in docker-compose.observability.yml + # Run with: docker compose -f docker-compose.yml -f docker-compose.observability.yml up -d + volumes: postgres_data: redis_data: \ No newline at end of file diff --git a/infra/grafana/provisioning/dashboards/dashboards.yaml b/infra/grafana/provisioning/dashboards/dashboards.yaml new file mode 100644 index 00000000..b21aea8f --- /dev/null +++ b/infra/grafana/provisioning/dashboards/dashboards.yaml @@ -0,0 +1,16 @@ +# Grafana dashboard provisioning +# Auto-imports dashboards from the specified folder + +apiVersion: 1 + +providers: + - name: 'Open Wearables' + orgId: 1 + folder: 'Open Wearables' + folderUid: 'open-wearables' + type: file + disableDeletion: false + updateIntervalSeconds: 30 + allowUiUpdates: true + options: + path: /etc/grafana/provisioning/dashboards/json diff --git a/infra/grafana/provisioning/datasources/datasources.yaml b/infra/grafana/provisioning/datasources/datasources.yaml new file mode 100644 index 00000000..ba913732 --- /dev/null +++ b/infra/grafana/provisioning/datasources/datasources.yaml @@ -0,0 +1,62 @@ +# Grafana datasource provisioning +# Auto-configures connections to Prometheus, Loki, and Tempo + +apiVersion: 1 + +datasources: + # Prometheus for metrics + - name: Prometheus + type: prometheus + access: proxy + url: http://prometheus:9090 + isDefault: true + editable: false + 
jsonData: + httpMethod: POST + manageAlerts: true + prometheusType: Prometheus + + # Loki for logs + - name: Loki + type: loki + access: proxy + url: http://loki:3100 + editable: false + jsonData: + maxLines: 1000 + derivedFields: + # Extract trace_id from JSON logs and link to Tempo + - name: TraceID + matcherRegex: '"trace_id":\s*"([a-f0-9]+)"' + url: '$${__value.raw}' + datasourceUid: tempo + urlDisplayLabel: 'View Trace' + + # Tempo for traces + - name: Tempo + type: tempo + access: proxy + url: http://tempo:3200 + uid: tempo + editable: false + jsonData: + httpMethod: GET + tracesToLogs: + datasourceUid: loki + filterByTraceID: true + filterBySpanID: true + mapTagNamesEnabled: true + tags: + - key: service.name + value: service_name + tracesToMetrics: + datasourceUid: prometheus + tags: + - key: service.name + value: service + serviceMap: + datasourceUid: prometheus + nodeGraph: + enabled: true + lokiSearch: + datasourceUid: loki diff --git a/infra/loki.yaml b/infra/loki.yaml new file mode 100644 index 00000000..bdc6e767 --- /dev/null +++ b/infra/loki.yaml @@ -0,0 +1,52 @@ +# Grafana Loki configuration for log storage + +auth_enabled: false + +server: + http_listen_port: 3100 + grpc_listen_port: 9096 + +common: + path_prefix: /loki + storage: + filesystem: + chunks_directory: /loki/chunks + rules_directory: /loki/rules + replication_factor: 1 + ring: + kvstore: + store: inmemory + +schema_config: + configs: + - from: 2024-01-01 + store: tsdb + object_store: filesystem + schema: v13 + index: + prefix: index_ + period: 24h + +storage_config: + filesystem: + directory: /loki/chunks + +limits_config: + reject_old_samples: true + reject_old_samples_max_age: 168h + ingestion_rate_mb: 16 + ingestion_burst_size_mb: 32 + max_entries_limit_per_query: 5000 + +compactor: + working_directory: /loki/compactor + +query_range: + results_cache: + cache: + embedded_cache: + enabled: true + max_size_mb: 100 + +ruler: + alertmanager_url: "" diff --git 
a/infra/otel-collector-config.yaml b/infra/otel-collector-config.yaml new file mode 100644 index 00000000..f957cd85 --- /dev/null +++ b/infra/otel-collector-config.yaml @@ -0,0 +1,77 @@ +# OpenTelemetry Collector configuration +# Receives traces, logs, and metrics from the application and exports to backends + +receivers: + otlp: + protocols: + grpc: + endpoint: 0.0.0.0:4317 + http: + endpoint: 0.0.0.0:4318 + cors: + allowed_origins: + - "http://localhost:3000" + - "http://localhost:8000" + +processors: + batch: + timeout: 1s + send_batch_size: 1024 + + # Add resource attributes for better filtering + resource: + attributes: + - key: deployment.environment + from_attribute: deployment.environment + action: upsert + + # Memory limiter to prevent OOM + memory_limiter: + check_interval: 1s + limit_mib: 512 + spike_limit_mib: 128 + +exporters: + # Debug exporter for troubleshooting (logs to collector stdout) + debug: + verbosity: detailed + + # Traces to Tempo + otlp/tempo: + endpoint: tempo:4317 + tls: + insecure: true + + # Logs to Loki + loki: + endpoint: http://loki:3100/loki/api/v1/push + default_labels_enabled: + exporter: false + job: true + + # Metrics to Prometheus via remote write + prometheusremotewrite: + endpoint: http://prometheus:9090/api/v1/write + tls: + insecure: true + +service: + pipelines: + traces: + receivers: [otlp] + processors: [memory_limiter, batch, resource] + exporters: [otlp/tempo] + + logs: + receivers: [otlp] + processors: [memory_limiter, batch, resource] + exporters: [loki] + + metrics: + receivers: [otlp] + processors: [memory_limiter, batch] + exporters: [prometheusremotewrite] + + telemetry: + logs: + level: info diff --git a/infra/prometheus.yml b/infra/prometheus.yml new file mode 100644 index 00000000..33f79b18 --- /dev/null +++ b/infra/prometheus.yml @@ -0,0 +1,39 @@ +# Prometheus configuration + +global: + scrape_interval: 15s + evaluation_interval: 15s + external_labels: + monitor: 'open-wearables' + +# Enable remote write 
receiver for OTel collector +# NOTE(review): remote_write below is Prometheus's *outbound* sender config (empty here). +# Accepting pushes from the OTel collector and Tempo's metrics generator requires +# starting Prometheus with the --web.enable-remote-write-receiver flag — confirm the +# prometheus service in docker-compose.observability.yml passes it. +remote_write: [] + +# Alerting configuration (optional - can be expanded later) +alerting: + alertmanagers: [] + +# Rule files (optional - can be expanded later) +rule_files: [] + +# Scrape configurations +scrape_configs: + # Prometheus self-monitoring + - job_name: 'prometheus' + static_configs: + - targets: ['localhost:9090'] + + # OTel Collector metrics + - job_name: 'otel-collector' + static_configs: + - targets: ['otel-collector:8888'] + + # Tempo metrics + - job_name: 'tempo' + static_configs: + - targets: ['tempo:3200'] + + # Loki metrics + - job_name: 'loki' + static_configs: + - targets: ['loki:3100'] diff --git a/infra/tempo.yaml b/infra/tempo.yaml new file mode 100644 index 00000000..acae2f55 --- /dev/null +++ b/infra/tempo.yaml @@ -0,0 +1,52 @@ +# Grafana Tempo configuration for trace storage + +server: + http_listen_port: 3200 + +distributor: + receivers: + otlp: + protocols: + grpc: + endpoint: 0.0.0.0:4317 + http: + endpoint: 0.0.0.0:4318 + +ingester: + trace_idle_period: 10s + max_block_bytes: 1_000_000 + max_block_duration: 5m + +compactor: + compaction: + compaction_window: 1h + max_block_bytes: 100_000_000 + block_retention: 48h + compacted_block_retention: 1h + +storage: + trace: + backend: local + local: + path: /var/tempo/traces + wal: + path: /var/tempo/wal + +querier: + frontend_worker: + frontend_address: localhost:9095 + +metrics_generator: + registry: + external_labels: + source: tempo + storage: + path: /var/tempo/generator/wal + remote_write: + - url: http://prometheus:9090/api/v1/write + send_exemplars: true + +overrides: + defaults: + metrics_generator: + processors: [service-graphs, span-metrics] diff --git a/justfile b/justfile new file mode 100644 index 00000000..640e4728 --- /dev/null +++ b/justfile @@ -0,0 +1,69 @@ +docker_command := "docker compose -f docker-compose.yml" +docker_exec := docker_command + " exec app" +alembic_cmd := "uv run alembic" + +# Show this help +help: + @echo 
"============================================================" + @echo "This is a list of available commands for this project." + @echo "============================================================" + @just --list + +# Build docker image +build: + {{docker_command}} build --no-cache + +# Run the environment in detached mode +run: + {{docker_command}} up -d --force-recreate + +# Run the non-detached environment +up: + {{docker_command}} up --force-recreate + +# Run the environment with hot-reload +watch: + {{docker_command}} watch + +# Run the environment with observability stack (Grafana, Prometheus, Tempo, Loki) +# The stack is defined in docker-compose.observability.yml, so that file must be +# included alongside the base compose file (see the note in docker-compose.yml). +observe: + {{docker_command}} -f docker-compose.observability.yml --profile observability up -d --force-recreate + +# Run the environment with hot-reload and observability stack +watch-observe: + {{docker_command}} -f docker-compose.observability.yml --profile observability watch + +# Stop running instance +stop: + {{docker_command}} stop + +# Kill running instance +down: + {{docker_command}} down + +# Run the tests +# NOTE(review): after `cd backend` the relative path backend/config/.env.test looks +# doubled — confirm whether ENV is resolved from the repo root; it may need to be +# config/.env.test instead. +test: + cd backend && ENV=backend/config/.env.test uv run pytest -v --cov=app + +# Apply all migrations +migrate: + {{docker_exec}} {{alembic_cmd}} upgrade head + +# Seed sample data +init: + {{docker_exec}} uv sync --group dev + {{docker_exec}} uv run python scripts/init/seed_admin.py + {{docker_exec}} uv run python scripts/init/seed_series_types.py + {{docker_exec}} uv run python scripts/init/seed_activity_data.py + +# Create a new migration (usage: just create_migration "Description of the change") +create_migration m: + {{docker_exec}} {{alembic_cmd}} revision --autogenerate -m "{{m}}" + +# Revert the last migration +downgrade: + {{docker_exec}} {{alembic_cmd}} downgrade -1 + +# Truncate all tables in the database (WARNING: deletes all data) +reset_db: + {{docker_exec}} uv run python scripts/reset_database.py