Skip to content

Commit 07f6b3b

Browse files
authored
Improved logging (#8)
* feat: updated logging to use loguru
* fix: duplicate fetching of processing jobs
* feat: improved logging with correlation ID
* fix: linting issues
1 parent e4b357e commit 07f6b3b

File tree

15 files changed

+136
-69
lines changed

15 files changed

+136
-69
lines changed

app/config/logger.py

Lines changed: 82 additions & 32 deletions
Original file line numberDiff line numberDiff line change
@@ -1,36 +1,86 @@
11
import logging
2-
import logging.config
3-
4-
5-
LOGGING_CONFIG = {
6-
"version": 1,
7-
"disable_existing_loggers": False, # keeps Uvicorn's loggers
8-
"formatters": {
9-
"default": {
10-
"format": "%(asctime)s [%(levelname)s] %(name)s: %(message)s",
11-
},
12-
},
13-
"handlers": {
14-
"console": {
15-
"class": "logging.StreamHandler",
16-
"formatter": "default",
17-
},
18-
},
19-
"root": { # applies to all loggers unless overridden
20-
"level": "INFO",
21-
"handlers": ["console"],
22-
},
23-
"loggers": {
24-
"uvicorn": {"level": "INFO"},
25-
"uvicorn.error": {"level": "INFO"},
26-
"uvicorn.access": {"level": "INFO"},
27-
# custom API loggers
28-
"app.routers": {"level": "DEBUG"}, # all your routers
29-
"app.services": {"level": "DEBUG"}, # all your services
30-
"app.platforms": {"level": "DEBUG"}, # all platform implementations
31-
},
32-
}
2+
import os
3+
import sys
4+
5+
6+
from app.middleware.correlation_id import correlation_id_ctx
7+
from loguru import logger
8+
9+
10+
class InterceptHandler(logging.Handler):
    """
    Redirect standard logging (incl. uvicorn) records to Loguru.

    Installed as the sole handler on selected stdlib loggers by
    ``setup_logging`` so that every log line flows through Loguru's sinks.
    """

    def emit(self, record):
        # Map the stdlib level name to the matching Loguru level; fall back
        # to the numeric level for custom levels Loguru does not know about.
        # NOTE: the original mixed this lookup with the correlation-ID fetch
        # in one try block, which could leave `corr_id` unbound on ValueError.
        try:
            level = logger.level(record.levelname).name
        except ValueError:
            level = record.levelno

        # Walk the call stack past the logging module's own frames so Loguru
        # reports the real caller's file/function/line, not logging internals.
        frame, depth = logging.currentframe(), 2
        while frame and frame.f_code.co_filename == logging.__file__:
            frame = frame.f_back
            depth += 1

        # The correlation ID is injected by `correlation_id_filter` on the
        # sink, so no per-record bind is needed here (the original's trailing
        # `logger.bind(...)` discarded its result and had no effect).
        logger.opt(depth=depth, exception=record.exc_info).log(
            level, record.getMessage()
        )
33+
34+
35+
def correlation_id_filter(record):
    """Loguru filter that stamps each record with the active correlation ID.

    Always returns True, so it never suppresses records — it only enriches
    them with the request-scoped ID from ``correlation_id_ctx``.
    """
    extra = record["extra"]
    extra["correlation_id"] = correlation_id_ctx.get()
    return True
3339

3440

3541
def setup_logging():
    """Configure Loguru as the application's single logging backend.

    Removes Loguru's default handler, installs an environment-appropriate
    stdout sink (serialized JSON for production ELK ingestion, colorized
    text for development), and reroutes the stdlib/uvicorn/fastapi loggers
    through ``InterceptHandler`` so every line carries a correlation ID.

    Returns:
        The configured Loguru ``logger`` instance.
    """
    logger.remove()  # remove default handler
    env = os.getenv("APP_ENV", "development")

    # Guarantee the key exists even for records logged outside a request,
    # so the dev format's {extra[correlation_id]} never raises KeyError.
    logger.configure(extra={"correlation_id": None})
    if env == "production":
        # JSON logs for ELK; suppress verbose tracebacks in production.
        logger.add(
            sys.stdout,
            serialize=True,
            backtrace=False,
            diagnose=False,
            level="INFO",
            filter=correlation_id_filter,
        )
    else:
        # Pretty logs for dev
        logger.add(
            sys.stdout,
            colorize=True,
            format=(
                "<green>{time:YYYY-MM-DD HH:mm:ss}</green> | "
                "<i>{extra[correlation_id]}</i> | "
                "<level>{level: <8}</level> | "
                # fixed: stray space before the colon produced "func :42"
                "<cyan>{name}</cyan>:<cyan>{function}</cyan>:<cyan>{line}</cyan> - "
                "{message}"
            ),
            backtrace=True,
            diagnose=True,
            level="DEBUG",
            filter=correlation_id_filter,
        )

    # One stateless handler instance is enough for every intercepted logger.
    intercept = InterceptHandler()
    for name in (
        "uvicorn",
        "uvicorn.error",
        "uvicorn.access",
        "fastapi",
        "app.routers",
        "app.services",
        "app.platforms",
    ):
        std_logger = logging.getLogger(name)  # fetch once, not twice per name
        std_logger.handlers = [intercept]
        std_logger.propagate = False

    return logger

app/database/db.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,13 +1,12 @@
1-
import logging
21
import os
32
from typing import Optional
43

54
from dotenv import load_dotenv
5+
from loguru import logger
66
from sqlalchemy import create_engine
77
from sqlalchemy.orm import sessionmaker, declarative_base
88

99
load_dotenv()
10-
logger = logging.getLogger(__name__)
1110

1211
DATABASE_URL: Optional[str] = os.getenv("DATABASE_URL")
1312

@@ -36,6 +35,7 @@ def get_db():
3635
yield db
3736
db.commit()
3837
except Exception:
38+
logger.exception("An error occurred during database retrieval")
3939
db.rollback()
4040
raise
4141
finally:

app/database/models/processing_job.py

Lines changed: 1 addition & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,16 +1,13 @@
11
import datetime
2-
import logging
32
from typing import List, Optional
3+
from loguru import logger
44
from sqlalchemy import DateTime, Enum, Integer, String
55
from app.database.db import Base
66
from sqlalchemy.orm import Session, Mapped, mapped_column
77

88
from app.schemas.unit_job import ProcessTypeEnum, ProcessingStatusEnum
99

1010

11-
logger = logging.getLogger(__name__)
12-
13-
1411
class ProcessingJobRecord(Base):
1512
__tablename__ = "processing_jobs"
1613

app/main.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,6 @@
11
from fastapi import FastAPI
22

3+
from app.middleware.correlation_id import add_correlation_id
34
from app.platforms.dispatcher import load_processing_platforms
45
from .config.logger import setup_logging
56
from .config.settings import settings
@@ -15,6 +16,8 @@
1516
version="1.0.0",
1617
)
1718

19+
app.middleware("http")(add_correlation_id)
20+
1821
# Register Keycloak - must be done after FastAPI app creation
1922
# keycloak.register(app, prefix="/auth") # mounts OIDC endpoints for login if needed
2023

app/middleware/correlation_id.py

Lines changed: 33 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,33 @@
1+
import uuid
2+
import contextvars
3+
from fastapi import Request
4+
from loguru import logger
5+
6+
# Context var to store correlation ID per request
7+
correlation_id_ctx = contextvars.ContextVar("correlation_id", default="-")
8+
9+
10+
async def add_correlation_id(request: Request, call_next):
    """FastAPI HTTP middleware that tags each request with a correlation ID.

    Reuses the client-supplied ``X-Correlation-ID`` header when present,
    otherwise generates a UUID4. The ID is stored in ``correlation_id_ctx``
    for log enrichment, echoed back on the response header, and logged at
    request start and completion.
    """
    # Try to reuse correlation ID from client headers, else generate one
    corr_id = request.headers.get("X-Correlation-ID", str(uuid.uuid4()))
    token = correlation_id_ctx.set(corr_id)
    bound = logger.bind(correlation_id=corr_id)

    try:
        # Log at start
        bound.info(
            f"Incoming request: {request.method} - {request.url.path}",
            method=request.method,
            path=request.url.path,
        )

        response = await call_next(request)

        # Add correlation ID to response headers
        response.headers["X-Correlation-ID"] = corr_id

        # Log at end
        bound.info(
            f"Request completed with status code {response.status_code}",
            status_code=response.status_code,
        )
        return response
    finally:
        # Reset even when call_next raises, so the context var never leaks
        # this request's ID into unrelated tasks (the original skipped the
        # reset on exceptions).
        correlation_id_ctx.reset(token)

app/platforms/dispatcher.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,6 @@
11
import importlib
2-
import logging
2+
3+
from loguru import logger
34
import app.platforms.implementations
45
import pkgutil
56
from typing import Dict, Type
@@ -8,8 +9,6 @@
89

910
PROCESSING_PLATFORMS: Dict[ProcessTypeEnum, Type[BaseProcessingPlatform]] = {}
1011

11-
logger = logging.getLogger(__name__)
12-
1312

1413
def register_platform(service_type: ProcessTypeEnum):
1514
def decorator(cls: Type[BaseProcessingPlatform]):
@@ -40,4 +39,5 @@ def get_processing_platform(service_type: ProcessTypeEnum) -> BaseProcessingPlat
4039
try:
4140
return PROCESSING_PLATFORMS[service_type]()
4241
except KeyError:
42+
logger.error(f"Processing platform for service type {service_type} not found.")
4343
raise ValueError(f"Unsupported service type: {service_type}")

app/platforms/implementations/ogc_api_process.py

Lines changed: 0 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,12 +1,8 @@
1-
import logging
2-
31
from app.platforms.base import BaseProcessingPlatform
42
from app.platforms.dispatcher import register_platform
53
from app.schemas.enum import ProcessTypeEnum, ProcessingStatusEnum
64
from app.schemas.unit_job import ServiceDetails
75

8-
logger = logging.getLogger(__name__)
9-
106

117
@register_platform(ProcessTypeEnum.OGC_API_PROCESS)
128
class OGCAPIProcessPlatform(BaseProcessingPlatform):

app/platforms/implementations/openeo.py

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,10 +1,10 @@
11
import datetime
2-
import logging
32
import os
43
import re
54
import urllib
65
import jwt
76

7+
from loguru import logger
88
import openeo
99
import requests
1010
from dotenv import load_dotenv
@@ -15,7 +15,6 @@
1515
from app.schemas.unit_job import ServiceDetails
1616

1717
load_dotenv()
18-
logger = logging.getLogger(__name__)
1918

2019
# Constants
2120
BACKEND_AUTH_ENV_MAP = {
@@ -56,7 +55,7 @@ def _connection_expired(self, connection: openeo.Connection) -> bool:
5655
logger.debug("JWT bearer token is valid.")
5756
return False # Token is valid
5857
except Exception as e:
59-
logger.warning(f"JWT token validation failed: {e}")
58+
logger.error(f"JWT token validation failed: {e}")
6059
return True # Token is expired or invalid
6160
else:
6261
logger.warning("No JWT bearer token found in connection.")

app/routers/jobs_status.py

Lines changed: 1 addition & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,15 +1,14 @@
11
import asyncio
2-
import logging
32

43
from fastapi import APIRouter, Depends, WebSocket, WebSocketDisconnect
54
from sqlalchemy.orm import Session
5+
from loguru import logger
66

77
from app.database.db import get_db
88
from app.schemas.jobs_status import JobsStatusResponse
99
from app.services.processing import get_processing_jobs_by_user_id
1010

1111
router = APIRouter()
12-
logger = logging.getLogger(__name__)
1312

1413

1514
@router.get(
@@ -25,8 +24,6 @@ async def get_jobs_status(
2524
Return combined list of upscaling tasks and processing jobs for the authenticated user.
2625
"""
2726
logger.debug(f"Fetching jobs list for user {user}")
28-
processing_jobs = get_processing_jobs_by_user_id(db, user)
29-
print(processing_jobs)
3027
return JobsStatusResponse(
3128
upscaling_tasks=[], processing_jobs=get_processing_jobs_by_user_id(db, user)
3229
)

app/routers/tiles.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,13 +1,12 @@
1-
import logging
21
from fastapi import APIRouter, HTTPException, status
32
from geojson_pydantic import GeometryCollection
3+
from loguru import logger
44

55
from app.schemas.tiles import TileRequest
66
from app.services.tiles.base import split_polygon_by_grid
77

88

99
router = APIRouter()
10-
logger = logging.getLogger(__name__)
1110

1211

1312
@router.post(

0 commit comments

Comments
 (0)