Skip to content

Commit 310c10d

Browse files
committed
feat: updated logging to use loguru
1 parent e4b357e commit 310c10d

File tree

13 files changed

+84
-68
lines changed

13 files changed

+84
-68
lines changed

app/config/logger.py

Lines changed: 65 additions & 32 deletions
Original file line numberDiff line numberDiff line change
@@ -1,36 +1,69 @@
11
import logging
2-
import logging.config
3-
4-
5-
# Declarative logging configuration consumed by ``logging.config.dictConfig``.
# Routes everything through a single console handler with one shared format
# while keeping Uvicorn's pre-existing loggers enabled.
LOGGING_CONFIG = {
    "version": 1,
    # Leave loggers created before configuration (e.g. Uvicorn's) active.
    "disable_existing_loggers": False,
    "formatters": {
        "default": {
            "format": "%(asctime)s [%(levelname)s] %(name)s: %(message)s",
        },
    },
    "handlers": {
        "console": {
            "class": "logging.StreamHandler",
            "formatter": "default",
        },
    },
    # Applies to every logger that is not explicitly listed below.
    "root": {
        "level": "INFO",
        "handlers": ["console"],
    },
    "loggers": {
        # Uvicorn's server loggers stay at INFO.
        "uvicorn": {"level": "INFO"},
        "uvicorn.error": {"level": "INFO"},
        "uvicorn.access": {"level": "INFO"},
        # Application packages log verbosely.
        "app.routers": {"level": "DEBUG"},
        "app.services": {"level": "DEBUG"},
        "app.platforms": {"level": "DEBUG"},
    },
}
2+
import os
3+
import sys
4+
5+
6+
from loguru import logger
7+
8+
9+
class InterceptHandler(logging.Handler):
    """Forward records emitted through the stdlib ``logging`` module
    (including uvicorn's loggers) into Loguru."""

    def emit(self, record):
        # Translate the stdlib level name into a Loguru level; fall back
        # to the raw numeric level when Loguru has no level of that name.
        try:
            level = logger.level(record.levelname).name
        except ValueError:
            level = record.levelno

        # Walk up the call stack past frames that belong to the logging
        # module itself so Loguru reports the original call site.
        depth = 2
        frame = logging.currentframe()
        while frame is not None and frame.f_code.co_filename == logging.__file__:
            frame = frame.f_back
            depth += 1

        opted = logger.opt(depth=depth, exception=record.exc_info)
        opted.log(level, record.getMessage())
3326

3427

3528
def setup_logging():
    """Configure Loguru as the application's single logging backend.

    Removes Loguru's default handler, installs a stdout sink suited to the
    current environment (``APP_ENV``: JSON for production log shippers,
    colorized text otherwise), and reroutes the stdlib/uvicorn loggers
    through ``InterceptHandler`` so every record ends up in Loguru.

    Returns:
        The configured Loguru ``logger`` instance.
    """
    logger.remove()  # remove default handler
    env = os.getenv("APP_ENV", "development")

    if env == "production":
        # JSON logs for ELK
        logger.add(
            sys.stdout,
            serialize=True,
            backtrace=False,
            diagnose=False,
            level="INFO",
        )
    else:
        # Pretty logs for dev
        logger.add(
            sys.stdout,
            colorize=True,
            format=(
                "<green>{time:YYYY-MM-DD HH:mm:ss}</green> | "
                "<level>{level: <8}</level> | "
                "<cyan>{name}</cyan>:<cyan>{function}</cyan>:<cyan>{line}</cyan> - "
                "{message}"
            ),
            backtrace=True,
            diagnose=True,
            level="DEBUG",
        )

    # One shared handler serves every intercepted logger; instantiating a
    # fresh InterceptHandler per logger only wastes objects.
    intercept_handler = InterceptHandler()
    for name in (
        "uvicorn",
        "uvicorn.error",
        "uvicorn.access",
        "fastapi",
        "app.routers",
        "app.services",
        "app.platforms",
    ):
        intercepted = logging.getLogger(name)
        intercepted.handlers = [intercept_handler]
        intercepted.propagate = False
        # Without an explicit level these loggers sit at NOTSET and inherit
        # the root logger's default WARNING effective level, silently
        # dropping INFO/DEBUG records before they ever reach Loguru.  Open
        # the stdlib gate fully and let the Loguru sink level do the
        # filtering (INFO in production, DEBUG in development).
        intercepted.setLevel(logging.DEBUG)

    return logger

app/database/db.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,13 +1,12 @@
1-
import logging
21
import os
32
from typing import Optional
43

54
from dotenv import load_dotenv
5+
from loguru import logger
66
from sqlalchemy import create_engine
77
from sqlalchemy.orm import sessionmaker, declarative_base
88

99
load_dotenv()
10-
logger = logging.getLogger(__name__)
1110

1211
DATABASE_URL: Optional[str] = os.getenv("DATABASE_URL")
1312

@@ -36,6 +35,7 @@ def get_db():
3635
yield db
3736
db.commit()
3837
except Exception:
38+
logger.exception("An error occurred during database retrieval")
3939
db.rollback()
4040
raise
4141
finally:

app/database/models/processing_job.py

Lines changed: 1 addition & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,16 +1,13 @@
11
import datetime
2-
import logging
32
from typing import List, Optional
3+
from loguru import logger
44
from sqlalchemy import DateTime, Enum, Integer, String
55
from app.database.db import Base
66
from sqlalchemy.orm import Session, Mapped, mapped_column
77

88
from app.schemas.unit_job import ProcessTypeEnum, ProcessingStatusEnum
99

1010

11-
logger = logging.getLogger(__name__)
12-
13-
1411
class ProcessingJobRecord(Base):
1512
__tablename__ = "processing_jobs"
1613

app/platforms/dispatcher.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,6 @@
11
import importlib
2-
import logging
2+
3+
from loguru import logger
34
import app.platforms.implementations
45
import pkgutil
56
from typing import Dict, Type
@@ -8,8 +9,6 @@
89

910
PROCESSING_PLATFORMS: Dict[ProcessTypeEnum, Type[BaseProcessingPlatform]] = {}
1011

11-
logger = logging.getLogger(__name__)
12-
1312

1413
def register_platform(service_type: ProcessTypeEnum):
1514
def decorator(cls: Type[BaseProcessingPlatform]):
@@ -40,4 +39,5 @@ def get_processing_platform(service_type: ProcessTypeEnum) -> BaseProcessingPlat
4039
try:
4140
return PROCESSING_PLATFORMS[service_type]()
4241
except KeyError:
42+
logger.error(f"Processing platform for service type {service_type} not found.")
4343
raise ValueError(f"Unsupported service type: {service_type}")

app/platforms/implementations/ogc_api_process.py

Lines changed: 1 addition & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,11 +1,8 @@
1-
import logging
2-
31
from app.platforms.base import BaseProcessingPlatform
42
from app.platforms.dispatcher import register_platform
53
from app.schemas.enum import ProcessTypeEnum, ProcessingStatusEnum
64
from app.schemas.unit_job import ServiceDetails
7-
8-
logger = logging.getLogger(__name__)
5+
from loguru import logger
96

107

118
@register_platform(ProcessTypeEnum.OGC_API_PROCESS)

app/platforms/implementations/openeo.py

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,10 +1,10 @@
11
import datetime
2-
import logging
32
import os
43
import re
54
import urllib
65
import jwt
76

7+
from loguru import logger
88
import openeo
99
import requests
1010
from dotenv import load_dotenv
@@ -15,7 +15,6 @@
1515
from app.schemas.unit_job import ServiceDetails
1616

1717
load_dotenv()
18-
logger = logging.getLogger(__name__)
1918

2019
# Constants
2120
BACKEND_AUTH_ENV_MAP = {
@@ -56,7 +55,7 @@ def _connection_expired(self, connection: openeo.Connection) -> bool:
5655
logger.debug("JWT bearer token is valid.")
5756
return False # Token is valid
5857
except Exception as e:
59-
logger.warning(f"JWT token validation failed: {e}")
58+
logger.error(f"JWT token validation failed: {e}")
6059
return True # Token is expired or invalid
6160
else:
6261
logger.warning("No JWT bearer token found in connection.")

app/routers/jobs_status.py

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,15 +1,14 @@
11
import asyncio
2-
import logging
32

43
from fastapi import APIRouter, Depends, WebSocket, WebSocketDisconnect
54
from sqlalchemy.orm import Session
5+
from loguru import logger
66

77
from app.database.db import get_db
88
from app.schemas.jobs_status import JobsStatusResponse
99
from app.services.processing import get_processing_jobs_by_user_id
1010

1111
router = APIRouter()
12-
logger = logging.getLogger(__name__)
1312

1413

1514
@router.get(
@@ -26,7 +25,6 @@ async def get_jobs_status(
2625
"""
2726
logger.debug(f"Fetching jobs list for user {user}")
2827
processing_jobs = get_processing_jobs_by_user_id(db, user)
29-
print(processing_jobs)
3028
return JobsStatusResponse(
3129
upscaling_tasks=[], processing_jobs=get_processing_jobs_by_user_id(db, user)
3230
)

app/routers/tiles.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,13 +1,12 @@
1-
import logging
21
from fastapi import APIRouter, HTTPException, status
32
from geojson_pydantic import GeometryCollection
3+
from loguru import logger
44

55
from app.schemas.tiles import TileRequest
66
from app.services.tiles.base import split_polygon_by_grid
77

88

99
router = APIRouter()
10-
logger = logging.getLogger(__name__)
1110

1211

1312
@router.post(

app/routers/unit_jobs.py

Lines changed: 3 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,5 @@
1-
import logging
2-
31
from fastapi import APIRouter, Depends, HTTPException, status
2+
from loguru import logger
43
from sqlalchemy.orm import Session
54

65
from app.database.db import get_db
@@ -10,7 +9,6 @@
109
# from app.auth import get_current_user
1110

1211
router = APIRouter()
13-
logger = logging.getLogger(__name__)
1412

1513

1614
@router.post(
@@ -26,7 +24,7 @@ async def create_unit_job(
2624
try:
2725
return create_processing_job(db, user, payload)
2826
except Exception as e:
29-
logger.error(f"Error creating unit job for user {user}: {e}")
27+
logger.exception(f"Error creating unit job for user {user}: {e}")
3028
raise HTTPException(
3129
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
3230
detail=f"An error occurred while creating the processing job: {e}",
@@ -43,6 +41,7 @@ async def get_job(
4341
) -> ProcessingJob:
4442
job = get_processing_job_by_user_id(db, job_id, user)
4543
if not job:
44+
logger.error(f"Processing job {job_id} not found for user {user}")
4645
raise HTTPException(
4746
status_code=404,
4847
detail=f"Processing job {job_id} not found",

app/services/processing.py

Lines changed: 2 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
import json
2-
import logging
32
from typing import List, Optional
3+
4+
from loguru import logger
45
from app.database.models.processing_job import (
56
ProcessingJobRecord,
67
get_job_by_user_id,
@@ -21,9 +22,6 @@
2122
)
2223

2324

24-
logger = logging.getLogger(__name__)
25-
26-
2725
def create_processing_job(
2826
database: Session, user: str, summary: BaseJobRequest
2927
) -> ProcessingJobSummary:

0 commit comments

Comments
 (0)