Skip to content

Commit 4d246b7

Browse files
authored
Merge pull request #123 from JulianFP/logging
Add proper logging system to backend & frontend; update/fix docs
2 parents 6c7b619 + 4a32056 commit 4d246b7

29 files changed

+661
-443
lines changed

.github/workflows/ci.yml

Lines changed: 0 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -45,9 +45,6 @@ jobs:
4545
with:
4646
version: 10
4747

48-
- name: Run development containers
49-
run: docker compose up -d
50-
5148
- name: Install backend
5249
run: |
5350
sudo apt-get update && sudo apt-get install -y --no-install-recommends git gcc libldap2-dev libsasl2-dev

backend/project_W/__main__.py

Lines changed: 49 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,4 @@
1+
import logging
12
import platform
23
from pathlib import Path
34

@@ -7,9 +8,10 @@
78
from granian.server import Server
89
from itsdangerous import URLSafeTimedSerializer
910
from project_W_lib.config import load_config
10-
from project_W_lib.logger import get_logger
11+
from project_W_lib.logger import configure_logging
1112

1213
import project_W.dependencies as dp
14+
from project_W_lib.models.shared_setting_models import LoggingEnum
1315

1416
from ._version import __commit_id__, __version__
1517
from .models.setting_models import Settings, SecretKeyValidated
@@ -20,7 +22,6 @@
2022
)
2123

2224
program_name = "project-W"
23-
logger = get_logger(program_name)
2425

2526

2627
@click.command()
@@ -80,16 +81,6 @@ def main(
8081
development: bool,
8182
root_static_files: Path | None,
8283
):
83-
# post application version for debug purposes and bug reports
84-
logger.info(f"Running application version {__version__}")
85-
if __commit_id__ is None:
86-
raise Exception(
87-
"Couldn't read git hash from _version.py file. Make sure to install this package from a working git repository!"
88-
)
89-
dp.git_hash = __commit_id__.removeprefix("g")
90-
logger.info(f"Application was built from git hash {dp.git_hash}")
91-
logger.info(f"Python version: {platform.python_version()}")
92-
9384
dp.client_path = root_static_files
9485

9586
# parse config file
@@ -98,10 +89,37 @@ def main(
9889
if custom_config_path
9990
else load_config(program_name, Settings)
10091
)
92+
93+
# now we can setup the logger and everything else
94+
logging_dict = configure_logging(dp.config.logging)
95+
dp.logger = logging.getLogger(program_name)
96+
debug_enabled_anywhere = False
97+
access_log_handlers = [] # for granian
98+
if dp.config.logging.console.level == LoggingEnum.DEBUG:
99+
debug_enabled_anywhere = True
100+
access_log_handlers.append("console")
101+
if dp.config.logging.file.level == LoggingEnum.DEBUG:
102+
debug_enabled_anywhere = True
103+
access_log_handlers.append("file")
104+
if debug_enabled_anywhere:
105+
dp.logger.warning(
106+
"The level of at least one of the loggers has been set to 'DEBUG'. This will produce A LOT of logs which WILL INCLUDE SENSITIVE INFORMATION like tokens or user data, as well as result in slightly lower performance. If this is a production system, then please change the log level as soon as possible after you are done triaging a bug."
107+
)
108+
101109
dp.auth_s = URLSafeTimedSerializer(
102110
dp.config.security.secret_key.root.get_secret_value(), "Project-W"
103111
)
104112

113+
# post application version for debug purposes and bug reports
114+
dp.logger.info(f"Running application version {__version__}")
115+
if __commit_id__ is None:
116+
raise Exception(
117+
"Couldn't read git hash from _version.py file. Make sure to install this package from a working git repository!"
118+
)
119+
dp.git_hash = __commit_id__.removeprefix("g")
120+
dp.logger.info(f"Application was built from git hash {dp.git_hash}")
121+
dp.logger.info(f"Python version: {platform.python_version()}")
122+
105123
if run_periodic_tasks:
106124
execute_background_tasks()
107125
return
@@ -130,6 +148,25 @@ def main(
130148
"address": str(dp.config.web_server.address.ip),
131149
"port": dp.config.web_server.port,
132150
"workers": dp.config.web_server.worker_count,
151+
"log_enabled": True,
152+
"log_level": LogLevels.debug if debug_enabled_anywhere else LogLevels.info,
153+
"log_access_format": '%(addr)s - "%(method)s %(path)s %(protocol)s" %(status)d %(dt_ms).3f',
154+
"log_access": debug_enabled_anywhere,
155+
"log_dictconfig": {
156+
**logging_dict,
157+
"loggers": {
158+
"_granian": {
159+
"handlers": list(logging_dict["handlers"].keys()),
160+
"level": "DEBUG" if debug_enabled_anywhere else "INFO",
161+
"propagate": False,
162+
},
163+
"granian.access": {
164+
"handlers": access_log_handlers,
165+
"level": "DEBUG" if debug_enabled_anywhere else "INFO",
166+
"propagate": False,
167+
},
168+
},
169+
},
133170
}
134171

135172
if dp.config.web_server.ssl:
@@ -145,8 +182,6 @@ def main(
145182
)
146183

147184
if development:
148-
granian_options["log_level"] = LogLevels.debug
149-
granian_options["log_access"] = True
150185
granian_options["reload"] = True
151186
granian_options["reload_paths"] = [Path(__file__).parent.absolute()]
152187

backend/project_W/app.py

Lines changed: 37 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,14 +1,16 @@
11
from contextlib import asynccontextmanager
22

3-
from fastapi import FastAPI
3+
from fastapi import FastAPI, Request
44
from fastapi.routing import APIRoute
55
from fastapi.staticfiles import StaticFiles
66
from starlette.middleware.sessions import SessionMiddleware
77
from starlette.middleware.trustedhost import TrustedHostMiddleware
88
from uvicorn.middleware.proxy_headers import ProxyHeadersMiddleware
9+
from logging import getLogger
910
from project_W_lib.models.response_models import LocalAccountOperationModeEnum
1011

1112
import project_W.dependencies as dp
13+
from project_W_lib.models.shared_setting_models import LoggingEnum
1214

1315
from ._version import __version__
1416
from .caching import RedisAdapter
@@ -171,6 +173,40 @@ def custom_generate_unique_id(route: APIRoute):
171173
terms_of_service=f"{dp.config.client_url}/tos" if dp.config.terms_of_services else None,
172174
generate_unique_id_function=custom_generate_unique_id,
173175
)
176+
# logging middleware for debug mode
177+
if (
178+
dp.config.logging.console.level == LoggingEnum.DEBUG
179+
or dp.config.logging.file.level == LoggingEnum.DEBUG
180+
):
181+
middleware_logger = getLogger("project-W.middleware")
182+
183+
@app.middleware("http")
184+
async def logging_middleware(request: Request, call_next):
185+
middleware_logger.debug(
186+
"Incoming request",
187+
extra={
188+
"url_path": request.url.path,
189+
"url_host": request.url.hostname,
190+
"scheme": request.url.scheme,
191+
"method": request.method,
192+
"headers": dict(request.headers),
193+
"query_params": request.query_params,
194+
"path_params": request.path_params,
195+
"cookies": request.cookies,
196+
"client": request.client,
197+
},
198+
)
199+
response = await call_next(request)
200+
middleware_logger.debug(
201+
"Outgoing response",
202+
extra={
203+
"status": response.status_code,
204+
"headers": dict(response.headers),
205+
},
206+
)
207+
return response
208+
209+
174210
# middleware required by authlib for oidc
175211
app.add_middleware(
176212
SessionMiddleware,

backend/project_W/caching.py

Lines changed: 5 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,6 @@
77
from redis.asyncio.client import Pipeline
88
from redis.asyncio.retry import Retry
99
from redis.backoff import ExponentialBackoff
10-
from project_W_lib.logger import get_logger
1110
from project_W_lib.models.request_models import RunnerRegisterRequest
1211

1312
import project_W.dependencies as dp
@@ -18,8 +17,6 @@
1817

1918

2019
class CachingAdapter(ABC):
21-
logger = get_logger("project-W")
22-
2320
@abstractmethod
2421
async def open(self, connection_obj):
2522
"""
@@ -211,7 +208,7 @@ async def open(self, connection_obj: RedisConnection):
211208

212209
await self.__check_server_version()
213210

214-
self.logger.info("Successfully connected to Redis")
211+
dp.logger.info("Successfully connected to Redis")
215212

216213
async def __check_server_version(self):
217214
query_result = await self.client.info("server")
@@ -227,10 +224,10 @@ async def __check_server_version(self):
227224
elif redis_version_split[i] > self.minimal_required_redis_version[i]:
228225
break
229226

230-
self.logger.info(f"Redis server is on version {redis_version}")
227+
dp.logger.info(f"Redis server is on version {redis_version}")
231228

232229
async def close(self):
233-
self.logger.info("Closing Redis connections...")
230+
dp.logger.info("Closing Redis connections...")
234231
await self.client.close()
235232

236233
async def register_new_online_runner(
@@ -279,7 +276,7 @@ async def get_online_runner_by_id(self, runner_id: int) -> OnlineRunner | None:
279276
try:
280277
return OnlineRunner.model_validate(runner_dict)
281278
except ValidationError:
282-
self.logger.error(
279+
dp.logger.error(
283280
f"Validation error occurred while reading from redis! The following data was read instead of an OnlineRunner object: {runner_dict}. Continuing..."
284281
)
285282
return None
@@ -465,7 +462,7 @@ async def get_in_process_job(self, job_id: int) -> InProcessJob | None:
465462
try:
466463
return InProcessJob.model_validate(job_dict)
467464
except ValidationError:
468-
self.logger.error(
465+
dp.logger.error(
469466
f"Validation error occurred while reading from redis! The following data was read instead of an InProcessJob object: {job_dict}. Continuing..."
470467
)
471468
return None

backend/project_W/cli_tasks.py

Lines changed: 12 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -1,16 +1,12 @@
11
import asyncio
22

3-
from project_W_lib.logger import get_logger
4-
53
import project_W.dependencies as dp
64

75
from .models.setting_models import SecretKeyValidated
86
from .database import PostgresAdapter
97
from .security import ldap_deps, oidc_deps
108
from .smtp import SmtpClient
119

12-
logger = get_logger("project-W")
13-
1410

1511
async def db_init():
1612
dp.db = PostgresAdapter(str(dp.config.postgres_connection_string))
@@ -40,35 +36,35 @@ async def finish():
4036

4137

4238
async def database_cleanup():
43-
logger.info("Starting database cleanup task...")
39+
dp.logger.info("Starting database cleanup task...")
4440
if dp.config.cleanup.user_retention_in_days is not None:
4541
await dp.db.user_cleanup(dp.config.cleanup.user_retention_in_days)
4642
if dp.config.cleanup.finished_job_retention_in_days is not None:
4743
await dp.db.job_cleanup(dp.config.cleanup.finished_job_retention_in_days)
4844
await dp.db.general_cleanup()
49-
logger.info("Finished database cleanup task")
45+
dp.logger.info("Finished database cleanup task")
5046

5147

5248
async def oidc_token_invalidation():
53-
logger.info(
49+
dp.logger.info(
5450
"Starting cleanup of tokens associated with non-existing OIDC users or users with insufficient permissions..."
5551
)
5652
tokens_of_oidc_users = await dp.db.get_oidc_tokens()
5753
for token in tokens_of_oidc_users:
5854
await oidc_deps.invalidate_token_if_oidc_user_lost_privileges(token)
59-
logger.info(
55+
dp.logger.info(
6056
f"Finished cleanup of OIDC tokens, successfully checked {len(tokens_of_oidc_users)}"
6157
)
6258

6359

6460
async def ldap_token_invalidation():
65-
logger.info(
61+
dp.logger.info(
6662
"Starting cleanup of tokens associated with non-existing LDAP users or users with insufficient permissions..."
6763
)
6864
tokens_of_ldap_users = await dp.db.get_ldap_tokens()
6965
for token in tokens_of_ldap_users:
7066
await ldap_deps.invalidate_token_if_ldap_user_lost_privileges(ldap_deps.ldap_adapter, token)
71-
logger.info(
67+
dp.logger.info(
7268
f"Finished cleanup of LDAP tokens, successfully checked {len(tokens_of_ldap_users)}"
7369
)
7470

@@ -94,20 +90,20 @@ async def database_delete_encryption_loop():
9490

9591

9692
def execute_background_tasks():
97-
logger.info("Starting background tasks...")
93+
dp.logger.info("Starting background tasks...")
9894
asyncio.run(background_tasks_loop())
99-
logger.info("Finished all background tasks")
95+
dp.logger.info("Finished all background tasks")
10096

10197

10298
def perform_secret_key_rotation(secret_key: SecretKeyValidated):
103-
logger.info("Starting database re-encryption...")
99+
dp.logger.info("Starting database re-encryption...")
104100
asyncio.run(database_secret_rotation_loop(secret_key))
105-
logger.info("Finished database re-encryption")
101+
dp.logger.info("Finished database re-encryption")
106102

107103

108104
def perform_database_encrypted_content_deletion():
109-
logger.info("Starting deletion of encrypted contents in database...")
105+
dp.logger.info("Starting deletion of encrypted contents in database...")
110106
asyncio.run(database_delete_encryption_loop())
111-
logger.info(
107+
dp.logger.info(
112108
"Finished deletion of encrypted contents in database. Don't forget to also flush redis"
113109
)

0 commit comments

Comments (0)