Skip to content

Commit 4fbac04

Browse files
Merge remote-tracking branch 'upstream/master' into add-notifications-service
2 parents c12db38 + 2699607 commit 4fbac04

File tree

162 files changed

+1608
-665
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

162 files changed

+1608
-665
lines changed

.env-devel

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -161,7 +161,7 @@ INVITATIONS_USERNAME=admin
161161
INVITATIONS_TRACING={}
162162

163163
LOG_FORMAT_LOCAL_DEV_ENABLED=1
164-
LOG_FILTER_MAPPING='{"gunicorn.access":[" /v0/ ", " /v0/health "], "uvicorn.access":[" / "]}'
164+
LOG_FILTER_MAPPING='{"gunicorn.access":[" /v0/ ", " /v0/health "], "uvicorn.access":[" / ", " /v0/ "]}'
165165

166166
NOTIFICATIONS_LOGLEVEL=INFO
167167
NOTIFICATIONS_TRACING={}

.gitignore

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -55,7 +55,6 @@ locust_report/
5555
nosetests.xml
5656
test_failures/
5757

58-
5958
# Translations
6059
*.mo
6160
*.pot
@@ -182,3 +181,6 @@ tests/public-api/osparc_python_wheels/*
182181

183182
# osparc-config repo files
184183
repo.config
184+
185+
# repomix
186+
.repomix/*

packages/aws-library/tests/conftest.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -14,6 +14,7 @@
1414
"pytest_simcore.aws_ssm_service",
1515
"pytest_simcore.environment_configs",
1616
"pytest_simcore.file_extra",
17+
"pytest_simcore.logging",
1718
"pytest_simcore.pydantic_models",
1819
"pytest_simcore.pytest_global_environs",
1920
"pytest_simcore.repository_paths",

packages/celery-library/tests/conftest.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -26,6 +26,7 @@
2626
"pytest_simcore.docker_compose",
2727
"pytest_simcore.docker_swarm",
2828
"pytest_simcore.environment_configs",
29+
"pytest_simcore.logging",
2930
"pytest_simcore.rabbit_service",
3031
"pytest_simcore.redis_service",
3132
"pytest_simcore.repository_paths",

packages/notifications-library/tests/conftest.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -72,7 +72,7 @@ def product_data(
7272
return ProductData( # type: ignore
7373
product_name=product_name,
7474
display_name=product["display_name"],
75-
vendor_display_inline=f"{vendor.get('name','')}, {vendor.get('address','')}",
75+
vendor_display_inline=f"{vendor.get('name', '')}, {vendor.get('address', '')}",
7676
support_email=product["support_email"],
7777
homepage_url=vendor.get("url"),
7878
ui=product_ui,
Lines changed: 42 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,42 @@
1+
# In conftest.py or test_logging_utils.py
2+
import contextlib
3+
import logging
4+
from collections.abc import Iterator
5+
from contextlib import contextmanager
6+
7+
import pytest
8+
from pytest_mock import MockerFixture
9+
from servicelib.logging_utils import async_loggers
10+
11+
12+
@pytest.fixture(autouse=True)
def preserve_caplog_for_async_logging(mocker: MockerFixture) -> None:
    """Wrap ``async_loggers`` so pytest's log-capture handlers survive its setup.

    pytest captures log records via a handler attached to the root logger; the
    queue handler installed by ``async_loggers`` is not compatible with that
    capture mechanism, so caplog's handlers are re-attached right after the
    async logging setup has run.
    """
    real_async_loggers = async_loggers

    @contextmanager
    def _async_loggers_keeping_caplog(**kwargs) -> Iterator[None]:
        root = logging.getLogger()
        # Remember pytest's capture handlers before setup swaps them out.
        capture_handlers = [
            handler
            for handler in root.handlers
            if "LogCaptureHandler" in f"{type(handler)}"
        ]

        with real_async_loggers(**kwargs):
            # Restore the capture handlers alongside the queue handler.
            for handler in capture_handlers:
                if handler not in root.handlers:
                    root.addHandler(handler)
            yield

    # Patch every import site of async_loggers; silently skip targets that are
    # absent in the current test session.
    for target in (
        "servicelib.logging_utils.async_loggers",
        "servicelib.fastapi.logging_lifespan.async_loggers",
        "tests.test_logging_utils.async_loggers",
    ):
        with contextlib.suppress(AttributeError, ModuleNotFoundError):
            mocker.patch(target, _async_loggers_keeping_caplog)

packages/service-library/src/servicelib/fastapi/lifespan_utils.py

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,13 +1,15 @@
11
import contextlib
2-
from collections.abc import Iterator
3-
from typing import Final
2+
from collections.abc import AsyncIterator, Callable, Iterator
3+
from typing import Final, TypeAlias
44

55
from common_library.errors_classes import OsparcErrorMixin
66
from fastapi import FastAPI
77
from fastapi_lifespan_manager import State
88

99
from ..logging_utils import log_context
1010

11+
Lifespan: TypeAlias = Callable[[FastAPI], AsyncIterator[None]]
12+
1113

1214
class LifespanError(OsparcErrorMixin, RuntimeError): ...
1315

Lines changed: 78 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,78 @@
1+
import logging
2+
from collections.abc import AsyncIterator, Awaitable, Callable
3+
from contextlib import AsyncExitStack
4+
5+
from fastapi import FastAPI
6+
from settings_library.tracing import TracingSettings
7+
8+
from ..logging_utils import (
9+
LogLevelInt,
10+
async_loggers,
11+
log_context,
12+
)
13+
from ..logging_utils_filtering import LoggerName, MessageSubstring
14+
from .lifespan_utils import Lifespan
15+
16+
_logger = logging.getLogger(__name__)
17+
18+
19+
def create_logging_lifespan(
    *,
    log_format_local_dev_enabled: bool,
    logger_filter_mapping: dict[LoggerName, list[MessageSubstring]],
    tracing_settings: TracingSettings | None,
    log_base_level: LogLevelInt,
    noisy_loggers: tuple[str, ...] | None,
) -> Lifespan:
    """Returns a FastAPI-compatible lifespan handler to set up async logging.

    Async (queue-based) logging is enabled eagerly, at creation time; the
    returned lifespan only tears it down when the application shuts down.
    """
    # Enter async_loggers now and keep the exit callback on a stack so the
    # lifespan can close it after the app has finished serving.
    teardown_stack = AsyncExitStack()
    teardown_stack.enter_context(
        async_loggers(
            log_base_level=log_base_level,
            noisy_loggers=noisy_loggers,
            log_format_local_dev_enabled=log_format_local_dev_enabled,
            logger_filter_mapping=logger_filter_mapping,
            tracing_settings=tracing_settings,
        )
    )

    async def _logging_lifespan(app: FastAPI) -> AsyncIterator[None]:
        assert app is not None, "app must be provided"
        yield
        # App is shutting down: switch back to blocking (synchronous) logging.
        with log_context(_logger, logging.INFO, "Re-enable Blocking logger"):
            await teardown_stack.aclose()

    return _logging_lifespan
46+
47+
48+
def create_logging_shutdown_event(
    *,
    log_format_local_dev_enabled: bool,
    logger_filter_mapping: dict[LoggerName, list[MessageSubstring]],
    tracing_settings: TracingSettings | None,
    log_base_level: LogLevelInt,
    noisy_loggers: tuple[str, ...] | None,
) -> Callable[[], Awaitable[None]]:
    """Returns a FastAPI-compatible shutdown event handler to be used with old-style
    lifespan handlers. This is useful for applications that do not use the new
    async lifespan handlers introduced in fastapi 0.100.0.

    Note: This function is for backwards compatibility only and will be removed
    in the future. ``create_logging_lifespan`` should be used instead for
    new-style lifespan handlers.
    """
    # Async (queue-based) logging is enabled eagerly, at creation time; the
    # returned handler only tears it down on application shutdown.
    exit_stack = AsyncExitStack()
    exit_stack.enter_context(
        async_loggers(
            log_base_level=log_base_level,
            noisy_loggers=noisy_loggers,
            log_format_local_dev_enabled=log_format_local_dev_enabled,
            logger_filter_mapping=logger_filter_mapping,
            tracing_settings=tracing_settings,
        )
    )

    async def _on_shutdown_event() -> None:
        # Switch back to blocking (synchronous) logging before the process exits.
        with log_context(_logger, logging.INFO, "Re-enable Blocking logger"):
            await exit_stack.aclose()

    return _on_shutdown_event

0 commit comments

Comments
 (0)