From 31451c988af229be1fa825a5c3f6e6a6f4d0c8ba Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Mon, 7 Jul 2025 16:14:57 +0200 Subject: [PATCH 001/128] fix | --- .../src/servicelib/logging_utils.py | 43 ++++++++++--------- 1 file changed, 23 insertions(+), 20 deletions(-) diff --git a/packages/service-library/src/servicelib/logging_utils.py b/packages/service-library/src/servicelib/logging_utils.py index 7ef3bc28e94..61cfb51fe7b 100644 --- a/packages/service-library/src/servicelib/logging_utils.py +++ b/packages/service-library/src/servicelib/logging_utils.py @@ -106,13 +106,15 @@ def format(self, record) -> str: # SEE https://docs.python.org/3/library/logging.html#logrecord-attributes -DEFAULT_FORMATTING = ( - "log_level=%(levelname)s " - "| log_timestamp=%(asctime)s " - "| log_source=%(name)s:%(funcName)s(%(lineno)d) " - "| log_uid=%(log_uid)s " - "| log_oec=%(log_oec)s" - "| log_msg=%(message)s" +DEFAULT_FORMATTING = " | ".join( + [ + "log_level=%(levelname)s", + "log_timestamp=%(asctime)s", + "log_source=%(name)s:%(funcName)s(%(lineno)d)", + "log_uid=%(log_uid)s", + "log_oec=%(log_oec)s", + "log_msg=%(message)s", + ] ) LOCAL_FORMATTING = "%(levelname)s: [%(asctime)s/%(processName)s] [%(name)s:%(funcName)s(%(lineno)d)] - %(message)s" @@ -138,17 +140,19 @@ def config_all_loggers( fmt = DEFAULT_FORMATTING if tracing_settings is not None: - fmt = ( - "log_level=%(levelname)s " - "| log_timestamp=%(asctime)s " - "| log_source=%(name)s:%(funcName)s(%(lineno)d) " - "| log_uid=%(log_uid)s " - "| log_oec=%(log_oec)s" - "| log_trace_id=%(otelTraceID)s " - "| log_span_id=%(otelSpanID)s " - "| log_resource.service.name=%(otelServiceName)s " - "| log_trace_sampled=%(otelTraceSampled)s] " - "| log_msg=%(message)s" + fmt = " | ".join( + [ + "log_level=%(levelname)s", + "log_timestamp=%(asctime)s", + "log_source=%(name)s:%(funcName)s(%(lineno)d)", + "log_uid=%(log_uid)s", + "log_oec=%(log_oec)s", + "log_trace_id=%(otelTraceID)s", + "log_span_id=%(otelSpanID)s", + "log_resource.service.name=%(otelServiceName)s", + "log_trace_sampled=%(otelTraceSampled)s", + "log_msg=%(message)s", + ] ) setup_log_tracing(tracing_settings=tracing_settings) if log_format_local_dev_enabled: @@ -339,7 +343,6 @@ def log_decorator( logger_obj = logger or _logger def _decorator(func_or_coro: F) -> F: - _log_exc_kwargs = LogExceptionsKwargsDict( logger=logger_obj, level=level, @@ -420,7 +423,7 @@ def log_context( logger.log(level, log_msg, *args, **kwargs, stacklevel=stackelvel) yield duration = ( - f" in {(datetime.now() - start ).total_seconds()}s" # noqa: DTZ005 + f" in {(datetime.now() - start).total_seconds()}s" # noqa: DTZ005 if log_duration else "" ) From 2805704e4d1f342a7acc34a496356053573a3720 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Mon, 7 Jul 2025 16:52:45 +0200 Subject: [PATCH 002/128] async logger trials --- .../src/servicelib/logging_utils.py | 316 ++++++++++++++++-- 1 file changed, 284 insertions(+), 32 deletions(-) diff --git a/packages/service-library/src/servicelib/logging_utils.py b/packages/service-library/src/servicelib/logging_utils.py index 61cfb51fe7b..245c1fab547 100644 --- a/packages/service-library/src/servicelib/logging_utils.py +++ b/packages/service-library/src/servicelib/logging_utils.py @@ -8,9 +8,12 @@ import asyncio import functools import logging +import logging.handlers +import queue from asyncio import iscoroutinefunction from collections.abc import Callable, Iterator from contextlib import 
contextmanager +from dataclasses import dataclass from datetime import datetime from inspect import getframeinfo, stack from pathlib import Path @@ -116,62 +119,309 @@ def format(self, record) -> str: "log_msg=%(message)s", ] ) + LOCAL_FORMATTING = "%(levelname)s: [%(asctime)s/%(processName)s] [%(name)s:%(funcName)s(%(lineno)d)] - %(message)s" +# Tracing format strings +TRACING_FORMATTING = " | ".join( + [ + "log_level=%(levelname)s", + "log_timestamp=%(asctime)s", + "log_source=%(name)s:%(funcName)s(%(lineno)d)", + "log_uid=%(log_uid)s", + "log_oec=%(log_oec)s", + "log_trace_id=%(otelTraceID)s", + "log_span_id=%(otelSpanID)s", + "log_resource.service.name=%(otelServiceName)s", + "log_trace_sampled=%(otelTraceSampled)s", + "log_msg=%(message)s", + ] +) + +LOCAL_TRACING_FORMATTING = ( + "%(levelname)s: [%(asctime)s/%(processName)s] " + "[log_trace_id=%(otelTraceID)s log_span_id=%(otelSpanID)s " + "log_resource.service.name=%(otelServiceName)s log_trace_sampled=%(otelTraceSampled)s] " + "[%(name)s:%(funcName)s(%(lineno)d)] - %(message)s" +) + # Graylog Grok pattern extractor: # log_level=%{WORD:log_level} \| log_timestamp=%{TIMESTAMP_ISO8601:log_timestamp} \| log_source=%{DATA:log_source} \| (log_uid=%{WORD:log_uid} \| )?log_msg=%{GREEDYDATA:log_msg} +@dataclass +class AsyncLoggingSettings: + """Settings for asynchronous logging configuration.""" + + enabled: bool = True # Enable by default + queue_size: int = 10000 + batch_size: int = 100 + flush_interval: float = 1.0 + + +class SafeQueueListener(logging.handlers.QueueListener): + """ + Enhanced QueueListener with error isolation and metrics. + """ + + def __init__(self, queue_obj: queue.Queue, *handlers: logging.Handler) -> None: + super().__init__(queue_obj, *handlers, respect_handler_level=True) + self._error_count = 0 + self._processed_count = 0 + + def handle(self, record: logging.LogRecord) -> None: + """Handle a record with error isolation.""" + try: + super().handle(record) + self._processed_count += 1 + except Exception as exc: + self._error_count += 1 + # Log to stderr to avoid logging loops + import sys + + sys.stderr.write(f"Async logging handler failed: {exc}\n") + sys.stderr.flush() + + def get_metrics(self) -> dict[str, Any]: + """Get logging metrics.""" + return { + "processed_count": self._processed_count, + "error_count": self._error_count, + "queue_size": getattr(self.queue, "qsize", lambda: 0)(), + } + + +class AsyncLoggingManager: + """ + Manages the async logging lifecycle without global state. + """ + + def __init__(self, settings: AsyncLoggingSettings) -> None: + self.settings = settings + self.queue: queue.Queue | None = None + self.listener: SafeQueueListener | None = None + self._queue_handler: logging.handlers.QueueHandler | None = None + + def setup(self, handlers: list[logging.Handler] | None = None) -> bool: + """ + Set up the async logging infrastructure. 
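For reference, the two stdlib pieces these new classes wrap — logging.handlers.QueueHandler and logging.handlers.QueueListener — can be exercised on their own. A minimal sketch (standard library only; the queue and handler names are illustrative):

    import logging
    import logging.handlers
    import queue

    log_queue: queue.Queue = queue.Queue()
    console = logging.StreamHandler()

    # The listener thread performs the blocking I/O; emitters only enqueue records.
    listener = logging.handlers.QueueListener(
        log_queue, console, respect_handler_level=True
    )
    listener.start()

    root = logging.getLogger()
    root.addHandler(logging.handlers.QueueHandler(log_queue))
    root.warning("written by the listener thread, not by the caller")

    listener.stop()  # drains what was enqueued, then joins the worker thread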
+ + Args: + handlers: List of handlers to route through async processing + + Returns: + True if setup successful, False otherwise + """ + if not self.settings.enabled: + return False + + if self.listener is not None: + _logger.warning("Async logging already configured") + return True + + try: + # Create queue with size limit to prevent memory exhaustion + queue_size = ( + self.settings.queue_size if self.settings.queue_size > 0 else None + ) + self.queue = ( + queue.Queue(maxsize=queue_size) if queue_size else queue.Queue() + ) + + # Use provided handlers or create default console handler + target_handlers = handlers or [logging.StreamHandler()] + + # Create and start listener + self.listener = SafeQueueListener(self.queue, *target_handlers) + self.listener.start() + + # Create queue handler + self._queue_handler = logging.handlers.QueueHandler(self.queue) + + _logger.info( + "Async logging initialized with queue size %s", self.settings.queue_size + ) + return True + + except Exception: + _logger.exception("Failed to setup async logging") + return False + + def get_queue_handler(self) -> logging.handlers.QueueHandler | None: + """Get the queue handler for attaching to loggers.""" + return self._queue_handler + + def shutdown(self) -> None: + """Shutdown the async logging infrastructure gracefully.""" + if self.listener is not None: + try: + _logger.debug("Shutting down async logging listener...") + self.listener.stop() + self.listener = None + self.queue = None + self._queue_handler = None + _logger.debug("Async logging shutdown complete") + except Exception as exc: + import sys + + sys.stderr.write(f"Error during async logging shutdown: {exc}\n") + + def get_metrics(self) -> dict[str, Any] | None: + """Get async logging performance metrics.""" + if self.listener and hasattr(self.listener, "get_metrics"): + return self.listener.get_metrics() + return None + + def is_enabled(self) -> bool: + """Check if async logging is currently enabled and running.""" + return self.listener is not None + + +def create_async_logging_manager( + settings: AsyncLoggingSettings, + existing_handlers: list[logging.Handler] | None = None, +) -> AsyncLoggingManager | None: + """ + Create and setup an async logging manager. 
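A hedged sketch of how the manager introduced in this patch is meant to be driven; AsyncLoggingSettings and AsyncLoggingManager are the classes added above, and the call sequence is inferred from their signatures rather than taken from an existing call site:

    import logging

    settings = AsyncLoggingSettings(enabled=True, queue_size=10_000)
    manager = AsyncLoggingManager(settings)

    if manager.setup(handlers=[logging.StreamHandler()]):
        queue_handler = manager.get_queue_handler()
        if queue_handler is not None:
            logging.getLogger().addHandler(queue_handler)
        logging.getLogger("app").info("enqueued, emitted by the listener thread")
        manager.shutdown()  # stops the listener and releases the queue references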
+ + Args: + settings: Async logging configuration + existing_handlers: List of handlers to route through async processing + + Returns: + AsyncLoggingManager if successful, None if disabled or failed + """ + if not settings.enabled: + return None + + manager = AsyncLoggingManager(settings) + if manager.setup(existing_handlers): + return manager + return None + + +def _setup_format_string( + *, + tracing_settings: TracingSettings | None, + log_format_local_dev_enabled: bool, +) -> str: + """Create the appropriate format string based on settings.""" + if log_format_local_dev_enabled: + if tracing_settings is not None: + return LOCAL_TRACING_FORMATTING + return LOCAL_FORMATTING + + if tracing_settings is not None: + setup_log_tracing(tracing_settings=tracing_settings) + return TRACING_FORMATTING + + return DEFAULT_FORMATTING + + +def _setup_async_logging_if_enabled( + *, + async_logging_settings: AsyncLoggingSettings | None, + loggers: list[logging.Logger], + fmt: str, + log_format_local_dev_enabled: bool, +) -> AsyncLoggingManager | None: + """Setup async logging and return AsyncLoggingManager if successful.""" + if not async_logging_settings or not async_logging_settings.enabled: + return None + + # Collect existing handlers + existing_handlers = [] + for logger in loggers: + existing_handlers.extend(logger.handlers[:]) + + # Remove handlers from loggers to avoid duplication + for logger in loggers: + logger.handlers.clear() + + # Create formatted handlers for async processing + formatted_handlers = [] + for handler in existing_handlers: + handler.setFormatter( + CustomFormatter( + fmt, log_format_local_dev_enabled=log_format_local_dev_enabled + ) + ) + formatted_handlers.append(handler) + + # Setup async logging infrastructure + async_manager = create_async_logging_manager( + async_logging_settings, formatted_handlers + ) + if async_manager: + _logger.info("Async logging enabled with queue-based processing") + return async_manager + + _logger.warning("Failed to setup async logging, falling back to synchronous") + # Restore original handlers if async setup failed + for logger in loggers: + for handler in existing_handlers: + logger.addHandler(handler) + return None + + def config_all_loggers( *, log_format_local_dev_enabled: bool, logger_filter_mapping: dict[LoggerName, list[MessageSubstring]], tracing_settings: TracingSettings | None, -) -> None: + async_logging_settings: AsyncLoggingSettings | None = None, +) -> AsyncLoggingManager | None: """ - Applies common configuration to ALL registered loggers + Applies common configuration to ALL registered loggers. 
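A possible call site for the reworked config_all_loggers entry point; the argument names are the ones added in this patch, while the surrounding application code is illustrative:

    manager = config_all_loggers(
        log_format_local_dev_enabled=False,
        logger_filter_mapping={},
        tracing_settings=None,
        async_logging_settings=AsyncLoggingSettings(queue_size=5_000),
    )

    # ... application runs ...

    if manager is not None:
        manager.shutdown()  # flush pending records at application exit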
+ + Args: + log_format_local_dev_enabled: Enable local development formatting + logger_filter_mapping: Mapping of logger names to filtered message substrings + tracing_settings: OpenTelemetry tracing configuration + async_logging_settings: Async logging configuration (optional) + + Returns: + AsyncLoggingManager if async logging was enabled, None otherwise """ the_manager: logging.Manager = logging.Logger.manager root_logger = logging.getLogger() - loggers = [root_logger] + [ logging.getLogger(name) for name in the_manager.loggerDict ] - fmt = DEFAULT_FORMATTING - if tracing_settings is not None: - fmt = " | ".join( - [ - "log_level=%(levelname)s", - "log_timestamp=%(asctime)s", - "log_source=%(name)s:%(funcName)s(%(lineno)d)", - "log_uid=%(log_uid)s", - "log_oec=%(log_oec)s", - "log_trace_id=%(otelTraceID)s", - "log_span_id=%(otelSpanID)s", - "log_resource.service.name=%(otelServiceName)s", - "log_trace_sampled=%(otelTraceSampled)s", - "log_msg=%(message)s", - ] - ) - setup_log_tracing(tracing_settings=tracing_settings) - if log_format_local_dev_enabled: - fmt = LOCAL_FORMATTING - if tracing_settings is not None: - fmt = ( - "%(levelname)s: [%(asctime)s/%(processName)s] " - "[log_trace_id=%(otelTraceID)s log_span_id=%(otelSpanID)s log_resource.service.name=%(otelServiceName)s log_trace_sampled=%(otelTraceSampled)s] " - "[%(name)s:%(funcName)s(%(lineno)d)] - %(message)s" - ) + # Create format string + fmt = _setup_format_string( + tracing_settings=tracing_settings, + log_format_local_dev_enabled=log_format_local_dev_enabled, + ) + + # Setup async logging if requested (defaults to enabled) + if async_logging_settings is None: + async_logging_settings = AsyncLoggingSettings() + async_manager = _setup_async_logging_if_enabled( + async_logging_settings=async_logging_settings, + loggers=loggers, + fmt=fmt, + log_format_local_dev_enabled=log_format_local_dev_enabled, + ) + + # Apply handlers to loggers for logger in loggers: - _set_logging_handler( - logger, fmt=fmt, log_format_local_dev_enabled=log_format_local_dev_enabled - ) + if async_manager: + queue_handler = async_manager.get_queue_handler() + if queue_handler: + logger.addHandler(queue_handler) + else: + _set_logging_handler( + logger, + fmt=fmt, + log_format_local_dev_enabled=log_format_local_dev_enabled, + ) + # Apply filters for logger_name, filtered_routes in logger_filter_mapping.items(): logger = logging.getLogger(logger_name) - # Check if the logger has any handlers or is in active use if not logger.hasHandlers(): _logger.warning( "Logger %s does not have any handlers. 
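The logger_filter_mapping handling here relies on GeneralLogFilter, which is defined elsewhere in servicelib; an illustrative stand-in that shows the substring-drop idea:

    import logging

    class _DropBySubstring(logging.Filter):
        def __init__(self, substrings: list[str]) -> None:
            super().__init__()
            self._substrings = substrings

        def filter(self, record: logging.LogRecord) -> bool:
            message = record.getMessage()
            return not any(s in message for s in self._substrings)

    logging.getLogger("aiohttp.access").addFilter(
        _DropBySubstring(["/health", "/metrics"])
    )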
Filter will not be added.", @@ -182,6 +432,8 @@ def config_all_loggers( log_filter = GeneralLogFilter(filtered_routes) logger.addFilter(log_filter) + return async_manager + def _set_logging_handler( logger: logging.Logger, From e861d0dae5a954b96ee27c617b9a97995b569d15 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Tue, 8 Jul 2025 08:24:58 +0200 Subject: [PATCH 003/128] async logger --- .../src/servicelib/logging_utils.py | 95 +++++-------------- 1 file changed, 25 insertions(+), 70 deletions(-) diff --git a/packages/service-library/src/servicelib/logging_utils.py b/packages/service-library/src/servicelib/logging_utils.py index 245c1fab547..17fc4512659 100644 --- a/packages/service-library/src/servicelib/logging_utils.py +++ b/packages/service-library/src/servicelib/logging_utils.py @@ -10,10 +10,10 @@ import logging import logging.handlers import queue +import sys from asyncio import iscoroutinefunction from collections.abc import Callable, Iterator from contextlib import contextmanager -from dataclasses import dataclass from datetime import datetime from inspect import getframeinfo, stack from pathlib import Path @@ -149,14 +149,8 @@ def format(self, record) -> str: # log_level=%{WORD:log_level} \| log_timestamp=%{TIMESTAMP_ISO8601:log_timestamp} \| log_source=%{DATA:log_source} \| (log_uid=%{WORD:log_uid} \| )?log_msg=%{GREEDYDATA:log_msg} -@dataclass -class AsyncLoggingSettings: - """Settings for asynchronous logging configuration.""" - - enabled: bool = True # Enable by default - queue_size: int = 10000 - batch_size: int = 100 - flush_interval: float = 1.0 +# Default async logging queue size +DEFAULT_ASYNC_QUEUE_SIZE = 10000 class SafeQueueListener(logging.handlers.QueueListener): @@ -177,8 +171,6 @@ def handle(self, record: logging.LogRecord) -> None: except Exception as exc: self._error_count += 1 # Log to stderr to avoid logging loops - import sys - sys.stderr.write(f"Async logging handler failed: {exc}\n") sys.stderr.flush() @@ -193,11 +185,11 @@ def get_metrics(self) -> dict[str, Any]: class AsyncLoggingManager: """ - Manages the async logging lifecycle without global state. + Simplified async logging manager with queue-based processing. 
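A side note on the respect_handler_level=True flag that SafeQueueListener passes to the stdlib listener: it makes the listener re-check each handler's own level before delivering a record, as in this standard-library-only sketch:

    import logging
    import logging.handlers
    import queue

    q: queue.Queue = queue.Queue()
    console = logging.StreamHandler()
    console.setLevel(logging.WARNING)

    listener = logging.handlers.QueueListener(q, console, respect_handler_level=True)
    listener.start()

    root = logging.getLogger()
    root.setLevel(logging.DEBUG)
    root.addHandler(logging.handlers.QueueHandler(q))

    root.debug("enqueued, but dropped again by the WARNING-level console handler")
    root.error("enqueued and actually written")

    listener.stop()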
""" - def __init__(self, settings: AsyncLoggingSettings) -> None: - self.settings = settings + def __init__(self, *, queue_size: int = DEFAULT_ASYNC_QUEUE_SIZE) -> None: + self.queue_size = queue_size self.queue: queue.Queue | None = None self.listener: SafeQueueListener | None = None self._queue_handler: logging.handlers.QueueHandler | None = None @@ -212,21 +204,13 @@ def setup(self, handlers: list[logging.Handler] | None = None) -> bool: Returns: True if setup successful, False otherwise """ - if not self.settings.enabled: - return False - if self.listener is not None: _logger.warning("Async logging already configured") return True try: # Create queue with size limit to prevent memory exhaustion - queue_size = ( - self.settings.queue_size if self.settings.queue_size > 0 else None - ) - self.queue = ( - queue.Queue(maxsize=queue_size) if queue_size else queue.Queue() - ) + self.queue = queue.Queue(maxsize=self.queue_size) # Use provided handlers or create default console handler target_handlers = handlers or [logging.StreamHandler()] @@ -239,7 +223,7 @@ def setup(self, handlers: list[logging.Handler] | None = None) -> bool: self._queue_handler = logging.handlers.QueueHandler(self.queue) _logger.info( - "Async logging initialized with queue size %s", self.settings.queue_size + "Async logging initialized with queue size %s", self.queue_size ) return True @@ -262,8 +246,6 @@ def shutdown(self) -> None: self._queue_handler = None _logger.debug("Async logging shutdown complete") except Exception as exc: - import sys - sys.stderr.write(f"Error during async logging shutdown: {exc}\n") def get_metrics(self) -> dict[str, Any] | None: @@ -277,29 +259,6 @@ def is_enabled(self) -> bool: return self.listener is not None -def create_async_logging_manager( - settings: AsyncLoggingSettings, - existing_handlers: list[logging.Handler] | None = None, -) -> AsyncLoggingManager | None: - """ - Create and setup an async logging manager. 
- - Args: - settings: Async logging configuration - existing_handlers: List of handlers to route through async processing - - Returns: - AsyncLoggingManager if successful, None if disabled or failed - """ - if not settings.enabled: - return None - - manager = AsyncLoggingManager(settings) - if manager.setup(existing_handlers): - return manager - return None - - def _setup_format_string( *, tracing_settings: TracingSettings | None, @@ -318,17 +277,14 @@ def _setup_format_string( return DEFAULT_FORMATTING -def _setup_async_logging_if_enabled( +def _setup_async_logging( *, - async_logging_settings: AsyncLoggingSettings | None, loggers: list[logging.Logger], fmt: str, log_format_local_dev_enabled: bool, + queue_size: int = DEFAULT_ASYNC_QUEUE_SIZE, ) -> AsyncLoggingManager | None: """Setup async logging and return AsyncLoggingManager if successful.""" - if not async_logging_settings or not async_logging_settings.enabled: - return None - # Collect existing handlers existing_handlers = [] for logger in loggers: @@ -349,10 +305,8 @@ def _setup_async_logging_if_enabled( formatted_handlers.append(handler) # Setup async logging infrastructure - async_manager = create_async_logging_manager( - async_logging_settings, formatted_handlers - ) - if async_manager: + async_manager = AsyncLoggingManager(queue_size=queue_size) + if async_manager.setup(formatted_handlers): _logger.info("Async logging enabled with queue-based processing") return async_manager @@ -369,7 +323,8 @@ def config_all_loggers( log_format_local_dev_enabled: bool, logger_filter_mapping: dict[LoggerName, list[MessageSubstring]], tracing_settings: TracingSettings | None, - async_logging_settings: AsyncLoggingSettings | None = None, + disable_async_logging: bool = False, + async_queue_size: int = DEFAULT_ASYNC_QUEUE_SIZE, ) -> AsyncLoggingManager | None: """ Applies common configuration to ALL registered loggers. 
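With this change the call site shrinks to plain keyword flags; a hedged example of the patch-003 signature, with the application code around it being illustrative:

    manager = config_all_loggers(
        log_format_local_dev_enabled=False,
        logger_filter_mapping={},
        tracing_settings=None,
        disable_async_logging=False,  # async logging is now opt-out
        async_queue_size=DEFAULT_ASYNC_QUEUE_SIZE,
    )
    if manager is not None:
        manager.shutdown()  # on application shutdown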
@@ -378,7 +333,8 @@ def config_all_loggers( log_format_local_dev_enabled: Enable local development formatting logger_filter_mapping: Mapping of logger names to filtered message substrings tracing_settings: OpenTelemetry tracing configuration - async_logging_settings: Async logging configuration (optional) + disable_async_logging: Set to True to disable async logging (default: False) + async_queue_size: Queue size for async logging (default: 10000) Returns: AsyncLoggingManager if async logging was enabled, None otherwise @@ -395,16 +351,15 @@ def config_all_loggers( log_format_local_dev_enabled=log_format_local_dev_enabled, ) - # Setup async logging if requested (defaults to enabled) - if async_logging_settings is None: - async_logging_settings = AsyncLoggingSettings() - - async_manager = _setup_async_logging_if_enabled( - async_logging_settings=async_logging_settings, - loggers=loggers, - fmt=fmt, - log_format_local_dev_enabled=log_format_local_dev_enabled, - ) + # Setup async logging by default + async_manager = None + if not disable_async_logging: + async_manager = _setup_async_logging( + loggers=loggers, + fmt=fmt, + log_format_local_dev_enabled=log_format_local_dev_enabled, + queue_size=async_queue_size, + ) # Apply handlers to loggers for logger in loggers: From 41815e2b815978f95d7969d8a8df9f72dbd1f11b Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Tue, 8 Jul 2025 09:14:52 +0200 Subject: [PATCH 004/128] add repomix to ignore list --- .gitignore | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index 964ac9035e6..4c2bf1934a9 100644 --- a/.gitignore +++ b/.gitignore @@ -55,7 +55,6 @@ locust_report/ nosetests.xml test_failures/ - # Translations *.mo *.pot @@ -182,3 +181,6 @@ tests/public-api/osparc_python_wheels/* # osparc-config repo files repo.config + +# repomix +.repomix/* From 3820a520867bc88d23a1072981ec399c4266ba25 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Tue, 8 Jul 2025 09:16:29 +0200 Subject: [PATCH 005/128] use same version --- services/api-server/tests/unit/_with_db/data/docker-compose.yml | 2 +- services/director-v2/docker-compose-extra.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/services/api-server/tests/unit/_with_db/data/docker-compose.yml b/services/api-server/tests/unit/_with_db/data/docker-compose.yml index ae76474af7c..75eb2f48245 100644 --- a/services/api-server/tests/unit/_with_db/data/docker-compose.yml +++ b/services/api-server/tests/unit/_with_db/data/docker-compose.yml @@ -1,6 +1,6 @@ services: postgres: - image: postgres:14.5-alpine@sha256:db802f226b620fc0b8adbeca7859eb203c8d3c9ce5d84870fadee05dea8f50ce + image: "postgres:14.8-alpine@sha256:150dd39ccb7ae6c7ba6130c3582c39a30bb5d3d22cb08ad0ba37001e3f829abc" environment: - POSTGRES_USER=${POSTGRES_USER:-test} - POSTGRES_PASSWORD=${POSTGRES_PASSWORD:-test} diff --git a/services/director-v2/docker-compose-extra.yml b/services/director-v2/docker-compose-extra.yml index 5923b000384..4ec97a3c48b 100644 --- a/services/director-v2/docker-compose-extra.yml +++ b/services/director-v2/docker-compose-extra.yml @@ -1,6 +1,6 @@ services: postgres: - image: postgres:14.5-alpine@sha256:db802f226b620fc0b8adbeca7859eb203c8d3c9ce5d84870fadee05dea8f50ce + image: "postgres:14.8-alpine@sha256:150dd39ccb7ae6c7ba6130c3582c39a30bb5d3d22cb08ad0ba37001e3f829abc" init: true environment: - POSTGRES_USER=${POSTGRES_USER:-test} From 8f0600935c9ebeb5ea7c7b1d0b10713700af8887 Mon 
Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Tue, 8 Jul 2025 09:41:56 +0200 Subject: [PATCH 006/128] removed max queue --- .../src/servicelib/logging_utils.py | 21 +++++-------------- 1 file changed, 5 insertions(+), 16 deletions(-) diff --git a/packages/service-library/src/servicelib/logging_utils.py b/packages/service-library/src/servicelib/logging_utils.py index 17fc4512659..ec85c35124e 100644 --- a/packages/service-library/src/servicelib/logging_utils.py +++ b/packages/service-library/src/servicelib/logging_utils.py @@ -149,10 +149,6 @@ def format(self, record) -> str: # log_level=%{WORD:log_level} \| log_timestamp=%{TIMESTAMP_ISO8601:log_timestamp} \| log_source=%{DATA:log_source} \| (log_uid=%{WORD:log_uid} \| )?log_msg=%{GREEDYDATA:log_msg} -# Default async logging queue size -DEFAULT_ASYNC_QUEUE_SIZE = 10000 - - class SafeQueueListener(logging.handlers.QueueListener): """ Enhanced QueueListener with error isolation and metrics. @@ -188,8 +184,7 @@ class AsyncLoggingManager: Simplified async logging manager with queue-based processing. """ - def __init__(self, *, queue_size: int = DEFAULT_ASYNC_QUEUE_SIZE) -> None: - self.queue_size = queue_size + def __init__(self) -> None: self.queue: queue.Queue | None = None self.listener: SafeQueueListener | None = None self._queue_handler: logging.handlers.QueueHandler | None = None @@ -209,8 +204,8 @@ def setup(self, handlers: list[logging.Handler] | None = None) -> bool: return True try: - # Create queue with size limit to prevent memory exhaustion - self.queue = queue.Queue(maxsize=self.queue_size) + # Create unlimited queue to prevent queue.Full exceptions + self.queue = queue.Queue() # Use provided handlers or create default console handler target_handlers = handlers or [logging.StreamHandler()] @@ -222,9 +217,7 @@ def setup(self, handlers: list[logging.Handler] | None = None) -> bool: # Create queue handler self._queue_handler = logging.handlers.QueueHandler(self.queue) - _logger.info( - "Async logging initialized with queue size %s", self.queue_size - ) + _logger.info("Async logging initialized with unlimited queue") return True except Exception: @@ -282,7 +275,6 @@ def _setup_async_logging( loggers: list[logging.Logger], fmt: str, log_format_local_dev_enabled: bool, - queue_size: int = DEFAULT_ASYNC_QUEUE_SIZE, ) -> AsyncLoggingManager | None: """Setup async logging and return AsyncLoggingManager if successful.""" # Collect existing handlers @@ -305,7 +297,7 @@ def _setup_async_logging( formatted_handlers.append(handler) # Setup async logging infrastructure - async_manager = AsyncLoggingManager(queue_size=queue_size) + async_manager = AsyncLoggingManager() if async_manager.setup(formatted_handlers): _logger.info("Async logging enabled with queue-based processing") return async_manager @@ -324,7 +316,6 @@ def config_all_loggers( logger_filter_mapping: dict[LoggerName, list[MessageSubstring]], tracing_settings: TracingSettings | None, disable_async_logging: bool = False, - async_queue_size: int = DEFAULT_ASYNC_QUEUE_SIZE, ) -> AsyncLoggingManager | None: """ Applies common configuration to ALL registered loggers. 
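The motivation for dropping the maxsize: QueueHandler enqueues with put_nowait(), so once a bounded queue fills up, every further emit surfaces as a queue.Full traceback via Handler.handleError() on the emitting thread. A standard-library-only sketch of that failure mode:

    import logging
    import logging.handlers
    import queue

    tiny: queue.Queue = queue.Queue(maxsize=1)
    qh = logging.handlers.QueueHandler(tiny)

    qh.emit(logging.makeLogRecord({"msg": "fits"}))      # fills the queue
    qh.emit(logging.makeLogRecord({"msg": "does not"}))  # handleError() reports queue.Full on stderr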
@@ -334,7 +325,6 @@ def config_all_loggers( logger_filter_mapping: Mapping of logger names to filtered message substrings tracing_settings: OpenTelemetry tracing configuration disable_async_logging: Set to True to disable async logging (default: False) - async_queue_size: Queue size for async logging (default: 10000) Returns: AsyncLoggingManager if async logging was enabled, None otherwise @@ -358,7 +348,6 @@ def config_all_loggers( loggers=loggers, fmt=fmt, log_format_local_dev_enabled=log_format_local_dev_enabled, - queue_size=async_queue_size, ) # Apply handlers to loggers From 5111d4c741f44f337e03b864ab3ed2c2bfda258f Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Tue, 8 Jul 2025 09:52:09 +0200 Subject: [PATCH 007/128] initial --- .../service-library/async_logging_example.py | 97 ++++ .../src/servicelib/logging_utils.py | 497 +++++++++++------- 2 files changed, 399 insertions(+), 195 deletions(-) create mode 100644 packages/service-library/async_logging_example.py diff --git a/packages/service-library/async_logging_example.py b/packages/service-library/async_logging_example.py new file mode 100644 index 00000000000..d8ccfd55486 --- /dev/null +++ b/packages/service-library/async_logging_example.py @@ -0,0 +1,97 @@ +#!/usr/bin/env python3 + +""" +Example demonstrating the new async logging functionality. +This shows how to use both the context manager and manual setup/shutdown approaches. +""" + +import asyncio +import logging +import sys +from pathlib import Path + +from servicelib.logging_utils import ( + async_logging_context, + setup_async_loggers, + shutdown_async_loggers, +) + +# Add the servicelib to the path +sys.path.insert(0, str(Path(__file__).parent / "src")) + + +async def example_with_context_manager(): + """Example using the async context manager approach.""" + print("\n=== Example 1: Using async context manager ===") + + async with async_logging_context(log_format_local_dev_enabled=True): + logger = logging.getLogger("example1") + logger.setLevel(logging.DEBUG) + + logger.info("Starting non-blocking async logging example") + + # Simulate some async work with logging + for i in range(5): + logger.debug(f"Processing item {i}") + await asyncio.sleep(0.1) # Simulate async work + + logger.info("Completed async work") + + +async def example_with_manual_setup(): + """Example using manual setup and shutdown.""" + print("\n=== Example 2: Using manual setup/shutdown ===") + + # Setup async logging + await setup_async_loggers(log_format_local_dev_enabled=True) + + try: + logger = logging.getLogger("example2") + logger.setLevel(logging.DEBUG) + + logger.info("Starting manual async logging example") + + # Simulate some async work with logging + tasks = [] + for i in range(3): + tasks.append(worker_task(f"worker-{i}")) + + await asyncio.gather(*tasks) + + logger.info("All workers completed") + + finally: + # Always shutdown to ensure clean cleanup + await shutdown_async_loggers() + + +async def worker_task(name: str): + """Simulate a worker task that logs messages.""" + logger = logging.getLogger(f"worker.{name}") + + logger.info(f"{name} starting work") + await asyncio.sleep(0.2) # Simulate work + logger.debug(f"{name} processing data") + await asyncio.sleep(0.1) + logger.info(f"{name} work completed") + + +async def main(): + """Run both examples.""" + print("Async Logging Examples") + print("=====================") + + # Example 1: Context manager (recommended) + await example_with_context_manager() + + # Small delay between examples + 
await asyncio.sleep(0.5) + + # Example 2: Manual setup/shutdown + await example_with_manual_setup() + + print("\n=== All examples completed ===") + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/packages/service-library/src/servicelib/logging_utils.py b/packages/service-library/src/servicelib/logging_utils.py index ec85c35124e..5212b1ac7fa 100644 --- a/packages/service-library/src/servicelib/logging_utils.py +++ b/packages/service-library/src/servicelib/logging_utils.py @@ -6,14 +6,15 @@ """ import asyncio +import contextlib import functools import logging import logging.handlers import queue import sys from asyncio import iscoroutinefunction -from collections.abc import Callable, Iterator -from contextlib import contextmanager +from collections.abc import AsyncGenerator, Callable, Iterator +from contextlib import asynccontextmanager, contextmanager from datetime import datetime from inspect import getframeinfo, stack from pathlib import Path @@ -149,109 +150,6 @@ def format(self, record) -> str: # log_level=%{WORD:log_level} \| log_timestamp=%{TIMESTAMP_ISO8601:log_timestamp} \| log_source=%{DATA:log_source} \| (log_uid=%{WORD:log_uid} \| )?log_msg=%{GREEDYDATA:log_msg} -class SafeQueueListener(logging.handlers.QueueListener): - """ - Enhanced QueueListener with error isolation and metrics. - """ - - def __init__(self, queue_obj: queue.Queue, *handlers: logging.Handler) -> None: - super().__init__(queue_obj, *handlers, respect_handler_level=True) - self._error_count = 0 - self._processed_count = 0 - - def handle(self, record: logging.LogRecord) -> None: - """Handle a record with error isolation.""" - try: - super().handle(record) - self._processed_count += 1 - except Exception as exc: - self._error_count += 1 - # Log to stderr to avoid logging loops - sys.stderr.write(f"Async logging handler failed: {exc}\n") - sys.stderr.flush() - - def get_metrics(self) -> dict[str, Any]: - """Get logging metrics.""" - return { - "processed_count": self._processed_count, - "error_count": self._error_count, - "queue_size": getattr(self.queue, "qsize", lambda: 0)(), - } - - -class AsyncLoggingManager: - """ - Simplified async logging manager with queue-based processing. - """ - - def __init__(self) -> None: - self.queue: queue.Queue | None = None - self.listener: SafeQueueListener | None = None - self._queue_handler: logging.handlers.QueueHandler | None = None - - def setup(self, handlers: list[logging.Handler] | None = None) -> bool: - """ - Set up the async logging infrastructure. 
- - Args: - handlers: List of handlers to route through async processing - - Returns: - True if setup successful, False otherwise - """ - if self.listener is not None: - _logger.warning("Async logging already configured") - return True - - try: - # Create unlimited queue to prevent queue.Full exceptions - self.queue = queue.Queue() - - # Use provided handlers or create default console handler - target_handlers = handlers or [logging.StreamHandler()] - - # Create and start listener - self.listener = SafeQueueListener(self.queue, *target_handlers) - self.listener.start() - - # Create queue handler - self._queue_handler = logging.handlers.QueueHandler(self.queue) - - _logger.info("Async logging initialized with unlimited queue") - return True - - except Exception: - _logger.exception("Failed to setup async logging") - return False - - def get_queue_handler(self) -> logging.handlers.QueueHandler | None: - """Get the queue handler for attaching to loggers.""" - return self._queue_handler - - def shutdown(self) -> None: - """Shutdown the async logging infrastructure gracefully.""" - if self.listener is not None: - try: - _logger.debug("Shutting down async logging listener...") - self.listener.stop() - self.listener = None - self.queue = None - self._queue_handler = None - _logger.debug("Async logging shutdown complete") - except Exception as exc: - sys.stderr.write(f"Error during async logging shutdown: {exc}\n") - - def get_metrics(self) -> dict[str, Any] | None: - """Get async logging performance metrics.""" - if self.listener and hasattr(self.listener, "get_metrics"): - return self.listener.get_metrics() - return None - - def is_enabled(self) -> bool: - """Check if async logging is currently enabled and running.""" - return self.listener is not None - - def _setup_format_string( *, tracing_settings: TracingSettings | None, @@ -270,44 +168,18 @@ def _setup_format_string( return DEFAULT_FORMATTING -def _setup_async_logging( +def _set_logging_handler( + logger: logging.Logger, *, - loggers: list[logging.Logger], fmt: str, log_format_local_dev_enabled: bool, -) -> AsyncLoggingManager | None: - """Setup async logging and return AsyncLoggingManager if successful.""" - # Collect existing handlers - existing_handlers = [] - for logger in loggers: - existing_handlers.extend(logger.handlers[:]) - - # Remove handlers from loggers to avoid duplication - for logger in loggers: - logger.handlers.clear() - - # Create formatted handlers for async processing - formatted_handlers = [] - for handler in existing_handlers: +) -> None: + for handler in logger.handlers: handler.setFormatter( CustomFormatter( fmt, log_format_local_dev_enabled=log_format_local_dev_enabled ) ) - formatted_handlers.append(handler) - - # Setup async logging infrastructure - async_manager = AsyncLoggingManager() - if async_manager.setup(formatted_handlers): - _logger.info("Async logging enabled with queue-based processing") - return async_manager - - _logger.warning("Failed to setup async logging, falling back to synchronous") - # Restore original handlers if async setup failed - for logger in loggers: - for handler in existing_handlers: - logger.addHandler(handler) - return None def config_all_loggers( @@ -315,8 +187,7 @@ def config_all_loggers( log_format_local_dev_enabled: bool, logger_filter_mapping: dict[LoggerName, list[MessageSubstring]], tracing_settings: TracingSettings | None, - disable_async_logging: bool = False, -) -> AsyncLoggingManager | None: +) -> None: """ Applies common configuration to ALL registered loggers. 
@@ -324,10 +195,6 @@ def config_all_loggers( log_format_local_dev_enabled: Enable local development formatting logger_filter_mapping: Mapping of logger names to filtered message substrings tracing_settings: OpenTelemetry tracing configuration - disable_async_logging: Set to True to disable async logging (default: False) - - Returns: - AsyncLoggingManager if async logging was enabled, None otherwise """ the_manager: logging.Manager = logging.Logger.manager root_logger = logging.getLogger() @@ -341,28 +208,14 @@ def config_all_loggers( log_format_local_dev_enabled=log_format_local_dev_enabled, ) - # Setup async logging by default - async_manager = None - if not disable_async_logging: - async_manager = _setup_async_logging( - loggers=loggers, + # Apply handlers to loggers + for logger in loggers: + _set_logging_handler( + logger, fmt=fmt, log_format_local_dev_enabled=log_format_local_dev_enabled, ) - # Apply handlers to loggers - for logger in loggers: - if async_manager: - queue_handler = async_manager.get_queue_handler() - if queue_handler: - logger.addHandler(queue_handler) - else: - _set_logging_handler( - logger, - fmt=fmt, - log_format_local_dev_enabled=log_format_local_dev_enabled, - ) - # Apply filters for logger_name, filtered_routes in logger_filter_mapping.items(): logger = logging.getLogger(logger_name) @@ -376,42 +229,6 @@ def config_all_loggers( log_filter = GeneralLogFilter(filtered_routes) logger.addFilter(log_filter) - return async_manager - - -def _set_logging_handler( - logger: logging.Logger, - *, - fmt: str, - log_format_local_dev_enabled: bool, -) -> None: - for handler in logger.handlers: - handler.setFormatter( - CustomFormatter( - fmt, log_format_local_dev_enabled=log_format_local_dev_enabled - ) - ) - - -def test_logger_propagation(logger: logging.Logger) -> None: - """log propagation and levels can sometimes be daunting to get it right. - - This function uses the `logger`` passed as argument to log the same message at different levels - - This should help to visually test a given configuration - - USAGE: - from .logging_utils import test_logger_propagation - for n in ("aiohttp.access", "gunicorn.access"): - test_logger_propagation(logging.getLogger(n)) - """ - msg = f"TESTING %s log using {logger=}" - logger.critical(msg, "critical") - logger.error(msg, "error") - logger.info(msg, "info") - logger.warning(msg, "warning") - logger.debug(msg, "debug") - class LogExceptionsKwargsDict(TypedDict, total=True): logger: logging.Logger @@ -658,3 +475,293 @@ def guess_message_log_level(message: str) -> LogLevelInt: def set_parent_module_log_level(current_module: str, desired_log_level: int) -> None: parent_module = ".".join(current_module.split(".")[:-1]) logging.getLogger(parent_module).setLevel(desired_log_level) + + +# Global reference to keep the logging task alive +_ASYNC_LOGGING_TASK: asyncio.Task | None = None + + +class AsyncLoggingContext: + """ + Async context manager for non-blocking logging infrastructure. + Based on the pattern from SuperFastPython article. 
+ """ + + def __init__( + self, + *, + handlers: list[logging.Handler] | None = None, + log_format_local_dev_enabled: bool = False, + fmt: str | None = None, + ) -> None: + self.handlers = handlers or [logging.StreamHandler()] + self.log_format_local_dev_enabled = log_format_local_dev_enabled + self.fmt = fmt or DEFAULT_FORMATTING + self.queue: queue.Queue | None = None + self.listener: logging.handlers.QueueListener | None = None + self.queue_handler: logging.handlers.QueueHandler | None = None + self.original_handlers: dict[str, list[logging.Handler]] = {} + + async def __aenter__(self) -> "AsyncLoggingContext": + """Set up async logging infrastructure.""" + await self._setup_async_logging() + return self + + async def __aexit__(self, exc_type, exc_val, exc_tb) -> None: + """Clean up async logging infrastructure.""" + await self._cleanup_async_logging() + + async def _setup_async_logging(self) -> None: + """Configure non-blocking logging using queue-based approach.""" + # Create unlimited queue for log messages + self.queue = queue.Queue() + + # Configure handlers with proper formatting + formatted_handlers = [] + for handler in self.handlers: + handler.setFormatter( + CustomFormatter( + self.fmt, + log_format_local_dev_enabled=self.log_format_local_dev_enabled, + ) + ) + formatted_handlers.append(handler) + + # Create and start the queue listener + self.listener = logging.handlers.QueueListener( + self.queue, *formatted_handlers, respect_handler_level=True + ) + self.listener.start() + + # Create queue handler for loggers + self.queue_handler = logging.handlers.QueueHandler(self.queue) + + # Configure all existing loggers + await self._configure_loggers() + + _logger.info("Async logging context initialized with unlimited queue") + + async def _configure_loggers(self) -> None: + """Replace all logger handlers with queue handler.""" + # Get all loggers + manager: logging.Manager = logging.Logger.manager + root_logger = logging.getLogger() + all_loggers = [root_logger] + [ + logging.getLogger(name) for name in manager.loggerDict + ] + + # Store original handlers and replace with queue handler + for logger in all_loggers: + logger_name = logger.name or "root" + + # Store original handlers + self.original_handlers[logger_name] = logger.handlers[:] + + # Clear existing handlers + logger.handlers.clear() + + # Add queue handler + if self.queue_handler: + logger.addHandler(self.queue_handler) + + # Allow other coroutines to run + await asyncio.sleep(0) + + async def _cleanup_async_logging(self) -> None: + """Restore original logging configuration.""" + try: + # Restore original handlers + manager: logging.Manager = logging.Logger.manager + root_logger = logging.getLogger() + all_loggers = [root_logger] + [ + logging.getLogger(name) for name in manager.loggerDict + ] + + for logger in all_loggers: + logger_name = logger.name or "root" + if logger_name in self.original_handlers: + # Clear queue handlers + logger.handlers.clear() + + # Restore original handlers + for handler in self.original_handlers[logger_name]: + logger.addHandler(handler) + + # Stop the queue listener + if self.listener: + _logger.debug("Shutting down async logging listener...") + self.listener.stop() + + _logger.debug("Async logging context cleanup complete") + + except Exception as exc: + sys.stderr.write(f"Error during async logging cleanup: {exc}\n") + sys.stderr.flush() + finally: + self.queue = None + self.listener = None + self.queue_handler = None + self.original_handlers.clear() + + def get_metrics(self) -> dict[str, 
Any] | None: + """Get logging performance metrics.""" + if self.queue: + return { + "queue_size": self.queue.qsize(), + "listener_active": self.listener is not None, + } + return None + + +async def _logging_task_runner( + *, + handlers: list[logging.Handler] | None = None, + log_format_local_dev_enabled: bool = False, + fmt: str | None = None, +) -> None: + """ + Background task that manages the async logging infrastructure. + Runs indefinitely until canceled. + """ + async with AsyncLoggingContext( + handlers=handlers, + log_format_local_dev_enabled=log_format_local_dev_enabled, + fmt=fmt, + ): + _logger.info("Async logging task started") + try: + # Use Event instead of sleep loop for better responsiveness + shutdown_event = asyncio.Event() + await shutdown_event.wait() + except asyncio.CancelledError: + _logger.debug("Async logging task canceled") + raise + except Exception: + _logger.exception("Async logging task failed") + raise + finally: + _logger.debug("Async logging task finished") + + +async def setup_async_loggers( + *, + log_format_local_dev_enabled: bool = False, + logger_filter_mapping: dict[LoggerName, list[MessageSubstring]] | None = None, + tracing_settings: TracingSettings | None = None, + handlers: list[logging.Handler] | None = None, +) -> None: + """ + Set up non-blocking async logging infrastructure. + + This function starts a background task that manages the logging infrastructure + for the entire application lifecycle. The task automatically shuts down when + the event loop terminates. + + Args: + log_format_local_dev_enabled: Enable local development formatting + logger_filter_mapping: Mapping of logger names to filtered message substrings + tracing_settings: OpenTelemetry tracing configuration + handlers: Custom handlers to use (defaults to StreamHandler) + + Note: + This function should be called once at application startup. + The logging infrastructure will remain active until the event loop shuts down. + """ + global _ASYNC_LOGGING_TASK + + if _ASYNC_LOGGING_TASK and not _ASYNC_LOGGING_TASK.done(): + _logger.warning("Async logging task already running") + return + + # Create format string + fmt = _setup_format_string( + tracing_settings=tracing_settings, + log_format_local_dev_enabled=log_format_local_dev_enabled, + ) + + # Start the background logging task + _ASYNC_LOGGING_TASK = asyncio.create_task( + _logging_task_runner( + handlers=handlers, + log_format_local_dev_enabled=log_format_local_dev_enabled, + fmt=fmt, + ) + ) + + # Allow the task to start + await asyncio.sleep(0) + + # Apply filters if provided + if logger_filter_mapping: + _apply_logger_filters(logger_filter_mapping) + + _logger.info("Async logging setup completed") + + +def _apply_logger_filters( + logger_filter_mapping: dict[LoggerName, list[MessageSubstring]], +) -> None: + """Apply filters to specific loggers.""" + for logger_name, filtered_routes in logger_filter_mapping.items(): + logger = logging.getLogger(logger_name) + if not logger.hasHandlers(): + _logger.warning( + "Logger %s does not have any handlers. Filter will not be added.", + logger_name, + ) + continue + + log_filter = GeneralLogFilter(filtered_routes) + logger.addFilter(log_filter) + + +async def shutdown_async_loggers() -> None: + """ + Gracefully shutdown the async logging infrastructure. + + This function should be called during application shutdown to ensure + all log messages are processed before termination. 
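A hedged sketch of wiring the two entry points added here into an application's startup and shutdown hooks; the lifespan wrapper itself is illustrative and not part of this patch:

    from contextlib import asynccontextmanager

    from servicelib.logging_utils import setup_async_loggers, shutdown_async_loggers

    @asynccontextmanager
    async def app_lifespan():
        await setup_async_loggers(log_format_local_dev_enabled=False)
        try:
            yield
        finally:
            await shutdown_async_loggers()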
+ """ + global _ASYNC_LOGGING_TASK + + if _ASYNC_LOGGING_TASK and not _ASYNC_LOGGING_TASK.done(): + _logger.debug("Shutting down async logging task...") + _ASYNC_LOGGING_TASK.cancel() + with contextlib.suppress(asyncio.CancelledError): + await _ASYNC_LOGGING_TASK + _ASYNC_LOGGING_TASK = None + _logger.debug("Async logging shutdown complete") + + +@asynccontextmanager +async def async_logging_context( + *, + log_format_local_dev_enabled: bool = False, + logger_filter_mapping: dict[LoggerName, list[MessageSubstring]] | None = None, + tracing_settings: TracingSettings | None = None, + handlers: list[logging.Handler] | None = None, +) -> AsyncGenerator[None, None]: + """ + Async context manager for non-blocking logging. + + Usage: + async with async_logging_context(log_format_local_dev_enabled=True): + # Your async application code here + logger.info("This is non-blocking!") + + Args: + log_format_local_dev_enabled: Enable local development formatting + logger_filter_mapping: Mapping of logger names to filtered message substrings + tracing_settings: OpenTelemetry tracing configuration + handlers: Custom handlers to use (defaults to StreamHandler) + """ + await setup_async_loggers( + log_format_local_dev_enabled=log_format_local_dev_enabled, + logger_filter_mapping=logger_filter_mapping, + tracing_settings=tracing_settings, + handlers=handlers, + ) + try: + yield + finally: + await shutdown_async_loggers() From 03d05a5f514c85e7c8de3d183c00c84212f4795d Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Tue, 8 Jul 2025 10:07:42 +0200 Subject: [PATCH 008/128] too much --- .../src/servicelib/logging_utils.py | 236 ++++++++++-------- 1 file changed, 137 insertions(+), 99 deletions(-) diff --git a/packages/service-library/src/servicelib/logging_utils.py b/packages/service-library/src/servicelib/logging_utils.py index 5212b1ac7fa..5f84e1ffcf7 100644 --- a/packages/service-library/src/servicelib/logging_utils.py +++ b/packages/service-library/src/servicelib/logging_utils.py @@ -6,7 +6,6 @@ """ import asyncio -import contextlib import functools import logging import logging.handlers @@ -15,7 +14,7 @@ from asyncio import iscoroutinefunction from collections.abc import AsyncGenerator, Callable, Iterator from contextlib import asynccontextmanager, contextmanager -from datetime import datetime +from datetime import datetime, timedelta from inspect import getframeinfo, stack from pathlib import Path from typing import Any, NotRequired, TypeAlias, TypedDict, TypeVar @@ -477,14 +476,13 @@ def set_parent_module_log_level(current_module: str, desired_log_level: int) -> logging.getLogger(parent_module).setLevel(desired_log_level) -# Global reference to keep the logging task alive -_ASYNC_LOGGING_TASK: asyncio.Task | None = None +# Remove the global task variable since we'll use background_task infrastructure class AsyncLoggingContext: """ Async context manager for non-blocking logging infrastructure. - Based on the pattern from SuperFastPython article. + Based on the pattern from SuperFastPython article and integrated with background_task. """ def __init__( @@ -613,65 +611,96 @@ def get_metrics(self) -> dict[str, Any] | None: return None -async def _logging_task_runner( +async def _logging_keep_alive() -> None: + """ + Simple keep-alive function for the logging infrastructure. + This function does nothing but allows the background task to run. 
+ """ + # Just sleep to keep the task alive - the real work is done in the context manager + await asyncio.sleep(1.0) + + +@asynccontextmanager +async def setup_async_loggers( *, - handlers: list[logging.Handler] | None = None, log_format_local_dev_enabled: bool = False, - fmt: str | None = None, -) -> None: + logger_filter_mapping: dict[LoggerName, list[MessageSubstring]] | None = None, + tracing_settings: TracingSettings | None = None, + handlers: list[logging.Handler] | None = None, +) -> AsyncGenerator[None, None]: """ - Background task that manages the async logging infrastructure. - Runs indefinitely until canceled. + Async context manager for non-blocking logging infrastructure. + + This function sets up the async logging infrastructure using the background_task + infrastructure for proper lifecycle management. + + Usage: + async with setup_async_loggers(log_format_local_dev_enabled=True): + # Your async application code here + logger.info("This is non-blocking!") + + Args: + log_format_local_dev_enabled: Enable local development formatting + logger_filter_mapping: Mapping of logger names to filtered message substrings + tracing_settings: OpenTelemetry tracing configuration + handlers: Custom handlers to use (defaults to StreamHandler) """ + # Create format string + fmt = _setup_format_string( + tracing_settings=tracing_settings, + log_format_local_dev_enabled=log_format_local_dev_enabled, + ) + + # Start the async logging context async with AsyncLoggingContext( handlers=handlers, log_format_local_dev_enabled=log_format_local_dev_enabled, fmt=fmt, ): - _logger.info("Async logging task started") + # Apply filters if provided + if logger_filter_mapping: + _apply_logger_filters(logger_filter_mapping) + + _logger.info("Async logging setup completed") + try: - # Use Event instead of sleep loop for better responsiveness - shutdown_event = asyncio.Event() - await shutdown_event.wait() - except asyncio.CancelledError: - _logger.debug("Async logging task canceled") - raise - except Exception: - _logger.exception("Async logging task failed") - raise + yield finally: - _logger.debug("Async logging task finished") + _logger.debug("Async logging context exiting") -async def setup_async_loggers( +@asynccontextmanager +async def setup_async_loggers_with_background_task( *, log_format_local_dev_enabled: bool = False, logger_filter_mapping: dict[LoggerName, list[MessageSubstring]] | None = None, tracing_settings: TracingSettings | None = None, handlers: list[logging.Handler] | None = None, -) -> None: + log_monitoring_interval: int = 10, # seconds +) -> AsyncGenerator[None, None]: """ - Set up non-blocking async logging infrastructure. + Enhanced async context manager using background_task infrastructure. - This function starts a background task that manages the logging infrastructure - for the entire application lifecycle. The task automatically shuts down when - the event loop terminates. + This function sets up the async logging infrastructure with a background task + that monitors the logging queue for health metrics. 
+ + Usage: + async with setup_async_loggers_with_background_task( + log_format_local_dev_enabled=True, + log_monitoring_interval=5 + ): + # Your async application code here + logger.info("This is non-blocking with monitoring!") Args: log_format_local_dev_enabled: Enable local development formatting logger_filter_mapping: Mapping of logger names to filtered message substrings tracing_settings: OpenTelemetry tracing configuration handlers: Custom handlers to use (defaults to StreamHandler) - - Note: - This function should be called once at application startup. - The logging infrastructure will remain active until the event loop shuts down. + log_monitoring_interval: Interval in seconds for monitoring background task """ - global _ASYNC_LOGGING_TASK - - if _ASYNC_LOGGING_TASK and not _ASYNC_LOGGING_TASK.done(): - _logger.warning("Async logging task already running") - return + # Import background_task locally to avoid circular imports + from . import background_task # Create format string fmt = _setup_format_string( @@ -679,23 +708,42 @@ async def setup_async_loggers( log_format_local_dev_enabled=log_format_local_dev_enabled, ) - # Start the background logging task - _ASYNC_LOGGING_TASK = asyncio.create_task( - _logging_task_runner( + # Define the monitoring task + async def log_monitoring_task() -> None: + """Background task to monitor logging queue health.""" + # This could be extended to monitor queue size, listener health, etc. + _logger.debug("Async logging monitoring task running") + + # Check if we have access to the queue for monitoring + # This is a placeholder for more sophisticated monitoring + manager: logging.Manager = logging.Logger.manager + active_loggers_count = len(manager.loggerDict) + _logger.debug("Active loggers count: %d", active_loggers_count) + + # Start the async logging context with background monitoring + async with ( + AsyncLoggingContext( handlers=handlers, log_format_local_dev_enabled=log_format_local_dev_enabled, fmt=fmt, - ) - ) - - # Allow the task to start - await asyncio.sleep(0) + ), + background_task.periodic_task( + log_monitoring_task, + interval=timedelta(seconds=log_monitoring_interval), + task_name="async_logging_monitor", + raise_on_error=False, + ), + ): + # Apply filters if provided + if logger_filter_mapping: + _apply_logger_filters(logger_filter_mapping) - # Apply filters if provided - if logger_filter_mapping: - _apply_logger_filters(logger_filter_mapping) + _logger.info("Async logging with background monitoring setup completed") - _logger.info("Async logging setup completed") + try: + yield + finally: + _logger.debug("Async logging with background monitoring context exiting") def _apply_logger_filters( @@ -715,53 +763,43 @@ def _apply_logger_filters( logger.addFilter(log_filter) -async def shutdown_async_loggers() -> None: - """ - Gracefully shutdown the async logging infrastructure. - - This function should be called during application shutdown to ensure - all log messages are processed before termination. 
- """ - global _ASYNC_LOGGING_TASK - - if _ASYNC_LOGGING_TASK and not _ASYNC_LOGGING_TASK.done(): - _logger.debug("Shutting down async logging task...") - _ASYNC_LOGGING_TASK.cancel() - with contextlib.suppress(asyncio.CancelledError): - await _ASYNC_LOGGING_TASK - _ASYNC_LOGGING_TASK = None - _logger.debug("Async logging shutdown complete") - - -@asynccontextmanager -async def async_logging_context( - *, - log_format_local_dev_enabled: bool = False, - logger_filter_mapping: dict[LoggerName, list[MessageSubstring]] | None = None, - tracing_settings: TracingSettings | None = None, - handlers: list[logging.Handler] | None = None, -) -> AsyncGenerator[None, None]: - """ - Async context manager for non-blocking logging. - - Usage: - async with async_logging_context(log_format_local_dev_enabled=True): - # Your async application code here - logger.info("This is non-blocking!") - - Args: - log_format_local_dev_enabled: Enable local development formatting - logger_filter_mapping: Mapping of logger names to filtered message substrings - tracing_settings: OpenTelemetry tracing configuration - handlers: Custom handlers to use (defaults to StreamHandler) - """ - await setup_async_loggers( - log_format_local_dev_enabled=log_format_local_dev_enabled, - logger_filter_mapping=logger_filter_mapping, - tracing_settings=tracing_settings, - handlers=handlers, - ) - try: - yield - finally: - await shutdown_async_loggers() +# Alias for backward compatibility and simpler API +async_logging_context = setup_async_loggers + +# ============================================================================= +# SUMMARY: ASYNC LOGGING REFACTORING COMPLETED +# ============================================================================= +# +# This module now provides robust, non-blocking async logging infrastructure with: +# +# 1. CORE FEATURES: +# - Unlimited queue size (no more queue.Full errors) +# - Proper context manager-based lifecycle management +# - Integration with background_task infrastructure +# - Clean separation of sync and async logging setup +# +# 2. API OPTIONS: +# - setup_async_loggers(): Basic async context manager +# - setup_async_loggers_with_background_task(): Enhanced version with monitoring +# - async_logging_context: Alias for backward compatibility +# - config_all_loggers(): Original synchronous setup (unchanged) +# +# 3. BACKGROUND TASK INTEGRATION: +# - Uses periodic_task from background_task.py for monitoring +# - Automatic lifecycle management +# - Concurrent task support +# - Graceful shutdown handling +# +# 4. 
BEST PRACTICES IMPLEMENTED: +# - No global state (context manager based) +# - Proper resource cleanup +# - SuperFastPython async logging patterns +# - Thread-safe queue operations +# - Backward compatibility maintained +# +# Usage examples available in: +# - async_logging_example_new.py (basic async logging) +# - async_logging_with_background_task_example.py (with monitoring) +# - background_task_logging_example.py (integration with background tasks) +# +# ============================================================================= From d98793085396cbb2b03b30a5b60873167a9cbe1a Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Tue, 8 Jul 2025 10:13:48 +0200 Subject: [PATCH 009/128] simplify --- .../src/servicelib/logging_utils.py | 105 +----------------- 1 file changed, 3 insertions(+), 102 deletions(-) diff --git a/packages/service-library/src/servicelib/logging_utils.py b/packages/service-library/src/servicelib/logging_utils.py index 5f84e1ffcf7..f1e96060de4 100644 --- a/packages/service-library/src/servicelib/logging_utils.py +++ b/packages/service-library/src/servicelib/logging_utils.py @@ -14,7 +14,7 @@ from asyncio import iscoroutinefunction from collections.abc import AsyncGenerator, Callable, Iterator from contextlib import asynccontextmanager, contextmanager -from datetime import datetime, timedelta +from datetime import datetime from inspect import getframeinfo, stack from pathlib import Path from typing import Any, NotRequired, TypeAlias, TypedDict, TypeVar @@ -611,15 +611,6 @@ def get_metrics(self) -> dict[str, Any] | None: return None -async def _logging_keep_alive() -> None: - """ - Simple keep-alive function for the logging infrastructure. - This function does nothing but allows the background task to run. - """ - # Just sleep to keep the task alive - the real work is done in the context manager - await asyncio.sleep(1.0) - - @asynccontextmanager async def setup_async_loggers( *, @@ -631,9 +622,6 @@ async def setup_async_loggers( """ Async context manager for non-blocking logging infrastructure. - This function sets up the async logging infrastructure using the background_task - infrastructure for proper lifecycle management. - Usage: async with setup_async_loggers(log_format_local_dev_enabled=True): # Your async application code here @@ -669,83 +657,6 @@ async def setup_async_loggers( _logger.debug("Async logging context exiting") -@asynccontextmanager -async def setup_async_loggers_with_background_task( - *, - log_format_local_dev_enabled: bool = False, - logger_filter_mapping: dict[LoggerName, list[MessageSubstring]] | None = None, - tracing_settings: TracingSettings | None = None, - handlers: list[logging.Handler] | None = None, - log_monitoring_interval: int = 10, # seconds -) -> AsyncGenerator[None, None]: - """ - Enhanced async context manager using background_task infrastructure. - - This function sets up the async logging infrastructure with a background task - that monitors the logging queue for health metrics. 
- - Usage: - async with setup_async_loggers_with_background_task( - log_format_local_dev_enabled=True, - log_monitoring_interval=5 - ): - # Your async application code here - logger.info("This is non-blocking with monitoring!") - - Args: - log_format_local_dev_enabled: Enable local development formatting - logger_filter_mapping: Mapping of logger names to filtered message substrings - tracing_settings: OpenTelemetry tracing configuration - handlers: Custom handlers to use (defaults to StreamHandler) - log_monitoring_interval: Interval in seconds for monitoring background task - """ - # Import background_task locally to avoid circular imports - from . import background_task - - # Create format string - fmt = _setup_format_string( - tracing_settings=tracing_settings, - log_format_local_dev_enabled=log_format_local_dev_enabled, - ) - - # Define the monitoring task - async def log_monitoring_task() -> None: - """Background task to monitor logging queue health.""" - # This could be extended to monitor queue size, listener health, etc. - _logger.debug("Async logging monitoring task running") - - # Check if we have access to the queue for monitoring - # This is a placeholder for more sophisticated monitoring - manager: logging.Manager = logging.Logger.manager - active_loggers_count = len(manager.loggerDict) - _logger.debug("Active loggers count: %d", active_loggers_count) - - # Start the async logging context with background monitoring - async with ( - AsyncLoggingContext( - handlers=handlers, - log_format_local_dev_enabled=log_format_local_dev_enabled, - fmt=fmt, - ), - background_task.periodic_task( - log_monitoring_task, - interval=timedelta(seconds=log_monitoring_interval), - task_name="async_logging_monitor", - raise_on_error=False, - ), - ): - # Apply filters if provided - if logger_filter_mapping: - _apply_logger_filters(logger_filter_mapping) - - _logger.info("Async logging with background monitoring setup completed") - - try: - yield - finally: - _logger.debug("Async logging with background monitoring context exiting") - - def _apply_logger_filters( logger_filter_mapping: dict[LoggerName, list[MessageSubstring]], ) -> None: @@ -775,22 +686,14 @@ def _apply_logger_filters( # 1. CORE FEATURES: # - Unlimited queue size (no more queue.Full errors) # - Proper context manager-based lifecycle management -# - Integration with background_task infrastructure # - Clean separation of sync and async logging setup # # 2. API OPTIONS: -# - setup_async_loggers(): Basic async context manager -# - setup_async_loggers_with_background_task(): Enhanced version with monitoring +# - setup_async_loggers(): Async context manager for non-blocking logging # - async_logging_context: Alias for backward compatibility # - config_all_loggers(): Original synchronous setup (unchanged) # -# 3. BACKGROUND TASK INTEGRATION: -# - Uses periodic_task from background_task.py for monitoring -# - Automatic lifecycle management -# - Concurrent task support -# - Graceful shutdown handling -# -# 4. BEST PRACTICES IMPLEMENTED: +# 3. 
BEST PRACTICES IMPLEMENTED: # - No global state (context manager based) # - Proper resource cleanup # - SuperFastPython async logging patterns @@ -799,7 +702,5 @@ def _apply_logger_filters( # # Usage examples available in: # - async_logging_example_new.py (basic async logging) -# - async_logging_with_background_task_example.py (with monitoring) -# - background_task_logging_example.py (integration with background tasks) # # ============================================================================= From d3f87d109259635b7cb6ccc9ebc4ca4bc8672e10 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Tue, 8 Jul 2025 10:15:08 +0200 Subject: [PATCH 010/128] refactor --- .../src/servicelib/logging_utils.py | 22 ++++++++++--------- 1 file changed, 12 insertions(+), 10 deletions(-) diff --git a/packages/service-library/src/servicelib/logging_utils.py b/packages/service-library/src/servicelib/logging_utils.py index f1e96060de4..232b41c2953 100644 --- a/packages/service-library/src/servicelib/logging_utils.py +++ b/packages/service-library/src/servicelib/logging_utils.py @@ -17,7 +17,7 @@ from datetime import datetime from inspect import getframeinfo, stack from pathlib import Path -from typing import Any, NotRequired, TypeAlias, TypedDict, TypeVar +from typing import Any, Final, NotRequired, TypeAlias, TypedDict, TypeVar from settings_library.tracing import TracingSettings @@ -109,7 +109,7 @@ def format(self, record) -> str: # SEE https://docs.python.org/3/library/logging.html#logrecord-attributes -DEFAULT_FORMATTING = " | ".join( +_DEFAULT_FORMATTING: Final[str] = " | ".join( [ "log_level=%(levelname)s", "log_timestamp=%(asctime)s", @@ -120,10 +120,12 @@ def format(self, record) -> str: ] ) -LOCAL_FORMATTING = "%(levelname)s: [%(asctime)s/%(processName)s] [%(name)s:%(funcName)s(%(lineno)d)] - %(message)s" +_LOCAL_FORMATTING: Final[str] = ( + "%(levelname)s: [%(asctime)s/%(processName)s] [%(name)s:%(funcName)s(%(lineno)d)] - %(message)s" +) # Tracing format strings -TRACING_FORMATTING = " | ".join( +_TRACING_FORMATTING: Final[str] = " | ".join( [ "log_level=%(levelname)s", "log_timestamp=%(asctime)s", @@ -138,7 +140,7 @@ def format(self, record) -> str: ] ) -LOCAL_TRACING_FORMATTING = ( +_LOCAL_TRACING_FORMATTING: Final[str] = ( "%(levelname)s: [%(asctime)s/%(processName)s] " "[log_trace_id=%(otelTraceID)s log_span_id=%(otelSpanID)s " "log_resource.service.name=%(otelServiceName)s log_trace_sampled=%(otelTraceSampled)s] " @@ -157,14 +159,14 @@ def _setup_format_string( """Create the appropriate format string based on settings.""" if log_format_local_dev_enabled: if tracing_settings is not None: - return LOCAL_TRACING_FORMATTING - return LOCAL_FORMATTING + return _LOCAL_TRACING_FORMATTING + return _LOCAL_FORMATTING if tracing_settings is not None: setup_log_tracing(tracing_settings=tracing_settings) - return TRACING_FORMATTING + return _TRACING_FORMATTING - return DEFAULT_FORMATTING + return _DEFAULT_FORMATTING def _set_logging_handler( @@ -494,7 +496,7 @@ def __init__( ) -> None: self.handlers = handlers or [logging.StreamHandler()] self.log_format_local_dev_enabled = log_format_local_dev_enabled - self.fmt = fmt or DEFAULT_FORMATTING + self.fmt = fmt or _DEFAULT_FORMATTING self.queue: queue.Queue | None = None self.listener: logging.handlers.QueueListener | None = None self.queue_handler: logging.handlers.QueueHandler | None = None From 9244c376b4e5c8082c517b098880b92e2083ccc9 Mon Sep 17 00:00:00 2001 From: sanderegg 
<35365065+sanderegg@users.noreply.github.com> Date: Tue, 8 Jul 2025 10:16:49 +0200 Subject: [PATCH 011/128] cleanup --- .../src/servicelib/logging_utils.py | 32 ------------------- 1 file changed, 32 deletions(-) diff --git a/packages/service-library/src/servicelib/logging_utils.py b/packages/service-library/src/servicelib/logging_utils.py index 232b41c2953..c17f03ec135 100644 --- a/packages/service-library/src/servicelib/logging_utils.py +++ b/packages/service-library/src/servicelib/logging_utils.py @@ -674,35 +674,3 @@ def _apply_logger_filters( log_filter = GeneralLogFilter(filtered_routes) logger.addFilter(log_filter) - - -# Alias for backward compatibility and simpler API -async_logging_context = setup_async_loggers - -# ============================================================================= -# SUMMARY: ASYNC LOGGING REFACTORING COMPLETED -# ============================================================================= -# -# This module now provides robust, non-blocking async logging infrastructure with: -# -# 1. CORE FEATURES: -# - Unlimited queue size (no more queue.Full errors) -# - Proper context manager-based lifecycle management -# - Clean separation of sync and async logging setup -# -# 2. API OPTIONS: -# - setup_async_loggers(): Async context manager for non-blocking logging -# - async_logging_context: Alias for backward compatibility -# - config_all_loggers(): Original synchronous setup (unchanged) -# -# 3. BEST PRACTICES IMPLEMENTED: -# - No global state (context manager based) -# - Proper resource cleanup -# - SuperFastPython async logging patterns -# - Thread-safe queue operations -# - Backward compatibility maintained -# -# Usage examples available in: -# - async_logging_example_new.py (basic async logging) -# -# ============================================================================= From 70c5835c372d670fa13b19181f51d790bed473e6 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Tue, 8 Jul 2025 15:00:26 +0200 Subject: [PATCH 012/128] add test --- .../tests/test_logging_utils.py | 153 +++++++++++++++++- 1 file changed, 151 insertions(+), 2 deletions(-) diff --git a/packages/service-library/tests/test_logging_utils.py b/packages/service-library/tests/test_logging_utils.py index d56e07962f2..b595cd08406 100644 --- a/packages/service-library/tests/test_logging_utils.py +++ b/packages/service-library/tests/test_logging_utils.py @@ -18,6 +18,7 @@ log_decorator, log_exceptions, set_parent_module_log_level, + setup_async_loggers, ) _logger = logging.getLogger(__name__) @@ -325,8 +326,9 @@ def test_log_exceptions_and_suppress_without_exc_info( caplog.set_level(level) exc_msg = "logs exceptions and suppresses" - with suppress(ValueError), log_exceptions( - _logger, level, "CONTEXT", exc_info=False + with ( + suppress(ValueError), + log_exceptions(_logger, level, "CONTEXT", exc_info=False), ): raise ValueError(exc_msg) @@ -410,3 +412,150 @@ def test_set_parent_module_log_level_(caplog: pytest.LogCaptureFixture): assert "parent warning" in caplog.text assert "child warning" in caplog.text + + +@pytest.mark.parametrize("log_format_local_dev_enabled", [True, False]) +async def test_setup_async_loggers_basic( + caplog: pytest.LogCaptureFixture, + log_format_local_dev_enabled: bool, +): + """Test basic async logging setup without filters.""" + caplog.clear() + caplog.set_level(logging.INFO) + + async with setup_async_loggers( + log_format_local_dev_enabled=log_format_local_dev_enabled, + ): + test_logger = 
logging.getLogger("test_async_logger") + test_logger.info("Test async log message") + + # Give some time for async logging to process + import asyncio + + await asyncio.sleep(0.1) + + # Check that the log message was captured + assert "Test async log message" in caplog.text + assert "Async logging setup completed" in caplog.text + + +async def test_setup_async_loggers_with_filters( + caplog: pytest.LogCaptureFixture, +): + """Test async logging setup with logger filters.""" + caplog.clear() + caplog.set_level(logging.INFO) + + # Define filter mapping + filter_mapping = { + "test_filtered_logger": ["filtered_message"], + } + + async with setup_async_loggers( + log_format_local_dev_enabled=True, + logger_filter_mapping=filter_mapping, + ): + test_logger = logging.getLogger("test_filtered_logger") + unfiltered_logger = logging.getLogger("test_unfiltered_logger") + + # This should be filtered out + test_logger.info("This is a filtered_message") + + # This should pass through + test_logger.info("This is an unfiltered message") + unfiltered_logger.info("This is from unfiltered logger") + + # Give some time for async logging to process + import asyncio + + await asyncio.sleep(0.1) + + # Check that filtered message was not captured + assert "This is a filtered_message" not in caplog.text + + # Check that unfiltered messages were captured + assert "This is an unfiltered message" in caplog.text + assert "This is from unfiltered logger" in caplog.text + + +async def test_setup_async_loggers_with_tracing_settings( + caplog: pytest.LogCaptureFixture, +): + """Test async logging setup with tracing settings.""" + caplog.clear() + caplog.set_level(logging.INFO) + + # Note: We can't easily test actual tracing without setting up OpenTelemetry + # But we can test that the function accepts the parameter + async with setup_async_loggers( + log_format_local_dev_enabled=False, + tracing_settings=None, # Would normally be TracingSettings object + ): + test_logger = logging.getLogger("test_tracing_logger") + test_logger.info("Test message with tracing settings") + + # Give some time for async logging to process + import asyncio + + await asyncio.sleep(0.1) + + assert "Test message with tracing settings" in caplog.text + + +async def test_setup_async_loggers_context_manager_cleanup( + caplog: pytest.LogCaptureFixture, +): + """Test that async logging context manager properly cleans up.""" + caplog.clear() + caplog.set_level(logging.DEBUG) + + test_logger = logging.getLogger("test_cleanup_logger") + + async with setup_async_loggers(log_format_local_dev_enabled=True): + # During the context, handlers should be replaced + test_logger.info("Message during context") + + # Give some time for async logging to process + import asyncio + + await asyncio.sleep(0.1) + + # After context exit, check cleanup message + assert "Async logging context exiting" in caplog.text + + # Note: We can't easily test handler restoration without more complex setup + # but we can verify the function completed without errors + + +async def test_setup_async_loggers_exception_handling( + caplog: pytest.LogCaptureFixture, +): + """Test that async logging handles exceptions gracefully.""" + caplog.clear() + caplog.set_level(logging.DEBUG) # Set to DEBUG to capture cleanup messages + + def _raise_test_exception(): + """Helper function to raise exception for testing.""" + exc_msg = "Test exception" + raise ValueError(exc_msg) + + try: + async with setup_async_loggers(log_format_local_dev_enabled=True): + test_logger = 
logging.getLogger("test_exception_logger") + test_logger.info("Message before exception") + + # Give some time for async logging to process + import asyncio + + await asyncio.sleep(0.1) + + # Raise an exception to test cleanup + _raise_test_exception() + + except ValueError: + # Expected exception + pass + + # Check that the message was logged and cleanup happened + assert "Message before exception" in caplog.text + assert "Async logging context exiting" in caplog.text From 7efa1672d1875eaa2ef8900bec65d07d3f378a70 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Tue, 8 Jul 2025 15:00:45 +0200 Subject: [PATCH 013/128] this might be working --- .../src/servicelib/logging_utils.py | 177 ++++++++---------- 1 file changed, 76 insertions(+), 101 deletions(-) diff --git a/packages/service-library/src/servicelib/logging_utils.py b/packages/service-library/src/servicelib/logging_utils.py index c17f03ec135..8aa0f27f53f 100644 --- a/packages/service-library/src/servicelib/logging_utils.py +++ b/packages/service-library/src/servicelib/logging_utils.py @@ -183,6 +183,23 @@ def _set_logging_handler( ) +def _apply_logger_filters( + logger_filter_mapping: dict[LoggerName, list[MessageSubstring]], +) -> None: + """Apply filters to specific loggers.""" + for logger_name, filtered_routes in logger_filter_mapping.items(): + logger = logging.getLogger(logger_name) + if not logger.hasHandlers(): + _logger.warning( + "Logger %s does not have any handlers. Filter will not be added.", + logger_name, + ) + continue + + log_filter = GeneralLogFilter(filtered_routes) + logger.addFilter(log_filter) + + def config_all_loggers( *, log_format_local_dev_enabled: bool, @@ -218,17 +235,50 @@ def config_all_loggers( ) # Apply filters - for logger_name, filtered_routes in logger_filter_mapping.items(): - logger = logging.getLogger(logger_name) - if not logger.hasHandlers(): - _logger.warning( - "Logger %s does not have any handlers. Filter will not be added.", - logger_name, - ) - continue + _apply_logger_filters(logger_filter_mapping) - log_filter = GeneralLogFilter(filtered_routes) - logger.addFilter(log_filter) + +@asynccontextmanager +async def setup_async_loggers( + *, + log_format_local_dev_enabled: bool = False, + logger_filter_mapping: dict[LoggerName, list[MessageSubstring]] | None = None, + tracing_settings: TracingSettings | None = None, +) -> AsyncGenerator[None, None]: + """ + Async context manager for non-blocking logging infrastructure. 
+ + Usage: + async with setup_async_loggers(log_format_local_dev_enabled=True): + # Your async application code here + logger.info("This is non-blocking!") + + Args: + log_format_local_dev_enabled: Enable local development formatting + logger_filter_mapping: Mapping of logger names to filtered message substrings + tracing_settings: OpenTelemetry tracing configuration + """ + # Create format string + fmt = _setup_format_string( + tracing_settings=tracing_settings, + log_format_local_dev_enabled=log_format_local_dev_enabled, + ) + + # Start the async logging context + async with AsyncLoggingContext( + log_format_local_dev_enabled=log_format_local_dev_enabled, + fmt=fmt, + ): + # Apply filters if provided + if logger_filter_mapping: + _apply_logger_filters(logger_filter_mapping) + + _logger.info("Async logging setup completed") + + try: + yield + finally: + _logger.debug("Async logging context exiting") class LogExceptionsKwargsDict(TypedDict, total=True): @@ -484,17 +534,15 @@ def set_parent_module_log_level(current_module: str, desired_log_level: int) -> class AsyncLoggingContext: """ Async context manager for non-blocking logging infrastructure. - Based on the pattern from SuperFastPython article and integrated with background_task. + Based on the pattern from SuperFastPython article. """ def __init__( self, *, - handlers: list[logging.Handler] | None = None, log_format_local_dev_enabled: bool = False, fmt: str | None = None, ) -> None: - self.handlers = handlers or [logging.StreamHandler()] self.log_format_local_dev_enabled = log_format_local_dev_enabled self.fmt = fmt or _DEFAULT_FORMATTING self.queue: queue.Queue | None = None @@ -516,20 +564,18 @@ async def _setup_async_logging(self) -> None: # Create unlimited queue for log messages self.queue = queue.Queue() - # Configure handlers with proper formatting - formatted_handlers = [] - for handler in self.handlers: - handler.setFormatter( - CustomFormatter( - self.fmt, - log_format_local_dev_enabled=self.log_format_local_dev_enabled, - ) + # Use default StreamHandler with proper formatting + handler = logging.StreamHandler() + handler.setFormatter( + CustomFormatter( + self.fmt, + log_format_local_dev_enabled=self.log_format_local_dev_enabled, ) - formatted_handlers.append(handler) + ) # Create and start the queue listener self.listener = logging.handlers.QueueListener( - self.queue, *formatted_handlers, respect_handler_level=True + self.queue, handler, respect_handler_level=True ) self.listener.start() @@ -542,7 +588,7 @@ async def _setup_async_logging(self) -> None: _logger.info("Async logging context initialized with unlimited queue") async def _configure_loggers(self) -> None: - """Replace all logger handlers with queue handler.""" + """Add queue handler to all loggers while preserving existing handlers.""" # Get all loggers manager: logging.Manager = logging.Logger.manager root_logger = logging.getLogger() @@ -550,17 +596,14 @@ async def _configure_loggers(self) -> None: logging.getLogger(name) for name in manager.loggerDict ] - # Store original handlers and replace with queue handler + # Store original handlers and add queue handler for logger in all_loggers: logger_name = logger.name or "root" # Store original handlers self.original_handlers[logger_name] = logger.handlers[:] - # Clear existing handlers - logger.handlers.clear() - - # Add queue handler + # Add queue handler alongside existing handlers if self.queue_handler: logger.addHandler(self.queue_handler) @@ -570,7 +613,7 @@ async def _configure_loggers(self) -> None: async 
def _cleanup_async_logging(self) -> None: """Restore original logging configuration.""" try: - # Restore original handlers + # Remove queue handlers from all loggers manager: logging.Manager = logging.Logger.manager root_logger = logging.getLogger() all_loggers = [root_logger] + [ @@ -578,14 +621,9 @@ async def _cleanup_async_logging(self) -> None: ] for logger in all_loggers: - logger_name = logger.name or "root" - if logger_name in self.original_handlers: - # Clear queue handlers - logger.handlers.clear() - - # Restore original handlers - for handler in self.original_handlers[logger_name]: - logger.addHandler(handler) + # Remove only the queue handler we added + if self.queue_handler and self.queue_handler in logger.handlers: + logger.removeHandler(self.queue_handler) # Stop the queue listener if self.listener: @@ -611,66 +649,3 @@ def get_metrics(self) -> dict[str, Any] | None: "listener_active": self.listener is not None, } return None - - -@asynccontextmanager -async def setup_async_loggers( - *, - log_format_local_dev_enabled: bool = False, - logger_filter_mapping: dict[LoggerName, list[MessageSubstring]] | None = None, - tracing_settings: TracingSettings | None = None, - handlers: list[logging.Handler] | None = None, -) -> AsyncGenerator[None, None]: - """ - Async context manager for non-blocking logging infrastructure. - - Usage: - async with setup_async_loggers(log_format_local_dev_enabled=True): - # Your async application code here - logger.info("This is non-blocking!") - - Args: - log_format_local_dev_enabled: Enable local development formatting - logger_filter_mapping: Mapping of logger names to filtered message substrings - tracing_settings: OpenTelemetry tracing configuration - handlers: Custom handlers to use (defaults to StreamHandler) - """ - # Create format string - fmt = _setup_format_string( - tracing_settings=tracing_settings, - log_format_local_dev_enabled=log_format_local_dev_enabled, - ) - - # Start the async logging context - async with AsyncLoggingContext( - handlers=handlers, - log_format_local_dev_enabled=log_format_local_dev_enabled, - fmt=fmt, - ): - # Apply filters if provided - if logger_filter_mapping: - _apply_logger_filters(logger_filter_mapping) - - _logger.info("Async logging setup completed") - - try: - yield - finally: - _logger.debug("Async logging context exiting") - - -def _apply_logger_filters( - logger_filter_mapping: dict[LoggerName, list[MessageSubstring]], -) -> None: - """Apply filters to specific loggers.""" - for logger_name, filtered_routes in logger_filter_mapping.items(): - logger = logging.getLogger(logger_name) - if not logger.hasHandlers(): - _logger.warning( - "Logger %s does not have any handlers. 
Filter will not be added.", - logger_name, - ) - continue - - log_filter = GeneralLogFilter(filtered_routes) - logger.addFilter(log_filter) From 9843048d9825314a911a6ef4f9a68e3ae5382490 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Tue, 8 Jul 2025 15:02:21 +0200 Subject: [PATCH 014/128] cleanup --- packages/service-library/src/servicelib/logging_utils.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/packages/service-library/src/servicelib/logging_utils.py b/packages/service-library/src/servicelib/logging_utils.py index 8aa0f27f53f..bc3855b28dd 100644 --- a/packages/service-library/src/servicelib/logging_utils.py +++ b/packages/service-library/src/servicelib/logging_utils.py @@ -528,9 +528,6 @@ def set_parent_module_log_level(current_module: str, desired_log_level: int) -> logging.getLogger(parent_module).setLevel(desired_log_level) -# Remove the global task variable since we'll use background_task infrastructure - - class AsyncLoggingContext: """ Async context manager for non-blocking logging infrastructure. From 9eef0e4c488e9955d47b17c7d22e2841f7957062 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Tue, 8 Jul 2025 15:08:51 +0200 Subject: [PATCH 015/128] initial --- .../src/servicelib/logging_utils.py | 25 ++++++++----------- .../tests/test_logging_utils.py | 1 - 2 files changed, 10 insertions(+), 16 deletions(-) diff --git a/packages/service-library/src/servicelib/logging_utils.py b/packages/service-library/src/servicelib/logging_utils.py index bc3855b28dd..15010666b8c 100644 --- a/packages/service-library/src/servicelib/logging_utils.py +++ b/packages/service-library/src/servicelib/logging_utils.py @@ -214,18 +214,18 @@ def config_all_loggers( logger_filter_mapping: Mapping of logger names to filtered message substrings tracing_settings: OpenTelemetry tracing configuration """ - the_manager: logging.Manager = logging.Logger.manager - root_logger = logging.getLogger() - loggers = [root_logger] + [ - logging.getLogger(name) for name in the_manager.loggerDict - ] - # Create format string fmt = _setup_format_string( tracing_settings=tracing_settings, log_format_local_dev_enabled=log_format_local_dev_enabled, ) + the_manager: logging.Manager = logging.Logger.manager + root_logger = logging.getLogger() + loggers = [root_logger] + [ + logging.getLogger(name) for name in the_manager.loggerDict + ] + # Apply handlers to loggers for logger in loggers: _set_logging_handler( @@ -241,9 +241,9 @@ def config_all_loggers( @asynccontextmanager async def setup_async_loggers( *, - log_format_local_dev_enabled: bool = False, - logger_filter_mapping: dict[LoggerName, list[MessageSubstring]] | None = None, - tracing_settings: TracingSettings | None = None, + log_format_local_dev_enabled: bool, + logger_filter_mapping: dict[LoggerName, list[MessageSubstring]], + tracing_settings: TracingSettings | None, ) -> AsyncGenerator[None, None]: """ Async context manager for non-blocking logging infrastructure. 
@@ -273,12 +273,7 @@ async def setup_async_loggers( if logger_filter_mapping: _apply_logger_filters(logger_filter_mapping) - _logger.info("Async logging setup completed") - - try: - yield - finally: - _logger.debug("Async logging context exiting") + yield class LogExceptionsKwargsDict(TypedDict, total=True): diff --git a/packages/service-library/tests/test_logging_utils.py b/packages/service-library/tests/test_logging_utils.py index b595cd08406..bbc072b7f97 100644 --- a/packages/service-library/tests/test_logging_utils.py +++ b/packages/service-library/tests/test_logging_utils.py @@ -436,7 +436,6 @@ async def test_setup_async_loggers_basic( # Check that the log message was captured assert "Test async log message" in caplog.text - assert "Async logging setup completed" in caplog.text async def test_setup_async_loggers_with_filters( From 8f88d4430a3a039d3943f5f102a70dcaa49431e2 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Tue, 8 Jul 2025 15:11:09 +0200 Subject: [PATCH 016/128] almost ready --- .../tests/test_logging_utils.py | 25 +++++++++++-------- 1 file changed, 15 insertions(+), 10 deletions(-) diff --git a/packages/service-library/tests/test_logging_utils.py b/packages/service-library/tests/test_logging_utils.py index bbc072b7f97..b43f672fc65 100644 --- a/packages/service-library/tests/test_logging_utils.py +++ b/packages/service-library/tests/test_logging_utils.py @@ -425,6 +425,8 @@ async def test_setup_async_loggers_basic( async with setup_async_loggers( log_format_local_dev_enabled=log_format_local_dev_enabled, + logger_filter_mapping={}, # No filters for this test + tracing_settings=None, # No tracing for this test ): test_logger = logging.getLogger("test_async_logger") test_logger.info("Test async log message") @@ -453,6 +455,7 @@ async def test_setup_async_loggers_with_filters( async with setup_async_loggers( log_format_local_dev_enabled=True, logger_filter_mapping=filter_mapping, + tracing_settings=None, # No tracing for this test ): test_logger = logging.getLogger("test_filtered_logger") unfiltered_logger = logging.getLogger("test_unfiltered_logger") @@ -488,7 +491,8 @@ async def test_setup_async_loggers_with_tracing_settings( # But we can test that the function accepts the parameter async with setup_async_loggers( log_format_local_dev_enabled=False, - tracing_settings=None, # Would normally be TracingSettings object + logger_filter_mapping={}, # No filters for this test + tracing_settings=None, ): test_logger = logging.getLogger("test_tracing_logger") test_logger.info("Test message with tracing settings") @@ -510,7 +514,11 @@ async def test_setup_async_loggers_context_manager_cleanup( test_logger = logging.getLogger("test_cleanup_logger") - async with setup_async_loggers(log_format_local_dev_enabled=True): + async with setup_async_loggers( + log_format_local_dev_enabled=True, + logger_filter_mapping={}, + tracing_settings=None, + ): # During the context, handlers should be replaced test_logger.info("Message during context") @@ -519,12 +527,6 @@ async def test_setup_async_loggers_context_manager_cleanup( await asyncio.sleep(0.1) - # After context exit, check cleanup message - assert "Async logging context exiting" in caplog.text - - # Note: We can't easily test handler restoration without more complex setup - # but we can verify the function completed without errors - async def test_setup_async_loggers_exception_handling( caplog: pytest.LogCaptureFixture, @@ -539,7 +541,11 @@ def _raise_test_exception(): raise ValueError(exc_msg) 
try: - async with setup_async_loggers(log_format_local_dev_enabled=True): + async with setup_async_loggers( + log_format_local_dev_enabled=True, + logger_filter_mapping={}, + tracing_settings=None, + ): test_logger = logging.getLogger("test_exception_logger") test_logger.info("Message before exception") @@ -557,4 +563,3 @@ def _raise_test_exception(): # Check that the message was logged and cleanup happened assert "Message before exception" in caplog.text - assert "Async logging context exiting" in caplog.text From d58c88bcaadf7445c81c41c53355b2464266fc2a Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Tue, 8 Jul 2025 15:15:25 +0200 Subject: [PATCH 017/128] almost ready --- packages/service-library/tests/test_logging_utils.py | 11 +---------- 1 file changed, 1 insertion(+), 10 deletions(-) diff --git a/packages/service-library/tests/test_logging_utils.py b/packages/service-library/tests/test_logging_utils.py index b43f672fc65..ffb703560e5 100644 --- a/packages/service-library/tests/test_logging_utils.py +++ b/packages/service-library/tests/test_logging_utils.py @@ -1,6 +1,7 @@ # pylint:disable=redefined-outer-name # pylint:disable=unused-argument +import asyncio import logging from collections.abc import Iterable from contextlib import suppress @@ -432,8 +433,6 @@ async def test_setup_async_loggers_basic( test_logger.info("Test async log message") # Give some time for async logging to process - import asyncio - await asyncio.sleep(0.1) # Check that the log message was captured @@ -468,8 +467,6 @@ async def test_setup_async_loggers_with_filters( unfiltered_logger.info("This is from unfiltered logger") # Give some time for async logging to process - import asyncio - await asyncio.sleep(0.1) # Check that filtered message was not captured @@ -498,8 +495,6 @@ async def test_setup_async_loggers_with_tracing_settings( test_logger.info("Test message with tracing settings") # Give some time for async logging to process - import asyncio - await asyncio.sleep(0.1) assert "Test message with tracing settings" in caplog.text @@ -523,8 +518,6 @@ async def test_setup_async_loggers_context_manager_cleanup( test_logger.info("Message during context") # Give some time for async logging to process - import asyncio - await asyncio.sleep(0.1) @@ -550,8 +543,6 @@ def _raise_test_exception(): test_logger.info("Message before exception") # Give some time for async logging to process - import asyncio - await asyncio.sleep(0.1) # Raise an exception to test cleanup From 023bc5741aaec9e26838ee5594c50564149ebc89 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Tue, 8 Jul 2025 15:24:50 +0200 Subject: [PATCH 018/128] use tenacity --- .../tests/test_logging_utils.py | 64 +++++++++++++++---- 1 file changed, 53 insertions(+), 11 deletions(-) diff --git a/packages/service-library/tests/test_logging_utils.py b/packages/service-library/tests/test_logging_utils.py index ffb703560e5..1a168d18c5d 100644 --- a/packages/service-library/tests/test_logging_utils.py +++ b/packages/service-library/tests/test_logging_utils.py @@ -1,7 +1,6 @@ # pylint:disable=redefined-outer-name # pylint:disable=unused-argument -import asyncio import logging from collections.abc import Iterable from contextlib import suppress @@ -21,6 +20,12 @@ set_parent_module_log_level, setup_async_loggers, ) +from tenacity import ( + AsyncRetrying, + retry_if_exception_type, + stop_after_delay, + wait_fixed, +) _logger = logging.getLogger(__name__) _ALL_LOGGING_LEVELS = [ @@ 
-432,8 +437,15 @@ async def test_setup_async_loggers_basic( test_logger = logging.getLogger("test_async_logger") test_logger.info("Test async log message") - # Give some time for async logging to process - await asyncio.sleep(0.1) + # Wait for log message to appear in caplog using tenacity + async for attempt in AsyncRetrying( + wait=wait_fixed(0.01), + stop=stop_after_delay(2.0), + reraise=True, + retry=retry_if_exception_type(AssertionError), + ): + with attempt: + assert "Test async log message" in caplog.text # Check that the log message was captured assert "Test async log message" in caplog.text @@ -466,8 +478,17 @@ async def test_setup_async_loggers_with_filters( test_logger.info("This is an unfiltered message") unfiltered_logger.info("This is from unfiltered logger") - # Give some time for async logging to process - await asyncio.sleep(0.1) + # Wait for log messages to appear in caplog using tenacity + async for attempt in AsyncRetrying( + wait=wait_fixed(0.01), + stop=stop_after_delay(2.0), + reraise=True, + retry=retry_if_exception_type(AssertionError), + ): + with attempt: + # Check that unfiltered messages were captured + assert "This is an unfiltered message" in caplog.text + assert "This is from unfiltered logger" in caplog.text # Check that filtered message was not captured assert "This is a filtered_message" not in caplog.text @@ -494,8 +515,15 @@ async def test_setup_async_loggers_with_tracing_settings( test_logger = logging.getLogger("test_tracing_logger") test_logger.info("Test message with tracing settings") - # Give some time for async logging to process - await asyncio.sleep(0.1) + # Wait for log message to appear in caplog using tenacity + async for attempt in AsyncRetrying( + wait=wait_fixed(0.01), + stop=stop_after_delay(2.0), + reraise=True, + retry=retry_if_exception_type(AssertionError), + ): + with attempt: + assert "Test message with tracing settings" in caplog.text assert "Test message with tracing settings" in caplog.text @@ -517,8 +545,15 @@ async def test_setup_async_loggers_context_manager_cleanup( # During the context, handlers should be replaced test_logger.info("Message during context") - # Give some time for async logging to process - await asyncio.sleep(0.1) + # Wait for log message to appear in caplog using tenacity + async for attempt in AsyncRetrying( + wait=wait_fixed(0.01), + stop=stop_after_delay(2.0), + reraise=True, + retry=retry_if_exception_type(AssertionError), + ): + with attempt: + assert "Message during context" in caplog.text async def test_setup_async_loggers_exception_handling( @@ -542,8 +577,15 @@ def _raise_test_exception(): test_logger = logging.getLogger("test_exception_logger") test_logger.info("Message before exception") - # Give some time for async logging to process - await asyncio.sleep(0.1) + # Wait for log message to appear in caplog using tenacity + async for attempt in AsyncRetrying( + wait=wait_fixed(0.01), + stop=stop_after_delay(2.0), + reraise=True, + retry=retry_if_exception_type(AssertionError), + ): + with attempt: + assert "Message before exception" in caplog.text # Raise an exception to test cleanup _raise_test_exception() From 1b419b1f203a6e287133829bbfeff7690a6ca3e9 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Tue, 8 Jul 2025 15:29:35 +0200 Subject: [PATCH 019/128] simplify --- .../tests/test_logging_utils.py | 74 ++++++------------- 1 file changed, 22 insertions(+), 52 deletions(-) diff --git a/packages/service-library/tests/test_logging_utils.py 
b/packages/service-library/tests/test_logging_utils.py index 1a168d18c5d..f9f1e5232d7 100644 --- a/packages/service-library/tests/test_logging_utils.py +++ b/packages/service-library/tests/test_logging_utils.py @@ -28,6 +28,22 @@ ) _logger = logging.getLogger(__name__) + + +async def _assert_check_log_message( + caplog: pytest.LogCaptureFixture, expected_message: str +) -> None: + """Helper to reliably check if a log message appears in caplog using tenacity.""" + async for attempt in AsyncRetrying( + wait=wait_fixed(0.01), + stop=stop_after_delay(2.0), + reraise=True, + retry=retry_if_exception_type(AssertionError), + ): + with attempt: + assert expected_message in caplog.text + + _ALL_LOGGING_LEVELS = [ logging.CRITICAL, logging.ERROR, @@ -437,18 +453,7 @@ async def test_setup_async_loggers_basic( test_logger = logging.getLogger("test_async_logger") test_logger.info("Test async log message") - # Wait for log message to appear in caplog using tenacity - async for attempt in AsyncRetrying( - wait=wait_fixed(0.01), - stop=stop_after_delay(2.0), - reraise=True, - retry=retry_if_exception_type(AssertionError), - ): - with attempt: - assert "Test async log message" in caplog.text - - # Check that the log message was captured - assert "Test async log message" in caplog.text + await _assert_check_log_message(caplog, "Test async log message") async def test_setup_async_loggers_with_filters( @@ -478,17 +483,8 @@ async def test_setup_async_loggers_with_filters( test_logger.info("This is an unfiltered message") unfiltered_logger.info("This is from unfiltered logger") - # Wait for log messages to appear in caplog using tenacity - async for attempt in AsyncRetrying( - wait=wait_fixed(0.01), - stop=stop_after_delay(2.0), - reraise=True, - retry=retry_if_exception_type(AssertionError), - ): - with attempt: - # Check that unfiltered messages were captured - assert "This is an unfiltered message" in caplog.text - assert "This is from unfiltered logger" in caplog.text + await _assert_check_log_message(caplog, "This is an unfiltered message") + await _assert_check_log_message(caplog, "This is from unfiltered logger") # Check that filtered message was not captured assert "This is a filtered_message" not in caplog.text @@ -515,17 +511,7 @@ async def test_setup_async_loggers_with_tracing_settings( test_logger = logging.getLogger("test_tracing_logger") test_logger.info("Test message with tracing settings") - # Wait for log message to appear in caplog using tenacity - async for attempt in AsyncRetrying( - wait=wait_fixed(0.01), - stop=stop_after_delay(2.0), - reraise=True, - retry=retry_if_exception_type(AssertionError), - ): - with attempt: - assert "Test message with tracing settings" in caplog.text - - assert "Test message with tracing settings" in caplog.text + await _assert_check_log_message(caplog, "Test message with tracing settings") async def test_setup_async_loggers_context_manager_cleanup( @@ -545,15 +531,7 @@ async def test_setup_async_loggers_context_manager_cleanup( # During the context, handlers should be replaced test_logger.info("Message during context") - # Wait for log message to appear in caplog using tenacity - async for attempt in AsyncRetrying( - wait=wait_fixed(0.01), - stop=stop_after_delay(2.0), - reraise=True, - retry=retry_if_exception_type(AssertionError), - ): - with attempt: - assert "Message during context" in caplog.text + await _assert_check_log_message(caplog, "Message during context") async def test_setup_async_loggers_exception_handling( @@ -577,15 +555,7 @@ def 
_raise_test_exception(): test_logger = logging.getLogger("test_exception_logger") test_logger.info("Message before exception") - # Wait for log message to appear in caplog using tenacity - async for attempt in AsyncRetrying( - wait=wait_fixed(0.01), - stop=stop_after_delay(2.0), - reraise=True, - retry=retry_if_exception_type(AssertionError), - ): - with attempt: - assert "Message before exception" in caplog.text + await _assert_check_log_message(caplog, "Message before exception") # Raise an exception to test cleanup _raise_test_exception() From d946a6545178d825a4c6fccf1d18e7090376086d Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Tue, 8 Jul 2025 16:22:03 +0200 Subject: [PATCH 020/128] minor --- .../src/servicelib/logging_utils.py | 174 +++++------------- 1 file changed, 49 insertions(+), 125 deletions(-) diff --git a/packages/service-library/src/servicelib/logging_utils.py b/packages/service-library/src/servicelib/logging_utils.py index 15010666b8c..1f5cf63841e 100644 --- a/packages/service-library/src/servicelib/logging_utils.py +++ b/packages/service-library/src/servicelib/logging_utils.py @@ -264,17 +264,61 @@ async def setup_async_loggers( log_format_local_dev_enabled=log_format_local_dev_enabled, ) - # Start the async logging context - async with AsyncLoggingContext( - log_format_local_dev_enabled=log_format_local_dev_enabled, - fmt=fmt, - ): + # Set up async logging infrastructure + log_queue: queue.Queue = queue.Queue() + # Create handler with proper formatting + handler = logging.StreamHandler() + handler.setFormatter( + CustomFormatter( + fmt, + log_format_local_dev_enabled=log_format_local_dev_enabled, + ) + ) + + # Create and start the queue listener + listener = logging.handlers.QueueListener( + log_queue, handler, respect_handler_level=True + ) + listener.start() + + # Create queue handler for loggers + queue_handler = logging.handlers.QueueHandler(log_queue) + + # Configure all existing loggers - add queue handler alongside existing handlers + manager: logging.Manager = logging.Logger.manager + root_logger = logging.getLogger() + all_loggers = [root_logger] + [ + logging.getLogger(name) for name in manager.loggerDict + ] + + # Add queue handler to all loggers (preserving existing handlers) + for logger in all_loggers: + logger.addHandler(queue_handler) + + try: # Apply filters if provided if logger_filter_mapping: _apply_logger_filters(logger_filter_mapping) + _logger.info("Async logging context initialized with unlimited queue") yield + finally: + # Cleanup: Remove queue handlers from all loggers + try: + for logger in all_loggers: + if queue_handler in logger.handlers: + logger.removeHandler(queue_handler) + + # Stop the queue listener + _logger.debug("Shutting down async logging listener...") + listener.stop() + _logger.debug("Async logging context cleanup complete") + + except Exception as exc: + sys.stderr.write(f"Error during async logging cleanup: {exc}\n") + sys.stderr.flush() + class LogExceptionsKwargsDict(TypedDict, total=True): logger: logging.Logger @@ -521,123 +565,3 @@ def guess_message_log_level(message: str) -> LogLevelInt: def set_parent_module_log_level(current_module: str, desired_log_level: int) -> None: parent_module = ".".join(current_module.split(".")[:-1]) logging.getLogger(parent_module).setLevel(desired_log_level) - - -class AsyncLoggingContext: - """ - Async context manager for non-blocking logging infrastructure. - Based on the pattern from SuperFastPython article. 
- """ - - def __init__( - self, - *, - log_format_local_dev_enabled: bool = False, - fmt: str | None = None, - ) -> None: - self.log_format_local_dev_enabled = log_format_local_dev_enabled - self.fmt = fmt or _DEFAULT_FORMATTING - self.queue: queue.Queue | None = None - self.listener: logging.handlers.QueueListener | None = None - self.queue_handler: logging.handlers.QueueHandler | None = None - self.original_handlers: dict[str, list[logging.Handler]] = {} - - async def __aenter__(self) -> "AsyncLoggingContext": - """Set up async logging infrastructure.""" - await self._setup_async_logging() - return self - - async def __aexit__(self, exc_type, exc_val, exc_tb) -> None: - """Clean up async logging infrastructure.""" - await self._cleanup_async_logging() - - async def _setup_async_logging(self) -> None: - """Configure non-blocking logging using queue-based approach.""" - # Create unlimited queue for log messages - self.queue = queue.Queue() - - # Use default StreamHandler with proper formatting - handler = logging.StreamHandler() - handler.setFormatter( - CustomFormatter( - self.fmt, - log_format_local_dev_enabled=self.log_format_local_dev_enabled, - ) - ) - - # Create and start the queue listener - self.listener = logging.handlers.QueueListener( - self.queue, handler, respect_handler_level=True - ) - self.listener.start() - - # Create queue handler for loggers - self.queue_handler = logging.handlers.QueueHandler(self.queue) - - # Configure all existing loggers - await self._configure_loggers() - - _logger.info("Async logging context initialized with unlimited queue") - - async def _configure_loggers(self) -> None: - """Add queue handler to all loggers while preserving existing handlers.""" - # Get all loggers - manager: logging.Manager = logging.Logger.manager - root_logger = logging.getLogger() - all_loggers = [root_logger] + [ - logging.getLogger(name) for name in manager.loggerDict - ] - - # Store original handlers and add queue handler - for logger in all_loggers: - logger_name = logger.name or "root" - - # Store original handlers - self.original_handlers[logger_name] = logger.handlers[:] - - # Add queue handler alongside existing handlers - if self.queue_handler: - logger.addHandler(self.queue_handler) - - # Allow other coroutines to run - await asyncio.sleep(0) - - async def _cleanup_async_logging(self) -> None: - """Restore original logging configuration.""" - try: - # Remove queue handlers from all loggers - manager: logging.Manager = logging.Logger.manager - root_logger = logging.getLogger() - all_loggers = [root_logger] + [ - logging.getLogger(name) for name in manager.loggerDict - ] - - for logger in all_loggers: - # Remove only the queue handler we added - if self.queue_handler and self.queue_handler in logger.handlers: - logger.removeHandler(self.queue_handler) - - # Stop the queue listener - if self.listener: - _logger.debug("Shutting down async logging listener...") - self.listener.stop() - - _logger.debug("Async logging context cleanup complete") - - except Exception as exc: - sys.stderr.write(f"Error during async logging cleanup: {exc}\n") - sys.stderr.flush() - finally: - self.queue = None - self.listener = None - self.queue_handler = None - self.original_handlers.clear() - - def get_metrics(self) -> dict[str, Any] | None: - """Get logging performance metrics.""" - if self.queue: - return { - "queue_size": self.queue.qsize(), - "listener_active": self.listener is not None, - } - return None From ccf5d33c28f4041deeeee80735c492a4c2e7286d Mon Sep 17 00:00:00 2001 From: 
sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Tue, 8 Jul 2025 16:25:45 +0200 Subject: [PATCH 021/128] minor --- packages/service-library/src/servicelib/logging_utils.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/service-library/src/servicelib/logging_utils.py b/packages/service-library/src/servicelib/logging_utils.py index 1f5cf63841e..e250df23efe 100644 --- a/packages/service-library/src/servicelib/logging_utils.py +++ b/packages/service-library/src/servicelib/logging_utils.py @@ -220,7 +220,7 @@ def config_all_loggers( log_format_local_dev_enabled=log_format_local_dev_enabled, ) - the_manager: logging.Manager = logging.Logger.manager + the_manager = logging.Logger.manager root_logger = logging.getLogger() loggers = [root_logger] + [ logging.getLogger(name) for name in the_manager.loggerDict @@ -265,7 +265,7 @@ async def setup_async_loggers( ) # Set up async logging infrastructure - log_queue: queue.Queue = queue.Queue() + log_queue = queue.Queue() # Create handler with proper formatting handler = logging.StreamHandler() handler.setFormatter( @@ -285,7 +285,7 @@ async def setup_async_loggers( queue_handler = logging.handlers.QueueHandler(log_queue) # Configure all existing loggers - add queue handler alongside existing handlers - manager: logging.Manager = logging.Logger.manager + manager = logging.Logger.manager root_logger = logging.getLogger() all_loggers = [root_logger] + [ logging.getLogger(name) for name in manager.loggerDict From 716cfcdeabdc4c289041e90573e4d6667777ec52 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Tue, 8 Jul 2025 16:30:36 +0200 Subject: [PATCH 022/128] minor --- .../src/servicelib/logging_utils.py | 34 +++++++------------ 1 file changed, 13 insertions(+), 21 deletions(-) diff --git a/packages/service-library/src/servicelib/logging_utils.py b/packages/service-library/src/servicelib/logging_utils.py index e250df23efe..3ad83226013 100644 --- a/packages/service-library/src/servicelib/logging_utils.py +++ b/packages/service-library/src/servicelib/logging_utils.py @@ -183,6 +183,12 @@ def _set_logging_handler( ) +def _get_all_loggers() -> list[logging.Logger]: + manager = logging.Logger.manager + root_logger = logging.getLogger() + return [root_logger] + [logging.getLogger(name) for name in manager.loggerDict] + + def _apply_logger_filters( logger_filter_mapping: dict[LoggerName, list[MessageSubstring]], ) -> None: @@ -220,14 +226,9 @@ def config_all_loggers( log_format_local_dev_enabled=log_format_local_dev_enabled, ) - the_manager = logging.Logger.manager - root_logger = logging.getLogger() - loggers = [root_logger] + [ - logging.getLogger(name) for name in the_manager.loggerDict - ] - - # Apply handlers to loggers - for logger in loggers: + # Get all loggers and apply formatting + all_loggers = _get_all_loggers() + for logger in all_loggers: _set_logging_handler( logger, fmt=fmt, @@ -265,14 +266,11 @@ async def setup_async_loggers( ) # Set up async logging infrastructure - log_queue = queue.Queue() + log_queue: queue.Queue = queue.Queue() # Create handler with proper formatting handler = logging.StreamHandler() handler.setFormatter( - CustomFormatter( - fmt, - log_format_local_dev_enabled=log_format_local_dev_enabled, - ) + CustomFormatter(fmt, log_format_local_dev_enabled=log_format_local_dev_enabled) ) # Create and start the queue listener @@ -284,14 +282,8 @@ async def setup_async_loggers( # Create queue handler for loggers queue_handler = 
logging.handlers.QueueHandler(log_queue) - # Configure all existing loggers - add queue handler alongside existing handlers - manager = logging.Logger.manager - root_logger = logging.getLogger() - all_loggers = [root_logger] + [ - logging.getLogger(name) for name in manager.loggerDict - ] - - # Add queue handler to all loggers (preserving existing handlers) + # Get all loggers and add queue handler alongside existing handlers + all_loggers = _get_all_loggers() for logger in all_loggers: logger.addHandler(queue_handler) From 3e242f2aa2ddbd152a214e26c11d8d5c67dcd44a Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Tue, 8 Jul 2025 16:57:36 +0200 Subject: [PATCH 023/128] print if UV is installed --- services/agent/docker/entrypoint.sh | 1 + services/api-server/docker/entrypoint.sh | 1 + services/autoscaling/docker/entrypoint.sh | 1 + services/catalog/docker/entrypoint.sh | 1 + services/clusters-keeper/docker/entrypoint.sh | 1 + services/datcore-adapter/docker/entrypoint.sh | 1 + services/director-v2/docker/entrypoint.sh | 1 + services/director/docker/entrypoint.sh | 1 + services/dynamic-scheduler/docker/entrypoint.sh | 1 + services/dynamic-sidecar/docker/entrypoint.sh | 1 + services/efs-guardian/docker/entrypoint.sh | 1 + services/invitations/docker/entrypoint.sh | 1 + services/migration/docker/entrypoint.sh | 1 + services/notifications/docker/entrypoint.sh | 1 + services/payments/docker/entrypoint.sh | 1 + services/resource-usage-tracker/docker/entrypoint.sh | 1 + services/storage/docker/entrypoint.sh | 1 + services/web/server/docker/entrypoint.sh | 1 + 18 files changed, 18 insertions(+) diff --git a/services/agent/docker/entrypoint.sh b/services/agent/docker/entrypoint.sh index c1697b74b85..a319c6824d7 100755 --- a/services/agent/docker/entrypoint.sh +++ b/services/agent/docker/entrypoint.sh @@ -19,6 +19,7 @@ echo "$INFO" "Workdir : $(pwd)" echo "$INFO" "User : $(id scu)" echo "$INFO" "python : $(command -v python)" echo "$INFO" "pip : $(command -v pip)" +echo "$INFO" "UV : $(command -v uv)" # # DEVELOPMENT MODE diff --git a/services/api-server/docker/entrypoint.sh b/services/api-server/docker/entrypoint.sh index e5060ff8f7b..0124a12961b 100755 --- a/services/api-server/docker/entrypoint.sh +++ b/services/api-server/docker/entrypoint.sh @@ -20,6 +20,7 @@ echo "$INFO" "Workdir : $(pwd)" echo "$INFO" "User : $(id scu)" echo "$INFO" "python : $(command -v python)" echo "$INFO" "pip : $(command -v pip)" +echo "$INFO" "UV : $(command -v uv)" USERNAME=scu GROUPNAME=scu diff --git a/services/autoscaling/docker/entrypoint.sh b/services/autoscaling/docker/entrypoint.sh index 1071a664b92..651a1ea875f 100755 --- a/services/autoscaling/docker/entrypoint.sh +++ b/services/autoscaling/docker/entrypoint.sh @@ -26,6 +26,7 @@ echo "$INFO" "Workdir : $(pwd)" echo "$INFO" "User : $(id scu)" echo "$INFO" "python : $(command -v python)" echo "$INFO" "pip : $(command -v pip)" +echo "$INFO" "UV : $(command -v uv)" # # DEVELOPMENT MODE diff --git a/services/catalog/docker/entrypoint.sh b/services/catalog/docker/entrypoint.sh index 9e820488a53..5e534412b4a 100755 --- a/services/catalog/docker/entrypoint.sh +++ b/services/catalog/docker/entrypoint.sh @@ -20,6 +20,7 @@ echo "$INFO" "Workdir : $(pwd)" echo "$INFO" "User : $(id scu)" echo "$INFO" "python : $(command -v python)" echo "$INFO" "pip : $(command -v pip)" +echo "$INFO" "UV : $(command -v uv)" USERNAME=scu GROUPNAME=scu diff --git a/services/clusters-keeper/docker/entrypoint.sh 
b/services/clusters-keeper/docker/entrypoint.sh index 1071a664b92..651a1ea875f 100755 --- a/services/clusters-keeper/docker/entrypoint.sh +++ b/services/clusters-keeper/docker/entrypoint.sh @@ -26,6 +26,7 @@ echo "$INFO" "Workdir : $(pwd)" echo "$INFO" "User : $(id scu)" echo "$INFO" "python : $(command -v python)" echo "$INFO" "pip : $(command -v pip)" +echo "$INFO" "UV : $(command -v uv)" # # DEVELOPMENT MODE diff --git a/services/datcore-adapter/docker/entrypoint.sh b/services/datcore-adapter/docker/entrypoint.sh index b55d4bc7716..357d8b604d9 100755 --- a/services/datcore-adapter/docker/entrypoint.sh +++ b/services/datcore-adapter/docker/entrypoint.sh @@ -19,6 +19,7 @@ echo "$INFO" "Workdir : $(pwd)" echo "$INFO" "User : $(id scu)" echo "$INFO" "python : $(command -v python)" echo "$INFO" "pip : $(command -v pip)" +echo "$INFO" "UV : $(command -v uv)" # # DEVELOPMENT MODE diff --git a/services/director-v2/docker/entrypoint.sh b/services/director-v2/docker/entrypoint.sh index 244f9b6a1db..9d5a38625a1 100755 --- a/services/director-v2/docker/entrypoint.sh +++ b/services/director-v2/docker/entrypoint.sh @@ -26,6 +26,7 @@ echo "$INFO" "Workdir : $(pwd)" echo "$INFO" "User : $(id scu)" echo "$INFO" "python : $(command -v python)" echo "$INFO" "pip : $(command -v pip)" +echo "$INFO" "UV : $(command -v uv)" # # DEVELOPMENT MODE diff --git a/services/director/docker/entrypoint.sh b/services/director/docker/entrypoint.sh index 1071a664b92..651a1ea875f 100755 --- a/services/director/docker/entrypoint.sh +++ b/services/director/docker/entrypoint.sh @@ -26,6 +26,7 @@ echo "$INFO" "Workdir : $(pwd)" echo "$INFO" "User : $(id scu)" echo "$INFO" "python : $(command -v python)" echo "$INFO" "pip : $(command -v pip)" +echo "$INFO" "UV : $(command -v uv)" # # DEVELOPMENT MODE diff --git a/services/dynamic-scheduler/docker/entrypoint.sh b/services/dynamic-scheduler/docker/entrypoint.sh index b55d4bc7716..357d8b604d9 100755 --- a/services/dynamic-scheduler/docker/entrypoint.sh +++ b/services/dynamic-scheduler/docker/entrypoint.sh @@ -19,6 +19,7 @@ echo "$INFO" "Workdir : $(pwd)" echo "$INFO" "User : $(id scu)" echo "$INFO" "python : $(command -v python)" echo "$INFO" "pip : $(command -v pip)" +echo "$INFO" "UV : $(command -v uv)" # # DEVELOPMENT MODE diff --git a/services/dynamic-sidecar/docker/entrypoint.sh b/services/dynamic-sidecar/docker/entrypoint.sh index 74eb8867b74..8f5a3799d78 100755 --- a/services/dynamic-sidecar/docker/entrypoint.sh +++ b/services/dynamic-sidecar/docker/entrypoint.sh @@ -27,6 +27,7 @@ echo "$INFO" "Workdir : $(pwd)" echo "$INFO" "User : $(id scu)" echo "$INFO" "python : $(command -v python)" echo "$INFO" "pip : $(command -v pip)" +echo "$INFO" "UV : $(command -v uv)" # # DEVELOPMENT MODE diff --git a/services/efs-guardian/docker/entrypoint.sh b/services/efs-guardian/docker/entrypoint.sh index a1b9ea0137d..b3083ac06cd 100755 --- a/services/efs-guardian/docker/entrypoint.sh +++ b/services/efs-guardian/docker/entrypoint.sh @@ -26,6 +26,7 @@ echo "$INFO" "Workdir : $(pwd)" echo "$INFO" "User : $(id scu)" echo "$INFO" "python : $(command -v python)" echo "$INFO" "pip : $(command -v pip)" +echo "$INFO" "UV : $(command -v uv)" # # DEVELOPMENT MODE diff --git a/services/invitations/docker/entrypoint.sh b/services/invitations/docker/entrypoint.sh index b55d4bc7716..357d8b604d9 100755 --- a/services/invitations/docker/entrypoint.sh +++ b/services/invitations/docker/entrypoint.sh @@ -19,6 +19,7 @@ echo "$INFO" "Workdir : $(pwd)" echo "$INFO" "User : $(id scu)" echo "$INFO" "python : 
$(command -v python)" echo "$INFO" "pip : $(command -v pip)" +echo "$INFO" "UV : $(command -v uv)" # # DEVELOPMENT MODE diff --git a/services/migration/docker/entrypoint.sh b/services/migration/docker/entrypoint.sh index 3b639e936ed..05671ccd2c7 100755 --- a/services/migration/docker/entrypoint.sh +++ b/services/migration/docker/entrypoint.sh @@ -12,6 +12,7 @@ echo "$INFO" "Workdir : $(pwd)" echo "$INFO" "User : $(id scu)" echo "$INFO" "python : $(command -v python)" echo "$INFO" "pip : $(command -v pip)" +echo "$INFO" "UV : $(command -v uv)" echo "$INFO ${SC_USER_NAME} rights : $(id "$SC_USER_NAME")" echo "$INFO local dir : $(ls -al)" diff --git a/services/notifications/docker/entrypoint.sh b/services/notifications/docker/entrypoint.sh index b55d4bc7716..357d8b604d9 100755 --- a/services/notifications/docker/entrypoint.sh +++ b/services/notifications/docker/entrypoint.sh @@ -19,6 +19,7 @@ echo "$INFO" "Workdir : $(pwd)" echo "$INFO" "User : $(id scu)" echo "$INFO" "python : $(command -v python)" echo "$INFO" "pip : $(command -v pip)" +echo "$INFO" "UV : $(command -v uv)" # # DEVELOPMENT MODE diff --git a/services/payments/docker/entrypoint.sh b/services/payments/docker/entrypoint.sh index b55d4bc7716..357d8b604d9 100755 --- a/services/payments/docker/entrypoint.sh +++ b/services/payments/docker/entrypoint.sh @@ -19,6 +19,7 @@ echo "$INFO" "Workdir : $(pwd)" echo "$INFO" "User : $(id scu)" echo "$INFO" "python : $(command -v python)" echo "$INFO" "pip : $(command -v pip)" +echo "$INFO" "UV : $(command -v uv)" # # DEVELOPMENT MODE diff --git a/services/resource-usage-tracker/docker/entrypoint.sh b/services/resource-usage-tracker/docker/entrypoint.sh index c1697b74b85..a319c6824d7 100755 --- a/services/resource-usage-tracker/docker/entrypoint.sh +++ b/services/resource-usage-tracker/docker/entrypoint.sh @@ -19,6 +19,7 @@ echo "$INFO" "Workdir : $(pwd)" echo "$INFO" "User : $(id scu)" echo "$INFO" "python : $(command -v python)" echo "$INFO" "pip : $(command -v pip)" +echo "$INFO" "UV : $(command -v uv)" # # DEVELOPMENT MODE diff --git a/services/storage/docker/entrypoint.sh b/services/storage/docker/entrypoint.sh index 8511f90e1d7..ad8718d5716 100755 --- a/services/storage/docker/entrypoint.sh +++ b/services/storage/docker/entrypoint.sh @@ -27,6 +27,7 @@ echo "$INFO" "Workdir : $(pwd)" echo "$INFO" "User : $(id scu)" echo "$INFO" "python : $(command -v python)" echo "$INFO" "pip : $(command -v pip)" +echo "$INFO" "UV : $(command -v uv)" if [ "${SC_BUILD_TARGET}" = "development" ]; then echo "$INFO" "development mode detected..." diff --git a/services/web/server/docker/entrypoint.sh b/services/web/server/docker/entrypoint.sh index 9e7e1a2bc2e..184ea4e00a1 100755 --- a/services/web/server/docker/entrypoint.sh +++ b/services/web/server/docker/entrypoint.sh @@ -27,6 +27,7 @@ echo "$INFO" "Workdir : $(pwd)" echo "$INFO" "User : $(id scu)" echo "$INFO" "python : $(command -v python)" echo "$INFO" "pip : $(command -v pip)" +echo "$INFO" "UV : $(command -v uv)" if [ "${SC_BUILD_TARGET}" = "development" ]; then echo "$INFO" "development mode detected..." 
From c4cdf2fe44d58a8b15f6a242ee934d0459f045bf Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Tue, 8 Jul 2025 17:31:12 +0200 Subject: [PATCH 024/128] minor --- packages/service-library/src/servicelib/logging_utils.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/service-library/src/servicelib/logging_utils.py b/packages/service-library/src/servicelib/logging_utils.py index 3ad83226013..0486f0bd0f4 100644 --- a/packages/service-library/src/servicelib/logging_utils.py +++ b/packages/service-library/src/servicelib/logging_utils.py @@ -12,7 +12,7 @@ import queue import sys from asyncio import iscoroutinefunction -from collections.abc import AsyncGenerator, Callable, Iterator +from collections.abc import AsyncIterator, Callable, Iterator from contextlib import asynccontextmanager, contextmanager from datetime import datetime from inspect import getframeinfo, stack @@ -245,7 +245,7 @@ async def setup_async_loggers( log_format_local_dev_enabled: bool, logger_filter_mapping: dict[LoggerName, list[MessageSubstring]], tracing_settings: TracingSettings | None, -) -> AsyncGenerator[None, None]: +) -> AsyncIterator[None]: """ Async context manager for non-blocking logging infrastructure. From fcacc20b22156281662588787c16c671b651bf4a Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Tue, 8 Jul 2025 18:37:29 +0200 Subject: [PATCH 025/128] cleanup --- packages/service-library/src/servicelib/logging_utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/service-library/src/servicelib/logging_utils.py b/packages/service-library/src/servicelib/logging_utils.py index 0486f0bd0f4..9bec8ff9185 100644 --- a/packages/service-library/src/servicelib/logging_utils.py +++ b/packages/service-library/src/servicelib/logging_utils.py @@ -266,7 +266,7 @@ async def setup_async_loggers( ) # Set up async logging infrastructure - log_queue: queue.Queue = queue.Queue() + log_queue = queue.Queue() # Create handler with proper formatting handler = logging.StreamHandler() handler.setFormatter( From 3430474cb0c2639a1e5957d21411229c71e84d86 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Tue, 8 Jul 2025 18:37:51 +0200 Subject: [PATCH 026/128] better integration --- .../src/simcore_service_webserver/cli.py | 19 ++++++++++++++++++- .../src/simcore_service_webserver/log.py | 17 +++++++++++------ 2 files changed, 29 insertions(+), 7 deletions(-) diff --git a/services/web/server/src/simcore_service_webserver/cli.py b/services/web/server/src/simcore_service_webserver/cli.py index 67dd2b445bc..e8171e8b2b7 100644 --- a/services/web/server/src/simcore_service_webserver/cli.py +++ b/services/web/server/src/simcore_service_webserver/cli.py @@ -13,6 +13,7 @@ """ +from contextlib import AsyncExitStack import logging import os from typing import Annotated, Final @@ -22,6 +23,8 @@ from common_library.json_serialization import json_dumps from settings_library.utils_cli import create_settings_command +from servicelib.logging_utils import setup_async_loggers + from .application_settings import ApplicationSettings from .login import cli as login_cli @@ -80,12 +83,26 @@ async def app_factory() -> web.Application: tracing_settings=app_settings.WEBSERVER_TRACING, ) - if app_settings.WEBSERVER_APP_FACTORY_NAME == "WEBSERVER_AUTHZ_APP_FACTORY": + exit_stack = AsyncExitStack() + await exit_stack.enter_async_context( + setup_async_loggers( + 
log_format_local_dev_enabled=app_settings.WEBSERVER_LOG_FORMAT_LOCAL_DEV_ENABLED, + logger_filter_mapping=app_settings.WEBSERVER_LOG_FILTER_MAPPING, + tracing_settings=app_settings.WEBSERVER_TRACING, + ) + ) + if app_settings.WEBSERVER_APP_FACTORY_NAME == "WEBSERVER_AUTHZ_APP_FACTORY": app = create_application_auth() else: app, _ = _setup_app_from_settings(app_settings) + async def _cleanup_event(app: web.Application) -> None: + assert app # nosec + _logger.info("Cleaning up application resources") + await exit_stack.aclose() + + app.on_cleanup.append(_cleanup_event) return app diff --git a/services/web/server/src/simcore_service_webserver/log.py b/services/web/server/src/simcore_service_webserver/log.py index b8d3ba328b3..c33f8b4a411 100644 --- a/services/web/server/src/simcore_service_webserver/log.py +++ b/services/web/server/src/simcore_service_webserver/log.py @@ -1,12 +1,17 @@ """Configuration and utilities for service logging""" import logging +from collections.abc import AsyncGenerator from aiodebug import log_slow_callbacks # type: ignore[import-untyped] +from aiohttp import web from aiohttp.log import access_logger -from servicelib.logging_utils import config_all_loggers +from servicelib.aiohttp.application_setup import ensure_single_setup +from servicelib.logging_utils import setup_async_loggers from settings_library.tracing import TracingSettings +from simcore_service_webserver.application_settings import get_application_settings + LOG_LEVEL_STEP = logging.CRITICAL - logging.ERROR NOISY_LOGGERS = ( "aio_pika", @@ -36,11 +41,11 @@ def setup_logging( # root logging.root.setLevel(level) - config_all_loggers( - log_format_local_dev_enabled=log_format_local_dev_enabled, - logger_filter_mapping=logger_filter_mapping, - tracing_settings=tracing_settings, - ) + # config_all_loggers( + # log_format_local_dev_enabled=log_format_local_dev_enabled, + # logger_filter_mapping=logger_filter_mapping, + # tracing_settings=tracing_settings, + # ) # Enforces same log-level to aiohttp & gunicorn access loggers # From dcebbc18aff7e2ab5bac0bfcffbc3a5f9d18d4fc Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Tue, 8 Jul 2025 18:42:18 +0200 Subject: [PATCH 027/128] fix --- .../web/server/src/simcore_service_webserver/log.py | 11 ----------- 1 file changed, 11 deletions(-) diff --git a/services/web/server/src/simcore_service_webserver/log.py b/services/web/server/src/simcore_service_webserver/log.py index c33f8b4a411..20d5b2fe55b 100644 --- a/services/web/server/src/simcore_service_webserver/log.py +++ b/services/web/server/src/simcore_service_webserver/log.py @@ -1,17 +1,11 @@ """Configuration and utilities for service logging""" import logging -from collections.abc import AsyncGenerator from aiodebug import log_slow_callbacks # type: ignore[import-untyped] -from aiohttp import web from aiohttp.log import access_logger -from servicelib.aiohttp.application_setup import ensure_single_setup -from servicelib.logging_utils import setup_async_loggers from settings_library.tracing import TracingSettings -from simcore_service_webserver.application_settings import get_application_settings - LOG_LEVEL_STEP = logging.CRITICAL - logging.ERROR NOISY_LOGGERS = ( "aio_pika", @@ -41,11 +35,6 @@ def setup_logging( # root logging.root.setLevel(level) - # config_all_loggers( - # log_format_local_dev_enabled=log_format_local_dev_enabled, - # logger_filter_mapping=logger_filter_mapping, - # tracing_settings=tracing_settings, - # ) # Enforces same log-level to aiohttp & gunicorn 
access loggers # From 259484246f4842b415b182097de9fac5a24606b7 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Tue, 8 Jul 2025 18:43:45 +0200 Subject: [PATCH 028/128] fix --- .../web/server/src/simcore_service_webserver/cli.py | 8 ++------ .../web/server/src/simcore_service_webserver/log.py | 10 +--------- .../unit/with_dbs/04/studies_dispatcher/conftest.py | 7 +------ 3 files changed, 4 insertions(+), 21 deletions(-) diff --git a/services/web/server/src/simcore_service_webserver/cli.py b/services/web/server/src/simcore_service_webserver/cli.py index e8171e8b2b7..73a6a98d94b 100644 --- a/services/web/server/src/simcore_service_webserver/cli.py +++ b/services/web/server/src/simcore_service_webserver/cli.py @@ -13,17 +13,16 @@ """ -from contextlib import AsyncExitStack import logging import os +from contextlib import AsyncExitStack from typing import Annotated, Final import typer from aiohttp import web from common_library.json_serialization import json_dumps -from settings_library.utils_cli import create_settings_command - from servicelib.logging_utils import setup_async_loggers +from settings_library.utils_cli import create_settings_command from .application_settings import ApplicationSettings from .login import cli as login_cli @@ -78,9 +77,6 @@ async def app_factory() -> web.Application: setup_logging( level=app_settings.log_level, slow_duration=app_settings.AIODEBUG_SLOW_DURATION_SECS, - log_format_local_dev_enabled=app_settings.WEBSERVER_LOG_FORMAT_LOCAL_DEV_ENABLED, - logger_filter_mapping=app_settings.WEBSERVER_LOG_FILTER_MAPPING, - tracing_settings=app_settings.WEBSERVER_TRACING, ) exit_stack = AsyncExitStack() diff --git a/services/web/server/src/simcore_service_webserver/log.py b/services/web/server/src/simcore_service_webserver/log.py index 20d5b2fe55b..4fb0be6715b 100644 --- a/services/web/server/src/simcore_service_webserver/log.py +++ b/services/web/server/src/simcore_service_webserver/log.py @@ -4,7 +4,6 @@ from aiodebug import log_slow_callbacks # type: ignore[import-untyped] from aiohttp.log import access_logger -from settings_library.tracing import TracingSettings LOG_LEVEL_STEP = logging.CRITICAL - logging.ERROR NOISY_LOGGERS = ( @@ -22,14 +21,7 @@ ) -def setup_logging( - *, - level: str | int, - slow_duration: float | None = None, - log_format_local_dev_enabled: bool, - logger_filter_mapping: dict, - tracing_settings: TracingSettings | None, -): +def setup_logging(*, level: str | int, slow_duration: float | None = None): # service log level logging.basicConfig(level=level) diff --git a/services/web/server/tests/unit/with_dbs/04/studies_dispatcher/conftest.py b/services/web/server/tests/unit/with_dbs/04/studies_dispatcher/conftest.py index cdae2960c74..63f697c3ad2 100644 --- a/services/web/server/tests/unit/with_dbs/04/studies_dispatcher/conftest.py +++ b/services/web/server/tests/unit/with_dbs/04/studies_dispatcher/conftest.py @@ -51,12 +51,7 @@ def app_environment(app_environment: EnvVarsDict, monkeypatch: pytest.MonkeyPatc ) # NOTE: To see logs, use pytest -s --log-cli-level=DEBUG - setup_logging( - level=logging.DEBUG, - log_format_local_dev_enabled=True, - logger_filter_mapping={}, - tracing_settings=None, - ) + setup_logging(level=logging.DEBUG) plugin_settings = StudiesDispatcherSettings.create_from_envs() print(plugin_settings.model_dump_json(indent=1)) From c6f8e115bb184b4e73caecfba9d82b5c2d63445b Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Wed, 9 Jul 2025 08:43:54 +0200 
Subject: [PATCH 029/128] rename --- packages/service-library/src/servicelib/logging_utils.py | 2 +- .../agent/src/simcore_service_agent/core/application.py | 4 ++-- .../src/simcore_service_api_server/core/application.py | 4 ++-- .../autoscaling/src/simcore_service_autoscaling/main.py | 8 +++----- services/catalog/src/simcore_service_catalog/main.py | 7 +++---- .../src/simcore_service_clusters_keeper/main.py | 8 +++----- .../src/simcore_service_dask_sidecar/utils/logs.py | 4 ++-- .../src/simcore_service_datcore_adapter/main.py | 4 ++-- .../src/simcore_service_director_v2/core/application.py | 8 +++++--- services/director/src/simcore_service_director/main.py | 8 +++----- .../src/simcore_service_dynamic_scheduler/main.py | 8 +++----- .../simcore_service_dynamic_sidecar/core/application.py | 4 ++-- .../efs-guardian/src/simcore_service_efs_guardian/main.py | 8 +++----- .../invitations/src/simcore_service_invitations/main.py | 8 +++----- .../src/simcore_service_notifications/core/application.py | 4 ++-- services/payments/src/simcore_service_payments/main.py | 8 +++----- .../src/simcore_service_resource_usage_tracker/main.py | 8 +++----- services/storage/src/simcore_service_storage/main.py | 4 ++-- .../simcore_service_storage/modules/celery/worker_main.py | 4 ++-- 19 files changed, 49 insertions(+), 64 deletions(-) diff --git a/packages/service-library/src/servicelib/logging_utils.py b/packages/service-library/src/servicelib/logging_utils.py index 9bec8ff9185..bcaf8cd175f 100644 --- a/packages/service-library/src/servicelib/logging_utils.py +++ b/packages/service-library/src/servicelib/logging_utils.py @@ -206,7 +206,7 @@ def _apply_logger_filters( logger.addFilter(log_filter) -def config_all_loggers( +def setup_loggers( *, log_format_local_dev_enabled: bool, logger_filter_mapping: dict[LoggerName, list[MessageSubstring]], diff --git a/services/agent/src/simcore_service_agent/core/application.py b/services/agent/src/simcore_service_agent/core/application.py index 442c4649c62..a89b968c150 100644 --- a/services/agent/src/simcore_service_agent/core/application.py +++ b/services/agent/src/simcore_service_agent/core/application.py @@ -9,7 +9,7 @@ initialize_fastapi_app_tracing, setup_tracing, ) -from servicelib.logging_utils import config_all_loggers +from servicelib.logging_utils import setup_loggers from .._meta import ( API_VTAG, @@ -34,7 +34,7 @@ def _setup_logger(settings: ApplicationSettings): # SEE https://github.com/ITISFoundation/osparc-simcore/issues/3148 logging.basicConfig(level=settings.LOG_LEVEL.value) # NOSONAR logging.root.setLevel(settings.LOG_LEVEL.value) - config_all_loggers( + setup_loggers( log_format_local_dev_enabled=settings.AGENT_VOLUMES_LOG_FORMAT_LOCAL_DEV_ENABLED, logger_filter_mapping=settings.AGENT_VOLUMES_LOG_FILTER_MAPPING, tracing_settings=settings.AGENT_TRACING, diff --git a/services/api-server/src/simcore_service_api_server/core/application.py b/services/api-server/src/simcore_service_api_server/core/application.py index 145ff8efb77..ba9331279b4 100644 --- a/services/api-server/src/simcore_service_api_server/core/application.py +++ b/services/api-server/src/simcore_service_api_server/core/application.py @@ -9,7 +9,7 @@ initialize_fastapi_app_tracing, setup_tracing, ) -from servicelib.logging_utils import config_all_loggers +from servicelib.logging_utils import setup_loggers from .. 
import exceptions from .._meta import API_VERSION, API_VTAG, APP_NAME @@ -55,7 +55,7 @@ def init_app(settings: ApplicationSettings | None = None) -> FastAPI: logging.basicConfig(level=settings.log_level) logging.root.setLevel(settings.log_level) - config_all_loggers( + setup_loggers( log_format_local_dev_enabled=settings.API_SERVER_LOG_FORMAT_LOCAL_DEV_ENABLED, logger_filter_mapping=settings.API_SERVER_LOG_FILTER_MAPPING, tracing_settings=settings.API_SERVER_TRACING, diff --git a/services/autoscaling/src/simcore_service_autoscaling/main.py b/services/autoscaling/src/simcore_service_autoscaling/main.py index 102258cac70..65604fee17d 100644 --- a/services/autoscaling/src/simcore_service_autoscaling/main.py +++ b/services/autoscaling/src/simcore_service_autoscaling/main.py @@ -1,18 +1,16 @@ -"""Main application to be deployed by uvicorn (or equivalent) server - -""" +"""Main application to be deployed by uvicorn (or equivalent) server""" import logging from fastapi import FastAPI -from servicelib.logging_utils import config_all_loggers +from servicelib.logging_utils import setup_loggers from simcore_service_autoscaling.core.application import create_app from simcore_service_autoscaling.core.settings import ApplicationSettings the_settings = ApplicationSettings.create_from_envs() logging.basicConfig(level=the_settings.log_level) logging.root.setLevel(the_settings.log_level) -config_all_loggers( +setup_loggers( log_format_local_dev_enabled=the_settings.AUTOSCALING_LOG_FORMAT_LOCAL_DEV_ENABLED, logger_filter_mapping=the_settings.AUTOSCALING_LOG_FILTER_MAPPING, tracing_settings=the_settings.AUTOSCALING_TRACING, diff --git a/services/catalog/src/simcore_service_catalog/main.py b/services/catalog/src/simcore_service_catalog/main.py index 52bd949a542..c88d967fc0f 100644 --- a/services/catalog/src/simcore_service_catalog/main.py +++ b/services/catalog/src/simcore_service_catalog/main.py @@ -1,10 +1,9 @@ -"""Main application to be deployed in for example uvicorn. 
-""" +"""Main application to be deployed in for example uvicorn.""" import logging from fastapi import FastAPI -from servicelib.logging_utils import config_all_loggers +from servicelib.logging_utils import setup_loggers from simcore_service_catalog.core.application import create_app from simcore_service_catalog.core.settings import ApplicationSettings @@ -13,7 +12,7 @@ # SEE https://github.com/ITISFoundation/osparc-simcore/issues/3148 logging.basicConfig(level=_the_settings.log_level) # NOSONAR logging.root.setLevel(_the_settings.log_level) -config_all_loggers( +setup_loggers( log_format_local_dev_enabled=_the_settings.CATALOG_LOG_FORMAT_LOCAL_DEV_ENABLED, logger_filter_mapping=_the_settings.CATALOG_LOG_FILTER_MAPPING, tracing_settings=_the_settings.CATALOG_TRACING, diff --git a/services/clusters-keeper/src/simcore_service_clusters_keeper/main.py b/services/clusters-keeper/src/simcore_service_clusters_keeper/main.py index b2844bde6af..afa325dd89d 100644 --- a/services/clusters-keeper/src/simcore_service_clusters_keeper/main.py +++ b/services/clusters-keeper/src/simcore_service_clusters_keeper/main.py @@ -1,18 +1,16 @@ -"""Main application to be deployed by uvicorn (or equivalent) server - -""" +"""Main application to be deployed by uvicorn (or equivalent) server""" import logging from fastapi import FastAPI -from servicelib.logging_utils import config_all_loggers +from servicelib.logging_utils import setup_loggers from simcore_service_clusters_keeper.core.application import create_app from simcore_service_clusters_keeper.core.settings import ApplicationSettings the_settings = ApplicationSettings.create_from_envs() logging.basicConfig(level=the_settings.log_level) logging.root.setLevel(the_settings.log_level) -config_all_loggers( +setup_loggers( log_format_local_dev_enabled=the_settings.CLUSTERS_KEEPER_LOG_FORMAT_LOCAL_DEV_ENABLED, logger_filter_mapping=the_settings.CLUSTERS_KEEPER_LOG_FILTER_MAPPING, tracing_settings=the_settings.CLUSTERS_KEEPER_TRACING, diff --git a/services/dask-sidecar/src/simcore_service_dask_sidecar/utils/logs.py b/services/dask-sidecar/src/simcore_service_dask_sidecar/utils/logs.py index 74b158de9e2..0ce1ca9c405 100644 --- a/services/dask-sidecar/src/simcore_service_dask_sidecar/utils/logs.py +++ b/services/dask-sidecar/src/simcore_service_dask_sidecar/utils/logs.py @@ -1,6 +1,6 @@ import logging -from servicelib.logging_utils import config_all_loggers +from servicelib.logging_utils import setup_loggers from ..settings import ApplicationSettings @@ -13,7 +13,7 @@ def setup_app_logging(settings: ApplicationSettings) -> None: # removing them solves dual propagation of logs for handler in logging.getLogger("distributed").handlers: logging.getLogger("distributed").removeHandler(handler) - config_all_loggers( + setup_loggers( log_format_local_dev_enabled=settings.DASK_LOG_FORMAT_LOCAL_DEV_ENABLED, logger_filter_mapping=settings.DASK_LOG_FILTER_MAPPING, tracing_settings=None, # no tracing for dask sidecar diff --git a/services/datcore-adapter/src/simcore_service_datcore_adapter/main.py b/services/datcore-adapter/src/simcore_service_datcore_adapter/main.py index 7bd6a787163..efde47c998e 100644 --- a/services/datcore-adapter/src/simcore_service_datcore_adapter/main.py +++ b/services/datcore-adapter/src/simcore_service_datcore_adapter/main.py @@ -3,7 +3,7 @@ import logging from fastapi import FastAPI -from servicelib.logging_utils import config_all_loggers +from servicelib.logging_utils import setup_loggers from simcore_service_datcore_adapter.core.application import 
create_app from simcore_service_datcore_adapter.core.settings import ApplicationSettings @@ -12,7 +12,7 @@ # SEE https://github.com/ITISFoundation/osparc-simcore/issues/3148 logging.basicConfig(level=_the_settings.log_level) # NOSONAR logging.root.setLevel(_the_settings.log_level) -config_all_loggers( +setup_loggers( log_format_local_dev_enabled=_the_settings.DATCORE_ADAPTER_LOG_FORMAT_LOCAL_DEV_ENABLED, logger_filter_mapping=_the_settings.DATCORE_ADAPTER_LOG_FILTER_MAPPING, tracing_settings=_the_settings.DATCORE_ADAPTER_TRACING, diff --git a/services/director-v2/src/simcore_service_director_v2/core/application.py b/services/director-v2/src/simcore_service_director_v2/core/application.py index 9dfc9ec6974..6a73ab5105a 100644 --- a/services/director-v2/src/simcore_service_director_v2/core/application.py +++ b/services/director-v2/src/simcore_service_director_v2/core/application.py @@ -11,7 +11,7 @@ initialize_fastapi_app_tracing, setup_tracing, ) -from servicelib.logging_utils import config_all_loggers +from servicelib.logging_utils import setup_loggers from .._meta import API_VERSION, API_VTAG, APP_NAME, PROJECT_NAME, SUMMARY from ..api.entrypoints import api_router @@ -110,7 +110,7 @@ def create_base_app(settings: AppSettings | None = None) -> FastAPI: logging.basicConfig(level=settings.LOG_LEVEL.value) logging.root.setLevel(settings.LOG_LEVEL.value) - config_all_loggers( + setup_loggers( log_format_local_dev_enabled=settings.DIRECTOR_V2_LOG_FORMAT_LOCAL_DEV_ENABLED, logger_filter_mapping=settings.DIRECTOR_V2_LOG_FILTER_MAPPING, tracing_settings=settings.DIRECTOR_V2_TRACING, @@ -202,7 +202,9 @@ def init_app(settings: AppSettings | None = None) -> FastAPI: socketio.setup(app) notifier.setup(app) - if settings.DIRECTOR_V2_COMPUTATIONAL_BACKEND.COMPUTATIONAL_BACKEND_DASK_CLIENT_ENABLED: + if ( + settings.DIRECTOR_V2_COMPUTATIONAL_BACKEND.COMPUTATIONAL_BACKEND_DASK_CLIENT_ENABLED + ): dask_clients_pool.setup(app, settings.DIRECTOR_V2_COMPUTATIONAL_BACKEND) if computational_backend_enabled: diff --git a/services/director/src/simcore_service_director/main.py b/services/director/src/simcore_service_director/main.py index da0c480065f..0f15c827ac6 100644 --- a/services/director/src/simcore_service_director/main.py +++ b/services/director/src/simcore_service_director/main.py @@ -1,11 +1,9 @@ -"""Main application to be deployed by uvicorn (or equivalent) server - -""" +"""Main application to be deployed by uvicorn (or equivalent) server""" import logging from fastapi import FastAPI -from servicelib.logging_utils import config_all_loggers +from servicelib.logging_utils import setup_loggers from simcore_service_director.core.application import create_app from simcore_service_director.core.settings import ApplicationSettings @@ -14,7 +12,7 @@ # SEE https://github.com/ITISFoundation/osparc-simcore/issues/3148 logging.basicConfig(level=_the_settings.DIRECTOR_LOGLEVEL) logging.root.setLevel(_the_settings.DIRECTOR_LOGLEVEL) -config_all_loggers( +setup_loggers( log_format_local_dev_enabled=_the_settings.DIRECTOR_LOG_FORMAT_LOCAL_DEV_ENABLED, logger_filter_mapping=_the_settings.DIRECTOR_LOG_FILTER_MAPPING, tracing_settings=_the_settings.DIRECTOR_TRACING, diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/main.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/main.py index ab726883237..fe0d89d7bb0 100644 --- a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/main.py +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/main.py @@ 
-1,18 +1,16 @@ -"""Main application to be deployed by uvicorn (or equivalent) server - -""" +"""Main application to be deployed by uvicorn (or equivalent) server""" import logging from fastapi import FastAPI -from servicelib.logging_utils import config_all_loggers +from servicelib.logging_utils import setup_loggers from simcore_service_dynamic_scheduler.core.application import create_app from simcore_service_dynamic_scheduler.core.settings import ApplicationSettings _the_settings = ApplicationSettings.create_from_envs() logging.basicConfig(level=_the_settings.DYNAMIC_SCHEDULER_LOGLEVEL.value) logging.root.setLevel(_the_settings.DYNAMIC_SCHEDULER_LOGLEVEL.value) -config_all_loggers( +setup_loggers( log_format_local_dev_enabled=_the_settings.DYNAMIC_SCHEDULER_LOG_FORMAT_LOCAL_DEV_ENABLED, logger_filter_mapping=_the_settings.DYNAMIC_SCHEDULER_LOG_FILTER_MAPPING, tracing_settings=_the_settings.DYNAMIC_SCHEDULER_TRACING, diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/application.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/application.py index b141e7ca236..496c7291598 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/application.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/application.py @@ -13,7 +13,7 @@ initialize_fastapi_app_tracing, setup_tracing, ) -from servicelib.logging_utils import config_all_loggers +from servicelib.logging_utils import setup_loggers from simcore_sdk.node_ports_common.exceptions import NodeNotFound from .._meta import API_VERSION, API_VTAG, PROJECT_NAME, SUMMARY, __version__ @@ -119,7 +119,7 @@ def setup_logger(settings: ApplicationSettings): # SEE https://github.com/ITISFoundation/osparc-simcore/issues/3148 logging.basicConfig(level=settings.log_level) logging.root.setLevel(settings.log_level) - config_all_loggers( + setup_loggers( log_format_local_dev_enabled=settings.DY_SIDECAR_LOG_FORMAT_LOCAL_DEV_ENABLED, logger_filter_mapping=settings.DY_SIDECAR_LOG_FILTER_MAPPING, tracing_settings=settings.DYNAMIC_SIDECAR_TRACING, diff --git a/services/efs-guardian/src/simcore_service_efs_guardian/main.py b/services/efs-guardian/src/simcore_service_efs_guardian/main.py index 711d32d83ee..037a7b6a181 100644 --- a/services/efs-guardian/src/simcore_service_efs_guardian/main.py +++ b/services/efs-guardian/src/simcore_service_efs_guardian/main.py @@ -1,18 +1,16 @@ -"""Main application to be deployed by uvicorn (or equivalent) server - -""" +"""Main application to be deployed by uvicorn (or equivalent) server""" import logging from fastapi import FastAPI -from servicelib.logging_utils import config_all_loggers +from servicelib.logging_utils import setup_loggers from simcore_service_efs_guardian.core.application import create_app from simcore_service_efs_guardian.core.settings import ApplicationSettings the_settings = ApplicationSettings.create_from_envs() logging.basicConfig(level=the_settings.log_level) logging.root.setLevel(the_settings.log_level) -config_all_loggers( +setup_loggers( log_format_local_dev_enabled=the_settings.EFS_GUARDIAN_LOG_FORMAT_LOCAL_DEV_ENABLED, logger_filter_mapping=the_settings.EFS_GUARDIAN_LOG_FILTER_MAPPING, tracing_settings=the_settings.EFS_GUARDIAN_TRACING, diff --git a/services/invitations/src/simcore_service_invitations/main.py b/services/invitations/src/simcore_service_invitations/main.py index 4a21e994b31..7df3563c581 100644 --- a/services/invitations/src/simcore_service_invitations/main.py +++ 
b/services/invitations/src/simcore_service_invitations/main.py @@ -1,11 +1,9 @@ -"""Main application to be deployed by uvicorn (or equivalent) server - -""" +"""Main application to be deployed by uvicorn (or equivalent) server""" import logging from fastapi import FastAPI -from servicelib.logging_utils import config_all_loggers +from servicelib.logging_utils import setup_loggers from simcore_service_invitations.core.application import create_app from simcore_service_invitations.core.settings import ApplicationSettings @@ -14,7 +12,7 @@ # SEE https://github.com/ITISFoundation/osparc-simcore/issues/3148 logging.basicConfig(level=the_settings.log_level) # NOSONAR logging.root.setLevel(the_settings.log_level) -config_all_loggers( +setup_loggers( log_format_local_dev_enabled=the_settings.INVITATIONS_LOG_FORMAT_LOCAL_DEV_ENABLED, logger_filter_mapping=the_settings.INVITATIONS_LOG_FILTER_MAPPING, tracing_settings=the_settings.INVITATIONS_TRACING, diff --git a/services/notifications/src/simcore_service_notifications/core/application.py b/services/notifications/src/simcore_service_notifications/core/application.py index 5f3245d9d52..4a9515f3f96 100644 --- a/services/notifications/src/simcore_service_notifications/core/application.py +++ b/services/notifications/src/simcore_service_notifications/core/application.py @@ -12,7 +12,7 @@ initialize_fastapi_app_tracing, setup_tracing, ) -from servicelib.logging_utils import config_all_loggers +from servicelib.logging_utils import setup_loggers from .._meta import API_VTAG, APP_NAME, SUMMARY, VERSION from ..api.rest.routing import initialize_rest_api @@ -26,7 +26,7 @@ def _initialise_logger(settings: ApplicationSettings): # SEE https://github.com/ITISFoundation/osparc-simcore/issues/3148 logging.basicConfig(level=settings.LOG_LEVEL.value) # NOSONAR logging.root.setLevel(settings.LOG_LEVEL.value) - config_all_loggers( + setup_loggers( log_format_local_dev_enabled=settings.NOTIFICATIONS_VOLUMES_LOG_FORMAT_LOCAL_DEV_ENABLED, logger_filter_mapping=settings.NOTIFICATIONS_VOLUMES_LOG_FILTER_MAPPING, tracing_settings=settings.NOTIFICATIONS_TRACING, diff --git a/services/payments/src/simcore_service_payments/main.py b/services/payments/src/simcore_service_payments/main.py index 53e19bd22a1..0a21865e0dd 100644 --- a/services/payments/src/simcore_service_payments/main.py +++ b/services/payments/src/simcore_service_payments/main.py @@ -1,11 +1,9 @@ -"""Main application to be deployed by uvicorn (or equivalent) server - -""" +"""Main application to be deployed by uvicorn (or equivalent) server""" import logging from fastapi import FastAPI -from servicelib.logging_utils import config_all_loggers +from servicelib.logging_utils import setup_loggers from simcore_service_payments.core.application import create_app from simcore_service_payments.core.settings import ApplicationSettings @@ -14,7 +12,7 @@ # SEE https://github.com/ITISFoundation/osparc-simcore/issues/3148 logging.basicConfig(level=_the_settings.log_level) # NOSONAR logging.root.setLevel(_the_settings.log_level) -config_all_loggers( +setup_loggers( log_format_local_dev_enabled=_the_settings.PAYMENTS_LOG_FORMAT_LOCAL_DEV_ENABLED, logger_filter_mapping=_the_settings.PAYMENTS_LOG_FILTER_MAPPING, tracing_settings=_the_settings.PAYMENTS_TRACING, diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/main.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/main.py index 079ba5cdf79..6f0ec64fb3d 100644 --- 
a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/main.py +++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/main.py @@ -1,11 +1,9 @@ -"""Main application to be deployed by uvicorn (or equivalent) server - -""" +"""Main application to be deployed by uvicorn (or equivalent) server""" import logging from fastapi import FastAPI -from servicelib.logging_utils import config_all_loggers +from servicelib.logging_utils import setup_loggers from simcore_service_resource_usage_tracker.core.application import create_app from simcore_service_resource_usage_tracker.core.settings import ApplicationSettings @@ -14,7 +12,7 @@ # SEE https://github.com/ITISFoundation/osparc-simcore/issues/3148 logging.basicConfig(level=the_settings.log_level) # NOSONAR logging.root.setLevel(the_settings.log_level) -config_all_loggers( +setup_loggers( log_format_local_dev_enabled=the_settings.RESOURCE_USAGE_TRACKER_LOG_FORMAT_LOCAL_DEV_ENABLED, logger_filter_mapping=the_settings.RESOURCE_USAGE_TRACKER_LOG_FILTER_MAPPING, tracing_settings=the_settings.RESOURCE_USAGE_TRACKER_TRACING, diff --git a/services/storage/src/simcore_service_storage/main.py b/services/storage/src/simcore_service_storage/main.py index a37ead2cefc..d8309302029 100644 --- a/services/storage/src/simcore_service_storage/main.py +++ b/services/storage/src/simcore_service_storage/main.py @@ -2,7 +2,7 @@ import logging -from servicelib.logging_utils import config_all_loggers +from servicelib.logging_utils import setup_loggers from simcore_service_storage.core.application import create_app from simcore_service_storage.core.settings import ApplicationSettings @@ -11,7 +11,7 @@ # SEE https://github.com/ITISFoundation/osparc-simcore/issues/3148 logging.basicConfig(level=_settings.log_level) # NOSONAR logging.root.setLevel(_settings.log_level) -config_all_loggers( +setup_loggers( log_format_local_dev_enabled=_settings.STORAGE_LOG_FORMAT_LOCAL_DEV_ENABLED, logger_filter_mapping=_settings.STORAGE_LOG_FILTER_MAPPING, tracing_settings=_settings.STORAGE_TRACING, diff --git a/services/storage/src/simcore_service_storage/modules/celery/worker_main.py b/services/storage/src/simcore_service_storage/modules/celery/worker_main.py index ebd9832b9e1..d364f37e244 100644 --- a/services/storage/src/simcore_service_storage/modules/celery/worker_main.py +++ b/services/storage/src/simcore_service_storage/modules/celery/worker_main.py @@ -10,7 +10,7 @@ on_worker_shutdown, ) from servicelib.fastapi.celery.app_server import FastAPIAppServer -from servicelib.logging_utils import config_all_loggers +from servicelib.logging_utils import setup_loggers from ...api._worker_tasks.tasks import setup_worker_tasks from ...core.application import create_app @@ -20,7 +20,7 @@ logging.basicConfig(level=_settings.log_level) # NOSONAR logging.root.setLevel(_settings.log_level) -config_all_loggers( +setup_loggers( log_format_local_dev_enabled=_settings.STORAGE_LOG_FORMAT_LOCAL_DEV_ENABLED, logger_filter_mapping=_settings.STORAGE_LOG_FILTER_MAPPING, tracing_settings=_settings.STORAGE_TRACING, From fd6fbf89896e405f9fa53cb8624342e2f750f79f Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Wed, 9 Jul 2025 08:45:34 +0200 Subject: [PATCH 030/128] clean --- .../service-library/async_logging_example.py | 97 ------------------- .../src/servicelib/logging_utils.py | 4 +- .../tests/test_logging_utils.py | 12 +-- .../src/simcore_service_webserver/cli.py | 4 +- 4 files changed, 10 insertions(+), 107 deletions(-) 
delete mode 100644 packages/service-library/async_logging_example.py diff --git a/packages/service-library/async_logging_example.py b/packages/service-library/async_logging_example.py deleted file mode 100644 index d8ccfd55486..00000000000 --- a/packages/service-library/async_logging_example.py +++ /dev/null @@ -1,97 +0,0 @@ -#!/usr/bin/env python3 - -""" -Example demonstrating the new async logging functionality. -This shows how to use both the context manager and manual setup/shutdown approaches. -""" - -import asyncio -import logging -import sys -from pathlib import Path - -from servicelib.logging_utils import ( - async_logging_context, - setup_async_loggers, - shutdown_async_loggers, -) - -# Add the servicelib to the path -sys.path.insert(0, str(Path(__file__).parent / "src")) - - -async def example_with_context_manager(): - """Example using the async context manager approach.""" - print("\n=== Example 1: Using async context manager ===") - - async with async_logging_context(log_format_local_dev_enabled=True): - logger = logging.getLogger("example1") - logger.setLevel(logging.DEBUG) - - logger.info("Starting non-blocking async logging example") - - # Simulate some async work with logging - for i in range(5): - logger.debug(f"Processing item {i}") - await asyncio.sleep(0.1) # Simulate async work - - logger.info("Completed async work") - - -async def example_with_manual_setup(): - """Example using manual setup and shutdown.""" - print("\n=== Example 2: Using manual setup/shutdown ===") - - # Setup async logging - await setup_async_loggers(log_format_local_dev_enabled=True) - - try: - logger = logging.getLogger("example2") - logger.setLevel(logging.DEBUG) - - logger.info("Starting manual async logging example") - - # Simulate some async work with logging - tasks = [] - for i in range(3): - tasks.append(worker_task(f"worker-{i}")) - - await asyncio.gather(*tasks) - - logger.info("All workers completed") - - finally: - # Always shutdown to ensure clean cleanup - await shutdown_async_loggers() - - -async def worker_task(name: str): - """Simulate a worker task that logs messages.""" - logger = logging.getLogger(f"worker.{name}") - - logger.info(f"{name} starting work") - await asyncio.sleep(0.2) # Simulate work - logger.debug(f"{name} processing data") - await asyncio.sleep(0.1) - logger.info(f"{name} work completed") - - -async def main(): - """Run both examples.""" - print("Async Logging Examples") - print("=====================") - - # Example 1: Context manager (recommended) - await example_with_context_manager() - - # Small delay between examples - await asyncio.sleep(0.5) - - # Example 2: Manual setup/shutdown - await example_with_manual_setup() - - print("\n=== All examples completed ===") - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/packages/service-library/src/servicelib/logging_utils.py b/packages/service-library/src/servicelib/logging_utils.py index bcaf8cd175f..f462e1ec3f9 100644 --- a/packages/service-library/src/servicelib/logging_utils.py +++ b/packages/service-library/src/servicelib/logging_utils.py @@ -240,7 +240,7 @@ def setup_loggers( @asynccontextmanager -async def setup_async_loggers( +async def async_loggers_lifespan( *, log_format_local_dev_enabled: bool, logger_filter_mapping: dict[LoggerName, list[MessageSubstring]], @@ -250,7 +250,7 @@ async def setup_async_loggers( Async context manager for non-blocking logging infrastructure. 
Usage: - async with setup_async_loggers(log_format_local_dev_enabled=True): + async with async_loggers_lifespan(log_format_local_dev_enabled=True, logger_filter_mapping={}, tracing_settings=None): # Your async application code here logger.info("This is non-blocking!") diff --git a/packages/service-library/tests/test_logging_utils.py b/packages/service-library/tests/test_logging_utils.py index f9f1e5232d7..a4e123bba27 100644 --- a/packages/service-library/tests/test_logging_utils.py +++ b/packages/service-library/tests/test_logging_utils.py @@ -13,12 +13,12 @@ LogExtra, LogLevelInt, LogMessageStr, + async_loggers_lifespan, guess_message_log_level, log_context, log_decorator, log_exceptions, set_parent_module_log_level, - setup_async_loggers, ) from tenacity import ( AsyncRetrying, @@ -445,7 +445,7 @@ async def test_setup_async_loggers_basic( caplog.clear() caplog.set_level(logging.INFO) - async with setup_async_loggers( + async with async_loggers_lifespan( log_format_local_dev_enabled=log_format_local_dev_enabled, logger_filter_mapping={}, # No filters for this test tracing_settings=None, # No tracing for this test @@ -468,7 +468,7 @@ async def test_setup_async_loggers_with_filters( "test_filtered_logger": ["filtered_message"], } - async with setup_async_loggers( + async with async_loggers_lifespan( log_format_local_dev_enabled=True, logger_filter_mapping=filter_mapping, tracing_settings=None, # No tracing for this test @@ -503,7 +503,7 @@ async def test_setup_async_loggers_with_tracing_settings( # Note: We can't easily test actual tracing without setting up OpenTelemetry # But we can test that the function accepts the parameter - async with setup_async_loggers( + async with async_loggers_lifespan( log_format_local_dev_enabled=False, logger_filter_mapping={}, # No filters for this test tracing_settings=None, @@ -523,7 +523,7 @@ async def test_setup_async_loggers_context_manager_cleanup( test_logger = logging.getLogger("test_cleanup_logger") - async with setup_async_loggers( + async with async_loggers_lifespan( log_format_local_dev_enabled=True, logger_filter_mapping={}, tracing_settings=None, @@ -547,7 +547,7 @@ def _raise_test_exception(): raise ValueError(exc_msg) try: - async with setup_async_loggers( + async with async_loggers_lifespan( log_format_local_dev_enabled=True, logger_filter_mapping={}, tracing_settings=None, diff --git a/services/web/server/src/simcore_service_webserver/cli.py b/services/web/server/src/simcore_service_webserver/cli.py index 73a6a98d94b..094f7bf84c9 100644 --- a/services/web/server/src/simcore_service_webserver/cli.py +++ b/services/web/server/src/simcore_service_webserver/cli.py @@ -21,7 +21,7 @@ import typer from aiohttp import web from common_library.json_serialization import json_dumps -from servicelib.logging_utils import setup_async_loggers +from servicelib.logging_utils import async_loggers_lifespan from settings_library.utils_cli import create_settings_command from .application_settings import ApplicationSettings @@ -81,7 +81,7 @@ async def app_factory() -> web.Application: exit_stack = AsyncExitStack() await exit_stack.enter_async_context( - setup_async_loggers( + async_loggers_lifespan( log_format_local_dev_enabled=app_settings.WEBSERVER_LOG_FORMAT_LOCAL_DEV_ENABLED, logger_filter_mapping=app_settings.WEBSERVER_LOG_FILTER_MAPPING, tracing_settings=app_settings.WEBSERVER_TRACING, From b7134f3a7733ddcd8ee9108a5964021bc9391cb6 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Wed, 9 Jul 2025 08:49:06 +0200 
Subject: [PATCH 031/128] clean --- .../src/servicelib/logging_utils.py | 32 +++++++++---------- 1 file changed, 16 insertions(+), 16 deletions(-) diff --git a/packages/service-library/src/servicelib/logging_utils.py b/packages/service-library/src/servicelib/logging_utils.py index f462e1ec3f9..f4877c7b512 100644 --- a/packages/service-library/src/servicelib/logging_utils.py +++ b/packages/service-library/src/servicelib/logging_utils.py @@ -27,6 +27,8 @@ _logger = logging.getLogger(__name__) +LogLevelInt: TypeAlias = int +LogMessageStr: TypeAlias = str BLACK = "\033[0;30m" BLUE = "\033[0;34m" @@ -220,13 +222,11 @@ def setup_loggers( logger_filter_mapping: Mapping of logger names to filtered message substrings tracing_settings: OpenTelemetry tracing configuration """ - # Create format string fmt = _setup_format_string( tracing_settings=tracing_settings, log_format_local_dev_enabled=log_format_local_dev_enabled, ) - # Get all loggers and apply formatting all_loggers = _get_all_loggers() for logger in all_loggers: _set_logging_handler( @@ -259,7 +259,6 @@ async def async_loggers_lifespan( logger_filter_mapping: Mapping of logger names to filtered message substrings tracing_settings: OpenTelemetry tracing configuration """ - # Create format string fmt = _setup_format_string( tracing_settings=tracing_settings, log_format_local_dev_enabled=log_format_local_dev_enabled, @@ -303,9 +302,12 @@ async def async_loggers_lifespan( logger.removeHandler(queue_handler) # Stop the queue listener - _logger.debug("Shutting down async logging listener...") - listener.stop() - _logger.debug("Async logging context cleanup complete") + with log_context( + _logger, + level=logging.DEBUG, + msg="Shutdown async logging listener", + ): + listener.stop() except Exception as exc: sys.stderr.write(f"Error during async logging cleanup: {exc}\n") @@ -314,7 +316,7 @@ async def async_loggers_lifespan( class LogExceptionsKwargsDict(TypedDict, total=True): logger: logging.Logger - level: int + level: LogLevelInt msg_prefix: str exc_info: bool stack_info: bool @@ -323,7 +325,7 @@ class LogExceptionsKwargsDict(TypedDict, total=True): @contextmanager def log_exceptions( logger: logging.Logger, - level: int, + level: LogLevelInt, msg_prefix: str = "", *, exc_info: bool = False, @@ -363,7 +365,7 @@ def log_exceptions( def _log_before_call( - logger_obj: logging.Logger, level: int, func: Callable, *args, **kwargs + logger_obj: logging.Logger, level: LogLevelInt, func: Callable, *args, **kwargs ) -> dict[str, str]: # NOTE: We should avoid logging arguments but in the meantime, we are trying to # avoid exposing sensitive data in the logs. For `args` is more difficult. 
We could eventually @@ -401,7 +403,7 @@ def _log_before_call( def _log_after_call( logger_obj: logging.Logger, - level: int, + level: LogLevelInt, func: Callable, result: Any, extra_args: dict[str, str], @@ -421,7 +423,7 @@ def _log_after_call( def log_decorator( logger: logging.Logger | None, - level: int = logging.DEBUG, + level: LogLevelInt = logging.DEBUG, *, # NOTE: default defined by legacy: ANE defined full stack tracebacks # on exceptions @@ -488,10 +490,6 @@ def log_catch(logger: logging.Logger, *, reraise: bool = True) -> Iterator[None] raise exc from exc -LogLevelInt: TypeAlias = int -LogMessageStr: TypeAlias = str - - def _un_capitalize(s: str) -> str: return s[:1].lower() + s[1:] if s else "" @@ -554,6 +552,8 @@ def guess_message_log_level(message: str) -> LogLevelInt: return logging.INFO -def set_parent_module_log_level(current_module: str, desired_log_level: int) -> None: +def set_parent_module_log_level( + current_module: str, desired_log_level: LogLevelInt +) -> None: parent_module = ".".join(current_module.split(".")[:-1]) logging.getLogger(parent_module).setLevel(desired_log_level) From c270e8227575a0ee76234042128e75de124aa9fa Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Wed, 9 Jul 2025 10:13:11 +0200 Subject: [PATCH 032/128] rename --- .../service-library/src/servicelib/logging_utils.py | 4 ++-- packages/service-library/tests/test_logging_utils.py | 12 ++++++------ .../web/server/src/simcore_service_webserver/cli.py | 4 ++-- 3 files changed, 10 insertions(+), 10 deletions(-) diff --git a/packages/service-library/src/servicelib/logging_utils.py b/packages/service-library/src/servicelib/logging_utils.py index f4877c7b512..d191b53c5b0 100644 --- a/packages/service-library/src/servicelib/logging_utils.py +++ b/packages/service-library/src/servicelib/logging_utils.py @@ -240,7 +240,7 @@ def setup_loggers( @asynccontextmanager -async def async_loggers_lifespan( +async def setup_async_loggers_lifespan( *, log_format_local_dev_enabled: bool, logger_filter_mapping: dict[LoggerName, list[MessageSubstring]], @@ -250,7 +250,7 @@ async def async_loggers_lifespan( Async context manager for non-blocking logging infrastructure. 
Usage: - async with async_loggers_lifespan(log_format_local_dev_enabled=True, logger_filter_mapping={}, tracing_settings=None): + async with setup_async_loggers_lifespan(log_format_local_dev_enabled=True, logger_filter_mapping={}, tracing_settings=None): # Your async application code here logger.info("This is non-blocking!") diff --git a/packages/service-library/tests/test_logging_utils.py b/packages/service-library/tests/test_logging_utils.py index a4e123bba27..a91a9c0e792 100644 --- a/packages/service-library/tests/test_logging_utils.py +++ b/packages/service-library/tests/test_logging_utils.py @@ -13,12 +13,12 @@ LogExtra, LogLevelInt, LogMessageStr, - async_loggers_lifespan, guess_message_log_level, log_context, log_decorator, log_exceptions, set_parent_module_log_level, + setup_async_loggers_lifespan, ) from tenacity import ( AsyncRetrying, @@ -445,7 +445,7 @@ async def test_setup_async_loggers_basic( caplog.clear() caplog.set_level(logging.INFO) - async with async_loggers_lifespan( + async with setup_async_loggers_lifespan( log_format_local_dev_enabled=log_format_local_dev_enabled, logger_filter_mapping={}, # No filters for this test tracing_settings=None, # No tracing for this test @@ -468,7 +468,7 @@ async def test_setup_async_loggers_with_filters( "test_filtered_logger": ["filtered_message"], } - async with async_loggers_lifespan( + async with setup_async_loggers_lifespan( log_format_local_dev_enabled=True, logger_filter_mapping=filter_mapping, tracing_settings=None, # No tracing for this test @@ -503,7 +503,7 @@ async def test_setup_async_loggers_with_tracing_settings( # Note: We can't easily test actual tracing without setting up OpenTelemetry # But we can test that the function accepts the parameter - async with async_loggers_lifespan( + async with setup_async_loggers_lifespan( log_format_local_dev_enabled=False, logger_filter_mapping={}, # No filters for this test tracing_settings=None, @@ -523,7 +523,7 @@ async def test_setup_async_loggers_context_manager_cleanup( test_logger = logging.getLogger("test_cleanup_logger") - async with async_loggers_lifespan( + async with setup_async_loggers_lifespan( log_format_local_dev_enabled=True, logger_filter_mapping={}, tracing_settings=None, @@ -547,7 +547,7 @@ def _raise_test_exception(): raise ValueError(exc_msg) try: - async with async_loggers_lifespan( + async with setup_async_loggers_lifespan( log_format_local_dev_enabled=True, logger_filter_mapping={}, tracing_settings=None, diff --git a/services/web/server/src/simcore_service_webserver/cli.py b/services/web/server/src/simcore_service_webserver/cli.py index 094f7bf84c9..455a4955f0c 100644 --- a/services/web/server/src/simcore_service_webserver/cli.py +++ b/services/web/server/src/simcore_service_webserver/cli.py @@ -21,7 +21,7 @@ import typer from aiohttp import web from common_library.json_serialization import json_dumps -from servicelib.logging_utils import async_loggers_lifespan +from servicelib.logging_utils import setup_async_loggers_lifespan from settings_library.utils_cli import create_settings_command from .application_settings import ApplicationSettings @@ -81,7 +81,7 @@ async def app_factory() -> web.Application: exit_stack = AsyncExitStack() await exit_stack.enter_async_context( - async_loggers_lifespan( + setup_async_loggers_lifespan( log_format_local_dev_enabled=app_settings.WEBSERVER_LOG_FORMAT_LOCAL_DEV_ENABLED, logger_filter_mapping=app_settings.WEBSERVER_LOG_FILTER_MAPPING, tracing_settings=app_settings.WEBSERVER_TRACING, From 
40cafe98dfb3fe14da78b8594029a3b7d5c6a4a5 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Wed, 9 Jul 2025 10:24:42 +0200 Subject: [PATCH 033/128] ensure we replace any handlers in the logger --- .../src/servicelib/logging_utils.py | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/packages/service-library/src/servicelib/logging_utils.py b/packages/service-library/src/servicelib/logging_utils.py index d191b53c5b0..8be5d488f80 100644 --- a/packages/service-library/src/servicelib/logging_utils.py +++ b/packages/service-library/src/servicelib/logging_utils.py @@ -281,9 +281,15 @@ async def setup_async_loggers_lifespan( # Create queue handler for loggers queue_handler = logging.handlers.QueueHandler(log_queue) - # Get all loggers and add queue handler alongside existing handlers + # Get all loggers and replace existing handlers with queue handler all_loggers = _get_all_loggers() + original_handlers: dict[logging.Logger, list[logging.Handler]] = {} + for logger in all_loggers: + # Store original handlers for cleanup + original_handlers[logger] = logger.handlers.copy() + # Clear existing handlers and add only the queue handler + logger.handlers.clear() logger.addHandler(queue_handler) try: @@ -295,11 +301,12 @@ async def setup_async_loggers_lifespan( yield finally: - # Cleanup: Remove queue handlers from all loggers + # Cleanup: Remove queue handlers and restore original handlers try: for logger in all_loggers: - if queue_handler in logger.handlers: - logger.removeHandler(queue_handler) + # Clear queue handler and restore original handlers + logger.handlers.clear() + logger.handlers.extend(original_handlers.get(logger, [])) # Stop the queue listener with log_context( From cce07aa3b5ea5327f37cdcc34636ba80a800b5cb Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Wed, 9 Jul 2025 10:39:16 +0200 Subject: [PATCH 034/128] only change needed handler --- .../src/servicelib/logging_utils.py | 59 +++++++++++++++---- 1 file changed, 47 insertions(+), 12 deletions(-) diff --git a/packages/service-library/src/servicelib/logging_utils.py b/packages/service-library/src/servicelib/logging_utils.py index 8be5d488f80..b5d4f401b8a 100644 --- a/packages/service-library/src/servicelib/logging_utils.py +++ b/packages/service-library/src/servicelib/logging_utils.py @@ -281,16 +281,53 @@ async def setup_async_loggers_lifespan( # Create queue handler for loggers queue_handler = logging.handlers.QueueHandler(log_queue) - # Get all loggers and replace existing handlers with queue handler + # Use root-only approach for better performance and simplicity + root_logger = logging.getLogger() + original_root_handlers = root_logger.handlers.copy() + + # Check for edge cases and warn if found all_loggers = _get_all_loggers() - original_handlers: dict[logging.Logger, list[logging.Handler]] = {} + edge_case_loggers = [] for logger in all_loggers: - # Store original handlers for cleanup - original_handlers[logger] = logger.handlers.copy() - # Clear existing handlers and add only the queue handler - logger.handlers.clear() - logger.addHandler(queue_handler) + if logger is root_logger: + continue + + # Check for loggers that might bypass root logging + has_handlers = bool(logger.handlers) + propagate_disabled = not logger.propagate + + # Filter out harmless cases: NullHandler with propagate=True is fine + has_meaningful_handlers = ( + any(not isinstance(h, logging.NullHandler) for h in logger.handlers) + if 
logger.handlers + else False + ) + + if has_meaningful_handlers or propagate_disabled: + edge_case_loggers.append( + { + "name": logger.name, + "has_handlers": has_handlers, + "propagate": logger.propagate, + "handlers": [type(h).__name__ for h in logger.handlers], + } + ) + + if edge_case_loggers: + _logger.warning( + "Found %d loggers that may bypass async logging: %s. " + "Consider reviewing logger configuration.", + len(edge_case_loggers), + [ + f"{logger_info['name']}(handlers={logger_info['handlers']}, propagate={logger_info['propagate']})" + for logger_info in edge_case_loggers[:3] + ], # Show first 3 to avoid spam + ) + + # Replace only root logger handlers + root_logger.handlers.clear() + root_logger.addHandler(queue_handler) try: # Apply filters if provided @@ -301,12 +338,10 @@ async def setup_async_loggers_lifespan( yield finally: - # Cleanup: Remove queue handlers and restore original handlers + # Cleanup: Restore original root logger handlers try: - for logger in all_loggers: - # Clear queue handler and restore original handlers - logger.handlers.clear() - logger.handlers.extend(original_handlers.get(logger, [])) + root_logger.handlers.clear() + root_logger.handlers.extend(original_root_handlers) # Stop the queue listener with log_context( From 0964b1a353588a5893ade73c1e44c6f1c9335c52 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Wed, 9 Jul 2025 11:31:08 +0200 Subject: [PATCH 035/128] added a autouse fixture to fix tests when using caplog --- .../src/pytest_simcore/logging.py | 47 +++++++++++++++++++ packages/service-library/tests/conftest.py | 1 + 2 files changed, 48 insertions(+) create mode 100644 packages/pytest-simcore/src/pytest_simcore/logging.py diff --git a/packages/pytest-simcore/src/pytest_simcore/logging.py b/packages/pytest-simcore/src/pytest_simcore/logging.py new file mode 100644 index 00000000000..4f1399094bc --- /dev/null +++ b/packages/pytest-simcore/src/pytest_simcore/logging.py @@ -0,0 +1,47 @@ +# In conftest.py or test_logging_utils.py +import logging +from collections.abc import AsyncIterator +from contextlib import asynccontextmanager +from unittest.mock import patch + +import pytest +from servicelib.logging_utils import setup_async_loggers_lifespan + + +@pytest.fixture(autouse=True) +def preserve_caplog_for_async_logging(request): + """Automatically preserve caplog handlers when both caplog and async logging are used.""" + # Check if this test uses caplog fixture + if "caplog" not in request.fixturenames: + yield # No caplog, no patching needed + return + + # Patch setup_async_loggers_lifespan to preserve caplog handlers + original_setup = setup_async_loggers_lifespan + + @asynccontextmanager + async def patched_setup_async_loggers_lifespan(**kwargs) -> AsyncIterator[None]: + # Find caplog's handler in root logger + root_logger = logging.getLogger() + caplog_handlers = [ + h for h in root_logger.handlers if "LogCaptureHandler" in f"{type(h)}" + ] + + async with original_setup(**kwargs): + # After setup, restore caplog handlers alongside queue handler + for handler in caplog_handlers: + if handler not in root_logger.handlers: + root_logger.addHandler(handler) + yield + + with ( + patch( + "tests.test_logging_utils.setup_async_loggers_lifespan", + patched_setup_async_loggers_lifespan, + ), + patch( + "servicelib.logging_utils.setup_async_loggers_lifespan", + patched_setup_async_loggers_lifespan, + ), + ): + yield diff --git a/packages/service-library/tests/conftest.py 
b/packages/service-library/tests/conftest.py index 45a456fed02..d123e16f12e 100644 --- a/packages/service-library/tests/conftest.py +++ b/packages/service-library/tests/conftest.py @@ -24,6 +24,7 @@ "pytest_simcore.docker", "pytest_simcore.environment_configs", "pytest_simcore.file_extra", + "pytest_simcore.logging", "pytest_simcore.pytest_global_environs", "pytest_simcore.rabbit_service", "pytest_simcore.redis_service", From 80afced383f1f6a9d918159ca3b597a7d26793ca Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Wed, 9 Jul 2025 11:52:31 +0200 Subject: [PATCH 036/128] ensure all loggers propagate --- .../src/servicelib/logging_utils.py | 67 ++++++++++--------- 1 file changed, 37 insertions(+), 30 deletions(-) diff --git a/packages/service-library/src/servicelib/logging_utils.py b/packages/service-library/src/servicelib/logging_utils.py index b5d4f401b8a..9c921734e8a 100644 --- a/packages/service-library/src/servicelib/logging_utils.py +++ b/packages/service-library/src/servicelib/logging_utils.py @@ -281,51 +281,55 @@ async def setup_async_loggers_lifespan( # Create queue handler for loggers queue_handler = logging.handlers.QueueHandler(log_queue) - # Use root-only approach for better performance and simplicity + # Comprehensive approach: ensure ALL logs go through async queue root_logger = logging.getLogger() - original_root_handlers = root_logger.handlers.copy() - - # Check for edge cases and warn if found all_loggers = _get_all_loggers() - edge_case_loggers = [] + # Store original state for restoration + original_logger_state = [ + { + "logger": logger, + "handlers": logger.handlers.copy(), + "propagate": logger.propagate, + } + for logger in all_loggers + ] + + # Remove all handlers from all loggers and ensure propagation + loggers_modified = [] for logger in all_loggers: if logger is root_logger: continue - # Check for loggers that might bypass root logging - has_handlers = bool(logger.handlers) - propagate_disabled = not logger.propagate + # Track what we're modifying for logging purposes + had_handlers = bool(logger.handlers) + had_propagate_disabled = not logger.propagate - # Filter out harmless cases: NullHandler with propagate=True is fine - has_meaningful_handlers = ( - any(not isinstance(h, logging.NullHandler) for h in logger.handlers) - if logger.handlers - else False - ) - - if has_meaningful_handlers or propagate_disabled: - edge_case_loggers.append( + if had_handlers or had_propagate_disabled: + loggers_modified.append( { "name": logger.name, - "has_handlers": has_handlers, - "propagate": logger.propagate, + "had_handlers": had_handlers, + "had_propagate_disabled": had_propagate_disabled, "handlers": [type(h).__name__ for h in logger.handlers], } ) - if edge_case_loggers: - _logger.warning( - "Found %d loggers that may bypass async logging: %s. 
" - "Consider reviewing logger configuration.", - len(edge_case_loggers), + # Clear handlers and ensure propagation + logger.handlers.clear() + logger.propagate = True + + if loggers_modified: + _logger.info( + "Modified %d loggers for async logging: %s", + len(loggers_modified), [ - f"{logger_info['name']}(handlers={logger_info['handlers']}, propagate={logger_info['propagate']})" - for logger_info in edge_case_loggers[:3] + f"{info['name']}(removed_handlers={info['handlers']}, enabled_propagate={info['had_propagate_disabled']})" + for info in loggers_modified[:3] ], # Show first 3 to avoid spam ) - # Replace only root logger handlers + # Set up root logger with queue handler only root_logger.handlers.clear() root_logger.addHandler(queue_handler) @@ -338,10 +342,13 @@ async def setup_async_loggers_lifespan( yield finally: - # Cleanup: Restore original root logger handlers + # Cleanup: Restore all loggers to their original state try: - root_logger.handlers.clear() - root_logger.handlers.extend(original_root_handlers) + for state in original_logger_state: + logger = state["logger"] + logger.handlers.clear() + logger.handlers.extend(state["handlers"]) + logger.propagate = state["propagate"] # Stop the queue listener with log_context( From ea564b37dc998491766bbc859a16d16e8f06a06a Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Wed, 9 Jul 2025 12:04:50 +0200 Subject: [PATCH 037/128] done --- .../src/servicelib/logging_utils.py | 174 ++++++++++-------- 1 file changed, 98 insertions(+), 76 deletions(-) diff --git a/packages/service-library/src/servicelib/logging_utils.py b/packages/service-library/src/servicelib/logging_utils.py index 9c921734e8a..2da365cc678 100644 --- a/packages/service-library/src/servicelib/logging_utils.py +++ b/packages/service-library/src/servicelib/logging_utils.py @@ -171,20 +171,6 @@ def _setup_format_string( return _DEFAULT_FORMATTING -def _set_logging_handler( - logger: logging.Logger, - *, - fmt: str, - log_format_local_dev_enabled: bool, -) -> None: - for handler in logger.handlers: - handler.setFormatter( - CustomFormatter( - fmt, log_format_local_dev_enabled=log_format_local_dev_enabled - ) - ) - - def _get_all_loggers() -> list[logging.Logger]: manager = logging.Logger.manager root_logger = logging.getLogger() @@ -215,7 +201,12 @@ def setup_loggers( tracing_settings: TracingSettings | None, ) -> None: """ - Applies common configuration to ALL registered loggers. + Applies comprehensive configuration to ALL registered loggers. 
+ + This function uses a comprehensive approach: + - Removes all handlers from all loggers + - Ensures all loggers propagate to root + - Sets up root logger with properly formatted handler Args: log_format_local_dev_enabled: Enable local development formatting @@ -227,13 +218,17 @@ def setup_loggers( log_format_local_dev_enabled=log_format_local_dev_enabled, ) + # Create a properly formatted handler for the root logger + root_handler = logging.StreamHandler() + root_handler.setFormatter( + CustomFormatter(fmt, log_format_local_dev_enabled=log_format_local_dev_enabled) + ) + all_loggers = _get_all_loggers() - for logger in all_loggers: - _set_logging_handler( - logger, - fmt=fmt, - log_format_local_dev_enabled=log_format_local_dev_enabled, - ) + + # Apply comprehensive logging setup + # Note: We don't store the original state here since this is a permanent setup + _apply_comprehensive_logging_setup(all_loggers, root_handler) # Apply filters _apply_logger_filters(logger_filter_mapping) @@ -281,57 +276,11 @@ async def setup_async_loggers_lifespan( # Create queue handler for loggers queue_handler = logging.handlers.QueueHandler(log_queue) - # Comprehensive approach: ensure ALL logs go through async queue - root_logger = logging.getLogger() + # Apply comprehensive logging setup and store original state for restoration all_loggers = _get_all_loggers() - - # Store original state for restoration - original_logger_state = [ - { - "logger": logger, - "handlers": logger.handlers.copy(), - "propagate": logger.propagate, - } - for logger in all_loggers - ] - - # Remove all handlers from all loggers and ensure propagation - loggers_modified = [] - for logger in all_loggers: - if logger is root_logger: - continue - - # Track what we're modifying for logging purposes - had_handlers = bool(logger.handlers) - had_propagate_disabled = not logger.propagate - - if had_handlers or had_propagate_disabled: - loggers_modified.append( - { - "name": logger.name, - "had_handlers": had_handlers, - "had_propagate_disabled": had_propagate_disabled, - "handlers": [type(h).__name__ for h in logger.handlers], - } - ) - - # Clear handlers and ensure propagation - logger.handlers.clear() - logger.propagate = True - - if loggers_modified: - _logger.info( - "Modified %d loggers for async logging: %s", - len(loggers_modified), - [ - f"{info['name']}(removed_handlers={info['handlers']}, enabled_propagate={info['had_propagate_disabled']})" - for info in loggers_modified[:3] - ], # Show first 3 to avoid spam - ) - - # Set up root logger with queue handler only - root_logger.handlers.clear() - root_logger.addHandler(queue_handler) + original_logger_state = _apply_comprehensive_logging_setup( + all_loggers, queue_handler + ) try: # Apply filters if provided @@ -344,11 +293,7 @@ async def setup_async_loggers_lifespan( finally: # Cleanup: Restore all loggers to their original state try: - for state in original_logger_state: - logger = state["logger"] - logger.handlers.clear() - logger.handlers.extend(state["handlers"]) - logger.propagate = state["propagate"] + _restore_logger_state(original_logger_state) # Stop the queue listener with log_context( @@ -606,3 +551,80 @@ def set_parent_module_log_level( ) -> None: parent_module = ".".join(current_module.split(".")[:-1]) logging.getLogger(parent_module).setLevel(desired_log_level) + + +def _store_logger_state(loggers: list[logging.Logger]) -> list[dict[str, Any]]: + """Store the original state of loggers for later restoration.""" + return [ + { + "logger": logger, + "handlers": 
logger.handlers.copy(), + "propagate": logger.propagate, + } + for logger in loggers + ] + + +def _restore_logger_state(original_state: list[dict[str, Any]]) -> None: + """Restore loggers to their original state.""" + for state in original_state: + logger = state["logger"] + logger.handlers.clear() + logger.handlers.extend(state["handlers"]) + logger.propagate = state["propagate"] + + +def _apply_comprehensive_logging_setup( + all_loggers: list[logging.Logger], + root_handler: logging.Handler, +) -> list[dict[str, Any]]: + """ + Apply comprehensive logging setup: clear all handlers, ensure propagation, + and set up root logger with the provided handler. + + Returns the original logger state for restoration. + """ + root_logger = logging.getLogger() + + # Store original state for restoration + original_logger_state = _store_logger_state(all_loggers) + + # Remove all handlers from all loggers and ensure propagation + loggers_modified = [] + for logger in all_loggers: + if logger is root_logger: + continue + + # Track what we're modifying for logging purposes + had_handlers = bool(logger.handlers) + had_propagate_disabled = not logger.propagate + + if had_handlers or had_propagate_disabled: + loggers_modified.append( + { + "name": logger.name, + "had_handlers": had_handlers, + "had_propagate_disabled": had_propagate_disabled, + "handlers": [type(h).__name__ for h in logger.handlers], + } + ) + + # Clear handlers and ensure propagation + logger.handlers.clear() + logger.propagate = True + + if loggers_modified: + _logger.info( + "Modified %d loggers for comprehensive logging: %s", + len(loggers_modified), + [ + f"{info['name']}(removed_handlers={info['handlers']}, enabled_propagate={info['had_propagate_disabled']})" + for info in loggers_modified[:3] + ], # Show first 3 to avoid spam + ) + + # Set up root logger with the provided handler only + root_logger.handlers.clear() + root_logger.addHandler(root_handler) + + return original_logger_state From 2df7185cb906618719ab78792a232bbabda59957 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Wed, 9 Jul 2025 14:10:07 +0200 Subject: [PATCH 038/128] cleanup --- .../src/servicelib/logging_utils.py | 27 ++++++++++ .../src/simcore_service_webserver/cli.py | 23 +-------- .../src/simcore_service_webserver/log.py | 50 ++++++++++++------- 3 files changed, 61 insertions(+), 39 deletions(-) diff --git a/packages/service-library/src/servicelib/logging_utils.py b/packages/service-library/src/servicelib/logging_utils.py index 2da365cc678..ff9ce0e0bff 100644 --- a/packages/service-library/src/servicelib/logging_utils.py +++ b/packages/service-library/src/servicelib/logging_utils.py @@ -194,11 +194,31 @@ def _apply_logger_filters( logger.addFilter(log_filter) +def _setup_base_logging_level(log_level: LogLevelInt) -> None: + logging.basicConfig(level=log_level) + logging.root.setLevel(log_level) + + +def _dampen_noisy_loggers( + noisy_loggers: tuple[str, ...], +) -> None: + """Sets a less verbose level for noisy loggers.""" + quiet_level: int = max( + min(logging.root.level + logging.CRITICAL - logging.ERROR, logging.CRITICAL), + logging.WARNING, + ) + + for name in noisy_loggers: + logging.getLogger(name).setLevel(quiet_level) + + def setup_loggers( *, log_format_local_dev_enabled: bool, logger_filter_mapping: dict[LoggerName, list[MessageSubstring]], tracing_settings: TracingSettings | None, + log_base_level: LogLevelInt, + noisy_loggers: tuple[str, ...], ) -> None: """ Applies comprehensive configuration to ALL 
registered loggers. @@ -213,6 +233,8 @@ def setup_loggers( logger_filter_mapping: Mapping of logger names to filtered message substrings tracing_settings: OpenTelemetry tracing configuration """ + _setup_base_logging_level(log_base_level) + _dampen_noisy_loggers(noisy_loggers) fmt = _setup_format_string( tracing_settings=tracing_settings, log_format_local_dev_enabled=log_format_local_dev_enabled, @@ -240,6 +262,8 @@ async def setup_async_loggers_lifespan( log_format_local_dev_enabled: bool, logger_filter_mapping: dict[LoggerName, list[MessageSubstring]], tracing_settings: TracingSettings | None, + log_base_level: LogLevelInt, + noisy_loggers: tuple[str, ...], ) -> AsyncIterator[None]: """ Async context manager for non-blocking logging infrastructure. @@ -254,6 +278,9 @@ async def setup_async_loggers_lifespan( logger_filter_mapping: Mapping of logger names to filtered message substrings tracing_settings: OpenTelemetry tracing configuration """ + _setup_base_logging_level(log_base_level) + _dampen_noisy_loggers(noisy_loggers) + fmt = _setup_format_string( tracing_settings=tracing_settings, log_format_local_dev_enabled=log_format_local_dev_enabled, diff --git a/services/web/server/src/simcore_service_webserver/cli.py b/services/web/server/src/simcore_service_webserver/cli.py index 455a4955f0c..ce8319f4171 100644 --- a/services/web/server/src/simcore_service_webserver/cli.py +++ b/services/web/server/src/simcore_service_webserver/cli.py @@ -15,13 +15,11 @@ import logging import os -from contextlib import AsyncExitStack from typing import Annotated, Final import typer from aiohttp import web from common_library.json_serialization import json_dumps -from servicelib.logging_utils import setup_async_loggers_lifespan from settings_library.utils_cli import create_settings_command from .application_settings import ApplicationSettings @@ -74,31 +72,14 @@ async def app_factory() -> web.Application: "Using application factory: %s", app_settings.WEBSERVER_APP_FACTORY_NAME ) - setup_logging( - level=app_settings.log_level, - slow_duration=app_settings.AIODEBUG_SLOW_DURATION_SECS, - ) - - exit_stack = AsyncExitStack() - await exit_stack.enter_async_context( - setup_async_loggers_lifespan( - log_format_local_dev_enabled=app_settings.WEBSERVER_LOG_FORMAT_LOCAL_DEV_ENABLED, - logger_filter_mapping=app_settings.WEBSERVER_LOG_FILTER_MAPPING, - tracing_settings=app_settings.WEBSERVER_TRACING, - ) - ) + logging_lifespan_cleanup_event = await setup_logging(app_settings) if app_settings.WEBSERVER_APP_FACTORY_NAME == "WEBSERVER_AUTHZ_APP_FACTORY": app = create_application_auth() else: app, _ = _setup_app_from_settings(app_settings) - async def _cleanup_event(app: web.Application) -> None: - assert app # nosec - _logger.info("Cleaning up application resources") - await exit_stack.aclose() - - app.on_cleanup.append(_cleanup_event) + app.on_cleanup.append(logging_lifespan_cleanup_event) return app diff --git a/services/web/server/src/simcore_service_webserver/log.py b/services/web/server/src/simcore_service_webserver/log.py index 4fb0be6715b..8dd7da254e2 100644 --- a/services/web/server/src/simcore_service_webserver/log.py +++ b/services/web/server/src/simcore_service_webserver/log.py @@ -1,12 +1,18 @@ """Configuration and utilities for service logging""" import logging +from collections.abc import Awaitable, Callable +from contextlib import AsyncExitStack +from typing import Final, TypeAlias from aiodebug import log_slow_callbacks # type: ignore[import-untyped] +from aiohttp import web from aiohttp.log import 
access_logger +from servicelib.logging_utils import setup_async_loggers_lifespan +from simcore_service_webserver.application_settings import ApplicationSettings -LOG_LEVEL_STEP = logging.CRITICAL - logging.ERROR -NOISY_LOGGERS = ( +LOG_LEVEL_STEP: Final[int] = logging.CRITICAL - logging.ERROR +NOISY_LOGGERS: Final[tuple[str, ...]] = ( "aio_pika", "aiormq", "engineio", @@ -20,13 +26,22 @@ "sqlalchemy", ) +_logger = logging.getLogger(__name__) -def setup_logging(*, level: str | int, slow_duration: float | None = None): - # service log level - logging.basicConfig(level=level) +CleanupEvent: TypeAlias = Callable[[web.Application], Awaitable[None]] - # root - logging.root.setLevel(level) + +async def setup_logging(app_settings: ApplicationSettings) -> CleanupEvent: + exit_stack = AsyncExitStack() + await exit_stack.enter_async_context( + setup_async_loggers_lifespan( + log_base_level=app_settings.log_level, + noisy_loggers=NOISY_LOGGERS, + log_format_local_dev_enabled=app_settings.WEBSERVER_LOG_FORMAT_LOCAL_DEV_ENABLED, + logger_filter_mapping=app_settings.WEBSERVER_LOG_FILTER_MAPPING, + tracing_settings=app_settings.WEBSERVER_TRACING, + ) + ) # Enforces same log-level to aiohttp & gunicorn access loggers # @@ -36,17 +51,16 @@ def setup_logging(*, level: str | int, slow_duration: float | None = None): # they are not applied globally but only upon setup_logging ... # gunicorn_access_log = logging.getLogger("gunicorn.access") - access_logger.setLevel(level) - gunicorn_access_log.setLevel(level) + access_logger.setLevel(app_settings.log_level) + gunicorn_access_log.setLevel(app_settings.log_level) - # keep mostly quiet noisy loggers - quiet_level: int = max( - min(logging.root.level + LOG_LEVEL_STEP, logging.CRITICAL), logging.WARNING - ) + if app_settings.AIODEBUG_SLOW_DURATION_SECS: + # NOTE: Every task blocking > AIODEBUG_SLOW_DURATION_SECS secs is considered slow and logged as warning + log_slow_callbacks.enable(abs(app_settings.AIODEBUG_SLOW_DURATION_SECS)) - for name in NOISY_LOGGERS: - logging.getLogger(name).setLevel(quiet_level) + async def _cleanup_event(app: web.Application) -> None: + assert app # nosec + _logger.info("Cleaning up application resources") + await exit_stack.aclose() - if slow_duration: - # NOTE: Every task blocking > AIODEBUG_SLOW_DURATION_SECS secs is considered slow and logged as warning - log_slow_callbacks.enable(abs(slow_duration)) + return _cleanup_event From e9a37dad3e57467a305dc461254df039e9a86162 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Wed, 9 Jul 2025 14:11:35 +0200 Subject: [PATCH 039/128] simplify --- packages/service-library/tests/test_logging_utils.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/packages/service-library/tests/test_logging_utils.py b/packages/service-library/tests/test_logging_utils.py index a91a9c0e792..afd50b032fc 100644 --- a/packages/service-library/tests/test_logging_utils.py +++ b/packages/service-library/tests/test_logging_utils.py @@ -449,6 +449,8 @@ async def test_setup_async_loggers_basic( log_format_local_dev_enabled=log_format_local_dev_enabled, logger_filter_mapping={}, # No filters for this test tracing_settings=None, # No tracing for this test + log_base_level=logging.INFO, # Set base log level + noisy_loggers=(), # No noisy loggers for this test ): test_logger = logging.getLogger("test_async_logger") test_logger.info("Test async log message") @@ -472,6 +474,8 @@ async def test_setup_async_loggers_with_filters( log_format_local_dev_enabled=True, 
logger_filter_mapping=filter_mapping, tracing_settings=None, # No tracing for this test + log_base_level=logging.INFO, # Set base log level + noisy_loggers=(), # No noisy loggers for this test ): test_logger = logging.getLogger("test_filtered_logger") unfiltered_logger = logging.getLogger("test_unfiltered_logger") @@ -507,6 +511,8 @@ async def test_setup_async_loggers_with_tracing_settings( log_format_local_dev_enabled=False, logger_filter_mapping={}, # No filters for this test tracing_settings=None, + log_base_level=logging.INFO, # Set base log level + noisy_loggers=(), # No noisy loggers for this test ): test_logger = logging.getLogger("test_tracing_logger") test_logger.info("Test message with tracing settings") @@ -527,6 +533,8 @@ async def test_setup_async_loggers_context_manager_cleanup( log_format_local_dev_enabled=True, logger_filter_mapping={}, tracing_settings=None, + log_base_level=logging.INFO, # Set base log level + noisy_loggers=(), # No noisy loggers for this test ): # During the context, handlers should be replaced test_logger.info("Message during context") @@ -551,6 +559,8 @@ def _raise_test_exception(): log_format_local_dev_enabled=True, logger_filter_mapping={}, tracing_settings=None, + log_base_level=logging.INFO, # Set base log level + noisy_loggers=(), # No noisy loggers for this test ): test_logger = logging.getLogger("test_exception_logger") test_logger.info("Message before exception") From 652d04fa1978d4179a5ef6d3e26989ca4006c88e Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Wed, 9 Jul 2025 14:35:53 +0200 Subject: [PATCH 040/128] autoscaling --- .../core/application.py | 16 ---------------- .../src/simcore_service_autoscaling/main.py | 14 +++++++++++--- 2 files changed, 11 insertions(+), 19 deletions(-) diff --git a/services/autoscaling/src/simcore_service_autoscaling/core/application.py b/services/autoscaling/src/simcore_service_autoscaling/core/application.py index f0c9d7f3c32..6abe3a52265 100644 --- a/services/autoscaling/src/simcore_service_autoscaling/core/application.py +++ b/services/autoscaling/src/simcore_service_autoscaling/core/application.py @@ -31,26 +31,10 @@ from ..modules.ssm import setup as setup_ssm from .settings import ApplicationSettings -_LOG_LEVEL_STEP = logging.CRITICAL - logging.ERROR -_NOISY_LOGGERS = ( - "aiobotocore", - "aio_pika", - "aiormq", - "botocore", - "werkzeug", -) - logger = logging.getLogger(__name__) def create_app(settings: ApplicationSettings) -> FastAPI: - # keep mostly quiet noisy loggers - quiet_level: int = max( - min(logging.root.level + _LOG_LEVEL_STEP, logging.CRITICAL), logging.WARNING - ) - for name in _NOISY_LOGGERS: - logging.getLogger(name).setLevel(quiet_level) - logger.info("app settings: %s", settings.model_dump_json(indent=1)) app = FastAPI( diff --git a/services/autoscaling/src/simcore_service_autoscaling/main.py b/services/autoscaling/src/simcore_service_autoscaling/main.py index 65604fee17d..8aad6b87fe4 100644 --- a/services/autoscaling/src/simcore_service_autoscaling/main.py +++ b/services/autoscaling/src/simcore_service_autoscaling/main.py @@ -1,19 +1,27 @@ """Main application to be deployed by uvicorn (or equivalent) server""" -import logging +from typing import Final from fastapi import FastAPI from servicelib.logging_utils import setup_loggers from simcore_service_autoscaling.core.application import create_app from simcore_service_autoscaling.core.settings import ApplicationSettings +_NOISY_LOGGERS: Final[tuple[str, ...]] = ( + "aiobotocore", + 
"aio_pika", + "aiormq", + "botocore", + "werkzeug", +) + the_settings = ApplicationSettings.create_from_envs() -logging.basicConfig(level=the_settings.log_level) -logging.root.setLevel(the_settings.log_level) setup_loggers( log_format_local_dev_enabled=the_settings.AUTOSCALING_LOG_FORMAT_LOCAL_DEV_ENABLED, logger_filter_mapping=the_settings.AUTOSCALING_LOG_FILTER_MAPPING, tracing_settings=the_settings.AUTOSCALING_TRACING, + log_base_level=the_settings.log_level, + noisy_loggers=_NOISY_LOGGERS, ) # SINGLETON FastAPI app From 0ae5828e03de4ce116b58ad7b096efd7ac0cd262 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Wed, 9 Jul 2025 14:38:24 +0200 Subject: [PATCH 041/128] agent --- services/agent/src/simcore_service_agent/core/application.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/services/agent/src/simcore_service_agent/core/application.py b/services/agent/src/simcore_service_agent/core/application.py index a89b968c150..7eae91d3cc4 100644 --- a/services/agent/src/simcore_service_agent/core/application.py +++ b/services/agent/src/simcore_service_agent/core/application.py @@ -31,13 +31,12 @@ def _setup_logger(settings: ApplicationSettings): - # SEE https://github.com/ITISFoundation/osparc-simcore/issues/3148 - logging.basicConfig(level=settings.LOG_LEVEL.value) # NOSONAR - logging.root.setLevel(settings.LOG_LEVEL.value) setup_loggers( log_format_local_dev_enabled=settings.AGENT_VOLUMES_LOG_FORMAT_LOCAL_DEV_ENABLED, logger_filter_mapping=settings.AGENT_VOLUMES_LOG_FILTER_MAPPING, tracing_settings=settings.AGENT_TRACING, + log_base_level=settings.log_level, + noisy_loggers=(), ) From 47dcfe9852d0fd160c2bd70e927a08b197d49e83 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Wed, 9 Jul 2025 14:39:23 +0200 Subject: [PATCH 042/128] apiserver --- .../src/simcore_service_api_server/core/application.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/services/api-server/src/simcore_service_api_server/core/application.py b/services/api-server/src/simcore_service_api_server/core/application.py index ba9331279b4..9c5e7d81b3b 100644 --- a/services/api-server/src/simcore_service_api_server/core/application.py +++ b/services/api-server/src/simcore_service_api_server/core/application.py @@ -53,12 +53,12 @@ def init_app(settings: ApplicationSettings | None = None) -> FastAPI: settings = ApplicationSettings.create_from_envs() assert settings # nosec - logging.basicConfig(level=settings.log_level) - logging.root.setLevel(settings.log_level) setup_loggers( log_format_local_dev_enabled=settings.API_SERVER_LOG_FORMAT_LOCAL_DEV_ENABLED, logger_filter_mapping=settings.API_SERVER_LOG_FILTER_MAPPING, tracing_settings=settings.API_SERVER_TRACING, + log_base_level=settings.log_level, + noisy_loggers=(), ) _logger.debug("App settings:\n%s", settings.model_dump_json(indent=2)) From fce15f77b7bd57a5648b0409b91700531cff0c97 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Wed, 9 Jul 2025 14:40:39 +0200 Subject: [PATCH 043/128] catalog --- .../simcore_service_catalog/core/application.py | 17 ----------------- .../catalog/src/simcore_service_catalog/main.py | 16 ++++++++++++---- 2 files changed, 12 insertions(+), 21 deletions(-) diff --git a/services/catalog/src/simcore_service_catalog/core/application.py b/services/catalog/src/simcore_service_catalog/core/application.py index 3f726883066..b06ed4dd457 100644 --- 
a/services/catalog/src/simcore_service_catalog/core/application.py +++ b/services/catalog/src/simcore_service_catalog/core/application.py @@ -27,25 +27,8 @@ _logger = logging.getLogger(__name__) -_LOG_LEVEL_STEP = logging.CRITICAL - logging.ERROR -_NOISY_LOGGERS = ( - "aio_pika", - "aiobotocore", - "aiormq", - "botocore", - "httpcore", - "werkzeug", -) - def create_app() -> FastAPI: - # keep mostly quiet noisy loggers - quiet_level: int = max( - min(logging.root.level + _LOG_LEVEL_STEP, logging.CRITICAL), logging.WARNING - ) - for name in _NOISY_LOGGERS: - logging.getLogger(name).setLevel(quiet_level) - settings = ApplicationSettings.create_from_envs() _logger.debug(settings.model_dump_json(indent=2)) diff --git a/services/catalog/src/simcore_service_catalog/main.py b/services/catalog/src/simcore_service_catalog/main.py index c88d967fc0f..65a96e09808 100644 --- a/services/catalog/src/simcore_service_catalog/main.py +++ b/services/catalog/src/simcore_service_catalog/main.py @@ -1,6 +1,6 @@ """Main application to be deployed in for example uvicorn.""" -import logging +from typing import Final from fastapi import FastAPI from servicelib.logging_utils import setup_loggers @@ -9,13 +9,21 @@ _the_settings = ApplicationSettings.create_from_envs() -# SEE https://github.com/ITISFoundation/osparc-simcore/issues/3148 -logging.basicConfig(level=_the_settings.log_level) # NOSONAR -logging.root.setLevel(_the_settings.log_level) +_NOISY_LOGGERS: Final[tuple[str, ...]] = ( + "aio_pika", + "aiobotocore", + "aiormq", + "botocore", + "httpcore", + "werkzeug", +) + setup_loggers( log_format_local_dev_enabled=_the_settings.CATALOG_LOG_FORMAT_LOCAL_DEV_ENABLED, logger_filter_mapping=_the_settings.CATALOG_LOG_FILTER_MAPPING, tracing_settings=_the_settings.CATALOG_TRACING, + log_base_level=_the_settings.log_level, + noisy_loggers=_NOISY_LOGGERS, ) From b439f0ea991194ad90dd2129637810734777237e Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Wed, 9 Jul 2025 14:41:52 +0200 Subject: [PATCH 044/128] clusters-keeper --- .../core/application.py | 16 ---------------- .../src/simcore_service_clusters_keeper/main.py | 14 +++++++++++--- 2 files changed, 11 insertions(+), 19 deletions(-) diff --git a/services/clusters-keeper/src/simcore_service_clusters_keeper/core/application.py b/services/clusters-keeper/src/simcore_service_clusters_keeper/core/application.py index bbda1b456a4..1c8c7e448c0 100644 --- a/services/clusters-keeper/src/simcore_service_clusters_keeper/core/application.py +++ b/services/clusters-keeper/src/simcore_service_clusters_keeper/core/application.py @@ -26,26 +26,10 @@ from ..rpc.rpc_routes import setup_rpc_routes from .settings import ApplicationSettings -_LOG_LEVEL_STEP = logging.CRITICAL - logging.ERROR -_NOISY_LOGGERS = ( - "aiobotocore", - "aio_pika", - "aiormq", - "botocore", - "werkzeug", -) - _logger = logging.getLogger(__name__) def create_app(settings: ApplicationSettings) -> FastAPI: - # keep mostly quiet noisy loggers - quiet_level: int = max( - min(logging.root.level + _LOG_LEVEL_STEP, logging.CRITICAL), logging.WARNING - ) - for name in _NOISY_LOGGERS: - logging.getLogger(name).setLevel(quiet_level) - _logger.info("app settings: %s", settings.model_dump_json(indent=1)) app = FastAPI( diff --git a/services/clusters-keeper/src/simcore_service_clusters_keeper/main.py b/services/clusters-keeper/src/simcore_service_clusters_keeper/main.py index afa325dd89d..1f11ed1b7f8 100644 --- a/services/clusters-keeper/src/simcore_service_clusters_keeper/main.py 
+++ b/services/clusters-keeper/src/simcore_service_clusters_keeper/main.py @@ -1,19 +1,27 @@ """Main application to be deployed by uvicorn (or equivalent) server""" -import logging +from typing import Final from fastapi import FastAPI from servicelib.logging_utils import setup_loggers from simcore_service_clusters_keeper.core.application import create_app from simcore_service_clusters_keeper.core.settings import ApplicationSettings +_NOISY_LOGGERS: Final[tuple[str, ...]] = ( + "aiobotocore", + "aio_pika", + "aiormq", + "botocore", + "werkzeug", +) + the_settings = ApplicationSettings.create_from_envs() -logging.basicConfig(level=the_settings.log_level) -logging.root.setLevel(the_settings.log_level) setup_loggers( log_format_local_dev_enabled=the_settings.CLUSTERS_KEEPER_LOG_FORMAT_LOCAL_DEV_ENABLED, logger_filter_mapping=the_settings.CLUSTERS_KEEPER_LOG_FILTER_MAPPING, tracing_settings=the_settings.CLUSTERS_KEEPER_TRACING, + log_base_level=the_settings.log_level, + noisy_loggers=_NOISY_LOGGERS, ) # SINGLETON FastAPI app From c7e00d65ebcd250a26b034497fc4c31d599fa8b5 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Wed, 9 Jul 2025 14:45:27 +0200 Subject: [PATCH 045/128] datcore and dask-sidecar --- .../service-library/src/servicelib/logging_utils.py | 10 ++++++---- .../src/simcore_service_agent/core/application.py | 2 +- .../simcore_service_api_server/core/application.py | 2 +- .../src/simcore_service_dask_sidecar/utils/logs.py | 11 ++--------- .../src/simcore_service_datcore_adapter/main.py | 7 ++----- 5 files changed, 12 insertions(+), 20 deletions(-) diff --git a/packages/service-library/src/servicelib/logging_utils.py b/packages/service-library/src/servicelib/logging_utils.py index ff9ce0e0bff..125ff517343 100644 --- a/packages/service-library/src/servicelib/logging_utils.py +++ b/packages/service-library/src/servicelib/logging_utils.py @@ -218,7 +218,7 @@ def setup_loggers( logger_filter_mapping: dict[LoggerName, list[MessageSubstring]], tracing_settings: TracingSettings | None, log_base_level: LogLevelInt, - noisy_loggers: tuple[str, ...], + noisy_loggers: tuple[str, ...] | None, ) -> None: """ Applies comprehensive configuration to ALL registered loggers. @@ -234,7 +234,8 @@ def setup_loggers( tracing_settings: OpenTelemetry tracing configuration """ _setup_base_logging_level(log_base_level) - _dampen_noisy_loggers(noisy_loggers) + if noisy_loggers is not None: + _dampen_noisy_loggers(noisy_loggers) fmt = _setup_format_string( tracing_settings=tracing_settings, log_format_local_dev_enabled=log_format_local_dev_enabled, @@ -263,7 +264,7 @@ async def setup_async_loggers_lifespan( logger_filter_mapping: dict[LoggerName, list[MessageSubstring]], tracing_settings: TracingSettings | None, log_base_level: LogLevelInt, - noisy_loggers: tuple[str, ...], + noisy_loggers: tuple[str, ...] | None, ) -> AsyncIterator[None]: """ Async context manager for non-blocking logging infrastructure. 
@@ -279,7 +280,8 @@ async def setup_async_loggers_lifespan( tracing_settings: OpenTelemetry tracing configuration """ _setup_base_logging_level(log_base_level) - _dampen_noisy_loggers(noisy_loggers) + if noisy_loggers is not None: + _dampen_noisy_loggers(noisy_loggers) fmt = _setup_format_string( tracing_settings=tracing_settings, diff --git a/services/agent/src/simcore_service_agent/core/application.py b/services/agent/src/simcore_service_agent/core/application.py index 7eae91d3cc4..c7f28faf599 100644 --- a/services/agent/src/simcore_service_agent/core/application.py +++ b/services/agent/src/simcore_service_agent/core/application.py @@ -36,7 +36,7 @@ def _setup_logger(settings: ApplicationSettings): logger_filter_mapping=settings.AGENT_VOLUMES_LOG_FILTER_MAPPING, tracing_settings=settings.AGENT_TRACING, log_base_level=settings.log_level, - noisy_loggers=(), + noisy_loggers=None, ) diff --git a/services/api-server/src/simcore_service_api_server/core/application.py b/services/api-server/src/simcore_service_api_server/core/application.py index 9c5e7d81b3b..197e2306b53 100644 --- a/services/api-server/src/simcore_service_api_server/core/application.py +++ b/services/api-server/src/simcore_service_api_server/core/application.py @@ -58,7 +58,7 @@ def init_app(settings: ApplicationSettings | None = None) -> FastAPI: logger_filter_mapping=settings.API_SERVER_LOG_FILTER_MAPPING, tracing_settings=settings.API_SERVER_TRACING, log_base_level=settings.log_level, - noisy_loggers=(), + noisy_loggers=None, ) _logger.debug("App settings:\n%s", settings.model_dump_json(indent=2)) diff --git a/services/dask-sidecar/src/simcore_service_dask_sidecar/utils/logs.py b/services/dask-sidecar/src/simcore_service_dask_sidecar/utils/logs.py index 0ce1ca9c405..04b0630015c 100644 --- a/services/dask-sidecar/src/simcore_service_dask_sidecar/utils/logs.py +++ b/services/dask-sidecar/src/simcore_service_dask_sidecar/utils/logs.py @@ -1,20 +1,13 @@ -import logging - from servicelib.logging_utils import setup_loggers from ..settings import ApplicationSettings def setup_app_logging(settings: ApplicationSettings) -> None: - # set up logging - logging.basicConfig(level=settings.DASK_SIDECAR_LOGLEVEL.value) - logging.root.setLevel(level=settings.DASK_SIDECAR_LOGLEVEL.value) - # NOTE: Dask attaches a StreamHandler to the logger in distributed - # removing them solves dual propagation of logs - for handler in logging.getLogger("distributed").handlers: - logging.getLogger("distributed").removeHandler(handler) setup_loggers( log_format_local_dev_enabled=settings.DASK_LOG_FORMAT_LOCAL_DEV_ENABLED, logger_filter_mapping=settings.DASK_LOG_FILTER_MAPPING, tracing_settings=None, # no tracing for dask sidecar + log_base_level=settings.log_level, + noisy_loggers=None, ) diff --git a/services/datcore-adapter/src/simcore_service_datcore_adapter/main.py b/services/datcore-adapter/src/simcore_service_datcore_adapter/main.py index efde47c998e..8178e69ae26 100644 --- a/services/datcore-adapter/src/simcore_service_datcore_adapter/main.py +++ b/services/datcore-adapter/src/simcore_service_datcore_adapter/main.py @@ -1,7 +1,5 @@ """Main application to be deployed in for example uvicorn""" -import logging - from fastapi import FastAPI from servicelib.logging_utils import setup_loggers from simcore_service_datcore_adapter.core.application import create_app @@ -9,13 +7,12 @@ _the_settings = ApplicationSettings.create_from_envs() -# SEE https://github.com/ITISFoundation/osparc-simcore/issues/3148 -logging.basicConfig(level=_the_settings.log_level) # 
NOSONAR -logging.root.setLevel(_the_settings.log_level) setup_loggers( log_format_local_dev_enabled=_the_settings.DATCORE_ADAPTER_LOG_FORMAT_LOCAL_DEV_ENABLED, logger_filter_mapping=_the_settings.DATCORE_ADAPTER_LOG_FILTER_MAPPING, tracing_settings=_the_settings.DATCORE_ADAPTER_TRACING, + log_base_level=_the_settings.log_level, + noisy_loggers=None, ) # SINGLETON FastAPI app From 78265ceb11f75f5779cf364c0eacd89a30a96b12 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Wed, 9 Jul 2025 14:46:11 +0200 Subject: [PATCH 046/128] director-v2 --- .../core/application.py | 16 ++++------------ 1 file changed, 4 insertions(+), 12 deletions(-) diff --git a/services/director-v2/src/simcore_service_director_v2/core/application.py b/services/director-v2/src/simcore_service_director_v2/core/application.py index 6a73ab5105a..456c64b92ab 100644 --- a/services/director-v2/src/simcore_service_director_v2/core/application.py +++ b/services/director-v2/src/simcore_service_director_v2/core/application.py @@ -1,4 +1,5 @@ import logging +from typing import Final from fastapi import FastAPI, HTTPException, status from fastapi.exceptions import RequestValidationError @@ -94,8 +95,7 @@ def _set_exception_handlers(app: FastAPI): ) -_LOG_LEVEL_STEP = logging.CRITICAL - logging.ERROR -_NOISY_LOGGERS = ( +_NOISY_LOGGERS: Final[tuple[str, ...]] = ( "aio_pika", "aiormq", "httpcore", @@ -108,23 +108,15 @@ def create_base_app(settings: AppSettings | None = None) -> FastAPI: settings = AppSettings.create_from_envs() assert settings # nosec - logging.basicConfig(level=settings.LOG_LEVEL.value) - logging.root.setLevel(settings.LOG_LEVEL.value) setup_loggers( log_format_local_dev_enabled=settings.DIRECTOR_V2_LOG_FORMAT_LOCAL_DEV_ENABLED, logger_filter_mapping=settings.DIRECTOR_V2_LOG_FILTER_MAPPING, tracing_settings=settings.DIRECTOR_V2_TRACING, + log_base_level=settings.log_level, + noisy_loggers=_NOISY_LOGGERS, ) _logger.debug(settings.model_dump_json(indent=2)) - # keep mostly quiet noisy loggers - quiet_level: int = max( - min(logging.root.level + _LOG_LEVEL_STEP, logging.CRITICAL), logging.WARNING - ) - - for name in _NOISY_LOGGERS: - logging.getLogger(name).setLevel(quiet_level) - assert settings.SC_BOOT_MODE # nosec app = FastAPI( debug=settings.SC_BOOT_MODE.is_devel_mode(), From ce036878f68bbeafe5bdd8821dc115c8e34b6e72 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Wed, 9 Jul 2025 14:46:39 +0200 Subject: [PATCH 047/128] director --- services/director/src/simcore_service_director/main.py | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/services/director/src/simcore_service_director/main.py b/services/director/src/simcore_service_director/main.py index 0f15c827ac6..dbf61c016ab 100644 --- a/services/director/src/simcore_service_director/main.py +++ b/services/director/src/simcore_service_director/main.py @@ -1,7 +1,5 @@ """Main application to be deployed by uvicorn (or equivalent) server""" -import logging - from fastapi import FastAPI from servicelib.logging_utils import setup_loggers from simcore_service_director.core.application import create_app @@ -9,13 +7,12 @@ _the_settings = ApplicationSettings.create_from_envs() -# SEE https://github.com/ITISFoundation/osparc-simcore/issues/3148 -logging.basicConfig(level=_the_settings.DIRECTOR_LOGLEVEL) -logging.root.setLevel(_the_settings.DIRECTOR_LOGLEVEL) setup_loggers( log_format_local_dev_enabled=_the_settings.DIRECTOR_LOG_FORMAT_LOCAL_DEV_ENABLED, 
logger_filter_mapping=_the_settings.DIRECTOR_LOG_FILTER_MAPPING, tracing_settings=_the_settings.DIRECTOR_TRACING, + log_base_level=_the_settings.log_level, + noisy_loggers=None, ) # SINGLETON FastAPI app From ef558581c24f9eb2f752f8aae9deb124f662e032 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Wed, 9 Jul 2025 14:47:05 +0200 Subject: [PATCH 048/128] dynamic-scheduler --- .../src/simcore_service_dynamic_scheduler/main.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/main.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/main.py index fe0d89d7bb0..60902e59c92 100644 --- a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/main.py +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/main.py @@ -1,19 +1,17 @@ """Main application to be deployed by uvicorn (or equivalent) server""" -import logging - from fastapi import FastAPI from servicelib.logging_utils import setup_loggers from simcore_service_dynamic_scheduler.core.application import create_app from simcore_service_dynamic_scheduler.core.settings import ApplicationSettings _the_settings = ApplicationSettings.create_from_envs() -logging.basicConfig(level=_the_settings.DYNAMIC_SCHEDULER_LOGLEVEL.value) -logging.root.setLevel(_the_settings.DYNAMIC_SCHEDULER_LOGLEVEL.value) setup_loggers( log_format_local_dev_enabled=_the_settings.DYNAMIC_SCHEDULER_LOG_FORMAT_LOCAL_DEV_ENABLED, logger_filter_mapping=_the_settings.DYNAMIC_SCHEDULER_LOG_FILTER_MAPPING, tracing_settings=_the_settings.DYNAMIC_SCHEDULER_TRACING, + log_base_level=_the_settings.log_level, + noisy_loggers=None, ) # SINGLETON FastAPI app From deb9fa4a5d8c55f62bf01bf209df3c87c613d7fb Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Wed, 9 Jul 2025 14:47:45 +0200 Subject: [PATCH 049/128] dynamic-sidecar --- .../core/application.py | 13 ++----------- 1 file changed, 2 insertions(+), 11 deletions(-) diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/application.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/application.py index 496c7291598..f9d294fe022 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/application.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/application.py @@ -40,7 +40,6 @@ from .settings import ApplicationSettings from .utils import volumes_fix_permissions -_LOG_LEVEL_STEP = logging.CRITICAL - logging.ERROR _NOISY_LOGGERS = ( "aio_pika", "aiormq", @@ -116,24 +115,16 @@ def compose_spec(self) -> str | None: def setup_logger(settings: ApplicationSettings): - # SEE https://github.com/ITISFoundation/osparc-simcore/issues/3148 - logging.basicConfig(level=settings.log_level) - logging.root.setLevel(settings.log_level) setup_loggers( log_format_local_dev_enabled=settings.DY_SIDECAR_LOG_FORMAT_LOCAL_DEV_ENABLED, logger_filter_mapping=settings.DY_SIDECAR_LOG_FILTER_MAPPING, tracing_settings=settings.DYNAMIC_SIDECAR_TRACING, + log_base_level=settings.log_level, + noisy_loggers=_NOISY_LOGGERS, ) def create_base_app() -> FastAPI: - # keep mostly quiet noisy loggers - quiet_level: int = max( - min(logging.root.level + _LOG_LEVEL_STEP, logging.CRITICAL), logging.WARNING - ) - for name in _NOISY_LOGGERS: - logging.getLogger(name).setLevel(quiet_level) - # settings settings = ApplicationSettings.create_from_envs() setup_logger(settings) From 
b5509d00f7090624685e8bf919153b7621e2272a Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Wed, 9 Jul 2025 14:48:04 +0200 Subject: [PATCH 050/128] efs --- .../efs-guardian/src/simcore_service_efs_guardian/main.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/services/efs-guardian/src/simcore_service_efs_guardian/main.py b/services/efs-guardian/src/simcore_service_efs_guardian/main.py index 037a7b6a181..e6aafbdf9a2 100644 --- a/services/efs-guardian/src/simcore_service_efs_guardian/main.py +++ b/services/efs-guardian/src/simcore_service_efs_guardian/main.py @@ -1,19 +1,17 @@ """Main application to be deployed by uvicorn (or equivalent) server""" -import logging - from fastapi import FastAPI from servicelib.logging_utils import setup_loggers from simcore_service_efs_guardian.core.application import create_app from simcore_service_efs_guardian.core.settings import ApplicationSettings the_settings = ApplicationSettings.create_from_envs() -logging.basicConfig(level=the_settings.log_level) -logging.root.setLevel(the_settings.log_level) setup_loggers( log_format_local_dev_enabled=the_settings.EFS_GUARDIAN_LOG_FORMAT_LOCAL_DEV_ENABLED, logger_filter_mapping=the_settings.EFS_GUARDIAN_LOG_FILTER_MAPPING, tracing_settings=the_settings.EFS_GUARDIAN_TRACING, + log_base_level=the_settings.log_level, + noisy_loggers=None, ) # SINGLETON FastAPI app From 093f4c4eb8df1bbd9a19935e6a55774f7f83645c Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Wed, 9 Jul 2025 14:48:25 +0200 Subject: [PATCH 051/128] invitations --- .../invitations/src/simcore_service_invitations/main.py | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/services/invitations/src/simcore_service_invitations/main.py b/services/invitations/src/simcore_service_invitations/main.py index 7df3563c581..c17e5a406af 100644 --- a/services/invitations/src/simcore_service_invitations/main.py +++ b/services/invitations/src/simcore_service_invitations/main.py @@ -1,7 +1,5 @@ """Main application to be deployed by uvicorn (or equivalent) server""" -import logging - from fastapi import FastAPI from servicelib.logging_utils import setup_loggers from simcore_service_invitations.core.application import create_app @@ -9,13 +7,12 @@ the_settings = ApplicationSettings.create_from_envs() -# SEE https://github.com/ITISFoundation/osparc-simcore/issues/3148 -logging.basicConfig(level=the_settings.log_level) # NOSONAR -logging.root.setLevel(the_settings.log_level) setup_loggers( log_format_local_dev_enabled=the_settings.INVITATIONS_LOG_FORMAT_LOCAL_DEV_ENABLED, logger_filter_mapping=the_settings.INVITATIONS_LOG_FILTER_MAPPING, tracing_settings=the_settings.INVITATIONS_TRACING, + log_base_level=the_settings.log_level, + noisy_loggers=None, ) # SINGLETON FastAPI app From 0f4eda17e0c8c2d188eddf076b6a3006be136c20 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Wed, 9 Jul 2025 14:48:43 +0200 Subject: [PATCH 052/128] notifications --- .../src/simcore_service_notifications/core/application.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/services/notifications/src/simcore_service_notifications/core/application.py b/services/notifications/src/simcore_service_notifications/core/application.py index 4a9515f3f96..f83ac8f528a 100644 --- a/services/notifications/src/simcore_service_notifications/core/application.py +++ 
b/services/notifications/src/simcore_service_notifications/core/application.py @@ -23,13 +23,12 @@ def _initialise_logger(settings: ApplicationSettings): - # SEE https://github.com/ITISFoundation/osparc-simcore/issues/3148 - logging.basicConfig(level=settings.LOG_LEVEL.value) # NOSONAR - logging.root.setLevel(settings.LOG_LEVEL.value) setup_loggers( log_format_local_dev_enabled=settings.NOTIFICATIONS_VOLUMES_LOG_FORMAT_LOCAL_DEV_ENABLED, logger_filter_mapping=settings.NOTIFICATIONS_VOLUMES_LOG_FILTER_MAPPING, tracing_settings=settings.NOTIFICATIONS_TRACING, + log_base_level=settings.log_level, + noisy_loggers=None, ) From e610fd1833a559202d9a0260ef7a7d6f17bdc93b Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Wed, 9 Jul 2025 14:49:04 +0200 Subject: [PATCH 053/128] payments --- services/payments/src/simcore_service_payments/main.py | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/services/payments/src/simcore_service_payments/main.py b/services/payments/src/simcore_service_payments/main.py index 0a21865e0dd..f285308c401 100644 --- a/services/payments/src/simcore_service_payments/main.py +++ b/services/payments/src/simcore_service_payments/main.py @@ -1,7 +1,5 @@ """Main application to be deployed by uvicorn (or equivalent) server""" -import logging - from fastapi import FastAPI from servicelib.logging_utils import setup_loggers from simcore_service_payments.core.application import create_app @@ -9,13 +7,12 @@ _the_settings = ApplicationSettings.create_from_envs() -# SEE https://github.com/ITISFoundation/osparc-simcore/issues/3148 -logging.basicConfig(level=_the_settings.log_level) # NOSONAR -logging.root.setLevel(_the_settings.log_level) setup_loggers( log_format_local_dev_enabled=_the_settings.PAYMENTS_LOG_FORMAT_LOCAL_DEV_ENABLED, logger_filter_mapping=_the_settings.PAYMENTS_LOG_FILTER_MAPPING, tracing_settings=_the_settings.PAYMENTS_TRACING, + log_base_level=_the_settings.log_level, + noisy_loggers=None, ) # SINGLETON FastAPI app From 3dca72a888f4a1315402b8e8322119ef10dfd12f Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Wed, 9 Jul 2025 14:49:23 +0200 Subject: [PATCH 054/128] resources --- .../src/simcore_service_resource_usage_tracker/main.py | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/main.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/main.py index 6f0ec64fb3d..6f39de0bb98 100644 --- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/main.py +++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/main.py @@ -1,7 +1,5 @@ """Main application to be deployed by uvicorn (or equivalent) server""" -import logging - from fastapi import FastAPI from servicelib.logging_utils import setup_loggers from simcore_service_resource_usage_tracker.core.application import create_app @@ -9,13 +7,12 @@ the_settings = ApplicationSettings.create_from_envs() -# SEE https://github.com/ITISFoundation/osparc-simcore/issues/3148 -logging.basicConfig(level=the_settings.log_level) # NOSONAR -logging.root.setLevel(the_settings.log_level) setup_loggers( log_format_local_dev_enabled=the_settings.RESOURCE_USAGE_TRACKER_LOG_FORMAT_LOCAL_DEV_ENABLED, logger_filter_mapping=the_settings.RESOURCE_USAGE_TRACKER_LOG_FILTER_MAPPING, tracing_settings=the_settings.RESOURCE_USAGE_TRACKER_TRACING, + 
log_base_level=the_settings.log_level, + noisy_loggers=None, ) # SINGLETON FastAPI app From 25496b17c0a8b90d0f9d747f70cfd41e47ca0c95 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Wed, 9 Jul 2025 14:49:39 +0200 Subject: [PATCH 055/128] resources --- services/storage/src/simcore_service_storage/main.py | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/services/storage/src/simcore_service_storage/main.py b/services/storage/src/simcore_service_storage/main.py index d8309302029..0e9c6fe9190 100644 --- a/services/storage/src/simcore_service_storage/main.py +++ b/services/storage/src/simcore_service_storage/main.py @@ -1,20 +1,17 @@ """Main application to be deployed in for example uvicorn.""" -import logging - from servicelib.logging_utils import setup_loggers from simcore_service_storage.core.application import create_app from simcore_service_storage.core.settings import ApplicationSettings _settings = ApplicationSettings.create_from_envs() -# SEE https://github.com/ITISFoundation/osparc-simcore/issues/3148 -logging.basicConfig(level=_settings.log_level) # NOSONAR -logging.root.setLevel(_settings.log_level) setup_loggers( log_format_local_dev_enabled=_settings.STORAGE_LOG_FORMAT_LOCAL_DEV_ENABLED, logger_filter_mapping=_settings.STORAGE_LOG_FILTER_MAPPING, tracing_settings=_settings.STORAGE_TRACING, + log_base_level=_settings.log_level, + noisy_loggers=None, ) app = create_app(_settings) From cf97990ef5995213fc03334b0cb9d520ad331941 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Wed, 9 Jul 2025 14:50:03 +0200 Subject: [PATCH 056/128] storage --- .../simcore_service_storage/modules/celery/worker_main.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/services/storage/src/simcore_service_storage/modules/celery/worker_main.py b/services/storage/src/simcore_service_storage/modules/celery/worker_main.py index d364f37e244..396ed37accf 100644 --- a/services/storage/src/simcore_service_storage/modules/celery/worker_main.py +++ b/services/storage/src/simcore_service_storage/modules/celery/worker_main.py @@ -1,6 +1,5 @@ """Main application to be deployed in for example uvicorn.""" -import logging from functools import partial from celery.signals import worker_init, worker_shutdown # type: ignore[import-untyped] @@ -18,12 +17,12 @@ _settings = ApplicationSettings.create_from_envs() -logging.basicConfig(level=_settings.log_level) # NOSONAR -logging.root.setLevel(_settings.log_level) setup_loggers( log_format_local_dev_enabled=_settings.STORAGE_LOG_FORMAT_LOCAL_DEV_ENABLED, logger_filter_mapping=_settings.STORAGE_LOG_FILTER_MAPPING, tracing_settings=_settings.STORAGE_TRACING, + log_base_level=_settings.log_level, + noisy_loggers=None, ) From 5389664a91d9284a23249c43241df87606bbefec Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Wed, 9 Jul 2025 14:53:08 +0200 Subject: [PATCH 057/128] noisy --- .../core/application.py | 15 --------------- .../src/simcore_service_datcore_adapter/main.py | 10 +++++++++- .../core/application.py | 15 --------------- .../src/simcore_service_director/main.py | 10 +++++++++- .../simcore_service_storage/core/application.py | 17 ----------------- .../storage/src/simcore_service_storage/main.py | 14 +++++++++++++- 6 files changed, 31 insertions(+), 50 deletions(-) diff --git a/services/datcore-adapter/src/simcore_service_datcore_adapter/core/application.py 
b/services/datcore-adapter/src/simcore_service_datcore_adapter/core/application.py index 50fd0b8b888..6f56d06f26a 100644 --- a/services/datcore-adapter/src/simcore_service_datcore_adapter/core/application.py +++ b/services/datcore-adapter/src/simcore_service_datcore_adapter/core/application.py @@ -27,25 +27,10 @@ ) from .settings import ApplicationSettings -LOG_LEVEL_STEP = logging.CRITICAL - logging.ERROR -NOISY_LOGGERS = ( - "aiocache", - "botocore", - "hpack", -) - _logger = logging.getLogger(__name__) def create_app(settings: ApplicationSettings) -> FastAPI: - # keep mostly quiet noisy loggers - quiet_level: int = max( - min(logging.root.level + LOG_LEVEL_STEP, logging.CRITICAL), logging.WARNING - ) - - for name in NOISY_LOGGERS: - logging.getLogger(name).setLevel(quiet_level) - _logger.debug("App settings:\n%s", settings.model_dump_json(indent=1)) app = FastAPI( diff --git a/services/datcore-adapter/src/simcore_service_datcore_adapter/main.py b/services/datcore-adapter/src/simcore_service_datcore_adapter/main.py index 8178e69ae26..88346d657c5 100644 --- a/services/datcore-adapter/src/simcore_service_datcore_adapter/main.py +++ b/services/datcore-adapter/src/simcore_service_datcore_adapter/main.py @@ -1,10 +1,18 @@ """Main application to be deployed in for example uvicorn""" +from typing import Final + from fastapi import FastAPI from servicelib.logging_utils import setup_loggers from simcore_service_datcore_adapter.core.application import create_app from simcore_service_datcore_adapter.core.settings import ApplicationSettings +NOISY_LOGGERS: Final[tuple[str, ...]] = ( + "aiocache", + "botocore", + "hpack", +) + _the_settings = ApplicationSettings.create_from_envs() setup_loggers( @@ -12,7 +20,7 @@ logger_filter_mapping=_the_settings.DATCORE_ADAPTER_LOG_FILTER_MAPPING, tracing_settings=_the_settings.DATCORE_ADAPTER_TRACING, log_base_level=_the_settings.log_level, - noisy_loggers=None, + noisy_loggers=NOISY_LOGGERS, ) # SINGLETON FastAPI app diff --git a/services/director/src/simcore_service_director/core/application.py b/services/director/src/simcore_service_director/core/application.py index e20c59554d1..1ffd40721a8 100644 --- a/services/director/src/simcore_service_director/core/application.py +++ b/services/director/src/simcore_service_director/core/application.py @@ -1,5 +1,4 @@ import logging -from typing import Final from fastapi import FastAPI from servicelib.async_utils import cancel_sequential_workers @@ -22,24 +21,10 @@ from ..registry_proxy import setup as setup_registry from .settings import ApplicationSettings -_LOG_LEVEL_STEP = logging.CRITICAL - logging.ERROR -_NOISY_LOGGERS: Final[tuple[str, ...]] = ( - "httpcore", - "httpx", - "werkzeug", -) - _logger = logging.getLogger(__name__) def create_app(settings: ApplicationSettings) -> FastAPI: - # keep mostly quiet noisy loggers - quiet_level: int = max( - min(logging.root.level + _LOG_LEVEL_STEP, logging.CRITICAL), logging.WARNING - ) - for name in _NOISY_LOGGERS: - logging.getLogger(name).setLevel(quiet_level) - _logger.info("app settings: %s", settings.model_dump_json(indent=1)) app = FastAPI( diff --git a/services/director/src/simcore_service_director/main.py b/services/director/src/simcore_service_director/main.py index dbf61c016ab..b750df8a080 100644 --- a/services/director/src/simcore_service_director/main.py +++ b/services/director/src/simcore_service_director/main.py @@ -1,10 +1,18 @@ """Main application to be deployed by uvicorn (or equivalent) server""" +from typing import Final + from fastapi import FastAPI from 
servicelib.logging_utils import setup_loggers from simcore_service_director.core.application import create_app from simcore_service_director.core.settings import ApplicationSettings +_NOISY_LOGGERS: Final[tuple[str, ...]] = ( + "httpcore", + "httpx", + "werkzeug", +) + _the_settings = ApplicationSettings.create_from_envs() setup_loggers( @@ -12,7 +20,7 @@ logger_filter_mapping=_the_settings.DIRECTOR_LOG_FILTER_MAPPING, tracing_settings=_the_settings.DIRECTOR_TRACING, log_base_level=_the_settings.log_level, - noisy_loggers=None, + noisy_loggers=_NOISY_LOGGERS, ) # SINGLETON FastAPI app diff --git a/services/storage/src/simcore_service_storage/core/application.py b/services/storage/src/simcore_service_storage/core/application.py index 2a1fe8246fb..14705995680 100644 --- a/services/storage/src/simcore_service_storage/core/application.py +++ b/services/storage/src/simcore_service_storage/core/application.py @@ -44,27 +44,10 @@ from ..modules.s3 import setup_s3 from .settings import ApplicationSettings -_LOG_LEVEL_STEP = logging.CRITICAL - logging.ERROR -_NOISY_LOGGERS = ( - "aio_pika", - "aiobotocore", - "aiormq", - "botocore", - "httpcore", - "urllib3", - "werkzeug", -) _logger = logging.getLogger(__name__) def create_app(settings: ApplicationSettings) -> FastAPI: # noqa: C901 - # keep mostly quiet noisy loggers - quiet_level: int = max( - min(logging.root.level + _LOG_LEVEL_STEP, logging.CRITICAL), logging.WARNING - ) - for name in _NOISY_LOGGERS: - logging.getLogger(name).setLevel(quiet_level) - _logger.info("app settings: %s", settings.model_dump_json(indent=1)) app = FastAPI( diff --git a/services/storage/src/simcore_service_storage/main.py b/services/storage/src/simcore_service_storage/main.py index 0e9c6fe9190..0841df24105 100644 --- a/services/storage/src/simcore_service_storage/main.py +++ b/services/storage/src/simcore_service_storage/main.py @@ -1,9 +1,21 @@ """Main application to be deployed in for example uvicorn.""" +from typing import Final + from servicelib.logging_utils import setup_loggers from simcore_service_storage.core.application import create_app from simcore_service_storage.core.settings import ApplicationSettings +_NOISY_LOGGERS: Final[tuple[str, ...]] = ( + "aio_pika", + "aiobotocore", + "aiormq", + "botocore", + "httpcore", + "urllib3", + "werkzeug", +) + _settings = ApplicationSettings.create_from_envs() setup_loggers( @@ -11,7 +23,7 @@ logger_filter_mapping=_settings.STORAGE_LOG_FILTER_MAPPING, tracing_settings=_settings.STORAGE_TRACING, log_base_level=_settings.log_level, - noisy_loggers=None, + noisy_loggers=_NOISY_LOGGERS, ) app = create_app(_settings) From 020a1d72bd3e0dbe77f0d8fddac11c5e27088823 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Wed, 9 Jul 2025 15:25:12 +0200 Subject: [PATCH 058/128] ensure the correct loglevel is returned --- .../src/simcore_service_autoscaling/core/settings.py | 8 ++++---- .../simcore_service_clusters_keeper/core/settings.py | 5 +++-- .../simcore_service_clusters_keeper/utils/clusters.py | 2 +- .../src/simcore_service_director/core/settings.py | 11 ++++++----- .../src/simcore_service_efs_guardian/core/settings.py | 5 +++-- .../src/simcore_service_invitations/core/settings.py | 7 ++++--- .../src/simcore_service_payments/core/settings.py | 7 ++++--- .../core/settings.py | 6 ++++-- .../simcore_service_webserver/application_settings.py | 5 +++-- 9 files changed, 32 insertions(+), 24 deletions(-) diff --git a/services/autoscaling/src/simcore_service_autoscaling/core/settings.py 
b/services/autoscaling/src/simcore_service_autoscaling/core/settings.py index ff67aeeaab1..f39f7b0208b 100644 --- a/services/autoscaling/src/simcore_service_autoscaling/core/settings.py +++ b/services/autoscaling/src/simcore_service_autoscaling/core/settings.py @@ -17,6 +17,7 @@ model_validator, ) from pydantic_settings import SettingsConfigDict +from servicelib.logging_utils import LogLevelInt from servicelib.logging_utils_filtering import LoggerName, MessageSubstring from settings_library.application import BaseApplicationSettings from settings_library.base import BaseCustomSettings @@ -34,8 +35,7 @@ AUTOSCALING_ENV_PREFIX: Final[str] = "AUTOSCALING_" -class AutoscalingSSMSettings(SSMSettings): - ... +class AutoscalingSSMSettings(SSMSettings): ... class AutoscalingEC2Settings(EC2Settings): @@ -361,8 +361,8 @@ class ApplicationSettings(BaseApplicationSettings, MixinLoggingSettings): ] = False @cached_property - def LOG_LEVEL(self): # noqa: N802 - return self.AUTOSCALING_LOGLEVEL + def log_level(self) -> LogLevelInt: + return cast(LogLevelInt, self.AUTOSCALING_LOGLEVEL) @field_validator("AUTOSCALING_LOGLEVEL", mode="before") @classmethod diff --git a/services/clusters-keeper/src/simcore_service_clusters_keeper/core/settings.py b/services/clusters-keeper/src/simcore_service_clusters_keeper/core/settings.py index 525148fa257..2fde9fcbcd8 100644 --- a/services/clusters-keeper/src/simcore_service_clusters_keeper/core/settings.py +++ b/services/clusters-keeper/src/simcore_service_clusters_keeper/core/settings.py @@ -23,6 +23,7 @@ field_validator, ) from pydantic_settings import SettingsConfigDict +from servicelib.logging_utils import LogLevelInt from servicelib.logging_utils_filtering import LoggerName, MessageSubstring from settings_library.base import BaseCustomSettings from settings_library.docker_registry import RegistrySettings @@ -454,8 +455,8 @@ class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings): ] @cached_property - def LOG_LEVEL(self) -> LogLevel: # noqa: N802 - return self.CLUSTERS_KEEPER_LOGLEVEL + def log_level(self) -> LogLevelInt: + return cast(LogLevelInt, self.CLUSTERS_KEEPER_LOGLEVEL) @field_validator("CLUSTERS_KEEPER_LOGLEVEL", mode="before") @classmethod diff --git a/services/clusters-keeper/src/simcore_service_clusters_keeper/utils/clusters.py b/services/clusters-keeper/src/simcore_service_clusters_keeper/utils/clusters.py index d2820ef2b88..81bd7279399 100644 --- a/services/clusters-keeper/src/simcore_service_clusters_keeper/utils/clusters.py +++ b/services/clusters-keeper/src/simcore_service_clusters_keeper/utils/clusters.py @@ -96,7 +96,7 @@ def _convert_to_env_dict(entries: dict[str, Any]) -> str: f"DASK_WORKER_SATURATION={app_settings.CLUSTERS_KEEPER_DASK_WORKER_SATURATION}", f"DOCKER_IMAGE_TAG={app_settings.CLUSTERS_KEEPER_COMPUTATIONAL_BACKEND_DOCKER_IMAGE_TAG}", f"EC2_INSTANCES_NAME_PREFIX={cluster_machines_name_prefix}", - f"LOG_LEVEL={app_settings.LOG_LEVEL}", + f"LOG_LEVEL={app_settings.log_level}", f"WORKERS_EC2_INSTANCES_ALLOWED_TYPES={_convert_to_env_dict(app_settings.CLUSTERS_KEEPER_WORKERS_EC2_INSTANCES.WORKERS_EC2_INSTANCES_ALLOWED_TYPES)}", f"WORKERS_EC2_INSTANCES_CUSTOM_TAGS={_convert_to_env_dict(app_settings.CLUSTERS_KEEPER_WORKERS_EC2_INSTANCES.WORKERS_EC2_INSTANCES_CUSTOM_TAGS | additional_custom_tags)}", f"WORKERS_EC2_INSTANCES_KEY_NAME={app_settings.CLUSTERS_KEEPER_WORKERS_EC2_INSTANCES.WORKERS_EC2_INSTANCES_KEY_NAME}", diff --git a/services/director/src/simcore_service_director/core/settings.py 
b/services/director/src/simcore_service_director/core/settings.py index 5560de876fa..01d37ee2989 100644 --- a/services/director/src/simcore_service_director/core/settings.py +++ b/services/director/src/simcore_service_director/core/settings.py @@ -1,5 +1,6 @@ import datetime import warnings +from functools import cached_property from typing import cast from fastapi import FastAPI @@ -27,7 +28,7 @@ class ApplicationSettings(BaseApplicationSettings, MixinLoggingSettings): ) DIRECTOR_REMOTE_DEBUG_PORT: PortInt = 3000 - DIRECTOR_LOGLEVEL: LogLevel = Field( + DIRECTOR_LOG_LEVEL: LogLevel = Field( ..., validation_alias=AliasChoices("DIRECTOR_LOGLEVEL", "LOG_LEVEL", "LOGLEVEL") ) DIRECTOR_LOG_FORMAT_LOCAL_DEV_ENABLED: bool = Field( @@ -143,10 +144,10 @@ def _validate_substitutions(cls, v): return v - @field_validator("DIRECTOR_LOGLEVEL", mode="before") - @classmethod - def _valid_log_level(cls, value: str) -> str: - return cls.validate_log_level(value) + @cached_property + def log_level(self) -> LogLevel: + """override""" + return self.DIRECTOR_LOG_LEVEL def get_application_settings(app: FastAPI) -> ApplicationSettings: diff --git a/services/efs-guardian/src/simcore_service_efs_guardian/core/settings.py b/services/efs-guardian/src/simcore_service_efs_guardian/core/settings.py index ab5377a82d3..cda71422f9e 100644 --- a/services/efs-guardian/src/simcore_service_efs_guardian/core/settings.py +++ b/services/efs-guardian/src/simcore_service_efs_guardian/core/settings.py @@ -6,6 +6,7 @@ from fastapi import FastAPI from models_library.basic_types import LogLevel, VersionTag from pydantic import AliasChoices, ByteSize, Field, TypeAdapter, field_validator +from servicelib.logging_utils import LogLevelInt from servicelib.logging_utils_filtering import LoggerName, MessageSubstring from settings_library.application import BaseApplicationSettings from settings_library.efs import AwsEfsSettings @@ -116,8 +117,8 @@ class ApplicationSettings(BaseApplicationSettings, MixinLoggingSettings): ] @cached_property - def LOG_LEVEL(self) -> LogLevel: # noqa: N802 - return self.EFS_GUARDIAN_LOGLEVEL + def log_level(self) -> LogLevelInt: + return cast(LogLevelInt, self.EFS_GUARDIAN_LOGLEVEL) @field_validator("EFS_GUARDIAN_LOGLEVEL", mode="before") @classmethod diff --git a/services/invitations/src/simcore_service_invitations/core/settings.py b/services/invitations/src/simcore_service_invitations/core/settings.py index 2df10527929..44ab579cbc7 100644 --- a/services/invitations/src/simcore_service_invitations/core/settings.py +++ b/services/invitations/src/simcore_service_invitations/core/settings.py @@ -1,9 +1,10 @@ from functools import cached_property -from typing import Annotated +from typing import Annotated, cast from common_library.basic_types import DEFAULT_FACTORY from models_library.products import ProductName from pydantic import AliasChoices, Field, HttpUrl, SecretStr, field_validator +from servicelib.logging_utils import LogLevelInt from servicelib.logging_utils_filtering import LoggerName, MessageSubstring from settings_library.application import BaseApplicationSettings from settings_library.basic_types import LogLevel, VersionTag @@ -55,8 +56,8 @@ class _BaseApplicationSettings(BaseApplicationSettings, MixinLoggingSettings): ] = DEFAULT_FACTORY @cached_property - def LOG_LEVEL(self): - return self.INVITATIONS_LOGLEVEL + def log_level(self) -> LogLevelInt: + return cast(LogLevelInt, self.INVITATIONS_LOGLEVEL) @field_validator("INVITATIONS_LOGLEVEL", mode="before") @classmethod diff --git 
a/services/payments/src/simcore_service_payments/core/settings.py b/services/payments/src/simcore_service_payments/core/settings.py index 5d9c69d861b..67932ac466d 100644 --- a/services/payments/src/simcore_service_payments/core/settings.py +++ b/services/payments/src/simcore_service_payments/core/settings.py @@ -1,6 +1,6 @@ from decimal import Decimal from functools import cached_property -from typing import Annotated +from typing import Annotated, cast from common_library.basic_types import DEFAULT_FACTORY from models_library.basic_types import NonNegativeDecimal @@ -14,6 +14,7 @@ TypeAdapter, field_validator, ) +from servicelib.logging_utils import LogLevelInt from servicelib.logging_utils_filtering import LoggerName, MessageSubstring from settings_library.application import BaseApplicationSettings from settings_library.basic_types import LogLevel, VersionTag @@ -66,8 +67,8 @@ class _BaseApplicationSettings(BaseApplicationSettings, MixinLoggingSettings): ] = DEFAULT_FACTORY @cached_property - def LOG_LEVEL(self): # noqa: N802 - return self.PAYMENTS_LOGLEVEL + def log_level(self) -> LogLevelInt: + return cast(LogLevelInt, self.PAYMENTS_LOGLEVEL) @field_validator("PAYMENTS_LOGLEVEL", mode="before") @classmethod diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/core/settings.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/core/settings.py index 3a534b692dc..3590c3847b9 100644 --- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/core/settings.py +++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/core/settings.py @@ -1,8 +1,10 @@ import datetime from functools import cached_property +from typing import cast from models_library.basic_types import BootModeEnum from pydantic import AliasChoices, Field, PositiveInt, field_validator +from servicelib.logging_utils import LogLevelInt from servicelib.logging_utils_filtering import LoggerName, MessageSubstring from settings_library.base import BaseCustomSettings from settings_library.basic_types import BuildTargetEnum, LogLevel, VersionTag @@ -76,8 +78,8 @@ class _BaseApplicationSettings(BaseCustomSettings, MixinLoggingSettings): ) @cached_property - def LOG_LEVEL(self) -> LogLevel: # noqa: N802 - return self.RESOURCE_USAGE_TRACKER_LOGLEVEL + def log_level(self) -> LogLevelInt: + return cast(LogLevelInt, self.RESOURCE_USAGE_TRACKER_LOGLEVEL) @field_validator("RESOURCE_USAGE_TRACKER_LOGLEVEL", mode="before") @classmethod diff --git a/services/web/server/src/simcore_service_webserver/application_settings.py b/services/web/server/src/simcore_service_webserver/application_settings.py index 72aefe6a363..af7c4c79ed8 100644 --- a/services/web/server/src/simcore_service_webserver/application_settings.py +++ b/services/web/server/src/simcore_service_webserver/application_settings.py @@ -15,6 +15,7 @@ model_validator, ) from pydantic.fields import Field +from servicelib.logging_utils import LogLevelInt from servicelib.logging_utils_filtering import LoggerName, MessageSubstring from settings_library.application import BaseApplicationSettings from settings_library.email import SMTPSettings @@ -462,8 +463,8 @@ def _get_healthcheck_timeout_in_seconds(cls, v): # HELPERS -------------------------------------------------------- @cached_property - def log_level(self) -> int: - level: int = getattr(logging, self.WEBSERVER_LOGLEVEL.upper()) + def log_level(self) -> LogLevelInt: + level: LogLevelInt = getattr(logging, 
self.WEBSERVER_LOGLEVEL.upper()) return level def is_enabled(self, field_name: str) -> bool: From 4871eb457cd02c106e38b0e8793807321c374fc3 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Wed, 9 Jul 2025 15:45:20 +0200 Subject: [PATCH 059/128] re working application startup --- services/catalog/docker/boot.sh | 5 ++-- .../core/application.py | 19 +++++++++++++ .../src/simcore_service_catalog/main.py | 28 +------------------ 3 files changed, 23 insertions(+), 29 deletions(-) diff --git a/services/catalog/docker/boot.sh b/services/catalog/docker/boot.sh index 3efc6789681..2fc25d69e1d 100755 --- a/services/catalog/docker/boot.sh +++ b/services/catalog/docker/boot.sh @@ -44,7 +44,8 @@ if [ "${SC_BOOT_MODE}" = "debug" ]; then exec sh -c " cd services/catalog/src/simcore_service_catalog && \ - python -Xfrozen_modules=off -m debugpy --listen 0.0.0.0:${CATALOG_REMOTE_DEBUGGING_PORT} -m uvicorn main:the_app \ + python -Xfrozen_modules=off -m debugpy --listen 0.0.0.0:${CATALOG_REMOTE_DEBUGGING_PORT} -m \ + uvicorn --factory main:create_app \ --host 0.0.0.0 \ --reload \ $reload_dir_packages \ @@ -52,7 +53,7 @@ if [ "${SC_BOOT_MODE}" = "debug" ]; then --log-level \"${SERVER_LOG_LEVEL}\" " else - exec uvicorn simcore_service_catalog.main:the_app \ + exec uvicorn --factory simcore_service_catalog.main:create_app \ --host 0.0.0.0 \ --log-level "${SERVER_LOG_LEVEL}" fi diff --git a/services/catalog/src/simcore_service_catalog/core/application.py b/services/catalog/src/simcore_service_catalog/core/application.py index b06ed4dd457..f87990c2d12 100644 --- a/services/catalog/src/simcore_service_catalog/core/application.py +++ b/services/catalog/src/simcore_service_catalog/core/application.py @@ -1,4 +1,5 @@ import logging +from typing import Final from fastapi import FastAPI from fastapi.middleware.gzip import GZipMiddleware @@ -12,6 +13,7 @@ initialize_fastapi_app_tracing, setup_tracing, ) +from servicelib.logging_utils import setup_loggers from starlette.middleware.base import BaseHTTPMiddleware from .._meta import ( @@ -27,11 +29,28 @@ _logger = logging.getLogger(__name__) +_NOISY_LOGGERS: Final[tuple[str, ...]] = ( + "aio_pika", + "aiobotocore", + "aiormq", + "botocore", + "httpcore", + "werkzeug", +) + def create_app() -> FastAPI: settings = ApplicationSettings.create_from_envs() _logger.debug(settings.model_dump_json(indent=2)) + setup_loggers( + log_format_local_dev_enabled=settings.CATALOG_LOG_FORMAT_LOCAL_DEV_ENABLED, + logger_filter_mapping=settings.CATALOG_LOG_FILTER_MAPPING, + tracing_settings=settings.CATALOG_TRACING, + log_base_level=settings.log_level, + noisy_loggers=_NOISY_LOGGERS, + ) + app = FastAPI( debug=settings.SC_BOOT_MODE in [BootModeEnum.DEBUG, BootModeEnum.DEVELOPMENT, BootModeEnum.LOCAL], diff --git a/services/catalog/src/simcore_service_catalog/main.py b/services/catalog/src/simcore_service_catalog/main.py index 65a96e09808..04b35d01067 100644 --- a/services/catalog/src/simcore_service_catalog/main.py +++ b/services/catalog/src/simcore_service_catalog/main.py @@ -2,30 +2,4 @@ from typing import Final -from fastapi import FastAPI -from servicelib.logging_utils import setup_loggers -from simcore_service_catalog.core.application import create_app -from simcore_service_catalog.core.settings import ApplicationSettings - -_the_settings = ApplicationSettings.create_from_envs() - -_NOISY_LOGGERS: Final[tuple[str, ...]] = ( - "aio_pika", - "aiobotocore", - "aiormq", - "botocore", - "httpcore", - "werkzeug", -) - -setup_loggers( - 
log_format_local_dev_enabled=_the_settings.CATALOG_LOG_FORMAT_LOCAL_DEV_ENABLED, - logger_filter_mapping=_the_settings.CATALOG_LOG_FILTER_MAPPING, - tracing_settings=_the_settings.CATALOG_TRACING, - log_base_level=_the_settings.log_level, - noisy_loggers=_NOISY_LOGGERS, -) - - -# SINGLETON FastAPI app -the_app: FastAPI = create_app() +__all__: Final[tuple[str, ...]] = ("create_app",) From 8d8649deb373224208cbcd61525e2c4829060dc7 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Wed, 9 Jul 2025 15:46:32 +0200 Subject: [PATCH 060/128] try removing stuff --- .../tests/unit/with_dbs/04/studies_dispatcher/conftest.py | 6 ------ 1 file changed, 6 deletions(-) diff --git a/services/web/server/tests/unit/with_dbs/04/studies_dispatcher/conftest.py b/services/web/server/tests/unit/with_dbs/04/studies_dispatcher/conftest.py index 63f697c3ad2..1b4b8e20ff2 100644 --- a/services/web/server/tests/unit/with_dbs/04/studies_dispatcher/conftest.py +++ b/services/web/server/tests/unit/with_dbs/04/studies_dispatcher/conftest.py @@ -4,12 +4,9 @@ # pylint: disable=too-many-arguments -import logging - import pytest from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict -from simcore_service_webserver.log import setup_logging from simcore_service_webserver.studies_dispatcher.settings import ( StudiesDispatcherSettings, ) @@ -50,9 +47,6 @@ def app_environment(app_environment: EnvVarsDict, monkeypatch: pytest.MonkeyPatc }, ) - # NOTE: To see logs, use pytest -s --log-cli-level=DEBUG - setup_logging(level=logging.DEBUG) - plugin_settings = StudiesDispatcherSettings.create_from_envs() print(plugin_settings.model_dump_json(indent=1)) From 1f42defffd25db9ba1ea1097e00c6293f82f7131 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Wed, 9 Jul 2025 15:50:30 +0200 Subject: [PATCH 061/128] mypy --- packages/service-library/src/servicelib/logging_utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/service-library/src/servicelib/logging_utils.py b/packages/service-library/src/servicelib/logging_utils.py index 125ff517343..4f2e3eb0da6 100644 --- a/packages/service-library/src/servicelib/logging_utils.py +++ b/packages/service-library/src/servicelib/logging_utils.py @@ -289,7 +289,7 @@ async def setup_async_loggers_lifespan( ) # Set up async logging infrastructure - log_queue = queue.Queue() + log_queue: queue.Queue[logging.LogRecord] = queue.Queue() # Create handler with proper formatting handler = logging.StreamHandler() handler.setFormatter( From 7e375ddd7ab8051a24cf40909ec1aa707a166578 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Wed, 9 Jul 2025 15:52:53 +0200 Subject: [PATCH 062/128] ensure create_app_ is not removed --- services/catalog/docker/boot.sh | 6 ++++-- services/catalog/src/simcore_service_catalog/main.py | 3 +++ 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/services/catalog/docker/boot.sh b/services/catalog/docker/boot.sh index 2fc25d69e1d..b404e640d4b 100755 --- a/services/catalog/docker/boot.sh +++ b/services/catalog/docker/boot.sh @@ -45,7 +45,8 @@ if [ "${SC_BOOT_MODE}" = "debug" ]; then exec sh -c " cd services/catalog/src/simcore_service_catalog && \ python -Xfrozen_modules=off -m debugpy --listen 0.0.0.0:${CATALOG_REMOTE_DEBUGGING_PORT} -m \ - uvicorn --factory main:create_app \ + uvicorn \ + --factory main:create_app \ --host 0.0.0.0 \ --reload \ 
$reload_dir_packages \ @@ -53,7 +54,8 @@ if [ "${SC_BOOT_MODE}" = "debug" ]; then --log-level \"${SERVER_LOG_LEVEL}\" " else - exec uvicorn --factory simcore_service_catalog.main:create_app \ + exec uvicorn \ + --factory simcore_service_catalog.main:create_app \ --host 0.0.0.0 \ --log-level "${SERVER_LOG_LEVEL}" fi diff --git a/services/catalog/src/simcore_service_catalog/main.py b/services/catalog/src/simcore_service_catalog/main.py index 04b35d01067..370e42d1418 100644 --- a/services/catalog/src/simcore_service_catalog/main.py +++ b/services/catalog/src/simcore_service_catalog/main.py @@ -2,4 +2,7 @@ from typing import Final +from simcore_service_catalog.core.application import create_app + +assert create_app # nosec __all__: Final[tuple[str, ...]] = ("create_app",) From 4b6105add45cd8ebb06f8f7a79a5ce2f777aca6e Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Wed, 9 Jul 2025 18:54:50 +0200 Subject: [PATCH 063/128] change entrypoint --- services/agent/docker/boot.sh | 4 +++- services/api-server/docker/boot.sh | 4 +++- services/autoscaling/docker/boot.sh | 4 +++- services/clusters-keeper/docker/boot.sh | 4 +++- services/datcore-adapter/docker/boot.sh | 4 +++- services/dynamic-scheduler/docker/boot.sh | 4 +++- services/dynamic-sidecar/docker/boot.sh | 4 +++- services/efs-guardian/docker/boot.sh | 4 +++- services/invitations/docker/boot.sh | 4 +++- services/notifications/docker/boot.sh | 4 +++- services/payments/docker/boot.sh | 4 +++- services/resource-usage-tracker/docker/boot.sh | 4 +++- 12 files changed, 36 insertions(+), 12 deletions(-) diff --git a/services/agent/docker/boot.sh b/services/agent/docker/boot.sh index b972c9dfd78..fa0960b2f0b 100755 --- a/services/agent/docker/boot.sh +++ b/services/agent/docker/boot.sh @@ -51,7 +51,9 @@ if [ "${SC_BOOT_MODE}" = "debug" ]; then reload_dir_packages=$(fdfind src /devel/packages --exec echo '--reload-dir {} ' | tr '\n' ' ') exec sh -c " cd services/agent/src/simcore_service_agent && \ - python -Xfrozen_modules=off -m debugpy --listen 0.0.0.0:${AGENT_SERVER_REMOTE_DEBUG_PORT} -m uvicorn main:the_app \ + python -Xfrozen_modules=off -m debugpy --listen 0.0.0.0:${AGENT_SERVER_REMOTE_DEBUG_PORT} -m \ + uvicorn \ + --factory main:create_app \ --host 0.0.0.0 \ --port 8000 \ --reload \ diff --git a/services/api-server/docker/boot.sh b/services/api-server/docker/boot.sh index 1e41a3f8e13..d54168fba2c 100755 --- a/services/api-server/docker/boot.sh +++ b/services/api-server/docker/boot.sh @@ -44,7 +44,9 @@ if [ "${SC_BOOT_MODE}" = "debug" ]; then exec sh -c " cd services/api-server/src/simcore_service_api_server && \ - python -Xfrozen_modules=off -m debugpy --listen 0.0.0.0:${API_SERVER_REMOTE_DEBUG_PORT} -m uvicorn main:the_app \ + python -Xfrozen_modules=off -m debugpy --listen 0.0.0.0:${API_SERVER_REMOTE_DEBUG_PORT} -m \ + uvicorn \ + --factory main:create_app \ --host 0.0.0.0 \ --reload \ $reload_dir_packages \ diff --git a/services/autoscaling/docker/boot.sh b/services/autoscaling/docker/boot.sh index 79f86d977af..34227db5208 100755 --- a/services/autoscaling/docker/boot.sh +++ b/services/autoscaling/docker/boot.sh @@ -51,7 +51,9 @@ if [ "${SC_BOOT_MODE}" = "debug" ]; then exec sh -c " cd services/autoscaling/src/simcore_service_autoscaling && \ - python -Xfrozen_modules=off -m debugpy --listen 0.0.0.0:${AUTOSCALING_REMOTE_DEBUGGING_PORT} -m uvicorn main:the_app \ + python -Xfrozen_modules=off -m debugpy --listen 0.0.0.0:${AUTOSCALING_REMOTE_DEBUGGING_PORT} -m \ + uvicorn \ + --factory main:create_app \ 
--host 0.0.0.0 \ --reload \ $reload_dir_packages \ diff --git a/services/clusters-keeper/docker/boot.sh b/services/clusters-keeper/docker/boot.sh index 562dcc89f14..0c094e1e8bf 100755 --- a/services/clusters-keeper/docker/boot.sh +++ b/services/clusters-keeper/docker/boot.sh @@ -52,7 +52,9 @@ if [ "${SC_BOOT_MODE}" = "debug" ]; then exec sh -c " cd services/clusters-keeper/src/simcore_service_clusters_keeper && \ - python -Xfrozen_modules=off -m debugpy --listen 0.0.0.0:${CLUSTERS_KEEPER_REMOTE_DEBUGGING_PORT} -m uvicorn main:the_app \ + python -Xfrozen_modules=off -m debugpy --listen 0.0.0.0:${CLUSTERS_KEEPER_REMOTE_DEBUGGING_PORT} -m \ + uvicorn \ + --factory main:create_app \ --host 0.0.0.0 \ --reload \ $reload_dir_packages \ diff --git a/services/datcore-adapter/docker/boot.sh b/services/datcore-adapter/docker/boot.sh index ca19372d572..e5062355750 100755 --- a/services/datcore-adapter/docker/boot.sh +++ b/services/datcore-adapter/docker/boot.sh @@ -49,7 +49,9 @@ if [ "${SC_BOOT_MODE}" = "debug" ]; then exec sh -c " cd services/datcore-adapter/src/simcore_service_datcore_adapter && \ - python -Xfrozen_modules=off -m debugpy --listen 0.0.0.0:${DATCORE_ADAPTER_REMOTE_DEBUG_PORT} -m uvicorn main:the_app \ + python -Xfrozen_modules=off -m debugpy --listen 0.0.0.0:${DATCORE_ADAPTER_REMOTE_DEBUG_PORT} -m \ + uvicorn \ + --factory main:create_app \ --host 0.0.0.0 \ --reload \ $reload_dir_packages \ diff --git a/services/dynamic-scheduler/docker/boot.sh b/services/dynamic-scheduler/docker/boot.sh index 89e85223aa6..1c13c2fbf12 100755 --- a/services/dynamic-scheduler/docker/boot.sh +++ b/services/dynamic-scheduler/docker/boot.sh @@ -52,7 +52,9 @@ if [ "${SC_BOOT_MODE}" = "debug" ]; then exec sh -c " cd services/dynamic-scheduler/src/simcore_service_dynamic_scheduler && \ - python -Xfrozen_modules=off -m debugpy --listen 0.0.0.0:${DYNAMIC_SCHEDULER_REMOTE_DEBUGGING_PORT} -m uvicorn main:the_app \ + python -Xfrozen_modules=off -m debugpy --listen 0.0.0.0:${DYNAMIC_SCHEDULER_REMOTE_DEBUGGING_PORT} -m \ + uvicorn \ + --factory main:create_app \ --host 0.0.0.0 \ --reload \ $reload_dir_packages \ diff --git a/services/dynamic-sidecar/docker/boot.sh b/services/dynamic-sidecar/docker/boot.sh index cb209e00cc9..dcb8c4efc8b 100755 --- a/services/dynamic-sidecar/docker/boot.sh +++ b/services/dynamic-sidecar/docker/boot.sh @@ -53,7 +53,9 @@ if [ "${SC_BOOT_MODE}" = "debug" ]; then exec sh -c " cd services/dynamic-sidecar/src/simcore_service_dynamic_sidecar && \ - python -Xfrozen_modules=off -m debugpy --listen 0.0.0.0:${DYNAMIC_SIDECAR_REMOTE_DEBUGGING_PORT} -m uvicorn main:the_app \ + python -Xfrozen_modules=off -m debugpy --listen 0.0.0.0:${DYNAMIC_SIDECAR_REMOTE_DEBUGGING_PORT} -m \ + uvicorn \ + --factory main:create_app \ --host 0.0.0.0 \ --reload \ $reload_dir_packages \ diff --git a/services/efs-guardian/docker/boot.sh b/services/efs-guardian/docker/boot.sh index 317ee312459..c4025da59b8 100755 --- a/services/efs-guardian/docker/boot.sh +++ b/services/efs-guardian/docker/boot.sh @@ -52,7 +52,9 @@ if [ "${SC_BOOT_MODE}" = "debug" ]; then exec sh -c " cd services/efs-guardian/src/simcore_service_efs_guardian && \ - python -Xfrozen_modules=off -m debugpy --listen 0.0.0.0:${EFS_GUARDIAN_REMOTE_DEBUGGING_PORT} -m uvicorn main:the_app \ + python -Xfrozen_modules=off -m debugpy --listen 0.0.0.0:${EFS_GUARDIAN_REMOTE_DEBUGGING_PORT} -m \ + uvicorn \ + --factory main:create_app \ --host 0.0.0.0 \ --reload \ $reload_dir_packages \ diff --git a/services/invitations/docker/boot.sh 
b/services/invitations/docker/boot.sh index 99afa6e1a75..e0a0c9d0b7a 100755 --- a/services/invitations/docker/boot.sh +++ b/services/invitations/docker/boot.sh @@ -52,7 +52,9 @@ if [ "${SC_BOOT_MODE}" = "debug" ]; then exec sh -c " cd services/invitations/src/simcore_service_invitations && \ - python -Xfrozen_modules=off -m debugpy --listen 0.0.0.0:${INVITATIONS_REMOTE_DEBUGGING_PORT} -m uvicorn main:the_app \ + python -Xfrozen_modules=off -m debugpy --listen 0.0.0.0:${INVITATIONS_REMOTE_DEBUGGING_PORT} -m \ + uvicorn \ + --factory main:create_app \ --host 0.0.0.0 \ --reload \ $reload_dir_packages \ diff --git a/services/notifications/docker/boot.sh b/services/notifications/docker/boot.sh index 8aae19c2f8e..7ea0198c996 100755 --- a/services/notifications/docker/boot.sh +++ b/services/notifications/docker/boot.sh @@ -52,7 +52,9 @@ if [ "${SC_BOOT_MODE}" = "debug" ]; then exec sh -c " cd services/notifications/src/simcore_service_notifications && \ - python -Xfrozen_modules=off -m debugpy --listen 0.0.0.0:${NOTIFICATIONS_REMOTE_DEBUGGING_PORT} -m uvicorn main:the_app \ + python -Xfrozen_modules=off -m debugpy --listen 0.0.0.0:${NOTIFICATIONS_REMOTE_DEBUGGING_PORT} -m \ + uvicorn \ + --factory main:create_app \ --host 0.0.0.0 \ --port 8000 \ --reload \ diff --git a/services/payments/docker/boot.sh b/services/payments/docker/boot.sh index 740e8ff1e17..efc3a054b33 100755 --- a/services/payments/docker/boot.sh +++ b/services/payments/docker/boot.sh @@ -52,7 +52,9 @@ if [ "${SC_BOOT_MODE}" = "debug" ]; then exec sh -c " cd services/payments/src/simcore_service_payments && \ - python -Xfrozen_modules=off -m debugpy --listen 0.0.0.0:${PAYMENTS_REMOTE_DEBUGGING_PORT} -m uvicorn main:the_app \ + python -Xfrozen_modules=off -m debugpy --listen 0.0.0.0:${PAYMENTS_REMOTE_DEBUGGING_PORT} -m \ + uvicorn \ + --factory main:create_app \ --host 0.0.0.0 \ --reload \ $reload_dir_packages \ diff --git a/services/resource-usage-tracker/docker/boot.sh b/services/resource-usage-tracker/docker/boot.sh index b1863b10308..dd51af48b25 100755 --- a/services/resource-usage-tracker/docker/boot.sh +++ b/services/resource-usage-tracker/docker/boot.sh @@ -52,7 +52,9 @@ if [ "${SC_BOOT_MODE}" = "debug" ]; then exec sh -c " cd services/resource-usage-tracker/src/simcore_service_resource_usage_tracker && \ - python -Xfrozen_modules=off -m debugpy --listen 0.0.0.0:${RESOURCE_USAGE_TRACKER_REMOTE_DEBUGGING_PORT} -m uvicorn main:the_app \ + python -Xfrozen_modules=off -m debugpy --listen 0.0.0.0:${RESOURCE_USAGE_TRACKER_REMOTE_DEBUGGING_PORT} -m \ + uvicorn \ + --factory main:create_app \ --host 0.0.0.0 \ --reload \ $reload_dir_packages \ From 94e762c70d7aa78ac8bf85691ebc75db1f663a18 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Wed, 9 Jul 2025 18:58:14 +0200 Subject: [PATCH 064/128] change boot script --- services/agent/docker/boot.sh | 3 ++- services/api-server/docker/boot.sh | 3 ++- services/autoscaling/docker/boot.sh | 3 ++- services/clusters-keeper/docker/boot.sh | 3 ++- services/datcore-adapter/docker/boot.sh | 3 ++- services/director-v2/docker/boot.sh | 7 +++++-- services/director/docker/boot.sh | 3 ++- services/dynamic-scheduler/docker/boot.sh | 3 ++- services/dynamic-sidecar/docker/boot.sh | 3 ++- services/efs-guardian/docker/boot.sh | 3 ++- services/invitations/docker/boot.sh | 3 ++- services/notifications/docker/boot.sh | 3 ++- services/payments/docker/boot.sh | 3 ++- services/resource-usage-tracker/docker/boot.sh | 3 ++- 14 files changed, 31 insertions(+), 15 deletions(-) 
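The boot-script changes below (and the debug entrypoints changed in the previous patch) all follow the same pattern: instead of importing a module-level `the_app` singleton, uvicorn is handed an application factory via `--factory`, so settings and logging can be configured when the factory runs rather than at module import time. A minimal, self-contained sketch of that factory pattern (illustrative only, not the services' actual create_app, which lives in each service's core.application module):

# main.py -- application-factory sketch (illustrative only)
from fastapi import FastAPI


def create_app() -> FastAPI:
    # configuration happens here, when uvicorn calls the factory,
    # not at import time as with a global `the_app` object
    app = FastAPI()

    @app.get("/healthcheck")
    async def healthcheck() -> dict[str, str]:
        return {"status": "ok"}

    return app

# started with: uvicorn --factory main:create_app --host 0.0.0.0
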
diff --git a/services/agent/docker/boot.sh b/services/agent/docker/boot.sh index fa0960b2f0b..621113b8857 100755 --- a/services/agent/docker/boot.sh +++ b/services/agent/docker/boot.sh @@ -62,7 +62,8 @@ if [ "${SC_BOOT_MODE}" = "debug" ]; then --log-level \"${SERVER_LOG_LEVEL}\" " else - exec uvicorn simcore_service_agent.main:the_app \ + exec uvicorn \ + --factory simcore_service_agent.main:create_app \ --host 0.0.0.0 \ --port 8000 \ --log-level "${SERVER_LOG_LEVEL}" \ diff --git a/services/api-server/docker/boot.sh b/services/api-server/docker/boot.sh index d54168fba2c..7a115743f29 100755 --- a/services/api-server/docker/boot.sh +++ b/services/api-server/docker/boot.sh @@ -54,7 +54,8 @@ if [ "${SC_BOOT_MODE}" = "debug" ]; then --log-level \"${SERVER_LOG_LEVEL}\" " else - exec uvicorn simcore_service_api_server.main:the_app \ + exec uvicorn \ + --factory simcore_service_api_server.main:create_app \ --host 0.0.0.0 \ --log-level "${SERVER_LOG_LEVEL}" fi diff --git a/services/autoscaling/docker/boot.sh b/services/autoscaling/docker/boot.sh index 34227db5208..f907acd4d2f 100755 --- a/services/autoscaling/docker/boot.sh +++ b/services/autoscaling/docker/boot.sh @@ -61,7 +61,8 @@ if [ "${SC_BOOT_MODE}" = "debug" ]; then --log-level \"${SERVER_LOG_LEVEL}\" " else - exec uvicorn simcore_service_autoscaling.main:the_app \ + exec uvicorn \ + --factory simcore_service_autoscaling.main:create_app \ --host 0.0.0.0 \ --log-level "${SERVER_LOG_LEVEL}" fi diff --git a/services/clusters-keeper/docker/boot.sh b/services/clusters-keeper/docker/boot.sh index 0c094e1e8bf..f5c56ea535b 100755 --- a/services/clusters-keeper/docker/boot.sh +++ b/services/clusters-keeper/docker/boot.sh @@ -62,7 +62,8 @@ if [ "${SC_BOOT_MODE}" = "debug" ]; then --log-level \"${SERVER_LOG_LEVEL}\" " else - exec uvicorn simcore_service_clusters_keeper.main:the_app \ + exec uvicorn \ + --factory simcore_service_clusters_keeper.main:create_app \ --host 0.0.0.0 \ --log-level "${SERVER_LOG_LEVEL}" fi diff --git a/services/datcore-adapter/docker/boot.sh b/services/datcore-adapter/docker/boot.sh index e5062355750..7403fdda075 100755 --- a/services/datcore-adapter/docker/boot.sh +++ b/services/datcore-adapter/docker/boot.sh @@ -59,7 +59,8 @@ if [ "${SC_BOOT_MODE}" = "debug" ]; then --log-level \"${SERVER_LOG_LEVEL}\" " else - exec uvicorn simcore_service_datcore_adapter.main:the_app \ + exec uvicorn \ + --factory simcore_service_datcore_adapter.main:create_app \ --host 0.0.0.0 \ --log-level "${SERVER_LOG_LEVEL}" fi diff --git a/services/director-v2/docker/boot.sh b/services/director-v2/docker/boot.sh index 9dc04a97fcf..690bb78fb92 100755 --- a/services/director-v2/docker/boot.sh +++ b/services/director-v2/docker/boot.sh @@ -51,7 +51,9 @@ if [ "${SC_BOOT_MODE}" = "debug" ]; then exec sh -c " cd services/director-v2/src/simcore_service_director_v2 && \ - python -Xfrozen_modules=off -m debugpy --listen 0.0.0.0:${DIRECTOR_V2_REMOTE_DEBUGGING_PORT} -m uvicorn main:the_app \ + python -Xfrozen_modules=off -m debugpy --listen 0.0.0.0:${DIRECTOR_V2_REMOTE_DEBUGGING_PORT} -m \ + uvicorn \ + --factory main:create_app \ \ --host 0.0.0.0 \ --reload \ $reload_dir_packages \ @@ -59,7 +61,8 @@ if [ "${SC_BOOT_MODE}" = "debug" ]; then --log-level \"${SERVER_LOG_LEVEL}\" " else - exec uvicorn simcore_service_director_v2.main:the_app \ + exec uvicorn \ + --factory simcore_service_director_v2.main:create_app \ --host 0.0.0.0 \ --log-level "${SERVER_LOG_LEVEL}" fi diff --git a/services/director/docker/boot.sh b/services/director/docker/boot.sh index 
67298472d0e..f0fe8f24700 100755 --- a/services/director/docker/boot.sh +++ b/services/director/docker/boot.sh @@ -59,7 +59,8 @@ if [ "${SC_BOOT_MODE}" = "debug" ]; then --log-level \"${SERVER_LOG_LEVEL}\" " else - exec uvicorn simcore_service_director.main:the_app \ + exec uvicorn \ + --factory simcore_service_director.main:create_app \ --host 0.0.0.0 \ --log-level "${SERVER_LOG_LEVEL}" fi diff --git a/services/dynamic-scheduler/docker/boot.sh b/services/dynamic-scheduler/docker/boot.sh index 1c13c2fbf12..4c64efc3738 100755 --- a/services/dynamic-scheduler/docker/boot.sh +++ b/services/dynamic-scheduler/docker/boot.sh @@ -62,7 +62,8 @@ if [ "${SC_BOOT_MODE}" = "debug" ]; then --log-level \"${SERVER_LOG_LEVEL}\" " else - exec uvicorn simcore_service_dynamic_scheduler.main:the_app \ + exec uvicorn \ + --factory simcore_service_dynamic_scheduler.main:create_app \ --host 0.0.0.0 \ --log-level "${SERVER_LOG_LEVEL}" fi diff --git a/services/dynamic-sidecar/docker/boot.sh b/services/dynamic-sidecar/docker/boot.sh index dcb8c4efc8b..724d096d31e 100755 --- a/services/dynamic-sidecar/docker/boot.sh +++ b/services/dynamic-sidecar/docker/boot.sh @@ -63,7 +63,8 @@ if [ "${SC_BOOT_MODE}" = "debug" ]; then --log-level \"${SERVER_LOG_LEVEL}\" " else - exec uvicorn simcore_service_dynamic_sidecar.main:the_app \ + exec uvicorn \ + --factory simcore_service_dynamic_sidecar.main:create_app \ --host 0.0.0.0 \ --log-level "${SERVER_LOG_LEVEL}" fi diff --git a/services/efs-guardian/docker/boot.sh b/services/efs-guardian/docker/boot.sh index c4025da59b8..d32297b29c6 100755 --- a/services/efs-guardian/docker/boot.sh +++ b/services/efs-guardian/docker/boot.sh @@ -62,7 +62,8 @@ if [ "${SC_BOOT_MODE}" = "debug" ]; then --log-level \"${SERVER_LOG_LEVEL}\" " else - exec uvicorn simcore_service_efs_guardian.main:the_app \ + exec uvicorn \ + --factory simcore_service_efs_guardian.main:create_app \ --host 0.0.0.0 \ --log-level "${SERVER_LOG_LEVEL}" fi diff --git a/services/invitations/docker/boot.sh b/services/invitations/docker/boot.sh index e0a0c9d0b7a..f4748f976ec 100755 --- a/services/invitations/docker/boot.sh +++ b/services/invitations/docker/boot.sh @@ -62,7 +62,8 @@ if [ "${SC_BOOT_MODE}" = "debug" ]; then --log-level \"${SERVER_LOG_LEVEL}\" " else - exec uvicorn simcore_service_invitations.main:the_app \ + exec uvicorn \ + --factory simcore_service_invitations.main:create_app \ --host 0.0.0.0 \ --log-level "${SERVER_LOG_LEVEL}" fi diff --git a/services/notifications/docker/boot.sh b/services/notifications/docker/boot.sh index 7ea0198c996..215b23a5dc5 100755 --- a/services/notifications/docker/boot.sh +++ b/services/notifications/docker/boot.sh @@ -63,7 +63,8 @@ if [ "${SC_BOOT_MODE}" = "debug" ]; then --log-level \"${SERVER_LOG_LEVEL}\" " else - exec uvicorn simcore_service_notifications.main:the_app \ + exec uvicorn \ + --factory simcore_service_notifications.main:create_app \ --host 0.0.0.0 \ --port 8000 \ --log-level "${SERVER_LOG_LEVEL}" \ diff --git a/services/payments/docker/boot.sh b/services/payments/docker/boot.sh index efc3a054b33..9d6d75ebddd 100755 --- a/services/payments/docker/boot.sh +++ b/services/payments/docker/boot.sh @@ -62,7 +62,8 @@ if [ "${SC_BOOT_MODE}" = "debug" ]; then --log-level \"${SERVER_LOG_LEVEL}\" " else - exec uvicorn simcore_service_payments.main:the_app \ + exec uvicorn \ + --factory simcore_service_payments.main:create_app \ --host 0.0.0.0 \ --log-level "${SERVER_LOG_LEVEL}" fi diff --git a/services/resource-usage-tracker/docker/boot.sh 
b/services/resource-usage-tracker/docker/boot.sh index dd51af48b25..12d353afea9 100755 --- a/services/resource-usage-tracker/docker/boot.sh +++ b/services/resource-usage-tracker/docker/boot.sh @@ -62,7 +62,8 @@ if [ "${SC_BOOT_MODE}" = "debug" ]; then --log-level \"${SERVER_LOG_LEVEL}\" " else - exec uvicorn simcore_service_resource_usage_tracker.main:the_app \ + exec uvicorn \ + --factory simcore_service_resource_usage_tracker.main:create_app \ --host 0.0.0.0 \ --log-level "${SERVER_LOG_LEVEL}" fi From cd095919eef4aa36666b0b8033d9e32625bdcfe6 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 10 Jul 2025 08:39:50 +0200 Subject: [PATCH 065/128] first initial non-blocking in catalog --- services/catalog/docker/boot.sh | 4 +- .../core/application.py | 4 +- .../simcore_service_catalog/core/events.py | 10 +++- .../src/simcore_service_catalog/main.py | 55 ++++++++++++++++++- 4 files changed, 65 insertions(+), 8 deletions(-) diff --git a/services/catalog/docker/boot.sh b/services/catalog/docker/boot.sh index b404e640d4b..81a4874d3fc 100755 --- a/services/catalog/docker/boot.sh +++ b/services/catalog/docker/boot.sh @@ -46,7 +46,7 @@ if [ "${SC_BOOT_MODE}" = "debug" ]; then cd services/catalog/src/simcore_service_catalog && \ python -Xfrozen_modules=off -m debugpy --listen 0.0.0.0:${CATALOG_REMOTE_DEBUGGING_PORT} -m \ uvicorn \ - --factory main:create_app \ + --factory main:app_factory \ --host 0.0.0.0 \ --reload \ $reload_dir_packages \ @@ -55,7 +55,7 @@ if [ "${SC_BOOT_MODE}" = "debug" ]; then " else exec uvicorn \ - --factory simcore_service_catalog.main:create_app \ + --factory simcore_service_catalog.main:app_factory \ --host 0.0.0.0 \ --log-level "${SERVER_LOG_LEVEL}" fi diff --git a/services/catalog/src/simcore_service_catalog/core/application.py b/services/catalog/src/simcore_service_catalog/core/application.py index f87990c2d12..b2ef71fefab 100644 --- a/services/catalog/src/simcore_service_catalog/core/application.py +++ b/services/catalog/src/simcore_service_catalog/core/application.py @@ -39,7 +39,7 @@ ) -def create_app() -> FastAPI: +def create_app(*, logging_lifespan: events.Lifespan | None = None) -> FastAPI: settings = ApplicationSettings.create_from_envs() _logger.debug(settings.model_dump_json(indent=2)) @@ -60,7 +60,7 @@ def create_app() -> FastAPI: openapi_url=f"/api/{API_VTAG}/openapi.json", docs_url="/dev/doc", redoc_url=None, # default disabled - lifespan=events.create_app_lifespan(), + lifespan=events.create_app_lifespan(logging_lifespan=logging_lifespan), ) override_fastapi_openapi_method(app) diff --git a/services/catalog/src/simcore_service_catalog/core/events.py b/services/catalog/src/simcore_service_catalog/core/events.py index 8695b10f15e..97a3e05763c 100644 --- a/services/catalog/src/simcore_service_catalog/core/events.py +++ b/services/catalog/src/simcore_service_catalog/core/events.py @@ -1,5 +1,6 @@ import logging -from collections.abc import AsyncIterator +from collections.abc import AsyncIterator, Callable +from typing import TypeAlias from fastapi import FastAPI from fastapi_lifespan_manager import LifespanManager, State @@ -50,9 +51,14 @@ async def _settings_lifespan(app: FastAPI) -> AsyncIterator[State]: } -def create_app_lifespan() -> LifespanManager: +Lifespan: TypeAlias = Callable[[FastAPI], AsyncIterator[None]] + + +def create_app_lifespan(logging_lifespan: Lifespan | None = None) -> LifespanManager: # WARNING: order matters app_lifespan = LifespanManager() + if logging_lifespan: + 
app_lifespan.add(logging_lifespan) app_lifespan.add(_settings_lifespan) # - postgres diff --git a/services/catalog/src/simcore_service_catalog/main.py b/services/catalog/src/simcore_service_catalog/main.py index 370e42d1418..d9ebf4197fb 100644 --- a/services/catalog/src/simcore_service_catalog/main.py +++ b/services/catalog/src/simcore_service_catalog/main.py @@ -1,8 +1,59 @@ """Main application to be deployed in for example uvicorn.""" +import logging +from collections.abc import AsyncIterator +from contextlib import AsyncExitStack from typing import Final +from common_library.json_serialization import json_dumps +from fastapi import FastAPI +from servicelib.logging_utils import setup_async_loggers_lifespan from simcore_service_catalog.core.application import create_app +from simcore_service_catalog.core.events import Lifespan +from simcore_service_catalog.core.settings import ApplicationSettings -assert create_app # nosec -__all__: Final[tuple[str, ...]] = ("create_app",) +_logger = logging.getLogger(__name__) + + +_NOISY_LOGGERS: Final[tuple[str, ...]] = ( + "aio_pika", + "aiobotocore", + "aiormq", + "botocore", + "httpcore", + "werkzeug", +) + + +async def _setup_logging(app_settings: ApplicationSettings) -> Lifespan: + exit_stack = AsyncExitStack() + await exit_stack.enter_async_context( + setup_async_loggers_lifespan( + log_base_level=app_settings.log_level, + noisy_loggers=_NOISY_LOGGERS, + log_format_local_dev_enabled=app_settings.CATALOG_LOG_FORMAT_LOCAL_DEV_ENABLED, + logger_filter_mapping=app_settings.CATALOG_LOG_FILTER_MAPPING, + tracing_settings=app_settings.CATALOG_TRACING, + ) + ) + + async def _logging_lifespan(app: FastAPI) -> AsyncIterator[None]: + assert app is not None, "app must be provided" + _logger.info("This is the non-blocking logger! Congratulations!") + yield + await exit_stack.aclose() + _logger.info("This is the blocking logger! Back to the roots!") + + return _logging_lifespan + + +async def app_factory() -> FastAPI: + app_settings = ApplicationSettings.create_from_envs() + _logger.info( + "Application settings: %s", + json_dumps(app_settings, indent=2, sort_keys=True), + ) + + logging_lifespan = await _setup_logging(app_settings) + + return create_app(logging_lifespan=logging_lifespan) From 668a16f7a5b0179f0abb9f4f4fafbd27e8b5e25e Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 10 Jul 2025 08:42:43 +0200 Subject: [PATCH 066/128] clean --- services/catalog/src/simcore_service_catalog/main.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/services/catalog/src/simcore_service_catalog/main.py b/services/catalog/src/simcore_service_catalog/main.py index d9ebf4197fb..7aa4877ec25 100644 --- a/services/catalog/src/simcore_service_catalog/main.py +++ b/services/catalog/src/simcore_service_catalog/main.py @@ -7,7 +7,7 @@ from common_library.json_serialization import json_dumps from fastapi import FastAPI -from servicelib.logging_utils import setup_async_loggers_lifespan +from servicelib.logging_utils import log_context, setup_async_loggers_lifespan from simcore_service_catalog.core.application import create_app from simcore_service_catalog.core.events import Lifespan from simcore_service_catalog.core.settings import ApplicationSettings @@ -39,10 +39,9 @@ async def _setup_logging(app_settings: ApplicationSettings) -> Lifespan: async def _logging_lifespan(app: FastAPI) -> AsyncIterator[None]: assert app is not None, "app must be provided" - _logger.info("This is the non-blocking logger! 
Congratulations!") - yield - await exit_stack.aclose() - _logger.info("This is the blocking logger! Back to the roots!") + with log_context(_logger, logging.INFO, "Non-blocking logger!"): + yield + await exit_stack.aclose() return _logging_lifespan From c395d7c94849a8f20a6dd33053ea8a1d42e38499 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 10 Jul 2025 09:18:09 +0200 Subject: [PATCH 067/128] catalog uses async logger --- .../service-library/src/servicelib/logging_utils.py | 10 +++++----- services/catalog/src/simcore_service_catalog/main.py | 9 ++++----- .../web/server/src/simcore_service_webserver/log.py | 2 +- 3 files changed, 10 insertions(+), 11 deletions(-) diff --git a/packages/service-library/src/servicelib/logging_utils.py b/packages/service-library/src/servicelib/logging_utils.py index 4f2e3eb0da6..cc9528eae22 100644 --- a/packages/service-library/src/servicelib/logging_utils.py +++ b/packages/service-library/src/servicelib/logging_utils.py @@ -12,8 +12,8 @@ import queue import sys from asyncio import iscoroutinefunction -from collections.abc import AsyncIterator, Callable, Iterator -from contextlib import asynccontextmanager, contextmanager +from collections.abc import Callable, Iterator +from contextlib import contextmanager from datetime import datetime from inspect import getframeinfo, stack from pathlib import Path @@ -257,15 +257,15 @@ def setup_loggers( _apply_logger_filters(logger_filter_mapping) -@asynccontextmanager -async def setup_async_loggers_lifespan( +@contextmanager +def setup_async_loggers_lifespan( *, log_format_local_dev_enabled: bool, logger_filter_mapping: dict[LoggerName, list[MessageSubstring]], tracing_settings: TracingSettings | None, log_base_level: LogLevelInt, noisy_loggers: tuple[str, ...] | None, -) -> AsyncIterator[None]: +) -> Iterator[None]: """ Async context manager for non-blocking logging infrastructure. 
diff --git a/services/catalog/src/simcore_service_catalog/main.py b/services/catalog/src/simcore_service_catalog/main.py index 7aa4877ec25..e48a66aeca7 100644 --- a/services/catalog/src/simcore_service_catalog/main.py +++ b/services/catalog/src/simcore_service_catalog/main.py @@ -25,9 +25,9 @@ ) -async def _setup_logging(app_settings: ApplicationSettings) -> Lifespan: +def _setup_logging(app_settings: ApplicationSettings) -> Lifespan: exit_stack = AsyncExitStack() - await exit_stack.enter_async_context( + exit_stack.enter_context( setup_async_loggers_lifespan( log_base_level=app_settings.log_level, noisy_loggers=_NOISY_LOGGERS, @@ -46,13 +46,12 @@ async def _logging_lifespan(app: FastAPI) -> AsyncIterator[None]: return _logging_lifespan -async def app_factory() -> FastAPI: +def app_factory() -> FastAPI: app_settings = ApplicationSettings.create_from_envs() _logger.info( "Application settings: %s", json_dumps(app_settings, indent=2, sort_keys=True), ) - - logging_lifespan = await _setup_logging(app_settings) + logging_lifespan = _setup_logging(app_settings) return create_app(logging_lifespan=logging_lifespan) diff --git a/services/web/server/src/simcore_service_webserver/log.py b/services/web/server/src/simcore_service_webserver/log.py index 8dd7da254e2..8f7d028e2ab 100644 --- a/services/web/server/src/simcore_service_webserver/log.py +++ b/services/web/server/src/simcore_service_webserver/log.py @@ -33,7 +33,7 @@ async def setup_logging(app_settings: ApplicationSettings) -> CleanupEvent: exit_stack = AsyncExitStack() - await exit_stack.enter_async_context( + exit_stack.enter_context( setup_async_loggers_lifespan( log_base_level=app_settings.log_level, noisy_loggers=NOISY_LOGGERS, From 327549d148fe6c79a11cacc37de798e13ccace55 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 10 Jul 2025 09:20:00 +0200 Subject: [PATCH 068/128] no need for async --- .../tests/test_logging_utils.py | 42 +++++++++---------- 1 file changed, 20 insertions(+), 22 deletions(-) diff --git a/packages/service-library/tests/test_logging_utils.py b/packages/service-library/tests/test_logging_utils.py index afd50b032fc..14305872717 100644 --- a/packages/service-library/tests/test_logging_utils.py +++ b/packages/service-library/tests/test_logging_utils.py @@ -21,7 +21,7 @@ setup_async_loggers_lifespan, ) from tenacity import ( - AsyncRetrying, + retry, retry_if_exception_type, stop_after_delay, wait_fixed, @@ -30,18 +30,16 @@ _logger = logging.getLogger(__name__) -async def _assert_check_log_message( +@retry( + wait=wait_fixed(0.01), + stop=stop_after_delay(2.0), + reraise=True, + retry=retry_if_exception_type(AssertionError), +) +def _assert_check_log_message( caplog: pytest.LogCaptureFixture, expected_message: str ) -> None: - """Helper to reliably check if a log message appears in caplog using tenacity.""" - async for attempt in AsyncRetrying( - wait=wait_fixed(0.01), - stop=stop_after_delay(2.0), - reraise=True, - retry=retry_if_exception_type(AssertionError), - ): - with attempt: - assert expected_message in caplog.text + assert expected_message in caplog.text _ALL_LOGGING_LEVELS = [ @@ -445,7 +443,7 @@ async def test_setup_async_loggers_basic( caplog.clear() caplog.set_level(logging.INFO) - async with setup_async_loggers_lifespan( + with setup_async_loggers_lifespan( log_format_local_dev_enabled=log_format_local_dev_enabled, logger_filter_mapping={}, # No filters for this test tracing_settings=None, # No tracing for this test @@ -455,7 +453,7 @@ async def 
test_setup_async_loggers_basic( test_logger = logging.getLogger("test_async_logger") test_logger.info("Test async log message") - await _assert_check_log_message(caplog, "Test async log message") + _assert_check_log_message(caplog, "Test async log message") async def test_setup_async_loggers_with_filters( @@ -470,7 +468,7 @@ async def test_setup_async_loggers_with_filters( "test_filtered_logger": ["filtered_message"], } - async with setup_async_loggers_lifespan( + with setup_async_loggers_lifespan( log_format_local_dev_enabled=True, logger_filter_mapping=filter_mapping, tracing_settings=None, # No tracing for this test @@ -487,8 +485,8 @@ async def test_setup_async_loggers_with_filters( test_logger.info("This is an unfiltered message") unfiltered_logger.info("This is from unfiltered logger") - await _assert_check_log_message(caplog, "This is an unfiltered message") - await _assert_check_log_message(caplog, "This is from unfiltered logger") + _assert_check_log_message(caplog, "This is an unfiltered message") + _assert_check_log_message(caplog, "This is from unfiltered logger") # Check that filtered message was not captured assert "This is a filtered_message" not in caplog.text @@ -507,7 +505,7 @@ async def test_setup_async_loggers_with_tracing_settings( # Note: We can't easily test actual tracing without setting up OpenTelemetry # But we can test that the function accepts the parameter - async with setup_async_loggers_lifespan( + with setup_async_loggers_lifespan( log_format_local_dev_enabled=False, logger_filter_mapping={}, # No filters for this test tracing_settings=None, @@ -517,7 +515,7 @@ async def test_setup_async_loggers_with_tracing_settings( test_logger = logging.getLogger("test_tracing_logger") test_logger.info("Test message with tracing settings") - await _assert_check_log_message(caplog, "Test message with tracing settings") + _assert_check_log_message(caplog, "Test message with tracing settings") async def test_setup_async_loggers_context_manager_cleanup( @@ -529,7 +527,7 @@ async def test_setup_async_loggers_context_manager_cleanup( test_logger = logging.getLogger("test_cleanup_logger") - async with setup_async_loggers_lifespan( + with setup_async_loggers_lifespan( log_format_local_dev_enabled=True, logger_filter_mapping={}, tracing_settings=None, @@ -539,7 +537,7 @@ async def test_setup_async_loggers_context_manager_cleanup( # During the context, handlers should be replaced test_logger.info("Message during context") - await _assert_check_log_message(caplog, "Message during context") + _assert_check_log_message(caplog, "Message during context") async def test_setup_async_loggers_exception_handling( @@ -555,7 +553,7 @@ def _raise_test_exception(): raise ValueError(exc_msg) try: - async with setup_async_loggers_lifespan( + with setup_async_loggers_lifespan( log_format_local_dev_enabled=True, logger_filter_mapping={}, tracing_settings=None, @@ -565,7 +563,7 @@ def _raise_test_exception(): test_logger = logging.getLogger("test_exception_logger") test_logger.info("Message before exception") - await _assert_check_log_message(caplog, "Message before exception") + _assert_check_log_message(caplog, "Message before exception") # Raise an exception to test cleanup _raise_test_exception() From d24dc1cf00ff58b67db817ca745442771a0e03f6 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 10 Jul 2025 09:34:23 +0200 Subject: [PATCH 069/128] moved logging lifespan in servicelib --- .../src/servicelib/fastapi/lifespan_utils.py | 6 ++- 
.../servicelib/fastapi/logging_lifespan.py | 44 +++++++++++++++++++ .../core/application.py | 35 ++++++--------- .../simcore_service_catalog/core/events.py | 7 +-- .../src/simcore_service_catalog/main.py | 37 +++++----------- 5 files changed, 73 insertions(+), 56 deletions(-) create mode 100644 packages/service-library/src/servicelib/fastapi/logging_lifespan.py diff --git a/packages/service-library/src/servicelib/fastapi/lifespan_utils.py b/packages/service-library/src/servicelib/fastapi/lifespan_utils.py index 4ccf0410930..894d9178855 100644 --- a/packages/service-library/src/servicelib/fastapi/lifespan_utils.py +++ b/packages/service-library/src/servicelib/fastapi/lifespan_utils.py @@ -1,6 +1,6 @@ import contextlib -from collections.abc import Iterator -from typing import Final +from collections.abc import AsyncIterator, Callable, Iterator +from typing import Final, TypeAlias from common_library.errors_classes import OsparcErrorMixin from fastapi import FastAPI @@ -8,6 +8,8 @@ from ..logging_utils import log_context +Lifespan: TypeAlias = Callable[[FastAPI], AsyncIterator[None]] + class LifespanError(OsparcErrorMixin, RuntimeError): ... diff --git a/packages/service-library/src/servicelib/fastapi/logging_lifespan.py b/packages/service-library/src/servicelib/fastapi/logging_lifespan.py new file mode 100644 index 00000000000..899c7e6f4ed --- /dev/null +++ b/packages/service-library/src/servicelib/fastapi/logging_lifespan.py @@ -0,0 +1,44 @@ +import logging +from collections.abc import AsyncIterator +from contextlib import AsyncExitStack + +from fastapi import FastAPI +from settings_library.tracing import TracingSettings + +from ..logging_utils import ( + LogLevelInt, + log_context, + setup_async_loggers_lifespan, +) +from ..logging_utils_filtering import LoggerName, MessageSubstring +from .lifespan_utils import Lifespan + +_logger = logging.getLogger(__name__) + + +def setup_logging( + *, + log_format_local_dev_enabled: bool, + logger_filter_mapping: dict[LoggerName, list[MessageSubstring]], + tracing_settings: TracingSettings | None, + log_base_level: LogLevelInt, + noisy_loggers: tuple[str, ...] 
| None, +) -> Lifespan: + exit_stack = AsyncExitStack() + exit_stack.enter_context( + setup_async_loggers_lifespan( + log_base_level=log_base_level, + noisy_loggers=noisy_loggers, + log_format_local_dev_enabled=log_format_local_dev_enabled, + logger_filter_mapping=logger_filter_mapping, + tracing_settings=tracing_settings, + ) + ) + + async def _logging_lifespan(app: FastAPI) -> AsyncIterator[None]: + assert app is not None, "app must be provided" + with log_context(_logger, logging.INFO, "Non-blocking logger!"): + yield + await exit_stack.aclose() + + return _logging_lifespan diff --git a/services/catalog/src/simcore_service_catalog/core/application.py b/services/catalog/src/simcore_service_catalog/core/application.py index b2ef71fefab..1d579741862 100644 --- a/services/catalog/src/simcore_service_catalog/core/application.py +++ b/services/catalog/src/simcore_service_catalog/core/application.py @@ -1,10 +1,11 @@ import logging -from typing import Final +from common_library.json_serialization import json_dumps from fastapi import FastAPI from fastapi.middleware.gzip import GZipMiddleware from models_library.basic_types import BootModeEnum from servicelib.fastapi import timing_middleware +from servicelib.fastapi.lifespan_utils import Lifespan from servicelib.fastapi.monitoring import ( setup_prometheus_instrumentation, ) @@ -13,7 +14,6 @@ initialize_fastapi_app_tracing, setup_tracing, ) -from servicelib.logging_utils import setup_loggers from starlette.middleware.base import BaseHTTPMiddleware from .._meta import ( @@ -29,27 +29,18 @@ _logger = logging.getLogger(__name__) -_NOISY_LOGGERS: Final[tuple[str, ...]] = ( - "aio_pika", - "aiobotocore", - "aiormq", - "botocore", - "httpcore", - "werkzeug", -) - - -def create_app(*, logging_lifespan: events.Lifespan | None = None) -> FastAPI: - settings = ApplicationSettings.create_from_envs() - _logger.debug(settings.model_dump_json(indent=2)) - setup_loggers( - log_format_local_dev_enabled=settings.CATALOG_LOG_FORMAT_LOCAL_DEV_ENABLED, - logger_filter_mapping=settings.CATALOG_LOG_FILTER_MAPPING, - tracing_settings=settings.CATALOG_TRACING, - log_base_level=settings.log_level, - noisy_loggers=_NOISY_LOGGERS, - ) +def create_app( + *, + settings: ApplicationSettings | None = None, + logging_lifespan: Lifespan | None = None, +) -> FastAPI: + if not settings: + settings = ApplicationSettings.create_from_envs() + _logger.info( + "Application settings: %s", + json_dumps(settings, indent=2, sort_keys=True), + ) app = FastAPI( debug=settings.SC_BOOT_MODE diff --git a/services/catalog/src/simcore_service_catalog/core/events.py b/services/catalog/src/simcore_service_catalog/core/events.py index 97a3e05763c..673bdf2f80f 100644 --- a/services/catalog/src/simcore_service_catalog/core/events.py +++ b/services/catalog/src/simcore_service_catalog/core/events.py @@ -1,9 +1,9 @@ import logging -from collections.abc import AsyncIterator, Callable -from typing import TypeAlias +from collections.abc import AsyncIterator from fastapi import FastAPI from fastapi_lifespan_manager import LifespanManager, State +from servicelib.fastapi.lifespan_utils import Lifespan from servicelib.fastapi.monitoring import ( create_prometheus_instrumentationmain_input_state, prometheus_instrumentation_lifespan, @@ -51,9 +51,6 @@ async def _settings_lifespan(app: FastAPI) -> AsyncIterator[State]: } -Lifespan: TypeAlias = Callable[[FastAPI], AsyncIterator[None]] - - def create_app_lifespan(logging_lifespan: Lifespan | None = None) -> LifespanManager: # WARNING: order matters app_lifespan = 
LifespanManager() diff --git a/services/catalog/src/simcore_service_catalog/main.py b/services/catalog/src/simcore_service_catalog/main.py index e48a66aeca7..7a25a2424b2 100644 --- a/services/catalog/src/simcore_service_catalog/main.py +++ b/services/catalog/src/simcore_service_catalog/main.py @@ -1,15 +1,12 @@ """Main application to be deployed in for example uvicorn.""" import logging -from collections.abc import AsyncIterator -from contextlib import AsyncExitStack from typing import Final from common_library.json_serialization import json_dumps from fastapi import FastAPI -from servicelib.logging_utils import log_context, setup_async_loggers_lifespan +from servicelib.fastapi.logging_lifespan import setup_logging from simcore_service_catalog.core.application import create_app -from simcore_service_catalog.core.events import Lifespan from simcore_service_catalog.core.settings import ApplicationSettings _logger = logging.getLogger(__name__) @@ -25,33 +22,19 @@ ) -def _setup_logging(app_settings: ApplicationSettings) -> Lifespan: - exit_stack = AsyncExitStack() - exit_stack.enter_context( - setup_async_loggers_lifespan( - log_base_level=app_settings.log_level, - noisy_loggers=_NOISY_LOGGERS, - log_format_local_dev_enabled=app_settings.CATALOG_LOG_FORMAT_LOCAL_DEV_ENABLED, - logger_filter_mapping=app_settings.CATALOG_LOG_FILTER_MAPPING, - tracing_settings=app_settings.CATALOG_TRACING, - ) - ) - - async def _logging_lifespan(app: FastAPI) -> AsyncIterator[None]: - assert app is not None, "app must be provided" - with log_context(_logger, logging.INFO, "Non-blocking logger!"): - yield - await exit_stack.aclose() - - return _logging_lifespan - - def app_factory() -> FastAPI: app_settings = ApplicationSettings.create_from_envs() + logging_lifespan = setup_logging( + log_format_local_dev_enabled=app_settings.CATALOG_LOG_FORMAT_LOCAL_DEV_ENABLED, + logger_filter_mapping=app_settings.CATALOG_LOG_FILTER_MAPPING, + tracing_settings=app_settings.CATALOG_TRACING, + log_base_level=app_settings.log_level, + noisy_loggers=_NOISY_LOGGERS, + ) + _logger.info( "Application settings: %s", json_dumps(app_settings, indent=2, sort_keys=True), ) - logging_lifespan = _setup_logging(app_settings) - return create_app(logging_lifespan=logging_lifespan) + return create_app(settings=app_settings, logging_lifespan=logging_lifespan) From 5c0e2bf7604c6fd3d282cf6e5129afa64cbab672 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 10 Jul 2025 09:35:07 +0200 Subject: [PATCH 070/128] rename --- .../src/servicelib/fastapi/logging_lifespan.py | 2 +- services/catalog/src/simcore_service_catalog/main.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/service-library/src/servicelib/fastapi/logging_lifespan.py b/packages/service-library/src/servicelib/fastapi/logging_lifespan.py index 899c7e6f4ed..e204d9447f6 100644 --- a/packages/service-library/src/servicelib/fastapi/logging_lifespan.py +++ b/packages/service-library/src/servicelib/fastapi/logging_lifespan.py @@ -16,7 +16,7 @@ _logger = logging.getLogger(__name__) -def setup_logging( +def setup_logging_lifespan( *, log_format_local_dev_enabled: bool, logger_filter_mapping: dict[LoggerName, list[MessageSubstring]], diff --git a/services/catalog/src/simcore_service_catalog/main.py b/services/catalog/src/simcore_service_catalog/main.py index 7a25a2424b2..edf5acfff4e 100644 --- a/services/catalog/src/simcore_service_catalog/main.py +++ b/services/catalog/src/simcore_service_catalog/main.py @@ -5,7 +5,7 @@ 
from common_library.json_serialization import json_dumps from fastapi import FastAPI -from servicelib.fastapi.logging_lifespan import setup_logging +from servicelib.fastapi.logging_lifespan import setup_logging_lifespan from simcore_service_catalog.core.application import create_app from simcore_service_catalog.core.settings import ApplicationSettings @@ -24,7 +24,7 @@ def app_factory() -> FastAPI: app_settings = ApplicationSettings.create_from_envs() - logging_lifespan = setup_logging( + logging_lifespan = setup_logging_lifespan( log_format_local_dev_enabled=app_settings.CATALOG_LOG_FORMAT_LOCAL_DEV_ENABLED, logger_filter_mapping=app_settings.CATALOG_LOG_FILTER_MAPPING, tracing_settings=app_settings.CATALOG_TRACING, From 011da28dd1e47b62dc91d02c56805c32ab74556a Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 10 Jul 2025 09:58:56 +0200 Subject: [PATCH 071/128] dv-2 now also uses the async logger --- services/director-v2/docker/boot.sh | 4 +- .../core/application.py | 38 ++++++++--------- .../src/simcore_service_director_v2/main.py | 41 +++++++++++++++++-- 3 files changed, 58 insertions(+), 25 deletions(-) diff --git a/services/director-v2/docker/boot.sh b/services/director-v2/docker/boot.sh index 690bb78fb92..b8d637b23ec 100755 --- a/services/director-v2/docker/boot.sh +++ b/services/director-v2/docker/boot.sh @@ -53,7 +53,7 @@ if [ "${SC_BOOT_MODE}" = "debug" ]; then cd services/director-v2/src/simcore_service_director_v2 && \ python -Xfrozen_modules=off -m debugpy --listen 0.0.0.0:${DIRECTOR_V2_REMOTE_DEBUGGING_PORT} -m \ uvicorn \ - --factory main:create_app \ \ + --factory main:app_factory \ --host 0.0.0.0 \ --reload \ $reload_dir_packages \ @@ -62,7 +62,7 @@ if [ "${SC_BOOT_MODE}" = "debug" ]; then " else exec uvicorn \ - --factory simcore_service_director_v2.main:create_app \ + --factory simcore_service_director_v2.main:app_factory \ --host 0.0.0.0 \ --log-level "${SERVER_LOG_LEVEL}" fi diff --git a/services/director-v2/src/simcore_service_director_v2/core/application.py b/services/director-v2/src/simcore_service_director_v2/core/application.py index 456c64b92ab..b666cc2b949 100644 --- a/services/director-v2/src/simcore_service_director_v2/core/application.py +++ b/services/director-v2/src/simcore_service_director_v2/core/application.py @@ -1,8 +1,10 @@ import logging -from typing import Final +from common_library.json_serialization import json_dumps from fastapi import FastAPI, HTTPException, status from fastapi.exceptions import RequestValidationError +from fastapi_lifespan_manager import LifespanManager +from servicelib.fastapi.lifespan_utils import Lifespan from servicelib.fastapi.openapi import ( get_common_oas_options, override_fastapi_openapi_method, @@ -12,7 +14,6 @@ initialize_fastapi_app_tracing, setup_tracing, ) -from servicelib.logging_utils import setup_loggers from .._meta import API_VERSION, API_VTAG, APP_NAME, PROJECT_NAME, SUMMARY from ..api.entrypoints import api_router @@ -95,28 +96,20 @@ def _set_exception_handlers(app: FastAPI): ) -_NOISY_LOGGERS: Final[tuple[str, ...]] = ( - "aio_pika", - "aiormq", - "httpcore", - "httpx", -) +def create_app_lifespan(logging_lifespan: Lifespan | None = None) -> LifespanManager: + app_lifespan = LifespanManager() + if logging_lifespan: + app_lifespan.add(logging_lifespan) + return app_lifespan -def create_base_app(settings: AppSettings | None = None) -> FastAPI: +def create_base_app( + settings: AppSettings | None = None, +) -> FastAPI: if settings is None: settings = 
AppSettings.create_from_envs() assert settings # nosec - setup_loggers( - log_format_local_dev_enabled=settings.DIRECTOR_V2_LOG_FORMAT_LOCAL_DEV_ENABLED, - logger_filter_mapping=settings.DIRECTOR_V2_LOG_FILTER_MAPPING, - tracing_settings=settings.DIRECTOR_V2_TRACING, - log_base_level=settings.log_level, - noisy_loggers=_NOISY_LOGGERS, - ) - _logger.debug(settings.model_dump_json(indent=2)) - assert settings.SC_BOOT_MODE # nosec app = FastAPI( debug=settings.SC_BOOT_MODE.is_devel_mode(), @@ -130,13 +123,20 @@ def create_base_app(settings: AppSettings | None = None) -> FastAPI: app.state.settings = settings app.include_router(api_router) + return app -def init_app(settings: AppSettings | None = None) -> FastAPI: +def init_app( # noqa: C901, PLR0912 + settings: AppSettings | None = None, +) -> FastAPI: app = create_base_app(settings) if settings is None: settings = app.state.settings + _logger.info( + "Application settings: %s", + json_dumps(settings, indent=2, sort_keys=True), + ) assert settings # nosec substitutions.setup(app) diff --git a/services/director-v2/src/simcore_service_director_v2/main.py b/services/director-v2/src/simcore_service_director_v2/main.py index 245fb26285e..723b3628198 100644 --- a/services/director-v2/src/simcore_service_director_v2/main.py +++ b/services/director-v2/src/simcore_service_director_v2/main.py @@ -1,7 +1,40 @@ -"""Main application to be deployed in for example uvicorn. -""" +"""Main application to be deployed in for example uvicorn.""" + +import logging +from typing import Final + +from common_library.json_serialization import json_dumps from fastapi import FastAPI +from servicelib.fastapi.logging_lifespan import ( + setup_logging_shutdown_event, +) from simcore_service_director_v2.core.application import init_app +from simcore_service_director_v2.core.settings import AppSettings + +_logger = logging.getLogger(__name__) + +_NOISY_LOGGERS: Final[tuple[str, ...]] = ( + "aio_pika", + "aiormq", + "httpcore", + "httpx", +) + + +def app_factory() -> FastAPI: + app_settings = AppSettings.create_from_envs() + logging_shutdown_event = setup_logging_shutdown_event( + log_format_local_dev_enabled=app_settings.DIRECTOR_V2_LOG_FORMAT_LOCAL_DEV_ENABLED, + logger_filter_mapping=app_settings.DIRECTOR_V2_LOG_FILTER_MAPPING, + tracing_settings=app_settings.DIRECTOR_V2_TRACING, + log_base_level=app_settings.log_level, + noisy_loggers=_NOISY_LOGGERS, + ) -# SINGLETON FastAPI app -the_app: FastAPI = init_app() + _logger.info( + "Application settings: %s", + json_dumps(app_settings, indent=2, sort_keys=True), + ) + app = init_app(settings=app_settings) + app.add_event_handler("shutdown", logging_shutdown_event) + return app From c7c50a897bc3b03ab9763cccbd99512cff7cf81c Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 10 Jul 2025 10:47:15 +0200 Subject: [PATCH 072/128] missing ENV --- services/docker-compose.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/services/docker-compose.yml b/services/docker-compose.yml index cb92ec8b43c..b9c3c08abd2 100644 --- a/services/docker-compose.yml +++ b/services/docker-compose.yml @@ -1255,6 +1255,7 @@ services: STORAGE_MONITORING_ENABLED: 1 STORAGE_PROFILING: ${STORAGE_PROFILING} STORAGE_PORT: ${STORAGE_PORT} + STORAGE_TRACING: ${STORAGE_TRACING} TRACING_OPENTELEMETRY_COLLECTOR_PORT: ${TRACING_OPENTELEMETRY_COLLECTOR_PORT} TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT: ${TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT} networks: &storage_networks From ddc62a65c7227601d0811032ce814beac0b865fe Mon 
Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 10 Jul 2025 10:47:38 +0200 Subject: [PATCH 073/128] created shutdown event --- .../servicelib/fastapi/logging_lifespan.py | 32 +++++++++++++++++-- 1 file changed, 29 insertions(+), 3 deletions(-) diff --git a/packages/service-library/src/servicelib/fastapi/logging_lifespan.py b/packages/service-library/src/servicelib/fastapi/logging_lifespan.py index e204d9447f6..d5c4020e5e7 100644 --- a/packages/service-library/src/servicelib/fastapi/logging_lifespan.py +++ b/packages/service-library/src/servicelib/fastapi/logging_lifespan.py @@ -1,5 +1,5 @@ import logging -from collections.abc import AsyncIterator +from collections.abc import AsyncIterator, Awaitable, Callable from contextlib import AsyncExitStack from fastapi import FastAPI @@ -37,8 +37,34 @@ def setup_logging_lifespan( async def _logging_lifespan(app: FastAPI) -> AsyncIterator[None]: assert app is not None, "app must be provided" - with log_context(_logger, logging.INFO, "Non-blocking logger!"): - yield + yield + with log_context(_logger, logging.INFO, "Re-enable Blocking logger"): await exit_stack.aclose() return _logging_lifespan + + +def setup_logging_shutdown_event( + *, + log_format_local_dev_enabled: bool, + logger_filter_mapping: dict[LoggerName, list[MessageSubstring]], + tracing_settings: TracingSettings | None, + log_base_level: LogLevelInt, + noisy_loggers: tuple[str, ...] | None, +) -> Callable[[], Awaitable[None]]: + exit_stack = AsyncExitStack() + exit_stack.enter_context( + setup_async_loggers_lifespan( + log_base_level=log_base_level, + noisy_loggers=noisy_loggers, + log_format_local_dev_enabled=log_format_local_dev_enabled, + logger_filter_mapping=logger_filter_mapping, + tracing_settings=tracing_settings, + ) + ) + + async def _on_shutdown_event() -> None: + with log_context(_logger, logging.INFO, "Re-enable Blocking logger"): + await exit_stack.aclose() + + return _on_shutdown_event From 61f059a899e495ac75097f8ea5b8deb1582dca97 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 10 Jul 2025 10:53:06 +0200 Subject: [PATCH 074/128] storage uses async loggers --- services/storage/docker/boot.sh | 7 ++-- .../core/application.py | 2 -- .../src/simcore_service_storage/main.py | 32 +++++++++++++------ 3 files changed, 27 insertions(+), 14 deletions(-) diff --git a/services/storage/docker/boot.sh b/services/storage/docker/boot.sh index 3e1fff945d1..a8cfa4d68ad 100755 --- a/services/storage/docker/boot.sh +++ b/services/storage/docker/boot.sh @@ -76,7 +76,9 @@ else exec sh -c " cd services/storage/src/simcore_service_storage && \ - python -Xfrozen_modules=off -m debugpy --listen 0.0.0.0:${STORAGE_REMOTE_DEBUGGING_PORT} -m uvicorn main:app \ + python -Xfrozen_modules=off -m debugpy --listen 0.0.0.0:${STORAGE_REMOTE_DEBUGGING_PORT} -m \ + uvicorn \ + --factory main:app_factory \ --host 0.0.0.0 \ --port ${STORAGE_PORT} \ --reload \ @@ -85,7 +87,8 @@ else --log-level \"${SERVER_LOG_LEVEL}\" " else - exec uvicorn simcore_service_storage.main:app \ + exec uvicorn \ + --factory simcore_service_storage.main:app_factory \ --host 0.0.0.0 \ --port ${STORAGE_PORT} \ --log-level "${SERVER_LOG_LEVEL}" diff --git a/services/storage/src/simcore_service_storage/core/application.py b/services/storage/src/simcore_service_storage/core/application.py index 14705995680..9dbbbd9c181 100644 --- a/services/storage/src/simcore_service_storage/core/application.py +++ 
b/services/storage/src/simcore_service_storage/core/application.py @@ -48,8 +48,6 @@ def create_app(settings: ApplicationSettings) -> FastAPI: # noqa: C901 - _logger.info("app settings: %s", settings.model_dump_json(indent=1)) - app = FastAPI( debug=settings.SC_BOOT_MODE in [BootModeEnum.DEBUG, BootModeEnum.DEVELOPMENT, BootModeEnum.LOCAL], diff --git a/services/storage/src/simcore_service_storage/main.py b/services/storage/src/simcore_service_storage/main.py index 0841df24105..a55c20ed5e9 100644 --- a/services/storage/src/simcore_service_storage/main.py +++ b/services/storage/src/simcore_service_storage/main.py @@ -1,11 +1,16 @@ """Main application to be deployed in for example uvicorn.""" +import logging from typing import Final -from servicelib.logging_utils import setup_loggers +from common_library.json_serialization import json_dumps +from fastapi import FastAPI +from servicelib.fastapi.logging_lifespan import setup_logging_shutdown_event from simcore_service_storage.core.application import create_app from simcore_service_storage.core.settings import ApplicationSettings +_logger = logging.getLogger(__name__) + _NOISY_LOGGERS: Final[tuple[str, ...]] = ( "aio_pika", "aiobotocore", @@ -16,14 +21,21 @@ "werkzeug", ) -_settings = ApplicationSettings.create_from_envs() -setup_loggers( - log_format_local_dev_enabled=_settings.STORAGE_LOG_FORMAT_LOCAL_DEV_ENABLED, - logger_filter_mapping=_settings.STORAGE_LOG_FILTER_MAPPING, - tracing_settings=_settings.STORAGE_TRACING, - log_base_level=_settings.log_level, - noisy_loggers=_NOISY_LOGGERS, -) +def app_factory() -> FastAPI: + app_settings = ApplicationSettings.create_from_envs() + logging_shutdown_event = setup_logging_shutdown_event( + log_format_local_dev_enabled=app_settings.STORAGE_LOG_FORMAT_LOCAL_DEV_ENABLED, + logger_filter_mapping=app_settings.STORAGE_LOG_FILTER_MAPPING, + tracing_settings=app_settings.STORAGE_TRACING, + log_base_level=app_settings.log_level, + noisy_loggers=_NOISY_LOGGERS, + ) -app = create_app(_settings) + _logger.info( + "Application settings: %s", + json_dumps(app_settings, indent=2, sort_keys=True), + ) + app = create_app(settings=app_settings) + app.add_event_handler("shutdown", logging_shutdown_event) + return app From 68a0b6a32f0ef3d9065050ecea1239ce965b816c Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 10 Jul 2025 11:18:07 +0200 Subject: [PATCH 075/128] correct setup of tracing --- .../src/servicelib/logging_utils.py | 19 ++++++++++--------- 1 file changed, 10 insertions(+), 9 deletions(-) diff --git a/packages/service-library/src/servicelib/logging_utils.py b/packages/service-library/src/servicelib/logging_utils.py index cc9528eae22..2d859b91c8d 100644 --- a/packages/service-library/src/servicelib/logging_utils.py +++ b/packages/service-library/src/servicelib/logging_utils.py @@ -158,17 +158,14 @@ def _setup_format_string( tracing_settings: TracingSettings | None, log_format_local_dev_enabled: bool, ) -> str: - """Create the appropriate format string based on settings.""" if log_format_local_dev_enabled: - if tracing_settings is not None: - return _LOCAL_TRACING_FORMATTING - return _LOCAL_FORMATTING - - if tracing_settings is not None: - setup_log_tracing(tracing_settings=tracing_settings) - return _TRACING_FORMATTING + return ( + _LOCAL_TRACING_FORMATTING + if tracing_settings is not None + else _LOCAL_FORMATTING + ) - return _DEFAULT_FORMATTING + return _TRACING_FORMATTING if tracing_settings is not None else _DEFAULT_FORMATTING def _get_all_loggers() -> 
list[logging.Logger]: @@ -236,6 +233,8 @@ def setup_loggers( _setup_base_logging_level(log_base_level) if noisy_loggers is not None: _dampen_noisy_loggers(noisy_loggers) + if tracing_settings is not None: + setup_log_tracing(tracing_settings=tracing_settings) fmt = _setup_format_string( tracing_settings=tracing_settings, log_format_local_dev_enabled=log_format_local_dev_enabled, @@ -283,6 +282,8 @@ def setup_async_loggers_lifespan( if noisy_loggers is not None: _dampen_noisy_loggers(noisy_loggers) + if tracing_settings is not None: + setup_log_tracing(tracing_settings=tracing_settings) fmt = _setup_format_string( tracing_settings=tracing_settings, log_format_local_dev_enabled=log_format_local_dev_enabled, From 9dc0f188484c6494d8a73b0cd131c80697104123 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 10 Jul 2025 11:26:05 +0200 Subject: [PATCH 076/128] director now has async logger too --- services/director/docker/boot.sh | 6 ++-- .../core/application.py | 2 -- .../simcore_service_director/core/settings.py | 5 +-- .../src/simcore_service_director/main.py | 32 ++++++++++++------- 4 files changed, 28 insertions(+), 17 deletions(-) diff --git a/services/director/docker/boot.sh b/services/director/docker/boot.sh index f0fe8f24700..38ad83b661f 100755 --- a/services/director/docker/boot.sh +++ b/services/director/docker/boot.sh @@ -51,7 +51,9 @@ if [ "${SC_BOOT_MODE}" = "debug" ]; then exec sh -c " cd services/director/src/simcore_service_director && \ - python -Xfrozen_modules=off -m debugpy --listen 0.0.0.0:${DIRECTOR_REMOTE_DEBUGGING_PORT} -m uvicorn main:the_app \ + python -Xfrozen_modules=off -m debugpy --listen 0.0.0.0:${DIRECTOR_REMOTE_DEBUGGING_PORT} -m \ + uvicorn \ + --factory main:app_factory \ --host 0.0.0.0 \ --reload \ $reload_dir_packages \ @@ -60,7 +62,7 @@ if [ "${SC_BOOT_MODE}" = "debug" ]; then " else exec uvicorn \ - --factory simcore_service_director.main:create_app \ + --factory simcore_service_director.main:app_factory \ --host 0.0.0.0 \ --log-level "${SERVER_LOG_LEVEL}" fi diff --git a/services/director/src/simcore_service_director/core/application.py b/services/director/src/simcore_service_director/core/application.py index 1ffd40721a8..eb28b0a9879 100644 --- a/services/director/src/simcore_service_director/core/application.py +++ b/services/director/src/simcore_service_director/core/application.py @@ -25,8 +25,6 @@ def create_app(settings: ApplicationSettings) -> FastAPI: - _logger.info("app settings: %s", settings.model_dump_json(indent=1)) - app = FastAPI( debug=settings.DIRECTOR_DEBUG, title=APP_NAME, diff --git a/services/director/src/simcore_service_director/core/settings.py b/services/director/src/simcore_service_director/core/settings.py index 01d37ee2989..2cfea313918 100644 --- a/services/director/src/simcore_service_director/core/settings.py +++ b/services/director/src/simcore_service_director/core/settings.py @@ -6,6 +6,7 @@ from fastapi import FastAPI from models_library.basic_types import LogLevel, PortInt, VersionTag from pydantic import AliasChoices, Field, NonNegativeInt, PositiveInt, field_validator +from servicelib.logging_utils import LogLevelInt from servicelib.logging_utils_filtering import LoggerName, MessageSubstring from settings_library.application import BaseApplicationSettings from settings_library.docker_registry import RegistrySettings @@ -145,9 +146,9 @@ def _validate_substitutions(cls, v): return v @cached_property - def log_level(self) -> LogLevel: + def log_level(self) -> LogLevelInt: 
"""override""" - return self.DIRECTOR_LOG_LEVEL + return cast(LogLevelInt, self.DIRECTOR_LOG_LEVEL) def get_application_settings(app: FastAPI) -> ApplicationSettings: diff --git a/services/director/src/simcore_service_director/main.py b/services/director/src/simcore_service_director/main.py index b750df8a080..805e2c9a76a 100644 --- a/services/director/src/simcore_service_director/main.py +++ b/services/director/src/simcore_service_director/main.py @@ -1,27 +1,37 @@ """Main application to be deployed by uvicorn (or equivalent) server""" +import logging from typing import Final +from common_library.json_serialization import json_dumps from fastapi import FastAPI -from servicelib.logging_utils import setup_loggers +from servicelib.fastapi.logging_lifespan import setup_logging_shutdown_event from simcore_service_director.core.application import create_app from simcore_service_director.core.settings import ApplicationSettings +_logger = logging.getLogger(__name__) + _NOISY_LOGGERS: Final[tuple[str, ...]] = ( "httpcore", "httpx", "werkzeug", ) -_the_settings = ApplicationSettings.create_from_envs() -setup_loggers( - log_format_local_dev_enabled=_the_settings.DIRECTOR_LOG_FORMAT_LOCAL_DEV_ENABLED, - logger_filter_mapping=_the_settings.DIRECTOR_LOG_FILTER_MAPPING, - tracing_settings=_the_settings.DIRECTOR_TRACING, - log_base_level=_the_settings.log_level, - noisy_loggers=_NOISY_LOGGERS, -) +def app_factory() -> FastAPI: + app_settings = ApplicationSettings.create_from_envs() + logging_shutdown_event = setup_logging_shutdown_event( + log_format_local_dev_enabled=app_settings.DIRECTOR_LOG_FORMAT_LOCAL_DEV_ENABLED, + logger_filter_mapping=app_settings.DIRECTOR_LOG_FILTER_MAPPING, + tracing_settings=app_settings.DIRECTOR_TRACING, + log_base_level=app_settings.log_level, + noisy_loggers=_NOISY_LOGGERS, + ) -# SINGLETON FastAPI app -the_app: FastAPI = create_app(_the_settings) + _logger.info( + "Application settings: %s", + json_dumps(app_settings, indent=2, sort_keys=True), + ) + app = create_app(settings=app_settings) + app.add_event_handler("shutdown", logging_shutdown_event) + return app From 2e3a070c98d8be0f6879a4b2cefef3a2ff9a1017 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 10 Jul 2025 11:32:03 +0200 Subject: [PATCH 077/128] changed boot scripts --- services/agent/docker/boot.sh | 4 ++-- services/api-server/docker/boot.sh | 4 ++-- services/autoscaling/docker/boot.sh | 4 ++-- services/clusters-keeper/docker/boot.sh | 4 ++-- services/datcore-adapter/docker/boot.sh | 4 ++-- services/dynamic-scheduler/docker/boot.sh | 4 ++-- services/dynamic-sidecar/docker/boot.sh | 4 ++-- services/efs-guardian/docker/boot.sh | 4 ++-- services/invitations/docker/boot.sh | 4 ++-- services/notifications/docker/boot.sh | 4 ++-- services/payments/docker/boot.sh | 4 ++-- services/resource-usage-tracker/docker/boot.sh | 4 ++-- 12 files changed, 24 insertions(+), 24 deletions(-) diff --git a/services/agent/docker/boot.sh b/services/agent/docker/boot.sh index 621113b8857..3b502cd9574 100755 --- a/services/agent/docker/boot.sh +++ b/services/agent/docker/boot.sh @@ -53,7 +53,7 @@ if [ "${SC_BOOT_MODE}" = "debug" ]; then cd services/agent/src/simcore_service_agent && \ python -Xfrozen_modules=off -m debugpy --listen 0.0.0.0:${AGENT_SERVER_REMOTE_DEBUG_PORT} -m \ uvicorn \ - --factory main:create_app \ + --factory main:app_factory \ --host 0.0.0.0 \ --port 8000 \ --reload \ @@ -63,7 +63,7 @@ if [ "${SC_BOOT_MODE}" = "debug" ]; then " else exec uvicorn \ - --factory 
simcore_service_agent.main:create_app \ + --factory simcore_service_agent.main:app_factory \ --host 0.0.0.0 \ --port 8000 \ --log-level "${SERVER_LOG_LEVEL}" \ diff --git a/services/api-server/docker/boot.sh b/services/api-server/docker/boot.sh index 7a115743f29..0f19b262c78 100755 --- a/services/api-server/docker/boot.sh +++ b/services/api-server/docker/boot.sh @@ -46,7 +46,7 @@ if [ "${SC_BOOT_MODE}" = "debug" ]; then cd services/api-server/src/simcore_service_api_server && \ python -Xfrozen_modules=off -m debugpy --listen 0.0.0.0:${API_SERVER_REMOTE_DEBUG_PORT} -m \ uvicorn \ - --factory main:create_app \ + --factory main:app_factory \ --host 0.0.0.0 \ --reload \ $reload_dir_packages \ @@ -55,7 +55,7 @@ if [ "${SC_BOOT_MODE}" = "debug" ]; then " else exec uvicorn \ - --factory simcore_service_api_server.main:create_app \ + --factory simcore_service_api_server.main:app_factory \ --host 0.0.0.0 \ --log-level "${SERVER_LOG_LEVEL}" fi diff --git a/services/autoscaling/docker/boot.sh b/services/autoscaling/docker/boot.sh index f907acd4d2f..8a5d3adb709 100755 --- a/services/autoscaling/docker/boot.sh +++ b/services/autoscaling/docker/boot.sh @@ -53,7 +53,7 @@ if [ "${SC_BOOT_MODE}" = "debug" ]; then cd services/autoscaling/src/simcore_service_autoscaling && \ python -Xfrozen_modules=off -m debugpy --listen 0.0.0.0:${AUTOSCALING_REMOTE_DEBUGGING_PORT} -m \ uvicorn \ - --factory main:create_app \ + --factory main:app_factory \ --host 0.0.0.0 \ --reload \ $reload_dir_packages \ @@ -62,7 +62,7 @@ if [ "${SC_BOOT_MODE}" = "debug" ]; then " else exec uvicorn \ - --factory simcore_service_autoscaling.main:create_app \ + --factory simcore_service_autoscaling.main:app_factory \ --host 0.0.0.0 \ --log-level "${SERVER_LOG_LEVEL}" fi diff --git a/services/clusters-keeper/docker/boot.sh b/services/clusters-keeper/docker/boot.sh index f5c56ea535b..c465ac2316f 100755 --- a/services/clusters-keeper/docker/boot.sh +++ b/services/clusters-keeper/docker/boot.sh @@ -54,7 +54,7 @@ if [ "${SC_BOOT_MODE}" = "debug" ]; then cd services/clusters-keeper/src/simcore_service_clusters_keeper && \ python -Xfrozen_modules=off -m debugpy --listen 0.0.0.0:${CLUSTERS_KEEPER_REMOTE_DEBUGGING_PORT} -m \ uvicorn \ - --factory main:create_app \ + --factory main:app_factory \ --host 0.0.0.0 \ --reload \ $reload_dir_packages \ @@ -63,7 +63,7 @@ if [ "${SC_BOOT_MODE}" = "debug" ]; then " else exec uvicorn \ - --factory simcore_service_clusters_keeper.main:create_app \ + --factory simcore_service_clusters_keeper.main:app_factory \ --host 0.0.0.0 \ --log-level "${SERVER_LOG_LEVEL}" fi diff --git a/services/datcore-adapter/docker/boot.sh b/services/datcore-adapter/docker/boot.sh index 7403fdda075..d4c20cad1ab 100755 --- a/services/datcore-adapter/docker/boot.sh +++ b/services/datcore-adapter/docker/boot.sh @@ -51,7 +51,7 @@ if [ "${SC_BOOT_MODE}" = "debug" ]; then cd services/datcore-adapter/src/simcore_service_datcore_adapter && \ python -Xfrozen_modules=off -m debugpy --listen 0.0.0.0:${DATCORE_ADAPTER_REMOTE_DEBUG_PORT} -m \ uvicorn \ - --factory main:create_app \ + --factory main:app_factory \ --host 0.0.0.0 \ --reload \ $reload_dir_packages \ @@ -60,7 +60,7 @@ if [ "${SC_BOOT_MODE}" = "debug" ]; then " else exec uvicorn \ - --factory simcore_service_datcore_adapter.main:create_app \ + --factory simcore_service_datcore_adapter.main:app_factory \ --host 0.0.0.0 \ --log-level "${SERVER_LOG_LEVEL}" fi diff --git a/services/dynamic-scheduler/docker/boot.sh b/services/dynamic-scheduler/docker/boot.sh index 4c64efc3738..382bfd14d01 
100755 --- a/services/dynamic-scheduler/docker/boot.sh +++ b/services/dynamic-scheduler/docker/boot.sh @@ -54,7 +54,7 @@ if [ "${SC_BOOT_MODE}" = "debug" ]; then cd services/dynamic-scheduler/src/simcore_service_dynamic_scheduler && \ python -Xfrozen_modules=off -m debugpy --listen 0.0.0.0:${DYNAMIC_SCHEDULER_REMOTE_DEBUGGING_PORT} -m \ uvicorn \ - --factory main:create_app \ + --factory main:app_factory \ --host 0.0.0.0 \ --reload \ $reload_dir_packages \ @@ -63,7 +63,7 @@ if [ "${SC_BOOT_MODE}" = "debug" ]; then " else exec uvicorn \ - --factory simcore_service_dynamic_scheduler.main:create_app \ + --factory simcore_service_dynamic_scheduler.main:app_factory \ --host 0.0.0.0 \ --log-level "${SERVER_LOG_LEVEL}" fi diff --git a/services/dynamic-sidecar/docker/boot.sh b/services/dynamic-sidecar/docker/boot.sh index 724d096d31e..984ef554a3b 100755 --- a/services/dynamic-sidecar/docker/boot.sh +++ b/services/dynamic-sidecar/docker/boot.sh @@ -55,7 +55,7 @@ if [ "${SC_BOOT_MODE}" = "debug" ]; then cd services/dynamic-sidecar/src/simcore_service_dynamic_sidecar && \ python -Xfrozen_modules=off -m debugpy --listen 0.0.0.0:${DYNAMIC_SIDECAR_REMOTE_DEBUGGING_PORT} -m \ uvicorn \ - --factory main:create_app \ + --factory main:app_factory \ --host 0.0.0.0 \ --reload \ $reload_dir_packages \ @@ -64,7 +64,7 @@ if [ "${SC_BOOT_MODE}" = "debug" ]; then " else exec uvicorn \ - --factory simcore_service_dynamic_sidecar.main:create_app \ + --factory simcore_service_dynamic_sidecar.main:app_factory \ --host 0.0.0.0 \ --log-level "${SERVER_LOG_LEVEL}" fi diff --git a/services/efs-guardian/docker/boot.sh b/services/efs-guardian/docker/boot.sh index d32297b29c6..904aace5c91 100755 --- a/services/efs-guardian/docker/boot.sh +++ b/services/efs-guardian/docker/boot.sh @@ -54,7 +54,7 @@ if [ "${SC_BOOT_MODE}" = "debug" ]; then cd services/efs-guardian/src/simcore_service_efs_guardian && \ python -Xfrozen_modules=off -m debugpy --listen 0.0.0.0:${EFS_GUARDIAN_REMOTE_DEBUGGING_PORT} -m \ uvicorn \ - --factory main:create_app \ + --factory main:app_factory \ --host 0.0.0.0 \ --reload \ $reload_dir_packages \ @@ -63,7 +63,7 @@ if [ "${SC_BOOT_MODE}" = "debug" ]; then " else exec uvicorn \ - --factory simcore_service_efs_guardian.main:create_app \ + --factory simcore_service_efs_guardian.main:app_factory \ --host 0.0.0.0 \ --log-level "${SERVER_LOG_LEVEL}" fi diff --git a/services/invitations/docker/boot.sh b/services/invitations/docker/boot.sh index f4748f976ec..fb1e2687586 100755 --- a/services/invitations/docker/boot.sh +++ b/services/invitations/docker/boot.sh @@ -54,7 +54,7 @@ if [ "${SC_BOOT_MODE}" = "debug" ]; then cd services/invitations/src/simcore_service_invitations && \ python -Xfrozen_modules=off -m debugpy --listen 0.0.0.0:${INVITATIONS_REMOTE_DEBUGGING_PORT} -m \ uvicorn \ - --factory main:create_app \ + --factory main:app_factory \ --host 0.0.0.0 \ --reload \ $reload_dir_packages \ @@ -63,7 +63,7 @@ if [ "${SC_BOOT_MODE}" = "debug" ]; then " else exec uvicorn \ - --factory simcore_service_invitations.main:create_app \ + --factory simcore_service_invitations.main:app_factory \ --host 0.0.0.0 \ --log-level "${SERVER_LOG_LEVEL}" fi diff --git a/services/notifications/docker/boot.sh b/services/notifications/docker/boot.sh index 215b23a5dc5..dbae76238cd 100755 --- a/services/notifications/docker/boot.sh +++ b/services/notifications/docker/boot.sh @@ -54,7 +54,7 @@ if [ "${SC_BOOT_MODE}" = "debug" ]; then cd services/notifications/src/simcore_service_notifications && \ python -Xfrozen_modules=off -m debugpy 
--listen 0.0.0.0:${NOTIFICATIONS_REMOTE_DEBUGGING_PORT} -m \ uvicorn \ - --factory main:create_app \ + --factory main:app_factory \ --host 0.0.0.0 \ --port 8000 \ --reload \ @@ -64,7 +64,7 @@ if [ "${SC_BOOT_MODE}" = "debug" ]; then " else exec uvicorn \ - --factory simcore_service_notifications.main:create_app \ + --factory simcore_service_notifications.main:app_factory \ --host 0.0.0.0 \ --port 8000 \ --log-level "${SERVER_LOG_LEVEL}" \ diff --git a/services/payments/docker/boot.sh b/services/payments/docker/boot.sh index 9d6d75ebddd..e2d5b5f7d21 100755 --- a/services/payments/docker/boot.sh +++ b/services/payments/docker/boot.sh @@ -54,7 +54,7 @@ if [ "${SC_BOOT_MODE}" = "debug" ]; then cd services/payments/src/simcore_service_payments && \ python -Xfrozen_modules=off -m debugpy --listen 0.0.0.0:${PAYMENTS_REMOTE_DEBUGGING_PORT} -m \ uvicorn \ - --factory main:create_app \ + --factory main:app_factory \ --host 0.0.0.0 \ --reload \ $reload_dir_packages \ @@ -63,7 +63,7 @@ if [ "${SC_BOOT_MODE}" = "debug" ]; then " else exec uvicorn \ - --factory simcore_service_payments.main:create_app \ + --factory simcore_service_payments.main:app_factory \ --host 0.0.0.0 \ --log-level "${SERVER_LOG_LEVEL}" fi diff --git a/services/resource-usage-tracker/docker/boot.sh b/services/resource-usage-tracker/docker/boot.sh index 12d353afea9..372e0f0d730 100755 --- a/services/resource-usage-tracker/docker/boot.sh +++ b/services/resource-usage-tracker/docker/boot.sh @@ -54,7 +54,7 @@ if [ "${SC_BOOT_MODE}" = "debug" ]; then cd services/resource-usage-tracker/src/simcore_service_resource_usage_tracker && \ python -Xfrozen_modules=off -m debugpy --listen 0.0.0.0:${RESOURCE_USAGE_TRACKER_REMOTE_DEBUGGING_PORT} -m \ uvicorn \ - --factory main:create_app \ + --factory main:app_factory \ --host 0.0.0.0 \ --reload \ $reload_dir_packages \ @@ -63,7 +63,7 @@ if [ "${SC_BOOT_MODE}" = "debug" ]; then " else exec uvicorn \ - --factory simcore_service_resource_usage_tracker.main:create_app \ + --factory simcore_service_resource_usage_tracker.main:app_factory \ --host 0.0.0.0 \ --log-level "${SERVER_LOG_LEVEL}" fi From eefcc3e05200b1bfadfbdfc6f78ddc55e4e7f2fa Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 10 Jul 2025 11:37:25 +0200 Subject: [PATCH 078/128] agent --- .../simcore_service_agent/core/application.py | 25 +++++-------- .../agent/src/simcore_service_agent/main.py | 35 ++++++++++++++++++- 2 files changed, 43 insertions(+), 17 deletions(-) diff --git a/services/agent/src/simcore_service_agent/core/application.py b/services/agent/src/simcore_service_agent/core/application.py index c7f28faf599..e7972c1042a 100644 --- a/services/agent/src/simcore_service_agent/core/application.py +++ b/services/agent/src/simcore_service_agent/core/application.py @@ -1,5 +1,6 @@ import logging +from common_library.json_serialization import json_dumps from fastapi import FastAPI from servicelib.fastapi.openapi import ( get_common_oas_options, @@ -9,7 +10,6 @@ initialize_fastapi_app_tracing, setup_tracing, ) -from servicelib.logging_utils import setup_loggers from .._meta import ( API_VTAG, @@ -27,23 +27,16 @@ from ..services.volumes_manager import setup_volume_manager from .settings import ApplicationSettings -logger = logging.getLogger(__name__) +_logger = logging.getLogger(__name__) -def _setup_logger(settings: ApplicationSettings): - setup_loggers( - log_format_local_dev_enabled=settings.AGENT_VOLUMES_LOG_FORMAT_LOCAL_DEV_ENABLED, - 
logger_filter_mapping=settings.AGENT_VOLUMES_LOG_FILTER_MAPPING, - tracing_settings=settings.AGENT_TRACING, - log_base_level=settings.log_level, - noisy_loggers=None, - ) - - -def create_app() -> FastAPI: - settings = ApplicationSettings.create_from_envs() - _setup_logger(settings) - logger.debug(settings.model_dump_json(indent=2)) +def create_app(settings: ApplicationSettings | None = None) -> FastAPI: + if settings is None: + settings = ApplicationSettings.create_from_envs() + _logger.info( + "Application settings: %s", + json_dumps(settings, indent=2, sort_keys=True), + ) assert settings.SC_BOOT_MODE # nosec app = FastAPI( diff --git a/services/agent/src/simcore_service_agent/main.py b/services/agent/src/simcore_service_agent/main.py index a16db0c3d52..315239e60c1 100644 --- a/services/agent/src/simcore_service_agent/main.py +++ b/services/agent/src/simcore_service_agent/main.py @@ -1,3 +1,36 @@ +import logging +from typing import Final + +from common_library.json_serialization import json_dumps +from fastapi import FastAPI +from servicelib.fastapi.logging_lifespan import setup_logging_shutdown_event from simcore_service_agent.core.application import create_app +from simcore_service_agent.core.settings import ApplicationSettings + +_logger = logging.getLogger(__name__) + +_NOISY_LOGGERS: Final[tuple[str, ...]] = ( + "aio_pika", + "aiormq", + "httpcore", + "httpx", +) + + +def app_factory() -> FastAPI: + app_settings = ApplicationSettings.create_from_envs() + logging_shutdown_event = setup_logging_shutdown_event( + log_format_local_dev_enabled=app_settings.AGENT_VOLUMES_LOG_FORMAT_LOCAL_DEV_ENABLED, + logger_filter_mapping=app_settings.AGENT_VOLUMES_LOG_FILTER_MAPPING, + tracing_settings=app_settings.AGENT_TRACING, + log_base_level=app_settings.log_level, + noisy_loggers=_NOISY_LOGGERS, + ) -the_app = create_app() + _logger.info( + "Application settings: %s", + json_dumps(app_settings, indent=2, sort_keys=True), + ) + app = create_app(settings=app_settings) + app.add_event_handler("shutdown", logging_shutdown_event) + return app From 29eee796938e538a81f9399fab565ae31ed3a802 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 10 Jul 2025 11:44:16 +0200 Subject: [PATCH 079/128] api-server --- .../core/application.py | 15 +++----- .../src/simcore_service_api_server/main.py | 38 ++++++++++++++++++- 2 files changed, 41 insertions(+), 12 deletions(-) diff --git a/services/api-server/src/simcore_service_api_server/core/application.py b/services/api-server/src/simcore_service_api_server/core/application.py index 197e2306b53..b6aa35e431e 100644 --- a/services/api-server/src/simcore_service_api_server/core/application.py +++ b/services/api-server/src/simcore_service_api_server/core/application.py @@ -1,5 +1,6 @@ import logging +from common_library.json_serialization import json_dumps from fastapi import FastAPI from fastapi_pagination import add_pagination from models_library.basic_types import BootModeEnum @@ -9,7 +10,6 @@ initialize_fastapi_app_tracing, setup_tracing, ) -from servicelib.logging_utils import setup_loggers from .. 
import exceptions from .._meta import API_VERSION, API_VTAG, APP_NAME @@ -51,17 +51,12 @@ def _label_title_and_version(settings: ApplicationSettings, title: str, version: def init_app(settings: ApplicationSettings | None = None) -> FastAPI: if settings is None: settings = ApplicationSettings.create_from_envs() + _logger.info( + "Application settings: %s", + json_dumps(settings, indent=2, sort_keys=True), + ) assert settings # nosec - setup_loggers( - log_format_local_dev_enabled=settings.API_SERVER_LOG_FORMAT_LOCAL_DEV_ENABLED, - logger_filter_mapping=settings.API_SERVER_LOG_FILTER_MAPPING, - tracing_settings=settings.API_SERVER_TRACING, - log_base_level=settings.log_level, - noisy_loggers=None, - ) - _logger.debug("App settings:\n%s", settings.model_dump_json(indent=2)) - # Labeling title = "osparc.io public API" version = API_VERSION # public version identifier diff --git a/services/api-server/src/simcore_service_api_server/main.py b/services/api-server/src/simcore_service_api_server/main.py index 8b636ac4315..51be335be9b 100644 --- a/services/api-server/src/simcore_service_api_server/main.py +++ b/services/api-server/src/simcore_service_api_server/main.py @@ -1,7 +1,41 @@ -"""Main application to be deployed in for example uvicorn. -""" +"""Main application to be deployed in for example uvicorn.""" + +import logging +from typing import Final + +from common_library.json_serialization import json_dumps from fastapi import FastAPI +from servicelib.fastapi.logging_lifespan import setup_logging_shutdown_event from simcore_service_api_server.core.application import init_app +from simcore_service_api_server.core.settings import ApplicationSettings # SINGLETON FastAPI app the_app: FastAPI = init_app() + +_logger = logging.getLogger(__name__) + +_NOISY_LOGGERS: Final[tuple[str, ...]] = ( + "aio_pika", + "aiormq", + "httpcore", + "httpx", +) + + +def app_factory() -> FastAPI: + app_settings = ApplicationSettings.create_from_envs() + logging_shutdown_event = setup_logging_shutdown_event( + log_format_local_dev_enabled=app_settings.API_SERVER_LOG_FORMAT_LOCAL_DEV_ENABLED, + logger_filter_mapping=app_settings.API_SERVER_LOG_FILTER_MAPPING, + tracing_settings=app_settings.API_SERVER_TRACING, + log_base_level=app_settings.log_level, + noisy_loggers=_NOISY_LOGGERS, + ) + + _logger.info( + "Application settings: %s", + json_dumps(app_settings, indent=2, sort_keys=True), + ) + app = init_app(settings=app_settings) + app.add_event_handler("shutdown", logging_shutdown_event) + return app From 11dbf96379265832d32fe51534718b46e89c8195 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 10 Jul 2025 11:46:04 +0200 Subject: [PATCH 080/128] autoscaling --- .../core/application.py | 2 -- .../src/simcore_service_autoscaling/main.py | 33 ++++++++++++------- 2 files changed, 22 insertions(+), 13 deletions(-) diff --git a/services/autoscaling/src/simcore_service_autoscaling/core/application.py b/services/autoscaling/src/simcore_service_autoscaling/core/application.py index 6abe3a52265..ba833512565 100644 --- a/services/autoscaling/src/simcore_service_autoscaling/core/application.py +++ b/services/autoscaling/src/simcore_service_autoscaling/core/application.py @@ -35,8 +35,6 @@ def create_app(settings: ApplicationSettings) -> FastAPI: - logger.info("app settings: %s", settings.model_dump_json(indent=1)) - app = FastAPI( debug=settings.AUTOSCALING_DEBUG, title=APP_NAME, diff --git a/services/autoscaling/src/simcore_service_autoscaling/main.py 
b/services/autoscaling/src/simcore_service_autoscaling/main.py index 8aad6b87fe4..4ebf37037cd 100644 --- a/services/autoscaling/src/simcore_service_autoscaling/main.py +++ b/services/autoscaling/src/simcore_service_autoscaling/main.py @@ -1,12 +1,16 @@ """Main application to be deployed by uvicorn (or equivalent) server""" +import logging from typing import Final +from common_library.json_serialization import json_dumps from fastapi import FastAPI -from servicelib.logging_utils import setup_loggers +from servicelib.fastapi.logging_lifespan import setup_logging_shutdown_event from simcore_service_autoscaling.core.application import create_app from simcore_service_autoscaling.core.settings import ApplicationSettings +_logger = logging.getLogger(__name__) + _NOISY_LOGGERS: Final[tuple[str, ...]] = ( "aiobotocore", "aio_pika", @@ -15,14 +19,21 @@ "werkzeug", ) -the_settings = ApplicationSettings.create_from_envs() -setup_loggers( - log_format_local_dev_enabled=the_settings.AUTOSCALING_LOG_FORMAT_LOCAL_DEV_ENABLED, - logger_filter_mapping=the_settings.AUTOSCALING_LOG_FILTER_MAPPING, - tracing_settings=the_settings.AUTOSCALING_TRACING, - log_base_level=the_settings.log_level, - noisy_loggers=_NOISY_LOGGERS, -) -# SINGLETON FastAPI app -the_app: FastAPI = create_app(the_settings) +def app_factory() -> FastAPI: + app_settings = ApplicationSettings.create_from_envs() + logging_shutdown_event = setup_logging_shutdown_event( + log_format_local_dev_enabled=app_settings.AUTOSCALING_LOG_FORMAT_LOCAL_DEV_ENABLED, + logger_filter_mapping=app_settings.AUTOSCALING_LOG_FILTER_MAPPING, + tracing_settings=app_settings.AUTOSCALING_TRACING, + log_base_level=app_settings.log_level, + noisy_loggers=_NOISY_LOGGERS, + ) + + _logger.info( + "Application settings: %s", + json_dumps(app_settings, indent=2, sort_keys=True), + ) + app = create_app(settings=app_settings) + app.add_event_handler("shutdown", logging_shutdown_event) + return app From 530472db2e7cc286b9cb15d6a60d3caa6923663a Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 10 Jul 2025 11:54:17 +0200 Subject: [PATCH 081/128] clusters-keeper --- .../simcore_service_clusters_keeper/main.py | 33 ++++++++++++------- 1 file changed, 22 insertions(+), 11 deletions(-) diff --git a/services/clusters-keeper/src/simcore_service_clusters_keeper/main.py b/services/clusters-keeper/src/simcore_service_clusters_keeper/main.py index 1f11ed1b7f8..95677e3a2fe 100644 --- a/services/clusters-keeper/src/simcore_service_clusters_keeper/main.py +++ b/services/clusters-keeper/src/simcore_service_clusters_keeper/main.py @@ -1,12 +1,16 @@ """Main application to be deployed by uvicorn (or equivalent) server""" +import logging from typing import Final +from common_library.json_serialization import json_dumps from fastapi import FastAPI -from servicelib.logging_utils import setup_loggers +from servicelib.fastapi.logging_lifespan import setup_logging_shutdown_event from simcore_service_clusters_keeper.core.application import create_app from simcore_service_clusters_keeper.core.settings import ApplicationSettings +_logger = logging.getLogger(__name__) + _NOISY_LOGGERS: Final[tuple[str, ...]] = ( "aiobotocore", "aio_pika", @@ -15,14 +19,21 @@ "werkzeug", ) -the_settings = ApplicationSettings.create_from_envs() -setup_loggers( - log_format_local_dev_enabled=the_settings.CLUSTERS_KEEPER_LOG_FORMAT_LOCAL_DEV_ENABLED, - logger_filter_mapping=the_settings.CLUSTERS_KEEPER_LOG_FILTER_MAPPING, - tracing_settings=the_settings.CLUSTERS_KEEPER_TRACING, - 
log_base_level=the_settings.log_level, - noisy_loggers=_NOISY_LOGGERS, -) -# SINGLETON FastAPI app -the_app: FastAPI = create_app(the_settings) +def app_factory() -> FastAPI: + app_settings = ApplicationSettings.create_from_envs() + logging_shutdown_event = setup_logging_shutdown_event( + log_format_local_dev_enabled=app_settings.CLUSTERS_KEEPER_LOG_FORMAT_LOCAL_DEV_ENABLED, + logger_filter_mapping=app_settings.CLUSTERS_KEEPER_LOG_FILTER_MAPPING, + tracing_settings=app_settings.CLUSTERS_KEEPER_TRACING, + log_base_level=app_settings.log_level, + noisy_loggers=_NOISY_LOGGERS, + ) + + _logger.info( + "Application settings: %s", + json_dumps(app_settings, indent=2, sort_keys=True), + ) + app = create_app(settings=app_settings) + app.add_event_handler("shutdown", logging_shutdown_event) + return app From 3c70f809f57aeb5d557ee2bab6d3b0824b4be582 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 10 Jul 2025 12:04:13 +0200 Subject: [PATCH 082/128] datcore-adapter --- .../core/application.py | 2 -- .../simcore_service_datcore_adapter/main.py | 34 ++++++++++++------- 2 files changed, 22 insertions(+), 14 deletions(-) diff --git a/services/datcore-adapter/src/simcore_service_datcore_adapter/core/application.py b/services/datcore-adapter/src/simcore_service_datcore_adapter/core/application.py index 6f56d06f26a..397a686e746 100644 --- a/services/datcore-adapter/src/simcore_service_datcore_adapter/core/application.py +++ b/services/datcore-adapter/src/simcore_service_datcore_adapter/core/application.py @@ -31,8 +31,6 @@ def create_app(settings: ApplicationSettings) -> FastAPI: - _logger.debug("App settings:\n%s", settings.model_dump_json(indent=1)) - app = FastAPI( debug=settings.SC_BOOT_MODE in [BootModeEnum.DEBUG, BootModeEnum.DEVELOPMENT, BootModeEnum.LOCAL], diff --git a/services/datcore-adapter/src/simcore_service_datcore_adapter/main.py b/services/datcore-adapter/src/simcore_service_datcore_adapter/main.py index 88346d657c5..dbe4c2ef1e5 100644 --- a/services/datcore-adapter/src/simcore_service_datcore_adapter/main.py +++ b/services/datcore-adapter/src/simcore_service_datcore_adapter/main.py @@ -1,27 +1,37 @@ """Main application to be deployed in for example uvicorn""" +import logging from typing import Final +from common_library.json_serialization import json_dumps from fastapi import FastAPI -from servicelib.logging_utils import setup_loggers +from servicelib.fastapi.logging_lifespan import setup_logging_shutdown_event from simcore_service_datcore_adapter.core.application import create_app from simcore_service_datcore_adapter.core.settings import ApplicationSettings -NOISY_LOGGERS: Final[tuple[str, ...]] = ( +_logger = logging.getLogger(__name__) + +_NOISY_LOGGERS: Final[tuple[str, ...]] = ( "aiocache", "botocore", "hpack", ) -_the_settings = ApplicationSettings.create_from_envs() -setup_loggers( - log_format_local_dev_enabled=_the_settings.DATCORE_ADAPTER_LOG_FORMAT_LOCAL_DEV_ENABLED, - logger_filter_mapping=_the_settings.DATCORE_ADAPTER_LOG_FILTER_MAPPING, - tracing_settings=_the_settings.DATCORE_ADAPTER_TRACING, - log_base_level=_the_settings.log_level, - noisy_loggers=NOISY_LOGGERS, -) +def app_factory() -> FastAPI: + app_settings = ApplicationSettings.create_from_envs() + logging_shutdown_event = setup_logging_shutdown_event( + log_format_local_dev_enabled=app_settings.DATCORE_ADAPTER_LOG_FORMAT_LOCAL_DEV_ENABLED, + logger_filter_mapping=app_settings.DATCORE_ADAPTER_LOG_FILTER_MAPPING, + tracing_settings=app_settings.DATCORE_ADAPTER_TRACING, + 
log_base_level=app_settings.log_level, + noisy_loggers=_NOISY_LOGGERS, + ) -# SINGLETON FastAPI app -the_app: FastAPI = create_app(_the_settings) + _logger.info( + "Application settings: %s", + json_dumps(app_settings, indent=2, sort_keys=True), + ) + app = create_app(settings=app_settings) + app.add_event_handler("shutdown", logging_shutdown_event) + return app From f610b1aa991b8c6f52dd1e2462f2e71e4236db0f Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 10 Jul 2025 12:05:53 +0200 Subject: [PATCH 083/128] dynamic-scheduler --- .../simcore_service_dynamic_scheduler/main.py | 41 ++++++++++++++----- 1 file changed, 31 insertions(+), 10 deletions(-) diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/main.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/main.py index 60902e59c92..61d748a0237 100644 --- a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/main.py +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/main.py @@ -1,18 +1,39 @@ """Main application to be deployed by uvicorn (or equivalent) server""" +import logging +from typing import Final + +from common_library.json_serialization import json_dumps from fastapi import FastAPI -from servicelib.logging_utils import setup_loggers +from servicelib.fastapi.logging_lifespan import setup_logging_shutdown_event from simcore_service_dynamic_scheduler.core.application import create_app from simcore_service_dynamic_scheduler.core.settings import ApplicationSettings -_the_settings = ApplicationSettings.create_from_envs() -setup_loggers( - log_format_local_dev_enabled=_the_settings.DYNAMIC_SCHEDULER_LOG_FORMAT_LOCAL_DEV_ENABLED, - logger_filter_mapping=_the_settings.DYNAMIC_SCHEDULER_LOG_FILTER_MAPPING, - tracing_settings=_the_settings.DYNAMIC_SCHEDULER_TRACING, - log_base_level=_the_settings.log_level, - noisy_loggers=None, +_logger = logging.getLogger(__name__) + +_NOISY_LOGGERS: Final[tuple[str, ...]] = ( + "aiobotocore", + "aio_pika", + "aiormq", + "botocore", + "werkzeug", ) -# SINGLETON FastAPI app -the_app: FastAPI = create_app(_the_settings) + +def app_factory() -> FastAPI: + app_settings = ApplicationSettings.create_from_envs() + logging_shutdown_event = setup_logging_shutdown_event( + log_format_local_dev_enabled=app_settings.DYNAMIC_SCHEDULER_LOG_FORMAT_LOCAL_DEV_ENABLED, + logger_filter_mapping=app_settings.DYNAMIC_SCHEDULER_LOG_FILTER_MAPPING, + tracing_settings=app_settings.DYNAMIC_SCHEDULER_TRACING, + log_base_level=app_settings.log_level, + noisy_loggers=_NOISY_LOGGERS, + ) + + _logger.info( + "Application settings: %s", + json_dumps(app_settings, indent=2, sort_keys=True), + ) + app = create_app(settings=app_settings) + app.add_event_handler("shutdown", logging_shutdown_event) + return app From 79ac6264b30a41b3db77d7a6be7840d22e8dcc13 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 10 Jul 2025 12:08:12 +0200 Subject: [PATCH 084/128] dynamic-scheduler --- .../core/application.py | 10 ++++++++-- .../simcore_service_dynamic_scheduler/core/events.py | 7 ++++++- .../src/simcore_service_dynamic_scheduler/main.py | 10 +++++----- 3 files changed, 19 insertions(+), 8 deletions(-) diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/core/application.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/core/application.py index 9f59f29859e..a2617cec567 100644 --- 
a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/core/application.py +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/core/application.py @@ -1,4 +1,5 @@ from fastapi import FastAPI +from servicelib.fastapi.lifespan_utils import Lifespan from servicelib.fastapi.monitoring import ( initialize_prometheus_instrumentation, ) @@ -13,7 +14,10 @@ from .settings import ApplicationSettings -def create_app(settings: ApplicationSettings | None = None) -> FastAPI: +def create_app( + settings: ApplicationSettings | None = None, + logging_lifespan: Lifespan | None = None, +) -> FastAPI: app_settings = settings or ApplicationSettings.create_from_envs() app = FastAPI( @@ -25,7 +29,9 @@ def create_app(settings: ApplicationSettings | None = None) -> FastAPI: "/doc" if app_settings.DYNAMIC_SCHEDULER_SWAGGER_API_DOC_ENABLED else None ), redoc_url=None, - lifespan=events.create_app_lifespan(settings=app_settings), + lifespan=events.create_app_lifespan( + settings=app_settings, logging_lifespan=logging_lifespan + ), ) override_fastapi_openapi_method(app) diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/core/events.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/core/events.py index 492834a99e3..1c293e78e71 100644 --- a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/core/events.py +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/core/events.py @@ -6,6 +6,7 @@ create_remote_docker_client_input_state, remote_docker_client_lifespan, ) +from servicelib.fastapi.lifespan_utils import Lifespan from servicelib.fastapi.monitoring import ( create_prometheus_instrumentationmain_input_state, prometheus_instrumentation_lifespan, @@ -51,8 +52,12 @@ async def _settings_lifespan(app: FastAPI) -> AsyncIterator[State]: } -def create_app_lifespan(settings: ApplicationSettings) -> LifespanManager: +def create_app_lifespan( + settings: ApplicationSettings, logging_lifespan: Lifespan | None +) -> LifespanManager: app_lifespan = LifespanManager() + if logging_lifespan: + app_lifespan.add(logging_lifespan) app_lifespan.add(_settings_lifespan) if settings.DYNAMIC_SCHEDULER_TRACING: diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/main.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/main.py index 61d748a0237..0ab505b83f4 100644 --- a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/main.py +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/main.py @@ -5,7 +5,9 @@ from common_library.json_serialization import json_dumps from fastapi import FastAPI -from servicelib.fastapi.logging_lifespan import setup_logging_shutdown_event +from servicelib.fastapi.logging_lifespan import ( + setup_logging_lifespan, +) from simcore_service_dynamic_scheduler.core.application import create_app from simcore_service_dynamic_scheduler.core.settings import ApplicationSettings @@ -22,7 +24,7 @@ def app_factory() -> FastAPI: app_settings = ApplicationSettings.create_from_envs() - logging_shutdown_event = setup_logging_shutdown_event( + logging_lifespan = setup_logging_lifespan( log_format_local_dev_enabled=app_settings.DYNAMIC_SCHEDULER_LOG_FORMAT_LOCAL_DEV_ENABLED, logger_filter_mapping=app_settings.DYNAMIC_SCHEDULER_LOG_FILTER_MAPPING, tracing_settings=app_settings.DYNAMIC_SCHEDULER_TRACING, @@ -34,6 +36,4 @@ def app_factory() -> FastAPI: "Application settings: %s", json_dumps(app_settings, indent=2, sort_keys=True), ) - app = 
create_app(settings=app_settings) - app.add_event_handler("shutdown", logging_shutdown_event) - return app + return create_app(settings=app_settings, logging_lifespan=logging_lifespan) From 0de3c632457c1ee3c397ea791062c2eaa70973d5 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 10 Jul 2025 12:14:12 +0200 Subject: [PATCH 085/128] dynamic-sidecar --- .../core/application.py | 38 ++++++++++--------- .../simcore_service_dynamic_sidecar/main.py | 12 ++++-- 2 files changed, 29 insertions(+), 21 deletions(-) diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/application.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/application.py index f9d294fe022..c87da8aa4c8 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/application.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/application.py @@ -2,9 +2,11 @@ from asyncio import Lock from typing import Any, ClassVar +from common_library.json_serialization import json_dumps from fastapi import FastAPI from servicelib.async_utils import cancel_sequential_workers from servicelib.fastapi import long_running_tasks +from servicelib.fastapi.logging_lifespan import setup_logging_shutdown_event from servicelib.fastapi.openapi import ( get_common_oas_options, override_fastapi_openapi_method, @@ -13,7 +15,6 @@ initialize_fastapi_app_tracing, setup_tracing, ) -from servicelib.logging_utils import setup_loggers from simcore_sdk.node_ports_common.exceptions import NodeNotFound from .._meta import API_VERSION, API_VTAG, PROJECT_NAME, SUMMARY, __version__ @@ -114,34 +115,36 @@ def compose_spec(self) -> str | None: return self._shared_store.compose_spec -def setup_logger(settings: ApplicationSettings): - setup_loggers( - log_format_local_dev_enabled=settings.DY_SIDECAR_LOG_FORMAT_LOCAL_DEV_ENABLED, - logger_filter_mapping=settings.DY_SIDECAR_LOG_FILTER_MAPPING, - tracing_settings=settings.DYNAMIC_SIDECAR_TRACING, - log_base_level=settings.log_level, +def create_base_app() -> FastAPI: + # settings + app_settings = ApplicationSettings.create_from_envs() + logging_shutdown_event = setup_logging_shutdown_event( + log_format_local_dev_enabled=app_settings.DY_SIDECAR_LOG_FORMAT_LOCAL_DEV_ENABLED, + logger_filter_mapping=app_settings.DY_SIDECAR_LOG_FILTER_MAPPING, + tracing_settings=app_settings.DYNAMIC_SIDECAR_TRACING, + log_base_level=app_settings.log_level, noisy_loggers=_NOISY_LOGGERS, ) - -def create_base_app() -> FastAPI: - # settings - settings = ApplicationSettings.create_from_envs() - setup_logger(settings) - logger.debug(settings.model_dump_json(indent=2)) + logger.info( + "Application settings: %s", + json_dumps(app_settings, indent=2, sort_keys=True), + ) # minimal - assert settings.SC_BOOT_MODE # nosec + assert app_settings.SC_BOOT_MODE # nosec app = FastAPI( - debug=settings.SC_BOOT_MODE.is_devel_mode(), + debug=app_settings.SC_BOOT_MODE.is_devel_mode(), title=PROJECT_NAME, description=SUMMARY, version=API_VERSION, openapi_url=f"/api/{API_VTAG}/openapi.json", - **get_common_oas_options(is_devel_mode=settings.SC_BOOT_MODE.is_devel_mode()), + **get_common_oas_options( + is_devel_mode=app_settings.SC_BOOT_MODE.is_devel_mode() + ), ) override_fastapi_openapi_method(app) - app.state.settings = settings + app.state.settings = app_settings long_running_tasks.server.setup(app) @@ -149,6 +152,7 @@ def create_base_app() -> FastAPI: setup_reserved_space(app) + app.add_event_handler("shutdown", logging_shutdown_event) 
return app diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/main.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/main.py index 52c91f22837..b710b504785 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/main.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/main.py @@ -1,8 +1,12 @@ -"""Main application to be deployed in for example uvicorn. -""" +"""Main application to be deployed in for example uvicorn.""" from fastapi import FastAPI from simcore_service_dynamic_sidecar.core.application import create_app -# SINGLETON FastAPI app -the_app: FastAPI = create_app() + +def app_factory() -> FastAPI: + """Factory function to create the FastAPI app instance. + + This is used by uvicorn or other ASGI servers to run the application. + """ + return create_app() From 5fa9a1ef4bfa785adaada7edbb480b2c854f29e8 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 10 Jul 2025 12:15:41 +0200 Subject: [PATCH 086/128] relative imports --- .../director-v2/src/simcore_service_director_v2/cli/_core.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/director-v2/src/simcore_service_director_v2/cli/_core.py b/services/director-v2/src/simcore_service_director_v2/cli/_core.py index bc7cc095898..f857949b274 100644 --- a/services/director-v2/src/simcore_service_director_v2/cli/_core.py +++ b/services/director-v2/src/simcore_service_director_v2/cli/_core.py @@ -16,7 +16,6 @@ from rich.live import Live from rich.table import Table from servicelib.services_utils import get_service_from_key -from simcore_service_director_v2.modules.catalog import CatalogClient from tenacity.asyncio import AsyncRetrying from tenacity.stop import stop_after_attempt from tenacity.wait import wait_random_exponential @@ -25,6 +24,7 @@ from ..core.settings import AppSettings from ..models.dynamic_services_scheduler import DynamicSidecarNamesHelper from ..modules import db, director_v0, dynamic_sidecar +from ..modules.catalog import CatalogClient from ..modules.db.repositories.projects import ProjectsRepository from ..modules.dynamic_sidecar import api_client from ..modules.projects_networks import requires_dynamic_sidecar From a1ed743fe15fee93db9ab85e06f272295c5b5b37 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 10 Jul 2025 12:18:06 +0200 Subject: [PATCH 087/128] efs --- .../core/application.py | 2 - .../src/simcore_service_efs_guardian/main.py | 41 ++++++++++++++----- 2 files changed, 31 insertions(+), 12 deletions(-) diff --git a/services/efs-guardian/src/simcore_service_efs_guardian/core/application.py b/services/efs-guardian/src/simcore_service_efs_guardian/core/application.py index 7c68ba3f0e4..d44dea34414 100644 --- a/services/efs-guardian/src/simcore_service_efs_guardian/core/application.py +++ b/services/efs-guardian/src/simcore_service_efs_guardian/core/application.py @@ -31,8 +31,6 @@ def create_app(settings: ApplicationSettings | None = None) -> FastAPI: app_settings = settings or ApplicationSettings.create_from_envs() - logger.info("app settings: %s", app_settings.model_dump_json(indent=1)) - app = FastAPI( debug=app_settings.EFS_GUARDIAN_DEBUG, title=APP_NAME, diff --git a/services/efs-guardian/src/simcore_service_efs_guardian/main.py b/services/efs-guardian/src/simcore_service_efs_guardian/main.py index e6aafbdf9a2..6143f4a26a1 100644 --- a/services/efs-guardian/src/simcore_service_efs_guardian/main.py +++ 
b/services/efs-guardian/src/simcore_service_efs_guardian/main.py @@ -1,18 +1,39 @@ """Main application to be deployed by uvicorn (or equivalent) server""" +import logging +from typing import Final + +from common_library.json_serialization import json_dumps from fastapi import FastAPI -from servicelib.logging_utils import setup_loggers +from servicelib.fastapi.logging_lifespan import setup_logging_shutdown_event from simcore_service_efs_guardian.core.application import create_app from simcore_service_efs_guardian.core.settings import ApplicationSettings -the_settings = ApplicationSettings.create_from_envs() -setup_loggers( - log_format_local_dev_enabled=the_settings.EFS_GUARDIAN_LOG_FORMAT_LOCAL_DEV_ENABLED, - logger_filter_mapping=the_settings.EFS_GUARDIAN_LOG_FILTER_MAPPING, - tracing_settings=the_settings.EFS_GUARDIAN_TRACING, - log_base_level=the_settings.log_level, - noisy_loggers=None, +_logger = logging.getLogger(__name__) + +_NOISY_LOGGERS: Final[tuple[str, ...]] = ( + "aiobotocore", + "aio_pika", + "aiormq", + "botocore", + "werkzeug", ) -# SINGLETON FastAPI app -the_app: FastAPI = create_app(the_settings) + +def app_factory() -> FastAPI: + app_settings = ApplicationSettings.create_from_envs() + logging_shutdown_event = setup_logging_shutdown_event( + log_format_local_dev_enabled=app_settings.EFS_GUARDIAN_LOG_FORMAT_LOCAL_DEV_ENABLED, + logger_filter_mapping=app_settings.EFS_GUARDIAN_LOG_FILTER_MAPPING, + tracing_settings=app_settings.EFS_GUARDIAN_TRACING, + log_base_level=app_settings.log_level, + noisy_loggers=_NOISY_LOGGERS, + ) + + _logger.info( + "Application settings: %s", + json_dumps(app_settings, indent=2, sort_keys=True), + ) + app = create_app(settings=app_settings) + app.add_event_handler("shutdown", logging_shutdown_event) + return app From 7f27fca6a526f227112d7061b3c9e6e2fcb3eb8b Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 10 Jul 2025 12:20:19 +0200 Subject: [PATCH 088/128] invitations --- .../src/simcore_service_invitations/main.py | 37 ++++++++++++++----- 1 file changed, 27 insertions(+), 10 deletions(-) diff --git a/services/invitations/src/simcore_service_invitations/main.py b/services/invitations/src/simcore_service_invitations/main.py index c17e5a406af..03bb038822c 100644 --- a/services/invitations/src/simcore_service_invitations/main.py +++ b/services/invitations/src/simcore_service_invitations/main.py @@ -1,19 +1,36 @@ """Main application to be deployed by uvicorn (or equivalent) server""" +import logging +from typing import Final + +from common_library.json_serialization import json_dumps from fastapi import FastAPI -from servicelib.logging_utils import setup_loggers +from servicelib.fastapi.logging_lifespan import setup_logging_shutdown_event from simcore_service_invitations.core.application import create_app from simcore_service_invitations.core.settings import ApplicationSettings -the_settings = ApplicationSettings.create_from_envs() +_logger = logging.getLogger(__name__) -setup_loggers( - log_format_local_dev_enabled=the_settings.INVITATIONS_LOG_FORMAT_LOCAL_DEV_ENABLED, - logger_filter_mapping=the_settings.INVITATIONS_LOG_FILTER_MAPPING, - tracing_settings=the_settings.INVITATIONS_TRACING, - log_base_level=the_settings.log_level, - noisy_loggers=None, +_NOISY_LOGGERS: Final[tuple[str, ...]] = ( + "aio_pika", + "aiormq", ) -# SINGLETON FastAPI app -the_app: FastAPI = create_app(the_settings) + +def app_factory() -> FastAPI: + app_settings = ApplicationSettings.create_from_envs() + 
logging_shutdown_event = setup_logging_shutdown_event( + log_format_local_dev_enabled=app_settings.INVITATIONS_LOG_FORMAT_LOCAL_DEV_ENABLED, + logger_filter_mapping=app_settings.INVITATIONS_LOG_FILTER_MAPPING, + tracing_settings=app_settings.INVITATIONS_TRACING, + log_base_level=app_settings.log_level, + noisy_loggers=_NOISY_LOGGERS, + ) + + _logger.info( + "Application settings: %s", + json_dumps(app_settings, indent=2, sort_keys=True), + ) + app = create_app(settings=app_settings) + app.add_event_handler("shutdown", logging_shutdown_event) + return app From 9775a8fc7103ef9a09f07ada582e3c3bd8631ba5 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 10 Jul 2025 12:25:56 +0200 Subject: [PATCH 089/128] notifications --- .../core/application.py | 24 ++++---------- .../core/events.py | 7 +++- .../src/simcore_service_notifications/main.py | 33 ++++++++++++++++++- 3 files changed, 45 insertions(+), 19 deletions(-) diff --git a/services/notifications/src/simcore_service_notifications/core/application.py b/services/notifications/src/simcore_service_notifications/core/application.py index f83ac8f528a..63517b52d5b 100644 --- a/services/notifications/src/simcore_service_notifications/core/application.py +++ b/services/notifications/src/simcore_service_notifications/core/application.py @@ -1,6 +1,7 @@ import logging from fastapi import FastAPI +from servicelib.fastapi.lifespan_utils import Lifespan from servicelib.fastapi.monitoring import ( initialize_prometheus_instrumentation, ) @@ -12,7 +13,6 @@ initialize_fastapi_app_tracing, setup_tracing, ) -from servicelib.logging_utils import setup_loggers from .._meta import API_VTAG, APP_NAME, SUMMARY, VERSION from ..api.rest.routing import initialize_rest_api @@ -22,21 +22,11 @@ _logger = logging.getLogger(__name__) -def _initialise_logger(settings: ApplicationSettings): - setup_loggers( - log_format_local_dev_enabled=settings.NOTIFICATIONS_VOLUMES_LOG_FORMAT_LOCAL_DEV_ENABLED, - logger_filter_mapping=settings.NOTIFICATIONS_VOLUMES_LOG_FILTER_MAPPING, - tracing_settings=settings.NOTIFICATIONS_TRACING, - log_base_level=settings.log_level, - noisy_loggers=None, - ) - - -def create_app() -> FastAPI: - settings = ApplicationSettings.create_from_envs() - _logger.debug(settings.model_dump_json(indent=2)) - - _initialise_logger(settings) +def create_app( + settings: ApplicationSettings | None = None, + logging_lifespan: Lifespan | None = None, +) -> FastAPI: + settings = settings or ApplicationSettings.create_from_envs() assert settings.SC_BOOT_MODE # nosec app = FastAPI( @@ -45,7 +35,7 @@ def create_app() -> FastAPI: description=SUMMARY, version=f"{VERSION}", openapi_url=f"/api/{API_VTAG}/openapi.json", - lifespan=events.create_app_lifespan(), + lifespan=events.create_app_lifespan(logging_lifespan=logging_lifespan), **get_common_oas_options(is_devel_mode=settings.SC_BOOT_MODE.is_devel_mode()), ) override_fastapi_openapi_method(app) diff --git a/services/notifications/src/simcore_service_notifications/core/events.py b/services/notifications/src/simcore_service_notifications/core/events.py index 879582575c0..2660e2f426c 100644 --- a/services/notifications/src/simcore_service_notifications/core/events.py +++ b/services/notifications/src/simcore_service_notifications/core/events.py @@ -2,6 +2,7 @@ from fastapi import FastAPI from fastapi_lifespan_manager import LifespanManager, State +from servicelib.fastapi.lifespan_utils import Lifespan from servicelib.fastapi.monitoring import ( 
create_prometheus_instrumentationmain_input_state, prometheus_instrumentation_lifespan, @@ -35,9 +36,13 @@ async def _settings_lifespan(app: FastAPI) -> AsyncIterator[State]: } -def create_app_lifespan(): +def create_app_lifespan( + logging_lifespan: Lifespan | None = None, +) -> LifespanManager[FastAPI]: # WARNING: order matters app_lifespan = LifespanManager() + if logging_lifespan: + app_lifespan.add(logging_lifespan) app_lifespan.add(_settings_lifespan) # - postgres diff --git a/services/notifications/src/simcore_service_notifications/main.py b/services/notifications/src/simcore_service_notifications/main.py index 8b2e0ed3196..b5905966240 100644 --- a/services/notifications/src/simcore_service_notifications/main.py +++ b/services/notifications/src/simcore_service_notifications/main.py @@ -1,3 +1,34 @@ +import logging +from typing import Final + +from common_library.json_serialization import json_dumps +from fastapi import FastAPI +from servicelib.fastapi.logging_lifespan import setup_logging_lifespan from simcore_service_notifications.core.application import create_app +from simcore_service_notifications.core.settings import ( + ApplicationSettings, +) + +_logger = logging.getLogger(__name__) + +_NOISY_LOGGERS: Final[tuple[str, ...]] = ( + "aio_pika", + "aiormq", +) + + +def app_factory() -> FastAPI: + app_settings = ApplicationSettings.create_from_envs() + logging_lifespan = setup_logging_lifespan( + log_format_local_dev_enabled=app_settings.NOTIFICATIONS_VOLUMES_LOG_FORMAT_LOCAL_DEV_ENABLED, + logger_filter_mapping=app_settings.NOTIFICATIONS_VOLUMES_LOG_FILTER_MAPPING, + tracing_settings=app_settings.NOTIFICATIONS_TRACING, + log_base_level=app_settings.log_level, + noisy_loggers=_NOISY_LOGGERS, + ) -the_app = create_app() + _logger.info( + "Application settings: %s", + json_dumps(app_settings, indent=2, sort_keys=True), + ) + return create_app(settings=app_settings, logging_lifespan=logging_lifespan) From 1179058ce11cc18e849cf6099819e4b49612acc1 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 10 Jul 2025 12:27:21 +0200 Subject: [PATCH 090/128] payments --- .../src/simcore_service_payments/main.py | 40 ++++++++++++++----- 1 file changed, 30 insertions(+), 10 deletions(-) diff --git a/services/payments/src/simcore_service_payments/main.py b/services/payments/src/simcore_service_payments/main.py index f285308c401..262259ef04f 100644 --- a/services/payments/src/simcore_service_payments/main.py +++ b/services/payments/src/simcore_service_payments/main.py @@ -1,19 +1,39 @@ """Main application to be deployed by uvicorn (or equivalent) server""" +import logging +from typing import Final + +from common_library.json_serialization import json_dumps from fastapi import FastAPI -from servicelib.logging_utils import setup_loggers +from servicelib.fastapi.logging_lifespan import setup_logging_shutdown_event from simcore_service_payments.core.application import create_app from simcore_service_payments.core.settings import ApplicationSettings -_the_settings = ApplicationSettings.create_from_envs() +_logger = logging.getLogger(__name__) -setup_loggers( - log_format_local_dev_enabled=_the_settings.PAYMENTS_LOG_FORMAT_LOCAL_DEV_ENABLED, - logger_filter_mapping=_the_settings.PAYMENTS_LOG_FILTER_MAPPING, - tracing_settings=_the_settings.PAYMENTS_TRACING, - log_base_level=_the_settings.log_level, - noisy_loggers=None, +_NOISY_LOGGERS: Final[tuple[str, ...]] = ( + "aiobotocore", + "aio_pika", + "aiormq", + "botocore", + "werkzeug", ) -# SINGLETON FastAPI app 
-the_app: FastAPI = create_app(_the_settings) + +def app_factory() -> FastAPI: + app_settings = ApplicationSettings.create_from_envs() + logging_shutdown_event = setup_logging_shutdown_event( + log_format_local_dev_enabled=app_settings.PAYMENTS_LOG_FORMAT_LOCAL_DEV_ENABLED, + logger_filter_mapping=app_settings.PAYMENTS_LOG_FILTER_MAPPING, + tracing_settings=app_settings.PAYMENTS_TRACING, + log_base_level=app_settings.log_level, + noisy_loggers=_NOISY_LOGGERS, + ) + + _logger.info( + "Application settings: %s", + json_dumps(app_settings, indent=2, sort_keys=True), + ) + app = create_app(settings=app_settings) + app.add_event_handler("shutdown", logging_shutdown_event) + return app From 4ef2c72b3bc57fbb024ea290079eb09694d89685 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 10 Jul 2025 12:28:45 +0200 Subject: [PATCH 091/128] rut --- .../core/application.py | 2 - .../main.py | 40 ++++++++++++++----- 2 files changed, 30 insertions(+), 12 deletions(-) diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/core/application.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/core/application.py index 2aacbfb4990..fb3bdf2d1e8 100644 --- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/core/application.py +++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/core/application.py @@ -35,8 +35,6 @@ def create_app(settings: ApplicationSettings) -> FastAPI: - _logger.info("app settings: %s", settings.model_dump_json(indent=1)) - app = FastAPI( debug=settings.RESOURCE_USAGE_TRACKER_DEBUG, title=f"{PROJECT_NAME} web API", diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/main.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/main.py index 6f39de0bb98..3e5a7380c6e 100644 --- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/main.py +++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/main.py @@ -1,19 +1,39 @@ """Main application to be deployed by uvicorn (or equivalent) server""" +import logging +from typing import Final + +from common_library.json_serialization import json_dumps from fastapi import FastAPI -from servicelib.logging_utils import setup_loggers +from servicelib.fastapi.logging_lifespan import setup_logging_shutdown_event from simcore_service_resource_usage_tracker.core.application import create_app from simcore_service_resource_usage_tracker.core.settings import ApplicationSettings -the_settings = ApplicationSettings.create_from_envs() +_logger = logging.getLogger(__name__) -setup_loggers( - log_format_local_dev_enabled=the_settings.RESOURCE_USAGE_TRACKER_LOG_FORMAT_LOCAL_DEV_ENABLED, - logger_filter_mapping=the_settings.RESOURCE_USAGE_TRACKER_LOG_FILTER_MAPPING, - tracing_settings=the_settings.RESOURCE_USAGE_TRACKER_TRACING, - log_base_level=the_settings.log_level, - noisy_loggers=None, +_NOISY_LOGGERS: Final[tuple[str, ...]] = ( + "aiobotocore", + "aio_pika", + "aiormq", + "botocore", + "werkzeug", ) -# SINGLETON FastAPI app -the_app: FastAPI = create_app(the_settings) + +def app_factory() -> FastAPI: + app_settings = ApplicationSettings.create_from_envs() + logging_shutdown_event = setup_logging_shutdown_event( + log_format_local_dev_enabled=app_settings.RESOURCE_USAGE_TRACKER_LOG_FORMAT_LOCAL_DEV_ENABLED, + logger_filter_mapping=app_settings.RESOURCE_USAGE_TRACKER_LOG_FILTER_MAPPING, + 
tracing_settings=app_settings.RESOURCE_USAGE_TRACKER_TRACING, + log_base_level=app_settings.log_level, + noisy_loggers=_NOISY_LOGGERS, + ) + + _logger.info( + "Application settings: %s", + json_dumps(app_settings, indent=2, sort_keys=True), + ) + app = create_app(settings=app_settings) + app.add_event_handler("shutdown", logging_shutdown_event) + return app From 555a87c45590b339ae8478f15b6e36af984852e8 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 10 Jul 2025 12:33:35 +0200 Subject: [PATCH 092/128] fix mypy --- .../src/simcore_service_dynamic_sidecar/core/application.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/application.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/application.py index c87da8aa4c8..4b58fd75018 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/application.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/application.py @@ -156,7 +156,7 @@ def create_base_app() -> FastAPI: return app -def create_app(): +def create_app() -> FastAPI: """ Creates the application from using the env vars as a context Also stores inside the state all instances of classes From b17ccdb03d2c4b9c40e2f6d22445572ada97b6ce Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 10 Jul 2025 12:40:24 +0200 Subject: [PATCH 093/128] fix tests --- services/datcore-adapter/tests/unit/conftest.py | 4 ++-- .../resource-usage-tracker/tests/unit/test_core_settings.py | 4 ++-- services/resource-usage-tracker/tests/unit/test_main.py | 4 ++-- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/services/datcore-adapter/tests/unit/conftest.py b/services/datcore-adapter/tests/unit/conftest.py index 65120234b3e..517528d67e6 100644 --- a/services/datcore-adapter/tests/unit/conftest.py +++ b/services/datcore-adapter/tests/unit/conftest.py @@ -70,9 +70,9 @@ def pennsieve_mock_dataset_packages(mocks_dir: Path) -> dict[str, Any]: def minimal_app( app_environment: None, ) -> FastAPI: - from simcore_service_datcore_adapter.main import the_app + from simcore_service_datcore_adapter.main import app_factory - return the_app + return app_factory() @pytest.fixture() diff --git a/services/resource-usage-tracker/tests/unit/test_core_settings.py b/services/resource-usage-tracker/tests/unit/test_core_settings.py index f99239cc06b..4569bcae7fa 100644 --- a/services/resource-usage-tracker/tests/unit/test_core_settings.py +++ b/services/resource-usage-tracker/tests/unit/test_core_settings.py @@ -24,7 +24,7 @@ def test_valid_cli_application_settings(app_environment: EnvVarsDict): assert settings.RESOURCE_USAGE_TRACKER_POSTGRES assert settings.RESOURCE_USAGE_TRACKER_REDIS assert settings.RESOURCE_USAGE_TRACKER_RABBITMQ - assert settings.LOG_LEVEL + assert settings.RESOURCE_USAGE_TRACKER_LOGLEVEL def test_valid_web_application_settings(app_environment: EnvVarsDict): @@ -34,4 +34,4 @@ def test_valid_web_application_settings(app_environment: EnvVarsDict): assert settings.RESOURCE_USAGE_TRACKER_POSTGRES assert settings.RESOURCE_USAGE_TRACKER_REDIS assert settings.RESOURCE_USAGE_TRACKER_RABBITMQ - assert settings.LOG_LEVEL + assert settings.RESOURCE_USAGE_TRACKER_LOGLEVEL diff --git a/services/resource-usage-tracker/tests/unit/test_main.py b/services/resource-usage-tracker/tests/unit/test_main.py index 6d9addd8ee2..7fe4c95cbc2 100644 --- 
a/services/resource-usage-tracker/tests/unit/test_main.py +++ b/services/resource-usage-tracker/tests/unit/test_main.py @@ -7,6 +7,6 @@ def test_main_app(app_environment: EnvVarsDict): - from simcore_service_resource_usage_tracker.main import the_app, the_settings + from simcore_service_resource_usage_tracker.main import app_factory - assert the_app.state.settings == the_settings + app_factory() From a82f48501bbe2def403aa079280b62b2149ad1e9 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 10 Jul 2025 12:47:35 +0200 Subject: [PATCH 094/128] use app factory --- services/api-server/Makefile | 2 +- services/api-server/src/simcore_service_api_server/cli.py | 2 +- services/api-server/src/simcore_service_api_server/main.py | 3 --- services/api-server/tests/unit/test_cli.py | 4 ++-- services/autoscaling/src/simcore_service_autoscaling/cli.py | 2 +- services/autoscaling/tests/unit/test_main.py | 4 ++-- services/catalog/Makefile | 2 +- services/catalog/src/simcore_service_catalog/cli.py | 2 +- .../src/simcore_service_clusters_keeper/cli.py | 2 +- services/clusters-keeper/tests/unit/test_main.py | 4 ++-- .../src/simcore_service_datcore_adapter/cli.py | 2 +- services/director-v2/Makefile | 4 ++-- services/director/src/simcore_service_director/cli.py | 2 +- services/dynamic-scheduler/Makefile | 2 +- .../src/simcore_service_dynamic_sidecar/cli.py | 2 +- services/efs-guardian/src/simcore_service_efs_guardian/cli.py | 2 +- services/efs-guardian/tests/unit/test_main.py | 4 ++-- services/invitations/Makefile | 2 +- .../invitations/src/simcore_service_invitations/web_server.py | 3 ++- services/notifications/Makefile | 2 +- services/payments/Makefile | 2 +- services/payments/tests/unit/test_cli.py | 4 ++-- services/resource-usage-tracker/Makefile | 2 +- services/storage/src/simcore_service_storage/cli.py | 2 +- 24 files changed, 30 insertions(+), 32 deletions(-) diff --git a/services/api-server/Makefile b/services/api-server/Makefile index e923de11db8..4db8527326b 100644 --- a/services/api-server/Makefile +++ b/services/api-server/Makefile @@ -30,7 +30,7 @@ define _create_and_validate_openapi # generating openapi specs file under $< (NOTE: Skips DEV FEATURES since this OAS is the 'offically released'!) @source .env; \ export API_SERVER_DEV_FEATURES_ENABLED=$1; \ - python3 -c "import json; from $(APP_PACKAGE_NAME).main import *; print( json.dumps(the_app.openapi(), indent=2) )" > $@ + python3 -c "import json; from $(APP_PACKAGE_NAME).main import *; print( json.dumps(app_factory().openapi(), indent=2) )" > $@ # validates OAS file: $@ docker run --rm \ diff --git a/services/api-server/src/simcore_service_api_server/cli.py b/services/api-server/src/simcore_service_api_server/cli.py index 11427f61d4e..e4ead859f31 100644 --- a/services/api-server/src/simcore_service_api_server/cli.py +++ b/services/api-server/src/simcore_service_api_server/cli.py @@ -18,6 +18,6 @@ def run(): """Runs application""" typer.secho("Sorry, this entrypoint is intentionally disabled. 
Use instead") typer.secho( - "$ uvicorn simcore_service_api_server.main:the_app", + "$ uvicorn --factory simcore_service_api_server.main:app_factory", fg=typer.colors.BLUE, ) diff --git a/services/api-server/src/simcore_service_api_server/main.py b/services/api-server/src/simcore_service_api_server/main.py index 51be335be9b..f96eedca64a 100644 --- a/services/api-server/src/simcore_service_api_server/main.py +++ b/services/api-server/src/simcore_service_api_server/main.py @@ -9,9 +9,6 @@ from simcore_service_api_server.core.application import init_app from simcore_service_api_server.core.settings import ApplicationSettings -# SINGLETON FastAPI app -the_app: FastAPI = init_app() - _logger = logging.getLogger(__name__) _NOISY_LOGGERS: Final[tuple[str, ...]] = ( diff --git a/services/api-server/tests/unit/test_cli.py b/services/api-server/tests/unit/test_cli.py index febeca14b1f..f96be2ffb2f 100644 --- a/services/api-server/tests/unit/test_cli.py +++ b/services/api-server/tests/unit/test_cli.py @@ -34,7 +34,7 @@ def test_cli_list_settings(cli_runner: CliRunner, app_environment: EnvVarsDict): def test_main(app_environment: EnvVarsDict): - from simcore_service_api_server.main import the_app + from simcore_service_api_server.main import app_factory - assert the_app + the_app = app_factory() assert isinstance(the_app, FastAPI) diff --git a/services/autoscaling/src/simcore_service_autoscaling/cli.py b/services/autoscaling/src/simcore_service_autoscaling/cli.py index c02e20e348b..c280b67368a 100644 --- a/services/autoscaling/src/simcore_service_autoscaling/cli.py +++ b/services/autoscaling/src/simcore_service_autoscaling/cli.py @@ -19,6 +19,6 @@ def run(): """Runs application""" typer.secho("Sorry, this entrypoint is intentionally disabled. Use instead") typer.secho( - "$ uvicorn simcore_service_autoscaling.main:the_app", + "$ uvicorn --factory simcore_service_autoscaling.main:app_factory", fg=typer.colors.BLUE, ) diff --git a/services/autoscaling/tests/unit/test_main.py b/services/autoscaling/tests/unit/test_main.py index 525748023ec..b1e0b41985a 100644 --- a/services/autoscaling/tests/unit/test_main.py +++ b/services/autoscaling/tests/unit/test_main.py @@ -7,6 +7,6 @@ def test_main_app(app_environment: EnvVarsDict): - from simcore_service_autoscaling.main import the_app, the_settings + from simcore_service_autoscaling.main import app_factory - assert the_app.state.settings == the_settings + app_factory() diff --git a/services/catalog/Makefile b/services/catalog/Makefile index 31b3a327698..6fa07010255 100644 --- a/services/catalog/Makefile +++ b/services/catalog/Makefile @@ -17,7 +17,7 @@ openapi.json: .env-ignore @set -o allexport; \ source $<; \ set +o allexport; \ - python3 -c "import json; from $(APP_PACKAGE_NAME).main import *; print( json.dumps(the_app.openapi(), indent=2) )" > $@ + python3 -c "import json; from $(APP_PACKAGE_NAME).main import *; print( json.dumps(app_factory().openapi(), indent=2) )" > $@ # validates OAS file: $@ $(call validate_openapi_specs,$@) diff --git a/services/catalog/src/simcore_service_catalog/cli.py b/services/catalog/src/simcore_service_catalog/cli.py index 0d4fbf5107b..9fd453f1f55 100644 --- a/services/catalog/src/simcore_service_catalog/cli.py +++ b/services/catalog/src/simcore_service_catalog/cli.py @@ -30,7 +30,7 @@ def run(): """Runs application""" typer.secho("Sorry, this entrypoint is intentionally disabled. 
Use instead") typer.secho( - "$ uvicorn simcore_service_catalog.main:the_app", + "$ uvicorn --factory simcore_service_catalog.main:app_factory", fg=typer.colors.BLUE, ) diff --git a/services/clusters-keeper/src/simcore_service_clusters_keeper/cli.py b/services/clusters-keeper/src/simcore_service_clusters_keeper/cli.py index b65355463c4..66a71546a00 100644 --- a/services/clusters-keeper/src/simcore_service_clusters_keeper/cli.py +++ b/services/clusters-keeper/src/simcore_service_clusters_keeper/cli.py @@ -19,6 +19,6 @@ def run(): """Runs application""" typer.secho("Sorry, this entrypoint is intentionally disabled. Use instead") typer.secho( - "$ uvicorn simcore_service_clusters_keeper.main:the_app", + "$ uvicorn --factory simcore_service_clusters_keeper.main:app_factory", fg=typer.colors.BLUE, ) diff --git a/services/clusters-keeper/tests/unit/test_main.py b/services/clusters-keeper/tests/unit/test_main.py index 96d7fb8507d..13cd355678c 100644 --- a/services/clusters-keeper/tests/unit/test_main.py +++ b/services/clusters-keeper/tests/unit/test_main.py @@ -7,6 +7,6 @@ def test_main_app(app_environment: EnvVarsDict): - from simcore_service_clusters_keeper.main import the_app, the_settings + from simcore_service_clusters_keeper.main import app_factory - assert the_app.state.settings == the_settings + app_factory() diff --git a/services/datcore-adapter/src/simcore_service_datcore_adapter/cli.py b/services/datcore-adapter/src/simcore_service_datcore_adapter/cli.py index 60839168e97..79006573380 100644 --- a/services/datcore-adapter/src/simcore_service_datcore_adapter/cli.py +++ b/services/datcore-adapter/src/simcore_service_datcore_adapter/cli.py @@ -20,6 +20,6 @@ def run() -> None: """Runs application""" typer.secho("Sorry, this entrypoint is intentionally disabled. Use instead") typer.secho( - f"$ uvicorn {PROJECT_NAME}.main:the_app", + f"$ uvicorn --factory {PROJECT_NAME}.main:app_factory", fg=typer.colors.BLUE, ) diff --git a/services/director-v2/Makefile b/services/director-v2/Makefile index 030084bcb4e..0332ca3a657 100644 --- a/services/director-v2/Makefile +++ b/services/director-v2/Makefile @@ -18,7 +18,7 @@ openapi.json: .env @set -o allexport; \ source .env; \ set +o allexport; \ - python3 -c "import json; from $(APP_PACKAGE_NAME).main import *; print( json.dumps(the_app.openapi(), indent=2) )" > $@ + python3 -c "import json; from $(APP_PACKAGE_NAME).main import *; print( json.dumps(app_factory().openapi(), indent=2) )" > $@ DOCKER_API_VERSION ?= 1.41 @@ -65,7 +65,7 @@ down down-extra: ## stops extra stack run-devel: .env up-extra ## starts app with extra stack # start app (within $<) in devel mode - uvicorn $(APP_PACKAGE_NAME).__main__:the_app \ + uvicorn --factory $(APP_PACKAGE_NAME).__main__:app_factory \ --reload --reload-dir $(SRC_DIR) \ --port=8000 --host=0.0.0.0 diff --git a/services/director/src/simcore_service_director/cli.py b/services/director/src/simcore_service_director/cli.py index f2e16f6b97e..1a797d76d0b 100644 --- a/services/director/src/simcore_service_director/cli.py +++ b/services/director/src/simcore_service_director/cli.py @@ -21,6 +21,6 @@ def run(): """Runs application""" typer.secho("Sorry, this entrypoint is intentionally disabled. 
Use instead") typer.secho( - "$ uvicorn simcore_service_director.main:the_app", + "$ uvicorn --factory simcore_service_director.main:app_factory", fg=typer.colors.BLUE, ) diff --git a/services/dynamic-scheduler/Makefile b/services/dynamic-scheduler/Makefile index f46337a7667..4d98e392aa3 100644 --- a/services/dynamic-scheduler/Makefile +++ b/services/dynamic-scheduler/Makefile @@ -15,4 +15,4 @@ openapi.json: .env-ignore ## produces openapi.json @set -o allexport; \ source $<; \ set +o allexport; \ - python3 -c "import json; from $(APP_PACKAGE_NAME).main import *; print( json.dumps(the_app.openapi(), indent=2) )" > $@ + python3 -c "import json; from $(APP_PACKAGE_NAME).main import *; print( json.dumps(app_factory().openapi(), indent=2) )" > $@ diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/cli.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/cli.py index def22092390..8b7b9086ef1 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/cli.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/cli.py @@ -95,5 +95,5 @@ async def _async_outputs_push() -> None: # # NOTE: We intentionally did NOT create a command to run the application -# Use instead $ uvicorn simcore_service_dynamic_sidecar.main:the_app +# Use instead $ uvicorn --factory simcore_service_dynamic_sidecar.main:app_factory # diff --git a/services/efs-guardian/src/simcore_service_efs_guardian/cli.py b/services/efs-guardian/src/simcore_service_efs_guardian/cli.py index 77d18015ec0..3b64a7663ef 100644 --- a/services/efs-guardian/src/simcore_service_efs_guardian/cli.py +++ b/services/efs-guardian/src/simcore_service_efs_guardian/cli.py @@ -19,6 +19,6 @@ def run(): """Runs application""" typer.secho("Sorry, this entrypoint is intentionally disabled. 
Use instead") typer.secho( - "$ uvicorn simcore_service_efs_guardian.main:the_app", + "$ uvicorn --factory simcore_service_efs_guardian.main:app_factory", fg=typer.colors.BLUE, ) diff --git a/services/efs-guardian/tests/unit/test_main.py b/services/efs-guardian/tests/unit/test_main.py index bbdb41096c8..26b0fa6af39 100644 --- a/services/efs-guardian/tests/unit/test_main.py +++ b/services/efs-guardian/tests/unit/test_main.py @@ -7,6 +7,6 @@ def test_main_app(app_environment: EnvVarsDict): - from simcore_service_efs_guardian.main import the_app, the_settings + from simcore_service_efs_guardian.main import app_factory - assert the_app.state.settings == the_settings + app_factory() diff --git a/services/invitations/Makefile b/services/invitations/Makefile index f7a9b88fe72..7550b36a91a 100644 --- a/services/invitations/Makefile +++ b/services/invitations/Makefile @@ -17,7 +17,7 @@ openapi.json: .env-ignore ## produces openapi.json @set -o allexport; \ source $<; \ set +o allexport; \ - python3 -c "import json; from $(APP_PACKAGE_NAME).main import *; print( json.dumps(the_app.openapi(), indent=2) )" > $@ + python3 -c "import json; from $(APP_PACKAGE_NAME).main import *; print( json.dumps(app_factory().openapi(), indent=2) )" > $@ # diff --git a/services/invitations/src/simcore_service_invitations/web_server.py b/services/invitations/src/simcore_service_invitations/web_server.py index 92015153841..55c291e7825 100644 --- a/services/invitations/src/simcore_service_invitations/web_server.py +++ b/services/invitations/src/simcore_service_invitations/web_server.py @@ -7,9 +7,10 @@ def start( log_level: Literal["info", "debug", "warning", "error"], *, reload: bool = False ): uvicorn.run( - "simcore_service_invitations.web_main:the_app", + "simcore_service_invitations.web_main:app_factory", host="0.0.0.0", # nosec port=8000, log_level=log_level, reload=reload, + factory=True, ) diff --git a/services/notifications/Makefile b/services/notifications/Makefile index bc14e6354c1..13474cf2701 100644 --- a/services/notifications/Makefile +++ b/services/notifications/Makefile @@ -15,4 +15,4 @@ openapi.json: .env-ignore ## produces openapi.json @set -o allexport; \ source $<; \ set +o allexport; \ - python3 -c "import json; from $(APP_PACKAGE_NAME).main import *; print( json.dumps(the_app.openapi(), indent=2) )" > $@ + python3 -c "import json; from $(APP_PACKAGE_NAME).main import *; print( json.dumps(app_factory().openapi(), indent=2) )" > $@ diff --git a/services/payments/Makefile b/services/payments/Makefile index cf361c3c10e..a5d4d241e1f 100644 --- a/services/payments/Makefile +++ b/services/payments/Makefile @@ -15,7 +15,7 @@ openapi.json: .env-ignore ## produces openapi.json @set -o allexport; \ source $<; \ set +o allexport; \ - python3 -c "import json; from $(APP_PACKAGE_NAME).main import *; print( json.dumps(the_app.openapi(), indent=2) )" > $@ + python3 -c "import json; from $(APP_PACKAGE_NAME).main import *; print( json.dumps(app_factory().openapi(), indent=2) )" > $@ # NOTE: Create using `ln -s path/to/osparc-config/repo.config .env-secret` diff --git a/services/payments/tests/unit/test_cli.py b/services/payments/tests/unit/test_cli.py index 1fb1db4eded..4701f992ede 100644 --- a/services/payments/tests/unit/test_cli.py +++ b/services/payments/tests/unit/test_cli.py @@ -55,6 +55,6 @@ def test_list_settings(cli_runner: CliRunner, app_environment: EnvVarsDict): def test_main(app_environment: EnvVarsDict): - from simcore_service_payments.main import the_app + from simcore_service_payments.main import 
app_factory - assert the_app + app_factory() diff --git a/services/resource-usage-tracker/Makefile b/services/resource-usage-tracker/Makefile index d6d8745bc13..cb0bbe708f5 100644 --- a/services/resource-usage-tracker/Makefile +++ b/services/resource-usage-tracker/Makefile @@ -10,4 +10,4 @@ include ../../scripts/common-service.Makefile openapi-specs: openapi.json openapi.json: ## produces openapi.json # generating openapi specs file (need to have the environment set for this) - @python3 -c "import json; from $(APP_PACKAGE_NAME).main import *; print( json.dumps(the_app.openapi(), indent=2) )" > $@ + @python3 -c "import json; from $(APP_PACKAGE_NAME).main import *; print( json.dumps(app_factory().openapi(), indent=2) )" > $@ diff --git a/services/storage/src/simcore_service_storage/cli.py b/services/storage/src/simcore_service_storage/cli.py index bcf4086f4aa..f81e7cd8a38 100644 --- a/services/storage/src/simcore_service_storage/cli.py +++ b/services/storage/src/simcore_service_storage/cli.py @@ -31,7 +31,7 @@ def run(): """Runs application""" typer.secho("Sorry, this entrypoint is intentionally disabled. Use instead") typer.secho( - f"$ uvicorn {PROJECT_NAME}.main:the_app", + f"$ uvicorn --factory {PROJECT_NAME}.main:app_factory", fg=typer.colors.BLUE, ) From f14cabd478d5b484027e0d78edfb530ef171a0f4 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 10 Jul 2025 12:49:43 +0200 Subject: [PATCH 095/128] hmm --- packages/service-library/tests/test_logging_utils.py | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/packages/service-library/tests/test_logging_utils.py b/packages/service-library/tests/test_logging_utils.py index 14305872717..fbc1355991b 100644 --- a/packages/service-library/tests/test_logging_utils.py +++ b/packages/service-library/tests/test_logging_utils.py @@ -435,7 +435,7 @@ def test_set_parent_module_log_level_(caplog: pytest.LogCaptureFixture): @pytest.mark.parametrize("log_format_local_dev_enabled", [True, False]) -async def test_setup_async_loggers_basic( +def test_setup_async_loggers_basic( caplog: pytest.LogCaptureFixture, log_format_local_dev_enabled: bool, ): @@ -456,10 +456,9 @@ async def test_setup_async_loggers_basic( _assert_check_log_message(caplog, "Test async log message") -async def test_setup_async_loggers_with_filters( +def test_setup_async_loggers_with_filters( caplog: pytest.LogCaptureFixture, ): - """Test async logging setup with logger filters.""" caplog.clear() caplog.set_level(logging.INFO) @@ -496,7 +495,7 @@ async def test_setup_async_loggers_with_filters( assert "This is from unfiltered logger" in caplog.text -async def test_setup_async_loggers_with_tracing_settings( +def test_setup_async_loggers_with_tracing_settings( caplog: pytest.LogCaptureFixture, ): """Test async logging setup with tracing settings.""" @@ -518,7 +517,7 @@ async def test_setup_async_loggers_with_tracing_settings( _assert_check_log_message(caplog, "Test message with tracing settings") -async def test_setup_async_loggers_context_manager_cleanup( +def test_setup_async_loggers_context_manager_cleanup( caplog: pytest.LogCaptureFixture, ): """Test that async logging context manager properly cleans up.""" @@ -540,7 +539,7 @@ async def test_setup_async_loggers_context_manager_cleanup( _assert_check_log_message(caplog, "Message during context") -async def test_setup_async_loggers_exception_handling( +def test_setup_async_loggers_exception_handling( caplog: pytest.LogCaptureFixture, ): """Test that async logging 
handles exceptions gracefully.""" From 628b50ebf86399818f74cc213f1a04cf02356b11 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 10 Jul 2025 12:50:47 +0200 Subject: [PATCH 096/128] docs --- packages/service-library/src/servicelib/logging_utils.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/service-library/src/servicelib/logging_utils.py b/packages/service-library/src/servicelib/logging_utils.py index 2d859b91c8d..bce9d8c6512 100644 --- a/packages/service-library/src/servicelib/logging_utils.py +++ b/packages/service-library/src/servicelib/logging_utils.py @@ -266,10 +266,10 @@ def setup_async_loggers_lifespan( noisy_loggers: tuple[str, ...] | None, ) -> Iterator[None]: """ - Async context manager for non-blocking logging infrastructure. + context manager for non-blocking logging infrastructure. Usage: - async with setup_async_loggers_lifespan(log_format_local_dev_enabled=True, logger_filter_mapping={}, tracing_settings=None): + with setup_async_loggers_lifespan(log_format_local_dev_enabled=True, logger_filter_mapping={}, tracing_settings=None): # Your async application code here logger.info("This is non-blocking!") From 04a2d3b6ab5d8d5bb0735726fa18727d14d2652f Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 10 Jul 2025 13:32:31 +0200 Subject: [PATCH 097/128] sonar --- services/web/server/src/simcore_service_webserver/cli.py | 2 +- services/web/server/src/simcore_service_webserver/log.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/services/web/server/src/simcore_service_webserver/cli.py b/services/web/server/src/simcore_service_webserver/cli.py index ce8319f4171..de59ad46ddf 100644 --- a/services/web/server/src/simcore_service_webserver/cli.py +++ b/services/web/server/src/simcore_service_webserver/cli.py @@ -72,7 +72,7 @@ async def app_factory() -> web.Application: "Using application factory: %s", app_settings.WEBSERVER_APP_FACTORY_NAME ) - logging_lifespan_cleanup_event = await setup_logging(app_settings) + logging_lifespan_cleanup_event = setup_logging(app_settings) if app_settings.WEBSERVER_APP_FACTORY_NAME == "WEBSERVER_AUTHZ_APP_FACTORY": app = create_application_auth() diff --git a/services/web/server/src/simcore_service_webserver/log.py b/services/web/server/src/simcore_service_webserver/log.py index 8f7d028e2ab..fad1c191b3f 100644 --- a/services/web/server/src/simcore_service_webserver/log.py +++ b/services/web/server/src/simcore_service_webserver/log.py @@ -31,7 +31,7 @@ CleanupEvent: TypeAlias = Callable[[web.Application], Awaitable[None]] -async def setup_logging(app_settings: ApplicationSettings) -> CleanupEvent: +def setup_logging(app_settings: ApplicationSettings) -> CleanupEvent: exit_stack = AsyncExitStack() exit_stack.enter_context( setup_async_loggers_lifespan( From 10e660d4a27996a18a695b20b99aa9018f222908 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 10 Jul 2025 13:34:50 +0200 Subject: [PATCH 098/128] dv-2 --- .../core/application.py | 43 +++++++++++++++---- .../src/simcore_service_director_v2/main.py | 30 +------------ 2 files changed, 36 insertions(+), 37 deletions(-) diff --git a/services/director-v2/src/simcore_service_director_v2/core/application.py b/services/director-v2/src/simcore_service_director_v2/core/application.py index b666cc2b949..4617c276803 100644 --- a/services/director-v2/src/simcore_service_director_v2/core/application.py +++ 
b/services/director-v2/src/simcore_service_director_v2/core/application.py @@ -1,10 +1,12 @@ import logging +from typing import Final from common_library.json_serialization import json_dumps from fastapi import FastAPI, HTTPException, status from fastapi.exceptions import RequestValidationError from fastapi_lifespan_manager import LifespanManager from servicelib.fastapi.lifespan_utils import Lifespan +from servicelib.fastapi.logging_lifespan import setup_logging_shutdown_event from servicelib.fastapi.openapi import ( get_common_oas_options, override_fastapi_openapi_method, @@ -51,6 +53,13 @@ _logger = logging.getLogger(__name__) +_NOISY_LOGGERS: Final[tuple[str, ...]] = ( + "aio_pika", + "aiormq", + "httpcore", + "httpx", +) + def _set_exception_handlers(app: FastAPI): app.add_exception_handler(HTTPException, http_error_handler) @@ -104,26 +113,44 @@ def create_app_lifespan(logging_lifespan: Lifespan | None = None) -> LifespanMan def create_base_app( - settings: AppSettings | None = None, + app_settings: AppSettings | None = None, ) -> FastAPI: - if settings is None: - settings = AppSettings.create_from_envs() - assert settings # nosec + if app_settings is None: + app_settings = AppSettings.create_from_envs() + + logging_shutdown_event = setup_logging_shutdown_event( + log_format_local_dev_enabled=app_settings.DIRECTOR_V2_LOG_FORMAT_LOCAL_DEV_ENABLED, + logger_filter_mapping=app_settings.DIRECTOR_V2_LOG_FILTER_MAPPING, + tracing_settings=app_settings.DIRECTOR_V2_TRACING, + log_base_level=app_settings.log_level, + noisy_loggers=_NOISY_LOGGERS, + ) - assert settings.SC_BOOT_MODE # nosec + _logger.info( + "Application settings: %s", + json_dumps(app_settings, indent=2, sort_keys=True), + ) + + assert app_settings # nosec + + assert app_settings.SC_BOOT_MODE # nosec app = FastAPI( - debug=settings.SC_BOOT_MODE.is_devel_mode(), + debug=app_settings.SC_BOOT_MODE.is_devel_mode(), title=PROJECT_NAME, description=SUMMARY, version=API_VERSION, openapi_url=f"/api/{API_VTAG}/openapi.json", - **get_common_oas_options(is_devel_mode=settings.SC_BOOT_MODE.is_devel_mode()), + **get_common_oas_options( + is_devel_mode=app_settings.SC_BOOT_MODE.is_devel_mode() + ), ) override_fastapi_openapi_method(app) - app.state.settings = settings + app.state.settings = app_settings app.include_router(api_router) + app.add_event_handler("shutdown", logging_shutdown_event) + return app diff --git a/services/director-v2/src/simcore_service_director_v2/main.py b/services/director-v2/src/simcore_service_director_v2/main.py index 723b3628198..f8e8f87f577 100644 --- a/services/director-v2/src/simcore_service_director_v2/main.py +++ b/services/director-v2/src/simcore_service_director_v2/main.py @@ -1,40 +1,12 @@ """Main application to be deployed in for example uvicorn.""" import logging -from typing import Final -from common_library.json_serialization import json_dumps from fastapi import FastAPI -from servicelib.fastapi.logging_lifespan import ( - setup_logging_shutdown_event, -) from simcore_service_director_v2.core.application import init_app -from simcore_service_director_v2.core.settings import AppSettings _logger = logging.getLogger(__name__) -_NOISY_LOGGERS: Final[tuple[str, ...]] = ( - "aio_pika", - "aiormq", - "httpcore", - "httpx", -) - def app_factory() -> FastAPI: - app_settings = AppSettings.create_from_envs() - logging_shutdown_event = setup_logging_shutdown_event( - log_format_local_dev_enabled=app_settings.DIRECTOR_V2_LOG_FORMAT_LOCAL_DEV_ENABLED, - 
logger_filter_mapping=app_settings.DIRECTOR_V2_LOG_FILTER_MAPPING, - tracing_settings=app_settings.DIRECTOR_V2_TRACING, - log_base_level=app_settings.log_level, - noisy_loggers=_NOISY_LOGGERS, - ) - - _logger.info( - "Application settings: %s", - json_dumps(app_settings, indent=2, sort_keys=True), - ) - app = init_app(settings=app_settings) - app.add_event_handler("shutdown", logging_shutdown_event) - return app + return init_app() From dcec94d9b9d2d0c950b76c0c19488d9d0843ecde Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 10 Jul 2025 13:39:10 +0200 Subject: [PATCH 099/128] fix tests --- packages/service-library/src/servicelib/logging_utils.py | 2 +- services/invitations/tests/unit/test_core_settings.py | 2 +- services/payments/tests/unit/test_core_settings.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/service-library/src/servicelib/logging_utils.py b/packages/service-library/src/servicelib/logging_utils.py index bce9d8c6512..c91fa9d3322 100644 --- a/packages/service-library/src/servicelib/logging_utils.py +++ b/packages/service-library/src/servicelib/logging_utils.py @@ -333,7 +333,7 @@ def setup_async_loggers_lifespan( ): listener.stop() - except Exception as exc: + except Exception as exc: # pylint: disable=broad-except sys.stderr.write(f"Error during async logging cleanup: {exc}\n") sys.stderr.flush() diff --git a/services/invitations/tests/unit/test_core_settings.py b/services/invitations/tests/unit/test_core_settings.py index cb89e8183ac..150f904ab09 100644 --- a/services/invitations/tests/unit/test_core_settings.py +++ b/services/invitations/tests/unit/test_core_settings.py @@ -43,4 +43,4 @@ def test_valid_application_settings(app_environment: EnvVarsDict): assert settings == ApplicationSettings.create_from_envs() - assert settings.LOG_LEVEL == "INFO" + assert settings.INVITATIONS_LOGLEVEL == "INFO" diff --git a/services/payments/tests/unit/test_core_settings.py b/services/payments/tests/unit/test_core_settings.py index 343a4507871..3c7810c73f7 100644 --- a/services/payments/tests/unit/test_core_settings.py +++ b/services/payments/tests/unit/test_core_settings.py @@ -13,4 +13,4 @@ def test_valid_application_settings(app_environment: EnvVarsDict): assert settings == ApplicationSettings.create_from_envs() - assert app_environment["PAYMENTS_LOGLEVEL"] == settings.LOG_LEVEL + assert app_environment["PAYMENTS_LOGLEVEL"] == settings.PAYMENTS_LOGLEVEL From 83f830ad5a7eedcbd116b11cba1c5345730a9bec Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 10 Jul 2025 13:49:57 +0200 Subject: [PATCH 100/128] bug fix --- .../src/simcore_service_dask_sidecar/settings.py | 8 +++++++- .../src/simcore_service_dask_sidecar/utils/logs.py | 10 +++++++++- 2 files changed, 16 insertions(+), 2 deletions(-) diff --git a/services/dask-sidecar/src/simcore_service_dask_sidecar/settings.py b/services/dask-sidecar/src/simcore_service_dask_sidecar/settings.py index e0a3e41d3a5..f2629768175 100644 --- a/services/dask-sidecar/src/simcore_service_dask_sidecar/settings.py +++ b/services/dask-sidecar/src/simcore_service_dask_sidecar/settings.py @@ -1,8 +1,10 @@ +from functools import cached_property from pathlib import Path -from typing import Annotated, Any +from typing import Annotated, Any, cast from models_library.basic_types import LogLevel from pydantic import AliasChoices, Field, field_validator +from servicelib.logging_utils import LogLevelInt from servicelib.logging_utils_filtering 
import LoggerName, MessageSubstring from settings_library.application import BaseApplicationSettings from settings_library.rabbit import RabbitSettings @@ -61,6 +63,10 @@ class ApplicationSettings(BaseApplicationSettings, MixinLoggingSettings): RabbitSettings | None, Field(json_schema_extra={"auto_default_from_env": True}) ] + @cached_property + def log_level(self) -> LogLevelInt: + return cast(LogLevelInt, self.DASK_SIDECAR_LOGLEVEL) + @field_validator("DASK_SIDECAR_LOGLEVEL", mode="before") @classmethod def _validate_loglevel(cls, value: Any) -> str: diff --git a/services/dask-sidecar/src/simcore_service_dask_sidecar/utils/logs.py b/services/dask-sidecar/src/simcore_service_dask_sidecar/utils/logs.py index 04b0630015c..76eaef67431 100644 --- a/services/dask-sidecar/src/simcore_service_dask_sidecar/utils/logs.py +++ b/services/dask-sidecar/src/simcore_service_dask_sidecar/utils/logs.py @@ -1,7 +1,15 @@ +from typing import Final + from servicelib.logging_utils import setup_loggers from ..settings import ApplicationSettings +_NOISY_LOGGERS: Final[tuple[str, ...]] = ( + "aio_pika", + "aiormq", + "werkzeug", +) + def setup_app_logging(settings: ApplicationSettings) -> None: setup_loggers( @@ -9,5 +17,5 @@ def setup_app_logging(settings: ApplicationSettings) -> None: logger_filter_mapping=settings.DASK_LOG_FILTER_MAPPING, tracing_settings=None, # no tracing for dask sidecar log_base_level=settings.log_level, - noisy_loggers=None, + noisy_loggers=_NOISY_LOGGERS, ) From 43c334c5764a7dd640d5d484cb8e2d06d2271a96 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 10 Jul 2025 13:59:45 +0200 Subject: [PATCH 101/128] brought fixture in --- packages/aws-library/tests/conftest.py | 1 + packages/celery-library/tests/conftest.py | 1 + packages/common-library/tests/conftest.py | 1 + packages/dask-task-models-library/tests/conftest.py | 1 + packages/models-library/tests/conftest.py | 1 + packages/notifications-library/tests/conftest.py | 3 ++- packages/postgres-database/tests/conftest.py | 1 + packages/pytest-simcore/src/pytest_simcore/logging.py | 10 +++++----- packages/settings-library/tests/conftest.py | 1 + packages/simcore-sdk/tests/conftest.py | 1 + services/agent/tests/conftest.py | 1 + services/api-server/tests/conftest.py | 1 + services/autoscaling/tests/unit/conftest.py | 1 + services/catalog/tests/unit/conftest.py | 3 +-- services/clusters-keeper/tests/unit/conftest.py | 1 + services/dask-sidecar/tests/unit/conftest.py | 1 + services/datcore-adapter/tests/unit/conftest.py | 1 + services/director-v2/tests/conftest.py | 1 + services/director/tests/unit/conftest.py | 1 + .../docker-api-proxy/tests/integration/conftest.py | 1 + services/dynamic-scheduler/tests/conftest.py | 1 + services/dynamic-sidecar/tests/conftest.py | 1 + services/dynamic-sidecar/tests/integration/conftest.py | 1 + services/efs-guardian/tests/conftest.py | 1 + services/invitations/tests/unit/conftest.py | 1 + services/migration/tests/conftest.py | 1 + services/notifications/tests/conftest.py | 1 + services/payments/tests/conftest.py | 1 + services/resource-usage-tracker/tests/unit/conftest.py | 1 + services/storage/tests/conftest.py | 1 + services/web/server/tests/conftest.py | 1 + 31 files changed, 36 insertions(+), 8 deletions(-) diff --git a/packages/aws-library/tests/conftest.py b/packages/aws-library/tests/conftest.py index 47fcdd327e3..0300b9de075 100644 --- a/packages/aws-library/tests/conftest.py +++ b/packages/aws-library/tests/conftest.py @@ -14,6 +14,7 @@ 
"pytest_simcore.aws_ssm_service", "pytest_simcore.environment_configs", "pytest_simcore.file_extra", + "pytest_simcore.logging", "pytest_simcore.pydantic_models", "pytest_simcore.pytest_global_environs", "pytest_simcore.repository_paths", diff --git a/packages/celery-library/tests/conftest.py b/packages/celery-library/tests/conftest.py index 2553d9df6d7..e9fc599136a 100644 --- a/packages/celery-library/tests/conftest.py +++ b/packages/celery-library/tests/conftest.py @@ -26,6 +26,7 @@ "pytest_simcore.docker_compose", "pytest_simcore.docker_swarm", "pytest_simcore.environment_configs", + "pytest_simcore.logging", "pytest_simcore.rabbit_service", "pytest_simcore.redis_service", "pytest_simcore.repository_paths", diff --git a/packages/common-library/tests/conftest.py b/packages/common-library/tests/conftest.py index 46f09f86b46..bf3e473ac55 100644 --- a/packages/common-library/tests/conftest.py +++ b/packages/common-library/tests/conftest.py @@ -9,6 +9,7 @@ import pytest pytest_plugins = [ + "pytest_simcore.logging", "pytest_simcore.pydantic_models", "pytest_simcore.pytest_global_environs", "pytest_simcore.repository_paths", diff --git a/packages/dask-task-models-library/tests/conftest.py b/packages/dask-task-models-library/tests/conftest.py index e551898ea95..5166cf785cf 100644 --- a/packages/dask-task-models-library/tests/conftest.py +++ b/packages/dask-task-models-library/tests/conftest.py @@ -7,6 +7,7 @@ import pytest pytest_plugins = [ + "pytest_simcore.logging", "pytest_simcore.pydantic_models", "pytest_simcore.pytest_global_environs", "pytest_simcore.repository_paths", diff --git a/packages/models-library/tests/conftest.py b/packages/models-library/tests/conftest.py index 8bf433b901d..adc09810588 100644 --- a/packages/models-library/tests/conftest.py +++ b/packages/models-library/tests/conftest.py @@ -10,6 +10,7 @@ pytest_plugins = [ "pytest_simcore.faker_projects_data", + "pytest_simcore.logging", "pytest_simcore.pydantic_models", "pytest_simcore.pytest_global_environs", "pytest_simcore.repository_paths", diff --git a/packages/notifications-library/tests/conftest.py b/packages/notifications-library/tests/conftest.py index 006b7ed1a7b..717cc6fcf14 100644 --- a/packages/notifications-library/tests/conftest.py +++ b/packages/notifications-library/tests/conftest.py @@ -29,6 +29,7 @@ "pytest_simcore.faker_payments_data", "pytest_simcore.faker_products_data", "pytest_simcore.faker_users_data", + "pytest_simcore.logging", "pytest_simcore.postgres_service", "pytest_simcore.repository_paths", ] @@ -72,7 +73,7 @@ def product_data( return ProductData( # type: ignore product_name=product_name, display_name=product["display_name"], - vendor_display_inline=f"{vendor.get('name','')}, {vendor.get('address','')}", + vendor_display_inline=f"{vendor.get('name', '')}, {vendor.get('address', '')}", support_email=product["support_email"], homepage_url=vendor.get("url"), ui=product_ui, diff --git a/packages/postgres-database/tests/conftest.py b/packages/postgres-database/tests/conftest.py index fdac39729b6..1ca663ef578 100644 --- a/packages/postgres-database/tests/conftest.py +++ b/packages/postgres-database/tests/conftest.py @@ -41,6 +41,7 @@ from sqlalchemy.ext.asyncio import AsyncConnection, AsyncEngine, create_async_engine pytest_plugins = [ + "pytest_simcore.logging", "pytest_simcore.pytest_global_environs", "pytest_simcore.repository_paths", ] diff --git a/packages/pytest-simcore/src/pytest_simcore/logging.py b/packages/pytest-simcore/src/pytest_simcore/logging.py index 4f1399094bc..bd4b15005a0 100644 
--- a/packages/pytest-simcore/src/pytest_simcore/logging.py +++ b/packages/pytest-simcore/src/pytest_simcore/logging.py @@ -1,7 +1,7 @@ # In conftest.py or test_logging_utils.py import logging -from collections.abc import AsyncIterator -from contextlib import asynccontextmanager +from collections.abc import Iterator +from contextlib import contextmanager from unittest.mock import patch import pytest @@ -19,15 +19,15 @@ def preserve_caplog_for_async_logging(request): # Patch setup_async_loggers_lifespan to preserve caplog handlers original_setup = setup_async_loggers_lifespan - @asynccontextmanager - async def patched_setup_async_loggers_lifespan(**kwargs) -> AsyncIterator[None]: + @contextmanager + def patched_setup_async_loggers_lifespan(**kwargs) -> Iterator[None]: # Find caplog's handler in root logger root_logger = logging.getLogger() caplog_handlers = [ h for h in root_logger.handlers if "LogCaptureHandler" in f"{type(h)}" ] - async with original_setup(**kwargs): + with original_setup(**kwargs): # After setup, restore caplog handlers alongside queue handler for handler in caplog_handlers: if handler not in root_logger.handlers: diff --git a/packages/settings-library/tests/conftest.py b/packages/settings-library/tests/conftest.py index c2a02e3a9b4..142b9cb4bea 100644 --- a/packages/settings-library/tests/conftest.py +++ b/packages/settings-library/tests/conftest.py @@ -18,6 +18,7 @@ pytest_plugins = [ "pytest_simcore.cli_runner", "pytest_simcore.environment_configs", + "pytest_simcore.logging", "pytest_simcore.pydantic_models", "pytest_simcore.pytest_global_environs", "pytest_simcore.repository_paths", diff --git a/packages/simcore-sdk/tests/conftest.py b/packages/simcore-sdk/tests/conftest.py index 2cf26c9a49e..e419fbdba8f 100644 --- a/packages/simcore-sdk/tests/conftest.py +++ b/packages/simcore-sdk/tests/conftest.py @@ -28,6 +28,7 @@ "pytest_simcore.docker_compose", "pytest_simcore.docker_swarm", "pytest_simcore.file_extra", + "pytest_simcore.logging", "pytest_simcore.minio_service", "pytest_simcore.postgres_service", "pytest_simcore.pytest_global_environs", diff --git a/services/agent/tests/conftest.py b/services/agent/tests/conftest.py index 624669ce7f4..8213e84ad47 100644 --- a/services/agent/tests/conftest.py +++ b/services/agent/tests/conftest.py @@ -16,6 +16,7 @@ "pytest_simcore.aws_server", "pytest_simcore.docker_compose", "pytest_simcore.docker_swarm", + "pytest_simcore.logging", "pytest_simcore.rabbit_service", "pytest_simcore.repository_paths", ] diff --git a/services/api-server/tests/conftest.py b/services/api-server/tests/conftest.py index 9aed5258956..2fd59c2f626 100644 --- a/services/api-server/tests/conftest.py +++ b/services/api-server/tests/conftest.py @@ -25,6 +25,7 @@ "pytest_simcore.faker_users_data", "pytest_simcore.httpbin_service", "pytest_simcore.httpx_calls_capture", + "pytest_simcore.logging", "pytest_simcore.pydantic_models", "pytest_simcore.pytest_global_environs", "pytest_simcore.rabbit_service", diff --git a/services/autoscaling/tests/unit/conftest.py b/services/autoscaling/tests/unit/conftest.py index d348fd7abe0..97e852b44d6 100644 --- a/services/autoscaling/tests/unit/conftest.py +++ b/services/autoscaling/tests/unit/conftest.py @@ -110,6 +110,7 @@ "pytest_simcore.docker_compose", "pytest_simcore.docker_swarm", "pytest_simcore.environment_configs", + "pytest_simcore.logging", "pytest_simcore.rabbit_service", "pytest_simcore.repository_paths", ] diff --git a/services/catalog/tests/unit/conftest.py b/services/catalog/tests/unit/conftest.py index 
3cc1e633efc..0088fa436a0 100644 --- a/services/catalog/tests/unit/conftest.py +++ b/services/catalog/tests/unit/conftest.py @@ -44,6 +44,7 @@ "pytest_simcore.environment_configs", "pytest_simcore.faker_products_data", "pytest_simcore.faker_users_data", + "pytest_simcore.logging", "pytest_simcore.postgres_service", "pytest_simcore.pydantic_models", "pytest_simcore.pytest_global_environs", @@ -179,7 +180,6 @@ def client( assert spy_app.on_shutdown.call_count == 0 with TestClient(app_under_test) as cli: - assert spy_app.on_startup.call_count == 1 assert spy_app.on_shutdown.call_count == 0 @@ -392,7 +392,6 @@ def mocked_director_rest_api_base( assert_all_called=False, assert_all_mocked=True, ) as respx_mock: - # HEATHCHECK assert openapi["paths"].get("/") respx_mock.head("/", name="healthcheck").respond( diff --git a/services/clusters-keeper/tests/unit/conftest.py b/services/clusters-keeper/tests/unit/conftest.py index 43bb1bacde1..15c52ee996d 100644 --- a/services/clusters-keeper/tests/unit/conftest.py +++ b/services/clusters-keeper/tests/unit/conftest.py @@ -48,6 +48,7 @@ "pytest_simcore.docker_swarm", "pytest_simcore.environment_configs", "pytest_simcore.faker_users_data", + "pytest_simcore.logging", "pytest_simcore.rabbit_service", "pytest_simcore.repository_paths", "pytest_simcore.simcore_service_library_fixtures", diff --git a/services/dask-sidecar/tests/unit/conftest.py b/services/dask-sidecar/tests/unit/conftest.py index 2e3fb246f88..e4bc735025c 100644 --- a/services/dask-sidecar/tests/unit/conftest.py +++ b/services/dask-sidecar/tests/unit/conftest.py @@ -42,6 +42,7 @@ "pytest_simcore.docker_swarm", "pytest_simcore.environment_configs", "pytest_simcore.faker_users_data", + "pytest_simcore.logging", "pytest_simcore.rabbit_service", "pytest_simcore.repository_paths", ] diff --git a/services/datcore-adapter/tests/unit/conftest.py b/services/datcore-adapter/tests/unit/conftest.py index 517528d67e6..976d86ad334 100644 --- a/services/datcore-adapter/tests/unit/conftest.py +++ b/services/datcore-adapter/tests/unit/conftest.py @@ -28,6 +28,7 @@ "pytest_simcore.asyncio_event_loops", "pytest_simcore.cli_runner", "pytest_simcore.environment_configs", + "pytest_simcore.logging", "pytest_simcore.repository_paths", "pytest_simcore.pytest_global_environs", ] diff --git a/services/director-v2/tests/conftest.py b/services/director-v2/tests/conftest.py index c2b2ea89ac2..9ae961cdbe3 100644 --- a/services/director-v2/tests/conftest.py +++ b/services/director-v2/tests/conftest.py @@ -47,6 +47,7 @@ "pytest_simcore.faker_products_data", "pytest_simcore.faker_projects_data", "pytest_simcore.faker_users_data", + "pytest_simcore.logging", "pytest_simcore.minio_service", "pytest_simcore.postgres_service", "pytest_simcore.pydantic_models", diff --git a/services/director/tests/unit/conftest.py b/services/director/tests/unit/conftest.py index 71e373f5f16..68c20076f76 100644 --- a/services/director/tests/unit/conftest.py +++ b/services/director/tests/unit/conftest.py @@ -28,6 +28,7 @@ "pytest_simcore.environment_configs", "pytest_simcore.faker_projects_data", "pytest_simcore.faker_users_data", + "pytest_simcore.logging", "pytest_simcore.repository_paths", "pytest_simcore.simcore_service_library_fixtures", ] diff --git a/services/docker-api-proxy/tests/integration/conftest.py b/services/docker-api-proxy/tests/integration/conftest.py index 10878c70d57..09d5ec3cb9a 100644 --- a/services/docker-api-proxy/tests/integration/conftest.py +++ b/services/docker-api-proxy/tests/integration/conftest.py @@ -23,6 +23,7 @@ 
"pytest_simcore.docker_api_proxy", "pytest_simcore.docker_compose", "pytest_simcore.docker_swarm", + "pytest_simcore.logging", "pytest_simcore.repository_paths", "pytest_simcore.simcore_services", ] diff --git a/services/dynamic-scheduler/tests/conftest.py b/services/dynamic-scheduler/tests/conftest.py index 4a4aaf8c501..5543ad0665d 100644 --- a/services/dynamic-scheduler/tests/conftest.py +++ b/services/dynamic-scheduler/tests/conftest.py @@ -28,6 +28,7 @@ "pytest_simcore.environment_configs", "pytest_simcore.faker_projects_data", "pytest_simcore.faker_users_data", + "pytest_simcore.logging", "pytest_simcore.postgres_service", "pytest_simcore.rabbit_service", "pytest_simcore.redis_service", diff --git a/services/dynamic-sidecar/tests/conftest.py b/services/dynamic-sidecar/tests/conftest.py index bb6820d2df7..c85d29105a9 100644 --- a/services/dynamic-sidecar/tests/conftest.py +++ b/services/dynamic-sidecar/tests/conftest.py @@ -40,6 +40,7 @@ "pytest_simcore.docker_registry", "pytest_simcore.docker_swarm", "pytest_simcore.faker_users_data", + "pytest_simcore.logging", "pytest_simcore.minio_service", "pytest_simcore.postgres_service", "pytest_simcore.pytest_global_environs", diff --git a/services/dynamic-sidecar/tests/integration/conftest.py b/services/dynamic-sidecar/tests/integration/conftest.py index 8c9f5426bd3..98ba076f260 100644 --- a/services/dynamic-sidecar/tests/integration/conftest.py +++ b/services/dynamic-sidecar/tests/integration/conftest.py @@ -8,6 +8,7 @@ pytest_plugins = [ "pytest_simcore.asyncio_event_loops", + "pytest_simcore.logging", "pytest_simcore.postgres_service", "pytest_simcore.simcore_storage_service", "pytest_simcore.rabbit_service", diff --git a/services/efs-guardian/tests/conftest.py b/services/efs-guardian/tests/conftest.py index 6cdf23bdc23..8a3e48d2325 100644 --- a/services/efs-guardian/tests/conftest.py +++ b/services/efs-guardian/tests/conftest.py @@ -24,6 +24,7 @@ "pytest_simcore.faker_users_data", "pytest_simcore.faker_products_data", "pytest_simcore.faker_projects_data", + "pytest_simcore.logging", "pytest_simcore.pydantic_models", "pytest_simcore.pytest_global_environs", "pytest_simcore.rabbit_service", diff --git a/services/invitations/tests/unit/conftest.py b/services/invitations/tests/unit/conftest.py index e7a12b0426c..414062b9f94 100644 --- a/services/invitations/tests/unit/conftest.py +++ b/services/invitations/tests/unit/conftest.py @@ -17,6 +17,7 @@ pytest_plugins = [ "pytest_simcore.asyncio_event_loops", + "pytest_simcore.logging", "pytest_simcore.cli_runner", "pytest_simcore.repository_paths", ] diff --git a/services/migration/tests/conftest.py b/services/migration/tests/conftest.py index 2ad21378f67..eb8c5b1c743 100644 --- a/services/migration/tests/conftest.py +++ b/services/migration/tests/conftest.py @@ -2,5 +2,6 @@ "pytest_simcore.docker_compose", "pytest_simcore.docker_registry", "pytest_simcore.docker_swarm", + "pytest_simcore.logging", "pytest_simcore.repository_paths", ] diff --git a/services/notifications/tests/conftest.py b/services/notifications/tests/conftest.py index 422f58a9122..6091f50b9de 100644 --- a/services/notifications/tests/conftest.py +++ b/services/notifications/tests/conftest.py @@ -13,6 +13,7 @@ "pytest_simcore.docker_compose", "pytest_simcore.docker_swarm", "pytest_simcore.environment_configs", + "pytest_simcore.logging", "pytest_simcore.postgres_service", "pytest_simcore.rabbit_service", "pytest_simcore.repository_paths", diff --git a/services/payments/tests/conftest.py b/services/payments/tests/conftest.py index 
39608fe4e70..45b36d3262a 100644 --- a/services/payments/tests/conftest.py +++ b/services/payments/tests/conftest.py @@ -24,6 +24,7 @@ "pytest_simcore.faker_products_data", "pytest_simcore.faker_users_data", "pytest_simcore.httpbin_service", + "pytest_simcore.logging", "pytest_simcore.postgres_service", "pytest_simcore.socketio", "pytest_simcore.rabbit_service", diff --git a/services/resource-usage-tracker/tests/unit/conftest.py b/services/resource-usage-tracker/tests/unit/conftest.py index a2e1b67a169..7269ffae009 100644 --- a/services/resource-usage-tracker/tests/unit/conftest.py +++ b/services/resource-usage-tracker/tests/unit/conftest.py @@ -35,6 +35,7 @@ "pytest_simcore.environment_configs", "pytest_simcore.faker_projects_data", "pytest_simcore.faker_products_data", + "pytest_simcore.logging", "pytest_simcore.postgres_service", "pytest_simcore.pydantic_models", "pytest_simcore.pytest_global_environs", diff --git a/services/storage/tests/conftest.py b/services/storage/tests/conftest.py index 8fbf7c550e9..32813640197 100644 --- a/services/storage/tests/conftest.py +++ b/services/storage/tests/conftest.py @@ -98,6 +98,7 @@ "pytest_simcore.environment_configs", "pytest_simcore.file_extra", "pytest_simcore.httpbin_service", + "pytest_simcore.logging", "pytest_simcore.openapi_specs", "pytest_simcore.postgres_service", "pytest_simcore.pytest_global_environs", diff --git a/services/web/server/tests/conftest.py b/services/web/server/tests/conftest.py index 3a8917a7898..b6c62eae17b 100644 --- a/services/web/server/tests/conftest.py +++ b/services/web/server/tests/conftest.py @@ -74,6 +74,7 @@ "pytest_simcore.environment_configs", "pytest_simcore.faker_users_data", "pytest_simcore.hypothesis_type_strategies", + "pytest_simcore.logging", "pytest_simcore.openapi_specs", "pytest_simcore.postgres_service", "pytest_simcore.pydantic_models", From dc0223eb9a567bd500f0eb7b21142fcbee4f562e Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 10 Jul 2025 14:09:15 +0200 Subject: [PATCH 102/128] align --- .../simcore_service_dynamic_scheduler/core/settings.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/core/settings.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/core/settings.py index f1ce9b13d33..8405a419536 100644 --- a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/core/settings.py +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/core/settings.py @@ -1,8 +1,10 @@ import datetime -from typing import Annotated +from functools import cached_property +from typing import Annotated, cast from common_library.basic_types import DEFAULT_FACTORY from pydantic import AliasChoices, Field, SecretStr, TypeAdapter, field_validator +from servicelib.logging_utils import LogLevelInt from servicelib.logging_utils_filtering import LoggerName, MessageSubstring from settings_library.application import BaseApplicationSettings from settings_library.basic_types import LogLevel, VersionTag @@ -105,6 +107,10 @@ class _BaseApplicationSettings(BaseApplicationSettings, MixinLoggingSettings): ), ] = False + @cached_property + def log_level(self) -> LogLevelInt: + return cast(LogLevelInt, self.DYNAMIC_SCHEDULER_LOGLEVEL) + @field_validator("DYNAMIC_SCHEDULER_LOGLEVEL", mode="before") @classmethod def _validate_log_level(cls, value: str) -> str: From 5a5fced2627458d82a56784ed471b1d2eed324d8 Mon Sep 17 00:00:00 2001 From: sanderegg 
<35365065+sanderegg@users.noreply.github.com> Date: Thu, 10 Jul 2025 14:15:03 +0200 Subject: [PATCH 103/128] shellcheck --- services/storage/docker/boot.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/storage/docker/boot.sh b/services/storage/docker/boot.sh index a8cfa4d68ad..6dd4e72f8e6 100755 --- a/services/storage/docker/boot.sh +++ b/services/storage/docker/boot.sh @@ -90,7 +90,7 @@ else exec uvicorn \ --factory simcore_service_storage.main:app_factory \ --host 0.0.0.0 \ - --port ${STORAGE_PORT} \ + --port "${STORAGE_PORT}" \ --log-level "${SERVER_LOG_LEVEL}" fi fi From 575ce89a602158058d76dfe35a5641fb8b2e3f92 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 10 Jul 2025 14:15:11 +0200 Subject: [PATCH 104/128] app_factory --- services/storage/Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/storage/Makefile b/services/storage/Makefile index ef350cae091..3627250ce98 100644 --- a/services/storage/Makefile +++ b/services/storage/Makefile @@ -12,7 +12,7 @@ openapi.json: .env @set -o allexport; \ source $<; \ set +o allexport; \ - python3 -c "import json; from $(APP_PACKAGE_NAME).main import *; print( json.dumps(app.openapi(), indent=2) )" > $@ + python3 -c "import json; from $(APP_PACKAGE_NAME).main import *; print( json.dumps(app_factory().openapi(), indent=2) )" > $@ # validates OAS file: $@ $(call validate_openapi_specs,$@) From ed3894b28fce7c60c587cea6cd4add4ab87370e8 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 10 Jul 2025 14:17:51 +0200 Subject: [PATCH 105/128] adjust fixture --- .../src/pytest_simcore/logging.py | 22 ++++++++++--------- 1 file changed, 12 insertions(+), 10 deletions(-) diff --git a/packages/pytest-simcore/src/pytest_simcore/logging.py b/packages/pytest-simcore/src/pytest_simcore/logging.py index bd4b15005a0..f0578a6b1a3 100644 --- a/packages/pytest-simcore/src/pytest_simcore/logging.py +++ b/packages/pytest-simcore/src/pytest_simcore/logging.py @@ -34,14 +34,16 @@ def patched_setup_async_loggers_lifespan(**kwargs) -> Iterator[None]: root_logger.addHandler(handler) yield - with ( - patch( - "tests.test_logging_utils.setup_async_loggers_lifespan", - patched_setup_async_loggers_lifespan, - ), - patch( - "servicelib.logging_utils.setup_async_loggers_lifespan", - patched_setup_async_loggers_lifespan, - ), + with patch( + "servicelib.logging_utils.setup_async_loggers_lifespan", + patched_setup_async_loggers_lifespan, ): - yield + try: + with patch( + "tests.test_logging_utils.setup_async_loggers_lifespan", + patched_setup_async_loggers_lifespan, + ): + yield + except ModuleNotFoundError: + # NOTE: this is for tests running in service library + yield From 7486db26ab076f21315cb3ce381098cae41a314d Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 10 Jul 2025 14:20:33 +0200 Subject: [PATCH 106/128] reverted --- packages/common-library/tests/conftest.py | 1 - packages/dask-task-models-library/tests/conftest.py | 1 - packages/models-library/tests/conftest.py | 1 - packages/notifications-library/tests/conftest.py | 1 - packages/postgres-database/tests/conftest.py | 1 - packages/settings-library/tests/conftest.py | 1 - 6 files changed, 6 deletions(-) diff --git a/packages/common-library/tests/conftest.py b/packages/common-library/tests/conftest.py index bf3e473ac55..46f09f86b46 100644 --- a/packages/common-library/tests/conftest.py +++ 
b/packages/common-library/tests/conftest.py @@ -9,7 +9,6 @@ import pytest pytest_plugins = [ - "pytest_simcore.logging", "pytest_simcore.pydantic_models", "pytest_simcore.pytest_global_environs", "pytest_simcore.repository_paths", diff --git a/packages/dask-task-models-library/tests/conftest.py b/packages/dask-task-models-library/tests/conftest.py index 5166cf785cf..e551898ea95 100644 --- a/packages/dask-task-models-library/tests/conftest.py +++ b/packages/dask-task-models-library/tests/conftest.py @@ -7,7 +7,6 @@ import pytest pytest_plugins = [ - "pytest_simcore.logging", "pytest_simcore.pydantic_models", "pytest_simcore.pytest_global_environs", "pytest_simcore.repository_paths", diff --git a/packages/models-library/tests/conftest.py b/packages/models-library/tests/conftest.py index adc09810588..8bf433b901d 100644 --- a/packages/models-library/tests/conftest.py +++ b/packages/models-library/tests/conftest.py @@ -10,7 +10,6 @@ pytest_plugins = [ "pytest_simcore.faker_projects_data", - "pytest_simcore.logging", "pytest_simcore.pydantic_models", "pytest_simcore.pytest_global_environs", "pytest_simcore.repository_paths", diff --git a/packages/notifications-library/tests/conftest.py b/packages/notifications-library/tests/conftest.py index 717cc6fcf14..e8440aa573c 100644 --- a/packages/notifications-library/tests/conftest.py +++ b/packages/notifications-library/tests/conftest.py @@ -29,7 +29,6 @@ "pytest_simcore.faker_payments_data", "pytest_simcore.faker_products_data", "pytest_simcore.faker_users_data", - "pytest_simcore.logging", "pytest_simcore.postgres_service", "pytest_simcore.repository_paths", ] diff --git a/packages/postgres-database/tests/conftest.py b/packages/postgres-database/tests/conftest.py index 1ca663ef578..fdac39729b6 100644 --- a/packages/postgres-database/tests/conftest.py +++ b/packages/postgres-database/tests/conftest.py @@ -41,7 +41,6 @@ from sqlalchemy.ext.asyncio import AsyncConnection, AsyncEngine, create_async_engine pytest_plugins = [ - "pytest_simcore.logging", "pytest_simcore.pytest_global_environs", "pytest_simcore.repository_paths", ] diff --git a/packages/settings-library/tests/conftest.py b/packages/settings-library/tests/conftest.py index 142b9cb4bea..c2a02e3a9b4 100644 --- a/packages/settings-library/tests/conftest.py +++ b/packages/settings-library/tests/conftest.py @@ -18,7 +18,6 @@ pytest_plugins = [ "pytest_simcore.cli_runner", "pytest_simcore.environment_configs", - "pytest_simcore.logging", "pytest_simcore.pydantic_models", "pytest_simcore.pytest_global_environs", "pytest_simcore.repository_paths", From 59b20d79aca39b666cdb2a38a0834ebffde69492 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 10 Jul 2025 15:12:39 +0200 Subject: [PATCH 107/128] add more filtering --- .env-devel | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.env-devel b/.env-devel index 167b4e36c59..3a8f1090375 100644 --- a/.env-devel +++ b/.env-devel @@ -161,7 +161,7 @@ INVITATIONS_USERNAME=admin INVITATIONS_TRACING={} LOG_FORMAT_LOCAL_DEV_ENABLED=1 -LOG_FILTER_MAPPING='{"gunicorn.access":[" /v0/ ", " /v0/health "], "uvicorn.access":[" / "]}' +LOG_FILTER_MAPPING='{"gunicorn.access":[" /v0/ ", " /v0/health "], "uvicorn.access":[" / ", " /v0/ "]}' NOTIFICATIONS_LOGLEVEL=INFO NOTIFICATIONS_TRACING={} From 55aca254642721602940136a9bafe8c5b310b04d Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 10 Jul 2025 15:19:07 +0200 Subject: [PATCH 108/128] info --- 
.../service-library/src/servicelib/logging_utils.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/packages/service-library/src/servicelib/logging_utils.py b/packages/service-library/src/servicelib/logging_utils.py index c91fa9d3322..575ab8ba180 100644 --- a/packages/service-library/src/servicelib/logging_utils.py +++ b/packages/service-library/src/servicelib/logging_utils.py @@ -19,6 +19,7 @@ from pathlib import Path from typing import Any, Final, NotRequired, TypeAlias, TypedDict, TypeVar +from common_library.json_serialization import json_dumps from settings_library.tracing import TracingSettings from .logging_utils_filtering import GeneralLogFilter, LoggerName, MessageSubstring @@ -647,10 +648,12 @@ def _apply_comprehensive_logging_setup( _logger.info( "Modified %d loggers for comprehensive logging: %s", len(loggers_modified), - [ - f"{info['name']}(removed_handlers={info['handlers']}, enabled_propagate={info['had_propagate_disabled']})" - for info in loggers_modified[:3] - ], # Show first 3 to avoid spam + json_dumps( + [ + f"{info['name']}(removed_handlers={info['handlers']}, enabled_propagate={info['had_propagate_disabled']})" + for info in loggers_modified + ] + ), ) # Set up root logger with the provided handler only From 6800114323d2d390505ddac2063600fd888fb800 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 10 Jul 2025 17:19:52 +0200 Subject: [PATCH 109/128] fixed test --- services/dynamic-sidecar/tests/unit/test_cli.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/services/dynamic-sidecar/tests/unit/test_cli.py b/services/dynamic-sidecar/tests/unit/test_cli.py index 9caf2316347..a06fc698efc 100644 --- a/services/dynamic-sidecar/tests/unit/test_cli.py +++ b/services/dynamic-sidecar/tests/unit/test_cli.py @@ -50,12 +50,12 @@ def test_list_state_dirs(cli_runner: CliRunner, mock_data_manager: None): def test_outputs_push_interface(cli_runner: CliRunner, mock_data_manager: None): result = cli_runner.invoke(main, ["state-save"]) assert result.exit_code == os.EX_OK, _format_cli_error(result) - assert result.stdout == "state save finished successfully\n" + assert "state save finished successfully\n" in result.stdout print(result) def test_state_save_interface(cli_runner: CliRunner, mock_nodeports: None): result = cli_runner.invoke(main, ["outputs-push"]) assert result.exit_code == os.EX_OK, _format_cli_error(result) - assert result.stdout == "output ports push finished successfully\n" + assert "output ports push finished successfully\n" in result.stdout print(result) From c4e234d4e6ac4de205f8578db5b2cb3a6f512836 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 10 Jul 2025 17:23:29 +0200 Subject: [PATCH 110/128] simplify fixture --- .../src/pytest_simcore/logging.py | 33 ++++++++----------- 1 file changed, 13 insertions(+), 20 deletions(-) diff --git a/packages/pytest-simcore/src/pytest_simcore/logging.py b/packages/pytest-simcore/src/pytest_simcore/logging.py index f0578a6b1a3..988e1bc8731 100644 --- a/packages/pytest-simcore/src/pytest_simcore/logging.py +++ b/packages/pytest-simcore/src/pytest_simcore/logging.py @@ -1,21 +1,18 @@ # In conftest.py or test_logging_utils.py +import contextlib import logging from collections.abc import Iterator from contextlib import contextmanager -from unittest.mock import patch import pytest +from pytest_mock import MockerFixture from servicelib.logging_utils import setup_async_loggers_lifespan 
@pytest.fixture(autouse=True) -def preserve_caplog_for_async_logging(request): - """Automatically preserve caplog handlers when both caplog and async logging are used.""" - # Check if this test uses caplog fixture - if "caplog" not in request.fixturenames: - yield # No caplog, no patching needed - return - +def preserve_caplog_for_async_logging( + request: pytest.FixtureRequest, mocker: MockerFixture +) -> None: # Patch setup_async_loggers_lifespan to preserve caplog handlers original_setup = setup_async_loggers_lifespan @@ -34,16 +31,12 @@ def patched_setup_async_loggers_lifespan(**kwargs) -> Iterator[None]: root_logger.addHandler(handler) yield - with patch( + methods_to_patch = [ "servicelib.logging_utils.setup_async_loggers_lifespan", - patched_setup_async_loggers_lifespan, - ): - try: - with patch( - "tests.test_logging_utils.setup_async_loggers_lifespan", - patched_setup_async_loggers_lifespan, - ): - yield - except ModuleNotFoundError: - # NOTE: this is for tests running in service library - yield + "servicelib.fastapi.logging_lifespan.setup_async_loggers_lifespan", + "tests.test_logging_utils.setup_async_loggers_lifespan", + ] + for method in methods_to_patch: + with contextlib.suppress(AttributeError, ModuleNotFoundError): + # Patch the method to use our patched version + mocker.patch(method, patched_setup_async_loggers_lifespan) From 3ec8bc7ab1db86db675d9b8e5dc75d448ee593ce Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 10 Jul 2025 17:24:07 +0200 Subject: [PATCH 111/128] simplify fixture --- packages/pytest-simcore/src/pytest_simcore/logging.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/packages/pytest-simcore/src/pytest_simcore/logging.py b/packages/pytest-simcore/src/pytest_simcore/logging.py index 988e1bc8731..de4be394a09 100644 --- a/packages/pytest-simcore/src/pytest_simcore/logging.py +++ b/packages/pytest-simcore/src/pytest_simcore/logging.py @@ -10,10 +10,10 @@ @pytest.fixture(autouse=True) -def preserve_caplog_for_async_logging( - request: pytest.FixtureRequest, mocker: MockerFixture -) -> None: - # Patch setup_async_loggers_lifespan to preserve caplog handlers +def preserve_caplog_for_async_logging(mocker: MockerFixture) -> None: + # Patch setup_async_loggers_lifespan to preserve caplog handlers, + # and pytest logs in general as pytest captures logs in a special way + # that is not compatible with the queue handler used in async logging. original_setup = setup_async_loggers_lifespan @contextmanager From 0631674230e2a598bf95d86fc0467ad31a973fbc Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 10 Jul 2025 17:31:57 +0200 Subject: [PATCH 112/128] add some docs --- .../src/servicelib/fastapi/logging_lifespan.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/packages/service-library/src/servicelib/fastapi/logging_lifespan.py b/packages/service-library/src/servicelib/fastapi/logging_lifespan.py index d5c4020e5e7..d1564e1a6a1 100644 --- a/packages/service-library/src/servicelib/fastapi/logging_lifespan.py +++ b/packages/service-library/src/servicelib/fastapi/logging_lifespan.py @@ -24,6 +24,7 @@ def setup_logging_lifespan( log_base_level: LogLevelInt, noisy_loggers: tuple[str, ...] 
| None, ) -> Lifespan: + """Returns a FastAPI-compatible lifespan handler to set up async logging.""" exit_stack = AsyncExitStack() exit_stack.enter_context( setup_async_loggers_lifespan( @@ -52,6 +53,13 @@ def setup_logging_shutdown_event( log_base_level: LogLevelInt, noisy_loggers: tuple[str, ...] | None, ) -> Callable[[], Awaitable[None]]: + """retruns a fastapi-compatible shutdown event handler to be used with old style lifespan + handlers. This is useful for applications that do not use the new async lifespan + handlers introduced in fastapi 0.100.0. + + Note: This function is for backwards compatibility only and will be removed in the future. + setup_logging_lifespan should be used instead for new style lifespan handlers. + """ exit_stack = AsyncExitStack() exit_stack.enter_context( setup_async_loggers_lifespan( From 72f0b73c79888f41b3709c86aaa519e295b08e6e Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 10 Jul 2025 17:43:13 +0200 Subject: [PATCH 113/128] cleanup --- services/web/server/src/simcore_service_webserver/log.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/services/web/server/src/simcore_service_webserver/log.py b/services/web/server/src/simcore_service_webserver/log.py index fad1c191b3f..a53e4b98398 100644 --- a/services/web/server/src/simcore_service_webserver/log.py +++ b/services/web/server/src/simcore_service_webserver/log.py @@ -11,8 +11,7 @@ from servicelib.logging_utils import setup_async_loggers_lifespan from simcore_service_webserver.application_settings import ApplicationSettings -LOG_LEVEL_STEP: Final[int] = logging.CRITICAL - logging.ERROR -NOISY_LOGGERS: Final[tuple[str, ...]] = ( +_NOISY_LOGGERS: Final[tuple[str, ...]] = ( "aio_pika", "aiormq", "engineio", @@ -36,7 +35,7 @@ def setup_logging(app_settings: ApplicationSettings) -> CleanupEvent: exit_stack.enter_context( setup_async_loggers_lifespan( log_base_level=app_settings.log_level, - noisy_loggers=NOISY_LOGGERS, + noisy_loggers=_NOISY_LOGGERS, log_format_local_dev_enabled=app_settings.WEBSERVER_LOG_FORMAT_LOCAL_DEV_ENABLED, logger_filter_mapping=app_settings.WEBSERVER_LOG_FILTER_MAPPING, tracing_settings=app_settings.WEBSERVER_TRACING, From 395e99bc38171cc076a6364550f73fa445a91428 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Thu, 10 Jul 2025 17:47:35 +0200 Subject: [PATCH 114/128] missing dep --- services/invitations/requirements/_test.in | 1 + services/invitations/requirements/_test.txt | 3 +++ 2 files changed, 4 insertions(+) diff --git a/services/invitations/requirements/_test.in b/services/invitations/requirements/_test.in index 040fb5659da..5fdd0bcf70b 100644 --- a/services/invitations/requirements/_test.in +++ b/services/invitations/requirements/_test.in @@ -18,6 +18,7 @@ hypothesis pytest pytest-asyncio pytest-cov +pytest-mock pytest-runner pytest-sugar python-dotenv diff --git a/services/invitations/requirements/_test.txt b/services/invitations/requirements/_test.txt index e9436a09449..518d2664fdb 100644 --- a/services/invitations/requirements/_test.txt +++ b/services/invitations/requirements/_test.txt @@ -58,11 +58,14 @@ pytest==8.4.1 # -r requirements/_test.in # pytest-asyncio # pytest-cov + # pytest-mock # pytest-sugar pytest-asyncio==1.0.0 # via -r requirements/_test.in pytest-cov==6.2.1 # via -r requirements/_test.in +pytest-mock==3.14.1 + # via -r requirements/_test.in pytest-runner==6.0.1 # via -r requirements/_test.in pytest-sugar==1.0.0 From 
ce18732f354263340477c561c2cce98d5d9385f1 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Fri, 11 Jul 2025 11:33:14 +0200 Subject: [PATCH 115/128] @GitHK review: rename --- .../src/servicelib/fastapi/logging_lifespan.py | 2 +- services/catalog/src/simcore_service_catalog/main.py | 4 ++-- .../src/simcore_service_dynamic_scheduler/main.py | 4 ++-- .../notifications/src/simcore_service_notifications/main.py | 4 ++-- 4 files changed, 7 insertions(+), 7 deletions(-) diff --git a/packages/service-library/src/servicelib/fastapi/logging_lifespan.py b/packages/service-library/src/servicelib/fastapi/logging_lifespan.py index d1564e1a6a1..4293e293ccc 100644 --- a/packages/service-library/src/servicelib/fastapi/logging_lifespan.py +++ b/packages/service-library/src/servicelib/fastapi/logging_lifespan.py @@ -16,7 +16,7 @@ _logger = logging.getLogger(__name__) -def setup_logging_lifespan( +def logging_lifespan( *, log_format_local_dev_enabled: bool, logger_filter_mapping: dict[LoggerName, list[MessageSubstring]], diff --git a/services/catalog/src/simcore_service_catalog/main.py b/services/catalog/src/simcore_service_catalog/main.py index edf5acfff4e..24f75294a17 100644 --- a/services/catalog/src/simcore_service_catalog/main.py +++ b/services/catalog/src/simcore_service_catalog/main.py @@ -5,7 +5,7 @@ from common_library.json_serialization import json_dumps from fastapi import FastAPI -from servicelib.fastapi.logging_lifespan import setup_logging_lifespan +from servicelib.fastapi.logging_lifespan import logging_lifespan from simcore_service_catalog.core.application import create_app from simcore_service_catalog.core.settings import ApplicationSettings @@ -24,7 +24,7 @@ def app_factory() -> FastAPI: app_settings = ApplicationSettings.create_from_envs() - logging_lifespan = setup_logging_lifespan( + logging_lifespan = logging_lifespan( log_format_local_dev_enabled=app_settings.CATALOG_LOG_FORMAT_LOCAL_DEV_ENABLED, logger_filter_mapping=app_settings.CATALOG_LOG_FILTER_MAPPING, tracing_settings=app_settings.CATALOG_TRACING, diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/main.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/main.py index 0ab505b83f4..2c8c8c15111 100644 --- a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/main.py +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/main.py @@ -6,7 +6,7 @@ from common_library.json_serialization import json_dumps from fastapi import FastAPI from servicelib.fastapi.logging_lifespan import ( - setup_logging_lifespan, + logging_lifespan, ) from simcore_service_dynamic_scheduler.core.application import create_app from simcore_service_dynamic_scheduler.core.settings import ApplicationSettings @@ -24,7 +24,7 @@ def app_factory() -> FastAPI: app_settings = ApplicationSettings.create_from_envs() - logging_lifespan = setup_logging_lifespan( + logging_lifespan = logging_lifespan( log_format_local_dev_enabled=app_settings.DYNAMIC_SCHEDULER_LOG_FORMAT_LOCAL_DEV_ENABLED, logger_filter_mapping=app_settings.DYNAMIC_SCHEDULER_LOG_FILTER_MAPPING, tracing_settings=app_settings.DYNAMIC_SCHEDULER_TRACING, diff --git a/services/notifications/src/simcore_service_notifications/main.py b/services/notifications/src/simcore_service_notifications/main.py index b5905966240..03413729ffd 100644 --- a/services/notifications/src/simcore_service_notifications/main.py +++ b/services/notifications/src/simcore_service_notifications/main.py @@ -3,7 +3,7 @@ from 
common_library.json_serialization import json_dumps from fastapi import FastAPI -from servicelib.fastapi.logging_lifespan import setup_logging_lifespan +from servicelib.fastapi.logging_lifespan import logging_lifespan from simcore_service_notifications.core.application import create_app from simcore_service_notifications.core.settings import ( ApplicationSettings, @@ -19,7 +19,7 @@ def app_factory() -> FastAPI: app_settings = ApplicationSettings.create_from_envs() - logging_lifespan = setup_logging_lifespan( + logging_lifespan = logging_lifespan( log_format_local_dev_enabled=app_settings.NOTIFICATIONS_VOLUMES_LOG_FORMAT_LOCAL_DEV_ENABLED, logger_filter_mapping=app_settings.NOTIFICATIONS_VOLUMES_LOG_FILTER_MAPPING, tracing_settings=app_settings.NOTIFICATIONS_TRACING, From 8cb3c061196f0af7d1fce19f0c2cbeb3c3cff675 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Fri, 11 Jul 2025 11:35:45 +0200 Subject: [PATCH 116/128] @GitHK review: rename --- .../pytest-simcore/src/pytest_simcore/logging.py | 16 ++++++++-------- .../src/servicelib/fastapi/logging_lifespan.py | 6 +++--- .../src/servicelib/logging_utils.py | 4 ++-- .../service-library/tests/test_logging_utils.py | 12 ++++++------ .../server/src/simcore_service_webserver/log.py | 4 ++-- 5 files changed, 21 insertions(+), 21 deletions(-) diff --git a/packages/pytest-simcore/src/pytest_simcore/logging.py b/packages/pytest-simcore/src/pytest_simcore/logging.py index de4be394a09..2db9ae93c5d 100644 --- a/packages/pytest-simcore/src/pytest_simcore/logging.py +++ b/packages/pytest-simcore/src/pytest_simcore/logging.py @@ -6,18 +6,18 @@ import pytest from pytest_mock import MockerFixture -from servicelib.logging_utils import setup_async_loggers_lifespan +from servicelib.logging_utils import async_loggers @pytest.fixture(autouse=True) def preserve_caplog_for_async_logging(mocker: MockerFixture) -> None: - # Patch setup_async_loggers_lifespan to preserve caplog handlers, + # Patch async_loggers to preserve caplog handlers, # and pytest logs in general as pytest captures logs in a special way # that is not compatible with the queue handler used in async logging. 
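+    # Concretely: pytest's caplog attaches a LogCaptureHandler to the root logger,
+    # while async_loggers replaces all root handlers with a single QueueHandler, so
+    # captured records would no longer reach caplog unless its handler is re-attached.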
- original_setup = setup_async_loggers_lifespan + original_setup = async_loggers @contextmanager - def patched_setup_async_loggers_lifespan(**kwargs) -> Iterator[None]: + def patched_async_loggers(**kwargs) -> Iterator[None]: # Find caplog's handler in root logger root_logger = logging.getLogger() caplog_handlers = [ @@ -32,11 +32,11 @@ def patched_setup_async_loggers_lifespan(**kwargs) -> Iterator[None]: yield methods_to_patch = [ - "servicelib.logging_utils.setup_async_loggers_lifespan", - "servicelib.fastapi.logging_lifespan.setup_async_loggers_lifespan", - "tests.test_logging_utils.setup_async_loggers_lifespan", + "servicelib.logging_utils.async_loggers", + "servicelib.fastapi.logging_lifespan.async_loggers", + "tests.test_logging_utils.async_loggers", ] for method in methods_to_patch: with contextlib.suppress(AttributeError, ModuleNotFoundError): # Patch the method to use our patched version - mocker.patch(method, patched_setup_async_loggers_lifespan) + mocker.patch(method, patched_async_loggers) diff --git a/packages/service-library/src/servicelib/fastapi/logging_lifespan.py b/packages/service-library/src/servicelib/fastapi/logging_lifespan.py index 4293e293ccc..ce326c8ce13 100644 --- a/packages/service-library/src/servicelib/fastapi/logging_lifespan.py +++ b/packages/service-library/src/servicelib/fastapi/logging_lifespan.py @@ -7,8 +7,8 @@ from ..logging_utils import ( LogLevelInt, + async_loggers, log_context, - setup_async_loggers_lifespan, ) from ..logging_utils_filtering import LoggerName, MessageSubstring from .lifespan_utils import Lifespan @@ -27,7 +27,7 @@ def logging_lifespan( """Returns a FastAPI-compatible lifespan handler to set up async logging.""" exit_stack = AsyncExitStack() exit_stack.enter_context( - setup_async_loggers_lifespan( + async_loggers( log_base_level=log_base_level, noisy_loggers=noisy_loggers, log_format_local_dev_enabled=log_format_local_dev_enabled, @@ -62,7 +62,7 @@ def setup_logging_shutdown_event( """ exit_stack = AsyncExitStack() exit_stack.enter_context( - setup_async_loggers_lifespan( + async_loggers( log_base_level=log_base_level, noisy_loggers=noisy_loggers, log_format_local_dev_enabled=log_format_local_dev_enabled, diff --git a/packages/service-library/src/servicelib/logging_utils.py b/packages/service-library/src/servicelib/logging_utils.py index 575ab8ba180..31e3db97735 100644 --- a/packages/service-library/src/servicelib/logging_utils.py +++ b/packages/service-library/src/servicelib/logging_utils.py @@ -258,7 +258,7 @@ def setup_loggers( @contextmanager -def setup_async_loggers_lifespan( +def async_loggers( *, log_format_local_dev_enabled: bool, logger_filter_mapping: dict[LoggerName, list[MessageSubstring]], @@ -270,7 +270,7 @@ def setup_async_loggers_lifespan( context manager for non-blocking logging infrastructure. 
Usage: - with setup_async_loggers_lifespan(log_format_local_dev_enabled=True, logger_filter_mapping={}, tracing_settings=None): + with async_loggers(log_format_local_dev_enabled=True, logger_filter_mapping={}, tracing_settings=None): # Your async application code here logger.info("This is non-blocking!") diff --git a/packages/service-library/tests/test_logging_utils.py b/packages/service-library/tests/test_logging_utils.py index fbc1355991b..287487c17c6 100644 --- a/packages/service-library/tests/test_logging_utils.py +++ b/packages/service-library/tests/test_logging_utils.py @@ -13,12 +13,12 @@ LogExtra, LogLevelInt, LogMessageStr, + async_loggers, guess_message_log_level, log_context, log_decorator, log_exceptions, set_parent_module_log_level, - setup_async_loggers_lifespan, ) from tenacity import ( retry, @@ -443,7 +443,7 @@ def test_setup_async_loggers_basic( caplog.clear() caplog.set_level(logging.INFO) - with setup_async_loggers_lifespan( + with async_loggers( log_format_local_dev_enabled=log_format_local_dev_enabled, logger_filter_mapping={}, # No filters for this test tracing_settings=None, # No tracing for this test @@ -467,7 +467,7 @@ def test_setup_async_loggers_with_filters( "test_filtered_logger": ["filtered_message"], } - with setup_async_loggers_lifespan( + with async_loggers( log_format_local_dev_enabled=True, logger_filter_mapping=filter_mapping, tracing_settings=None, # No tracing for this test @@ -504,7 +504,7 @@ def test_setup_async_loggers_with_tracing_settings( # Note: We can't easily test actual tracing without setting up OpenTelemetry # But we can test that the function accepts the parameter - with setup_async_loggers_lifespan( + with async_loggers( log_format_local_dev_enabled=False, logger_filter_mapping={}, # No filters for this test tracing_settings=None, @@ -526,7 +526,7 @@ def test_setup_async_loggers_context_manager_cleanup( test_logger = logging.getLogger("test_cleanup_logger") - with setup_async_loggers_lifespan( + with async_loggers( log_format_local_dev_enabled=True, logger_filter_mapping={}, tracing_settings=None, @@ -552,7 +552,7 @@ def _raise_test_exception(): raise ValueError(exc_msg) try: - with setup_async_loggers_lifespan( + with async_loggers( log_format_local_dev_enabled=True, logger_filter_mapping={}, tracing_settings=None, diff --git a/services/web/server/src/simcore_service_webserver/log.py b/services/web/server/src/simcore_service_webserver/log.py index a53e4b98398..271c28bd7b9 100644 --- a/services/web/server/src/simcore_service_webserver/log.py +++ b/services/web/server/src/simcore_service_webserver/log.py @@ -8,7 +8,7 @@ from aiodebug import log_slow_callbacks # type: ignore[import-untyped] from aiohttp import web from aiohttp.log import access_logger -from servicelib.logging_utils import setup_async_loggers_lifespan +from servicelib.logging_utils import async_loggers from simcore_service_webserver.application_settings import ApplicationSettings _NOISY_LOGGERS: Final[tuple[str, ...]] = ( @@ -33,7 +33,7 @@ def setup_logging(app_settings: ApplicationSettings) -> CleanupEvent: exit_stack = AsyncExitStack() exit_stack.enter_context( - setup_async_loggers_lifespan( + async_loggers( log_base_level=app_settings.log_level, noisy_loggers=_NOISY_LOGGERS, log_format_local_dev_enabled=app_settings.WEBSERVER_LOG_FORMAT_LOCAL_DEV_ENABLED, From 15914d5b54d194890f44bec14d192452ef613b22 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Mon, 14 Jul 2025 07:31:06 +0200 Subject: [PATCH 117/128] reviews --- 
.../src/servicelib/logging_utils.py | 36 ++++++++++--------- 1 file changed, 19 insertions(+), 17 deletions(-) diff --git a/packages/service-library/src/servicelib/logging_utils.py b/packages/service-library/src/servicelib/logging_utils.py index 31e3db97735..64fc5be005e 100644 --- a/packages/service-library/src/servicelib/logging_utils.py +++ b/packages/service-library/src/servicelib/logging_utils.py @@ -14,6 +14,7 @@ from asyncio import iscoroutinefunction from collections.abc import Callable, Iterator from contextlib import contextmanager +from dataclasses import dataclass from datetime import datetime from inspect import getframeinfo, stack from pathlib import Path @@ -584,31 +585,32 @@ def set_parent_module_log_level( logging.getLogger(parent_module).setLevel(desired_log_level) -def _store_logger_state(loggers: list[logging.Logger]) -> list[dict[str, Any]]: - """Store the original state of loggers for later restoration.""" +@dataclass(frozen=True) +class _LoggerState: + logger: logging.Logger + handlers: list[logging.Handler] + propagate: bool + + +def _store_logger_state(loggers: list[logging.Logger]) -> list[_LoggerState]: return [ - { - "logger": logger, - "handlers": logger.handlers.copy(), - "propagate": logger.propagate, - } + _LoggerState(logger, logger.handlers.copy(), logger.propagate) for logger in loggers ] -def _restore_logger_state(original_state: list[dict[str, Any]]) -> None: - """Restore loggers to their original state.""" +def _restore_logger_state(original_state: list[_LoggerState]) -> None: for state in original_state: - logger = state["logger"] + logger = state.logger logger.handlers.clear() - logger.handlers.extend(state["handlers"]) - logger.propagate = state["propagate"] + logger.handlers.extend(state.handlers) + logger.propagate = state.propagate def _apply_comprehensive_logging_setup( all_loggers: list[logging.Logger], root_handler: logging.Handler, -) -> list[dict[str, Any]]: +) -> list[_LoggerState]: """ Apply comprehensive logging setup: clear all handlers, ensure propagation, and set up root logger with the provided handler. @@ -644,6 +646,10 @@ def _apply_comprehensive_logging_setup( logger.handlers.clear() logger.propagate = True + # Set up root logger with the provided handler only + root_logger.handlers.clear() + root_logger.addHandler(root_handler) + if loggers_modified: _logger.info( "Modified %d loggers for comprehensive logging: %s", @@ -656,8 +662,4 @@ def _apply_comprehensive_logging_setup( ), ) - # Set up root logger with the provided handler only - root_logger.handlers.clear() - root_logger.addHandler(root_handler) - return original_logger_state From 94e79a22da34423fb68ceb59dcada5b2556d6b09 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Mon, 14 Jul 2025 07:34:09 +0200 Subject: [PATCH 118/128] reviews --- .../src/servicelib/logging_utils.py | 31 ++++++++++++++++++- 1 file changed, 30 insertions(+), 1 deletion(-) diff --git a/packages/service-library/src/servicelib/logging_utils.py b/packages/service-library/src/servicelib/logging_utils.py index 64fc5be005e..fa6eb9f4d23 100644 --- a/packages/service-library/src/servicelib/logging_utils.py +++ b/packages/service-library/src/servicelib/logging_utils.py @@ -268,7 +268,34 @@ def async_loggers( noisy_loggers: tuple[str, ...] | None, ) -> Iterator[None]: """ - context manager for non-blocking logging infrastructure. + Context manager for non-blocking logging infrastructure. 
+ + Flow Diagram: + ┌─────────────────┐ ┌──────────────┐ ┌─────────────────┐ + │ Application │ │ Queue │ │ Background │ + │ Thread │───▶│ (unlimited) │───▶│ Listener Thread │ + │ │ │ │ │ │ + │ logger.info() │ │ LogRecord │ │ StreamHandler │ + │ logger.error() │ │ LogRecord │ │ ├─ Formatter │ + │ (non-blocking) │ │ LogRecord │ │ └─ Output │ + └─────────────────┘ └──────────────┘ └─────────────────┘ + │ │ │ + │ │ ▼ + │ │ ┌─────────────┐ + │ │ │ Console/ │ + │ │ │ Terminal │ + │ │ └─────────────┘ + │ │ + └───────────────────────┴─ No blocking, immediate return + + The async logging setup ensures that: + 1. All log calls return immediately (non-blocking) + 2. Log records are queued in an unlimited queue + 3. A background thread processes the queue and handles actual I/O + 4. All loggers propagate to root for centralized handling + + For more details on the underlying implementation, see: + https://docs.python.org/3/library/logging.handlers.html#queuehandler Usage: with async_loggers(log_format_local_dev_enabled=True, logger_filter_mapping={}, tracing_settings=None): @@ -279,6 +306,8 @@ def async_loggers( log_format_local_dev_enabled: Enable local development formatting logger_filter_mapping: Mapping of logger names to filtered message substrings tracing_settings: OpenTelemetry tracing configuration + log_base_level: Base logging level to set + noisy_loggers: Loggers to set to a quieter level """ _setup_base_logging_level(log_base_level) if noisy_loggers is not None: From ecc9ab0c834259137e1e5d4ee51b66e30fdb76ed Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Mon, 14 Jul 2025 07:36:02 +0200 Subject: [PATCH 119/128] @pcrespov review: renamed init_app to create_app repo-wide --- .../src/simcore_service_api_server/core/application.py | 2 +- services/api-server/src/simcore_service_api_server/main.py | 4 ++-- services/api-server/tests/unit/_with_db/conftest.py | 4 ++-- services/api-server/tests/unit/conftest.py | 4 ++-- .../src/simcore_service_director_v2/core/application.py | 2 +- .../director-v2/src/simcore_service_director_v2/main.py | 4 ++-- services/director-v2/tests/conftest.py | 6 +++--- .../tests/integration/02/test_dynamic_services_routes.py | 4 ++-- .../tests/unit/test_modules_dask_clients_pool.py | 6 +++--- 9 files changed, 18 insertions(+), 18 deletions(-) diff --git a/services/api-server/src/simcore_service_api_server/core/application.py b/services/api-server/src/simcore_service_api_server/core/application.py index b6aa35e431e..33505c35c5f 100644 --- a/services/api-server/src/simcore_service_api_server/core/application.py +++ b/services/api-server/src/simcore_service_api_server/core/application.py @@ -48,7 +48,7 @@ def _label_title_and_version(settings: ApplicationSettings, title: str, version: return title, version -def init_app(settings: ApplicationSettings | None = None) -> FastAPI: +def create_app(settings: ApplicationSettings | None = None) -> FastAPI: if settings is None: settings = ApplicationSettings.create_from_envs() _logger.info( diff --git a/services/api-server/src/simcore_service_api_server/main.py b/services/api-server/src/simcore_service_api_server/main.py index f96eedca64a..16be007be18 100644 --- a/services/api-server/src/simcore_service_api_server/main.py +++ b/services/api-server/src/simcore_service_api_server/main.py @@ -6,7 +6,7 @@ from common_library.json_serialization import json_dumps from fastapi import FastAPI from servicelib.fastapi.logging_lifespan import setup_logging_shutdown_event -from 
simcore_service_api_server.core.application import init_app +from simcore_service_api_server.core.application import create_app from simcore_service_api_server.core.settings import ApplicationSettings _logger = logging.getLogger(__name__) @@ -33,6 +33,6 @@ def app_factory() -> FastAPI: "Application settings: %s", json_dumps(app_settings, indent=2, sort_keys=True), ) - app = init_app(settings=app_settings) + app = create_app(settings=app_settings) app.add_event_handler("shutdown", logging_shutdown_event) return app diff --git a/services/api-server/tests/unit/_with_db/conftest.py b/services/api-server/tests/unit/_with_db/conftest.py index fd2441c879e..90fcea5f809 100644 --- a/services/api-server/tests/unit/_with_db/conftest.py +++ b/services/api-server/tests/unit/_with_db/conftest.py @@ -34,7 +34,7 @@ from simcore_postgres_database.models.products import products from simcore_postgres_database.models.users import users from simcore_service_api_server.clients.postgres import get_engine -from simcore_service_api_server.core.application import init_app +from simcore_service_api_server.core.application import create_app from simcore_service_api_server.core.settings import PostgresSettings from sqlalchemy.ext.asyncio import AsyncEngine @@ -187,7 +187,7 @@ def app(app_environment: EnvVarsDict, migrated_db: None) -> FastAPI: - it uses default environ as pg - db is started and initialized """ - return init_app() + return create_app() @pytest.fixture diff --git a/services/api-server/tests/unit/conftest.py b/services/api-server/tests/unit/conftest.py index 5879d2a25ba..84837222595 100644 --- a/services/api-server/tests/unit/conftest.py +++ b/services/api-server/tests/unit/conftest.py @@ -54,7 +54,7 @@ from pytest_simcore.simcore_webserver_projects_rest_api import GET_PROJECT from requests.auth import HTTPBasicAuth from respx import MockRouter -from simcore_service_api_server.core.application import init_app +from simcore_service_api_server.core.application import create_app from simcore_service_api_server.core.settings import ApplicationSettings from simcore_service_api_server.repository.api_keys import UserAndProductTuple from simcore_service_api_server.services_http.solver_job_outputs import ResultsTypes @@ -131,7 +131,7 @@ def app( patch_lrt_response_urls() - return init_app() + return create_app() MAX_TIME_FOR_APP_TO_STARTUP = 10 diff --git a/services/director-v2/src/simcore_service_director_v2/core/application.py b/services/director-v2/src/simcore_service_director_v2/core/application.py index 4617c276803..22f3f4e539a 100644 --- a/services/director-v2/src/simcore_service_director_v2/core/application.py +++ b/services/director-v2/src/simcore_service_director_v2/core/application.py @@ -154,7 +154,7 @@ def create_base_app( return app -def init_app( # noqa: C901, PLR0912 +def create_app( # noqa: C901, PLR0912 settings: AppSettings | None = None, ) -> FastAPI: app = create_base_app(settings) diff --git a/services/director-v2/src/simcore_service_director_v2/main.py b/services/director-v2/src/simcore_service_director_v2/main.py index f8e8f87f577..d0ecfb8b16a 100644 --- a/services/director-v2/src/simcore_service_director_v2/main.py +++ b/services/director-v2/src/simcore_service_director_v2/main.py @@ -3,10 +3,10 @@ import logging from fastapi import FastAPI -from simcore_service_director_v2.core.application import init_app +from simcore_service_director_v2.core.application import create_app _logger = logging.getLogger(__name__) def app_factory() -> FastAPI: - return init_app() + return create_app() diff 
--git a/services/director-v2/tests/conftest.py b/services/director-v2/tests/conftest.py index 9ae961cdbe3..831b34e286c 100644 --- a/services/director-v2/tests/conftest.py +++ b/services/director-v2/tests/conftest.py @@ -32,7 +32,7 @@ from pytest_simcore.helpers.typing_env import EnvVarsDict from servicelib.rabbitmq import RabbitMQRPCClient from settings_library.rabbit import RabbitSettings -from simcore_service_director_v2.core.application import init_app +from simcore_service_director_v2.core.application import create_app from simcore_service_director_v2.core.settings import AppSettings from starlette.testclient import ASGI3App, TestClient @@ -202,7 +202,7 @@ def mock_env( @pytest.fixture() async def initialized_app(mock_env: EnvVarsDict) -> AsyncIterable[FastAPI]: settings = AppSettings.create_from_envs() - app = init_app(settings) + app = create_app(settings) print("Application settings\n", settings.model_dump_json(indent=2)) async with LifespanManager(app): yield app @@ -213,7 +213,7 @@ async def client(mock_env: EnvVarsDict) -> AsyncIterator[TestClient]: # NOTE: this way we ensure the events are run in the application # since it starts the app on a test server settings = AppSettings.create_from_envs() - app = init_app(settings) + app = create_app(settings) # NOTE: we cannot use the initialized_app fixture here as the TestClient also creates it print("Application settings\n", settings.model_dump_json(indent=2)) with TestClient(app, raise_server_exceptions=True) as test_client: diff --git a/services/director-v2/tests/integration/02/test_dynamic_services_routes.py b/services/director-v2/tests/integration/02/test_dynamic_services_routes.py index 07c24d10fe4..a6800131007 100644 --- a/services/director-v2/tests/integration/02/test_dynamic_services_routes.py +++ b/services/director-v2/tests/integration/02/test_dynamic_services_routes.py @@ -36,7 +36,7 @@ ) from settings_library.rabbit import RabbitSettings from settings_library.redis import RedisSettings -from simcore_service_director_v2.core.application import init_app +from simcore_service_director_v2.core.application import create_app from simcore_service_director_v2.core.settings import AppSettings from tenacity.asyncio import AsyncRetrying from tenacity.retry import retry_if_exception_type @@ -197,7 +197,7 @@ async def director_v2_client( settings = AppSettings.create_from_envs() - app = init_app(settings) + app = create_app(settings) async with TestClient(app) as client: yield client diff --git a/services/director-v2/tests/unit/test_modules_dask_clients_pool.py b/services/director-v2/tests/unit/test_modules_dask_clients_pool.py index b0c982647dd..3bd19025251 100644 --- a/services/director-v2/tests/unit/test_modules_dask_clients_pool.py +++ b/services/director-v2/tests/unit/test_modules_dask_clients_pool.py @@ -22,7 +22,7 @@ ) from pytest_mock.plugin import MockerFixture from pytest_simcore.helpers.typing_env import EnvVarsDict -from simcore_service_director_v2.core.application import init_app +from simcore_service_director_v2.core.application import create_app from simcore_service_director_v2.core.errors import ( ConfigurationError, DaskClientAcquisisitonError, @@ -54,7 +54,7 @@ def test_dask_clients_pool_missing_raises_configuration_error( ): monkeypatch.setenv("COMPUTATIONAL_BACKEND_DASK_CLIENT_ENABLED", "0") settings = AppSettings.create_from_envs() - app = init_app(settings) + app = create_app(settings) with TestClient(app, raise_server_exceptions=True): # noqa: SIM117 with pytest.raises(ConfigurationError): @@ -70,7 +70,7 @@ def 
test_dask_clients_pool_properly_setup_and_deleted( ) mocked_dask_clients_pool.create.return_value = mocked_dask_clients_pool settings = AppSettings.create_from_envs() - app = init_app(settings) + app = create_app(settings) with TestClient(app, raise_server_exceptions=True): mocked_dask_clients_pool.create.assert_called_once() From 9d584fab8900976ee36630dc2c1577c56bbb9ede Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Mon, 14 Jul 2025 07:46:36 +0200 Subject: [PATCH 120/128] added docs --- .../src/servicelib/logging_utils.py | 23 +++++++++++++++++++ 1 file changed, 23 insertions(+) diff --git a/packages/service-library/src/servicelib/logging_utils.py b/packages/service-library/src/servicelib/logging_utils.py index fa6eb9f4d23..d650e203522 100644 --- a/packages/service-library/src/servicelib/logging_utils.py +++ b/packages/service-library/src/servicelib/logging_utils.py @@ -222,15 +222,38 @@ def setup_loggers( """ Applies comprehensive configuration to ALL registered loggers. + Flow Diagram (Synchronous Logging): + ┌─────────────────┐ ┌─────────────────┐ + │ Application │ │ Root Logger │ + │ Thread │───────────────────▶│ StreamHandler │ + │ │ │ ├─ Formatter │ + │ logger.info() │ │ └─ Output │ + │ logger.error() │ │ │ + │ (blocking I/O) │ │ │ + └─────────────────┘ └─────────────────┘ + │ │ + │ ▼ + │ ┌─────────────┐ + │ │ Console/ │ + │ │ Terminal │ + │ └─────────────┘ + │ + └─ Blocks until I/O completes + This function uses a comprehensive approach: - Removes all handlers from all loggers - Ensures all loggers propagate to root - Sets up root logger with properly formatted handler + - All logging calls are synchronous and may block on I/O + + For async/non-blocking logging, use `async_loggers` context manager instead. 
Args: log_format_local_dev_enabled: Enable local development formatting logger_filter_mapping: Mapping of logger names to filtered message substrings tracing_settings: OpenTelemetry tracing configuration + log_base_level: Base logging level to set + noisy_loggers: Loggers to set to a quieter level """ _setup_base_logging_level(log_base_level) if noisy_loggers is not None: From 7ed8fe552a00896152271e2b5a1ad4ce9445ec53 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Mon, 14 Jul 2025 07:53:58 +0200 Subject: [PATCH 121/128] cleanup --- packages/service-library/src/servicelib/logging_utils.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/packages/service-library/src/servicelib/logging_utils.py b/packages/service-library/src/servicelib/logging_utils.py index d650e203522..db9d2f70bed 100644 --- a/packages/service-library/src/servicelib/logging_utils.py +++ b/packages/service-library/src/servicelib/logging_utils.py @@ -367,15 +367,13 @@ def async_loggers( ) try: - # Apply filters if provided if logger_filter_mapping: _apply_logger_filters(logger_filter_mapping) - _logger.info("Async logging context initialized with unlimited queue") - yield + with log_context(_logger, logging.INFO, "Asynchronous logging"): + yield finally: - # Cleanup: Restore all loggers to their original state try: _restore_logger_state(original_logger_state) From 8a014e9560212f864ae5591ef392a202f9d8785d Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Mon, 14 Jul 2025 16:28:07 +0200 Subject: [PATCH 122/128] cleanup --- .../src/servicelib/logging_utils.py | 151 +++++++++++------- 1 file changed, 97 insertions(+), 54 deletions(-) diff --git a/packages/service-library/src/servicelib/logging_utils.py b/packages/service-library/src/servicelib/logging_utils.py index db9d2f70bed..4a005bd7d44 100644 --- a/packages/service-library/src/servicelib/logging_utils.py +++ b/packages/service-library/src/servicelib/logging_utils.py @@ -10,7 +10,6 @@ import logging import logging.handlers import queue -import sys from asyncio import iscoroutinefunction from collections.abc import Callable, Iterator from contextlib import contextmanager @@ -155,19 +154,25 @@ def format(self, record) -> str: # log_level=%{WORD:log_level} \| log_timestamp=%{TIMESTAMP_ISO8601:log_timestamp} \| log_source=%{DATA:log_source} \| (log_uid=%{WORD:log_uid} \| )?log_msg=%{GREEDYDATA:log_msg} -def _setup_format_string( +def _setup_logging_formatter( *, tracing_settings: TracingSettings | None, log_format_local_dev_enabled: bool, -) -> str: +) -> logging.Formatter: if log_format_local_dev_enabled: - return ( + fmt = ( _LOCAL_TRACING_FORMATTING if tracing_settings is not None else _LOCAL_FORMATTING ) + else: + fmt = ( + _TRACING_FORMATTING if tracing_settings is not None else _DEFAULT_FORMATTING + ) - return _TRACING_FORMATTING if tracing_settings is not None else _DEFAULT_FORMATTING + return CustomFormatter( + fmt, log_format_local_dev_enabled=log_format_local_dev_enabled + ) def _get_all_loggers() -> list[logging.Logger]: @@ -260,25 +265,69 @@ def setup_loggers( _dampen_noisy_loggers(noisy_loggers) if tracing_settings is not None: setup_log_tracing(tracing_settings=tracing_settings) - fmt = _setup_format_string( + formatter = _setup_logging_formatter( tracing_settings=tracing_settings, log_format_local_dev_enabled=log_format_local_dev_enabled, ) # Create a properly formatted handler for the root logger - root_handler = logging.StreamHandler() - 
root_handler.setFormatter( - CustomFormatter(fmt, log_format_local_dev_enabled=log_format_local_dev_enabled) + stream_handler = logging.StreamHandler() + stream_handler.setFormatter(formatter) + + _store_logger_state(_get_all_loggers()) + _clean_all_handlers() + _set_root_handler(stream_handler) + + if logger_filter_mapping: + _apply_logger_filters(logger_filter_mapping) + + +@contextmanager +def _queued_logging_handler( + log_formatter: logging.Formatter, +) -> Iterator[logging.Handler]: + log_queue: queue.Queue[logging.LogRecord] = queue.Queue() + # Create handler with proper formatting + handler = logging.StreamHandler() + handler.setFormatter(log_formatter) + + # Create and start the queue listener + listener = logging.handlers.QueueListener( + log_queue, handler, respect_handler_level=True ) + listener.start() + + queue_handler = logging.handlers.QueueHandler(log_queue) + + yield queue_handler + + # cleanup + with log_context( + _logger, + level=logging.DEBUG, + msg="Shutdown async logging listener", + ): + listener.stop() + +def _clean_all_handlers() -> None: + """ + Cleans all handlers from all loggers. + This is useful for resetting the logging configuration. + """ + root_logger = logging.getLogger() all_loggers = _get_all_loggers() + for logger in all_loggers: + if logger is root_logger: + continue + logger.handlers.clear() + logger.propagate = True # Ensure propagation is enabled - # Apply comprehensive logging setup - # Note: We don't store the original state here since this is a permanent setup - _apply_comprehensive_logging_setup(all_loggers, root_handler) - # Apply filters - _apply_logger_filters(logger_filter_mapping) +def _set_root_handler(handler: logging.Handler) -> None: + root_logger = logging.getLogger() + root_logger.handlers.clear() # Clear existing handlers + root_logger.addHandler(handler) # Add the new handler @contextmanager @@ -338,57 +387,24 @@ def async_loggers( if tracing_settings is not None: setup_log_tracing(tracing_settings=tracing_settings) - fmt = _setup_format_string( + formatter = _setup_logging_formatter( tracing_settings=tracing_settings, log_format_local_dev_enabled=log_format_local_dev_enabled, ) - # Set up async logging infrastructure - log_queue: queue.Queue[logging.LogRecord] = queue.Queue() - # Create handler with proper formatting - handler = logging.StreamHandler() - handler.setFormatter( - CustomFormatter(fmt, log_format_local_dev_enabled=log_format_local_dev_enabled) - ) - - # Create and start the queue listener - listener = logging.handlers.QueueListener( - log_queue, handler, respect_handler_level=True - ) - listener.start() - - # Create queue handler for loggers - queue_handler = logging.handlers.QueueHandler(log_queue) - - # Apply comprehensive logging setup and store original state for restoration - all_loggers = _get_all_loggers() - original_logger_state = _apply_comprehensive_logging_setup( - all_loggers, queue_handler - ) + with ( + _queued_logging_handler(formatter) as queue_handler, + _stored_logger_states(_get_all_loggers()), + ): + _clean_all_handlers() + _set_root_handler(queue_handler) - try: if logger_filter_mapping: _apply_logger_filters(logger_filter_mapping) with log_context(_logger, logging.INFO, "Asynchronous logging"): yield - finally: - try: - _restore_logger_state(original_logger_state) - - # Stop the queue listener - with log_context( - _logger, - level=logging.DEBUG, - msg="Shutdown async logging listener", - ): - listener.stop() - - except Exception as exc: # pylint: disable=broad-except - sys.stderr.write(f"Error 
during async logging cleanup: {exc}\n") - sys.stderr.flush() - class LogExceptionsKwargsDict(TypedDict, total=True): logger: logging.Logger @@ -642,10 +658,37 @@ class _LoggerState: propagate: bool +@contextmanager +def _stored_logger_states( + loggers: list[logging.Logger], +) -> Iterator[list[_LoggerState]]: + """ + Context manager to store and restore the state of loggers. + It captures the current handlers and propagation state of each logger. + """ + original_state = _store_logger_state(loggers) + + try: + # log which loggers states were stored + _logger.info( + "Stored logger states: %s. TIP: these loggers configuration will be restored later.", + json_dumps( + [ + f"{state.logger.name}(handlers={len(state.handlers)}, propagate={state.propagate})" + for state in original_state + ] + ), + ) + yield original_state + finally: + _restore_logger_state(original_state) + + def _store_logger_state(loggers: list[logging.Logger]) -> list[_LoggerState]: return [ _LoggerState(logger, logger.handlers.copy(), logger.propagate) for logger in loggers + if logger.handlers or not logger.propagate ] From f05315628e95631997d09436668c01891a122658 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Mon, 14 Jul 2025 16:33:46 +0200 Subject: [PATCH 123/128] re-use common patterns --- .../src/servicelib/logging_utils.py | 73 ++++++++++++------- 1 file changed, 48 insertions(+), 25 deletions(-) diff --git a/packages/service-library/src/servicelib/logging_utils.py b/packages/service-library/src/servicelib/logging_utils.py index 4a005bd7d44..b8b30a9505b 100644 --- a/packages/service-library/src/servicelib/logging_utils.py +++ b/packages/service-library/src/servicelib/logging_utils.py @@ -216,6 +216,44 @@ def _dampen_noisy_loggers( logging.getLogger(name).setLevel(quiet_level) +def _configure_common_logging_settings( + *, + log_format_local_dev_enabled: bool, + tracing_settings: TracingSettings | None, + log_base_level: LogLevelInt, + noisy_loggers: tuple[str, ...] | None, +) -> logging.Formatter: + """ + Common configuration logic shared by both sync and async logging setups. + + Returns the configured formatter to be used with the appropriate handler. + """ + _setup_base_logging_level(log_base_level) + if noisy_loggers is not None: + _dampen_noisy_loggers(noisy_loggers) + if tracing_settings is not None: + setup_log_tracing(tracing_settings=tracing_settings) + + return _setup_logging_formatter( + tracing_settings=tracing_settings, + log_format_local_dev_enabled=log_format_local_dev_enabled, + ) + + +def _apply_logging_configuration( + handler: logging.Handler, + logger_filter_mapping: dict[LoggerName, list[MessageSubstring]], +) -> None: + """ + Apply the logging configuration with the given handler. 
+ """ + _clean_all_handlers() + _set_root_handler(handler) + + if logger_filter_mapping: + _apply_logger_filters(logger_filter_mapping) + + def setup_loggers( *, log_format_local_dev_enabled: bool, @@ -260,14 +298,11 @@ def setup_loggers( log_base_level: Base logging level to set noisy_loggers: Loggers to set to a quieter level """ - _setup_base_logging_level(log_base_level) - if noisy_loggers is not None: - _dampen_noisy_loggers(noisy_loggers) - if tracing_settings is not None: - setup_log_tracing(tracing_settings=tracing_settings) - formatter = _setup_logging_formatter( - tracing_settings=tracing_settings, + formatter = _configure_common_logging_settings( log_format_local_dev_enabled=log_format_local_dev_enabled, + tracing_settings=tracing_settings, + log_base_level=log_base_level, + noisy_loggers=noisy_loggers, ) # Create a properly formatted handler for the root logger @@ -275,11 +310,7 @@ def setup_loggers( stream_handler.setFormatter(formatter) _store_logger_state(_get_all_loggers()) - _clean_all_handlers() - _set_root_handler(stream_handler) - - if logger_filter_mapping: - _apply_logger_filters(logger_filter_mapping) + _apply_logging_configuration(stream_handler, logger_filter_mapping) @contextmanager @@ -381,26 +412,18 @@ def async_loggers( log_base_level: Base logging level to set noisy_loggers: Loggers to set to a quieter level """ - _setup_base_logging_level(log_base_level) - if noisy_loggers is not None: - _dampen_noisy_loggers(noisy_loggers) - - if tracing_settings is not None: - setup_log_tracing(tracing_settings=tracing_settings) - formatter = _setup_logging_formatter( - tracing_settings=tracing_settings, + formatter = _configure_common_logging_settings( log_format_local_dev_enabled=log_format_local_dev_enabled, + tracing_settings=tracing_settings, + log_base_level=log_base_level, + noisy_loggers=noisy_loggers, ) with ( _queued_logging_handler(formatter) as queue_handler, _stored_logger_states(_get_all_loggers()), ): - _clean_all_handlers() - _set_root_handler(queue_handler) - - if logger_filter_mapping: - _apply_logger_filters(logger_filter_mapping) + _apply_logging_configuration(queue_handler, logger_filter_mapping) with log_context(_logger, logging.INFO, "Asynchronous logging"): yield From 1667911a88f61046c58218f9febdf6ce1fec1ada Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Mon, 14 Jul 2025 16:35:01 +0200 Subject: [PATCH 124/128] make it same --- .../src/servicelib/logging_utils.py | 23 ++++++++++--------- 1 file changed, 12 insertions(+), 11 deletions(-) diff --git a/packages/service-library/src/servicelib/logging_utils.py b/packages/service-library/src/servicelib/logging_utils.py index b8b30a9505b..382f74ad7ff 100644 --- a/packages/service-library/src/servicelib/logging_utils.py +++ b/packages/service-library/src/servicelib/logging_utils.py @@ -692,27 +692,28 @@ def _stored_logger_states( original_state = _store_logger_state(loggers) try: - # log which loggers states were stored - _logger.info( - "Stored logger states: %s. 
TIP: these loggers configuration will be restored later.", - json_dumps( - [ - f"{state.logger.name}(handlers={len(state.handlers)}, propagate={state.propagate})" - for state in original_state - ] - ), - ) yield original_state finally: _restore_logger_state(original_state) def _store_logger_state(loggers: list[logging.Logger]) -> list[_LoggerState]: - return [ + logger_states = [ _LoggerState(logger, logger.handlers.copy(), logger.propagate) for logger in loggers if logger.handlers or not logger.propagate ] + # log which loggers states were stored + _logger.info( + "Stored logger states: %s. TIP: these loggers configuration will be restored later.", + json_dumps( + [ + f"{state.logger.name}(handlers={len(state.handlers)}, propagate={state.propagate})" + for state in logger_states + ] + ), + ) + return logger_states def _restore_logger_state(original_state: list[_LoggerState]) -> None: From d7b3cafc84cf7dfdd1d8d4a9f04ce2afe8afe82d Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Mon, 14 Jul 2025 16:35:16 +0200 Subject: [PATCH 125/128] cleanup --- .../src/servicelib/logging_utils.py | 58 ------------------- 1 file changed, 58 deletions(-) diff --git a/packages/service-library/src/servicelib/logging_utils.py b/packages/service-library/src/servicelib/logging_utils.py index 382f74ad7ff..18f64989d4e 100644 --- a/packages/service-library/src/servicelib/logging_utils.py +++ b/packages/service-library/src/servicelib/logging_utils.py @@ -722,61 +722,3 @@ def _restore_logger_state(original_state: list[_LoggerState]) -> None: logger.handlers.clear() logger.handlers.extend(state.handlers) logger.propagate = state.propagate - - -def _apply_comprehensive_logging_setup( - all_loggers: list[logging.Logger], - root_handler: logging.Handler, -) -> list[_LoggerState]: - """ - Apply comprehensive logging setup: clear all handlers, ensure propagation, - and set up root logger with the provided handler. - - Returns the original logger state for restoration. 
- """ - root_logger = logging.getLogger() - - # Store original state for restoration - original_logger_state = _store_logger_state(all_loggers) - - # Remove all handlers from all loggers and ensure propagation - loggers_modified = [] - for logger in all_loggers: - if logger is root_logger: - continue - - # Track what we're modifying for logging purposes - had_handlers = bool(logger.handlers) - had_propagate_disabled = not logger.propagate - - if had_handlers or had_propagate_disabled: - loggers_modified.append( - { - "name": logger.name, - "had_handlers": had_handlers, - "had_propagate_disabled": had_propagate_disabled, - "handlers": [type(h).__name__ for h in logger.handlers], - } - ) - - # Clear handlers and ensure propagation - logger.handlers.clear() - logger.propagate = True - - # Set up root logger with the provided handler only - root_logger.handlers.clear() - root_logger.addHandler(root_handler) - - if loggers_modified: - _logger.info( - "Modified %d loggers for comprehensive logging: %s", - len(loggers_modified), - json_dumps( - [ - f"{info['name']}(removed_handlers={info['handlers']}, enabled_propagate={info['had_propagate_disabled']})" - for info in loggers_modified - ] - ), - ) - - return original_logger_state From a7fb29bcfec43b0e85a69114ab804cc4b10221e5 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Mon, 14 Jul 2025 16:36:26 +0200 Subject: [PATCH 126/128] cleanup --- packages/service-library/src/servicelib/logging_utils.py | 1 - 1 file changed, 1 deletion(-) diff --git a/packages/service-library/src/servicelib/logging_utils.py b/packages/service-library/src/servicelib/logging_utils.py index 18f64989d4e..d335fc05989 100644 --- a/packages/service-library/src/servicelib/logging_utils.py +++ b/packages/service-library/src/servicelib/logging_utils.py @@ -184,7 +184,6 @@ def _get_all_loggers() -> list[logging.Logger]: def _apply_logger_filters( logger_filter_mapping: dict[LoggerName, list[MessageSubstring]], ) -> None: - """Apply filters to specific loggers.""" for logger_name, filtered_routes in logger_filter_mapping.items(): logger = logging.getLogger(logger_name) if not logger.hasHandlers(): From 1d141ef7723b5021c3008bca3edac595cb40da4b Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Mon, 14 Jul 2025 18:55:05 +0200 Subject: [PATCH 127/128] renaming --- .../src/servicelib/fastapi/logging_lifespan.py | 2 +- services/agent/src/simcore_service_agent/main.py | 4 ++-- services/api-server/src/simcore_service_api_server/main.py | 4 ++-- services/autoscaling/src/simcore_service_autoscaling/main.py | 4 ++-- .../src/simcore_service_clusters_keeper/main.py | 4 ++-- .../src/simcore_service_datcore_adapter/main.py | 4 ++-- .../src/simcore_service_director_v2/core/application.py | 4 ++-- services/director/src/simcore_service_director/main.py | 4 ++-- .../src/simcore_service_dynamic_sidecar/core/application.py | 4 ++-- .../efs-guardian/src/simcore_service_efs_guardian/main.py | 4 ++-- services/invitations/src/simcore_service_invitations/main.py | 4 ++-- services/payments/src/simcore_service_payments/main.py | 4 ++-- .../src/simcore_service_resource_usage_tracker/main.py | 4 ++-- services/storage/src/simcore_service_storage/main.py | 4 ++-- 14 files changed, 27 insertions(+), 27 deletions(-) diff --git a/packages/service-library/src/servicelib/fastapi/logging_lifespan.py b/packages/service-library/src/servicelib/fastapi/logging_lifespan.py index ce326c8ce13..6c41d1c0250 100644 --- 
a/packages/service-library/src/servicelib/fastapi/logging_lifespan.py +++ b/packages/service-library/src/servicelib/fastapi/logging_lifespan.py @@ -45,7 +45,7 @@ async def _logging_lifespan(app: FastAPI) -> AsyncIterator[None]: return _logging_lifespan -def setup_logging_shutdown_event( +def create_logging_shutdown_event( *, log_format_local_dev_enabled: bool, logger_filter_mapping: dict[LoggerName, list[MessageSubstring]], diff --git a/services/agent/src/simcore_service_agent/main.py b/services/agent/src/simcore_service_agent/main.py index 315239e60c1..1af4eb695c2 100644 --- a/services/agent/src/simcore_service_agent/main.py +++ b/services/agent/src/simcore_service_agent/main.py @@ -3,7 +3,7 @@ from common_library.json_serialization import json_dumps from fastapi import FastAPI -from servicelib.fastapi.logging_lifespan import setup_logging_shutdown_event +from servicelib.fastapi.logging_lifespan import create_logging_shutdown_event from simcore_service_agent.core.application import create_app from simcore_service_agent.core.settings import ApplicationSettings @@ -19,7 +19,7 @@ def app_factory() -> FastAPI: app_settings = ApplicationSettings.create_from_envs() - logging_shutdown_event = setup_logging_shutdown_event( + logging_shutdown_event = create_logging_shutdown_event( log_format_local_dev_enabled=app_settings.AGENT_VOLUMES_LOG_FORMAT_LOCAL_DEV_ENABLED, logger_filter_mapping=app_settings.AGENT_VOLUMES_LOG_FILTER_MAPPING, tracing_settings=app_settings.AGENT_TRACING, diff --git a/services/api-server/src/simcore_service_api_server/main.py b/services/api-server/src/simcore_service_api_server/main.py index 16be007be18..6b8ccc1783f 100644 --- a/services/api-server/src/simcore_service_api_server/main.py +++ b/services/api-server/src/simcore_service_api_server/main.py @@ -5,7 +5,7 @@ from common_library.json_serialization import json_dumps from fastapi import FastAPI -from servicelib.fastapi.logging_lifespan import setup_logging_shutdown_event +from servicelib.fastapi.logging_lifespan import create_logging_shutdown_event from simcore_service_api_server.core.application import create_app from simcore_service_api_server.core.settings import ApplicationSettings @@ -21,7 +21,7 @@ def app_factory() -> FastAPI: app_settings = ApplicationSettings.create_from_envs() - logging_shutdown_event = setup_logging_shutdown_event( + logging_shutdown_event = create_logging_shutdown_event( log_format_local_dev_enabled=app_settings.API_SERVER_LOG_FORMAT_LOCAL_DEV_ENABLED, logger_filter_mapping=app_settings.API_SERVER_LOG_FILTER_MAPPING, tracing_settings=app_settings.API_SERVER_TRACING, diff --git a/services/autoscaling/src/simcore_service_autoscaling/main.py b/services/autoscaling/src/simcore_service_autoscaling/main.py index 4ebf37037cd..b1f7055d75d 100644 --- a/services/autoscaling/src/simcore_service_autoscaling/main.py +++ b/services/autoscaling/src/simcore_service_autoscaling/main.py @@ -5,7 +5,7 @@ from common_library.json_serialization import json_dumps from fastapi import FastAPI -from servicelib.fastapi.logging_lifespan import setup_logging_shutdown_event +from servicelib.fastapi.logging_lifespan import create_logging_shutdown_event from simcore_service_autoscaling.core.application import create_app from simcore_service_autoscaling.core.settings import ApplicationSettings @@ -22,7 +22,7 @@ def app_factory() -> FastAPI: app_settings = ApplicationSettings.create_from_envs() - logging_shutdown_event = setup_logging_shutdown_event( + logging_shutdown_event = create_logging_shutdown_event( 
log_format_local_dev_enabled=app_settings.AUTOSCALING_LOG_FORMAT_LOCAL_DEV_ENABLED, logger_filter_mapping=app_settings.AUTOSCALING_LOG_FILTER_MAPPING, tracing_settings=app_settings.AUTOSCALING_TRACING, diff --git a/services/clusters-keeper/src/simcore_service_clusters_keeper/main.py b/services/clusters-keeper/src/simcore_service_clusters_keeper/main.py index 95677e3a2fe..d4e4bdf99ee 100644 --- a/services/clusters-keeper/src/simcore_service_clusters_keeper/main.py +++ b/services/clusters-keeper/src/simcore_service_clusters_keeper/main.py @@ -5,7 +5,7 @@ from common_library.json_serialization import json_dumps from fastapi import FastAPI -from servicelib.fastapi.logging_lifespan import setup_logging_shutdown_event +from servicelib.fastapi.logging_lifespan import create_logging_shutdown_event from simcore_service_clusters_keeper.core.application import create_app from simcore_service_clusters_keeper.core.settings import ApplicationSettings @@ -22,7 +22,7 @@ def app_factory() -> FastAPI: app_settings = ApplicationSettings.create_from_envs() - logging_shutdown_event = setup_logging_shutdown_event( + logging_shutdown_event = create_logging_shutdown_event( log_format_local_dev_enabled=app_settings.CLUSTERS_KEEPER_LOG_FORMAT_LOCAL_DEV_ENABLED, logger_filter_mapping=app_settings.CLUSTERS_KEEPER_LOG_FILTER_MAPPING, tracing_settings=app_settings.CLUSTERS_KEEPER_TRACING, diff --git a/services/datcore-adapter/src/simcore_service_datcore_adapter/main.py b/services/datcore-adapter/src/simcore_service_datcore_adapter/main.py index dbe4c2ef1e5..0a46179524d 100644 --- a/services/datcore-adapter/src/simcore_service_datcore_adapter/main.py +++ b/services/datcore-adapter/src/simcore_service_datcore_adapter/main.py @@ -5,7 +5,7 @@ from common_library.json_serialization import json_dumps from fastapi import FastAPI -from servicelib.fastapi.logging_lifespan import setup_logging_shutdown_event +from servicelib.fastapi.logging_lifespan import create_logging_shutdown_event from simcore_service_datcore_adapter.core.application import create_app from simcore_service_datcore_adapter.core.settings import ApplicationSettings @@ -20,7 +20,7 @@ def app_factory() -> FastAPI: app_settings = ApplicationSettings.create_from_envs() - logging_shutdown_event = setup_logging_shutdown_event( + logging_shutdown_event = create_logging_shutdown_event( log_format_local_dev_enabled=app_settings.DATCORE_ADAPTER_LOG_FORMAT_LOCAL_DEV_ENABLED, logger_filter_mapping=app_settings.DATCORE_ADAPTER_LOG_FILTER_MAPPING, tracing_settings=app_settings.DATCORE_ADAPTER_TRACING, diff --git a/services/director-v2/src/simcore_service_director_v2/core/application.py b/services/director-v2/src/simcore_service_director_v2/core/application.py index 22f3f4e539a..4a9c71605dc 100644 --- a/services/director-v2/src/simcore_service_director_v2/core/application.py +++ b/services/director-v2/src/simcore_service_director_v2/core/application.py @@ -6,7 +6,7 @@ from fastapi.exceptions import RequestValidationError from fastapi_lifespan_manager import LifespanManager from servicelib.fastapi.lifespan_utils import Lifespan -from servicelib.fastapi.logging_lifespan import setup_logging_shutdown_event +from servicelib.fastapi.logging_lifespan import create_logging_shutdown_event from servicelib.fastapi.openapi import ( get_common_oas_options, override_fastapi_openapi_method, @@ -118,7 +118,7 @@ def create_base_app( if app_settings is None: app_settings = AppSettings.create_from_envs() - logging_shutdown_event = setup_logging_shutdown_event( + logging_shutdown_event = 
create_logging_shutdown_event( log_format_local_dev_enabled=app_settings.DIRECTOR_V2_LOG_FORMAT_LOCAL_DEV_ENABLED, logger_filter_mapping=app_settings.DIRECTOR_V2_LOG_FILTER_MAPPING, tracing_settings=app_settings.DIRECTOR_V2_TRACING, diff --git a/services/director/src/simcore_service_director/main.py b/services/director/src/simcore_service_director/main.py index 805e2c9a76a..5ad1c4b03d9 100644 --- a/services/director/src/simcore_service_director/main.py +++ b/services/director/src/simcore_service_director/main.py @@ -5,7 +5,7 @@ from common_library.json_serialization import json_dumps from fastapi import FastAPI -from servicelib.fastapi.logging_lifespan import setup_logging_shutdown_event +from servicelib.fastapi.logging_lifespan import create_logging_shutdown_event from simcore_service_director.core.application import create_app from simcore_service_director.core.settings import ApplicationSettings @@ -20,7 +20,7 @@ def app_factory() -> FastAPI: app_settings = ApplicationSettings.create_from_envs() - logging_shutdown_event = setup_logging_shutdown_event( + logging_shutdown_event = create_logging_shutdown_event( log_format_local_dev_enabled=app_settings.DIRECTOR_LOG_FORMAT_LOCAL_DEV_ENABLED, logger_filter_mapping=app_settings.DIRECTOR_LOG_FILTER_MAPPING, tracing_settings=app_settings.DIRECTOR_TRACING, diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/application.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/application.py index 4b58fd75018..dad8f18dd59 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/application.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/application.py @@ -6,7 +6,7 @@ from fastapi import FastAPI from servicelib.async_utils import cancel_sequential_workers from servicelib.fastapi import long_running_tasks -from servicelib.fastapi.logging_lifespan import setup_logging_shutdown_event +from servicelib.fastapi.logging_lifespan import create_logging_shutdown_event from servicelib.fastapi.openapi import ( get_common_oas_options, override_fastapi_openapi_method, @@ -118,7 +118,7 @@ def compose_spec(self) -> str | None: def create_base_app() -> FastAPI: # settings app_settings = ApplicationSettings.create_from_envs() - logging_shutdown_event = setup_logging_shutdown_event( + logging_shutdown_event = create_logging_shutdown_event( log_format_local_dev_enabled=app_settings.DY_SIDECAR_LOG_FORMAT_LOCAL_DEV_ENABLED, logger_filter_mapping=app_settings.DY_SIDECAR_LOG_FILTER_MAPPING, tracing_settings=app_settings.DYNAMIC_SIDECAR_TRACING, diff --git a/services/efs-guardian/src/simcore_service_efs_guardian/main.py b/services/efs-guardian/src/simcore_service_efs_guardian/main.py index 6143f4a26a1..230016b548c 100644 --- a/services/efs-guardian/src/simcore_service_efs_guardian/main.py +++ b/services/efs-guardian/src/simcore_service_efs_guardian/main.py @@ -5,7 +5,7 @@ from common_library.json_serialization import json_dumps from fastapi import FastAPI -from servicelib.fastapi.logging_lifespan import setup_logging_shutdown_event +from servicelib.fastapi.logging_lifespan import create_logging_shutdown_event from simcore_service_efs_guardian.core.application import create_app from simcore_service_efs_guardian.core.settings import ApplicationSettings @@ -22,7 +22,7 @@ def app_factory() -> FastAPI: app_settings = ApplicationSettings.create_from_envs() - logging_shutdown_event = setup_logging_shutdown_event( + logging_shutdown_event = create_logging_shutdown_event( 
log_format_local_dev_enabled=app_settings.EFS_GUARDIAN_LOG_FORMAT_LOCAL_DEV_ENABLED, logger_filter_mapping=app_settings.EFS_GUARDIAN_LOG_FILTER_MAPPING, tracing_settings=app_settings.EFS_GUARDIAN_TRACING, diff --git a/services/invitations/src/simcore_service_invitations/main.py b/services/invitations/src/simcore_service_invitations/main.py index 03bb038822c..d59e54918a0 100644 --- a/services/invitations/src/simcore_service_invitations/main.py +++ b/services/invitations/src/simcore_service_invitations/main.py @@ -5,7 +5,7 @@ from common_library.json_serialization import json_dumps from fastapi import FastAPI -from servicelib.fastapi.logging_lifespan import setup_logging_shutdown_event +from servicelib.fastapi.logging_lifespan import create_logging_shutdown_event from simcore_service_invitations.core.application import create_app from simcore_service_invitations.core.settings import ApplicationSettings @@ -19,7 +19,7 @@ def app_factory() -> FastAPI: app_settings = ApplicationSettings.create_from_envs() - logging_shutdown_event = setup_logging_shutdown_event( + logging_shutdown_event = create_logging_shutdown_event( log_format_local_dev_enabled=app_settings.INVITATIONS_LOG_FORMAT_LOCAL_DEV_ENABLED, logger_filter_mapping=app_settings.INVITATIONS_LOG_FILTER_MAPPING, tracing_settings=app_settings.INVITATIONS_TRACING, diff --git a/services/payments/src/simcore_service_payments/main.py b/services/payments/src/simcore_service_payments/main.py index 262259ef04f..604d4adaa1b 100644 --- a/services/payments/src/simcore_service_payments/main.py +++ b/services/payments/src/simcore_service_payments/main.py @@ -5,7 +5,7 @@ from common_library.json_serialization import json_dumps from fastapi import FastAPI -from servicelib.fastapi.logging_lifespan import setup_logging_shutdown_event +from servicelib.fastapi.logging_lifespan import create_logging_shutdown_event from simcore_service_payments.core.application import create_app from simcore_service_payments.core.settings import ApplicationSettings @@ -22,7 +22,7 @@ def app_factory() -> FastAPI: app_settings = ApplicationSettings.create_from_envs() - logging_shutdown_event = setup_logging_shutdown_event( + logging_shutdown_event = create_logging_shutdown_event( log_format_local_dev_enabled=app_settings.PAYMENTS_LOG_FORMAT_LOCAL_DEV_ENABLED, logger_filter_mapping=app_settings.PAYMENTS_LOG_FILTER_MAPPING, tracing_settings=app_settings.PAYMENTS_TRACING, diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/main.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/main.py index 3e5a7380c6e..42d3184f155 100644 --- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/main.py +++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/main.py @@ -5,7 +5,7 @@ from common_library.json_serialization import json_dumps from fastapi import FastAPI -from servicelib.fastapi.logging_lifespan import setup_logging_shutdown_event +from servicelib.fastapi.logging_lifespan import create_logging_shutdown_event from simcore_service_resource_usage_tracker.core.application import create_app from simcore_service_resource_usage_tracker.core.settings import ApplicationSettings @@ -22,7 +22,7 @@ def app_factory() -> FastAPI: app_settings = ApplicationSettings.create_from_envs() - logging_shutdown_event = setup_logging_shutdown_event( + logging_shutdown_event = create_logging_shutdown_event( log_format_local_dev_enabled=app_settings.RESOURCE_USAGE_TRACKER_LOG_FORMAT_LOCAL_DEV_ENABLED, 
logger_filter_mapping=app_settings.RESOURCE_USAGE_TRACKER_LOG_FILTER_MAPPING, tracing_settings=app_settings.RESOURCE_USAGE_TRACKER_TRACING, diff --git a/services/storage/src/simcore_service_storage/main.py b/services/storage/src/simcore_service_storage/main.py index a55c20ed5e9..f2282decaa5 100644 --- a/services/storage/src/simcore_service_storage/main.py +++ b/services/storage/src/simcore_service_storage/main.py @@ -5,7 +5,7 @@ from common_library.json_serialization import json_dumps from fastapi import FastAPI -from servicelib.fastapi.logging_lifespan import setup_logging_shutdown_event +from servicelib.fastapi.logging_lifespan import create_logging_shutdown_event from simcore_service_storage.core.application import create_app from simcore_service_storage.core.settings import ApplicationSettings @@ -24,7 +24,7 @@ def app_factory() -> FastAPI: app_settings = ApplicationSettings.create_from_envs() - logging_shutdown_event = setup_logging_shutdown_event( + logging_shutdown_event = create_logging_shutdown_event( log_format_local_dev_enabled=app_settings.STORAGE_LOG_FORMAT_LOCAL_DEV_ENABLED, logger_filter_mapping=app_settings.STORAGE_LOG_FILTER_MAPPING, tracing_settings=app_settings.STORAGE_TRACING, From fae4400ebc6cfe74438fc46b28e610e162df2fd5 Mon Sep 17 00:00:00 2001 From: sanderegg <35365065+sanderegg@users.noreply.github.com> Date: Mon, 14 Jul 2025 18:57:02 +0200 Subject: [PATCH 128/128] renaming --- .../src/servicelib/fastapi/logging_lifespan.py | 2 +- services/catalog/src/simcore_service_catalog/main.py | 4 ++-- .../src/simcore_service_dynamic_scheduler/main.py | 4 ++-- .../notifications/src/simcore_service_notifications/main.py | 4 ++-- 4 files changed, 7 insertions(+), 7 deletions(-) diff --git a/packages/service-library/src/servicelib/fastapi/logging_lifespan.py b/packages/service-library/src/servicelib/fastapi/logging_lifespan.py index 6c41d1c0250..e06d976878b 100644 --- a/packages/service-library/src/servicelib/fastapi/logging_lifespan.py +++ b/packages/service-library/src/servicelib/fastapi/logging_lifespan.py @@ -16,7 +16,7 @@ _logger = logging.getLogger(__name__) -def logging_lifespan( +def create_logging_lifespan( *, log_format_local_dev_enabled: bool, logger_filter_mapping: dict[LoggerName, list[MessageSubstring]], diff --git a/services/catalog/src/simcore_service_catalog/main.py b/services/catalog/src/simcore_service_catalog/main.py index 24f75294a17..60f5da962d4 100644 --- a/services/catalog/src/simcore_service_catalog/main.py +++ b/services/catalog/src/simcore_service_catalog/main.py @@ -5,7 +5,7 @@ from common_library.json_serialization import json_dumps from fastapi import FastAPI -from servicelib.fastapi.logging_lifespan import logging_lifespan +from servicelib.fastapi.logging_lifespan import create_logging_lifespan from simcore_service_catalog.core.application import create_app from simcore_service_catalog.core.settings import ApplicationSettings @@ -24,7 +24,7 @@ def app_factory() -> FastAPI: app_settings = ApplicationSettings.create_from_envs() - logging_lifespan = logging_lifespan( + logging_lifespan = create_logging_lifespan( log_format_local_dev_enabled=app_settings.CATALOG_LOG_FORMAT_LOCAL_DEV_ENABLED, logger_filter_mapping=app_settings.CATALOG_LOG_FILTER_MAPPING, tracing_settings=app_settings.CATALOG_TRACING, diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/main.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/main.py index 2c8c8c15111..bf1f22e27e6 100644 --- 
a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/main.py +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/main.py @@ -6,7 +6,7 @@ from common_library.json_serialization import json_dumps from fastapi import FastAPI from servicelib.fastapi.logging_lifespan import ( - logging_lifespan, + create_logging_lifespan, ) from simcore_service_dynamic_scheduler.core.application import create_app from simcore_service_dynamic_scheduler.core.settings import ApplicationSettings @@ -24,7 +24,7 @@ def app_factory() -> FastAPI: app_settings = ApplicationSettings.create_from_envs() - logging_lifespan = logging_lifespan( + logging_lifespan = create_logging_lifespan( log_format_local_dev_enabled=app_settings.DYNAMIC_SCHEDULER_LOG_FORMAT_LOCAL_DEV_ENABLED, logger_filter_mapping=app_settings.DYNAMIC_SCHEDULER_LOG_FILTER_MAPPING, tracing_settings=app_settings.DYNAMIC_SCHEDULER_TRACING, diff --git a/services/notifications/src/simcore_service_notifications/main.py b/services/notifications/src/simcore_service_notifications/main.py index 03413729ffd..cda95f9dd9f 100644 --- a/services/notifications/src/simcore_service_notifications/main.py +++ b/services/notifications/src/simcore_service_notifications/main.py @@ -3,7 +3,7 @@ from common_library.json_serialization import json_dumps from fastapi import FastAPI -from servicelib.fastapi.logging_lifespan import logging_lifespan +from servicelib.fastapi.logging_lifespan import create_logging_lifespan from simcore_service_notifications.core.application import create_app from simcore_service_notifications.core.settings import ( ApplicationSettings, @@ -19,7 +19,7 @@ def app_factory() -> FastAPI: app_settings = ApplicationSettings.create_from_envs() - logging_lifespan = logging_lifespan( + logging_lifespan = create_logging_lifespan( log_format_local_dev_enabled=app_settings.NOTIFICATIONS_VOLUMES_LOG_FORMAT_LOCAL_DEV_ENABLED, logger_filter_mapping=app_settings.NOTIFICATIONS_VOLUMES_LOG_FILTER_MAPPING, tracing_settings=app_settings.NOTIFICATIONS_TRACING,
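
Note (editor): the patches above converge on the stdlib QueueHandler/QueueListener pattern (an unbounded queue fed by the application thread, a background listener thread doing the actual stream I/O, and restoration of the original logger state on exit). The following is a minimal standalone sketch of that pattern for readers unfamiliar with it; it is not part of servicelib, and the names non_blocking_logging and the format string are illustrative assumptions, not APIs introduced by this series.

import logging
import logging.handlers
import queue
from collections.abc import Iterator
from contextlib import contextmanager


@contextmanager
def non_blocking_logging(fmt: str = "%(levelname)s: %(name)s - %(message)s") -> Iterator[None]:
    """Route root-logger records through a queue so logging calls never block on I/O."""
    log_queue: queue.Queue[logging.LogRecord] = queue.Queue()  # unbounded, as in the patches

    # The handler that actually writes runs in the listener's background thread.
    stream_handler = logging.StreamHandler()
    stream_handler.setFormatter(logging.Formatter(fmt))

    listener = logging.handlers.QueueListener(
        log_queue, stream_handler, respect_handler_level=True
    )
    listener.start()

    root_logger = logging.getLogger()
    previous_handlers = root_logger.handlers[:]  # stored so state can be restored on exit
    root_logger.handlers.clear()
    root_logger.addHandler(logging.handlers.QueueHandler(log_queue))

    try:
        yield
    finally:
        # Restore the original handlers, then drain and stop the listener thread.
        root_logger.handlers.clear()
        root_logger.handlers.extend(previous_handlers)
        listener.stop()


if __name__ == "__main__":
    logging.getLogger().setLevel(logging.INFO)
    with non_blocking_logging():
        logging.getLogger("demo").info(
            "this call returns immediately; the listener thread performs the I/O"
        )

The design choice mirrored here is that producers only enqueue LogRecord objects, so application threads (and the asyncio event loop) are decoupled from slow console or network output, and listener.stop() flushes whatever is still queued before the context exits.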