Skip to content

Commit 3d209f1

Browse files
committed
refactor
1 parent 4bb16a2 commit 3d209f1

File tree

9 files changed

+27
-30
lines changed

9 files changed

+27
-30
lines changed

services/storage/src/simcore_service_storage/cli.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -9,11 +9,11 @@
99

1010
LOG_LEVEL_STEP = logging.CRITICAL - logging.ERROR
1111

12-
log = logging.getLogger(__name__)
12+
_logger = logging.getLogger(__name__)
1313

1414
main = typer.Typer(name="simcore-service-storage service")
1515

16-
main.command()(create_settings_command(settings_cls=Settings, logger=log))
16+
main.command()(create_settings_command(settings_cls=Settings, logger=_logger))
1717

1818

1919
@main.command()

services/storage/src/simcore_service_storage/datcore_adapter/datcore_adapter.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,7 @@
2121
DatcoreAdapterTimeoutError,
2222
)
2323

24-
log = logging.getLogger(__file__)
24+
_logger = logging.getLogger(__name__)
2525

2626

2727
class _DatcoreAdapterResponseError(DatcoreAdapterError):
@@ -102,7 +102,7 @@ async def _retrieve_all_pages(
102102
),
103103
)
104104
) and response.get("items"):
105-
log.debug(
105+
_logger.debug(
106106
"called %s [%d/%d], received %d objects",
107107
path,
108108
page,
@@ -155,7 +155,7 @@ async def list_all_datasets_files_metadatas(
155155
)
156156
for d in all_datasets
157157
),
158-
log=log,
158+
log=_logger,
159159
max_concurrency=MAX_CONCURRENT_REST_CALLS,
160160
)
161161
all_files_of_all_datasets: list[FileMetaData] = []

services/storage/src/simcore_service_storage/db_tokens.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,7 @@
99

1010
from .constants import APP_AIOPG_ENGINE_KEY, APP_CONFIG_KEY
1111

12-
log = logging.getLogger(__name__)
12+
_logger = logging.getLogger(__name__)
1313

1414

1515
async def _get_tokens_from_db(engine: Engine, user_id: UserID) -> dict[str, Any]:

services/storage/src/simcore_service_storage/handlers_datasets.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,7 @@
2121
StorageQueryParamsBase,
2222
)
2323

24-
log = logging.getLogger(__name__)
24+
_logger = logging.getLogger(__name__)
2525

2626
routes = RouteTableDef()
2727

@@ -36,7 +36,7 @@ async def list_datasets_metadata(request: web.Request) -> web.Response:
3636
StorageQueryParamsBase, request
3737
)
3838
path_params = parse_request_path_parameters_as(LocationPathParams, request)
39-
log.debug(
39+
_logger.debug(
4040
"received call to list_datasets_metadata with %s",
4141
f"{path_params=}, {query_params=}",
4242
)
@@ -58,7 +58,7 @@ async def list_dataset_files_metadata(request: web.Request) -> web.Response:
5858
path_params = parse_request_path_parameters_as(
5959
FilesMetadataDatasetPathParams, request
6060
)
61-
log.debug(
61+
_logger.debug(
6262
"received call to list_dataset_files_metadata with %s",
6363
f"{path_params=}, {query_params=}",
6464
)

services/storage/src/simcore_service_storage/handlers_health.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
"""
22
3-
- Checks connectivity with other services in the backend
3+
- Checks connectivity with other services in the backend
44
55
"""
66

@@ -21,7 +21,7 @@
2121
from .s3 import get_s3_client
2222
from .settings import Settings
2323

24-
log = logging.getLogger(__name__)
24+
_logger = logging.getLogger(__name__)
2525

2626
routes = web.RouteTableDef()
2727

services/storage/src/simcore_service_storage/handlers_locations.py

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,7 @@
2424
from .settings import Settings
2525
from .simcore_s3_dsm import SimcoreS3DataManager
2626

27-
log = logging.getLogger(__name__)
27+
_logger = logging.getLogger(__name__)
2828

2929
routes = RouteTableDef()
3030

@@ -35,7 +35,7 @@ async def list_storage_locations(request: web.Request) -> web.Response:
3535
query_params: StorageQueryParamsBase = parse_request_query_parameters_as(
3636
StorageQueryParamsBase, request
3737
)
38-
log.debug(
38+
_logger.debug(
3939
"received call to list_storage_locations with %s",
4040
f"{query_params=}",
4141
)
@@ -58,7 +58,7 @@ async def synchronise_meta_data_table(request: web.Request) -> web.Response:
5858
SyncMetadataQueryParams, request
5959
)
6060
path_params = parse_request_path_parameters_as(LocationPathParams, request)
61-
log.debug(
61+
_logger.debug(
6262
"received call to synchronise_meta_data_table with %s",
6363
f"{path_params=}, {query_params=}",
6464
)
@@ -77,12 +77,12 @@ async def _go():
7777
timeout = settings.STORAGE_SYNC_METADATA_TIMEOUT
7878
try:
7979
result = await asyncio.wait_for(sync_coro, timeout=timeout)
80-
log.info(
80+
_logger.info(
8181
"Sync metadata table completed: %d entries removed",
8282
len(result),
8383
)
8484
except asyncio.TimeoutError:
85-
log.exception("Sync metadata table timed out (%s seconds)", timeout)
85+
_logger.exception("Sync metadata table timed out (%s seconds)", timeout)
8686

8787
fire_and_forget_task(
8888
_go(),

services/storage/src/simcore_service_storage/s3.py

Lines changed: 7 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,4 @@
1-
""" Module to access s3 service
2-
3-
"""
1+
"""Module to access s3 service"""
42

53
import logging
64
from collections.abc import AsyncGenerator
@@ -17,28 +15,28 @@
1715
from .constants import APP_CONFIG_KEY, APP_S3_KEY, RETRY_WAIT_SECS
1816
from .settings import Settings
1917

20-
log = logging.getLogger(__name__)
18+
_logger = logging.getLogger(__name__)
2119

2220

2321
async def setup_s3_client(app) -> AsyncGenerator[None, None]:
2422
client = None
2523

26-
with log_context(log, logging.DEBUG, msg="setup.s3_client.cleanup_ctx"):
24+
with log_context(_logger, logging.DEBUG, msg="setup.s3_client.cleanup_ctx"):
2725
storage_settings: Settings = app[APP_CONFIG_KEY]
2826
storage_s3_settings = storage_settings.STORAGE_S3
2927
assert storage_s3_settings # nosec
3028

3129
async for attempt in AsyncRetrying(
3230
wait=wait_fixed(RETRY_WAIT_SECS),
33-
before_sleep=before_sleep_log(log, logging.WARNING),
31+
before_sleep=before_sleep_log(_logger, logging.WARNING),
3432
reraise=True,
3533
):
3634
with attempt:
3735
client = await SimcoreS3API.create(
3836
storage_s3_settings,
3937
storage_settings.STORAGE_S3_CLIENT_MAX_TRANSFER_CONCURRENCY,
4038
)
41-
log.info(
39+
_logger.info(
4240
"S3 client %s successfully created [%s]",
4341
f"{client=}",
4442
json_dumps(attempt.retry_state.retry_object.statistics),
@@ -48,13 +46,13 @@ async def setup_s3_client(app) -> AsyncGenerator[None, None]:
4846

4947
yield
5048

51-
with log_context(log, logging.DEBUG, msg="teardown.s3_client.cleanup_ctx"):
49+
with log_context(_logger, logging.DEBUG, msg="teardown.s3_client.cleanup_ctx"):
5250
if client:
5351
await client.close()
5452

5553

5654
async def setup_s3_bucket(app: web.Application):
57-
with log_context(log, logging.DEBUG, msg="setup.s3_bucket.cleanup_ctx"):
55+
with log_context(_logger, logging.DEBUG, msg="setup.s3_bucket.cleanup_ctx"):
5856
storage_s3_settings = app[APP_CONFIG_KEY].STORAGE_S3
5957
client = get_s3_client(app)
6058
await client.create_bucket(

services/storage/src/simcore_service_storage/settings.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -88,14 +88,14 @@ class Settings(BaseCustomSettings, MixinLoggingSettings):
8888
"STORAGE_LOG_FORMAT_LOCAL_DEV_ENABLED",
8989
"LOG_FORMAT_LOCAL_DEV_ENABLED",
9090
),
91-
description="Enables local development log format. WARNING: make sure it is disabled if you want to have structured logs!",
91+
description="Enables local development log format. WARNING: make sure it is disabled if you want to have structured logs!",
9292
)
9393
STORAGE_LOG_FILTER_MAPPING: dict[LoggerName, list[MessageSubstring]] = Field(
9494
default_factory=dict,
9595
validation_alias=AliasChoices(
9696
"STORAGE_LOG_FILTER_MAPPING", "LOG_FILTER_MAPPING"
9797
),
98-
description="is a dictionary that maps specific loggers (such as 'uvicorn.access' or 'gunicorn.access') to a list of log message patterns that should be filtered out.",
98+
description="is a dictionary that maps specific loggers (such as 'uvicorn.access' or 'gunicorn.access') to a list of log message patterns that should be filtered out.",
9999
)
100100

101101
@field_validator("LOG_LEVEL", mode="before")

services/storage/src/simcore_service_storage/simcore_s3_dsm.py

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -789,7 +789,6 @@ async def create_soft_link(
789789
async def synchronise_meta_data_table(
790790
self, *, dry_run: bool
791791
) -> list[StorageFileID]:
792-
793792
async with self.engine.acquire() as conn:
794793
_logger.warning(
795794
"Total number of entries to check %d",
@@ -855,7 +854,7 @@ async def _clean_expired_uploads(self) -> None:
855854
for fmd in list_of_expired_uploads
856855
),
857856
reraise=False,
858-
log=_logger,
857+
log=_logger,
859858
limit=_NO_CONCURRENCY,
860859
)
861860

@@ -886,7 +885,7 @@ async def _revert_file(fmd: FileMetaDataAtDB) -> FileMetaDataAtDB:
886885
reverted_fmds = await limited_gather(
887886
*(_revert_file(fmd) for fmd in list_of_fmds_to_delete),
888887
reraise=False,
889-
log=_logger,
888+
log=_logger,
890889
limit=_NO_CONCURRENCY,
891890
)
892891
list_of_fmds_to_delete = [

0 commit comments

Comments
 (0)