
Commit 9d09bc1

Author: Andrei Neagu

Merge remote-tracking branch 'upstream/pydantic_v2_migration_do_not_squash_updates' into pr-osparc-pydantic-v2-fixes-2

2 parents f131ea7 + 24218c1, commit 9d09bc1

75 files changed, +629 -224 lines

.env-devel

Lines changed: 15 additions & 14 deletions
@@ -10,7 +10,7 @@
 # unset $(grep -v '^#' .env | sed -E 's/(.*)=.*/\1/' | xargs)
 #
 
-AGENT_LOGLEVEL=WARNING
+AGENT_LOGLEVEL=INFO
 AGENT_VOLUMES_CLEANUP_S3_ACCESS_KEY=12345678
 AGENT_VOLUMES_CLEANUP_S3_BUCKET=simcore-volume-backups
 AGENT_VOLUMES_CLEANUP_S3_ENDPOINT=http://172.17.0.1:9001
@@ -19,7 +19,7 @@ AGENT_VOLUMES_CLEANUP_S3_REGION=us-east-1
 AGENT_VOLUMES_CLEANUP_S3_SECRET_KEY=12345678
 
 API_SERVER_DEV_FEATURES_ENABLED=0
-API_SERVER_LOGLEVEL=WARNING
+API_SERVER_LOGLEVEL=INFO
 API_SERVER_PROFILING=1
 TRAEFIK_API_SERVER_INFLIGHTREQ_AMOUNT=25
 
@@ -29,7 +29,7 @@ AUTOSCALING_DOCKER_JOIN_DRAINED=True
 AUTOSCALING_WAIT_FOR_CLOUD_INIT_BEFORE_WARM_BUFFER_ACTIVATION=False
 AUTOSCALING_EC2_ACCESS=null
 AUTOSCALING_EC2_INSTANCES=null
-AUTOSCALING_LOGLEVEL=WARNING
+AUTOSCALING_LOGLEVEL=INFO
 AUTOSCALING_NODES_MONITORING=null
 AUTOSCALING_POLL_INTERVAL="00:00:10"
 AUTOSCALING_SSM_ACCESS=null
@@ -39,7 +39,7 @@ AWS_S3_CLI_S3=null
 CATALOG_BACKGROUND_TASK_REST_TIME=60
 CATALOG_DEV_FEATURES_ENABLED=0
 CATALOG_HOST=catalog
-CATALOG_LOGLEVEL=WARNING
+CATALOG_LOGLEVEL=INFO
 CATALOG_PORT=8000
 CATALOG_PROFILING=1
 CATALOG_SERVICES_DEFAULT_RESOURCES='{"CPU": {"limit": 0.1, "reservation": 0.1}, "RAM": {"limit": 2147483648, "reservation": 2147483648}}'
@@ -52,15 +52,15 @@ CLUSTERS_KEEPER_DASK_WORKER_SATURATION=inf
 CLUSTERS_KEEPER_EC2_ACCESS=null
 CLUSTERS_KEEPER_SSM_ACCESS=null
 CLUSTERS_KEEPER_EC2_INSTANCES_PREFIX=""
-CLUSTERS_KEEPER_LOGLEVEL=WARNING
+CLUSTERS_KEEPER_LOGLEVEL=INFO
 CLUSTERS_KEEPER_MAX_MISSED_HEARTBEATS_BEFORE_CLUSTER_TERMINATION=5
 CLUSTERS_KEEPER_PRIMARY_EC2_INSTANCES=null
 CLUSTERS_KEEPER_TASK_INTERVAL=30
 CLUSTERS_KEEPER_WORKERS_EC2_INSTANCES=null
 
 DASK_SCHEDULER_HOST=dask-scheduler
 DASK_SCHEDULER_PORT=8786
-DASK_SIDECAR_LOGLEVEL=WARNING
+DASK_SIDECAR_LOGLEVEL=INFO
 DASK_TLS_CA_FILE=/home/scu/.dask/dask-crt.pem
 DASK_TLS_CERT=/home/scu/.dask/dask-crt.pem
 DASK_TLS_KEY=/home/scu/.dask/dask-key.pem
@@ -91,7 +91,7 @@ DIRECTOR_V2_DYNAMIC_SCHEDULER_CLOSE_SERVICES_VIA_FRONTEND_WHEN_CREDITS_LIMIT_REA
 DIRECTOR_V2_DYNAMIC_SIDECAR_SLEEP_AFTER_CONTAINER_REMOVAL=0
 DIRECTOR_V2_GENERIC_RESOURCE_PLACEMENT_CONSTRAINTS_SUBSTITUTIONS='{}'
 DIRECTOR_V2_HOST=director-v2
-DIRECTOR_V2_LOGLEVEL=WARNING
+DIRECTOR_V2_LOGLEVEL=INFO
 DIRECTOR_V2_NODE_PORTS_STORAGE_AUTH=null
 DIRECTOR_V2_PORT=8000
 DIRECTOR_V2_PROFILING=1
@@ -115,7 +115,7 @@ FUNCTION_SERVICES_AUTHORS='{"UN": {"name": "Unknown", "email": "[email protected]
 # Can use 'docker run -it itisfoundation/invitations:latest simcore-service-invitations generate-dotenv --auto-password'
 INVITATIONS_DEFAULT_PRODUCT=osparc
 INVITATIONS_HOST=invitations
-INVITATIONS_LOGLEVEL=WARNING
+INVITATIONS_LOGLEVEL=INFO
 INVITATIONS_OSPARC_URL=http://127.0.0.1.nip.io:9081
 INVITATIONS_PASSWORD=adminadmin
 INVITATIONS_PORT=8000
@@ -124,6 +124,7 @@ INVITATIONS_SWAGGER_API_DOC_ENABLED=1
 INVITATIONS_USERNAME=admin
 
 LOG_FORMAT_LOCAL_DEV_ENABLED=1
+LOG_FILTER_MAPPING='{}'
 
 PAYMENTS_ACCESS_TOKEN_EXPIRE_MINUTES=30
 PAYMENTS_ACCESS_TOKEN_SECRET_KEY=2c0411810565e063309be1457009fb39ce023946f6a354e6935107b57676
@@ -138,7 +139,7 @@ PAYMENTS_FAKE_COMPLETION=0
 PAYMENTS_GATEWAY_API_SECRET=adminadmin
 PAYMENTS_GATEWAY_URL=http://127.0.0.1:32769
 PAYMENTS_HOST=payments
-PAYMENTS_LOGLEVEL=WARNING
+PAYMENTS_LOGLEVEL=INFO
 PAYMENTS_PASSWORD=adminadmin
 PAYMENTS_PORT=8000
 PAYMENTS_STRIPE_API_SECRET='REPLACE_ME_with_api_secret'
@@ -179,7 +180,7 @@ RESOURCE_MANAGER_RESOURCE_TTL_S=900
 RESOURCE_USAGE_TRACKER_HOST=resource-usage-tracker
 RESOURCE_USAGE_TRACKER_PORT=8000
 RESOURCE_USAGE_TRACKER_EXTERNAL_PORT=8000
-RESOURCE_USAGE_TRACKER_LOGLEVEL=WARNING
+RESOURCE_USAGE_TRACKER_LOGLEVEL=INFO
 RESOURCE_USAGE_TRACKER_MISSED_HEARTBEAT_CHECK_ENABLED=1
 RESOURCE_USAGE_TRACKER_MISSED_HEARTBEAT_COUNTER_FAIL=6
 RESOURCE_USAGE_TRACKER_MISSED_HEARTBEAT_INTERVAL_SEC=300
@@ -214,7 +215,7 @@ BF_API_KEY=none
 BF_API_SECRET=none
 STORAGE_ENDPOINT=storage:8080
 STORAGE_HOST=storage
-STORAGE_LOGLEVEL=WARNING
+STORAGE_LOGLEVEL=INFO
 STORAGE_PORT=8080
 STORAGE_PROFILING=1
 # STORAGE ----
@@ -245,7 +246,7 @@ WB_GC_GARBAGE_COLLECTOR='{"GARBAGE_COLLECTOR_INTERVAL_S": 30}'
 WB_GC_GROUPS=0
 WB_GC_INVITATIONS=null
 WB_GC_LOGIN=null
-WB_GC_LOGLEVEL=WARNING
+WB_GC_LOGLEVEL=INFO
 WB_GC_META_MODELING=0
 WB_GC_NOTIFICATIONS=0
 WB_GC_PAYMENTS=null
@@ -278,7 +279,7 @@ WB_DB_EL_GARBAGE_COLLECTOR=null
 WB_DB_EL_GROUPS=0
 WB_DB_EL_INVITATIONS=null
 WB_DB_EL_LOGIN=null
-WB_DB_EL_LOGLEVEL=WARNING
+WB_DB_EL_LOGLEVEL=INFO
 WB_DB_EL_META_MODELING=0
 WB_DB_EL_NOTIFICATIONS=0
 WB_DB_EL_PAYMENTS=null
@@ -348,7 +349,7 @@ WEBSERVER_GROUPS=1
 WEBSERVER_GUNICORN_CMD_ARGS=--timeout=180
 WEBSERVER_HOST=webserver
 WEBSERVER_LOGIN={}
-WEBSERVER_LOGLEVEL=WARNING
+WEBSERVER_LOGLEVEL=INFO
 WEBSERVER_META_MODELING=1
 WEBSERVER_NOTIFICATIONS=1
 WEBSERVER_PAYMENTS={}
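
Aside from lowering every service log level from WARNING to INFO, this file gains LOG_FILTER_MAPPING, which feeds the new filtering hook in servicelib's logging setup (see logging_utils_filtering.py further down). Judging from that module, the value is a JSON object mapping a logger name to message substrings whose records should be dropped; the entry below is a hypothetical illustration, not part of this commit:

    # hypothetical example -- silence healthcheck noise from a chatty access logger
    LOG_FILTER_MAPPING='{"uvicorn.access": ["/healthcheck"]}'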

packages/aws-library/tests/test_s3_client.py

Lines changed: 1 addition & 1 deletion
@@ -696,7 +696,7 @@ async def test_create_single_presigned_download_link(
 
     dest_file = tmp_path / faker.file_name()
     async with ClientSession() as session:
-        response = await session.get(download_url)
+        response = await session.get(f"{download_url}")
         response.raise_for_status()
         with dest_file.open("wb") as fp:
             fp.write(await response.read())
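
The f-string wrapper looks cosmetic, but is plausibly pydantic v2 fallout: v2 URL types such as AnyUrl are no longer str subclasses, so values that used to flow straight into HTTP clients now need explicit stringification. A minimal sketch of the difference, assuming pydantic v2:

    from pydantic import AnyUrl, TypeAdapter

    url = TypeAdapter(AnyUrl).validate_python("https://example.com/file")
    assert not isinstance(url, str)  # v2: a Url object; in v1 this was a str subclass
    assert f"{url}" == str(url)  # either spelling yields the plain string a client expects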

packages/models-library/src/models_library/docker.py

Lines changed: 1 addition & 1 deletion
@@ -75,7 +75,7 @@ def to_simcore_runtime_docker_label_key(key: str) -> DockerLabelKey:
 class StandardSimcoreDockerLabels(BaseModel):
     """
     Represents the standard label on oSparc created containers (not yet services)
-    In order to create this object in code, please use construct() method!
+    In order to create this object in code, please use model_construct() method!
     """
 
     user_id: UserID = Field(..., alias=f"{_SIMCORE_RUNTIME_DOCKER_LABEL_PREFIX}user-id")  # type: ignore[literal-required]
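
A docstring-only fix tracking pydantic v2's rename of the no-validation constructor from construct() to model_construct(). A sketch with a hypothetical model (not from this commit):

    from pydantic import BaseModel

    class Example(BaseModel):  # hypothetical model for illustration
        user_id: int

    obj = Example.model_construct(user_id=42)  # v2 spelling; v1 was Example.construct(user_id=42)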

packages/postgres-database/tests/test_utils_projects.py

Lines changed: 2 additions & 2 deletions
@@ -12,7 +12,7 @@
 from aiopg.sa.connection import SAConnection
 from aiopg.sa.result import RowProxy
 from faker import Faker
-from pydantic import parse_obj_as
+from pydantic import TypeAdapter
 from simcore_postgres_database.models.projects import projects
 from simcore_postgres_database.utils_projects import (
     DBProjectNotFoundError,
@@ -69,7 +69,7 @@ async def test_get_project_trashed_at_column_can_be_converted_to_datetime(
 
     row = result.fetchone()
 
-    trashed_at = parse_obj_as(datetime | None, row.trashed_at)
+    trashed_at = TypeAdapter(datetime | None).validate_python(row.trashed_at)
     assert trashed_at == expected
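
This is the canonical v1-to-v2 substitution repeated throughout the commit: the removed parse_obj_as() helper becomes a TypeAdapter, which validates arbitrary annotations, unions included. A self-contained sketch:

    from datetime import datetime

    from pydantic import TypeAdapter

    adapter = TypeAdapter(datetime | None)  # v1 equivalent: parse_obj_as(datetime | None, value)
    assert adapter.validate_python(None) is None
    assert isinstance(adapter.validate_python("2024-01-01T00:00:00"), datetime)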

packages/service-library/src/servicelib/logging_utils.py

Lines changed: 21 additions & 3 deletions
@@ -16,6 +16,7 @@
 from pathlib import Path
 from typing import Any, NotRequired, TypeAlias, TypedDict, TypeVar
 
+from .logging_utils_filtering import GeneralLogFilter, LoggerName, MessageSubstring
 from .utils_secrets import mask_sensitive_data
 
 _logger = logging.getLogger(__name__)
@@ -86,7 +87,11 @@ def format(self, record) -> str:
 # log_level=%{WORD:log_level} \| log_timestamp=%{TIMESTAMP_ISO8601:log_timestamp} \| log_source=%{DATA:log_source} \| log_msg=%{GREEDYDATA:log_msg}
 
 
-def config_all_loggers(*, log_format_local_dev_enabled: bool) -> None:
+def config_all_loggers(
+    *,
+    log_format_local_dev_enabled: bool,
+    logger_filter_mapping: dict[LoggerName, list[MessageSubstring]],
+) -> None:
     """
     Applies common configuration to ALL registered loggers
     """
@@ -102,12 +107,25 @@ def config_all_loggers(*, log_format_local_dev_enabled: bool) -> None:
         fmt = LOCAL_FORMATTING
 
     for logger in loggers:
-        set_logging_handler(
+        _set_logging_handler(
             logger, fmt=fmt, log_format_local_dev_enabled=log_format_local_dev_enabled
         )
 
+    for logger_name, filtered_routes in logger_filter_mapping.items():
+        logger = logging.getLogger(logger_name)
+        # Check if the logger has any handlers or is in active use
+        if not logger.hasHandlers():
+            _logger.warning(
+                "Logger %s does not have any handlers. Filter will not be added.",
+                logger_name,
+            )
+            continue
+
+        log_filter = GeneralLogFilter(filtered_routes)
+        logger.addFilter(log_filter)
+
 
-def set_logging_handler(
+def _set_logging_handler(
     logger: logging.Logger,
     *,
     fmt: str,
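
Since logger_filter_mapping is a required keyword argument, every caller of config_all_loggers() has to be updated; an empty mapping preserves the previous behavior. A usage sketch (the logger name and substring are invented for illustration):

    from servicelib.logging_utils import config_all_loggers

    config_all_loggers(
        log_format_local_dev_enabled=True,
        logger_filter_mapping={"uvicorn.access": ["/healthcheck"]},  # pass {} to disable filtering
    )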
packages/service-library/src/servicelib/logging_utils_filtering.py

Lines changed: 28 additions & 0 deletions
@@ -0,0 +1,28 @@
+"""
+This codes originates from this article
+https://medium.com/swlh/add-log-decorators-to-your-python-project-84094f832181
+
+SEE also https://github.com/Delgan/loguru for a future alternative
+"""
+
+import logging
+from typing import TypeAlias
+
+_logger = logging.getLogger(__name__)
+
+LoggerName: TypeAlias = str
+MessageSubstring: TypeAlias = str
+
+
+class GeneralLogFilter(logging.Filter):
+    def __init__(self, filtered_routes: list[str]) -> None:
+        super().__init__()
+        self.filtered_routes = filtered_routes
+
+    def filter(self, record: logging.LogRecord) -> bool:
+        msg = record.getMessage()
+
+        # Check if the filtered routes exists in the message
+        return not any(
+            filter_criteria in msg for filter_criteria in self.filtered_routes
+        )
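
GeneralLogFilter drops any record whose formatted message contains one of the configured substrings; everything else passes. It can also be attached by hand, independently of config_all_loggers(). A minimal sketch with invented names:

    import logging

    from servicelib.logging_utils_filtering import GeneralLogFilter

    access_logger = logging.getLogger("uvicorn.access")  # example logger name
    access_logger.addHandler(logging.StreamHandler())
    access_logger.addFilter(GeneralLogFilter(["/healthcheck"]))

    access_logger.warning("GET /healthcheck 200")  # suppressed by the filter
    access_logger.warning("GET /projects 200")  # passes through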

packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/agent/volumes.py

Lines changed: 7 additions & 3 deletions
@@ -4,7 +4,7 @@
 
 from models_library.projects_nodes_io import NodeID
 from models_library.rabbitmq_basic_types import RPCMethodName, RPCNamespace
-from pydantic import NonNegativeInt, parse_obj_as
+from pydantic import NonNegativeInt, TypeAdapter
 from servicelib.logging_utils import log_decorator
 from servicelib.rabbitmq import RabbitMQRPCClient
 
@@ -29,7 +29,9 @@ async def remove_volumes_without_backup_for_service(
                 "swarm_stack_name": swarm_stack_name,
             }
         ),
-        parse_obj_as(RPCMethodName, "remove_volumes_without_backup_for_service"),
+        TypeAdapter(RPCMethodName).validate_python(
+            "remove_volumes_without_backup_for_service"
+        ),
         node_id=node_id,
         timeout_s=_REQUEST_TIMEOUT,
     )
@@ -51,7 +53,9 @@ async def backup_and_remove_volumes_for_all_services(
                 "swarm_stack_name": swarm_stack_name,
             }
         ),
-        parse_obj_as(RPCMethodName, "backup_and_remove_volumes_for_all_services"),
+        TypeAdapter(RPCMethodName).validate_python(
+            "backup_and_remove_volumes_for_all_services"
+        ),
         timeout_s=_REQUEST_TIMEOUT,
     )
     assert result is None  # nosec

packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/dynamic_sidecar/disk_usage.py

Lines changed: 12 additions & 4 deletions
@@ -1,14 +1,20 @@
 import logging
+from typing import Final
 
 from models_library.api_schemas_dynamic_sidecar.telemetry import DiskUsage
 from models_library.projects_nodes_io import NodeID
 from models_library.rabbitmq_basic_types import RPCMethodName, RPCNamespace
-from pydantic import parse_obj_as
-from servicelib.logging_utils import log_decorator
-from servicelib.rabbitmq import RabbitMQRPCClient
+from pydantic import TypeAdapter
+
+from ....logging_utils import log_decorator
+from ... import RabbitMQRPCClient
 
 _logger = logging.getLogger(__name__)
 
+_UPDATE_DISK_USAGE: Final[RPCMethodName] = TypeAdapter(RPCMethodName).validate_python(
+    "update_disk_usage"
+)
+
 
 @log_decorator(_logger, level=logging.DEBUG)
 async def update_disk_usage(
@@ -21,6 +27,8 @@ async def update_disk_usage(
         {"service": "dy-sidecar", "node_id": f"{node_id}"}
     )
     result = await rabbitmq_rpc_client.request(
-        rpc_namespace, parse_obj_as(RPCMethodName, "update_disk_usage"), usage=usage
+        rpc_namespace,
+        _UPDATE_DISK_USAGE,
+        usage=usage,
     )
     assert result is None  # nosec
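
Besides moving to relative imports, this module hoists the validated RPC method name into a module-level Final, so the TypeAdapter runs once at import time rather than on every call — a micro-optimization the other RPC clients touched in this commit could arguably adopt as well. The idiom, with a hypothetical method name:

    from typing import Final

    from models_library.rabbitmq_basic_types import RPCMethodName
    from pydantic import TypeAdapter

    _MY_METHOD: Final[RPCMethodName] = TypeAdapter(RPCMethodName).validate_python(
        "my_method_name"  # hypothetical; validated once at import time, reused per request
    )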

packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/resource_usage_tracker/service_runs.py

Lines changed: 2 additions & 2 deletions
@@ -1,5 +1,5 @@
 import logging
-from typing import Final, cast
+from typing import Final
 
 from models_library.api_schemas_resource_usage_tracker import (
     RESOURCE_USAGE_TRACKER_RPC_NAMESPACE,
@@ -115,5 +115,5 @@ async def export_service_runs(
         filters=filters,
         timeout_s=_DEFAULT_TIMEOUT_S,
     )
-    assert cast(AnyUrl, isinstance(result, AnyUrl))  # nosec
+    assert isinstance(result, AnyUrl)  # nosec
     return result
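
The old assertion wrapped its isinstance check in typing.cast, which does nothing at runtime — cast() simply returns its second argument unchanged — so the cast only obscured the intent. A quick demonstration:

    from typing import cast

    value = isinstance(1, int)
    assert cast(bool, value) is value  # cast() is a runtime no-op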
