Skip to content

Commit 35b4a85

Browse files
committed
@pcrespov review: use prompt twice
1 parent 13320c2 commit 35b4a85

File tree

1 file changed

+158
-119
lines changed
  • services/director-v2/src/simcore_service_director_v2/core

1 file changed

+158
-119
lines changed

services/director-v2/src/simcore_service_director_v2/core/settings.py

Lines changed: 158 additions & 119 deletions
Original file line number | Diff line number | Diff line change
@@ -6,6 +6,7 @@
66
from functools import cached_property
77
from typing import Annotated, cast
88

9+
from common_library.basic_types import DEFAULT_FACTORY
910
from common_library.pydantic_validators import validate_numeric_string_as_timedelta
1011
from fastapi import FastAPI
1112
from models_library.basic_types import LogLevel, PortInt
@@ -50,43 +51,53 @@
5051

5152

5253
class ComputationalBackendSettings(BaseCustomSettings):
53-
COMPUTATIONAL_BACKEND_ENABLED: bool = Field(
54-
default=True,
55-
)
56-
COMPUTATIONAL_BACKEND_SCHEDULING_CONCURRENCY: PositiveInt = Field(
57-
default=50,
58-
description="defines how many pipelines the application can schedule concurrently",
59-
)
60-
COMPUTATIONAL_BACKEND_DASK_CLIENT_ENABLED: bool = Field(
61-
default=True,
62-
)
63-
COMPUTATIONAL_BACKEND_DEFAULT_CLUSTER_URL: AnyUrl = Field(
64-
...,
65-
description="This is the cluster that will be used by default"
66-
" when submitting computational services (typically "
67-
"tcp://dask-scheduler:8786, tls://dask-scheduler:8786 for the internal cluster",
68-
)
69-
COMPUTATIONAL_BACKEND_DEFAULT_CLUSTER_AUTH: ClusterAuthentication = Field(
70-
default=...,
71-
description="this is the cluster authentication that will be used by default",
72-
)
73-
COMPUTATIONAL_BACKEND_DEFAULT_CLUSTER_FILE_LINK_TYPE: FileLinkType = Field(
74-
FileLinkType.S3,
75-
description=f"Default file link type to use with the internal cluster '{list(FileLinkType)}'",
76-
)
77-
COMPUTATIONAL_BACKEND_DEFAULT_FILE_LINK_TYPE: FileLinkType = Field(
78-
FileLinkType.PRESIGNED,
79-
description=f"Default file link type to use with computational backend '{list(FileLinkType)}'",
80-
)
81-
COMPUTATIONAL_BACKEND_ON_DEMAND_CLUSTERS_FILE_LINK_TYPE: FileLinkType = Field(
82-
FileLinkType.PRESIGNED,
83-
description=f"Default file link type to use with computational backend on-demand clusters '{list(FileLinkType)}'",
84-
)
85-
COMPUTATIONAL_BACKEND_MAX_WAITING_FOR_CLUSTER_TIMEOUT: datetime.timedelta = Field(
86-
default=datetime.timedelta(minutes=10),
87-
description="maximum time a pipeline can wait for a cluster to start"
88-
"(default to seconds, or see https://pydantic-docs.helpmanual.io/usage/types/#datetime-types for string formatting).",
89-
)
54+
COMPUTATIONAL_BACKEND_ENABLED: bool = True
55+
COMPUTATIONAL_BACKEND_SCHEDULING_CONCURRENCY: Annotated[
56+
PositiveInt,
57+
Field(
58+
description="defines how many pipelines the application can schedule concurrently"
59+
),
60+
] = 50
61+
COMPUTATIONAL_BACKEND_DASK_CLIENT_ENABLED: bool = True
62+
COMPUTATIONAL_BACKEND_DEFAULT_CLUSTER_URL: Annotated[
63+
AnyUrl,
64+
Field(
65+
description="This is the cluster that will be used by default"
66+
" when submitting computational services (typically "
67+
"tcp://dask-scheduler:8786, tls://dask-scheduler:8786 for the internal cluster",
68+
),
69+
]
70+
COMPUTATIONAL_BACKEND_DEFAULT_CLUSTER_AUTH: Annotated[
71+
ClusterAuthentication,
72+
Field(
73+
description="this is the cluster authentication that will be used by default"
74+
),
75+
]
76+
COMPUTATIONAL_BACKEND_DEFAULT_CLUSTER_FILE_LINK_TYPE: Annotated[
77+
FileLinkType,
78+
Field(
79+
description=f"Default file link type to use with the internal cluster '{list(FileLinkType)}'"
80+
),
81+
] = FileLinkType.S3
82+
COMPUTATIONAL_BACKEND_DEFAULT_FILE_LINK_TYPE: Annotated[
83+
FileLinkType,
84+
Field(
85+
description=f"Default file link type to use with computational backend '{list(FileLinkType)}'"
86+
),
87+
] = FileLinkType.PRESIGNED
88+
COMPUTATIONAL_BACKEND_ON_DEMAND_CLUSTERS_FILE_LINK_TYPE: Annotated[
89+
FileLinkType,
90+
Field(
91+
description=f"Default file link type to use with computational backend on-demand clusters '{list(FileLinkType)}'"
92+
),
93+
] = FileLinkType.PRESIGNED
94+
COMPUTATIONAL_BACKEND_MAX_WAITING_FOR_CLUSTER_TIMEOUT: Annotated[
95+
datetime.timedelta,
96+
Field(
97+
description="maximum time a pipeline can wait for a cluster to start"
98+
"(default to seconds, or see https://pydantic-docs.helpmanual.io/usage/types/#datetime-types for string formatting)."
99+
),
100+
] = datetime.timedelta(minutes=10)
90101

91102
@cached_property
92103
def default_cluster(self) -> BaseCluster:
@@ -116,91 +127,107 @@ class AppSettings(BaseApplicationSettings, MixinLoggingSettings):
116127
),
117128
] = LogLevel.INFO
118129

119-
DIRECTOR_V2_LOG_FORMAT_LOCAL_DEV_ENABLED: bool = Field(
120-
default=False,
121-
validation_alias=AliasChoices(
122-
"DIRECTOR_V2_LOG_FORMAT_LOCAL_DEV_ENABLED",
123-
"LOG_FORMAT_LOCAL_DEV_ENABLED",
130+
DIRECTOR_V2_LOG_FORMAT_LOCAL_DEV_ENABLED: Annotated[
131+
bool,
132+
Field(
133+
validation_alias=AliasChoices(
134+
"DIRECTOR_V2_LOG_FORMAT_LOCAL_DEV_ENABLED",
135+
"LOG_FORMAT_LOCAL_DEV_ENABLED",
136+
),
137+
description="Enables local development log format. WARNING: make sure it is disabled if you want to have structured logs!",
124138
),
125-
description="Enables local development log format. WARNING: make sure it is disabled if you want to have structured logs!",
126-
)
127-
DIRECTOR_V2_LOG_FILTER_MAPPING: dict[LoggerName, list[MessageSubstring]] = Field(
128-
default_factory=dict,
129-
validation_alias=AliasChoices(
130-
"DIRECTOR_V2_LOG_FILTER_MAPPING", "LOG_FILTER_MAPPING"
139+
] = False
140+
DIRECTOR_V2_LOG_FILTER_MAPPING: Annotated[
141+
dict[LoggerName, list[MessageSubstring]],
142+
Field(
143+
default_factory=dict,
144+
validation_alias=AliasChoices(
145+
"DIRECTOR_V2_LOG_FILTER_MAPPING", "LOG_FILTER_MAPPING"
146+
),
147+
description="is a dictionary that maps specific loggers (such as 'uvicorn.access' or 'gunicorn.access') to a list of log message patterns that should be filtered out.",
131148
),
132-
description="is a dictionary that maps specific loggers (such as 'uvicorn.access' or 'gunicorn.access') to a list of log message patterns that should be filtered out.",
133-
)
149+
] = DEFAULT_FACTORY
134150
DIRECTOR_V2_DEV_FEATURES_ENABLED: bool = False
135151

136-
DIRECTOR_V2_DEV_FEATURE_R_CLONE_MOUNTS_ENABLED: bool = Field(
137-
default=False,
138-
description=(
139-
"Under development feature. If enabled state "
140-
"is saved using rclone docker volumes."
152+
DIRECTOR_V2_DEV_FEATURE_R_CLONE_MOUNTS_ENABLED: Annotated[
153+
bool,
154+
Field(
155+
description=(
156+
"Under development feature. If enabled state "
157+
"is saved using rclone docker volumes."
158+
)
141159
),
142-
)
160+
] = False
143161

144162
# for passing self-signed certificate to spawned services
145-
DIRECTOR_V2_SELF_SIGNED_SSL_SECRET_ID: str = Field(
146-
default="",
147-
description="ID of the docker secret containing the self-signed certificate",
148-
)
149-
DIRECTOR_V2_SELF_SIGNED_SSL_SECRET_NAME: str = Field(
150-
default="",
151-
description="Name of the docker secret containing the self-signed certificate",
152-
)
153-
DIRECTOR_V2_SELF_SIGNED_SSL_FILENAME: str = Field(
154-
default="",
155-
description="Filepath to self-signed osparc.crt file *as mounted inside the container*, empty strings disables it",
156-
)
163+
DIRECTOR_V2_SELF_SIGNED_SSL_SECRET_ID: Annotated[
164+
str,
165+
Field(
166+
description="ID of the docker secret containing the self-signed certificate"
167+
),
168+
] = ""
169+
DIRECTOR_V2_SELF_SIGNED_SSL_SECRET_NAME: Annotated[
170+
str,
171+
Field(
172+
description="Name of the docker secret containing the self-signed certificate"
173+
),
174+
] = ""
175+
DIRECTOR_V2_SELF_SIGNED_SSL_FILENAME: Annotated[
176+
str,
177+
Field(
178+
description="Filepath to self-signed osparc.crt file *as mounted inside the container*, empty strings disables it"
179+
),
180+
] = ""
157181
DIRECTOR_V2_PROMETHEUS_INSTRUMENTATION_ENABLED: bool = True
158182
DIRECTOR_V2_PROFILING: bool = False
159183

160-
DIRECTOR_V2_REMOTE_DEBUGGING_PORT: PortInt | None = Field(default=None)
184+
DIRECTOR_V2_REMOTE_DEBUGGING_PORT: PortInt | None = None
161185

162186
# extras
163-
SWARM_STACK_NAME: str = Field(default="undefined-please-check")
164-
SERVICE_TRACKING_HEARTBEAT: datetime.timedelta = Field(
165-
default=DEFAULT_RESOURCE_USAGE_HEARTBEAT_INTERVAL,
166-
description="Service scheduler heartbeat (everytime a heartbeat is sent into RabbitMQ)"
167-
" (default to seconds, or see https://pydantic-docs.helpmanual.io/usage/types/#datetime-types for string formating)",
168-
)
187+
SWARM_STACK_NAME: str = "undefined-please-check"
188+
SERVICE_TRACKING_HEARTBEAT: Annotated[
189+
datetime.timedelta,
190+
Field(
191+
description="Service scheduler heartbeat (everytime a heartbeat is sent into RabbitMQ)"
192+
" (default to seconds, or see https://pydantic-docs.helpmanual.io/usage/types/#datetime-types for string formating)"
193+
),
194+
] = DEFAULT_RESOURCE_USAGE_HEARTBEAT_INTERVAL
169195

170-
SIMCORE_SERVICES_NETWORK_NAME: str | None = Field(
171-
default=None,
172-
description="used to find the right network name",
173-
)
174-
SIMCORE_SERVICES_PREFIX: str | None = Field(
175-
"simcore/services",
176-
description="useful when developing with an alternative registry namespace",
177-
)
196+
SIMCORE_SERVICES_NETWORK_NAME: Annotated[
197+
str | None, Field(description="used to find the right network name")
198+
] = None
199+
SIMCORE_SERVICES_PREFIX: Annotated[
200+
str | None,
201+
Field(
202+
description="useful when developing with an alternative registry namespace"
203+
),
204+
] = "simcore/services"
178205

179-
DIRECTOR_V2_NODE_PORTS_400_REQUEST_TIMEOUT_ATTEMPTS: NonNegativeInt = Field(
180-
default=NODE_PORTS_400_REQUEST_TIMEOUT_ATTEMPTS_DEFAULT_VALUE,
181-
description="forwarded to sidecars which use nodeports",
182-
)
206+
DIRECTOR_V2_NODE_PORTS_400_REQUEST_TIMEOUT_ATTEMPTS: Annotated[
207+
NonNegativeInt, Field(description="forwarded to sidecars which use nodeports")
208+
] = NODE_PORTS_400_REQUEST_TIMEOUT_ATTEMPTS_DEFAULT_VALUE
183209

184210
# debug settings
185-
CLIENT_REQUEST: ClientRequestSettings = Field(
186-
json_schema_extra={"auto_default_from_env": True}
187-
)
211+
CLIENT_REQUEST: Annotated[
212+
ClientRequestSettings, Field(json_schema_extra={"auto_default_from_env": True})
213+
] = DEFAULT_FACTORY
188214

189215
# App modules settings ---------------------
190216
DIRECTOR_V2_STORAGE: Annotated[
191217
StorageSettings, Field(json_schema_extra={"auto_default_from_env": True})
192218
]
193-
DIRECTOR_V2_NODE_PORTS_STORAGE_AUTH: StorageAuthSettings | None = Field(
194-
json_schema_extra={"auto_default_from_env": True}
195-
)
219+
DIRECTOR_V2_NODE_PORTS_STORAGE_AUTH: Annotated[
220+
StorageAuthSettings | None,
221+
Field(json_schema_extra={"auto_default_from_env": True}),
222+
] = None
196223

197224
DIRECTOR_V2_CATALOG: Annotated[
198225
CatalogSettings | None, Field(json_schema_extra={"auto_default_from_env": True})
199226
]
200227

201-
DIRECTOR_V0: DirectorV0Settings = Field(
202-
json_schema_extra={"auto_default_from_env": True}
203-
)
228+
DIRECTOR_V0: Annotated[
229+
DirectorV0Settings, Field(json_schema_extra={"auto_default_from_env": True})
230+
] = DEFAULT_FACTORY
204231

205232
DYNAMIC_SERVICES: Annotated[
206233
DynamicServicesSettings,
@@ -211,35 +238,47 @@ class AppSettings(BaseApplicationSettings, MixinLoggingSettings):
211238
PostgresSettings, Field(json_schema_extra={"auto_default_from_env": True})
212239
]
213240

214-
REDIS: RedisSettings = Field(json_schema_extra={"auto_default_from_env": True})
241+
REDIS: Annotated[
242+
RedisSettings, Field(json_schema_extra={"auto_default_from_env": True})
243+
] = DEFAULT_FACTORY
215244

216-
DIRECTOR_V2_RABBITMQ: RabbitSettings = Field(
217-
json_schema_extra={"auto_default_from_env": True}
218-
)
245+
DIRECTOR_V2_RABBITMQ: Annotated[
246+
RabbitSettings, Field(json_schema_extra={"auto_default_from_env": True})
247+
] = DEFAULT_FACTORY
219248

220-
TRAEFIK_SIMCORE_ZONE: str = Field("internal_simcore_stack")
249+
TRAEFIK_SIMCORE_ZONE: str = "internal_simcore_stack"
221250

222-
DIRECTOR_V2_COMPUTATIONAL_BACKEND: ComputationalBackendSettings = Field(
223-
json_schema_extra={"auto_default_from_env": True}
224-
)
251+
DIRECTOR_V2_COMPUTATIONAL_BACKEND: Annotated[
252+
ComputationalBackendSettings,
253+
Field(json_schema_extra={"auto_default_from_env": True}),
254+
] = DEFAULT_FACTORY
225255

226-
DIRECTOR_V2_DOCKER_REGISTRY: RegistrySettings = Field(
227-
json_schema_extra={"auto_default_from_env": True},
228-
description="settings for the private registry deployed with the platform",
229-
)
230-
DIRECTOR_V2_DOCKER_HUB_REGISTRY: RegistrySettings | None = Field(
231-
default=None, description="public DockerHub registry settings"
232-
)
256+
DIRECTOR_V2_DOCKER_REGISTRY: Annotated[
257+
RegistrySettings,
258+
Field(
259+
json_schema_extra={"auto_default_from_env": True},
260+
description="settings for the private registry deployed with the platform",
261+
),
262+
] = DEFAULT_FACTORY
263+
DIRECTOR_V2_DOCKER_HUB_REGISTRY: Annotated[
264+
RegistrySettings | None, Field(description="public DockerHub registry settings")
265+
] = None
233266

234-
DIRECTOR_V2_RESOURCE_USAGE_TRACKER: ResourceUsageTrackerSettings = Field(
235-
json_schema_extra={"auto_default_from_env": True},
236-
description="resource usage tracker service client's plugin",
237-
)
267+
DIRECTOR_V2_RESOURCE_USAGE_TRACKER: Annotated[
268+
ResourceUsageTrackerSettings,
269+
Field(
270+
json_schema_extra={"auto_default_from_env": True},
271+
description="resource usage tracker service client's plugin",
272+
),
273+
] = DEFAULT_FACTORY
238274

239-
DIRECTOR_V2_TRACING: TracingSettings | None = Field(
240-
json_schema_extra={"auto_default_from_env": True},
241-
description="settings for opentelemetry tracing",
242-
)
275+
DIRECTOR_V2_TRACING: Annotated[
276+
TracingSettings | None,
277+
Field(
278+
json_schema_extra={"auto_default_from_env": True},
279+
description="settings for opentelemetry tracing",
280+
),
281+
] = None
243282

244283
@field_validator("LOG_LEVEL", mode="before")
245284
@classmethod

0 commit comments

Comments
 (0)