Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
272 changes: 158 additions & 114 deletions services/director-v2/src/simcore_service_director_v2/core/settings.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
from functools import cached_property
from typing import Annotated, cast

from common_library.basic_types import DEFAULT_FACTORY
from common_library.pydantic_validators import validate_numeric_string_as_timedelta
from fastapi import FastAPI
from models_library.basic_types import LogLevel, PortInt
Expand Down Expand Up @@ -50,38 +51,53 @@


class ComputationalBackendSettings(BaseCustomSettings):
COMPUTATIONAL_BACKEND_ENABLED: bool = Field(
default=True,
)
COMPUTATIONAL_BACKEND_SCHEDULING_CONCURRENCY: PositiveInt = Field(
default=50,
description="defines how many pipelines the application can schedule concurrently",
)
COMPUTATIONAL_BACKEND_DASK_CLIENT_ENABLED: bool = Field(
default=True,
)
COMPUTATIONAL_BACKEND_DEFAULT_CLUSTER_URL: AnyUrl = Field(
...,
description="This is the cluster that will be used by default"
" when submitting computational services (typically "
"tcp://dask-scheduler:8786, tls://dask-scheduler:8786 for the internal cluster",
)
COMPUTATIONAL_BACKEND_DEFAULT_CLUSTER_AUTH: ClusterAuthentication = Field(
default=...,
description="this is the cluster authentication that will be used by default",
)
COMPUTATIONAL_BACKEND_DEFAULT_CLUSTER_FILE_LINK_TYPE: FileLinkType = Field(
FileLinkType.S3,
description=f"Default file link type to use with the internal cluster '{list(FileLinkType)}'",
)
COMPUTATIONAL_BACKEND_DEFAULT_FILE_LINK_TYPE: FileLinkType = Field(
FileLinkType.PRESIGNED,
description=f"Default file link type to use with computational backend '{list(FileLinkType)}'",
)
COMPUTATIONAL_BACKEND_ON_DEMAND_CLUSTERS_FILE_LINK_TYPE: FileLinkType = Field(
FileLinkType.PRESIGNED,
description=f"Default file link type to use with computational backend on-demand clusters '{list(FileLinkType)}'",
)
COMPUTATIONAL_BACKEND_ENABLED: bool = True
COMPUTATIONAL_BACKEND_SCHEDULING_CONCURRENCY: Annotated[
PositiveInt,
Field(
description="defines how many pipelines the application can schedule concurrently"
),
] = 50
COMPUTATIONAL_BACKEND_DASK_CLIENT_ENABLED: bool = True
COMPUTATIONAL_BACKEND_DEFAULT_CLUSTER_URL: Annotated[
AnyUrl,
Field(
description="This is the cluster that will be used by default"
" when submitting computational services (typically "
"tcp://dask-scheduler:8786, tls://dask-scheduler:8786 for the internal cluster",
),
]
COMPUTATIONAL_BACKEND_DEFAULT_CLUSTER_AUTH: Annotated[
ClusterAuthentication,
Field(
description="this is the cluster authentication that will be used by default"
),
]
COMPUTATIONAL_BACKEND_DEFAULT_CLUSTER_FILE_LINK_TYPE: Annotated[
FileLinkType,
Field(
description=f"Default file link type to use with the internal cluster '{list(FileLinkType)}'"
),
] = FileLinkType.S3
COMPUTATIONAL_BACKEND_DEFAULT_FILE_LINK_TYPE: Annotated[
FileLinkType,
Field(
description=f"Default file link type to use with computational backend '{list(FileLinkType)}'"
),
] = FileLinkType.PRESIGNED
COMPUTATIONAL_BACKEND_ON_DEMAND_CLUSTERS_FILE_LINK_TYPE: Annotated[
FileLinkType,
Field(
description=f"Default file link type to use with computational backend on-demand clusters '{list(FileLinkType)}'"
),
] = FileLinkType.PRESIGNED
COMPUTATIONAL_BACKEND_MAX_WAITING_FOR_CLUSTER_TIMEOUT: Annotated[
datetime.timedelta,
Field(
description="maximum time a pipeline can wait for a cluster to start"
"(default to seconds, or see https://pydantic-docs.helpmanual.io/usage/types/#datetime-types for string formatting)."
),
] = datetime.timedelta(minutes=10)

@cached_property
def default_cluster(self) -> BaseCluster:
Expand Down Expand Up @@ -111,91 +127,107 @@ class AppSettings(BaseApplicationSettings, MixinLoggingSettings):
),
] = LogLevel.INFO

DIRECTOR_V2_LOG_FORMAT_LOCAL_DEV_ENABLED: bool = Field(
default=False,
validation_alias=AliasChoices(
"DIRECTOR_V2_LOG_FORMAT_LOCAL_DEV_ENABLED",
"LOG_FORMAT_LOCAL_DEV_ENABLED",
DIRECTOR_V2_LOG_FORMAT_LOCAL_DEV_ENABLED: Annotated[
bool,
Field(
validation_alias=AliasChoices(
"DIRECTOR_V2_LOG_FORMAT_LOCAL_DEV_ENABLED",
"LOG_FORMAT_LOCAL_DEV_ENABLED",
),
description="Enables local development log format. WARNING: make sure it is disabled if you want to have structured logs!",
),
description="Enables local development log format. WARNING: make sure it is disabled if you want to have structured logs!",
)
DIRECTOR_V2_LOG_FILTER_MAPPING: dict[LoggerName, list[MessageSubstring]] = Field(
default_factory=dict,
validation_alias=AliasChoices(
"DIRECTOR_V2_LOG_FILTER_MAPPING", "LOG_FILTER_MAPPING"
] = False
DIRECTOR_V2_LOG_FILTER_MAPPING: Annotated[
dict[LoggerName, list[MessageSubstring]],
Field(
default_factory=dict,
validation_alias=AliasChoices(
"DIRECTOR_V2_LOG_FILTER_MAPPING", "LOG_FILTER_MAPPING"
),
description="is a dictionary that maps specific loggers (such as 'uvicorn.access' or 'gunicorn.access') to a list of log message patterns that should be filtered out.",
),
description="is a dictionary that maps specific loggers (such as 'uvicorn.access' or 'gunicorn.access') to a list of log message patterns that should be filtered out.",
)
] = DEFAULT_FACTORY
DIRECTOR_V2_DEV_FEATURES_ENABLED: bool = False

DIRECTOR_V2_DEV_FEATURE_R_CLONE_MOUNTS_ENABLED: bool = Field(
default=False,
description=(
"Under development feature. If enabled state "
"is saved using rclone docker volumes."
DIRECTOR_V2_DEV_FEATURE_R_CLONE_MOUNTS_ENABLED: Annotated[
bool,
Field(
description=(
"Under development feature. If enabled state "
"is saved using rclone docker volumes."
)
),
)
] = False

# for passing self-signed certificate to spawned services
DIRECTOR_V2_SELF_SIGNED_SSL_SECRET_ID: str = Field(
default="",
description="ID of the docker secret containing the self-signed certificate",
)
DIRECTOR_V2_SELF_SIGNED_SSL_SECRET_NAME: str = Field(
default="",
description="Name of the docker secret containing the self-signed certificate",
)
DIRECTOR_V2_SELF_SIGNED_SSL_FILENAME: str = Field(
default="",
description="Filepath to self-signed osparc.crt file *as mounted inside the container*, empty strings disables it",
)
DIRECTOR_V2_SELF_SIGNED_SSL_SECRET_ID: Annotated[
str,
Field(
description="ID of the docker secret containing the self-signed certificate"
),
] = ""
DIRECTOR_V2_SELF_SIGNED_SSL_SECRET_NAME: Annotated[
str,
Field(
description="Name of the docker secret containing the self-signed certificate"
),
] = ""
DIRECTOR_V2_SELF_SIGNED_SSL_FILENAME: Annotated[
str,
Field(
description="Filepath to self-signed osparc.crt file *as mounted inside the container*, empty strings disables it"
),
] = ""
DIRECTOR_V2_PROMETHEUS_INSTRUMENTATION_ENABLED: bool = True
DIRECTOR_V2_PROFILING: bool = False

DIRECTOR_V2_REMOTE_DEBUGGING_PORT: PortInt | None = Field(default=None)
DIRECTOR_V2_REMOTE_DEBUGGING_PORT: PortInt | None = None

# extras
SWARM_STACK_NAME: str = Field(default="undefined-please-check")
SERVICE_TRACKING_HEARTBEAT: datetime.timedelta = Field(
default=DEFAULT_RESOURCE_USAGE_HEARTBEAT_INTERVAL,
description="Service scheduler heartbeat (everytime a heartbeat is sent into RabbitMQ)"
" (default to seconds, or see https://pydantic-docs.helpmanual.io/usage/types/#datetime-types for string formating)",
)
SWARM_STACK_NAME: str = "undefined-please-check"
SERVICE_TRACKING_HEARTBEAT: Annotated[
datetime.timedelta,
Field(
description="Service scheduler heartbeat (everytime a heartbeat is sent into RabbitMQ)"
" (default to seconds, or see https://pydantic-docs.helpmanual.io/usage/types/#datetime-types for string formating)"
),
] = DEFAULT_RESOURCE_USAGE_HEARTBEAT_INTERVAL

SIMCORE_SERVICES_NETWORK_NAME: str | None = Field(
default=None,
description="used to find the right network name",
)
SIMCORE_SERVICES_PREFIX: str | None = Field(
"simcore/services",
description="useful when developing with an alternative registry namespace",
)
SIMCORE_SERVICES_NETWORK_NAME: Annotated[
str | None, Field(description="used to find the right network name")
] = None
SIMCORE_SERVICES_PREFIX: Annotated[
str | None,
Field(
description="useful when developing with an alternative registry namespace"
),
] = "simcore/services"

DIRECTOR_V2_NODE_PORTS_400_REQUEST_TIMEOUT_ATTEMPTS: NonNegativeInt = Field(
default=NODE_PORTS_400_REQUEST_TIMEOUT_ATTEMPTS_DEFAULT_VALUE,
description="forwarded to sidecars which use nodeports",
)
DIRECTOR_V2_NODE_PORTS_400_REQUEST_TIMEOUT_ATTEMPTS: Annotated[
NonNegativeInt, Field(description="forwarded to sidecars which use nodeports")
] = NODE_PORTS_400_REQUEST_TIMEOUT_ATTEMPTS_DEFAULT_VALUE

# debug settings
CLIENT_REQUEST: ClientRequestSettings = Field(
json_schema_extra={"auto_default_from_env": True}
)
CLIENT_REQUEST: Annotated[
ClientRequestSettings, Field(json_schema_extra={"auto_default_from_env": True})
] = DEFAULT_FACTORY

# App modules settings ---------------------
DIRECTOR_V2_STORAGE: Annotated[
StorageSettings, Field(json_schema_extra={"auto_default_from_env": True})
]
DIRECTOR_V2_NODE_PORTS_STORAGE_AUTH: StorageAuthSettings | None = Field(
json_schema_extra={"auto_default_from_env": True}
)
DIRECTOR_V2_NODE_PORTS_STORAGE_AUTH: Annotated[
StorageAuthSettings | None,
Field(json_schema_extra={"auto_default_from_env": True}),
] = None

DIRECTOR_V2_CATALOG: Annotated[
CatalogSettings | None, Field(json_schema_extra={"auto_default_from_env": True})
]

DIRECTOR_V0: DirectorV0Settings = Field(
json_schema_extra={"auto_default_from_env": True}
)
DIRECTOR_V0: Annotated[
DirectorV0Settings, Field(json_schema_extra={"auto_default_from_env": True})
] = DEFAULT_FACTORY

DYNAMIC_SERVICES: Annotated[
DynamicServicesSettings,
Expand All @@ -206,35 +238,47 @@ class AppSettings(BaseApplicationSettings, MixinLoggingSettings):
PostgresSettings, Field(json_schema_extra={"auto_default_from_env": True})
]

REDIS: RedisSettings = Field(json_schema_extra={"auto_default_from_env": True})
REDIS: Annotated[
RedisSettings, Field(json_schema_extra={"auto_default_from_env": True})
] = DEFAULT_FACTORY

DIRECTOR_V2_RABBITMQ: RabbitSettings = Field(
json_schema_extra={"auto_default_from_env": True}
)
DIRECTOR_V2_RABBITMQ: Annotated[
RabbitSettings, Field(json_schema_extra={"auto_default_from_env": True})
] = DEFAULT_FACTORY

TRAEFIK_SIMCORE_ZONE: str = Field("internal_simcore_stack")
TRAEFIK_SIMCORE_ZONE: str = "internal_simcore_stack"

DIRECTOR_V2_COMPUTATIONAL_BACKEND: ComputationalBackendSettings = Field(
json_schema_extra={"auto_default_from_env": True}
)
DIRECTOR_V2_COMPUTATIONAL_BACKEND: Annotated[
ComputationalBackendSettings,
Field(json_schema_extra={"auto_default_from_env": True}),
] = DEFAULT_FACTORY

DIRECTOR_V2_DOCKER_REGISTRY: RegistrySettings = Field(
json_schema_extra={"auto_default_from_env": True},
description="settings for the private registry deployed with the platform",
)
DIRECTOR_V2_DOCKER_HUB_REGISTRY: RegistrySettings | None = Field(
default=None, description="public DockerHub registry settings"
)
DIRECTOR_V2_DOCKER_REGISTRY: Annotated[
RegistrySettings,
Field(
json_schema_extra={"auto_default_from_env": True},
description="settings for the private registry deployed with the platform",
),
] = DEFAULT_FACTORY
DIRECTOR_V2_DOCKER_HUB_REGISTRY: Annotated[
RegistrySettings | None, Field(description="public DockerHub registry settings")
] = None

DIRECTOR_V2_RESOURCE_USAGE_TRACKER: ResourceUsageTrackerSettings = Field(
json_schema_extra={"auto_default_from_env": True},
description="resource usage tracker service client's plugin",
)
DIRECTOR_V2_RESOURCE_USAGE_TRACKER: Annotated[
ResourceUsageTrackerSettings,
Field(
json_schema_extra={"auto_default_from_env": True},
description="resource usage tracker service client's plugin",
),
] = DEFAULT_FACTORY

DIRECTOR_V2_TRACING: TracingSettings | None = Field(
json_schema_extra={"auto_default_from_env": True},
description="settings for opentelemetry tracing",
)
DIRECTOR_V2_TRACING: Annotated[
TracingSettings | None,
Field(
json_schema_extra={"auto_default_from_env": True},
description="settings for opentelemetry tracing",
),
] = None

@field_validator("LOG_LEVEL", mode="before")
@classmethod
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -276,3 +276,15 @@ class ComputationTaskForRpcDBGet(BaseModel):
image: dict[str, Any]
started_at: dt.datetime | None
ended_at: dt.datetime | None

@field_validator("state", mode="before")
@classmethod
def _convert_from_state_type_enum_if_needed(cls, v):
    """Coerce a DB-level state value into a ``RunningState`` before validation.

    Accepts a raw string (DB enum value), a ``StateType`` member, or any
    other value, which is passed through untouched for pydantic's normal
    field validation to handle.
    """
    if isinstance(v, str):
        # Try to convert the string to a StateType; if that fails, the
        # value is left as-is and pydantic will try to convert it to a
        # RunningState later on during normal validation.
        with suppress(ValueError):
            v = StateType(v)
    if isinstance(v, StateType):
        # Map the DB-level enum to the service-level RunningState.
        # NOTE(review): assumes DB_TO_RUNNING_STATE covers every StateType
        # member — a missing key would raise KeyError here; confirm.
        return RunningState(DB_TO_RUNNING_STATE[v])
    return v
Loading
Loading