|
1 | 1 | from datetime import timedelta |
| 2 | +from typing import Annotated |
2 | 3 |
|
| 4 | +from common_library.basic_types import DEFAULT_FACTORY |
3 | 5 | from models_library.basic_types import BootModeEnum, LogLevel |
4 | 6 | from models_library.docker import DockerNodeID |
5 | 7 | from pydantic import AliasChoices, AnyHttpUrl, Field, field_validator |
|
12 | 14 |
|
13 | 15 |
|
14 | 16 | class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings): |
15 | | - LOG_LEVEL: LogLevel = Field( |
16 | | - LogLevel.WARNING, |
17 | | - validation_alias=AliasChoices( |
18 | | - "AGENT_LOGLEVEL", |
19 | | - "LOG_LEVEL", |
20 | | - "LOGLEVEL", |
| 17 | + LOG_LEVEL: Annotated[ |
| 18 | + LogLevel, |
| 19 | + Field( |
| 20 | + validation_alias=AliasChoices( |
| 21 | + "AGENT_LOGLEVEL", |
| 22 | + "LOG_LEVEL", |
| 23 | + "LOGLEVEL", |
| 24 | + ), |
21 | 25 | ), |
22 | | - ) |
| 26 | + ] = LogLevel.WARNING |
| 27 | + |
23 | 28 | SC_BOOT_MODE: BootModeEnum | None |
24 | 29 |
|
25 | | - AGENT_VOLUMES_LOG_FORMAT_LOCAL_DEV_ENABLED: bool = Field( |
26 | | - default=False, |
27 | | - validation_alias=AliasChoices( |
28 | | - "AGENT_VOLUMES_LOG_FORMAT_LOCAL_DEV_ENABLED", |
29 | | - "LOG_FORMAT_LOCAL_DEV_ENABLED", |
30 | | - ), |
31 | | - description=( |
32 | | - "Enables local development log format. WARNING: make sure it is " |
33 | | - "disabled if you want to have structured logs!" |
| 30 | + AGENT_VOLUMES_LOG_FORMAT_LOCAL_DEV_ENABLED: Annotated[ |
| 31 | + bool, |
| 32 | + Field( |
| 33 | + validation_alias=AliasChoices( |
| 34 | + "AGENT_VOLUMES_LOG_FORMAT_LOCAL_DEV_ENABLED", |
| 35 | + "LOG_FORMAT_LOCAL_DEV_ENABLED", |
| 36 | + ), |
| 37 | + description=( |
| 38 | + "Enables local development log format. WARNING: make sure it is " |
| 39 | + "disabled if you want to have structured logs!" |
| 40 | + ), |
34 | 41 | ), |
35 | | - ) |
36 | | - AGENT_VOLUMES_LOG_FILTER_MAPPING: dict[LoggerName, list[MessageSubstring]] = Field( |
37 | | - default_factory=dict, |
38 | | - validation_alias=AliasChoices( |
39 | | - "AGENT_VOLUMES_LOG_FILTER_MAPPING", "LOG_FILTER_MAPPING" |
| 42 | + ] = False |
| 43 | + |
| 44 | + AGENT_VOLUMES_LOG_FILTER_MAPPING: Annotated[ |
| 45 | + dict[LoggerName, list[MessageSubstring]], |
| 46 | + Field( |
| 47 | + default_factory=dict, |
| 48 | + validation_alias=AliasChoices( |
| 49 | + "AGENT_VOLUMES_LOG_FILTER_MAPPING", "LOG_FILTER_MAPPING" |
| 50 | + ), |
| 51 | + description="is a dictionary that maps specific loggers (such as 'uvicorn.access' or 'gunicorn.access') to a list of log message patterns that should be filtered out.", |
40 | 52 | ), |
41 | | - description="is a dictionary that maps specific loggers (such as 'uvicorn.access' or 'gunicorn.access') to a list of log message patterns that should be filtered out.", |
42 | | - ) |
43 | | - AGENT_VOLUMES_CLEANUP_TARGET_SWARM_STACK_NAME: str = Field( |
44 | | - ..., description="Exactly the same as director-v2's `SWARM_STACK_NAME` env var" |
45 | | - ) |
| 53 | + ] = DEFAULT_FACTORY |
| 54 | + |
| 55 | + AGENT_VOLUMES_CLEANUP_TARGET_SWARM_STACK_NAME: str |
46 | 56 | AGENT_VOLUMES_CLEANUP_S3_ENDPOINT: AnyHttpUrl |
47 | 57 | AGENT_VOLUMES_CLEANUP_S3_ACCESS_KEY: str |
48 | 58 | AGENT_VOLUMES_CLEANUP_S3_SECRET_KEY: str |
49 | 59 | AGENT_VOLUMES_CLEANUP_S3_BUCKET: str |
50 | 60 | AGENT_VOLUMES_CLEANUP_S3_PROVIDER: S3Provider |
51 | 61 | AGENT_VOLUMES_CLEANUP_S3_REGION: str = "us-east-1" |
52 | | - AGENT_VOLUMES_CLEANUP_RETRIES: int = Field( |
53 | | - 3, description="upload retries in case of error" |
54 | | - ) |
55 | | - AGENT_VOLUMES_CLEANUP_PARALLELISM: int = Field( |
56 | | - 5, description="parallel transfers to s3" |
57 | | - ) |
58 | | - AGENT_VOLUMES_CLEANUP_EXCLUDE_FILES: list[str] = Field( |
59 | | - [".hidden_do_not_remove", "key_values.json"], |
60 | | - description="Files to ignore when syncing to s3", |
61 | | - ) |
62 | | - AGENT_VOLUMES_CLEANUP_INTERVAL: timedelta = Field( |
63 | | - timedelta(minutes=1), description="interval for running volumes removal" |
64 | | - ) |
65 | | - AGENT_VOLUMES_CLEANUP_BOOK_KEEPING_INTERVAL: timedelta = Field( |
66 | | - timedelta(minutes=1), |
67 | | - description=( |
68 | | - "interval at which to scan for unsued volumes and keep track since " |
69 | | - "they were detected as being unused" |
| 62 | + AGENT_VOLUMES_CLEANUP_RETRIES: Annotated[ |
| 63 | + int, Field(description="upload retries in case of error") |
| 64 | + ] = 3 |
| 65 | + AGENT_VOLUMES_CLEANUP_PARALLELISM: Annotated[ |
| 66 | + int, Field(description="parallel transfers to s3") |
| 67 | + ] = 5 |
| 68 | + AGENT_VOLUMES_CLEANUP_EXCLUDE_FILES: Annotated[ |
| 69 | + list[str], |
| 70 | + Field( |
| 71 | + [".hidden_do_not_remove", "key_values.json"], |
| 72 | + description="Files to ignore when syncing to s3", |
70 | 73 | ), |
71 | | - ) |
72 | | - AGENT_VOLUMES_CLEANUP_REMOVE_VOLUMES_INACTIVE_FOR: timedelta = Field( |
73 | | - timedelta(minutes=65), |
74 | | - description=( |
75 | | - "if a volume is unused for more than this interval it can be removed. " |
76 | | - "The default is set to a health 60+ miunutes since it might take upto " |
77 | | - "60 minutes for the dy-sidecar to properly save data form the volumes" |
| 74 | + ] |
| 75 | + AGENT_VOLUMES_CLEANUP_INTERVAL: Annotated[ |
| 76 | + timedelta, Field(description="interval for running volumes removal") |
| 77 | + ] = timedelta(minutes=1) |
| 78 | + AGENT_VOLUMES_CLEANUP_BOOK_KEEPING_INTERVAL: Annotated[ |
| 79 | + timedelta, |
| 80 | + Field( |
| 81 | + description=( |
| 82 | + "interval at which to scan for unsued volumes and keep track since " |
| 83 | + "they were detected as being unused" |
| 84 | + ), |
78 | 85 | ), |
79 | | - ) |
| 86 | + ] = timedelta(minutes=1) |
| 87 | + AGENT_VOLUMES_CLEANUP_REMOVE_VOLUMES_INACTIVE_FOR: Annotated[ |
| 88 | + timedelta, |
| 89 | + Field( |
| 90 | + description=( |
| 91 | + "if a volume is unused for more than this interval it can be removed. " |
| 92 | + "The default is set to a health 60+ miunutes since it might take upto " |
| 93 | + "60 minutes for the dy-sidecar to properly save data form the volumes" |
| 94 | + ), |
| 95 | + ), |
| 96 | + ] = timedelta(minutes=65) |
80 | 97 |
|
81 | 98 | AGENT_PROMETHEUS_INSTRUMENTATION_ENABLED: bool = True |
| 99 | + AGENT_DOCKER_NODE_ID: Annotated[ |
| 100 | + DockerNodeID, Field(description="used by the rabbitmq module") |
| 101 | + ] |
82 | 102 |
|
83 | | - AGENT_DOCKER_NODE_ID: DockerNodeID = Field( |
84 | | - ..., description="used by the rabbitmq module" |
85 | | - ) |
86 | | - |
87 | | - AGENT_RABBITMQ: RabbitSettings = Field( |
88 | | - description="settings for service/rabbitmq", |
89 | | - json_schema_extra={"auto_default_from_env": True}, |
90 | | - ) |
| 103 | + AGENT_RABBITMQ: Annotated[ |
| 104 | + RabbitSettings, |
| 105 | + Field( |
| 106 | + description="settings for service/rabbitmq", |
| 107 | + json_schema_extra={"auto_default_from_env": True}, |
| 108 | + ), |
| 109 | + ] |
91 | 110 |
|
92 | | - AGENT_TRACING: TracingSettings | None = Field( |
93 | | - description="settings for opentelemetry tracing", |
94 | | - json_schema_extra={"auto_default_from_env": True}, |
95 | | - ) |
| 111 | + AGENT_TRACING: Annotated[ |
| 112 | + TracingSettings | None, |
| 113 | + Field( |
| 114 | + description="settings for opentelemetry tracing", |
| 115 | + json_schema_extra={"auto_default_from_env": True}, |
| 116 | + ), |
| 117 | + ] |
96 | 118 |
|
97 | 119 | @field_validator("LOG_LEVEL") |
98 | 120 | @classmethod |
|
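
For anyone reading this diff who has not used the `Annotated` field style yet, here is a minimal sketch of the same pattern with plain pydantic v2 + pydantic-settings. `ExampleSettings`, its field names and env vars are made up for illustration and are not part of this PR; the repo-specific `BaseCustomSettings`, `MixinLoggingSettings` and `DEFAULT_FACTORY` helpers are deliberately left out.

```python
# Minimal sketch of the Annotated-style declarations this diff migrates to.
# Assumes plain pydantic v2 + pydantic-settings; ExampleSettings, its fields
# and env var names are illustrative only, not the real ApplicationSettings.
from datetime import timedelta
from typing import Annotated

from pydantic import AliasChoices, Field
from pydantic_settings import BaseSettings


class ExampleSettings(BaseSettings):
    # Plain defaults move out of Field() and become the assigned value;
    # only metadata (aliases, descriptions) stays inside Annotated[..., Field(...)].
    LOG_LEVEL: Annotated[
        str,
        Field(validation_alias=AliasChoices("EXAMPLE_LOGLEVEL", "LOG_LEVEL")),
    ] = "WARNING"

    # Factory-built defaults stay inside Field(default_factory=...). The diff
    # additionally assigns a DEFAULT_FACTORY sentinel from common_library so
    # static type checkers see an assignment; that helper is repo-specific and
    # omitted here -- pydantic itself only needs the factory.
    LOG_FILTER_MAPPING: Annotated[
        dict[str, list[str]],
        Field(default_factory=dict, description="loggers mapped to substrings to drop"),
    ]

    # Required fields simply have no assignment (replacing the old `Field(...)` form).
    TARGET_SWARM_STACK_NAME: str

    CLEANUP_INTERVAL: Annotated[
        timedelta, Field(description="interval for running volumes removal")
    ] = timedelta(minutes=1)


if __name__ == "__main__":
    # Values come from init kwargs or the environment (e.g. EXAMPLE_LOGLEVEL).
    print(ExampleSettings(TARGET_SWARM_STACK_NAME="demo").model_dump())
```

The upshot of the migration: defaults become ordinary assignments that type checkers understand directly, while aliases, descriptions and factories stay grouped inside the `Field()` metadata.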