from enum import Enum
from typing import Optional

from pydantic import BaseModel, model_validator
from pydantic_settings import BaseSettings, SettingsConfigDict

__all__ = [
    "settings",
    "Settings",
    "Environment",
    "LoggingSettings",
    "OpenAISettings",
    "ReportGenerationSettings",
]


class Environment(str, Enum):
    """
    Enum for the supported environments
    """

    LOCAL = "local"
    DEV = "dev"
    STAGING = "staging"
    PROD = "prod"


ENV_REPORT_MAPPING = {
    Environment.PROD: "https://guidellm.neuralmagic.com/local-report/index.html",
    Environment.STAGING: "https://staging.guidellm.neuralmagic.com/local-report/index.html",
    Environment.DEV: "https://dev.guidellm.neuralmagic.com/local-report/index.html",
    Environment.LOCAL: "tests/dummy/report.html",
}


class LoggingSettings(BaseModel):
    """
    Logging settings for the application
    """

    disabled: bool = False
    clear_loggers: bool = True
    console_log_level: str = "INFO"
    log_file: Optional[str] = None
    log_file_level: Optional[str] = None


class OpenAISettings(BaseModel):
    """
    OpenAI settings for the application to connect to the API
    for OpenAI server-based pathways.
    """

    # OpenAI API key.
    api_key: str = "invalid"

    # OpenAI-compatible server URL.
    # NOTE: The default value is the default address of the llama.cpp web server.
    base_url: str = "http://localhost:8080"

    max_gen_tokens: int = 4096


class ReportGenerationSettings(BaseModel):
    source: str = ""


class Settings(BaseSettings):
    """
    All the settings are powered by pydantic_settings and can be
    populated from a .env file.

    Settings are populated from environment variables in the following format:

    ```sh
    export GUIDELLM__LOGGING__DISABLED=true
    export GUIDELLM__OPENAI__API_KEY=******
    ```
    """

    model_config = SettingsConfigDict(
        env_prefix="GUIDELLM__",
        env_nested_delimiter="__",
        extra="ignore",
        validate_default=True,
        env_file=".env",
    )

    env: Environment = Environment.PROD
| 91 | + request_timeout: int = 30 |
| 92 | + |
| 93 | + logging: LoggingSettings = LoggingSettings() |
| 94 | + openai: OpenAISettings = OpenAISettings() |
| 95 | + report_generation: ReportGenerationSettings = ReportGenerationSettings() |
| 96 | + |
| 97 | + @model_validator(mode="after") |
| 98 | + @classmethod |
| 99 | + def set_default_source(cls, values): |
| 100 | + if not values.report_generation.source: |
| 101 | + values.report_generation.source = ENV_REPORT_MAPPING.get(values.env) |
| 102 | + |
| 103 | + return values |
| 104 | + |
| 105 | + |
| 106 | +settings = Settings() |
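
For reference, a minimal usage sketch of how these settings might be consumed and overridden at runtime. It assumes the module is importable as `guidellm.config` (the import path is not visible in this diff), and the override values are illustrative only.

```python
# Minimal sketch; assumes this module is importable as guidellm.config
# (the actual import path is not shown in this diff).
import os

# Nested fields map to double-underscore-delimited variables under the
# GUIDELLM__ prefix, e.g. Settings.openai.base_url <- GUIDELLM__OPENAI__BASE_URL.
os.environ["GUIDELLM__OPENAI__BASE_URL"] = "http://localhost:8000"
os.environ["GUIDELLM__LOGGING__DISABLED"] = "true"

from guidellm.config import Settings

overridden = Settings()  # instantiate after setting the variables
assert overridden.openai.base_url == "http://localhost:8000"
assert overridden.logging.disabled is True

# report_generation.source is filled in by the set_default_source validator
# from ENV_REPORT_MAPPING when it is not set explicitly.
print(overridden.report_generation.source)
```

Because the module-level `settings = Settings()` singleton is created at import time, environment variables (or the `.env` file) must be in place before the first import for that instance to pick them up; constructing a fresh `Settings()` as above is only needed when overriding later.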