diff --git a/docs/requirements.txt b/docs/requirements.txt index f78020cce..52575eb33 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -1,9 +1,7 @@ -sphinx<8.0 +sphinx sphinx_design sphinx-copybutton pydata_sphinx_theme -pydantic<2.0a0 -autodoc-pydantic<2.0a0 myst-nb nglview -e ../qcportal diff --git a/docs/source/admin_guide/managers/index.rst b/docs/source/admin_guide/managers/index.rst index e104ae9b3..c5c21a9ec 100644 --- a/docs/source/admin_guide/managers/index.rst +++ b/docs/source/admin_guide/managers/index.rst @@ -128,25 +128,15 @@ Configuration for different HPC schedulers HPC cluster schedulers vary in behavior, so you will need to adapt your ``qcfractal-manager-config.yml`` to the scheduler of the HPC cluster you intend to use. The configuration keys available for each ``type`` of record in the ``executors`` list are referenced here. ----- -.. autopydantic_model:: qcfractalcompute.config.SlurmExecutorConfig - :model-show-config-summary: false - :model-show-field-summary: false +.. autoclass:: qcfractalcompute.config.SlurmExecutorConfig ----- -.. autopydantic_model:: qcfractalcompute.config.TorqueExecutorConfig - :model-show-config-summary: false - :model-show-field-summary: false +.. autoclass:: qcfractalcompute.config.TorqueExecutorConfig ----- -.. autopydantic_model:: qcfractalcompute.config.LSFExecutorConfig - :model-show-config-summary: false - :model-show-field-summary: false +.. autoclass:: qcfractalcompute.config.LSFExecutorConfig ----- .. _compute-manager-local: @@ -178,6 +168,4 @@ Using the ``local`` executor type is also recommended for running a compute mana ---- -.. autopydantic_model:: qcfractalcompute.config.LocalExecutorConfig - :model-show-config-summary: false - :model-show-field-summary: false +.. autoclass:: qcfractalcompute.config.LocalExecutorConfig diff --git a/docs/source/conf.py b/docs/source/conf.py index 347aa447d..050294ac5 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -49,7 +49,7 @@ "sphinx.ext.napoleon", "sphinx.ext.extlinks", "sphinx_design", - "sphinxcontrib.autodoc_pydantic", +# "sphinxcontrib.autodoc_pydantic", "sphinx_copybutton", "myst_nb", ] @@ -65,8 +65,8 @@ "show-inheritance": True, "member-order": "bysource", } -autodoc_pydantic_model_show_json = False -autodoc_pydantic_settings_show_json = False +#autodoc_pydantic_model_show_json = False +#autodoc_pydantic_settings_show_json = False # Add any paths that contain templates here, relative to this directory. 
templates_path = [ diff --git a/qcarchivetesting/conda-envs/fulltest_server.yaml b/qcarchivetesting/conda-envs/fulltest_server.yaml index 1c864a41d..23b6da6f9 100644 --- a/qcarchivetesting/conda-envs/fulltest_server.yaml +++ b/qcarchivetesting/conda-envs/fulltest_server.yaml @@ -13,6 +13,7 @@ dependencies: - requests - pyyaml - pydantic + - pydantic-settings - zstandard - apsw>=3.42 - qcelemental<0.70a0 diff --git a/qcarchivetesting/conda-envs/fulltest_snowflake.yaml b/qcarchivetesting/conda-envs/fulltest_snowflake.yaml index e06bf372a..25bbd8011 100644 --- a/qcarchivetesting/conda-envs/fulltest_snowflake.yaml +++ b/qcarchivetesting/conda-envs/fulltest_snowflake.yaml @@ -13,6 +13,7 @@ dependencies: - requests - pyyaml - pydantic + - pydantic-settings - zstandard - apsw>=3.42 - qcelemental<0.70a0 diff --git a/qcarchivetesting/conda-envs/fulltest_testing.yaml b/qcarchivetesting/conda-envs/fulltest_testing.yaml index df9ee6b7c..2c02904f1 100644 --- a/qcarchivetesting/conda-envs/fulltest_testing.yaml +++ b/qcarchivetesting/conda-envs/fulltest_testing.yaml @@ -13,6 +13,7 @@ dependencies: - requests - pyyaml - pydantic + - pydantic-settings - zstandard - apsw>=3.42 - qcelemental<0.70a0 diff --git a/qcarchivetesting/conda-envs/fulltest_worker.yaml b/qcarchivetesting/conda-envs/fulltest_worker.yaml index 4f9802aa5..63e96bc11 100644 --- a/qcarchivetesting/conda-envs/fulltest_worker.yaml +++ b/qcarchivetesting/conda-envs/fulltest_worker.yaml @@ -11,6 +11,7 @@ dependencies: - requests - pyyaml - pydantic + - pydantic-settings - zstandard - apsw>=3.42 - qcelemental<0.70a0 diff --git a/qcarchivetesting/qcarchivetesting/config_files/gha_fractal_compute.yaml b/qcarchivetesting/qcarchivetesting/config_files/gha_fractal_compute.yaml index 4d34959ca..29787d9be 100644 --- a/qcarchivetesting/qcarchivetesting/config_files/gha_fractal_compute.yaml +++ b/qcarchivetesting/qcarchivetesting/config_files/gha_fractal_compute.yaml @@ -17,5 +17,5 @@ executors: max_workers: 1 cores_per_worker: 2 memory_per_worker: 2 - queue_tags: + compute_tags: - '*' diff --git a/qcarchivetesting/qcarchivetesting/testing_classes.py b/qcarchivetesting/qcarchivetesting/testing_classes.py index 8635dbdb6..08317a808 100644 --- a/qcarchivetesting/qcarchivetesting/testing_classes.py +++ b/qcarchivetesting/qcarchivetesting/testing_classes.py @@ -1,7 +1,6 @@ from __future__ import annotations import logging -from copy import deepcopy from qcarchivetesting import geoip_path, geoip_filename, ip_tests_enabled from qcfractal.config import DatabaseConfig @@ -78,10 +77,8 @@ def __init__(self, db_path: str): assert self.harness.is_alive() and not self.harness.can_connect() def get_new_harness(self, db_name: str) -> QCATestingPostgresHarness: - harness_config = deepcopy(self.harness.config.dict()) - harness_config["database_name"] = db_name - - new_harness = QCATestingPostgresHarness(DatabaseConfig(**harness_config)) + db_config = self.harness.config.model_copy(deep=True, update={"database_name": db_name}) + new_harness = QCATestingPostgresHarness(db_config) new_harness.create_database(create_tables=True) return new_harness @@ -170,7 +167,7 @@ def __init__( extra_config=qcf_config, ) - self._original_config = self._qcf_config.copy(deep=True) + self._original_config = self._qcf_config.model_copy(deep=True) if create_users: self.create_users() @@ -192,7 +189,7 @@ def reset(self): self._stop_job_runner() self._stop_compute() self._all_completed = set() - self._qcf_config = self._original_config.copy(deep=True) + self._qcf_config = 
self._original_config.model_copy(deep=True) if self._api_thread is None: self.start_api(wait=True) diff --git a/qcarchivetesting/qcarchivetesting/testing_fixtures.py b/qcarchivetesting/qcarchivetesting/testing_fixtures.py index 16cced27e..90d102744 100644 --- a/qcarchivetesting/qcarchivetesting/testing_fixtures.py +++ b/qcarchivetesting/qcarchivetesting/testing_fixtures.py @@ -27,7 +27,7 @@ def _generate_default_config(pg_harness, extra_config=None) -> FractalConfig: cfg_dict = {} cfg_dict["base_folder"] = pg_harness.config.base_folder cfg_dict["loglevel"] = "DEBUG" - cfg_dict["database"] = pg_harness.config.dict() + cfg_dict["database"] = pg_harness.config.model_dump() cfg_dict["database"]["pool_size"] = 0 cfg_dict["log_access"] = True diff --git a/qcfractal/qcfractal/components/managers/socket.py b/qcfractal/qcfractal/components/managers/socket.py index 445a95157..f670e9ba2 100644 --- a/qcfractal/qcfractal/components/managers/socket.py +++ b/qcfractal/qcfractal/components/managers/socket.py @@ -313,9 +313,9 @@ def query_active( stmt = stmt.where(ComputeManagerORM.programs.op("?&")(program_names)) # ?& = JSONB contains the keys of ... # Handle * - # If the server doesn't have strict_queue_tags and there is a * in the tags, then + # If the server doesn't have strict_compute_tags and there is a * in the tags, then # don't limit the query - if "*" not in compute_tag or ("*" in compute_tag and self.root_socket.qcf_config.strict_queue_tags): + if "*" not in compute_tag or ("*" in compute_tag and self.root_socket.qcf_config.strict_compute_tags): stmt = stmt.where(ComputeManagerORM.compute_tags.op("&&")(compute_tag)) # && = overlaps between two arrays with self.root_socket.optional_session(session, True) as session: diff --git a/qcfractal/qcfractal/components/tasks/socket.py b/qcfractal/qcfractal/components/tasks/socket.py index eb709246d..5a489855b 100644 --- a/qcfractal/qcfractal/components/tasks/socket.py +++ b/qcfractal/qcfractal/components/tasks/socket.py @@ -43,7 +43,7 @@ def __init__(self, root_socket: SQLAlchemySocket): self._logger = logging.getLogger(__name__) self._tasks_claim_limit = root_socket.qcf_config.api_limits.manager_tasks_claim - self._strict_queue_tags = root_socket.qcf_config.strict_queue_tags + self._strict_compute_tags = root_socket.qcf_config.strict_compute_tags def update_finished( self, manager_name: str, results_compressed: Dict[int, bytes], *, session: Optional[Session] = None @@ -331,9 +331,9 @@ def claim_tasks( TaskQueueORM.compute_priority.desc(), TaskQueueORM.sort_date.asc(), TaskQueueORM.id.asc() ) - # If tag is "*" (and strict_queue_tags is False), then the manager can pull anything - # If tag is "*" and strict_queue_tags is enabled, only pull tasks with tag == '*' - if tag != "*" or self._strict_queue_tags: + # If tag is "*" (and strict_compute_tags is False), then the manager can pull anything + # If tag is "*" and strict_compute_tags is enabled, only pull tasks with tag == '*' + if tag != "*" or self._strict_compute_tags: stmt = stmt.filter(TaskQueueORM.compute_tag == tag) # Skip locked rows - They may be in the process of being claimed by someone else diff --git a/qcfractal/qcfractal/components/tasks/test_socket_claim.py b/qcfractal/qcfractal/components/tasks/test_socket_claim.py index 504171b87..d89fb6801 100644 --- a/qcfractal/qcfractal/components/tasks/test_socket_claim.py +++ b/qcfractal/qcfractal/components/tasks/test_socket_claim.py @@ -364,7 +364,7 @@ def test_task_socket_claim_tag_wildcard_strict(postgres_server, pytestconfig): pg_harness = 
postgres_server.get_new_harness("claim_tag_wildcard_strict") encoding = pytestconfig.getoption("--client-encoding") - with QCATestingSnowflake(pg_harness, encoding=encoding, extra_config={"strict_queue_tags": True}) as snowflake: + with QCATestingSnowflake(pg_harness, encoding=encoding, extra_config={"strict_compute_tags": True}) as snowflake: storage_socket = snowflake.get_storage_socket() mname1 = ManagerName(cluster="test_cluster", hostname="a_host1", uuid="1234-5678-1234-5678") diff --git a/qcfractal/qcfractal/config.py b/qcfractal/qcfractal/config.py index a87398d88..f2431415a 100644 --- a/qcfractal/qcfractal/config.py +++ b/qcfractal/qcfractal/config.py @@ -12,13 +12,8 @@ import yaml from psycopg2.extensions import make_dsn, parse_dsn - -try: - from pydantic.v1 import BaseSettings, Field, validator, root_validator, ValidationError - from pydantic.v1.env_settings import SettingsSourceCallable -except ImportError: - from pydantic import BaseSettings, Field, validator, root_validator, ValidationError - from pydantic.env_settings import SettingsSourceCallable +from pydantic import Field, field_validator, model_validator, ValidationError +from pydantic_settings import BaseSettings, SettingsConfigDict from sqlalchemy.engine.url import URL, make_url from qcfractal.port_util import find_open_port @@ -64,50 +59,11 @@ def make_uri_string( return f"postgresql://{username}{password}{sep}{host}:{port}/{dbname}{query_str}" -class ConfigCommon: - case_sensitive = False - extra = "forbid" - - # Forces environment variables to take precedent over values - # passed to init (which usually come from a file) - @classmethod - def customise_sources( - cls, - init_settings: SettingsSourceCallable, - env_settings: SettingsSourceCallable, - file_secret_settings: SettingsSourceCallable, - ) -> tuple[SettingsSourceCallable, ...]: - return env_settings, init_settings, file_secret_settings - - -class ConfigBase(BaseSettings): - _type_map = {"string": str, "integer": int, "float": float, "boolean": bool} - - @classmethod - def field_names(cls): - return list(cls.schema()["properties"].keys()) - - @classmethod - def help_info(cls, field): - """ - Create 'help' information for use by argparse from a field in a settings class - """ - info = cls.schema()["properties"][field] - - ret = {"type": cls._type_map[info["type"]]} - - # Don't add defaults here. 
Argparse would then end up using thses - # defaults on the command line, overriding values specified in the config - # if "default" in info: - # ret["default"] = info["default"] - - if "description" in info: - ret["help"] = info["description"] - - return ret +class QCFConfigBase(BaseSettings): + model_config = SettingsConfigDict(extra="forbid", case_sensitive=False) -class DatabaseConfig(ConfigBase): +class DatabaseConfig(QCFConfigBase): """ Settings for the database used by QCFractal """ @@ -132,7 +88,9 @@ class DatabaseConfig(ConfigBase): database_name: str = Field("qcfractal_default", description="The database name to connect to.") username: str = Field(..., description="The database username to connect with") password: str = Field(..., description="The database password to connect with") - query: Dict[str, str] = Field({}, description="Extra connection query parameters at the end of the URL string") + query: Dict[str, Union[str, int]] = Field( + {}, description="Extra connection query parameters at the end of the URL string" + ) own: bool = Field( True, @@ -141,9 +99,9 @@ class DatabaseConfig(ConfigBase): data_directory: Optional[str] = Field( None, - description="Location to place the database if own == True. Default is [base_folder]/database if we own the databse", + description="Location to place the database if own == True. Default is [base_folder]/database if we own the database", ) - logfile: str = Field( + logfile: Optional[str] = Field( None, description="Path to a file to use as the database logfile (if own == True). Default is [base_folder]/qcfractal_database.log", ) @@ -164,19 +122,14 @@ class DatabaseConfig(ConfigBase): description="[ADVANCED] An existing database (not the one you want to use/create). This is used for database management", ) - class Config(ConfigCommon): - env_prefix = "QCF_DB_" - - @validator("data_directory") - def _check_data_directory(cls, v, values): - if values["own"] is True: - return _make_abs_path(v, values["base_folder"], "postgres") - else: - return None + model_config = QCFConfigBase.model_config | SettingsConfigDict(env_prefix="QCF_DB_") - @validator("logfile") - def _check_logfile(cls, v, values): - return _make_abs_path(v, values["base_folder"], "qcfractal_database.log") + @model_validator(mode="after") + def _check_paths(self): + if self.own: + self.data_directory = _make_abs_path(self.data_directory, self.base_folder, "postgres") + self.logfile = _make_abs_path(self.logfile, self.base_folder, "qcfractal_database.log") + return self @property def database_uri(self) -> str: @@ -238,7 +191,7 @@ def safe_uri(self) -> str: ) # everything left over -class AutoResetConfig(ConfigBase): +class AutoResetConfig(QCFConfigBase): """ Limits on the number of records returned per query. This can be specified per object (molecule, etc) """ @@ -248,11 +201,10 @@ class AutoResetConfig(ConfigBase): compute_lost: int = Field(5, description="Max restarts for computations where the compute resource disappeared") random_error: int = Field(5, description="Max restarts for random errors") - class Config(ConfigCommon): - env_prefix = "QCF_AUTORESET_" + model_config = QCFConfigBase.model_config | SettingsConfigDict(env_prefix="QCF_AUTORESET_") -class APILimitConfig(ConfigBase): +class APILimitConfig(QCFConfigBase): """ Limits on the number of records returned per query. 
This can be specified per object (molecule, etc) """ @@ -274,11 +226,10 @@ class APILimitConfig(ConfigBase): get_error_logs: int = Field(100, description="Number of error log records to return") get_internal_jobs: int = Field(1000, description="Number of internal jobs to return") - class Config(ConfigCommon): - env_prefix = "QCF_APILIMIT_" + model_config = QCFConfigBase.model_config | SettingsConfigDict(env_prefix="QCF_APILIMIT_") -class WebAPIConfig(ConfigBase): +class WebAPIConfig(QCFConfigBase): """ Settings for the Web API (api) interface """ @@ -329,24 +280,24 @@ class WebAPIConfig(ConfigBase): None, description="Any additional options to pass directly to the waitress serve function" ) - @validator( + @field_validator( "jwt_access_token_expires", "jwt_refresh_token_expires", "user_session_max_age", - pre=True, + mode="before", ) + @classmethod def _convert_durations(cls, v): return duration_to_seconds(v) - class Config(ConfigCommon): - env_prefix = "QCF_API_" + model_config = QCFConfigBase.model_config | SettingsConfigDict(env_prefix="QCF_API_") -class S3BucketMap(ConfigBase): +class S3BucketMap(QCFConfigBase): dataset_attachment: str = Field("dataset_attachment", description="Bucket to hold dataset views") -class S3Config(ConfigBase): +class S3Config(QCFConfigBase): """ Settings for using external files with S3 """ @@ -358,22 +309,22 @@ class S3Config(ConfigBase): access_key_id: Optional[str] = Field(None, description="AWS/S3 access key") secret_access_key: Optional[str] = Field(None, description="AWS/S3 secret key") - bucket_map: S3BucketMap = Field(S3BucketMap(), description="Configuration for where to store various files") + bucket_map: S3BucketMap = Field( + default_factory=S3BucketMap, description="Configuration for where to store various files" + ) - class Config(ConfigCommon): - env_prefix = "QCF_S3_" + model_config = QCFConfigBase.model_config | SettingsConfigDict(env_prefix="QCF_S3_") - @root_validator() - def _check_enabled(cls, values): - if values.get("enabled", False) is True: + @model_validator(mode="after") + def _check_enabled(self): + if self.enabled: for key in ["endpoint_url", "access_key_id", "secret_access_key"]: - if values.get(key, None) is None: + if getattr(self, key) is None: raise ValueError(f"S3 enabled but {key} not set") - - return values + return self -class CORSconfig(ConfigBase): +class CORSconfig(QCFConfigBase): """ Settings for using CORS """ @@ -385,7 +336,7 @@ class CORSconfig(ConfigBase): methods: List[str] = Field([]) -class FractalConfig(ConfigBase): +class FractalConfig(QCFConfigBase): """ Fractal Server settings """ @@ -408,11 +359,11 @@ class FractalConfig(ConfigBase): True, description="Allows unauthenticated read access to this instance. This does not extend to sensitive tables (such as user information)", ) - strict_queue_tags: bool = Field( + strict_compute_tags: bool = Field( False, - description="If True, disables wildcard behavior for queue tags. This disables managers from claiming all " + description="If True, disables wildcard behavior for compute tags. This disables managers from claiming all " "tags if they specify a wildcard ('*') tag. 
Managers will still be able to claim tasks with an "
-        "explicit '*' tag if they specifiy the '*' queue tag in their config",
+        "explicit '*' tag if they specify the '*' compute tag in their config",
     )

     # Logging and profiling
@@ -440,7 +391,7 @@
         description="The frequency (in seconds) to check the heartbeat of compute managers",
         gt=0,
     )
-    heartbeat_frequency_jitter: int = Field(
+    heartbeat_frequency_jitter: float = Field(
         0.1, description="Jitter fraction to be applied to the heartbeat frequency", ge=0
     )
     heartbeat_max_missed: int = Field(
@@ -488,130 +439,70 @@
     # Other settings blocks
     database: DatabaseConfig = Field(..., description="Configuration of the settings for the database")
     api: WebAPIConfig = Field(..., description="Configuration of the REST interface")
-    s3: S3Config = Field(S3Config(), description="Configuration of the S3 file storage (optional)")
-    api_limits: APILimitConfig = Field(..., description="Configuration of the limits to the api")
-    cors: CORSconfig = Field(..., description="Configuration Cross Origin Resource sharing (advanced)")
-    auto_reset: AutoResetConfig = Field(..., description="Configuration for automatic resetting of tasks")
-
-    @root_validator(pre=True)
-    def _root_validator(cls, values):
-        logger = logging.getLogger("config_validation")
-
-        values.setdefault("database", dict())
-        if "base_folder" not in values["database"]:
-            values["database"]["base_folder"] = values.get("base_folder")
-
-        values.setdefault("api_limits", dict())
-        values.setdefault("api", dict())
-        values.setdefault("auto_reset", dict())
-        values.setdefault("cors", dict())
-
-        if "statistics_frequency" in values:
-            values.pop("statistics_frequency")
-            logger.warning("The 'statistics_frequency' setting is no longer used and is now ignored")
-
-        if "num_workers" in values["api"]:
-            values["api"].pop("num_workers")
-            logger.warning("The 'num_workers' setting is no longer used and is now ignored")
-
-        if "get_server_stats" in values["api_limits"]:
-            values["api_limits"].pop("get_server_stats")
-            logger.warning("The 'get_server_stats' setting in 'api_limits' is no longer used and is now ignored")
-
-        return values
-
-    @validator("geoip2_dir")
-    def _check_geoip2_dir(cls, v, values):
-        return _make_abs_path(v, values["base_folder"], "geoip2")
-
-    @validator("homepage_directory")
-    def _check_hompepage_directory_path(cls, v, values):
-        return _make_abs_path(v, values["base_folder"], None)
-
-    @validator("upload_directory")
-    def _check_upload_directory_path(cls, v, values):
-        return _make_abs_path(v, values["base_folder"], None)
-
-    @validator("logfile")
-    def _check_logfile_path(cls, v, values):
-        return _make_abs_path(v, values["base_folder"], None)
+    s3: S3Config = Field(default_factory=S3Config, description="Configuration of the S3 file storage (optional)")
+    api_limits: APILimitConfig = Field(
+        default_factory=APILimitConfig, description="Configuration of the limits to the api"
+    )
+    cors: CORSconfig = Field(
+        default_factory=CORSconfig, description="Configuration Cross Origin Resource sharing (advanced)"
+    )
+    auto_reset: AutoResetConfig = Field(
+        default_factory=AutoResetConfig, description="Configuration for automatic resetting of tasks"
+    )

-    @validator("loglevel")
+    @field_validator("loglevel", mode="after")
+    @classmethod
     def _check_loglevel(cls, v):
         v = v.upper()
         if v not in ["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"]:
             raise ValidationError(f"{v} is not a valid loglevel. Must be DEBUG, INFO, WARNING, ERROR, or CRITICAL")
         return v

-    @validator("service_frequency", "heartbeat_frequency", pre=True)
+    @field_validator("service_frequency", "heartbeat_frequency", mode="before")
+    @classmethod
     def _convert_durations(cls, v):
         return duration_to_seconds(v)

-    @validator("access_log_keep", "internal_job_keep", pre=True)
+    @field_validator("access_log_keep", "internal_job_keep", mode="before")
+    @classmethod
     def _convert_durations_days(cls, v):
         if isinstance(v, int) or (isinstance(v, str) and v.isdigit()):
             return int(v) * 86400

         return duration_to_seconds(v)

-    @validator("temporary_dir", pre=True)
-    def _create_temporary_directory(cls, v, values):
-        v = _make_abs_path(v, values["base_folder"], tempfile.gettempdir())
+    @model_validator(mode="before")
+    @classmethod
+    def _propagate_base_folder(cls, values):
+        if isinstance(values, dict) and "base_folder" in values:
+            values.setdefault("database", {})
+            values["database"]["base_folder"] = values["base_folder"]
+        return values
+
+    @model_validator(mode="before")
+    @classmethod
+    def _detect_deprecated_fields(cls, values):
+        if isinstance(values, dict):
+            if "strict_queue_tags" in values:
+                logging.getLogger(__name__).warning("strict_queue_tags is deprecated. Use strict_compute_tags instead")
+                values["strict_compute_tags"] = values.pop("strict_queue_tags")
+        return values
+
+    @model_validator(mode="after")
+    def _check_paths(self):
+        self.homepage_directory = _make_abs_path(self.homepage_directory, self.base_folder, None)
+        self.upload_directory = _make_abs_path(self.upload_directory, self.base_folder, None)
+        self.logfile = _make_abs_path(self.logfile, self.base_folder, None)
+        self.geoip2_dir = _make_abs_path(self.geoip2_dir, self.base_folder, "geoip2")

-        if v is not None and not os.path.exists(v):
-            os.makedirs(v)
+        if self.temporary_dir is None:
+            self.temporary_dir = _make_abs_path("qcf_tmp", self.base_folder, tempfile.gettempdir())
+        else:
+            self.temporary_dir = _make_abs_path(self.temporary_dir, self.base_folder, None)

-        return v
+        os.makedirs(self.temporary_dir, exist_ok=True)
+        return self

-    class Config(ConfigCommon):
-        env_prefix = "QCF_"
-
-
-def convert_old_configuration(old_config):
-    cfg_dict = {}
-
-    cfg_dict["base_folder"] = old_config.base_folder
-
-    # Database settings
-    cfg_dict["database"] = {}
-    cfg_dict["database"]["own"] = old_config.database.own
-    cfg_dict["database"]["host"] = old_config.database.host
-    cfg_dict["database"]["port"] = old_config.database.port
-    cfg_dict["database"]["username"] = old_config.database.username
-    cfg_dict["database"]["password"] = old_config.database.password
-    cfg_dict["database"]["database_name"] = old_config.database.database_name
-    cfg_dict["database"]["logfile"] = old_config.database.logfile
-
-    if old_config.database.own:
-        cfg_dict["database"]["data_directory"] = old_config.database.directory
-
-    # Response limits. The old config only had one. Set all the possible
-    # limits to that value
-    response_limit = old_config.fractal.query_limit
-    field_list = APILimitConfig.field_names()
-    cfg_dict["api_limits"] = {k: response_limit for k in field_list}
-
-    # Flask server settings
-    cfg_dict["api"] = {}
-    cfg_dict["api"]["port"] = old_config.fractal.port
-    cfg_dict["api"]["secret_key"] = secrets.token_urlsafe(32)
-    cfg_dict["api"]["jwt_secret_key"] = secrets.token_urlsafe(32)
-
-    # Now general fractal settings.
Before these were in a - # separate config class, but now they are in the top level - cfg_dict["name"] = old_config.fractal.name - cfg_dict["enable_security"] = old_config.fractal.security == "local" - cfg_dict["allow_unauthenticated_read"] = old_config.fractal.allow_read - cfg_dict["logfile"] = old_config.fractal.logfile - cfg_dict["loglevel"] = old_config.fractal.loglevel - cfg_dict["service_frequency"] = old_config.fractal.service_frequency - cfg_dict["max_active_services"] = old_config.fractal.max_active_services - cfg_dict["heartbeat_frequency"] = old_config.fractal.heartbeat_frequency - cfg_dict["log_access"] = old_config.fractal.log_apis - - if old_config.fractal.geo_file_path: - cfg_dict["geoip2_dir"] = os.path.basename(old_config.fractal.geo_file_path) - - return FractalConfig(**cfg_dict) + model_config = QCFConfigBase.model_config | SettingsConfigDict(env_prefix="QCF_") def read_configuration(file_paths: list[str], extra_config: Optional[Dict[str, Any]] = None) -> FractalConfig: diff --git a/qcfractal/qcfractal/flask_app/helpers.py b/qcfractal/qcfractal/flask_app/helpers.py index 9810ad230..8c04d981d 100644 --- a/qcfractal/qcfractal/flask_app/helpers.py +++ b/qcfractal/qcfractal/flask_app/helpers.py @@ -166,7 +166,7 @@ def get_public_server_information(): "manager_heartbeat_frequency_jitter": qcf_cfg.heartbeat_frequency_jitter, "manager_heartbeat_max_missed": qcf_cfg.heartbeat_max_missed, "version": qcfractal_version, - "api_limits": qcf_cfg.api_limits.dict(), + "api_limits": qcf_cfg.api_limits.model_dump(mode="json"), "client_version_lower_limit": "0.50", "client_version_upper_limit": "1.00", "manager_version_lower_limit": "0.50", diff --git a/qcfractal/qcfractal/old_config.py b/qcfractal/qcfractal/old_config.py deleted file mode 100644 index cf02db980..000000000 --- a/qcfractal/qcfractal/old_config.py +++ /dev/null @@ -1,192 +0,0 @@ -""" -Pre-0.5 configuration file specification - -This is needed for migrating the old config -""" - -import os -from pathlib import Path -from typing import Optional - -import yaml - -try: - from pydantic.v1 import Field, validator -except ImportError: - from pydantic import Field, validator - -from .config import ConfigBase, ConfigCommon - - -class OldDatabaseSettings(ConfigBase): - """ - Postgres Database settings - """ - - port: int = Field(5432, description="The postgresql default port") - host: str = Field( - "localhost", - description="Default location for the postgres server. If not localhost, qcfractal command lines cannot manage " - "the instance.", - ) - username: str = Field(None, description="The postgres username to default to.") - password: str = Field(None, description="The postgres password for the give user.") - directory: str = Field( - None, description="The physical location of the QCFractal instance data, defaults to the root folder." - ) - database_name: str = Field("qcfractal_default", description="The database name to connect to.") - logfile: str = Field("qcfractal_postgres.log", description="The logfile to write postgres logs.") - own: bool = Field( - True, - description="If own is True, QCFractal will control the database instance. If False " - "Postgres will expect a booted server at the database specification.", - ) - - class Config(ConfigCommon): - pass - - -class OldViewSettings(ConfigBase): - """ - HDF5 view settings - """ - - enable: bool = Field(True, description="Enable frozen-views.") - directory: str = Field(None, description="Location of frozen-view data. 
If None, defaults to base_folder/views.") - - -class OldFractalServerSettings(ConfigBase): - """ - Fractal Server settings - """ - - name: str = Field("QCFractal Server", description="The QCFractal server default name.") - port: int = Field(7777, description="The QCFractal default port.") - - # TODO: to be removed, handled by Ngnix - compress_response: bool = Field( - True, description="Compress REST responses or not, should be True unless behind a proxy." - ) - allow_read: bool = Field(True, description="Always allows read access to record tables.") - security: str = Field( - None, - description="Optional user authentication. Specify 'local' to enable " - "authentication through locally stored usernames. " - "User permissions may be manipulated through the ``qcfractal-server " - "user`` CLI.", - ) - - query_limit: int = Field(1000, description="The maximum number of records to return per query.") - logfile: Optional[str] = Field("qcfractal_server.log", description="The logfile to write server logs.") - loglevel: str = Field("info", description="Level of logging to enable (debug, info, warning, error, critical)") - cprofile: Optional[str] = Field( - None, description="Enable profiling via cProfile, and output cprofile data to this path" - ) - service_frequency: int = Field(60, description="The frequency to update the QCFractal services.") - max_active_services: int = Field(20, description="The maximum number of concurrent active services.") - heartbeat_frequency: int = Field(1800, description="The frequency (in seconds) to check the heartbeat of workers.") - log_apis: bool = Field( - False, - description="True or False. Store API access in the Database. This is an advanced " - "option for servers accessed by external users through QCPortal.", - ) - geo_file_path: Optional[str] = Field( - None, - description="Geoip2 cites file path (.mmdb) for resolving IP addresses. Defaults to [base_folder]/GeoLite2-City.mmdb", - ) - - _default_geo_filename: str = "GeoLite2-City.mmdb" - - @validator("logfile") - def check_basis(cls, v): - if v == "None": - v = None - return v - - class Config(ConfigCommon): - pass - - -class OldFractalConfig(ConfigBase): - """ - Top level configuration headers and options for a QCFractal Configuration File - """ - - # class variable, not in the pydantic model - _defaults_file_path: str = os.path.expanduser("~/.qca/qcfractal_defaults.yaml") - - base_folder: str = Field( - os.path.expanduser("~/.qca/qcfractal"), - description="The QCFractal base instance to attach to. 
" "Default will be your home directory", - ) - database: OldDatabaseSettings = OldDatabaseSettings() - view: OldViewSettings = OldViewSettings() - fractal: OldFractalServerSettings = OldFractalServerSettings() - - class Config(ConfigCommon): - pass - - def __init__(self, **kwargs): - # If no base_folder provided, read it from ~/.qca/qcfractal_defaults.yaml (if it exists) - # else, use the default base_folder - if "base_folder" in kwargs: - kwargs["base_folder"] = os.path.expanduser(kwargs["base_folder"]) - else: - if Path(OldFractalConfig._defaults_file_path).exists(): - with open(OldFractalConfig._defaults_file_path, "r") as handle: - kwargs["base_folder"] = yaml.load(handle.read(), Loader=yaml.FullLoader)["default_base_folder"] - - super().__init__(**kwargs) - - @property - def base_path(self): - return Path(self.base_folder) - - @property - def config_file_path(self): - return self.base_path / "qcfractal_config.yaml" - - @property - def database_path(self): - if self.database.directory is None: - return self.base_path / "postgres" - else: - return Path(os.path.expanduser(self.database.directory)) - - def database_uri(self, safe: bool = True, database: str = None) -> str: - uri = "postgresql://" - if self.database.username is not None: - uri += f"{self.database.username}:" - - if self.database.password is not None: - if safe: - pw = "*******" - else: - pw = self.database.password - uri += pw - - uri += "@" - - uri += f"{self.database.host}:{self.database.port}/" - - if database is None: - uri += self.database.database_name - else: - uri += database - - return uri - - @property - def view_path(self): - if self.view.directory is None: - default_view_path = self.base_path / "views" - default_view_path.mkdir(parents=False, exist_ok=True) - return default_view_path - else: - return Path(os.path.expanduser(self.view.directory)) - - def geo_file_path(self): - if self.fractal.geo_file_path: - return self.fractal.geo_file_path - else: - return os.path.join(self.base_folder, self.fractal._default_geo_filename) diff --git a/qcfractal/qcfractal/qcfractal_server_cli.py b/qcfractal/qcfractal/qcfractal_server_cli.py index 4a44f5a86..b33be6b3a 100644 --- a/qcfractal/qcfractal/qcfractal_server_cli.py +++ b/qcfractal/qcfractal/qcfractal_server_cli.py @@ -162,11 +162,13 @@ def parse_args() -> argparse.Namespace: start = subparsers.add_parser("start", help="Starts a QCFractal server instance.", parents=[base_parser]) # Allow some config settings to be altered via the command line - start.add_argument("--port", **WebAPIConfig.help_info("port")) - start.add_argument("--host", **WebAPIConfig.help_info("host")) - start.add_argument("--logfile", **FractalConfig.help_info("logfile")) - start.add_argument("--loglevel", **FractalConfig.help_info("loglevel")) - start.add_argument("--enable-security", **FractalConfig.help_info("enable_security")) + start.add_argument("--port", type=int, help=WebAPIConfig.__pydantic_fields__["port"].description) + start.add_argument("--host", type=str, help=WebAPIConfig.__pydantic_fields__["host"].description) + start.add_argument("--logfile", type=str, help=FractalConfig.__pydantic_fields__["logfile"].description) + start.add_argument("--loglevel", type=str, help=FractalConfig.__pydantic_fields__["loglevel"].description) + start.add_argument( + "--enable-security", type=bool, help=FractalConfig.__pydantic_fields__["enable_security"].description + ) start.add_argument( "--disable-job-runner", @@ -182,8 +184,8 @@ def parse_args() -> argparse.Namespace: ) # Allow some config settings to 
be altered via the command line
-    start_per.add_argument("--logfile", **FractalConfig.help_info("logfile"))
-    start_per.add_argument("--loglevel", **FractalConfig.help_info("loglevel"))
+    start_per.add_argument("--logfile", type=str, help=FractalConfig.__pydantic_fields__["logfile"].description)
+    start_per.add_argument("--loglevel", type=str, help=FractalConfig.__pydantic_fields__["loglevel"].description)

     #####################################
     # start-api subcommand
     #####################################

     start_api = subparsers.add_parser("start-api", help="Starts a QCFractal server instance.", parents=[base_parser])

     # Allow some config settings to be altered via the command line
-    start_api.add_argument("--logfile", **FractalConfig.help_info("logfile"))
-    start_api.add_argument("--loglevel", **FractalConfig.help_info("loglevel"))
+    start_api.add_argument("--logfile", type=str, help=FractalConfig.__pydantic_fields__["logfile"].description)
+    start_api.add_argument("--loglevel", type=str, help=FractalConfig.__pydantic_fields__["loglevel"].description)

     #####################################
     # upgrade-db subcommand
diff --git a/qcfractal/qcfractal/snowflake.py b/qcfractal/qcfractal/snowflake.py
index 051a8888a..ae7fcd381 100644
--- a/qcfractal/qcfractal/snowflake.py
+++ b/qcfractal/qcfractal/snowflake.py
@@ -97,7 +97,7 @@ def __init__(
         qcf_cfg: Dict[str, Any] = {}
         qcf_cfg["base_folder"] = self._tmpdir
         qcf_cfg["loglevel"] = logging.getLevelName(loglevel)
-        qcf_cfg["database"] = db_config.dict()
+        qcf_cfg["database"] = db_config.model_dump()
         qcf_cfg["enable_security"] = False
         qcf_cfg["hide_internal_errors"] = False
         qcf_cfg["service_frequency"] = 10
diff --git a/qcfractal/qcfractal/test_config.py b/qcfractal/qcfractal/test_config.py
index 600525e4c..b6e5206ba 100644
--- a/qcfractal/qcfractal/test_config.py
+++ b/qcfractal/qcfractal/test_config.py
@@ -73,10 +73,10 @@ def test_config_durations_dhms(tmp_path):


 def test_config_tmpdir_create(tmp_path):
-    base_folder = str(tmp_path)
     base_config = copy.deepcopy(_base_config)
+    base_config["base_folder"] = str(tmp_path)
     base_config["temporary_dir"] = str(tmp_path / "qcatmpdir")
-    cfg = FractalConfig(base_folder=base_folder, **base_config)
+    cfg = FractalConfig(**base_config)

     assert cfg.temporary_dir == str(tmp_path / "qcatmpdir")
     assert os.path.exists(cfg.temporary_dir)
diff --git a/qcfractal/qcfractal/test_db_connection.py b/qcfractal/qcfractal/test_db_connection.py
index b7327b3b7..e4d72379b 100644
--- a/qcfractal/qcfractal/test_db_connection.py
+++ b/qcfractal/qcfractal/test_db_connection.py
@@ -157,13 +157,13 @@ def test_db_connection_hosts(tmp_path):

     try:
         # Make sure tests can fail
-        new_db_config = pg_harness.config.copy(update={"password": "not_correct"})
+        new_db_config = pg_harness.config.model_copy(update={"password": "not_correct"})
         new_pg_harness = PostgresHarness(new_db_config)
         assert
new_pg_harness.can_connect() is False sock_path = os.path.join(db_config.data_directory, "sock") for test_host in ["localhost", "127.0.0.1", sock_path]: - new_db_config = db_config.copy(update={"host": test_host}) + new_db_config = db_config.model_copy(update={"host": test_host}) assert PostgresHarness(new_db_config).can_connect() assert create_engine(new_db_config.sqlalchemy_url).connect() SQLAlchemySocket.upgrade_database(new_db_config) @@ -197,12 +197,12 @@ def test_db_connection_full_uri(tmp_path): assert pg_harness.can_connect() # Make sure tests can fail - new_db_config = pg_harness.config.copy(update={"password": "not_correct"}) + new_db_config = pg_harness.config.model_copy(update={"password": "not_correct"}) new_pg_harness = PostgresHarness(new_db_config) assert new_pg_harness.can_connect() is False def can_connect(full_uri): - test_cfg = db_config.copy(update={"full_uri": full_uri}) + test_cfg = db_config.model_copy(update={"full_uri": full_uri}) assert PostgresHarness(test_cfg).can_connect() assert create_engine(test_cfg.sqlalchemy_url).connect() SQLAlchemySocket.upgrade_database(test_cfg) diff --git a/qcfractal/qcfractal/test_server_cli.py b/qcfractal/qcfractal/test_server_cli.py index 01f3bddcd..85fdf62a9 100644 --- a/qcfractal/qcfractal/test_server_cli.py +++ b/qcfractal/qcfractal/test_server_cli.py @@ -13,8 +13,6 @@ from qcfractal.port_util import find_open_port config_file = os.path.join(testconfig_path, "qcf_basic.yaml") -old_config_file = os.path.join(migrationdata_path, "qcfractal_config_v0.15.8.yaml") -old_db_dump = os.path.join(migrationdata_path, "qcfractal_config_v0.15.8.yaml") pytestmark = pytest.mark.slow @@ -60,11 +58,11 @@ def cli_runner_core(postgres_server, tmp_config, request, unique_db_name): pg_harness.delete_database() config = read_configuration([tmp_config]) - config.database = pg_harness.config.copy(deep=True) + config.database = pg_harness.config.model_copy(deep=True) config.database.own = False with open(tmp_config, "w") as f: - yaml.dump(config.dict(), f) + yaml.dump(config.model_dump(), f) # Use a functor so we can get own_db (and maybe other info in the future) class run_qcfractal_cli: @@ -350,22 +348,6 @@ def test_cli_upgrade_noinit(cli_runner_core): assert "does not exist for upgrading" in output -def test_cli_upgrade_config(tmp_path): - tmp_subdir = tmp_path / "cli_tmp" - tmp_subdir.mkdir() - - shutil.copy(old_config_file, tmp_subdir) - - old_config_path = os.path.join(tmp_subdir, os.path.basename(old_config_file)) - - output = subprocess.check_output( - ["qcfractal-server", "upgrade-config", "-v", "--config", old_config_path], universal_newlines=True - ) - - assert "Your configuration file has been upgraded" in output - assert os.path.isfile(old_config_path + ".backup") - - def test_cli_start(cli_runner): full_cmd = ["qcfractal-server", "--config", cli_runner.config_path, "start"] diff --git a/qcfractalcompute/qcfractalcompute/config.py b/qcfractalcompute/qcfractalcompute/config.py index d80aadede..639ba6d24 100644 --- a/qcfractalcompute/qcfractalcompute/config.py +++ b/qcfractalcompute/qcfractalcompute/config.py @@ -1,14 +1,11 @@ import logging import os from typing import List, Optional, Union, Dict, Any +from typing import Literal, Annotated import yaml - -try: - from pydantic.v1 import BaseModel, Field, validator, root_validator -except ImportError: - from pydantic import BaseModel, Field, validator, root_validator -from typing_extensions import Literal +from pydantic import BaseModel, Field, field_validator, model_validator, BeforeValidator 
+from pydantic_settings import BaseSettings, SettingsConfigDict

 from qcportal.utils import seconds_to_hms, duration_to_seconds, update_nested_dict

@@ -31,7 +28,28 @@ def _make_abs_path(path: Optional[str], base_folder: str, default_filename: Opti
     return os.path.abspath(path)


-class PackageEnvironmentSettings(BaseModel):
+def _walltime_must_be_str(w) -> str:
+    """
+    Converts walltime to a string if it is a number
+
+    If walltime is an int, float, or a string that is an integer, convert it to a
+    string in the format HH:MM:SS. Otherwise, return the original value.
+    """
+
+    # Numbers (or digit-only strings) are seconds; any other string passes through unchanged
+    if isinstance(w, (float, int)):
+        return seconds_to_hms(w)
+    elif isinstance(w, str) and w.isdigit():
+        return seconds_to_hms(int(w))
+    else:
+        return w
+
+
+class QCFComputeConfigBase(BaseSettings):
+    model_config = SettingsConfigDict(extra="forbid", case_sensitive=False, env_prefix="QCF_COMPUTE_")
+
+
+class PackageEnvironmentSettings(QCFComputeConfigBase):
     """
     Environments with installed packages that can be used to run calculations
@@ -39,14 +57,17 @@
     direct appropriate calculations to them.
     """

-    use_manager_environment: bool = True
+    use_manager_environment: bool = Field(
+        True,
+        description="Use the environment that the manager is running in for computation. May be used in addition to other environments",
+    )
     conda: List[str] = Field([], description="List of conda environments to query for installed packages")
     apptainer: List[str] = Field(
         [], description="List of paths to apptainer/singularity files to query for installed packages"
     )


-class ExecutorConfig(BaseModel):
+class ExecutorConfig(QCFComputeConfigBase):
     type: str
     compute_tags: List[str]
     worker_init: List[str] = []
@@ -59,19 +80,7 @@
     extra_executor_options: Dict[str, Any] = {}

-    environments: PackageEnvironmentSettings = PackageEnvironmentSettings()
-
-    class Config(BaseModel.Config):
-        case_insensitive = True
-        extra = "forbid"
-
-    # TODO - DEPRECATED - REMOVE EVENTUALLY
-    @root_validator(pre=True)
-    def _old_queue_tag(cls, values):
-        if "queue_tags" in values:
-            values["compute_tags"] = values.pop("queue_tags")
-
-        return values
+    environments: PackageEnvironmentSettings = Field(default_factory=PackageEnvironmentSettings)


 class CustomExecutorConfig(ExecutorConfig):
@@ -87,7 +96,7 @@ class LocalExecutorConfig(ExecutorConfig):

 class SlurmExecutorConfig(ExecutorConfig):
     type: Literal["slurm"] = "slurm"
-    walltime: str
+    walltime: Annotated[str, BeforeValidator(_walltime_must_be_str)]
     exclusive: bool = True
     partition: Optional[str] = None
     account: Optional[str] = None
@@ -97,18 +106,11 @@

     scheduler_options: List[str] = []

-    @validator("walltime", pre=True)
-    def walltime_must_be_str(cls, v):
-        if isinstance(v, int):
-            return seconds_to_hms(v)
-        else:
-            return v
-

 class TorqueExecutorConfig(ExecutorConfig):
     type: Literal["torque"] = "torque"
-    walltime: str
+    walltime: Annotated[str, BeforeValidator(_walltime_must_be_str)]
     account: Optional[str] = None
     queue: Optional[str] = None
@@ -117,15 +119,11 @@

     scheduler_options: List[str] = []

-    @validator("walltime", pre=True)
-    def walltime_must_be_str(cls, v):
-        return seconds_to_hms(duration_to_seconds(v))
-

 class LSFExecutorConfig(ExecutorConfig):
     type: Literal["lsf"] = "lsf"
-    walltime: str
+    walltime: Annotated[str, BeforeValidator(_walltime_must_be_str)]
     project: Optional[str] = None
     queue: Optional[str] = None
@@ -137,17 +135,11 @@ class
LSFExecutorConfig(ExecutorConfig): scheduler_options: List[str] = [] - @validator("walltime", pre=True) - def walltime_must_be_str(cls, v): - return seconds_to_hms(duration_to_seconds(v)) - -AllExecutorTypes = Union[ - CustomExecutorConfig, LocalExecutorConfig, SlurmExecutorConfig, TorqueExecutorConfig, LSFExecutorConfig +AllExecutorTypes = Annotated[ + Union[ + CustomExecutorConfig, + LocalExecutorConfig, + SlurmExecutorConfig, + TorqueExecutorConfig, + LSFExecutorConfig, + ], + Field(discriminator="type"), ] -class FractalServerSettings(BaseModel): +class FractalServerSettings(QCFComputeConfigBase): """ Settings pertaining to the Fractal Server you wish to pull tasks from and push completed tasks to. Each manager supports exactly 1 Fractal Server to be in communication with, and exactly 1 user on that Fractal Server. These @@ -169,12 +170,8 @@ class FractalServerSettings(BaseModel): ) verify: Optional[bool] = Field(None, description="Use Server-side generated SSL certification or not.") - class Config(BaseModel.Config): - case_insensitive = True - extra = "forbid" - -class FractalComputeConfig(BaseModel): +class FractalComputeConfig(QCFComputeConfigBase): base_folder: str = Field( ..., description="The base folder to use as the default for some options (logs, etc). Default is the location of the config file.", @@ -218,24 +215,22 @@ class FractalComputeConfig(BaseModel): parsl_usage_tracking: int = 0 server: FractalServerSettings = Field(...) - environments: PackageEnvironmentSettings = PackageEnvironmentSettings() + environments: PackageEnvironmentSettings = Field(default_factory=PackageEnvironmentSettings) executors: Dict[str, AllExecutorTypes] = Field(...) - class Config(BaseModel.Config): - case_insensitive = True - extra = "forbid" - - @validator("logfile") - def _check_logfile(cls, v, values): - return _make_abs_path(v, values["base_folder"], None) + @model_validator(mode="after") + def _check_paths(self): + self.logfile = _make_abs_path(self.logfile, self.base_folder, None) + self.parsl_run_dir = _make_abs_path(self.parsl_run_dir, self.base_folder, "parsl_run_dir") + return self - @validator("parsl_run_dir") - def _check_run_dir(cls, v, values): - return _make_abs_path(v, values["base_folder"], "parsl_run_dir") - - @validator("update_frequency", "max_idle_time", pre=True) + @field_validator("update_frequency", "max_idle_time", mode="before") + @classmethod def _convert_durations(cls, v): - return duration_to_seconds(v) + if v is None: + return None + else: + return duration_to_seconds(v) def read_configuration(file_paths: List[str], extra_config: Optional[Dict[str, Any]] = None) -> FractalComputeConfig: diff --git a/qcfractalcompute/qcfractalcompute/test_manager_config.py b/qcfractalcompute/qcfractalcompute/test_manager_config.py index 79a0496a6..51f1dc41f 100644 --- a/qcfractalcompute/qcfractalcompute/test_manager_config.py +++ b/qcfractalcompute/qcfractalcompute/test_manager_config.py @@ -22,7 +22,7 @@ def test_manager_config_walltime(time_str): # Walltime as a string config_yaml = f""" - queue_tags: + compute_tags: - '*' cores_per_worker: 1 memory_per_worker: 1.0 @@ -37,7 +37,7 @@ def test_manager_config_walltime(time_str): # Walltime without quotes (gets converted by yaml to int) config_yaml = f""" - queue_tags: + compute_tags: - '*' cores_per_worker: 1 memory_per_worker: 1.0 diff --git a/qcportal/pyproject.toml b/qcportal/pyproject.toml index 17dfc1ae0..245683a13 100644 --- a/qcportal/pyproject.toml +++ b/qcportal/pyproject.toml @@ -26,7 +26,8 @@ dependencies = [ "msgpack", 
"requests", "pyyaml", - "pydantic", + "pydantic>2.10", + "pydantic-settings", "zstandard", "apsw>=3.42", "qcelemental<0.70a0",