Skip to content

Commit 5b260ea

Browse files
committed
Add support for returning HiPS lists for multiple data releases.
Change Configuration to allow reading multiple data releases and their subpaths from a file, and serve HiPS lists from a new v2 router that takes the data release from a path parameter. Also update requirements and tests.
1 parent c754b0b commit 5b260ea

File tree

21 files changed

+1403
-708
lines changed

21 files changed

+1403
-708
lines changed

.pre-commit-config.yaml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -8,12 +8,12 @@ repos:
88
- id: trailing-whitespace
99

1010
- repo: https://github.com/astral-sh/uv-pre-commit
11-
rev: 0.6.14
11+
rev: 0.7.13
1212
hooks:
1313
- id: uv-lock
1414

1515
- repo: https://github.com/astral-sh/ruff-pre-commit
16-
rev: v0.11.5
16+
rev: v0.11.13
1717
hooks:
1818
- id: ruff
1919
args: [--fix, --exit-non-zero-on-fix]

Dockerfile

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,7 @@
1212
# - Runs a non-root user.
1313
# - Sets up the entrypoint and port.
1414

15-
FROM python:3.13.3-slim-bookworm AS base-image
15+
FROM python:3.13.4-slim-bookworm AS base-image
1616

1717
# Update system packages
1818
COPY scripts/install-base-packages.sh .
Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,11 @@
1+
<!-- Delete the sections that don't apply -->
2+
3+
### New features
4+
5+
- Change Config to load configuration from a local file, to support more complex structures for the HiPS release data
6+
- Introduce a v2 HiPS router. Add an additional HiPS dependency that builds a HiPS list per data release, given the data releases and their paths via configuration
7+
- Add new tests for the v2 and legacy HiPS list endpoints
8+
9+
### Other changes
10+
11+
- Upgraded dependencies and Python version

pyproject.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -23,7 +23,7 @@ dependencies = [
2323
"httpx",
2424
"jinja2",
2525
"lsst-daf-butler[remote]",
26-
"pydantic>2",
26+
"pydantic>=2.11",
2727
"pydantic-settings",
2828
"pyyaml",
2929
"safir>=6.2.0",

src/datalinker/config.py

Lines changed: 143 additions & 22 deletions
Original file line numberDiff line numberDiff line change
@@ -4,25 +4,38 @@
44

55
from datetime import timedelta
66
from pathlib import Path
7-
from typing import Annotated
7+
from typing import Annotated, Self
88

9-
from pydantic import Field, HttpUrl, SecretStr
9+
import yaml
10+
from pydantic import BaseModel, Field, HttpUrl, model_validator
1011
from pydantic_settings import BaseSettings, SettingsConfigDict
1112
from safir.logging import LogLevel, Profile
1213
from safir.pydantic import HumanTimedelta
1314

14-
__all__ = [
15-
"Config",
16-
"config",
17-
]
15+
__all__ = ["Config", "HiPSDatasetConfig"]
16+
17+
18+
class HiPSDatasetConfig(BaseModel):
19+
"""Configuration for a single HiPS dataset."""
20+
21+
url: Annotated[
22+
HttpUrl,
23+
Field(title="Base URL", description="Base URL for this HiPS dataset"),
24+
]
25+
26+
paths: Annotated[
27+
list[str],
28+
Field(
29+
title="HiPS paths",
30+
description="List of available HiPS paths",
31+
),
32+
]
1833

1934

2035
class Config(BaseSettings):
2136
"""Configuration for datalinker."""
2237

23-
model_config = SettingsConfigDict(
24-
env_prefix="DATALINKER_", case_sensitive=False
25-
)
38+
model_config = SettingsConfigDict(extra="forbid", populate_by_name=True)
2639

2740
cutout_sync_url: Annotated[
2841
HttpUrl,
@@ -31,30 +44,65 @@ class Config(BaseSettings):
3144
description=(
3245
"URL to the sync API for the SODA service that does cutouts"
3346
),
47+
validation_alias="cutoutSyncUrl",
3448
),
3549
]
3650

37-
hips_base_url: Annotated[HttpUrl, Field(title="Base URL for HiPS lists")]
51+
hips_base_url: Annotated[
52+
HttpUrl,
53+
Field(title="Base URL for HiPS lists", validation_alias="hipsBaseUrl"),
54+
]
55+
56+
hips_datasets: Annotated[
57+
dict[str, HiPSDatasetConfig],
58+
Field(
59+
title="HiPS dataset configurations",
60+
description=(
61+
"Mapping of dataset names to their configuration. "
62+
"Each dataset has a base URL and list of available HiPS paths."
63+
),
64+
validation_alias="hipsDatasets",
65+
),
66+
] = {}
67+
68+
hips_default_dataset: Annotated[
69+
str, Field(validation_alias="hipsDefaultDataset")
70+
] = ""
71+
"""The dataset to serve from v1 routes. Must be a key in hips_datasets"""
3872

3973
hips_path_prefix: Annotated[
4074
str,
4175
Field(
4276
title="URL prefix for HiPS API",
4377
description="URL prefix used to inject the HiPS list file",
78+
validation_alias="hipsPathPrefix",
4479
),
4580
] = "/api/hips"
4681

82+
hips_v2_path_prefix: Annotated[
83+
str,
84+
Field(
85+
title="URL prefix for HiPS v2 API",
86+
description="URL prefix used to inject the HiPS list file",
87+
validation_alias="hipsV2PathPrefix",
88+
),
89+
] = "/api/hips/v2"
90+
4791
links_lifetime: Annotated[
4892
HumanTimedelta,
4993
Field(
5094
title="Lifetime of image links replies",
5195
description="Should match the lifetime of signed URLs from Butler",
96+
validation_alias="linksLifetime",
5297
),
5398
] = timedelta(hours=1)
5499

55100
log_level: Annotated[
56101
LogLevel,
57-
Field(title="Log level of the application's logger"),
102+
Field(
103+
title="Log level of the application's logger",
104+
validation_alias="logLevel",
105+
),
58106
] = LogLevel.INFO
59107

60108
name: Annotated[str, Field(title="Application name")] = "datalinker"
@@ -67,6 +115,7 @@ class Config(BaseSettings):
67115
"This URL prefix is used for the IVOA DataLink API and for"
68116
" any other helper APIs exposed via DataLink descriptors"
69117
),
118+
validation_alias="pathPrefix",
70119
),
71120
] = "/api/datalink"
72121

@@ -75,28 +124,100 @@ class Config(BaseSettings):
75124
Field(title="Application logging profile"),
76125
] = Profile.production
77126

127+
tap_metadata_url: Annotated[
128+
Path | None,
129+
Field(
130+
title="URL to TAP schema metadata",
131+
description=(
132+
"URL containing TAP schema metadata used to construct queries"
133+
),
134+
validation_alias="tapMetadataUrl",
135+
),
136+
] = None
137+
78138
tap_metadata_dir: Annotated[
79139
Path | None,
80140
Field(
81141
title="Path to TAP YAML metadata",
82142
description=(
83143
"Directory containing YAML metadata files about TAP schema"
84144
),
145+
validation_alias="tapMetadataDir",
85146
),
86147
] = None
87148

88-
slack_webhook: Annotated[
89-
SecretStr | None, Field(title="Slack webhook for exception reporting")
90-
] = None
149+
slack_alerts: bool = Field(
150+
False,
151+
title="Enable Slack alerts",
152+
description=(
153+
"Whether to enable Slack alerts. If true, ``slack_webhook`` must"
154+
" also be set."
155+
),
156+
validation_alias="slackAlerts",
157+
)
91158

92-
token: Annotated[
93-
str,
94-
Field(
95-
title="Token for API authentication",
96-
description="Token to use to authenticate to the HiPS service",
159+
slack_webhook: str | None = Field(
160+
None,
161+
title="Slack webhook for alerts",
162+
description=(
163+
"If set, any uncaught exceptions in datalinker will be"
164+
" reported to Slack via this webhook"
97166
),
98-
]
167+
validation_alias="DATALINKER_SLACK_WEBHOOK",
168+
)
99169

170+
token: str = Field(
171+
title="Token for API authentication",
172+
description="Token to use to authenticate to the HiPS service",
173+
validation_alias="DATALINKER_TOKEN",
174+
)
100175

101-
config = Config()
102-
"""Configuration for datalinker."""
176+
def has_hips_datasets(self) -> bool:
177+
"""Check if any HiPS datasets are configured."""
178+
return bool(self.hips_datasets)
179+
180+
def get_default_hips_dataset(self) -> HiPSDatasetConfig:
181+
"""Return the HiPS dataset config for the default dataset.
182+
183+
Returns
184+
-------
185+
HiPSDatasetConfig
186+
The default dataset configuration. Raises `KeyError` if
`hips_default_dataset` is not a key in `hips_datasets`.
187+
"""
188+
return self.hips_datasets[self.hips_default_dataset]
189+
190+
@model_validator(mode="after")
191+
def validate_default_hips_dataset(self) -> Self:
192+
"""Validate that the default HiPS dataset exists if specified."""
193+
if self.hips_default_dataset:
194+
if not self.hips_datasets:
195+
msg = (
196+
f"HiPS dataset key {self.hips_default_dataset} specified "
197+
"but no datasets are configured in hips_datasets"
198+
)
199+
raise ValueError(msg)
200+
if self.hips_default_dataset not in self.hips_datasets:
201+
msg = (
202+
f"HiPS dataset key {self.hips_default_dataset} not found. "
203+
f"Available datasets: {list(self.hips_datasets.keys())}"
204+
)
205+
raise ValueError(msg)
206+
return self
207+
208+
@classmethod
209+
def from_file(cls, path: Path) -> Self:
210+
"""Construct a Configuration object from a configuration file.
211+
212+
Parameters
213+
----------
214+
path
215+
Path to the configuration file in YAML.
216+
217+
Returns
218+
-------
219+
Config
220+
The corresponding `Config` object.
221+
"""
222+
with path.open("r") as f:
223+
return cls.model_validate(yaml.safe_load(f))

src/datalinker/constants.py

Lines changed: 10 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,15 @@
11
"""Constants for datalinker."""
22

3+
from pathlib import Path
4+
5+
__all__ = ["CONFIG_PATH", "CONFIG_PATH_ENV_VAR"]
6+
7+
CONFIG_PATH = Path("/etc/datalinker/config.yaml")
8+
"""Default path to configuration."""
9+
10+
CONFIG_PATH_ENV_VAR = "DATALINKER_CONFIG_PATH"
11+
"""Env var to load config path from."""
12+
313
ADQL_COMPOUND_TABLE_REGEX = r"^([a-zA-Z0-9_]+\.)?[a-zA-Z0-9_.]+$"
414
"""ADQL table with optional prefix."""
515

@@ -8,16 +18,3 @@
818

919
ADQL_IDENTIFIER_REGEX = r"^[a-zA-Z0-9_]+$"
1020
"""ADQL table (without prefix)."""
11-
12-
HIPS_DATASETS = (
13-
"images/color_gri",
14-
"images/color_riz",
15-
"images/band_u",
16-
"images/band_g",
17-
"images/band_r",
18-
"images/band_i",
19-
"images/band_z",
20-
"images/band_y",
21-
"images/2MASS/Color",
22-
)
23-
"""HiPS data sets to include in the HiPS list."""
Lines changed: 65 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,65 @@
1+
"""Config dependency for FastAPI.
2+
3+
Stolen from Gafaelfawr & Crawlspace.
4+
"""
5+
6+
from __future__ import annotations
7+
8+
import os
9+
from pathlib import Path
10+
11+
from ..config import Config
12+
from ..constants import CONFIG_PATH, CONFIG_PATH_ENV_VAR
13+
14+
__all__ = ["ConfigDependency", "config_dependency"]
15+
16+
17+
class ConfigDependency:
18+
"""Provides the configuration as a dependency.
19+
20+
We want a production deployment to default to one configuration path, but
21+
allow that path to be overridden by the test suite and, if the path
22+
changes, to reload the configuration (which allows sharing the same set of
23+
global singletons across multiple tests). Do this by loading the config
24+
dynamically when it's first requested and reloading it whenever the
25+
configuration path is changed.
26+
"""
27+
28+
def __init__(self) -> None:
29+
config_path = os.getenv(CONFIG_PATH_ENV_VAR, CONFIG_PATH)
30+
self._config_path = Path(config_path)
31+
self._config: Config | None = None
32+
33+
async def __call__(self) -> Config:
34+
"""Load the configuration if necessary and return it."""
35+
return self.config()
36+
37+
@property
38+
def config_path(self) -> Path:
39+
"""Path to the configuration file."""
40+
return self._config_path
41+
42+
def config(self) -> Config:
43+
"""Load the configuration if necessary and return it.
44+
45+
This is equivalent to using the dependency as a callable except that
46+
it's not async and can therefore be used from non-async functions.
47+
"""
48+
if not self._config:
49+
self._config = Config.from_file(self._config_path)
50+
return self._config
51+
52+
def set_config_path(self, path: Path) -> None:
53+
"""Change the configuration path and reload the config.
54+
55+
Parameters
56+
----------
57+
path
58+
The new configuration path.
59+
"""
60+
self._config_path = path
61+
self._config = Config.from_file(path)
62+
63+
64+
config_dependency = ConfigDependency()
65+
"""The dependency that will return the current configuration."""

0 commit comments

Comments
 (0)