21 commits
9c00b0c  proposal for updating propogate_component_properties using data classes (Dekermanjian, Nov 2, 2025)
d6d86e9  Iterate on proposal (Nov 7, 2025)
f77990d  Fix iterator, add `to_dict` method to `CoordsInfo` (Nov 7, 2025)
068c93f  Add `observed_states` helper to `StateInfo` (Nov 7, 2025)
47fbdee  made necessary changes to get the regression component test to pass u… (Dekermanjian, Nov 9, 2025)
44813be  1. Updated dataclasses to include copy method and replaced raise on d… (Dekermanjian, Nov 15, 2025)
034d95b  1. added add and merge methods to base class (Dekermanjian, Nov 16, 2025)
66681d5  removed data & coords setters in _set<foo> method in Component class… (Dekermanjian, Nov 22, 2025)
9699d39  1. updated properties base class to handle duplicate names when allow… (Dekermanjian, Dec 22, 2025)
9b153cb  added docstring to setter methods in core and refactored level trend … (Dekermanjian, Dec 28, 2025)
5c3a913  1. restructured seasonal components to work with dataclass architecture (Dekermanjian, Dec 28, 2025)
1ba2dd1  restructured autoregressive component to follow dataclass architecture (Dekermanjian, Dec 29, 2025)
3a6d70b  restructured measurement error component to align with dataclass arch… (Dekermanjian, Dec 29, 2025)
e3a86f5  restructured cycle component to use dataclass architecture (Dekermanjian, Dec 29, 2025)
97ad416  1. added protocol to reduce import complexity (Dekermanjian, Dec 30, 2025)
24be4cd  1. changed PyTensorVariable to SymbolicVariable and TensorData to Sym… (Dekermanjian, Jan 1, 2026)
9677ddc  Typing and helper methods (jessegrabowski, Jan 16, 2026)
236bd64  Update BayesianSARIMAX (jessegrabowski, Jan 16, 2026)
eec5bd9  Update BayesianVARMAX (jessegrabowski, Jan 16, 2026)
625ffe3  Update BayesianETS (jessegrabowski, Jan 16, 2026)
e1e56d3  Update BayesianDFM (jessegrabowski, Jan 16, 2026)
583 changes: 583 additions & 0 deletions notebooks/structural_components_dataclass.ipynb

Large diffs are not rendered by default.

264 changes: 264 additions & 0 deletions pymc_extras/statespace/core/properties.py
@@ -0,0 +1,264 @@
from __future__ import annotations
Review comment (Member): The robot always adds this -- why do we need it?


from collections.abc import Iterator
from copy import deepcopy
from dataclasses import dataclass, fields
from typing import Generic, Protocol, Self, TypeVar

from pytensor.tensor.variable import TensorVariable

from pymc_extras.statespace.utils.constants import (
ALL_STATE_AUX_DIM,
ALL_STATE_DIM,
OBS_STATE_AUX_DIM,
OBS_STATE_DIM,
SHOCK_AUX_DIM,
SHOCK_DIM,
)


class StateSpaceLike(Protocol):
@property
def state_names(self) -> list[str]: ...

@property
def observed_states(self) -> list[str]: ...

@property
def shock_names(self) -> list[str]: ...


@dataclass(frozen=True)
class Property:
def __str__(self) -> str:
return "\n".join(f"{f.name}: {getattr(self, f.name)}" for f in fields(self))


T = TypeVar("T", bound=Property)


@dataclass(frozen=True)
class Info(Generic[T]):
items: tuple[T, ...]
key_field: str = "name"
_index: dict[str, T] | None = None

def __post_init__(self):
index = {}
for item in self.items:
key = getattr(item, self.key_field)
if key in index:
raise ValueError(f"Duplicate {self.key_field} '{key}' detected.")
index[key] = item
object.__setattr__(self, "_index", index)

def _key(self, item: T) -> str:
return getattr(item, self.key_field)

def get(self, key: str, default=None) -> T | None:
return self._index.get(key, default)

def __getitem__(self, key: str) -> T:
try:
return self._index[key]
except KeyError as e:
available = ", ".join(self._index.keys())
raise KeyError(f"No {self.key_field} '{key}'. Available: [{available}]") from e

def __contains__(self, key: object) -> bool:
return key in self._index

def __iter__(self) -> Iterator[T]:
return iter(self.items)

def __len__(self) -> int:
return len(self.items)

def __str__(self) -> str:
return f"{self.key_field}s: {list(self._index.keys())}"

def add(self, new_item: T):
return type(self)([*self.items, new_item])

    def merge(self, other: Self, overwrite_duplicates: bool = False) -> Self:
        if not isinstance(other, type(self)):
            raise TypeError(f"Cannot merge {type(other).__name__} with {type(self).__name__}")

        overlapping = set(self.names) & set(other.names)
        if overlapping and overwrite_duplicates:
            # Keep this registry's version of any overlapping name; only items from
            # `other` with new names are appended.
            return type(self)(
                list(self.items)
                + [item for item in other.items if self._key(item) not in overlapping]
            )

        # Any remaining duplicate names are caught by __post_init__, which raises ValueError.
        return type(self)(list(self.items) + list(other.items))

@property
def names(self) -> tuple[str, ...]:
return tuple(self._index.keys())

def copy(self) -> Info[T]:
return deepcopy(self)


@dataclass(frozen=True)
class Parameter(Property):
name: str
shape: tuple[int, ...]
dims: tuple[str, ...] | None
constraints: str | None = None


@dataclass(frozen=True)
class ParameterInfo(Info[Parameter]):
def __init__(self, parameters: list[Parameter]):
super().__init__(items=tuple(parameters), key_field="name")

def to_dict(self):
return {
param.name: {"shape": param.shape, "constraints": param.constraints, "dims": param.dims}
for param in self.items
}


@dataclass(frozen=True)
class Data(Property):
name: str
shape: tuple[int, ...]
dims: tuple[str, ...]
is_exogenous: bool


@dataclass(frozen=True)
class DataInfo(Info[Data]):
def __init__(self, data: list[Data]):
super().__init__(items=tuple(data), key_field="name")

@property
def needs_exogenous_data(self) -> bool:
return any(d.is_exogenous for d in self.items)

@property
def exogenous_names(self) -> tuple[str, ...]:
return tuple(d.name for d in self.items if d.is_exogenous)

def __str__(self) -> str:
return f"data: {[d.name for d in self.items]}\nneeds exogenous data: {self.needs_exogenous_data}"

def to_dict(self):
return {
data.name: {"shape": data.shape, "dims": data.dims, "exogenous": data.is_exogenous}
for data in self.items
}


@dataclass(frozen=True)
class Coord(Property):
dimension: str
labels: tuple[str | int, ...]


@dataclass(frozen=True)
class CoordInfo(Info[Coord]):
def __init__(self, coords: list[Coord]):
super().__init__(items=tuple(coords), key_field="dimension")

def __str__(self) -> str:
base = "coordinates:"
for coord in self.items:
coord_str = str(coord)
indented = "\n".join(" " + line for line in coord_str.splitlines())
base += "\n" + indented + "\n"
return base

@classmethod
def default_coords_from_model(cls, model: StateSpaceLike) -> Self:
states = tuple(model.state_names)
obs_states = tuple(model.observed_states)
shocks = tuple(model.shock_names)

dim_to_labels = (
(ALL_STATE_DIM, states),
(ALL_STATE_AUX_DIM, states),
(OBS_STATE_DIM, obs_states),
(OBS_STATE_AUX_DIM, obs_states),
(SHOCK_DIM, shocks),
(SHOCK_AUX_DIM, shocks),
)

coords = [Coord(dimension=dim, labels=labels) for dim, labels in dim_to_labels]
return cls(coords)

def to_dict(self):
return {coord.dimension: coord.labels for coord in self.items if len(coord.labels) > 0}


@dataclass(frozen=True)
class State(Property):
name: str
observed: bool
shared: bool


@dataclass(frozen=True)
class StateInfo(Info[State]):
def __init__(self, states: list[State]):
super().__init__(items=tuple(states), key_field="name")

def __str__(self) -> str:
return (
f"states: {[s.name for s in self.items]}\nobserved: {[s.observed for s in self.items]}"
)

@property
def observed_state_names(self) -> tuple[str, ...]:
return tuple(s.name for s in self.items if s.observed)

@property
def unobserved_state_names(self) -> tuple[str, ...]:
return tuple(s.name for s in self.items if not s.observed)


@dataclass(frozen=True)
class Shock(Property):
name: str


@dataclass(frozen=True)
class ShockInfo(Info[Shock]):
def __init__(self, shocks: list[Shock]):
super().__init__(items=tuple(shocks), key_field="name")


# It is debatable whether these are properties.
# They are placeholders for symbolic tensors, whether those tensors are parameters or data.


@dataclass(frozen=True)
class SymbolicVariable(Property):
name: str
symbolic_variable: TensorVariable


@dataclass(frozen=True)
class SymbolicVariableInfo(Info[SymbolicVariable]):
def __init__(self, symbolic_variables: list[SymbolicVariable]):
super().__init__(items=tuple(symbolic_variables), key_field="name")

def to_dict(self):
return {variable.name: variable.symbolic_variable for variable in self.items}


@dataclass(frozen=True)
class SymbolicData(Property):
name: str
symbolic_data: TensorVariable


@dataclass(frozen=True)
class SymbolicDataInfo(Info[SymbolicData]):
def __init__(self, symbolic_data: list[SymbolicData]):
super().__init__(items=tuple(symbolic_data), key_field="name")

def to_dict(self):
return {data.name: data.symbolic_data for data in self.items}
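
For readers skimming the new module, here is a minimal usage sketch of the registry classes above; it is not part of the PR's diff, and the parameter names, shapes, and constraint strings are illustrative assumptions rather than values taken from the codebase.

```python
# Hypothetical usage of the new property registries; names and shapes are made up.
from pymc_extras.statespace.core.properties import (
    Coord,
    CoordInfo,
    Parameter,
    ParameterInfo,
)

# Each Parameter is an immutable record; ParameterInfo indexes records by name.
level = Parameter(name="initial_level", shape=(1,), dims=("state",))
sigma = Parameter(name="sigma_level", shape=(1,), dims=None, constraints="Positive")
params = ParameterInfo([level, sigma])

assert "initial_level" in params
assert params["sigma_level"].constraints == "Positive"
params.to_dict()  # {"initial_level": {"shape": (1,), "constraints": None, "dims": ("state",)}, ...}

# Registries are immutable: add() and merge() return new instances.
extra = ParameterInfo([Parameter(name="sigma_obs", shape=(1,), dims=None, constraints="Positive")])
combined = params.merge(extra)  # duplicate names raise ValueError unless overwrite_duplicates=True
assert combined.names == ("initial_level", "sigma_level", "sigma_obs")

# Coordinates follow the same pattern, keyed by dimension name.
coords = CoordInfo([Coord(dimension="state", labels=("level",))])
assert coords.to_dict() == {"state": ("level",)}
```

Because the dataclasses are frozen, any change goes through add(), merge(), or copy(), which is what allows statespace.py (below) to replace its mutable `_name_to_variable` / `_name_to_data` dicts with these registries.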
27 changes: 19 additions & 8 deletions pymc_extras/statespace/core/statespace.py
@@ -19,6 +19,12 @@
from rich.console import Console
from rich.table import Table

from pymc_extras.statespace.core.properties import (
SymbolicData,
SymbolicDataInfo,
SymbolicVariable,
SymbolicVariableInfo,
)
from pymc_extras.statespace.core.representation import PytensorRepresentation
from pymc_extras.statespace.filters import (
KalmanSmoother,
@@ -236,8 +242,11 @@ def __init__(
self._fit_exog_data: dict[str, dict] = {}

self._needs_exog_data = None
self._name_to_variable = {}
self._name_to_data = {}
if not self._tensor_variable_info:
self._tensor_variable_info = SymbolicVariableInfo(symbolic_variables=[])

if not self._tensor_data_info:
self._tensor_data_info = SymbolicDataInfo(symbolic_data=[])

self.k_endog = k_endog
self.k_states = k_states
@@ -520,14 +529,15 @@ def make_and_register_variable(
f"parameters."
)

if name in self._name_to_variable.keys():
if name in self._tensor_variable_info:
raise ValueError(
f"{name} is already a registered placeholder variable with shape "
f"{self._name_to_variable[name].type.shape}"
f"{self._tensor_variable_info[name].type.shape}"
)

placeholder = pt.tensor(name, shape=shape, dtype=dtype)
self._name_to_variable[name] = placeholder
tensor_var = SymbolicVariable(name=name, symbolic_variable=placeholder)
self._tensor_variable_info = self._tensor_variable_info.add(tensor_var)
return placeholder

def make_and_register_data(
@@ -559,14 +569,15 @@ def make_and_register_data(
f"parameters."
)

if name in self._name_to_data.keys():
if name in self._tensor_data_info:
raise ValueError(
f"{name} is already a registered placeholder variable with shape "
f"{self._name_to_data[name].type.shape}"
f"{self._tensor_data_info[name].type.shape}"
)

placeholder = pt.tensor(name, shape=shape, dtype=dtype)
self._name_to_data[name] = placeholder
tensor_data = SymbolicData(name=name, symbolic_data=placeholder)
        self._tensor_data_info = self._tensor_data_info.add(tensor_data)
return placeholder

def make_symbolic_graph(self) -> None:
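
To make the statespace.py change above concrete, here is a small, self-contained sketch that mirrors what make_and_register_variable now does internally; the variable name, shape, and dtype are illustrative, and the snippet deliberately bypasses a model instance.

```python
import pytensor.tensor as pt

from pymc_extras.statespace.core.properties import SymbolicVariable, SymbolicVariableInfo

# Start from an empty registry, as __init__ now does.
registry = SymbolicVariableInfo(symbolic_variables=[])

# make_and_register_variable() builds a placeholder tensor and records it by name.
placeholder = pt.tensor("rho", shape=(1,), dtype="float64")
registry = registry.add(SymbolicVariable(name="rho", symbolic_variable=placeholder))

assert "rho" in registry
assert registry["rho"].symbolic_variable is placeholder
assert registry.to_dict() == {"rho": placeholder}

# Re-registering the same name is rejected up front in make_and_register_variable,
# and adding it directly would also fail: Info.__post_init__ raises on duplicate names.
# Data placeholders follow the same pattern via make_and_register_data() and SymbolicDataInfo.
```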