Skip to content

Commit 16a4d0a

Browse files
committed
Merge branch 'dev' into point-estimation
2 parents e67e91b + 208d0f2 commit 16a4d0a

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

60 files changed

+909
-267
lines changed

.github/workflows/multiversion-docs.yaml

Lines changed: 7 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -3,12 +3,13 @@
33
name: multiversion-docs
44

55
on:
6+
pull_request:
7+
branches:
8+
- dev
69
workflow_dispatch:
7-
# execute this workflow automatically when we push to master or dev
8-
# push:
9-
# branches:
10-
# - master
11-
# - dev
10+
push:
11+
branches:
12+
- dev
1213

1314
jobs:
1415

@@ -51,6 +52,7 @@ jobs:
5152
- name: Commit changes to docs
5253
run: |
5354
cd ./gh-pages-dev
55+
git rm --quiet -rf .
5456
cp -R ../dev/docs/* ./
5557
git config --local user.email ""
5658
git config --local user.name "github-actions"

README.md

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -94,9 +94,9 @@ conda env create --file environment.yaml --name bayesflow
9494
Check out some of our walk-through notebooks below. We are actively working on porting all notebooks to the new interface so more will be available soon!
9595

9696
1. [Linear regression starter example](examples/Linear_Regression_Starter.ipynb)
97-
2. [Two moons starter example](examples/Two_Moons_Starter.ipynb)
98-
3. [SIR model with custom summary network](examples/SIR_Posterior_Estimation.ipynb)
99-
4. [SBML model using an external simulator](examples/From_ABC_to_BayesFlow.ipynb)
97+
2. [From ABC to BayesFlow](examples/From_ABC_to_BayesFlow.ipynb)
98+
3. [Two moons starter example](examples/Two_Moons_Starter.ipynb)
99+
4. [SIR model with custom summary network](examples/SIR_Posterior_Estimation.ipynb)
100100
5. [Hyperparameter optimization](examples/Hyperparameter_Optimization.ipynb)
101101
6. [Bayesian experimental design](examples/Bayesian_Experimental_Design.ipynb)
102102
7. [Simple model comparison example (One-Sample T-Test)](examples/One_Sample_TTest.ipynb)

bayesflow/adapters/__init__.py

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,2 +1,6 @@
11
from . import transforms
22
from .adapter import Adapter
3+
4+
from ..utils._docs import _add_imports_to_all
5+
6+
_add_imports_to_all(include_modules=["transforms"])

bayesflow/adapters/adapter.py

Lines changed: 6 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -233,12 +233,17 @@ def constrain(
233233
lower: int | float | np.ndarray = None,
234234
upper: int | float | np.ndarray = None,
235235
method: str = "default",
236+
inclusive: str = "both",
237+
epsilon: float = 1e-15,
236238
):
237239
if isinstance(keys, str):
238240
keys = [keys]
239241

240242
transform = MapTransform(
241-
transform_map={key: Constrain(lower=lower, upper=upper, method=method) for key in keys}
243+
transform_map={
244+
key: Constrain(lower=lower, upper=upper, method=method, inclusive=inclusive, epsilon=epsilon)
245+
for key in keys
246+
}
242247
)
243248
self.transforms.append(transform)
244249
return self

bayesflow/adapters/transforms/__init__.py

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -16,3 +16,7 @@
1616
from .standardize import Standardize
1717
from .to_array import ToArray
1818
from .transform import Transform
19+
20+
from ...utils._docs import _add_imports_to_all
21+
22+
_add_imports_to_all(include_modules=["transforms"])

bayesflow/adapters/transforms/constrain.py

Lines changed: 42 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -16,7 +16,7 @@
1616
@serializable(package="bayesflow.adapters")
1717
class Constrain(ElementwiseTransform):
1818
"""
19-
Constrains neural network predictions of a data variable to specificied bounds.
19+
Constrains neural network predictions of a data variable to specified bounds.
2020
2121
Parameters:
2222
String containing the name of the data variable to be transformed e.g. "sigma". See examples below.
@@ -28,14 +28,21 @@ class Constrain(ElementwiseTransform):
2828
- Double bounded methods: sigmoid, expit, (default = sigmoid)
2929
- Lower bound only methods: softplus, exp, (default = softplus)
3030
- Upper bound only methods: softplus, exp, (default = softplus)
31-
31+
inclusive: Indicates which bounds are inclusive (or exclusive).
32+
- "both" (default): Both lower and upper bounds are inclusive.
33+
- "lower": Lower bound is inclusive, upper bound is exclusive.
34+
- "upper": Lower bound is exclusive, upper bound is inclusive.
35+
- "none": Both lower and upper bounds are exclusive.
36+
epsilon: Small value to ensure inclusive bounds are not violated.
37+
Current default is 1e-15 as this ensures finite outcomes
38+
with the default transformations applied to data exactly at the boundaries.
3239
3340
3441
Examples:
3542
1) Let sigma be the standard deviation of a normal distribution,
3643
then sigma should always be greater than zero.
3744
38-
Useage:
45+
Usage:
3946
adapter = (
4047
bf.Adapter()
4148
.constrain("sigma", lower=0)
@@ -45,14 +52,19 @@ class Constrain(ElementwiseTransform):
4552
[0,1] then we would constrain the neural network to estimate p in the following way.
4653
4754
Usage:
48-
adapter = (
49-
bf.Adapter()
50-
.constrain("p", lower=0, upper=1, method = "sigmoid")
51-
)
55+
>>> import bayesflow as bf
56+
>>> adapter = bf.Adapter()
57+
>>> adapter.constrain("p", lower=0, upper=1, method="sigmoid", inclusive="both")
5258
"""
5359

5460
def __init__(
55-
self, *, lower: int | float | np.ndarray = None, upper: int | float | np.ndarray = None, method: str = "default"
61+
self,
62+
*,
63+
lower: int | float | np.ndarray = None,
64+
upper: int | float | np.ndarray = None,
65+
method: str = "default",
66+
inclusive: str = "both",
67+
epsilon: float = 1e-15,
5668
):
5769
super().__init__()
5870

@@ -121,12 +133,31 @@ def unconstrain(x):
121133

122134
self.lower = lower
123135
self.upper = upper
124-
125136
self.method = method
137+
self.inclusive = inclusive
138+
self.epsilon = epsilon
126139

127140
self.constrain = constrain
128141
self.unconstrain = unconstrain
129142

143+
# do this last to avoid serialization issues
144+
match inclusive:
145+
case "lower":
146+
if lower is not None:
147+
lower = lower - epsilon
148+
case "upper":
149+
if upper is not None:
150+
upper = upper + epsilon
151+
case True | "both":
152+
if lower is not None:
153+
lower = lower - epsilon
154+
if upper is not None:
155+
upper = upper + epsilon
156+
case False | None | "none":
157+
pass
158+
case other:
159+
raise ValueError(f"Unsupported value for 'inclusive': {other!r}.")
160+
130161
@classmethod
131162
def from_config(cls, config: dict, custom_objects=None) -> "Constrain":
132163
return cls(**config)
@@ -136,6 +167,8 @@ def get_config(self) -> dict:
136167
"lower": self.lower,
137168
"upper": self.upper,
138169
"method": self.method,
170+
"inclusive": self.inclusive,
171+
"epsilon": self.epsilon,
139172
}
140173

141174
def forward(self, data: np.ndarray, **kwargs) -> np.ndarray:

bayesflow/approximators/__init__.py

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -2,3 +2,7 @@
22
from .continuous_approximator import ContinuousApproximator
33
from .point_approximator import PointApproximator
44
from .model_comparison_approximator import ModelComparisonApproximator
5+
6+
from ..utils._docs import _add_imports_to_all
7+
8+
_add_imports_to_all(include_modules=[])

bayesflow/approximators/continuous_approximator.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -184,6 +184,8 @@ def sample(
184184
**kwargs,
185185
) -> dict[str, np.ndarray]:
186186
conditions = self.adapter(conditions, strict=False, stage="inference", **kwargs)
187+
# at inference time, inference_variables are estimated by the networks and thus ignored in conditions
188+
conditions.pop("inference_variables", None)
187189
conditions = keras.tree.map_structure(keras.ops.convert_to_tensor, conditions)
188190
conditions = {"inference_variables": self._sample(num_samples=num_samples, **conditions, **kwargs)}
189191
conditions = keras.tree.map_structure(keras.ops.convert_to_numpy, conditions)

bayesflow/approximators/model_comparison_approximator.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -208,6 +208,8 @@ def predict(
208208
**kwargs,
209209
) -> np.ndarray:
210210
conditions = self.adapter(conditions, strict=False, stage="inference", **kwargs)
211+
# at inference time, model_indices are predicted by the networks and thus ignored in conditions
212+
conditions.pop("model_indices", None)
211213
conditions = keras.tree.map_structure(keras.ops.convert_to_tensor, conditions)
212214

213215
output = self._predict(**conditions, **kwargs)

bayesflow/benchmarks/__init__.py

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,2 +1,6 @@
11
from .simulators import TwoMoons
22
from .simulators import LotkaVolterra
3+
4+
from ..utils._docs import _add_imports_to_all
5+
6+
_add_imports_to_all(include_modules=[])

0 commit comments

Comments
 (0)