
Commit a807aee

clean up asserts -> raise
also make some code backend agnostic
1 parent 818f517 commit a807aee

4 files changed, +27 -15 lines changed

bayesflow/links/ordered_quantiles.py

Lines changed: 11 additions & 10 deletions
@@ -44,16 +44,17 @@ def build(self, input_shape):
         else:
             # choose quantile level closest to median as anchor index
             self.anchor_index = keras.ops.argmin(keras.ops.abs(keras.ops.convert_to_tensor(self.q) - 0.5))
-            msg = (
-                "Length of `q` does not coincide with input shape: "
-                f"len(q)={len(self.q)}, position {self.axis} of shape={input_shape}"
-            )
-            assert num_quantile_levels == len(self.q), msg
 
-            msg = (
-                "The link function `OrderedQuantiles` expects at least 3 quantile levels,"
-                f" but only {num_quantile_levels} were given."
-            )
-            assert self.anchor_index not in (0, -1, num_quantile_levels - 1), msg
+            if len(self.q) != num_quantile_levels:
+                raise RuntimeError(
+                    f"Length of `q` does not coincide with input shape: len(q)={len(self.q)}, "
+                    f"position {self.axis} of shape={input_shape}"
+                )
+
+            if self.anchor_index in [0, -1, num_quantile_levels - 1]:
+                raise RuntimeError(
+                    f"The link function `OrderedQuantiles` expects at least 3 quantile levels, "
+                    f"but only {num_quantile_levels} were given."
+                )
 
         super().build(input_shape)
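
Raising instead of asserting matters because Python strips assert statements under the -O flag, so the validation would silently disappear in optimized runs, and a raise also carries an explicit exception type. A standalone sketch of the same checks, using NumPy only (the helper name check_quantile_levels is made up for illustration and is not part of bayesflow):

import numpy as np

def check_quantile_levels(q, num_quantile_levels, axis, input_shape):
    # Illustrative mirror of the checks added above: the anchor is the quantile
    # level closest to the median, and it must not sit at either end of `q`.
    anchor_index = int(np.argmin(np.abs(np.asarray(q) - 0.5)))
    if len(q) != num_quantile_levels:
        raise RuntimeError(
            f"Length of `q` does not coincide with input shape: len(q)={len(q)}, "
            f"position {axis} of shape={input_shape}"
        )
    if anchor_index in (0, num_quantile_levels - 1):
        raise RuntimeError(
            "The link function `OrderedQuantiles` expects at least 3 quantile levels, "
            f"but only {num_quantile_levels} were given."
        )
    return anchor_index

check_quantile_levels([0.1, 0.5, 0.9], 3, -1, (None, 3))  # returns 1, the index of the median
# check_quantile_levels([0.25, 0.75], 2, -1, (None, 2))   # would raise RuntimeError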

bayesflow/networks/consistency_models/consistency_model.py

Lines changed: 4 additions & 1 deletion
@@ -177,7 +177,10 @@ def build(self, xz_shape, conditions_shape=None):
         # First, we calculate all unique numbers of discretization steps n
         # in a loop, as self.total_steps might be large
         self.max_n = int(self._schedule_discretization(self.total_steps))
-        assert self.max_n == self.s1 + 1
+
+        if self.max_n != self.s1 + 1:
+            raise ValueError("The maximum number of discretization steps must be equal to s1 + 1.")
+
         unique_n = set()
         for step in range(int(self.total_steps)):
             unique_n.add(int(self._schedule_discretization(step)))
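
The replaced assert encodes an internal consistency condition: the discretization schedule must reach exactly s1 + 1 steps at the final training step. A toy sketch of that relationship, assuming a doubling schedule in the spirit of improved consistency training (the schedule_discretization function and the numbers below are illustrative, not bayesflow's implementation):

import math

def schedule_discretization(step, total_steps, s0=10, s1=50):
    # Illustrative doubling schedule: the number of discretization steps grows
    # from s0 + 1 at the start of training to s1 + 1 at the end.
    k_prime = math.floor(total_steps / (math.log2(s1 / s0) + 1))
    return min(s0 * 2 ** math.floor(step / k_prime), s1) + 1

total_steps, s1 = 1000, 50
max_n = int(schedule_discretization(total_steps, total_steps, s1=s1))
if max_n != s1 + 1:
    # The raise keeps an explanatory message and survives `python -O`, unlike the bare assert.
    raise ValueError("The maximum number of discretization steps must be equal to s1 + 1.")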

bayesflow/networks/embeddings/fourier_embedding.py

Lines changed: 4 additions & 1 deletion
@@ -39,7 +39,10 @@ def __init__(
         """
 
         super().__init__(**kwargs)
-        assert embed_dim % 2 == 0, f"Embedding dimension must be even, but is {embed_dim}."
+
+        if embed_dim % 2 != 0:
+            raise ValueError(f"Embedding dimension must be even, but is {embed_dim}.")
+
         self.w = self.add_weight(initializer=initializer, shape=(embed_dim // 2,), trainable=trainable)
         self.scale = scale
         self.embed_dim = embed_dim
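
The even-dimension requirement comes from the Fourier feature construction: half of the embedding holds sine features and half cosine features, so an odd embed_dim cannot be split. A minimal NumPy sketch of that idea (illustrative only, not the FourierEmbedding layer itself; the 2*pi scaling is an assumption here):

import numpy as np

embed_dim = 8                         # must be even: one sine and one cosine per frequency
rng = np.random.default_rng(seed=0)
w = rng.normal(size=embed_dim // 2)   # random frequencies, analogous to the layer's weight vector
t = 0.25                              # e.g. a scalar time input
angles = 2.0 * np.pi * w * t
embedding = np.concatenate([np.sin(angles), np.cos(angles)])
print(embedding.shape)                # (8,) -- an odd embed_dim has no such even split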

bayesflow/scores/multivariate_normal_score.py

Lines changed: 8 additions & 3 deletions
@@ -98,9 +98,14 @@ def sample(self, batch_shape: Shape, mean: Tensor, covariance: Tensor) -> Tensor
             A tensor of shape (batch_size, num_samples, D) containing the generated samples.
         """
         batch_size, num_samples = batch_shape
-        dim = mean.shape[-1]
-        assert mean.shape == (batch_size, dim), "mean must have shape (batch_size, D)"
-        assert covariance.shape == (batch_size, dim, dim), "covariance must have shape (batch_size, D, D)"
+        dim = keras.ops.shape(mean)[-1]
+        if keras.ops.shape(mean) != (batch_size, dim):
+            raise ValueError(f"mean must have shape (batch_size, {dim}), but got {keras.ops.shape(mean)}")
+
+        if keras.ops.shape(covariance) != (batch_size, dim, dim):
+            raise ValueError(
+                f"covariance must have shape (batch_size, {dim}, {dim}), but got {keras.ops.shape(covariance)}"
+            )
 
         # Use Cholesky decomposition to generate samples
         cholesky_factor = keras.ops.cholesky(covariance)
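
Besides swapping assert for raise, this hunk is the backend-agnostic part of the commit: keras.ops.shape works uniformly across the TensorFlow, JAX, and PyTorch backends of Keras 3, whereas relying on the tensor's .shape attribute can behave differently per backend. A small sketch of the Cholesky-based sampling that follows the checks (toy shapes and values; not the MultivariateNormalScore API):

import keras

batch_size, num_samples, dim = 2, 5, 3
mean = keras.ops.zeros((batch_size, dim))
covariance = keras.ops.eye(dim)[None, ...] * keras.ops.ones((batch_size, 1, 1))

# Reparameterization: x = mean + L @ z, where covariance = L @ L^T.
cholesky_factor = keras.ops.cholesky(covariance)             # (batch_size, dim, dim)
z = keras.random.normal((batch_size, num_samples, dim))      # standard normal draws
samples = keras.ops.expand_dims(mean, axis=1) + keras.ops.einsum("bij,bsj->bsi", cholesky_factor, z)
print(keras.ops.shape(samples))                              # (2, 5, 3)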
