bayesflow/experimental/cif/conditional_gaussian.py (2 additions, 2 deletions)
@@ -33,8 +33,8 @@
"""

super().__init__(**keras_kwargs(kwargs))
self.means = MLP(depth=depth, width=width, activation=activation)
self.stds = MLP(depth=depth, width=width, activation=activation)
self.means = MLP([width] * depth, activation=activation)
self.stds = MLP([width] * depth, activation=activation)

[Codecov / codecov/patch check warning on conditional_gaussian.py line 37: added lines #L36-L37 were not covered by tests]
         self.output_projector = keras.layers.Dense(None)

     def build(self, input_shape: Shape) -> None:
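For readers skimming the diff: the change above replaces the removed depth/width keyword arguments with an explicit list of layer widths. A minimal sketch of the new construction pattern, assuming MLP is importable from bayesflow.networks and using illustrative values for depth and width (neither is taken from the diff):

    from bayesflow.networks import MLP  # assumed import path, for illustration only

    depth, width = 2, 256  # example values

    # old style (removed): MLP(depth=depth, width=width, activation="mish")
    # new style (added): pass the per-layer widths explicitly
    means = MLP([width] * depth, activation="mish")
    stds = MLP([width] * depth, activation="mish")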
bayesflow/networks/mlp/mlp.py (2 additions, 22 deletions)
@@ -20,10 +20,8 @@ class MLP(keras.Layer):

     def __init__(
         self,
+        widths: Sequence[int] = (256, 256),
         *,
-        depth: int = None,
-        width: int = None,
-        widths: Sequence[int] = None,
         activation: str = "mish",
         kernel_initializer: str = "he_normal",
         residual: bool = False,
@@ -46,15 +44,8 @@

         Parameters
         ----------
-        depth : int, optional
-            Number of layers in the MLP when `widths` is not explicitly provided. Must be
-            used together with `width`. Default is 2.
-        width : int, optional
-            Number of units per layer when `widths` is not explicitly provided. Must be used
-            together with `depth`. Default is 256.
         widths : Sequence[int], optional
-            Explicitly defines the number of hidden units per layer. If provided, `depth` and
-            `width` should not be specified. Default is None.
+            Defines the number of hidden units per layer, as well as the number of layers to be used.
         activation : str, optional
             Activation function applied in the hidden layers, such as "mish". Default is "mish".
         kernel_initializer : str, optional
@@ -76,17 +67,6 @@

         super().__init__(**keras_kwargs(kwargs))

-        if widths is not None:
-            if depth is not None or width is not None:
-                raise ValueError("Either specify 'widths' or 'depth' and 'width', not both.")
-        else:
-            if depth is None or width is None:
-                # use the default
-                depth = 2
-                width = 256
-
-            widths = [width] * depth
-
         self.res_blocks = []
         for width in widths:
             self.res_blocks.append(
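With the simplified signature, widths is the single argument that controls both the number of hidden layers and their sizes, defaulting to (256, 256). A short usage sketch, assuming MLP is exposed from bayesflow.networks and using only parameters visible in this diff:

    from bayesflow.networks import MLP  # assumed import path

    mlp_default = MLP()                  # two hidden layers of 256 units each (new default)
    mlp_custom = MLP([128, 128, 64])     # three hidden layers: 128, 128, 64 units
    mlp_keyword = MLP(widths=[64, 64], activation="mish", residual=False)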
tests/test_networks/conftest.py (1 addition, 1 deletion)
@@ -22,7 +22,7 @@ def flow_matching():
     from bayesflow.networks import FlowMatching

     return FlowMatching(
-        subnet_kwargs={"widths": None, "width": 64, "depth": 2},
+        subnet_kwargs={"widths": [64, 64]},
         integrate_kwargs={"method": "rk45", "steps": 100},
     )

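Any call site that still forwards the old depth/width style through subnet_kwargs needs the same migration as the fixture above. A hedged before/after sketch mirroring the fixture (arguments other than subnet_kwargs are copied from the test and may not apply elsewhere):

    from bayesflow.networks import FlowMatching

    # before (old style, removed by this PR): subnet_kwargs={"widths": None, "width": 64, "depth": 2}
    # after: an explicit list of per-layer widths
    flow_matching = FlowMatching(
        subnet_kwargs={"widths": [64, 64]},
        integrate_kwargs={"method": "rk45", "steps": 100},
    )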