Commit 862c6dc

fixed error in doc for create_flow
1 parent fb6b80b commit 862c6dc

3 files changed: +29 -8 lines changed


docs/src/example.md

Whitespace-only changes.

src/flows/utils.jl

Lines changed: 3 additions & 3 deletions
@@ -9,7 +9,7 @@ attaching them to the base distribution `q0`.
 
 - `layers`: an iterable of `Bijectors.Bijector` objects that are composed in order
   (left-to-right) via function composition
-  (for instance, if `layers = [l1, l2, l3]`, the flow will be `l3∘l2∘l1(q0)`).
+  (for instance, if `layers = [l1, l2, l3]`, the flow will be `l1∘l2∘l3(q0)`).
 - `q0`: the base distribution (e.g., `MvNormal(zeros(d), I)`).
 
 Returns a `Bijectors.TransformedDistribution` representing the resulting flow.
@@ -18,7 +18,7 @@ Example
 
 using Distributions, Bijectors, LinearAlgebra
 q0 = MvNormal(zeros(2), I)
-flow = create_flow((Bijectors.Scale([1.0, 2.0]), Bijectors.Shift([0.0, 1.0])), q0)
+flow = create_flow((Bijectors.Shift([0.0, 1.0]), Bijectors.Scale([1.0, 2.0])), q0)
 """
 function create_flow(Ls, q₀)
     ts = reduce(∘, Ls)
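
For readers checking the corrected ordering, a minimal usage sketch follows. It assumes, as in the hunk above, that `create_flow` composes the layers via `reduce(∘, Ls)` and wraps `q0` into a `Bijectors.TransformedDistribution`; with `Ls = (Shift, Scale)` the scale is applied first, then the shift.

using Distributions, Bijectors, LinearAlgebra

q0 = MvNormal(zeros(2), I)
# reduce(∘, (Shift, Scale)) == Shift ∘ Scale, so a sample is Shift(Scale(z)) for z ~ q0
flow = create_flow((Bijectors.Shift([0.0, 1.0]), Bijectors.Scale([1.0, 2.0])), q0)
y = rand(flow)        # y == [1.0, 2.0] .* z .+ [0.0, 1.0] for some z ~ q0
logpdf(flow, y)       # log-density of the flow at y
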
@@ -62,7 +62,7 @@ Create a fully connected neural network (FNN).
 - `hidden_dims::AbstractVector{<:Int}`: A vector of integers specifying the dimensions of the hidden layers.
 - `output_dim::Int`: The dimension of the output layer.
 - `inlayer_activation`: The activation function for the hidden layers. Defaults to `Flux.leakyrelu`.
-- `output_activation`: The activation function for the output layer. Defaults to `Flux.tanh`.
+- `output_activation`: The activation function for the output layer. Defaults to `nothing`.
 - `paramtype::Type{T} = Float64`: The type of the parameters in the network, defaults to `Float64`.
 
 # Returns
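
To make the documented defaults concrete, here is a rough Flux sketch of the network they describe. This is not the package's `fnn` implementation; `input_dim` and the exact construction are assumptions not shown in this hunk.

using Flux

input_dim, hidden_dims, output_dim = 2, [16, 16], 2
dims = vcat(input_dim, hidden_dims)
# leakyrelu on every hidden layer (the `inlayer_activation` default) ...
hidden = [Flux.Dense(dims[i] => dims[i + 1], Flux.leakyrelu) for i in 1:(length(dims) - 1)]
# ... and an identity output layer, matching `output_activation = nothing`
net = Flux.f64(Flux.Chain(hidden..., Flux.Dense(dims[end] => output_dim)))  # Float64 params
net(randn(Float64, input_dim, 8))   # 2×8 output for a batch of 8 inputs
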

src/objectives/elbo.jl

Lines changed: 26 additions & 5 deletions
@@ -46,6 +46,29 @@ function elbo(flow::Bijectors.TransformedDistribution, logp, n_samples)
 end
 
 
+"""
+    _batched_elbos(flow, logp, xs)
+
+Batched ELBO estimates that transform a matrix of samples (each column represents a single
+sample) in one call.
+This is more efficient for invertible neural-network flows (RealNVP/NSF) as it leverages
+the batched operation of the neural networks.
+
+Inputs
+- `flow::Bijectors.MultivariateTransformed`
+- `logp`: function returning log-density of target
+- `xs`: column-wise sample batch
+
+Returns
+- a vector of ELBO estimates for each sample in the batch
+"""
+function _batched_elbos(flow::Bijectors.MultivariateTransformed, logp, xs::AbstractMatrix)
+    # requires the flow transformation to be able to handle batched inputs
+    ys, logabsdetjac = with_logabsdet_jacobian(flow.transform, xs)
+    elbos = logp(ys) .- logpdf(flow.dist, xs) .+ logabsdetjac
+    return elbos
+end
+
 """
     elbo_batch(flow, logp, xs)
     elbo_batch([rng, ] flow, logp, n_samples)
@@ -64,14 +87,12 @@ Returns
 - Scalar estimate of the ELBO
 """
 function elbo_batch(flow::Bijectors.MultivariateTransformed, logp, xs::AbstractMatrix)
-    # requires the flow transformation to be able to handle batched inputs
-    ys, logabsdetjac = with_logabsdet_jacobian(flow.transform, xs)
-    elbos = logp(ys) .- logpdf(flow.dist, xs) .+ logabsdetjac
-    return elbos
+    elbos = _batched_elbos(flow, logp, xs)
+    return mean(elbos)
 end
 function elbo_batch(rng::AbstractRNG, flow::Bijectors.MultivariateTransformed, logp, n_samples)
     xs = _device_specific_rand(rng, flow.dist, n_samples)
-    elbos = elbo_batch(flow, logp, xs)
+    elbos = _batched_elbos(flow, logp, xs)
     return mean(elbos)
 end
 elbo_batch(flow::Bijectors.TransformedDistribution, logp, n_samples) =
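
For context, each column x of the batch contributes logp(T(x)) - logpdf(q0, x) + log|det J_T(x)|, and `elbo_batch` returns the mean over columns. Below is a minimal numerical sketch of that computation with the flow replaced by a hand-written affine map and a hypothetical Gaussian target, so the batched Jacobian term is explicit; it mirrors what `_batched_elbos` computes rather than calling the package code.

using Distributions, LinearAlgebra, Statistics

s, b = [1.0, 2.0], [0.0, 1.0]                 # stand-in affine "flow": y = s .* x .+ b
q0 = MvNormal(zeros(2), I)
logp(ys) = -0.5 .* vec(sum(abs2, ys .- 1.0; dims=1)) .- log(2π)   # hypothetical target

xs = rand(q0, 32)                              # 2×32: each column is one sample from q0
ys = s .* xs .+ b                              # batched forward pass
logabsdetjac = fill(sum(log.(abs.(s))), size(xs, 2))              # per-column log|det J|
elbos = logp(ys) .- logpdf(q0, xs) .+ logabsdetjac                # per-sample ELBOs
mean(elbos)                                    # the scalar estimate elbo_batch returns
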
