2 changes: 1 addition & 1 deletion pymc/pytensorf.py
@@ -979,7 +979,7 @@ def constant_fold(
"""
fg = FunctionGraph(outputs=xs, features=[ShapeFeature()], copy_inputs=False, clone=True)

# The default rewrite_graph includes a constand_folding that is not always applied.
# The default rewrite_graph includes a constant_folding that is not always applied.
# We use an unconditional constant_folding as the last pass to ensure a thorough constant folding.
rewrite_graph(fg)
topo_unconditional_constant_folding.apply(fg)
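
For context, a short hedged illustration of the helper this comment documents (a sketch with illustrative variables, not part of the PR): pymc.pytensorf.constant_fold reduces symbolic expressions to constants where possible; with the default raise_not_constant=True it raises if anything stays symbolic, while raise_not_constant=False returns the still-symbolic variable instead, which is the mode the opvi change below relies on.

# Sketch only; assumes the constant_fold signature shown in the hunk above.
import pytensor.tensor as pt

from pymc.pytensorf import constant_fold

x = pt.ones((3, 4))
(n_elem,) = constant_fold([x.size])  # folds to array(12)

v = pt.vector("v")  # shape unknown at graph-construction time
(maybe_size,) = constant_fold([v.size], raise_not_constant=False)  # stays a symbolic variable
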
3 changes: 3 additions & 0 deletions pymc/variational/minibatch_rv.py
@@ -40,6 +40,9 @@ def make_node(self, rv, *total_size):
out = rv.type()
return Apply(self, [rv, *total_size], [out])

def infer_shape(self, fgraph, node, shapes):
return [shapes[0]]

def perform(self, node, inputs, output_storage):
output_storage[0][0] = inputs[0]

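
For readers unfamiliar with the Op.infer_shape contract the new method follows, here is a minimal sketch (a toy Op, not part of the PR): infer_shape receives the symbolic shapes of the inputs and returns one shape per output, so PyTensor's shape machinery can answer shape queries without ever running perform. Returning shapes[0] states that the output has exactly the shape of the first input, which is what lets shape-only graphs drop the MinibatchRandomVariable itself.

# Toy example only; the class name and behaviour are illustrative assumptions.
import pytensor.tensor as pt
from pytensor.graph.basic import Apply
from pytensor.graph.op import Op


class PassThrough(Op):
    """A toy Op whose output is its input, mirroring the shape contract above."""

    def make_node(self, x):
        x = pt.as_tensor(x)
        return Apply(self, [x], [x.type()])

    def perform(self, node, inputs, output_storage):
        output_storage[0][0] = inputs[0]

    def infer_shape(self, fgraph, node, shapes):
        # shapes[0] is the symbolic shape of the first input; the output
        # has exactly that shape, so the Op never needs to be evaluated.
        return [shapes[0]]
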
11 changes: 9 additions & 2 deletions pymc/variational/opvi.py
@@ -74,6 +74,7 @@
from pymc.pytensorf import (
SeedSequenceSeed,
compile,
constant_fold,
find_rng_nodes,
reseed_rngs,
)
@@ -1105,7 +1106,10 @@ def symbolic_normalizing_constant(self):
t = self.to_flat_input(
pt.max(
[
get_scaling(v.owner.inputs[1:], v.shape)
get_scaling(
v.owner.inputs[1:],
constant_fold([v.owner.inputs[0].shape], raise_not_constant=False),
)
for v in self.group
if isinstance(v.owner.op, MinibatchRandomVariable)
]
@@ -1272,7 +1276,10 @@ def symbolic_normalizing_constant(self):
t = pt.max(
self.collect("symbolic_normalizing_constant")
+ [
get_scaling(obs.owner.inputs[1:], obs.shape)
get_scaling(
obs.owner.inputs[1:],
constant_fold([obs.owner.inputs[0].shape], raise_not_constant=False),
)
for obs in self.model.observed_RVs
if isinstance(obs.owner.op, MinibatchRandomVariable)
]
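
The intent of the two call-site changes above, sketched with illustrative variables (assuming only constant_fold as imported in this file; `underlying` stands in for v.owner.inputs[0] and is not from the PR): taking .shape of the minibatch variable keeps random variables in the scaling graph, whereas constant-folding the shape of the underlying input yields a plain constant, so the normalizing constant can be built without any RandomVariable nodes.

# Hedged sketch, not part of the PR.
import pytensor.tensor as pt

from pymc.pytensorf import constant_fold

underlying = pt.random.normal(size=(128,))
folded = constant_fold([underlying.shape], raise_not_constant=False)
print(folded)  # (array([128]),) when the shape folds to a constant; no RandomVariable left to evaluate
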
16 changes: 16 additions & 0 deletions tests/variational/test_opvi.py
@@ -20,6 +20,7 @@

import pymc as pm

from pymc.testing import assert_no_rvs
from pymc.variational import opvi
from pymc.variational.approximations import (
Empirical,
@@ -278,3 +279,18 @@ def test_logq_globals(three_var_approx):
es = symbolic_logq.eval()
assert e.shape == ()
assert es.shape == (2,)


def test_symbolic_normalizing_constant_no_rvs():
# Test that RVs aren't included in the graph of symbolic_normalizing_constant
rng = np.random.default_rng()

with pm.Model() as m:
obs = pm.Data("obs", rng.normal(size=(1000,)))
obs_batch = pm.Minibatch(obs, batch_size=128)
x = pm.Normal("x") # Need at least one Free_RV in the graph
y_hat = pm.Flat("y_hat", observed=obs_batch, total_size=1000)

step = pm.ADVI()

assert_no_rvs(step.approx.symbolic_normalizing_constant)