Skip to content

Commit 343a6f5

Browse files
ricardoV94 authored and twiecki committed
Use more wide default init_dist in GaussianRandomWalk and AR and raise UserWarning when not explicitly defined
1 parent 12039e0 commit 343a6f5

File tree

3 files changed

+41
-12
lines changed

3 files changed

+41
-12
lines changed

pymc/distributions/timeseries.py

Lines changed: 17 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -227,7 +227,6 @@ class GaussianRandomWalk(distribution.Continuous):
227227
sigma > 0, innovation standard deviation, defaults to 1.0
228228
init_dist : unnamed distribution
229229
Univariate distribution of the initial value, created with the `.dist()` API.
230-
Defaults to a unit Normal.
231230
232231
.. warning:: init will be cloned, rendering them independent of the ones passed as input.
233232
@@ -274,7 +273,12 @@ def dist(
274273

275274
# If no scalar distribution is passed then initialize with a Normal of same mu and sigma
276275
if init_dist is None:
277-
init_dist = Normal.dist(0, 1)
276+
warnings.warn(
277+
"Initial distribution not specified, defaulting to `Normal.dist(0, 100)`."
278+
"You can specify an init_dist manually to suppress this warning.",
279+
UserWarning,
280+
)
281+
init_dist = Normal.dist(0, 100)
278282
else:
279283
if not (
280284
isinstance(init_dist, at.TensorVariable)
@@ -369,10 +373,10 @@ class AR(SymbolicDistribution):
369373
constant: bool, optional
370374
Whether the first element of rho should be used as a constant term in the AR
371375
process. Defaults to False
372-
init_dist: unnamed distribution, optional
373-
Scalar or vector distribution for initial values. Defaults to a unit Normal.
374-
Distribution should be created via the `.dist()` API, and have dimension
375-
(*size, ar_order). If not, it will be automatically resized.
376+
init_dist: unnamed distribution
377+
Scalar or vector distribution for initial values. Distribution should be
378+
created via the `.dist()` API, and have shape (*shape[:-1], ar_order). If not,
379+
it will be automatically resized.
376380
377381
.. warning:: init_dist will be cloned, rendering it independent of the one passed as input.
378382
@@ -461,7 +465,13 @@ def dist(
461465
f"got ndim_supp={init_dist.owner.op.ndim_supp}.",
462466
)
463467
else:
464-
init_dist = Normal.dist(0, 1, size=(*sigma.shape, ar_order))
468+
warnings.warn(
469+
"Initial distribution not specified, defaulting to "
470+
"`Normal.dist(0, 100, shape=...)`. You can specify an init_dist "
471+
"manually to suppress this warning.",
472+
UserWarning,
473+
)
474+
init_dist = Normal.dist(0, 100, shape=(*sigma.shape, ar_order))
465475

466476
# Tell Aeppl to ignore init_dist, as it will be accounted for in the logp term
467477
init_dist = ignore_logprob(init_dist)

pymc/tests/test_distributions.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2610,7 +2610,7 @@ def test_gaussianrandomwalk(self):
26102610
def ref_logp(value, mu, sigma, steps):
26112611
# Relying on fact that init will be normal by default
26122612
return (
2613-
scipy.stats.norm.logpdf(value[0])
2613+
scipy.stats.norm.logpdf(value[0], 0, 100) # default init_dist has a scale 100
26142614
+ scipy.stats.norm.logpdf(np.diff(value), mu, sigma).sum()
26152615
)
26162616

pymc/tests/test_distributions_timeseries.py

Lines changed: 23 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -101,7 +101,12 @@ class TestGaussianRandomWalkRandom(BaseTestDistributionRandom):
101101

102102
pymc_dist = pm.GaussianRandomWalk
103103
pymc_dist_params = {"mu": 1.0, "sigma": 2, "init_dist": pm.Constant.dist(0), "steps": 4}
104-
expected_rv_op_params = {"mu": 1.0, "sigma": 2, "init_dist": pm.Constant.dist(0), "steps": 4}
104+
expected_rv_op_params = {
105+
"mu": 1.0,
106+
"sigma": 2,
107+
"init_dist": pm.Constant.dist(0),
108+
"steps": 4,
109+
}
105110

106111
checks_to_run = [
107112
"check_pymc_params_match_rv_op",
@@ -310,11 +315,25 @@ def test_batched_rhos(self):
310315
y_tp = np.random.randn(batch_size, steps)
311316
with Model() as t0:
312317
beta = Normal("beta", 0.0, 1.0, shape=(batch_size, ar_order), initval=beta_tp)
313-
AR("y", beta, sigma=1.0, shape=(batch_size, steps), initval=y_tp)
318+
AR(
319+
"y",
320+
beta,
321+
sigma=1.0,
322+
init_dist=Normal.dist(0, 1),
323+
shape=(batch_size, steps),
324+
initval=y_tp,
325+
)
314326
with Model() as t1:
315327
beta = Normal("beta", 0.0, 1.0, shape=(batch_size, ar_order), initval=beta_tp)
316328
for i in range(batch_size):
317-
AR(f"y_{i}", beta[i], sigma=1.0, shape=steps, initval=y_tp[i])
329+
AR(
330+
f"y_{i}",
331+
beta[i],
332+
init_dist=Normal.dist(0, 1),
333+
sigma=1.0,
334+
shape=steps,
335+
initval=y_tp[i],
336+
)
318337

319338
np.testing.assert_allclose(
320339
t0.compile_logp()(t0.initial_point()),
@@ -379,7 +398,7 @@ def test_batched_init_dist(self):
379398
beta_tp = aesara.shared(np.random.randn(ar_order), shape=(3,))
380399
y_tp = np.random.randn(batch_size, steps)
381400
with Model() as t0:
382-
init_dist = Normal.dist(0.0, 1.0, size=(batch_size, ar_order))
401+
init_dist = Normal.dist(0.0, 100.0, size=(batch_size, ar_order))
383402
AR("y", beta_tp, sigma=0.01, init_dist=init_dist, steps=steps, initval=y_tp)
384403
with Model() as t1:
385404
for i in range(batch_size):

0 commit comments

Comments
 (0)