diff --git a/pymc/distributions/continuous.py b/pymc/distributions/continuous.py
index 7a0d044cc..d5e69fc79 100644
--- a/pymc/distributions/continuous.py
+++ b/pymc/distributions/continuous.py
@@ -1373,13 +1373,12 @@ class Exponential(PositiveContinuous):
     rv_op = exponential
 
     @classmethod
-    def dist(cls, lam=None, scale=None, *args, **kwargs):
-        if lam is not None and scale is not None:
+    def dist(cls, lam=None, *, scale=None, **kwargs):
+        if lam is None and scale is None:
+            scale = 1.0
+        elif lam is not None and scale is not None:
             raise ValueError("Incompatible parametrization. Can't specify both lam and scale.")
-        elif lam is None and scale is None:
-            raise ValueError("Incompatible parametrization. Must specify either lam or scale.")
-
-        if scale is None:
+        elif lam is not None:
             scale = pt.reciprocal(lam)
 
         scale = pt.as_tensor_variable(scale)
diff --git a/pymc/step_methods/hmc/nuts.py b/pymc/step_methods/hmc/nuts.py
index cc29e0334..770605f4b 100644
--- a/pymc/step_methods/hmc/nuts.py
+++ b/pymc/step_methods/hmc/nuts.py
@@ -242,24 +242,24 @@ def competence(var, has_grad):
 
 class _Tree:
     __slots__ = (
-        "ndim",
-        "integrator",
-        "start",
-        "step_size",
         "Emax",
-        "start_energy",
-        "rng",
-        "left",
-        "right",
-        "proposal",
         "depth",
-        "log_size",
+        "floatX",
+        "integrator",
+        "left",
         "log_accept_sum",
+        "log_size",
+        "max_energy_change",
         "mean_tree_accept",
         "n_proposals",
+        "ndim",
         "p_sum",
-        "max_energy_change",
-        "floatX",
+        "proposal",
+        "right",
+        "rng",
+        "start",
+        "start_energy",
+        "step_size",
     )
 
     def __init__(
diff --git a/tests/distributions/test_continuous.py b/tests/distributions/test_continuous.py
index 41504816a..2864335e3 100644
--- a/tests/distributions/test_continuous.py
+++ b/tests/distributions/test_continuous.py
@@ -461,15 +461,6 @@ def test_exponential(self):
             lambda q, lam: st.expon.ppf(q, loc=0, scale=1 / lam),
         )
 
-    def test_exponential_wrong_arguments(self):
-        msg = "Incompatible parametrization. Can't specify both lam and scale"
-        with pytest.raises(ValueError, match=msg):
-            pm.Exponential.dist(lam=0.5, scale=5)
-
-        msg = "Incompatible parametrization. Must specify either lam or scale"
-        with pytest.raises(ValueError, match=msg):
-            pm.Exponential.dist()
-
     def test_laplace(self):
         check_logp(
             pm.Laplace,
@@ -2274,8 +2265,20 @@ class TestExponential(BaseTestDistributionRandom):
     checks_to_run = [
         "check_pymc_params_match_rv_op",
         "check_pymc_draws_match_reference",
+        "check_both_lam_scale_raises",
+        "check_default_scale",
     ]
 
+    def check_both_lam_scale_raises(self):
+        msg = "Incompatible parametrization. Can't specify both lam and scale"
+        with pytest.raises(ValueError, match=msg):
+            pm.Exponential.dist(lam=0.5, scale=5)
+
+    def check_default_scale(self):
+        rv = self.pymc_dist.dist()
+        [scale] = rv.owner.op.dist_params(rv.owner)
+        assert scale.data == 1.0
+
 
 class TestExponentialScale(BaseTestDistributionRandom):
     pymc_dist = pm.Exponential
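
For reference, a minimal usage sketch of the Exponential behavior the continuous.py hunk introduces (variable names here are illustrative; assumes a PyMC build containing this change):

    import pymc as pm

    # With the patch, omitting both parameters defaults to scale = 1.0
    # (previously this raised "Must specify either lam or scale").
    rv_default = pm.Exponential.dist()

    # The two parametrizations remain equivalent: lam maps to scale = 1 / lam.
    rv_lam = pm.Exponential.dist(lam=2.0)
    rv_scale = pm.Exponential.dist(scale=0.5)

    # Supplying both is still rejected, as the retained test asserts.
    try:
        pm.Exponential.dist(lam=2.0, scale=0.5)
    except ValueError as err:
        print(err)  # Incompatible parametrization. Can't specify both lam and scale.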