
Commit 33fef97

Remove numdifftools
1 parent ff7f047 · commit 33fef97

4 files changed: +5 -73 lines


pymc3/tests/test_distributions.py

Lines changed: 3 additions & 36 deletions
@@ -471,37 +471,9 @@ def check_int_to_1(self, model, value, domain, paramdomains):
         area = integrate_nd(pdfx, domain, value.dshape, value.dtype)
         assert_almost_equal(area, 1, err_msg=str(pt))
 
-    def check_dlogp(self, model, value, domain, paramdomains):
-        try:
-            from numdifftools import Gradient
-        except ImportError:
-            return
-        if not model.cont_vars:
-            return
-
-        domains = paramdomains.copy()
-        domains['value'] = domain
-        bij = DictToArrayBijection(
-            ArrayOrdering(model.cont_vars), model.test_point)
-        dlogp = bij.mapf(model.fastdlogp(model.cont_vars))
-        logp = bij.mapf(model.fastlogp)
-
-        def wrapped_logp(x):
-            try:
-                return logp(x)
-            except:
-                return np.nan
-
-        ndlogp = Gradient(wrapped_logp)
-        for pt in product(domains, n_samples=100):
-            pt = Point(pt, model=model)
-            pt = bij.map(pt)
-            decimals = select_by_precision(float64=6, float32=4)
-            assert_almost_equal(dlogp(pt), ndlogp(pt), decimal=decimals, err_msg=str(pt))
-
     def checkd(self, distfam, valuedomain, vardomains, checks=None, extra_args=None):
         if checks is None:
-            checks = (self.check_int_to_1, self.check_dlogp)
+            checks = (self.check_int_to_1, )
 
         if extra_args is None:
             extra_args = {}
@@ -940,7 +912,8 @@ def test_wishart(self, n):
         # This check compares the autodiff gradient to the numdiff gradient.
         # However, due to the strict constraints of the wishart,
         # it is impossible to numerically determine the gradient as a small
-        # pertubation breaks the symmetry. Thus disabling.
+        # pertubation breaks the symmetry. Thus disabling. Also, numdifftools was
+        # removed in June 2019, so an alternative would be needed.
         #
         # self.checkd(Wishart, PdMatrix(n), {'n': Domain([2, 3, 4, 2000]), 'V': PdMatrix(n)},
         #             checks=[self.check_dlogp])
@@ -1120,12 +1093,6 @@ def logp(x):
             return -log(2 * .5) - abs(x - .5) / .5
         self.checkd(DensityDist, R, {}, extra_args={'logp': logp})
 
-    def test_addpotential(self):
-        with Model() as model:
-            value = Normal('value', 1, 1)
-            Potential('value_squared', -value ** 2)
-        self.check_dlogp(model, value, R, {})
-
     def test_get_tau_sigma(self):
         sigma = np.array([2])
         assert_almost_equal(continuous.get_tau_sigma(sigma=sigma), [1. / sigma**2, sigma])
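The deleted check_dlogp compared each distribution's autodiff gradient against a numerical estimate from numdifftools.Gradient. If a replacement is ever wanted (as the updated Wishart comment notes), the same check can be built on a plain central-difference estimate. Below is a minimal, self-contained sketch of that idea; finite_difference_grad and the toy logp/dlogp pair are illustrative names, not pymc3 API or part of this commit:

```python
import numpy as np

def finite_difference_grad(f, x, eps=1e-6):
    """Central-difference estimate of the gradient of a scalar function f at x."""
    x = np.asarray(x, dtype=float)
    grad = np.empty_like(x)
    for i in range(x.size):
        step = np.zeros_like(x)
        step[i] = eps
        grad[i] = (f(x + step) - f(x - step)) / (2 * eps)
    return grad

# Toy check in the spirit of check_dlogp: compare an analytic gradient
# against the numerical estimate at a test point.
logp = lambda x: -0.5 * np.sum(x ** 2)  # standard-normal logp, up to a constant
dlogp = lambda x: -x                    # its analytic gradient
x0 = np.array([0.3, -1.2])
np.testing.assert_almost_equal(dlogp(x0), finite_difference_grad(logp, x0), decimal=6)
```

Central differences trade truncation error (on the order of eps**2) against floating-point round-off (on the order of machine epsilon divided by eps), so an eps near 1e-6 keeps both small in float64.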

pymc3/tuning/__init__.py

Lines changed: 1 addition & 1 deletion
@@ -1,2 +1,2 @@
 from .starting import find_MAP
-from .scaling import approx_hessian, find_hessian, trace_cov, guess_scaling
+from .scaling import find_hessian, trace_cov, guess_scaling

pymc3/tuning/scaling.py

Lines changed: 1 addition & 35 deletions
@@ -4,41 +4,7 @@
 from ..theanof import hessian_diag, inputvars
 from ..blocking import DictToArrayBijection, ArrayOrdering
 
-__all__ = ['approx_hessian', 'find_hessian', 'trace_cov', 'guess_scaling']
-
-
-def approx_hessian(point, vars=None, model=None):
-    """
-    Returns an approximation of the Hessian at the current chain location.
-
-    Parameters
-    ----------
-    model : Model (optional if in `with` context)
-    point : dict
-    vars : list
-        Variables for which Hessian is to be calculated.
-    """
-    from numdifftools import Jacobian
-
-    model = modelcontext(model)
-    if vars is None:
-        vars = model.cont_vars
-    vars = inputvars(vars)
-
-    point = Point(point, model=model)
-
-    bij = DictToArrayBijection(ArrayOrdering(vars), point)
-    dlogp = bij.mapf(model.fastdlogp(vars))
-
-    def grad_logp(point):
-        return np.nan_to_num(dlogp(point))
-
-    '''
-    Find the jacobian of the gradient function at the current position
-    this should be the Hessian; invert it to find the approximate
-    covariance matrix.
-    '''
-    return -Jacobian(grad_logp)(bij.map(point))
+__all__ = ['find_hessian', 'trace_cov', 'guess_scaling']
 
 
 def fixed_hessian(point, vars=None, model=None):
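The removed approx_hessian leaned on numdifftools.Jacobian for the reasoning preserved in its docstring: the Jacobian of the gradient is the Hessian, and negating and inverting it yields an approximate covariance matrix. A numpy-only sketch of the same idea follows, assuming a gradient callable such as the bij.mapf(model.fastdlogp(vars)) closure from the deleted code; approx_hessian_fd is a hypothetical name, not pymc3 API:

```python
import numpy as np

def approx_hessian_fd(grad, x, eps=1e-5):
    """Estimate the Jacobian of `grad` at x by central differences.

    For grad = dlogp, this Jacobian is the Hessian of logp; negating and
    inverting it gives an approximate covariance matrix, as the removed
    approx_hessian did via numdifftools.Jacobian.
    """
    x = np.asarray(x, dtype=float)
    n = x.size
    hess = np.empty((n, n))
    for i in range(n):
        step = np.zeros(n)
        step[i] = eps
        hess[:, i] = (grad(x + step) - grad(x - step)) / (2 * eps)
    return 0.5 * (hess + hess.T)  # symmetrize away finite-difference noise

# Toy check: for a standard normal, dlogp(x) = -x, so the Hessian is -I
# and -H (the precision matrix) is the identity.
dlogp = lambda x: -x
H = approx_hessian_fd(dlogp, np.zeros(3))
print(np.round(-H, 6))  # ~ np.eye(3); np.linalg.inv(-H) would be the covariance
```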

requirements-dev.txt

Lines changed: 0 additions & 1 deletion
@@ -7,7 +7,6 @@ Keras>=2.0.8
 nbsphinx>=0.2.13
 nose>=1.3.7
 nose-parameterized==0.6.0
-numdifftools>=0.9.20
 numpy>=1.13.0
 numpydoc==0.7.0
 pycodestyle>=2.3.1
