Skip to content

Commit 4deacac

Browse files
sudarsan2k5 and ricardoV94
authored and committed
Removed CholeskyGrad Op
1 parent ab13fe0 commit 4deacac

File tree

2 files changed

+1
-74
lines changed

2 files changed

+1
-74
lines changed

pytensor/tensor/slinalg.py

Lines changed: 0 additions & 67 deletions
Original file line number · Diff line number · Diff line change
@@ -129,73 +129,6 @@ def conjugate_solve_triangular(outer, inner):
129129
cholesky = Cholesky()
130130

131131

132-
class CholeskyGrad(Op):
133-
""""""
134-
135-
__props__ = ("lower", "destructive")
136-
137-
def __init__(self, lower=True):
138-
self.lower = lower
139-
self.destructive = False
140-
141-
def make_node(self, x, l, dz):
142-
x = as_tensor_variable(x)
143-
l = as_tensor_variable(l)
144-
dz = as_tensor_variable(dz)
145-
assert x.ndim == 2
146-
assert l.ndim == 2
147-
assert dz.ndim == 2
148-
assert (
149-
l.owner.op.lower == self.lower
150-
), "lower/upper mismatch between Cholesky op and CholeskyGrad op"
151-
return Apply(self, [x, l, dz], [x.type()])
152-
153-
def perform(self, node, inputs, outputs):
154-
"""
155-
Implements the "reverse-mode" gradient [#]_ for the
156-
Cholesky factorization of a positive-definite matrix.
157-
158-
References
159-
----------
160-
.. [#] S. P. Smith. "Differentiation of the Cholesky Algorithm".
161-
Journal of Computational and Graphical Statistics,
162-
Vol. 4, No. 2 (Jun.,1995), pp. 134-147
163-
http://www.jstor.org/stable/1390762
164-
165-
"""
166-
x = inputs[0]
167-
L = inputs[1]
168-
dz = inputs[2]
169-
dx = outputs[0]
170-
N = x.shape[0]
171-
if self.lower:
172-
F = np.tril(dz)
173-
for k in range(N - 1, -1, -1):
174-
for j in range(k + 1, N):
175-
for i in range(j, N):
176-
F[i, k] -= F[i, j] * L[j, k]
177-
F[j, k] -= F[i, j] * L[i, k]
178-
for j in range(k + 1, N):
179-
F[j, k] /= L[k, k]
180-
F[k, k] -= L[j, k] * F[j, k]
181-
F[k, k] /= 2 * L[k, k]
182-
else:
183-
F = np.triu(dz)
184-
for k in range(N - 1, -1, -1):
185-
for j in range(k + 1, N):
186-
for i in range(j, N):
187-
F[k, i] -= F[j, i] * L[k, j]
188-
F[k, j] -= F[j, i] * L[k, i]
189-
for j in range(k + 1, N):
190-
F[k, j] /= L[k, k]
191-
F[k, k] -= L[k, j] * F[k, j]
192-
F[k, k] /= 2 * L[k, k]
193-
dx[0] = F
194-
195-
def infer_shape(self, fgraph, node, shapes):
196-
return [shapes[0]]
197-
198-
199132
class CholeskySolve(Op):
200133

201134
__props__ = ("lower", "check_finite")

tests/tensor/test_slinalg.py

Lines changed: 1 addition & 7 deletions
Original file line number · Diff line number · Diff line change
@@ -11,7 +11,6 @@
1111
from pytensor.configdefaults import config
1212
from pytensor.tensor.slinalg import (
1313
Cholesky,
14-
CholeskyGrad,
1514
CholeskySolve,
1615
Solve,
1716
SolveBase,
@@ -122,22 +121,17 @@ def test_cholesky_grad_indef():
122121

123122

124123
@pytest.mark.slow
125-
def test_cholesky_and_cholesky_grad_shape():
124+
def test_cholesky_shape():
126125
rng = np.random.default_rng(utt.fetch_seed())
127126
x = matrix()
128127
for l in (cholesky(x), Cholesky(lower=True)(x), Cholesky(lower=False)(x)):
129128
f_chol = pytensor.function([x], l.shape)
130-
g = pytensor.gradient.grad(l.sum(), x)
131-
f_cholgrad = pytensor.function([x], g.shape)
132129
topo_chol = f_chol.maker.fgraph.toposort()
133-
topo_cholgrad = f_cholgrad.maker.fgraph.toposort()
134130
if config.mode != "FAST_COMPILE":
135131
assert sum(node.op.__class__ == Cholesky for node in topo_chol) == 0
136-
assert sum(node.op.__class__ == CholeskyGrad for node in topo_cholgrad) == 0
137132
for shp in [2, 3, 5]:
138133
m = np.cov(rng.standard_normal((shp, shp + 10))).astype(config.floatX)
139134
np.testing.assert_equal(f_chol(m), (shp, shp))
140-
np.testing.assert_equal(f_cholgrad(m), (shp, shp))
141135

142136

143137
def test_eigvalsh():

0 commit comments

Comments (0)