     from aesara.graph.basic import Constant, Variable
     from aesara.graph.fg import MissingInputError
     from aesara.graph.op import Op
+    from aesara.gradient import grad_not_implemented
 except ModuleNotFoundError:
     import theano
     import theano.tensor as aet
+    from theano.gradient import grad_not_implemented
     if hasattr(theano, "gof"):
         from theano.gof.fg import MissingInputError
         from theano.gof.var import Constant, Variable
@@ -218,7 +220,7 @@ def grad(self, inputs, g):
         return [
             aet.zeros_like(inputs[0]),
             aet.sum(g[:, None, :] * sens, (0, -1)),
-            aesara.gradient.grad_not_implemented(self, 2, inputs[-1])
+            grad_not_implemented(self, 2, inputs[-1])
         ]


@@ -257,7 +259,7 @@ def grad(self, inputs, g):
         y0, params, params_fixed = inputs
         backward = SolveODEAdjointBackward(self._solver, self._t0, self._tvals)
         lamda, gradient = backward(y0, params, params_fixed, g)
-        return [-lamda, gradient, aesara.gradient.grad_not_implemented(self, 2, params_fixed)]
+        return [-lamda, gradient, grad_not_implemented(self, 2, params_fixed)]


 class SolveODEAdjointBackward(Op):
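
For reference, the change relies on the try/except import fallback already at the top of the file: `grad_not_implemented` is bound to a single module-level name under either backend, so the `grad` methods above can call it without the `aesara.gradient.` prefix, which presumably would fail with a NameError when the Theano fallback is in use. A minimal sketch of that pattern, assuming one of Aesara or Theano is installed:

# Sketch: resolve grad_not_implemented once at import time so downstream
# Op.grad implementations work under either Aesara or Theano.
try:
    import aesara.tensor as aet
    from aesara.gradient import grad_not_implemented
except ModuleNotFoundError:
    import theano.tensor as aet
    from theano.gradient import grad_not_implemented

# An Op's grad() can then mark an input as having no implemented gradient:
#     return [..., grad_not_implemented(self, 2, params_fixed)]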