@@ -523,3 +523,31 @@ def L_op(self, inputs, outputs, output_gradients):
523523 _grad_wrt_num_theta = pt .grad (x_star , num_theta , disconnected_inputs = "raise" )
524524 # np.testing.assert_allclose(grad_wrt_num_theta.eval({x: np.pi, num_theta: np.e, str_theta: ":)"}), -1)
525525 # np.testing.assert_allclose(grad_wrt_num_theta.eval({x: np.pi, num_theta: np.e, str_theta: ":("}), 1)
526+
527+
def test_vectorize_root_gradients():
    """Regression test for https://github.com/pymc-devs/pytensor/issues/1586"""
    # Scalar symbolic inputs: `a` is the free parameter, (x, y) the unknowns.
    a, x, y = pt.dscalars("a", "x", "y")

    # Nonlinear system whose root implicitly defines (x*, y*) as functions of `a`.
    residuals = pt.stack([a * x**2 - y - 1, x - a * y**2 + 1])

    (x_star, y_star), _ = pt.optimize.root(
        equations=residuals,
        variables=[x, y],
        method="hybr",
        optimizer_kwargs={"tol": 1e-8},
    )
    solution = pt.stack([x_star, y_star])

    # Implicit-function gradient of the summed solution w.r.t. the parameter.
    a_grad = pt.grad(solution.sum(), a)

    # Vectorize both the solution and its gradient over a matrix of parameters.
    a_grid = pt.dmatrix("a_grid")
    solution_grid, a_grad_grid = pytensor.graph.vectorize_graph(
        [solution, a_grad], {a: a_grid}
    )

    # Compiling used to fail when vectorizing root gradients (gh-1586);
    # the test passes if function construction succeeds.
    _ = pytensor.function(
        [a_grid, x, y],
        [solution_grid, a_grad_grid],
        on_unused_input="ignore",
    )
0 commit comments