@@ -3,6 +3,7 @@ module OptimizationReverseDiffExt
 import Optimization
 import Optimization.SciMLBase: OptimizationFunction
 import Optimization.ADTypes: AutoReverseDiff
+# using SparseDiffTools, Symbolics
 isdefined(Base, :get_extension) ? (using ReverseDiff, ReverseDiff.ForwardDiff) :
 (using ..ReverseDiff, ..ReverseDiff.ForwardDiff)
 
@@ -20,9 +21,7 @@ function Optimization.instantiate_function(f, x, adtype::AutoReverseDiff,
 
     if f.hess === nothing
         hess = function (res, θ, args...)
-            res .= ForwardDiff.jacobian(θ) do θ
-                ReverseDiff.gradient(x -> _f(x, args...), θ)
-            end
+            ReverseDiff.hessian!(res, x -> _f(x, args...), θ)
         end
     else
        hess = (H, θ, args...) -> f.hess(H, θ, p, args...)
@@ -59,9 +58,7 @@ function Optimization.instantiate_function(f, x, adtype::AutoReverseDiff,
         fncs = [(x) -> cons_oop(x)[i] for i in 1:num_cons]
         cons_h = function (res, θ)
             for i in 1:num_cons
-                res[i] .= ForwardDiff.jacobian(θ) do θ
-                    ReverseDiff.gradient(fncs[i], θ)
-                end
+                ReverseDiff.hessian!(res[i], fncs[i], θ)
             end
         end
     else
@@ -86,17 +83,14 @@ function Optimization.instantiate_function(f, cache::Optimization.ReInitCache,
     _f = (θ, args...) -> first(f.f(θ, cache.p, args...))
 
     if f.grad === nothing
-        cfg = ReverseDiff.GradientConfig(cache.u0)
         grad = (res, θ, args...) -> ReverseDiff.gradient!(res, x -> _f(x, args...), θ)
     else
         grad = (G, θ, args...) -> f.grad(G, θ, cache.p, args...)
     end
 
     if f.hess === nothing
         hess = function (res, θ, args...)
-            res .= ForwardDiff.jacobian(θ) do θ
-                ReverseDiff.gradient(x -> _f(x, args...), θ)
-            end
+            ReverseDiff.hessian!(res, x -> _f(x, args...), θ)
         end
     else
         hess = (H, θ, args...) -> f.hess(H, θ, cache.p, args...)
@@ -133,9 +127,7 @@ function Optimization.instantiate_function(f, cache::Optimization.ReInitCache,
         fncs = [(x) -> cons_oop(x)[i] for i in 1:num_cons]
         cons_h = function (res, θ)
             for i in 1:num_cons
-                res[i] .= ForwardDiff.jacobian(θ) do θ
-                    ReverseDiff.gradient(fncs[i], θ)
-                end
+                ReverseDiff.hessian!(res[i], fncs[i], θ)
             end
         end
     else
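Note on the change (not part of the patch): each hunk replaces a forward-over-reverse Hessian, that is, ForwardDiff.jacobian applied to a ReverseDiff.gradient closure, with a single in-place ReverseDiff.hessian! call. The removed cfg = ReverseDiff.GradientConfig(cache.u0) line appears unused in the surrounding code shown here, since the gradient! call never receives it. Below is a minimal standalone sketch of the Hessian equivalence; the Rosenbrock-style objective and the evaluation point are illustrative assumptions, not code from Optimization.jl.

# Sketch only: compares the removed forward-over-reverse Hessian with the
# ReverseDiff.hessian! call that this diff introduces.
using ReverseDiff, ReverseDiff.ForwardDiff
using Test

# Illustrative objective; any smooth scalar function of a vector works here.
rosen(θ) = (1.0 - θ[1])^2 + 100.0 * (θ[2] - θ[1]^2)^2

θ = [0.5, 0.7]

# Old approach: forward-mode Jacobian of the reverse-mode gradient.
H_old = ForwardDiff.jacobian(θ) do θ
    ReverseDiff.gradient(rosen, θ)
end

# New approach: one in-place reverse-mode Hessian, as in the patched code.
H_new = zeros(length(θ), length(θ))
ReverseDiff.hessian!(H_new, rosen, θ)

@test H_old ≈ H_new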