@@ -8,16 +8,21 @@ struct AutoZygote <: AbstractADType end
struct AutoFiniteDiff <: AbstractADType end
struct AutoModelingToolkit <: AbstractADType end

- struct OptimizationFunction{F,G,H,HV,K} <: AbstractOptimizationFunction
+ struct OptimizationFunction{F,G,H,HV,C,CJ,CH,K} <: AbstractOptimizationFunction
    f::F
    grad::G
    hess::H
    hv::HV
    adtype::AbstractADType
+     cons::C
+     cons_j::CJ
+     cons_h::CH
+     num_cons::Int
    kwargs::K
end

- function OptimizationFunction(f, x, ::AutoForwardDiff; grad=nothing,hess=nothing, p=DiffEqBase.NullParameters(), chunksize=1, hv=nothing, kwargs...)
+ function OptimizationFunction(f, x, ::AutoForwardDiff; grad=nothing, hess=nothing, cons=nothing, cons_j=nothing, cons_h=nothing,
+                               num_cons=0, p=DiffEqBase.NullParameters(), chunksize=1, hv=nothing, kwargs...)
    _f = θ -> f(θ,p)[1]
    if grad === nothing
        gradcfg = ForwardDiff.GradientConfig(_f, x, ForwardDiff.Chunk{chunksize}())
@@ -37,10 +42,41 @@ function OptimizationFunction(f, x, ::AutoForwardDiff; grad=nothing,hess=nothing
        end
    end

-     return OptimizationFunction{typeof(f),typeof(grad),typeof(hess),typeof(hv),typeof(kwargs)}(f,grad,hess,hv,AutoForwardDiff(),kwargs)
+     if cons !== nothing && cons_j === nothing
+         if num_cons == 1
+             cjconfig = ForwardDiff.JacobianConfig(cons, x, ForwardDiff.Chunk{chunksize}())
+             cons_j = (res,θ) -> ForwardDiff.jacobian!(res, cons, θ, cjconfig)
+         else
+             cons_j = function (res, θ)
+                 for i in 1:num_cons
+                     cjconfig = ForwardDiff.JacobianConfig(x -> cons(x)[i], θ, ForwardDiff.Chunk{chunksize}())
+                     ForwardDiff.jacobian!(res[i], x -> cons(x)[i], θ, cjconfig, Val{false}())
+                 end
+             end
+         end
+     end
+
+     if cons !== nothing && cons_h === nothing
+         if num_cons == 1
+             cons_h = function (res, θ)
+                 hess_config_cache = ForwardDiff.HessianConfig(cons, θ, ForwardDiff.Chunk{chunksize}())
+                 ForwardDiff.hessian!(res, cons, θ, hess_config_cache)
+             end
+         else
+             cons_h = function (res, θ)
+                 for i in 1:num_cons
+                     hess_config_cache = ForwardDiff.HessianConfig(x -> cons(x)[i], θ, ForwardDiff.Chunk{chunksize}())
+                     ForwardDiff.hessian!(res[i], x -> cons(x)[i], θ, hess_config_cache, Val{false}())
+                 end
+             end
+         end
+     end
+
+     return OptimizationFunction{typeof(f),typeof(grad),typeof(hess),typeof(hv),typeof(cons),typeof(cons_j),typeof(cons_h),typeof(kwargs)}(f,grad,hess,hv,AutoForwardDiff(),cons,cons_j,cons_h,num_cons,kwargs)
end

- function OptimizationFunction(f, x, ::AutoZygote; grad=nothing, hess=nothing, p=DiffEqBase.NullParameters(), hv=nothing, kwargs...)
+ function OptimizationFunction(f, x, ::AutoZygote; grad=nothing, hess=nothing, cons=nothing, cons_j=nothing, cons_h=nothing,
+                               num_cons=0, p=DiffEqBase.NullParameters(), hv=nothing, kwargs...)
    _f = θ -> f(θ,p)[1]
    if grad === nothing
        grad = (res,θ) -> res isa DiffResults.DiffResult ? DiffResults.gradient!(res, Zygote.gradient(_f, θ)[1]) : res .= Zygote.gradient(_f, θ)[1]
@@ -68,10 +104,11 @@ function OptimizationFunction(f, x, ::AutoZygote; grad=nothing, hess=nothing, p=
            H .= getindex.(ForwardDiff.partials.(DiffResults.gradient(res)),1)
        end
    end
-     return OptimizationFunction{typeof(f),typeof(grad),typeof(hess),typeof(hv),typeof(kwargs)}(f,grad,hess,hv,AutoZygote(),kwargs)
+     return OptimizationFunction{typeof(f),typeof(grad),typeof(hess),typeof(hv),typeof(cons),typeof(cons_j),typeof(cons_h),typeof(kwargs)}(f,grad,hess,hv,AutoZygote(),cons,cons_j,cons_h,num_cons,kwargs)
end

- function OptimizationFunction(f, x, ::AutoReverseDiff; grad=nothing,hess=nothing, p=DiffEqBase.NullParameters(), hv=nothing, kwargs...)
+ function OptimizationFunction(f, x, ::AutoReverseDiff; grad=nothing, hess=nothing, cons=nothing, cons_j=nothing, cons_h=nothing,
+                               num_cons=0, p=DiffEqBase.NullParameters(), hv=nothing, kwargs...)
    _f = θ -> f(θ,p)[1]
    if grad === nothing
        grad = (res,θ) -> ReverseDiff.gradient!(res, _f, θ, ReverseDiff.GradientConfig(θ))
@@ -100,11 +137,12 @@ function OptimizationFunction(f, x, ::AutoReverseDiff; grad=nothing,hess=nothing
        end
    end

-     return OptimizationFunction{typeof(f),typeof(grad),typeof(hess),typeof(hv),typeof(kwargs)}(f,grad,hess,hv,AutoReverseDiff(),kwargs)
+     return OptimizationFunction{typeof(f),typeof(grad),typeof(hess),typeof(hv),typeof(cons),typeof(cons_j),typeof(cons_h),typeof(kwargs)}(f,grad,hess,hv,AutoReverseDiff(),cons,cons_j,cons_h,num_cons,kwargs)
end


- function OptimizationFunction(f, x, ::AutoTracker; grad=nothing,hess=nothing, p=DiffEqBase.NullParameters(), hv=nothing, kwargs...)
+ function OptimizationFunction(f, x, ::AutoTracker; grad=nothing, hess=nothing, cons=nothing, cons_j=nothing, cons_h=nothing,
+                               num_cons=0, p=DiffEqBase.NullParameters(), hv=nothing, kwargs...)
    _f = θ -> f(θ,p)[1]
    if grad === nothing
        grad = (res,θ) -> res isa DiffResults.DiffResult ? DiffResults.gradient!(res, Tracker.data(Tracker.gradient(_f, θ)[1])) : res .= Tracker.data(Tracker.gradient(_f, θ)[1])
@@ -119,10 +157,11 @@ function OptimizationFunction(f, x, ::AutoTracker; grad=nothing,hess=nothing, p=
    end


-     return OptimizationFunction{typeof(f),typeof(grad),typeof(hess),typeof(hv),typeof(kwargs)}(f,grad,hess,hv,AutoTracker(),kwargs)
+     return OptimizationFunction{typeof(f),typeof(grad),typeof(hess),typeof(hv),typeof(cons),typeof(cons_j),typeof(cons_h),typeof(kwargs)}(f,grad,hess,hv,AutoTracker(),cons,cons_j,cons_h,num_cons,kwargs)
end

- function OptimizationFunction(f, x, adtype::AutoFiniteDiff; grad=nothing,hess=nothing, p=DiffEqBase.NullParameters(), hv=nothing, fdtype=:forward, fdhtype=:hcentral, kwargs...)
+ function OptimizationFunction(f, x, adtype::AutoFiniteDiff; grad=nothing, hess=nothing, cons=nothing, cons_j=nothing, cons_h=nothing,
+                               num_cons=0, p=DiffEqBase.NullParameters(), hv=nothing, fdtype=:forward, fdhtype=:hcentral, kwargs...)
    _f = θ -> f(θ,p)[1]
    if grad === nothing
        grad = (res,θ) -> FiniteDiff.finite_difference_gradient!(res, _f, θ, FiniteDiff.GradientCache(res, x, Val{fdtype}))
@@ -140,5 +179,5 @@ function OptimizationFunction(f, x, adtype::AutoFiniteDiff; grad=nothing,hess=no
        end
    end

-     return OptimizationFunction{typeof(f),typeof(grad),typeof(hess),typeof(hv),typeof(kwargs)}(f,grad,hess,hv,adtype,kwargs)
+     return OptimizationFunction{typeof(f),typeof(grad),typeof(hess),typeof(hv),typeof(cons),typeof(cons_j),typeof(cons_h),typeof(kwargs)}(f,grad,hess,hv,adtype,cons,cons_j,cons_h,num_cons,kwargs)
end
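
For orientation, here is a minimal sketch of how the new constraint keywords might be exercised through the `AutoForwardDiff` path. The objective and constraint below are hypothetical examples, and the sketch assumes the patched `OptimizationFunction` and `AutoForwardDiff` are in scope from this package; note that the single-constraint branch builds `cons_j` with `ForwardDiff.jacobian!`, which expects `cons` to return a vector.

```julia
# Hypothetical objective: Rosenbrock with parameters p.
rosenbrock(θ, p) = (p[1] - θ[1])^2 + p[2] * (θ[2] - θ[1]^2)^2

x0 = zeros(2)
p  = [1.0, 100.0]

# One scalar constraint, returned as a 1-element vector so that
# ForwardDiff.jacobian! (the num_cons == 1 branch) applies.
cons = θ -> [θ[1]^2 + θ[2]^2]

optf = OptimizationFunction(rosenbrock, x0, AutoForwardDiff();
                            cons = cons, num_cons = 1, p = p)

# The generated callback fills a preallocated buffer in place:
J = zeros(1, 2)
optf.cons_j(J, x0)   # constraint Jacobian [2θ[1] 2θ[2]] at x0, i.e. [0 0]
```

For `num_cons > 1`, the generated closures instead loop over the constraint components, filling `res[i]` with the i-th Jacobian or Hessian and rebuilding the `JacobianConfig`/`HessianConfig` on every iteration; hoisting those configs out of the loop would be a natural follow-up optimization.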