forked from JuliaSmoothOptimizers/ADNLPModels.jl
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy path: utils.jl
More file actions
19 lines (18 loc) · 1.03 KB
/
utils.jl
File metadata and controls
19 lines (18 loc) · 1.03 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
# Verify via JET's `@test_opt` that the basic NLP API — `obj`, `cons!`, and
# `grad!` — triggers no optimization failures (e.g. runtime dispatch) inside
# the ADNLPModels module for this model.
function test_allocations(nlp::ADNLPModel)
  T = eltype(nlp.meta.x0)
  x0 = nlp.meta.x0
  cx = zeros(T, nlp.meta.ncon)  # in-place constraint buffer
  gx = zeros(T, nlp.meta.nvar)  # in-place gradient buffer
  @test_opt target_modules=(ADNLPModels,) obj(nlp, x0)
  @test_opt target_modules=(ADNLPModels,) cons!(nlp, x0, cx)
  @test_opt target_modules=(ADNLPModels,) grad!(nlp, x0, gx)
end
# Same JET `@test_opt` checks for nonlinear least-squares models, extended to
# the residual API (`residual!`) and the in-place NLS gradient `grad!(nlp, x, g, Fx)`.
# `ForwardDiff.gradient!` is excluded from the analysis via `function_filter` —
# presumably because its dispatch behavior lies outside ADNLPModels' control
# (NOTE(review): confirm the rationale upstream).
function test_allocations(nlp::ADNLSModel)
  T = eltype(nlp.meta.x0)
  x0 = nlp.meta.x0
  cx = zeros(T, nlp.meta.ncon)       # in-place constraint buffer
  gx = zeros(T, nlp.meta.nvar)       # in-place gradient buffer
  rx = zeros(T, nlp.nls_meta.nequ)   # in-place residual buffer
  @test_opt target_modules=(ADNLPModels,) function_filter=(@nospecialize(f) -> f != ForwardDiff.gradient!) obj(nlp, x0)
  @test_opt target_modules=(ADNLPModels,) function_filter=(@nospecialize(f) -> f != ForwardDiff.gradient!) cons!(nlp, x0, cx)
  @test_opt target_modules=(ADNLPModels,) function_filter=(@nospecialize(f) -> f != ForwardDiff.gradient!) grad!(nlp, x0, gx, rx)
  @test_opt target_modules=(ADNLPModels,) function_filter=(@nospecialize(f) -> f != ForwardDiff.gradient!) residual!(nlp, x0, rx)
end