Skip to content

Commit 4c7a31c

Browse files
committed
More type stability in ADNLPModels.jl
1 parent d30f500 commit 4c7a31c

File tree

6 files changed

+7
-53
lines changed

6 files changed

+7
-53
lines changed

src/forward.jl

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -109,7 +109,7 @@ function GenericForwardDiffADJtprod(
109109
return GenericForwardDiffADJtprod()
110110
end
111111
function Jtprod!(::GenericForwardDiffADJtprod, Jtv, f, x, v, ::Val)
112-
Jtv .= ForwardDiff.gradient(x -> dot(f(x), v), x)
112+
ForwardDiff.gradient!(Jtv, x -> dot(f(x), v), x)
113113
return Jtv
114114
end
115115

test/Project.toml

Lines changed: 1 addition & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -13,6 +13,7 @@ Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
1313

1414
[compat]
1515
ForwardDiff = "0.10"
16+
JET = "0.9, 0.10"
1617
ManualNLPModels = "0.1"
1718
NLPModels = "0.21"
1819
NLPModelsModifiers = "0.7"

test/nlp/basic.jl

Lines changed: 0 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -25,7 +25,6 @@ function test_autodiff_model(name; kwargs...)
2525
@test abs(obj(nlp, β) - norm(y .- β[1] - β[2] * x)^2 / 2) < 1e-12
2626
@test norm(grad(nlp, β)) < 1e-12
2727

28-
test_getter_setter(nlp)
2928
test_allocations(nlp)
3029

3130
@testset "Constructors for ADNLPModel with $name" begin

test/nls/basic.jl

Lines changed: 0 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -5,7 +5,6 @@ function autodiff_nls_test(name; kwargs...)
55

66
@test isapprox(residual(nls, ones(2)), zeros(2), rtol = 1e-8)
77

8-
test_getter_setter(nls)
98
test_allocations(nls)
109
end
1110

test/runtests.jl

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -1,6 +1,6 @@
11
using LinearAlgebra, SparseArrays, Test
22
using SparseMatrixColorings
3-
using JET
3+
using ForwardDiff, JET
44
using ADNLPModels, ManualNLPModels, NLPModels, NLPModelsModifiers, NLPModelsTest
55
using ADNLPModels:
66
gradient, gradient!, jacobian, hessian, Jprod!, Jtprod!, directional_second_derivative, Hvprod!

test/utils.jl

Lines changed: 4 additions & 49 deletions
Original file line number | Diff line number | Diff line change
@@ -1,49 +1,3 @@
1-
ReverseDiffAD(nvar, f) = ADNLPModels.ADModelBackend(
2-
nvar,
3-
f,
4-
gradient_backend = ADNLPModels.ReverseDiffADGradient,
5-
hprod_backend = ADNLPModels.ReverseDiffADHvprod,
6-
jprod_backend = ADNLPModels.ReverseDiffADJprod,
7-
jtprod_backend = ADNLPModels.ReverseDiffADJtprod,
8-
jacobian_backend = ADNLPModels.ReverseDiffADJacobian,
9-
hessian_backend = ADNLPModels.ReverseDiffADHessian,
10-
)
11-
12-
function test_getter_setter(nlp)
13-
@test get_adbackend(nlp) == nlp.adbackend
14-
if typeof(nlp) <: ADNLPModel
15-
new_nlp = ADNLPModel(nlp, ReverseDiffAD(nlp.meta.nvar, nlp.f))
16-
elseif typeof(nlp) <: ADNLSModel
17-
function F(x; nequ = nlp.nls_meta.nequ)
18-
Fx = similar(x, nequ)
19-
nlp.F!(Fx, x)
20-
return Fx
21-
end
22-
new_nlp = ADNLSModel(nlp, ReverseDiffAD(nlp.meta.nvar, x -> sum(F(x) .^ 2)))
23-
end
24-
@test typeof(get_adbackend(new_nlp).gradient_backend) <: ADNLPModels.ReverseDiffADGradient
25-
@test typeof(get_adbackend(new_nlp).hprod_backend) <: ADNLPModels.ReverseDiffADHvprod
26-
@test typeof(get_adbackend(new_nlp).hessian_backend) <: ADNLPModels.ReverseDiffADHessian
27-
if typeof(nlp) <: ADNLPModel
28-
newer_nlp = ADNLPModel(
29-
new_nlp,
30-
gradient_backend = ADNLPModels.ForwardDiffADGradient,
31-
jtprod_backend = ADNLPModels.GenericForwardDiffADJtprod(),
32-
)
33-
elseif typeof(nlp) <: ADNLSModel
34-
newer_nlp = ADNLSModel(
35-
new_nlp,
36-
gradient_backend = ADNLPModels.ForwardDiffADGradient,
37-
jtprod_backend = ADNLPModels.GenericForwardDiffADJtprod(),
38-
)
39-
end
40-
41-
@test typeof(get_adbackend(newer_nlp).gradient_backend) <: ADNLPModels.ForwardDiffADGradient
42-
@test typeof(get_adbackend(newer_nlp).hprod_backend) <: ADNLPModels.ReverseDiffADHvprod
43-
@test typeof(get_adbackend(newer_nlp).jtprod_backend) <: ADNLPModels.GenericForwardDiffADJtprod
44-
@test typeof(get_adbackend(newer_nlp).hessian_backend) <: ADNLPModels.ReverseDiffADHessian
45-
end
46-
471
function test_allocations(nlp::ADNLPModel)
482
x = nlp.meta.x0
493
y = zeros(eltype(nlp.meta.x0), nlp.meta.ncon)
@@ -58,7 +12,8 @@ function test_allocations(nlp::ADNLSModel)
5812
y = zeros(eltype(nlp.meta.x0), nlp.meta.ncon)
5913
g = zeros(eltype(nlp.meta.x0), nlp.meta.nvar)
6014
Fx = zeros(eltype(nlp.meta.x0), nlp.nls_meta.nequ)
61-
@test_opt target_modules=(ADNLPModels,) obj(nlp, x)
62-
@test_opt target_modules=(ADNLPModels,) cons!(nlp, x, y)
63-
@test_opt target_modules=(ADNLPModels,) grad!(nlp, x, g, Fx)
15+
@test_opt target_modules=(ADNLPModels,) function_filter=(@nospecialize(f) -> f != ForwardDiff.gradient!) obj(nlp, x)
16+
@test_opt target_modules=(ADNLPModels,) function_filter=(@nospecialize(f) -> f != ForwardDiff.gradient!) cons!(nlp, x, y)
17+
@test_opt target_modules=(ADNLPModels,) function_filter=(@nospecialize(f) -> f != ForwardDiff.gradient!) grad!(nlp, x, g, Fx)
18+
@test_opt target_modules=(ADNLPModels,) function_filter=(@nospecialize(f) -> f != ForwardDiff.gradient!) residual!(nlp, x, Fx)
6419
end

0 commit comments

Comments (0)