@@ -2,6 +2,8 @@ using OptimizationMOI, Optimization, Ipopt, NLopt, Zygote, ModelingToolkit, Reve
 using AmplNLWriter, Ipopt_jll, Juniper, HiGHS
 using Test, SparseArrays

+import MathOptInterface
+
 function _test_sparse_derivatives_hs071(backend, optimizer)
     function objective(x, ::Any)
         return x[1] * x[4] * (x[1] + x[2] + x[3]) + x[3]
@@ -29,6 +31,32 @@ function _test_sparse_derivatives_hs071(backend, optimizer)
     return
 end

+@testset "Evaluator" begin
+    rosenbrock(x, p) = (p[1] - x[1])^2 + p[2] * (x[2] - x[1]^2)^2
+    x0 = zeros(2)
+    _p = [1.0, 100.0]
+    cons_circ = (res, x, p) -> res .= [x[1]^2 + x[2]^2]
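+    # a single scalar constraint in two variables, so the constraint Jacobian J(x) is 1×2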
+    optprob = OptimizationFunction(
+        rosenbrock, Optimization.AutoZygote();
+        cons = cons_circ)
+    prob = OptimizationProblem(optprob, x0, _p, ucons = [Inf], lcons = [0.0])
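+    # init builds the MOI optimizer cache; its evaluator field is the
+    # MOI.AbstractNLPEvaluator queried below for derivative products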
+    evaluator = init(prob, Ipopt.Optimizer()).evaluator
+
+    x = prob.u0
+    # constraint Jacobian-vector product (Jv) needs a Jacobian or a JVP
+    @test (evaluator.f.cons_j !== nothing) || (evaluator.f.cons_jvp !== nothing)
+    y = zeros(1)
+    w = ones(2)
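+    # per the MOI contract, eval_constraint_jacobian_product writes y = J(x) * w
+    # in place and returns nothing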
+    @test MathOptInterface.eval_constraint_jacobian_product(evaluator, y, x, w) === nothing
+
+    # constraint Jacobian-transpose-vector product (J'v) needs a Jacobian or a VJP
+    @test (evaluator.f.cons_j !== nothing) || (evaluator.f.cons_vjp !== nothing)
+    y = zeros(2)
+    w = ones(1)
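+    # eval_constraint_jacobian_transpose_product writes y = J(x)' * w in place,
+    # mapping a multiplier vector (length 1) back to variable space (length 2)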
+    @test MathOptInterface.eval_constraint_jacobian_transpose_product(
+        evaluator, y, x, w) === nothing
+end
+
 @testset "NLP" begin
     rosenbrock(x, p) = (p[1] - x[1])^2 + p[2] * (x[2] - x[1]^2)^2
     x0 = zeros(2)