Skip to content

Commit 7b7a143

Browse files
SebastianM-C and claude
committed
fix deprecations
Co-authored-by: Claude <[email protected]>
1 parent b24295b commit 7b7a143

File tree

3 files changed

+40
-39
lines changed

3 files changed

+40
-39
lines changed

lib/OptimizationBase/test/Project.toml

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,5 @@
11
[deps]
2+
ADTypes = "47edcb42-4c32-4615-8424-f2b9edc5f35b"
23
Aqua = "4c88cf16-eb10-579e-8560-4a9242c79595"
34
BenchmarkTools = "6e4b80f9-dd63-53aa-95a3-0cdb28fa8baf"
45
ChainRulesCore = "d360d2e6-b24c-11e9-a2a3-2a2ae2dbcce4"

lib/OptimizationBase/test/adtests.jl

Lines changed: 38 additions & 38 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
using OptimizationBase, Test, DifferentiationInterface, SparseArrays, Symbolics
2-
using ForwardDiff, Zygote, ReverseDiff, FiniteDiff, Tracker
2+
using ADTypes, ForwardDiff, Zygote, ReverseDiff, FiniteDiff, Tracker
33
using ModelingToolkit, Enzyme, Random
44

55
x0 = zeros(2)
@@ -27,9 +27,9 @@ g!(G1, x0)
2727
h!(H1, x0)
2828

2929
cons = (res, x, p) -> (res[1] = x[1]^2 + x[2]^2; return nothing)
30-
optf = OptimizationFunction(rosenbrock, OptimizationBase.AutoModelingToolkit(), cons = cons)
30+
optf = OptimizationFunction(rosenbrock, OptimizationBase.AutoSymbolics(), cons = cons)
3131
optprob = OptimizationBase.instantiate_function(optf, x0,
32-
OptimizationBase.AutoModelingToolkit(),
32+
OptimizationBase.AutoSymbolics(),
3333
nothing, 1, g = true, h = true, cons_j = true, cons_h = true)
3434
optprob.grad(G2, x0)
3535
@test G1 == G2
@@ -51,10 +51,10 @@ function con2_c(res, x, p)
5151
return nothing
5252
end
5353
optf = OptimizationFunction(rosenbrock,
54-
OptimizationBase.AutoModelingToolkit(),
54+
OptimizationBase.AutoSymbolics(),
5555
cons = con2_c)
5656
optprob = OptimizationBase.instantiate_function(optf, x0,
57-
OptimizationBase.AutoModelingToolkit(),
57+
OptimizationBase.AutoSymbolics(),
5858
nothing, 2, g = true, h = true, cons_j = true, cons_h = true)
5959
optprob.grad(G2, x0)
6060
@test G1 == G2
@@ -196,9 +196,9 @@ optprob.cons_h(H3, x0)
196196
H2 = Array{Float64}(undef, 2, 2)
197197

198198
optf = OptimizationFunction(
199-
rosenbrock, OptimizationBase.AutoReverseDiff(true), cons = cons)
199+
rosenbrock, OptimizationBase.AutoReverseDiff(; compile = true), cons = cons)
200200
optprob = OptimizationBase.instantiate_function(
201-
optf, x0, OptimizationBase.AutoReverseDiff(true),
201+
optf, x0, OptimizationBase.AutoReverseDiff(; compile = true),
202202
nothing, 1, g = true, h = true, hv = true,
203203
cons_j = true, cons_h = true, cons_vjp = true,
204204
cons_jvp = true, lag_h = true)
@@ -402,9 +402,9 @@ end
402402
H2 = Array{Float64}(undef, 2, 2)
403403

404404
optf = OptimizationFunction(
405-
rosenbrock, OptimizationBase.AutoReverseDiff(true), cons = con2_c)
405+
rosenbrock, OptimizationBase.AutoReverseDiff(; compile = true), cons = con2_c)
406406
optprob = OptimizationBase.instantiate_function(optf, x0,
407-
OptimizationBase.AutoReverseDiff(true),
407+
OptimizationBase.AutoReverseDiff(; compile = true),
408408
nothing, 2, g = true, h = true, hv = true,
409409
cons_j = true, cons_h = true, cons_vjp = true,
410410
cons_jvp = true, lag_h = true)
@@ -442,7 +442,7 @@ end
442442
optf = OptimizationFunction(
443443
rosenbrock, OptimizationBase.AutoForwardDiff(), cons = con2_c)
444444
optprob = OptimizationBase.instantiate_function(optf, x0,
445-
OptimizationBase.AutoReverseDiff(compile = true),
445+
OptimizationBase.AutoReverseDiff(; compile = true),
446446
nothing, 2, g = true, h = true, hv = true,
447447
cons_j = true, cons_h = true, cons_vjp = true,
448448
cons_jvp = true, lag_h = true)
@@ -582,12 +582,12 @@ end
582582
x0 = [0.5, 0.5, 0.5]
583583

584584
# Create OptimizationFunction
585-
optf = OptimizationFunction(sparse_objective, OptimizationBase.AutoSparseForwardDiff(),
585+
optf = OptimizationFunction(sparse_objective, AutoSparse(OptimizationBase.AutoForwardDiff()),
586586
cons = sparse_constraints)
587587

588588
# Instantiate the optimization problem
589589
optprob = OptimizationBase.instantiate_function(optf, x0,
590-
OptimizationBase.AutoSparseForwardDiff(),
590+
AutoSparse(OptimizationBase.AutoForwardDiff()),
591591
nothing, 2, g = true, h = true, cons_j = true, cons_h = true, lag_h = true)
592592
# Test gradient
593593
G = zeros(3)
@@ -631,12 +631,12 @@ end
631631
@test lag_H lag_H_expected
632632
@test nnz(lag_H) == 5
633633

634-
optf = OptimizationFunction(sparse_objective, OptimizationBase.AutoSparseReverseDiff(),
634+
optf = OptimizationFunction(sparse_objective, AutoSparse(OptimizationBase.AutoReverseDiff()),
635635
cons = sparse_constraints)
636636

637637
# Instantiate the optimization problem
638638
optprob = OptimizationBase.instantiate_function(optf, x0,
639-
OptimizationBase.AutoSparseForwardDiff(),
639+
AutoSparse(OptimizationBase.AutoForwardDiff()),
640640
nothing, 2, g = true, h = true, cons_j = true, cons_h = true, lag_h = true)
641641
# Test gradient
642642
G = zeros(3)
@@ -681,12 +681,12 @@ end
681681
@test nnz(lag_H) == 5
682682

683683
optf = OptimizationFunction(
684-
sparse_objective, OptimizationBase.AutoSparseReverseDiff(true),
684+
sparse_objective, AutoSparse(OptimizationBase.AutoReverseDiff(; compile = true)),
685685
cons = sparse_constraints)
686686

687687
# Instantiate the optimization problem
688688
optprob = OptimizationBase.instantiate_function(optf, x0,
689-
OptimizationBase.AutoSparseForwardDiff(),
689+
AutoSparse(OptimizationBase.AutoForwardDiff()),
690690
nothing, 2, g = true, h = true, cons_j = true, cons_h = true, lag_h = true)
691691
# Test gradient
692692
G = zeros(3)
@@ -730,12 +730,12 @@ end
730730
@test lag_H lag_H_expected
731731
@test nnz(lag_H) == 5
732732

733-
optf = OptimizationFunction(sparse_objective, OptimizationBase.AutoSparseFiniteDiff(),
733+
optf = OptimizationFunction(sparse_objective, AutoSparse(OptimizationBase.AutoFiniteDiff()),
734734
cons = sparse_constraints)
735735

736736
# Instantiate the optimization problem
737737
optprob = OptimizationBase.instantiate_function(optf, x0,
738-
OptimizationBase.AutoSparseForwardDiff(),
738+
AutoSparse(OptimizationBase.AutoForwardDiff()),
739739
nothing, 2, g = true, h = true, cons_j = true, cons_h = true, lag_h = true)
740740
# Test gradient
741741
G = zeros(3)
@@ -959,10 +959,10 @@ end
959959

960960
cons = (x, p) -> [x[1]^2 + x[2]^2]
961961
optf = OptimizationFunction{false}(rosenbrock,
962-
OptimizationBase.AutoReverseDiff(true),
962+
OptimizationBase.AutoReverseDiff(; compile = true),
963963
cons = cons)
964964
optprob = OptimizationBase.instantiate_function(optf, x0,
965-
OptimizationBase.AutoReverseDiff(true),
965+
OptimizationBase.AutoReverseDiff(; compile = true),
966966
nothing, 1, g = true, h = true, cons_j = true, cons_h = true)
967967

968968
@test optprob.grad(x0) == G1
@@ -976,10 +976,10 @@ end
976976

977977
cons = (x, p) -> [x[1]^2 + x[2]^2, x[2] * sin(x[1]) - x[1]]
978978
optf = OptimizationFunction{false}(rosenbrock,
979-
OptimizationBase.AutoReverseDiff(true),
979+
OptimizationBase.AutoReverseDiff(; compile = true),
980980
cons = cons)
981981
optprob = OptimizationBase.instantiate_function(optf, x0,
982-
OptimizationBase.AutoReverseDiff(true),
982+
OptimizationBase.AutoReverseDiff(; compile = true),
983983
nothing, 2, g = true, h = true, cons_j = true, cons_h = true)
984984

985985
@test optprob.grad(x0) == G1
@@ -990,10 +990,10 @@ end
990990

991991
cons = (x, p) -> [x[1]^2 + x[2]^2]
992992
optf = OptimizationFunction{false}(rosenbrock,
993-
OptimizationBase.AutoSparseForwardDiff(),
993+
AutoSparse(OptimizationBase.AutoForwardDiff()),
994994
cons = cons)
995995
optprob = OptimizationBase.instantiate_function(optf, x0,
996-
OptimizationBase.AutoSparseForwardDiff(),
996+
AutoSparse(OptimizationBase.AutoForwardDiff()),
997997
nothing, 1, g = true, h = true, cons_j = true, cons_h = true)
998998

999999
@test optprob.grad(x0) == G1
@@ -1007,10 +1007,10 @@ end
10071007

10081008
cons = (x, p) -> [x[1]^2 + x[2]^2, x[2] * sin(x[1]) - x[1]]
10091009
optf = OptimizationFunction{false}(rosenbrock,
1010-
OptimizationBase.AutoSparseForwardDiff(),
1010+
AutoSparse(OptimizationBase.AutoForwardDiff()),
10111011
cons = cons)
10121012
optprob = OptimizationBase.instantiate_function(optf, x0,
1013-
OptimizationBase.AutoSparseForwardDiff(),
1013+
AutoSparse(OptimizationBase.AutoForwardDiff()),
10141014
nothing, 2, g = true, h = true, cons_j = true, cons_h = true)
10151015

10161016
@test optprob.grad(x0) == G1
@@ -1021,10 +1021,10 @@ end
10211021

10221022
cons = (x, p) -> [x[1]^2 + x[2]^2]
10231023
optf = OptimizationFunction{false}(rosenbrock,
1024-
OptimizationBase.AutoSparseFiniteDiff(),
1024+
AutoSparse(OptimizationBase.AutoFiniteDiff()),
10251025
cons = cons)
10261026
optprob = OptimizationBase.instantiate_function(optf, x0,
1027-
OptimizationBase.AutoSparseFiniteDiff(),
1027+
AutoSparse(OptimizationBase.AutoFiniteDiff()),
10281028
nothing, 1, g = true, h = true, cons_j = true, cons_h = true)
10291029

10301030
@test optprob.grad(x0)G1 rtol=1e-4
@@ -1038,10 +1038,10 @@ end
10381038

10391039
cons = (x, p) -> [x[1]^2 + x[2]^2, x[2] * sin(x[1]) - x[1]]
10401040
optf = OptimizationFunction{false}(rosenbrock,
1041-
OptimizationBase.AutoSparseFiniteDiff(),
1041+
AutoSparse(OptimizationBase.AutoFiniteDiff()),
10421042
cons = cons)
10431043
optprob = OptimizationBase.instantiate_function(optf, x0,
1044-
OptimizationBase.AutoSparseForwardDiff(),
1044+
AutoSparse(OptimizationBase.AutoForwardDiff()),
10451045
nothing, 2, g = true, h = true, cons_j = true, cons_h = true)
10461046

10471047
@test optprob.grad(x0) == G1
@@ -1052,10 +1052,10 @@ end
10521052

10531053
cons = (x, p) -> [x[1]^2 + x[2]^2]
10541054
optf = OptimizationFunction{false}(rosenbrock,
1055-
OptimizationBase.AutoSparseReverseDiff(),
1055+
AutoSparse(OptimizationBase.AutoReverseDiff()),
10561056
cons = cons)
10571057
optprob = OptimizationBase.instantiate_function(optf, x0,
1058-
OptimizationBase.AutoSparseReverseDiff(),
1058+
AutoSparse(OptimizationBase.AutoReverseDiff()),
10591059
nothing, 1, g = true, h = true, cons_j = true, cons_h = true)
10601060

10611061
@test optprob.grad(x0) == G1
@@ -1069,10 +1069,10 @@ end
10691069

10701070
cons = (x, p) -> [x[1]^2 + x[2]^2, x[2] * sin(x[1]) - x[1]]
10711071
optf = OptimizationFunction{false}(rosenbrock,
1072-
OptimizationBase.AutoSparseReverseDiff(),
1072+
AutoSparse(OptimizationBase.AutoReverseDiff()),
10731073
cons = cons)
10741074
optprob = OptimizationBase.instantiate_function(optf, x0,
1075-
OptimizationBase.AutoSparseReverseDiff(),
1075+
AutoSparse(OptimizationBase.AutoReverseDiff()),
10761076
nothing, 2, g = true, h = true, cons_j = true, cons_h = true)
10771077

10781078
@test optprob.grad(x0) == G1
@@ -1083,10 +1083,10 @@ end
10831083

10841084
cons = (x, p) -> [x[1]^2 + x[2]^2]
10851085
optf = OptimizationFunction{false}(rosenbrock,
1086-
OptimizationBase.AutoSparseReverseDiff(true),
1086+
AutoSparse(OptimizationBase.AutoReverseDiff(; compile = true)),
10871087
cons = cons)
10881088
optprob = OptimizationBase.instantiate_function(optf, x0,
1089-
OptimizationBase.AutoSparseReverseDiff(true),
1089+
AutoSparse(OptimizationBase.AutoReverseDiff(; compile = true)),
10901090
nothing, 1, g = true, h = true, cons_j = true, cons_h = true)
10911091

10921092
@test optprob.grad(x0) == G1
@@ -1099,10 +1099,10 @@ end
10991099

11001100
cons = (x, p) -> [x[1]^2 + x[2]^2, x[2] * sin(x[1]) - x[1]]
11011101
optf = OptimizationFunction{false}(rosenbrock,
1102-
OptimizationBase.AutoSparseReverseDiff(true),
1102+
AutoSparse(OptimizationBase.AutoReverseDiff(; compile = true)),
11031103
cons = cons)
11041104
optprob = OptimizationBase.instantiate_function(optf, x0,
1105-
OptimizationBase.AutoSparseReverseDiff(true),
1105+
AutoSparse(OptimizationBase.AutoReverseDiff(; compile = true)),
11061106
nothing, 2, g = true, h = true, cons_j = true, cons_h = true)
11071107

11081108
@test optprob.grad(x0) == G1

lib/OptimizationBase/test/cvxtest.jl

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -47,6 +47,6 @@ prob = OptimizationProblem(optf, data2[1]; manifold = M, structural_analysis = t
4747

4848
opt = OptimizationManopt.GradientDescentOptimizer()
4949
@time sol = solve(prob, opt, maxiters = 100)
50-
@test sol.minimum < 1e-1
50+
@test sol.objective < 1e-1
5151
@test sol.cache.analysis_results.objective.curvature == SymbolicAnalysis.UnknownCurvature
5252
@test sol.cache.analysis_results.objective.gcurvature == SymbolicAnalysis.GConvex

0 commit comments

Comments
 (0)