Skip to content

Commit bdabc6f

Browse files
authored
Merge pull request #368 from JuliaOpt/eph/moi_solve
Pass solver constructors instead of instances
2 parents fd1a745 + b7da3f6 commit bdabc6f

38 files changed: +102 additions, −71 deletions

NEWS.md

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,10 @@
1-
# Changes in v0.13.0
1+
# Major changes in v0.13.0
22

33
* The intermediate layer has changed from MathProgBase.jl to
44
[MathOptInterface.jl](https://github.com/JuliaOpt/MathOptInterface.jl)
5-
([#330](https://github.com/JuliaOpt/Convex.jl/pull/330)).
5+
([#330](https://github.com/JuliaOpt/Convex.jl/pull/330)). To solve problems,
6+
one should pass a MathOptInterface optimizer constructor, such as
7+
`SCS.Optimizer`, or `() -> SCS.Optimizer(verbose=false)`.
68
* `lambdamin` and `lambdamax` have been deprecated in favor of `eigmin` and
79
`eigmax`. ([#357](https://github.com/JuliaOpt/Convex.jl/pull/357))
810
* `evaluate(x::Variable)` and `evaluate(c::Constant)` now return scalars and

README.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -40,7 +40,7 @@ x = Variable(n)
4040
problem = minimize(sumsquares(A * x - b), [x >= 0])
4141

4242
# Solve the problem by calling solve!
43-
solve!(problem, SCS.Optimizer())
43+
solve!(problem, SCS.Optimizer)
4444

4545
# Check the status of the problem
4646
problem.status # :Optimal, :Infeasible, :Unbounded etc.

docs/examples_literate/general_examples/basic_usage.jl

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@ end
88

99
using SCS
1010
## passing in verbose=0 to hide output from SCS
11-
solver = SCS.Optimizer(verbose=0)
11+
solver = () -> SCS.Optimizer(verbose=0)
1212

1313
# ### Linear program
1414
#
@@ -73,7 +73,7 @@ p.optval
7373

7474
x = Variable(4)
7575
p = satisfy(norm(x) <= 100, exp(x[1]) <= 5, x[2] >= 7, geomean(x[3], x[4]) >= x[2])
76-
solve!(p, SCS.Optimizer(verbose=0))
76+
solve!(p, solver)
7777
println(p.status)
7878
x.value
7979

@@ -82,7 +82,7 @@ x.value
8282

8383
y = Semidefinite(2)
8484
p = maximize(eigmin(y), tr(y)<=6)
85-
solve!(p, SCS.Optimizer(verbose=0))
85+
solve!(p, solver)
8686
p.optval
8787

8888
#-
@@ -108,7 +108,7 @@ y.value
108108
using GLPK
109109
x = Variable(4, :Int)
110110
p = minimize(sum(x), x >= 0.5)
111-
solve!(p, GLPK.Optimizer())
111+
solve!(p, GLPK.Optimizer)
112112
x.value
113113

114114
#-

docs/examples_literate/general_examples/chebyshev_center.jl

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -25,7 +25,7 @@ p.constraints += a1' * x_c + r * norm(a1, 2) <= b[1];
2525
p.constraints += a2' * x_c + r * norm(a2, 2) <= b[2];
2626
p.constraints += a3' * x_c + r * norm(a3, 2) <= b[3];
2727
p.constraints += a4' * x_c + r * norm(a4, 2) <= b[4];
28-
solve!(p, SCS.Optimizer(verbose=0))
28+
solve!(p, () -> SCS.Optimizer(verbose=0))
2929
p.optval
3030

3131
# Generate the figure

docs/examples_literate/general_examples/control.jl

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -121,7 +121,7 @@ push!(constraints, velocity[:, T] == 0)
121121

122122
## Solve the problem
123123
problem = minimize(sumsquares(force), constraints)
124-
solve!(problem, SCS.Optimizer(verbose=0))
124+
solve!(problem, () -> SCS.Optimizer(verbose=0))
125125

126126
# We can plot the trajectory taken by the object.
127127

docs/examples_literate/general_examples/huber_regression.jl

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -40,18 +40,18 @@ for i=1:length(p_vals)
4040
fit = norm(beta - beta_true) / norm(beta_true);
4141
cost = norm(X' * beta - Y);
4242
prob = minimize(cost);
43-
solve!(prob, SCS.Optimizer(verbose=0));
43+
solve!(prob, () -> SCS.Optimizer(verbose=0));
4444
lsq_data[i] = evaluate(fit);
4545

4646
## Form and solve a prescient regression problem,
4747
## i.e., where the sign changes are known.
4848
cost = norm(factor .* (X'*beta) - Y);
49-
solve!(minimize(cost), SCS.Optimizer(verbose=0))
49+
solve!(minimize(cost), () -> SCS.Optimizer(verbose=0))
5050
prescient_data[i] = evaluate(fit);
5151

5252
## Form and solve the Huber regression problem.
5353
cost = sum(huber(X' * beta - Y, 1));
54-
solve!(minimize(cost), SCS.Optimizer(verbose=0))
54+
solve!(minimize(cost), () -> SCS.Optimizer(verbose=0))
5555
huber_data[i] = evaluate(fit);
5656
end
5757

docs/examples_literate/general_examples/logistic_regression.jl

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -23,7 +23,7 @@ X = hcat(ones(size(iris, 1)), iris.SepalLength, iris.SepalWidth, iris.PetalLengt
2323
n, p = size(X)
2424
beta = Variable(p)
2525
problem = minimize(logisticloss(-Y.*(X*beta)))
26-
solve!(problem, SCS.Optimizer(verbose=false))
26+
solve!(problem, () -> SCS.Optimizer(verbose=false))
2727

2828
# Let's see how well the model fits.
2929
using Plots

docs/examples_literate/general_examples/max_entropy.jl

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -23,7 +23,7 @@ b = rand(m, 1);
2323

2424
x = Variable(n);
2525
problem = maximize(entropy(x), sum(x) == 1, A * x <= b)
26-
solve!(problem, SCS.Optimizer(verbose=false))
26+
solve!(problem, () -> SCS.Optimizer(verbose=false))
2727
problem.optval
2828

2929
#-

docs/examples_literate/general_examples/optimal_advertising.jl

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -46,7 +46,7 @@ D = Variable(m, n);
4646
Si = [min(R[i]*dot(P[i,:], D[i,:]'), B[i]) for i=1:m];
4747
problem = maximize(sum(Si),
4848
[D >= 0, sum(D, dims=1)' <= T, sum(D, dims=2) >= c]);
49-
solve!(problem, SCS.Optimizer(verbose=0));
49+
solve!(problem, () -> SCS.Optimizer(verbose=0));
5050

5151
#-
5252

docs/examples_literate/general_examples/robust_approx_fitting.jl

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -43,18 +43,18 @@ x = Variable(n)
4343

4444
# Case 1: Nominal optimal solution
4545
p = minimize(norm(A * x - b, 2))
46-
solve!(p, SCS.Optimizer(verbose=0))
46+
solve!(p, () -> SCS.Optimizer(verbose=0))
4747
x_nom = evaluate(x)
4848

4949
# Case 2: Stochastic robust approximation
5050
P = 1 / 3 * B' * B;
5151
p = minimize(square(pos(norm(A * x - b))) + quadform(x, Symmetric(P)))
52-
solve!(p, SCS.Optimizer(verbose=0))
52+
solve!(p, () -> SCS.Optimizer(verbose=0))
5353
x_stoch = evaluate(x)
5454

5555
# Case 3: Worst-case robust approximation
5656
p = minimize(max(norm((A - B) * x - b), norm((A + B) * x - b)))
57-
solve!(p, SCS.Optimizer(verbose=0))
57+
solve!(p, () -> SCS.Optimizer(verbose=0))
5858
x_wc = evaluate(x)
5959

6060
# Plot residuals:

0 commit comments

Comments (0)