
Commit ecc91cd

Deprecate silent_solver to silent (#670)
1 parent: bdcbbde

32 files changed: 118 additions & 91 deletions
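Since every call site changes the same way, the practical impact is just the keyword name passed to `solve!`. A minimal sketch of the new spelling (the tiny problem below is illustrative only and is not part of this commit; it assumes Convex.jl with this change plus the SCS solver):

using Convex, SCS

x = Variable()
p = minimize(square(x - 3), x >= 0)

# Suppress solver output with the `silent` keyword introduced by this commit
solve!(p, SCS.Optimizer; silent = true)

println(round(p.optval, digits = 2))

The old spelling, `solve!(p, SCS.Optimizer; silent_solver = true)`, is what this commit deprecates; whether it still runs with a deprecation warning is not visible in this diff, so prefer `silent` going forward.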

docs/src/examples/general_examples/basic_usage.jl

Lines changed: 6 additions & 6 deletions

@@ -26,7 +26,7 @@ A = I(4)
b = [10; 10; 10; 10]
constraints = [A * x <= b, x >= 1, x <= 10, x[2] <= 5, x[1] + x[4] - x[2] <= 10]
p = minimize(dot(c, x), constraints) # or c' * x
-solve!(p, SCS.Optimizer; silent_solver = true)
+solve!(p, SCS.Optimizer; silent = true)

# We can also inspect the objective value and the values of the variables at the solution:
println(round(p.optval, digits = 2))
@@ -50,7 +50,7 @@ X = Variable(2, 2)
y = Variable()
## X is a 2 x 2 variable, and y is scalar. X' + y promotes y to a 2 x 2 variable before adding them
p = minimize(norm(X) + y, 2 * X <= 1, X' + y >= 1, X >= 0, y >= 0)
-solve!(p, SCS.Optimizer; silent_solver = true)
+solve!(p, SCS.Optimizer; silent = true)

# We can also inspect the values of the variables at the solution:
println(round.(evaluate(X), digits = 2))
@@ -76,13 +76,13 @@ p = satisfy(
    x[2] >= 7,
    geomean(x[3], x[4]) >= x[2],
)
-solve!(p, SCS.Optimizer; silent_solver = true)
+solve!(p, SCS.Optimizer; silent = true)

# ### PSD cone and Eigenvalues

y = Semidefinite(2)
p = maximize(eigmin(y), tr(y) <= 6)
-solve!(p, SCS.Optimizer; silent_solver = true)
+solve!(p, SCS.Optimizer; silent = true)

#-

@@ -91,7 +91,7 @@ y = Variable((2, 2))

## PSD constraints
p = minimize(x + y[1, 1], y ⪰ 0, x >= 1, y[2, 1] == 1)
-solve!(p, SCS.Optimizer; silent_solver = true)
+solve!(p, SCS.Optimizer; silent = true)

# ### Mixed integer program
#
@@ -106,7 +106,7 @@ solve!(p, SCS.Optimizer; silent_solver = true)

x = Variable(4, IntVar)
p = minimize(sum(x), x >= 0.5)
-solve!(p, GLPK.Optimizer; silent_solver = true)
+solve!(p, GLPK.Optimizer; silent = true)

# And the value of `x` at the solution:
evaluate(x)

docs/src/examples/general_examples/chebyshev_center.jl

Lines changed: 1 addition & 1 deletion

@@ -29,7 +29,7 @@ constraints = [
    a4' * x_c + r * norm(a4, 2) <= b[4],
]
p = maximize(r, constraints)
-solve!(p, SCS.Optimizer; silent_solver = true)
+solve!(p, SCS.Optimizer; silent = true)

# Generate the figure
x = range(-1.5, stop = 1.5, length = 100);

docs/src/examples/general_examples/control.jl

Lines changed: 1 addition & 1 deletion

@@ -121,7 +121,7 @@ push!(constraints, velocity[:, T] == 0)

## Solve the problem
problem = minimize(sumsquares(force), constraints)
-solve!(problem, SCS.Optimizer; silent_solver = true)
+solve!(problem, SCS.Optimizer; silent = true)

# We can plot the trajectory taken by the object.

docs/src/examples/general_examples/dualization.jl

Lines changed: 2 additions & 2 deletions

@@ -25,8 +25,8 @@ p = 50
# Now we formulate and solve our primal problem:
d = Variable(p)
problem = maximize(sum(d), 0 ≤ d, d ≤ 1, Σ ⪰ Diagonal(d))
-@time solve!(problem, SCS.Optimizer; silent_solver = true)
+@time solve!(problem, SCS.Optimizer; silent = true)

# To solve the dual problem instead, we simply call `dual_optimizer` on our
# optimizer function:
-@time solve!(problem, dual_optimizer(SCS.Optimizer); silent_solver = true)
+@time solve!(problem, dual_optimizer(SCS.Optimizer); silent = true)

docs/src/examples/general_examples/huber_regression.jl

Lines changed: 3 additions & 3 deletions

@@ -40,18 +40,18 @@ for i in 1:length(p_vals)
    fit = norm(beta - beta_true) / norm(beta_true)
    cost = norm(X' * beta - Y)
    prob = minimize(cost)
-    solve!(prob, SCS.Optimizer; silent_solver = true)
+    solve!(prob, SCS.Optimizer; silent = true)
    lsq_data[i] = evaluate(fit)

    ## Form and solve a prescient regression problem,
    ## that is, where the sign changes are known.
    cost = norm(factor .* (X' * beta) - Y)
-    solve!(minimize(cost), SCS.Optimizer; silent_solver = true)
+    solve!(minimize(cost), SCS.Optimizer; silent = true)
    prescient_data[i] = evaluate(fit)

    ## Form and solve the Huber regression problem.
    cost = sum(huber(X' * beta - Y, 1))
-    solve!(minimize(cost), SCS.Optimizer; silent_solver = true)
+    solve!(minimize(cost), SCS.Optimizer; silent = true)
    huber_data[i] = evaluate(fit)
end

docs/src/examples/general_examples/lasso_regression.jl

Lines changed: 1 addition & 1 deletion

@@ -66,7 +66,7 @@ function LassoEN(Y, X, γ, λ = 0)
## u'u/T + γ*sum(|b|) where u = Y-Xb
problem = minimize(L1 - 2 * L2 + γ * L3)
end
-solve!(problem, SCS.Optimizer; silent_solver = true)
+solve!(problem, SCS.Optimizer; silent = true)
problem.status == Convex.MOI.OPTIMAL ? b_i = vec(evaluate(b)) : b_i = NaN

return b_i, b_ls

docs/src/examples/general_examples/logistic_regression.jl

Lines changed: 1 addition & 1 deletion

@@ -28,7 +28,7 @@ X = hcat(
n, p = size(X)
beta = Variable(p)
problem = minimize(logisticloss(-Y .* (X * beta)))
-solve!(problem, SCS.Optimizer; silent_solver = true)
+solve!(problem, SCS.Optimizer; silent = true)

# Let's see how well the model fits.
using Plots

docs/src/examples/general_examples/max_entropy.jl

Lines changed: 1 addition & 1 deletion

@@ -23,7 +23,7 @@ b = rand(m, 1);

x = Variable(n);
problem = maximize(entropy(x), sum(x) == 1, A * x <= b)
-solve!(problem, SCS.Optimizer; silent_solver = true)
+solve!(problem, SCS.Optimizer; silent = true)

#-

docs/src/examples/general_examples/optimal_advertising.jl

Lines changed: 1 addition & 1 deletion

@@ -45,7 +45,7 @@ D = Variable(m, n);
Si = [min(R[i] * dot(P[i, :], D[i, :]'), B[i]) for i in 1:m];
problem =
    maximize(sum(Si), [D >= 0, sum(D, dims = 1)' <= T, sum(D, dims = 2) >= c]);
-solve!(problem, SCS.Optimizer; silent_solver = true)
+solve!(problem, SCS.Optimizer; silent = true)

#-

docs/src/examples/general_examples/robust_approx_fitting.jl

Lines changed: 3 additions & 3 deletions

@@ -41,20 +41,20 @@ x = Variable(n)

# ## Case 1: nominal optimal solution
p = minimize(norm(A * x - b, 2))
-solve!(p, SCS.Optimizer; silent_solver = true)
+solve!(p, SCS.Optimizer; silent = true)
#-
x_nom = evaluate(x)

# ## Case 2: stochastic robust approximation
P = 1 / 3 * B' * B;
p = minimize(square(pos(norm(A * x - b))) + quadform(x, Symmetric(P)))
-solve!(p, SCS.Optimizer; silent_solver = true)
+solve!(p, SCS.Optimizer; silent = true)
#-
x_stoch = evaluate(x)

# ## Case 3: worst-case robust approximation
p = minimize(max(norm((A - B) * x - b), norm((A + B) * x - b)))
-solve!(p, SCS.Optimizer; silent_solver = true)
+solve!(p, SCS.Optimizer; silent = true)

#-
x_wc = evaluate(x)
