
Commit c20c7ec

[docs] small tweaks to examples (#666)
* small tweaks to examples
* Apply suggestions from code review
1 parent 66d4e6c commit c20c7ec

18 files changed: 66 additions, 44 deletions.

docs/src/examples/general_examples/DCP_analysis.jl

Lines changed: 6 additions & 2 deletions
@@ -2,6 +2,10 @@
 using Convex
 x = Variable();
 y = Variable();
-expr = quadoverlin(x - y, 1 - max(x, y));
-println("expression curvature = ", vexity(expr));
+expr = quadoverlin(x - y, 1 - max(x, y))
+
+# We can see from the printing of the expression that this `quadoverlin` (`qol`) atom
+# is convex with positive sign. We can query these programmatically using the `vexity`
+# and `sign` functions:
+println("expression convexity = ", vexity(expr));
 println("expression sign = ", sign(expr));

docs/src/examples/general_examples/basic_usage.jl

Lines changed: 11 additions & 8 deletions
@@ -1,8 +1,10 @@
 # # Basic Usage
 
+# First we load Convex itself, LinearAlgebra to access the identity matrix `I`,
+# and two solvers: SCS and GLPK.
 using Convex
 using LinearAlgebra
-using SCS
+using SCS, GLPK
 
 # ### Linear program
 #
@@ -26,6 +28,7 @@ constraints = [A * x <= b, x >= 1, x <= 10, x[2] <= 5, x[1] + x[4] - x[2] <= 10]
 p = minimize(dot(c, x), constraints) # or c' * x
 solve!(p, SCS.Optimizer; silent_solver = true)
 
+# We can also inspect the objective value and the values of the variables at the solution:
 println(round(p.optval, digits = 2))
 println(round.(evaluate(x), digits = 2))
 println(evaluate(x[1] + x[4] - x[2]))
@@ -48,6 +51,8 @@ y = Variable()
 ## X is a 2 x 2 variable, and y is scalar. X' + y promotes y to a 2 x 2 variable before adding them
 p = minimize(norm(X) + y, 2 * X <= 1, X' + y >= 1, X >= 0, y >= 0)
 solve!(p, SCS.Optimizer; silent_solver = true)
+
+# We can also inspect the values of the variables at the solution:
 println(round.(evaluate(X), digits = 2))
 println(evaluate(y))
 p.optval
@@ -72,24 +77,21 @@ p = satisfy(
     geomean(x[3], x[4]) >= x[2],
 )
 solve!(p, SCS.Optimizer; silent_solver = true)
-println(p.status)
-evaluate(x)
 
-# ### SDP cone and Eigenvalues
+# ### PSD cone and Eigenvalues
 
 y = Semidefinite(2)
 p = maximize(eigmin(y), tr(y) <= 6)
 solve!(p, SCS.Optimizer; silent_solver = true)
-p.optval
 
 #-
 
 x = Variable()
 y = Variable((2, 2))
-## SDP constraints
+
+## PSD constraints
 p = minimize(x + y[1, 1], y ⪰ 0, x >= 1, y[2, 1] == 1)
 solve!(p, SCS.Optimizer; silent_solver = true)
-evaluate(y)
 
 # ### Mixed integer program
 #
@@ -102,9 +104,10 @@ evaluate(y)
 # ```
 #
 
-using GLPK
 x = Variable(4, IntVar)
 p = minimize(sum(x), x >= 0.5)
 solve!(p, GLPK.Optimizer; silent_solver = true)
+
+# And the value of `x` at the solution:
 evaluate(x)
 #-
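The mixed-integer hunk above now relies on `using GLPK` from the top of the file. As a standalone sketch (assuming GLPK.jl is installed), that example is:

using Convex, GLPK

# Four integer-valued decision variables.
x = Variable(4, IntVar)

# Each entry must be at least 0.5; integrality rounds the optimum up to 1.
p = minimize(sum(x), x >= 0.5)
solve!(p, GLPK.Optimizer; silent_solver = true)

evaluate(x)  # approximately [1.0, 1.0, 1.0, 1.0]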

docs/src/examples/general_examples/chebyshev_center.jl

Lines changed: 0 additions & 1 deletion
@@ -30,7 +30,6 @@ constraints = [
 ]
 p = maximize(r, constraints)
 solve!(p, SCS.Optimizer; silent_solver = true)
-p.optval
 
 # Generate the figure
 x = range(-1.5, stop = 1.5, length = 100);

docs/src/examples/general_examples/dualization.jl

Lines changed: 2 additions & 2 deletions
@@ -25,8 +25,8 @@ p = 50
 # Now we formulate and solve our primal problem:
 d = Variable(p)
 problem = maximize(sum(d), 0 ≤ d, d ≤ 1, Σ ⪰ Diagonal(d))
-@elapsed solve!(problem, SCS.Optimizer; silent_solver = true)
+@time solve!(problem, SCS.Optimizer; silent_solver = true)
 
 # To solve the dual problem instead, we simply call `dual_optimizer` on our
 # optimizer function:
-@elapsed solve!(problem, dual_optimizer(SCS.Optimizer); silent_solver = true)
+@time solve!(problem, dual_optimizer(SCS.Optimizer); silent_solver = true)

docs/src/examples/general_examples/huber_regression.jl

Lines changed: 2 additions & 2 deletions
@@ -22,8 +22,8 @@ v = randn(number_samples);
 
 #-
 
-## Generate data for different values of p.
-## Solve the resulting problems.
+# Generate data for different values of p.
+# Solve the resulting problems.
 using Convex, SCS, Distributions
 lsq_data = zeros(number_tests);
 huber_data = zeros(number_tests);
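The comments above describe fitting a Huber regression for each generated data set. A minimal sketch of one such fit using Convex.jl's `huber` atom (the data and variable names below are illustrative, not from the example):

using Convex, SCS

# Illustrative data for a single fit.
number_samples = 100
X = randn(number_samples, 3)
beta_true = [1.0, -0.5, 2.0]
y = X * beta_true + 0.1 * randn(number_samples)

# Huber loss: quadratic for small residuals, linear beyond width M = 1,
# which makes the fit robust to outliers.
beta = Variable(3)
problem = minimize(sum(huber(X * beta - y, 1)))
solve!(problem, SCS.Optimizer; silent_solver = true)
evaluate(beta)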

docs/src/examples/general_examples/lasso_regression.jl

Lines changed: 13 additions & 15 deletions
@@ -8,23 +8,19 @@
 
 using DelimitedFiles, LinearAlgebra, Statistics, Plots, Convex, SCS
 
-# # Loading Data
+# ## Loading Data
 #
 # We use the diabetes data from Efron et al, downloaded from https://web.stanford.edu/~hastie/StatLearnSparsity_files/DATA/diabetes.html and then converted from a tab to a comma delimited file.
 #
 # All data series are standardised (see below) to have zero means and unit standard deviation, which improves the numerical stability. (Efron et al do not standardise the scale of the response variable.)
 
 x, header =
     readdlm(joinpath(@__DIR__, "aux_files/diabetes.csv"), ',', header = true)
-#display(header)
-#display(x)
+x = (x .- mean(x, dims = 1)) ./ std(x, dims = 1) # standardise
+(Y, X) = (x[:, end], x[:, 1:end-1]); # to get traditional names
+xNames = header[1:end-1]
 
-x = (x .- mean(x, dims = 1)) ./ std(x, dims = 1) #standardise
-
-(Y, X) = (x[:, end], x[:, 1:end-1]); #to get traditional names
-xNames = header[1:end-1];
-
-# # Lasso, Ridge and Elastic Net Regressions
+# ## Lasso, Ridge and Elastic Net Regressions
 #
 # (a) The regression is $Y = Xb + u$,
 # where $Y$ and $u$ are $T \times 1$, $X$ is $T \times K$, and $b$ is the $K$-vector of regression coefficients.
@@ -64,12 +60,14 @@ function LassoEN(Y, X, γ, λ = 0)
     L4 = sumsquares(b) #sum(b^2)
 
     if λ > 0
-        Sol = minimize(L1 - 2 * L2 + γ * L3 + λ * L4) #u'u/T + γ*sum(|b|) + λ*sum(b^2), where u = Y-Xb
+        ## u'u/T + γ*sum(|b|) + λ*sum(b^2), where u = Y-Xb
+        problem = minimize(L1 - 2 * L2 + γ * L3 + λ * L4)
     else
-        Sol = minimize(L1 - 2 * L2 + γ * L3) #u'u/T + γ*sum(|b|) where u = Y-Xb
+        ## u'u/T + γ*sum(|b|) where u = Y-Xb
+        problem = minimize(L1 - 2 * L2 + γ * L3)
     end
-    solve!(Sol, SCS.Optimizer; silent_solver = true)
-    Sol.status == Convex.MOI.OPTIMAL ? b_i = vec(evaluate(b)) : b_i = NaN
+    solve!(problem, SCS.Optimizer; silent_solver = true)
+    problem.status == Convex.MOI.OPTIMAL ? b_i = vec(evaluate(b)) : b_i = NaN
 
     return b_i, b_ls
 end
@@ -82,7 +80,7 @@ K = size(X, 2)
 (b, b_ls) = LassoEN(Y, X, γ)
 
 println("OLS and Lasso coeffs (with γ=$γ)")
-display([["" "OLS" "Lasso"]; xNames b_ls b])
+println([["" "OLS" "Lasso"]; xNames b_ls b])
 
 # # Redo the Lasso Regression with Different Gamma Values
 #
@@ -111,7 +109,7 @@ plot(
     size = (600, 400),
 )
 
-# # Ridge Regression
+# ## Ridge Regression
 #
 # We use the same function to do a ridge regression. Alternatively, do `b = inv(X'X/T + λ*I)*X'Y/T`.
 
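The refactored `LassoEN` above minimizes u'u/T + γ*sum(|b|) (+ λ*sum(b^2)). A self-contained sketch of the same objective on synthetic data; the data, and the expansion of u'u/T into `quadform`/`dot` terms, are our illustration consistent with the comments in the diff:

using Convex, SCS, LinearAlgebra

# Synthetic data standing in for the diabetes set.
T, K = 200, 5
X = randn(T, K)
b_true = [1.0, 0.5, 0.0, 0.0, -0.8]
Y = X * b_true + 0.1 * randn(T)

# u'u/T = Y'Y/T - 2*b'(X'Y/T) + b'(X'X/T)b; the constant Y'Y/T is dropped.
b = Variable(K)
γ = 0.05
L1 = quadform(b, Symmetric(X'X / T))
L2 = dot(X'Y / T, b)
L3 = norm(b, 1)

problem = minimize(L1 - 2 * L2 + γ * L3)
solve!(problem, SCS.Optimizer; silent_solver = true)
round.(vec(evaluate(b)), digits = 2)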

docs/src/examples/general_examples/max_entropy.jl

Lines changed: 0 additions & 1 deletion
@@ -24,7 +24,6 @@ b = rand(m, 1);
 x = Variable(n);
 problem = maximize(entropy(x), sum(x) == 1, A * x <= b)
 solve!(problem, SCS.Optimizer; silent_solver = true)
-problem.optval
 
 #-
 

docs/src/examples/general_examples/robust_approx_fitting.jl

Lines changed: 9 additions & 4 deletions
@@ -39,23 +39,28 @@ B = B / norm(B);
 b = randn(m, 1);
 x = Variable(n)
 
-# Case 1: nominal optimal solution
+# ## Case 1: nominal optimal solution
 p = minimize(norm(A * x - b, 2))
 solve!(p, SCS.Optimizer; silent_solver = true)
+#-
 x_nom = evaluate(x)
 
-# Case 2: stochastic robust approximation
+# ## Case 2: stochastic robust approximation
 P = 1 / 3 * B' * B;
 p = minimize(square(pos(norm(A * x - b))) + quadform(x, Symmetric(P)))
 solve!(p, SCS.Optimizer; silent_solver = true)
+#-
 x_stoch = evaluate(x)
 
-# Case 3: worst-case robust approximation
+# ## Case 3: worst-case robust approximation
 p = minimize(max(norm((A - B) * x - b), norm((A + B) * x - b)))
 solve!(p, SCS.Optimizer; silent_solver = true)
+
+#-
 x_wc = evaluate(x)
 
-# Plot residuals:
+# ## Plots
+# Here we plot the residuals.
 parvals = range(-2, stop = 2, length = 100);
 
 errvals(x) = [norm((A + parvals[k] * B) * x - b) for k in eachindex(parvals)]

docs/src/examples/general_examples/trade_off_curves.jl

Lines changed: 3 additions & 1 deletion
@@ -1,4 +1,6 @@
-# # Trade-off curves
+# # Regularized least-squares
+# Here we solve some constrained least-squares problems with 1-norm regularization,
+# and plot how the solution changes with increasing regularization.
 using Random
 Random.seed!(1)
 m = 25;
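The new title and intro describe a regularization sweep. A minimal sketch of that workflow; the problem data and names below are illustrative, not taken from the example:

using Convex, SCS

# Illustrative problem data.
m, n = 25, 10
A = randn(m, n)
b = randn(m)

x = Variable(n)
lambdas = 10 .^ range(-2, 2, length = 20)
solutions = zeros(n, length(lambdas))

# Re-solve the 1-norm regularized least-squares problem for each weight λ
# and record how the solution changes.
for (i, λ) in enumerate(lambdas)
    problem = minimize(sumsquares(A * x - b) + λ * norm(x, 1))
    solve!(problem, SCS.Optimizer; silent_solver = true)
    solutions[:, i] = vec(evaluate(x))
end
# Entries of `solutions` shrink toward zero as λ grows.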

docs/src/examples/general_examples/worst_case_analysis.jl

Lines changed: 2 additions & 0 deletions
@@ -19,6 +19,7 @@ ret = dot(r, w);
 risk = sum(quadform(w, Sigma_nom));
 problem = minimize(risk, [sum(w) == 1, ret >= 0.1, norm(w, 1) <= 2])
 solve!(problem, SCS.Optimizer; silent_solver = true)
+#-
 wval = vec(evaluate(w))
 
 #-
@@ -37,6 +38,7 @@ problem = maximize(
     ],
 );
 solve!(problem, SCS.Optimizer; silent_solver = true)
+#-
 println(
     "standard deviation = ",
     round(sqrt(wval' * Sigma_nom * wval), sigdigits = 2),
