Skip to content

Commit a2624d7

Browse files
committed
Updated the tutorial files
1 parent dc70405 commit a2624d7

5 files changed

+59
-26
lines changed

examples/tutorials/accelerated_gradient_strongly_convex.tutorial.ipynb

Lines changed: 31 additions & 23 deletions
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,7 @@
1212
},
1313
{
1414
"cell_type": "code",
15-
"execution_count": 1,
15+
"execution_count": 13,
1616
"metadata": {},
1717
"outputs": [],
1818
"source": [
@@ -28,7 +28,7 @@
2828
},
2929
{
3030
"cell_type": "code",
31-
"execution_count": 2,
31+
"execution_count": 14,
3232
"metadata": {},
3333
"outputs": [
3434
{
@@ -37,7 +37,7 @@
3737
"PEP(AbstractFunction[], Point[], Constraint[], Expression[], PSDMatrix[], nothing)"
3838
]
3939
},
40-
"execution_count": 2,
40+
"execution_count": 14,
4141
"metadata": {},
4242
"output_type": "execute_result"
4343
}
@@ -54,12 +54,12 @@
5454
"\n",
5555
"Consider the convex minimization problem\n",
5656
"$$f_\\star \\triangleq \\min_x f(x),$$\n",
57-
"where $f$ is $L$-smooth and $\\mu$-strongly convex. For this example, let us take $\\mu=0.1$ and $L=1$. Let us declare this function type in `PEPit`. "
57+
"where $f$ is $L$-smooth and $\\mu$-strongly convex. For this example, let us take $\\mu=0.1$ and $L=1$. Let us declare this function type in `PEPit` next. Let us start with defining the parameters $\\mu$ and $L$. "
5858
]
5959
},
6060
{
6161
"cell_type": "code",
62-
"execution_count": 3,
62+
"execution_count": 15,
6363
"metadata": {},
6464
"outputs": [
6565
{
@@ -70,7 +70,7 @@
7070
" \"L\" => 1"
7171
]
7272
},
73-
"execution_count": 3,
73+
"execution_count": 15,
7474
"metadata": {},
7575
"output_type": "execute_result"
7676
}
@@ -90,7 +90,7 @@
9090
},
9191
{
9292
"cell_type": "code",
93-
"execution_count": 4,
93+
"execution_count": 16,
9494
"metadata": {},
9595
"outputs": [
9696
{
@@ -99,7 +99,7 @@
9999
"SmoothStronglyConvexFunction(0.1, 1.0, PEPFunction(1, true, OrderedDict{PEPFunction, Float64}(PEPFunction(\u001b[33m#= circular reference @-2 =#\u001b[39m) => 1.0), true, Tuple{Point, Point, Expression}[], Tuple{Point, Point, Expression}[], Constraint[], PSDMatrix[], PSDMatrix[], 0))"
100100
]
101101
},
102-
"execution_count": 4,
102+
"execution_count": 16,
103103
"metadata": {},
104104
"output_type": "execute_result"
105105
}
@@ -141,7 +141,7 @@
141141
},
142142
{
143143
"cell_type": "code",
144-
"execution_count": 5,
144+
"execution_count": 17,
145145
"metadata": {},
146146
"outputs": [
147147
{
@@ -150,7 +150,7 @@
150150
"2"
151151
]
152152
},
153-
"execution_count": 5,
153+
"execution_count": 17,
154154
"metadata": {},
155155
"output_type": "execute_result"
156156
}
@@ -169,7 +169,7 @@
169169
},
170170
{
171171
"cell_type": "code",
172-
"execution_count": 6,
172+
"execution_count": 18,
173173
"metadata": {},
174174
"outputs": [
175175
{
@@ -178,7 +178,7 @@
178178
"Point(5, true, OrderedDict{Point, Float64}(Point(\u001b[33m#= circular reference @-2 =#\u001b[39m) => 1.0), 1, nothing)"
179179
]
180180
},
181-
"execution_count": 6,
181+
"execution_count": 18,
182182
"metadata": {},
183183
"output_type": "execute_result"
184184
}
@@ -202,7 +202,7 @@
202202
},
203203
{
204204
"cell_type": "code",
205-
"execution_count": 7,
205+
"execution_count": 19,
206206
"metadata": {},
207207
"outputs": [
208208
{
@@ -212,7 +212,7 @@
212212
" Constraint(Expression(15, false, OrderedDict{Any, Float64}(Expression(6, true, OrderedDict{Any, Float64}(Expression(\u001b[33m#= circular reference @-2 =#\u001b[39m) => 1.0), 1, nothing) => 1.0, Expression(4, true, OrderedDict{Any, Float64}(Expression(\u001b[33m#= circular reference @-2 =#\u001b[39m) => 1.0), 0, nothing) => -1.0, (Point(5, true, OrderedDict{Point, Float64}(Point(\u001b[33m#= circular reference @-2 =#\u001b[39m) => 1.0), 1, nothing), Point(5, true, OrderedDict{Point, Float64}(Point(\u001b[33m#= circular reference @-2 =#\u001b[39m) => 1.0), 1, nothing)) => 0.05, (Point(5, true, OrderedDict{Point, Float64}(Point(\u001b[33m#= circular reference @-2 =#\u001b[39m) => 1.0), 1, nothing), Point(2, true, OrderedDict{Point, Float64}(Point(\u001b[33m#= circular reference @-2 =#\u001b[39m) => 1.0), 0, nothing)) => -0.05, (Point(2, true, OrderedDict{Point, Float64}(Point(\u001b[33m#= circular reference @-2 =#\u001b[39m) => 1.0), 0, nothing), Point(5, true, OrderedDict{Point, Float64}(Point(\u001b[33m#= circular reference @-2 =#\u001b[39m) => 1.0), 1, nothing)) => -0.05, (Point(2, true, OrderedDict{Point, Float64}(Point(\u001b[33m#= circular reference @-2 =#\u001b[39m) => 1.0), 0, nothing), Point(2, true, OrderedDict{Point, Float64}(Point(\u001b[33m#= circular reference @-2 =#\u001b[39m) => 1.0), 0, nothing)) => 0.05, 1 => -1.0), nothing, nothing), \"inequality\", 0, nothing, nothing)"
213213
]
214214
},
215-
"execution_count": 7,
215+
"execution_count": 19,
216216
"metadata": {},
217217
"output_type": "execute_result"
218218
}
@@ -232,7 +232,7 @@
232232
},
233233
{
234234
"cell_type": "code",
235-
"execution_count": 8,
235+
"execution_count": 20,
236236
"metadata": {},
237237
"outputs": [],
238238
"source": [
@@ -256,7 +256,7 @@
256256
},
257257
{
258258
"cell_type": "code",
259-
"execution_count": 9,
259+
"execution_count": 21,
260260
"metadata": {},
261261
"outputs": [
262262
{
@@ -265,7 +265,7 @@
265265
"Expression(32, true, OrderedDict{Any, Float64}(Expression(\u001b[33m#= circular reference @-2 =#\u001b[39m) => 1.0), 3, nothing)"
266266
]
267267
},
268-
"execution_count": 9,
268+
"execution_count": 21,
269269
"metadata": {},
270270
"output_type": "execute_result"
271271
}
@@ -283,7 +283,7 @@
283283
},
284284
{
285285
"cell_type": "code",
286-
"execution_count": 10,
286+
"execution_count": 22,
287287
"metadata": {},
288288
"outputs": [
289289
{
@@ -293,7 +293,7 @@
293293
" Expression(35, false, OrderedDict{Any, Float64}(Expression(32, true, OrderedDict{Any, Float64}(Expression(\u001b[33m#= circular reference @-2 =#\u001b[39m) => 1.0), 3, nothing) => 1.0, Expression(4, true, OrderedDict{Any, Float64}(Expression(\u001b[33m#= circular reference @-2 =#\u001b[39m) => 1.0), 0, nothing) => -1.0), nothing, nothing)"
294294
]
295295
},
296-
"execution_count": 10,
296+
"execution_count": 22,
297297
"metadata": {},
298298
"output_type": "execute_result"
299299
}
@@ -312,7 +312,7 @@
312312
},
313313
{
314314
"cell_type": "code",
315-
"execution_count": 11,
315+
"execution_count": 23,
316316
"metadata": {},
317317
"outputs": [
318318
{
@@ -387,7 +387,7 @@
387387
"0.34760221803672986"
388388
]
389389
},
390-
"execution_count": 11,
390+
"execution_count": 23,
391391
"metadata": {},
392392
"output_type": "execute_result"
393393
}
@@ -413,7 +413,7 @@
413413
},
414414
{
415415
"cell_type": "code",
416-
"execution_count": 12,
416+
"execution_count": 24,
417417
"metadata": {},
418418
"outputs": [
419419
{
@@ -437,6 +437,14 @@
437437
"cell_type": "markdown",
438438
"metadata": {},
439439
"source": [
440+
"We should see the output:\n",
441+
"\n",
442+
"````julia\n",
443+
"PEPit guarantee: f(x_n)-f_* <= 0.347602 (f(x_0) - f(x_*) + mu/2*||x_0 - x_*||^2)\n",
444+
"\n",
445+
"Theoretical guarantee: f(x_n)-f_* <= 0.467544 (f(x_0) - f(x_*) + mu/2*||x_0 - x_*||^2)\n",
446+
"````\n",
447+
"\n",
440448
"So the guarantee that we found via `PEPit` is tighter than the theoretical guarantee!\n",
441449
"\n",
442450
"**References**:\n",
@@ -448,7 +456,7 @@
448456
],
449457
"metadata": {
450458
"kernelspec": {
451-
"display_name": "Julia 1.12",
459+
"display_name": "Julia 1.12.2",
452460
"language": "julia",
453461
"name": "julia-1.12"
454462
},

examples/tutorials/accelerated_gradient_strongly_convex.tutorial.jl

Lines changed: 9 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -23,7 +23,7 @@ problem = PEP()
2323
2424
Consider the convex minimization problem
2525
$$f_\star \triangleq \min_x f(x),$$
26-
where $f$ is $L$-smooth and $\mu$-strongly convex. For this example, let us take $\mu=0.1$ and $L=1$. Let us declare this function type in `PEPit`.
26+
where $f$ is $L$-smooth and $\mu$-strongly convex. For this example, let us take $\mu=0.1$ and $L=1$. Let us declare this function type in `PEPit` next. Let us start with defining the parameters $\mu$ and $L$.
2727
2828
=#
2929

@@ -160,6 +160,14 @@ Let us compute the theoretical value of $\tau$.
160160

161161
#=
162162
163+
We should see the output:
164+
165+
````julia
166+
PEPit guarantee: f(x_n)-f_* <= 0.347602 (f(x_0) - f(x_*) + mu/2*||x_0 - x_*||^2)
167+
168+
Theoretical guarantee: f(x_n)-f_* <= 0.467544 (f(x_0) - f(x_*) + mu/2*||x_0 - x_*||^2)
169+
````
170+
163171
So the guarantee that we found via `PEPit` is tighter than the theoretical guarantee!
164172
165173
**References**:

examples/tutorials/accelerated_gradient_strongly_convex.tutorial.md

Lines changed: 9 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,7 @@ problem = PEP()
1717

1818
Consider the convex minimization problem
1919
$$f_\star \triangleq \min_x f(x),$$
20-
where $f$ is $L$-smooth and $\mu$-strongly convex. For this example, let us take $\mu=0.1$ and $L=1$. Let us declare this function type in `PEPit`.
20+
where $f$ is $L$-smooth and $\mu$-strongly convex. For this example, let us take $\mu=0.1$ and $L=1$. Let us declare this function type in `PEPit` next. Let us start with defining the parameters $\mu$ and $L$.
2121

2222
````julia
2323
μ = 0.1
@@ -133,6 +133,14 @@ Let us compute the theoretical value of $\tau$.
133133
@info "📝 Theoretical guarantee: f(x_n)-f_* <= $(round(τ_Theory, digits=6)) (f(x_0) - f(x_*) + mu/2*||x_0 - x_*||^2)"
134134
````
135135
136+
We should see the output:
137+
138+
````julia
139+
PEPit guarantee: f(x_n)-f_* <= 0.347602 (f(x_0) - f(x_*) + mu/2*||x_0 - x_*||^2)
140+
141+
Theoretical guarantee: f(x_n)-f_* <= 0.467544 (f(x_0) - f(x_*) + mu/2*||x_0 - x_*||^2)
142+
````
143+
136144
So the guarantee that we found via `PEPit` is tighter than the theoretical guarantee!
137145
138146
**References**:
1011 Bytes
Binary file not shown.

examples/tutorials/accelerated_gradient_strongly_convex.tutorial.tex

Lines changed: 10 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -42,7 +42,7 @@ \section{Problem in consideration}
4242
\begin{equation*}
4343
f_\star \triangleq \min_x f(x),
4444
\end{equation*}
45-
where $f$ is $L$-smooth and $\mu$-strongly convex. For this example, let us take $\mu=0.1$ and $L=1$. Let us declare this function type in \texttt{PEPit}.
45+
where $f$ is $L$-smooth and $\mu$-strongly convex. For this example, let us take $\mu=0.1$ and $L=1$. Let us declare this function type in \texttt{PEPit} next. Let us start with defining the parameters $\mu$ and $L$.
4646

4747
\begin{tcblisting}{listing only, minted language=julia, minted options={mathescape=true, breaklines}, colback=lightgray, colframe=black, boxrule=0pt, toprule=2pt, bottomrule=2pt, arc=0pt, outer arc=0pt, left=5pt, right=5pt, top=5pt, bottom=5pt}
4848
μ = 0.1
@@ -172,6 +172,15 @@ \subsection{Comparison with theoretical results}
172172
\end{tcblisting}
173173

174174

175+
We should see the output:
176+
177+
\begin{tcblisting}{listing only, minted language=julia, minted options={mathescape=true, breaklines}, colback=lightgray, colframe=black, boxrule=0pt, toprule=2pt, bottomrule=2pt, arc=0pt, outer arc=0pt, left=5pt, right=5pt, top=5pt, bottom=5pt}
178+
PEPit guarantee: f(x_n)-f_* <= 0.347602 (f(x_0) - f(x_*) + mu/2*||x_0 - x_*||^2)
179+
180+
Theoretical guarantee: f(x_n)-f_* <= 0.467544 (f(x_0) - f(x_*) + mu/2*||x_0 - x_*||^2)
181+
\end{tcblisting}
182+
183+
175184
So the guarantee that we found via \texttt{PEPit} is tighter than the theoretical guarantee!
176185

177186
\textbf{References}:

0 commit comments

Comments
 (0)