Skip to content

Commit 6c8db39

Browse files
committed
DOC: update lightning doc
1 parent 381ad3c commit 6c8db39

30 files changed

+171
-18
lines changed

lightning/_downloads/plot_1d_total_variation.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -12,10 +12,11 @@
1212
y = sign(X ground_truth + noise)
1313
1414
where X is a random matrix. We obtain the vector ground_truth by solving
15-
an optimization problem using lightning FistaClassifier.
15+
an optimization problem using lightning's :class:`lightning.classification.FistaClassifier`.
1616
1717
The 1D total variation is also known as fused lasso.
1818
"""
19+
# Author: Fabian Pedregosa <[email protected]>
1920

2021
import numpy as np
2122
import matplotlib.pyplot as plt

lightning/_modules/lightning/impl/sdca.html

Lines changed: 44 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -281,6 +281,29 @@ <h1>Source code for lightning.impl.sdca</h1><div class="highlight"><pre>
281281
<span class="sd"> minimize_w 1 / n_samples * \sum_i loss(w^T x_i, y_i)</span>
282282
<span class="sd"> + alpha * l1_ratio * ||w||_1</span>
283283
<span class="sd"> + alpha * (1 - l1_ratio) * 0.5 * ||w||^2_2</span>
284+
285+
<span class="sd"> Parameters</span>
286+
<span class="sd"> ----------</span>
287+
<span class="sd"> loss: string, {&#39;squared&#39;, &#39;absolute&#39;, &#39;hinge&#39;, &#39;smooth_hinge&#39;, &#39;squared_hinge&#39;}</span>
288+
<span class="sd"> Loss function to use in the model.</span>
289+
<span class="sd"> alpha: float</span>
290+
<span class="sd"> Amount of regularization (see model formulation above).</span>
291+
<span class="sd"> l1_ratio: float</span>
292+
<span class="sd"> Ratio between the L1 and L2 regularization (see model formulation above).</span>
293+
<span class="sd"> gamma : float</span>
294+
<span class="sd"> gamma parameter in the &quot;smooth_hinge&quot; loss (not used for other</span>
295+
<span class="sd"> loss functions)</span>
296+
<span class="sd"> tol : float</span>
297+
<span class="sd"> stopping criterion tolerance.</span>
298+
<span class="sd"> max_iter : int</span>
299+
<span class="sd"> maximum number of outer iterations (also known as epochs).</span>
300+
<span class="sd"> verbose : int</span>
301+
<span class="sd"> verbosity level. Set positive to print progress information.</span>
302+
<span class="sd"> callback : callable or None</span>
303+
<span class="sd"> if given, callback(self) will be called on each outer iteration</span>
304+
<span class="sd"> (epoch).</span>
305+
<span class="sd"> random_state: int or RandomState</span>
306+
<span class="sd"> Pseudo-random number generator state used for random sampling.</span>
284307
<span class="sd"> &quot;&quot;&quot;</span>
285308

286309
<div class="viewcode-block" id="SDCAClassifier.__init__"><a class="viewcode-back" href="../../../generated/lightning.classification.SDCAClassifier.html#lightning.classification.SDCAClassifier.__init__">[docs]</a> <span class="k">def</span> <span class="nf">__init__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">alpha</span><span class="o">=</span><span class="mf">1.0</span><span class="p">,</span> <span class="n">l1_ratio</span><span class="o">=</span><span class="mi">0</span><span class="p">,</span> <span class="n">loss</span><span class="o">=</span><span class="s2">&quot;hinge&quot;</span><span class="p">,</span> <span class="n">gamma</span><span class="o">=</span><span class="mf">1.0</span><span class="p">,</span>
@@ -323,15 +346,34 @@ <h1>Source code for lightning.impl.sdca</h1><div class="highlight"><pre>
323346
<span class="sd"> minimize_w 1 / n_samples * \sum_i loss(w^T x_i, y_i)</span>
324347
<span class="sd"> + alpha * l1_ratio * ||w||_1</span>
325348
<span class="sd"> + alpha * (1 - l1_ratio) * 0.5 * ||w||^2_2</span>
349+
<span class="sd"> Parameters</span>
350+
<span class="sd"> ----------</span>
351+
<span class="sd"> loss: string, {&#39;squared&#39;, &#39;absolute&#39;}</span>
352+
<span class="sd"> Loss function to use in the model.</span>
353+
<span class="sd"> alpha: float</span>
354+
<span class="sd"> Amount of regularization (see model formulation above).</span>
355+
<span class="sd"> l1_ratio: float</span>
356+
<span class="sd"> Ratio between the L1 and L2 regularization (see model formulation above).</span>
357+
<span class="sd"> tol : float</span>
358+
<span class="sd"> stopping criterion tolerance.</span>
359+
<span class="sd"> max_iter : int</span>
360+
<span class="sd"> maximum number of outer iterations (also known as epochs).</span>
361+
<span class="sd"> verbose : int</span>
362+
<span class="sd"> verbosity level. Set positive to print progress information.</span>
363+
<span class="sd"> callback : callable or None</span>
364+
<span class="sd"> if given, callback(self) will be called on each outer iteration</span>
365+
<span class="sd"> (epoch).</span>
366+
<span class="sd"> random_state: int or RandomState</span>
367+
<span class="sd"> Pseudo-random number generator state used for random sampling.</span>
326368
<span class="sd"> &quot;&quot;&quot;</span>
327369

328-
<div class="viewcode-block" id="SDCARegressor.__init__"><a class="viewcode-back" href="../../../generated/lightning.regression.SDCARegressor.html#lightning.classification.SDCARegressor.__init__">[docs]</a> <span class="k">def</span> <span class="nf">__init__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">alpha</span><span class="o">=</span><span class="mf">1.0</span><span class="p">,</span> <span class="n">l1_ratio</span><span class="o">=</span><span class="mi">0</span><span class="p">,</span> <span class="n">loss</span><span class="o">=</span><span class="s2">&quot;squared&quot;</span><span class="p">,</span> <span class="n">gamma</span><span class="o">=</span><span class="mf">1.0</span><span class="p">,</span>
370+
<div class="viewcode-block" id="SDCARegressor.__init__"><a class="viewcode-back" href="../../../generated/lightning.regression.SDCARegressor.html#lightning.classification.SDCARegressor.__init__">[docs]</a> <span class="k">def</span> <span class="nf">__init__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">alpha</span><span class="o">=</span><span class="mf">1.0</span><span class="p">,</span> <span class="n">l1_ratio</span><span class="o">=</span><span class="mi">0</span><span class="p">,</span> <span class="n">loss</span><span class="o">=</span><span class="s2">&quot;squared&quot;</span><span class="p">,</span>
329371
<span class="n">max_iter</span><span class="o">=</span><span class="mi">100</span><span class="p">,</span> <span class="n">tol</span><span class="o">=</span><span class="mf">1e-3</span><span class="p">,</span> <span class="n">callback</span><span class="o">=</span><span class="bp">None</span><span class="p">,</span> <span class="n">n_calls</span><span class="o">=</span><span class="bp">None</span><span class="p">,</span> <span class="n">verbose</span><span class="o">=</span><span class="mi">0</span><span class="p">,</span>
330372
<span class="n">random_state</span><span class="o">=</span><span class="bp">None</span><span class="p">):</span>
331373
<span class="bp">self</span><span class="o">.</span><span class="n">alpha</span> <span class="o">=</span> <span class="n">alpha</span>
332374
<span class="bp">self</span><span class="o">.</span><span class="n">l1_ratio</span> <span class="o">=</span> <span class="n">l1_ratio</span>
333375
<span class="bp">self</span><span class="o">.</span><span class="n">loss</span> <span class="o">=</span> <span class="n">loss</span>
334-
<span class="bp">self</span><span class="o">.</span><span class="n">gamma</span> <span class="o">=</span> <span class="n">gamma</span>
376+
<span class="bp">self</span><span class="o">.</span><span class="n">gamma</span> <span class="o">=</span> <span class="mf">1.0</span>
335377
<span class="bp">self</span><span class="o">.</span><span class="n">max_iter</span> <span class="o">=</span> <span class="n">max_iter</span>
336378
<span class="bp">self</span><span class="o">.</span><span class="n">tol</span> <span class="o">=</span> <span class="n">tol</span>
337379
<span class="bp">self</span><span class="o">.</span><span class="n">callback</span> <span class="o">=</span> <span class="n">callback</span>

lightning/_sources/auto_examples/plot_1d_total_variation.txt

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -16,7 +16,7 @@ Given a ground truth vectors, the signal that we observe is given by
1616
y = sign(X ground_truth + noise)
1717

1818
where X is a random matrix. We obtain the vector ground_truth by solving
19-
an optimization problem using lightning FistaClassifier.
19+
an optimization problem using lightning's :class:`lightning.classification.FistaClassifier`.
2020

2121
The 1D total variation is also known as fused lasso.
2222

@@ -33,6 +33,6 @@ The 1D total variation is also known as fused lasso.
3333
.. literalinclude:: plot_1d_total_variation.py
3434
:lines: 19-
3535

36-
**Total running time of the example:** 0.90 seconds
37-
( 0 minutes 0.90 seconds)
36+
**Total running time of the example:** 0.92 seconds
37+
( 0 minutes 0.92 seconds)
3838

lightning/auto_examples/plot_1d_total_variation.html

Lines changed: 5 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -208,11 +208,12 @@
208208
<blockquote>
209209
<div>y = sign(X ground_truth + noise)</div></blockquote>
210210
<p>where X is a random matrix. We obtain the vector ground_truth by solving
211-
an optimization problem using lightning FistaClassifier.</p>
211+
an optimization problem using lightning&#8217;s <a class="reference internal" href="../generated/lightning.classification.FistaClassifier.html#lightning.classification.FistaClassifier" title="lightning.classification.FistaClassifier"><code class="xref py py-class docutils literal"><span class="pre">lightning.classification.FistaClassifier</span></code></a>.</p>
212212
<p>The 1D total variation is also known as fused lasso.</p>
213213
<img alt="../_images/plot_1d_total_variation_001.png" class="align-center" src="../_images/plot_1d_total_variation_001.png" />
214214
<p><strong>Python source code:</strong> <a class="reference download internal" href="../_downloads/plot_1d_total_variation.py"><code class="xref download docutils literal"><span class="pre">plot_1d_total_variation.py</span></code></a></p>
215-
<div class="highlight-python"><div class="highlight"><pre><span></span>
215+
<div class="highlight-python"><div class="highlight"><pre><span></span><span class="c1"># Author: Fabian Pedregosa &lt;[email protected]&gt;</span>
216+
216217
<span class="kn">import</span> <span class="nn">numpy</span> <span class="kn">as</span> <span class="nn">np</span>
217218
<span class="kn">import</span> <span class="nn">matplotlib.pyplot</span> <span class="kn">as</span> <span class="nn">plt</span>
218219
<span class="kn">from</span> <span class="nn">lightning.classification</span> <span class="kn">import</span> <span class="n">FistaClassifier</span>
@@ -243,8 +244,8 @@
243244
<a href="http://matplotlib.org/api/pyplot_api.html#matplotlib.pyplot.show"><span class="n">plt</span><span class="o">.</span><span class="n">show</span></a><span class="p">()</span>
244245
</pre></div>
245246
</div>
246-
<p><strong>Total running time of the example:</strong> 0.90 seconds
247-
( 0 minutes 0.90 seconds)</p>
247+
<p><strong>Total running time of the example:</strong> 0.92 seconds
248+
( 0 minutes 0.92 seconds)</p>
248249
</div>
249250

250251

lightning/auto_examples/plot_l2_solvers.html

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -306,7 +306,7 @@
306306
<span class="n">clf6</span> <span class="o">=</span> <span class="n">SAGClassifier</span><span class="p">(</span><span class="n">loss</span><span class="o">=</span><span class="s2">&quot;squared_hinge&quot;</span><span class="p">,</span> <span class="n">alpha</span><span class="o">=</span><span class="n">alpha</span><span class="p">,</span>
307307
<span class="n">max_iter</span><span class="o">=</span><span class="mi">100</span><span class="p">,</span> <span class="n">random_state</span><span class="o">=</span><span class="mi">0</span><span class="p">,</span> <span class="n">tol</span><span class="o">=</span><span class="n">tol</span><span class="p">)</span>
308308

309-
<span class="n">plt</span><span class="o">.</span><span class="n">figure</span><span class="p">()</span>
309+
<a href="http://matplotlib.org/api/figure_api.html#matplotlib.figure"><span class="n">plt</span><span class="o">.</span><span class="n">figure</span></a><span class="p">()</span>
310310

311311
<span class="n">data</span> <span class="o">=</span> <span class="p">{}</span>
312312
<span class="k">for</span> <span class="n">clf</span><span class="p">,</span> <span class="n">name</span> <span class="ow">in</span> <span class="p">((</span><span class="n">clf1</span><span class="p">,</span> <span class="s2">&quot;SVRG&quot;</span><span class="p">),</span>
@@ -335,7 +335,7 @@
335335
<a href="http://matplotlib.org/api/pyplot_api.html#matplotlib.pyplot.yscale"><span class="n">plt</span><span class="o">.</span><span class="n">yscale</span></a><span class="p">(</span><span class="s1">&#39;log&#39;</span><span class="p">)</span>
336336
<a href="http://matplotlib.org/api/pyplot_api.html#matplotlib.pyplot.xlabel"><span class="n">plt</span><span class="o">.</span><span class="n">xlabel</span></a><span class="p">(</span><span class="s2">&quot;CPU time&quot;</span><span class="p">)</span>
337337
<a href="http://matplotlib.org/api/pyplot_api.html#matplotlib.pyplot.ylabel"><span class="n">plt</span><span class="o">.</span><span class="n">ylabel</span></a><span class="p">(</span><span class="s2">&quot;Objective value minus optimum&quot;</span><span class="p">)</span>
338-
<a href="http://matplotlib.org/api/legend_api.html#matplotlib.legend"><span class="n">plt</span><span class="o">.</span><span class="n">legend</span></a><span class="p">()</span>
338+
<span class="n">plt</span><span class="o">.</span><span class="n">legend</span><span class="p">()</span>
339339
<a href="http://matplotlib.org/api/pyplot_api.html#matplotlib.pyplot.grid"><span class="n">plt</span><span class="o">.</span><span class="n">grid</span></a><span class="p">()</span>
340340

341341
<a href="http://matplotlib.org/api/pyplot_api.html#matplotlib.pyplot.show"><span class="n">plt</span><span class="o">.</span><span class="n">show</span></a><span class="p">()</span>

lightning/auto_examples/plot_svrg.html

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -248,7 +248,7 @@
248248
<span class="n">test_time</span> <span class="o">=</span> <span class="n">time</span><span class="o">.</span><span class="n">clock</span><span class="p">()</span>
249249
<span class="n">clf</span><span class="o">.</span><span class="n">_finalize_coef</span><span class="p">()</span>
250250
<span class="n">y_pred</span> <span class="o">=</span> <span class="n">clf</span><span class="o">.</span><span class="n">decision_function</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">X</span><span class="p">)</span><span class="o">.</span><span class="n">ravel</span><span class="p">()</span>
251-
<span class="n">loss</span> <span class="o">=</span> <span class="p">(</span><a href="http://docs.scipy.org/doc/numpy-1.6.0/reference/generated/numpy.maximum.html#numpy.maximum"><span class="n">np</span><span class="o">.</span><span class="n">maximum</span></a><span class="p">(</span><span class="mi">1</span> <span class="o">-</span> <span class="bp">self</span><span class="o">.</span><span class="n">y</span> <span class="o">*</span> <span class="n">y_pred</span><span class="p">,</span> <span class="mi">0</span><span class="p">)</span> <span class="o">**</span> <span class="mi">2</span><span class="p">)</span><span class="o">.</span><span class="n">mean</span><span class="p">()</span>
251+
<span class="n">loss</span> <span class="o">=</span> <span class="p">(</span><span class="n">np</span><span class="o">.</span><span class="n">maximum</span><span class="p">(</span><span class="mi">1</span> <span class="o">-</span> <span class="bp">self</span><span class="o">.</span><span class="n">y</span> <span class="o">*</span> <span class="n">y_pred</span><span class="p">,</span> <span class="mi">0</span><span class="p">)</span> <span class="o">**</span> <span class="mi">2</span><span class="p">)</span><span class="o">.</span><span class="n">mean</span><span class="p">()</span>
252252
<span class="n">coef</span> <span class="o">=</span> <span class="n">clf</span><span class="o">.</span><span class="n">coef_</span><span class="o">.</span><span class="n">ravel</span><span class="p">()</span>
253253
<span class="n">regul</span> <span class="o">=</span> <span class="mf">0.5</span> <span class="o">*</span> <span class="n">clf</span><span class="o">.</span><span class="n">alpha</span> <span class="o">*</span> <a href="http://docs.scipy.org/doc/numpy-1.6.0/reference/generated/numpy.dot.html#numpy.dot"><span class="n">np</span><span class="o">.</span><span class="n">dot</span></a><span class="p">(</span><span class="n">coef</span><span class="p">,</span> <span class="n">coef</span><span class="p">)</span>
254254
<span class="bp">self</span><span class="o">.</span><span class="n">obj</span><span class="o">.</span><span class="n">append</span><span class="p">(</span><span class="n">loss</span> <span class="o">+</span> <span class="n">regul</span><span class="p">)</span>
@@ -279,7 +279,7 @@
279279
<span class="n">y</span> <span class="o">=</span> <span class="n">y</span> <span class="o">*</span> <span class="mi">2</span> <span class="o">-</span> <span class="mi">1</span>
280280

281281

282-
<span class="n">plt</span><span class="o">.</span><span class="n">figure</span><span class="p">()</span>
282+
<a href="http://matplotlib.org/api/figure_api.html#matplotlib.figure"><span class="n">plt</span><span class="o">.</span><span class="n">figure</span></a><span class="p">()</span>
283283

284284
<span class="k">for</span> <span class="n">eta</span> <span class="ow">in</span> <span class="n">etas</span><span class="p">:</span>
285285
<span class="k">print</span><span class="p">(</span><span class="s2">&quot;eta =&quot;</span><span class="p">,</span> <span class="n">eta</span><span class="p">)</span>
@@ -293,7 +293,7 @@
293293
<a href="http://matplotlib.org/api/pyplot_api.html#matplotlib.pyplot.ylabel"><span class="n">plt</span><span class="o">.</span><span class="n">ylabel</span></a><span class="p">(</span><span class="s2">&quot;Objective value&quot;</span><span class="p">)</span>
294294
<a href="http://matplotlib.org/api/legend_api.html#matplotlib.legend"><span class="n">plt</span><span class="o">.</span><span class="n">legend</span></a><span class="p">()</span>
295295

296-
<span class="n">plt</span><span class="o">.</span><span class="n">figure</span><span class="p">()</span>
296+
<a href="http://matplotlib.org/api/figure_api.html#matplotlib.figure"><span class="n">plt</span><span class="o">.</span><span class="n">figure</span></a><span class="p">()</span>
297297

298298
<span class="k">for</span> <span class="n">n_inner</span> <span class="ow">in</span> <span class="n">n_inners</span><span class="p">:</span>
299299
<span class="k">print</span><span class="p">(</span><span class="s2">&quot;n_inner =&quot;</span><span class="p">,</span> <span class="n">n_inner</span><span class="p">)</span>

lightning/generated/lightning.classification.AdaGradClassifier.html

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -76,6 +76,7 @@
7676
role="menu"
7777
aria-labelledby="dLabelGlobalToc"><ul class="current">
7878
<li class="toctree-l1"><a class="reference internal" href="../auto_examples/index.html">Examples</a><ul>
79+
<li class="toctree-l2"><a class="reference internal" href="../auto_examples/plot_1d_total_variation.html">Signal recovery by 1D total variation</a></li>
7980
<li class="toctree-l2"><a class="reference internal" href="../auto_examples/plot_sgd_loss_functions.html">SGD: Convex Loss Functions</a></li>
8081
<li class="toctree-l2"><a class="reference internal" href="../auto_examples/plot_robust_regression.html">Robust regression</a></li>
8182
<li class="toctree-l2"><a class="reference internal" href="../auto_examples/trace.html">Trace norm</a></li>
@@ -126,6 +127,7 @@
126127
<li class="toctree-l2"><a class="reference internal" href="../intro.html#stochastic-variance-reduced-gradient-svrg">Stochastic variance-reduced gradient (SVRG)</a></li>
127128
<li class="toctree-l2 current"><a class="reference internal" href="../intro.html#prank">PRank</a><ul class="current">
128129
<li class="toctree-l3"><a class="reference internal" href="../auto_examples/index.html">Examples</a><ul>
130+
<li class="toctree-l4"><a class="reference internal" href="../auto_examples/plot_1d_total_variation.html">Signal recovery by 1D total variation</a></li>
129131
<li class="toctree-l4"><a class="reference internal" href="../auto_examples/plot_sgd_loss_functions.html">SGD: Convex Loss Functions</a></li>
130132
<li class="toctree-l4"><a class="reference internal" href="../auto_examples/plot_robust_regression.html">Robust regression</a></li>
131133
<li class="toctree-l4"><a class="reference internal" href="../auto_examples/trace.html">Trace norm</a></li>

0 commit comments

Comments (0)