|
15 | 15 | # Let's first generate synthetic data on which to run the Cox estimator, |
16 | 16 | # using ``skglm`` data utils. |
17 | 17 | # |
| 18 | +import warnings |
| 19 | +import time |
| 20 | +from lifelines import CoxPHFitter |
| 21 | +import pandas as pd |
| 22 | +import numpy as np |
| 23 | +from skglm.solvers import ProxNewton |
| 24 | +from skglm.penalties import L1 |
| 25 | +from skglm.datafits import Cox |
| 26 | +import matplotlib.pyplot as plt |
18 | 27 | from skglm.utils.data import make_dummy_survival_data |
19 | 28 |
|
20 | 29 | n_samples, n_features = 500, 100 |
|
34 | 43 | # * ``s`` indicates the observations censorship and follows a Bernoulli(0.5) distribution |
35 | 44 | # |
36 | 45 | # Let's inspect the data quickly: |
37 | | -import matplotlib.pyplot as plt |
38 | 46 |
|
39 | 47 | fig, axes = plt.subplots( |
40 | 48 | 1, 3, |
|
59 | 67 | # To do so, we need to combine a Cox datafit and a :math:`\ell_1` penalty |
60 | 68 | # and solve the resulting problem using skglm Proximal Newton solver ``ProxNewton``. |
61 | 69 | # We set the intensity of the :math:`\ell_1` regularization to ``alpha=1e-2``. |
62 | | -from skglm.datafits import Cox |
63 | | -from skglm.penalties import L1 |
64 | | -from skglm.solvers import ProxNewton |
65 | 70 |
|
66 | | -from skglm.utils.jit_compilation import compiled_clone |
67 | 71 |
|
68 | 72 | # regularization intensity |
69 | 73 | alpha = 1e-2 |
70 | 74 |
|
71 | 75 | # skglm internals: init datafit and penalty |
72 | | -datafit = compiled_clone(Cox()) |
73 | | -penalty = compiled_clone(L1(alpha)) |
| 76 | +datafit = Cox() |
| 77 | +penalty = L1(alpha) |
74 | 78 |
|
75 | 79 | datafit.initialize(X, y) |
76 | 80 |
|
|
90 | 94 | # %% |
91 | 95 | # Let's solve the problem with ``lifelines`` through its ``CoxPHFitter`` |
92 | 96 | # estimator and compare the objectives found by the two packages. |
93 | | -import numpy as np |
94 | | -import pandas as pd |
95 | | -from lifelines import CoxPHFitter |
96 | 97 |
|
97 | 98 | # format data |
98 | 99 | stacked_y_X = np.hstack((y, X)) |
|
126 | 127 | # let's compare their execution time. To get the evolution of the suboptimality |
127 | 128 | # (objective - optimal objective) we run both estimators with increasing number of |
128 | 129 | # an increasing number of iterations. |
129 | | -import time |
130 | | -import warnings |
131 | 130 |
|
132 | 131 | warnings.filterwarnings('ignore') |
133 | 132 |
|
|
230 | 229 | # We only need to pass in ``use_efron=True`` to the ``Cox`` datafit. |
231 | 230 |
|
232 | 231 | # ensure using Efron estimate |
233 | | -datafit = compiled_clone(Cox(use_efron=True)) |
| 232 | +datafit = Cox(use_efron=True) |
234 | 233 | datafit.initialize(X, y) |
235 | 234 |
|
236 | 235 | # solve the problem |
|
0 commit comments