Skip to content

Commit 89aae14

Browse files
committed
numpy 2.0 fixes + minor doc improvements
1 parent 126c00d commit 89aae14

File tree

2 files changed

+23
-19
lines changed

2 files changed

+23
-19
lines changed

src/skmatter/decomposition/_kernel_pcovr.py

Lines changed: 13 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -39,7 +39,7 @@ class KernelPCovR(_BasePCA, LinearModel):
3939
Parameters
4040
----------
4141
mixing : float, default=0.5
42-
mixing parameter, as described in PCovR as :math:`{\\alpha}`
42+
mixing parameter, as described in PCovR as :math:`{\alpha}`
4343
n_components : int, float or str, default=None
4444
Number of components to keep.
4545
if n_components is not set all components are kept::
@@ -64,7 +64,7 @@ class KernelPCovR(_BasePCA, LinearModel):
6464
run randomized SVD by the method of Halko et al.
6565
regressor : {instance of `sklearn.kernel_ridge.KernelRidge`, `precomputed`, None}, default=None
6666
The regressor to use for computing
67-
the property predictions :math:`\\hat{\\mathbf{Y}}`.
67+
the property predictions :math:`\hat{\mathbf{Y}}`.
6868
A pre-fitted regressor may be provided.
6969
If the regressor is not `None`, its kernel parameters
7070
(`kernel`, `gamma`, `degree`, `coef0`, and `kernel_params`)
@@ -112,17 +112,17 @@ class KernelPCovR(_BasePCA, LinearModel):
112112
pseudo-inverse of the latent-space projection, which
113113
can be used to construct projectors from latent-space
114114
pkt_: numpy.ndarray of size :math:`({n_{samples}, n_{components}})`
115-
the projector, or weights, from the input kernel :math:`\\mathbf{K}`
116-
to the latent-space projection :math:`\\mathbf{T}`
115+
the projector, or weights, from the input kernel :math:`\mathbf{K}`
116+
to the latent-space projection :math:`\mathbf{T}`
117117
pky_: numpy.ndarray of size :math:`({n_{samples}, n_{properties}})`
118-
the projector, or weights, from the input kernel :math:`\\mathbf{K}`
119-
to the properties :math:`\\mathbf{Y}`
118+
the projector, or weights, from the input kernel :math:`\mathbf{K}`
119+
to the properties :math:`\mathbf{Y}`
120120
pty_: numpy.ndarray of size :math:`({n_{components}, n_{properties}})`
121121
the projector, or weights, from the latent-space projection
122-
:math:`\\mathbf{T}` to the properties :math:`\\mathbf{Y}`
122+
:math:`\mathbf{T}` to the properties :math:`\mathbf{Y}`
123123
ptx_: numpy.ndarray of size :math:`({n_{components}, n_{features}})`
124124
the projector, or weights, from the latent-space projection
125-
:math:`\\mathbf{T}` to the feature matrix :math:`\\mathbf{X}`
125+
:math:`\mathbf{T}` to the feature matrix :math:`\mathbf{X}`
126126
X_fit_: numpy.ndarray of shape (n_samples, n_features)
127127
The data used to fit the model. This attribute is used to build kernels
128128
from new data.
@@ -160,7 +160,7 @@ class KernelPCovR(_BasePCA, LinearModel):
160160
[ 1.11923584, -1.04798016],
161161
[-1.5635827 , 1.11078662]])
162162
>>> round(kpcovr.score(X, Y), 5)
163-
-0.52039
163+
np.float64(-0.52039)
164164
""" # NoQa: E501
165165

166166
def __init__(
@@ -246,15 +246,15 @@ def fit(self, X, Y, W=None):
246246
247247
It is suggested that :math:`\mathbf{X}` be centered by its column-
248248
means and scaled. If features are related, the matrix should be scaled
249-
to have unit variance, otherwise :math:`\\mathbf{X}` should be
249+
to have unit variance, otherwise :math:`\mathbf{X}` should be
250250
scaled so that each feature has a variance of 1 / n_features.
251251
Y : numpy.ndarray, shape (n_samples, n_properties)
252252
Training data, where n_samples is the number of samples and
253253
n_properties is the number of properties
254254
255-
It is suggested that :math:`\\mathbf{X}` be centered by its column-
255+
It is suggested that :math:`\mathbf{X}` be centered by its column-
256256
means and scaled. If features are related, the matrix should be scaled
257-
to have unit variance, otherwise :math:`\\mathbf{Y}` should be
257+
to have unit variance, otherwise :math:`\mathbf{Y}` should be
258258
scaled so that each feature has a variance of 1 / n_features.
259259
W : numpy.ndarray, shape (n_samples, n_properties)
260260
Regression weights, optional when regressor=`precomputed`. If not
@@ -420,7 +420,7 @@ def inverse_transform(self, T):
420420
r"""Transform input data back to its original space.
421421
422422
.. math::
423-
\mathbf{\\hat{X}} = \mathbf{T} \mathbf{P}_{TX}
423+
\mathbf{\hat{X}} = \mathbf{T} \mathbf{P}_{TX}
424424
= \mathbf{K} \mathbf{P}_{KT} \mathbf{P}_{TX}
425425
426426
Similar to KPCA, the original features are not always recoverable,

src/skmatter/feature_selection/_base.py

Lines changed: 10 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -249,9 +249,9 @@ class CUR(_CUR):
249249
>>> Xr = selector.transform(X)
250250
>>> print(Xr.shape)
251251
(3, 2)
252-
>>> np.round(selector.pi_, 2) # importance scole
253-
array([0. , 0. , 0.05])
254-
>>> selector.selected_idx_ # importance scole
252+
>>> np.round(selector.pi_) # importance score
253+
array([0., 0., 0.])
254+
>>> selector.selected_idx_
255255
array([1, 0])
256256
"""
257257

@@ -332,6 +332,10 @@ class PCovCUR(_PCovCUR):
332332
Counter tracking the number of selections that have been made
333333
X_selected_ : numpy.ndarray,
334334
Matrix containing the selected features, for use in fitting
335+
pi_ : numpy.ndarray (n_features),
336+
the importance score see :func:`_compute_pi`
337+
selected_idx_ : numpy.ndarray
338+
indices of selected features
335339
336340
Examples
337341
--------
@@ -351,9 +355,9 @@ class PCovCUR(_PCovCUR):
351355
>>> Xr = selector.transform(X)
352356
>>> print(Xr.shape)
353357
(3, 2)
354-
>>> np.round(selector.pi_, 2) # importance scole
355-
array([0. , 0. , 0.05])
356-
>>> selector.selected_idx_ # importance scole
358+
>>> np.round(selector.pi_) # importance score
359+
array([0., 0., 0.])
360+
>>> selector.selected_idx_
357361
array([1, 0])
358362
"""
359363

0 commit comments

Comments
 (0)