Unsupervised feature selection
==============================

We can use :class:`FastCan` to do unsupervised feature selection.
The unsupervised application of :class:`FastCan` selects the features that
maximize the sum of the squared canonical correlation (SSC) with the
principal components (PCs) obtained from principal component analysis (PCA)
of the feature matrix :math:`X`.

    >>> from sklearn.decomposition import PCA
    >>> from sklearn import datasets
    >>> from fastcan import FastCan
    >>> iris = datasets.load_iris()
    >>> X = iris["data"]
    >>> y = iris["target"]
    >>> f_names = iris["feature_names"]
    >>> t_names = iris["target_names"]
    >>> pca = PCA(n_components=2)
    >>> X_pcs = pca.fit_transform(X)
    >>> selector = FastCan(n_features_to_select=2, verbose=0).fit(X, X_pcs)
    >>> selector.indices_
    array([2, 1], dtype=int32)

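For reference, the selected indices can be mapped back to the iris feature
names, continuing the example above:

    >>> [f_names[i] for i in selector.indices_]
    ['petal length (cm)', 'sepal width (cm)']
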
.. note::
    There is no guarantee that this unsupervised :class:`FastCan` will select
    the optimal subset of the features, i.e. the subset with the highest SSC
    with the PCs, because :class:`FastCan` selects features in a greedy
    manner, which may lead to suboptimal results. See the following plots,
    where the panel for the greedily selected feature pair is framed in red.

.. plot::
    :context: close-figs
    :align: center

    from itertools import combinations
    import matplotlib.pyplot as plt
    import numpy as np
    from sklearn import datasets
    from sklearn.cross_decomposition import CCA
    from sklearn.decomposition import PCA

    # Rebuild the data from the doctest above, since the plot directive does
    # not share the doctest namespace
    iris = datasets.load_iris()
    X = iris["data"]
    y = iris["target"]
    f_names = iris["feature_names"]
    t_names = iris["target_names"]
    X_pcs = PCA(n_components=2).fit_transform(X)

    def ssc(X, y):
        """Sum of the squared canonical correlation coefficients.

        Parameters
        ----------
        X : array-like of shape (n_samples, n_features)
            Feature matrix.

        y : array-like of shape (n_samples, n_outputs)
            Target matrix.

        Returns
        -------
        ssc : float
            Sum of the squared canonical correlation coefficients.
        """
        n_components = min(X.shape[1], y.shape[1])
        cca = CCA(n_components=n_components)
        X_c, y_c = cca.fit_transform(X, y)
        # The canonical correlation coefficients are the pairwise correlations
        # between the corresponding canonical variates of X and y
        corrcoef = np.diagonal(
            np.corrcoef(X_c, y_c, rowvar=False),
            offset=n_components
        )
        return sum(corrcoef**2)

    comb = list(combinations([0, 1, 2, 3], 2))
    fig, axs = plt.subplots(ncols=3, nrows=2, figsize=(8, 6), layout="constrained")
    for i in range(2):
        for j in range(3):
            f1_idx = comb[i*3+j][0]
            f2_idx = comb[i*3+j][1]
            score = ssc(X[:, [f1_idx, f2_idx]], X_pcs)
            scatter = axs[i, j].scatter(X[:, f1_idx], X[:, f2_idx], c=y)
            axs[i, j].set(xlabel=f_names[f1_idx], ylabel=f_names[f2_idx])
            axs[i, j].set_title(f"SSC: {score:.3f}")
    # Frame the panel of the greedily selected pair (sepal width vs. petal
    # length) in red
    for spine in axs[1, 0].spines.values():
        spine.set_edgecolor("red")
    _ = axs[1, 2].legend(scatter.legend_elements()[0], t_names, loc="lower right")

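Because all two-feature subsets are scored in the plots above, the exhaustive
best pair can also be found programmatically. Below is a minimal sketch that
reuses the ``ssc`` helper and the ``X`` and ``X_pcs`` arrays from the plot
code above::

    from itertools import combinations

    # Exhaustive search: score every pair of features against the PCs and
    # keep the pair with the highest SSC
    best_pair = max(
        combinations(range(X.shape[1]), 2),
        key=lambda pair: ssc(X[:, list(pair)], X_pcs),
    )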