
Commit 953068c
Author: Alexander Ororbia
Message: minor clean-up of ngclearn.utils.viz.dim_reduce
Parent: bb7f453

File tree: 1 file changed (+14, -18 lines)

ngclearn/utils/viz/dim_reduce.py

Lines changed: 14 additions & 18 deletions
@@ -3,8 +3,8 @@
 default_cmap = plt.cm.jet
 
 import numpy as np
-from sklearn.decomposition import IncrementalPCA
-from sklearn.manifold import TSNE
+from sklearn.decomposition import IncrementalPCA ## sci-kit learning dependency
+from sklearn.manifold import TSNE ## sci-kit learning dependency
 
 def extract_pca_latents(vectors): ## PCA mapping routine
     """
@@ -20,7 +20,6 @@ def extract_pca_latents(vectors): ## PCA mapping routine
     """
     batch_size = 50
     z_dim = vectors.shape[1]
-    z_2D = None
     if z_dim != 2:
         ipca = IncrementalPCA(n_components=2, batch_size=batch_size)
         ipca.fit(vectors)
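
For readers skimming the diff, here is a minimal standalone sketch of the iPCA path this hunk touches; the random `vectors` data is a hypothetical stand-in for real latent codes, and only the scikit-learn calls mirror the diff:

import numpy as np
from sklearn.decomposition import IncrementalPCA

vectors = np.random.randn(200, 64)  ## hypothetical (K x D) codebook, D != 2
ipca = IncrementalPCA(n_components=2, batch_size=50)
ipca.fit(vectors)                   ## fit incrementally, in minibatches of 50
z_2D = ipca.transform(vectors)      ## (K x 2) projection to visualization space
print(z_2D.shape)                   ## -> (200, 2)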
@@ -31,26 +30,25 @@ def extract_pca_latents(vectors): ## PCA mapping routine
 
 def extract_tsne_latents(vectors, perplexity=30, n_pca_comp=32, batch_size=500): ## tSNE mapping routine
     """
-    Projects collection of K vectors (stored in a matrix) to a two-dimensional (2D)
-    visualization space via the t-distributed stochastic neighbor embedding
-    algorithm (t-SNE). This algorithm also uses PCA to produce an
-    intermediate project to speed up the t-SNE final mapping step. Note that
-    if the input already has a 2D dimensionality, the original input is returned.
+    Projects collection of K vectors (stored in a matrix) to a two-dimensional (2D) visualization space via the
+    t-distributed stochastic neighbor embedding algorithm (t-SNE). This algorithm also uses PCA to produce an
+    intermediate project to speed up the t-SNE final mapping step. Note that if the input already has a 2D
+    dimensionality, the original input is returned.
 
     Args:
         vectors: a matrix/codebook of (K x D) vectors to project
 
         perplexity: the perplexity control factor for t-SNE (Default: 30)
 
-        batch_size: number of sampled embedding vectors to use per iteration
-            of online internal PCA
+        n_pca_comp: number of PCA top components (sorted by eigen-values) to retain/extract before continuing
+            with t-SNE dimensionality reduction
+
+        batch_size: number of sampled embedding vectors to use per iteration of online internal PCA
 
     Returns:
         a matrix (K x 2) of projected vectors (to 2D space)
     """
-    #batch_size = 500 #50
     z_dim = vectors.shape[1]
-    z_2D = None
    if z_dim != 2:
         print(" > Projecting latents via iPCA...")
         n_comp = n_pca_comp #32 #10 #16 #50
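
The hunk above documents the previously undocumented `n_pca_comp` argument. A hedged usage sketch of the signature shown in the diff; the `latents` data is hypothetical, while the keyword names come from the signature itself:

import numpy as np
from ngclearn.utils.viz.dim_reduce import extract_tsne_latents

latents = np.random.randn(500, 128)  ## hypothetical (K x D) matrix of codes
z_2D = extract_tsne_latents(
    latents,
    perplexity=30,    ## t-SNE perplexity control factor
    n_pca_comp=32,    ## top PCA components kept before the t-SNE step
    batch_size=500    ## minibatch size for the online/incremental PCA
)
print(z_2D.shape)     ## expected: (500, 2)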
@@ -69,11 +67,10 @@ def extract_tsne_latents(vectors, perplexity=30, n_pca_comp=32, batch_size=500):
         z_2D = vectors
     return z_2D
 
-def plot_latents(code_vectors, labels, plot_fname="2Dcode_plot.jpg", alpha=1.,
-                 cmap=None):
+def plot_latents(code_vectors, labels, plot_fname="2Dcode_plot.jpg", alpha=1., cmap=None):
     """
-    Produces a label-overlaid (label map to distinct colors) scatterplot for
-    visualizing two-dimensional latent codes (produced by either PCA or t-SNE).
+    Produces a label-overlaid (label map to distinct colors) scatterplot for visualizing two-dimensional latent codes
+    (produced by either PCA or t-SNE).
 
     Args:
         code_vectors: a matrix of shape (K x 2) with vectors to plot/visualize
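
A sketch of how the reflowed `plot_latents` signature pairs with `extract_tsne_latents`; the latent codes and one-hot labels here are hypothetical, and the argument defaults match the signature in the diff:

import numpy as np
from ngclearn.utils.viz.dim_reduce import extract_tsne_latents, plot_latents

latents = np.random.randn(500, 128)                 ## hypothetical (K x D) codes
labels = np.eye(10)[np.random.randint(0, 10, 500)]  ## hypothetical (K x 10) one-hot labels
z_2D = extract_tsne_latents(latents)                ## project codes down to (K x 2)
plot_latents(z_2D, labels, plot_fname="2Dcode_plot.jpg", alpha=1., cmap=None)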
@@ -92,8 +89,7 @@ def plot_latents(code_vectors, labels, plot_fname="2Dcode_plot.jpg", alpha=1.,
     matplotlib.use('Agg') ## temporarily go in Agg plt backend for tsne plotting
     print(" > Plotting 2D latent encodings...")
     curr_backend = plt.rcParams["backend"]
-    matplotlib.use(
-        'Agg') ## temporarily go in Agg plt backend for tsne plotting
+    matplotlib.use('Agg') ## temporarily go in Agg plt backend for tsne plotting
     lab = labels
     if lab.shape[1] > 1: ## extract integer class labels from a one-hot matrix
         lab = np.argmax(lab, 1)
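
The reflowed call is part of a save/switch/restore backend pattern; a sketch of that pattern in isolation (the restore step is an assumption about code outside this hunk):

import matplotlib
import matplotlib.pyplot as plt

curr_backend = plt.rcParams["backend"]  ## record whichever backend is active
matplotlib.use('Agg')                   ## switch to the headless Agg backend
## ... render and save the scatterplot to a file here ...
matplotlib.use(curr_backend)            ## switch back to the original backend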
