
Commit cb12d5b

Spacing changes.
1 parent 37df53a commit cb12d5b


2 files changed: +33 −33 lines changed


hdbscan/_hdbscan_linkage.pyx

Lines changed: 17 additions & 17 deletions
@@ -23,14 +23,14 @@ cpdef np.ndarray[np.double_t, ndim=2] mst_linkage_core(
    cdef np.ndarray[np.double_t, ndim=1] left
    cdef np.ndarray[np.double_t, ndim=1] right
    cdef np.ndarray[np.double_t, ndim=2] result
-
+
    cdef np.ndarray label_filter
-
+
    cdef np.intp_t current_node
    cdef np.intp_t new_node_index
    cdef np.intp_t new_node
    cdef np.intp_t i
-
+
    result = np.zeros((distance_matrix.shape[0] - 1, 3))
    node_labels = np.arange(distance_matrix.shape[0], dtype=np.intp)
    current_node = 0
@@ -42,14 +42,14 @@ cpdef np.ndarray[np.double_t, ndim=2] mst_linkage_core(
        left = current_distances[label_filter]
        right = distance_matrix[current_node][current_labels]
        current_distances = np.where(left < right, left, right)
-
+
        new_node_index = np.argmin(current_distances)
        new_node = current_labels[new_node_index]
        result[i - 1, 0] = <double> current_node
        result[i - 1, 1] = <double> new_node
        result[i - 1, 2] = current_distances[new_node_index]
        current_node = new_node
-
+
    return result

cdef void select_distances(
@@ -93,9 +93,9 @@ cpdef np.ndarray[np.double_t, ndim=2] mst_linkage_core_pdist(
    cdef np.ndarray[np.double_t, ndim=1] right
    cdef np.ndarray[np.intp_t, ndim=1] col_select
    cdef np.ndarray[np.double_t, ndim=2] result
-
+
    cdef np.ndarray label_filter
-
+
    cdef np.intp_t current_node
    cdef np.intp_t new_node_index
    cdef np.intp_t new_node
@@ -104,7 +104,7 @@ cpdef np.ndarray[np.double_t, ndim=2] mst_linkage_core_pdist(

    dim = int((1 + np.sqrt(1 + 8 * pdist_matrix.shape[0])) / 2.0)
    col_select = np.cumsum(np.arange(dim - 1, 0, -1))
-
+
    result = np.zeros((dim - 1, 3))
    node_labels = np.arange(dim, dtype=np.intp)
    current_node = 0
@@ -118,14 +118,14 @@ cpdef np.ndarray[np.double_t, ndim=2] mst_linkage_core_pdist(
        #right = fill_row(pdist_matrix, current_node, dim, col_select)[current_labels]
        select_distances(pdist_matrix, col_select, current_labels, right, current_node, dim)
        current_distances = np.where(left < right[:len(current_labels)], left, right[:len(current_labels)])
-
+
        new_node_index = np.argmin(current_distances)
        new_node = current_labels[new_node_index]
        result[i - 1, 0] = <double> current_node
        result[i - 1, 1] = <double> new_node
        result[i - 1, 2] = current_distances[new_node_index]
        current_node = new_node
-
+
    return result

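The hunks above only strip trailing whitespace from otherwise blank lines; the surrounding context is the Prim-style core loop of mst_linkage_core (and its pdist variant), which grows a minimum spanning tree one vertex at a time from a pairwise distance matrix. As a reading aid, here is a plain-NumPy sketch of that loop; the function name and the pure-Python form are mine, not part of the package:

import numpy as np

def mst_core_sketch(distance_matrix):
    # Prim-style MST construction over a dense pairwise distance matrix:
    # keep the cheapest known distance from the growing tree to every
    # unvisited node, and repeatedly cross the cheapest such edge.
    n = distance_matrix.shape[0]
    result = np.zeros((n - 1, 3))          # rows of (from_node, to_node, distance)
    current_labels = np.arange(n, dtype=np.intp)
    current_distances = np.full(n, np.inf)
    current_node = 0
    for i in range(1, n):
        label_filter = current_labels != current_node
        current_labels = current_labels[label_filter]
        left = current_distances[label_filter]
        right = distance_matrix[current_node][current_labels]
        current_distances = np.where(left < right, left, right)

        new_node_index = np.argmin(current_distances)
        new_node = current_labels[new_node_index]
        result[i - 1, 0] = current_node
        result[i - 1, 1] = new_node
        result[i - 1, 2] = current_distances[new_node_index]
        current_node = new_node
    return result

The resulting edge list, sorted by distance, is what the union-find based label() step further down turns into a single-linkage tree.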

@@ -244,29 +244,29 @@ cdef class UnionFind (object):
    cdef np.intp_t next_label
    cdef np.intp_t *parent
    cdef np.intp_t *size
-
+
    def __init__(self, N):
        self.parent_arr = -1 * np.ones(2 * N - 1, dtype=np.intp)
        self.next_label = N
        self.size_arr = np.hstack((np.ones(N, dtype=np.intp),
                                   np.zeros(N-1, dtype=np.intp)))
        self.parent = (<np.intp_t *> self.parent_arr.data)
        self.size = (<np.intp_t *> self.size_arr.data)
-
+
    cdef void union(self, np.intp_t m, np.intp_t n):
        self.size[self.next_label] = self.size[m] + self.size[n]
        self.parent[m] = self.next_label
        self.parent[n] = self.next_label
        self.size[self.next_label] = self.size[m] + self.size[n]
        self.next_label += 1
-
+
        return
-
+
    cdef np.intp_t find(self, np.intp_t n):
        while self.parent[n] != -1:
            n = self.parent[n]
        return n
-
+
    cdef np.intp_t fast_find(self, np.intp_t n):
        cdef np.intp_t p
        p = n
@@ -276,15 +276,15 @@ cdef class UnionFind (object):
        while self.parent_arr[p] != n:
            p, self.parent_arr[p] = self.parent_arr[p], n
        return n
-
+
cpdef np.ndarray[np.double_t, ndim=2] label(np.ndarray[np.double_t, ndim=2] L):

    cdef np.ndarray[np.double_t, ndim=2] result_arr
    cdef np.double_t[:, ::1] result

    cdef np.intp_t N, a, aa, b, bb, idx
    cdef np.double_t delta
-
+
    result_arr = np.zeros((L.shape[0], L.shape[1] + 1))
    result = (<np.double_t[:L.shape[0], :4:1]> (<np.double_t *> result_arr.data))
    N = L.shape[0] + 1
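The remaining hunks in this file touch UnionFind and label. As a reading aid, a minimal pure-Python union-find in the same style, assuming the same convention that every union mints a fresh label starting at N (the class name and the fully compressing fast_find below are mine, not the package's):

import numpy as np

class UnionFindSketch(object):
    def __init__(self, N):
        # nodes 0..N-1 are the original points; unions mint labels N, N+1, ...
        self.parent = -1 * np.ones(2 * N - 1, dtype=np.intp)
        self.size = np.hstack((np.ones(N, dtype=np.intp),
                               np.zeros(N - 1, dtype=np.intp)))
        self.next_label = N

    def union(self, m, n):
        # merge the components rooted at m and n under a brand-new label
        self.size[self.next_label] = self.size[m] + self.size[n]
        self.parent[m] = self.next_label
        self.parent[n] = self.next_label
        self.next_label += 1

    def fast_find(self, n):
        # first pass: walk up to the root of n's component
        root = n
        while self.parent[root] != -1:
            root = self.parent[root]
        # second pass: path compression, re-point every node on the path at the root
        while n != root:
            next_node = self.parent[n]
            self.parent[n] = root
            n = next_node
        return root

Minting a fresh label per merge matches the node numbering of scipy-style linkage output, which is why label() can write the merged components straight into (left, right, distance, size) rows.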

hdbscan/_hdbscan_tree.pyx

Lines changed: 16 additions & 16 deletions
@@ -13,7 +13,7 @@ cdef list bfs_from_hierarchy(np.ndarray[np.double_t, ndim=2] hierarchy, np.intp_
    """
    Perform a breadth first search on a tree in scipy hclust format.
    """
-
+
    cdef list to_process
    cdef np.intp_t max_node
    cdef np.intp_t num_points
@@ -28,13 +28,13 @@ cdef list bfs_from_hierarchy(np.ndarray[np.double_t, ndim=2] hierarchy, np.intp_

    while to_process:
        result.extend(to_process)
-        to_process = [x - num_points for x in 
+        to_process = [x - num_points for x in
                      to_process if x >= num_points]
        if to_process:
            to_process = hierarchy[to_process,:2].flatten().astype(np.intp).tolist()

    return result
-
+
cpdef np.ndarray condense_tree(np.ndarray[np.double_t, ndim=2] hierarchy,
                               np.intp_t min_cluster_size=10):
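These hunks sit inside bfs_from_hierarchy, which walks a scipy hclust linkage matrix breadth first: ids below num_points are leaf points, and an internal node id indexes row (id - num_points) of the matrix. A standalone Python sketch of that walk (the function name is mine):

import numpy as np

def bfs_from_hierarchy_sketch(hierarchy, bfs_root):
    # hierarchy is a scipy linkage matrix with (N - 1) rows for N points;
    # node ids >= num_points are internal and index row (id - num_points).
    num_points = hierarchy.shape[0] + 1
    to_process = [bfs_root]
    result = []
    while to_process:
        result.extend(to_process)
        # keep only internal nodes, shifted down to their row indices
        to_process = [x - num_points for x in to_process if x >= num_points]
        if to_process:
            to_process = hierarchy[to_process, :2].flatten().astype(np.intp).tolist()
    return result

Called with bfs_root = 2 * hierarchy.shape[0], the root id that condense_tree uses, this enumerates every node in the tree.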

@@ -55,22 +55,22 @@ cpdef np.ndarray condense_tree(np.ndarray[np.double_t, ndim=2] hierarchy,
    cdef double lambda_value
    cdef np.intp_t left_count
    cdef np.intp_t right_count
-
+
    root = 2 * hierarchy.shape[0]
    num_points = root // 2 + 1
    next_label = num_points + 1
-
+
    node_list = bfs_from_hierarchy(hierarchy, root)
-
+
    relabel = np.empty(len(node_list), dtype=np.intp)
    relabel[root] = num_points
    result_list = []
    ignore = np.zeros(len(node_list), dtype=np.int)
-
+
    for node in node_list:
        if ignore[node] or node < num_points:
            continue
-
+
        children = hierarchy[node - num_points]
        left = <np.intp_t> children[0]
        right = <np.intp_t> children[1]
@@ -88,41 +88,41 @@ cpdef np.ndarray condense_tree(np.ndarray[np.double_t, ndim=2] hierarchy,
            right_count = <np.intp_t> hierarchy[right - num_points][3]
        else:
            right_count = 1
-
+
        if left_count >= min_cluster_size and right_count >= min_cluster_size:
            relabel[left] = next_label
            next_label += 1
            result_list.append((relabel[node], relabel[left], lambda_value, left_count))
-
+
            relabel[right] = next_label
            next_label += 1
            result_list.append((relabel[node], relabel[right], lambda_value, right_count))
-
+
        elif left_count < min_cluster_size and right_count < min_cluster_size:
            for sub_node in bfs_from_hierarchy(hierarchy, left):
                if sub_node < num_points:
                    result_list.append((relabel[node], sub_node, lambda_value, 1))
                ignore[sub_node] = True
-
+
            for sub_node in bfs_from_hierarchy(hierarchy, right):
                if sub_node < num_points:
                    result_list.append((relabel[node], sub_node, lambda_value, 1))
                ignore[sub_node] = True
-
+
        elif left_count < min_cluster_size:
            relabel[right] = relabel[node]
            for sub_node in bfs_from_hierarchy(hierarchy, left):
                if sub_node < num_points:
                    result_list.append((relabel[node], sub_node, lambda_value, 1))
                ignore[sub_node] = True
-
+
        else:
            relabel[left] = relabel[node]
            for sub_node in bfs_from_hierarchy(hierarchy, right):
                if sub_node < num_points:
                    result_list.append((relabel[node], sub_node, lambda_value, 1))
                ignore[sub_node] = True
-
+
    return np.array(result_list, dtype=[
                                        ('parent', np.intp),
                                        ('child', np.intp),
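The branch shown above is the core of tree condensation: a split is kept only if both children contain at least min_cluster_size points; otherwise the undersized side's points simply fall out of the parent cluster at the current lambda_value (the reciprocal of the merge distance). A sketch of that decision for a single node; points_under(x) is a hypothetical placeholder for the leaf points reached by bfs_from_hierarchy(hierarchy, x), and the function itself is mine, not the package's:

def condense_one_node(node, left, right, left_count, right_count, lambda_value,
                      relabel, next_label, result_list, min_cluster_size,
                      points_under):
    # relabel maps raw hierarchy node ids to condensed-tree cluster ids;
    # returns the (possibly advanced) next_label counter.
    if left_count >= min_cluster_size and right_count >= min_cluster_size:
        # a genuine split: both children become new condensed-tree clusters
        relabel[left] = next_label
        result_list.append((relabel[node], relabel[left], lambda_value, left_count))
        relabel[right] = next_label + 1
        result_list.append((relabel[node], relabel[right], lambda_value, right_count))
        next_label += 2
    elif left_count < min_cluster_size and right_count < min_cluster_size:
        # both children too small: all of their points leave the parent here
        for point in points_under(left) + points_under(right):
            result_list.append((relabel[node], point, lambda_value, 1))
    elif left_count < min_cluster_size:
        # only the left child is too small: its points fall out,
        # while the right child silently keeps the parent's cluster id
        relabel[right] = relabel[node]
        for point in points_under(left):
            result_list.append((relabel[node], point, lambda_value, 1))
    else:
        relabel[left] = relabel[node]
        for point in points_under(right):
            result_list.append((relabel[node], point, lambda_value, 1))
    return next_label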
@@ -203,7 +203,7 @@ cdef list bfs_from_cluster_tree(np.ndarray tree, np.intp_t bfs_root):

    result = []
    to_process = np.array([bfs_root], dtype=np.intp)
-
+
    while to_process.shape[0] > 0:
        result.extend(to_process.tolist())
        to_process = tree['child'][np.in1d(tree['parent'], to_process)]
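bfs_from_cluster_tree repeats the breadth-first idea, but over the condensed tree stored as a NumPy structured array with 'parent' and 'child' fields, so the frontier can be expanded with a single vectorised mask per step. A standalone sketch (the name is mine):

import numpy as np

def bfs_from_cluster_tree_sketch(tree, bfs_root):
    # tree is a structured array with at least 'parent' and 'child' fields,
    # as produced by condense_tree above.
    result = []
    to_process = np.array([bfs_root], dtype=np.intp)
    while to_process.shape[0] > 0:
        result.extend(to_process.tolist())
        # vectorised frontier expansion: children of any node in the frontier
        to_process = tree['child'][np.in1d(tree['parent'], to_process)]
    return result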
