
Commit 8e76624

Merge branch 'master' into fix/remove-cython-from-install-requires
2 parents bd486d0 + c201b2e

13 files changed: +29 −36 lines

.github/workflows/pythonpublish_wheel.yml

Lines changed: 2 additions & 2 deletions

@@ -37,7 +37,7 @@ jobs:
   other-deploy:
     strategy:
       matrix:
-        python: ["3.8", "3.9", "3.10", "3.11"]
+        python: ["3.9", "3.10", "3.11", "3.12"]
         os: [windows-2019, macos-11]
     runs-on: ${{ matrix.os }}
     steps:
@@ -52,7 +52,7 @@ jobs:
       run: |
         python -m pip install --upgrade pip
         pip install setuptools build wheel twine
-        pip install "cython<3" oldest-supported-numpy
+        pip install cython "numpy>=2"
     - name: Build wheel
       run: |
        python -m build --no-isolation
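
The build environment now installs an unpinned Cython and NumPy 2.x instead of pinning "cython<3" with oldest-supported-numpy. A post-build smoke test along these lines (my sketch, not part of this workflow) can confirm that the freshly built wheel's compiled extensions import against the NumPy it was built with:

import numpy as np

# Confirm the build/test environment really is NumPy 2.x.
major = int(np.__version__.split(".")[0])
assert major >= 2, f"expected a NumPy 2.x environment, got {np.__version__}"

import hdbscan  # an ImportError here usually indicates an ABI mismatch
print("hdbscan extensions import cleanly against NumPy", np.__version__)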

azure-pipelines.yml

Lines changed: 9 additions & 18 deletions

@@ -27,24 +27,6 @@ stages:
 - job: run_platform_tests
   strategy:
     matrix:
-      mac_py37:
-        imageName: 'macOS-latest'
-        python.version: '3.7'
-      linux_py37:
-        imageName: 'ubuntu-latest'
-        python.version: '3.7'
-      windows_py37:
-        imageName: 'windows-latest'
-        python.version: '3.7'
-      mac_py38:
-        imageName: 'macOS-latest'
-        python.version: '3.8'
-      linux_py38:
-        imageName: 'ubuntu-latest'
-        python.version: '3.8'
-      windows_py38:
-        imageName: 'windows-latest'
-        python.version: '3.8'
       mac_py39:
         imageName: 'macOS-latest'
         python.version: '3.9'
@@ -72,6 +54,15 @@ stages:
       windows_py311:
         imageName: 'windows-latest'
         python.version: '3.11'
+      mac_py312:
+        imageName: 'macOS-latest'
+        python.version: '3.12'
+      linux_py312:
+        imageName: 'ubuntu-latest'
+        python.version: '3.12'
+      windows_py312:
+        imageName: 'windows-latest'
+        python.version: '3.12'
   pool:
     vmImage: $(imageName)
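
Dropping the Python 3.7/3.8 matrix entries and adding 3.12 presumably tracks the NumPy 2 requirement in the wheel workflow above, since NumPy 2.0 supports Python 3.9 through 3.12.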

docs/how_to_use_epsilon.rst

Lines changed: 1 addition & 1 deletion

@@ -43,7 +43,7 @@ In our example, we choose to merge nested clusters below 5 meters (0.005 kilometers) apart:
 
     X = np.radians(coordinates) #convert the list of lat/lon coordinates to radians
     earth_radius_km = 6371
-    epsilon = 0.005 / earth_radius #calculate 5 meter epsilon threshold
+    epsilon = 0.005 / earth_radius_km #calculate 5 meter epsilon threshold
 
     clusterer = hdbscan.HDBSCAN(min_cluster_size=4, metric='haversine',
                                 cluster_selection_epsilon=epsilon, cluster_selection_method = 'eom')
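
This fixes an outright NameError in the docs snippet: only earth_radius_km is defined, so dividing by earth_radius would fail if pasted as-is. A self-contained version of the corrected example, with made-up coordinates for illustration:

import numpy as np
import hdbscan

# Toy lat/lon pairs in degrees (two nearby points plus a distant group).
coordinates = [[52.5200, 13.4050], [52.5201, 13.4051], [52.5203, 13.4049],
               [48.8566, 2.3522], [48.8567, 2.3523]]

X = np.radians(coordinates)        # the haversine metric expects radians
earth_radius_km = 6371
epsilon = 0.005 / earth_radius_km  # 5 meters, expressed as an angle in radians

clusterer = hdbscan.HDBSCAN(min_cluster_size=2, metric='haversine',
                            cluster_selection_epsilon=epsilon,
                            cluster_selection_method='eom')
print(clusterer.fit_predict(X))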

docs/parameter_selection.rst

Lines changed: 1 addition & 1 deletion

@@ -128,7 +128,7 @@ Selecting ``min_samples``
 Since we have seen that ``min_samples`` clearly has a dramatic effect on
 clustering, the question becomes: how do we select this parameter? The
 simplest intuition for what ``min_samples`` does is provide a measure of
-how conservative you want you clustering to be. The larger the value of
+how conservative you want your clustering to be. The larger the value of
 ``min_samples`` you provide, the more conservative the clustering --
 more points will be declared as noise, and clusters will be restricted
 to progressively more dense areas. We can see this in practice by
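
Typo fix aside, the passage's claim is easy to check: raising min_samples makes the clustering more conservative, so more points get the noise label (-1). A small sketch on synthetic data (not from the docs):

import numpy as np
import hdbscan
from sklearn.datasets import make_blobs

X, _ = make_blobs(n_samples=300, centers=3, cluster_std=1.5, random_state=0)

# Noise count should grow (or at least not shrink) as min_samples increases.
for min_samples in (5, 15, 40):
    labels = hdbscan.HDBSCAN(min_cluster_size=15,
                             min_samples=min_samples).fit_predict(X)
    print(min_samples, "->", int(np.sum(labels == -1)), "noise points")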

hdbscan/_hdbscan_linkage.pyx

Lines changed: 2 additions & 2 deletions

@@ -33,7 +33,7 @@ cpdef np.ndarray[np.double_t, ndim=2] mst_linkage_core(
     result = np.zeros((distance_matrix.shape[0] - 1, 3))
     node_labels = np.arange(distance_matrix.shape[0], dtype=np.intp)
     current_node = 0
-    current_distances = np.infty * np.ones(distance_matrix.shape[0])
+    current_distances = np.inf * np.ones(distance_matrix.shape[0])
     current_labels = node_labels
     for i in range(1, node_labels.shape[0]):
         label_filter = current_labels != current_node
@@ -100,7 +100,7 @@ cpdef np.ndarray[np.double_t, ndim=2] mst_linkage_core_vector(
     result_arr = np.zeros((dim - 1, 3))
     in_tree_arr = np.zeros(dim, dtype=np.int8)
     current_node = 0
-    current_distances_arr = np.infty * np.ones(dim)
+    current_distances_arr = np.inf * np.ones(dim)
     current_sources_arr = np.ones(dim)
 
     result = (<np.double_t[:dim - 1, :3:1]> (<np.double_t *> result_arr.data))
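
Context for the rename (a quick check, not part of the diff): the np.infty alias was removed in NumPy 2.0, so only np.inf works across both NumPy 1.x and 2.x.

import numpy as np

distances = np.inf * np.ones(4)  # portable across NumPy 1.x and 2.x
print(distances)                 # [inf inf inf inf]
# np.infty * np.ones(4)          # AttributeError under NumPy >= 2.0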

hdbscan/_hdbscan_reachability.pyx

Lines changed: 1 addition & 1 deletion

@@ -79,7 +79,7 @@ cpdef sparse_mutual_reachability(object lil_matrix, np.intp_t min_points=5,
         if min_points - 1 < len(sorted_row_data):
             core_distance[i] = sorted_row_data[min_points - 1]
         else:
-            core_distance[i] = np.infty
+            core_distance[i] = np.inf
 
     if alpha != 1.0:
         lil_matrix = lil_matrix / alpha
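
For context on what this function computes (a dense-case sketch under my own simplifications, not the Cython code above): mutual reachability replaces each pairwise distance with max(core_distance[a], core_distance[b], distance[a, b]), and a point without enough neighbours gets an infinite core distance, matching the else branch in the diff.

import numpy as np

def mutual_reachability_dense(distance_matrix, min_points=5):
    # Each sorted row starts with the self-distance 0, so column
    # `min_points` holds the distance to the min_points-th nearest
    # neighbour; rows that are too short fall back to np.inf.
    sorted_rows = np.sort(distance_matrix, axis=1)
    if sorted_rows.shape[1] > min_points:
        core = sorted_rows[:, min_points]
    else:
        core = np.full(distance_matrix.shape[0], np.inf)
    return np.maximum(np.maximum(core[:, None], core[None, :]),
                      distance_matrix)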

hdbscan/_hdbscan_tree.pyx

Lines changed: 5 additions & 3 deletions

@@ -71,7 +71,7 @@ cpdef np.ndarray condense_tree(np.ndarray[np.double_t, ndim=2] hierarchy,
     cdef list result_list
 
     cdef np.ndarray[np.intp_t, ndim=1] relabel
-    cdef np.ndarray[np.int_t, ndim=1] ignore
+    cdef np.ndarray[np.int8_t, ndim=1] ignore
     cdef np.ndarray[np.double_t, ndim=1] children
 
     cdef np.intp_t node
@@ -91,7 +91,7 @@ cpdef np.ndarray condense_tree(np.ndarray[np.double_t, ndim=2] hierarchy,
     relabel = np.empty(root + 1, dtype=np.intp)
     relabel[root] = num_points
     result_list = []
-    ignore = np.zeros(len(node_list), dtype=int)
+    ignore = np.zeros(len(node_list), dtype=np.int8)
 
     for node in node_list:
         if ignore[node] or node < num_points:
@@ -251,7 +251,7 @@ cdef list bfs_from_cluster_tree(np.ndarray tree, np.intp_t bfs_root):
 
     while to_process.shape[0] > 0:
         result.extend(to_process.tolist())
-        to_process = tree['child'][np.in1d(tree['parent'], to_process)]
+        to_process = tree['child'][np.isin(tree['parent'], to_process)]
 
     return result
 
@@ -725,8 +725,10 @@ cpdef tuple get_clusters(np.ndarray tree, dict stability,
     # if you do, change this accordingly!
     if allow_single_cluster:
         node_list = sorted(stability.keys(), reverse=True)
+        node_list = [int(n) for n in node_list]
     else:
         node_list = sorted(stability.keys(), reverse=True)[:-1]
+        node_list = [int(n) for n in node_list]
         # (exclude root)
 
     cluster_tree = tree[tree['child_size'] > 1]
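
The commit doesn't state why the int(n) normalization was added; a plausible reading is that the stability dict is keyed by NumPy integer scalars, and downstream code is safer with plain Python ints. A toy illustration of the pattern:

import numpy as np

stability = {np.int64(7): 0.5, np.int64(3): 1.2}  # made-up values
node_list = sorted(stability.keys(), reverse=True)
node_list = [int(n) for n in node_list]
print(node_list)                                # [7, 3]
print(all(type(n) is int for n in node_list))   # True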

hdbscan/plots.py

Lines changed: 1 addition & 1 deletion

@@ -28,7 +28,7 @@ def _bfs_from_cluster_tree(tree, bfs_root):
 
     while to_process:
         result.extend(to_process)
-        to_process = tree['child'][np.in1d(tree['parent'], to_process)].tolist()
+        to_process = tree['child'][np.isin(tree['parent'], to_process)].tolist()
 
     return result
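
Background for the in1d-to-isin swaps throughout this commit (a quick check, not part of the diff): np.in1d is deprecated as of NumPy 2.0 in favour of np.isin, and for the 1-D arrays these BFS loops use, the two agree.

import numpy as np

# Toy cluster tree: children[i] has parent parents[i].
parents = np.array([0, 0, 1, 1, 2, 2])
children = np.array([1, 2, 3, 4, 5, 6])
frontier = [1, 2]

# Expand the BFS frontier one level, as the loops above do.
next_frontier = children[np.isin(parents, frontier)]
print(next_frontier)  # [3 4 5 6]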

hdbscan/prediction.py

Lines changed: 1 addition & 1 deletion

@@ -81,7 +81,7 @@ def _clusters_below(self, cluster):
         while to_process:
             result.extend(to_process)
             to_process = \
-                self.cluster_tree['child'][np.in1d(self.cluster_tree['parent'],
+                self.cluster_tree['child'][np.isin(self.cluster_tree['parent'],
                                                    to_process)]
             to_process = to_process.tolist()

hdbscan/validity.py

Lines changed: 2 additions & 2 deletions

@@ -180,8 +180,8 @@ def internal_minimum_spanning_tree(mr_distances):
     # A little "fancy" we select from the flattened array reshape back
     # (Fortran format to get indexing right) and take the product to do an and
     # then convert back to boolean type.
-    edge_selection = np.prod(np.in1d(min_span_tree.T[:2], vertices).reshape(
-        (min_span_tree.shape[0], 2), order='F'), axis=1).astype(bool)
+    edge_selection = np.prod(
+        np.isin(min_span_tree.T[:2], vertices), axis=0).astype(bool)
 
     # Density sparseness is not well defined if there are no
     # internal edges (as per the referenced paper). However
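
A small check of the simplified selection (my reconstruction of the intent, with toy data): an MST edge is internal only when both endpoints are in vertices, and taking the product of the two boolean rows of np.isin is a vectorized AND. Because np.isin preserves the (2, n_edges) shape of min_span_tree.T[:2], the old reshape/order='F' dance is no longer needed.

import numpy as np

# Rows are [source, target, weight], so T[:2] is a (2, n_edges) array.
min_span_tree = np.array([[0, 1, 0.5],
                          [1, 2, 0.7],
                          [2, 3, 0.9]])
vertices = np.array([1, 2, 3])

edge_selection = np.prod(
    np.isin(min_span_tree.T[:2], vertices), axis=0).astype(bool)
print(edge_selection)  # [False  True  True]: edge (0, 1) has endpoint 0 outside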
