Skip to content

Commit 1daa19a

Browse files
authored
Remove and resolve TODO comments copied from source python files (#385)
1 parent 76ca370 commit 1daa19a

File tree

33 files changed

+15
-300
lines changed

33 files changed

+15
-300
lines changed

pyproject.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -26,7 +26,7 @@ tests = [
2626
# Needed for stubtest and downloads their dependencies to get known import symbols
2727
"networkx",
2828
"scikit-image",
29-
"scikit-learn",
29+
"scikit-learn <1.7.0", # TODO: Update stubs for sklearn
3030
"sympy",
3131
"vispy",
3232
]
Lines changed: 0 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,10 +1,6 @@
11
from ...classes.graph import Graph
2-
from ...utils import not_implemented_for
3-
from ..matching import maximal_matching
42

53
__all__ = ["min_weighted_dominating_set", "min_edge_dominating_set"]
64

7-
# TODO Why doesn't this algorithm work for directed graphs?
8-
95
def min_weighted_dominating_set(G: Graph, weight: str | None = None) -> set: ...
106
def min_edge_dominating_set(G: Graph) -> set: ...

stubs/networkx/algorithms/clique.pyi

Lines changed: 0 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -1,10 +1,6 @@
1-
from collections import defaultdict, deque
2-
from itertools import chain, combinations, islice
3-
41
from numpy.typing import ArrayLike
52

63
from ..classes.graph import Graph
7-
from ..utils import not_implemented_for
84

95
__all__ = [
106
"find_cliques",
@@ -22,8 +18,6 @@ __all__ = [
2218

2319
def enumerate_all_cliques(G: Graph): ...
2420
def find_cliques(G: Graph, nodes=None): ...
25-
26-
# TODO Should this also be not implemented for directed graphs?
2721
def find_cliques_recursive(G: Graph, nodes=None): ...
2822
def make_max_clique_graph(G: Graph, create_using=None): ...
2923
def make_clique_bipartite(G: Graph, fpos: bool | None = None, create_using=None, name=None): ...

stubs/networkx/algorithms/cuts.pyi

Lines changed: 0 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,3 @@
1-
from itertools import chain
21
from typing import Any
32

43
from ..classes.graph import Graph
@@ -14,19 +13,11 @@ __all__ = [
1413
"volume",
1514
]
1615

17-
# TODO STILL NEED TO UPDATE ALL THE DOCUMENTATION!
18-
1916
def cut_size(G: Graph, S, T=None, weight: Any = None): ...
2017
def volume(G: Graph, S, weight: Any = None): ...
2118
def normalized_cut_size(G: Graph, S, T=None, weight: Any = None): ...
2219
def conductance(G: Graph, S, T=None, weight: Any = None): ...
2320
def edge_expansion(G: Graph, S, T=None, weight: Any = None): ...
2421
def mixing_expansion(G: Graph, S, T=None, weight: Any = None): ...
25-
26-
# TODO What is the generalization to two arguments, S and T? Does the
27-
# denominator become `min(len(S), len(T))`?
2822
def node_expansion(G: Graph, S): ...
29-
30-
# TODO What is the generalization to two arguments, S and T? Does the
31-
# denominator become `min(len(S), len(T))`?
3223
def boundary_expansion(G: Graph, S): ...

stubs/networkx/algorithms/distance_regular.pyi

Lines changed: 0 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -3,8 +3,6 @@ from collections.abc import Iterable
33
from numpy.typing import ArrayLike
44

55
from ..classes.graph import Graph
6-
from ..utils import not_implemented_for
7-
from .distance_measures import diameter
86

97
__all__ = [
108
"is_distance_regular",
@@ -16,7 +14,4 @@ __all__ = [
1614
def is_distance_regular(G: Graph) -> bool: ...
1715
def global_parameters(b: ArrayLike, c: ArrayLike) -> Iterable: ...
1816
def intersection_array(G: Graph): ...
19-
20-
# TODO There is a definition for directed strongly regular graphs.
21-
2217
def is_strongly_regular(G: Graph) -> bool: ...

stubs/networkx/algorithms/tree/branchings.pyi

Lines changed: 0 additions & 24 deletions
Original file line numberDiff line numberDiff line change
@@ -1,27 +1,3 @@
1-
# TODO: Implement method from Gabow, Galil, Spence and Tarjan:
2-
#
3-
# @article{
4-
# year={1986},
5-
# issn={0209-9683},
6-
# journal={Combinatorica},
7-
# volume={6},
8-
# number={2},
9-
# doi={10.1007/BF02579168},
10-
# title={Efficient algorithms for finding minimum spanning trees in
11-
# undirected and directed graphs},
12-
# url={https://doi.org/10.1007/BF02579168},
13-
# publisher={Springer-Verlag},
14-
# keywords={68 B 15; 68 C 05},
15-
# author={Gabow, Harold N. and Galil, Zvi and Spencer, Thomas and Tarjan,
16-
# Robert E.},
17-
# pages={109-122},
18-
# language={English}
19-
# }
20-
import string
21-
from dataclasses import dataclass, field
22-
from enum import Enum
23-
from operator import itemgetter
24-
from queue import PriorityQueue
251
from typing import Literal
262

273
from ...classes.digraph import DiGraph

stubs/networkx/classes/coreviews.pyi

Lines changed: 0 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -82,9 +82,6 @@ class FilterAtlas(Mapping): # nodedict, nbrdict, keydict
8282
def __len__(self): ...
8383
def __iter__(self): ...
8484
def __getitem__(self, key): ...
85-
86-
# FIXME should this just be removed? we don't use it, but someone might
87-
def copy(self): ...
8885
def __str__(self): ...
8986
def __repr__(self): ...
9087

@@ -93,21 +90,12 @@ class FilterAdjacency(Mapping): # edgedict
9390
def __len__(self): ...
9491
def __iter__(self): ...
9592
def __getitem__(self, node): ...
96-
97-
# FIXME should this just be removed? we don't use it, but someone might
98-
def copy(self): ...
9993
def __str__(self): ...
10094
def __repr__(self): ...
10195

10296
class FilterMultiInner(FilterAdjacency): # muliedge_seconddict
10397
def __iter__(self): ...
10498
def __getitem__(self, nbr): ...
10599

106-
# FIXME should this just be removed? we don't use it, but someone might
107-
def copy(self): ...
108-
109100
class FilterMultiAdjacency(FilterAdjacency): # multiedgedict
110101
def __getitem__(self, node): ...
111-
112-
# FIXME should this just be removed? we don't use it, but someone might
113-
def copy(self): ...

stubs/networkx/conftest.pyi

Lines changed: 0 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -1,14 +1,8 @@
1-
import sys
2-
import warnings
3-
4-
import networkx
51
import pytest
62

73
def pytest_addoption(parser): ...
84
def pytest_configure(config): ...
95
def pytest_collection_modifyitems(config, items): ...
10-
11-
# TODO: The warnings below need to be dealt with, but for now we silence them.
126
@pytest.fixture(autouse=True)
137
def set_warnings(): ...
148
@pytest.fixture(autouse=True)

stubs/sklearn/cluster/_agglomerative.pyi

Lines changed: 2 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -49,8 +49,7 @@ class AgglomerativeClustering(ClusterMixin, BaseEstimator):
4949
self,
5050
n_clusters: None | int = 2,
5151
*,
52-
affinity: str | Callable = "deprecated", # TODO(1.4): Remove
53-
metric: None | str | Callable = None, # TODO(1.4): Set to "euclidean"
52+
metric: str | Callable = "euclidean",
5453
memory: None | Memory | str = None,
5554
connectivity: None | ArrayLike | Callable = None,
5655
compute_full_tree: Literal["auto"] | bool = "auto",
@@ -77,8 +76,7 @@ class FeatureAgglomeration(ClassNamePrefixFeaturesOutMixin, AgglomerativeCluster
7776
self,
7877
n_clusters: None | int = 2,
7978
*,
80-
affinity: str | Callable = "deprecated", # TODO(1.4): Remove
81-
metric: None | str | Callable = None, # TODO(1.4): Set to "euclidean"
79+
metric: str | Callable = "euclidean",
8280
memory: None | Memory | str = None,
8381
connectivity: None | ArrayLike | Callable = None,
8482
compute_full_tree: Literal["auto"] | bool = "auto",

stubs/sklearn/covariance/_robust_covariance.pyi

Lines changed: 0 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -7,17 +7,6 @@ from numpy.random import RandomState
77
from .._typing import Float, Int, MatrixLike
88
from . import EmpiricalCovariance
99

10-
# Author: Virgile Fritsch <[email protected]>
11-
#
12-
# License: BSD 3 clause
13-
14-
# Minimum Covariance Determinant
15-
# Implementing of an algorithm by Rousseeuw & Van Driessen described in
16-
# (A Fast Algorithm for the Minimum Covariance Determinant Estimator,
17-
# 1999, American Statistical Association and the American Society
18-
# for Quality, TECHNOMETRICS)
19-
# XXX Is this really a public function? It's not listed in the docs or
20-
# exported by sklearn.covariance. Deprecate?
2110
def c_step(
2211
X: MatrixLike,
2312
n_support: Int,

0 commit comments

Comments (0)