Commit 785f15d

Remove numpy dependencies in src/lightning/pytorch (#19841)
1 parent bac82b8 commit 785f15d

File tree

3 files changed: 8 additions, 8 deletions


src/lightning/pytorch/loggers/logger.py

Lines changed: 3 additions & 3 deletions
@@ -15,11 +15,11 @@
 
 import functools
 import operator
+import statistics
 from abc import ABC
 from collections import defaultdict
 from typing import Any, Callable, Dict, Mapping, Optional, Sequence
 
-import numpy as np
 from typing_extensions import override
 
 from lightning.fabric.loggers import Logger as FabricLogger
@@ -100,7 +100,7 @@ def method(*args: Any, **kwargs: Any) -> None:
 def merge_dicts(  # pragma: no cover
     dicts: Sequence[Mapping],
     agg_key_funcs: Optional[Mapping] = None,
-    default_func: Callable[[Sequence[float]], float] = np.mean,
+    default_func: Callable[[Sequence[float]], float] = statistics.mean,
 ) -> Dict:
     """Merge a sequence with dictionaries into one dictionary by aggregating the same keys with some given function.
 
@@ -126,7 +126,7 @@ def merge_dicts(  # pragma: no cover
         >>> d2 = {'a': 1.1, 'b': 2.2, 'v': 1, 'd': {'d1': 2, 'd2': 3}}
         >>> d3 = {'a': 1.1, 'v': 2.3, 'd': {'d3': 3, 'd4': {'d5': 1}}}
         >>> dflt_func = min
-        >>> agg_funcs = {'a': np.mean, 'v': max, 'd': {'d1': sum}}
+        >>> agg_funcs = {'a': statistics.mean, 'v': max, 'd': {'d1': sum}}
         >>> pprint.pprint(merge_dicts([d1, d2, d3], agg_funcs, dflt_func))
         {'a': 1.3,
          'b': 2.0,
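
Not part of the commit: a minimal sketch of the behaviour the swap preserves. The standard-library statistics.mean replaces np.mean as merge_dicts' default aggregator; for a sequence of floats both return the arithmetic mean as a float. The metric dictionaries and their values below are hypothetical (chosen so the means are exact).

# Sketch only: statistics.mean as a drop-in for np.mean in merge_dicts.
import pprint
import statistics

from lightning.pytorch.loggers.logger import merge_dicts

# Hypothetical metrics from three logging steps.
steps = [
    {"loss": 2.0, "acc": 0.50},
    {"loss": 1.0, "acc": 0.75},
    {"loss": 1.5, "acc": 1.00},
]

print(statistics.mean([2.0, 1.0, 1.5]))  # 1.5, same value np.mean would return
# With no agg_key_funcs given, every key falls back to default_func (statistics.mean).
pprint.pprint(merge_dicts(steps))        # expected: {'acc': 0.75, 'loss': 1.5}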

src/lightning/pytorch/tuner/lr_finder.py

Lines changed: 1 addition & 1 deletion
@@ -190,7 +190,7 @@ def suggestion(self, skip_begin: int = 10, skip_end: int = 1) -> Optional[float]
         losses = losses[torch.isfinite(losses)]
 
         if len(losses) < 2:
-            # computing np.gradient requires at least 2 points
+            # computing torch.gradient requires at least 2 points
             log.error(
                 "Failed to compute suggestion for learning rate because there are not enough points. Increase the loop"
                 " iteration limits or the size of your dataset/dataloader."

src/lightning/pytorch/utilities/__init__.py

Lines changed: 4 additions & 4 deletions
@@ -13,7 +13,7 @@
 # limitations under the License.
 """General utilities."""
 
-import numpy
+import torch
 
 from lightning.fabric.utilities import (
     LightningEnum,
@@ -55,6 +55,6 @@
     "suggested_max_num_workers",
 ]
 
-FLOAT16_EPSILON = numpy.finfo(numpy.float16).eps
-FLOAT32_EPSILON = numpy.finfo(numpy.float32).eps
-FLOAT64_EPSILON = numpy.finfo(numpy.float64).eps
+FLOAT16_EPSILON = torch.finfo(torch.float16).eps
+FLOAT32_EPSILON = torch.finfo(torch.float32).eps
+FLOAT64_EPSILON = torch.finfo(torch.float64).eps
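
As a quick sanity check (not part of the commit), torch.finfo reports the same machine epsilons that numpy.finfo did, so the exported constants keep their values:

# Machine epsilon per dtype via torch.finfo; matches numpy.finfo for these dtypes.
import torch

print(torch.finfo(torch.float16).eps)  # 0.0009765625            (2**-10)
print(torch.finfo(torch.float32).eps)  # 1.1920928955078125e-07  (2**-23)
print(torch.finfo(torch.float64).eps)  # 2.220446049250313e-16   (2**-52)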
