
Commit b6442c5

rittik9, dependabot[bot], and pre-commit-ci[bot] authored
chore: bump mypy from 1.15.0 to 1.16.0 and resolve typing issues (#20900)
* build(deps): bump mypy from 1.15.0 to 1.16.0 in /requirements
  Bumps [mypy](https://github.com/python/mypy) from 1.15.0 to 1.16.0.
  - [Changelog](https://github.com/python/mypy/blob/master/CHANGELOG.md)
  - [Commits](python/mypy@v1.15.0...v1.16.0)
  ---
  updated-dependencies:
  - dependency-name: mypy
    dependency-version: 1.16.0
    dependency-type: direct:production
    update-type: version-update:semver-minor
  ...
  Signed-off-by: dependabot[bot] <[email protected]>
* fix typing
* [pre-commit.ci] auto fixes from pre-commit.com hooks
  for more information, see https://pre-commit.ci
* Update fsdp.py

---------

Signed-off-by: dependabot[bot] <[email protected]>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
1 parent ab619cd · commit b6442c5

14 files changed: +25 -25 lines changed

requirements/typing.txt

Lines changed: 1 addition & 1 deletion
@@ -1,4 +1,4 @@
-mypy==1.15.0
+mypy==1.16.0
 torch==2.7.1
 
 types-Markdown

src/lightning/fabric/plugins/precision/bitsandbytes.py

Lines changed: 2 additions & 2 deletions
@@ -226,7 +226,7 @@ class _Linear8bitLt(bnb.nn.Linear8bitLt):
     def __init__(self, *args: Any, device: Optional[_DEVICE] = None, threshold: float = 6.0, **kwargs: Any) -> None:
         super().__init__(*args, device=device, threshold=threshold, **kwargs)
         self.weight = cast(bnb.nn.Int8Params, self.weight)  # type: ignore[has-type]
-        self.bias = cast(Optional[torch.nn.Parameter], self.bias)  # type: ignore[has-type]
+        self.bias: Optional[torch.nn.Parameter] = self.bias
         # if the device is CUDA or we are under a CUDA context manager, quantize the weight here, so we don't end up
         # filling the device memory with float32 weights which could lead to OOM
         if torch.tensor(0, device=device).device.type == "cuda":
@@ -310,7 +310,7 @@ class _Linear4bit(bnb.nn.Linear4bit):
     def __init__(self, *args: Any, device: Optional[_DEVICE] = None, **kwargs: Any) -> None:
         super().__init__(*args, device=device, **kwargs)
         self.weight = cast(bnb.nn.Params4bit, self.weight)  # type: ignore[has-type]
-        self.bias = cast(Optional[torch.nn.Parameter], self.bias)  # type: ignore[has-type]
+        self.bias: Optional[torch.nn.Parameter] = self.bias
         # if the device is CUDA or we are under a CUDA context manager, quantize the weight here, so we don't end up
         # filling the device memory with float32 weights which could lead to OOM
         if torch.tensor(0, device=device).device.type == "cuda":
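The bitsandbytes change replaces a `cast(...)` that needed `# type: ignore[has-type]` with an explicit type annotation on the attribute assignment. Below is a minimal, runnable sketch of that pattern against a plain `torch.nn.Linear`; the `_LinearSketch` class is hypothetical and only illustrates the annotation style, not the real bitsandbytes layers.

```python
from typing import Optional

import torch


class _LinearSketch(torch.nn.Linear):
    """Illustrative subclass (not part of Lightning) mirroring the pattern above."""

    def __init__(self, in_features: int, out_features: int) -> None:
        super().__init__(in_features, out_features)
        # Old pattern from the diff: re-assign the inherited attribute through cast()
        # and silence mypy on that line:
        #     self.bias = cast(Optional[torch.nn.Parameter], self.bias)  # type: ignore[has-type]
        # New pattern: declare the attribute's type on the assignment itself,
        # which avoids the cast (and, for the bitsandbytes layers, the ignore comment).
        self.bias: Optional[torch.nn.Parameter] = self.bias


layer = _LinearSketch(4, 2)
print(layer.bias is not None)  # nn.Linear creates a bias parameter by default -> True
```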

src/lightning/fabric/strategies/fsdp.py

Lines changed: 1 addition & 1 deletion
@@ -237,7 +237,7 @@ def precision(self) -> FSDPPrecision:
 
     @precision.setter
     @override
-    def precision(self, precision: Optional[FSDPPrecision]) -> None:
+    def precision(self, precision: Optional[Precision]) -> None:
         if precision is not None and not isinstance(precision, FSDPPrecision):
             raise TypeError(f"The FSDP strategy can only work with the `FSDPPrecision` plugin, found {precision}")
         self._precision = precision
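The recurring change across the strategy files widens the property setter's parameter from the specialized plugin type to the base type, while the runtime `isinstance` check keeps enforcing the specialized plugin. A simplified, self-contained sketch of that pattern (the classes below are illustrative stand-ins, not Lightning's real hierarchy):

```python
from typing import Optional


class Precision:
    """Stand-in for the base precision plugin."""


class FSDPPrecision(Precision):
    """Stand-in for the FSDP-specific precision plugin."""


class FSDPStrategySketch:
    def __init__(self, precision: Optional[Precision] = None) -> None:
        self._precision: Optional[Precision] = None
        # Route through the setter so the runtime validation below applies.
        self.precision = precision

    @property
    def precision(self) -> Optional[Precision]:
        return self._precision

    @precision.setter
    def precision(self, precision: Optional[Precision]) -> None:
        # The signature accepts the broad base type, while the runtime check
        # still enforces the FSDP-specific plugin.
        if precision is not None and not isinstance(precision, FSDPPrecision):
            raise TypeError(f"The FSDP strategy can only work with the `FSDPPrecision` plugin, found {precision}")
        self._precision = precision


strategy = FSDPStrategySketch(FSDPPrecision())  # accepted
# FSDPStrategySketch(Precision())  # would raise TypeError at runtime
```

Accepting the broader type in the setter is also why the `# type: ignore[assignment]` on `self.precision = precision` in `strategy.py` below could be dropped: the assignment now matches the setter's parameter type on its own.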

src/lightning/fabric/strategies/single_xla.py

Lines changed: 3 additions & 3 deletions
@@ -18,7 +18,7 @@
 
 from lightning.fabric.accelerators import Accelerator
 from lightning.fabric.accelerators.xla import _XLA_AVAILABLE
-from lightning.fabric.plugins import XLAPrecision
+from lightning.fabric.plugins import CheckpointIO, Precision, XLAPrecision
 from lightning.fabric.plugins.io.xla import XLACheckpointIO
 from lightning.fabric.strategies import _StrategyRegistry
 from lightning.fabric.strategies.single_device import SingleDeviceStrategy
@@ -61,7 +61,7 @@ def checkpoint_io(self) -> XLACheckpointIO:
 
     @checkpoint_io.setter
     @override
-    def checkpoint_io(self, io: Optional[XLACheckpointIO]) -> None:
+    def checkpoint_io(self, io: Optional[CheckpointIO]) -> None:
         if io is not None and not isinstance(io, XLACheckpointIO):
             raise TypeError(f"The XLA strategy can only work with the `XLACheckpointIO` plugin, found {io}")
         self._checkpoint_io = io
@@ -77,7 +77,7 @@ def precision(self) -> XLAPrecision:
 
     @precision.setter
     @override
-    def precision(self, precision: Optional[XLAPrecision]) -> None:
+    def precision(self, precision: Optional[Precision]) -> None:
         if precision is not None and not isinstance(precision, XLAPrecision):
             raise TypeError(f"The XLA strategy can only work with the `XLAPrecision` plugin, found {precision}")
         self._precision = precision

src/lightning/fabric/strategies/strategy.py

Lines changed: 1 addition & 1 deletion
@@ -52,7 +52,7 @@ def __init__(
         self._checkpoint_io: Optional[CheckpointIO] = checkpoint_io
         self._precision: Optional[Precision] = None
         # Call the precision setter for input validation
-        self.precision = precision  # type: ignore[assignment]
+        self.precision = precision
         self._launcher: Optional[_Launcher] = None
         self._backward_sync_control: Optional[_BackwardSyncControl] = None
 

src/lightning/fabric/strategies/xla.py

Lines changed: 3 additions & 3 deletions
@@ -23,7 +23,7 @@
 
 from lightning.fabric.accelerators import Accelerator
 from lightning.fabric.accelerators.xla import _XLA_GREATER_EQUAL_2_1
-from lightning.fabric.plugins import XLAPrecision
+from lightning.fabric.plugins import CheckpointIO, Precision, XLAPrecision
 from lightning.fabric.plugins.environments import XLAEnvironment
 from lightning.fabric.plugins.io.xla import XLACheckpointIO
 from lightning.fabric.strategies import ParallelStrategy, _StrategyRegistry
@@ -83,7 +83,7 @@ def checkpoint_io(self) -> XLACheckpointIO:
 
     @checkpoint_io.setter
     @override
-    def checkpoint_io(self, io: Optional[XLACheckpointIO]) -> None:
+    def checkpoint_io(self, io: Optional[CheckpointIO]) -> None:
         if io is not None and not isinstance(io, XLACheckpointIO):
             raise TypeError(f"The XLA strategy can only work with the `XLACheckpointIO` plugin, found {io}")
         self._checkpoint_io = io
@@ -99,7 +99,7 @@ def precision(self) -> XLAPrecision:
 
     @precision.setter
     @override
-    def precision(self, precision: Optional[XLAPrecision]) -> None:
+    def precision(self, precision: Optional[Precision]) -> None:
         if precision is not None and not isinstance(precision, XLAPrecision):
             raise TypeError(f"The XLA strategy can only work with the `XLAPrecision` plugin, found {precision}")
         self._precision = precision

src/lightning/fabric/strategies/xla_fsdp.py

Lines changed: 3 additions & 3 deletions
@@ -26,7 +26,7 @@
 
 from lightning.fabric.accelerators import Accelerator
 from lightning.fabric.accelerators.xla import _XLA_AVAILABLE
-from lightning.fabric.plugins import XLAPrecision
+from lightning.fabric.plugins import CheckpointIO, Precision, XLAPrecision
 from lightning.fabric.plugins.environments import XLAEnvironment
 from lightning.fabric.plugins.io.xla import XLACheckpointIO
 from lightning.fabric.strategies import ParallelStrategy, _StrategyRegistry
@@ -134,7 +134,7 @@ def checkpoint_io(self) -> XLACheckpointIO:
 
     @checkpoint_io.setter
     @override
-    def checkpoint_io(self, io: Optional[XLACheckpointIO]) -> None:
+    def checkpoint_io(self, io: Optional[CheckpointIO]) -> None:
         if io is not None and not isinstance(io, XLACheckpointIO):
             raise TypeError(f"The XLA strategy can only work with the `XLACheckpointIO` plugin, found {io}")
         self._checkpoint_io = io
@@ -150,7 +150,7 @@ def precision(self) -> XLAPrecision:
 
     @precision.setter
     @override
-    def precision(self, precision: Optional[XLAPrecision]) -> None:
+    def precision(self, precision: Optional[Precision]) -> None:
         if precision is not None and not isinstance(precision, XLAPrecision):
             raise TypeError(f"The XLA FSDP strategy can only work with the `XLAPrecision` plugin, found {precision}")
         self._precision = precision

src/lightning/pytorch/core/module.py

Lines changed: 1 addition & 1 deletion
@@ -218,7 +218,7 @@ def trainer(self) -> "pl.Trainer":
     def trainer(self, trainer: Optional["pl.Trainer"]) -> None:
         for v in self.children():
             if isinstance(v, LightningModule):
-                v.trainer = trainer  # type: ignore[assignment]
+                v.trainer = trainer
         self._trainer = trainer
 
     @property

src/lightning/pytorch/serve/servable_module_validator.py

Lines changed: 1 addition & 1 deletion
@@ -93,7 +93,7 @@ def on_train_start(self, trainer: "pl.Trainer", servable_module: "pl.LightningMo
 
         # Note: The Trainer needs to be detached from the pl_module before starting the process.
         # This would fail during the deepcopy with DDP.
-        servable_module.trainer = None  # type: ignore[assignment]
+        servable_module.trainer = None
 
         process = Process(target=self._start_server, args=(servable_module, self.host, self.port, self.optimization))
         process.start()
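The removed `# type: ignore[assignment]` comments on `v.trainer = trainer` and `servable_module.trainer = None` both target `LightningModule`'s `trainer` property, whose getter returns a `Trainer` while its setter (shown in the `module.py` hunk above) accepts `Optional["pl.Trainer"]` so a module can be detached; presumably the newer mypy handles such asymmetric getter/setter types without an ignore. A minimal sketch of that asymmetry, with hypothetical stand-in classes:

```python
from typing import Optional


class TrainerSketch:
    """Stand-in for pl.Trainer."""


class ModuleSketch:
    def __init__(self) -> None:
        self._trainer: Optional[TrainerSketch] = None

    @property
    def trainer(self) -> TrainerSketch:
        # The getter promises an attached trainer and raises otherwise ...
        if self._trainer is None:
            raise RuntimeError("The module is not attached to a Trainer.")
        return self._trainer

    @trainer.setter
    def trainer(self, trainer: Optional[TrainerSketch]) -> None:
        # ... while the setter accepts None so the module can be detached,
        # e.g. before handing it to a subprocess as in the validator above.
        self._trainer = trainer


module = ModuleSketch()
module.trainer = TrainerSketch()
module.trainer = None  # detaching is valid against the setter's Optional parameter
```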

src/lightning/pytorch/strategies/fsdp.py

Lines changed: 1 addition & 1 deletion
@@ -227,7 +227,7 @@ def precision_plugin(self) -> FSDPPrecision:
 
     @precision_plugin.setter
     @override
-    def precision_plugin(self, precision_plugin: Optional[FSDPPrecision]) -> None:
+    def precision_plugin(self, precision_plugin: Optional[Precision]) -> None:
         if precision_plugin is not None and not isinstance(precision_plugin, FSDPPrecision):
             raise TypeError(
                 f"The FSDP strategy can only work with the `FSDPPrecision` plugin, found {precision_plugin}"
