
Commit 0c9d414

dependabot[bot], Borda, and pre-commit-ci[bot] authored
build(deps): bump torch from 2.5.1 to 2.6.0 & torchvision from <0.21.0,>=0.16.0 to >=0.16.0,<0.22.0 in /requirements (#20728)
* build(deps): bump torch from 2.5.1 to 2.6.0 in /requirements

  Bumps [torch](https://github.com/pytorch/pytorch) from 2.5.1 to 2.6.0.
  - [Release notes](https://github.com/pytorch/pytorch/releases)
  - [Changelog](https://github.com/pytorch/pytorch/blob/main/RELEASE.md)
  - [Commits](pytorch/pytorch@v2.5.1...v2.6.0)

  ---
  updated-dependencies:
  - dependency-name: torch
    dependency-version: 2.6.0
    dependency-type: direct:production
  ...

  Signed-off-by: dependabot[bot] <[email protected]>

* build(deps): update torchvision requirement from <0.21.0,>=0.16.0 to >=0.16.0,<0.22.0 in /requirements (#20736)

  build(deps): update torchvision requirement in /requirements

  Updates the requirements on [torchvision](https://github.com/pytorch/vision) to permit the latest version.
  - [Release notes](https://github.com/pytorch/vision/releases)
  - [Commits](pytorch/vision@v0.16.0...v0.21.0)

  ---
  updated-dependencies:
  - dependency-name: torchvision
    dependency-version: 0.21.0
    dependency-type: direct:production
  ...

  Signed-off-by: dependabot[bot] <[email protected]>

  Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>

* # type: ignore[arg-type]

* type: ignore[arg-type]

---------

Signed-off-by: dependabot[bot] <[email protected]>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Jirka B <[email protected]>
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
1 parent 3fb8dcf · commit 0c9d414

File tree

12 files changed, +23 -21 lines changed


requirements/fabric/base.txt

Lines changed: 1 addition & 1 deletion
@@ -1,7 +1,7 @@
 # NOTE: the upper bound for the package version is only set for CI stability, and it is dropped while installing this package
 # in case you want to preserve/enforce restrictions on the latest compatible version, add "strict" as an in-line comment
 
-torch >=2.1.0, <2.6.0
+torch >=2.1.0, <2.7.0
 fsspec[http] >=2022.5.0, <2024.4.0
 packaging >=20.0, <=25.0
 typing-extensions >=4.4.0, <4.11.0

requirements/fabric/examples.txt

Lines changed: 1 addition & 1 deletion
@@ -1,6 +1,6 @@
 # NOTE: the upper bound for the package version is only set for CI stability, and it is dropped while installing this package
 # in case you want to preserve/enforce restrictions on the latest compatible version, add "strict" as an in-line comment
 
-torchvision >=0.16.0, <0.21.0
+torchvision >=0.16.0, <0.22.0
 torchmetrics >=0.10.0, <1.8.0
 lightning-utilities >=0.8.0, <0.15.0

requirements/pytorch/base.txt

Lines changed: 1 addition & 1 deletion
@@ -1,7 +1,7 @@
 # NOTE: the upper bound for the package version is only set for CI stability, and it is dropped while installing this package
 # in case you want to preserve/enforce restrictions on the latest compatible version, add "strict" as an in-line comment
 
-torch >=2.1.0, <2.6.0
+torch >=2.1.0, <2.7.0
 tqdm >=4.57.0, <4.67.0
 PyYAML >=5.4, <6.1.0
 fsspec[http] >=2022.5.0, <2024.4.0

requirements/pytorch/examples.txt

Lines changed: 1 addition & 1 deletion
@@ -2,7 +2,7 @@
 # in case you want to preserve/enforce restrictions on the latest compatible version, add "strict" as an in-line comment
 
 requests <2.32.0
-torchvision >=0.16.0, <0.21.0
+torchvision >=0.16.0, <0.22.0
 ipython[all] <8.19.0
 torchmetrics >=0.10.0, <1.8.0
 lightning-utilities >=0.8.0, <0.15.0

requirements/typing.txt

Lines changed: 1 addition & 1 deletion
@@ -1,5 +1,5 @@
 mypy==1.11.0
-torch==2.5.1
+torch==2.6.0
 
 types-Markdown
 types-PyYAML
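
Taken together, the requirements changes above raise the tested ceiling to torch <2.7.0 and torchvision <0.22.0, and pin torch==2.6.0 for type checking. Below is a small illustrative check (not part of this commit) that an installed environment falls inside the new upper bounds, using the packaging dependency already listed in requirements/fabric/base.txt:

from importlib.metadata import version

from packaging.version import Version

# Both assertions mirror the new upper bounds from this commit; adjust or drop
# them if you intentionally run a newer, untested torch/torchvision.
assert Version(version("torch")) < Version("2.7.0"), "torch 2.7+ is outside the tested range"
assert Version(version("torchvision")) < Version("0.22.0"), "torchvision 0.22+ is outside the tested range"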

src/lightning/fabric/plugins/collectives/torch_collective.py

Lines changed: 9 additions & 9 deletions
@@ -50,7 +50,7 @@ def world_size(self) -> int:
 
     @override
     def broadcast(self, tensor: Tensor, src: int) -> Tensor:
-        dist.broadcast(tensor, src, group=self.group)
+        dist.broadcast(tensor, src, group=self.group)  # type: ignore[arg-type]
         return tensor
 
     @override
@@ -62,7 +62,7 @@ def all_reduce(self, tensor: Tensor, op: Union[str, ReduceOp, RedOpType] = "sum"
     @override
     def reduce(self, tensor: Tensor, dst: int, op: Union[str, ReduceOp, RedOpType] = "sum") -> Tensor:
         op = self._convert_to_native_op(op)
-        dist.reduce(tensor, dst, op=op, group=self.group)
+        dist.reduce(tensor, dst, op=op, group=self.group)  # type: ignore[arg-type]
         return tensor
 
     @override
@@ -72,12 +72,12 @@ def all_gather(self, tensor_list: list[Tensor], tensor: Tensor) -> list[Tensor]:
 
     @override
     def gather(self, tensor: Tensor, gather_list: list[Tensor], dst: int = 0) -> list[Tensor]:
-        dist.gather(tensor, gather_list, dst, group=self.group)
+        dist.gather(tensor, gather_list, dst, group=self.group)  # type: ignore[arg-type]
         return gather_list
 
     @override
     def scatter(self, tensor: Tensor, scatter_list: list[Tensor], src: int = 0) -> Tensor:
-        dist.scatter(tensor, scatter_list, src, group=self.group)
+        dist.scatter(tensor, scatter_list, src, group=self.group)  # type: ignore[arg-type]
         return tensor
 
     @override
@@ -109,27 +109,27 @@ def all_gather_object(self, object_list: list[Any], obj: Any) -> list[Any]:
     def broadcast_object_list(
         self, object_list: list[Any], src: int, device: Optional[torch.device] = None
     ) -> list[Any]:
-        dist.broadcast_object_list(object_list, src, group=self.group, device=device)
+        dist.broadcast_object_list(object_list, src, group=self.group, device=device)  # type: ignore[arg-type]
         return object_list
 
     def gather_object(self, obj: Any, object_gather_list: list[Any], dst: int = 0) -> list[Any]:
-        dist.gather_object(obj, object_gather_list, dst, group=self.group)
+        dist.gather_object(obj, object_gather_list, dst, group=self.group)  # type: ignore[arg-type]
        return object_gather_list
 
     def scatter_object_list(
         self, scatter_object_output_list: list[Any], scatter_object_input_list: list[Any], src: int = 0
     ) -> list[Any]:
-        dist.scatter_object_list(scatter_object_output_list, scatter_object_input_list, src, group=self.group)
+        dist.scatter_object_list(scatter_object_output_list, scatter_object_input_list, src, group=self.group)  # type: ignore[arg-type]
         return scatter_object_output_list
 
     @override
     def barrier(self, device_ids: Optional[list[int]] = None) -> None:
         if self.group == dist.GroupMember.NON_GROUP_MEMBER:
             return
-        dist.barrier(group=self.group, device_ids=device_ids)
+        dist.barrier(group=self.group, device_ids=device_ids)  # type: ignore[arg-type]
 
     def monitored_barrier(self, timeout: Optional[datetime.timedelta] = None, wait_all_ranks: bool = False) -> None:
-        dist.monitored_barrier(group=self.group, timeout=timeout, wait_all_ranks=wait_all_ranks)
+        dist.monitored_barrier(group=self.group, timeout=timeout, wait_all_ranks=wait_all_ranks)  # type: ignore[arg-type]
 
     @override
     def setup(self, main_address: Optional[str] = None, main_port: Optional[str] = None, **kwargs: Any) -> Self:
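
Every change in this file adds the same narrowly scoped suppression: the runtime call is untouched, and only mypy's arg-type error against the torch 2.6 stubs (presumably because the statically declared type of the group or another argument no longer matches the stub signature) is silenced. A minimal, self-contained illustration of that pattern, independent of torch.distributed:

from typing import Optional


def send(dest: str) -> None:
    print(f"sending to {dest}")


def forward(dest: Optional[str]) -> None:
    # mypy reports: Argument 1 to "send" has incompatible type "Optional[str]";
    # expected "str"  [arg-type]. Runtime behaviour is unchanged; only this one
    # error code is suppressed, and all other checks on the line stay active.
    send(dest)  # type: ignore[arg-type]


forward("localhost")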

src/lightning/fabric/strategies/xla_fsdp.py

Lines changed: 1 addition & 0 deletions
@@ -295,6 +295,7 @@ def clip_gradients_norm(
     ) -> Tensor:
         """Clip gradients by norm."""
         self.precision.unscale_gradients(optimizer)
+        assert callable(module.clip_grad_norm_)
         return module.clip_grad_norm_(max_norm=max_norm, norm_type=norm_type)
 
     @override
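
The added assert both guards the call at runtime and lets the type checker narrow a dynamically resolved attribute to something callable before it is invoked. A simplified, hypothetical sketch of the same pattern outside XLA FSDP (the wrapped module is assumed to expose clip_grad_norm_, as FSDP-style wrappers do):

from torch import nn


def clip_wrapped_gradients(wrapped: nn.Module, max_norm: float) -> None:
    # clip_grad_norm_ is resolved through nn.Module.__getattr__, so the static
    # checker cannot prove it is a method; the assert guards and narrows it.
    clip = wrapped.clip_grad_norm_
    assert callable(clip)
    clip(max_norm=max_norm, norm_type=2.0)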

src/lightning/fabric/utilities/init.py

Lines changed: 2 additions & 1 deletion
@@ -67,7 +67,8 @@ def _materialize(module: Module, device: _DEVICE) -> None:
             f"Materialization requires that the `{type(module).__name__}.reset_parameters` method is implemented."
             " This method is used to initialize any children parameters or buffers in this module."
         )
-    module.reset_parameters()
+    if callable(module.reset_parameters):
+        module.reset_parameters()
 
 
 def _materialize_meta_tensors(module: Module, device: _DEVICE) -> None:
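
For context, _materialize runs when a module built on the meta device gets real storage; its parameters must then be re-initialized, which is the step the new callable() guard sits in front of. A simplified, hypothetical sketch of that flow (materialize below is illustrative, not the Lightning helper):

import torch
from torch import nn


def materialize(module: nn.Module, device: torch.device) -> None:
    # allocate real, uninitialized storage for parameters and buffers
    module.to_empty(device=device)
    # re-initialize values; the callable() check mirrors the guard added above
    reset = getattr(module, "reset_parameters", None)
    if callable(reset):
        reset()


with torch.device("meta"):
    layer = nn.Linear(4, 4)  # created without real storage
materialize(layer, torch.device("cpu"))
print(layer.weight.device)  # cpu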

src/lightning/pytorch/callbacks/finetuning.py

Lines changed: 1 addition & 1 deletion
@@ -133,7 +133,7 @@ def flatten_modules(modules: Union[Module, Iterable[Union[Module, Iterable]]]) -
 
         if isinstance(modules, Iterable):
             _flatten_modules = []
-            for m in modules:  # type: ignore[union-attr]
+            for m in modules:
                 _flatten_modules.extend(BaseFinetuning.flatten_modules(m))
 
             _modules = iter(_flatten_modules)
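
The loop this change touches recursively flattens nested containers of modules; the diff simply drops an ignore that the iteration no longer needs. A simplified, hypothetical version of that recursion (not the callback's exact implementation):

from collections.abc import Iterable
from typing import Union

from torch import nn


def flatten(modules: Union[nn.Module, Iterable]) -> list[nn.Module]:
    if isinstance(modules, Iterable):
        flat: list[nn.Module] = []
        for m in modules:  # each element may be a module or another iterable
            flat.extend(flatten(m))
        return flat
    # a single module: keep only the leaves (modules without children)
    return [m for m in modules.modules() if not list(m.children())]


model = nn.Sequential(nn.Linear(4, 8), nn.ReLU(), nn.Linear(8, 2))
print([type(m).__name__ for m in flatten([model])])  # ['Linear', 'ReLU', 'Linear']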

src/lightning/pytorch/callbacks/throughput_monitor.py

Lines changed: 1 addition & 1 deletion
@@ -140,7 +140,7 @@ def _update(self, trainer: "Trainer", pl_module: "LightningModule", batch: Any,
             # this assumes that all iterations used the same batch size
             samples=iter_num * batch_size,
             lengths=None if self.length_fn is None else self._lengths[stage],
-            flops=flops_per_batch,
+            flops=flops_per_batch,  # type: ignore[arg-type]
         )
 
     def _compute(self, trainer: "Trainer", iter_num: Optional[int] = None) -> None:

0 commit comments
