Skip to content

Commit 83092cb

Browse files
dependabot[bot], Borda, and carmocca
authored and committed
Bump torch from 2.0.1 to 2.1.0 in /requirements (#18752)
Co-authored-by: Jirka <[email protected]> Co-authored-by: Carlos Mocholí <[email protected]> (cherry picked from commit 73f5df0)
1 parent 594bb26 commit 83092cb

File tree

18 files changed

+81
-65
lines changed

18 files changed

+81
-65
lines changed

.github/workflows/code-checks.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -49,4 +49,4 @@ jobs:
4949
pip list
5050
5151
- name: Check typing
52-
run: mypy --no-warn-unused-ignores
52+
run: mypy

pyproject.toml

Lines changed: 3 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -127,17 +127,10 @@ files = [
127127
]
128128
# This section is for folders with "-" as they are not valid python modules
129129
exclude = [
130-
"src/lightning_app/__about__.py",
131-
"src/lightning_app/__setup__.py",
132-
"src/lightning_app/__version__.py",
133-
"src/lightning_fabric/__about__.py",
134-
"src/lightning_fabric/__setup__.py",
135-
"src/lightning_fabric/__version__.py",
136130
"src/lightning/app/cli/app-template",
137131
"src/lightning/app/cli/component-template",
138132
"src/lightning/app/cli/pl-app-template",
139133
"src/lightning/app/cli/react-ui-template",
140-
"src/lightning/app/launcher",
141134
]
142135
install_types = "True"
143136
non_interactive = "True"
@@ -192,6 +185,9 @@ module = [
192185
"lightning.app.frontend.stream_lit",
193186
"lightning.app.frontend.utils",
194187
"lightning.app.frontend.web",
188+
"lightning.app.launcher.launcher",
189+
"lightning.app.launcher.lightning_backend",
190+
"lightning.app.launcher.lightning_hybrid_backend",
195191
"lightning.app.pdb.pdb",
196192
"lightning.app.runners.backends.backend",
197193
"lightning.app.runners.backends.cloud",
@@ -240,9 +236,6 @@ module = [
240236
"lightning.app.utilities.state",
241237
"lightning.app.utilities.tracer",
242238
"lightning.app.utilities.tree",
243-
"lightning.store.authentication",
244-
"lightning.store.cloud_api",
245-
"lightning.store.save",
246239
"lightning.store.utils",
247240
]
248241
ignore_errors = "True"

requirements/data/data.txt

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -5,4 +5,4 @@ lightning-utilities >=0.8.0, <0.10.0
55
# to be able to include also 0.6 and preserve `>` needed for CI min version bypass
66
torchdata >0.5.9, <=0.7.0
77
# to be able to include also PL 2.0 and preserve `>` needed for CI min version bypass
8-
torch >0.14.0, <=2.1.0
8+
torch >0.14.0, <2.2.0

requirements/fabric/examples.txt

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
# NOTE: the upper bound for the package version is only set for CI stability, and it is dropped while installing this package
22
# in case you want to preserve/enforce restrictions on the latest compatible version, add "strict" as an in-line comment
33

4-
torchvision >=0.13.0, <0.16.0
4+
torchvision >=0.13.0, <0.17.0
55
torchmetrics >=0.10.0, <1.3.0
66
lightning-utilities >=0.8.0, <0.10.0

requirements/pytorch/examples.txt

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
# NOTE: the upper bound for the package version is only set for CI stability, and it is dropped while installing this package
22
# in case you want to preserve/enforce restrictions on the latest compatible version, add "strict" as an in-line comment
33

4-
torchvision >=0.13.0, <0.16.0
4+
torchvision >=0.13.0, <0.17.0
55
gym[classic_control] >=0.17.0, <0.27.0
66
ipython[all] <8.15.0
77
torchmetrics >=0.10.0, <1.3.0

requirements/typing.txt

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
mypy==1.5.1
2-
torch==2.0.1
2+
torch==2.1.0
33

44
types-Markdown
55
types-PyYAML

src/lightning/app/core/api.py

Lines changed: 11 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -196,9 +196,9 @@ class StateUpdate(BaseModel):
196196
@fastapi_service.get("/api/v1/state", response_class=JSONResponse)
197197
async def get_state(
198198
response: Response,
199-
x_lightning_type: Optional[str] = Header(None), # type: ignore[assignment]
200-
x_lightning_session_uuid: Optional[str] = Header(None), # type: ignore[assignment]
201-
x_lightning_session_id: Optional[str] = Header(None), # type: ignore[assignment]
199+
x_lightning_type: Optional[str] = Header(None),
200+
x_lightning_session_uuid: Optional[str] = Header(None),
201+
x_lightning_session_id: Optional[str] = Header(None),
202202
) -> Mapping:
203203
if x_lightning_session_uuid is None:
204204
raise Exception("Missing X-Lightning-Session-UUID header")
@@ -246,8 +246,8 @@ async def get_layout() -> str:
246246
@fastapi_service.get("/api/v1/spec", response_class=JSONResponse)
247247
async def get_spec(
248248
response: Response,
249-
x_lightning_session_uuid: Optional[str] = Header(None), # type: ignore[assignment]
250-
x_lightning_session_id: Optional[str] = Header(None), # type: ignore[assignment]
249+
x_lightning_session_uuid: Optional[str] = Header(None),
250+
x_lightning_session_id: Optional[str] = Header(None),
251251
) -> Union[List, Dict]:
252252
if x_lightning_session_uuid is None:
253253
raise Exception("Missing X-Lightning-Session-UUID header")
@@ -266,9 +266,9 @@ async def get_spec(
266266
async def post_delta(
267267
request: Request,
268268
response: Response,
269-
x_lightning_type: Optional[str] = Header(None), # type: ignore[assignment]
270-
x_lightning_session_uuid: Optional[str] = Header(None), # type: ignore[assignment]
271-
x_lightning_session_id: Optional[str] = Header(None), # type: ignore[assignment]
269+
x_lightning_type: Optional[str] = Header(None),
270+
x_lightning_session_uuid: Optional[str] = Header(None),
271+
x_lightning_session_id: Optional[str] = Header(None),
272272
) -> Optional[Dict]:
273273
"""This endpoint is used to make an update to the app state using delta diff, mainly used by streamlit to update
274274
the state."""
@@ -292,9 +292,9 @@ async def post_delta(
292292
async def post_state(
293293
request: Request,
294294
response: Response,
295-
x_lightning_type: Optional[str] = Header(None), # type: ignore[assignment]
296-
x_lightning_session_uuid: Optional[str] = Header(None), # type: ignore[assignment]
297-
x_lightning_session_id: Optional[str] = Header(None), # type: ignore[assignment]
295+
x_lightning_type: Optional[str] = Header(None),
296+
x_lightning_session_uuid: Optional[str] = Header(None),
297+
x_lightning_session_id: Optional[str] = Header(None),
298298
) -> Optional[Dict]:
299299
if x_lightning_session_uuid is None:
300300
raise Exception("Missing X-Lightning-Session-UUID header")

src/lightning/fabric/plugins/collectives/torch_collective.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -84,10 +84,10 @@ def all_to_all(self, output_tensor_list: List[Tensor], input_tensor_list: List[T
8484
return output_tensor_list
8585

8686
def send(self, tensor: Tensor, dst: int, tag: int = 0) -> None:
87-
dist.send(tensor, dst, tag=tag, group=self.group) # type: ignore[arg-type]
87+
dist.send(tensor, dst, tag=tag, group=self.group)
8888

8989
def recv(self, tensor: Tensor, src: Optional[int] = None, tag: int = 0) -> Tensor:
90-
dist.recv(tensor, src, tag=tag, group=self.group) # type: ignore[arg-type]
90+
dist.recv(tensor, src, tag=tag, group=self.group)
9191
return tensor
9292

9393
def all_gather_object(self, object_list: List[Any], obj: Any) -> List[Any]:

src/lightning/fabric/plugins/precision/fsdp.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -143,7 +143,7 @@ def unscale_gradients(self, optimizer: Optimizer) -> None:
143143
if scaler is not None:
144144
if _optimizer_handles_unscaling(optimizer):
145145
raise NotImplementedError("Gradient clipping is not implemented for optimizers handling the unscaling.")
146-
scaler.unscale_(optimizer) # type: ignore[arg-type] # ShardedGradScaler has wrong type annotation
146+
scaler.unscale_(optimizer)
147147

148148
def state_dict(self) -> Dict[str, Any]:
149149
if self.scaler is not None:

src/lightning/fabric/strategies/xla_fsdp.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -277,7 +277,7 @@ def clip_gradients_norm(
277277
) -> Tensor:
278278
"""Clip gradients by norm."""
279279
self.precision.unscale_gradients(optimizer)
280-
return module.clip_grad_norm_(max_norm=max_norm, norm_type=norm_type) # type: ignore[operator]
280+
return module.clip_grad_norm_(max_norm=max_norm, norm_type=norm_type)
281281

282282
def clip_gradients_value(self, module: Module, optimizer: Optimizer, clip_val: Union[float, int]) -> None:
283283
"""Clip gradients by value."""

0 commit comments

Comments (0)