
Commit b9a3220

Merge branch 'master' into feature/19743-tensorboard-histograms
2 parents eb24220 + a08b64e commit b9a3220

File tree

11 files changed (+66, -14 lines)


.github/workflows/ci-tests-fabric.yml

Lines changed: 3 additions & 2 deletions

@@ -96,7 +96,7 @@ jobs:
       run: |
         cd requirements/fabric
         pip install -U "lightning-utilities[cli]"
-        python -m lightning_utilities.cli requirements set-oldest --req_files "['base.txt', 'strategies.txt']"
+        python -m lightning_utilities.cli requirements set-oldest --req_files "['base.txt', 'strategies.txt', 'test.txt']"
         pip install "cython<3.0" wheel
         pip install "pyyaml==5.4" --no-build-isolation

@@ -142,7 +142,8 @@ jobs:
         pip install -e ".[${EXTRA_PREFIX}test,${EXTRA_PREFIX}strategies]" \
           -U --upgrade-strategy=eager --prefer-binary \
-          --extra-index-url="${TORCH_URL}" --find-links="${PYPI_CACHE_DIR}"
+          --extra-index-url="${TORCH_URL}" \
+          --find-links="${PYPI_CACHE_DIR}"
         pip list
     - name: Dump handy wheels
       if: github.event_name == 'push' && github.ref == 'refs/heads/master'
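
For context, this "oldest" CI step pins dependencies to their minimum supported versions before installing, so adding test.txt to --req_files means the test requirements are now pinned as well. A rough sketch of the kind of rewrite this implies, assuming set-oldest simply turns lower-bound specifiers into exact pins (illustrative only, not the lightning-utilities implementation):

    import re

    def set_oldest(requirement: str) -> str:
        """Sketch only: turn the first lower-bound specifier into an exact pin."""
        # "numpy >=1.21.0, <1.27.0" -> "numpy ==1.21.0, <1.27.0"
        return re.sub(r">=\s*", "==", requirement, count=1)

    print(set_oldest("numpy >=1.21.0, <1.27.0"))  # numpy ==1.21.0, <1.27.0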

.github/workflows/ci-tests-pytorch.yml

Lines changed: 4 additions & 2 deletions

@@ -101,7 +101,7 @@ jobs:
       run: |
         cd requirements/pytorch
         pip install -U "lightning-utilities[cli]"
-        python -m lightning_utilities.cli requirements set-oldest --req_files "['base.txt', 'extra.txt', 'strategies.txt', 'examples.txt']"
+        python -m lightning_utilities.cli requirements set-oldest --req_files "['base.txt', 'extra.txt', 'strategies.txt', 'examples.txt', 'test.txt']"
         pip install "cython<3.0" wheel
         pip install "pyyaml==5.4" --no-build-isolation

@@ -141,7 +141,9 @@ jobs:
         pip install ".[${EXTRA_PREFIX}extra,${EXTRA_PREFIX}test,${EXTRA_PREFIX}strategies]" \
           -U --upgrade-strategy=eager --prefer-binary \
           -r requirements/_integrations/accelerators.txt \
-          --extra-index-url="${TORCH_URL}" --find-links="${PYPI_CACHE_DIR}" --find-links="https://download.pytorch.org/whl/torch-tensorrt"
+          --extra-index-url="${TORCH_URL}" \
+          --find-links="${PYPI_CACHE_DIR}" \
+          --find-links="https://download.pytorch.org/whl/torch-tensorrt"
         pip list
     - name: Drop LAI from extensions
       if: ${{ matrix.pkg-name != 'lightning' }}

docs/source-pytorch/conf.py

Lines changed: 1 addition & 1 deletion

@@ -127,7 +127,7 @@ def _load_py_module(name: str, location: str) -> ModuleType:
     "https://pytorch.org/docs/stable/", "https://pytorch.org/docs/{torch.__version__}/", _PATH_ROOT
 )
 adjust_linked_external_docs(
-    "https://lightning.ai/docs/torchmetrics", "https://lightning.ai/docs/torchmetrics/v{torchmetrics.__version__}/", _PATH_ROOT, version_digits=3
+    "https://lightning.ai/docs/torchmetrics/stable/", "https://lightning.ai/docs/torchmetrics/v{torchmetrics.__version__}/", _PATH_ROOT, version_digits=3
 )
 adjust_linked_external_docs(
     "https://lightning.ai/docs/fabric/stable/", "https://lightning.ai/docs/fabric/{lightning_fabric.__version__}/", _PATH_ROOT, version_digits=3

docs/source-pytorch/extensions/logging.rst

Lines changed: 4 additions & 0 deletions

@@ -120,6 +120,10 @@ methods to log from anywhere in a :doc:`LightningModule <../common/lightning_mod
 .. note::
     Everything explained below applies to both :meth:`~lightning.pytorch.core.LightningModule.log` or :meth:`~lightning.pytorch.core.LightningModule.log_dict` methods.

+.. note::
+
+    When using TorchMetrics with Lightning, we recommend referring to the `TorchMetrics Lightning integration documentation <https://lightning.ai/docs/torchmetrics/stable/pages/lightning.html>`_ for logging best practices, common pitfalls, and proper usage patterns.
+
 Depending on where the :meth:`~lightning.pytorch.core.LightningModule.log` method is called, Lightning auto-determines
 the correct logging mode for you. Of course you can override the default behavior by manually setting the
 :meth:`~lightning.pytorch.core.LightningModule.log` parameters.
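
The integration guide linked by the new note covers the pattern sketched below: register the metric as a module attribute and pass the metric object itself to self.log so Lightning handles accumulation and distributed synchronization. A minimal example, assuming a multiclass setup with placeholder layer and class sizes:

    import torch
    import torchmetrics
    from lightning.pytorch import LightningModule

    class LitClassifier(LightningModule):
        def __init__(self, num_classes: int = 10):
            super().__init__()
            self.model = torch.nn.Linear(32, num_classes)
            # Registering the metric as a submodule keeps it on the right device.
            self.train_acc = torchmetrics.classification.MulticlassAccuracy(num_classes=num_classes)

        def training_step(self, batch, batch_idx):
            x, y = batch
            logits = self.model(x)
            loss = torch.nn.functional.cross_entropy(logits, y)
            # Update the metric, then log the metric object (not a plain tensor)
            # so Lightning computes the correct epoch-level value.
            self.train_acc(logits, y)
            self.log("train_acc", self.train_acc, on_step=True, on_epoch=True)
            self.log("train_loss", loss)
            return loss

        def configure_optimizers(self):
            return torch.optim.Adam(self.parameters(), lr=1e-3)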

requirements/fabric/test.txt

Lines changed: 2 additions & 2 deletions

@@ -1,10 +1,10 @@
 coverage ==7.10.4
-numpy >=1.17.2, <1.27.0
+numpy >=1.21.0, <1.27.0
 pytest ==8.4.1
 pytest-cov ==6.2.1
 pytest-timeout ==2.4.0
 pytest-rerunfailures ==15.1
 pytest-random-order ==1.2.0
 click ==8.1.8; python_version < "3.11"
 click ==8.2.1; python_version > "3.10"
-tensorboardX >=2.2, <2.7.0 # min version is set by torch.onnx missing attribute
+tensorboardX >=2.6, <2.7.0 # todo: relax it back to `>=2.2` after fixing tests

requirements/pytorch/loggers.info

Lines changed: 1 addition & 1 deletion

@@ -4,4 +4,4 @@ neptune >=1.0.0
 comet-ml >=3.31.0
 mlflow >=1.0.0
 wandb >=0.12.10
-tensorboard >=2.9.1
+tensorboard >=2.11

requirements/pytorch/test.txt

Lines changed: 4 additions & 4 deletions

@@ -8,16 +8,16 @@ pytest-random-order ==1.2.0
 # needed in tests
 cloudpickle >=1.3, <3.2.0
 scikit-learn >0.22.1, <1.8.0
-numpy >=1.17.2, <1.27.0
-onnx >=1.12.0, <1.19.0
+numpy >1.20.0, <1.27.0
+onnx >1.12.0, <1.19.0
 onnxruntime >=1.12.0, <1.23.0
-onnxscript >= 0.2.2, <0.4.0
+onnxscript >= 0.1.0, <0.4.0
 psutil <7.0.1 # for `DeviceStatsMonitor`
 pandas >2.0, <2.4.0 # needed in benchmarks
 fastapi # for `ServableModuleValidator` # not setting version as re-defined in App
 uvicorn # for `ServableModuleValidator` # not setting version as re-defined in App

-tensorboard >=2.9.1, <2.21.0 # for `TensorBoardLogger`
+tensorboard >=2.11, <2.21.0 # for `TensorBoardLogger`

 --find-links https://download.pytorch.org/whl/torch-tensorrt
 torch-tensorrt; platform_system == "Linux" and python_version >= "3.12"

src/lightning/pytorch/CHANGELOG.md

Lines changed: 2 additions & 0 deletions

@@ -40,6 +40,8 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
 - Fixed learning rate not being correctly set after using `LearningRateFinder` callback ([#21068](https://github.com/Lightning-AI/pytorch-lightning/pull/21068))


+- Fixed misalignment column while using rich model summary in `DeepSpeedstrategy` ([#21100](https://github.com/Lightning-AI/pytorch-lightning/pull/21100))
+
 ---

 ## [2.5.3] - 2025-08-13

src/lightning/pytorch/callbacks/rich_model_summary.py

Lines changed: 5 additions & 2 deletions

@@ -79,18 +79,21 @@ def summarize(
         from rich.table import Table

         console = get_console()
+        column_names = list(zip(*summary_data))[0]

         header_style: str = summarize_kwargs.get("header_style", "bold magenta")
         table = Table(header_style=header_style)
         table.add_column(" ", style="dim")
         table.add_column("Name", justify="left", no_wrap=True)
         table.add_column("Type")
         table.add_column("Params", justify="right")
+
+        if "Params per Device" in column_names:
+            table.add_column("Params per Device", justify="right")
+
         table.add_column("Mode")
         table.add_column("FLOPs", justify="right")

-        column_names = list(zip(*summary_data))[0]
-
         for column_name in ["In sizes", "Out sizes"]:
             if column_name in column_names:
                 table.add_column(column_name, justify="right", style="white")
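
The fix computes column_names before any columns are added, so the optional "Params per Device" column (emitted only by the DeepSpeed summary) is inserted between "Params" and "Mode" instead of leaving the headers misaligned with the row data. A standalone sketch of that pattern with made-up rows (not the Lightning code itself):

    from rich.console import Console
    from rich.table import Table

    # Column-major summary data, in the shape _get_summary_data() produces; values are made up.
    summary_data = [
        (" ", ["0"]),
        ("Name", ["layer"]),
        ("Type", ["Linear"]),
        ("Params", ["1.1 K"]),
        ("Params per Device", ["550"]),  # only present for sharded summaries
        ("Mode", ["train"]),
        ("FLOPs", ["2.0 K"]),
    ]
    column_names = list(zip(*summary_data))[0]

    table = Table(header_style="bold magenta")
    for name in (" ", "Name", "Type", "Params"):
        table.add_column(name)
    if "Params per Device" in column_names:  # add the optional column in its proper position
        table.add_column("Params per Device", justify="right")
    for name in ("Mode", "FLOPs"):
        table.add_column(name)

    # Transpose the column-major data into rows before adding them.
    for row in zip(*(values for _, values in summary_data)):
        table.add_row(*row)

    Console().print(table)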

src/lightning/pytorch/utilities/model_summary/model_summary_deepspeed.py

Lines changed: 1 addition & 0 deletions

@@ -99,6 +99,7 @@ def _get_summary_data(self) -> list[tuple[str, list[str]]]:
             ("Params", list(map(get_human_readable_count, self.param_nums))),
             ("Params per Device", list(map(get_human_readable_count, self.parameters_per_layer))),
             ("Mode", ["train" if mode else "eval" for mode in self.training_modes]),
+            ("FLOPs", list(map(get_human_readable_count, (sum(x.values()) for x in self.flop_counts.values())))),
         ]
         if self._model.example_input_array is not None:
             arrays.append(("In sizes", [str(x) for x in self.in_sizes]))
