2 changes: 1 addition & 1 deletion .github/workflows/docs-build.yml
@@ -72,7 +72,7 @@ jobs:
lfs: ${{ matrix.pkg-name == 'pytorch' }}
- uses: actions/setup-python@v5
with:
python-version: "3.9"
python-version: "3.10"

- name: List notebooks
if: ${{ matrix.pkg-name == 'pytorch' }}
2 changes: 2 additions & 0 deletions docs/source-fabric/conf.py
@@ -292,6 +292,8 @@
("py:.*", "torch_xla.*"),
("py:class", "transformer_engine.*"),
("py:class", "bitsandbytes.*"),
# loggers
("py:class", "tensorboardX.writer.SummaryWriter"),  # todo: this is unexpected since the import works locally
]

# -- Options for todo extension ----------------------------------------------
1 change: 1 addition & 0 deletions docs/source-pytorch/conf.py
@@ -377,6 +377,7 @@ def _load_py_module(name: str, location: str) -> ModuleType:
# missing in generated API
("py:exc", "MisconfigurationException"),
# TODO: generated list of all existing ATM, need to be fixed
("py:class", "tensorboardX.writer.SummaryWriter"),
("py:class", "AveragedModel"),
("py:class", "CometExperiment"),
("py:meth", "DataModule.__init__"),
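For context, a minimal sketch of how such ignore entries are typically declared in a Sphinx conf.py; the variable names and surrounding lines here are illustrative assumptions, not copied from the repository:

```python
# conf.py (illustrative sketch, not the repository's actual configuration)
# With nitpicky mode on, Sphinx warns about every cross-reference it cannot
# resolve; nitpick_ignore lists (domain:reftype, target) pairs to silence
# known-unresolvable targets such as optional third-party classes.
nitpicky = True

nitpick_ignore = [
    # tensorboardX may not be importable in the docs build environment,
    # so references to its SummaryWriter cannot be resolved there.
    ("py:class", "tensorboardX.writer.SummaryWriter"),
]
```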
4 changes: 2 additions & 2 deletions src/lightning/fabric/loggers/tensorboard.py
@@ -43,7 +43,7 @@
class TensorBoardLogger(Logger):
r"""Log to local file system in `TensorBoard <https://www.tensorflow.org/tensorboard>`_ format.

Implemented using :class:`~tensorboardX.SummaryWriter`. Logs are saved to
Implemented using :class:`~tensorboardX.writer.SummaryWriter`. Logs are saved to
``os.path.join(root_dir, name, version)``. This is the recommended logger in Lightning Fabric.

Args:
@@ -60,7 +60,7 @@ class TensorBoardLogger(Logger):
sub_dir: Sub-directory to group TensorBoard logs. If a ``sub_dir`` argument is passed
then logs are saved in ``/root_dir/name/version/sub_dir/``. Defaults to ``None`` in which case
logs are saved in ``/root_dir/name/version/``.
\**kwargs: Additional arguments used by :class:`tensorboardX.SummaryWriter` can be passed as keyword
\**kwargs: Additional arguments used by :class:`tensorboardX.writer.SummaryWriter` can be passed as keyword
arguments in this logger. To automatically flush to disk, `max_queue` sets the size
of the queue for pending logs before flushing. `flush_secs` determines how many seconds
elapse before flushing.
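As a quick illustration of the keyword forwarding described in this docstring, a hedged usage sketch (argument values are made up for the example and are not part of the diff):

```python
from lightning.fabric.loggers import TensorBoardLogger

# Extra keyword arguments such as max_queue and flush_secs are forwarded to
# the underlying SummaryWriter, which controls how often events hit disk.
logger = TensorBoardLogger(
    root_dir="logs",       # files end up under logs/<name>/version_<n>/
    name="my_experiment",
    max_queue=10,          # pending events buffered before an automatic flush
    flush_secs=30,         # flush at least every 30 seconds
)

logger.log_metrics({"train/loss": 0.25}, step=100)
logger.finalize("success")
```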
4 changes: 2 additions & 2 deletions src/lightning/pytorch/loggers/tensorboard.py
@@ -39,7 +39,7 @@
class TensorBoardLogger(Logger, FabricTensorBoardLogger):
r"""Log to local or remote file system in `TensorBoard <https://www.tensorflow.org/tensorboard>`_ format.

Implemented using :class:`~tensorboardX.SummaryWriter`. Logs are saved to
Implemented using :class:`~tensorboardX.writer.SummaryWriter`. Logs are saved to
``os.path.join(save_dir, name, version)``. This is the default logger in Lightning; it comes
preinstalled.

@@ -74,7 +74,7 @@ class TensorBoardLogger(Logger, FabricTensorBoardLogger):
sub_dir: Sub-directory to group TensorBoard logs. If a sub_dir argument is passed
then logs are saved in ``/save_dir/name/version/sub_dir/``. Defaults to ``None``, in which case
logs are saved in ``/save_dir/name/version/``.
\**kwargs: Additional arguments used by :class:`tensorboardX.SummaryWriter` can be passed as keyword
\**kwargs: Additional arguments used by :class:`tensorboardX.writer.SummaryWriter` can be passed as keyword
arguments in this logger. To automatically flush to disk, `max_queue` sets the size
of the queue for pending logs before flushing. `flush_secs` determines how many seconds
elapse before flushing.
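Similarly, a short sketch of the PyTorch-side logger wired into a Trainer, under the same caveat that the values are illustrative:

```python
from lightning.pytorch import Trainer
from lightning.pytorch.loggers import TensorBoardLogger

# save_dir/name/version mirrors the path layout described in the docstring;
# unrecognized keyword arguments are forwarded to the underlying SummaryWriter.
logger = TensorBoardLogger(save_dir="lightning_logs", name="baseline", flush_secs=30)
trainer = Trainer(logger=logger, max_epochs=3)
# trainer.fit(model, datamodule=datamodule)  # model/datamodule assumed to exist elsewhere
```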
@@ -106,9 +106,8 @@ def _configure_checkpoint_callbacks(self, enable_checkpointing: bool) -> None:
model_checkpoint = LitModelCheckpoint(model_registry=self.trainer._model_registry)
else:
rank_zero_info(
"You are using the default ModelCheckpoint callback."
" Install `pip install litmodels` package to use the `LitModelCheckpoint` instead"
" for seamless uploading to the Lightning model registry."
"Using default `ModelCheckpoint`. Consider installing `litmodels` package to enable"
" `LitModelCheckpoint` for automatic upload to the Lightning model registry."
)
model_checkpoint = ModelCheckpoint()
self.trainer.callbacks.append(model_checkpoint)
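For orientation, a small sketch of what this branch means from the user's side; it relies only on behavior visible in the hunk above (checkpointing enabled, no checkpoint callback passed explicitly):

```python
from lightning.pytorch import Trainer

# With checkpointing enabled and no user-supplied checkpoint callback, the
# connector appends LitModelCheckpoint when litmodels is importable, otherwise
# a plain ModelCheckpoint together with the info message shown above.
trainer = Trainer(enable_checkpointing=True)
print([type(cb).__name__ for cb in trainer.callbacks])
```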