
Commit 2af39e9

update

1 parent a84babe

1 file changed: +6 -2 lines changed

src/lightning/pytorch/plugins/io/async_plugin.py

Lines changed: 6 additions & 2 deletions
@@ -41,9 +41,12 @@ def __init__(self, checkpoint_io: Optional["CheckpointIO"] = None) -> None:
         self._error: Optional[BaseException] = None
 
     # CheckpointIO doesn't have a setup method so we have to do something like.
-    # We can't do setup in __init__ because if train or validate is called more than once the
-    # teardown method deletes the executor.
     def _ensure_setup(self) -> None:
+        """Ensures that the executor is setup.
+
+        We can't do setup in __init__ because if train or validate is called more than once,
+        the teardown method deletes the executor.
+        """
         if self._executor is None:
             self._executor = ThreadPoolExecutor(max_workers=1)
             self._error: Optional[BaseException] = None
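The docstring added here describes a lazy-initialization pattern: the executor cannot be created in __init__, because teardown() deletes it and the same plugin instance may be reused when fit or validate is called again. Below is a minimal, self-contained sketch of that pattern with a simplified stand-in class; the real plugin also wraps a CheckpointIO and surfaces errors raised on the worker thread, which is omitted here.

from concurrent.futures import ThreadPoolExecutor
from typing import Optional


class _AsyncIOSketch:
    """Illustrative stand-in for the async checkpoint plugin."""

    def __init__(self) -> None:
        self._executor: Optional[ThreadPoolExecutor] = None
        self._error: Optional[BaseException] = None

    def _ensure_setup(self) -> None:
        # Recreate the executor lazily: teardown() sets it to None, and the
        # trainer may call fit/validate again on the same plugin instance.
        if self._executor is None:
            self._executor = ThreadPoolExecutor(max_workers=1)
            self._error = None

    def teardown(self) -> None:
        # Wait for in-flight saves, then drop the executor so a later
        # _ensure_setup() call can recreate it.
        if self._executor is not None:
            self._executor.shutdown(wait=True)
            self._executor = None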
@@ -89,5 +92,6 @@ def teardown(self) -> None:
 
 # snapshot the checkpoint payload on the caller thread to avoid races with parameter mutation
 def _clone_tensor(t: torch.Tensor) -> torch.Tensor:
+    """Clones a tensor on the caller thread."""
     # detach to avoid autograd history and clone to take a point-in-time copy
     return t.detach().clone()
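The docstring and comment in this hunk spell out the threading contract: tensors must be detached and cloned on the caller thread, because a background save would otherwise read parameters while the optimizer keeps mutating them in place. The following hypothetical sketch shows how such a helper typically fits into an asynchronous save; submit_async_save and save_fn are illustrative names, not Lightning APIs.

from concurrent.futures import Future, ThreadPoolExecutor
from typing import Any, Callable, Dict

import torch


def _clone_tensor(t: torch.Tensor) -> torch.Tensor:
    # detach to avoid autograd history and clone to take a point-in-time copy
    return t.detach().clone()


def submit_async_save(
    executor: ThreadPoolExecutor,
    state_dict: Dict[str, Any],
    save_fn: Callable[[Dict[str, Any], str], None],
    path: str,
) -> Future:
    # Snapshot on the caller thread so the worker sees a consistent copy,
    # then hand only the snapshot to the background thread.
    snapshot = {
        k: _clone_tensor(v) if isinstance(v, torch.Tensor) else v
        for k, v in state_dict.items()
    }
    return executor.submit(save_fn, snapshot, path)

With torch.save as save_fn, for example, submit_async_save(executor, model.state_dict(), torch.save, "ckpt.pt") returns a Future whose exception, if any, can be surfaced later (for instance in teardown).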
