
Commit d5cf62a

Author: Your Name
Commit message: minor
Parent: 0ec40bd

File tree: 2 files changed (+1, -16 lines)


modelopt/torch/opt/conversion.py

Lines changed: 0 additions & 14 deletions

@@ -493,10 +493,6 @@ def save(model: nn.Module, f: str | os.PathLike | BinaryIO, **kwargs) -> None:
         model: Any model.
         f: Target file location.
         **kwargs: additional args for ``torch.save()``.
-
-    .. note::
-
-        If model is a wrapper such as DistributedDataParallel, it will be unwrapped for saving.
     """
     # unwrap model
     model = unwrap_model(model, warn=True)

@@ -545,11 +541,6 @@ def restore_from_modelopt_state(model: ModelLike, modelopt_state: dict[str, Any]
     Returns:
         A modified model architecture based on the restored modifications with the unmodified
         weights as stored in the provided ``model`` argument.
-
-    .. note::
-
-        Note that wrappers such as DistributedDataParallel are `not` supported during the restore
-        process. Please wrap the model after the restore process.
     """
     # initialize ModelLikeModule if needed.
     model = model if isinstance(model, nn.Module) else ModelLikeModule(model)

@@ -588,11 +579,6 @@ def restore(model: ModelLike, f: str | os.PathLike | BinaryIO, **kwargs) -> nn.M

     Returns:
         The model with original weights and stored architecture.
-
-    .. note::
-
-        Note that wrappers such as DistributedDataParallel are `not` supported during the restore
-        process. Please wrap the model after the restore process.
     """
     # initialize ModelLikeModule if needed.
     model = model if isinstance(model, nn.Module) else ModelLikeModule(model)
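
The three removed docstring notes all described the same constraint: ``save`` unwraps wrappers such as DistributedDataParallel itself (the unchanged ``unwrap_model`` call is still visible in the first hunk), while ``restore`` and ``restore_from_modelopt_state`` expect a bare module, with the DDP wrapper applied afterwards. A minimal sketch of that order, assuming the functions are exposed as ``modelopt.torch.opt.save``/``restore`` and using a placeholder model, checkpoint path, and a torchrun launch that sets LOCAL_RANK (none of which come from this diff):

# Sketch only: restore on the bare module first, then wrap with DDP.
# The model class, checkpoint path, and torchrun launch are illustrative assumptions.
import os

import torch
import torch.distributed as dist
import torch.nn as nn
from torch.nn.parallel import DistributedDataParallel as DDP

import modelopt.torch.opt as mto


class TinyModel(nn.Module):  # placeholder model for illustration
    def __init__(self):
        super().__init__()
        self.fc = nn.Linear(16, 4)

    def forward(self, x):
        return self.fc(x)


def main():
    dist.init_process_group(backend="nccl")
    local_rank = int(os.environ["LOCAL_RANK"])
    torch.cuda.set_device(local_rank)

    # restore() expects an unwrapped module.
    model = TinyModel().cuda()
    model = mto.restore(model, "modelopt_ckpt.pth")

    # Wrap with DDP only after the restore step.
    model = DDP(model, device_ids=[local_rank])

    # save() accepts the wrapped model; the unwrap_model() call kept in the
    # hunk above strips the DDP wrapper before writing the checkpoint.
    mto.save(model, "modelopt_ckpt.pth")


if __name__ == "__main__":
    main()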

modelopt/torch/quantization/calib/histogram.py

Lines changed: 1 addition & 2 deletions

@@ -157,8 +157,7 @@ def compute_amax(
         """
         if dist.is_initialized():
             warnings.warn(
-                "This method does not perform any synchronization across DistributedDataParallel"
-                " (DDP) https://pytorch.org/docs/stable/notes/ddp.html modules. The recommended"
+                "This method does not perform any synchronization across distributed processes. The recommended"
                 " method is to use the same calibration dataset across all distributed data"
                 " parallel groups so that `amax` is the same for all DDP modules."
             )
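
The reworded warning keeps the same recommendation: ``compute_amax`` performs no cross-process synchronization, so every data-parallel rank should calibrate on the identical dataset to end up with matching ``amax`` values. A short sketch of that recommendation; ``mtq.quantize``, ``INT8_DEFAULT_CFG``, and the toy model/data are assumptions for illustration, not part of this change:

# Sketch of the warning's recommendation: identical (unsharded) calibration
# data on every rank, so the calibrated amax values agree across replicas.
import torch
import torch.nn as nn
from torch.utils.data import DataLoader, TensorDataset

import modelopt.torch.quantization as mtq

model = nn.Sequential(nn.Linear(16, 32), nn.ReLU(), nn.Linear(32, 4))

# Same seed everywhere and no DistributedSampler: each rank sees the same batches.
torch.manual_seed(0)
calib_loader = DataLoader(TensorDataset(torch.randn(256, 16)), batch_size=32)


def forward_loop(m: nn.Module) -> None:
    m.eval()
    with torch.no_grad():
        for (x,) in calib_loader:
            m(x)


# Quantize and calibrate before wrapping in DistributedDataParallel; with the
# same calibration data on every rank, the per-rank amax values match without
# any explicit synchronization.
model = mtq.quantize(model, mtq.INT8_DEFAULT_CFG, forward_loop)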
