We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent c60baae commit c8da426 — Copy full SHA for c8da426
modelopt/torch/opt/plugins/mcore_dist_checkpointing.py
@@ -154,7 +154,9 @@ def _parse_transformer_config(transformer_config: dict) -> dict:
154
155
if dist.is_master():
156
run_config_name = f"{checkpoint_name}/modelopt_run_config.yaml"
157
- config_dict = _parse_transformer_config(copy.deepcopy(model[0].config.__dict__))
+ # We avoid deepcopy here since some attributes in Megatron-Bridge config cannot be
158
+ # deepcopied.
159
+ config_dict = _parse_transformer_config(model[0].config.__dict__)
160
config_dict["nvidia_modelopt_version"] = modelopt.__version__
161
with open(run_config_name, "w") as f:
162
yaml.dump(config_dict, f, default_flow_style=False)
0 commit comments