We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
2 parents 5f11aeb + fb2e802 — commit 7255790 (Copy full SHA for 7255790)
src/llama_recipes/finetuning.py
@@ -188,7 +188,7 @@ def main(**kwargs):
188
device_id=device_id,
189
limit_all_gathers=True,
190
sync_module_states=train_config.low_cpu_fsdp,
191
- param_init_fn=lambda module: module.to_empty(device=torch.device("cuda"), recurse=False)
+ param_init_fn=(lambda module: module.to_empty(device=torch.device("cuda"), recurse=False))
192
if train_config.low_cpu_fsdp and rank != 0 else None,
193
)
194
if fsdp_config.fsdp_activation_checkpointing:
0 commit comments