We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
1 parent b8757c5 · commit 4dcd698 (Copy full SHA for 4dcd698)
comfy_extras/nodes_train.py
@@ -429,13 +429,13 @@ def train(
429
430
if existing_adapter is not None:
431
train_adapter = existing_adapter.to_train().to(lora_dtype)
432
- for name, parameter in train_adapter.named_parameters():
433
- lora_sd[f"{n}.{name}"] = parameter
434
else:
435
# Use LoRA with alpha=1.0 by default
436
train_adapter = adapter_cls.create_train(
437
m.weight, rank=rank, alpha=1.0
438
).to(lora_dtype)
+ for name, parameter in train_adapter.named_parameters():
+ lora_sd[f"{n}.{name}"] = parameter
439
440
mp.add_weight_wrapper(key, train_adapter)
441
all_weight_adapters.append(train_adapter)
0 commit comments