Commit 47e8292

remove comment.
1 parent: f6905e8

File tree

1 file changed: +0 additions, -3 deletions

src/diffusers/loaders/lora_base.py

Lines changed: 0 additions & 3 deletions
@@ -1078,16 +1078,13 @@ def _save_lora_weights(
         state_dict = {}
         final_lora_adapter_metadata = {}
 
-        # Pack the weights for each component (e.g., 'unet', 'text_encoder')
         for prefix, layers in lora_layers.items():
             state_dict.update(cls.pack_weights(layers, prefix))
 
-        # Pack the metadata for each component
         for prefix, metadata in lora_metadata.items():
             if metadata:
                 final_lora_adapter_metadata.update(_pack_dict_with_prefix(metadata, prefix))
 
-        # Call the existing writer function
         cls.write_lora_layers(
             state_dict=state_dict,
             save_directory=save_directory,

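For context, the comments removed in this hunk described the two packing loops. Below is a minimal, hedged sketch of the prefix-packing idea those loops rely on, assuming cls.pack_weights and _pack_dict_with_prefix simply namespace dictionary keys with the component prefix (e.g. 'unet', 'text_encoder'). The helper pack_with_prefix, the sample lora_layers / lora_metadata values, and the printed outputs are hypothetical illustrations, not the diffusers implementation.

# Illustrative sketch only: pack_with_prefix and the sample dictionaries are
# hypothetical stand-ins for cls.pack_weights / _pack_dict_with_prefix above.
from typing import Any, Dict


def pack_with_prefix(entries: Dict[str, Any], prefix: str) -> Dict[str, Any]:
    """Namespace every key with its component prefix, e.g. 'unet.<key>'."""
    return {f"{prefix}.{key}": value for key, value in entries.items()}


# Hypothetical per-component inputs mirroring lora_layers / lora_metadata in the hunk.
lora_layers = {
    "unet": {"down_blocks.0.lora_A.weight": "tensor_A"},
    "text_encoder": {"layers.0.lora_B.weight": "tensor_B"},
}
lora_metadata = {"unet": {"r": 4, "lora_alpha": 4}, "text_encoder": {}}

state_dict = {}
final_lora_adapter_metadata = {}

# Same shape as the two loops in the hunk: pack weights, then non-empty metadata.
for prefix, layers in lora_layers.items():
    state_dict.update(pack_with_prefix(layers, prefix))

for prefix, metadata in lora_metadata.items():
    if metadata:
        final_lora_adapter_metadata.update(pack_with_prefix(metadata, prefix))

print(sorted(state_dict))
# ['text_encoder.layers.0.lora_B.weight', 'unet.down_blocks.0.lora_A.weight']
print(final_lora_adapter_metadata)
# {'unet.r': 4, 'unet.lora_alpha': 4}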