
Commit 0fde49a

style
1 parent 717b5ad commit 0fde49a

File tree

1 file changed: +16 -3 lines changed


examples/advanced_diffusion_training/train_dreambooth_lora_flux_advanced.py

Lines changed: 16 additions & 3 deletions
@@ -662,7 +662,8 @@ def parse_args(input_args=None):
         type=str,
         default=None,
         help=(
-            'The transformer modules to apply LoRA training on. Please specify the layers in a comma seperated. E.g. - "q_proj,k_proj,v_proj,out_proj" will result in lora training of attention layers only'),
+            'The transformer modules to apply LoRA training on. Please specify the layers in a comma seperated. E.g. - "q_proj,k_proj,v_proj,out_proj" will result in lora training of attention layers only'
+        ),
     )
     parser.add_argument(
         "--adam_epsilon",
@@ -1591,8 +1592,20 @@ def main(args):
     if args.lora_blocks is not None:
         target_modules = [block.strip() for block in args.lora_blocks.split(",")]
     else:
-        target_modules = ["to_k", "to_q", "to_v", "to_out.0",
-                          "add_k_proj", "add_q_proj", "add_v_proj", "to_add_out", "ff.net.0.proj","ff.net.2", "ff_context.net.0.proj","ff_context.net.2"]
+        target_modules = [
+            "to_k",
+            "to_q",
+            "to_v",
+            "to_out.0",
+            "add_k_proj",
+            "add_q_proj",
+            "add_v_proj",
+            "to_add_out",
+            "ff.net.0.proj",
+            "ff.net.2",
+            "ff_context.net.0.proj",
+            "ff_context.net.2",
+        ]
     # now we will add new LoRA weights to the attention layers
     transformer_lora_config = LoraConfig(
         r=args.rank,
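
For reference, a minimal standalone sketch of how the comma-separated --lora_blocks value and the default list above feed into peft.LoraConfig (not part of this commit; the lora_alpha and init_lora_weights values below are illustrative assumptions, not taken from the diff):

# Standalone sketch; assumes the `peft` package is installed.
from peft import LoraConfig

def build_transformer_lora_config(lora_blocks, rank):
    if lora_blocks is not None:
        # e.g. --lora_blocks "to_k,to_q,to_v,to_out.0" trains the attention projections only
        target_modules = [block.strip() for block in lora_blocks.split(",")]
    else:
        # Default from the diff: attention and feed-forward projections of the transformer blocks
        target_modules = [
            "to_k", "to_q", "to_v", "to_out.0",
            "add_k_proj", "add_q_proj", "add_v_proj", "to_add_out",
            "ff.net.0.proj", "ff.net.2",
            "ff_context.net.0.proj", "ff_context.net.2",
        ]
    return LoraConfig(
        r=rank,
        lora_alpha=rank,  # assumption: alpha == rank; the training script exposes its own flag for this
        init_lora_weights="gaussian",  # assumption: a common choice in diffusers LoRA scripts
        target_modules=target_modules,
    )

# Example: build_transformer_lora_config("to_k,to_q,to_v,to_out.0", rank=16)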

0 commit comments