Commit 2de5e97

Commit message: comment
1 parent b34c6d0, commit 2de5e97

File tree

1 file changed: +1 line, -0 lines


tests/models/transformers/test_models_transformer_flux.py

Lines changed: 1 addition & 0 deletions
@@ -189,6 +189,7 @@ def test_lora_exclude_modules(self):
             f"{target_module}.lora_A.weight": torch.ones(lora_rank, target_mod_shape[1]) * 22,
             f"{target_module}.lora_B.weight": torch.ones(target_mod_shape[0], lora_rank) * 33,
         }
+        # Passing exclude_modules should no longer be necessary (or even passing target_modules, for that matter).
         config = LoraConfig(
             r=lora_rank, target_modules=["single_transformer_blocks.0.proj_out"], exclude_modules=["proj_out"]
         )
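
The new comment suggests that, once LoRA target names are matched as fully qualified module paths, this config could drop exclude_modules (and possibly target_modules as well). A minimal sketch of that simplified PEFT LoraConfig follows; the lora_rank value here is an assumption for illustration, not taken from the commit:

from peft import LoraConfig

lora_rank = 4  # assumed value for illustration; the real test defines its own rank

# Target the fully qualified proj_out inside the first single transformer block.
# Per the commit's comment, the top-level "proj_out" should no longer need excluding.
config = LoraConfig(
    r=lora_rank,
    target_modules=["single_transformer_blocks.0.proj_out"],
    # exclude_modules=["proj_out"],  # per the new comment, should no longer be necessary
)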
