
Commit 84c168c

alpha_pattern.
1 parent 6b35c92 commit 84c168c

File tree

2 files changed: +11 -2 lines changed


src/diffusers/__init__.py

Lines changed: 1 addition & 1 deletion
@@ -338,8 +338,8 @@
         "StableDiffusion3ControlNetPipeline",
         "StableDiffusion3Img2ImgPipeline",
         "StableDiffusion3InpaintPipeline",
-        "StableDiffusion3PAGPipeline",
         "StableDiffusion3PAGImg2ImgPipeline",
+        "StableDiffusion3PAGPipeline",
         "StableDiffusion3Pipeline",
         "StableDiffusionAdapterPipeline",
         "StableDiffusionAttendAndExcitePipeline",

src/diffusers/loaders/peft.py

Lines changed: 10 additions & 1 deletion
@@ -94,6 +94,16 @@ def _maybe_adjust_config(config):
                 if mod != ambiguous_key and mod not in config["rank_pattern"]:
                     config["rank_pattern"][mod] = original_r
 
+    # handle alphas to deal with cases like
+    # https://github.com/huggingface/diffusers/pull/9999#issuecomment-2516180777
+    has_different_ranks = len(config["rank_pattern"]) > 1 and list(config["rank_pattern"])[0] != config["r"]
+    if has_different_ranks:
+        config["lora_alpha"] = config["r"]
+        alpha_pattern = {}
+        for module_name, rank in config["rank_pattern"].items():
+            alpha_pattern[module_name] = rank
+        config["alpha_pattern"] = alpha_pattern
+
     return config
 
 
@@ -290,7 +300,6 @@ def load_lora_adapter(self, pretrained_model_name_or_path_or_dict, prefix="trans
                 lora_config_kwargs.pop("lora_bias")
 
             lora_config = LoraConfig(**lora_config_kwargs)
-
             # adapter_name
             if adapter_name is None:
                 adapter_name = get_adapter_name(self)
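
For context, a minimal standalone sketch (toy module names and ranks, not taken from the commit) of what the new alpha handling does to a LoRA config dict. In PEFT, each LoRA module's update is scaled by alpha / rank, so copying rank_pattern into alpha_pattern pins that ratio to 1.0 per module instead of letting a single global lora_alpha skew modules trained at different ranks:

# Toy illustration of the alpha handling added to _maybe_adjust_config above;
# module names and rank values are hypothetical.
config = {
    "r": 4,
    "lora_alpha": 32,
    "rank_pattern": {"to_q": 8, "to_k": 16},  # per-module ranks differ from r
}

# Same condition as the diff; note list(dict) yields keys, so with string keys
# the `!= config["r"]` comparison holds whenever rank_pattern has several entries.
has_different_ranks = len(config["rank_pattern"]) > 1 and list(config["rank_pattern"])[0] != config["r"]
if has_different_ranks:
    config["lora_alpha"] = config["r"]  # global scaling becomes r / r = 1.0
    alpha_pattern = {}
    for module_name, rank in config["rank_pattern"].items():
        alpha_pattern[module_name] = rank  # per-module scaling becomes rank / rank = 1.0
    config["alpha_pattern"] = alpha_pattern

print(config["lora_alpha"])     # 4
print(config["alpha_pattern"])  # {'to_q': 8, 'to_k': 16}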
