Commit 18382f4

fix

1 parent ea451d1 commit 18382f4

src/diffusers/loaders/lora_pipeline.py

Lines changed: 6 additions & 3 deletions
@@ -5065,7 +5065,7 @@ class WanLoraLoaderMixin(LoraBaseMixin):
     Load LoRA layers into [`WanTransformer3DModel`]. Specific to [`WanPipeline`] and [`WanImageToVideoPipeline`].
     """
 
-    _lora_loadable_modules = ["transformer", "transformer_2"]
+    _lora_loadable_modules = ["transformer"]
     transformer_name = TRANSFORMER_NAME
 
     @classmethod
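The hunk above removes "transformer_2" from the class-level default; the hunk below re-registers it on demand. One subtlety: `_lora_loadable_modules` is a class attribute, so appending to it through `self` mutates the list for every instance sharing that class. A minimal sketch of that plain-Python behavior (the class names here are illustrative, not from diffusers):

class Mixin:
    # Class-level default, shared by all instances and subclasses.
    _lora_loadable_modules = ["transformer"]

class Pipe(Mixin):
    pass

p = Pipe()
# Attribute lookup on `p` finds the class-level list; list.append then
# mutates that same object in place, so the change is visible class-wide.
p._lora_loadable_modules.append("transformer_2")
assert Pipe._lora_loadable_modules == ["transformer", "transformer_2"]

This means that once one pipeline instance opts in, the registration persists for subsequent loads on the same class.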
@@ -5273,10 +5273,13 @@ def load_lora_weights(
         load_into_transformer_2 = kwargs.pop("load_into_transformer_2", False)
         if load_into_transformer_2:
             if not hasattr(self, "transformer_2"):
-                raise ValueError(
-                    "Cannot load LoRA into transformer_2: transformer_2 is not available for this model"
+                raise AttributeError(
+                    f"'{type(self).__name__}' object has no attribute transformer_2. "
+                    "Note that Wan2.1 models do not have a transformer_2 component. "
                     "Ensure the model has a transformer_2 component before setting load_into_transformer_2=True."
                 )
+            if "transformer_2" not in self._lora_loadable_modules:
+                self._lora_loadable_modules.append("transformer_2")
             self.load_lora_into_transformer(
                 state_dict,
                 transformer=self.transformer_2,
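What this buys in practice: loading LoRA weights into `transformer_2` becomes opt-in, so Wan2.1 pipelines (which only have `transformer`) keep working, while pipelines that do expose a second transformer can still receive weights in it. A usage sketch, assuming a Wan2.2-style two-transformer pipeline; the model id and LoRA paths are illustrative placeholders, not taken from this commit:

import torch
from diffusers import WanPipeline

# Placeholder checkpoint; assumed to expose both `transformer` and
# `transformer_2` (Wan2.2-style pipelines with two transformers).
pipe = WanPipeline.from_pretrained(
    "Wan-AI/Wan2.2-T2V-A14B-Diffusers", torch_dtype=torch.bfloat16
)

# Default behaviour: LoRA layers are loaded into `transformer` only.
pipe.load_lora_weights("path/to/lora_1", adapter_name="stage_1")

# Opt in to the second transformer. With this fix, "transformer_2" is
# appended to `_lora_loadable_modules` only at this point, so the class
# default no longer assumes every Wan pipeline has a second transformer.
pipe.load_lora_weights(
    "path/to/lora_2", adapter_name="stage_2", load_into_transformer_2=True
)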
