Skip to content

Commit 2a5b07d

Browse files
committed
revert change
1 parent 386cf1c commit 2a5b07d

File tree

1 file changed

+1
-3
lines changed

1 file changed

+1
-3
lines changed

src/diffusers/loaders/lora_pipeline.py

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -5065,7 +5065,7 @@ class WanLoraLoaderMixin(LoraBaseMixin):
50655065
Load LoRA layers into [`WanTransformer3DModel`]. Specific to [`WanPipeline`] and [`WanImageToVideoPipeline`].
50665066
"""
50675067

5068-
_lora_loadable_modules = ["transformer"]
5068+
_lora_loadable_modules = ["transformer", "transformer_2"]
50695069
transformer_name = TRANSFORMER_NAME
50705070

50715071
@classmethod
@@ -5278,8 +5278,6 @@ def load_lora_weights(
52785278
"Note that Wan2.1 models do not have a transformer_2 component."
52795279
"Ensure the model has a transformer_2 component before setting load_into_transformer_2=True."
52805280
)
5281-
if "transformer_2" not in self._lora_loadable_modules:
5282-
self._lora_loadable_modules.append("transformer_2")
52835281
self.load_lora_into_transformer(
52845282
state_dict,
52855283
transformer=self.transformer_2,

0 commit comments

Comments
 (0)