We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent bcb0924 commit cabcf3dCopy full SHA for cabcf3d
src/diffusers/loaders/lora_pipeline.py
@@ -5271,6 +5271,11 @@ def load_lora_weights(
5271
5272
load_into_transformer_2 = kwargs.pop("load_into_transformer_2", False)
5273
if load_into_transformer_2:
5274
+ if getattr(self, "transformer_2", None) is None:
5275
+ raise ValueError(
5276
+ "Cannot load LoRA into transformer_2: transformer_2 is not available for this model. "
5277
+ "Ensure the model has a transformer_2 component before setting load_into_transformer_2=True."
5278
+ )
5279
self.load_lora_into_transformer(
5280
state_dict,
5281
transformer=self.transformer_2,
0 commit comments