Skip to content

Commit 52c55c1

Browse files
authored
Merge branch 'main' into handle-unload-lora-control
2 parents 6ed1131 + 0d96a89 commit 52c55c1

File tree

1 file changed

+2
-2
lines changed

1 file changed

+2
-2
lines changed

src/diffusers/loaders/lora_pipeline.py

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -3159,7 +3159,7 @@ def load_lora_weights(
31593159
)
31603160

31613161
@classmethod
3162-
# Copied from diffusers.loaders.lora_pipeline.SD3LoraLoaderMixin.load_lora_into_transformer with SD3Transformer2DModel->CogVideoXTransformer3DModel
3162+
# Copied from diffusers.loaders.lora_pipeline.SD3LoraLoaderMixin.load_lora_into_transformer with SD3Transformer2DModel->MochiTransformer3DModel
31633163
def load_lora_into_transformer(
31643164
cls, state_dict, transformer, adapter_name=None, _pipeline=None, low_cpu_mem_usage=False
31653165
):
@@ -3171,7 +3171,7 @@ def load_lora_into_transformer(
31713171
A standard state dict containing the lora layer parameters. The keys can either be indexed directly
31723172
into the unet or prefixed with an additional `unet` which can be used to distinguish between text
31733173
encoder lora layers.
3174-
transformer (`CogVideoXTransformer3DModel`):
3174+
transformer (`MochiTransformer3DModel`):
31753175
The Transformer model to load the LoRA layers into.
31763176
adapter_name (`str`, *optional*):
31773177
Adapter name to be used for referencing the loaded adapter model. If not specified, it will use

0 commit comments

Comments (0)