
Commit 639ed3d
Commit message: updates
Parent: 135561b

File tree: 1 file changed (+2, -2 lines)


src/diffusers/loaders/lora_pipeline.py (2 additions, 2 deletions)

@@ -3963,7 +3963,7 @@ def load_lora_weights(
         )

    @classmethod
-    # Copied from diffusers.loaders.lora_pipeline.SD3LoraLoaderMixin.load_lora_into_transformer with SD3Transformer2DModel->HunyuanVideoTransformer3DModel
+    # Copied from diffusers.loaders.lora_pipeline.SD3LoraLoaderMixin.load_lora_into_transformer with SD3Transformer2DModel->Lumina2Transformer2DModel
     def load_lora_into_transformer(
         cls, state_dict, transformer, adapter_name=None, _pipeline=None, low_cpu_mem_usage=False
     ):
@@ -3975,7 +3975,7 @@ def load_lora_into_transformer(
                 A standard state dict containing the lora layer parameters. The keys can either be indexed directly
                 into the unet or prefixed with an additional `unet` which can be used to distinguish between text
                 encoder lora layers.
-            transformer (`HunyuanVideoTransformer3DModel`):
+            transformer (`Lumina2Transformer2DModel`):
                 The Transformer model to load the LoRA layers into.
             adapter_name (`str`, *optional*):
                 Adapter name to be used for referencing the loaded adapter model. If not specified, it will use
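
For context (not part of the commit): a minimal sketch of how this class method is typically exercised end to end. It assumes the Lumina2 text-to-image pipeline is exposed as `Lumina2Pipeline`, that base weights live at a placeholder repo id, and that a LoRA checkpoint exists at a placeholder local path; none of these names come from the diff itself. Calling `load_lora_weights` on the pipeline is what ultimately routes the transformer entries of the state dict through `load_lora_into_transformer`, whose docstring this commit corrects to reference `Lumina2Transformer2DModel`.

```python
# Minimal usage sketch, assuming the class/repo names noted above.
import torch
from diffusers import Lumina2Pipeline  # exact pipeline class name may differ by diffusers version

# Placeholder model id; substitute the checkpoint you actually use.
pipe = Lumina2Pipeline.from_pretrained(
    "Alpha-VLLM/Lumina-Image-2.0",
    torch_dtype=torch.bfloat16,
)
pipe.to("cuda")

# Loading LoRA weights on the pipeline delegates the transformer entries of the
# state dict to the mixin's `load_lora_into_transformer` class method touched in this diff.
pipe.load_lora_weights("path/to/lumina2_lora.safetensors")  # placeholder path

image = pipe(prompt="a watercolor fox in a pine forest").images[0]
image.save("lora_sample.png")
```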
