Skip to content

Commit d952267

Browse files
committed
_pack_dict_with_prefix
1 parent 72b489d commit d952267

File tree

2 files changed

+7
-7
lines changed

2 files changed

+7
-7
lines changed

src/diffusers/loaders/lora_base.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -314,7 +314,7 @@ def _best_guess_weight_name(
314314
return weight_name
315315

316316

317-
def _pack_sd_with_prefix(state_dict, prefix):
317+
def _pack_dict_with_prefix(state_dict, prefix):
318318
sd_with_prefix = {f"{prefix}.{key}": value for key, value in state_dict.items()}
319319
return sd_with_prefix
320320

@@ -914,7 +914,7 @@ def set_lora_device(self, adapter_names: List[str], device: Union[torch.device,
914914
@staticmethod
915915
def pack_weights(layers, prefix):
916916
layers_weights = layers.state_dict() if isinstance(layers, torch.nn.Module) else layers
917-
return _pack_sd_with_prefix(layers_weights, prefix)
917+
return _pack_dict_with_prefix(layers_weights, prefix)
918918

919919
@staticmethod
920920
def write_lora_layers(

src/diffusers/loaders/lora_pipeline.py

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -37,7 +37,7 @@
3737
LoraBaseMixin,
3838
_fetch_state_dict,
3939
_load_lora_into_text_encoder,
40-
_pack_sd_with_prefix,
40+
_pack_dict_with_prefix,
4141
)
4242
from .lora_conversion_utils import (
4343
_convert_bfl_flux_control_lora_to_diffusers,
@@ -511,11 +511,11 @@ def save_lora_weights(
511511
state_dict.update(cls.pack_weights(text_encoder_lora_layers, cls.text_encoder_name))
512512

513513
if unet_lora_adapter_metadata:
514-
lora_adapter_metadata.update(_pack_sd_with_prefix(unet_lora_adapter_metadata, cls.unet_name))
514+
lora_adapter_metadata.update(_pack_dict_with_prefix(unet_lora_adapter_metadata, cls.unet_name))
515515

516516
if text_encoder_lora_adapter_metadata:
517517
lora_adapter_metadata.update(
518-
_pack_sd_with_prefix(text_encoder_lora_adapter_metadata, cls.text_encoder_name)
518+
_pack_dict_with_prefix(text_encoder_lora_adapter_metadata, cls.text_encoder_name)
519519
)
520520

521521
# Save the model
@@ -2376,11 +2376,11 @@ def save_lora_weights(
23762376
state_dict.update(cls.pack_weights(text_encoder_lora_layers, cls.text_encoder_name))
23772377

23782378
if transformer_lora_adapter_metadata:
2379-
lora_adapter_metadata.update(_pack_sd_with_prefix(transformer_lora_adapter_metadata, cls.transformer_name))
2379+
lora_adapter_metadata.update(_pack_dict_with_prefix(transformer_lora_adapter_metadata, cls.transformer_name))
23802380

23812381
if text_encoder_lora_adapter_metadata:
23822382
lora_adapter_metadata.update(
2383-
_pack_sd_with_prefix(text_encoder_lora_adapter_metadata, cls.text_encoder_name)
2383+
_pack_dict_with_prefix(text_encoder_lora_adapter_metadata, cls.text_encoder_name)
23842384
)
23852385

23862386
# Save the model

0 commit comments

Comments (0)