
Commit 876132a
fix-copies.
1 parent c44f7a3

File tree

1 file changed: +0 −3 lines


src/diffusers/loaders/lora_pipeline.py

Lines changed: 0 additions & 3 deletions
@@ -3119,7 +3119,6 @@ def load_lora_into_transformer(
         )

         # Load the layers corresponding to transformer.
-        logger.info(f"Loading {cls.transformer_name}.")
         transformer.load_lora_adapter(
             state_dict,
             network_alphas=None,
@@ -3427,7 +3426,6 @@ def load_lora_into_transformer(
         )

         # Load the layers corresponding to transformer.
-        logger.info(f"Loading {cls.transformer_name}.")
         transformer.load_lora_adapter(
             state_dict,
             network_alphas=None,
@@ -4042,7 +4040,6 @@ def load_lora_into_transformer(
         )

         # Load the layers corresponding to transformer.
-        logger.info(f"Loading {cls.transformer_name}.")
         transformer.load_lora_adapter(
             state_dict,
             network_alphas=None,
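
For context, each hunk above edits a load_lora_into_transformer classmethod, the internal hook through which a pipeline's load_lora_weights call hands the transformer portion of a LoRA state dict to transformer.load_lora_adapter. A minimal usage sketch of that code path follows; the pipeline class, checkpoint id, LoRA repository id, and adapter name are illustrative assumptions, not taken from this commit.

# Minimal sketch (assumptions noted above): loading a LoRA into a
# transformer-backed diffusers pipeline. load_lora_weights() internally
# routes the transformer weights through load_lora_into_transformer(),
# the classmethod touched by this commit.
import torch
from diffusers import FluxPipeline

pipe = FluxPipeline.from_pretrained(
    "black-forest-labs/FLUX.1-dev",  # example base checkpoint (assumption)
    torch_dtype=torch.bfloat16,
)
pipe.load_lora_weights("some-user/some-lora", adapter_name="example")  # hypothetical LoRA repo id
image = pipe("an astronaut riding a horse", num_inference_steps=28).images[0]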
