@@ -543,7 +543,7 @@ def fuse_lora(
         > [!WARNING] > This is an experimental API.
 
         Args:
-            components: (`list[str]`): List of LoRA-injectable components to fuse the LoRAs into.
+            components: (`list[str]`): list of LoRA-injectable components to fuse the LoRAs into.
             lora_scale (`float`, defaults to 1.0):
                 Controls how much to influence the outputs with the LoRA parameters.
             safe_fusing (`bool`, defaults to `False`):
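The arguments documented in this hunk (`components`, `lora_scale`, `safe_fusing`) are what callers pass when baking a loaded LoRA into the base weights. A minimal usage sketch for a UNet-based pipeline follows; the checkpoint and LoRA repo ids are placeholders, not anything referenced by this commit:

```python
import torch
from diffusers import DiffusionPipeline

# Placeholder checkpoint and LoRA repo ids; substitute your own.
pipe = DiffusionPipeline.from_pretrained(
    "stable-diffusion-v1-5/stable-diffusion-v1-5", torch_dtype=torch.float16
).to("cuda")
pipe.load_lora_weights("some-user/some-lora")

# Bake the LoRA into the UNet and text encoder weights.
# lora_scale scales the LoRA's contribution; safe_fusing checks the fused
# weights for NaNs before they replace the originals.
pipe.fuse_lora(components=["unet", "text_encoder"], lora_scale=0.8, safe_fusing=True)

image = pipe("an astronaut riding a horse").images[0]
```

Fusing removes the per-step LoRA matmuls at inference time, which is the usual reason to call it before latency-sensitive generation or export.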
@@ -580,7 +580,7 @@ def unfuse_lora(self, components: list[str] = ["unet", "text_encoder"], **kwargs
         > [!WARNING] > This is an experimental API.
 
         Args:
-            components (`list[str]`): List of LoRA-injectable components to unfuse LoRA from.
+            components (`list[str]`): list of LoRA-injectable components to unfuse LoRA from.
             unfuse_unet (`bool`, defaults to `True`): Whether to unfuse the UNet LoRA parameters.
             unfuse_text_encoder (`bool`, defaults to `True`):
                 Whether to unfuse the text encoder LoRA parameters. If the text encoder wasn't monkey-patched with the
@@ -1992,7 +1992,7 @@ def unfuse_lora(self, components: list[str] = ["transformer", "text_encoder"], *
         > [!WARNING] > This is an experimental API.
 
         Args:
-            components (`list[str]`): List of LoRA-injectable components to unfuse LoRA from.
+            components (`list[str]`): list of LoRA-injectable components to unfuse LoRA from.
         """
         transformer = getattr(self, self.transformer_name) if not hasattr(self, "transformer") else self.transformer
         if hasattr(transformer, "_transformer_norm_layers") and transformer._transformer_norm_layers:
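The two executable lines at the bottom of this hunk are the transformer-specific part of `unfuse_lora`: when a LoRA also ships replacement norm weights, the originals are stashed on `_transformer_norm_layers` at load time and restored here before delegating to the base class. A simplified sketch of that stash-and-restore pattern, using an illustrative module rather than the real transformer (only the attribute name comes from the hunk):

```python
import torch


class NormBackupModule(torch.nn.Module):
    """Illustrative module mirroring the stash-and-restore pattern above."""

    def __init__(self):
        super().__init__()
        self.norm_out = torch.nn.LayerNorm(8)
        # Holds the original norm weights while a LoRA's norms are active.
        self._transformer_norm_layers = None

    def apply_lora_norms(self, lora_norm_state: dict):
        # Stash the originals before overwriting them with the LoRA's norm weights.
        self._transformer_norm_layers = {k: v.clone() for k, v in self.norm_out.state_dict().items()}
        self.norm_out.load_state_dict(lora_norm_state)

    def restore_norms(self):
        # Mirrors the guard in the hunk: only restore if a backup exists.
        if hasattr(self, "_transformer_norm_layers") and self._transformer_norm_layers:
            self.norm_out.load_state_dict(self._transformer_norm_layers)
            self._transformer_norm_layers = None
```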
@@ -3889,7 +3889,7 @@ def fuse_lora(
         Fuses the LoRA parameters into the original parameters of the corresponding blocks.
 
         Args:
-            components: (`list[str]`): List of LoRA-injectable components to fuse the LoRAs into.
+            components: (`list[str]`): list of LoRA-injectable components to fuse the LoRAs into.
             lora_scale (`float`, defaults to 1.0):
                 Controls how much to influence the outputs with the LoRA parameters.
             safe_fusing (`bool`, defaults to `False`):
@@ -3919,7 +3919,7 @@ def unfuse_lora(self, components: list[str] = ["transformer"], **kwargs):
         Reverses the effect of [`pipe.fuse_lora()`].
 
         Args:
-            components (`list[str]`): List of LoRA-injectable components to unfuse LoRA from.
+            components (`list[str]`): list of LoRA-injectable components to unfuse LoRA from.
         """
         super().unfuse_lora(components=components, **kwargs)
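At the call site, this override is the mirror of the `fuse_lora` example above. A short sketch, assuming `pipe` is a transformer-based pipeline whose LoRA is already loaded and fused:

```python
# Undo the fusion; the transformer gets its original parameters back
# (including any stashed norm layers, per the earlier hunk).
pipe.unfuse_lora(components=["transformer"])

# The LoRA is still loaded, so it can be re-fused at a different strength.
pipe.fuse_lora(components=["transformer"], lora_scale=0.5)
```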