 if is_transformers_available():
-    from transformers import CLIPTextModel, CLIPTextModelWithProjection, PreTrainedModel, PreTrainedTokenizer
+    from transformers import CLIPTextModel, CLIPTextModelWithProjection

 if is_accelerate_available():
     from accelerate import init_empty_weights
@@ -627,7 +627,7 @@ class TextualInversionLoaderMixin:
     Load textual inversion tokens and embeddings to the tokenizer and text encoder.
     """

-    def maybe_convert_prompt(self, prompt: Union[str, List[str]], tokenizer: "PreTrainedTokenizer"):
+    def maybe_convert_prompt(self, prompt: Union[str, List[str]], tokenizer: "PreTrainedTokenizer"):  # noqa: F821
         r"""
         Processes prompts that include a special token corresponding to a multi-vector textual inversion embedding to
         be replaced with multiple special tokens each corresponding to one of the vectors. If the prompt has no textual
@@ -654,7 +654,7 @@ def maybe_convert_prompt(self, prompt: Union[str, List[str]], tokenizer: "PreTra

         return prompts

-    def _maybe_convert_prompt(self, prompt: str, tokenizer: "PreTrainedTokenizer"):
+    def _maybe_convert_prompt(self, prompt: str, tokenizer: "PreTrainedTokenizer"):  # noqa: F821
         r"""
         Maybe convert a prompt into a "multi vector"-compatible prompt. If the prompt includes a token that corresponds
         to a multi-vector textual inversion embedding, this function will process the prompt so that the special token
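For context on the multi-vector handling described in the docstrings above: when a loaded textual inversion embedding carries more than one vector, the prompt's placeholder token is repeated with its companion tokens before tokenization. The following is an illustrative, self-contained sketch of that expansion; the "_1", "_2" suffix scheme and the expand_multi_vector_tokens helper are assumptions for illustration, not the diffusers implementation.

# Illustrative sketch only -- not the diffusers source. Assumes multi-vector
# embeddings are registered as extra vocabulary entries "<token>_1", "<token>_2", ...
def expand_multi_vector_tokens(prompt: str, vocab: set) -> str:
    words = []
    for word in prompt.split():
        words.append(word)
        i = 1
        # Append every companion token that exists in the tokenizer vocabulary.
        while f"{word}_{i}" in vocab:
            words.append(f"{word}_{i}")
            i += 1
    return " ".join(words)


# A three-vector embedding loaded under the placeholder "<cat-toy>":
vocab = {"<cat-toy>", "<cat-toy>_1", "<cat-toy>_2"}
print(expand_multi_vector_tokens("a photo of <cat-toy>", vocab))
# -> "a photo of <cat-toy> <cat-toy>_1 <cat-toy>_2"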
@@ -688,8 +688,8 @@ def load_textual_inversion(
         self,
         pretrained_model_name_or_path: Union[str, List[str], Dict[str, torch.Tensor], List[Dict[str, torch.Tensor]]],
         token: Optional[Union[str, List[str]]] = None,
-        tokenizer: Optional[PreTrainedTokenizer] = None,
-        text_encoder: Optional[PreTrainedModel] = None,
+        tokenizer: Optional["PreTrainedTokenizer"] = None,  # noqa: F821
+        text_encoder: Optional["PreTrainedModel"] = None,  # noqa: F821
         **kwargs,
     ):
         r"""