Commit f422423

make style
1 parent 7dbd4bc commit f422423

File tree

2 files changed: +4 -12 lines changed


examples/research_projects/anytext/auxiliary_latent_module.py

Lines changed: 0 additions & 1 deletion

@@ -10,7 +10,6 @@
 from PIL import Image, ImageDraw, ImageFont
 from torch import nn
 
-from diffusers.models.autoencoders import AutoencoderKL
 from diffusers.utils import logging
 
 

examples/research_projects/anytext/text_embedding_module.py

Lines changed: 4 additions & 11 deletions

@@ -3,23 +3,14 @@
 # text -> tokenizer ->
 
 
-from typing import List, Optional
+from typing import Optional
 
 import torch
 from PIL import ImageFont
 from torch import nn
 
-from diffusers.loaders import (
-    StableDiffusionLoraLoaderMixin,
-    TextualInversionLoaderMixin,
-)
-from diffusers.models.autoencoders.vae import DiagonalGaussianDistribution
-from diffusers.models.lora import adjust_lora_scale_text_encoder
 from diffusers.utils import (
-    USE_PEFT_BACKEND,
     logging,
-    scale_lora_layers,
-    unscale_lora_layers,
 )
 
 from .embedding_manager import EmbeddingManager

@@ -74,6 +65,8 @@ def forward(
         prompt_embeds = self.frozen_CLIP_embedder_t3.encode([prompt], embedding_manager=self.embedding_manager)
 
         self.embedding_manager.encode_text(text_info)
-        negative_prompt_embeds = self.frozen_CLIP_embedder_t3.encode([negative_prompt], embedding_manager=self.embedding_manager)
+        negative_prompt_embeds = self.frozen_CLIP_embedder_t3.encode(
+            [negative_prompt], embedding_manager=self.embedding_manager
+        )
 
         return prompt_embeds, negative_prompt_embeds
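
The substance of this commit is a style pass: it drops imports that were never used and wraps one over-long call so the formatter accepts it. Below is a minimal, runnable sketch of that wrapping pattern; DummyEncoder and the variables it uses are illustrative stand-ins, not the module's real frozen_CLIP_embedder_t3 or embedding_manager.

class DummyEncoder:
    def encode(self, prompts, embedding_manager=None):
        # Stand-in for the project's CLIP text encoder; it just echoes its input.
        return prompts


encoder = DummyEncoder()
negative_prompt = "low quality, blurry"
embedding_manager = None  # placeholder; the real module passes its EmbeddingManager here

# Before the commit: the whole call sat on one line that exceeded the style limit.
negative_prompt_embeds = encoder.encode([negative_prompt], embedding_manager=embedding_manager)

# After "make style": the same call, wrapped so each line stays within the limit.
negative_prompt_embeds = encoder.encode(
    [negative_prompt], embedding_manager=embedding_manager
)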
