Commit 3393c01

vladmandic authored
fix pixart-sigma negative prompt handling (#8299)
* fix negative prompt

* fix

---------

Co-authored-by: yiyixuxu <yixu310@gmail,com>
Co-authored-by: YiYi Xu <[email protected]>
1 parent 1fa8dbc commit 3393c01

File tree

2 files changed: +2 -2 lines changed


src/diffusers/pipelines/pixart_alpha/pipeline_pixart_alpha.py

Lines changed: 1 addition & 1 deletion
@@ -394,7 +394,7 @@ def encode_prompt(
 
         # get unconditional embeddings for classifier free guidance
         if do_classifier_free_guidance and negative_prompt_embeds is None:
-            uncond_tokens = [negative_prompt] * batch_size
+            uncond_tokens = [negative_prompt] * batch_size if isinstance(negative_prompt, str) else negative_prompt
             uncond_tokens = self._text_preprocessing(uncond_tokens, clean_caption=clean_caption)
             max_length = prompt_embeds.shape[1]
             uncond_input = self.tokenizer(
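
In effect, the changed line lets negative_prompt be either a single string, which is repeated once per prompt in the batch, or a list with one entry per prompt, which is now passed through unchanged instead of being wrapped into a list of lists. A minimal sketch of that broadcasting logic in isolation; the helper name and example values are illustrative, not part of the commit:

def broadcast_negative_prompt(negative_prompt, batch_size):
    # Hypothetical helper mirroring the changed line in encode_prompt:
    # a plain string is repeated per batch item, a list is used as-is.
    return [negative_prompt] * batch_size if isinstance(negative_prompt, str) else negative_prompt

print(broadcast_negative_prompt("blurry", 2))
# ['blurry', 'blurry']

print(broadcast_negative_prompt(["blurry", "low quality"], 2))
# ['blurry', 'low quality']
# Before this change the list was wrapped again, giving
# [['blurry', 'low quality'], ['blurry', 'low quality']],
# which then fails in the text preprocessing and tokenizer calls that expect strings.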

src/diffusers/pipelines/pixart_alpha/pipeline_pixart_sigma.py

Lines changed: 1 addition & 1 deletion
@@ -320,7 +320,7 @@ def encode_prompt(
 
         # get unconditional embeddings for classifier free guidance
         if do_classifier_free_guidance and negative_prompt_embeds is None:
-            uncond_tokens = [negative_prompt] * batch_size
+            uncond_tokens = [negative_prompt] * batch_size if isinstance(negative_prompt, str) else negative_prompt
             uncond_tokens = self._text_preprocessing(uncond_tokens, clean_caption=clean_caption)
             max_length = prompt_embeds.shape[1]
             uncond_input = self.tokenizer(
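
The same one-line fix is applied to the Sigma pipeline. For context, a sketch of the call pattern this is meant to support, passing one negative prompt per image in a batch; the model id, prompts, and device below are assumptions for illustration, not taken from the commit:

import torch
from diffusers import PixArtSigmaPipeline

# Illustrative usage sketch (not from the commit): a batch of two prompts
# with a matching list of per-image negative prompts.
pipe = PixArtSigmaPipeline.from_pretrained(
    "PixArt-alpha/PixArt-Sigma-XL-2-1024-MS", torch_dtype=torch.float16
).to("cuda")

images = pipe(
    prompt=["a red fox in the snow", "a lighthouse at dusk"],
    negative_prompt=["blurry", "low quality"],  # one negative prompt per prompt
).images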
