Commit 3f9228b

test: add slow ChromaPipeline attention mask tests
1 parent 1cf90c4 commit 3f9228b

File tree

1 file changed (+3 −0 lines)


tests/pipelines/chroma/test_pipeline_chroma.py

Lines changed: 3 additions & 0 deletions
@@ -5,6 +5,7 @@
 from transformers import AutoTokenizer, T5EncoderModel
 
 from diffusers import AutoencoderKL, ChromaPipeline, ChromaTransformer2DModel, FlowMatchEulerDiscreteScheduler
+from diffusers.utils.testing_utils import slow
 
 from ...testing_utils import torch_device
 from ..test_pipelines_common import FluxIPAdapterTesterMixin, PipelineTesterMixin, check_qkv_fused_layers_exist
@@ -167,12 +168,14 @@ def setUp(self):
             torch_dtype=torch.float16,
         )
 
+    @slow
     def test_attention_mask_dtype_is_bool_short_prompt(self):
         prompt_embeds, attn_mask = self.pipe._get_t5_prompt_embeds("man")
         self.assertEqual(attn_mask.dtype, torch.bool, f"Expected bool, got {attn_mask.dtype}")
         self.assertGreater(prompt_embeds.shape[0], 0)
         self.assertGreater(prompt_embeds.shape[1], 0)
 
+    @slow
     def test_attention_mask_dtype_is_bool_long_prompt(self):
         long_prompt = "a detailed portrait of a man standing in a garden with flowers and trees"
         prompt_embeds, attn_mask = self.pipe._get_t5_prompt_embeds(long_prompt)
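
The `@slow` decorator imported from `diffusers.utils.testing_utils` marks these tests so that they are skipped in the default fast test run and only execute when slow tests are explicitly enabled. Below is a minimal sketch of how such a gate commonly works; the `RUN_SLOW` environment variable and the `_run_slow_tests` helper are illustrative assumptions, not a copy of the library's implementation.

import os
import unittest

# Illustrative "slow test" gate (assumption: diffusers.utils.testing_utils.slow
# behaves along these lines, keyed off an environment variable such as RUN_SLOW).
_run_slow_tests = os.getenv("RUN_SLOW", "0").lower() in ("1", "true", "yes")


def slow(test_case):
    """Skip the decorated test unless slow tests are explicitly enabled."""
    return unittest.skipUnless(_run_slow_tests, "test is slow")(test_case)


class ExampleSlowTests(unittest.TestCase):
    @slow
    def test_runs_only_when_enabled(self):
        # Always collected, but skipped unless RUN_SLOW is set to a truthy value.
        self.assertTrue(True)


if __name__ == "__main__":
    unittest.main()

Under that assumption, the two new attention-mask tests are skipped in the fast test path and run only when the slow suite is explicitly enabled (for example by setting the relevant environment variable before invoking pytest).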

0 commit comments
