Skip to content

Commit effe4b9

Browse files
authored
Update xformers SD3 test (#8712)
update
1 parent 5b51ad0 commit effe4b9

File tree

2 files changed

+8
-0
lines changed

2 files changed

+8
-0
lines changed

tests/models/test_modeling_common.py

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -382,6 +382,10 @@ def test_set_xformers_attn_processor_for_determinism(self):
382382
# If not has `set_attn_processor`, skip test
383383
return
384384

385+
if not hasattr(model, "set_default_attn_processor"):
386+
# If not has `set_default_attn_processor`, skip test
387+
return
388+
385389
model.set_default_attn_processor()
386390
assert all(type(proc) == AttnProcessor for proc in model.attn_processors.values())
387391
with torch.no_grad():

tests/pipelines/controlnet_sd3/test_controlnet_sd3.py

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -190,6 +190,10 @@ def test_controlnet_sd3(self):
190190
np.abs(image_slice.flatten() - expected_slice).max() < 1e-2
191191
), f"Expected: {expected_slice}, got: {image_slice.flatten()}"
192192

193+
@unittest.skip("xFormersAttnProcessor does not work with SD3 Joint Attention")
194+
def test_xformers_attention_forwardGenerator_pass(self):
195+
pass
196+
193197

194198
@slow
195199
@require_torch_gpu

0 commit comments

Comments
 (0)