Skip to content

Commit e76fc94

Browse files
committed
fix
1 parent f859fdf commit e76fc94

File tree

1 file changed

+1
-0
lines changed

1 file changed

+1
-0
lines changed

src/diffusers/models/attention_dispatch.py

Lines changed: 1 addition & 0 deletions
Diff (1 addition, 0 deletions):

```diff
@@ -899,6 +899,7 @@ def _templated_context_parallel_attention(
 @_AttentionBackendRegistry.register(
     AttentionBackendName.FLASH,
     constraints=[_check_device, _check_qkv_dtype_bf16_or_fp16, _check_shape],
+    supports_context_parallel=True,
 )
 def _flash_attention(
     query: torch.Tensor,
```

0 commit comments

Comments
 (0)