Commit 66a6811 (parent 943b4a8)

Commit message: up

1 file changed, 0 insertions(+), 2 deletions(-)


src/diffusers/models/attention_dispatch.py (0 additions, 2 deletions)
@@ -52,7 +52,6 @@
 _CAN_USE_NPU_ATTN = is_torch_npu_available()
 _CAN_USE_XLA_ATTN = is_torch_xla_available() and is_torch_xla_version(">=", _REQUIRED_XLA_VERSION)
 _CAN_USE_XFORMERS_ATTN = is_xformers_available() and is_xformers_version(">=", _REQUIRED_XFORMERS_VERSION)
-
 if _CAN_USE_FLASH_ATTN:
     from flash_attn import flash_attn_func, flash_attn_varlen_func
 else:
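
For context, this hunk sits in the backend-capability block of attention_dispatch.py: each optional attention backend (flash-attn, NPU, XLA, xformers) is probed once at import time, and the heavyweight import only runs when the corresponding flag is set. A minimal sketch of that pattern follows; the probe helper and the None fallbacks here are assumptions for illustration, not the file's actual helpers.

# Sketch of the capability-gated import pattern shown above.
# is_flash_attn_available() is a hypothetical probe written for this
# example; diffusers ships its own is_*_available() utilities.
def is_flash_attn_available() -> bool:
    try:
        import flash_attn  # noqa: F401
        return True
    except ImportError:
        return False

_CAN_USE_FLASH_ATTN = is_flash_attn_available()

if _CAN_USE_FLASH_ATTN:
    from flash_attn import flash_attn_func, flash_attn_varlen_func
else:
    # Assumed fallback: bind the names to None so later dispatch code can
    # detect an unavailable backend instead of hitting an ImportError.
    flash_attn_func = None
    flash_attn_varlen_func = None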
@@ -141,7 +140,6 @@ def wrap(func):
 
 _custom_op = custom_op_no_op
 _register_fake = register_fake_no_op
-
 logger = get_logger(__name__)  # pylint: disable=invalid-name
 
 # TODO(aryan): Add support for the following:
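
The second hunk touches the no-op fallbacks for torch custom-op registration: when torch.library.custom_op / register_fake are unavailable, the module binds _custom_op and _register_fake to stand-ins that leave the decorated function untouched. A plausible shape of those helpers, inferred from the def wrap(func) visible in the hunk header (the exact signatures are an assumption):

# Hypothetical no-op stand-ins for torch.library.custom_op / register_fake.
# Each supports both decorator-factory use (@_custom_op("ns::op")) and
# direct use (_custom_op("ns::op", fn)), returning the function unchanged.
def custom_op_no_op(name, fn=None, **kwargs):
    def wrap(func):
        return func
    return wrap if fn is None else fn

def register_fake_no_op(op, fn=None, **kwargs):
    def wrap(func):
        return func
    return wrap if fn is None else fn

_custom_op = custom_op_no_op
_register_fake = register_fake_no_op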
