
Commit 2bb3796

Commit message: up

1 parent 87d0879, commit 2bb3796

File tree: 2 files changed, +6 -2 lines

src/diffusers/models/attention_dispatch.py

Lines changed: 4 additions & 0 deletions
@@ -374,6 +374,10 @@ def _check_attention_backend_requirements(backend: AttentionBackendName) -> None:
             raise RuntimeError(
                 f"Flash Attention 3 Hub backend '{backend.value}' is not usable because the `kernels` package isn't available. Please install it with `pip install kernels`."
             )
+        if flash_attn_3_hub_func is None:
+            raise RuntimeError(
+                "`flash_attn_3_hub_func` wasn't available. Please double-check if `kernels` was able to successfully pull the FA3 kernel from kernels-community/vllm-flash-attn3."
+            )
     elif backend in [AttentionBackendName._FLASH_VARLEN_3_HUB]:
         raise NotImplementedError
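For context, here is a minimal sketch of how this new guard relates to the helper changed below. Only `flash_attn_3_hub_func`, the error message, and `_get_fa3_from_hub` come from this commit; every other name is an assumption for illustration, not diffusers' actual API.

    # Hedged sketch: how the handle could end up as None and why the new check helps.
    from diffusers.utils.kernels_utils import _get_fa3_from_hub  # module path per the second file in this commit

    _fa3_kernel = _get_fa3_from_hub()  # may now return None instead of raising (see change below)
    flash_attn_3_hub_func = getattr(_fa3_kernel, "flash_attn_func", None)  # attribute name assumed

    def _require_fa3_hub():  # hypothetical helper, not part of the diff
        # Without this check, a None handle would only surface later as an opaque
        # "'NoneType' object is not callable"; the new RuntimeError is actionable.
        if flash_attn_3_hub_func is None:
            raise RuntimeError(
                "`flash_attn_3_hub_func` wasn't available. Please double-check if `kernels` was able "
                "to successfully pull the FA3 kernel from kernels-community/vllm-flash-attn3."
            )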

src/diffusers/utils/kernels_utils.py

Lines changed: 2 additions & 2 deletions
@@ -13,5 +13,5 @@ def _get_fa3_from_hub():
     try:
         flash_attn_3_hub = get_kernel(_DEFAULT_HUB_ID_FA3)
         return flash_attn_3_hub
-    except Exception as e:
-        raise e
+    except Exception:
+        return None
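
Taken together with the dispatcher change above, the helper now degrades to None on any fetch failure and the caller reports it. A minimal sketch of the post-commit behavior, assuming the surrounding imports; only the try/except body comes verbatim from the diff, and the hub id is inferred from the error message above rather than read from the file.

    from kernels import get_kernel  # `kernels` package: pip install kernels

    _DEFAULT_HUB_ID_FA3 = "kernels-community/vllm-flash-attn3"  # assumed value, taken from the error message

    def _get_fa3_from_hub():
        try:
            flash_attn_3_hub = get_kernel(_DEFAULT_HUB_ID_FA3)
            return flash_attn_3_hub
        except Exception:
            # Swallow the failure; attention_dispatch.py now checks for None
            # and raises a clearer RuntimeError at backend-selection time.
            return None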
