Skip to content

Commit 587f0a6

Browse files
LoserCheemsCopilot
and authored
Update flash_dmattn/integrations/modeling_flash_dynamic_mask_attention_utils.py
Co-authored-by: Copilot <[email protected]>
1 parent 37098b7 commit 587f0a6

File tree

1 file changed

+1
-1
lines changed

1 file changed

+1
-1
lines changed

flash_dmattn/integrations/modeling_flash_dynamic_mask_attention_utils.py

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -461,7 +461,7 @@ def _process_flash_dynamic_mask_attention_kwargs(
461461

462462
if supports_mapping["deterministic"]:
463463
flash_kwargs["deterministic"] = (
464-
deterministic if deterministic is not None else os.getenv("FLASH_ATTENTION_DETERMINISTIC", "0") == "1"
464+
deterministic if deterministic is not None else os.getenv("FLASH_DMATTN_DETERMINISTIC", "0") == "1"
465465
)
466466

467467
if supports_mapping["softcap"] and softcap is not None:

0 commit comments

Comments
 (0)