1 parent 76b0c3a commit 89dd45d
pyproject.toml
@@ -64,7 +64,7 @@ dev = [
 flash_attn = [
     # it's easier to install flash-attn from wheel rather than like this as extra
     # "https://github.com/Dao-AILab/flash-attention/releases/download/v2.5.6/flash_attn-2.5.6+cu118torch2.0cxx11abiFALSE-cp311-cp311-linux_x86_64.whl",
-    "flash-attn==2.6.3",
+    "flash-attn==2.8.2",
     "packaging", # FIXME: temporary, until https://github.com/Dao-AILab/flash-attention/pull/937 is released
     "ninja"
 ]
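
For context, the pin bumped here is what resolves when the project is installed with its flash_attn extra. A minimal sketch of that install, assuming the command runs from the repository root and torch is already present (flash-attn builds against it); the --no-build-isolation flag is a common workaround for flash-attn source builds, not something specified by this commit:

    # build prerequisites (packaging is the temporary FIXME noted above)
    pip install packaging ninja
    # installs the project plus the flash_attn extra, resolving flash-attn==2.8.2
    pip install --no-build-isolation ".[flash_attn]"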