1 parent 516f857 commit 8e3709a
pyproject.toml
@@ -63,7 +63,7 @@ dev = [
 flash_attn = [
     # it's easier to install flash-attn from wheel rather than like this as extra
     # "https://github.com/Dao-AILab/flash-attention/releases/download/v2.5.6/flash_attn-2.5.6+cu118torch2.0cxx11abiFALSE-cp311-cp311-linux_x86_64.whl",
-    "flash-attn==2.6.3",
+    "flash-attn==2.8.2",
     "packaging", # FIXME: temporary, until https://github.com/Dao-AILab/flash-attention/pull/937 is released
     "ninja"
 ]
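
For context, flash_attn is declared as an optional dependency group, so it is only installed when requested. A minimal sketch of the two install paths the in-file comment contrasts, assuming the project is installed from a source checkout (the extra name comes from the diff above; the wheel URL is the v2.5.6 example pinned in the comment and may not match the 2.8.2 version this commit bumps to):

    # install the project together with the optional flash_attn extra
    pip install ".[flash_attn]"

    # or, as the comment suggests, install flash-attn from a prebuilt wheel first
    pip install "https://github.com/Dao-AILab/flash-attention/releases/download/v2.5.6/flash_attn-2.5.6+cu118torch2.0cxx11abiFALSE-cp311-cp311-linux_x86_64.whl"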