
Commit cdfd687

[Bugfix] Misaligned params in TreeAttentionImpl (#22226)
Signed-off-by: DarkLight1337 <[email protected]>
1 parent 4b3e447 commit cdfd687

File tree

1 file changed: +1 −5 lines changed

1 file changed

+1
-5
lines changed

vllm/v1/attention/backends/tree_attn.py

Lines changed: 1 addition & 5 deletions
@@ -4,7 +4,7 @@
 
 import ast
 from dataclasses import dataclass
-from typing import TYPE_CHECKING, Any, Optional
+from typing import TYPE_CHECKING, Optional
 
 import torch
 
@@ -313,15 +313,11 @@ def __init__(
         alibi_slopes: Optional[list[float]],
         sliding_window: Optional[int],
         kv_cache_dtype: str,
-        blocksparse_params: Optional[dict[str, Any]] = None,
         logits_soft_cap: Optional[float] = None,
         attn_type: AttentionType = AttentionType.DECODER,
         kv_sharing_target_layer_name: Optional[str] = None,
         use_irope: bool = False,
     ) -> None:
-        if blocksparse_params is not None:
-            raise ValueError(
-                "TreeAttention does not support block-sparse attention.")
         self.num_heads = num_heads
         self.head_size = head_size
         self.scale = float(scale)
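Why the stray parameter matters (inferred from the diff and the commit title, not stated on this page): backend implementations are constructed against a shared __init__ signature, so a blocksparse_params parameter sitting before logits_soft_cap in TreeAttentionImpl would shift any arguments passed positionally after it. The minimal sketch below illustrates that failure mode with hypothetical class names (AttentionImplBase, MisalignedImpl); it is not vLLM's actual call site.

from typing import Optional


class AttentionImplBase:
    """Hypothetical base interface; callers rely on this positional order."""

    def __init__(
        self,
        kv_cache_dtype: str,
        logits_soft_cap: Optional[float] = None,
    ) -> None:
        self.kv_cache_dtype = kv_cache_dtype
        self.logits_soft_cap = logits_soft_cap


class MisalignedImpl(AttentionImplBase):
    """Extra parameter inserted before logits_soft_cap, as in the removed code."""

    def __init__(
        self,
        kv_cache_dtype: str,
        blocksparse_params: Optional[dict] = None,  # not in the base signature
        logits_soft_cap: Optional[float] = None,
    ) -> None:
        super().__init__(kv_cache_dtype, logits_soft_cap)
        self.blocksparse_params = blocksparse_params


# A caller passing arguments positionally in the base-class order:
impl = MisalignedImpl("auto", 30.0)
print(impl.logits_soft_cap)     # None -- the intended value was 30.0
print(impl.blocksparse_params)  # 30.0 -- silently absorbed by the stray parameter

Dropping the parameter (and the now-unreachable ValueError guard) realigns TreeAttentionImpl's signature with the other v1 backends; with blocksparse_params gone, the Any import in the typing line is unused and is removed as well.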

0 commit comments
