commit 35b8e96 (1 parent: e4c65a3)
vllm/attention/backends/torch_sdpa.py
@@ -65,7 +65,7 @@ def swap_blocks(
         dst_kv_cache: torch.Tensor,
         src_to_dst: torch.Tensor,
     ) -> None:
-        PagedAttention.swap_blocks(src_kv_cache, dst_kv_cache, src_to_dst)
+        raise NotImplementedError("Swap is not supported in TorchSDPABackend.")
 
     @staticmethod
     def copy_blocks(
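
With this change, block swapping is unsupported on the Torch-SDPA (CPU) backend, so callers must avoid scheduling swaps for it. The sketch below is illustrative only and not part of the commit; the helper name try_swap and its signature are assumptions, not vLLM API.

# Minimal sketch (assumed helper, not vLLM code): probe whether a backend
# class supports KV-cache block swapping after this change.
def try_swap(backend_cls, src_kv_cache, dst_kv_cache, src_to_dst):
    try:
        backend_cls.swap_blocks(src_kv_cache, dst_kv_cache, src_to_dst)
        return True
    except NotImplementedError:
        # TorchSDPABackend.swap_blocks now raises NotImplementedError,
        # so the caller should skip swap operations for this backend.
        return False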