We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent 6d98843 · commit b5dfb94 (Copy full SHA for b5dfb94)
vllm/model_executor/layers/activation.py
@@ -65,11 +65,13 @@ class SiluAndMul(CustomOp):

     def __init__(self):
         super().__init__()
-        if current_platform.is_cuda_alike() or current_platform.is_cpu():
+        if current_platform.is_cuda_alike():
             self.op = torch.ops._C.silu_and_mul
         elif current_platform.is_xpu():
             from vllm._ipex_ops import ipex_ops
             self.op = ipex_ops.silu_and_mul
+        elif current_platform.is_cpu():
+            self._forward_method = self.forward_native

     def forward_native(self, x: torch.Tensor) -> torch.Tensor:
         """PyTorch-native implementation equivalent to forward()."""
0 commit comments