We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent ece3d13 commit b247f77 — Copy full SHA for b247f77
lightllm/common/fused_moe/grouped_fused_moe.py
@@ -941,7 +941,6 @@ def inplace_fused_experts_impl_fake(
941
w2_scale: Optional[torch.Tensor] = None,
942
a1_scale: Optional[torch.Tensor] = None,
943
a2_scale: Optional[torch.Tensor] = None,
944
- activate_fn: str = "silu",
945
layout: str = "blocked",
946
alpha: Optional[float] = None,
947
limit: Optional[float] = None,
0 commit comments