Skip to content

Commit 5e6f1ec

Browse files
committed
enable ConfigurableMoE by default
Signed-off-by: Enwei Zhu <21126786+syuoni@users.noreply.github.com>
1 parent 2082a0c commit 5e6f1ec

File tree

1 file changed

+3
-5
lines changed

1 file changed

+3
-5
lines changed

tensorrt_llm/_torch/modules/fused_moe/create_moe.py

Lines changed: 3 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -20,6 +20,8 @@
2020
from .moe_load_balancer import get_moe_load_balancer
2121
from .routing import BaseMoeRoutingMethod
2222

23+
ENABLE_CONFIGURABLE_MOE = os.environ.get("ENABLE_CONFIGURABLE_MOE", "0") == "1"
24+
2325

2426
def get_moe_cls(
2527
model_config: ModelConfig,
@@ -343,11 +345,7 @@ def create_moe(
343345

344346
moe_cls = get_moe_cls(model_config, override_quant_config)
345347

346-
# Check if ENABLE_CONFIGURABLE_MOE environment variable is set
347-
enable_configurable_moe = os.environ.get('ENABLE_CONFIGURABLE_MOE',
348-
'0') == '1'
349-
350-
if enable_configurable_moe:
348+
if ENABLE_CONFIGURABLE_MOE or moe_cls == CuteDslFusedMoE:
351349
# ConfigurableMoE is only supported for TRTLLMGenFusedMoE backend
352350
if moe_cls in (TRTLLMGenFusedMoE, CuteDslFusedMoE):
353351
return ConfigurableMoE(

0 commit comments

Comments (0)