Skip to content

Commit abbeb70

Browse files
committed
Don't double the `eplb` prefix on EplbConfig attributes (use `load_path`, `save_dir`, `record_metrics` instead of `eplb_load_path`, `eplb_save_dir`, `eplb_record_metrics`)
Signed-off-by: Patryk Saffer <[email protected]>
1 parent 2ebf603 commit abbeb70

File tree

6 files changed

+10
-10
lines changed

6 files changed

+10
-10
lines changed

vllm/distributed/eplb/eplb_state.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -297,7 +297,7 @@ def build(
297297
device=device,
298298
)
299299

300-
eplb_load_path = parallel_config.eplb_config.eplb_load_path
300+
eplb_load_path = parallel_config.eplb_config.load_path
301301
eplb_save_dir = parallel_config.eplb_config.save_dir
302302

303303
eplb_step_interval = parallel_config.eplb_config.step_interval

vllm/model_executor/models/deepseek_v2.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -246,7 +246,7 @@ def __init__(
246246
else self.routed_scaling_factor,
247247
e_score_correction_bias=self.gate.e_score_correction_bias,
248248
enable_eplb=self.enable_eplb,
249-
eplb_record_metrics=eplb_config.eplb_record_metrics,
249+
eplb_record_metrics=eplb_config.record_metrics,
250250
num_redundant_experts=self.n_redundant_experts,
251251
is_sequence_parallel=self.is_sequence_parallel,
252252
n_shared_experts=config.n_shared_experts

vllm/model_executor/models/mixtral.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -140,7 +140,7 @@ def __init__(
140140
dp_size=dp_size,
141141
prefix=f"{prefix}.experts",
142142
enable_eplb=self.enable_eplb,
143-
eplb_record_metrics=parallel_config.eplb_config.eplb_record_metrics,
143+
eplb_record_metrics=parallel_config.eplb_config.record_metrics,
144144
num_redundant_experts=self.n_redundant_experts,
145145
)
146146

vllm/model_executor/models/qwen3_moe.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -169,7 +169,7 @@ def __init__(
169169
quant_config=quant_config,
170170
prefix=f"{prefix}.experts",
171171
enable_eplb=self.enable_eplb,
172-
eplb_record_metrics=eplb_config.eplb_record_metrics,
172+
eplb_record_metrics=eplb_config.record_metrics,
173173
num_redundant_experts=self.n_redundant_experts,
174174
is_sequence_parallel=self.is_sequence_parallel,
175175
)

vllm/model_executor/models/qwen3_next.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -168,7 +168,7 @@ def __init__(self, vllm_config: VllmConfig, prefix: str = ""):
168168
quant_config=quant_config,
169169
prefix=f"{prefix}.experts",
170170
enable_eplb=self.enable_eplb,
171-
eplb_record_metrics=eplb_config.eplb_record_metrics,
171+
eplb_record_metrics=eplb_config.record_metrics,
172172
num_redundant_experts=self.n_redundant_experts,
173173
is_sequence_parallel=self.is_sequence_parallel,
174174
)

vllm/v1/worker/gpu_model_runner.py

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1992,11 +1992,11 @@ def eplb_step(self, is_dummy: bool = False, is_profile: bool = False) -> None:
19921992
"""
19931993
if not self.parallel_config.enable_eplb:
19941994
return
1995-
if is_profile and self.parallel_config.eplb_config.eplb_load_path is not None:
1995+
if is_profile and self.parallel_config.eplb_config.load_path is not None:
19961996
return
19971997
if (
1998-
self.parallel_config.eplb_config.eplb_load_path is not None
1999-
and self.parallel_config.eplb_config.eplb_save_dir is None
1998+
self.parallel_config.eplb_config.load_path is not None
1999+
and self.parallel_config.eplb_config.save_dir is None
20002000
):
20012001
return
20022002

@@ -2930,9 +2930,9 @@ def load_model(self, eep_scale_up: bool = False) -> None:
29302930
old_global_expert_indices,
29312931
rank_mapping,
29322932
)
2933-
if self.parallel_config.eplb_config.eplb_load_path is not None:
2933+
if self.parallel_config.eplb_config.load_path is not None:
29342934
self.eplb_state.rearrange(self.model)
2935-
if self.parallel_config.eplb_config.eplb_save_dir is None:
2935+
if self.parallel_config.eplb_config.save_dir is None:
29362936
self.eplb_state = None
29372937

29382938
if (

0 commit comments

Comments (0)