Skip to content

Commit 698c6a2

Browse files
committed
fix
1 parent b474d73 commit 698c6a2

File tree

3 files changed

+3
-1
lines changed

3 files changed

+3
-1
lines changed

lightllm/common/basemodel/basemodel.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -460,7 +460,7 @@ def create_inferstate(cur_batch: PrefillMicroBatch, batch_index):
             infer_state.b_ready_cache_len = torch.zeros_like(
                 cur_batch.b_seq_len, dtype=cur_batch.b_seq_len.dtype, device=cur_batch.b_seq_len.device
             )
-            infer_state.multimodal_params = None
+            infer_state.multimodal_params = cur_batch.multimodal_params
             infer_state.microbatch_index = batch_index

             infer_state.mem_manager = self.mem_manager

lightllm/common/basemodel/microbatch_overlap_objs.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -25,3 +25,4 @@ class PrefillMicroBatch:
     b_start_loc: torch.Tensor
     b_seq_len: torch.Tensor
     b_ready_cache_len: torch.Tensor
+    multimodal_params: list

lightllm/server/router/model_infer/mode_backend/dp_backend/pre_process.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -336,6 +336,7 @@ def _padded_prepare_prefill_micro_batch(req_objs: List[InferReq], is_multimodal=
         b_start_loc=nopad_b_start_loc,
         b_seq_len=nopad_b_seq_len,
         b_ready_cache_len=b_ready_cache_len,
+        multimodal_params=batch_multimodal_params,
     )

     return micro_batch, run_reqs, padded_req_num

0 commit comments

Comments
 (0)