
Commit 47b0adc

Commit message: fix

Parent: 0f90ac9

3 files changed (+17, -17 lines)

lightllm/server/router/batch.py

Lines changed: 5 additions & 5 deletions
@@ -69,23 +69,23 @@ def is_clear(self):
     def merge(self, mini_batch: "Batch"):
         if mini_batch is None:
             return
-
+
         for _req in mini_batch.reqs:
             self.reqs.append(_req)
         self.id_to_reqs = {req.request_id: req for req in self.reqs}
         return
-
+
     @staticmethod
-    def merge_two_batch(batch1: "Batch", batch2: "Batch"):
+    def merge_two_batch(batch1: "Batch", batch2: "Batch") -> "Batch":
         if batch1 is None and batch2 is None:
             return None
-
+
         not_none_batch = batch1 if batch1 is not None else batch2
 
         merge_batch = Batch(-1, [], not_none_batch.dp_size_in_node)
         merge_batch.merge(batch1)
         merge_batch.merge(batch2)
-        return
+        return merge_batch
 
     def __repr__(self):
         return f"batch_id={self.batch_id}, " f"reqs={self.reqs}, "

lightllm/server/router/manager.py

Lines changed: 2 additions & 2 deletions
@@ -295,9 +295,9 @@ def generate_new_batch(self):
 
         # Scheduling must account for the currently running batch as well as requests that were scheduled but have not yet run inference.
         new_batch = self.req_queue.generate_new_batch(
-            Batch.merge(self.running_batch, self.schedule_new_batch), limit_router_queue_length
+            Batch.merge_two_batch(self.running_batch, self.schedule_new_batch), limit_router_queue_length
         )
-        self.schedule_new_batch = Batch.merge(self.schedule_new_batch, new_batch)
+        self.schedule_new_batch = Batch.merge_two_batch(self.schedule_new_batch, new_batch)
         return
 
     async def _step(self):
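
Why the call sites needed to change, reading only from the diff: Batch.merge is an in-place instance method that returns None, so invoking it as a two-argument helper always handed None downstream and would raise AttributeError whenever self.running_batch was None; the static merge_two_batch accepts None on either side and, after the batch.py fix above, returns the merged batch. A toy illustration with a hypothetical Bucket class, not the real router code:

from typing import List, Optional


class Bucket:
    # Hypothetical toy class standing in for Batch at the call site.
    def __init__(self, items: List[int]):
        self.items = items

    def merge(self, other: Optional["Bucket"]) -> None:
        # Mutates self and returns None, like Batch.merge.
        if other is not None:
            self.items.extend(other.items)


running, scheduled = Bucket([1]), Bucket([2, 3])

# Old call shape: the expression itself is always None, so the code
# downstream never saw the merged batch.
result = Bucket.merge(running, scheduled)
print(result)          # None
print(running.items)   # [1, 2, 3] -- the merge happened only as a side effect

# And when nothing is running, the old shape crashes outright because
# merge's `self` is None:
try:
    Bucket.merge(None, scheduled)
except AttributeError as exc:
    print(f"old call shape with no running batch: {exc}")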

lightllm/server/router/stats.py

Lines changed: 10 additions & 10 deletions
@@ -4,8 +4,8 @@
 
 logger = init_logger(__name__)
 
-class Stats:
 
+class Stats:
     def __init__(self, log_status, log_stats_interval) -> None:
         self.log_stats = log_status
         self.log_stats_interval = log_stats_interval
@@ -14,16 +14,16 @@ def __init__(self, log_status, log_stats_interval) -> None:
         self.output_tokens = 0
         self.prompt_tokens = 0
         return
-
+
     def count_prompt_tokens(self, run_batch: Batch):
-        if self.log_stats:
+        if self.log_stats and run_batch is not None:
             tokens = run_batch.input_tokens()
             self.prompt_tokens += tokens
             self.all_tokens += tokens
         return
-
+
     def count_output_tokens(self, run_batch: Batch):
-        if self.log_stats:
+        if self.log_stats and run_batch is not None:
             tokens = len(run_batch.reqs)
             self.output_tokens += tokens
             self.all_tokens += tokens
@@ -35,13 +35,13 @@ def print_stats(self):
 
         now = time.time()
         if now - self.last_log_time > self.log_stats_interval:
-            logger.debug(f"Avg tokens(prompt+generate) throughput: {self.all_tokens/(now-self.last_log_time):8.3f} tokens/s\n"
-                         f"Avg prompt tokens throughput: {self.prompt_tokens/(now-self.last_log_time):8.3f} tokens/s\n"
-                         f"Avg generate tokens throughput: {self.output_tokens/(now-self.last_log_time):8.3f} tokens/s")
+            logger.debug(
+                f"Avg tokens(prompt+generate) throughput: {self.all_tokens/(now-self.last_log_time):8.3f} tokens/s\n"
+                f"Avg prompt tokens throughput: {self.prompt_tokens/(now-self.last_log_time):8.3f} tokens/s\n"
+                f"Avg generate tokens throughput: {self.output_tokens/(now-self.last_log_time):8.3f} tokens/s"
+            )
             self.all_tokens = 0
             self.output_tokens = 0
             self.prompt_tokens = 0
             self.last_log_time = now
         return
-
-
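
The stats.py guards follow from the scheduler change: the batch handed to these counters can now legitimately be None (for example when merge_two_batch receives two None inputs), so count_prompt_tokens and count_output_tokens skip counting instead of dereferencing None. A minimal sketch with a hypothetical TinyBatch stand-in (the real Batch and Stats carry more state):

import time
from typing import List, Optional


class TinyBatch:
    # Hypothetical stand-in exposing only what the counters use.
    def __init__(self, prompt_lens: List[int]):
        self.prompt_lens = prompt_lens
        self.reqs = list(range(len(prompt_lens)))  # one running request per prompt

    def input_tokens(self) -> int:
        return sum(self.prompt_lens)


class Stats:
    def __init__(self, log_status: bool, log_stats_interval: float) -> None:
        self.log_stats = log_status
        self.log_stats_interval = log_stats_interval
        self.last_log_time = time.time()
        self.all_tokens = 0
        self.output_tokens = 0
        self.prompt_tokens = 0

    def count_prompt_tokens(self, run_batch: Optional[TinyBatch]) -> None:
        # The added `run_batch is not None` guard makes a None batch a no-op.
        if self.log_stats and run_batch is not None:
            tokens = run_batch.input_tokens()
            self.prompt_tokens += tokens
            self.all_tokens += tokens

    def count_output_tokens(self, run_batch: Optional[TinyBatch]) -> None:
        if self.log_stats and run_batch is not None:
            tokens = len(run_batch.reqs)
            self.output_tokens += tokens
            self.all_tokens += tokens


stats = Stats(log_status=True, log_stats_interval=10.0)
stats.count_prompt_tokens(None)                # ignored instead of raising AttributeError
stats.count_prompt_tokens(TinyBatch([5, 7]))   # counts 12 prompt tokens
stats.count_output_tokens(TinyBatch([5, 7]))   # counts 2 decode tokens (one per request)
assert (stats.prompt_tokens, stats.output_tokens, stats.all_tokens) == (12, 2, 14)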
