Skip to content

Commit 7bc99ef

Browse files
authored
fix
1 parent 46624be commit 7bc99ef

File tree

3 files changed

+5
-6
lines changed

3 files changed

+5
-6
lines changed

lightllm/distributed/communication_op.py

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -36,6 +36,7 @@
3636
)
3737
from lightllm.utils.device_utils import get_device_sm_count
3838
from lightllm.utils.sgl_utils import HAS_SGL_KERNEL
39+
from lightllm.utils.light_utils import HAS_LIGHTLLM_KERNEL
3940
from contextlib import nullcontext, contextmanager
4041

4142
logger = init_logger(__name__)
@@ -56,9 +57,6 @@
5657
HAS_DEEPEP = False
5758
logger.info("deep_ep is not installed, you can't use the api of it.")
5859

59-
# TODO: lightllm_kernel release.
60-
HAS_LIGHTLLM_KERNEL = False
61-
6260

6361
class CustomProcessGroup:
6462
def __init__(self):

lightllm/utils/vllm_utils.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -14,6 +14,7 @@
1414
except:
1515
HAS_VLLM = False
1616
cutlass_scaled_mm = None
17+
vllm_ops = None
1718
logger.warning(
1819
"vllm is not installed, you can't use the api of it. \
1920
You can solve it by running `pip install vllm`."

test/model/model_infer.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -377,7 +377,7 @@ def tppart_model_infer(args, model_kvargs, batch_size, input_len, output_len, an
377377
b_ready_cache_len=b_ready_cache_len,
378378
is_prefill=True,
379379
),
380-
log_dir=f"./logs_sglang_4k/forward_prefill_{model_kvargs['rank_id']}",
380+
log_dir=f"./logs/forward_prefill_{model_kvargs['rank_id']}",
381381
)
382382
except Exception as e:
383383
print(str(e))
@@ -417,7 +417,7 @@ def tppart_model_infer(args, model_kvargs, batch_size, input_len, output_len, an
417417
b_seq_len,
418418
total_token_num,
419419
),
420-
log_dir=f"./logs_sglang_4k/forward_decode_{model_kvargs['rank_id']}",
420+
log_dir=f"./logs/forward_decode_{model_kvargs['rank_id']}",
421421
)
422422
else:
423423
logits = decode(
@@ -442,7 +442,7 @@ def tppart_model_infer(args, model_kvargs, batch_size, input_len, output_len, an
442442
b_seq_len,
443443
total_token_num,
444444
),
445-
log_dir=f"./logs_sglang_4k/forward_decode_{model_kvargs['rank_id']}",
445+
log_dir=f"./logs/forward_decode_{model_kvargs['rank_id']}",
446446
)
447447

448448
prob_out = torch.softmax(logits, dim=-1)

0 commit comments

Comments (0)