Commit a5edd13

remove deprecated codes (#3759)
* remove deprecated codes
* update
* update
* remove tritonserver backend
1 parent 5f0647f commit a5edd13

15 files changed: +39 additions, -849 deletions

lmdeploy/archs.py

Lines changed: 4 additions & 14 deletions
@@ -31,35 +31,25 @@ def autoget_backend(model_path: str) -> Literal['turbomind', 'pytorch']:
     Returns:
         str: the backend type.
     """
-    from lmdeploy.pytorch.supported_models import is_supported as is_supported_pytorch
 
-    pytorch_has, turbomind_has = False, False
+    turbomind_has = False
     is_turbomind_installed = True
     try:
         from lmdeploy.turbomind.supported_models import is_supported as is_supported_turbomind
         turbomind_has = is_supported_turbomind(model_path)
     except ImportError:
         is_turbomind_installed = False
 
-    pytorch_has = is_supported_pytorch(model_path)
-
-    try_run_msg = (f'Try to run with pytorch engine because `{model_path}`'
-                   ' is not explicitly supported by lmdeploy. ')
     if is_turbomind_installed:
         if not turbomind_has:
-            if pytorch_has:
-                logger.warning('Fallback to pytorch engine because '
-                               f'`{model_path}` not supported by turbomind'
-                               ' engine.')
-            else:
-                logger.warning(try_run_msg)
+            logger.warning('Fallback to pytorch engine because '
+                           f'`{model_path}` not supported by turbomind'
+                           ' engine.')
     else:
         logger.warning('Fallback to pytorch engine because turbomind engine is not '
                        'installed correctly. If you insist to use turbomind engine, '
                        'you may need to reinstall lmdeploy from pypi or build from '
                        'source and try again.')
-        if not pytorch_has:
-            logger.warning(try_run_msg)
 
     backend = 'turbomind' if turbomind_has else 'pytorch'
     return backend
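
With the pytorch-side capability probe removed, autoget_backend now simply prefers turbomind whenever that engine recognizes the model and otherwise falls back to pytorch. A minimal usage sketch of the resulting behavior (the model id below is illustrative, not taken from this commit):

    from lmdeploy.archs import autoget_backend

    # Illustrative model id; any local model directory or HF repo id works.
    backend = autoget_backend('internlm/internlm2_5-7b-chat')
    print(backend)  # 'turbomind' if the turbomind engine supports it, else 'pytorch'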

lmdeploy/pytorch/chat.py

Lines changed: 2 additions & 3 deletions
@@ -6,8 +6,7 @@
 from typing import Optional
 
 from lmdeploy.messages import GenerationConfig, PytorchEngineConfig
-from lmdeploy.model import ChatTemplateConfig
-from lmdeploy.serve.async_engine import get_names_from_model
+from lmdeploy.model import ChatTemplateConfig, best_match_model
 
 os.environ['TM_LOG_LEVEL'] = 'ERROR'
 
@@ -85,7 +84,7 @@ async def __chat_step(prompt: str):
 async def __chat_loop(model_path: str):
     """Chat loop."""
     __reset_chat_state()
-    _, chat_template_name = get_names_from_model(model_path)
+    chat_template_name = best_match_model(model_path)
     while True:
         prompt = input_prompt(chat_template_name)
         await __chat_step(prompt)
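
The chat loop now resolves its chat template name directly through best_match_model instead of the removed get_names_from_model helper. A short sketch of the new lookup (the model id is illustrative):

    from lmdeploy.model import best_match_model

    # Matches a registered chat template name against the model path or repo id.
    chat_template_name = best_match_model('internlm/internlm2_5-7b-chat')
    print(chat_template_name)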

lmdeploy/pytorch/supported_models.py

Lines changed: 0 additions & 143 deletions
This file was deleted.

lmdeploy/serve/async_engine.py

Lines changed: 2 additions & 19 deletions
@@ -4,7 +4,6 @@
 import atexit
 import concurrent.futures
 import dataclasses
-import os
 import random
 from contextlib import asynccontextmanager, closing
 from copy import deepcopy
@@ -33,23 +32,6 @@
 logger = get_logger('lmdeploy')
 
 
-def get_names_from_model(model_path: str, model_name: str = None):
-    """Get model name and chat template name from workspace model."""
-    triton_model_path = os.path.join(model_path, 'triton_models', 'weights')
-    if not os.path.exists(triton_model_path):
-        chat_template_name = best_match_model(model_path)
-    else:
-        # `model_path` refers to a turbomind model, reading
-        # chat_template_name from the config
-        config_path = os.path.join(triton_model_path, 'config.yaml')
-        with open(config_path, 'r') as f:
-            import yaml
-            config = yaml.safe_load(f)
-        chat_template_name = config['model_config']['chat_template']
-    model_name = model_name if model_name else model_path
-    return model_name, chat_template_name
-
-
 @dataclasses.dataclass
 class GenOut:
     """Pack all response information together."""
@@ -266,7 +248,8 @@ def __init__(self,
         logger.info(f'input backend={backend}, backend_config={backend_config}')
         logger.info(f'input chat_template_config={chat_template_config}')
 
-        self.model_name, chat_template_name = get_names_from_model(model_path, model_name)
+        self.model_name = model_name if model_name else model_path
+        chat_template_name = best_match_model(model_path)
         if chat_template_config is None:
             chat_template_config = ChatTemplateConfig(chat_template_name)
         elif chat_template_config.model_name is None:
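
The deleted get_names_from_model helper special-cased turbomind workspace models by reading triton_models/weights/config.yaml; that branch is dropped entirely. A standalone sketch of the replacement logic, with resolve_names as a hypothetical helper written here only for illustration:

    from typing import Optional

    from lmdeploy.model import best_match_model

    def resolve_names(model_path: str, model_name: Optional[str] = None):
        # Hypothetical mirror of the new AsyncEngine.__init__ logic:
        # no workspace/triton lookup, just a direct template match.
        resolved_name = model_name if model_name else model_path
        chat_template_name = best_match_model(model_path)
        return resolved_name, chat_template_name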

lmdeploy/serve/turbomind/triton_python_backend/README.md

Lines changed: 0 additions & 85 deletions
This file was deleted.
