@@ -7,6 +7,7 @@
 import json
 import time
 from typing import AsyncGenerator, Dict, List, Optional
+from packaging import version

 import fastapi
 from fastapi import BackgroundTasks, Request
@@ -31,6 +32,7 @@
 from vllm.utils import random_uuid

 try:
+    import fastchat
     from fastchat.conversation import Conversation, SeparatorStyle
     from fastchat.model.model_adapter import get_conversation_template
     _fastchat_available = True
@@ -72,10 +74,16 @@ async def get_gen_prompt(request) -> str:
             "fastchat is not installed. Please install fastchat to use "
             "the chat completion and conversation APIs: `$ pip install fschat`"
         )
+    if version.parse(fastchat.__version__) < version.parse("0.2.23"):
+        raise ImportError(
+            f"fastchat version is low. Current version: {fastchat.__version__} "
+            "Please upgrade fastchat to use: `$ pip install -U fschat`")
+
     conv = get_conversation_template(request.model)
     conv = Conversation(
         name=conv.name,
-        system=conv.system,
+        system_template=conv.system_template,
+        system_message=conv.system_message,
         roles=conv.roles,
         messages=list(conv.messages),  # prevent in-place modification
         offset=conv.offset,
@@ -92,7 +100,7 @@ async def get_gen_prompt(request) -> str:
     for message in request.messages:
         msg_role = message["role"]
         if msg_role == "system":
-            conv.system = message["content"]
+            conv.system_message = message["content"]
         elif msg_role == "user":
             conv.append_message(conv.roles[0], message["content"])
         elif msg_role == "assistant":
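
Taken together, the new `packaging` import, the `import fastchat` inside the try block, and the check at the top of `get_gen_prompt` gate the chat and conversation APIs on fastchat >= 0.2.23. A minimal sketch of that pattern, assuming only that `packaging` is installed; `require_fastchat` is a hypothetical helper used here for illustration, not a vLLM function (vLLM performs the same checks inline):

from packaging import version

try:
    import fastchat  # optional dependency, only needed for the chat APIs
    _fastchat_available = True
except ImportError:
    _fastchat_available = False


def require_fastchat(minimum: str = "0.2.23") -> None:
    """Raise if fastchat is missing or older than `minimum` (hypothetical helper)."""
    if not _fastchat_available:
        raise ImportError(
            "fastchat is not installed: `$ pip install fschat`")
    if version.parse(fastchat.__version__) < version.parse(minimum):
        raise ImportError(
            f"fastchat {fastchat.__version__} is too old, need >= {minimum}: "
            "`$ pip install -U fschat`")

Comparing parsed versions rather than raw strings is what keeps the gate correct for tags like 0.2.10 vs 0.2.9.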
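
The remaining edits track a fastchat API rename: from 0.2.23 a `Conversation` carries `system_template` and `system_message` instead of the single `system` attribute, so both the template copy and the system-role branch switch to the new names. A hedged compatibility sketch, assuming only the attribute names visible in the diff (the `set_system_prompt` helper is illustrative, not part of vLLM or fastchat):

def set_system_prompt(conv, content: str) -> None:
    # fastchat >= 0.2.23 keeps the system prompt in `system_message`,
    # with `system_template` controlling how it is rendered into the prompt;
    # older releases exposed a single `system` attribute instead.
    if hasattr(conv, "system_message"):
        conv.system_message = content
    else:
        conv.system = content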