Skip to content

Commit 4c3d258

Browse files
committed
small clean up
1 parent fb4b9be commit 4c3d258

File tree

1 file changed

+5
-5
lines changed

1 file changed

+5
-5
lines changed

examples/server/tests/unit/test_chat_completion.py

Lines changed: 5 additions & 5 deletions
Original file line number | Diff line number | Diff line change
@@ -40,15 +40,15 @@ def test_chat_completion(model, system_prompt, user_prompt, max_tokens, re_conte
4040

4141

4242
@pytest.mark.parametrize(
43-
"model,system_prompt,user_prompt,max_tokens,re_content,n_prompt,n_predicted,finish_reason",
43+
"system_prompt,user_prompt,max_tokens,re_content,n_prompt,n_predicted,finish_reason",
4444
[
45-
("llama-2", "Book", "What is the best book", 8, "(Suddenly)+", 77, 8, "length"),
46-
("codellama70b", "You are a coding assistant.", "Write the fibonacci function in c++.", 128, "(Aside|she|felter|alonger)+", 104, 64, "length"),
45+
("Book", "What is the best book", 8, "(Suddenly)+", 77, 8, "length"),
46+
("You are a coding assistant.", "Write the fibonacci function in c++.", 128, "(Aside|she|felter|alonger)+", 104, 64, "length"),
4747
]
4848
)
49-
def test_chat_completion_stream(model, system_prompt, user_prompt, max_tokens, re_content, n_prompt, n_predicted, finish_reason):
49+
def test_chat_completion_stream(system_prompt, user_prompt, max_tokens, re_content, n_prompt, n_predicted, finish_reason):
5050
global server
51-
server.model_alias = None
51+
server.model_alias = None # try using DEFAULT_OAICOMPAT_MODEL
5252
server.start()
5353
res = server.make_stream_request("POST", "/chat/completions", data={
5454
"max_tokens": max_tokens,

0 commit comments

Comments (0)