@@ -147,3 +147,39 @@ def test_internlm7b_pipeline():
@pytest.mark.skipif(not INTERNLM20B_MODEL_PATH.exists(), reason="model file not found")
def test_internlm20b_pipeline():
    """End-to-end generation check for the InternLM-20B model weights."""
    check_pipeline(
        model_path=INTERNLM20B_MODEL_PATH,
        prompt="你好",
        target="你好!有什么我可以帮助你的吗?",
    )
150+
151+
@pytest.mark.skipif(not CHATGLM4_MODEL_PATH.exists(), reason="model file not found")
def test_langchain_api():
    """Smoke-test the LangChain-compatible HTTP endpoint with the ChatGLM4 model."""
    import os
    from unittest.mock import patch

    from fastapi.testclient import TestClient

    # The server module reads the MODEL env var at import time, so the
    # import must happen while the environment is patched.
    with patch.dict(os.environ, {"MODEL": str(CHATGLM4_MODEL_PATH)}):
        from chatglm_cpp.langchain_api import app

    tc = TestClient(app)
    reply = tc.post("/", json={"prompt": "你好", "temperature": 0})
    assert reply.status_code == 200
    body = reply.json()
    assert body["response"] == "你好👋!有什么可以帮助你的吗?"
166+
167+
@pytest.mark.skipif(not CHATGLM4_MODEL_PATH.exists(), reason="model file not found")
def test_openai_api():
    """Smoke-test the OpenAI-compatible chat-completions endpoint with ChatGLM4."""
    import os
    from unittest.mock import patch

    from fastapi.testclient import TestClient

    # Patch MODEL before importing: the app resolves the model path on import.
    with patch.dict(os.environ, {"MODEL": str(CHATGLM4_MODEL_PATH)}):
        from chatglm_cpp.openai_api import app

    tc = TestClient(app)
    payload = {"messages": [{"role": "user", "content": "你好"}], "temperature": 0}
    reply = tc.post("/v1/chat/completions", json=payload)
    assert reply.status_code == 200
    message = reply.json()["choices"][0]["message"]
    assert message["role"] == "assistant"
    assert message["content"] == "你好👋!有什么可以帮助你的吗?"
0 commit comments