
Commit 6124b6e

Unit tests

1 parent 5c26cbf commit 6124b6e

File tree

10 files changed: +482 -20 lines changed

pyproject.toml

Lines changed: 1 addition & 0 deletions

@@ -31,6 +31,7 @@ pytest-mock = "^3.10.0"
 pytest-socket = "^0.6.0"
 syrupy = { version = "^4.0.2", python = ">=3.8.1,<3.9.7 || >3.9.7,<3.12" }
 requests-mock = "^1.11.0"
+respx = "0.21.1"

 [tool.poetry.group.test_langchain.dependencies]
 pytest = "^7.3.0"
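This commit pins respx as a new test dependency. respx is a mocking library for httpx; for context, a minimal sketch of how it is typically used in a unit test (the endpoint URL and payload here are illustrative assumptions, not taken from this commit):

import httpx
import respx


@respx.mock
def test_mocked_chat_endpoint() -> None:
    # Register a mocked route; any POST to this illustrative URL returns canned JSON.
    route = respx.post("https://open.bigmodel.cn/api/paas/v4/chat/completions").mock(
        return_value=httpx.Response(200, json={"model": "glm-4"})
    )

    response = httpx.post("https://open.bigmodel.cn/api/paas/v4/chat/completions", json={})

    assert route.called
    assert response.json()["model"] == "glm-4"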

tests/integration_tests/demo.jsonl

File mode changed from 100755 to 100644.

tests/integration_tests/img/MetaGLM.png

File mode changed from 100755 to 100644.

tests/integration_tests/test_charglm3.py

Lines changed: 2 additions & 0 deletions
@@ -1,3 +1,5 @@
+# -*- coding: utf-8 -*-
+
 from zhipuai import ZhipuAI
 import zhipuai

Lines changed: 53 additions & 0 deletions
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+import unittest
+from typing import Type, cast, Iterable
+import pytest
+import httpx
+import inspect
+
+import pydantic
+from zhipuai import APIResponseValidationError
+from zhipuai.core import BaseModel, StreamResponse, get_args, HttpClient, construct_type
+from zhipuai.core._base_type import ResponseT
+from zhipuai.types.chat.chat_completion_chunk import ChatCompletionChunk
+
+
+class MockClient:
+    def _process_response_data(
+        self,
+        *,
+        data: object,
+        cast_type: Type[ResponseT],
+        response: httpx.Response,
+    ) -> ResponseT:
+        pass
+
+
+def test_stream_cls_chunk() -> None:
+    MockClient._process_response_data = HttpClient._process_response_data
+
+    def body() -> Iterable[bytes]:
+        yield b'data: {"id":"8635243129834723621","created":1715329207,"model":"glm-4","choices":[{"index":0,"delta":{"role":"assistant","content":"1"}}]}\n\n'
+        yield b'data: {"id":"8635243129834723621","created":1715329207,"model":"glm-4","choices":[{"index":0,"delta":{"role":"assistant","content":"2"}}]}\n\n'
+
+    _stream_cls = StreamResponse[ChatCompletionChunk]
+    http_response = httpx.Response(
+        status_code=200,
+        content=body()
+    )
+
+    stream_cls = _stream_cls(cast_type=cast(type, get_args(_stream_cls)[0]),
+                             response=http_response,
+                             client=MockClient()
+                             )
+    chat_completion_chunk1 = next(stream_cls)
+
+    assert chat_completion_chunk1.choices[0].delta.content == "1"
+    assert chat_completion_chunk1.choices[0].delta.role == "assistant"
+    assert chat_completion_chunk1.choices[0].index == 0
+    assert chat_completion_chunk1.model == "glm-4"
+    chat_completion_chunk2 = next(stream_cls)
+    assert chat_completion_chunk2.choices[0].delta.content == "2"
+    assert chat_completion_chunk2.choices[0].delta.role == "assistant"
+    assert chat_completion_chunk2.choices[0].index == 0
+    assert chat_completion_chunk2.model == "glm-4"
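For context on what this test asserts: StreamResponse is fed raw server-sent-events bytes and is expected to yield one parsed ChatCompletionChunk per "data:" event. A minimal, dependency-free sketch of that SSE framing (illustrative only, not part of this commit; the helper name iter_sse_data is invented here):

import json
from typing import Iterable, Iterator


def iter_sse_data(lines: Iterable[bytes]) -> Iterator[dict]:
    # Yield the JSON payload of each "data:" event in an SSE byte stream.
    for raw in lines:
        line = raw.strip()
        if line.startswith(b"data:") and not line.endswith(b"[DONE]"):
            yield json.loads(line[len(b"data:"):].strip())


events = iter_sse_data([
    b'data: {"id":"1","choices":[{"index":0,"delta":{"content":"1"}}]}\n\n',
    b'data: {"id":"1","choices":[{"index":0,"delta":{"content":"2"}}]}\n\n',
])
assert [e["choices"][0]["delta"]["content"] for e in events] == ["1", "2"]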
