Commit eb9c4d6

Merge 'integration_2025-09-18_1055712761602' into 'master'

merge branch integration_2025-09-18_1055712761602 into master
See merge request: !823

2 parents (8e0f736 + 65be840), merged as commit eb9c4d6

File tree: 99 files changed (+11307 / -4637 lines)

meta.json

Lines changed: 2 additions & 2 deletions
@@ -1,4 +1,4 @@
 {
-  "lasted": "4.0.20",
-  "meta_commit": "e7655ce548cff3db4a0a006c789fd60506584d28"
+  "lasted": "4.0.21",
+  "meta_commit": "95abf0a288a30f7141a386d5b1280125d001e576"
 }

pyproject.toml

Lines changed: 1 addition & 1 deletion
@@ -1,6 +1,6 @@
 [project]
 name = "volcengine-python-sdk"
-version = "4.0.20"
+version = "4.0.21"
 authors = [
     {name = "volc-engine", email = "[email protected]"},
 ]

setup.py

Lines changed: 1 addition & 1 deletion
@@ -3,7 +3,7 @@
 from setuptools import setup, find_packages  # noqa: H301

 NAME = "volcengine-python-sdk"
-VERSION = "4.0.20"
+VERSION = "4.0.21"
 # To install the library, run the following
 #
 # python setup.py install

volcenginesdkarkruntime/resources/images/images.py

Lines changed: 1 addition & 1 deletion
@@ -125,7 +125,7 @@ def generate(
         sequential_image_generation: str | None = None,
         sequential_image_generation_options: SequentialImageGenerationOptions | None = None,
         stream: Optional[Literal[False]] | Literal[True] | NotGiven = NOT_GIVEN,
-    ) -> ImagesResponse:
+    ) -> ImagesResponse | Stream[ImageGenStreamEvent]:
         resp = self._post(
             "/images/generations",
             body={
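
With this change, generate is annotated to return a stream of ImageGenStreamEvent objects when stream=True instead of only an ImagesResponse. A minimal sketch of how a caller might consume that, assuming a synchronous Ark client, an images.generate attribute path, and a prompt parameter (none of which are shown in this hunk):

# Sketch only; the client class, attribute path, and prompt parameter are assumptions.
from volcenginesdkarkruntime import Ark

client = Ark(api_key="${YOUR_API_KEY}")

result = client.images.generate(
    model="${YOUR_ENDPOINT_ID}",
    prompt="a watercolor painting of a lighthouse",
    stream=True,  # per the new annotation, this may now return Stream[ImageGenStreamEvent]
)
for event in result:  # assumed: the stream is iterable event by event
    print(event)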

volcenginesdkarkruntime/resources/responses/input_items.py

Lines changed: 2 additions & 2 deletions
@@ -107,7 +107,7 @@ def list(
             {
                 "after": after,
                 "before": before,
-                "include": include,
+                "include[]": include,
                 "limit": limit,
                 "order": order,
             },
@@ -191,7 +191,7 @@ def list(
             {
                 "after": after,
                 "before": before,
-                "include": include,
+                "include[]": include,
                 "limit": limit,
                 "order": order,
             },
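
Renaming the query key from "include" to "include[]" changes how a list value is serialized on the wire: each element is sent as its own include[]=... pair, which is presumably what the backend now expects. A rough standalone illustration using urllib (the SDK's own query serializer may differ, and the values here are made up):

# Illustrative only: how the renamed key serializes a list of values.
from urllib.parse import urlencode

print(urlencode({"include": ["a", "b"]}, doseq=True))    # include=a&include=b
print(urlencode({"include[]": ["a", "b"]}, doseq=True))  # include%5B%5D=a&include%5B%5D=b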

volcenginesdkarkruntime/resources/responses/responses.py

Lines changed: 4 additions & 1 deletion
@@ -54,7 +54,10 @@ def _add_beta_headers(extra_headers: Headers | None = None, tools: Iterable[Tool
             if extra_headers is None:
                 extra_headers = {}
             extra_headers["ark-beta-web-search"] = "true"
-            break
+        if tool_param.get("type", "") == "mcp":
+            if extra_headers is None:
+                extra_headers = {}
+            extra_headers["ark-beta-mcp"] = "true"
     return extra_headers

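
Dropping the early break and adding the mcp branch lets the helper set more than one beta header in a single pass over the tools. A standalone sketch of the resulting behavior (the web_search condition on the first branch is inferred from the surrounding context, and the tool entries here are plain dicts rather than the SDK's Tool type):

# Sketch of the post-change header logic; not the SDK's actual module.
def _add_beta_headers(extra_headers=None, tools=()):
    for tool_param in tools:
        if tool_param.get("type", "") == "web_search":  # condition inferred, not shown in the hunk
            if extra_headers is None:
                extra_headers = {}
            extra_headers["ark-beta-web-search"] = "true"
        if tool_param.get("type", "") == "mcp":
            if extra_headers is None:
                extra_headers = {}
            extra_headers["ark-beta-mcp"] = "true"
    return extra_headers


# A request mixing both tool types now gets both headers.
print(_add_beta_headers(tools=[{"type": "web_search"}, {"type": "mcp"}]))
# {'ark-beta-web-search': 'true', 'ark-beta-mcp': 'true'}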

volcenginesdkarkruntime/types/images/images.py

Lines changed: 3 additions & 0 deletions
@@ -33,6 +33,9 @@ class Image(BaseModel):
     b64_json: str
     """The Base 64 encoded string of the generated image, if any."""

+    size: str
+    """The size of the generated image, if any."""
+

 class Error(BaseModel):
     message: str
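
The Image model gains a size field alongside b64_json, so callers can read back the dimensions of each generated image. A tiny stand-in sketch of how downstream code might use it (the dataclass below only mirrors the fields shown in this hunk; it is not the SDK model, and the values are made up):

# Illustrative stand-in, not the SDK's Image model.
from dataclasses import dataclass


@dataclass
class ImageStandIn:
    b64_json: str
    size: str  # new field, e.g. "1024x1024"


img = ImageStandIn(b64_json="<base64 bytes>", size="1024x1024")
print(f"decoded payload will be a {img.size} image")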

volcenginesdkcore/api_client.py

Lines changed: 1 addition & 1 deletion
@@ -67,7 +67,7 @@ def __init__(self, configuration=None, header_name=None, header_value=None,
             self.default_headers[header_name] = header_value
         self.cookie = cookie
         # Set default User-Agent.
-        self.user_agent = 'volcstack-python-sdk/4.0.20'
+        self.user_agent = 'volcstack-python-sdk/4.0.21'
         self.client_side_validation = configuration.client_side_validation

         self.interceptor_chain = InterceptorChain()

volcenginesdkcore/configuration.py

Lines changed: 1 addition & 1 deletion
@@ -267,7 +267,7 @@ def to_debug_report(self):
                "OS: {env}\n" \
                "Python Version: {pyversion}\n" \
                "Version of the API: 0.1.0\n" \
-               "SDK Package Version: 4.0.20".\
+               "SDK Package Version: 4.0.21".\
                format(env=sys.platform, pyversion=sys.version)

     @property

Lines changed: 224 additions & 0 deletions
@@ -0,0 +1,224 @@
import asyncio

from volcenginesdkarkruntime import AsyncArk
from volcenginesdkarkruntime.types.responses.response_completed_event import ResponseCompletedEvent
from volcenginesdkarkruntime.types.responses.response_output_item_done_event import ResponseOutputItemDoneEvent
from volcenginesdkarkruntime.types.responses.response_function_tool_call import ResponseFunctionToolCall
from volcenginesdkarkruntime.types.responses.response_mcp_item import McpApprovalRequest

"""
Example code: demonstrates common usage of the Responses API
-------------------------------------------------
1. Using caching in a multi-round conversation (caching)
2. Calling external functions (function calling)
3. Web search tool (web search)
4. Using MCP tools (MCP)
"""

client = AsyncArk(api_key="${YOUR_API_KEY}")


async def main():
    # ==========================================================
    # Example 1: multi-round conversation with caching enabled
    # ==========================================================
    print("Example 1: Use caching for multi-round chat")
    # ---------- Round 1 ----------
    # Note: caching is enabled; store=True stores the conversation server-side so it can be referenced later
    stream = await client.responses.create(
        model="${YOUR_ENDPOINT_ID}",
        input=[
            {"role": "system", "content": "你是豆包,是由字节跳动开发的 AI 人工智能助手"},
            {"role": "user", "content": "你好"},
        ],
        caching={
            "type": "enabled",
        },
        store=True,
        stream=True
    )
    response_id = ""
    async for event in stream:
        print(event)
        if isinstance(event, ResponseCompletedEvent):
            response_id = event.response.id

    # ---------- Round 2 ----------
    # Note: previous_response_id links this round to the previous round's context
    stream = await client.responses.create(
        model="${YOUR_ENDPOINT_ID}",
        previous_response_id=response_id,
        input=[
            {"role": "user", "content": "你是谁"},
        ],
        caching={
            "type": "enabled",
        },
        store=True,
        stream=True
    )
    async for event in stream:
        print(event)

    # ==========================================================
    # Example 2: function calling
    # ==========================================================
    print("Example 2: Use responses API for function calling")

    # ---------- Round 1 ----------
    # The user asks about Beijing's weather; the model triggers a tool call
    stream = await client.responses.create(
        model="${YOUR_ENDPOINT_ID}",
        input=[
            {"role": "user", "content": "请问北京今天天气怎么样"},
        ],
        tools=[
            {
                "type": "function",
                "name": "get_current_weather",
                "description": "获取当前城市的天气",
                "parameters": {
                    "type": "object",
                    "properties": {
                        "location": {
                            "type": "string",
                            "description": "城市名称,例如北京"
                        },
                        "unit": {
                            "type": "string",
                            "description": "温度单位,例如摄氏度"
                        }
                    },
                    "required": ["location"]
                }
            }
        ],
        caching={
            "type": "enabled",
        },
        store=True,
        stream=True
    )
    call_id = ""
    response_id = ""
    async for event in stream:
        print(event)
        if isinstance(event, ResponseCompletedEvent):
            response_id = event.response.id
        if isinstance(event, ResponseOutputItemDoneEvent) and isinstance(event.item, ResponseFunctionToolCall):
            call_id = event.item.call_id

    # ---------- Round 2 ----------
    # Pass the function result back to the model so it can generate the final answer
    stream = await client.responses.create(
        model="${YOUR_ENDPOINT_ID}",
        previous_response_id=response_id,
        input=[
            {
                "type": "function_call_output",
                "call_id": call_id,
                "output": "{\"temperature\": \"30\"}",
            },
        ],
        caching={
            "type": "enabled",
        },
        store=True,
        stream=True
    )
    async for event in stream:
        print(event)

    # ==========================================================
    # Example 3: web search tool
    # ==========================================================
    print("Example 3: Use responses API for web search")
    stream = await client.responses.create(
        model="${YOUR_ENDPOINT_ID}",
        input=[
            {"role": "user", "content": "今天的新闻"},
        ],
        tools=[
            {
                "type": "web_search",
                "limit": 3,
                "sources": ["toutiao"],
                "user_location": {
                    "type": "approximate",
                    "city": "北京",
                    "country": "中国",
                    "region": "北京",
                }
            }
        ],
        store=True,
        stream=True
    )
    async for event in stream:
        print(event)

    # ==========================================================
    # Example 4: using MCP
    # ==========================================================
    # ---------- Round 1 ----------
    # The user asks for repo info; the model triggers an MCP tool call
    stream = await client.responses.create(
        model="${YOUR_ENDPOINT_ID}",
        input=[{
            "role": "user",
            "content": [
                {
                    "type": "input_text",
                    "text": "查看这个 repo的文档 expressjs/express "
                }
            ]
        }],
        tools=[
            {
                "type": "mcp",
                "server_label": "deepwiki-test",
                "server_url": "https://mcp.deepwiki.com/mcp",
                "require_approval": "always"
            }
        ],
        store=True,
        stream=True,
    )
    approval_id = ""
    response_id = ""
    async for event in stream:
        print(event)
        if isinstance(event, ResponseCompletedEvent):
            response_id = event.response.id
        if isinstance(event, ResponseOutputItemDoneEvent) and isinstance(event.item, McpApprovalRequest):
            approval_id = event.item.id

    # ---------- Round 2 ----------
    # The user approves the MCP tool call; the model continues to generate the final answer
    stream = await client.responses.create(
        model="${YOUR_ENDPOINT_ID}",
        input=[
            {
                "type": "mcp_approval_response",
                "approval_request_id": approval_id,
                "approve": True
            }
        ],
        previous_response_id=response_id,
        tools=[
            {
                "type": "mcp",
                "server_label": "deepwiki-test",
                "server_url": "https://mcp.deepwiki.com/mcp",
                "require_approval": "always"
            }
        ],
        store=True,
        stream=True,
    )
    async for event in stream:
        print(event)


if __name__ == "__main__":
    asyncio.run(main())
