     ThreadMessageCreated, ThreadMessageDelta, ThreadRunCompleted,
     ThreadRunRequiresAction, ThreadRunStepCreated, ThreadRunStepDelta
 )
+from openai.types.beta.threads.run_submit_tool_outputs_params import ToolOutput
 from openai.types.beta.threads.run import RequiredAction
 from fastapi.responses import StreamingResponse
 from fastapi import APIRouter, Depends, Form, HTTPException
 from pydantic import BaseModel
+
 import json
 
 from utils.weather import get_weather
@@ -39,14 +41,22 @@ class ToolCallOutputs(BaseModel):
 async def post_tool_outputs(client: AsyncOpenAI, data: dict, thread_id: str):
     """
     data is expected to be something like
-
     {
-        "tool_outputs": {"location": "City", "temperature": 70, "conditions": "Sunny"},
+        "tool_outputs": {
+            "output": {"location": "City", "temperature": 70, "conditions": "Sunny"},
+            "tool_call_id": "call_123"
+        },
         "runId": "some-run-id",
     }
     """
     try:
-        outputs_list = [data["tool_outputs"]]
+        outputs_list = [
+            ToolOutput(
+                output=data["tool_outputs"]["output"],
+                tool_call_id=data["tool_outputs"]["tool_call_id"]
+            )
+        ]
+
 
         stream_manager = client.beta.threads.runs.submit_tool_outputs_stream(
             thread_id=thread_id,
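For context, a rough, hypothetical sketch (not part of this change) of how the reworked post_tool_outputs helper could be driven with the new payload shape. The thread, run, and tool-call ids are placeholders, and draining the resumed run with until_done() is just one way to consume the returned stream manager:

# Hypothetical call site for post_tool_outputs (placeholder ids, not from this PR).
data_for_tool = {
    "tool_outputs": {
        # The Assistants API expects the tool output as a string, so structured
        # results are serialized (cf. str(weather_output) in the hunk below).
        "output": '{"location": "City", "temperature": 70, "conditions": "Sunny"}',
        "tool_call_id": "call_123",
    },
    "runId": "run_abc123",
}

stream_manager = await post_tool_outputs(client, data_for_tool, thread_id="thread_abc123")

# The returned AsyncAssistantStreamManager is used as an async context manager;
# here we simply wait for the resumed run to finish streaming.
async with stream_manager as stream:
    await stream.until_done()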
@@ -227,10 +237,13 @@ async def event_generator():
             logger.info(f"Weather output: {weather_output}")
 
             data_for_tool = {
-                "tool_outputs": weather_output,
-                "runId": event.data.id,
+                "tool_outputs": {
+                    "output": str(weather_output),
+                    "tool_call_id": tool_call.id
+                },
+                "runId": run_requires_action_event.data.id,
             }
-
+
             # Afterwards, create a fresh stream_manager for the next iteration
             new_stream_manager: AsyncAssistantStreamManager = await post_tool_outputs(
                 client,
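The hunk above reads tool_call.id and run_requires_action_event.data.id, both of which are bound earlier in event_generator and fall outside this diff. A hypothetical sketch of that surrounding handling follows; the get_weather call signature and the "location" argument name are assumptions, not taken from this change:

# Hypothetical surrounding logic in event_generator (not shown in this diff):
# when the stream reports that the run is waiting on tool output, walk the
# requested tool calls and dispatch the weather function.
if isinstance(event, ThreadRunRequiresAction):
    run_requires_action_event = event
    for tool_call in event.data.required_action.submit_tool_outputs.tool_calls:
        if tool_call.function.name == "get_weather":
            # Function arguments arrive as a JSON string.
            args = json.loads(tool_call.function.arguments)
            weather_output = get_weather(args.get("location"))  # assumed signature
            logger.info(f"Weather output: {weather_output}")
            # ...then build data_for_tool and await post_tool_outputs(...)
            # exactly as shown in the hunk above.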