4 files changed, +19 −8 lines

@@ -96,7 +96,11 @@ async def get_response(
             logger.debug("Received model response")
         else:
             logger.debug(
-                f"LLM resp:\n{json.dumps(response.choices[0].message.model_dump(), indent=2)}\n"
+                f"LLM resp:\n{
+                    json.dumps(
+                        response.choices[0].message.model_dump(), indent=2, ensure_ascii=False
+                    )
+                }\n"
             )
 
         if hasattr(response, "usage"):
@@ -263,8 +267,8 @@ async def _fetch_response(
         else:
             logger.debug(
                 f"Calling Litellm model: {self.model}\n"
-                f"{json.dumps(converted_messages, indent=2)}\n"
-                f"Tools:\n{json.dumps(converted_tools, indent=2)}\n"
+                f"{json.dumps(converted_messages, indent=2, ensure_ascii=False)}\n"
+                f"Tools:\n{json.dumps(converted_tools, indent=2, ensure_ascii=False)}\n"
                 f"Stream: {stream}\n"
                 f"Tool choice: {tool_choice}\n"
                 f"Response format: {response_format}\n"

@@ -248,8 +248,8 @@ async def _fetch_response(
             logger.debug("Calling LLM")
         else:
             logger.debug(
-                f"{json.dumps(converted_messages, indent=2)}\n"
-                f"Tools:\n{json.dumps(converted_tools, indent=2)}\n"
+                f"{json.dumps(converted_messages, indent=2, ensure_ascii=False)}\n"
+                f"Tools:\n{json.dumps(converted_tools, indent=2, ensure_ascii=False)}\n"
                 f"Stream: {stream}\n"
                 f"Tool choice: {tool_choice}\n"
                 f"Response format: {response_format}\n"

@@ -93,7 +93,13 @@ async def get_response(
         else:
             logger.debug(
                 "LLM resp:\n"
-                f"{json.dumps([x.model_dump() for x in response.output], indent=2)}\n"
+                f"""{
+                    json.dumps(
+                        [x.model_dump() for x in response.output],
+                        indent=2,
+                        ensure_ascii=False,
+                    )
+                }\n"""
             )
 
         usage = (
@@ -237,8 +243,8 @@ async def _fetch_response(
         else:
             logger.debug(
                 f"Calling LLM {self.model} with input:\n"
-                f"{json.dumps(list_input, indent=2)}\n"
-                f"Tools:\n{json.dumps(converted_tools.tools, indent=2)}\n"
+                f"{json.dumps(list_input, indent=2, ensure_ascii=False)}\n"
+                f"Tools:\n{json.dumps(converted_tools.tools, indent=2, ensure_ascii=False)}\n"
                 f"Stream: {stream}\n"
                 f"Tool choice: {tool_choice}\n"
                 f"Response format: {response_format}\n"

@@ -7,6 +7,7 @@
 def _assert_must_pass_tool_call_id() -> str:
     raise ValueError("tool_call_id must be passed to ToolContext")
 
+
 @dataclass
 class ToolContext(RunContextWrapper[TContext]):
     """The context of a tool call."""