@@ -99,6 +99,7 @@ def chat(
     model: Optional[str] = None,
     max_tokens: int = 1024,
     temperature: float = 1,
+    parallel_tool_calls: bool = False,
     token: Optional[str] = None,
     extra_headers: Optional[Dict[str, str]] = None,
     **kwargs,
@@ -109,6 +110,7 @@ def chat(
         model=model,
         max_tokens=max_tokens,
         temperature=temperature,
+        parallel_tool_calls=parallel_tool_calls,
         token=token,
         extra_headers=extra_headers,
         raw=False,
@@ -126,6 +128,7 @@ def stream_chat(
     model: Optional[str] = None,
     max_tokens: int = 1024,
     temperature: float = 1,
+    parallel_tool_calls: bool = False,
     token: Optional[str] = None,
     timeout=(5, 60),
     extra_headers: Optional[Dict[str, str]] = None,
@@ -142,6 +145,7 @@ def stream_chat(
         "model": model or self.model_id,
         "stream": True,
         "max_tokens": max_tokens,
+        "parallel_tool_calls": parallel_tool_calls,
     }
     if isinstance(chats, tt.Thread) and len(chats.tools):
         data["tools"] = [
0 commit comments