@@ -249,8 +249,8 @@ async def async_respond(self, user_input=None):
         provider = self.provider  # Keep existing provider if set
         max_tokens = self.max_tokens  # Keep existing max_tokens if set
 
-        if self.model == "claude-3-5-sonnet-latest":
-            # For some reason, Litellm can't find the model info for claude-3-5-sonnet-latest
+        if self.model in ["claude-3-5-sonnet-latest", "claude-3-5-sonnet-20241022"]:
+            # For some reason, Litellm can't find the model info for these
             provider = "anthropic"
 
         # Only try to get model info if we need either provider or max_tokens
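As context for the hunk above: the alias check exists because LiteLLM's metadata lookup does not know these Anthropic model names, so the provider is pinned before any lookup is attempted. A minimal sketch of that resolution order, assuming a `litellm.get_model_info`-style lookup and a hypothetical `resolve_provider` helper that is not part of this codebase:

import litellm

# Aliases LiteLLM reportedly has no metadata for (taken from the diff).
ANTHROPIC_ALIASES = {"claude-3-5-sonnet-latest", "claude-3-5-sonnet-20241022"}

def resolve_provider(model, provider=None, max_tokens=None):
    # Prefer explicitly configured values, then the alias table,
    # then LiteLLM's model metadata as a last resort.
    if provider is None and model in ANTHROPIC_ALIASES:
        provider = "anthropic"
    if provider is None or max_tokens is None:
        info = litellm.get_model_info(model)  # may raise for unknown models
        provider = provider or info.get("litellm_provider")
        max_tokens = max_tokens or info.get("max_tokens")
    return provider, max_tokens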
@@ -294,33 +294,33 @@ async def async_respond(self, user_input=None):
 
         self._spinner.start()
 
-        enable_prompt_caching = False
         betas = [COMPUTER_USE_BETA_FLAG]
 
-        if enable_prompt_caching:
-            betas.append(PROMPT_CACHING_BETA_FLAG)
-            image_truncation_threshold = 50
-            system["cache_control"] = {"type": "ephemeral"}
-
         edit = ToolRenderer()
 
         if (
             provider == "anthropic" and not self.serve
         ):  # Server can't handle Anthropic yet
             if self._client is None:
-                if self.api_key:
-                    self._client = Anthropic(api_key=self.api_key)
-                else:
-                    self._client = Anthropic()
+                anthropic_params = {}
+                if self.api_key is not None:
+                    anthropic_params["api_key"] = self.api_key
+                if self.api_base is not None:
+                    anthropic_params["base_url"] = self.api_base
+                self._client = Anthropic(**anthropic_params)
 
             if self.debug:
                 print("Sending messages:", self.messages, "\n")
 
+            model = self.model
+            if model.startswith("anthropic/"):
+                model = model[len("anthropic/") :]
+
             # Use Anthropic API which supports betas
             raw_response = self._client.beta.messages.create(
                 max_tokens=max_tokens,
                 messages=self.messages,
-                model=self.model,
+                model=model,
                 system=system["text"],
                 tools=tool_collection.to_params(),
                 betas=betas,
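A standalone reading of the client changes above, as a sketch: only explicitly configured values are forwarded to the Anthropic SDK (so its own environment-variable fallbacks still apply), and a LiteLLM-style "anthropic/" prefix is stripped before calling the native API. The helper names here are illustrative, not part of the codebase:

from anthropic import Anthropic

def build_anthropic_client(api_key=None, api_base=None):
    # Pass only what was configured; Anthropic() falls back to
    # ANTHROPIC_API_KEY / its default base URL for anything omitted.
    kwargs = {}
    if api_key is not None:
        kwargs["api_key"] = api_key
    if api_base is not None:
        kwargs["base_url"] = api_base
    return Anthropic(**kwargs)

def native_model_name(model):
    # "anthropic/claude-3-5-sonnet-latest" is a LiteLLM route name;
    # the native Anthropic API expects just "claude-3-5-sonnet-latest".
    prefix = "anthropic/"
    return model[len(prefix):] if model.startswith(prefix) else model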
@@ -698,7 +698,7 @@ async def async_respond(self, user_input=None):
                 "temperature": self.temperature,
                 "api_key": self.api_key,
                 "api_version": self.api_version,
-                "parallel_tool_calls": False,
+                # "parallel_tool_calls": True,
             }
 
             if self.tool_calling:
@@ -707,13 +707,32 @@ async def async_respond(self, user_input=None):
                 params["stream"] = False
                 stream = False
 
-            if self.debug:
-                print(params)
+            if provider == "anthropic" and self.tool_calling:
+                params["tools"] = tool_collection.to_params()
+                for t in params["tools"]:
+                    t["function"] = {"name": t["name"]}
+                    if t["name"] == "computer":
+                        t["function"]["parameters"] = {
+                            "display_height_px": t["display_height_px"],
+                            "display_width_px": t["display_width_px"],
+                            "display_number": t["display_number"],
+                        }
+                params["extra_headers"] = {
+                    "anthropic-beta": "computer-use-2024-10-22"
+                }
 
-            if self.debug:
-                print("Sending request...", params)
+            # if self.debug:
+            #     print("Sending request...", params)
+            # time.sleep(3)
 
-            time.sleep(3)
+            if self.debug:
+                print("Messages:")
+                for m in self.messages:
+                    if len(str(m)) > 1000:
+                        print(str(m)[:1000] + "...")
+                    else:
+                        print(str(m))
+                print()
 
             raw_response = litellm.completion(**params)
 
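The block added above appears to adapt Anthropic's native computer-use tool specs so they also carry an OpenAI-style `function` entry that LiteLLM will accept, and opts into the beta via an extra header. A rough, self-contained sketch of that transformation; the input dicts are guessed from the keys the diff reads, not taken from the real `tool_collection.to_params()` output:

# Hypothetical tool specs shaped like the ones the diff iterates over.
tools = [
    {
        "type": "computer_20241022",
        "name": "computer",
        "display_width_px": 1280,
        "display_height_px": 800,
        "display_number": 1,
    },
    {"type": "bash_20241022", "name": "bash"},
]

for t in tools:
    # Mirror each tool under a "function" key so OpenAI-style validation passes;
    # the computer tool also carries its display settings as "parameters".
    t["function"] = {"name": t["name"]}
    if t["name"] == "computer":
        t["function"]["parameters"] = {
            "display_height_px": t["display_height_px"],
            "display_width_px": t["display_width_px"],
            "display_number": t["display_number"],
        }

extra_headers = {"anthropic-beta": "computer-use-2024-10-22"}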
@@ -856,6 +875,8 @@ async def async_respond(self, user_input=None):
             else:
                 user_approval = input("\nRun tool(s)? (y/n): ").lower().strip()
 
+        user_content_to_add = []
+
         for tool_call in message.tool_calls:
             function_arguments = json.loads(tool_call.function.arguments)
 
@@ -869,43 +890,46 @@ async def async_respond(self, user_input=None):
 
             if self.tool_calling:
                 if result.base64_image:
-                    # Add image to tool result
                     self.messages.append(
                         {
                             "role": "tool",
-                            "content": "The user will reply with the image outputted by the tool.",
+                            "content": "The user will reply with the tool's image output.",
                             "tool_call_id": tool_call.id,
                         }
                     )
-                    self.messages.append(
+                    user_content_to_add.append(
                         {
-                            "role": "user",
-                            "content": [
-                                {
-                                    "type": "image_url",
-                                    "image_url": {
-                                        "url": f"data:image/png;base64,{result.base64_image}",
-                                    },
-                                }
-                            ],
-                        }
-                    )
-                else:
-                    self.messages.append(
-                        {
-                            "role": "tool",
-                            "content": json.dumps(dataclasses.asdict(result)),
-                            "tool_call_id": tool_call.id,
+                            "type": "image_url",
+                            "image_url": {
+                                "url": f"data:image/png;base64,{result.base64_image}",
+                            },
                         }
                     )
             else:
-                self.messages.append(
-                    {
-                        "role": "user",
-                        "content": "This was the output of the tool call. What does it mean/what's next?"
-                        + json.dumps(dataclasses.asdict(result)),
-                    }
+                text_content = (
+                    "This was the output of the tool call. What does it mean/what's next?\n"
+                    + (result.output or "")
                 )
+                if result.base64_image:
+                    content = [
+                        {"type": "text", "text": text_content},
+                        {
+                            "type": "image",
+                            "image_url": {
+                                "url": "data:image/png;base64,"
+                                + result.base64_image
+                            },
+                        },
+                    ]
+                else:
+                    content = text_content
+
+                self.messages.append({"role": "user", "content": content})
+
+        if user_content_to_add:
+            self.messages.append(
+                {"role": "user", "content": user_content_to_add}
+            )
 
     def _ask_user_approval(self) -> str:
         """Ask user for approval to run a tool"""
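Taken together, this hunk stops appending one user message per screenshot and instead accumulates image parts in `user_content_to_add`, then (assuming the final `if user_content_to_add:` block sits after the tool-call loop) emits them as a single user turn. A condensed sketch of that flow; the function name is hypothetical and the `result.output` / `result.base64_image` fields are assumed from the diff:

def record_tool_results(messages, tool_calls, results, tool_calling=True):
    # Sketch only: mirrors the diff's message shapes, not the real method.
    user_content_to_add = []
    for tool_call, result in zip(tool_calls, results):
        if tool_calling and result.base64_image:
            messages.append(
                {
                    "role": "tool",
                    "content": "The user will reply with the tool's image output.",
                    "tool_call_id": tool_call.id,
                }
            )
            user_content_to_add.append(
                {
                    "type": "image_url",
                    "image_url": {"url": f"data:image/png;base64,{result.base64_image}"},
                }
            )
    if user_content_to_add:
        # One combined user message carrying every screenshot from this batch.
        messages.append({"role": "user", "content": user_content_to_add})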