 from typing import Optional, Dict, Any, Tuple, List
 
 import tuneapi.utils as tu
-from tuneapi.types import Thread, human, Message
+import tuneapi.types as tt
 
 
-class Anthropic:
+class Anthropic(tt.ModelInterface):
     def __init__(
         self,
         id: Optional[str] = "claude-3-haiku-20240307",
         base_url: str = "https://api.anthropic.com/v1/messages",
     ):
-        self.anthropic_model = id
+        self.model_id = id
         self.base_url = base_url
-        self.anthropic_api_token = tu.ENV.ANTHROPIC_TOKEN("")
+        self.api_token = tu.ENV.ANTHROPIC_TOKEN("")
 
     def set_api_token(self, token: str) -> None:
-        self.anthropic_api_token = token
-
-    def tool_to_claude_xml(self, tool):
-        """
-        Deprecated: was written when function calling did not exist in Anthropic API.
-        """
-        tool_signature = ""
-        if len(tool["parameters"]) > 0:
-            for name, p in tool["parameters"]["properties"].items():
-                param = f"""<parameter>
-<name> {name} </name>
-<type> {p['type']} </type>
-<description> {p['description']} </description>
-"""
-                if name in tool["parameters"]["required"]:
-                    param += "<required> true </required>\n"
-                param += "</parameter>"
-                tool_signature += param + "\n"
-            tool_signature = tool_signature.strip()
-
-        constructed_prompt = (
-            "<tool_description>\n"
-            f"<tool_name> {tool['name']} </tool_name>\n"
-            "<description>\n"
-            f"{tool['description']}\n"
-            "</description>\n"
-            "<parameters>\n"
-            f"{tool_signature}\n"
-            "</parameters>\n"
-            "</tool_description>"
-        )
-        return constructed_prompt
+        self.api_token = token
 
     def _process_input(self, chats, token: Optional[str] = None):
-        if not token and not self.anthropic_api_token:  # type: ignore
+        if not token and not self.api_token:  # type: ignore
             raise Exception(
                 "Please set ANTHROPIC_TOKEN environment variable or pass through function"
             )
-        token = token or self.anthropic_api_token
-        if isinstance(chats, Thread):
+        token = token or self.api_token
+        if isinstance(chats, tt.Thread):
             thread = chats
         elif isinstance(chats, str):
-            thread = Thread(human(chats))
+            thread = tt.Thread(tt.human(chats))
         else:
             raise Exception("Invalid input")
 
         # create the anthropic style data
         system = ""
-        if thread.chats[0].role == Message.SYSTEM:
+        if thread.chats[0].role == tt.Message.SYSTEM:
             system = thread.chats[0].value
 
         claude_messages = []
         prev_tool_id = tu.get_random_string(5)
         for m in thread.chats[int(system != "") :]:
-            if m.role == Message.HUMAN:
+            if m.role == tt.Message.HUMAN:
                 msg = {
                     "role": "user",
                     "content": [{"type": "text", "text": m.value.strip()}],
@@ -95,12 +64,12 @@ def _process_input(self, chats, token: Optional[str] = None):
                             },
                         }
                     )
-            elif m.role == Message.GPT:
+            elif m.role == tt.Message.GPT:
                 msg = {
                     "role": "assistant",
                     "content": [{"type": "text", "text": m.value.strip()}],
                 }
-            elif m.role == Message.FUNCTION_CALL:
+            elif m.role == tt.Message.FUNCTION_CALL:
                 _m = tu.from_json(m.value) if isinstance(m.value, str) else m.value
                 msg = {
                     "role": "assistant",
@@ -113,7 +82,7 @@ def _process_input(self, chats, token: Optional[str] = None):
                         }
                     ],
                 }
-            elif m.role == Message.FUNCTION_RESP:
+            elif m.role == tt.Message.FUNCTION_RESP:
                 # _m = tu.from_json(m.value) if isinstance(m.value, str) else m.value
                 msg = {
                     "role": "user",
@@ -139,7 +108,7 @@ def _process_input(self, chats, token: Optional[str] = None):
 
     def chat(
         self,
-        chats: Thread | str,
+        chats: tt.Thread | str,
         model: Optional[str] = None,
         max_tokens: int = 1024,
         temperature: Optional[float] = None,
@@ -170,7 +139,7 @@ def chat(
 
     def stream_chat(
         self,
-        chats: Thread | str,
+        chats: tt.Thread | str,
         model: Optional[str] = None,
         max_tokens: int = 1024,
         temperature: Optional[float] = None,
@@ -182,14 +151,14 @@ def stream_chat(
     ) -> Any:
 
         tools = []
-        if isinstance(chats, Thread):
+        if isinstance(chats, tt.Thread):
             tools = [x.to_dict() for x in chats.tools]
             for t in tools:
                 t["input_schema"] = t.pop("parameters")
         headers, system, claude_messages = self._process_input(chats=chats, token=token)
 
         data = {
-            "model": model or self.anthropic_model,
+            "model": model or self.model_id,
             "max_tokens": max_tokens,
             "messages": claude_messages,
             "system": system,