3 files changed, +4 −4 lines changed

hello-providers/anthropic

@@ -15,8 +15,8 @@
 models = {
     "openai": ["gpt-4o", "gpt-3.5-turbo"],
     "claude": [
-        "claude-3-opus-20240229",
-        "claude-3-5-sonnet-20240620",
+        "claude-3-opus-latest",
+        "claude-3-5-sonnet-latest",
         "claude-3-haiku-20240307",
     ],
     "google": ["gemini-1.5-pro-latest"],
@@ -35,7 +35,7 @@ async def _():
     messages = chat.messages(format="anthropic")
     # Create a response message stream
     response = await llm.messages.create(
-        model="claude-3-5-sonnet-20241022",
+        model="claude-3-5-sonnet-latest",
         messages=messages,
         stream=True,
         max_tokens=1000,
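For context, a minimal sketch of how a stream created this way could be consumed. It assumes llm is an anthropic.AsyncAnthropic client and messages is the list returned by chat.messages(format="anthropic"); the client construction and the collect_text helper are illustrative, not part of this change.

import anthropic

llm = anthropic.AsyncAnthropic()  # reads ANTHROPIC_API_KEY from the environment

async def collect_text(messages) -> str:
    # Request a streamed response, mirroring the call in the hunk above.
    response = await llm.messages.create(
        model="claude-3-5-sonnet-latest",
        messages=messages,
        stream=True,
        max_tokens=1000,
    )
    parts = []
    async for event in response:
        # Text arrives incrementally in content_block_delta / text_delta events.
        if event.type == "content_block_delta" and event.delta.type == "text_delta":
            parts.append(event.delta.text)
    return "".join(parts)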
@@ -24,7 +24,7 @@


 MODEL_INFO = {
-    "name": "claude-3-5-sonnet-20241022",
+    "name": "claude-3-5-sonnet-latest",
     # DISCLAIMER: Anthropic has not yet released a public tokenizer for Claude models,
     # so this uses the generic default provided by Chat() (for now). That is probably
     # ok though since the default tokenizer likely overestimates the token count.
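As a side note on that disclaimer, a deliberately conservative character-based count is one way a generic default tokenizer can overestimate; the helper below is purely illustrative and not part of this repository.

def estimate_tokens(text: str) -> int:
    # Illustrative fallback: roughly one token per ~3 characters, rounded up,
    # which tends to overcount for English text and so errs on the safe side.
    return max(1, -(-len(text) // 3))

# Example: a quick budget check before sending a prompt.
prompt = "Summarize the release notes in two sentences."
assert estimate_tokens(prompt) <= 1000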