diff --git a/.github/ISSUE_TEMPLATE/bug_report.yml b/.github/ISSUE_TEMPLATE/bug_report.yml
index 9eb505de738..ab65dca8e8c 100644
--- a/.github/ISSUE_TEMPLATE/bug_report.yml
+++ b/.github/ISSUE_TEMPLATE/bug_report.yml
@@ -28,7 +28,7 @@ body:
         - **OS**: macOS
         - **Continue version**: v0.9.4
         - **IDE version**: VSCode 1.85.1
-        - Model: Claude Sonnet 3.5
+        - Model: Claude Sonnet 4.5
         - Agent configuration
     value: |
       - OS:
diff --git a/core/config/workspace/workspaceBlocks.ts b/core/config/workspace/workspaceBlocks.ts
index 9ba16b40092..d055b43793e 100644
--- a/core/config/workspace/workspaceBlocks.ts
+++ b/core/config/workspace/workspaceBlocks.ts
@@ -1,9 +1,8 @@
 import {
   BlockType,
   ConfigYaml,
-  createRuleMarkdown,
   createPromptMarkdown,
-  RULE_FILE_EXTENSION,
+  createRuleMarkdown,
 } from "@continuedev/config-yaml";
 import * as YAML from "yaml";
 import { IDE } from "../..";
@@ -41,9 +40,9 @@ function getContentsForNewBlock(blockType: BlockType): ConfigYaml {
     configYaml.models = [
       {
         provider: "anthropic",
-        model: "claude-3-7-sonnet-latest",
+        model: "claude-sonnet-4-latest",
         apiKey: "${{ secrets.ANTHROPIC_API_KEY }}",
-        name: "Claude 3.7 Sonnet",
+        name: "Claude Sonnet 4",
         roles: ["chat", "edit"],
       },
     ];
diff --git a/core/llm/llms/Anthropic.vitest.ts b/core/llm/llms/Anthropic.vitest.ts
index 5c623b05e5d..5a423ff9102 100644
--- a/core/llm/llms/Anthropic.vitest.ts
+++ b/core/llm/llms/Anthropic.vitest.ts
@@ -134,7 +134,7 @@ describe("Anthropic", () => {
   test("streamChat should send a valid request", async () => {
     const anthropic = new Anthropic({
       apiKey: "test-api-key",
-      model: "claude-3-5-sonnet-latest",
+      model: "claude-sonnet-4",
       apiBase: "https://api.anthropic.com/v1/",
     });

@@ -155,7 +155,7 @@ describe("Anthropic", () => {
         "x-api-key": "test-api-key",
       },
       body: {
-        model: "claude-3-5-sonnet-latest",
+        model: "claude-sonnet-4",
         max_tokens: 8192,
         stream: true,
         messages: [
@@ -177,7 +177,7 @@ describe("Anthropic", () => {
   test("chat should send a valid request", async () => {
     const anthropic = new Anthropic({
       apiKey: "test-api-key",
-      model: "claude-3-5-sonnet-latest",
+      model: "claude-sonnet-4",
       apiBase: "https://api.anthropic.com/v1/",
     });

@@ -198,7 +198,7 @@ describe("Anthropic", () => {
         "x-api-key": "test-api-key",
       },
       body: {
-        model: "claude-3-5-sonnet-latest",
+        model: "claude-sonnet-4",
         max_tokens: 8192,
         stream: true,
         messages: [
@@ -220,7 +220,7 @@ describe("Anthropic", () => {
   test("streamComplete should send a valid request", async () => {
     const anthropic = new Anthropic({
       apiKey: "test-api-key",
-      model: "claude-3-5-sonnet-latest",
+      model: "claude-sonnet-4",
       apiBase: "https://api.anthropic.com/v1/",
     });

@@ -238,7 +238,7 @@ describe("Anthropic", () => {
         "x-api-key": "test-api-key",
       },
       body: {
-        model: "claude-3-5-sonnet-latest",
+        model: "claude-sonnet-4",
         max_tokens: 8192,
         stream: true,
         messages: [
@@ -260,7 +260,7 @@ describe("Anthropic", () => {
   test("complete should send a valid request", async () => {
     const anthropic = new Anthropic({
       apiKey: "test-api-key",
-      model: "claude-3-5-sonnet-latest",
+      model: "claude-sonnet-4",
       apiBase: "https://api.anthropic.com/v1/",
     });

@@ -278,7 +278,7 @@ describe("Anthropic", () => {
         "x-api-key": "test-api-key",
       },
       body: {
-        model: "claude-3-5-sonnet-latest",
+        model: "claude-sonnet-4",
         max_tokens: 8192,
         stream: true,
         messages: [
@@ -301,7 +301,7 @@ describe("Anthropic", () => {
   test("should handle system message", async () => {
     const anthropic = new Anthropic({
       apiKey: "test-api-key",
-      model: "claude-3-5-sonnet-latest",
+      model: "claude-sonnet-4",
apiBase: "https://api.anthropic.com/v1/", }); @@ -325,7 +325,7 @@ describe("Anthropic", () => { "x-api-key": "test-api-key", }, body: { - model: "claude-3-5-sonnet-latest", + model: "claude-sonnet-4", max_tokens: 8192, stream: true, messages: [ @@ -347,7 +347,7 @@ describe("Anthropic", () => { test("should handle tool calls", async () => { const anthropic = new Anthropic({ apiKey: "test-api-key", - model: "claude-3-5-sonnet-latest", + model: "claude-sonnet-4", apiBase: "https://api.anthropic.com/v1/", }); @@ -386,7 +386,7 @@ describe("Anthropic", () => { "x-api-key": "test-api-key", }, body: { - model: "claude-3-5-sonnet-latest", + model: "claude-sonnet-4", max_tokens: 8192, stream: true, messages: [ @@ -424,7 +424,7 @@ describe("Anthropic", () => { test("should handle custom max tokens", async () => { const anthropic = new Anthropic({ apiKey: "test-api-key", - model: "claude-3-5-sonnet-latest", + model: "claude-sonnet-4", apiBase: "https://api.anthropic.com/v1/", }); @@ -446,7 +446,7 @@ describe("Anthropic", () => { "x-api-key": "test-api-key", }, body: { - model: "claude-3-5-sonnet-latest", + model: "claude-sonnet-4", max_tokens: 1000, stream: true, messages: [ @@ -470,7 +470,7 @@ describe("Anthropic", () => { test("should throw error when API key is missing", async () => { const anthropic = new Anthropic({ apiKey: "", - model: "claude-3-5-sonnet-latest", + model: "claude-sonnet-4", apiBase: "https://api.anthropic.com/v1/", }); diff --git a/core/llm/llms/OpenRouter.vitest.ts b/core/llm/llms/OpenRouter.vitest.ts index e6e8afdd216..650f14306a6 100644 --- a/core/llm/llms/OpenRouter.vitest.ts +++ b/core/llm/llms/OpenRouter.vitest.ts @@ -6,13 +6,13 @@ import OpenRouter from "./OpenRouter"; describe("OpenRouter Anthropic Caching", () => { it("should detect Anthropic models correctly", () => { const openRouter = new OpenRouter({ - model: "claude-3-5-sonnet-latest", + model: "claude-sonnet-4-latest", apiKey: "test-key", }); // Test private method through modifyChatBody const body: ChatCompletionCreateParams = { - model: "claude-3-5-sonnet-latest", + model: "claude-sonnet-4-latest", messages: [], }; @@ -71,7 +71,7 @@ describe("OpenRouter Anthropic Caching", () => { it("should correctly handle cache_control with system messages present", () => { const openRouter = new OpenRouter({ - model: "claude-3-5-sonnet-latest", + model: "claude-sonnet-4-latest", apiKey: "test-key", cacheBehavior: { cacheConversation: true, @@ -80,7 +80,7 @@ describe("OpenRouter Anthropic Caching", () => { }); const body: ChatCompletionCreateParams = { - model: "claude-3-5-sonnet-latest", + model: "claude-sonnet-4-latest", messages: [ { role: "system", content: "You are a helpful assistant" }, { role: "user", content: "First user message" }, @@ -137,7 +137,7 @@ describe("OpenRouter Anthropic Caching", () => { it("should add cache_control to system message when caching is enabled", () => { const openRouter = new OpenRouter({ - model: "claude-3-5-sonnet-latest", + model: "claude-sonnet-4-latest", apiKey: "test-key", cacheBehavior: { cacheConversation: false, @@ -146,7 +146,7 @@ describe("OpenRouter Anthropic Caching", () => { }); const body: ChatCompletionCreateParams = { - model: "claude-3-5-sonnet-latest", + model: "claude-sonnet-4-latest", messages: [ { role: "system", content: "You are a helpful assistant" }, { role: "user", content: "Hello" }, @@ -176,7 +176,7 @@ describe("OpenRouter Anthropic Caching", () => { it("should handle array content correctly", () => { const openRouter = new OpenRouter({ - model: 
"claude-3-5-sonnet-latest", + model: "claude-sonnet-4-latest", apiKey: "test-key", cacheBehavior: { cacheConversation: true, @@ -185,7 +185,7 @@ describe("OpenRouter Anthropic Caching", () => { }); const body: ChatCompletionCreateParams = { - model: "claude-3-5-sonnet-latest", + model: "claude-sonnet-4-latest", messages: [ { role: "user", diff --git a/core/llm/toolSupport.ts b/core/llm/toolSupport.ts index 7cdbd02cd44..50f26e37354 100644 --- a/core/llm/toolSupport.ts +++ b/core/llm/toolSupport.ts @@ -14,32 +14,12 @@ export const PROVIDER_TOOL_SUPPORT: Record boolean> = } } catch (e) {} - return [ - "claude-3-5", - "claude-3.5", - "claude-3-7", - "claude-3.7", - "claude-sonnet-4", - "claude-4-sonnet", - "gpt-4", - "o3", - "gemini", - "claude-opus-4", - "gemma", - ].some((part) => model.toLowerCase().startsWith(part)); + return ["claude", "gpt-4", "o3", "gemini", "gemma"].some((part) => + model.toLowerCase().startsWith(part), + ); }, anthropic: (model) => { - if ( - [ - "claude-3-5", - "claude-3.5", - "claude-3-7", - "claude-3.7", - "claude-sonnet-4", - "claude-4-sonnet", - "claude-opus-4", - ].some((part) => model.toLowerCase().startsWith(part)) - ) { + if (["claude"].some((part) => model.toLowerCase().startsWith(part))) { return true; } diff --git a/docs/customize/model-providers/top-level/anthropic.mdx b/docs/customize/model-providers/top-level/anthropic.mdx index 7231e93e935..fc8ec83f802 100644 --- a/docs/customize/model-providers/top-level/anthropic.mdx +++ b/docs/customize/model-providers/top-level/anthropic.mdx @@ -41,7 +41,7 @@ sidebarTitle: "Anthropic" - **Check out a more advanced configuration [here](https://hub.continue.dev/anthropic/claude-4-sonnet?view=config)** + **Check out a more advanced configuration [here](https://hub.continue.dev/anthropic/claude-sonnet-4-5?view=config)** ## How to Enable Prompt Caching with Claude diff --git a/extensions/intellij/src/main/kotlin/com/github/continuedev/continueintellijextension/constants/ServerConstants.kt b/extensions/intellij/src/main/kotlin/com/github/continuedev/continueintellijextension/constants/ServerConstants.kt index 5135dc87ae9..725cc948ae2 100644 --- a/extensions/intellij/src/main/kotlin/com/github/continuedev/continueintellijextension/constants/ServerConstants.kt +++ b/extensions/intellij/src/main/kotlin/com/github/continuedev/continueintellijextension/constants/ServerConstants.kt @@ -14,10 +14,10 @@ const val DEFAULT_CONFIG = { "models": [ { - "model": "claude-3-5-sonnet-latest", + "model": "claude-sonnet-4-5", "provider": "anthropic", "apiKey": "", - "title": "Claude 3.5 Sonnet" + "title": "Claude Sonnet 4.5" } ], "tabAutocompleteModel": { diff --git a/extensions/vscode/config_schema.json b/extensions/vscode/config_schema.json index e22df61b8fa..75f8df3fd84 100644 --- a/extensions/vscode/config_schema.json +++ b/extensions/vscode/config_schema.json @@ -850,14 +850,10 @@ "anyOf": [ { "enum": [ - "claude-2", - "claude-instant-1", - "claude-3-5-sonnet-latest", - "claude-3-7-sonnet-20250219", - "claude-3-opus-20240229", - "claude-3-sonnet-20240229", - "claude-3-haiku-20240307", - "claude-2.1" + "claude-sonnet-4", + "claude-sonnet-4-5", + "claude-opus-4-1", + "claude-haiku-4-5" ] }, { @@ -1608,14 +1604,10 @@ "codeup-13b", "deepseek-7b", "deepseek-33b", - "claude-2", - "claude-instant-1", - "claude-3-5-sonnet-latest", - "claude-3-5-sonnet-20240620", - "claude-3-opus-20240229", - "claude-3-sonnet-20240229", - "claude-3-haiku-20240307", - "claude-2.1", + "claude-sonnet-4", + "claude-sonnet-4-5", + "claude-opus-4-1", + 
"claude-haiku-4-5", "command-r", "command-r-plus", "chat-bison-001", diff --git a/extensions/vscode/e2e/test-continue/config.json b/extensions/vscode/e2e/test-continue/config.json index fbe78fa0aac..bb0a7c22f96 100644 --- a/extensions/vscode/e2e/test-continue/config.json +++ b/extensions/vscode/e2e/test-continue/config.json @@ -16,7 +16,7 @@ { "provider": "mock", "title": "TOOL MOCK LLM", - "model": "claude-3-5-sonnet-latest", + "model": "claude-sonnet-4-latest", "capabilities": { "tools": true }, @@ -50,7 +50,7 @@ { "provider": "mock", "title": "SYSTEM MESSAGE MOCK LLM", - "model": "claude-3-5-sonnet-latest", + "model": "claude-sonnet-4-latest", "requestOptions": { "extraBodyProperties": { "chatStream": [["REPEAT_SYSTEM_MSG"]] @@ -60,7 +60,7 @@ { "provider": "mock", "title": "LAST MESSAGE MOCK LLM", - "model": "claude-3-5-sonnet-latest", + "model": "claude-sonnet-4-latest", "requestOptions": { "extraBodyProperties": { "chatStream": [["REPEAT_LAST_MSG"]] diff --git a/gui/src/pages/AddNewModel/configs/models.ts b/gui/src/pages/AddNewModel/configs/models.ts index aaa8092f749..bf77f1de253 100644 --- a/gui/src/pages/AddNewModel/configs/models.ts +++ b/gui/src/pages/AddNewModel/configs/models.ts @@ -1145,27 +1145,42 @@ export const models: { [key: string]: ModelPackage } = { icon: "openai.png", isOpenSource: false, }, - claude4Sonnet: { - title: "Claude 4 Sonnet", + claude45Sonnet: { + title: "Claude Sonnet 4.5", description: - "The most intelligent model in the Claude 4 series. Costing lesser than Claude 4 Opus.", + "Anthropic's smartest model for complex agents and coding with exceptional performance in reasoning and multilingual tasks.", params: { - model: "claude-4-sonnet-latest", + model: "claude-sonnet-4-5-20250929", contextLength: 200_000, - title: "Claude 4 Sonnet", + title: "Claude 4.5 Sonnet", apiKey: "", }, providerOptions: ["anthropic"], icon: "anthropic.png", isOpenSource: false, }, - claude41Opus: { - title: "Claude 4.1 Opus", - description: "The most capable model in the Claude 4 series", + claude45Haiku: { + title: "Claude Haiku 4.5", + description: + "Anthropic's fastest model with near-frontier intelligence, ideal for quick and accurate responses.", params: { - model: "claude-opus-4-1-20250805", + model: "claude-haiku-4-5-20251001", + contextLength: 200_000, + title: "Claude Haiku 4.5", + apiKey: "", + }, + providerOptions: ["anthropic"], + icon: "anthropic.png", + isOpenSource: false, + }, + claude4Sonnet: { + title: "Claude Sonnet 4", + description: + "The most intelligent model in the Claude 4 series. 
+    params: {
+      model: "claude-4-sonnet-latest",
       contextLength: 200_000,
-      title: "Claude 4.1 Opus",
+      title: "Claude 4 Sonnet",
       apiKey: "",
     },
     providerOptions: ["anthropic"],
@@ -1186,6 +1201,20 @@ export const models: { [key: string]: ModelPackage } = {
     icon: "anthropic.png",
     isOpenSource: false,
   },
+  claude41Opus: {
+    title: "Claude Opus 4.1",
+    description: "The most capable model in the Claude 4 series",
+    params: {
+      model: "claude-opus-4-1-20250805",
+      contextLength: 200_000,
+      title: "Claude Opus 4.1",
+      apiKey: "",
+    },
+    providerOptions: ["anthropic"],
+    icon: "anthropic.png",
+    isOpenSource: false,
+  },
+
   graniteChat: {
     title: "Granite Chat 13b",
     description:
@@ -1575,20 +1604,7 @@ export const models: { [key: string]: ModelPackage } = {
     icon: "openai.png",
     isOpenSource: false,
   },
-  asksageclaude35Sonnet: {
-    title: "Claude 3.5 Sonnet",
-    description:
-      "Anthropic's most intelligent model, but much less expensive than Claude 3 Opus",
-    params: {
-      model: "claude-35-sonnet",
-      contextLength: 200_000,
-      title: "Claude 3.5 Sonnet",
-      apiKey: "",
-    },
-    providerOptions: ["askSage"],
-    icon: "anthropic.png",
-    isOpenSource: false,
-  },
+
   asksageclaude37sonnet: {
     title: "Claude 3.7 Sonnet",
     description: "Anthropic's 3.7 model.",
     params: {
@@ -1615,19 +1631,7 @@ export const models: { [key: string]: ModelPackage } = {
     icon: "anthropic.png",
     isOpenSource: false,
   },
-  asksageclaude35gov: {
-    title: "Claude 3.5 Sonnet gov*",
-    description: "Anthropic's 3.5 Sonnet model.",
-    params: {
-      model: "aws-bedrock-claude-35-sonnet-gov",
-      contextLength: 200_000,
-      title: "Claude 3.5 Sonnet gov*",
-      apiKey: "",
-    },
-    providerOptions: ["askSage"],
-    icon: "anthropic.png",
-    isOpenSource: false,
-  },
+
   asksageclaude4s: {
     title: "Claude 4 Sonnet",
     description: "Anthropic's Claude 4 Sonnet",
     params: {
@@ -2320,20 +2324,61 @@ export const models: { [key: string]: ModelPackage } = {
     icon: "cometapi.png",
     isOpenSource: false,
   },
-  cometapiClaude35HaikuLatest: {
-    title: "Claude 3.5 Haiku Latest",
+  cometapiClaude45Sonnet: {
+    title: "Claude 4.5 Sonnet Latest",
     description:
-      "Claude 3.5 Haiku Latest via CometAPI - fast and efficient model from Anthropic.",
+      "Claude 4.5 Sonnet Latest via CometAPI - Anthropic's smartest model for complex agents and coding.",
     params: {
-      model: "claude-3-5-haiku-latest",
+      model: "claude-sonnet-4-5-20250929",
+      contextLength: 200_000,
+      title: "Claude 4.5 Sonnet Latest",
+      apiKey: "",
+    },
+    providerOptions: ["cometapi"],
+    icon: "cometapi.png",
+    isOpenSource: false,
+  },
+  cometapiClaude45Haiku: {
+    title: "Claude 4.5 Haiku Latest",
+    description:
+      "Claude 4.5 Haiku Latest via CometAPI - Anthropic's fastest model with near-frontier intelligence.",
+    params: {
+      model: "claude-haiku-4-5-20251001",
       contextLength: 200_000,
-      title: "Claude 3.5 Haiku Latest",
+      title: "Claude 4.5 Haiku Latest",
       apiKey: "",
     },
     providerOptions: ["cometapi"],
     icon: "cometapi.png",
     isOpenSource: false,
   },
+  asksageclaude35Sonnet: {
+    title: "Claude 3.5 Sonnet",
+    description:
+      "Anthropic's most intelligent model, but much less expensive than Claude 3 Opus",
+    params: {
+      model: "claude-35-sonnet",
+      contextLength: 200_000,
+      title: "Claude 3.5 Sonnet",
+      apiKey: "",
+    },
+    providerOptions: ["askSage"],
+    icon: "anthropic.png",
+    isOpenSource: false,
+  },
+  asksageclaude35gov: {
+    title: "Claude 3.5 Sonnet gov*",
+    description: "Anthropic's 3.5 Sonnet model.",
+    params: {
+      model: "aws-bedrock-claude-35-sonnet-gov",
+      contextLength: 200_000,
+      title: "Claude 3.5 Sonnet gov*",
+      apiKey: "",
apiKey: "", + }, + providerOptions: ["askSage"], + icon: "anthropic.png", + isOpenSource: false, + }, // Gemini series models via CometAPI cometapiGemini25Pro: { diff --git a/gui/src/pages/AddNewModel/configs/providers.ts b/gui/src/pages/AddNewModel/configs/providers.ts index 88e091ce775..062533ead9a 100644 --- a/gui/src/pages/AddNewModel/configs/providers.ts +++ b/gui/src/pages/AddNewModel/configs/providers.ts @@ -77,12 +77,13 @@ export const providers: Partial> = { models.cometapiO4Mini, models.cometapiO3Pro, // Anthropic Claude family + models.cometapiClaude45Sonnet, + models.cometapiClaude45Haiku, models.cometapiClaudeOpus41, models.cometapiClaudeOpus41Thinking, models.cometapiClaudeSonnet4, models.cometapiClaudeSonnet4Thinking, models.cometapiClaude37SonnetLatest, - models.cometapiClaude35HaikuLatest, // Google Gemini family models.cometapiGemini25Pro, models.cometapiGemini25Flash, @@ -166,7 +167,13 @@ export const providers: Partial> = { defaultValue: 100000, }, ], - packages: [models.claude4Sonnet, models.claude41Opus, models.claude35Haiku], + packages: [ + models.claude45Sonnet, + models.claude45Haiku, + models.claude41Opus, + models.claude4Sonnet, + models.claude35Haiku, + ], apiKeyUrl: "https://console.anthropic.com/account/keys", }, moonshot: { diff --git a/manual-testing-sandbox/test.js b/manual-testing-sandbox/test.js index d2920d6af41..1716c956667 100644 --- a/manual-testing-sandbox/test.js +++ b/manual-testing-sandbox/test.js @@ -1,21 +1,51 @@ +/** + * Calculator class that performs basic arithmetic operations + * Uses method chaining for convenient calculation sequences + */ class Calculator { + /** + * Initializes the calculator with result set to 0 + */ constructor() { this.result = 0; } + /** + * Adds a number to the current result + * @param {number} number - The number to add + * @returns {Calculator} - Returns this for method chaining + */ add(number) { this.result += number; return this; } + + /** + * Subtracts a number from the current result + * @param {number} number - The number to subtract + * @returns {Calculator} - Returns this for method chaining + */ subtract(number) { + this.result -= number; return this; } + /** + * Multiplies the current result by a number + * @param {number} number - The number to multiply by + * @returns {Calculator} - Returns this for method chaining + */ multiply(number) { this.result *= number; return this; } + /** + * Divides the current result by a number + * @param {number} number - The number to divide by + * @throws {Error} - Throws an error if attempting to divide by zero + * @returns {Calculator} - Returns this for method chaining + */ divide(number) { if (number === 0) { throw new Error("Cannot divide by zero"); @@ -24,10 +54,18 @@ class Calculator { return this; } + /** + * Gets the current result value + * @returns {number} - The current calculation result + */ getResult() { return this.result; } + /** + * Resets the result to 0 + * @returns {Calculator} - Returns this for method chaining + */ reset() { this.result = 0; return this; diff --git a/packages/config-yaml/src/markdown/agentFiles.test.ts b/packages/config-yaml/src/markdown/agentFiles.test.ts index 5f1a951af1c..1fa477320af 100644 --- a/packages/config-yaml/src/markdown/agentFiles.test.ts +++ b/packages/config-yaml/src/markdown/agentFiles.test.ts @@ -185,7 +185,7 @@ describe("serializeAgentFile", () => { const agentFile: AgentFile = { name: "Test Agent File", description: "A test agent file", - model: "anthropic/claude-3-sonnet", + model: 
"anthropic/claude-sonnet-4-5", tools: "tool1, tool2", rules: "rule1, rule2", prompt: "This is the test prompt", diff --git a/packages/config-yaml/src/schemas/commonSlugs.ts b/packages/config-yaml/src/schemas/commonSlugs.ts index 3ba10bbd5f2..c0790c44805 100644 --- a/packages/config-yaml/src/schemas/commonSlugs.ts +++ b/packages/config-yaml/src/schemas/commonSlugs.ts @@ -1,19 +1,20 @@ export const commonModelSlugs = [ - "anthropic/claude-3-7-sonnet", + "anthropic/claude-sonnet-4", "togetherai/llama-4-maverick-instruct-17bx128e", "google/gemini-2.5-pro", "mistral/codestral", "voyageai/voyage-code-3", + "anthropic/claude-sonnet-4-5", "relace/instant-apply", "xai/grok-2", "openai/gpt-4o", "togetherai/llama-4-scout-instruct-17bx16e", - "anthropic/claude-3-5-sonnet", + "anthropic/claude-haiku-4-5", "google/gemini-2.0-flash", "voyageai/rerank-2", + "anthropic/claude-opus-4-1", "ollama/deepseek-r1", "morphllm/morph-v0", - "anthropic/claude-3-5-haiku", "lmstudio/deepseek-r1", "openai/o3-mini", "voyageai/voyage-code-2", diff --git a/packages/llm-info/src/providers/anthropic.ts b/packages/llm-info/src/providers/anthropic.ts index 5c37f24da39..62ae62b6a97 100644 --- a/packages/llm-info/src/providers/anthropic.ts +++ b/packages/llm-info/src/providers/anthropic.ts @@ -4,13 +4,43 @@ export const Anthropic: ModelProvider = { id: "anthropic", displayName: "Anthropic", models: [ + { + model: "claude-sonnet-4-5-20250929", + displayName: "Claude 4.5 Sonnet", + contextLength: 200000, + maxCompletionTokens: 64000, + description: + "Anthropic's smartest model for complex agents and coding with exceptional performance in reasoning and multilingual tasks.", + regex: /claude-(?:4[.-]5-sonnet|sonnet-4[.-]5).*/i, + recommendedFor: ["chat"], + }, + { + model: "claude-haiku-4-5-20251001", + displayName: "Claude 4.5 Haiku", + contextLength: 200000, + maxCompletionTokens: 64000, + description: + "Anthropic's fastest model with near-frontier intelligence, ideal for quick and accurate responses.", + regex: /claude-(?:4[.-]5-haiku|haiku-4[.-]5).*/i, + recommendedFor: ["chat"], + }, + { + model: "claude-opus-4-1-20250805", + displayName: "Claude 4.1 Opus", + contextLength: 200000, + maxCompletionTokens: 32000, + description: + "Exceptional model for specialized reasoning tasks with advanced agentic capabilities and superior coding performance.", + regex: /claude-opus-4[.-]1.*/i, + recommendedFor: ["chat"], + }, { model: "claude-sonnet-4-20250514", displayName: "Claude 4 Sonnet", contextLength: 200000, maxCompletionTokens: 8192, description: - "Most intelligent model with the highest level of intelligence and capability.", + "Previous generation model with strong coding and reasoning capabilities, now superseded by Claude 4.5 Sonnet.", // Sometimes written as claude-4-sonnet, other times as claude-sonnet-4 regex: /claude-(?:4-sonnet|sonnet-4).*/i, recommendedFor: ["chat"], @@ -21,28 +51,28 @@ export const Anthropic: ModelProvider = { contextLength: 200000, maxCompletionTokens: 8192, description: - "Most intelligent model with the highest level of intelligence and capability.", + "Previous generation model with high intelligence, now superseded by Claude 4.1 Opus.", regex: /claude-(?:4-opus|opus-4).*/i, recommendedFor: ["chat"], }, { - model: "claude-3-5-sonnet-latest", - displayName: "Claude 3.5 Sonnet", + model: "claude-3-7-sonnet-latest", + displayName: "Claude 3.7 Sonnet", contextLength: 200000, - maxCompletionTokens: 8192, + maxCompletionTokens: 128000, description: - "Most intelligent model with the highest level of 
intelligence and capability.", - regex: /claude-3[.-]5-sonnet.*/i, + "First hybrid reasoning model with extended thinking capabilities, excellent for coding and front-end development.", + regex: /claude-3[.-]7-sonnet.*/i, recommendedFor: ["chat"], }, { - model: "claude-3-7-sonnet-latest", - displayName: "Claude 3.7 Sonnet", + model: "claude-3-5-sonnet-latest", + displayName: "Claude 3.5 Sonnet", contextLength: 200000, maxCompletionTokens: 8192, description: - "Most intelligent model with the highest level of intelligence and capability.", - regex: /claude-3[.-]7-sonnet.*/i, + "Previous flagship model with strong performance across diverse tasks, now superseded by Claude 4.5.", + regex: /claude-3[.-]5-sonnet.*/i, recommendedFor: ["chat"], }, { @@ -96,7 +126,7 @@ export const Anthropic: ModelProvider = { contextLength: 100000, maxCompletionTokens: 4096, description: - "Our cheapest small and fast model, a predecessor of Claude Haiku.", + "Anthropic's cheapest small and fast model, a predecessor of Claude Haiku.", regex: /claude-instant-1\.2/i, }, ], diff --git a/packages/llm-info/src/providers/cometapi.ts b/packages/llm-info/src/providers/cometapi.ts index 441f5bd75b8..34f0a998a16 100644 --- a/packages/llm-info/src/providers/cometapi.ts +++ b/packages/llm-info/src/providers/cometapi.ts @@ -79,6 +79,24 @@ export const CometAPI: ModelProvider = { }, // Claude Series + { + model: "claude-sonnet-4-5", + displayName: "Claude 4.5 Sonnet", + contextLength: 200000, + maxCompletionTokens: 64000, + description: + "Anthropic's smartest model for complex agents and coding with exceptional performance in reasoning and multilingual tasks.", + recommendedFor: ["chat"], + }, + { + model: "claude-haiku-4-5-20251001", + displayName: "Claude 4.5 Haiku (2025-10-01)", + contextLength: 200000, + maxCompletionTokens: 64000, + description: + "Anthropic's fastest model with near-frontier intelligence, ideal for quick and accurate responses.", + recommendedFor: ["chat"], + }, { model: "claude-opus-4-1-20250805", displayName: "Claude Opus 4.1 (2025-08-05)", diff --git a/packages/openai-adapters/src/apis/OpenRouter.test.ts b/packages/openai-adapters/src/apis/OpenRouter.test.ts index 6b7175446b8..e62e2123f17 100644 --- a/packages/openai-adapters/src/apis/OpenRouter.test.ts +++ b/packages/openai-adapters/src/apis/OpenRouter.test.ts @@ -14,7 +14,7 @@ describe("OpenRouterApi Anthropic caching", () => { const api = new OpenRouterApi(baseConfig); const body: ChatCompletionCreateParams = { - model: "anthropic/claude-3.5-sonnet", + model: "anthropic/claude-sonnet-4-5", messages: [ { role: "user", content: "First" }, { role: "assistant", content: "Resp" }, @@ -161,7 +161,7 @@ describe("OpenRouterApi Anthropic caching", () => { describe("applyAnthropicCachingToOpenRouterBody", () => { it("mutates OpenAI chat body with system and tool caching", () => { const body: ChatCompletionCreateParams = { - model: "anthropic/claude-3.5-sonnet", + model: "anthropic/claude-sonnet-4-5", messages: [ { role: "system", content: "You are helpful" }, { role: "user", content: "Alpha" }, @@ -241,7 +241,7 @@ describe("OpenRouterApi Anthropic caching", () => { it("leaves system untouched when strategy is none while caching users", () => { const body: ChatCompletionCreateParams = { - model: "anthropic/claude-3.5-sonnet", + model: "anthropic/claude-sonnet-4-5", messages: [ { role: "system", content: "Stay focused" }, { role: "user", content: "Question" }, @@ -276,7 +276,7 @@ describe("OpenRouterApi Anthropic caching", () => { it("adds cache_control only 
     const body: ChatCompletionCreateParams = {
-      model: "anthropic/claude-3.5-sonnet",
+      model: "anthropic/claude-sonnet-4-5",
       messages: [
         {
           role: "user",
diff --git a/packages/openai-adapters/src/test/anthropic-adapter.vitest.ts b/packages/openai-adapters/src/test/anthropic-adapter.vitest.ts
index 859bcab02ad..dba75137cd5 100644
--- a/packages/openai-adapters/src/test/anthropic-adapter.vitest.ts
+++ b/packages/openai-adapters/src/test/anthropic-adapter.vitest.ts
@@ -35,7 +35,7 @@ describe("Anthropic Adapter Tests", () => {
     methodToTest: "chatCompletionNonStream",
     params: [
       {
-        model: "claude-3-5-sonnet-20241022",
+        model: "claude-sonnet-4-5",
         messages: [{ role: "user", content: "hello" }],
       },
       new AbortController().signal,
@@ -62,7 +62,7 @@ describe("Anthropic Adapter Tests", () => {
         },
       ],
       system: undefined,
-      model: "claude-3-5-sonnet-20241022",
+      model: "claude-sonnet-4-5",
       max_tokens: 32000,
       stream: undefined,
     },
@@ -77,7 +77,7 @@ describe("Anthropic Adapter Tests", () => {
           text: "Hello! How can I help you today?",
         },
       ],
-      model: "claude-3-5-sonnet-20241022",
+      model: "claude-sonnet-4-5",
       stop_reason: "end_turn",
       stop_sequence: null,
       usage: {
@@ -98,7 +98,7 @@ describe("Anthropic Adapter Tests", () => {
     methodToTest: "chatCompletionStream",
     params: [
       {
-        model: "claude-3-5-sonnet-20241022",
+        model: "claude-sonnet-4-5",
         messages: [{ role: "user", content: "hello" }],
         stream: true,
       },
@@ -126,7 +126,7 @@ describe("Anthropic Adapter Tests", () => {
         },
       ],
       system: undefined,
-      model: "claude-3-5-sonnet-20241022",
+      model: "claude-sonnet-4-5",
       max_tokens: 32000,
       stream: true,
     },
@@ -160,7 +160,7 @@ describe("Anthropic Adapter Tests", () => {
     methodToTest: "chatCompletionStream",
     params: [
       {
-        model: "claude-3-5-sonnet-20241022",
+        model: "claude-sonnet-4-5",
         messages: [
           { role: "system", content: "You are a helpful assistant." },
           { role: "user", content: "hello" },
@@ -197,7 +197,7 @@ describe("Anthropic Adapter Tests", () => {
           cache_control: { type: "ephemeral" },
         },
       ],
-      model: "claude-3-5-sonnet-20241022",
+      model: "claude-sonnet-4-5",
       max_tokens: 32000,
       stream: true,
     },
diff --git a/packages/openai-adapters/src/test/main.test.ts b/packages/openai-adapters/src/test/main.test.ts
index 3b7b96193e6..8a10c8b727f 100644
--- a/packages/openai-adapters/src/test/main.test.ts
+++ b/packages/openai-adapters/src/test/main.test.ts
@@ -70,7 +70,7 @@ const TESTS: Omit[] = [
   },
   {
     provider: "anthropic",
-    model: "claude-3-5-haiku-latest",
+    model: "claude-haiku-4-5",
     apiKey: process.env.ANTHROPIC_API_KEY!,
     roles: ["chat"],
     options: {