
Commit 21469bc

Merge branch 'main' into adil/add_archgw
2 parents 6f1be84 + df6c57d


57 files changed: 946 additions & 43 deletions

packages/types/src/__tests__/provider-settings.test.ts

Lines changed: 11 additions & 23 deletions
@@ -12,6 +12,12 @@ describe("getApiProtocol", () => {
 			expect(getApiProtocol("claude-code")).toBe("anthropic")
 			expect(getApiProtocol("claude-code", "some-model")).toBe("anthropic")
 		})
+
+		it("should return 'anthropic' for bedrock provider", () => {
+			expect(getApiProtocol("bedrock")).toBe("anthropic")
+			expect(getApiProtocol("bedrock", "gpt-4")).toBe("anthropic")
+			expect(getApiProtocol("bedrock", "claude-3-opus")).toBe("anthropic")
+		})
 	})
 
 	describe("Vertex provider with Claude models", () => {
@@ -27,25 +33,14 @@ describe("getApiProtocol", () => {
 			expect(getApiProtocol("vertex", "gemini-pro")).toBe("openai")
 			expect(getApiProtocol("vertex", "llama-2")).toBe("openai")
 		})
-	})
-
-	describe("Bedrock provider with Claude models", () => {
-		it("should return 'anthropic' for bedrock provider with claude models", () => {
-			expect(getApiProtocol("bedrock", "claude-3-opus")).toBe("anthropic")
-			expect(getApiProtocol("bedrock", "Claude-3-Sonnet")).toBe("anthropic")
-			expect(getApiProtocol("bedrock", "CLAUDE-instant")).toBe("anthropic")
-			expect(getApiProtocol("bedrock", "anthropic.claude-v2")).toBe("anthropic")
-		})
 
-		it("should return 'openai' for bedrock provider with non-claude models", () => {
-			expect(getApiProtocol("bedrock", "gpt-4")).toBe("openai")
-			expect(getApiProtocol("bedrock", "titan-text")).toBe("openai")
-			expect(getApiProtocol("bedrock", "llama-2")).toBe("openai")
+		it("should return 'openai' for vertex provider without model", () => {
+			expect(getApiProtocol("vertex")).toBe("openai")
 		})
 	})
 
-	describe("Other providers with Claude models", () => {
-		it("should return 'openai' for non-vertex/bedrock providers with claude models", () => {
+	describe("Other providers", () => {
+		it("should return 'openai' for non-anthropic providers regardless of model", () => {
 			expect(getApiProtocol("openrouter", "claude-3-opus")).toBe("openai")
 			expect(getApiProtocol("openai", "claude-3-sonnet")).toBe("openai")
 			expect(getApiProtocol("litellm", "claude-instant")).toBe("openai")
@@ -59,20 +54,13 @@ describe("getApiProtocol", () => {
 			expect(getApiProtocol(undefined, "claude-3-opus")).toBe("openai")
 		})
 
-		it("should return 'openai' when model is undefined", () => {
-			expect(getApiProtocol("openai")).toBe("openai")
-			expect(getApiProtocol("vertex")).toBe("openai")
-			expect(getApiProtocol("bedrock")).toBe("openai")
-		})
-
 		it("should handle empty strings", () => {
 			expect(getApiProtocol("vertex", "")).toBe("openai")
-			expect(getApiProtocol("bedrock", "")).toBe("openai")
 		})
 
 		it("should be case-insensitive for claude detection", () => {
 			expect(getApiProtocol("vertex", "CLAUDE-3-OPUS")).toBe("anthropic")
-			expect(getApiProtocol("bedrock", "claude-3-opus")).toBe("anthropic")
+			expect(getApiProtocol("vertex", "claude-3-opus")).toBe("anthropic")
 			expect(getApiProtocol("vertex", "ClAuDe-InStAnT")).toBe("anthropic")
 		})
 	})

packages/types/src/global-settings.ts

Lines changed: 1 addition & 0 deletions
@@ -159,6 +159,7 @@ export const SECRET_STATE_KEYS = [
 	"geminiApiKey",
 	"openAiNativeApiKey",
 	"deepSeekApiKey",
+	"moonshotApiKey",
 	"mistralApiKey",
 	"unboundApiKey",
 	"requestyApiKey",

packages/types/src/provider-settings.ts

Lines changed: 14 additions & 8 deletions
@@ -22,6 +22,7 @@ export const providerNames = [
 	"gemini-cli",
 	"openai-native",
 	"mistral",
+	"moonshot",
 	"deepseek",
 	"archgw",
 	"unbound",
@@ -62,6 +63,7 @@ export const DEFAULT_CONSECUTIVE_MISTAKE_LIMIT = 3
 const baseProviderSettingsSchema = z.object({
 	includeMaxTokens: z.boolean().optional(),
 	diffEnabled: z.boolean().optional(),
+	todoListEnabled: z.boolean().optional(),
 	fuzzyMatchThreshold: z.number().optional(),
 	modelTemperature: z.number().nullish(),
 	rateLimitSeconds: z.number().optional(),
@@ -187,6 +189,13 @@ const deepSeekSchema = apiModelIdProviderModelSchema.extend({
 	deepSeekApiKey: z.string().optional(),
 })
 
+const moonshotSchema = apiModelIdProviderModelSchema.extend({
+	moonshotBaseUrl: z
+		.union([z.literal("https://api.moonshot.ai/v1"), z.literal("https://api.moonshot.cn/v1")])
+		.optional(),
+	moonshotApiKey: z.string().optional(),
+})
+
 const archgwSchema = apiModelIdProviderModelSchema.extend({
 	archgwBaseUrl: z.string().optional(),
 	archgwApiKey: z.string().optional(),
@@ -249,6 +258,7 @@ export const providerSettingsSchemaDiscriminated = z.discriminatedUnion("apiProv
 	openAiNativeSchema.merge(z.object({ apiProvider: z.literal("openai-native") })),
 	mistralSchema.merge(z.object({ apiProvider: z.literal("mistral") })),
 	deepSeekSchema.merge(z.object({ apiProvider: z.literal("deepseek") })),
+	moonshotSchema.merge(z.object({ apiProvider: z.literal("moonshot") })),
 	archgwSchema.merge(z.object({ apiProvider: z.literal("archgw") })),
 	unboundSchema.merge(z.object({ apiProvider: z.literal("unbound") })),
 	requestySchema.merge(z.object({ apiProvider: z.literal("requesty") })),
@@ -278,6 +288,7 @@ export const providerSettingsSchema = z.object({
 	...openAiNativeSchema.shape,
 	...mistralSchema.shape,
 	...deepSeekSchema.shape,
+	...moonshotSchema.shape,
 	...archgwSchema.shape,
 	...unboundSchema.shape,
 	...requestySchema.shape,
@@ -313,7 +324,7 @@ export const getModelId = (settings: ProviderSettings): string | undefined => {
 }
 
 // Providers that use Anthropic-style API protocol
-export const ANTHROPIC_STYLE_PROVIDERS: ProviderName[] = ["anthropic", "claude-code"]
+export const ANTHROPIC_STYLE_PROVIDERS: ProviderName[] = ["anthropic", "claude-code", "bedrock"]
 
 // Helper function to determine API protocol for a provider and model
 export const getApiProtocol = (provider: ProviderName | undefined, modelId?: string): "anthropic" | "openai" => {
@@ -322,13 +333,8 @@ export const getApiProtocol = (provider: ProviderName | undefined, modelId?: str
 		return "anthropic"
 	}
 
-	// For vertex and bedrock providers, check if the model ID contains "claude" (case-insensitive)
-	if (
-		provider &&
-		(provider === "vertex" || provider === "bedrock") &&
-		modelId &&
-		modelId.toLowerCase().includes("claude")
-	) {
+	// For vertex provider, check if the model ID contains "claude" (case-insensitive)
+	if (provider && provider === "vertex" && modelId && modelId.toLowerCase().includes("claude")) {
 		return "anthropic"
 	}
 
packages/types/src/providers/index.ts

Lines changed: 1 addition & 0 deletions
@@ -9,6 +9,7 @@ export * from "./groq.js"
 export * from "./lite-llm.js"
 export * from "./lm-studio.js"
 export * from "./mistral.js"
+export * from "./moonshot.js"
 export * from "./ollama.js"
 export * from "./openai.js"
 export * from "./openrouter.js"

packages/types/src/providers/moonshot.ts

Lines changed: 22 additions & 0 deletions

@@ -0,0 +1,22 @@
+import type { ModelInfo } from "../model.js"
+
+// https://platform.moonshot.ai/
+export type MoonshotModelId = keyof typeof moonshotModels
+
+export const moonshotDefaultModelId: MoonshotModelId = "kimi-k2-0711-preview"
+
+export const moonshotModels = {
+	"kimi-k2-0711-preview": {
+		maxTokens: 32_000,
+		contextWindow: 131_072,
+		supportsImages: false,
+		supportsPromptCache: true,
+		inputPrice: 0.6, // $0.60 per million tokens (cache miss)
+		outputPrice: 2.5, // $2.50 per million tokens
+		cacheWritesPrice: 0, // $0 per million tokens (cache miss)
+		cacheReadsPrice: 0.15, // $0.15 per million tokens (cache hit)
+		description: `Kimi K2 is a state-of-the-art mixture-of-experts (MoE) language model with 32 billion activated parameters and 1 trillion total parameters.`,
+	},
+} as const satisfies Record<string, ModelInfo>
+
+export const MOONSHOT_DEFAULT_TEMPERATURE = 0.6
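
The price fields above are USD per million tokens, so a request's cost can be estimated by scaling token counts accordingly. A rough sketch (not part of the commit; the formula is an assumption for illustration, not the extension's actual cost-tracking code):

	import { moonshotModels, moonshotDefaultModelId } from "./moonshot.js"

	const model = moonshotModels[moonshotDefaultModelId] // "kimi-k2-0711-preview"

	// Hypothetical usage: 80k uncached input tokens, 20k cache-hit input tokens, 2k output tokens.
	const usd =
		(80_000 / 1_000_000) * model.inputPrice +      // 0.048
		(20_000 / 1_000_000) * model.cacheReadsPrice + // 0.003
		(2_000 / 1_000_000) * model.outputPrice        // 0.005

	console.log(usd.toFixed(3)) // "0.056"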

src/api/index.ts

Lines changed: 4 additions & 0 deletions
@@ -17,6 +17,7 @@ import {
 	GeminiHandler,
 	OpenAiNativeHandler,
 	DeepSeekHandler,
+	MoonshotHandler,
 	MistralHandler,
 	VsCodeLmHandler,
 	UnboundHandler,
@@ -90,6 +91,8 @@ export function buildApiHandler(configuration: ProviderSettings): ApiHandler {
 			return new OpenAiNativeHandler(options)
 		case "deepseek":
 			return new DeepSeekHandler(options)
+		case "moonshot":
+			return new MoonshotHandler(options)
 		case "archgw":
 			return new ArchGwHandler(options)
 		case "vscode-lm":
@@ -113,6 +116,7 @@ export function buildApiHandler(configuration: ProviderSettings): ApiHandler {
 		case "litellm":
 			return new LiteLLMHandler(options)
 		default:
+			apiProvider satisfies "gemini-cli" | undefined
 			return new AnthropicHandler(options)
 	}
 }
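
The added `apiProvider satisfies "gemini-cli" | undefined` line in the default branch is a compile-time exhaustiveness check: if a new provider name is added to the union without a matching case, the narrowed type in the default branch no longer satisfies that constraint and the build fails. A hypothetical call that would reach the new "moonshot" case (the import path and any field not added in this commit, such as apiModelId, are assumptions for illustration):

	import { buildApiHandler } from "./index"

	const handler = buildApiHandler({
		apiProvider: "moonshot",
		apiModelId: "kimi-k2-0711-preview",            // moonshotDefaultModelId from packages/types
		moonshotBaseUrl: "https://api.moonshot.ai/v1", // one of the two literals allowed by moonshotSchema
		moonshotApiKey: "sk-...",                      // persisted as a secret via SECRET_STATE_KEYS
	})
	// handler is a MoonshotHandler, dispatched by the switch shown above.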
