diff --git a/packages/types/src/providers/groq.ts b/packages/types/src/providers/groq.ts index c48ee0e95d..1782a6a72a 100644 --- a/packages/types/src/providers/groq.ts +++ b/packages/types/src/providers/groq.ts @@ -8,6 +8,7 @@ export type GroqModelId = | "meta-llama/llama-4-maverick-17b-128e-instruct" | "mistral-saba-24b" | "qwen-qwq-32b" + | "qwen/qwen3-32b" | "deepseek-r1-distill-llama-70b" export const groqDefaultModelId: GroqModelId = "llama-3.3-70b-versatile" // Defaulting to Llama3 70B Versatile @@ -19,8 +20,8 @@ export const groqModels = { contextWindow: 131072, supportsImages: false, supportsPromptCache: false, - inputPrice: 0, - outputPrice: 0, + inputPrice: 0.05, + outputPrice: 0.08, description: "Meta Llama 3.1 8B Instant model, 128K context.", }, "llama-3.3-70b-versatile": { @@ -28,8 +29,8 @@ export const groqModels = { contextWindow: 131072, supportsImages: false, supportsPromptCache: false, - inputPrice: 0, - outputPrice: 0, + inputPrice: 0.59, + outputPrice: 0.79, description: "Meta Llama 3.3 70B Versatile model, 128K context.", }, "meta-llama/llama-4-scout-17b-16e-instruct": { @@ -37,8 +38,8 @@ export const groqModels = { contextWindow: 131072, supportsImages: false, supportsPromptCache: false, - inputPrice: 0, - outputPrice: 0, + inputPrice: 0.11, + outputPrice: 0.34, description: "Meta Llama 4 Scout 17B Instruct model, 128K context.", }, "meta-llama/llama-4-maverick-17b-128e-instruct": { @@ -46,8 +47,8 @@ export const groqModels = { contextWindow: 131072, supportsImages: false, supportsPromptCache: false, - inputPrice: 0, - outputPrice: 0, + inputPrice: 0.2, + outputPrice: 0.6, description: "Meta Llama 4 Maverick 17B Instruct model, 128K context.", }, "mistral-saba-24b": { @@ -55,8 +56,8 @@ export const groqModels = { contextWindow: 32768, supportsImages: false, supportsPromptCache: false, - inputPrice: 0, - outputPrice: 0, + inputPrice: 0.79, + outputPrice: 0.79, description: "Mistral Saba 24B model, 32K context.", }, "qwen-qwq-32b": { @@ -64,17 
+65,26 @@ export const groqModels = { contextWindow: 131072, supportsImages: false, supportsPromptCache: false, - inputPrice: 0, - outputPrice: 0, + inputPrice: 0.29, + outputPrice: 0.39, description: "Alibaba Qwen QwQ 32B model, 128K context.", }, + "qwen/qwen3-32b": { + maxTokens: 40960, + contextWindow: 131072, + supportsImages: false, + supportsPromptCache: false, + inputPrice: 0.29, + outputPrice: 0.59, + description: "Alibaba Qwen 3 32B model, 128K context.", + }, "deepseek-r1-distill-llama-70b": { maxTokens: 131072, contextWindow: 131072, supportsImages: false, supportsPromptCache: false, - inputPrice: 0, - outputPrice: 0, + inputPrice: 0.75, + outputPrice: 0.99, description: "DeepSeek R1 Distill Llama 70B model, 128K context.", }, } as const satisfies Record diff --git a/packages/types/src/providers/mistral.ts b/packages/types/src/providers/mistral.ts index acbe6d4ec7..be53e9fc2a 100644 --- a/packages/types/src/providers/mistral.ts +++ b/packages/types/src/providers/mistral.ts @@ -6,6 +6,22 @@ export type MistralModelId = keyof typeof mistralModels export const mistralDefaultModelId: MistralModelId = "codestral-latest" export const mistralModels = { + "magistral-medium-latest": { + maxTokens: 41_000, + contextWindow: 41_000, + supportsImages: false, + supportsPromptCache: false, + inputPrice: 2.0, + outputPrice: 5.0, + }, + "mistral-medium-latest": { + maxTokens: 131_000, + contextWindow: 131_000, + supportsImages: true, + supportsPromptCache: false, + inputPrice: 0.4, + outputPrice: 2.0, + }, "codestral-latest": { maxTokens: 256_000, contextWindow: 256_000,