packages/types/src/providers
1 file changed: +22 −0

@@ -11,6 +11,8 @@ export type GroqModelId =
 	| "qwen/qwen3-32b"
 	| "deepseek-r1-distill-llama-70b"
 	| "moonshotai/kimi-k2-instruct"
+	| "openai/gpt-oss-120b"
+	| "openai/gpt-oss-20b"

 export const groqDefaultModelId: GroqModelId = "llama-3.3-70b-versatile" // Defaulting to Llama3 70B Versatile
@@ -97,4 +99,24 @@ export const groqModels = {
 		outputPrice: 3.0,
 		description: "Moonshot AI Kimi K2 Instruct 1T model, 128K context.",
 	},
+	"openai/gpt-oss-120b": {
+		maxTokens: 32766,
+		contextWindow: 131072,
+		supportsImages: false,
+		supportsPromptCache: false,
+		inputPrice: 0.15,
+		outputPrice: 0.75,
+		description:
+			"GPT-OSS 120B is OpenAI's flagship open source model, built on a Mixture-of-Experts (MoE) architecture with 117 billion parameters and 128 experts.",
+	},
112+ "openai/gpt-oss-20b" : {
113+ maxTokens : 32768 ,
114+ contextWindow : 131072 ,
115+ supportsImages : false ,
116+ supportsPromptCache : false ,
117+ inputPrice : 0.1 ,
118+ outputPrice : 0.5 ,
+		description:
+			"GPT-OSS 20B is OpenAI's smaller open source model, built on a Mixture-of-Experts (MoE) architecture with 21 billion parameters and 32 experts.",
+	},
 } as const satisfies Record<string, ModelInfo>
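
For context, here is a minimal sketch of how a caller might consume the new registry entries. It is an illustration, not part of the diff: the "@roo-code/types" import path is an assumption, and it treats inputPrice/outputPrice as USD per million tokens, which matches how the other Groq entries appear to be priced.

// Hypothetical usage sketch; the import path and the per-million-token
// pricing convention are assumptions, not confirmed by this diff.
import { groqModels, type GroqModelId } from "@roo-code/types"

// Estimate the USD cost of one request, reading inputPrice/outputPrice
// as dollars per million tokens.
function estimateCostUsd(modelId: GroqModelId, inputTokens: number, outputTokens: number): number {
	const info = groqModels[modelId]
	return (inputTokens / 1_000_000) * info.inputPrice + (outputTokens / 1_000_000) * info.outputPrice
}

// A 10K-input / 2K-output request against the new 120B entry:
estimateCostUsd("openai/gpt-oss-120b", 10_000, 2_000) // 0.0015 + 0.0015 = 0.003 USD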