Skip to content

Commit a02eb40

Browse files
authored
Merge pull request RooCodeInc#2014 from cline/andrewmonostate/add-xai-provider
feat: add X AI provider integration
2 parents eedae08 + c87fb06 commit a02eb40

File tree

8 files changed

+212
-0
lines changed

8 files changed

+212
-0
lines changed

.changeset/clean-crabs-do.md

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,5 @@
1+
---
2+
"claude-dev": minor
3+
---
4+
5+
Added xAI as a new provider with support for all current models, including Grok-2 and Grok Vision. This integration lets users connect to xAI's API with their API key and access models with context windows of up to 131K tokens. The implementation includes proper handling for vision models and accurate pricing information.

src/api/index.ts

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -17,6 +17,7 @@ import { QwenHandler } from "./providers/qwen"
1717
import { MistralHandler } from "./providers/mistral"
1818
import { VsCodeLmHandler } from "./providers/vscode-lm"
1919
import { LiteLlmHandler } from "./providers/litellm"
20+
import { XAIHandler } from "./providers/xai"
2021

2122
export interface ApiHandler {
2223
createMessage(systemPrompt: string, messages: Anthropic.Messages.MessageParam[]): ApiStream
@@ -62,6 +63,8 @@ export function buildApiHandler(configuration: ApiConfiguration): ApiHandler {
6263
return new VsCodeLmHandler(options)
6364
case "litellm":
6465
return new LiteLlmHandler(options)
66+
case "xai":
67+
return new XAIHandler(options)
6568
default:
6669
return new AnthropicHandler(options)
6770
}

src/api/providers/xai.ts

Lines changed: 64 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,64 @@
1+
import { Anthropic } from "@anthropic-ai/sdk"
2+
import OpenAI from "openai"
3+
import { ApiHandler } from "../"
4+
import { ApiHandlerOptions, XAIModelId, ModelInfo, xaiDefaultModelId, xaiModels } from "../../shared/api"
5+
import { convertToOpenAiMessages } from "../transform/openai-format"
6+
import { ApiStream } from "../transform/stream"
7+
8+
export class XAIHandler implements ApiHandler {
9+
private options: ApiHandlerOptions
10+
private client: OpenAI
11+
12+
constructor(options: ApiHandlerOptions) {
13+
this.options = options
14+
this.client = new OpenAI({
15+
baseURL: "https://api.x.ai/v1",
16+
apiKey: this.options.xaiApiKey,
17+
})
18+
}
19+
20+
async *createMessage(systemPrompt: string, messages: Anthropic.Messages.MessageParam[]): ApiStream {
21+
const stream = await this.client.chat.completions.create({
22+
model: this.getModel().id,
23+
max_completion_tokens: this.getModel().info.maxTokens,
24+
temperature: 0,
25+
messages: [{ role: "system", content: systemPrompt }, ...convertToOpenAiMessages(messages)],
26+
stream: true,
27+
stream_options: { include_usage: true },
28+
})
29+
30+
for await (const chunk of stream) {
31+
const delta = chunk.choices[0]?.delta
32+
if (delta?.content) {
33+
yield {
34+
type: "text",
35+
text: delta.content,
36+
}
37+
}
38+
39+
if (chunk.usage) {
40+
yield {
41+
type: "usage",
42+
inputTokens: 0,
43+
outputTokens: chunk.usage.completion_tokens || 0,
44+
// @ts-ignore-next-line
45+
cacheReadTokens: chunk.usage.prompt_cache_hit_tokens || 0,
46+
// @ts-ignore-next-line
47+
cacheWriteTokens: chunk.usage.prompt_cache_miss_tokens || 0,
48+
}
49+
}
50+
}
51+
}
52+
53+
getModel(): { id: XAIModelId; info: ModelInfo } {
54+
const modelId = this.options.apiModelId
55+
if (modelId && modelId in xaiModels) {
56+
const id = modelId as XAIModelId
57+
return { id, info: xaiModels[id] }
58+
}
59+
return {
60+
id: xaiDefaultModelId,
61+
info: xaiModels[xaiDefaultModelId],
62+
}
63+
}
64+
}

src/core/webview/ClineProvider.ts

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -59,6 +59,7 @@ type SecretKey =
5959
| "liteLlmApiKey"
6060
| "authToken"
6161
| "authNonce"
62+
| "xaiApiKey"
6263
type GlobalStateKey =
6364
| "apiProvider"
6465
| "apiModelId"
@@ -593,6 +594,7 @@ export class ClineProvider implements vscode.WebviewViewProvider {
593594
liteLlmModelId,
594595
liteLlmApiKey,
595596
qwenApiLine,
597+
xaiApiKey,
596598
} = message.apiConfiguration
597599
await this.updateGlobalState("apiProvider", apiProvider)
598600
await this.updateGlobalState("apiModelId", apiModelId)
@@ -624,6 +626,7 @@ export class ClineProvider implements vscode.WebviewViewProvider {
624626
await this.storeSecret("qwenApiKey", qwenApiKey)
625627
await this.storeSecret("mistralApiKey", mistralApiKey)
626628
await this.storeSecret("liteLlmApiKey", liteLlmApiKey)
629+
await this.storeSecret("xaiApiKey", xaiApiKey)
627630
await this.updateGlobalState("azureApiVersion", azureApiVersion)
628631
await this.updateGlobalState("openRouterModelId", openRouterModelId)
629632
await this.updateGlobalState("openRouterModelInfo", openRouterModelInfo)
@@ -1879,6 +1882,7 @@ Here is the project's README to help you get started:\n\n${mcpDetails.readmeCont
18791882
qwenApiLine,
18801883
liteLlmApiKey,
18811884
telemetrySetting,
1885+
xaiApiKey,
18821886
] = await Promise.all([
18831887
this.getGlobalState("apiProvider") as Promise<ApiProvider | undefined>,
18841888
this.getGlobalState("apiModelId") as Promise<string | undefined>,
@@ -1931,6 +1935,7 @@ Here is the project's README to help you get started:\n\n${mcpDetails.readmeCont
19311935
this.getGlobalState("qwenApiLine") as Promise<string | undefined>,
19321936
this.getSecret("liteLlmApiKey") as Promise<string | undefined>,
19331937
this.getGlobalState("telemetrySetting") as Promise<TelemetrySetting | undefined>,
1938+
this.getSecret("xaiApiKey") as Promise<string | undefined>,
19341939
])
19351940

19361941
let apiProvider: ApiProvider
@@ -1995,6 +2000,7 @@ Here is the project's README to help you get started:\n\n${mcpDetails.readmeCont
19952000
liteLlmBaseUrl,
19962001
liteLlmModelId,
19972002
liteLlmApiKey,
2003+
xaiApiKey,
19982004
},
19992005
lastShownAnnouncementId,
20002006
customInstructions,
@@ -2138,6 +2144,7 @@ Here is the project's README to help you get started:\n\n${mcpDetails.readmeCont
21382144
"mistralApiKey",
21392145
"liteLlmApiKey",
21402146
"authToken",
2147+
"xaiApiKey",
21412148
]
21422149
for (const key of secretKeys) {
21432150
await this.storeSecret(key, undefined)

src/shared/api.ts

Lines changed: 81 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -15,6 +15,7 @@ export type ApiProvider =
1515
| "mistral"
1616
| "vscode-lm"
1717
| "litellm"
18+
| "xai"
1819

1920
export interface ApiHandlerOptions {
2021
apiModelId?: string
@@ -56,6 +57,7 @@ export interface ApiHandlerOptions {
5657
vsCodeLmModelSelector?: any
5758
o3MiniReasoningEffort?: string
5859
qwenApiLine?: string
60+
xaiApiKey?: string
5961
}
6062

6163
export type ApiConfiguration = ApiHandlerOptions & {
@@ -799,3 +801,82 @@ export const liteLlmModelInfoSaneDefaults: ModelInfo = {
799801
inputPrice: 0,
800802
outputPrice: 0,
801803
}
804+
805+
// X AI
// https://docs.x.ai/docs/api-reference
// Union of the supported xAI model identifiers, derived from the keys of `xaiModels`.
export type XAIModelId = keyof typeof xaiModels
// Fallback model used when the configured `apiModelId` is missing or unknown.
export const xaiDefaultModelId: XAIModelId = "grok-2-latest"
// Static catalog of xAI models and their capabilities.
// Key order matters: it determines the order of entries in the settings dropdown.
// Prices are presumably USD per 1M tokens, matching the other model tables in
// this file — TODO confirm against current xAI pricing.
export const xaiModels = {
	"grok-2-latest": {
		maxTokens: 8192,
		contextWindow: 131072,
		supportsImages: false,
		supportsPromptCache: false,
		inputPrice: 2.0,
		outputPrice: 10.0,
		description: "X AI's Grok-2 model - latest version with 131K context window",
	},
	"grok-2": {
		maxTokens: 8192,
		contextWindow: 131072,
		supportsImages: false,
		supportsPromptCache: false,
		inputPrice: 2.0,
		outputPrice: 10.0,
		description: "X AI's Grok-2 model with 131K context window",
	},
	"grok-2-1212": {
		maxTokens: 8192,
		contextWindow: 131072,
		supportsImages: false,
		supportsPromptCache: false,
		inputPrice: 2.0,
		outputPrice: 10.0,
		description: "X AI's Grok-2 model (version 1212) with 131K context window",
	},
	// Vision-capable variants have a smaller (32K) context window.
	"grok-2-vision-latest": {
		maxTokens: 8192,
		contextWindow: 32768,
		supportsImages: true,
		supportsPromptCache: false,
		inputPrice: 2.0,
		outputPrice: 10.0,
		description: "X AI's Grok-2 Vision model - latest version with image support and 32K context window",
	},
	"grok-2-vision": {
		maxTokens: 8192,
		contextWindow: 32768,
		supportsImages: true,
		supportsPromptCache: false,
		inputPrice: 2.0,
		outputPrice: 10.0,
		description: "X AI's Grok-2 Vision model with image support and 32K context window",
	},
	"grok-2-vision-1212": {
		maxTokens: 8192,
		contextWindow: 32768,
		supportsImages: true,
		supportsPromptCache: false,
		inputPrice: 2.0,
		outputPrice: 10.0,
		description: "X AI's Grok-2 Vision model (version 1212) with image support and 32K context window",
	},
	// Legacy beta models, retained for backward compatibility; note the higher prices.
	"grok-vision-beta": {
		maxTokens: 8192,
		contextWindow: 8192,
		supportsImages: true,
		supportsPromptCache: false,
		inputPrice: 5.0,
		outputPrice: 15.0,
		description: "X AI's Grok Vision Beta model with image support and 8K context window",
	},
	"grok-beta": {
		maxTokens: 8192,
		contextWindow: 131072,
		supportsImages: false,
		supportsPromptCache: false,
		inputPrice: 5.0,
		outputPrice: 15.0,
		description: "X AI's Grok Beta model (legacy) with 131K context window",
	},
	// `satisfies` validates every entry against ModelInfo while keeping the
	// literal key/value types (so XAIModelId stays a union of exact model names).
} as const satisfies Record<string, ModelInfo>

webview-ui/src/components/settings/ApiOptions.tsx

Lines changed: 46 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -33,6 +33,8 @@ import {
3333
openRouterDefaultModelInfo,
3434
vertexDefaultModelId,
3535
vertexModels,
36+
xaiDefaultModelId,
37+
xaiModels,
3638
} from "../../../../src/shared/api"
3739
import { ExtensionMessage } from "../../../../src/shared/ExtensionMessage"
3840
import { useExtensionState } from "../../context/ExtensionStateContext"
@@ -195,6 +197,7 @@ const ApiOptions = ({ showModelOptions, apiErrorMessage, modelIdErrorMessage, is
195197
<VSCodeOption value="lmstudio">LM Studio</VSCodeOption>
196198
<VSCodeOption value="ollama">Ollama</VSCodeOption>
197199
<VSCodeOption value="litellm">LiteLLM</VSCodeOption>
200+
<VSCodeOption value="xai">X AI</VSCodeOption>
198201
</VSCodeDropdown>
199202
</DropdownContainer>
200203

@@ -1122,6 +1125,46 @@ const ApiOptions = ({ showModelOptions, apiErrorMessage, modelIdErrorMessage, is
11221125
</div>
11231126
)}
11241127

1128+
{selectedProvider === "xai" && (
1129+
<div>
1130+
<VSCodeTextField
1131+
value={apiConfiguration?.xaiApiKey || ""}
1132+
style={{ width: "100%" }}
1133+
type="password"
1134+
onInput={handleInputChange("xaiApiKey")}
1135+
placeholder="Enter API Key...">
1136+
<span style={{ fontWeight: 500 }}>X AI API Key</span>
1137+
</VSCodeTextField>
1138+
<p
1139+
style={{
1140+
fontSize: "12px",
1141+
marginTop: 3,
1142+
color: "var(--vscode-descriptionForeground)",
1143+
}}>
1144+
This key is stored locally and only used to make API requests from this extension.
1145+
{!apiConfiguration?.xaiApiKey && (
1146+
<VSCodeLink href="https://x.ai" style={{ display: "inline", fontSize: "inherit" }}>
1147+
You can get an X AI API key by signing up here.
1148+
</VSCodeLink>
1149+
)}
1150+
</p>
1151+
{/* Note: To fully implement this, you would need to add a handler in ClineProvider.ts */}
1152+
{/* {apiConfiguration?.xaiApiKey && (
1153+
<button
1154+
onClick={() => {
1155+
vscode.postMessage({
1156+
type: "requestXAIModels",
1157+
text: apiConfiguration?.xaiApiKey,
1158+
})
1159+
}}
1160+
style={{ margin: "5px 0 0 0" }}
1161+
className="vscode-button">
1162+
Fetch Available Models
1163+
</button>
1164+
)} */}
1165+
</div>
1166+
)}
1167+
11251168
{apiErrorMessage && (
11261169
<p
11271170
style={{
@@ -1152,6 +1195,7 @@ const ApiOptions = ({ showModelOptions, apiErrorMessage, modelIdErrorMessage, is
11521195
{selectedProvider === "deepseek" && createDropdown(deepSeekModels)}
11531196
{selectedProvider === "qwen" && createDropdown(qwenModels)}
11541197
{selectedProvider === "mistral" && createDropdown(mistralModels)}
1198+
{selectedProvider === "xai" && createDropdown(xaiModels)}
11551199
</DropdownContainer>
11561200

11571201
<ModelInfoView
@@ -1403,6 +1447,8 @@ export function normalizeApiConfiguration(apiConfiguration?: ApiConfiguration):
14031447
selectedModelId: apiConfiguration?.liteLlmModelId || "",
14041448
selectedModelInfo: openAiModelInfoSaneDefaults,
14051449
}
1450+
case "xai":
1451+
return getProviderData(xaiModels, xaiDefaultModelId)
14061452
default:
14071453
return getProviderData(anthropicModels, anthropicDefaultModelId)
14081454
}

webview-ui/src/context/ExtensionStateContext.tsx

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -79,6 +79,7 @@ export const ExtensionStateContextProvider: React.FC<{
7979
config.qwenApiKey,
8080
config.mistralApiKey,
8181
config.vsCodeLmModelSelector,
82+
config.xaiApiKey,
8283
].some((key) => key !== undefined)
8384
: false
8485
setShowWelcome(!hasKey)

webview-ui/src/utils/validate.ts

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -38,6 +38,11 @@ export function validateApiConfiguration(apiConfiguration?: ApiConfiguration): s
3838
return "You must provide a valid API key or choose a different provider."
3939
}
4040
break
41+
case "xai":
42+
if (!apiConfiguration.xaiApiKey) {
43+
return "You must provide a valid API key or choose a different provider."
44+
}
45+
break
4146
case "qwen":
4247
if (!apiConfiguration.qwenApiKey) {
4348
return "You must provide a valid API key or choose a different provider."

0 commit comments

Comments
 (0)