13 files changed: +115 −5 lines changed.
The example env file registers the new xAI key alongside the existing providers:

```diff
@@ -19,3 +19,4 @@ GROQ_API_KEY=
 MISTRAL_API_KEY=
 PERPLEXITY_API_KEY=
 TOGETHER_API_KEY=
+XAI_API_KEY=
```
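Once the variable is set, consumers read it from the environment. A minimal sketch of that flow, assuming a standard dotenv setup (dotenv itself is not part of this diff):

```ts
import 'dotenv/config'; // assumption: dotenv loads the .env file shown above

const xaiKey = process.env.XAI_API_KEY;
if (!xaiKey) {
	throw new Error('XAI_API_KEY is not set');
}
```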
One copy of the provider constants and model catalog adds the `X_AI` constant (plus an `OLLAMA` constant this copy was missing) and a `grok-beta` entry with tool-support metadata:

```diff
@@ -90,6 +90,8 @@ export const PERPLEXITY: string = 'Perplexity';
 export const DEEPINFRA: string = 'deepinfra';
 export const BEDROCK: string = 'bedrock';
 export const AZURE_OPEN_AI: string = 'azure-openai';
+export const X_AI: string = 'xAI';
+export const OLLAMA: string = 'ollama';
 
 interface Model {
 	id: string;
@@ -570,6 +572,18 @@ export const modelsByProvider: ModelsByProviderInclCosts = {
 			promptCost: 0.2,
 			completionCost: 0.2
 		}
+	],
+	[X_AI]: [
+		{
+			id: 'grok-beta',
+			provider: X_AI,
+			promptCost: 5,
+			completionCost: 15,
+			toolSupport: {
+				toolChoice: true,
+				parallelToolCalls: false
+			}
+		}
 	]
 };
```
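The `toolSupport` block records per-model capability flags, which lets a dispatcher avoid sending options a model cannot honor. A standalone sketch of that gating (the helper and its use are assumptions, not code from this PR):

```ts
// Shape mirroring the toolSupport metadata added above.
interface ToolSupport {
	toolChoice: boolean;
	parallelToolCalls: boolean;
}

// grok-beta's flags from the catalog entry: tool_choice is supported,
// parallel tool calls are not.
const grokBetaToolSupport: ToolSupport = {
	toolChoice: true,
	parallelToolCalls: false
};

// Hypothetical gating helper: drop request options the model can't honor.
function gateToolParams(
	params: { tool_choice?: string; parallel_tool_calls?: boolean },
	support: ToolSupport
) {
	if (!support.toolChoice) delete params.tool_choice;
	if (!support.parallelToolCalls) delete params.parallel_tool_calls;
	return params;
}

const gated = gateToolParams(
	{ tool_choice: 'auto', parallel_tool_calls: true },
	grokBetaToolSupport
);
console.log(gated); // => { tool_choice: 'auto' } (parallel_tool_calls removed)
```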
A second copy of the models data gets the same constant and a cost-only `grok-beta` entry (no tool metadata in this copy):

```diff
@@ -11,6 +11,7 @@ export const DEEPINFRA: string = 'deepinfra';
 export const BEDROCK: string = 'bedrock';
 export const AZURE_OPEN_AI: string = 'azure-openai';
 export const OLLAMA: string = 'ollama';
+export const X_AI: string = 'xAI';
 
 interface Model {
 	id: string;
@@ -511,6 +512,14 @@ export const modelsByProvider: ModelsByProviderInclCosts = {
 			promptCost: 1,
 			completionCost: 3
 		}
+	],
+	[X_AI]: [
+		{
+			id: 'grok-beta',
+			provider: X_AI,
+			promptCost: 5,
+			completionCost: 15
+		}
 	]
 };
```
The LLM dispatcher imports the new constant and its caller, and routes xAI requests to it:

```diff
@@ -7,7 +7,8 @@
 	OLLAMA,
 	OPEN_AI,
 	PERPLEXITY,
-	TOGETHER_AI
+	TOGETHER_AI,
+	X_AI
 } from '@/dev/data/models';
 
 import { addContextFromMemory } from '@/utils/memory/lib';
@@ -25,6 +26,7 @@ import { callOllama } from './call-ollama';
 import { callOpenAI } from './call-openai';
 import { callPerplexity } from './call-perplexity';
 import { callTogether } from './call-together';
+import { callXAI } from './call-xai';
 
 export async function callLLM({
 	pipe,
@@ -110,6 +112,16 @@ export async function callLLM({
 		});
 	}
 
+	if (modelProvider === X_AI) {
+		dlog('XAI', '✅');
+		return await callXAI({
+			pipe,
+			messages,
+			llmApiKey,
+			stream
+		});
+	}
+
 	if (modelProvider === COHERE) {
 		dlog('COHERE', '✅');
 		return await callCohere({
```
A new file implements the xAI caller. xAI exposes an OpenAI-compatible API, so the handler reuses the `openai` SDK pointed at `https://api.x.ai/v1`:

```diff
@@ -0,0 +1,50 @@
+import OpenAI from 'openai';
+import { dlog } from '../utils/dlog';
+import { X_AI } from '../data/models';
+import { handleLlmError } from './utils';
+import type { Message } from 'types/pipe';
+import type { ModelParams } from 'types/providers';
+import { addToolsToParams } from '../utils/add-tools-to-params';
+
+export async function callXAI({
+	pipe,
+	stream,
+	llmApiKey,
+	messages
+}: {
+	pipe: any;
+	stream: boolean;
+	llmApiKey: string;
+	messages: Message[];
+}) {
+	try {
+		const modelParams = buildModelParams(pipe, stream, messages);
+
+		// xAI is OpenAI API-compatible; point the OpenAI SDK at api.x.ai.
+		const xai = new OpenAI({
+			apiKey: llmApiKey,
+			baseURL: 'https://api.x.ai/v1'
+		});
+
+		// Add tools (functions) to modelParams.
+		addToolsToParams(modelParams, pipe);
+		dlog('modelParams', modelParams);
+
+		return await xai.chat.completions.create(modelParams as any);
+	} catch (error: any) {
+		handleLlmError({ error, provider: X_AI });
+	}
+}
+
+function buildModelParams(
+	pipe: any,
+	stream: boolean,
+	messages: Message[]
+): ModelParams {
+	return {
+		messages,
+		stream,
+		model: pipe.model.name,
+		...pipe.model.params
+	};
+}
```
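For context, a minimal invocation sketch. The `pipe` shape below is an assumption inferred from what `buildModelParams` reads (`model.name`, `model.params`), and the message literal assumes the usual OpenAI-style role/content shape; neither is a documented contract here:

```ts
import { callXAI } from './call-xai';

// Hypothetical pipe: only the fields buildModelParams actually reads.
const pipe = {
	model: {
		name: 'grok-beta',
		params: { temperature: 0.7, max_tokens: 256 }
	}
};

const completion = await callXAI({
	pipe,
	stream: false,
	llmApiKey: process.env.XAI_API_KEY ?? '',
	messages: [{ role: 'user', content: 'Hello, Grok!' }]
});
console.log(completion);
```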
One copy of the API-key resolver maps the provider to its env var:

```diff
@@ -6,7 +6,8 @@
 	GROQ,
 	OPEN_AI,
 	PERPLEXITY,
-	TOGETHER_AI
+	TOGETHER_AI,
+	X_AI
 } from '@/dev/data/models';
 
 export function getLLMApiKey(modelProvider: string): string {
@@ -27,6 +28,8 @@ export function getLLMApiKey(modelProvider: string): string {
 			return process.env.FIREWORKS_API_KEY || '';
 		case modelProvider.includes(PERPLEXITY):
 			return process.env.PERPLEXITY_API_KEY || '';
+		case modelProvider.includes(X_AI):
+			return process.env.XAI_API_KEY || '';
 		default:
 			throw new Error(`Unsupported model provider: ${modelProvider}`);
 	}
```
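Note the `case modelProvider.includes(...)` form: it only behaves as intended inside a `switch (true)`, where each case expression is compared against `true`. The switch header sits outside the visible hunk, so this standalone sketch of the pattern is an assumption about the surrounding code:

```ts
function getKey(modelProvider: string): string {
	// switch (true) runs the first case whose expression evaluates to true.
	switch (true) {
		case modelProvider.includes('xAI'):
			return process.env.XAI_API_KEY || '';
		default:
			throw new Error(`Unsupported model provider: ${modelProvider}`);
	}
}

// getKey('xAI') resolves via the first case to process.env.XAI_API_KEY
// (or '' when the variable is unset).
```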
An embedded `.env` template string gains the same key:

```diff
@@ -292,6 +292,7 @@ GROQ_API_KEY=
 MISTRAL_API_KEY=
 PERPLEXITY_API_KEY=
 TOGETHER_API_KEY=
+XAI_API_KEY=
 `;
 
 try {
```
The provider-string normalizer folds the stray `OLLAMA` import into the grouped import and accepts `xai` as a provider string:

```diff
@@ -1,14 +1,15 @@
-import { OLLAMA } from '@/dev/data/models';
 import type { Pipe, PipeOld } from './../../types/pipe';
 import {
 	ANTHROPIC,
 	COHERE,
 	FIREWORKS_AI,
 	GOOGLE,
 	GROQ,
+	OLLAMA,
 	OPEN_AI,
 	PERPLEXITY,
-	TOGETHER_AI
+	TOGETHER_AI,
+	X_AI
 } from './../data/models';
 
 type Provider =
@@ -102,7 +103,8 @@ function getProvider(providerString: string): Provider {
 		cohere: COHERE,
 		fireworks: FIREWORKS_AI,
 		perplexity: PERPLEXITY,
-		ollama: OLLAMA
+		ollama: OLLAMA,
+		xai: X_AI
 	};
 
 	const provider = providerMap[providerString.toLowerCase()];
```
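Because the lookup happens after `toLowerCase()`, any casing of `xai` resolves to the canonical `'xAI'` constant. A standalone sketch of that normalization step (the error branch is an assumption; the real `getProvider`'s failure handling is outside the visible hunk):

```ts
// Standalone mirror of the normalization shown above.
const providerMap: Record<string, string> = {
	ollama: 'ollama',
	xai: 'xAI'
};

function getProvider(providerString: string): string {
	const provider = providerMap[providerString.toLowerCase()];
	if (!provider) {
		// Assumption: unknown provider strings are an error.
		throw new Error(`Unknown provider: ${providerString}`);
	}
	return provider;
}

console.log(getProvider('XAI')); // => 'xAI'
```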
A third copy of the models data (this one with trailing commas) receives the same additions:

```diff
@@ -11,6 +11,7 @@ export const DEEPINFRA: string = 'deepinfra';
 export const BEDROCK: string = 'bedrock';
 export const AZURE_OPEN_AI: string = 'azure-openai';
 export const OLLAMA: string = 'ollama';
+export const X_AI: string = 'xAI';
 
 interface Model {
 	id: string;
@@ -400,6 +401,14 @@ export const modelsByProvider: ModelsByProviderInclCosts = {
 			completionCost: 3,
 		},
 	],
+	[X_AI]: [
+		{
+			id: 'grok-beta',
+			provider: X_AI,
+			promptCost: 5,
+			completionCost: 15,
+		},
+	],
 };
 
 export const jsonModeModels = [
```
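Pricing note: the 5/15 pair for `grok-beta` matches xAI's published per-million-token pricing, so `promptCost` and `completionCost` are presumably USD per 1M tokens (the unit is not stated in the diff). A worked cost under that assumption:

```ts
// Assumption: promptCost / completionCost are USD per 1,000,000 tokens.
const promptCost = 5;      // grok-beta input
const completionCost = 15; // grok-beta output

const promptTokens = 1_000;
const completionTokens = 500;

const costUsd =
	(promptTokens / 1_000_000) * promptCost +
	(completionTokens / 1_000_000) * completionCost;

console.log(costUsd.toFixed(4)); // "0.0125" (1.25 cents)
```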
A second copy of the API-key resolver gets the matching case:

```diff
@@ -8,6 +8,7 @@
 	OPEN_AI,
 	PERPLEXITY,
 	TOGETHER_AI,
+	X_AI,
 } from '../data/models';
 
 export function getLLMApiKey(modelProvider: string): string {
@@ -30,6 +31,8 @@ export function getLLMApiKey(modelProvider: string): string {
 			return process.env.PERPLEXITY_API_KEY || '';
 		case modelProvider.includes(OLLAMA):
 			return process.env.OLLAMA_API_KEY || '';
+		case modelProvider.includes(X_AI):
+			return process.env.XAI_API_KEY || '';
 
 		default:
 			throw new Error(`Unsupported model provider: ${modelProvider}`);
```