File tree Expand file tree Collapse file tree 13 files changed +115
-5
lines changed
Expand file tree Collapse file tree 13 files changed +115
-5
lines changed Original file line number Diff line number Diff line change @@ -19,3 +19,4 @@ GROQ_API_KEY=
1919MISTRAL_API_KEY=
2020PERPLEXITY_API_KEY=
2121TOGETHER_API_KEY=
22+ XAI_API_KEY=
Original file line number Diff line number Diff line change @@ -90,6 +90,8 @@ export const PERPLEXITY: string = 'Perplexity';
9090export const DEEPINFRA : string = 'deepinfra' ;
9191export const BEDROCK : string = 'bedrock' ;
9292export const AZURE_OPEN_AI : string = 'azure-openai' ;
93+ export const X_AI : string = 'xAI' ;
94+ export const OLLAMA : string = 'ollama' ;
9395
9496interface Model {
9597 id : string ;
@@ -570,6 +572,18 @@ export const modelsByProvider: ModelsByProviderInclCosts = {
570572 promptCost : 0.2 ,
571573 completionCost : 0.2
572574 }
575+ ] ,
576+ [ X_AI ] : [
577+ {
578+ id : 'grok-beta' ,
579+ provider : X_AI ,
580+ promptCost : 5 ,
581+ completionCost : 15 ,
582+ toolSupport : {
583+ toolChoice : true ,
584+ parallelToolCalls : false
585+ }
586+ }
573587 ]
574588} ;
575589
Original file line number Diff line number Diff line change @@ -11,6 +11,7 @@ export const DEEPINFRA: string = 'deepinfra';
1111export const BEDROCK : string = 'bedrock' ;
1212export const AZURE_OPEN_AI : string = 'azure-openai' ;
1313export const OLLAMA : string = 'ollama' ;
14+ export const X_AI : string = 'xAI' ;
1415
1516interface Model {
1617 id : string ;
@@ -511,6 +512,14 @@ export const modelsByProvider: ModelsByProviderInclCosts = {
511512 promptCost : 1 ,
512513 completionCost : 3
513514 }
515+ ] ,
516+ [ X_AI ] : [
517+ {
518+ id : 'grok-beta' ,
519+ provider : X_AI ,
520+ promptCost : 5 ,
521+ completionCost : 15
522+ }
514523 ]
515524} ;
516525
Original file line number Diff line number Diff line change 77 OLLAMA ,
88 OPEN_AI ,
99 PERPLEXITY ,
10- TOGETHER_AI
10+ TOGETHER_AI ,
11+ X_AI
1112} from '@/dev/data/models' ;
1213
1314import { addContextFromMemory } from '@/utils/memory/lib' ;
@@ -25,6 +26,7 @@ import { callOllama } from './call-ollama';
2526import { callOpenAI } from './call-openai' ;
2627import { callPerplexity } from './call-perplexity' ;
2728import { callTogether } from './call-together' ;
29+ import { callXAI } from './call-xai' ;
2830
2931export async function callLLM ( {
3032 pipe,
@@ -110,6 +112,16 @@ export async function callLLM({
110112 } ) ;
111113 }
112114
115+ if ( modelProvider === X_AI ) {
116+ dlog ( 'XAI' , 'β
' ) ;
117+ return await callXAI ( {
118+ pipe,
119+ messages,
120+ llmApiKey,
121+ stream
122+ } ) ;
123+ }
124+
113125 if ( modelProvider === COHERE ) {
114126 dlog ( 'COHERE' , 'β
' ) ;
115127 return await callCohere ( {
Original file line number Diff line number Diff line change 1+ import OpenAI from 'openai' ;
2+ import { dlog } from '../utils/dlog' ;
3+ import { X_AI } from '../data/models' ;
4+ import { handleLlmError } from './utils' ;
5+ import type { Message } from 'types/pipe' ;
6+ import type { ModelParams } from 'types/providers' ;
7+ import { addToolsToParams } from '../utils/add-tools-to-params' ;
8+
9+ export async function callXAI ( {
10+ pipe,
11+ stream,
12+ llmApiKey,
13+ messages
14+ } : {
15+ pipe : any ;
16+ stream : boolean ;
17+ llmApiKey : string ;
18+ messages : Message [ ] ;
19+ } ) {
20+ try {
21+ const modelParams = buildModelParams ( pipe , stream , messages ) ;
22+
23+ // LLM.
24+ const groq = new OpenAI ( {
25+ apiKey : llmApiKey ,
26+ baseURL : 'https://api.x.ai/v1'
27+ } ) ;
28+
29+ // Add tools (functions) to modelParams
30+ addToolsToParams ( modelParams , pipe ) ;
31+ dlog ( 'modelParams' , modelParams ) ;
32+
33+ return await groq . chat . completions . create ( modelParams as any ) ;
34+ } catch ( error : any ) {
35+ handleLlmError ( { error, provider : X_AI } ) ;
36+ }
37+ }
38+
39+ function buildModelParams (
40+ pipe : any ,
41+ stream : boolean ,
42+ messages : Message [ ]
43+ ) : ModelParams {
44+ return {
45+ messages,
46+ stream,
47+ model : pipe . model . name ,
48+ ...pipe . model . params
49+ } ;
50+ }
Original file line number Diff line number Diff line change 66 GROQ ,
77 OPEN_AI ,
88 PERPLEXITY ,
9- TOGETHER_AI
9+ TOGETHER_AI ,
10+ X_AI
1011} from '@/dev/data/models' ;
1112
1213export function getLLMApiKey ( modelProvider : string ) : string {
@@ -27,6 +28,8 @@ export function getLLMApiKey(modelProvider: string): string {
2728 return process . env . FIREWORKS_API_KEY || '' ;
2829 case modelProvider . includes ( PERPLEXITY ) :
2930 return process . env . PERPLEXITY_API_KEY || '' ;
31+ case modelProvider . includes ( X_AI ) :
32+ return process . env . XAI_API_KEY || '' ;
3033 default :
3134 throw new Error ( `Unsupported model provider: ${ modelProvider } ` ) ;
3235 }
Original file line number Diff line number Diff line change @@ -292,6 +292,7 @@ GROQ_API_KEY=
292292MISTRAL_API_KEY=
293293PERPLEXITY_API_KEY=
294294TOGETHER_API_KEY=
295+ XAI_API_KEY=
295296` ;
296297
297298 try {
Original file line number Diff line number Diff line change 1- import { OLLAMA } from '@/dev/data/models' ;
21import type { Pipe , PipeOld } from './../../types/pipe' ;
32import {
43 ANTHROPIC ,
54 COHERE ,
65 FIREWORKS_AI ,
76 GOOGLE ,
87 GROQ ,
8+ OLLAMA ,
99 OPEN_AI ,
1010 PERPLEXITY ,
11- TOGETHER_AI
11+ TOGETHER_AI ,
12+ X_AI
1213} from './../data/models' ;
1314
1415type Provider =
@@ -102,7 +103,8 @@ function getProvider(providerString: string): Provider {
102103 cohere : COHERE ,
103104 fireworks : FIREWORKS_AI ,
104105 perplexity : PERPLEXITY ,
105- ollama : OLLAMA
106+ ollama : OLLAMA ,
107+ xai : X_AI
106108 } ;
107109
108110 const provider = providerMap [ providerString . toLowerCase ( ) ] ;
Original file line number Diff line number Diff line change @@ -11,6 +11,7 @@ export const DEEPINFRA: string = 'deepinfra';
1111export const BEDROCK : string = 'bedrock' ;
1212export const AZURE_OPEN_AI : string = 'azure-openai' ;
1313export const OLLAMA : string = 'ollama' ;
14+ export const X_AI : string = 'xAI' ;
1415
1516interface Model {
1617 id : string ;
@@ -400,6 +401,14 @@ export const modelsByProvider: ModelsByProviderInclCosts = {
400401 completionCost : 3 ,
401402 } ,
402403 ] ,
404+ [ X_AI ] : [
405+ {
406+ id : 'grok-beta' ,
407+ provider : X_AI ,
408+ promptCost : 5 ,
409+ completionCost : 15 ,
410+ } ,
411+ ] ,
403412} ;
404413
405414export const jsonModeModels = [
Original file line number Diff line number Diff line change 88 OPEN_AI ,
99 PERPLEXITY ,
1010 TOGETHER_AI ,
11+ X_AI ,
1112} from '../data/models' ;
1213
1314export function getLLMApiKey ( modelProvider : string ) : string {
@@ -30,6 +31,8 @@ export function getLLMApiKey(modelProvider: string): string {
3031 return process . env . PERPLEXITY_API_KEY || '' ;
3132 case modelProvider . includes ( OLLAMA ) :
3233 return process . env . OLLAMA_API_KEY || '' ;
34+ case modelProvider . includes ( X_AI ) :
35+ return process . env . XAI_API_KEY || '' ;
3336
3437 default :
3538 throw new Error ( `Unsupported model provider: ${ modelProvider } ` ) ;
You can't perform that action at this time.
0 commit comments