Skip to content

Commit db7cd7b

Browse files
authored
Merge pull request #1235 from shiwo6324/feat/302AI-integration
feat: add 302AI provider integration
2 parents 1a1b881 + b599b18 commit db7cd7b

File tree

5 files changed

+194
-0
lines changed

5 files changed

+194
-0
lines changed

src/globals.ts

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -99,6 +99,7 @@ export const BYTEZ: string = 'bytez';
9999
export const FEATHERLESS_AI: string = 'featherless-ai';
100100
export const KRUTRIM: string = 'krutrim';
101101
export const QDRANT: string = 'qdrant';
102+
export const THREE_ZERO_TWO_AI: string = '302ai';
102103

103104
export const VALID_PROVIDERS = [
104105
ANTHROPIC,
@@ -163,6 +164,7 @@ export const VALID_PROVIDERS = [
163164
FEATHERLESS_AI,
164165
KRUTRIM,
165166
QDRANT,
167+
THREE_ZERO_TWO_AI,
166168
];
167169

168170
export const CONTENT_TYPES = {

src/providers/302ai/api.ts

Lines changed: 18 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,18 @@
1+
import { ProviderAPIConfig } from '../types';
2+
3+
const AI302APIConfig: ProviderAPIConfig = {
4+
getBaseURL: () => 'https://api.302.ai',
5+
headers: ({ providerOptions }) => {
6+
return { Authorization: `Bearer ${providerOptions.apiKey}` };
7+
},
8+
getEndpoint: ({ fn }) => {
9+
switch (fn) {
10+
case 'chatComplete':
11+
return '/v1/chat/completions';
12+
default:
13+
return '';
14+
}
15+
},
16+
};
17+
18+
export default AI302APIConfig;
src/providers/302ai/chatComplete.ts

Lines changed: 154 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,154 @@
1+
import { THREE_ZERO_TWO_AI } from '../../globals';
2+
import { OpenAIErrorResponseTransform } from '../openai/utils';
3+
import {
4+
ChatCompletionResponse,
5+
ErrorResponse,
6+
ProviderConfig,
7+
} from '../types';
8+
import { generateInvalidProviderResponseError } from '../utils';
9+
10+
/**
 * Request-parameter mapping for 302.AI chat completions.
 * Keys are the incoming Portkey request fields; `param` is the field name
 * forwarded upstream (302.AI follows the OpenAI schema, so they match 1:1).
 * `default`, `min`, and `max` mirror the OpenAI parameter conventions.
 */
export const AI302ChatCompleteConfig: ProviderConfig = {
  model: {
    param: 'model',
    required: true,
    default: 'gpt-3.5-turbo',
  },
  messages: {
    param: 'messages',
    default: '',
  },
  max_tokens: {
    param: 'max_tokens',
    default: 100,
    min: 0,
  },
  temperature: {
    param: 'temperature',
    default: 1,
    min: 0,
    max: 2,
  },
  top_p: {
    param: 'top_p',
    default: 1,
    min: 0,
    max: 1,
  },
  stream: {
    param: 'stream',
    default: false,
  },
  // Penalty ranges follow the OpenAI [-2, 2] convention.
  frequency_penalty: {
    param: 'frequency_penalty',
    default: 0,
    min: -2,
    max: 2,
  },
  presence_penalty: {
    param: 'presence_penalty',
    default: 0,
    min: -2,
    max: 2,
  },
  stop: {
    param: 'stop',
    default: null,
  },
};
58+
59+
/**
 * Non-streaming chat completion payload returned by 302.AI.
 * Mirrors the OpenAI response shape. `usage` is optional: when the
 * upstream omits it, the response transform substitutes zero counts.
 */
interface AI302ChatCompleteResponse extends ChatCompletionResponse {
  id: string;
  object: string;
  created: number;
  model: string;
  usage?: {
    prompt_tokens: number;
    completion_tokens: number;
    total_tokens: number;
  };
}
70+
71+
/**
 * One parsed SSE JSON chunk emitted by 302.AI during streaming.
 * `delta` carries the incremental role/content for a choice;
 * `finish_reason` may be null (presumably until the final chunk for a
 * choice, per OpenAI streaming convention — TODO confirm against 302.AI).
 */
interface AI302StreamChunk {
  id: string;
  object: string;
  created: number;
  model: string;
  choices: {
    delta: {
      role?: string | null;
      content?: string;
    };
    index: number;
    finish_reason: string | null;
  }[];
}
85+
86+
export const AI302ChatCompleteResponseTransform: (
87+
response: AI302ChatCompleteResponse | ErrorResponse,
88+
responseStatus: number
89+
) => ChatCompletionResponse | ErrorResponse = (response, responseStatus) => {
90+
if ('error' in response && responseStatus !== 200) {
91+
return OpenAIErrorResponseTransform(response, THREE_ZERO_TWO_AI);
92+
}
93+
94+
if ('choices' in response) {
95+
return {
96+
id: response.id,
97+
object: response.object,
98+
created: response.created,
99+
model: response.model,
100+
provider: THREE_ZERO_TWO_AI,
101+
choices: response.choices.map((c) => ({
102+
index: c.index,
103+
message: {
104+
role: c.message.role,
105+
content: c.message.content,
106+
},
107+
finish_reason: c.finish_reason,
108+
})),
109+
usage: {
110+
prompt_tokens: response.usage?.prompt_tokens || 0,
111+
completion_tokens: response.usage?.completion_tokens || 0,
112+
total_tokens: response.usage?.total_tokens || 0,
113+
},
114+
};
115+
}
116+
117+
return generateInvalidProviderResponseError(response, THREE_ZERO_TWO_AI);
118+
};
119+
120+
export const AI302ChatCompleteStreamChunkTransform: (
121+
response: string
122+
) => string = (responseChunk) => {
123+
let chunk = responseChunk.trim();
124+
chunk = chunk.replace(/^data: /, '');
125+
chunk = chunk.trim();
126+
127+
if (chunk === '[DONE]') {
128+
return `data: ${chunk}\n\n`;
129+
}
130+
131+
try {
132+
const parsedChunk: AI302StreamChunk = JSON.parse(chunk);
133+
134+
return (
135+
`data: ${JSON.stringify({
136+
id: parsedChunk.id,
137+
object: parsedChunk.object,
138+
created: parsedChunk.created,
139+
model: parsedChunk.model,
140+
provider: THREE_ZERO_TWO_AI,
141+
choices: [
142+
{
143+
index: parsedChunk.choices[0]?.index ?? 0,
144+
delta: parsedChunk.choices[0]?.delta ?? {},
145+
finish_reason: parsedChunk.choices[0]?.finish_reason ?? null,
146+
},
147+
],
148+
})}` + '\n\n'
149+
);
150+
} catch (error) {
151+
console.error('Error parsing 302AI stream chunk:', error);
152+
return `data: ${chunk}\n\n`;
153+
}
154+
};

src/providers/302ai/index.ts

Lines changed: 18 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,18 @@
1+
import { ProviderConfigs } from '../types';
import AI302APIConfig from './api';
import {
  AI302ChatCompleteConfig,
  AI302ChatCompleteResponseTransform,
  AI302ChatCompleteStreamChunkTransform,
} from './chatComplete';

/**
 * Provider registration for 302.AI: request routing (base URL, headers,
 * endpoints), the chat-completion parameter mapping, and the response
 * transforms for both non-streaming and streaming replies.
 */
const AI302Config: ProviderConfigs = {
  chatComplete: AI302ChatCompleteConfig,
  api: AI302APIConfig,
  responseTransforms: {
    chatComplete: AI302ChatCompleteResponseTransform,
    'stream-chatComplete': AI302ChatCompleteStreamChunkTransform,
  },
};

export default AI302Config;

src/providers/index.ts

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -62,6 +62,7 @@ import NscaleConfig from './nscale';
6262
import HyperbolicConfig from './hyperbolic';
6363
import { FeatherlessAIConfig } from './featherless-ai';
6464
import KrutrimConfig from './krutrim';
65+
import AI302Config from './302ai';
6566

6667
const Providers: { [key: string]: ProviderConfigs } = {
6768
openai: OpenAIConfig,
@@ -124,6 +125,7 @@ const Providers: { [key: string]: ProviderConfigs } = {
124125
bytez: BytezConfig,
125126
'featherless-ai': FeatherlessAIConfig,
126127
krutrim: KrutrimConfig,
128+
'302ai': AI302Config,
127129
};
128130

129131
export default Providers;

0 commit comments

Comments
 (0)