Skip to content

Commit 4186135

Browse files
committed
feat: add 302AI provider integration
1 parent 16d9f39 commit 4186135

File tree

5 files changed

+202
-0
lines changed

5 files changed

+202
-0
lines changed

src/globals.ts

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -97,6 +97,8 @@ export const NSCALE: string = 'nscale';
9797
export const HYPERBOLIC: string = 'hyperbolic';
9898
export const FEATHERLESS_AI: string = 'featherless-ai';
9999
export const KRUTRIM: string = 'krutrim';
100+
export const THREE_ZERO_TWO_AI: string = '302ai';
101+
100102

101103
export const VALID_PROVIDERS = [
102104
ANTHROPIC,
@@ -159,6 +161,7 @@ export const VALID_PROVIDERS = [
159161
HYPERBOLIC,
160162
FEATHERLESS_AI,
161163
KRUTRIM,
164+
THREE_ZERO_TWO_AI,
162165
];
163166

164167
export const CONTENT_TYPES = {

src/providers/302ai/api.ts

Lines changed: 18 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,18 @@
1+
import { ProviderAPIConfig } from '../types';
2+
3+
const AI302APIConfig: ProviderAPIConfig = {
4+
getBaseURL: () => 'https://api.302.ai',
5+
headers: ({ providerOptions }) => {
6+
return { Authorization: `Bearer ${providerOptions.apiKey}` };
7+
},
8+
getEndpoint: ({ fn }) => {
9+
switch (fn) {
10+
case 'chatComplete':
11+
return '/v1/chat/completions';
12+
default:
13+
return '';
14+
}
15+
},
16+
};
17+
18+
export default AI302APIConfig;
Lines changed: 160 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,160 @@
1+
import { THREE_ZERO_TWO_AI } from '../../globals';
2+
import { OpenAIErrorResponseTransform } from '../openai/utils';
3+
4+
import {
5+
ChatCompletionResponse,
6+
ErrorResponse,
7+
ProviderConfig,
8+
} from '../types';
9+
import {
10+
generateErrorResponse,
11+
generateInvalidProviderResponseError,
12+
} from '../utils';
13+
14+
interface AI302ChatCompleteResponse extends ChatCompletionResponse {}
15+
16+
export const AI302ChatCompleteConfig: ProviderConfig = {
17+
model: {
18+
param: 'model',
19+
required: true,
20+
default: 'gpt-3.5-turbo',
21+
},
22+
messages: {
23+
param: 'messages',
24+
default: '',
25+
},
26+
max_tokens: {
27+
param: 'max_tokens',
28+
default: 100,
29+
min: 0,
30+
},
31+
temperature: {
32+
param: 'temperature',
33+
default: 1,
34+
min: 0,
35+
max: 2,
36+
},
37+
top_p: {
38+
param: 'top_p',
39+
default: 1,
40+
min: 0,
41+
max: 1,
42+
},
43+
stream: {
44+
param: 'stream',
45+
default: false,
46+
},
47+
frequency_penalty: {
48+
param: 'frequency_penalty',
49+
default: 0,
50+
min: -2,
51+
max: 2,
52+
},
53+
presence_penalty: {
54+
param: 'presence_penalty',
55+
default: 0,
56+
min: -2,
57+
max: 2,
58+
},
59+
stop: {
60+
param: 'stop',
61+
default: null,
62+
},
63+
};
64+
65+
interface AI302ChatCompleteResponse extends ChatCompletionResponse {
66+
id: string;
67+
object: string;
68+
created: number;
69+
model: string;
70+
usage?: {
71+
prompt_tokens: number;
72+
completion_tokens: number;
73+
total_tokens: number;
74+
};
75+
}
76+
77+
// Shape of one server-sent-events chunk emitted by the 302.AI streaming API.
// Follows the OpenAI-compatible incremental "delta" format: each chunk carries
// the partial message content for one or more choices.
interface AI302StreamChunk {
  id: string;
  object: string;
  created: number;
  model: string;
  choices: {
    // Incremental update; role usually appears only on the first chunk.
    delta: {
      role?: string | null;
      content?: string;
    };
    index: number;
    // null until the stream for this choice terminates.
    finish_reason: string | null;
  }[];
}
91+
92+
export const AI302ChatCompleteResponseTransform: (
93+
response: AI302ChatCompleteResponse | ErrorResponse,
94+
responseStatus: number
95+
) => ChatCompletionResponse | ErrorResponse = (response, responseStatus) => {
96+
if ('error' in response && responseStatus !== 200) {
97+
return OpenAIErrorResponseTransform(response, THREE_ZERO_TWO_AI);
98+
}
99+
100+
if ('choices' in response) {
101+
return {
102+
id: response.id,
103+
object: response.object,
104+
created: response.created,
105+
model: response.model,
106+
provider: THREE_ZERO_TWO_AI,
107+
choices: response.choices.map((c) => ({
108+
index: c.index,
109+
message: {
110+
role: c.message.role,
111+
content: c.message.content,
112+
},
113+
finish_reason: c.finish_reason,
114+
})),
115+
usage: {
116+
prompt_tokens: response.usage?.prompt_tokens || 0,
117+
completion_tokens: response.usage?.completion_tokens || 0,
118+
total_tokens: response.usage?.total_tokens || 0,
119+
},
120+
};
121+
}
122+
123+
return generateInvalidProviderResponseError(response, THREE_ZERO_TWO_AI);
124+
};
125+
126+
export const AI302ChatCompleteStreamChunkTransform: (
127+
response: string
128+
) => string = (responseChunk) => {
129+
let chunk = responseChunk.trim();
130+
chunk = chunk.replace(/^data: /, '');
131+
chunk = chunk.trim();
132+
133+
if (chunk === '[DONE]') {
134+
return `data: ${chunk}\n\n`;
135+
}
136+
137+
try {
138+
const parsedChunk: AI302StreamChunk = JSON.parse(chunk);
139+
140+
return (
141+
`data: ${JSON.stringify({
142+
id: parsedChunk.id,
143+
object: parsedChunk.object,
144+
created: parsedChunk.created,
145+
model: parsedChunk.model,
146+
provider: THREE_ZERO_TWO_AI,
147+
choices: [
148+
{
149+
index: parsedChunk.choices[0]?.index ?? 0,
150+
delta: parsedChunk.choices[0]?.delta ?? {},
151+
finish_reason: parsedChunk.choices[0]?.finish_reason ?? null,
152+
},
153+
],
154+
})}` + '\n\n'
155+
);
156+
} catch (error) {
157+
console.error('Error parsing 302AI stream chunk:', error);
158+
return `data: ${chunk}\n\n`;
159+
}
160+
};

src/providers/302ai/index.ts

Lines changed: 18 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,18 @@
1+
import { ProviderConfigs } from '../types';
2+
import AI302APIConfig from './api';
3+
import {
4+
AI302ChatCompleteConfig,
5+
AI302ChatCompleteResponseTransform,
6+
AI302ChatCompleteStreamChunkTransform,
7+
} from './chatComplete';
8+
9+
const AI302Config: ProviderConfigs = {
10+
chatComplete: AI302ChatCompleteConfig,
11+
api: AI302APIConfig,
12+
responseTransforms: {
13+
chatComplete: AI302ChatCompleteResponseTransform,
14+
'stream-chatComplete': AI302ChatCompleteStreamChunkTransform,
15+
},
16+
};
17+
18+
export default AI302Config;

src/providers/index.ts

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -61,6 +61,7 @@ import NscaleConfig from './nscale';
6161
import HyperbolicConfig from './hyperbolic';
6262
import { FeatherlessAIConfig } from './featherless-ai';
6363
import KrutrimConfig from './krutrim';
64+
import AI302Config from './302ai';
6465

6566
const Providers: { [key: string]: ProviderConfigs } = {
6667
openai: OpenAIConfig,
@@ -122,6 +123,8 @@ const Providers: { [key: string]: ProviderConfigs } = {
122123
hyperbolic: HyperbolicConfig,
123124
'featherless-ai': FeatherlessAIConfig,
124125
krutrim: KrutrimConfig,
126+
'302ai': AI302Config,
127+
125128
};
126129

127130
export default Providers;

0 commit comments

Comments
 (0)