Skip to content
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions src/globals.ts
Original file line number Diff line number Diff line change
Expand Up @@ -113,6 +113,7 @@ export const ORACLE: string = 'oracle';
export const IO_INTELLIGENCE: string = 'iointelligence';
export const AIBADGR: string = 'aibadgr';
export const OVHCLOUD: string = 'ovhcloud';
export const LATITUDE: string = 'latitude';

export const VALID_PROVIDERS = [
ANTHROPIC,
Expand Down Expand Up @@ -189,6 +190,7 @@ export const VALID_PROVIDERS = [
IO_INTELLIGENCE,
AIBADGR,
OVHCLOUD,
LATITUDE,
];

export const CONTENT_TYPES = {
Expand Down
2 changes: 2 additions & 0 deletions src/providers/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -74,6 +74,7 @@ import OracleConfig from './oracle';
import IOIntelligenceConfig from './iointelligence';
import AIBadgrConfig from './aibadgr';
import OVHcloudConfig from './ovhcloud';
import LatitudeConfig from './latitude';

const Providers: { [key: string]: ProviderConfigs } = {
openai: OpenAIConfig,
Expand Down Expand Up @@ -148,6 +149,7 @@ const Providers: { [key: string]: ProviderConfigs } = {
iointelligence: IOIntelligenceConfig,
aibadgr: AIBadgrConfig,
ovhcloud: OVHcloudConfig,
latitude: LatitudeConfig,
};

export default Providers;
18 changes: 18 additions & 0 deletions src/providers/latitude/api.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
import { ProviderAPIConfig } from '../types';

/**
 * API wiring for Latitude's OpenAI-compatible inference endpoint:
 * base URL, auth headers, and function-to-path routing.
 */
const LatitudeAPIConfig: ProviderAPIConfig = {
  // Latitude exposes a single fixed host.
  getBaseURL: () => 'https://api.lsh.ai',
  // Standard Bearer-token authentication.
  headers: ({ providerOptions }) => ({
    Authorization: `Bearer ${providerOptions.apiKey}`,
  }),
  // Only chat completions are supported; anything else maps to ''.
  getEndpoint: ({ fn }) =>
    fn === 'chatComplete' ? '/v1/chat/completions' : '',
};

export default LatitudeAPIConfig;
229 changes: 229 additions & 0 deletions src/providers/latitude/chatComplete.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,229 @@
import { LATITUDE } from '../../globals';
import { Params } from '../../types/requestBody';

import {
ChatCompletionResponse,
ErrorResponse,
ProviderConfig,
} from '../types';
import {
generateErrorResponse,
generateInvalidProviderResponseError,
transformFinishReason,
} from '../utils';
import { LATITUDE_STOP_REASON } from './types';

/**
 * Maps OpenAI-style chat-completion parameters onto Latitude's
 * OpenAI-compatible request body. Both `max_tokens` and
 * `max_completion_tokens` funnel into Latitude's `max_tokens`.
 */
export const LatitudeChatCompleteConfig: ProviderConfig = {
  model: { param: 'model', required: true, default: 'qwen-2.5-7b' },
  messages: {
    param: 'messages',
    default: '',
    // Latitude has no 'developer' role; downgrade it to 'system'.
    transform: (params: Params) =>
      params.messages?.map((msg) =>
        msg.role === 'developer' ? { ...msg, role: 'system' } : msg
      ),
  },
  response_format: { param: 'response_format', default: null },
  max_tokens: { param: 'max_tokens', default: 100, min: 0 },
  max_completion_tokens: { param: 'max_tokens', default: 100, min: 0 },
  temperature: { param: 'temperature', default: 1, min: 0, max: 2 },
  top_p: { param: 'top_p', default: 1, min: 0, max: 1 },
  stream: { param: 'stream', default: false },
  frequency_penalty: { param: 'frequency_penalty', default: 0, min: -2, max: 2 },
  presence_penalty: { param: 'presence_penalty', default: 0, min: -2, max: 2 },
  stop: { param: 'stop', default: null },
  seed: { param: 'seed', default: null },
  tools: { param: 'tools', default: null },
  tool_choice: { param: 'tool_choice', default: null },
};

// Successful (non-streaming) chat-completion payload from Latitude.
// Extends the shared OpenAI-compatible response shape, tightening
// `usage` to be required with all three token counters present.
interface LatitudeChatCompleteResponse extends ChatCompletionResponse {
  id: string;
  object: string;
  created: number; // presumably a Unix timestamp in seconds — TODO confirm against Latitude API docs
  model: string;
  usage: {
    prompt_tokens: number;
    completion_tokens: number;
    total_tokens: number;
  };
}

// Error payload returned by Latitude on failed requests; the response
// transform forwards message/type/param/code verbatim into the
// gateway's standard error envelope.
export interface LatitudeErrorResponse {
  object: string;
  message: string;
  type: string;
  param: string | null;
  code: string;
}

// Shape of one parsed SSE `data:` chunk from Latitude's streaming
// chat-completion endpoint (OpenAI-compatible delta format).
interface LatitudeStreamChunk {
  id: string;
  object: string;
  created: number;
  model: string;
  // Token counters; optional — presumably only sent on the final chunk, TODO confirm.
  usage?: {
    prompt_tokens: number;
    completion_tokens: number;
    total_tokens: number;
  };
  choices: {
    delta: {
      role?: string | null;
      content?: string;
      tool_calls?: any[];
    };
    index: number;
    finish_reason: string | null; // null until the choice terminates
  }[];
}

/**
 * Converts a Latitude chat-completion (or error) payload into the
 * gateway's OpenAI-compatible ChatCompletionResponse, tagging it with
 * the LATITUDE provider id and normalizing finish reasons.
 */
export const LatitudeChatCompleteResponseTransform: (
  response: LatitudeChatCompleteResponse | LatitudeErrorResponse,
  responseStatus: number,
  responseHeaders: Headers,
  strictOpenAiCompliance: boolean
) => ChatCompletionResponse | ErrorResponse = (
  response,
  responseStatus,
  _responseHeaders,
  strictOpenAiCompliance
) => {
  // A non-200 status carrying a `message` field is a provider error.
  if (responseStatus !== 200 && 'message' in response) {
    const { message, type, param, code } = response;
    return generateErrorResponse({ message, type, param, code }, LATITUDE);
  }

  // Anything without `choices` is neither a success nor a recognized error.
  if (!('choices' in response)) {
    return generateInvalidProviderResponseError(response, LATITUDE);
  }

  const { id, object, created, model, usage } = response;
  return {
    id,
    object,
    created,
    model,
    provider: LATITUDE,
    choices: response.choices.map((choice) => ({
      index: choice.index,
      message: {
        role: choice.message.role,
        content: choice.message.content,
        tool_calls: choice.message.tool_calls,
      },
      // Normalize Latitude's finish reason to the OpenAI vocabulary.
      finish_reason: transformFinishReason(
        choice.finish_reason as LATITUDE_STOP_REASON,
        strictOpenAiCompliance
      ),
    })),
    usage: {
      prompt_tokens: usage?.prompt_tokens,
      completion_tokens: usage?.completion_tokens,
      total_tokens: usage?.total_tokens,
    },
  };
};

/**
 * Re-frames one SSE line from Latitude's streaming endpoint as an
 * OpenAI-compatible `data:` chunk tagged with the LATITUDE provider id.
 *
 * Fixes over the naive version:
 * - Guards `choices[0]`: OpenAI-compatible streams commonly emit a
 *   trailing usage-only chunk with `choices: []`, which would have
 *   thrown on the unguarded index access.
 * - Uses `fallbackId` when the chunk lacks an `id`, which is the
 *   purpose that parameter exists for.
 */
export const LatitudeChatCompleteStreamChunkTransform: (
  response: string,
  fallbackId: string,
  streamState: any,
  strictOpenAiCompliance: boolean,
  gatewayRequest: Params
) => string | string[] = (
  responseChunk,
  fallbackId,
  _streamState,
  strictOpenAiCompliance,
  _gatewayRequest
) => {
  // Strip SSE framing: leading/trailing whitespace and the `data: ` prefix.
  let chunk = responseChunk.trim();
  chunk = chunk.replace(/^data: /, '');
  chunk = chunk.trim();
  // Pass the terminator through unchanged.
  if (chunk === '[DONE]') {
    return `data: ${chunk}\n\n`;
  }
  const parsedChunk: LatitudeStreamChunk = JSON.parse(chunk);
  // `choices` may be empty (e.g. a final usage-only chunk); don't index blindly.
  const choice = parsedChunk.choices?.[0];
  const finishReason = choice?.finish_reason
    ? transformFinishReason(
        choice.finish_reason as LATITUDE_STOP_REASON,
        strictOpenAiCompliance
      )
    : null;
  return (
    `data: ${JSON.stringify({
      id: parsedChunk.id ?? fallbackId,
      object: parsedChunk.object,
      created: parsedChunk.created,
      model: parsedChunk.model,
      provider: LATITUDE,
      choices: choice
        ? [
            {
              index: choice.index,
              delta: choice.delta,
              finish_reason: finishReason,
            },
          ]
        : [],
      usage: parsedChunk.usage,
    })}` + '\n\n'
  );
};
18 changes: 18 additions & 0 deletions src/providers/latitude/index.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
import { ProviderConfigs } from '../types';
import LatitudeAPIConfig from './api';
import {
LatitudeChatCompleteConfig,
LatitudeChatCompleteResponseTransform,
LatitudeChatCompleteStreamChunkTransform,
} from './chatComplete';

/**
 * Latitude provider bundle: endpoint/auth wiring, the chat-completion
 * request mapping, and the sync + streaming response transforms.
 */
const LatitudeConfig: ProviderConfigs = {
  api: LatitudeAPIConfig,
  chatComplete: LatitudeChatCompleteConfig,
  responseTransforms: {
    chatComplete: LatitudeChatCompleteResponseTransform,
    'stream-chatComplete': LatitudeChatCompleteStreamChunkTransform,
  },
};

export default LatitudeConfig;
6 changes: 6 additions & 0 deletions src/providers/latitude/types.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
// Finish-reason values Latitude can return for a choice; fed into
// transformFinishReason by the chatComplete transforms. String enum
// kept for consistency with the sibling *_STOP_REASON provider enums.
export enum LATITUDE_STOP_REASON {
  stop = 'stop',
  length = 'length',
  tool_calls = 'tool_calls',
  content_filter = 'content_filter',
}
4 changes: 3 additions & 1 deletion src/providers/types.ts
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@ import { DEEPSEEK_STOP_REASON } from './deepseek/types';
import { MISTRAL_AI_FINISH_REASON } from './mistral-ai/types';
import { TOGETHER_AI_FINISH_REASON } from './together-ai/types';
import { COHERE_STOP_REASON } from './cohere/types';
import { LATITUDE_STOP_REASON } from './latitude/types';

/**
* Configuration for a parameter.
Expand Down Expand Up @@ -453,4 +454,5 @@ export type PROVIDER_FINISH_REASON =
| DEEPSEEK_STOP_REASON
| MISTRAL_AI_FINISH_REASON
| TOGETHER_AI_FINISH_REASON
| COHERE_STOP_REASON;
| COHERE_STOP_REASON
| LATITUDE_STOP_REASON;