Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
44 changes: 44 additions & 0 deletions .github/workflows/ai-sdk-build-check.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,44 @@
# CI gate: verify the ai-sdk service still builds on PRs that touch it.
name: Check AI SDK Build

on:
  pull_request:
    types:
      - synchronize
      - opened
      - reopened
    paths:
      - 'ai-sdk/**' # Trigger only if files in the `/ai-sdk` directory change

jobs:
  build:
    runs-on: ubuntu-latest

    steps:
      # Step 1: Checkout the code
      - name: Checkout code
        uses: actions/checkout@v5

      # Step 2: Install pnpm (before setup-node, so the pnpm cache below can resolve it)
      - name: Install pnpm
        uses: pnpm/action-setup@v4
        with:
          version: 9

      # Step 3: Set up Node.js
      - name: Set up Node.js
        uses: actions/setup-node@v6
        with:
          node-version: 'lts/*' # Use the latest LTS version of Node.js
          cache: 'pnpm'
          # Specify the subdirectory containing the lockfile
          cache-dependency-path: ./ai-sdk/pnpm-lock.yaml

      # Step 4: Install dependencies
      - name: Install dependencies
        working-directory: ./ai-sdk
        run: pnpm install

      # Step 5: Run the build
      - name: Build app
        working-directory: ./ai-sdk
        run: pnpm build
3 changes: 3 additions & 0 deletions .github/workflows/build-push.yml
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,9 @@ jobs:
- dockerfile: ./query-engine/Dockerfile
context: ./query-engine
image: ghcr.io/lmnr-ai/query-engine
- dockerfile: ./ai-sdk/Dockerfile
context: ./ai-sdk
image: ghcr.io/lmnr-ai/ai-sdk
permissions:
contents: read
packages: write
Expand Down
2 changes: 2 additions & 0 deletions ai-sdk/.env.local.example
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
# Key used to decrypt stored provider API keys — presumably consumed by the
# crypto module's decryptApiKey; confirm format/length against that module.
AEAD_SECRET_KEY=
# Port the service listens on (the Dockerfile EXPOSEs 3000 to match).
PORT=3000
3 changes: 3 additions & 0 deletions ai-sdk/.gitignore
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
# Installed dependencies
node_modules
# TypeScript incremental-build metadata
tsconfig.tsbuildinfo
# Compiled output (see `pnpm build`)
dist/
26 changes: 26 additions & 0 deletions ai-sdk/Dockerfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,26 @@
# Multi-stage build: deps -> builder -> runner, all rooted at node:24-alpine.
FROM node:24-alpine AS base
WORKDIR /app

# Stage 1: install dependencies from the lockfile only (maximizes layer caching).
FROM base AS deps
# Check https://github.com/nodejs/docker-node/tree/b4117f9333da4138b03a546ec926ef50a31506c3#nodealpine to understand why libc6-compat might be needed.
RUN apk add --no-cache libc6-compat
WORKDIR /app

COPY package.json pnpm-lock.yaml ./
RUN corepack enable && pnpm install --frozen-lockfile

# Stage 2: compile the TypeScript sources into dist/.
FROM base AS builder
COPY --from=deps /app/node_modules ./node_modules
COPY . .
RUN corepack enable
RUN pnpm build

# Stage 3: minimal runtime image — only package metadata, node_modules and dist.
# NOTE(review): node_modules is copied as-is, so devDependencies ship in the
# final image; consider `pnpm prune --prod` in a follow-up if size matters.
FROM base AS runner
WORKDIR /app
ENV NODE_ENV=production
COPY --from=builder /app/package.json ./
COPY --from=builder /app/pnpm-lock.yaml ./
COPY --from=builder /app/node_modules ./node_modules
COPY --from=builder /app/dist ./dist
EXPOSE 3000
CMD ["node", "dist/api.js"]
221 changes: 221 additions & 0 deletions ai-sdk/aisdk.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,221 @@
import { createAmazonBedrock } from '@ai-sdk/amazon-bedrock';
import { createAnthropic } from '@ai-sdk/anthropic';
import { createAzure } from '@ai-sdk/azure';
import { createGoogleGenerativeAI } from '@ai-sdk/google';
import { createGroq } from '@ai-sdk/groq';
import { createMistral } from '@ai-sdk/mistral';
import { createOpenAI } from '@ai-sdk/openai';
import { generateObject, generateText, jsonSchema, tool } from 'ai';
import type { ModelMessage, ToolChoice, ToolSet } from 'ai';

import type { GenerateResponse, Provider, ProviderApiKey } from './types';
import { decryptApiKey } from './crypto';

/** JSON-serializable structured-output schema, or null for "none requested". */
export type StructuredOutput =
  | { [key: PropertyKey]: StructuredOutput | string | number | boolean | null | StructuredOutput[] }
  | null;

/** Maps each supported provider id to its AI SDK factory function. */
const providerFactories: Record<Provider, (options: Record<string, any>) => (model: string) => any> = {
  openai: createOpenAI,
  anthropic: createAnthropic,
  gemini: createGoogleGenerativeAI,
  groq: createGroq,
  mistral: createMistral,
  bedrock: createAmazonBedrock,
  'openai-azure': createAzure,
};

// Derive the provider whitelist from the factory map's keys so the two
// listings can never drift apart (previously the list was duplicated here).
const isProvider = (value: string): value is Provider =>
  Object.keys(providerFactories).includes(value);

/** Loosely-typed parameter bag passed through to the AI SDK generate calls. */
type BaseParams = Record<string, unknown>;

/**
 * Parses a JSON string describing tools into an AI SDK ToolSet.
 *
 * Expected shape: `{ "<toolName>": { "description"?: string, "parameters": <JSON Schema> } }`.
 *
 * @param tools - raw JSON string; empty/undefined means "no tools".
 * @returns the constructed ToolSet, or undefined when no tools were given.
 * @throws Error when the string is not valid JSON.
 */
export function parseTools(tools?: string): ToolSet | undefined {
  if (!tools) {
    return undefined;
  }

  // Only JSON.parse belongs in the try: failures thrown while constructing
  // the tools (e.g. from tool()/jsonSchema()) must not be mislabeled as
  // "tools must be valid JSON".
  let parsed: Record<string, { description?: string; parameters: object }>;
  try {
    parsed = JSON.parse(tools);
  } catch {
    throw new Error('tools must be valid JSON');
  }

  return Object.entries(parsed).reduce((acc, [toolName, toolItem]) => {
    acc[toolName] = tool({
      ...toolItem,
      inputSchema: jsonSchema(toolItem.parameters ?? {}),
    });
    return acc;
  }, {} as ToolSet);
}

/**
 * Executes a single AI SDK generation request.
 *
 * Decrypts the caller-supplied provider API key, resolves the
 * `provider:model` string into a concrete model instance, then dispatches to
 * structured-object generation when a JSON schema was supplied and plain text
 * generation otherwise, finally normalizing into a GenerateResponse.
 */
export async function runAiSdkRequest(params: {
  model: string;
  messages: ModelMessage[];
  temperature?: number;
  maxTokens?: number;
  topP?: number;
  topK?: number;
  tools?: ToolSet;
  toolChoice?: ToolChoice<ToolSet>;
  structuredOutput?: StructuredOutput;
  providerOptions?: unknown;
  provider_api_key: ProviderApiKey;
}): Promise<GenerateResponse> {
  const apiKey = await decryptProviderApiKey(params.provider_api_key);
  const model = resolveModel(params.model, apiKey);

  const base = createBaseParams(model, params.messages, {
    temperature: params.temperature,
    topK: params.topK,
    topP: params.topP,
    providerOptions: params.providerOptions,
  });

  let result;
  if (params.structuredOutput) {
    result = await getStructuredResult(base, params.structuredOutput, params.maxTokens);
  } else {
    const textParams = createTextParams(base, {
      maxTokens: params.maxTokens,
      tools: params.tools,
      toolChoice: params.toolChoice,
    });
    result = await generateText(textParams as Parameters<typeof generateText>[0]);
  }

  return normalizeResult(result);
}

/**
 * Builds the parameter object shared by text and structured generation.
 * Optional sampling knobs are copied only when explicitly provided, so the
 * provider's own defaults stay in effect otherwise.
 */
function createBaseParams(
  modelInstance: ReturnType<ReturnType<(typeof providerFactories)[Provider]>>,
  messages: ModelMessage[],
  options: {
    temperature?: number;
    topK?: number;
    topP?: number;
    providerOptions?: unknown;
  }
): BaseParams {
  const params: BaseParams = { model: modelInstance, messages };

  if (options.providerOptions !== undefined) {
    params.providerOptions = options.providerOptions;
  }

  // Copy each numeric sampling option only when it was actually supplied.
  for (const key of ['temperature', 'topK', 'topP'] as const) {
    const value = options[key];
    if (typeof value === 'number') {
      params[key] = value;
    }
  }

  return params;
}

/**
 * Extends the shared base params with text-generation-only options
 * (output-token limit, tool set, tool choice), omitting any not given.
 */
function createTextParams(
  baseParams: BaseParams,
  options: {
    maxTokens?: number;
    tools?: ToolSet;
    toolChoice?: ToolChoice<ToolSet>;
  }
): BaseParams {
  const { maxTokens, tools, toolChoice } = options;
  const params: BaseParams = { ...baseParams };

  // The public surface calls it `maxTokens`; the generate call expects `maxOutputTokens`.
  if (typeof maxTokens === 'number') {
    params.maxOutputTokens = maxTokens;
  }
  if (tools !== undefined) {
    params.tools = tools;
  }
  if (toolChoice !== undefined) {
    params.toolChoice = toolChoice;
  }

  return params;
}

/**
 * Runs generateObject against the supplied JSON schema and adapts the result
 * so it carries the same fields a generateText result would, with the
 * generated object pretty-printed into `text`.
 */
async function getStructuredResult(
  baseParams: BaseParams,
  structuredOutput: NonNullable<StructuredOutput>,
  maxTokens?: number
) {
  // Conditionally spread the token limit so the key is absent when not given.
  const objectParams = {
    ...baseParams,
    schema: jsonSchema(structuredOutput),
    ...(typeof maxTokens === 'number' ? { maxOutputTokens: maxTokens } : {}),
  } as BaseParams;

  const objectResult = await generateObject(objectParams as Parameters<typeof generateObject>[0]);

  // Mirror the generateText result shape so downstream normalization is uniform.
  return {
    ...objectResult,
    text: JSON.stringify(objectResult.object, null, 2),
    reasoning: [],
    toolCalls: [],
    content: [],
    files: [],
    sources: [],
    reasoningText: '',
  };
}

/**
 * Flattens an AI SDK result (from generateText or the generateObject adapter)
 * into the GenerateResponse wire shape, defaulting list fields to `[]`,
 * string fields to `''`, and usage objects to `{}` so consumers never see
 * `undefined` there.
 */
function normalizeResult(result: any): GenerateResponse {
  // `result.object` may legitimately be a falsy JSON value (0, false, ''),
  // so presence must be tested with `!== undefined`, not truthiness.
  const fallbackText = result?.object !== undefined ? JSON.stringify(result.object) : '';

  return {
    text: result?.text ?? fallbackText,
    content: result?.content ?? [],
    reasoning: result?.reasoning ?? [],
    reasoningText: result?.reasoningText ?? '',
    files: result?.files ?? [],
    sources: result?.sources ?? [],
    toolCalls: result?.toolCalls ?? [],
    staticToolCalls: result?.staticToolCalls ?? [],
    dynamicToolCalls: result?.dynamicToolCalls ?? [],
    toolResults: result?.toolResults ?? [],
    staticToolResults: result?.staticToolResults ?? [],
    dynamicToolResults: result?.dynamicToolResults ?? [],
    finishReason: result?.finishReason,
    usage: result?.usage ?? {},
    totalUsage: result?.totalUsage ?? {},
    warnings: result?.warnings,
    request: result?.request,
    response: result?.response,
    object: result?.object,
  };
}

/**
 * Decrypts an encrypted provider API key by handing its name, nonce and
 * ciphertext value to the crypto module (see ./crypto).
 */
async function decryptProviderApiKey({ name, nonce, value }: ProviderApiKey): Promise<string> {
  return decryptApiKey(name, nonce, value);
}

/**
 * Resolves a `provider:model` key plus a decrypted API key into an AI SDK
 * model instance.
 *
 * Splits only on the FIRST ':' so model names that themselves contain colons
 * (e.g. OpenAI fine-tune ids such as `openai:ft:gpt-4o-mini:org:...`) are
 * preserved intact — `split(':')` would have truncated them.
 *
 * @throws Error when the provider prefix is missing, the provider is
 *   unsupported, or the provider SDK fails to initialize.
 */
function resolveModel(modelKey: string, apiKey: string) {
  const separator = modelKey.indexOf(':');
  const providerSegment = separator > 0 ? modelKey.slice(0, separator) : '';
  const modelName = separator >= 0 ? modelKey.slice(separator + 1) : '';

  if (!providerSegment || !modelName) {
    throw new Error('Model value must include provider prefix (e.g. gemini:gemini-2.5-flash)');
  }

  if (!isProvider(providerSegment)) {
    throw new Error(`Unsupported provider: ${providerSegment}`);
  }

  // isProvider already guarantees the factory exists, so no extra null check.
  const createProvider = providerFactories[providerSegment];

  try {
    const providerInstance = createProvider({ apiKey });
    return providerInstance(modelName);
  } catch (error) {
    // Surface the underlying failure detail instead of swallowing it.
    const detail = error instanceof Error ? error.message : String(error);
    throw new Error(`Failed to initialize provider ${providerSegment}: ${detail}`);
  }
}
Loading