
Commit 8a2338f

πŸ“¦ NEW: tools support in pipe.run() endpoint (#140)
* πŸ“¦ NEW: tools support in pipe.run() endpoint
* πŸ‘Œ IMPROVE: Code
1 parent 85c5725 commit 8a2338f

File tree

13 files changed: +120 -58 lines

β€Žexamples/nextjs/app/api/langbase/pipes/run-tool-stream/route.ts

Lines changed: 4 additions & 3 deletions

@@ -9,9 +9,10 @@ export async function POST(req: NextRequest) {
     const pipe = new Pipe(pipeWithToolsStream());
 
     // 2. Run the pipe with user messages and other run options.
-    let {stream, threadId} = (await pipe.run(
-        runOptions,
-    )) as unknown as RunResponseStream;
+    let {stream, threadId} = (await pipe.run({
+        ...runOptions,
+        stream: true,
+    })) as unknown as RunResponseStream;
 
     // 3. Stream the response.
     return new Response(stream, {
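For context, a minimal sketch of run options that pass request-level tools to pipe.run() in a route like the one above. Only the tools/stream shape comes from this commit; the import paths, the pipeWithToolsStream config location, and the weather tool are illustrative assumptions.

import {Pipe, type RunResponseStream} from '@baseai/core'; // assumed import path
import pipeWithToolsStream from '@/baseai/pipes/pipe-with-tool-stream'; // assumed path

const runOptions = {
    messages: [{role: 'user' as const, content: 'What is the weather in SF?'}],
    // Hypothetical request-level tool; shape matches the pipeToolSchema added below.
    tools: [
        {
            type: 'function' as const,
            function: {
                name: 'get_current_weather',
                description: 'Get the current weather for a given location',
                parameters: {
                    type: 'object',
                    properties: {location: {type: 'string'}},
                    required: ['location']
                }
            }
        }
    ]
};

const pipe = new Pipe(pipeWithToolsStream());
const {stream} = (await pipe.run({
    ...runOptions,
    stream: true
})) as unknown as RunResponseStream;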

β€Žpackages/baseai/src/dev/llms/call-anthropic.ts

Lines changed: 5 additions & 2 deletions

@@ -6,21 +6,24 @@ import { handleLlmError } from './utils';
 import type { ModelParams } from 'types/providers';
 import type { Message, Pipe } from 'types/pipe';
 import { addToolsToParams } from '../utils/add-tools-to-params';
+import type { PipeTool } from 'types/tools';
 
 export async function callAnthropic({
     pipe,
     messages,
     llmApiKey,
-    stream
+    stream,
+    paramsTools
 }: {
     pipe: Pipe;
     llmApiKey: string;
     stream: boolean;
     messages: Message[];
+    paramsTools: PipeTool[] | undefined;
 }) {
     try {
         const modelParams = buildModelParams(pipe, stream, messages);
-        addToolsToParams(modelParams, pipe);
+        addToolsToParams(modelParams, pipe, paramsTools);
 
         // Transform params according to provider's format
         const transformedRequestParams = transformToProviderRequest({

β€Žpackages/baseai/src/dev/llms/call-google.ts

Lines changed: 5 additions & 2 deletions

@@ -6,21 +6,24 @@ import { applyJsonModeIfEnabledForGoogle, handleLlmError } from './utils';
 import type { ModelParams } from 'types/providers';
 import type { Message, Pipe } from 'types/pipe';
 import { addToolsToParams } from '../utils/add-tools-to-params';
+import type { PipeTool } from 'types/tools';
 
 export async function callGoogle({
     pipe,
     messages,
     llmApiKey,
-    stream
+    stream,
+    paramsTools
 }: {
     pipe: Pipe;
     stream: boolean;
     llmApiKey: string;
     messages: Message[];
+    paramsTools: PipeTool[] | undefined;
 }) {
     try {
         const modelParams = buildModelParams(pipe, stream, messages);
-        addToolsToParams(modelParams, pipe);
+        addToolsToParams(modelParams, pipe, paramsTools);
 
         // Transform params according to provider's format
         const transformedRequestParams = transformToProviderRequest({

β€Žpackages/baseai/src/dev/llms/call-llm.ts

Lines changed: 14 additions & 6 deletions

@@ -27,19 +27,22 @@ import { callPerplexity } from './call-perplexity';
 import { callTogether } from './call-together';
 import { callXAI } from './call-xai';
 import { getProvider } from '../utils/get-provider';
+import type { PipeTool } from 'types/tools';
 
 export async function callLLM({
     pipe,
     stream,
     messages,
     llmApiKey,
-    variables
+    variables,
+    paramsTools
 }: {
     pipe: Pipe;
     stream: boolean;
     llmApiKey: string;
     messages: Message[];
     variables?: VariablesI;
+    paramsTools: PipeTool[] | undefined;
 }) {
     try {
         // Get the model provider from the pipe.
@@ -70,27 +73,30 @@ export async function callLLM({
             pipe,
             stream,
             messages,
-            llmApiKey
+            llmApiKey,
+            paramsTools
         });
     }
 
     if (modelProvider === ANTHROPIC) {
         dlog('ANTHROPIC', 'βœ…');
         return await callAnthropic({
             pipe,
+            stream,
             messages,
             llmApiKey,
-            stream
+            paramsTools
         });
     }
 
     if (modelProvider === TOGETHER_AI) {
         dlog('TOGETHER_AI', 'βœ…');
         return await callTogether({
             pipe,
+            stream,
             messages,
             llmApiKey,
-            stream
+            paramsTools,
         });
     }
 
@@ -110,7 +116,8 @@ export async function callLLM({
             pipe,
             messages,
             llmApiKey,
-            stream
+            stream,
+            paramsTools
         });
     }
 
@@ -120,7 +127,8 @@ export async function callLLM({
             pipe,
             messages,
             llmApiKey,
-            stream
+            stream,
+            paramsTools
         });
     }
 
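A hedged sketch of how a caller might now invoke callLLM with request-level tools forwarded as paramsTools; the pipe, messages, and API key values are placeholders, and only the argument shape is taken from this commit.

import {callLLM} from './call-llm';
import type {Message, Pipe} from 'types/pipe';
import type {PipeTool} from 'types/tools';

declare const pipe: Pipe;           // a loaded pipe config (placeholder)
declare const messages: Message[];  // user/assistant messages (placeholder)

// Hypothetical request-level tool matching pipeToolSchema.
const requestTools: PipeTool[] = [
    {type: 'function', function: {name: 'get_current_weather'}}
];

const rawLlmResponse = await callLLM({
    pipe,
    stream: false,
    llmApiKey: process.env.OPENAI_API_KEY ?? '', // placeholder key
    messages,
    variables: undefined,
    paramsTools: requestTools
});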

β€Žpackages/baseai/src/dev/llms/call-openai.ts

Lines changed: 5 additions & 2 deletions

@@ -7,25 +7,28 @@ import { applyJsonModeIfEnabled, handleLlmError } from './utils';
 import type { Message, Pipe } from 'types/pipe';
 import type { ModelParams } from 'types/providers';
 import { addToolsToParams } from '../utils/add-tools-to-params';
+import type { PipeTool } from 'types/tools';
 
 export async function callOpenAI({
     pipe,
     stream,
     llmApiKey,
-    messages
+    messages,
+    paramsTools
 }: {
     pipe: Pipe;
     stream: boolean;
     llmApiKey: string;
     messages: Message[];
+    paramsTools: PipeTool[] | undefined;
 }) {
     try {
         validateInput(pipe, messages);
         const openai = new OpenAI({ apiKey: llmApiKey });
         await moderateContent(openai, messages, pipe.moderate);
 
         const modelParams = buildModelParams(pipe, stream, messages);
-        addToolsToParams(modelParams, pipe);
+        addToolsToParams(modelParams, pipe, paramsTools);
         applyJsonModeIfEnabled(modelParams, pipe);
 
         dlog('modelParams', modelParams);

β€Žpackages/baseai/src/dev/llms/call-together.ts

Lines changed: 5 additions & 2 deletions

@@ -5,17 +5,20 @@ import { applyJsonModeIfEnabled, handleLlmError } from './utils';
 import type { Message, Pipe } from 'types/pipe';
 import type { ModelParams } from 'types/providers';
 import { addToolsToParams } from '../utils/add-tools-to-params';
+import type { PipeTool } from 'types/tools';
 
 export async function callTogether({
     pipe,
     messages,
     llmApiKey,
-    stream
+    stream,
+    paramsTools
 }: {
     pipe: Pipe;
     llmApiKey: string;
     stream: boolean;
     messages: Message[];
+    paramsTools: PipeTool[] | undefined;
 }) {
     try {
         const modelParams = buildModelParams(pipe, stream, messages);
@@ -29,7 +32,7 @@ export async function callTogether({
         // Together behaves weirdly with stop value. Omitting it.
         delete modelParams['stop'];
         applyJsonModeIfEnabled(modelParams, pipe);
-        addToolsToParams(modelParams, pipe);
+        addToolsToParams(modelParams, pipe, paramsTools);
         dlog('modelParams', modelParams);
 
         return await together.chat.completions.create(modelParams as any);

β€Žpackages/baseai/src/dev/llms/call-xai.ts

Lines changed: 5 additions & 2 deletions

@@ -5,17 +5,20 @@ import { handleLlmError } from './utils';
 import type { Message, Pipe } from 'types/pipe';
 import type { ModelParams } from 'types/providers';
 import { addToolsToParams } from '../utils/add-tools-to-params';
+import type { PipeTool } from 'types/tools';
 
 export async function callXAI({
     pipe,
     stream,
     llmApiKey,
-    messages
+    messages,
+    paramsTools
 }: {
     pipe: Pipe;
     stream: boolean;
     llmApiKey: string;
     messages: Message[];
+    paramsTools: PipeTool[] | undefined;
 }) {
     try {
         const modelParams = buildModelParams(pipe, stream, messages);
@@ -27,7 +30,7 @@ export async function callXAI({
         });
 
         // Add tools (functions) to modelParams
-        addToolsToParams(modelParams, pipe);
+        addToolsToParams(modelParams, pipe, paramsTools);
         dlog('modelParams', modelParams);
 
         return await groq.chat.completions.create(modelParams as any);

β€Žpackages/baseai/src/dev/routes/v1/pipes/run.ts

Lines changed: 3 additions & 1 deletion

@@ -44,6 +44,7 @@ const RequestBodySchema = z.object({
     stream: z.boolean(),
     messages: z.array(schemaMessage),
     llmApiKey: z.string(),
+    tools: z.array(pipeToolSchema).optional(),
     variables: VariablesSchema.optional()
 });
 
@@ -138,7 +139,8 @@ const handleRun = async (c: any) => {
         messages,
         llmApiKey,
         stream,
-        variables
+        variables,
+        paramsTools: validatedBody.tools
     });
 
     return processLlmResponse(c, validatedBody, rawLlmResponse);
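A sketch of a request body that should satisfy the extended RequestBodySchema. The message shape is reduced to role/content and the key is a placeholder, so treat the exact field values as assumptions.

const requestBody = {
    stream: false,
    llmApiKey: 'sk-...', // placeholder provider key
    messages: [
        {role: 'user', content: 'What is the weather in Tokyo?'}
    ],
    // New in this commit: optional request-level tools validated by pipeToolSchema.
    tools: [
        {
            type: 'function',
            function: {
                name: 'get_current_weather', // hypothetical tool
                description: 'Get the current weather for a location',
                parameters: {
                    type: 'object',
                    properties: {location: {type: 'string'}},
                    required: ['location']
                }
            }
        }
    ]
    // variables is still optional and omitted here.
};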
packages/baseai/src/dev/utils/add-tools-to-params.ts

Lines changed: 35 additions & 17 deletions

@@ -1,10 +1,19 @@
-import type { Pipe } from 'types/pipe';
+import type { Pipe, ToolCall } from 'types/pipe';
 import { getProvider } from './get-provider';
 import { getSupportedToolSettings, hasToolSupport } from './has-tool-support';
-import type { ModelParams, Tool } from 'types/providers';
+import type { ModelParams } from 'types/providers';
+import type { PipeTool } from 'types/tools';
 
-export function addToolsToParams(modelParams: ModelParams, pipe: Pipe) {
-    if (!pipe.tools.length) return;
+export function addToolsToParams(
+    modelParams: ModelParams,
+    pipe: Pipe,
+    paramsTools: PipeTool[] | undefined
+) {
+    const pipeTools = pipe.tools as unknown as string[];
+    const hasParamsTools = paramsTools && paramsTools.length > 0;
+
+    // 1. If no tools are provided, return the modelParams as is
+    if (!hasParamsTools && !pipeTools.length) return modelParams;
 
     const [providerString, modelName] = pipe.model.split(':');
     const provider = getProvider(providerString);
@@ -15,21 +24,30 @@ export function addToolsToParams(modelParams: ModelParams, pipe: Pipe) {
         provider
     });
 
-    if (hasToolCallSupport) {
-        const { hasParallelToolCallSupport, hasToolChoiceSupport } =
-            getSupportedToolSettings({
-                modelName,
-                provider
-            });
+    // 2. If the model does not support tool calls, return the modelParams as is
+    if (!hasToolCallSupport) return modelParams;
+
+    // If tools are provided in request param, prioritize and use them
+    if (hasParamsTools) {
+        modelParams.tools = paramsTools as ToolCall[];
+    }
+
+    // If tools are not provided in request param, use the tools from the pipe config
+    if (!hasParamsTools && pipeTools.length) {
+        modelParams.tools = pipe.tools as ToolCall[];
+    }
 
-        if (hasParallelToolCallSupport) {
-            modelParams.parallel_tool_calls = pipe.parallel_tool_calls;
-        }
+    const { hasParallelToolCallSupport, hasToolChoiceSupport } =
+        getSupportedToolSettings({
+            modelName,
+            provider
+        });
 
-        if (hasToolChoiceSupport) {
-            modelParams.tool_choice = pipe.tool_choice;
-        }
+    if (hasParallelToolCallSupport) {
+        modelParams.parallel_tool_calls = pipe.parallel_tool_calls;
+    }
 
-        modelParams.tools = pipe.tools as Tool[];
+    if (hasToolChoiceSupport) {
+        modelParams.tool_choice = pipe.tool_choice;
     }
 }
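A usage sketch of the new precedence rule, assuming a tool-capable model and a sibling-module import path: tools passed in the request win, otherwise the pipe config's own tools are used. The pipe and params objects are placeholders.

import {addToolsToParams} from './add-tools-to-params';
import type {ModelParams} from 'types/providers';
import type {Pipe} from 'types/pipe';
import type {PipeTool} from 'types/tools';

declare const pipe: Pipe;               // pipe config, possibly with its own tools
declare const modelParams: ModelParams; // params built by buildModelParams (placeholder)

// Hypothetical request-level tool.
const requestTools: PipeTool[] = [
    {type: 'function', function: {name: 'get_current_weather'}}
];

// 1. Request tools present: modelParams.tools is set to requestTools.
addToolsToParams(modelParams, pipe, requestTools);

// 2. No request tools: modelParams.tools falls back to pipe.tools (if any).
addToolsToParams(modelParams, pipe, undefined);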

β€Žpackages/baseai/types/tools.ts

Lines changed: 2 additions & 8 deletions

@@ -10,14 +10,6 @@ export interface Tool {
     };
 }
 
-export interface PipeTool {
-    type: 'function';
-    function: {
-        name: string;
-        description?: string;
-        parameters?: Record<string, any>;
-    };
-}
 export const pipeToolSchema = z.object({
     type: z.literal('function'),
     function: z.object({
@@ -26,3 +18,5 @@ export const pipeToolSchema = z.object({
         parameters: z.record(z.any()).optional()
     })
 });
+
+export type PipeTool = z.infer<typeof pipeToolSchema>;
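Since PipeTool is now inferred from pipeToolSchema, a tool literal and its runtime validation can share one definition. A small sketch, with a hypothetical weather tool:

import {z} from 'zod';
import {pipeToolSchema, type PipeTool} from 'types/tools';

// Compile-time: the literal must match the inferred PipeTool shape.
const weatherTool: PipeTool = {
    type: 'function',
    function: {
        name: 'get_current_weather',
        description: 'Get the current weather for a location',
        parameters: {
            type: 'object',
            properties: {location: {type: 'string'}},
            required: ['location']
        }
    }
};

// Runtime: validate incoming request tools the same way run.ts does.
const tools: PipeTool[] = z.array(pipeToolSchema).parse([weatherTool]);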
