
Commit 0371ff7

nirga and claude authored
fix(sdk): support vercel AI SDK tool calling + structured outputs (#675)
Co-authored-by: Claude <[email protected]>
1 parent f411da1 commit 0371ff7
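
For orientation before the diffs: a minimal sketch of the pattern the new samples exercise, namely the Traceloop SDK initialized once and a Vercel AI SDK call with per-call telemetry switched on. The app name and prompt are illustrative placeholders; the model and options mirror the sample files added in this commit.

import * as traceloop from "@traceloop/node-server-sdk";
import { openai } from "@ai-sdk/openai";
import { generateText } from "ai";

// Illustrative app name; the samples below use their own.
traceloop.initialize({ appName: "vercel_ai_sketch", disableBatch: true });

async function main() {
  const { text } = await generateText({
    model: openai("gpt-4o"),
    prompt: "Say hello in one sentence.",
    // Per-call opt-in that emits this call's spans through OpenTelemetry.
    experimental_telemetry: { isEnabled: true },
  });
  console.log(text);
}

main().catch(console.error);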

File tree: 8 files changed (+1316 / -161 lines)


packages/sample-app/package.json

Lines changed: 2 additions & 0 deletions
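
The two additions below register npm scripts for the new samples; once built, they can be run from packages/sample-app with npm run run:vercel_object and npm run run:vercel_tools. Each script rebuilds first, and, judging by the dotenv imports added elsewhere in this commit, the samples expect model-provider credentials in a local .env file.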
@@ -18,6 +18,8 @@
     "run:with": "npm run build && node dist/src/sample_with.js",
     "run:prompt_mgmt": "npm run build && node dist/src/sample_prompt_mgmt.js",
     "run:vercel": "npm run build && node dist/src/sample_vercel_ai.js",
+    "run:vercel_object": "npm run build && node dist/src/sample_vercel_ai_object.js",
+    "run:vercel_tools": "npm run build && node dist/src/sample_vercel_ai_tools.js",
     "run:sample_vision": "npm run build && node dist/src/sample_vision_prompt.js",
     "run:sample_azure": "npm run build && node dist/src/sample_azure.js",
     "run:openai_streaming": "npm run build && node dist/src/sample_openai_streaming.js",

packages/sample-app/src/sample_experiment.ts

Lines changed: 13 additions & 8 deletions
@@ -8,6 +8,8 @@ import type {
   TaskOutput,
 } from "@traceloop/node-server-sdk";
 
+import "dotenv/config";
+
 const main = async () => {
   console.log("Starting sample experiment");
   traceloop.initialize({
@@ -70,9 +72,12 @@ const main = async () => {
       max_tokens: 500,
     });
 
+    const completion = answer.choices?.[0]?.message?.content || "";
+
     return {
-      completion: answer.choices?.[0]?.message?.content || "",
+      completion: completion,
       prompt: promptText,
+      answer: completion,
     };
   };
 
@@ -88,7 +93,7 @@ const main = async () => {
     return {
       completion: answer,
       prompt: promptText,
-      strategy: "provide_info",
+      answer,
     };
   };
 
@@ -115,10 +120,10 @@ const main = async () => {
   const loader1 = startLoader(" Processing experiment");
 
   const results1 = await client.experiment.run(medicalTaskRefuseAdvice, {
-    datasetSlug: "medical-q",
+    datasetSlug: "ai-doctor-dataset",
     datasetVersion: "v1",
-    evaluators: ["medical_advice"],
-    experimentSlug: "medical-advice-exp-ts",
+    evaluators: ["Medical Advice Given"],
+    experimentSlug: "medical-advice-experiment",
     stopOnError: false,
   });
 
@@ -137,10 +142,10 @@ const main = async () => {
   const loader2 = startLoader(" Processing experiment");
 
   const results2 = await client.experiment.run(medicalTaskProvideInfo, {
-    datasetSlug: "medical-q",
+    datasetSlug: "ai-doctor-dataset",
     datasetVersion: "v1",
-    evaluators: ["medical_advice"],
-    experimentSlug: "medical-advice-exp-ts",
+    evaluators: ["Medical Advice Given"],
+    experimentSlug: "medical-advice-experiment",
     stopOnError: false,
     waitForResults: true,
   });

packages/sample-app/src/sample_vercel_ai.ts

Lines changed: 2 additions & 0 deletions
@@ -2,6 +2,8 @@ import * as traceloop from "@traceloop/node-server-sdk";
 import { openai } from "@ai-sdk/openai";
 import { generateText } from "ai";
 
+import "dotenv/config";
+
 traceloop.initialize({
   appName: "sample_vercel_ai",
   disableBatch: true,

packages/sample-app/src/sample_vercel_ai_object.ts

Lines changed: 49 additions & 0 deletions
@@ -0,0 +1,49 @@
+import * as traceloop from "@traceloop/node-server-sdk";
+import { openai } from "@ai-sdk/openai";
+import { generateObject } from "ai";
+import { z } from "zod";
+
+import "dotenv/config";
+
+traceloop.initialize({
+  appName: "sample_vercel_ai_object",
+  disableBatch: true,
+});
+
+const PersonSchema = z.object({
+  name: z.string(),
+  age: z.number(),
+  occupation: z.string(),
+  skills: z.array(z.string()),
+  location: z.object({
+    city: z.string(),
+    country: z.string(),
+  }),
+});
+
+async function generatePersonProfile(description: string) {
+  return await traceloop.withWorkflow(
+    { name: "generate_person_profile" },
+    async () => {
+      const { object } = await generateObject({
+        model: openai("gpt-4o"),
+        schema: PersonSchema,
+        prompt: `Based on this description, generate a detailed person profile: ${description}`,
+        experimental_telemetry: { isEnabled: true },
+      });
+
+      return object;
+    },
+    { description },
+  );
+}
+
+async function main() {
+  const profile = await generatePersonProfile(
+    "A talented software engineer from Paris who loves working with AI and machine learning, speaks multiple languages, and enjoys traveling.",
+  );
+
+  console.log("Generated person profile:", JSON.stringify(profile, null, 2));
+}
+
+main().catch(console.error);
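
A note on the sample above: generateObject validates the model response against the Zod schema and returns a parsed, typed object rather than free text, while traceloop.withWorkflow wraps the call in a workflow named "generate_person_profile" with { description } passed as its input, so the structured-output request shows up in traces as a single workflow.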

packages/sample-app/src/sample_vercel_ai_tools.ts

Lines changed: 159 additions & 0 deletions
@@ -0,0 +1,159 @@
+import * as traceloop from "@traceloop/node-server-sdk";
+import { openai } from "@ai-sdk/openai";
+import { generateText, tool } from "ai";
+import { z } from "zod";
+
+import "dotenv/config";
+
+traceloop.initialize({
+  appName: "sample_vercel_ai_tools",
+  disableBatch: true,
+});
+
+// Define tools
+const getWeather = tool({
+  description: "Get the current weather for a specified location",
+  parameters: z.object({
+    location: z.string().describe("The location to get the weather for"),
+  }),
+  execute: async ({ location }) => {
+    console.log(`🔧 Tool 'getWeather' called with location: ${location}`);
+
+    // Simulate API call delay
+    await new Promise((resolve) => setTimeout(resolve, 100));
+
+    // Simulate weather data
+    const weatherData = {
+      location,
+      temperature: Math.floor(Math.random() * 30) + 60, // 60-90°F
+      condition: ["Sunny", "Cloudy", "Rainy", "Snowy"][
+        Math.floor(Math.random() * 4)
+      ],
+      humidity: Math.floor(Math.random() * 40) + 40, // 40-80%
+    };
+
+    console.log(`🌤️ Weather data retrieved for ${location}:`, weatherData);
+    return weatherData;
+  },
+});
+
+const calculateDistance = tool({
+  description: "Calculate the distance between two cities",
+  parameters: z.object({
+    fromCity: z.string().describe("The starting city"),
+    toCity: z.string().describe("The destination city"),
+  }),
+  execute: async ({ fromCity, toCity }) => {
+    console.log(
+      `🔧 Tool 'calculateDistance' called from ${fromCity} to ${toCity}`,
+    );
+
+    // Simulate API call delay
+    await new Promise((resolve) => setTimeout(resolve, 150));
+
+    // Simulate distance calculation
+    const distance = Math.floor(Math.random() * 2000) + 100; // 100-2100 miles
+    const result = {
+      from: fromCity,
+      to: toCity,
+      distance: `${distance} miles`,
+      drivingTime: `${Math.floor(distance / 60)} hours`,
+    };
+
+    console.log(`🗺️ Distance calculated:`, result);
+    return result;
+  },
+});
+
+const searchRestaurants = tool({
+  description: "Search for restaurants in a specific city",
+  parameters: z.object({
+    city: z.string().describe("The city to search for restaurants"),
+    cuisine: z
+      .string()
+      .optional()
+      .describe("Optional cuisine type (e.g., Italian, Mexican)"),
+  }),
+  execute: async ({ city, cuisine }) => {
+    console.log(
+      `🔧 Tool 'searchRestaurants' called for ${city}${cuisine ? ` (${cuisine} cuisine)` : ""}`,
+    );
+
+    // Simulate API call delay
+    await new Promise((resolve) => setTimeout(resolve, 200));
+
+    // Simulate restaurant data
+    const restaurantNames = [
+      "The Golden Fork",
+      "Sunset Bistro",
+      "Ocean View",
+      "Mountain Top",
+      "Urban Kitchen",
+      "Garden Cafe",
+      "Heritage House",
+      "Modern Table",
+    ];
+
+    const restaurants = Array.from({ length: 3 }, (_, i) => ({
+      name: restaurantNames[Math.floor(Math.random() * restaurantNames.length)],
+      cuisine:
+        cuisine ||
+        ["Italian", "Mexican", "Asian", "American"][
+          Math.floor(Math.random() * 4)
+        ],
+      rating: (Math.random() * 2 + 3).toFixed(1), // 3.0-5.0 rating
+      priceRange: ["$", "$$", "$$$"][Math.floor(Math.random() * 3)],
+    }));
+
+    console.log(
+      `🍽️ Found ${restaurants.length} restaurants in ${city}:`,
+      restaurants,
+    );
+    return { city, restaurants };
+  },
+});
+
+async function planTrip(destination: string) {
+  return await traceloop.withWorkflow(
+    { name: "plan_trip" },
+    async () => {
+      console.log(`\n🌟 Planning a trip to ${destination}...\n`);
+
+      const result = await generateText({
+        model: openai("gpt-4o"),
+        prompt: `Help me plan a trip to ${destination}. I'd like to know:
+1. What's the weather like there?
+2. Find some good restaurants to try
+3. If I'm traveling from New York, how far is it?
+
+Please use the available tools to get current information and provide a comprehensive travel guide.`,
+        tools: {
+          getWeather,
+          calculateDistance,
+          searchRestaurants,
+        },
+        maxSteps: 5, // Allow multiple tool calls
+        experimental_telemetry: { isEnabled: true },
+      });
+
+      return result.text;
+    },
+    { destination },
+  );
+}
+
+async function main() {
+  try {
+    const travelGuide = await planTrip("San Francisco");
+
+    console.log("\n" + "=".repeat(80));
+    console.log("🗺️ TRAVEL GUIDE");
+    console.log("=".repeat(80));
+    console.log(travelGuide);
+    console.log("=".repeat(80));
+  } catch (error) {
+    console.error("❌ Error planning trip:", error);
+  }
+}
+
+main().catch(console.error);
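
A note on the sample above: maxSteps: 5 is what lets generateText feed tool results back to the model and keep generating (without it the call would stop after the first round of tool calls), and experimental_telemetry: { isEnabled: true } is the per-call opt-in that surfaces those tool-call spans to the SDK, which is the behavior this commit adds support for.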
