Merged
103 changes: 74 additions & 29 deletions examples/nodejs/workflows/workflows.ts
@@ -1,40 +1,85 @@
// Experimental upcoming beta AI primitive.
// Please refer to the documentation for more information: https://langbase.com/docs

// Test script for the simplified proxy approach
import 'dotenv/config';
import {Langbase, Workflow} from 'langbase';
import {Langbase} from 'langbase';

// Create Langbase instance
const langbase = new Langbase({
apiKey: process.env.LANGBASE_API_KEY!,
});

async function main() {
const {step} = new Workflow({debug: true});

const result = await step({
id: 'summarize',
run: async () => {
return langbase.llm.run({
model: 'openai:gpt-4o-mini',
apiKey: process.env.OPENAI_API_KEY!,
messages: [
{
role: 'system',
content:
'You are an expert summarizer. Summarize the user input.',
},
{
role: 'user',
content:
'I am testing workflows. I just created an example of summarize workflow. Can you summarize this?',
},
],
stream: false,
});
},
// Create a workflow with debug mode enabled
const workflow = langbase.workflow({
name: 'simplified-proxy-test',
debug: true, // Enable debug logging
});

console.log(result['completion']);
try {
// STEP 1: Return a custom result instead of a Langbase API response
const step1Result = await workflow.step({
id: 'call-but-return-custom',
run: async () => {
// Return custom result instead
return {
customField: 'Custom result from simplified proxy',
timestamp: new Date().toISOString(),
};
},
});

// STEP 2: Return agent.run result directly
const step2Result = await workflow.step({
id: 'return-agent-run-directly',
run: async () => {
// Call Langbase API and return the result directly
return langbase.agent.run({
model: 'openai:gpt-4o-mini',
apiKey: process.env.OPENAI_API_KEY!,
instructions: 'Be brief and concise.',
input: 'What is 2+2?',
stream: false,
});
},
});

// STEP 3: Make multiple Langbase calls in one step
const step3Result = await workflow.step({
id: 'multiple-calls',
run: async () => {
// First call
const call1 = await langbase.agent.run({
model: 'openai:gpt-4o-mini',
apiKey: process.env.OPENAI_API_KEY!,
instructions: 'Be brief.',
input: 'First proxy test',
stream: false,
});

// Second call with different input
const call2 = await langbase.agent.run({
model: 'openai:gpt-4o-mini',
apiKey: process.env.OPENAI_API_KEY!,
instructions: 'Be brief.',
input: 'Second proxy test',
stream: false,
});

// Return combined result
return {
summary: 'Multiple calls completed with simplified proxy',
calls: 2,
firstOutput: call1.output,
secondOutput: call2.output,
};
},
});
} catch (error) {
console.error('❌ Workflow error:', error);
} finally {
// End the workflow to show the trace report
workflow.end();
}
}

main();
// Run the test
main().catch(console.error);
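
As written, the example assigns `step1Result`, `step2Result`, and `step3Result` but never inspects them. A minimal sketch (not part of the diff) of how they could be logged inside the `try` block, using only fields the steps above actually return:

```ts
// Sketch only: these logs would go inside the try block of main().
// `customField`, `summary`, and `calls` come from the objects the steps return above;
// `output` is the same field step 3 already reads off the agent.run response.
console.log('Step 1:', step1Result.customField);
console.log('Step 2:', step2Result.output);
console.log('Step 3:', step3Result.summary, `(${step3Result.calls} calls)`);
```
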
11 changes: 11 additions & 0 deletions packages/langbase/src/common/request.ts
@@ -62,6 +62,17 @@ export class Request {
const isLllmGenerationEndpoint =
GENERATION_ENDPOINTS.includes(endpoint);

// All endpoints should return headers if rawResponse is true
if (!isLllmGenerationEndpoint && options.body?.rawResponse) {
const responseData = await response.json();
return {
...responseData,
rawResponse: {
headers: Object.fromEntries(response.headers.entries()),
},
} as T;
}

if (isLllmGenerationEndpoint) {
const threadId = response.headers.get('lb-thread-id');

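The added block above merges the parsed JSON body with a `rawResponse.headers` map for non-generation endpoints whenever the request body sets `rawResponse: true`. A rough sketch of the shape a caller then receives; the wrapper type and the example payload are illustrative assumptions, only the `rawResponse.headers` field mirrors what the branch attaches:

```ts
// Illustrative only: WithRawResponse and the sample body are assumptions,
// but rawResponse.headers matches the object attached by the new branch in request.ts.
type WithRawResponse<T> = T & {
	rawResponse: {
		headers: Record<string, string>; // all HTTP response headers, e.g. 'content-type'
	};
};

// What a non-generation endpoint response might look like with rawResponse: true
const example: WithRawResponse<{success: boolean}> = {
	success: true,
	rawResponse: {
		headers: {'content-type': 'application/json'},
	},
};

console.log(example.rawResponse.headers['content-type']); // "application/json"
```
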
26 changes: 26 additions & 0 deletions packages/langbase/src/langbase/langbase.ts
@@ -1,5 +1,6 @@
import {convertDocToFormData} from '@/lib/utils/doc-to-formdata';
import {Request} from '../common/request';
import {Workflow} from './workflows';

export type Role = 'user' | 'assistant' | 'system' | 'tool';

@@ -602,6 +603,12 @@ export class Langbase {
};
};

public workflow: (config: {debug?: boolean; name: string}) => Workflow;

public traces: {
create: (trace: any) => Promise<any>;
};

constructor(options?: LangbaseOptions) {
this.baseUrl = options?.baseUrl ?? 'https://api.langbase.com';
this.apiKey = options?.apiKey ?? '';
@@ -684,6 +691,12 @@
this.agent = {
run: this.runAgent.bind(this),
};

this.workflow = config => new Workflow({...config, langbase: this});

this.traces = {
create: this.createTrace.bind(this),
};
}

private async runPipe(
@@ -1131,4 +1144,17 @@
},
});
}

/**
* Creates a new trace on Langbase.
*
* @param {any} trace - The trace data to send.
* @returns {Promise<any>} A promise that resolves to the response of the trace creation.
*/
private async createTrace(trace: any): Promise<any> {
return this.request.post({
endpoint: '/v1/traces',
body: trace,
});
}
}
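
The constructor wiring above exposes two new entry points on the client: `langbase.workflow(...)`, which builds a `Workflow` bound to the instance, and `langbase.traces.create(...)`, which posts to `/v1/traces`. A minimal sketch of both, assuming only the signatures visible in this diff; the trace payload is untyped `any` here, so the object passed to `traces.create` is purely illustrative:

```ts
import {Langbase} from 'langbase';

const langbase = new Langbase({apiKey: process.env.LANGBASE_API_KEY!});

async function sketch() {
	// Workflow factory bound to this client instance (added in this PR)
	const workflow = langbase.workflow({name: 'sketch-workflow', debug: true});

	const result = await workflow.step({
		id: 'noop',
		run: async () => ({ok: true}),
	});
	console.log(result);

	// End the workflow to show the trace report
	workflow.end();

	// Direct trace creation; the payload shape is `any` in this diff, so this object is illustrative
	await langbase.traces.create({
		workflow: {name: 'sketch-workflow', startTime: Date.now(), steps: []},
	});
}

sketch().catch(console.error);
```
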
133 changes: 133 additions & 0 deletions packages/langbase/src/langbase/trace.ts
@@ -0,0 +1,133 @@
export interface Trace {
name: string;
startTime: number;
endTime?: number;
duration?: number;
steps: StepTrace[];
error?: string;
}

export interface StepTrace {
name: string;
output: any;
error?: string;
traces: string[] | null;
duration: number;
startTime: number;
endTime: number;
}

export type TraceType =
| 'workflow'
| 'agent'
| 'chunk'
| 'memory'
| 'parse'
| 'embed';

export type PrimitiveTrace =
| {chunk: any}
| {agent: any}
| {memory: any}
| {parse: any}
| {embed: any}
| {workflow: WorkflowTrace; entityAuthId: string};

type WorkflowTrace = {
createdAt: string;
id: string;
agentWorkflowId: string;
name: string;
startTime: number;
endTime?: number;
duration?: number;
steps: StepTrace[];
error?: string;
};

export class TraceManager {
private traces: Map<string, PrimitiveTrace> = new Map();

createTrace(type: TraceType, traceData: any = {}): string {
const traceId = crypto.randomUUID();
let trace: PrimitiveTrace;
const createdAt = new Date().toISOString();
if (type === 'workflow') {
trace = {
workflow: {
createdAt,
id: traceId,
agentWorkflowId: process.env.LANGBASE_AGENT_ID || '',
name: traceData.name || '',
startTime: Date.now(),
steps: [],
},
entityAuthId: '',
};
} else if (type === 'agent') {
trace = {agent: {...traceData, createdAt, id: traceId}};
} else if (type === 'chunk') {
trace = {chunk: {...traceData, createdAt, id: traceId}};
} else if (type === 'memory') {
trace = {memory: {...traceData, createdAt, id: traceId}};
} else if (type === 'parse') {
trace = {parse: {...traceData, createdAt, id: traceId}};
} else if (type === 'embed') {
trace = {embed: {...traceData, createdAt, id: traceId}};
} else {
throw new Error('Unknown trace type');
}
this.traces.set(traceId, trace);
return traceId;
}

addStep(traceId: string, step: StepTrace) {
const trace = this.traces.get(traceId);
if (trace && 'workflow' in trace) {
trace.workflow.steps.push(step);
}
}

endTrace(traceId: string) {
const trace = this.traces.get(traceId);
if (trace && 'workflow' in trace) {
trace.workflow.endTime = Date.now();
trace.workflow.duration =
trace.workflow.endTime - trace.workflow.startTime;
}
}

getTrace(traceId: string): PrimitiveTrace | undefined {
return this.traces.get(traceId);
}

printTrace(traceId: string) {
const trace = this.traces.get(traceId);
if (!trace) return;
if ('workflow' in trace) {
const wf = trace.workflow;
const duration = wf.endTime
? wf.endTime - wf.startTime
: Date.now() - wf.startTime;
console.log('\n📊 Workflow Trace:');
console.log(`Name: ${wf.name}`);
console.log(`Duration: ${duration}ms`);
console.log(`Start Time: ${new Date(wf.startTime).toISOString()}`);
if (wf.endTime) {
console.log(`End Time: ${new Date(wf.endTime).toISOString()}`);
}
console.log('\nSteps:');
wf.steps.forEach(step => {
console.log(`\n Step: ${step.name}`);
console.log(` Duration: ${step.duration}ms`);
if (step.traces && step.traces.length > 0) {
console.log(` Traces:`, step.traces);
}
console.log(` Output:`, step.output);
});
} else {
console.log('\n📊 Primitive Trace:');
console.dir(trace, {depth: 4});
}
}
}
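
A short sketch of driving `TraceManager` directly, using only the methods and the `StepTrace` shape defined above. The import path is an assumption; inside the SDK, the workflow runner is presumably the real caller:

```ts
import {TraceManager, StepTrace} from './trace'; // path assumed

const manager = new TraceManager();

// Start a workflow trace; agentWorkflowId falls back to LANGBASE_AGENT_ID (or '') internally
const traceId = manager.createTrace('workflow', {name: 'demo-workflow'});

// Record one completed step
const startTime = Date.now();
const step: StepTrace = {
	name: 'summarize',
	output: {text: 'hello'},
	traces: null,
	startTime,
	endTime: Date.now(),
	duration: Date.now() - startTime,
};
manager.addStep(traceId, step);

// Close the trace and print the console report
manager.endTrace(traceId);
manager.printTrace(traceId);
```
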