Skip to content

Commit 3053156

Browse files
Showcase gpt-oss code interpreter integration (#55)
* Showcase gpt-oss code interpreter integration * Remove unused port expose instruction * Fix type errors
1 parent 871f813 commit 3053156

File tree

8 files changed

+8460
-4
lines changed

8 files changed

+8460
-4
lines changed

examples/code-interpreter/Dockerfile

Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,9 @@
1+
# This image is unique to this repo, and you'll never need it.
2+
# Whenever you're integrating with sandbox SDK in your own project,
3+
# you should use the official image instead:
4+
# FROM docker.io/cloudflare/sandbox:0.2.3
5+
FROM cloudflare/sandbox-test:0.2.3
6+
7+
# On a mac, you might need to actively pick up the
8+
# arm64 build of the image.
9+
# FROM --platform=linux/arm64 cloudflare/sandbox-test:0.2.3

examples/code-interpreter/README.md

Lines changed: 75 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,75 @@
1+
# Cloudflare GPT-OSS with Python Sandbox
2+
3+
A Cloudflare Worker that integrates the GPT-OSS model with Python code execution capabilities using the Cloudflare Sandbox SDK.
4+
5+
## Features
6+
7+
- **GPT-OSS Model Integration**: Uses Cloudflare's `@cf/openai/gpt-oss-120b` model via direct API calls
8+
- **Python Code Execution**: Executes Python code in a secure sandboxed environment
9+
- **Function Calling**: Implements function calling to bridge the model with code execution
10+
- **Type Safety**: Full TypeScript support with OpenAI SDK types
11+
- **Container-based Isolation**: Runs Python in Docker containers via Durable Objects
12+
13+
## How It Works
14+
15+
1. **Initial Request**: User sends a prompt to the Worker
16+
2. **Model Processing**: GPT-OSS model receives the prompt with an `execute_python` function tool
17+
3. **Function Detection**: Model decides if Python execution is needed
18+
4. **Code Execution**: Python code runs in an isolated Cloudflare Sandbox container
19+
5. **Result Integration**: Execution results are sent back to the model
20+
6. **Final Response**: Model generates a response incorporating the execution results
21+
22+
## API Endpoint
23+
24+
```bash
25+
POST /foo
26+
Content-Type: application/json
27+
28+
{
29+
"input": "Your prompt here"
30+
}
31+
```
32+
33+
## Example Usage
34+
35+
```bash
36+
# Simple calculation
37+
curl -X POST http://localhost:8787/foo \
38+
-H "Content-Type: application/json" \
39+
-d '{"input": "Calculate 5 factorial using Python"}'
40+
41+
# Execute specific code
42+
curl -X POST http://localhost:8787/foo \
43+
-H "Content-Type: application/json" \
44+
-d '{"input": "Execute this Python: print(sum(range(1, 101)))"}'
45+
46+
# Complex operations
47+
curl -X POST http://localhost:8787/foo \
48+
-H "Content-Type: application/json" \
49+
-d '{"input": "Use Python to find all prime numbers under 20"}'
50+
```
51+
52+
## Setup
53+
54+
1. From the project root, run
55+
```bash
56+
npm install
57+
npm run build
58+
```
59+
60+
2. In this directory, create `.dev.vars` file with your Cloudflare credentials:
61+
```
62+
CLOUDFLARE_API_KEY=your_api_key_here
63+
CLOUDFLARE_ACCOUNT_ID=your_account_id_here
64+
```
65+
66+
3. Run locally:
67+
```bash
68+
cd examples/code-interpreter # if you're not already here
69+
npm run dev
70+
```
71+
72+
## Notes & Limitations
73+
74+
- The `openai` SDK currently throws an error when using this model with Workers AI, so the REST API is used instead
75+
- Calling the tool `code_interpreter` (akin to OpenAI's `code_interpreter` tool type) currently throws an error, so the tool is set up as an `execute_python` function instead
Lines changed: 23 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,23 @@
1+
{
2+
"name": "@cloudflare/sandbox-code-interpreter-example",
3+
"version": "1.0.0",
4+
"type": "module",
5+
"private": true,
6+
"description": "An example of using the sandbox package with code interpreter",
7+
"scripts": {
8+
"deploy": "wrangler deploy",
9+
"dev": "wrangler dev",
10+
"start": "wrangler dev",
11+
"cf-typegen": "wrangler types"
12+
},
13+
"author": "",
14+
"license": "MIT",
15+
"devDependencies": {
16+
"typescript": "^5.5.2",
17+
"wrangler": "^4.27.0"
18+
},
19+
"dependencies": {
20+
"@cloudflare/sandbox": "^0.2.2",
21+
"openai": "^5.12.0"
22+
}
23+
}
Lines changed: 147 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,147 @@
1+
import OpenAI from 'openai';
2+
import { getSandbox } from '@cloudflare/sandbox';
3+
4+
export { Sandbox } from '@cloudflare/sandbox';
5+
6+
const API_PATH = '/foo';
7+
const MODEL = '@cf/openai/gpt-oss-120b';
8+
9+
type AIResponse = OpenAI.Responses.Response;
10+
type ResponseInputItem = OpenAI.Responses.ResponseInputItem;
11+
type FunctionTool = OpenAI.Responses.FunctionTool;
12+
type FunctionCall = OpenAI.Responses.ResponseFunctionToolCall;
13+
14+
interface SandboxResult {
15+
results?: Array<{ text?: string; html?: string; [key: string]: any }>;
16+
logs?: { stdout?: string[]; stderr?: string[] };
17+
error?: string;
18+
}
19+
20+
async function callCloudflareAPI(
21+
env: Env,
22+
input: ResponseInputItem[],
23+
tools?: FunctionTool[],
24+
toolChoice: string = 'auto',
25+
): Promise<AIResponse> {
26+
const response = await fetch(`https://api.cloudflare.com/client/v4/accounts/${env.CLOUDFLARE_ACCOUNT_ID}/ai/v1/responses`, {
27+
method: 'POST',
28+
headers: {
29+
'Content-Type': 'application/json',
30+
Authorization: `Bearer ${env.CLOUDFLARE_API_KEY}`,
31+
},
32+
body: JSON.stringify({
33+
model: MODEL,
34+
input,
35+
...(tools && { tools, tool_choice: toolChoice }),
36+
}),
37+
});
38+
39+
if (!response.ok) {
40+
const errorText = await response.text();
41+
throw new Error(`API call failed: ${response.status} - ${errorText}`);
42+
}
43+
44+
return response.json() as Promise<AIResponse>;
45+
}
46+
47+
async function executePythonCode(env: Env, code: string): Promise<string> {
48+
const sandboxId = env.Sandbox.idFromName('default');
49+
const sandbox = getSandbox(env.Sandbox, sandboxId.toString());
50+
const pythonCtx = await sandbox.createCodeContext({ language: 'python' });
51+
const result = (await sandbox.runCode(code, { context: pythonCtx })) as SandboxResult;
52+
53+
// Extract output from results (expressions)
54+
if (result.results?.length) {
55+
const outputs = result.results.map((r) => r.text || r.html || JSON.stringify(r)).filter(Boolean);
56+
if (outputs.length) return outputs.join('\n');
57+
}
58+
59+
// Extract output from logs
60+
let output = '';
61+
if (result.logs?.stdout?.length) {
62+
output = result.logs.stdout.join('\n');
63+
}
64+
if (result.logs?.stderr?.length) {
65+
if (output) output += '\n';
66+
output += 'Error: ' + result.logs.stderr.join('\n');
67+
}
68+
69+
return result.error ? `Error: ${result.error}` : output || 'Code executed successfully';
70+
}
71+
72+
async function handleAIRequest(input: string, env: Env): Promise<string> {
73+
const pythonTool: FunctionTool = {
74+
type: 'function',
75+
name: 'execute_python',
76+
description: 'Execute Python code and return the output',
77+
parameters: {
78+
type: 'object',
79+
properties: {
80+
code: {
81+
type: 'string',
82+
description: 'The Python code to execute',
83+
},
84+
},
85+
required: ['code'],
86+
},
87+
strict: null,
88+
};
89+
90+
// Initial AI request with Python execution tool
91+
let response = await callCloudflareAPI(env, [{ role: 'user', content: input }], [pythonTool]);
92+
93+
// Check for function call
94+
const functionCall = response.output?.find(
95+
(item): item is FunctionCall => item.type === 'function_call' && item.name === 'execute_python',
96+
);
97+
98+
if (functionCall?.arguments) {
99+
try {
100+
const { code } = JSON.parse(functionCall.arguments) as { code: string };
101+
const output = await executePythonCode(env, code);
102+
103+
const functionResult: ResponseInputItem = {
104+
type: 'function_call_output',
105+
call_id: functionCall.call_id,
106+
output,
107+
} as OpenAI.Responses.ResponseInputItem.FunctionCallOutput;
108+
109+
// Get final response with execution result
110+
response = await callCloudflareAPI(env, [{ role: 'user', content: input }, functionCall as ResponseInputItem, functionResult]);
111+
} catch (error) {
112+
console.error('Sandbox execution failed:', error);
113+
}
114+
}
115+
116+
// Extract final response text
117+
const message = response.output?.find((item) => item.type === 'message');
118+
const textContent = message?.content?.find((c: any) => c.type === 'output_text');
119+
const text = textContent && 'text' in textContent ? textContent.text : undefined;
120+
121+
return text || 'No response generated';
122+
}
123+
124+
export default {
125+
async fetch(request: Request, env: Env): Promise<Response> {
126+
const url = new URL(request.url);
127+
128+
if (url.pathname !== API_PATH || request.method !== 'POST') {
129+
return new Response('Not Found', { status: 404 });
130+
}
131+
132+
try {
133+
const { input } = await request.json<{ input?: string }>();
134+
135+
if (!input) {
136+
return Response.json({ error: 'Missing input field' }, { status: 400 });
137+
}
138+
139+
const output = await handleAIRequest(input, env);
140+
return Response.json({ output });
141+
} catch (error) {
142+
console.error('Request failed:', error);
143+
const message = error instanceof Error ? error.message : 'Internal Server Error';
144+
return Response.json({ error: message }, { status: 500 });
145+
}
146+
},
147+
} satisfies ExportedHandler<Env>;
Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,7 @@
1+
{
2+
"extends": "../../tsconfig.base.json",
3+
"compilerOptions": {
4+
"types": ["./worker-configuration.d.ts"]
5+
},
6+
"include": ["worker-configuration.d.ts", "src/**/*.ts"]
7+
}

0 commit comments

Comments
 (0)