diff --git a/.changeset/ten-aliens-marry.md b/.changeset/ten-aliens-marry.md
new file mode 100644
index 0000000..53aba8b
--- /dev/null
+++ b/.changeset/ten-aliens-marry.md
@@ -0,0 +1,5 @@
+---
+"@cloudflare/sandbox": patch
+---
+
+update instructions
diff --git a/.github/changeset-version.ts b/.github/changeset-version.ts
index 3421e1c..4a31d63 100644
--- a/.github/changeset-version.ts
+++ b/.github/changeset-version.ts
@@ -15,7 +15,9 @@ execSync("npm install", {
 
 // Update Dockerfile and README version references after changeset updates package.json
 try {
-  const packageJson = JSON.parse(fs.readFileSync("./packages/sandbox/package.json", "utf-8"));
+  const packageJson = JSON.parse(
+    fs.readFileSync("./packages/sandbox/package.json", "utf-8")
+  );
   const newVersion = packageJson.version;
 
   const dockerfilePath = "./examples/basic/Dockerfile";
@@ -48,7 +50,6 @@ try {
   fs.writeFileSync(readmePath, readmeContent);
 
   console.log(`✅ Updated README.md version to ${newVersion}`);
-
 } catch (error) {
   console.error("❌ Failed to update file versions:", error);
   // Don't fail the whole release for this
diff --git a/examples/basic/Dockerfile b/examples/basic/Dockerfile
index 8a2b1df..d4b4448 100644
--- a/examples/basic/Dockerfile
+++ b/examples/basic/Dockerfile
@@ -8,8 +8,7 @@ FROM cloudflare/sandbox-test:0.1.3
 # arm64 build of the image.
 # FROM --platform=linux/arm64 cloudflare/sandbox-test:0.1.3
 
-EXPOSE 8080
-EXPOSE 3001
-# Run the same command as the original image
-CMD ["bun", "index.ts"]
+# expose any ports you might want to use (necessary for local dev)
+# EXPOSE 8080
+# EXPOSE 3001
 
diff --git a/examples/basic/README.md b/examples/basic/README.md
index e88e231..bfeba45 100644
--- a/examples/basic/README.md
+++ b/examples/basic/README.md
@@ -49,18 +49,21 @@ This example demonstrates the proper 3-layer architecture for Sandbox SDK applic
 ### Layer Responsibilities
 
 **Frontend (`app/index.tsx`)**
+
 - React-based UI with tabbed interface
 - HTTP requests to Worker API endpoints
 - Server-Sent Events for real-time streaming
 - State management for commands, processes, and ports
 
 **Worker (`src/index.ts`)**
+
 - HTTP API gateway with endpoint routing
 - Direct calls to Sandbox SDK methods
 - SSE streaming for real-time updates
 - CORS handling and error responses
 
 **Sandbox Durable Object**
+
 - Implements ISandbox interface methods
 - Process lifecycle management
 - AsyncIterable streaming capabilities
@@ -77,6 +80,7 @@ npm run deploy
 ## Development
 
 ### Project Structure
+
 ```
 examples/basic/
 ├── src/
diff --git a/examples/basic/app/style.css b/examples/basic/app/style.css
index b86407a..605a5d8 100644
--- a/examples/basic/app/style.css
+++ b/examples/basic/app/style.css
@@ -499,12 +499,24 @@ body {
 }
 
 /* Status color classes */
-.text-yellow-500 { color: #ffc107; }
-.text-blue-500 { color: #58a6ff; }
-.text-green-500 { color: #3fb950; }
-.text-red-500 { color: #f85149; }
-.text-orange-500 { color: #ff8c00; }
-.text-gray-500 { color: #8b949e; }
+.text-yellow-500 {
+  color: #ffc107;
+}
+.text-blue-500 {
+  color: #58a6ff;
+}
+.text-green-500 {
+  color: #3fb950;
+}
+.text-red-500 {
+  color: #f85149;
+}
+.text-orange-500 {
+  color: #ff8c00;
+}
+.text-gray-500 {
+  color: #8b949e;
+}
 
 /* Port Management Tab */
 .port-management-tab {
@@ -784,7 +796,8 @@ body {
 }
 
 /* Command and Log Streaming Sections */
-.command-streaming, .log-streaming {
+.command-streaming,
+.log-streaming {
   background-color: #161b22;
   border: 1px solid #30363d;
   border-radius: 8px;
@@ -792,7 +805,8 @@ body {
   margin-bottom: 2rem;
 }
 
-.command-streaming h3,
.log-streaming h3 { +.command-streaming h3, +.log-streaming h3 { color: #58a6ff; font-size: 1.2rem; margin-bottom: 0.5rem; @@ -974,7 +988,8 @@ body { font-size: 0.85rem; } -.stream-time, .event-count { +.stream-time, +.event-count { color: #8b949e; font-size: 0.8rem; font-family: "Fira Code", monospace; @@ -1946,13 +1961,18 @@ button:focus { } .quick-setup-button:before { - content: ''; + content: ""; position: absolute; top: 0; left: -100%; width: 100%; height: 100%; - background: linear-gradient(90deg, transparent, rgba(255, 255, 255, 0.1), transparent); + background: linear-gradient( + 90deg, + transparent, + rgba(255, 255, 255, 0.1), + transparent + ); transition: left 0.5s; } @@ -1982,7 +2002,11 @@ button:focus { .quick-setup-button.react { border-color: #61dafb; - background: linear-gradient(135deg, rgba(97, 218, 251, 0.1) 0%, rgba(97, 218, 251, 0.05) 100%); + background: linear-gradient( + 135deg, + rgba(97, 218, 251, 0.1) 0%, + rgba(97, 218, 251, 0.05) 100% + ); } .quick-setup-button.react:hover:not(:disabled) { @@ -1992,7 +2016,11 @@ button:focus { .quick-setup-button.vue { border-color: #4fc08d; - background: linear-gradient(135deg, rgba(79, 192, 141, 0.1) 0%, rgba(79, 192, 141, 0.05) 100%); + background: linear-gradient( + 135deg, + rgba(79, 192, 141, 0.1) 0%, + rgba(79, 192, 141, 0.05) 100% + ); } .quick-setup-button.vue:hover:not(:disabled) { @@ -2002,7 +2030,11 @@ button:focus { .quick-setup-button.static { border-color: #f39c12; - background: linear-gradient(135deg, rgba(243, 156, 18, 0.1) 0%, rgba(243, 156, 18, 0.05) 100%); + background: linear-gradient( + 135deg, + rgba(243, 156, 18, 0.1) 0%, + rgba(243, 156, 18, 0.05) 100% + ); } .quick-setup-button.static:hover:not(:disabled) { @@ -2040,27 +2072,27 @@ button:focus { .input-group { flex-direction: column; } - + .file-input { min-width: unset; } - + .template-buttons { flex-direction: column; } - + .template-button { width: 100%; } - + .quick-setup-buttons { grid-template-columns: 1fr; } - + .quick-setup-button { padding: 1rem; } - + .setup-icon { font-size: 2rem; min-width: 50px; diff --git a/examples/basic/src/endpoints/execute.ts b/examples/basic/src/endpoints/execute.ts index ca34267..1677fc4 100644 --- a/examples/basic/src/endpoints/execute.ts +++ b/examples/basic/src/endpoints/execute.ts @@ -1,21 +1,24 @@ import type { Sandbox } from "@cloudflare/sandbox"; import { parseJsonBody, errorResponse, jsonResponse } from "../http"; -export async function executeCommand(sandbox: Sandbox, request: Request) { - const body = await parseJsonBody(request); - const { command, sessionId } = body; - if (!command) { - return errorResponse("Command is required"); - } +export async function executeCommand( + sandbox: Sandbox, + request: Request +) { + const body = await parseJsonBody(request); + const { command, sessionId } = body; + if (!command) { + return errorResponse("Command is required"); + } - // Use the current SDK API signature: exec(command, options) - const result = await sandbox.exec(command, { sessionId }); - return jsonResponse({ - success: result.exitCode === 0, - exitCode: result.exitCode, - stdout: result.stdout, - stderr: result.stderr, - command: result.command, - duration: result.duration - }); + // Use the current SDK API signature: exec(command, options) + const result = await sandbox.exec(command, { sessionId }); + return jsonResponse({ + success: result.exitCode === 0, + exitCode: result.exitCode, + stdout: result.stdout, + stderr: result.stderr, + command: result.command, + duration: result.duration, + }); } 
diff --git a/examples/basic/src/endpoints/executeStream.ts b/examples/basic/src/endpoints/executeStream.ts index e78b39b..251fc04 100644 --- a/examples/basic/src/endpoints/executeStream.ts +++ b/examples/basic/src/endpoints/executeStream.ts @@ -2,49 +2,56 @@ import type { Sandbox } from "@cloudflare/sandbox"; import { parseSSEStream, type ExecEvent } from "@cloudflare/sandbox"; import { corsHeaders, errorResponse, parseJsonBody } from "../http"; -export async function executeCommandStream(sandbox: Sandbox, request: Request) { - const body = await parseJsonBody(request); - const { command, sessionId } = body; +export async function executeCommandStream( + sandbox: Sandbox, + request: Request +) { + const body = await parseJsonBody(request); + const { command, sessionId } = body; - if (!command) { - return errorResponse("Command is required"); - } + if (!command) { + return errorResponse("Command is required"); + } + + // Create readable stream for SSE + const { readable, writable } = new TransformStream(); + const writer = writable.getWriter(); - // Create readable stream for SSE - const { readable, writable } = new TransformStream(); - const writer = writable.getWriter(); + // Start streaming in the background + (async () => { + try { + const encoder = new TextEncoder(); - // Start streaming in the background - (async () => { - try { - const encoder = new TextEncoder(); + // Get the ReadableStream from sandbox + const stream = await sandbox.execStream(command, { sessionId }); - // Get the ReadableStream from sandbox - const stream = await sandbox.execStream(command, { sessionId }); - - // Convert to AsyncIterable using parseSSEStream - for await (const event of parseSSEStream(stream)) { - // Forward each typed event as SSE - await writer.write(encoder.encode(`data: ${JSON.stringify(event)}\n\n`)); - } - } catch (error: any) { - const errorEvent = { - type: 'error', - timestamp: new Date().toISOString(), - error: error.message - }; - await writer.write(new TextEncoder().encode(`data: ${JSON.stringify(errorEvent)}\n\n`)); - } finally { - await writer.close(); - } - })(); + // Convert to AsyncIterable using parseSSEStream + for await (const event of parseSSEStream(stream)) { + // Forward each typed event as SSE + await writer.write( + encoder.encode(`data: ${JSON.stringify(event)}\n\n`) + ); + } + } catch (error: any) { + const errorEvent = { + type: "error", + timestamp: new Date().toISOString(), + error: error.message, + }; + await writer.write( + new TextEncoder().encode(`data: ${JSON.stringify(errorEvent)}\n\n`) + ); + } finally { + await writer.close(); + } + })(); - return new Response(readable, { - headers: { - "Content-Type": "text/event-stream", - "Cache-Control": "no-cache", - "Connection": "keep-alive", - ...corsHeaders(), - }, - }); + return new Response(readable, { + headers: { + "Content-Type": "text/event-stream", + "Cache-Control": "no-cache", + Connection: "keep-alive", + ...corsHeaders(), + }, + }); } diff --git a/examples/basic/src/endpoints/fileDelete.ts b/examples/basic/src/endpoints/fileDelete.ts index f89ea92..914a3e3 100644 --- a/examples/basic/src/endpoints/fileDelete.ts +++ b/examples/basic/src/endpoints/fileDelete.ts @@ -11,14 +11,14 @@ export async function deleteFile(sandbox: Sandbox, request: Request) { } await sandbox.deleteFile(path); - return jsonResponse({ + return jsonResponse({ success: true, message: "File deleted", path, - timestamp: new Date().toISOString() + timestamp: new Date().toISOString(), }); } catch (error: any) { console.error("Error deleting 
file:", error); return errorResponse(`Failed to delete file: ${error.message}`); } -} \ No newline at end of file +} diff --git a/examples/basic/src/endpoints/fileMove.ts b/examples/basic/src/endpoints/fileMove.ts index 16fcaae..7bd255e 100644 --- a/examples/basic/src/endpoints/fileMove.ts +++ b/examples/basic/src/endpoints/fileMove.ts @@ -11,15 +11,15 @@ export async function moveFile(sandbox: Sandbox, request: Request) { } await sandbox.moveFile(sourcePath, destinationPath); - return jsonResponse({ + return jsonResponse({ success: true, message: "File moved", sourcePath, destinationPath, - timestamp: new Date().toISOString() + timestamp: new Date().toISOString(), }); } catch (error: any) { console.error("Error moving file:", error); return errorResponse(`Failed to move file: ${error.message}`); } -} \ No newline at end of file +} diff --git a/examples/basic/src/endpoints/fileRead.ts b/examples/basic/src/endpoints/fileRead.ts index b19388b..af24d5d 100644 --- a/examples/basic/src/endpoints/fileRead.ts +++ b/examples/basic/src/endpoints/fileRead.ts @@ -11,14 +11,14 @@ export async function readFile(sandbox: Sandbox, request: Request) { } const result = await sandbox.readFile(path, { encoding }); - return jsonResponse({ + return jsonResponse({ success: true, - path, + path, content: result.content, // Extract the actual content string from the response - timestamp: new Date().toISOString() + timestamp: new Date().toISOString(), }); } catch (error: any) { console.error("Error reading file:", error); return errorResponse(`Failed to read file: ${error.message}`); } -} \ No newline at end of file +} diff --git a/examples/basic/src/endpoints/fileRename.ts b/examples/basic/src/endpoints/fileRename.ts index dc8b645..35637da 100644 --- a/examples/basic/src/endpoints/fileRename.ts +++ b/examples/basic/src/endpoints/fileRename.ts @@ -11,15 +11,15 @@ export async function renameFile(sandbox: Sandbox, request: Request) { } await sandbox.renameFile(oldPath, newPath); - return jsonResponse({ + return jsonResponse({ success: true, message: "File renamed", oldPath, newPath, - timestamp: new Date().toISOString() + timestamp: new Date().toISOString(), }); } catch (error: any) { console.error("Error renaming file:", error); return errorResponse(`Failed to rename file: ${error.message}`); } -} \ No newline at end of file +} diff --git a/examples/basic/src/endpoints/gitCheckout.ts b/examples/basic/src/endpoints/gitCheckout.ts index ab3f481..eaa8197 100644 --- a/examples/basic/src/endpoints/gitCheckout.ts +++ b/examples/basic/src/endpoints/gitCheckout.ts @@ -12,17 +12,17 @@ export async function gitCheckout(sandbox: Sandbox, request: Request) { const actualBranch = branch || "main"; await sandbox.gitCheckout(repoUrl, { branch: actualBranch, targetDir }); - - return jsonResponse({ + + return jsonResponse({ success: true, message: "Repository checked out", repoUrl, branch: actualBranch, targetDir, - timestamp: new Date().toISOString() + timestamp: new Date().toISOString(), }); } catch (error: any) { console.error("Error checking out repository:", error); return errorResponse(`Failed to checkout repository: ${error.message}`); } -} \ No newline at end of file +} diff --git a/examples/basic/src/endpoints/mkdir.ts b/examples/basic/src/endpoints/mkdir.ts index fb4fee9..280b5a2 100644 --- a/examples/basic/src/endpoints/mkdir.ts +++ b/examples/basic/src/endpoints/mkdir.ts @@ -1,7 +1,10 @@ import type { Sandbox } from "@cloudflare/sandbox"; import { errorResponse, jsonResponse, parseJsonBody } from "../http"; -export 
async function createDirectory(sandbox: Sandbox, request: Request) { +export async function createDirectory( + sandbox: Sandbox, + request: Request +) { try { const body = await parseJsonBody(request); const { path, recursive } = body; @@ -11,15 +14,15 @@ export async function createDirectory(sandbox: Sandbox, request: Reques } await sandbox.mkdir(path, { recursive }); - return jsonResponse({ + return jsonResponse({ success: true, message: "Directory created", path, recursive: recursive || false, - timestamp: new Date().toISOString() + timestamp: new Date().toISOString(), }); } catch (error: any) { console.error("Error creating directory:", error); return errorResponse(`Failed to create directory: ${error.message}`); } -} \ No newline at end of file +} diff --git a/examples/basic/src/endpoints/ports.ts b/examples/basic/src/endpoints/ports.ts index 23aab5a..e0c6f31 100644 --- a/examples/basic/src/endpoints/ports.ts +++ b/examples/basic/src/endpoints/ports.ts @@ -2,32 +2,34 @@ import type { Sandbox } from "@cloudflare/sandbox"; import { errorResponse, jsonResponse, parseJsonBody } from "../http"; export async function exposePort(sandbox: Sandbox, request: Request) { - const body = await parseJsonBody(request); - const { port, name } = body; + const body = await parseJsonBody(request); + const { port, name } = body; - if (!port) { - return errorResponse("Port number is required"); - } + if (!port) { + return errorResponse("Port number is required"); + } - // Automatically capture hostname from request - const hostname = new URL(request.url).host; + // Automatically capture hostname from request + const hostname = new URL(request.url).host; - const preview = await sandbox.exposePort(port, { - ...(name ? { name } : {}), - hostname - }); - return jsonResponse(preview); + const preview = await sandbox.exposePort(port, { + ...(name ? 
{ name } : {}), + hostname, + }); + return jsonResponse(preview); } -export async function unexposePort(sandbox: Sandbox, request: Request) { - const body = await parseJsonBody(request); - const { port } = body; +export async function unexposePort( + sandbox: Sandbox, + request: Request +) { + const body = await parseJsonBody(request); + const { port } = body; - if (!port) { - return errorResponse("Port number is required"); - } + if (!port) { + return errorResponse("Port number is required"); + } - await sandbox.unexposePort(port); - return jsonResponse({ message: "Port unexposed", port }); + await sandbox.unexposePort(port); + return jsonResponse({ message: "Port unexposed", port }); } - diff --git a/examples/basic/src/endpoints/processGet.ts b/examples/basic/src/endpoints/processGet.ts index fd62d9b..db70b9c 100644 --- a/examples/basic/src/endpoints/processGet.ts +++ b/examples/basic/src/endpoints/processGet.ts @@ -2,18 +2,21 @@ import type { Sandbox } from "@cloudflare/sandbox"; import { errorResponse, jsonResponse } from "../http"; export async function getProcess(sandbox: Sandbox, pathname: string) { - const processId = pathname.split("/").pop(); - if (!processId) { - return errorResponse("Process ID is required"); - } + const processId = pathname.split("/").pop(); + if (!processId) { + return errorResponse("Process ID is required"); + } - if (typeof sandbox.getProcess === 'function') { - const process = await sandbox.getProcess(processId); - if (!process) { - return errorResponse("Process not found", 404); - } - return jsonResponse(process); - } else { - return errorResponse("Process management not implemented in current SDK version", 501); + if (typeof sandbox.getProcess === "function") { + const process = await sandbox.getProcess(processId); + if (!process) { + return errorResponse("Process not found", 404); } + return jsonResponse(process); + } else { + return errorResponse( + "Process management not implemented in current SDK version", + 501 + ); + } } diff --git a/examples/basic/src/endpoints/processKill.ts b/examples/basic/src/endpoints/processKill.ts index 11321b1..458a54c 100644 --- a/examples/basic/src/endpoints/processKill.ts +++ b/examples/basic/src/endpoints/processKill.ts @@ -1,23 +1,35 @@ import type { Sandbox } from "@cloudflare/sandbox"; import { errorResponse, jsonResponse } from "../http"; -export async function killProcesses(sandbox: Sandbox, pathname: string) { - const processId = pathname.split("/").pop(); - if (processId === "kill-all") { - if (typeof sandbox.killAllProcesses === 'function') { - const result = await sandbox.killAllProcesses(); - return jsonResponse({ message: "All processes killed", killedCount: result }); - } else { - return errorResponse("Process management not implemented in current SDK version", 501); - } - } else if (processId) { - if (typeof sandbox.killProcess === 'function') { - await sandbox.killProcess(processId); - return jsonResponse({ message: "Process killed", processId }); - } else { - return errorResponse("Process management not implemented in current SDK version", 501); - } +export async function killProcesses( + sandbox: Sandbox, + pathname: string +) { + const processId = pathname.split("/").pop(); + if (processId === "kill-all") { + if (typeof sandbox.killAllProcesses === "function") { + const result = await sandbox.killAllProcesses(); + return jsonResponse({ + message: "All processes killed", + killedCount: result, + }); } else { - return errorResponse("Process ID is required"); + return errorResponse( + "Process management 
not implemented in current SDK version", + 501 + ); } + } else if (processId) { + if (typeof sandbox.killProcess === "function") { + await sandbox.killProcess(processId); + return jsonResponse({ message: "Process killed", processId }); + } else { + return errorResponse( + "Process management not implemented in current SDK version", + 501 + ); + } + } else { + return errorResponse("Process ID is required"); + } } diff --git a/examples/basic/src/endpoints/processList.ts b/examples/basic/src/endpoints/processList.ts index 4fd12bb..e59e4fb 100644 --- a/examples/basic/src/endpoints/processList.ts +++ b/examples/basic/src/endpoints/processList.ts @@ -2,10 +2,13 @@ import type { Sandbox } from "@cloudflare/sandbox"; import { errorResponse, jsonResponse } from "../http"; export const listProcesses = async (sandbox: Sandbox) => { - if (typeof sandbox.listProcesses === 'function') { - const processes = await sandbox.listProcesses(); - return jsonResponse({ processes }); - } else { - return errorResponse("Process management not implemented in current SDK version", 501); - } -} + if (typeof sandbox.listProcesses === "function") { + const processes = await sandbox.listProcesses(); + return jsonResponse({ processes }); + } else { + return errorResponse( + "Process management not implemented in current SDK version", + 501 + ); + } +}; diff --git a/examples/basic/src/endpoints/processLogs.ts b/examples/basic/src/endpoints/processLogs.ts index 739b57a..fea9619 100644 --- a/examples/basic/src/endpoints/processLogs.ts +++ b/examples/basic/src/endpoints/processLogs.ts @@ -1,88 +1,109 @@ import { Sandbox, parseSSEStream, type LogEvent } from "@cloudflare/sandbox"; import { corsHeaders, errorResponse, jsonResponse } from "../http"; -export async function getProcessLogs(sandbox: Sandbox, pathname: string) { - const pathParts = pathname.split("/"); - const processId = pathParts[pathParts.length - 2]; +export async function getProcessLogs( + sandbox: Sandbox, + pathname: string +) { + const pathParts = pathname.split("/"); + const processId = pathParts[pathParts.length - 2]; - if (!processId) { - return errorResponse("Process ID is required"); - } + if (!processId) { + return errorResponse("Process ID is required"); + } - if (typeof sandbox.getProcessLogs === 'function') { - const logs = await sandbox.getProcessLogs(processId); - return jsonResponse(logs); - } else { - return errorResponse("Process management not implemented in current SDK version", 501); - } + if (typeof sandbox.getProcessLogs === "function") { + const logs = await sandbox.getProcessLogs(processId); + return jsonResponse(logs); + } else { + return errorResponse( + "Process management not implemented in current SDK version", + 501 + ); + } } -export async function streamProcessLogs(sandbox: Sandbox, pathname: string) { - const pathParts = pathname.split("/"); - const processId = pathParts[pathParts.length - 2]; +export async function streamProcessLogs( + sandbox: Sandbox, + pathname: string +) { + const pathParts = pathname.split("/"); + const processId = pathParts[pathParts.length - 2]; - if (!processId) { - return errorResponse("Process ID is required"); - } + if (!processId) { + return errorResponse("Process ID is required"); + } - // Check if process exists first - if (typeof sandbox.getProcess === 'function') { - try { - const process = await sandbox.getProcess(processId); - if (!process) { - return errorResponse("Process not found", 404); - } - } catch (error: any) { - return errorResponse(`Failed to check process: ${error.message}`, 500); - 
} + // Check if process exists first + if (typeof sandbox.getProcess === "function") { + try { + const process = await sandbox.getProcess(processId); + if (!process) { + return errorResponse("Process not found", 404); + } + } catch (error: any) { + return errorResponse(`Failed to check process: ${error.message}`, 500); } + } - // Use the SDK's streaming with beautiful AsyncIterable API - if (typeof sandbox.streamProcessLogs === 'function') { - try { - // Create SSE stream from AsyncIterable - const encoder = new TextEncoder(); - const { readable, writable } = new TransformStream(); - const writer = writable.getWriter(); + // Use the SDK's streaming with beautiful AsyncIterable API + if (typeof sandbox.streamProcessLogs === "function") { + try { + // Create SSE stream from AsyncIterable + const encoder = new TextEncoder(); + const { readable, writable } = new TransformStream(); + const writer = writable.getWriter(); - // Stream logs in the background - (async () => { - try { - // Get the ReadableStream from sandbox - const stream = await sandbox.streamProcessLogs(processId); - - // Convert to AsyncIterable using parseSSEStream - for await (const logEvent of parseSSEStream(stream)) { - // Forward each typed event as SSE - await writer.write(encoder.encode(`data: ${JSON.stringify(logEvent)}\n\n`)); - } - } catch (error: any) { - // Send error event - await writer.write(encoder.encode(`data: ${JSON.stringify({ - type: 'error', - timestamp: new Date().toISOString(), - data: error.message, - processId - })}\n\n`)); - } finally { - await writer.close(); - } - })(); + // Stream logs in the background + (async () => { + try { + // Get the ReadableStream from sandbox + const stream = await sandbox.streamProcessLogs(processId); - // Return stream with proper headers - return new Response(readable, { - headers: { - "Content-Type": "text/event-stream", - "Cache-Control": "no-cache", - "Connection": "keep-alive", - ...corsHeaders(), - }, - }); + // Convert to AsyncIterable using parseSSEStream + for await (const logEvent of parseSSEStream(stream)) { + // Forward each typed event as SSE + await writer.write( + encoder.encode(`data: ${JSON.stringify(logEvent)}\n\n`) + ); + } } catch (error: any) { - console.error('Process log streaming error:', error); - return errorResponse(`Failed to stream process logs: ${error.message}`, 500); + // Send error event + await writer.write( + encoder.encode( + `data: ${JSON.stringify({ + type: "error", + timestamp: new Date().toISOString(), + data: error.message, + processId, + })}\n\n` + ) + ); + } finally { + await writer.close(); } - } else { - return errorResponse("Process streaming not implemented in current SDK version", 501); + })(); + + // Return stream with proper headers + return new Response(readable, { + headers: { + "Content-Type": "text/event-stream", + "Cache-Control": "no-cache", + Connection: "keep-alive", + ...corsHeaders(), + }, + }); + } catch (error: any) { + console.error("Process log streaming error:", error); + return errorResponse( + `Failed to stream process logs: ${error.message}`, + 500 + ); } + } else { + return errorResponse( + "Process streaming not implemented in current SDK version", + 501 + ); + } } diff --git a/examples/basic/src/endpoints/processStart.ts b/examples/basic/src/endpoints/processStart.ts index 670d457..5edd2f4 100644 --- a/examples/basic/src/endpoints/processStart.ts +++ b/examples/basic/src/endpoints/processStart.ts @@ -1,24 +1,30 @@ import type { Sandbox } from "@cloudflare/sandbox"; import { errorResponse, jsonResponse, 
parseJsonBody } from "../http"; -export async function startProcess(sandbox: Sandbox, request: Request) { - const body = await parseJsonBody(request); - const { command, processId, sessionId, timeout, env: envVars, cwd } = body; +export async function startProcess( + sandbox: Sandbox, + request: Request +) { + const body = await parseJsonBody(request); + const { command, processId, sessionId, timeout, env: envVars, cwd } = body; - if (!command) { - return errorResponse("Command is required"); - } + if (!command) { + return errorResponse("Command is required"); + } - if (typeof sandbox.startProcess === 'function') { - const process = await sandbox.startProcess(command, { - processId, - sessionId, - timeout, - env: envVars, - cwd - }); - return jsonResponse(process); - } else { - return errorResponse("Process management not implemented in current SDK version", 501); - } -} \ No newline at end of file + if (typeof sandbox.startProcess === "function") { + const process = await sandbox.startProcess(command, { + processId, + sessionId, + timeout, + env: envVars, + cwd, + }); + return jsonResponse(process); + } else { + return errorResponse( + "Process management not implemented in current SDK version", + 501 + ); + } +} diff --git a/examples/basic/src/endpoints/templates.ts b/examples/basic/src/endpoints/templates.ts index 9fcc434..d5228f6 100644 --- a/examples/basic/src/endpoints/templates.ts +++ b/examples/basic/src/endpoints/templates.ts @@ -7,30 +7,32 @@ export async function setupNextjs(sandbox: Sandbox, request: Request) { const { projectName = "my-nextjs-app" } = body; // Step 1: Create Next.js app - await sandbox.exec(`npx create-next-app@latest ${projectName} --typescript --tailwind --eslint --app --src-dir --import-alias "@/*" --no-turbopack --yes`); - + await sandbox.exec( + `npx create-next-app@latest ${projectName} --typescript --tailwind --eslint --app --src-dir --import-alias "@/*" --no-turbopack --yes` + ); + // Step 2: Install dependencies (already done by create-next-app) // Step 3: Start dev server on port 8080 - const process = await sandbox.startProcess(`npm run dev -- --port 8080`, { - cwd: projectName + const process = await sandbox.startProcess(`npm run dev -- --port 8080`, { + cwd: projectName, }); - + // Step 4: Wait a moment for server to start - await new Promise(resolve => setTimeout(resolve, 3000)); - + await new Promise((resolve) => setTimeout(resolve, 3000)); + // Step 5: Expose port const hostname = new URL(request.url).host; - const preview = await sandbox.exposePort(8080, { + const preview = await sandbox.exposePort(8080, { name: "Next.js Dev Server", - hostname + hostname, }); - - return jsonResponse({ - success: true, + + return jsonResponse({ + success: true, projectName, processId: process.id, previewUrl: preview.url, - message: "Next.js project created and running!" 
+ message: "Next.js project created and running!", }); } catch (error: any) { console.error("Error setting up Next.js:", error); @@ -44,33 +46,35 @@ export async function setupReact(sandbox: Sandbox, request: Request) { const { projectName = "my-react-app" } = body; // Step 1: Create React app - await sandbox.exec(`npx create-react-app ${projectName} --template typescript`); - + await sandbox.exec( + `npx create-react-app ${projectName} --template typescript` + ); + // Step 2: Start dev server on port 8080 - const process = await sandbox.startProcess(`npm start`, { + const process = await sandbox.startProcess(`npm start`, { cwd: projectName, - env: { + env: { BROWSER: "none", // Prevent browser from opening - PORT: "8080" // Set React dev server to use port 8080 - } + PORT: "8080", // Set React dev server to use port 8080 + }, }); - + // Step 3: Wait for server to start - await new Promise(resolve => setTimeout(resolve, 5000)); - + await new Promise((resolve) => setTimeout(resolve, 5000)); + // Step 4: Expose port const hostname = new URL(request.url).host; - const preview = await sandbox.exposePort(8080, { + const preview = await sandbox.exposePort(8080, { name: "React Dev Server", - hostname + hostname, }); - - return jsonResponse({ - success: true, + + return jsonResponse({ + success: true, projectName, processId: process.id, previewUrl: preview.url, - message: "React project created and running!" + message: "React project created and running!", }); } catch (error: any) { console.error("Error setting up React:", error); @@ -84,32 +88,34 @@ export async function setupVue(sandbox: Sandbox, request: Request) { const { projectName = "my-vue-app" } = body; // Step 1: Create Vue app - await sandbox.exec(`npm create vue@latest ${projectName} -- --typescript --jsx --router --pinia --vitest --cypress --eslint --prettier --yes`); - + await sandbox.exec( + `npm create vue@latest ${projectName} -- --typescript --jsx --router --pinia --vitest --cypress --eslint --prettier --yes` + ); + // Step 2: Install dependencies await sandbox.exec(`cd ${projectName} && npm install`); - + // Step 3: Start dev server on port 8080 - const process = await sandbox.startProcess(`npm run dev -- --port 8080`, { - cwd: projectName + const process = await sandbox.startProcess(`npm run dev -- --port 8080`, { + cwd: projectName, }); - + // Step 4: Wait for server to start - await new Promise(resolve => setTimeout(resolve, 3000)); - + await new Promise((resolve) => setTimeout(resolve, 3000)); + // Step 5: Expose port const hostname = new URL(request.url).host; - const preview = await sandbox.exposePort(8080, { + const preview = await sandbox.exposePort(8080, { name: "Vue Dev Server", - hostname + hostname, }); - - return jsonResponse({ - success: true, + + return jsonResponse({ + success: true, projectName, processId: process.id, previewUrl: preview.url, - message: "Vue project created and running!" 
+ message: "Vue project created and running!", }); } catch (error: any) { console.error("Error setting up Vue:", error); @@ -124,7 +130,7 @@ export async function setupStatic(sandbox: Sandbox, request: Request) { // Step 1: Create directory and basic HTML await sandbox.mkdir(projectName); - + // Step 2: Create basic HTML file const htmlContent = ` @@ -152,33 +158,33 @@ export async function setupStatic(sandbox: Sandbox, request: Request) { `; - + await sandbox.writeFile(`${projectName}/index.html`, htmlContent); - + // Step 3: Start simple HTTP server on port 8080 - const process = await sandbox.startProcess(`python3 -m http.server 8080`, { - cwd: projectName + const process = await sandbox.startProcess(`python3 -m http.server 8080`, { + cwd: projectName, }); - + // Step 4: Wait for server to start - await new Promise(resolve => setTimeout(resolve, 2000)); - + await new Promise((resolve) => setTimeout(resolve, 2000)); + // Step 5: Expose port const hostname = new URL(request.url).host; - const preview = await sandbox.exposePort(8080, { + const preview = await sandbox.exposePort(8080, { name: "Static Site Server", - hostname + hostname, }); - - return jsonResponse({ - success: true, + + return jsonResponse({ + success: true, projectName, processId: process.id, previewUrl: preview.url, - message: "Static site created and running!" + message: "Static site created and running!", }); } catch (error: any) { console.error("Error setting up static site:", error); return errorResponse(`Failed to setup static site: ${error.message}`); } -} \ No newline at end of file +} diff --git a/examples/basic/src/index.ts b/examples/basic/src/index.ts index 797cce8..4fe1d75 100644 --- a/examples/basic/src/index.ts +++ b/examples/basic/src/index.ts @@ -21,7 +21,12 @@ import { setupVue, setupStatic, } from "./endpoints"; -import { corsHeaders, errorResponse, jsonResponse, parseJsonBody } from "./http"; +import { + corsHeaders, + errorResponse, + jsonResponse, + parseJsonBody, +} from "./http"; export { Sandbox } from "@cloudflare/sandbox"; @@ -29,7 +34,9 @@ export { Sandbox } from "@cloudflare/sandbox"; function generateSecureRandomString(length: number = 12): string { const array = new Uint8Array(length); crypto.getRandomValues(array); - return Array.from(array, byte => byte.toString(16).padStart(2, '0')).join(''); + return Array.from(array, (byte) => byte.toString(16).padStart(2, "0")).join( + "" + ); } type Env = { @@ -88,11 +95,19 @@ export default { return await killProcesses(sandbox, pathname); } - if (pathname.startsWith("/api/process/") && pathname.endsWith("/logs") && request.method === "GET") { + if ( + pathname.startsWith("/api/process/") && + pathname.endsWith("/logs") && + request.method === "GET" + ) { return await getProcessLogs(sandbox, pathname); } - if (pathname.startsWith("/api/process/") && pathname.endsWith("/stream") && request.method === "GET") { + if ( + pathname.startsWith("/api/process/") && + pathname.endsWith("/stream") && + request.method === "GET" + ) { return await streamProcessLogs(sandbox, pathname); } @@ -173,13 +188,18 @@ export default { // Session Management APIs if (pathname === "/api/session/create" && request.method === "POST") { const body = await parseJsonBody(request); - const sessionId = body.sessionId || `session_${Date.now()}_${generateSecureRandomString()}`; + const sessionId = + body.sessionId || + `session_${Date.now()}_${generateSecureRandomString()}`; // Sessions are managed automatically by the SDK, just return the ID return jsonResponse(sessionId); } - if 
(pathname.startsWith("/api/session/clear/") && request.method === "POST") { + if ( + pathname.startsWith("/api/session/clear/") && + request.method === "POST" + ) { const sessionId = pathname.split("/").pop(); // In a real implementation, you might want to clean up session state @@ -213,8 +233,8 @@ export default { "POST /api/templates/nextjs - Setup Next.js project", "POST /api/templates/react - Setup React project", "POST /api/templates/vue - Setup Vue project", - "POST /api/templates/static - Setup static site" - ] + "POST /api/templates/static - Setup static site", + ], }); } @@ -227,21 +247,23 @@ export default { return jsonResponse({ message: "pong", timestamp: new Date().toISOString(), - sandboxStatus: "ready" + sandboxStatus: "ready", }); } catch (error: any) { - return jsonResponse({ - message: "pong", - timestamp: new Date().toISOString(), - sandboxStatus: "initializing", - error: error.message - }, 202); // 202 Accepted - processing in progress + return jsonResponse( + { + message: "pong", + timestamp: new Date().toISOString(), + sandboxStatus: "initializing", + error: error.message, + }, + 202 + ); // 202 Accepted - processing in progress } } // Fallback: serve static assets for all other requests return env.ASSETS.fetch(request); - } catch (error: any) { console.error("API Error:", error); return errorResponse(`Internal server error: ${error.message}`, 500); diff --git a/packages/sandbox/README.md b/packages/sandbox/README.md index 1c6720a..211f7f5 100644 --- a/packages/sandbox/README.md +++ b/packages/sandbox/README.md @@ -66,10 +66,10 @@ npm install @cloudflare/sandbox ```dockerfile FROM docker.io/cloudflare/sandbox:0.1.3 -EXPOSE 3000 +# expose any ports you might want to use +# EXPOSE 8080 +# EXPOSE 5173 -# Run the same command as the original image -CMD ["bun", "index.ts"] ``` 2. **Configure wrangler.json**: @@ -138,23 +138,23 @@ console.log(result.stdout, result.exitCode); // With streaming callbacks const result = await sandbox.exec("npm run build", { stream: true, - onOutput: (stream, data) => console.log(`[${stream}] ${data}`) + onOutput: (stream, data) => console.log(`[${stream}] ${data}`), }); ``` **`execStream(command, options?)`** - Dedicated streaming method returning SSE stream ```typescript -import { parseSSEStream, type ExecEvent } from '@cloudflare/sandbox'; +import { parseSSEStream, type ExecEvent } from "@cloudflare/sandbox"; const stream = await sandbox.execStream("npm run test"); for await (const event of parseSSEStream(stream)) { switch (event.type) { - case 'stdout': + case "stdout": console.log(`Test output: ${event.data}`); break; - case 'complete': - console.log(`Tests ${event.exitCode === 0 ? 'passed' : 'failed'}`); + case "complete": + console.log(`Tests ${event.exitCode === 0 ? "passed" : "failed"}`); break; } } @@ -205,8 +205,7 @@ await sandbox.gitCheckout("https://github.com/user/repo", { Set environment variables dynamically in the sandbox. -> **Important**: This method must be called immediately after `getSandbox()` and before any other operations. Once a sandbox instance starts up, environment variables cannot be changed -for that instance. +> **Important**: This method must be called immediately after `getSandbox()` and before any other operations. Once a sandbox instance starts up, environment variables cannot be changed for that instance. 
```typescript const sandbox = getSandbox(env.Sandbox, "my-sandbox"); @@ -215,7 +214,7 @@ const sandbox = getSandbox(env.Sandbox, "my-sandbox"); await sandbox.setEnvVars({ NODE_ENV: "production", API_KEY: "your-api-key", - DATABASE_URL: "postgresql://localhost:5432/mydb" + DATABASE_URL: "postgresql://localhost:5432/mydb", }); // Now you can run commands - environment variables are available @@ -284,7 +283,7 @@ The SDK handles: ```dockerfile # In your Dockerfile (only needed for local dev) -FROM oven/bun:latest +FROM docker.io/cloudflare/sandbox:0.1.3 # Expose the ports you'll be using EXPOSE 3000 # For a web server @@ -422,22 +421,22 @@ The SDK leverages Cloudflare's infrastructure: The SDK provides powerful streaming capabilities with typed AsyncIterable support: ```typescript -import { parseSSEStream, type ExecEvent } from '@cloudflare/sandbox'; +import { parseSSEStream, type ExecEvent } from "@cloudflare/sandbox"; // Stream command execution -const stream = await sandbox.execStream('npm run build'); +const stream = await sandbox.execStream("npm run build"); for await (const event of parseSSEStream(stream)) { switch (event.type) { - case 'start': + case "start": console.log(`Build started: ${event.command}`); break; - case 'stdout': + case "stdout": console.log(`Build: ${event.data}`); break; - case 'complete': + case "complete": console.log(`Exit code: ${event.exitCode}`); break; - case 'error': + case "error": console.error(`Error: ${event.error}`); break; } @@ -455,20 +454,21 @@ The SDK exports utilities for working with Server-Sent Event streams: #### Advanced Streaming Examples **CI/CD Build System:** + ```typescript export async function runBuild(env: Env, buildId: string) { const sandbox = getSandbox(env.SANDBOX, buildId); - const stream = await sandbox.execStream('npm run build'); + const stream = await sandbox.execStream("npm run build"); for await (const event of parseSSEStream(stream)) { switch (event.type) { - case 'start': - await env.BUILDS.put(buildId, { status: 'running' }); + case "start": + await env.BUILDS.put(buildId, { status: "running" }); break; - case 'complete': + case "complete": await env.BUILDS.put(buildId, { - status: event.exitCode === 0 ? 'success' : 'failed', - exitCode: event.exitCode + status: event.exitCode === 0 ? 
"success" : "failed", + exitCode: event.exitCode, }); break; } @@ -477,16 +477,17 @@ export async function runBuild(env: Env, buildId: string) { ``` **System Monitoring:** + ```typescript -const monitor = await sandbox.startProcess('tail -f /var/log/system.log'); +const monitor = await sandbox.startProcess("tail -f /var/log/system.log"); const logStream = await sandbox.streamProcessLogs(monitor.id); for await (const log of parseSSEStream(logStream)) { - if (log.type === 'stdout' && log.data.includes('ERROR')) { + if (log.type === "stdout" && log.data.includes("ERROR")) { await env.ALERTS.send({ - severity: 'high', + severity: "high", message: log.data, - timestamp: log.timestamp + timestamp: log.timestamp, }); } } diff --git a/packages/sandbox/container_src/handler/exec.ts b/packages/sandbox/container_src/handler/exec.ts index dbc781d..4719827 100644 --- a/packages/sandbox/container_src/handler/exec.ts +++ b/packages/sandbox/container_src/handler/exec.ts @@ -66,7 +66,9 @@ function executeCommand( session.activeProcess = null; } - console.log(`[Server] Command completed: ${command}, Exit code: ${code}`); + console.log( + `[Server] Command completed: ${command}, Exit code: ${code}` + ); resolve({ exitCode: code || 0, @@ -115,7 +117,12 @@ export async function handleExecuteRequest( console.log(`[Server] Executing command: ${command}`); - const result = await executeCommand(sessions, command, sessionId, background); + const result = await executeCommand( + sessions, + command, + sessionId, + background + ); return new Response( JSON.stringify({ @@ -175,9 +182,7 @@ export async function handleStreamingExecuteRequest( ); } - console.log( - `[Server] Executing streaming command: ${command}` - ); + console.log(`[Server] Executing streaming command: ${command}`); const stream = new ReadableStream({ start(controller) { diff --git a/packages/sandbox/container_src/handler/file.ts b/packages/sandbox/container_src/handler/file.ts index f54dddd..7cf99b9 100644 --- a/packages/sandbox/container_src/handler/file.ts +++ b/packages/sandbox/container_src/handler/file.ts @@ -2,439 +2,436 @@ import { spawn } from "node:child_process"; import { mkdir, readFile, rename, unlink, writeFile } from "node:fs/promises"; import { dirname } from "node:path"; import type { - DeleteFileRequest, - MkdirRequest, - MoveFileRequest, - ReadFileRequest, - RenameFileRequest, - SessionData, - WriteFileRequest + DeleteFileRequest, + MkdirRequest, + MoveFileRequest, + ReadFileRequest, + RenameFileRequest, + SessionData, + WriteFileRequest, } from "../types"; function executeMkdir( - sessions: Map, - path: string, - recursive: boolean, - sessionId?: string + sessions: Map, + path: string, + recursive: boolean, + sessionId?: string ): Promise<{ - success: boolean; - stdout: string; - stderr: string; - exitCode: number; + success: boolean; + stdout: string; + stderr: string; + exitCode: number; }> { - return new Promise((resolve, reject) => { - const args = `${recursive ? "-p " : ""} ${path}`; - const mkdirChild = spawn(`mkdir ${args}`, { - shell: true, - stdio: ["pipe", "pipe", "pipe"], - }); + return new Promise((resolve, reject) => { + const args = `${recursive ? 
"-p " : ""} ${path}`; + const mkdirChild = spawn(`mkdir ${args}`, { + shell: true, + stdio: ["pipe", "pipe", "pipe"], + }); - // Store the process reference for cleanup if sessionId is provided - if (sessionId && sessions.has(sessionId)) { - const session = sessions.get(sessionId)!; - session.activeProcess = mkdirChild; - } + // Store the process reference for cleanup if sessionId is provided + if (sessionId && sessions.has(sessionId)) { + const session = sessions.get(sessionId)!; + session.activeProcess = mkdirChild; + } - let stdout = ""; - let stderr = ""; + let stdout = ""; + let stderr = ""; - mkdirChild.stdout?.on("data", (data) => { - stdout += data.toString(); - }); + mkdirChild.stdout?.on("data", (data) => { + stdout += data.toString(); + }); - mkdirChild.stderr?.on("data", (data) => { - stderr += data.toString(); - }); + mkdirChild.stderr?.on("data", (data) => { + stderr += data.toString(); + }); + + mkdirChild.on("close", (code) => { + // Clear the active process reference + if (sessionId && sessions.has(sessionId)) { + const session = sessions.get(sessionId)!; + session.activeProcess = null; + } - mkdirChild.on("close", (code) => { - // Clear the active process reference - if (sessionId && sessions.has(sessionId)) { - const session = sessions.get(sessionId)!; - session.activeProcess = null; - } - - if (code === 0) { - console.log(`[Server] Directory created successfully: ${path}`); - resolve({ - exitCode: code || 0, - stderr, - stdout, - success: true, - }); - } else { - console.error( - `[Server] Failed to create directory: ${path}, Exit code: ${code}` - ); - resolve({ - exitCode: code || 1, - stderr, - stdout, - success: false, - }); - } + if (code === 0) { + console.log(`[Server] Directory created successfully: ${path}`); + resolve({ + exitCode: code || 0, + stderr, + stdout, + success: true, + }); + } else { + console.error( + `[Server] Failed to create directory: ${path}, Exit code: ${code}` + ); + resolve({ + exitCode: code || 1, + stderr, + stdout, + success: false, }); + } + }); - mkdirChild.on("error", (error) => { - // Clear the active process reference - if (sessionId && sessions.has(sessionId)) { - const session = sessions.get(sessionId)!; - session.activeProcess = null; - } + mkdirChild.on("error", (error) => { + // Clear the active process reference + if (sessionId && sessions.has(sessionId)) { + const session = sessions.get(sessionId)!; + session.activeProcess = null; + } - console.error(`[Server] Error creating directory: ${path}`, error); - reject(error); - }); + console.error(`[Server] Error creating directory: ${path}`, error); + reject(error); }); + }); } export async function handleMkdirRequest( - sessions: Map, - req: Request, - corsHeaders: Record + sessions: Map, + req: Request, + corsHeaders: Record ): Promise { - try { - const body = (await req.json()) as MkdirRequest; - const { path, recursive = false, sessionId } = body; - - if (!path || typeof path !== "string") { - return new Response( - JSON.stringify({ - error: "Path is required and must be a string", - }), - { - headers: { - "Content-Type": "application/json", - ...corsHeaders, - }, - status: 400, - } - ); + try { + const body = (await req.json()) as MkdirRequest; + const { path, recursive = false, sessionId } = body; + + if (!path || typeof path !== "string") { + return new Response( + JSON.stringify({ + error: "Path is required and must be a string", + }), + { + headers: { + "Content-Type": "application/json", + ...corsHeaders, + }, + status: 400, } + ); + } - // Basic safety check - prevent 
dangerous paths - const dangerousPatterns = [ - /^\/$/, // Root directory - /^\/etc/, // System directories - /^\/var/, // System directories - /^\/usr/, // System directories - /^\/bin/, // System directories - /^\/sbin/, // System directories - /^\/boot/, // System directories - /^\/dev/, // System directories - /^\/proc/, // System directories - /^\/sys/, // System directories - /^\/tmp\/\.\./, // Path traversal attempts - /\.\./, // Path traversal attempts - ]; - - if (dangerousPatterns.some((pattern) => pattern.test(path))) { - return new Response( - JSON.stringify({ - error: "Dangerous path not allowed", - }), - { - headers: { - "Content-Type": "application/json", - ...corsHeaders, - }, - status: 400, - } - ); + // Basic safety check - prevent dangerous paths + const dangerousPatterns = [ + /^\/$/, // Root directory + /^\/etc/, // System directories + /^\/var/, // System directories + /^\/usr/, // System directories + /^\/bin/, // System directories + /^\/sbin/, // System directories + /^\/boot/, // System directories + /^\/dev/, // System directories + /^\/proc/, // System directories + /^\/sys/, // System directories + /^\/tmp\/\.\./, // Path traversal attempts + /\.\./, // Path traversal attempts + ]; + + if (dangerousPatterns.some((pattern) => pattern.test(path))) { + return new Response( + JSON.stringify({ + error: "Dangerous path not allowed", + }), + { + headers: { + "Content-Type": "application/json", + ...corsHeaders, + }, + status: 400, } - - console.log( - `[Server] Creating directory: ${path} (recursive: ${recursive})` - ); - - const result = await executeMkdir(sessions, path, recursive, sessionId); - - return new Response( - JSON.stringify({ - exitCode: result.exitCode, - path, - recursive, - stderr: result.stderr, - stdout: result.stdout, - success: result.success, - timestamp: new Date().toISOString(), - }), - { - headers: { - "Content-Type": "application/json", - ...corsHeaders, - }, - } - ); - } catch (error) { - console.error("[Server] Error in handleMkdirRequest:", error); - return new Response( - JSON.stringify({ - error: "Failed to create directory", - message: error instanceof Error ? error.message : "Unknown error", - }), - { - headers: { - "Content-Type": "application/json", - ...corsHeaders, - }, - status: 500, - } - ); + ); } -} + console.log( + `[Server] Creating directory: ${path} (recursive: ${recursive})` + ); + + const result = await executeMkdir(sessions, path, recursive, sessionId); + + return new Response( + JSON.stringify({ + exitCode: result.exitCode, + path, + recursive, + stderr: result.stderr, + stdout: result.stdout, + success: result.success, + timestamp: new Date().toISOString(), + }), + { + headers: { + "Content-Type": "application/json", + ...corsHeaders, + }, + } + ); + } catch (error) { + console.error("[Server] Error in handleMkdirRequest:", error); + return new Response( + JSON.stringify({ + error: "Failed to create directory", + message: error instanceof Error ? 
error.message : "Unknown error", + }), + { + headers: { + "Content-Type": "application/json", + ...corsHeaders, + }, + status: 500, + } + ); + } +} function executeWriteFile( - path: string, - content: string, - encoding: string, - sessionId?: string + path: string, + content: string, + encoding: string, + sessionId?: string ): Promise<{ - success: boolean; - exitCode: number; + success: boolean; + exitCode: number; }> { - return new Promise((resolve, reject) => { - (async () => { - try { - // Ensure the directory exists - const dir = dirname(path); - if (dir !== ".") { - await mkdir(dir, { recursive: true }); - } - - // Write the file - await writeFile(path, content, { - encoding: encoding as BufferEncoding, - }); - - console.log(`[Server] File written successfully: ${path}`); - resolve({ - exitCode: 0, - success: true, - }); - } catch (error) { - console.error(`[Server] Error writing file: ${path}`, error); - reject(error); - } - })(); - }); + return new Promise((resolve, reject) => { + (async () => { + try { + // Ensure the directory exists + const dir = dirname(path); + if (dir !== ".") { + await mkdir(dir, { recursive: true }); + } + + // Write the file + await writeFile(path, content, { + encoding: encoding as BufferEncoding, + }); + + console.log(`[Server] File written successfully: ${path}`); + resolve({ + exitCode: 0, + success: true, + }); + } catch (error) { + console.error(`[Server] Error writing file: ${path}`, error); + reject(error); + } + })(); + }); } export async function handleWriteFileRequest( - req: Request, - corsHeaders: Record + req: Request, + corsHeaders: Record ): Promise { - try { - const body = (await req.json()) as WriteFileRequest; - const { path, content, encoding = "utf-8", sessionId } = body; - - if (!path || typeof path !== "string") { - return new Response( - JSON.stringify({ - error: "Path is required and must be a string", - }), - { - headers: { - "Content-Type": "application/json", - ...corsHeaders, - }, - status: 400, - } - ); + try { + const body = (await req.json()) as WriteFileRequest; + const { path, content, encoding = "utf-8", sessionId } = body; + + if (!path || typeof path !== "string") { + return new Response( + JSON.stringify({ + error: "Path is required and must be a string", + }), + { + headers: { + "Content-Type": "application/json", + ...corsHeaders, + }, + status: 400, } + ); + } - // Basic safety check - prevent dangerous paths - const dangerousPatterns = [ - /^\/$/, // Root directory - /^\/etc/, // System directories - /^\/var/, // System directories - /^\/usr/, // System directories - /^\/bin/, // System directories - /^\/sbin/, // System directories - /^\/boot/, // System directories - /^\/dev/, // System directories - /^\/proc/, // System directories - /^\/sys/, // System directories - /^\/tmp\/\.\./, // Path traversal attempts - /\.\./, // Path traversal attempts - ]; - - if (dangerousPatterns.some((pattern) => pattern.test(path))) { - return new Response( - JSON.stringify({ - error: "Dangerous path not allowed", - }), - { - headers: { - "Content-Type": "application/json", - ...corsHeaders, - }, - status: 400, - } - ); + // Basic safety check - prevent dangerous paths + const dangerousPatterns = [ + /^\/$/, // Root directory + /^\/etc/, // System directories + /^\/var/, // System directories + /^\/usr/, // System directories + /^\/bin/, // System directories + /^\/sbin/, // System directories + /^\/boot/, // System directories + /^\/dev/, // System directories + /^\/proc/, // System directories + /^\/sys/, // System directories + 
/^\/tmp\/\.\./, // Path traversal attempts + /\.\./, // Path traversal attempts + ]; + + if (dangerousPatterns.some((pattern) => pattern.test(path))) { + return new Response( + JSON.stringify({ + error: "Dangerous path not allowed", + }), + { + headers: { + "Content-Type": "application/json", + ...corsHeaders, + }, + status: 400, } - - console.log( - `[Server] Writing file: ${path} (content length: ${content.length})` - ); - - const result = await executeWriteFile(path, content, encoding, sessionId); - - return new Response( - JSON.stringify({ - exitCode: result.exitCode, - path, - success: result.success, - timestamp: new Date().toISOString(), - }), - { - headers: { - "Content-Type": "application/json", - ...corsHeaders, - }, - } - ); - } catch (error) { - console.error("[Server] Error in handleWriteFileRequest:", error); - return new Response( - JSON.stringify({ - error: "Failed to write file", - message: error instanceof Error ? error.message : "Unknown error", - }), - { - headers: { - "Content-Type": "application/json", - ...corsHeaders, - }, - status: 500, - } - ); + ); } -} + console.log( + `[Server] Writing file: ${path} (content length: ${content.length})` + ); + + const result = await executeWriteFile(path, content, encoding, sessionId); + + return new Response( + JSON.stringify({ + exitCode: result.exitCode, + path, + success: result.success, + timestamp: new Date().toISOString(), + }), + { + headers: { + "Content-Type": "application/json", + ...corsHeaders, + }, + } + ); + } catch (error) { + console.error("[Server] Error in handleWriteFileRequest:", error); + return new Response( + JSON.stringify({ + error: "Failed to write file", + message: error instanceof Error ? error.message : "Unknown error", + }), + { + headers: { + "Content-Type": "application/json", + ...corsHeaders, + }, + status: 500, + } + ); + } +} function executeReadFile( - path: string, - encoding: string, - sessionId?: string + path: string, + encoding: string, + sessionId?: string ): Promise<{ - success: boolean; - exitCode: number; - content: string; + success: boolean; + exitCode: number; + content: string; }> { - return new Promise((resolve, reject) => { - (async () => { - try { - // Read the file - const content = await readFile(path, { - encoding: encoding as BufferEncoding, - }); - - console.log(`[Server] File read successfully: ${path}`); - resolve({ - content, - exitCode: 0, - success: true, - }); - } catch (error) { - console.error(`[Server] Error reading file: ${path}`, error); - reject(error); - } - })(); - }); + return new Promise((resolve, reject) => { + (async () => { + try { + // Read the file + const content = await readFile(path, { + encoding: encoding as BufferEncoding, + }); + + console.log(`[Server] File read successfully: ${path}`); + resolve({ + content, + exitCode: 0, + success: true, + }); + } catch (error) { + console.error(`[Server] Error reading file: ${path}`, error); + reject(error); + } + })(); + }); } export async function handleReadFileRequest( - req: Request, - corsHeaders: Record + req: Request, + corsHeaders: Record ): Promise { - try { - const body = (await req.json()) as ReadFileRequest; - const { path, encoding = "utf-8", sessionId } = body; - - if (!path || typeof path !== "string") { - return new Response( - JSON.stringify({ - error: "Path is required and must be a string", - }), - { - headers: { - "Content-Type": "application/json", - ...corsHeaders, - }, - status: 400, - } - ); + try { + const body = (await req.json()) as ReadFileRequest; + const { path, encoding = 
"utf-8", sessionId } = body; + + if (!path || typeof path !== "string") { + return new Response( + JSON.stringify({ + error: "Path is required and must be a string", + }), + { + headers: { + "Content-Type": "application/json", + ...corsHeaders, + }, + status: 400, } + ); + } - // Basic safety check - prevent dangerous paths - const dangerousPatterns = [ - /^\/$/, // Root directory - /^\/etc/, // System directories - /^\/var/, // System directories - /^\/usr/, // System directories - /^\/bin/, // System directories - /^\/sbin/, // System directories - /^\/boot/, // System directories - /^\/dev/, // System directories - /^\/proc/, // System directories - /^\/sys/, // System directories - /^\/tmp\/\.\./, // Path traversal attempts - /\.\./, // Path traversal attempts - ]; - - if (dangerousPatterns.some((pattern) => pattern.test(path))) { - return new Response( - JSON.stringify({ - error: "Dangerous path not allowed", - }), - { - headers: { - "Content-Type": "application/json", - ...corsHeaders, - }, - status: 400, - } - ); + // Basic safety check - prevent dangerous paths + const dangerousPatterns = [ + /^\/$/, // Root directory + /^\/etc/, // System directories + /^\/var/, // System directories + /^\/usr/, // System directories + /^\/bin/, // System directories + /^\/sbin/, // System directories + /^\/boot/, // System directories + /^\/dev/, // System directories + /^\/proc/, // System directories + /^\/sys/, // System directories + /^\/tmp\/\.\./, // Path traversal attempts + /\.\./, // Path traversal attempts + ]; + + if (dangerousPatterns.some((pattern) => pattern.test(path))) { + return new Response( + JSON.stringify({ + error: "Dangerous path not allowed", + }), + { + headers: { + "Content-Type": "application/json", + ...corsHeaders, + }, + status: 400, } - - console.log(`[Server] Reading file: ${path}`); - - const result = await executeReadFile(path, encoding, sessionId); - - return new Response( - JSON.stringify({ - content: result.content, - exitCode: result.exitCode, - path, - success: result.success, - timestamp: new Date().toISOString(), - }), - { - headers: { - "Content-Type": "application/json", - ...corsHeaders, - }, - } - ); - } catch (error) { - console.error("[Server] Error in handleReadFileRequest:", error); - return new Response( - JSON.stringify({ - error: "Failed to read file", - message: error instanceof Error ? error.message : "Unknown error", - }), - { - headers: { - "Content-Type": "application/json", - ...corsHeaders, - }, - status: 500, - } - ); + ); } -} + console.log(`[Server] Reading file: ${path}`); + + const result = await executeReadFile(path, encoding, sessionId); + + return new Response( + JSON.stringify({ + content: result.content, + exitCode: result.exitCode, + path, + success: result.success, + timestamp: new Date().toISOString(), + }), + { + headers: { + "Content-Type": "application/json", + ...corsHeaders, + }, + } + ); + } catch (error) { + console.error("[Server] Error in handleReadFileRequest:", error); + return new Response( + JSON.stringify({ + error: "Failed to read file", + message: error instanceof Error ? 
error.message : "Unknown error", + }), + { + headers: { + "Content-Type": "application/json", + ...corsHeaders, + }, + status: 500, + } + ); + } +} function executeDeleteFile( path: string, @@ -463,95 +460,94 @@ function executeDeleteFile( } export async function handleDeleteFileRequest( - req: Request, - corsHeaders: Record + req: Request, + corsHeaders: Record ): Promise { - try { - const body = (await req.json()) as DeleteFileRequest; - const { path, sessionId } = body; - - if (!path || typeof path !== "string") { - return new Response( - JSON.stringify({ - error: "Path is required and must be a string", - }), - { - headers: { - "Content-Type": "application/json", - ...corsHeaders, - }, - status: 400, - } - ); + try { + const body = (await req.json()) as DeleteFileRequest; + const { path, sessionId } = body; + + if (!path || typeof path !== "string") { + return new Response( + JSON.stringify({ + error: "Path is required and must be a string", + }), + { + headers: { + "Content-Type": "application/json", + ...corsHeaders, + }, + status: 400, } + ); + } - // Basic safety check - prevent dangerous paths - const dangerousPatterns = [ - /^\/$/, // Root directory - /^\/etc/, // System directories - /^\/var/, // System directories - /^\/usr/, // System directories - /^\/bin/, // System directories - /^\/sbin/, // System directories - /^\/boot/, // System directories - /^\/dev/, // System directories - /^\/proc/, // System directories - /^\/sys/, // System directories - /^\/tmp\/\.\./, // Path traversal attempts - /\.\./, // Path traversal attempts - ]; - - if (dangerousPatterns.some((pattern) => pattern.test(path))) { - return new Response( - JSON.stringify({ - error: "Dangerous path not allowed", - }), - { - headers: { - "Content-Type": "application/json", - ...corsHeaders, - }, - status: 400, - } - ); + // Basic safety check - prevent dangerous paths + const dangerousPatterns = [ + /^\/$/, // Root directory + /^\/etc/, // System directories + /^\/var/, // System directories + /^\/usr/, // System directories + /^\/bin/, // System directories + /^\/sbin/, // System directories + /^\/boot/, // System directories + /^\/dev/, // System directories + /^\/proc/, // System directories + /^\/sys/, // System directories + /^\/tmp\/\.\./, // Path traversal attempts + /\.\./, // Path traversal attempts + ]; + + if (dangerousPatterns.some((pattern) => pattern.test(path))) { + return new Response( + JSON.stringify({ + error: "Dangerous path not allowed", + }), + { + headers: { + "Content-Type": "application/json", + ...corsHeaders, + }, + status: 400, } - - console.log(`[Server] Deleting file: ${path}`); - - const result = await executeDeleteFile(path, sessionId); - - return new Response( - JSON.stringify({ - exitCode: result.exitCode, - path, - success: result.success, - timestamp: new Date().toISOString(), - }), - { - headers: { - "Content-Type": "application/json", - ...corsHeaders, - }, - } - ); - } catch (error) { - console.error("[Server] Error in handleDeleteFileRequest:", error); - return new Response( - JSON.stringify({ - error: "Failed to delete file", - message: error instanceof Error ? 
error.message : "Unknown error", - }), - { - headers: { - "Content-Type": "application/json", - ...corsHeaders, - }, - status: 500, - } - ); + ); } -} + console.log(`[Server] Deleting file: ${path}`); + + const result = await executeDeleteFile(path, sessionId); + + return new Response( + JSON.stringify({ + exitCode: result.exitCode, + path, + success: result.success, + timestamp: new Date().toISOString(), + }), + { + headers: { + "Content-Type": "application/json", + ...corsHeaders, + }, + } + ); + } catch (error) { + console.error("[Server] Error in handleDeleteFileRequest:", error); + return new Response( + JSON.stringify({ + error: "Failed to delete file", + message: error instanceof Error ? error.message : "Unknown error", + }), + { + headers: { + "Content-Type": "application/json", + ...corsHeaders, + }, + status: 500, + } + ); + } +} function executeRenameFile( oldPath: string, @@ -586,115 +582,114 @@ function executeRenameFile( } export async function handleRenameFileRequest( - req: Request, - corsHeaders: Record + req: Request, + corsHeaders: Record ): Promise { - try { - const body = (await req.json()) as RenameFileRequest; - const { oldPath, newPath, sessionId } = body; - - if (!oldPath || typeof oldPath !== "string") { - return new Response( - JSON.stringify({ - error: "Old path is required and must be a string", - }), - { - headers: { - "Content-Type": "application/json", - ...corsHeaders, - }, - status: 400, - } - ); + try { + const body = (await req.json()) as RenameFileRequest; + const { oldPath, newPath, sessionId } = body; + + if (!oldPath || typeof oldPath !== "string") { + return new Response( + JSON.stringify({ + error: "Old path is required and must be a string", + }), + { + headers: { + "Content-Type": "application/json", + ...corsHeaders, + }, + status: 400, } + ); + } - if (!newPath || typeof newPath !== "string") { - return new Response( - JSON.stringify({ - error: "New path is required and must be a string", - }), - { - headers: { - "Content-Type": "application/json", - ...corsHeaders, - }, - status: 400, - } - ); + if (!newPath || typeof newPath !== "string") { + return new Response( + JSON.stringify({ + error: "New path is required and must be a string", + }), + { + headers: { + "Content-Type": "application/json", + ...corsHeaders, + }, + status: 400, } + ); + } - // Basic safety check - prevent dangerous paths - const dangerousPatterns = [ - /^\/$/, // Root directory - /^\/etc/, // System directories - /^\/var/, // System directories - /^\/usr/, // System directories - /^\/bin/, // System directories - /^\/sbin/, // System directories - /^\/boot/, // System directories - /^\/dev/, // System directories - /^\/proc/, // System directories - /^\/sys/, // System directories - /^\/tmp\/\.\./, // Path traversal attempts - /\.\./, // Path traversal attempts - ]; - - if ( - dangerousPatterns.some( - (pattern) => pattern.test(oldPath) || pattern.test(newPath) - ) - ) { - return new Response( - JSON.stringify({ - error: "Dangerous path not allowed", - }), - { - headers: { - "Content-Type": "application/json", - ...corsHeaders, - }, - status: 400, - } - ); + // Basic safety check - prevent dangerous paths + const dangerousPatterns = [ + /^\/$/, // Root directory + /^\/etc/, // System directories + /^\/var/, // System directories + /^\/usr/, // System directories + /^\/bin/, // System directories + /^\/sbin/, // System directories + /^\/boot/, // System directories + /^\/dev/, // System directories + /^\/proc/, // System directories + /^\/sys/, // System directories + 
/^\/tmp\/\.\./, // Path traversal attempts + /\.\./, // Path traversal attempts + ]; + + if ( + dangerousPatterns.some( + (pattern) => pattern.test(oldPath) || pattern.test(newPath) + ) + ) { + return new Response( + JSON.stringify({ + error: "Dangerous path not allowed", + }), + { + headers: { + "Content-Type": "application/json", + ...corsHeaders, + }, + status: 400, } - - console.log(`[Server] Renaming file: ${oldPath} -> ${newPath}`); - - const result = await executeRenameFile(oldPath, newPath, sessionId); - - return new Response( - JSON.stringify({ - exitCode: result.exitCode, - newPath, - oldPath, - success: result.success, - timestamp: new Date().toISOString(), - }), - { - headers: { - "Content-Type": "application/json", - ...corsHeaders, - }, - } - ); - } catch (error) { - console.error("[Server] Error in handleRenameFileRequest:", error); - return new Response( - JSON.stringify({ - error: "Failed to rename file", - message: error instanceof Error ? error.message : "Unknown error", - }), - { - headers: { - "Content-Type": "application/json", - ...corsHeaders, - }, - status: 500, - } - ); + ); } -} + console.log(`[Server] Renaming file: ${oldPath} -> ${newPath}`); + + const result = await executeRenameFile(oldPath, newPath, sessionId); + + return new Response( + JSON.stringify({ + exitCode: result.exitCode, + newPath, + oldPath, + success: result.success, + timestamp: new Date().toISOString(), + }), + { + headers: { + "Content-Type": "application/json", + ...corsHeaders, + }, + } + ); + } catch (error) { + console.error("[Server] Error in handleRenameFileRequest:", error); + return new Response( + JSON.stringify({ + error: "Failed to rename file", + message: error instanceof Error ? error.message : "Unknown error", + }), + { + headers: { + "Content-Type": "application/json", + ...corsHeaders, + }, + status: 500, + } + ); + } +} function executeMoveFile( sourcePath: string, @@ -729,116 +724,115 @@ function executeMoveFile( } export async function handleMoveFileRequest( - req: Request, - corsHeaders: Record + req: Request, + corsHeaders: Record ): Promise { - try { - const body = (await req.json()) as MoveFileRequest; - const { sourcePath, destinationPath, sessionId } = body; - - if (!sourcePath || typeof sourcePath !== "string") { - return new Response( - JSON.stringify({ - error: "Source path is required and must be a string", - }), - { - headers: { - "Content-Type": "application/json", - ...corsHeaders, - }, - status: 400, - } - ); + try { + const body = (await req.json()) as MoveFileRequest; + const { sourcePath, destinationPath, sessionId } = body; + + if (!sourcePath || typeof sourcePath !== "string") { + return new Response( + JSON.stringify({ + error: "Source path is required and must be a string", + }), + { + headers: { + "Content-Type": "application/json", + ...corsHeaders, + }, + status: 400, } + ); + } - if (!destinationPath || typeof destinationPath !== "string") { - return new Response( - JSON.stringify({ - error: "Destination path is required and must be a string", - }), - { - headers: { - "Content-Type": "application/json", - ...corsHeaders, - }, - status: 400, - } - ); + if (!destinationPath || typeof destinationPath !== "string") { + return new Response( + JSON.stringify({ + error: "Destination path is required and must be a string", + }), + { + headers: { + "Content-Type": "application/json", + ...corsHeaders, + }, + status: 400, } + ); + } - // Basic safety check - prevent dangerous paths - const dangerousPatterns = [ - /^\/$/, // Root directory - /^\/etc/, // 
System directories - /^\/var/, // System directories - /^\/usr/, // System directories - /^\/bin/, // System directories - /^\/sbin/, // System directories - /^\/boot/, // System directories - /^\/dev/, // System directories - /^\/proc/, // System directories - /^\/sys/, // System directories - /^\/tmp\/\.\./, // Path traversal attempts - /\.\./, // Path traversal attempts - ]; - - if ( - dangerousPatterns.some( - (pattern) => pattern.test(sourcePath) || pattern.test(destinationPath) - ) - ) { - return new Response( - JSON.stringify({ - error: "Dangerous path not allowed", - }), - { - headers: { - "Content-Type": "application/json", - ...corsHeaders, - }, - status: 400, - } - ); + // Basic safety check - prevent dangerous paths + const dangerousPatterns = [ + /^\/$/, // Root directory + /^\/etc/, // System directories + /^\/var/, // System directories + /^\/usr/, // System directories + /^\/bin/, // System directories + /^\/sbin/, // System directories + /^\/boot/, // System directories + /^\/dev/, // System directories + /^\/proc/, // System directories + /^\/sys/, // System directories + /^\/tmp\/\.\./, // Path traversal attempts + /\.\./, // Path traversal attempts + ]; + + if ( + dangerousPatterns.some( + (pattern) => pattern.test(sourcePath) || pattern.test(destinationPath) + ) + ) { + return new Response( + JSON.stringify({ + error: "Dangerous path not allowed", + }), + { + headers: { + "Content-Type": "application/json", + ...corsHeaders, + }, + status: 400, } - - console.log(`[Server] Moving file: ${sourcePath} -> ${destinationPath}`); - - const result = await executeMoveFile( - sourcePath, - destinationPath, - sessionId - ); - - return new Response( - JSON.stringify({ - destinationPath, - exitCode: result.exitCode, - sourcePath, - success: result.success, - timestamp: new Date().toISOString(), - }), - { - headers: { - "Content-Type": "application/json", - ...corsHeaders, - }, - } - ); - } catch (error) { - console.error("[Server] Error in handleMoveFileRequest:", error); - return new Response( - JSON.stringify({ - error: "Failed to move file", - message: error instanceof Error ? error.message : "Unknown error", - }), - { - headers: { - "Content-Type": "application/json", - ...corsHeaders, - }, - status: 500, - } - ); + ); } -} + console.log(`[Server] Moving file: ${sourcePath} -> ${destinationPath}`); + + const result = await executeMoveFile( + sourcePath, + destinationPath, + sessionId + ); + + return new Response( + JSON.stringify({ + destinationPath, + exitCode: result.exitCode, + sourcePath, + success: result.success, + timestamp: new Date().toISOString(), + }), + { + headers: { + "Content-Type": "application/json", + ...corsHeaders, + }, + } + ); + } catch (error) { + console.error("[Server] Error in handleMoveFileRequest:", error); + return new Response( + JSON.stringify({ + error: "Failed to move file", + message: error instanceof Error ? 
error.message : "Unknown error", + }), + { + headers: { + "Content-Type": "application/json", + ...corsHeaders, + }, + status: 500, + } + ); + } +} diff --git a/packages/sandbox/container_src/handler/git.ts b/packages/sandbox/container_src/handler/git.ts index 99581e9..b27308c 100644 --- a/packages/sandbox/container_src/handler/git.ts +++ b/packages/sandbox/container_src/handler/git.ts @@ -129,8 +129,7 @@ export async function handleGitCheckoutRequest( // Generate target directory if not provided using cryptographically secure randomness const checkoutDir = - targetDir || - `repo_${Date.now()}_${randomBytes(6).toString('hex')}`; + targetDir || `repo_${Date.now()}_${randomBytes(6).toString("hex")}`; console.log( `[Server] Checking out repository: ${repoUrl} to ${checkoutDir}` @@ -179,4 +178,3 @@ export async function handleGitCheckoutRequest( ); } } - diff --git a/packages/sandbox/container_src/handler/ports.ts b/packages/sandbox/container_src/handler/ports.ts index a40afdc..e8b4f49 100644 --- a/packages/sandbox/container_src/handler/ports.ts +++ b/packages/sandbox/container_src/handler/ports.ts @@ -260,7 +260,9 @@ export async function handleProxyRequest( const targetUrl = `http://127.0.0.1:${port}${targetPath}${url.search}`; console.log(`[Server] Proxying request to: ${targetUrl}`); - console.log(`[Server] Method: ${req.method}, Port: ${port}, Path: ${targetPath}`); + console.log( + `[Server] Method: ${req.method}, Port: ${port}, Path: ${targetPath}` + ); try { // Forward the request to the target port @@ -284,7 +286,8 @@ export async function handleProxyRequest( return new Response( JSON.stringify({ error: `Service on port ${port} is not responding`, - message: fetchError instanceof Error ? fetchError.message : "Unknown error", + message: + fetchError instanceof Error ? 
fetchError.message : "Unknown error", }), { headers: { diff --git a/packages/sandbox/container_src/handler/process.ts b/packages/sandbox/container_src/handler/process.ts index 02f8fcc..d69afaa 100644 --- a/packages/sandbox/container_src/handler/process.ts +++ b/packages/sandbox/container_src/handler/process.ts @@ -1,640 +1,649 @@ import { type SpawnOptions, spawn } from "node:child_process"; import { randomBytes } from "node:crypto"; -import type { ProcessRecord, ProcessStatus, StartProcessRequest } from "../types"; +import type { + ProcessRecord, + ProcessStatus, + StartProcessRequest, +} from "../types"; // Generate a unique process ID using cryptographically secure randomness function generateProcessId(): string { - return `proc_${Date.now()}_${randomBytes(6).toString('hex')}`; + return `proc_${Date.now()}_${randomBytes(6).toString("hex")}`; } - // Process management handlers export async function handleStartProcessRequest( - processes: Map, - req: Request, - corsHeaders: Record + processes: Map, + req: Request, + corsHeaders: Record ): Promise { + try { + const body = (await req.json()) as StartProcessRequest; + const { command, options = {} } = body; + + if (!command || typeof command !== "string") { + return new Response( + JSON.stringify({ + error: "Command is required and must be a string", + }), + { + headers: { + "Content-Type": "application/json", + ...corsHeaders, + }, + status: 400, + } + ); + } + + const processId = options.processId || generateProcessId(); + const startTime = new Date(); + + // Check if process ID already exists + if (processes.has(processId)) { + return new Response( + JSON.stringify({ + error: `Process already exists: ${processId}`, + }), + { + headers: { + "Content-Type": "application/json", + ...corsHeaders, + }, + status: 409, + } + ); + } + + console.log( + `[Server] Starting background process: ${command} (ID: ${processId})` + ); + + // Create process record in starting state + const processRecord: ProcessRecord = { + id: processId, + command, + status: "starting", + startTime, + sessionId: options.sessionId, + stdout: "", + stderr: "", + outputListeners: new Set(), + statusListeners: new Set(), + }; + + processes.set(processId, processRecord); + + // Start the actual process try { - const body = (await req.json()) as StartProcessRequest; - const { command, options = {} } = body; - - if (!command || typeof command !== "string") { - return new Response( - JSON.stringify({ - error: "Command is required and must be a string", - }), - { - headers: { - "Content-Type": "application/json", - ...corsHeaders, - }, - status: 400, - } - ); + const spawnOptions: SpawnOptions = { + cwd: options.cwd || process.cwd(), + env: { ...process.env, ...options.env }, + detached: false, + shell: true, + stdio: ["pipe", "pipe", "pipe"] as const, + }; + + // Use shell execution to preserve quotes and complex command structures + const childProcess = spawn(command, spawnOptions); + processRecord.childProcess = childProcess; + processRecord.pid = childProcess.pid; + processRecord.status = "running"; + + // Set up output handling + childProcess.stdout?.on("data", (data) => { + const output = data.toString(options.encoding || "utf8"); + processRecord.stdout += output; + + // Notify listeners + for (const listener of processRecord.outputListeners) { + listener("stdout", output); } + }); - const processId = options.processId || generateProcessId(); - const startTime = new Date(); - - // Check if process ID already exists - if (processes.has(processId)) { - return new Response( - 
JSON.stringify({ - error: `Process already exists: ${processId}`, - }), - { - headers: { - "Content-Type": "application/json", - ...corsHeaders, - }, - status: 409, - } - ); + childProcess.stderr?.on("data", (data) => { + const output = data.toString(options.encoding || "utf8"); + processRecord.stderr += output; + + // Notify listeners + for (const listener of processRecord.outputListeners) { + listener("stderr", output); + } + }); + + childProcess.on("exit", (code, signal) => { + processRecord.endTime = new Date(); + processRecord.exitCode = code !== null ? code : -1; + + if (signal) { + processRecord.status = "killed"; + } else if (code === 0) { + processRecord.status = "completed"; + } else { + processRecord.status = "failed"; } - console.log(`[Server] Starting background process: ${command} (ID: ${processId})`); - - // Create process record in starting state - const processRecord: ProcessRecord = { - id: processId, - command, - status: 'starting', - startTime, - sessionId: options.sessionId, - stdout: '', - stderr: '', - outputListeners: new Set(), - statusListeners: new Set() - }; + // Notify status listeners + for (const listener of processRecord.statusListeners) { + listener(processRecord.status); + } - processes.set(processId, processRecord); + console.log( + `[Server] Process ${processId} exited with code ${code} (signal: ${signal})` + ); + }); - // Start the actual process - try { - const spawnOptions: SpawnOptions = { - cwd: options.cwd || process.cwd(), - env: { ...process.env, ...options.env }, - detached: false, - shell: true, - stdio: ["pipe", "pipe", "pipe"] as const - }; - - // Use shell execution to preserve quotes and complex command structures - const childProcess = spawn(command, spawnOptions); - processRecord.childProcess = childProcess; - processRecord.pid = childProcess.pid; - processRecord.status = 'running'; - - // Set up output handling - childProcess.stdout?.on('data', (data) => { - const output = data.toString(options.encoding || 'utf8'); - processRecord.stdout += output; - - // Notify listeners - for (const listener of processRecord.outputListeners) { - listener('stdout', output); - } - }); - - childProcess.stderr?.on('data', (data) => { - const output = data.toString(options.encoding || 'utf8'); - processRecord.stderr += output; - - // Notify listeners - for (const listener of processRecord.outputListeners) { - listener('stderr', output); - } - }); - - childProcess.on('exit', (code, signal) => { - processRecord.endTime = new Date(); - processRecord.exitCode = code !== null ? 
code : -1; - - if (signal) { - processRecord.status = 'killed'; - } else if (code === 0) { - processRecord.status = 'completed'; - } else { - processRecord.status = 'failed'; - } - - // Notify status listeners - for (const listener of processRecord.statusListeners) { - listener(processRecord.status); - } - - console.log(`[Server] Process ${processId} exited with code ${code} (signal: ${signal})`); - }); - - childProcess.on('error', (error) => { - processRecord.status = 'error'; - processRecord.endTime = new Date(); - console.error(`[Server] Process ${processId} error:`, error); - - // Notify status listeners - for (const listener of processRecord.statusListeners) { - listener('error'); - } - }); - - // Timeout handling - if (options.timeout) { - setTimeout(() => { - if (processRecord.status === 'running') { - childProcess.kill('SIGTERM'); - console.log(`[Server] Process ${processId} timed out after ${options.timeout}ms`); - } - }, options.timeout); - } - - return new Response( - JSON.stringify({ - process: { - id: processRecord.id, - pid: processRecord.pid, - command: processRecord.command, - status: processRecord.status, - startTime: processRecord.startTime.toISOString(), - sessionId: processRecord.sessionId - } - }), - { - headers: { - "Content-Type": "application/json", - ...corsHeaders, - }, - } + childProcess.on("error", (error) => { + processRecord.status = "error"; + processRecord.endTime = new Date(); + console.error(`[Server] Process ${processId} error:`, error); + + // Notify status listeners + for (const listener of processRecord.statusListeners) { + listener("error"); + } + }); + + // Timeout handling + if (options.timeout) { + setTimeout(() => { + if (processRecord.status === "running") { + childProcess.kill("SIGTERM"); + console.log( + `[Server] Process ${processId} timed out after ${options.timeout}ms` ); - } catch (error) { - // Clean up on error - processes.delete(processId); - throw error; + } + }, options.timeout); + } + + return new Response( + JSON.stringify({ + process: { + id: processRecord.id, + pid: processRecord.pid, + command: processRecord.command, + status: processRecord.status, + startTime: processRecord.startTime.toISOString(), + sessionId: processRecord.sessionId, + }, + }), + { + headers: { + "Content-Type": "application/json", + ...corsHeaders, + }, } + ); } catch (error) { - console.error("[Server] Error in handleStartProcessRequest:", error); - return new Response( - JSON.stringify({ - error: "Failed to start process", - message: error instanceof Error ? error.message : "Unknown error", - }), - { - headers: { - "Content-Type": "application/json", - ...corsHeaders, - }, - status: 500, - } - ); + // Clean up on error + processes.delete(processId); + throw error; } + } catch (error) { + console.error("[Server] Error in handleStartProcessRequest:", error); + return new Response( + JSON.stringify({ + error: "Failed to start process", + message: error instanceof Error ? 
error.message : "Unknown error", + }), + { + headers: { + "Content-Type": "application/json", + ...corsHeaders, + }, + status: 500, + } + ); + } } export async function handleListProcessesRequest( - processes: Map, - req: Request, - corsHeaders: Record + processes: Map, + req: Request, + corsHeaders: Record ): Promise { - try { - const processesArray = Array.from(processes.values()).map(record => ({ - id: record.id, - pid: record.pid, - command: record.command, - status: record.status, - startTime: record.startTime.toISOString(), - endTime: record.endTime?.toISOString(), - exitCode: record.exitCode, - sessionId: record.sessionId - })); - - return new Response( - JSON.stringify({ - processes: processesArray, - count: processesArray.length, - timestamp: new Date().toISOString(), - }), - { - headers: { - "Content-Type": "application/json", - ...corsHeaders, - }, - } - ); - } catch (error) { - console.error("[Server] Error in handleListProcessesRequest:", error); - return new Response( - JSON.stringify({ - error: "Failed to list processes", - message: error instanceof Error ? error.message : "Unknown error", - }), - { - headers: { - "Content-Type": "application/json", - ...corsHeaders, - }, - status: 500, - } - ); - } + try { + const processesArray = Array.from(processes.values()).map((record) => ({ + id: record.id, + pid: record.pid, + command: record.command, + status: record.status, + startTime: record.startTime.toISOString(), + endTime: record.endTime?.toISOString(), + exitCode: record.exitCode, + sessionId: record.sessionId, + })); + + return new Response( + JSON.stringify({ + processes: processesArray, + count: processesArray.length, + timestamp: new Date().toISOString(), + }), + { + headers: { + "Content-Type": "application/json", + ...corsHeaders, + }, + } + ); + } catch (error) { + console.error("[Server] Error in handleListProcessesRequest:", error); + return new Response( + JSON.stringify({ + error: "Failed to list processes", + message: error instanceof Error ? error.message : "Unknown error", + }), + { + headers: { + "Content-Type": "application/json", + ...corsHeaders, + }, + status: 500, + } + ); + } } export async function handleGetProcessRequest( - processes: Map, - req: Request, - corsHeaders: Record, - processId: string + processes: Map, + req: Request, + corsHeaders: Record, + processId: string ): Promise { - try { - const record = processes.get(processId); - - if (!record) { - return new Response( - JSON.stringify({ - process: null - }), - { - headers: { - "Content-Type": "application/json", - ...corsHeaders, - }, - status: 404, - } - ); + try { + const record = processes.get(processId); + + if (!record) { + return new Response( + JSON.stringify({ + process: null, + }), + { + headers: { + "Content-Type": "application/json", + ...corsHeaders, + }, + status: 404, } - - return new Response( - JSON.stringify({ - process: { - id: record.id, - pid: record.pid, - command: record.command, - status: record.status, - startTime: record.startTime.toISOString(), - endTime: record.endTime?.toISOString(), - exitCode: record.exitCode, - sessionId: record.sessionId - } - }), - { - headers: { - "Content-Type": "application/json", - ...corsHeaders, - }, - } - ); - } catch (error) { - console.error("[Server] Error in handleGetProcessRequest:", error); - return new Response( - JSON.stringify({ - error: "Failed to get process", - message: error instanceof Error ? 
error.message : "Unknown error", - }), - { - headers: { - "Content-Type": "application/json", - ...corsHeaders, - }, - status: 500, - } - ); + ); } + + return new Response( + JSON.stringify({ + process: { + id: record.id, + pid: record.pid, + command: record.command, + status: record.status, + startTime: record.startTime.toISOString(), + endTime: record.endTime?.toISOString(), + exitCode: record.exitCode, + sessionId: record.sessionId, + }, + }), + { + headers: { + "Content-Type": "application/json", + ...corsHeaders, + }, + } + ); + } catch (error) { + console.error("[Server] Error in handleGetProcessRequest:", error); + return new Response( + JSON.stringify({ + error: "Failed to get process", + message: error instanceof Error ? error.message : "Unknown error", + }), + { + headers: { + "Content-Type": "application/json", + ...corsHeaders, + }, + status: 500, + } + ); + } } export async function handleKillProcessRequest( - processes: Map, - req: Request, - corsHeaders: Record, - processId: string + processes: Map, + req: Request, + corsHeaders: Record, + processId: string ): Promise { - try { - const record = processes.get(processId); - - if (!record) { - return new Response( - JSON.stringify({ - error: `Process not found: ${processId}`, - }), - { - headers: { - "Content-Type": "application/json", - ...corsHeaders, - }, - status: 404, - } - ); + try { + const record = processes.get(processId); + + if (!record) { + return new Response( + JSON.stringify({ + error: `Process not found: ${processId}`, + }), + { + headers: { + "Content-Type": "application/json", + ...corsHeaders, + }, + status: 404, } + ); + } - if (record.childProcess && record.status === 'running') { - record.childProcess.kill('SIGTERM'); - console.log(`[Server] Sent SIGTERM to process ${processId}`); + if (record.childProcess && record.status === "running") { + record.childProcess.kill("SIGTERM"); + console.log(`[Server] Sent SIGTERM to process ${processId}`); - // Give it a moment to terminate gracefully, then force kill - setTimeout(() => { - if (record.childProcess && record.status === 'running') { - record.childProcess.kill('SIGKILL'); - console.log(`[Server] Force killed process ${processId}`); - } - }, 5000); + // Give it a moment to terminate gracefully, then force kill + setTimeout(() => { + if (record.childProcess && record.status === "running") { + record.childProcess.kill("SIGKILL"); + console.log(`[Server] Force killed process ${processId}`); } + }, 5000); + } - // Mark as killed locally - record.status = 'killed'; - record.endTime = new Date(); - record.exitCode = -1; - - // Notify status listeners - for (const listener of record.statusListeners) { - listener('killed'); - } + // Mark as killed locally + record.status = "killed"; + record.endTime = new Date(); + record.exitCode = -1; - return new Response( - JSON.stringify({ - success: true, - message: `Process ${processId} killed`, - timestamp: new Date().toISOString(), - }), - { - headers: { - "Content-Type": "application/json", - ...corsHeaders, - }, - } - ); - } catch (error) { - console.error("[Server] Error in handleKillProcessRequest:", error); - return new Response( - JSON.stringify({ - error: "Failed to kill process", - message: error instanceof Error ? 
error.message : "Unknown error", - }), - { - headers: { - "Content-Type": "application/json", - ...corsHeaders, - }, - status: 500, - } - ); + // Notify status listeners + for (const listener of record.statusListeners) { + listener("killed"); } + + return new Response( + JSON.stringify({ + success: true, + message: `Process ${processId} killed`, + timestamp: new Date().toISOString(), + }), + { + headers: { + "Content-Type": "application/json", + ...corsHeaders, + }, + } + ); + } catch (error) { + console.error("[Server] Error in handleKillProcessRequest:", error); + return new Response( + JSON.stringify({ + error: "Failed to kill process", + message: error instanceof Error ? error.message : "Unknown error", + }), + { + headers: { + "Content-Type": "application/json", + ...corsHeaders, + }, + status: 500, + } + ); + } } export async function handleKillAllProcessesRequest( - processes: Map, - req: Request, - corsHeaders: Record + processes: Map, + req: Request, + corsHeaders: Record ): Promise { - try { - let killedCount = 0; - - for (const [processId, record] of processes) { - if (record.childProcess && record.status === 'running') { - try { - record.childProcess.kill('SIGTERM'); - record.status = 'killed'; - record.endTime = new Date(); - record.exitCode = -1; - - // Notify status listeners - for (const listener of record.statusListeners) { - listener('killed'); - } - - killedCount++; - console.log(`[Server] Killed process ${processId}`); - } catch (error) { - console.error(`[Server] Failed to kill process ${processId}:`, error); - } - } - } + try { + let killedCount = 0; - return new Response( - JSON.stringify({ - success: true, - killedCount, - message: `Killed ${killedCount} processes`, - timestamp: new Date().toISOString(), - }), - { - headers: { - "Content-Type": "application/json", - ...corsHeaders, - }, - } - ); - } catch (error) { - console.error("[Server] Error in handleKillAllProcessesRequest:", error); - return new Response( - JSON.stringify({ - error: "Failed to kill all processes", - message: error instanceof Error ? error.message : "Unknown error", - }), - { - headers: { - "Content-Type": "application/json", - ...corsHeaders, - }, - status: 500, - } - ); + for (const [processId, record] of processes) { + if (record.childProcess && record.status === "running") { + try { + record.childProcess.kill("SIGTERM"); + record.status = "killed"; + record.endTime = new Date(); + record.exitCode = -1; + + // Notify status listeners + for (const listener of record.statusListeners) { + listener("killed"); + } + + killedCount++; + console.log(`[Server] Killed process ${processId}`); + } catch (error) { + console.error(`[Server] Failed to kill process ${processId}:`, error); + } + } } + + return new Response( + JSON.stringify({ + success: true, + killedCount, + message: `Killed ${killedCount} processes`, + timestamp: new Date().toISOString(), + }), + { + headers: { + "Content-Type": "application/json", + ...corsHeaders, + }, + } + ); + } catch (error) { + console.error("[Server] Error in handleKillAllProcessesRequest:", error); + return new Response( + JSON.stringify({ + error: "Failed to kill all processes", + message: error instanceof Error ? 
error.message : "Unknown error", + }), + { + headers: { + "Content-Type": "application/json", + ...corsHeaders, + }, + status: 500, + } + ); + } } export async function handleGetProcessLogsRequest( - processes: Map, - req: Request, - corsHeaders: Record, - processId: string + processes: Map, + req: Request, + corsHeaders: Record, + processId: string ): Promise { - try { - const record = processes.get(processId); - - if (!record) { - return new Response( - JSON.stringify({ - error: `Process not found: ${processId}`, - }), - { - headers: { - "Content-Type": "application/json", - ...corsHeaders, - }, - status: 404, - } - ); + try { + const record = processes.get(processId); + + if (!record) { + return new Response( + JSON.stringify({ + error: `Process not found: ${processId}`, + }), + { + headers: { + "Content-Type": "application/json", + ...corsHeaders, + }, + status: 404, } - - return new Response( - JSON.stringify({ - stdout: record.stdout, - stderr: record.stderr, - processId: record.id, - }), - { - headers: { - "Content-Type": "application/json", - ...corsHeaders, - }, - } - ); - } catch (error) { - console.error("[Server] Error in handleGetProcessLogsRequest:", error); - return new Response( - JSON.stringify({ - error: "Failed to get process logs", - message: error instanceof Error ? error.message : "Unknown error", - }), - { - headers: { - "Content-Type": "application/json", - ...corsHeaders, - }, - status: 500, - } - ); + ); } + + return new Response( + JSON.stringify({ + stdout: record.stdout, + stderr: record.stderr, + processId: record.id, + }), + { + headers: { + "Content-Type": "application/json", + ...corsHeaders, + }, + } + ); + } catch (error) { + console.error("[Server] Error in handleGetProcessLogsRequest:", error); + return new Response( + JSON.stringify({ + error: "Failed to get process logs", + message: error instanceof Error ? 
error.message : "Unknown error", + }), + { + headers: { + "Content-Type": "application/json", + ...corsHeaders, + }, + status: 500, + } + ); + } } export async function handleStreamProcessLogsRequest( - processes: Map, - req: Request, - corsHeaders: Record, - processId: string + processes: Map, + req: Request, + corsHeaders: Record, + processId: string ): Promise { - try { - const record = processes.get(processId); - - if (!record) { - return new Response( - JSON.stringify({ - error: `Process not found: ${processId}`, - }), - { - headers: { - "Content-Type": "application/json", - ...corsHeaders, - }, - status: 404, - } - ); + try { + const record = processes.get(processId); + + if (!record) { + return new Response( + JSON.stringify({ + error: `Process not found: ${processId}`, + }), + { + headers: { + "Content-Type": "application/json", + ...corsHeaders, + }, + status: 404, } - - // Create a readable stream for Server-Sent Events - let isConnected = true; - - const stream = new ReadableStream({ - start(controller) { - // Send existing logs first - if (record.stdout) { - const event = `data: ${JSON.stringify({ - type: 'stdout', - timestamp: new Date().toISOString(), - data: record.stdout, - processId, - sessionId: record.sessionId - })}\n\n`; - controller.enqueue(new TextEncoder().encode(event)); - } - - if (record.stderr) { - const event = `data: ${JSON.stringify({ - type: 'stderr', - timestamp: new Date().toISOString(), - data: record.stderr, - processId, - sessionId: record.sessionId - })}\n\n`; - controller.enqueue(new TextEncoder().encode(event)); - } - - // Send status - const statusEvent = `data: ${JSON.stringify({ - type: 'status', - timestamp: new Date().toISOString(), - data: `Process status: ${record.status}`, - processId, - sessionId: record.sessionId - })}\n\n`; - controller.enqueue(new TextEncoder().encode(statusEvent)); - - // Set up real-time streaming for ongoing output - const outputListener = (stream: 'stdout' | 'stderr', data: string) => { - if (!isConnected) return; - - const event = `data: ${JSON.stringify({ - type: stream, - timestamp: new Date().toISOString(), - data, - processId, - sessionId: record.sessionId - })}\n\n`; - - try { - controller.enqueue(new TextEncoder().encode(event)); - } catch (error) { - console.log(`[Server] Stream closed for process ${processId}`); - isConnected = false; - } - }; - - const statusListener = (status: ProcessStatus) => { - if (!isConnected) return; - - const event = `data: ${JSON.stringify({ - type: 'status', - timestamp: new Date().toISOString(), - data: `Process status: ${status}`, - processId, - sessionId: record.sessionId - })}\n\n`; - - try { - controller.enqueue(new TextEncoder().encode(event)); - } catch (error) { - console.log(`[Server] Stream closed for process ${processId}`); - isConnected = false; - } - - // Close stream when process completes - if (['completed', 'failed', 'killed', 'error'].includes(status)) { - setTimeout(() => { - record.outputListeners.delete(outputListener); - record.statusListeners.delete(statusListener); - controller.close(); - }, 1000); // Give a moment for final events - } - }; - - // Add listeners - record.outputListeners.add(outputListener); - record.statusListeners.add(statusListener); - }, - - cancel() { - isConnected = false; - console.log(`[Server] Log stream cancelled for process ${processId}`); - } - }); - - return new Response(stream, { - headers: { - "Content-Type": "text/event-stream", - "Cache-Control": "no-cache", - "Connection": "keep-alive", - ...corsHeaders, - }, - }); - } catch 
(error) { - console.error("[Server] Error in handleStreamProcessLogsRequest:", error); - return new Response( - JSON.stringify({ - error: "Failed to stream process logs", - message: error instanceof Error ? error.message : "Unknown error", - }), - { - headers: { - "Content-Type": "application/json", - ...corsHeaders, - }, - status: 500, - } - ); + ); } + + // Create a readable stream for Server-Sent Events + let isConnected = true; + + const stream = new ReadableStream({ + start(controller) { + // Send existing logs first + if (record.stdout) { + const event = `data: ${JSON.stringify({ + type: "stdout", + timestamp: new Date().toISOString(), + data: record.stdout, + processId, + sessionId: record.sessionId, + })}\n\n`; + controller.enqueue(new TextEncoder().encode(event)); + } + + if (record.stderr) { + const event = `data: ${JSON.stringify({ + type: "stderr", + timestamp: new Date().toISOString(), + data: record.stderr, + processId, + sessionId: record.sessionId, + })}\n\n`; + controller.enqueue(new TextEncoder().encode(event)); + } + + // Send status + const statusEvent = `data: ${JSON.stringify({ + type: "status", + timestamp: new Date().toISOString(), + data: `Process status: ${record.status}`, + processId, + sessionId: record.sessionId, + })}\n\n`; + controller.enqueue(new TextEncoder().encode(statusEvent)); + + // Set up real-time streaming for ongoing output + const outputListener = (stream: "stdout" | "stderr", data: string) => { + if (!isConnected) return; + + const event = `data: ${JSON.stringify({ + type: stream, + timestamp: new Date().toISOString(), + data, + processId, + sessionId: record.sessionId, + })}\n\n`; + + try { + controller.enqueue(new TextEncoder().encode(event)); + } catch (error) { + console.log(`[Server] Stream closed for process ${processId}`); + isConnected = false; + } + }; + + const statusListener = (status: ProcessStatus) => { + if (!isConnected) return; + + const event = `data: ${JSON.stringify({ + type: "status", + timestamp: new Date().toISOString(), + data: `Process status: ${status}`, + processId, + sessionId: record.sessionId, + })}\n\n`; + + try { + controller.enqueue(new TextEncoder().encode(event)); + } catch (error) { + console.log(`[Server] Stream closed for process ${processId}`); + isConnected = false; + } + + // Close stream when process completes + if (["completed", "failed", "killed", "error"].includes(status)) { + setTimeout(() => { + record.outputListeners.delete(outputListener); + record.statusListeners.delete(statusListener); + controller.close(); + }, 1000); // Give a moment for final events + } + }; + + // Add listeners + record.outputListeners.add(outputListener); + record.statusListeners.add(statusListener); + }, + + cancel() { + isConnected = false; + console.log(`[Server] Log stream cancelled for process ${processId}`); + }, + }); + + return new Response(stream, { + headers: { + "Content-Type": "text/event-stream", + "Cache-Control": "no-cache", + Connection: "keep-alive", + ...corsHeaders, + }, + }); + } catch (error) { + console.error("[Server] Error in handleStreamProcessLogsRequest:", error); + return new Response( + JSON.stringify({ + error: "Failed to stream process logs", + message: error instanceof Error ? 
error.message : "Unknown error", + }), + { + headers: { + "Content-Type": "application/json", + ...corsHeaders, + }, + status: 500, + } + ); + } } diff --git a/packages/sandbox/container_src/index.ts b/packages/sandbox/container_src/index.ts index c0972b4..7acfa49 100644 --- a/packages/sandbox/container_src/index.ts +++ b/packages/sandbox/container_src/index.ts @@ -1,6 +1,9 @@ import { randomBytes } from "node:crypto"; import { serve } from "bun"; -import { handleExecuteRequest, handleStreamingExecuteRequest } from "./handler/exec"; +import { + handleExecuteRequest, + handleStreamingExecuteRequest, +} from "./handler/exec"; import { handleDeleteFileRequest, handleMkdirRequest, @@ -38,7 +41,7 @@ const processes = new Map(); // Generate a unique session ID using cryptographically secure randomness function generateSessionId(): string { - return `session_${Date.now()}_${randomBytes(6).toString('hex')}`; + return `session_${Date.now()}_${randomBytes(6).toString("hex")}`; } // Clean up old sessions (older than 1 hour) @@ -283,19 +286,39 @@ const server = serve({ default: // Handle dynamic routes for individual processes if (pathname.startsWith("/api/process/")) { - const segments = pathname.split('/'); + const segments = pathname.split("/"); if (segments.length >= 4) { const processId = segments[3]; const action = segments[4]; // Optional: logs, stream, etc. if (!action && req.method === "GET") { - return handleGetProcessRequest(processes, req, corsHeaders, processId); + return handleGetProcessRequest( + processes, + req, + corsHeaders, + processId + ); } else if (!action && req.method === "DELETE") { - return handleKillProcessRequest(processes, req, corsHeaders, processId); + return handleKillProcessRequest( + processes, + req, + corsHeaders, + processId + ); } else if (action === "logs" && req.method === "GET") { - return handleGetProcessLogsRequest(processes, req, corsHeaders, processId); + return handleGetProcessLogsRequest( + processes, + req, + corsHeaders, + processId + ); } else if (action === "stream" && req.method === "GET") { - return handleStreamProcessLogsRequest(processes, req, corsHeaders, processId); + return handleStreamProcessLogsRequest( + processes, + req, + corsHeaders, + processId + ); } } } @@ -311,7 +334,10 @@ const server = serve({ }); } } catch (error) { - console.error(`[Container] Error handling ${req.method} ${pathname}:`, error); + console.error( + `[Container] Error handling ${req.method} ${pathname}:`, + error + ); return new Response( JSON.stringify({ error: "Internal server error", @@ -330,7 +356,7 @@ const server = serve({ hostname: "0.0.0.0", port: 3000, // We don't need this, but typescript complains - websocket: { async message() { } }, + websocket: { async message() {} }, }); console.log(`🚀 Bun server running on http://0.0.0.0:${server.port}`); diff --git a/packages/sandbox/container_src/types.ts b/packages/sandbox/container_src/types.ts index a480ad3..7041458 100644 --- a/packages/sandbox/container_src/types.ts +++ b/packages/sandbox/container_src/types.ts @@ -2,12 +2,12 @@ import type { ChildProcess } from "node:child_process"; // Process management types export type ProcessStatus = - | 'starting' - | 'running' - | 'completed' - | 'failed' - | 'killed' - | 'error'; + | "starting" + | "running" + | "completed" + | "failed" + | "killed" + | "error"; export interface ProcessRecord { id: string; @@ -21,7 +21,7 @@ export interface ProcessRecord { childProcess?: ChildProcess; stdout: string; stderr: string; - outputListeners: Set<(stream: 'stdout' | 'stderr', 
data: string) => void>; + outputListeners: Set<(stream: "stdout" | "stderr", data: string) => void>; statusListeners: Set<(status: ProcessStatus) => void>; } diff --git a/packages/sandbox/src/client.ts b/packages/sandbox/src/client.ts index 2198819..68d5abb 100644 --- a/packages/sandbox/src/client.ts +++ b/packages/sandbox/src/client.ts @@ -4,7 +4,7 @@ import type { GetProcessResponse, ListProcessesResponse, StartProcessRequest, - StartProcessResponse + StartProcessResponse, } from "./types"; interface ExecuteRequest { @@ -253,10 +253,7 @@ export class HttpClient { } } - async execute( - command: string, - sessionId?: string - ): Promise { + async execute(command: string, sessionId?: string): Promise { try { const targetSessionId = sessionId || this.sessionId; @@ -305,7 +302,6 @@ export class HttpClient { } } - async executeCommandStream( command: string, sessionId?: string @@ -320,7 +316,7 @@ export class HttpClient { }), headers: { "Content-Type": "application/json", - "Accept": "text/event-stream", + Accept: "text/event-stream", }, method: "POST", }); @@ -338,9 +334,7 @@ export class HttpClient { throw new Error("No response body for streaming request"); } - console.log( - `[HTTP Client] Started command stream: ${command}` - ); + console.log(`[HTTP Client] Started command stream: ${command}`); return response.body; } catch (error) { @@ -392,7 +386,6 @@ export class HttpClient { } } - async mkdir( path: string, recursive: boolean = false, @@ -434,7 +427,6 @@ export class HttpClient { } } - async writeFile( path: string, content: string, @@ -478,7 +470,6 @@ export class HttpClient { } } - async readFile( path: string, encoding: string = "utf-8", @@ -520,7 +511,6 @@ export class HttpClient { } } - async deleteFile( path: string, sessionId?: string @@ -560,7 +550,6 @@ export class HttpClient { } } - async renameFile( oldPath: string, newPath: string, @@ -602,7 +591,6 @@ export class HttpClient { } } - async moveFile( sourcePath: string, destinationPath: string, @@ -644,7 +632,6 @@ export class HttpClient { } } - async exposePort(port: number, name?: string): Promise { try { const response = await this.doFetch(`/api/expose-port`, { @@ -670,7 +657,9 @@ export class HttpClient { const data: ExposePortResponse = await response.json(); console.log( - `[HTTP Client] Port exposed: ${port}${name ? ` (${name})` : ""}, Success: ${data.success}` + `[HTTP Client] Port exposed: ${port}${ + name ? 
` (${name})` : "" + }, Success: ${data.success}` ); return data; @@ -732,9 +721,7 @@ export class HttpClient { } const data: GetExposedPortsResponse = await response.json(); - console.log( - `[HTTP Client] Got ${data.count} exposed ports` - ); + console.log(`[HTTP Client] Got ${data.count} exposed ports`); return data; } catch (error) { @@ -871,9 +858,7 @@ export class HttpClient { } const data: ListProcessesResponse = await response.json(); - console.log( - `[HTTP Client] Listed ${data.processes.length} processes` - ); + console.log(`[HTTP Client] Listed ${data.processes.length} processes`); return data; } catch (error) { @@ -902,7 +887,9 @@ export class HttpClient { const data: GetProcessResponse = await response.json(); console.log( - `[HTTP Client] Got process ${processId}: ${data.process?.status || 'not found'}` + `[HTTP Client] Got process ${processId}: ${ + data.process?.status || "not found" + }` ); return data; @@ -912,7 +899,9 @@ export class HttpClient { } } - async killProcess(processId: string): Promise<{ success: boolean; message: string }> { + async killProcess( + processId: string + ): Promise<{ success: boolean; message: string }> { try { const response = await this.doFetch(`/api/process/${processId}`, { headers: { @@ -930,10 +919,11 @@ export class HttpClient { ); } - const data = await response.json() as { success: boolean; message: string }; - console.log( - `[HTTP Client] Killed process ${processId}` - ); + const data = (await response.json()) as { + success: boolean; + message: string; + }; + console.log(`[HTTP Client] Killed process ${processId}`); return data; } catch (error) { @@ -942,7 +932,11 @@ export class HttpClient { } } - async killAllProcesses(): Promise<{ success: boolean; killedCount: number; message: string }> { + async killAllProcesses(): Promise<{ + success: boolean; + killedCount: number; + message: string; + }> { try { const response = await this.doFetch("/api/process/kill-all", { headers: { @@ -960,10 +954,12 @@ export class HttpClient { ); } - const data = await response.json() as { success: boolean; killedCount: number; message: string }; - console.log( - `[HTTP Client] Killed ${data.killedCount} processes` - ); + const data = (await response.json()) as { + success: boolean; + killedCount: number; + message: string; + }; + console.log(`[HTTP Client] Killed ${data.killedCount} processes`); return data; } catch (error) { @@ -991,9 +987,7 @@ export class HttpClient { } const data: GetProcessLogsResponse = await response.json(); - console.log( - `[HTTP Client] Got logs for process ${processId}` - ); + console.log(`[HTTP Client] Got logs for process ${processId}`); return data; } catch (error) { @@ -1002,11 +996,13 @@ export class HttpClient { } } - async streamProcessLogs(processId: string): Promise> { + async streamProcessLogs( + processId: string + ): Promise> { try { const response = await this.doFetch(`/api/process/${processId}/stream`, { headers: { - "Accept": "text/event-stream", + Accept: "text/event-stream", "Cache-Control": "no-cache", }, method: "GET", diff --git a/packages/sandbox/src/index.ts b/packages/sandbox/src/index.ts index 89360c4..9b3dcc5 100644 --- a/packages/sandbox/src/index.ts +++ b/packages/sandbox/src/index.ts @@ -1,20 +1,30 @@ // Export types from client export type { - DeleteFileResponse, ExecuteResponse, + DeleteFileResponse, + ExecuteResponse, GitCheckoutResponse, - MkdirResponse, MoveFileResponse, - ReadFileResponse, RenameFileResponse, WriteFileResponse + MkdirResponse, + MoveFileResponse, + ReadFileResponse, + 
RenameFileResponse, + WriteFileResponse, } from "./client"; // Re-export request handler utilities export { - proxyToSandbox, type RouteInfo, type SandboxEnv -} from './request-handler'; + proxyToSandbox, + type RouteInfo, + type SandboxEnv, +} from "./request-handler"; export { getSandbox, Sandbox } from "./sandbox"; // Export SSE parser for converting ReadableStream to AsyncIterable -export { asyncIterableToSSEStream, parseSSEStream, responseToAsyncIterable } from "./sse-parser"; +export { + asyncIterableToSSEStream, + parseSSEStream, + responseToAsyncIterable, +} from "./sse-parser"; // Export event types for streaming export type { ExecEvent, LogEvent } from "./types"; diff --git a/packages/sandbox/src/request-handler.ts b/packages/sandbox/src/request-handler.ts index 20e524e..2f5603c 100644 --- a/packages/sandbox/src/request-handler.ts +++ b/packages/sandbox/src/request-handler.ts @@ -1,9 +1,5 @@ import { getSandbox, type Sandbox } from "./sandbox"; -import { - logSecurityEvent, - sanitizeSandboxId, - validatePort -} from "./security"; +import { logSecurityEvent, sanitizeSandboxId, validatePort } from "./security"; export interface SandboxEnv { Sandbox: DurableObjectNamespace; @@ -46,32 +42,38 @@ export async function proxyToSandbox( method: request.method, headers: { ...Object.fromEntries(request.headers), - 'X-Original-URL': request.url, - 'X-Forwarded-Host': url.hostname, - 'X-Forwarded-Proto': url.protocol.replace(':', ''), - 'X-Sandbox-Name': sandboxId, // Pass the friendly name + "X-Original-URL": request.url, + "X-Forwarded-Host": url.hostname, + "X-Forwarded-Proto": url.protocol.replace(":", ""), + "X-Sandbox-Name": sandboxId, // Pass the friendly name }, body: request.body, }); return sandbox.containerFetch(proxyRequest, port); } catch (error) { - console.error('[Sandbox] Proxy routing error:', error); - return new Response('Proxy routing error', { status: 500 }); + console.error("[Sandbox] Proxy routing error:", error); + return new Response("Proxy routing error", { status: 500 }); } } function extractSandboxRoute(url: URL): RouteInfo | null { // Parse subdomain pattern: port-sandboxId.domain - const subdomainMatch = url.hostname.match(/^(\d{4,5})-([^.-][^.]*[^.-]|[^.-])\.(.+)$/); + const subdomainMatch = url.hostname.match( + /^(\d{4,5})-([^.-][^.]*[^.-]|[^.-])\.(.+)$/ + ); if (!subdomainMatch) { // Log malformed subdomain attempts - if (url.hostname.includes('-') && url.hostname.includes('.')) { - logSecurityEvent('MALFORMED_SUBDOMAIN_ATTEMPT', { - hostname: url.hostname, - url: url.toString() - }, 'medium'); + if (url.hostname.includes("-") && url.hostname.includes(".")) { + logSecurityEvent( + "MALFORMED_SUBDOMAIN_ATTEMPT", + { + hostname: url.hostname, + url: url.toString(), + }, + "medium" + ); } return null; } @@ -82,13 +84,17 @@ function extractSandboxRoute(url: URL): RouteInfo | null { const port = parseInt(portStr, 10); if (!validatePort(port)) { - logSecurityEvent('INVALID_PORT_IN_SUBDOMAIN', { - port, - portStr, - sandboxId, - hostname: url.hostname, - url: url.toString() - }, 'high'); + logSecurityEvent( + "INVALID_PORT_IN_SUBDOMAIN", + { + port, + portStr, + sandboxId, + hostname: url.hostname, + url: url.toString(), + }, + "high" + ); return null; } @@ -96,34 +102,46 @@ function extractSandboxRoute(url: URL): RouteInfo | null { try { sanitizedSandboxId = sanitizeSandboxId(sandboxId); } catch (error) { - logSecurityEvent('INVALID_SANDBOX_ID_IN_SUBDOMAIN', { - sandboxId, - port, - hostname: url.hostname, - url: url.toString(), - error: error instanceof Error ? 
error.message : 'Unknown error' - }, 'high'); + logSecurityEvent( + "INVALID_SANDBOX_ID_IN_SUBDOMAIN", + { + sandboxId, + port, + hostname: url.hostname, + url: url.toString(), + error: error instanceof Error ? error.message : "Unknown error", + }, + "high" + ); return null; } // DNS subdomain length limit is 63 characters if (sandboxId.length > 63) { - logSecurityEvent('SANDBOX_ID_LENGTH_VIOLATION', { - sandboxId, - length: sandboxId.length, - port, - hostname: url.hostname - }, 'medium'); + logSecurityEvent( + "SANDBOX_ID_LENGTH_VIOLATION", + { + sandboxId, + length: sandboxId.length, + port, + hostname: url.hostname, + }, + "medium" + ); return null; } - logSecurityEvent('SANDBOX_ROUTE_EXTRACTED', { - port, - sandboxId: sanitizedSandboxId, - domain, - path: url.pathname || "/", - hostname: url.hostname - }, 'low'); + logSecurityEvent( + "SANDBOX_ROUTE_EXTRACTED", + { + port, + sandboxId: sanitizedSandboxId, + domain, + path: url.pathname || "/", + hostname: url.hostname, + }, + "low" + ); return { port, diff --git a/packages/sandbox/src/sandbox.ts b/packages/sandbox/src/sandbox.ts index fd28331..2ad27b5 100644 --- a/packages/sandbox/src/sandbox.ts +++ b/packages/sandbox/src/sandbox.ts @@ -5,7 +5,7 @@ import { logSecurityEvent, SecurityError, sanitizeSandboxId, - validatePort + validatePort, } from "./security"; import type { ExecOptions, @@ -14,12 +14,9 @@ import type { Process, ProcessOptions, ProcessStatus, - StreamOptions -} from "./types"; -import { - ProcessNotFoundError, - SandboxError + StreamOptions, } from "./types"; +import { ProcessNotFoundError, SandboxError } from "./types"; export function getSandbox(ns: DurableObjectNamespace, id: string) { const stub = getContainer(ns, id); @@ -45,9 +42,7 @@ export class Sandbox extends Container implements ISandbox { ); }, onCommandStart: (command) => { - console.log( - `[Container] Command started: ${command}` - ); + console.log(`[Container] Command started: ${command}`); }, onError: (error, _command) => { console.error(`[Container] Command error: ${error}`); @@ -61,7 +56,8 @@ export class Sandbox extends Container implements ISandbox { // Load the sandbox name from storage on initialization this.ctx.blockConcurrencyWhile(async () => { - this.sandboxName = await this.ctx.storage.get('sandboxName') || null; + this.sandboxName = + (await this.ctx.storage.get("sandboxName")) || null; }); } @@ -69,7 +65,7 @@ export class Sandbox extends Container implements ISandbox { async setSandboxName(name: string): Promise { if (!this.sandboxName) { this.sandboxName = name; - await this.ctx.storage.put('sandboxName', name); + await this.ctx.storage.put("sandboxName", name); console.log(`[Sandbox] Stored sandbox name via RPC: ${name}`); } } @@ -100,10 +96,10 @@ export class Sandbox extends Container implements ISandbox { const url = new URL(request.url); // Capture and store the sandbox name from the header if present - if (!this.sandboxName && request.headers.has('X-Sandbox-Name')) { - const name = request.headers.get('X-Sandbox-Name')!; + if (!this.sandboxName && request.headers.has("X-Sandbox-Name")) { + const name = request.headers.get("X-Sandbox-Name")!; this.sandboxName = name; - await this.ctx.storage.put('sandboxName', name); + await this.ctx.storage.put("sandboxName", name); console.log(`[Sandbox] Stored sandbox name: ${this.sandboxName}`); } @@ -138,23 +134,29 @@ export class Sandbox extends Container implements ISandbox { try { // Handle cancellation if (options?.signal?.aborted) { - throw new Error('Operation was aborted'); + throw new 
Error("Operation was aborted"); } let result: ExecResult; if (options?.stream && options?.onOutput) { // Streaming with callbacks - we need to collect the final result - result = await this.executeWithStreaming(command, options, startTime, timestamp); - } else { - // Regular execution - const response = await this.client.execute( + result = await this.executeWithStreaming( command, - options?.sessionId + options, + startTime, + timestamp ); + } else { + // Regular execution + const response = await this.client.execute(command, options?.sessionId); const duration = Date.now() - startTime; - result = this.mapExecuteResponseToExecResult(response, duration, options?.sessionId); + result = this.mapExecuteResponseToExecResult( + response, + duration, + options?.sessionId + ); } // Call completion callback if provided @@ -181,26 +183,31 @@ export class Sandbox extends Container implements ISandbox { startTime: number, timestamp: string ): Promise { - let stdout = ''; - let stderr = ''; + let stdout = ""; + let stderr = ""; try { - const stream = await this.client.executeCommandStream(command, options.sessionId); - const { parseSSEStream } = await import('./sse-parser'); + const stream = await this.client.executeCommandStream( + command, + options.sessionId + ); + const { parseSSEStream } = await import("./sse-parser"); - for await (const event of parseSSEStream(stream)) { + for await (const event of parseSSEStream( + stream + )) { // Check for cancellation if (options.signal?.aborted) { - throw new Error('Operation was aborted'); + throw new Error("Operation was aborted"); } switch (event.type) { - case 'stdout': - case 'stderr': + case "stdout": + case "stderr": if (event.data) { // Update accumulated output - if (event.type === 'stdout') stdout += event.data; - if (event.type === 'stderr') stderr += event.data; + if (event.type === "stdout") stdout += event.data; + if (event.type === "stderr") stderr += event.data; // Call user's callback if (options.onOutput) { @@ -209,39 +216,40 @@ export class Sandbox extends Container implements ISandbox { } break; - case 'complete': { + case "complete": { // Use result from complete event if available const duration = Date.now() - startTime; - return event.result || { - success: event.exitCode === 0, - exitCode: event.exitCode || 0, - stdout, - stderr, - command, - duration, - timestamp, - sessionId: options.sessionId - }; + return ( + event.result || { + success: event.exitCode === 0, + exitCode: event.exitCode || 0, + stdout, + stderr, + command, + duration, + timestamp, + sessionId: options.sessionId, + } + ); } - case 'error': - throw new Error(event.error || 'Command execution failed'); + case "error": + throw new Error(event.error || "Command execution failed"); } } // If we get here without a complete event, something went wrong - throw new Error('Stream ended without completion event'); - + throw new Error("Stream ended without completion event"); } catch (error) { if (options.signal?.aborted) { - throw new Error('Operation was aborted'); + throw new Error("Operation was aborted"); } throw error; } } private mapExecuteResponseToExecResult( - response: import('./client').ExecuteResponse, + response: import("./client").ExecuteResponse, duration: number, sessionId?: string ): ExecResult { @@ -253,13 +261,15 @@ export class Sandbox extends Container implements ISandbox { command: response.command, duration, timestamp: response.timestamp, - sessionId + sessionId, }; } - // Background process management - async startProcess(command: string, options?: 
ProcessOptions): Promise<Process> {
+  async startProcess(
+    command: string,
+    options?: ProcessOptions
+  ): Promise<Process> {
     // Use the new HttpClient method to start the process
     try {
       const response = await this.client.startProcess(command, {
@@ -269,7 +279,7 @@ export class Sandbox extends Container implements ISandbox {
         env: options?.env,
         cwd: options?.cwd,
         encoding: options?.encoding,
-        autoCleanup: options?.autoCleanup
+        autoCleanup: options?.autoCleanup,
       });
       const process = response.process;
@@ -284,14 +294,14 @@ export class Sandbox extends Container implements ISandbox {
         sessionId: process.sessionId,
         async kill(): Promise<void> {
-          throw new Error('Method will be replaced');
+          throw new Error("Method will be replaced");
         },
         async getStatus(): Promise<ProcessStatus> {
-          throw new Error('Method will be replaced');
+          throw new Error("Method will be replaced");
         },
         async getLogs(): Promise<{ stdout: string; stderr: string }> {
-          throw new Error('Method will be replaced');
-        }
+          throw new Error("Method will be replaced");
+        },
       };
       // Bind context properly
@@ -301,7 +311,7 @@ export class Sandbox extends Container implements ISandbox {
       processObj.getStatus = async () => {
         const current = await this.getProcess(process.id);
-        return current?.status || 'error';
+        return current?.status || "error";
       };
       processObj.getLogs = async () => {
@@ -315,7 +325,6 @@ export class Sandbox extends Container implements ISandbox {
       }
       return processObj;
-
     } catch (error) {
       if (options?.onError && error instanceof Error) {
         options.onError(error);
@@ -328,7 +337,7 @@ export class Sandbox extends Container implements ISandbox {
   async listProcesses(): Promise<Process[]> {
     const response = await this.client.listProcesses();
-    return response.processes.map(processData => ({
+    return response.processes.map((processData) => ({
       id: processData.id,
       pid: processData.pid,
       command: processData.command,
@@ -344,13 +353,13 @@ export class Sandbox extends Container implements ISandbox {
       getStatus: async () => {
         const current = await this.getProcess(processData.id);
-        return current?.status || 'error';
+        return current?.status || "error";
       },
       getLogs: async () => {
         const logs = await this.getProcessLogs(processData.id);
         return { stdout: logs.stdout, stderr: logs.stderr };
-      }
+      },
     }));
   }
@@ -377,13 +386,13 @@ export class Sandbox extends Container implements ISandbox {
       getStatus: async () => {
         const current = await this.getProcess(processData.id);
-        return current?.status || 'error';
+        return current?.status || "error";
       },
       getLogs: async () => {
         const logs = await this.getProcessLogs(processData.id);
         return { stdout: logs.stdout, stderr: logs.stderr };
-      }
+      },
     };
   }
@@ -392,12 +401,17 @@ export class Sandbox extends Container implements ISandbox {
       // Note: signal parameter is not currently supported by the HttpClient implementation
       await this.client.killProcess(id);
     } catch (error) {
-      if (error instanceof Error && error.message.includes('Process not found')) {
+      if (
+        error instanceof Error &&
+        error.message.includes("Process not found")
+      ) {
        throw new ProcessNotFoundError(id);
      }
      throw new SandboxError(
-        `Failed to kill process ${id}: ${error instanceof Error ? 
error.message : "Unknown error" + }`, + "KILL_PROCESS_FAILED" ); } } @@ -414,40 +428,53 @@ export class Sandbox extends Container implements ISandbox { return 0; } - async getProcessLogs(id: string): Promise<{ stdout: string; stderr: string }> { + async getProcessLogs( + id: string + ): Promise<{ stdout: string; stderr: string }> { try { const response = await this.client.getProcessLogs(id); return { stdout: response.stdout, - stderr: response.stderr + stderr: response.stderr, }; } catch (error) { - if (error instanceof Error && error.message.includes('Process not found')) { + if ( + error instanceof Error && + error.message.includes("Process not found") + ) { throw new ProcessNotFoundError(id); } throw error; } } - // Streaming methods - return ReadableStream for RPC compatibility - async execStream(command: string, options?: StreamOptions): Promise> { + async execStream( + command: string, + options?: StreamOptions + ): Promise> { // Check for cancellation if (options?.signal?.aborted) { - throw new Error('Operation was aborted'); + throw new Error("Operation was aborted"); } // Get the stream from HttpClient (need to add this method) - const stream = await this.client.executeCommandStream(command, options?.sessionId); + const stream = await this.client.executeCommandStream( + command, + options?.sessionId + ); // Return the ReadableStream directly - can be converted to AsyncIterable by consumers return stream; } - async streamProcessLogs(processId: string, options?: { signal?: AbortSignal }): Promise> { + async streamProcessLogs( + processId: string, + options?: { signal?: AbortSignal } + ): Promise> { // Check for cancellation if (options?.signal?.aborted) { - throw new Error('Operation was aborted'); + throw new Error("Operation was aborted"); } // Get the stream from HttpClient @@ -464,10 +491,7 @@ export class Sandbox extends Container implements ISandbox { return this.client.gitCheckout(repoUrl, options.branch, options.targetDir); } - async mkdir( - path: string, - options: { recursive?: boolean } = {} - ) { + async mkdir(path: string, options: { recursive?: boolean } = {}) { return this.client.mkdir(path, options.recursive); } @@ -483,24 +507,15 @@ export class Sandbox extends Container implements ISandbox { return this.client.deleteFile(path); } - async renameFile( - oldPath: string, - newPath: string - ) { + async renameFile(oldPath: string, newPath: string) { return this.client.renameFile(oldPath, newPath); } - async moveFile( - sourcePath: string, - destinationPath: string - ) { + async moveFile(sourcePath: string, destinationPath: string) { return this.client.moveFile(sourcePath, destinationPath); } - async readFile( - path: string, - options: { encoding?: string } = {} - ) { + async readFile(path: string, options: { encoding?: string } = {}) { return this.client.readFile(path, options.encoding); } @@ -509,10 +524,16 @@ export class Sandbox extends Container implements ISandbox { // We need the sandbox name to construct preview URLs if (!this.sandboxName) { - throw new Error('Sandbox name not available. Ensure sandbox is accessed through getSandbox()'); + throw new Error( + "Sandbox name not available. 
Ensure sandbox is accessed through getSandbox()" + ); } - const url = this.constructPreviewUrl(port, this.sandboxName, options.hostname); + const url = this.constructPreviewUrl( + port, + this.sandboxName, + options.hostname + ); return { url, @@ -523,17 +544,27 @@ export class Sandbox extends Container implements ISandbox { async unexposePort(port: number) { if (!validatePort(port)) { - logSecurityEvent('INVALID_PORT_UNEXPOSE', { - port - }, 'high'); - throw new SecurityError(`Invalid port number: ${port}. Must be between 1024-65535 and not reserved.`); + logSecurityEvent( + "INVALID_PORT_UNEXPOSE", + { + port, + }, + "high" + ); + throw new SecurityError( + `Invalid port number: ${port}. Must be between 1024-65535 and not reserved.` + ); } await this.client.unexposePort(port); - logSecurityEvent('PORT_UNEXPOSED', { - port - }, 'low'); + logSecurityEvent( + "PORT_UNEXPOSED", + { + port, + }, + "low" + ); } async getExposedPorts(hostname: string) { @@ -541,10 +572,12 @@ export class Sandbox extends Container implements ISandbox { // We need the sandbox name to construct preview URLs if (!this.sandboxName) { - throw new Error('Sandbox name not available. Ensure sandbox is accessed through getSandbox()'); + throw new Error( + "Sandbox name not available. Ensure sandbox is accessed through getSandbox()" + ); } - return response.ports.map(port => ({ + return response.ports.map((port) => ({ url: this.constructPreviewUrl(port.port, this.sandboxName!, hostname), port: port.port, name: port.name, @@ -552,27 +585,40 @@ export class Sandbox extends Container implements ISandbox { })); } - - private constructPreviewUrl(port: number, sandboxId: string, hostname: string): string { + private constructPreviewUrl( + port: number, + sandboxId: string, + hostname: string + ): string { if (!validatePort(port)) { - logSecurityEvent('INVALID_PORT_REJECTED', { - port, - sandboxId, - hostname - }, 'high'); - throw new SecurityError(`Invalid port number: ${port}. Must be between 1024-65535 and not reserved.`); + logSecurityEvent( + "INVALID_PORT_REJECTED", + { + port, + sandboxId, + hostname, + }, + "high" + ); + throw new SecurityError( + `Invalid port number: ${port}. Must be between 1024-65535 and not reserved.` + ); } let sanitizedSandboxId: string; try { sanitizedSandboxId = sanitizeSandboxId(sandboxId); } catch (error) { - logSecurityEvent('INVALID_SANDBOX_ID_REJECTED', { - sandboxId, - port, - hostname, - error: error instanceof Error ? error.message : 'Unknown error' - }, 'high'); + logSecurityEvent( + "INVALID_SANDBOX_ID_REJECTED", + { + sandboxId, + port, + hostname, + error: error instanceof Error ? 
error.message : "Unknown error", + }, + "high" + ); throw error; } @@ -580,8 +626,8 @@ export class Sandbox extends Container implements ISandbox { if (isLocalhost) { // Unified subdomain approach for localhost (RFC 6761) - const [host, portStr] = hostname.split(':'); - const mainPort = portStr || '80'; + const [host, portStr] = hostname.split(":"); + const mainPort = portStr || "80"; // Use URL constructor for safe URL building try { @@ -592,23 +638,35 @@ export class Sandbox extends Container implements ISandbox { const finalUrl = baseUrl.toString(); - logSecurityEvent('PREVIEW_URL_CONSTRUCTED', { - port, - sandboxId: sanitizedSandboxId, - hostname, - resultUrl: finalUrl, - environment: 'localhost' - }, 'low'); + logSecurityEvent( + "PREVIEW_URL_CONSTRUCTED", + { + port, + sandboxId: sanitizedSandboxId, + hostname, + resultUrl: finalUrl, + environment: "localhost", + }, + "low" + ); return finalUrl; } catch (error) { - logSecurityEvent('URL_CONSTRUCTION_FAILED', { - port, - sandboxId: sanitizedSandboxId, - hostname, - error: error instanceof Error ? error.message : 'Unknown error' - }, 'high'); - throw new SecurityError(`Failed to construct preview URL: ${error instanceof Error ? error.message : 'Unknown error'}`); + logSecurityEvent( + "URL_CONSTRUCTION_FAILED", + { + port, + sandboxId: sanitizedSandboxId, + hostname, + error: error instanceof Error ? error.message : "Unknown error", + }, + "high" + ); + throw new SecurityError( + `Failed to construct preview URL: ${ + error instanceof Error ? error.message : "Unknown error" + }` + ); } } @@ -624,23 +682,35 @@ export class Sandbox extends Container implements ISandbox { const finalUrl = baseUrl.toString(); - logSecurityEvent('PREVIEW_URL_CONSTRUCTED', { - port, - sandboxId: sanitizedSandboxId, - hostname, - resultUrl: finalUrl, - environment: 'production' - }, 'low'); + logSecurityEvent( + "PREVIEW_URL_CONSTRUCTED", + { + port, + sandboxId: sanitizedSandboxId, + hostname, + resultUrl: finalUrl, + environment: "production", + }, + "low" + ); return finalUrl; } catch (error) { - logSecurityEvent('URL_CONSTRUCTION_FAILED', { - port, - sandboxId: sanitizedSandboxId, - hostname, - error: error instanceof Error ? error.message : 'Unknown error' - }, 'high'); - throw new SecurityError(`Failed to construct preview URL: ${error instanceof Error ? error.message : 'Unknown error'}`); + logSecurityEvent( + "URL_CONSTRUCTION_FAILED", + { + port, + sandboxId: sanitizedSandboxId, + hostname, + error: error instanceof Error ? error.message : "Unknown error", + }, + "high" + ); + throw new SecurityError( + `Failed to construct preview URL: ${ + error instanceof Error ? 
error.message : "Unknown error" + }` + ); } } } diff --git a/packages/sandbox/src/security.ts b/packages/sandbox/src/security.ts index 3280a9c..a603854 100644 --- a/packages/sandbox/src/security.ts +++ b/packages/sandbox/src/security.ts @@ -12,7 +12,7 @@ export class SecurityError extends Error { constructor(message: string, public readonly code?: string) { super(message); - this.name = 'SecurityError'; + this.name = "SecurityError"; } } @@ -52,62 +52,75 @@ export function sanitizeSandboxId(id: string): string { // Basic validation: not empty, reasonable length limit (DNS subdomain limit is 63 chars) if (!id || id.length > 63) { throw new SecurityError( - 'Sandbox ID must be 1-63 characters long.', - 'INVALID_SANDBOX_ID_LENGTH' + "Sandbox ID must be 1-63 characters long.", + "INVALID_SANDBOX_ID_LENGTH" ); } // DNS compliance: cannot start or end with hyphens (RFC requirement) - if (id.startsWith('-') || id.endsWith('-')) { + if (id.startsWith("-") || id.endsWith("-")) { throw new SecurityError( - 'Sandbox ID cannot start or end with hyphens (DNS requirement).', - 'INVALID_SANDBOX_ID_HYPHENS' + "Sandbox ID cannot start or end with hyphens (DNS requirement).", + "INVALID_SANDBOX_ID_HYPHENS" ); } // Prevent reserved names that cause technical conflicts const reservedNames = [ - 'www', 'api', 'admin', 'root', 'system', - 'cloudflare', 'workers' + "www", + "api", + "admin", + "root", + "system", + "cloudflare", + "workers", ]; const lowerCaseId = id.toLowerCase(); if (reservedNames.includes(lowerCaseId)) { throw new SecurityError( `Reserved sandbox ID '${id}' is not allowed.`, - 'RESERVED_SANDBOX_ID' + "RESERVED_SANDBOX_ID" ); } return id; } - /** * Logs security events for monitoring */ export function logSecurityEvent( event: string, details: Record, - severity: 'low' | 'medium' | 'high' | 'critical' = 'medium' + severity: "low" | "medium" | "high" | "critical" = "medium" ): void { const logEntry = { timestamp: new Date().toISOString(), event, severity, - ...details + ...details, }; switch (severity) { - case 'critical': - case 'high': - console.error(`[SECURITY:${severity.toUpperCase()}] ${event}:`, JSON.stringify(logEntry)); + case "critical": + case "high": + console.error( + `[SECURITY:${severity.toUpperCase()}] ${event}:`, + JSON.stringify(logEntry) + ); break; - case 'medium': - console.warn(`[SECURITY:${severity.toUpperCase()}] ${event}:`, JSON.stringify(logEntry)); + case "medium": + console.warn( + `[SECURITY:${severity.toUpperCase()}] ${event}:`, + JSON.stringify(logEntry) + ); break; - case 'low': - console.info(`[SECURITY:${severity.toUpperCase()}] ${event}:`, JSON.stringify(logEntry)); + case "low": + console.info( + `[SECURITY:${severity.toUpperCase()}] ${event}:`, + JSON.stringify(logEntry) + ); break; } } diff --git a/packages/sandbox/src/sse-parser.ts b/packages/sandbox/src/sse-parser.ts index 7718639..5716a3b 100644 --- a/packages/sandbox/src/sse-parser.ts +++ b/packages/sandbox/src/sse-parser.ts @@ -14,13 +14,13 @@ export async function* parseSSEStream( ): AsyncIterable { const reader = stream.getReader(); const decoder = new TextDecoder(); - let buffer = ''; + let buffer = ""; try { while (true) { // Check for cancellation if (signal?.aborted) { - throw new Error('Operation was aborted'); + throw new Error("Operation was aborted"); } const { done, value } = await reader.read(); @@ -30,28 +30,28 @@ export async function* parseSSEStream( buffer += decoder.decode(value, { stream: true }); // Process complete SSE events in buffer - const lines = buffer.split('\n'); + const 
lines = buffer.split("\n"); // Keep the last incomplete line in buffer - buffer = lines.pop() || ''; + buffer = lines.pop() || ""; for (const line of lines) { // Skip empty lines - if (line.trim() === '') continue; + if (line.trim() === "") continue; // Process SSE data lines - if (line.startsWith('data: ')) { + if (line.startsWith("data: ")) { const data = line.substring(6); // Skip [DONE] markers or empty data - if (data === '[DONE]' || data.trim() === '') continue; + if (data === "[DONE]" || data.trim() === "") continue; try { const event = JSON.parse(data) as T; yield event; } catch (error) { // Log parsing errors but continue processing - console.error('Failed to parse SSE event:', data, error); + console.error("Failed to parse SSE event:", data, error); // Optionally yield an error event // yield { type: 'error', data: `Parse error: ${error.message}` } as T; } @@ -62,14 +62,14 @@ export async function* parseSSEStream( } // Process any remaining data in buffer - if (buffer.trim() && buffer.startsWith('data: ')) { + if (buffer.trim() && buffer.startsWith("data: ")) { const data = buffer.substring(6); - if (data !== '[DONE]' && data.trim()) { + if (data !== "[DONE]" && data.trim()) { try { const event = JSON.parse(data) as T; yield event; } catch (error) { - console.error('Failed to parse final SSE event:', data, error); + console.error("Failed to parse final SSE event:", data, error); } } } @@ -79,7 +79,6 @@ export async function* parseSSEStream( } } - /** * Helper to convert a Response with SSE stream directly to AsyncIterable * @param response - Response object with SSE stream @@ -90,11 +89,13 @@ export async function* responseToAsyncIterable( signal?: AbortSignal ): AsyncIterable { if (!response.ok) { - throw new Error(`Response not ok: ${response.status} ${response.statusText}`); + throw new Error( + `Response not ok: ${response.status} ${response.statusText}` + ); } if (!response.body) { - throw new Error('No response body'); + throw new Error("No response body"); } yield* parseSSEStream(response.body, signal); @@ -121,7 +122,7 @@ export function asyncIterableToSSEStream( try { for await (const event of events) { if (options?.signal?.aborted) { - controller.error(new Error('Operation was aborted')); + controller.error(new Error("Operation was aborted")); break; } @@ -131,7 +132,7 @@ export function asyncIterableToSSEStream( } // Send completion marker - controller.enqueue(encoder.encode('data: [DONE]\n\n')); + controller.enqueue(encoder.encode("data: [DONE]\n\n")); } catch (error) { controller.error(error); } finally { @@ -141,7 +142,7 @@ export function asyncIterableToSSEStream( cancel() { // Handle stream cancellation - console.log('SSE stream cancelled'); - } + console.log("SSE stream cancelled"); + }, }); -} \ No newline at end of file +} diff --git a/packages/sandbox/src/types.ts b/packages/sandbox/src/types.ts index 80e7a40..294d774 100644 --- a/packages/sandbox/src/types.ts +++ b/packages/sandbox/src/types.ts @@ -36,7 +36,7 @@ export interface ExecOptions extends BaseExecOptions { /** * Callback for real-time output data */ - onOutput?: (stream: 'stdout' | 'stderr', data: string) => void; + onOutput?: (stream: "stdout" | "stderr", data: string) => void; /** * Callback when command completes (only when stream: true) @@ -80,7 +80,6 @@ export interface ExecResult { */ command: string; - /** * Execution duration in milliseconds */ @@ -119,7 +118,7 @@ export interface ProcessOptions extends BaseExecOptions { /** * Callback for real-time output (background processes) */ - 
onOutput?: (stream: 'stdout' | 'stderr', data: string) => void; + onOutput?: (stream: "stdout" | "stderr", data: string) => void; /** * Callback when process starts successfully @@ -133,12 +132,12 @@ export interface ProcessOptions extends BaseExecOptions { } export type ProcessStatus = - | 'starting' // Process is being initialized - | 'running' // Process is actively running - | 'completed' // Process exited successfully (code 0) - | 'failed' // Process exited with non-zero code - | 'killed' // Process was terminated by signal - | 'error'; // Process failed to start or encountered error + | "starting" // Process is being initialized + | "running" // Process is actively running + | "completed" // Process exited successfully (code 0) + | "failed" // Process exited with non-zero code + | "killed" // Process was terminated by signal + | "error"; // Process failed to start or encountered error export interface Process { /** @@ -156,7 +155,6 @@ export interface Process { */ readonly command: string; - /** * Current process status */ @@ -201,7 +199,7 @@ export interface Process { // Streaming Types export interface ExecEvent { - type: 'start' | 'stdout' | 'stderr' | 'complete' | 'error'; + type: "start" | "stdout" | "stderr" | "complete" | "error"; timestamp: string; data?: string; command?: string; @@ -212,7 +210,7 @@ export interface ExecEvent { } export interface LogEvent { - type: 'stdout' | 'stderr' | 'exit' | 'error'; + type: "stdout" | "stderr" | "exit" | "error"; timestamp: string; data: string; processId: string; @@ -237,28 +235,28 @@ export interface StreamOptions extends BaseExecOptions { export class SandboxError extends Error { constructor(message: string, public code?: string) { super(message); - this.name = 'SandboxError'; + this.name = "SandboxError"; } } export class ProcessNotFoundError extends SandboxError { constructor(processId: string) { - super(`Process not found: ${processId}`, 'PROCESS_NOT_FOUND'); - this.name = 'ProcessNotFoundError'; + super(`Process not found: ${processId}`, "PROCESS_NOT_FOUND"); + this.name = "ProcessNotFoundError"; } } export class ProcessAlreadyExistsError extends SandboxError { constructor(processId: string) { - super(`Process already exists: ${processId}`, 'PROCESS_EXISTS'); - this.name = 'ProcessAlreadyExistsError'; + super(`Process already exists: ${processId}`, "PROCESS_EXISTS"); + this.name = "ProcessAlreadyExistsError"; } } export class ExecutionTimeoutError extends SandboxError { constructor(timeout: number) { - super(`Execution timed out after ${timeout}ms`, 'EXECUTION_TIMEOUT'); - this.name = 'ExecutionTimeoutError'; + super(`Execution timed out after ${timeout}ms`, "EXECUTION_TIMEOUT"); + this.name = "ExecutionTimeoutError"; } } @@ -275,12 +273,12 @@ export interface ProcessRecord { sessionId?: string; // Internal fields - childProcess?: any; // Node.js ChildProcess - stdout: string; // Accumulated output (ephemeral) - stderr: string; // Accumulated output (ephemeral) + childProcess?: any; // Node.js ChildProcess + stdout: string; // Accumulated output (ephemeral) + stderr: string; // Accumulated output (ephemeral) // Streaming - outputListeners: Set<(stream: 'stdout' | 'stderr', data: string) => void>; + outputListeners: Set<(stream: "stdout" | "stderr", data: string) => void>; statusListeners: Set<(status: ProcessStatus) => void>; } @@ -304,7 +302,7 @@ export interface StartProcessResponse { id: string; pid?: number; command: string; - status: ProcessStatus; + status: ProcessStatus; startTime: string; sessionId?: string; }; @@ -315,7 
+313,7 @@ export interface ListProcessesResponse { id: string; pid?: number; command: string; - status: ProcessStatus; + status: ProcessStatus; startTime: string; endTime?: string; exitCode?: number; @@ -328,7 +326,7 @@ export interface GetProcessResponse { id: string; pid?: number; command: string; - status: ProcessStatus; + status: ProcessStatus; startTime: string; endTime?: string; exitCode?: number; @@ -356,8 +354,14 @@ export interface ISandbox { killAllProcesses(): Promise; // Advanced streaming - returns ReadableStream that can be converted to AsyncIterable - execStream(command: string, options?: StreamOptions): Promise>; - streamProcessLogs(processId: string, options?: { signal?: AbortSignal }): Promise>; + execStream( + command: string, + options?: StreamOptions + ): Promise>; + streamProcessLogs( + processId: string, + options?: { signal?: AbortSignal } + ): Promise>; // Utility methods cleanupCompletedProcesses(): Promise; @@ -367,20 +371,31 @@ export interface ISandbox { // Type Guards export function isExecResult(value: any): value is ExecResult { - return value && - typeof value.success === 'boolean' && - typeof value.exitCode === 'number' && - typeof value.stdout === 'string' && - typeof value.stderr === 'string'; + return ( + value && + typeof value.success === "boolean" && + typeof value.exitCode === "number" && + typeof value.stdout === "string" && + typeof value.stderr === "string" + ); } export function isProcess(value: any): value is Process { - return value && - typeof value.id === 'string' && - typeof value.command === 'string' && - typeof value.status === 'string'; + return ( + value && + typeof value.id === "string" && + typeof value.command === "string" && + typeof value.status === "string" + ); } export function isProcessStatus(value: string): value is ProcessStatus { - return ['starting', 'running', 'completed', 'failed', 'killed', 'error'].includes(value); -} \ No newline at end of file + return [ + "starting", + "running", + "completed", + "failed", + "killed", + "error", + ].includes(value); +} diff --git a/scripts/fix-toc-links.js b/scripts/fix-toc-links.js index b174d35..986aeca 100644 --- a/scripts/fix-toc-links.js +++ b/scripts/fix-toc-links.js @@ -10,15 +10,15 @@ * still auto-generating the TOC structure with doctoc. */ -import fs from 'fs'; -import path from 'path'; +import fs from "fs"; +import path from "path"; -const readmePath = path.join(process.cwd(), 'README.md'); -let content = fs.readFileSync(readmePath, 'utf8'); +const readmePath = path.join(process.cwd(), "README.md"); +let content = fs.readFileSync(readmePath, "utf8"); // Fix doctoc's emoji handling: (#-word) should become (#word) // This matches the explicit IDs we set on HTML headings -content = content.replace(/\(#-([^)]+)\)/g, '(#$1)'); +content = content.replace(/\(#-([^)]+)\)/g, "(#$1)"); fs.writeFileSync(readmePath, content); -console.log('TOC links fixed!'); +console.log("TOC links fixed!");
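
---

Illustrative usage (not part of the diff above): a minimal Worker sketch showing how the exported helpers fit together — `getSandbox`, `proxyToSandbox`, `execStream`, and `parseSSEStream`. The binding name `Sandbox`, the sandbox id `"my-sandbox"`, the `parseSSEStream<ExecEvent>` type parameter, and the assumption that `proxyToSandbox(request, env)` resolves to a `Response` for preview-URL requests and `null` otherwise are illustrative, not taken from this changeset.

```ts
// Minimal sketch, assuming a Worker env with a `Sandbox` Durable Object binding
// (as in SandboxEnv) and a proxyToSandbox(request, env) call that returns null
// when the request is not a preview-URL request.
import {
  getSandbox,
  parseSSEStream,
  proxyToSandbox,
  type ExecEvent,
  type SandboxEnv,
} from "@cloudflare/sandbox";

export { Sandbox } from "@cloudflare/sandbox";

export default {
  async fetch(request: Request, env: SandboxEnv): Promise<Response> {
    // Route port-sandbox subdomains (e.g. 8080-my-sandbox.example.com) to the container.
    const proxied = await proxyToSandbox(request, env);
    if (proxied) return proxied;

    const sandbox = getSandbox(env.Sandbox, "my-sandbox");

    // execStream returns a ReadableStream of SSE data; parseSSEStream converts it
    // into an AsyncIterable of typed events.
    const stream = await sandbox.execStream("echo hello");

    let stdout = "";
    for await (const event of parseSSEStream<ExecEvent>(stream)) {
      if (event.type === "stdout" && event.data) stdout += event.data;
      if (event.type === "error") {
        return new Response(event.error ?? "command failed", { status: 500 });
      }
    }
    return new Response(stdout);
  },
};
```

The split between `execStream` (which returns a `ReadableStream` for RPC compatibility) and `parseSSEStream` (which turns that stream back into an `AsyncIterable`) mirrors the comments in the diff; consumers that prefer callbacks can instead rely on the streaming options handled inside `exec`.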