diff --git a/.github/workflows/claude-reviews.yml b/.github/workflows/claude-reviews.yml index f85694c1..3db866ad 100644 --- a/.github/workflows/claude-reviews.yml +++ b/.github/workflows/claude-reviews.yml @@ -8,6 +8,10 @@ on: pull_request_review_comment: types: [created] +concurrency: + group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} + cancel-in-progress: true + jobs: comprehensive-review: name: PR Description & Code Review @@ -30,6 +34,29 @@ jobs: steps: - name: Checkout repository uses: actions/checkout@v4 + + - name: Delete Previous Claude Comments + run: | + echo "🧹 Deleting previous Claude comments from github-actions bot..." + + # Get all comments from github-actions bot containing 'Claude' + CLAUDE_COMMENTS=$(gh api repos/${{ github.repository }}/issues/${{ github.event.pull_request.number }}/comments \ + --jq '[.[] | select(.user.login == "github-actions[bot]") | select(.body | contains("Claude")) | .id]') + + if [ "$CLAUDE_COMMENTS" = "[]" ] || [ -z "$CLAUDE_COMMENTS" ]; then + echo "No previous Claude comments found" + else + echo "Found Claude comments to delete:" + echo "$CLAUDE_COMMENTS" | jq -r '.[]' | while read comment_id; do + echo "Deleting comment $comment_id" + gh api repos/${{ github.repository }}/issues/comments/$comment_id -X DELETE || echo "Failed to delete comment $comment_id" + done + echo "✅ Deleted previous Claude comments" + fi + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + continue-on-error: true + - name: Detect Critical Paths id: critical_paths run: | @@ -147,41 +174,3 @@ jobs: --max-turns ${{ steps.critical_paths.outputs.is_critical == 'true' && '90' || '65' }} --model claude-sonnet-4-5-20250929 - - name: Intelligent Comment Cleanup - uses: anthropics/claude-code-action@v1 - if: always() - with: - github_token: ${{ secrets.GITHUB_TOKEN }} - anthropic_api_key: ${{ secrets.ANTHROPIC_API_KEY }} - prompt: | - Clean up stale bot comments on PR #${{ github.event.pull_request.number }}. - - **Task:** - 1. Fetch all comments on this PR - 2. Identify bot comments (users ending in [bot]) that are stale/outdated: - - Old reviews superseded by newer ones - - Old PR description suggestions - - Previously collapsed/outdated markers - - Progress/status comments from previous workflow runs - 3. Keep only the most recent comment per category per bot - 4. 
DELETE all stale comments (do not collapse) - - **Get all comments:** - ```bash - gh api repos/${{ github.repository }}/issues/${{ github.event.pull_request.number }}/comments --jq '.[] | {id, user: .user.login, body, created_at}' - ``` - - **Delete a comment:** - ```bash - gh api repos/${{ github.repository }}/issues/comments/COMMENT_ID -X DELETE - ``` - - Be intelligent: - - Preserve the newest useful comment in each category - - Delete everything else that's redundant or stale - - If unsure, keep the comment (conservative approach) - - claude_args: | - --allowed-tools "Bash(gh api repos/*/issues/*/comments:*),Bash(gh api repos/*/issues/comments/*:*)" - --max-turns 8 - --model claude-haiku-4-5-20251001 diff --git a/commitlint.config.js b/commitlint.config.js index 2f1da49f..522de8ef 100644 --- a/commitlint.config.js +++ b/commitlint.config.js @@ -22,10 +22,10 @@ export default { 'type-empty': [2, 'never'], 'subject-empty': [2, 'never'], 'subject-full-stop': [2, 'never', '.'], - 'header-max-length': [2, 'always', 100], + 'header-max-length': [2, 'always', 150], 'body-leading-blank': [1, 'always'], - 'body-max-line-length': [2, 'always', 100], + 'body-max-line-length': [2, 'always', 200], 'footer-leading-blank': [1, 'always'], - 'footer-max-line-length': [2, 'always', 100], + 'footer-max-line-length': [2, 'always', 200], }, }; diff --git a/src/api-types.ts b/src/api-types.ts index 2fc92e8d..cf4ae0f9 100644 --- a/src/api-types.ts +++ b/src/api-types.ts @@ -136,17 +136,24 @@ export type { } from 'worker/database/types'; // Agent/Generator Types -export type { +export type { Blueprint as BlueprintType, + PhasicBlueprint, CodeReviewOutputType, FileConceptType, FileOutputType as GeneratedFile, } from 'worker/agents/schemas'; -export type { - CodeGenState +export type { + AgentState, + PhasicState } from 'worker/agents/core/state'; +export type { + BehaviorType, + ProjectType +} from 'worker/agents/core/types'; + export type { ConversationMessage, } from 'worker/agents/inferutils/common'; @@ -168,7 +175,7 @@ export type { export type { RateLimitError } from "worker/services/rate-limit/errors"; export type { AgentPreviewResponse, CodeGenArgs } from 'worker/api/controllers/agent/types'; export type { RateLimitErrorResponse } from 'worker/api/responses'; -export { RateLimitExceededError, SecurityError, SecurityErrorType } from 'shared/types/errors'; +export { RateLimitExceededError, SecurityError, SecurityErrorType } from '../shared/types/errors.js'; export type { AIModels } from 'worker/agents/inferutils/config.types'; // Model selection types diff --git a/src/components/project-mode-selector.tsx b/src/components/project-mode-selector.tsx new file mode 100644 index 00000000..c09df5fa --- /dev/null +++ b/src/components/project-mode-selector.tsx @@ -0,0 +1,83 @@ +import { useState } from 'react'; + +export type ProjectMode = 'app' | 'presentation' | 'general'; + +interface ProjectModeSelectorProps { + value: ProjectMode; + onChange: (mode: ProjectMode) => void; + disabled?: boolean; + className?: string; +} + +export function ProjectModeSelector({ value, onChange, disabled = false, className = '' }: ProjectModeSelectorProps) { + const [hoveredMode, setHoveredMode] = useState(null); + + const modes = [ + { + id: 'app' as const, + label: 'App', + description: 'Full-stack applications', + }, + { + id: 'presentation' as const, + label: 'Slides', + description: 'Interactive presentations', + }, + { + id: 'general' as const, + label: 'Chat', + description: 'Conversational assistant', + }, + ]; + + return 
( +
+ {modes.map((mode, index) => { + const isSelected = value === mode.id; + const isHovered = hoveredMode === mode.id; + + return ( +
+ + + {/* Separator dot (except after last item) */} + {index < modes.length - 1 && ( +
+ )} +
+ ); + })} +
+ ); +} diff --git a/src/lib/api-client.ts b/src/lib/api-client.ts index a6ea0952..7b46ffa7 100644 --- a/src/lib/api-client.ts +++ b/src/lib/api-client.ts @@ -57,13 +57,13 @@ import type{ AgentPreviewResponse, PlatformStatusData, RateLimitError -} from '@/api-types'; +} from '../api-types.js'; import { - + RateLimitExceededError, SecurityError, SecurityErrorType, -} from '@/api-types'; +} from '../api-types.js'; import { toast } from 'sonner'; /** diff --git a/src/routes/chat/chat.tsx b/src/routes/chat/chat.tsx index a7dda364..0be83aad 100644 --- a/src/routes/chat/chat.tsx +++ b/src/routes/chat/chat.tsx @@ -21,13 +21,12 @@ import { ViewModeSwitch } from './components/view-mode-switch'; import { DebugPanel, type DebugMessage } from './components/debug-panel'; import { DeploymentControls } from './components/deployment-controls'; import { useChat, type FileType } from './hooks/use-chat'; -import { type ModelConfigsData, type BlueprintType, SUPPORTED_IMAGE_MIME_TYPES } from '@/api-types'; +import { type ModelConfigsData, type BlueprintType, type PhasicBlueprint, SUPPORTED_IMAGE_MIME_TYPES, ProjectType } from '@/api-types'; import { Copy } from './components/copy'; import { useFileContentStream } from './hooks/use-file-content-stream'; import { logger } from '@/utils/logger'; import { useApp } from '@/hooks/use-app'; import { useAuth } from '@/contexts/auth-context'; -import { AgentModeDisplay } from '@/components/agent-mode-display'; import { useGitHubExport } from '@/hooks/use-github-export'; import { GitHubExportModal } from '@/components/github-export-modal'; import { GitCloneModal } from '@/components/shared/GitCloneModal'; @@ -42,13 +41,16 @@ import { DropdownMenu, DropdownMenuContent, DropdownMenuItem, DropdownMenuTrigge import { AlertDialog, AlertDialogAction, AlertDialogCancel, AlertDialogContent, AlertDialogDescription, AlertDialogFooter, AlertDialogHeader, AlertDialogTitle } from '@/components/ui/alert-dialog'; import { sendWebSocketMessage } from './utils/websocket-helpers'; +const isPhasicBlueprint = (blueprint?: BlueprintType | null): blueprint is PhasicBlueprint => + !!blueprint && 'implementationRoadmap' in blueprint; + export default function Chat() { const { chatId: urlChatId } = useParams(); const [searchParams] = useSearchParams(); const userQuery = searchParams.get('query'); - const agentMode = searchParams.get('agentMode') || 'deterministic'; - + const projectType = searchParams.get('projectType') || 'app'; + // Extract images from URL params if present const userImages = useMemo(() => { const imagesParam = searchParams.get('images'); @@ -139,11 +141,13 @@ export default function Chat() { runtimeErrorCount, staticIssueCount, isDebugging, + // Behavior type from backend + behaviorType, } = useChat({ chatId: urlChatId, query: userQuery, images: userImages, - agentMode: agentMode as 'deterministic' | 'smart', + projectType: projectType as ProjectType, onDebugMessage: addDebugMessage, }); @@ -337,13 +341,29 @@ export default function Chat() { return isPhase1Complete && !!urlChatId; }, [isPhase1Complete, urlChatId]); - const showMainView = useMemo( - () => - streamedBootstrapFiles.length > 0 || - !!blueprint || - files.length > 0, - [streamedBootstrapFiles, blueprint, files.length], - ); + // Detect if agentic mode is showing static content (docs, markdown) + const isStaticContent = useMemo(() => { + if (behaviorType !== 'agentic' || files.length === 0) return false; + + // Check if all files are static (markdown, text, or in docs/ directory) + return files.every(file => { + 
const path = file.filePath.toLowerCase(); + return path.endsWith('.md') || + path.endsWith('.mdx') || + path.endsWith('.txt') || + path.startsWith('docs/') || + path.includes('/docs/'); + }); + }, [behaviorType, files]); + + const showMainView = useMemo(() => { + // For agentic mode: only show preview panel when files or preview URL exist + if (behaviorType === 'agentic') { + return files.length > 0 || !!previewUrl; + } + // For phasic mode: keep existing logic + return streamedBootstrapFiles.length > 0 || !!blueprint || files.length > 0; + }, [behaviorType, isGeneratingBlueprint, blueprint, files.length, previewUrl, streamedBootstrapFiles.length]); const [mainMessage, ...otherMessages] = useMemo(() => messages, [messages]); @@ -360,14 +380,28 @@ export default function Chat() { }, [messages.length, scrollToBottom]); useEffect(() => { - if (previewUrl && !hasSeenPreview.current && isPhase1Complete) { - setView('preview'); - setShowTooltip(true); - setTimeout(() => { - setShowTooltip(false); - }, 3000); // Auto-hide tooltip after 3 seconds + // For static content in agentic mode, show editor view instead of preview + if (isStaticContent && files.length > 0 && !hasSeenPreview.current) { + setView('editor'); + // Auto-select first file if none selected + if (!activeFilePath) { + setActiveFilePath(files[0].filePath); + } + hasSeenPreview.current = true; + } else if (previewUrl && !hasSeenPreview.current) { + // Agentic: auto-switch immediately when preview URL available + // Phasic: require phase 1 complete + const shouldSwitch = behaviorType === 'agentic' || isPhase1Complete; + + if (shouldSwitch) { + setView('preview'); + setShowTooltip(true); + setTimeout(() => { + setShowTooltip(false); + }, 3000); + } } - }, [previewUrl, isPhase1Complete]); + }, [previewUrl, isPhase1Complete, isStaticContent, files, activeFilePath, behaviorType]); useEffect(() => { if (chatId) { @@ -492,11 +526,13 @@ export default function Chat() { const completedPhases = phaseTimeline.filter(p => p.status === 'completed').length; // Get predicted phase count from blueprint, fallback to current phase count - const predictedPhaseCount = blueprint?.implementationRoadmap?.length || 0; + const predictedPhaseCount = isPhasicBlueprint(blueprint) + ? blueprint.implementationRoadmap.length + : 0; const totalPhases = Math.max(predictedPhaseCount, phaseTimeline.length, 1); return [completedPhases, totalPhases]; - }, [phaseTimeline, blueprint?.implementationRoadmap]); + }, [phaseTimeline, blueprint]); if (import.meta.env.DEV) { logger.debug({ @@ -554,18 +590,6 @@ export default function Chat() { - {import.meta.env - .VITE_AGENT_MODE_ENABLED && ( -
- -
- )} )} @@ -627,34 +651,37 @@ export default function Chat() {
)} - { - setView(viewMode); - hasSwitchedFile.current = true; - }} - chatId={chatId} - isDeploying={isDeploying} - handleDeployToCloudflare={handleDeployToCloudflare} - runtimeErrorCount={runtimeErrorCount} - staticIssueCount={staticIssueCount} - isDebugging={isDebugging} - isGenerating={isGenerating} - isThinking={isThinking} - /> + {/* Only show PhaseTimeline for phasic mode */} + {behaviorType !== 'agentic' && ( + { + setView(viewMode); + hasSwitchedFile.current = true; + }} + chatId={chatId} + isDeploying={isDeploying} + handleDeployToCloudflare={handleDeployToCloudflare} + runtimeErrorCount={runtimeErrorCount} + staticIssueCount={staticIssueCount} + isDebugging={isDebugging} + isGenerating={isGenerating} + isThinking={isThinking} + /> + )} - {/* Deployment and Generation Controls */} - {chatId && ( + {/* Deployment and Generation Controls - Only for phasic mode */} + {chatId && behaviorType !== 'agentic' && ( ); -} \ No newline at end of file +} diff --git a/src/routes/chat/components/blueprint.tsx b/src/routes/chat/components/blueprint.tsx index 64d1c4e4..bf8bf40c 100644 --- a/src/routes/chat/components/blueprint.tsx +++ b/src/routes/chat/components/blueprint.tsx @@ -1,7 +1,10 @@ -import type { BlueprintType } from '@/api-types'; +import type { BlueprintType, PhasicBlueprint } from '@/api-types'; import clsx from 'clsx'; import { Markdown } from './messages'; +const isPhasicBlueprint = (blueprint: BlueprintType): blueprint is PhasicBlueprint => + 'views' in blueprint; + export function Blueprint({ blueprint, className, @@ -11,6 +14,8 @@ export function Blueprint({ }) { if (!blueprint) return null; + const phasicBlueprint = isPhasicBlueprint(blueprint) ? blueprint : null; + return (
@@ -84,13 +89,13 @@ export function Blueprint({
{/* Views */} - {Array.isArray(blueprint.views) && blueprint.views.length > 0 && ( + {phasicBlueprint && phasicBlueprint.views?.length > 0 && (

Views

- {blueprint.views.map((view, index) => ( + {phasicBlueprint.views?.map((view, index) => (

{view.name} @@ -105,41 +110,41 @@ export function Blueprint({ )} {/* User Flow */} - {blueprint.userFlow && ( + {phasicBlueprint?.userFlow && (

User Flow

- {blueprint.userFlow?.uiLayout && ( + {phasicBlueprint.userFlow.uiLayout && (

UI Layout

- {blueprint.userFlow.uiLayout} + {phasicBlueprint.userFlow.uiLayout}
)} - {blueprint.userFlow?.uiDesign && ( + {phasicBlueprint.userFlow.uiDesign && (

UI Design

- {blueprint.userFlow.uiDesign} + {phasicBlueprint.userFlow.uiDesign}
)} - {blueprint.userFlow?.userJourney && ( + {phasicBlueprint.userFlow.userJourney && (

User Journey

- {blueprint.userFlow?.userJourney} + {phasicBlueprint.userFlow.userJourney}
)} @@ -148,25 +153,25 @@ export function Blueprint({ )} {/* Data Flow */} - {(blueprint.dataFlow || blueprint.architecture?.dataFlow) && ( + {phasicBlueprint && (phasicBlueprint.dataFlow || phasicBlueprint.architecture?.dataFlow) && (

Data Flow

- {blueprint.dataFlow || blueprint.architecture?.dataFlow} + {phasicBlueprint.dataFlow || phasicBlueprint.architecture?.dataFlow}
)} {/* Implementation Roadmap */} - {Array.isArray(blueprint.implementationRoadmap) && blueprint.implementationRoadmap.length > 0 && ( + {phasicBlueprint && phasicBlueprint.implementationRoadmap?.length > 0 && (

Implementation Roadmap

- {blueprint.implementationRoadmap.map((roadmapItem, index) => ( + {phasicBlueprint.implementationRoadmap?.map((roadmapItem, index) => (

Phase {index + 1}: {roadmapItem.phase} @@ -181,7 +186,7 @@ export function Blueprint({ )} {/* Initial Phase */} - {blueprint.initialPhase && ( + {phasicBlueprint?.initialPhase && (

Initial Phase @@ -189,18 +194,18 @@ export function Blueprint({

- {blueprint.initialPhase.name} + {phasicBlueprint.initialPhase.name}

- {blueprint.initialPhase.description} + {phasicBlueprint.initialPhase.description} - {Array.isArray(blueprint.initialPhase.files) && blueprint.initialPhase.files.length > 0 && ( + {Array.isArray(phasicBlueprint.initialPhase.files) && phasicBlueprint.initialPhase.files.length > 0 && (
Files to be created:
- {blueprint.initialPhase.files.map((file, fileIndex) => ( + {phasicBlueprint.initialPhase.files.map((file, fileIndex) => (
{file.path}
{file.purpose}
@@ -215,14 +220,14 @@ export function Blueprint({ )} {/* Pitfalls */} - {Array.isArray(blueprint.pitfalls) && blueprint.pitfalls.length > 0 && ( + {phasicBlueprint && phasicBlueprint.pitfalls?.length > 0 && (

Pitfalls

    - {blueprint.pitfalls?.map((pitfall, index) => ( + {phasicBlueprint.pitfalls?.map((pitfall, index) => (
  • {pitfall}
  • diff --git a/src/routes/chat/hooks/use-chat.ts b/src/routes/chat/hooks/use-chat.ts index 05b254b6..b50a7ef3 100644 --- a/src/routes/chat/hooks/use-chat.ts +++ b/src/routes/chat/hooks/use-chat.ts @@ -6,7 +6,9 @@ import { type BlueprintType, type WebSocketMessage, type CodeFixEdits, - type ImageAttachment + type ImageAttachment, + type ProjectType, + type BehaviorType } from '@/api-types'; import { createRepairingJSONParser, @@ -52,17 +54,25 @@ export function useChat({ chatId: urlChatId, query: userQuery, images: userImages, - agentMode = 'deterministic', + projectType = 'app', onDebugMessage, onTerminalMessage, }: { chatId?: string; query: string | null; images?: ImageAttachment[]; - agentMode?: 'deterministic' | 'smart'; + projectType?: ProjectType; onDebugMessage?: (type: 'error' | 'warning' | 'info' | 'websocket', message: string, details?: string, source?: string, messageType?: string, rawMessage?: unknown) => void; onTerminalMessage?: (log: { id: string; content: string; type: 'command' | 'stdout' | 'stderr' | 'info' | 'error' | 'warn' | 'debug'; timestamp: number; source?: string }) => void; }) { + // Derive initial behavior type from project type + const getInitialBehaviorType = (): BehaviorType => { + if (projectType === 'presentation' || projectType === 'general') { + return 'agentic'; + } + return 'phasic'; + }; + const connectionStatus = useRef<'idle' | 'connecting' | 'connected' | 'failed' | 'retrying'>('idle'); const retryCount = useRef(0); const maxRetries = 5; @@ -80,6 +90,7 @@ export function useChat({ const [blueprint, setBlueprint] = useState(); const [previewUrl, setPreviewUrl] = useState(); const [query, setQuery] = useState(); + const [behaviorType, setBehaviorType] = useState(getInitialBehaviorType()); const [websocket, setWebsocket] = useState(); @@ -190,6 +201,7 @@ export function useChat({ setRuntimeErrorCount, setStaticIssueCount, setIsDebugging, + setBehaviorType, // Current state isInitialStateRestored, blueprint, @@ -201,6 +213,7 @@ export function useChat({ projectStages, isGenerating, urlChatId, + behaviorType, // Functions updateStage, sendMessage, @@ -219,6 +232,7 @@ export function useChat({ projectStages, isGenerating, urlChatId, + behaviorType, updateStage, sendMessage, loadBootstrapFiles, @@ -405,7 +419,7 @@ export function useChat({ // Start new code generation using API client const response = await apiClient.createAgentSession({ query: userQuery, - agentMode, + projectType, images: userImages, // Pass images from URL params for multi-modal blueprint }); @@ -414,12 +428,16 @@ export function useChat({ const result: { websocketUrl: string; agentId: string; + behaviorType: BehaviorType; + projectType: ProjectType; template: { files: FileType[]; }; } = { websocketUrl: '', agentId: '', + behaviorType: 'phasic', + projectType: 'app', template: { files: [], }, @@ -447,7 +465,7 @@ export function useChat({ } catch (e) { logger.error('Error parsing JSON:', e, obj.chunk); } - } + } if (obj.agentId) { result.agentId = obj.agentId; } @@ -455,6 +473,15 @@ export function useChat({ result.websocketUrl = obj.websocketUrl; logger.debug('πŸ“‘ Received WebSocket URL from server:', result.websocketUrl) } + if (obj.behaviorType) { + result.behaviorType = obj.behaviorType; + setBehaviorType(obj.behaviorType); + logger.debug('Received behaviorType from server:', obj.behaviorType); + } + if (obj.projectType) { + result.projectType = obj.projectType; + logger.debug('Received projectType from server:', obj.projectType); + } if (obj.template) { logger.debug('Received 
template from server:', obj.template); result.template = obj.template; @@ -658,5 +685,7 @@ export function useChat({ runtimeErrorCount, staticIssueCount, isDebugging, + // Behavior type from backend + behaviorType, }; } diff --git a/src/routes/chat/utils/handle-websocket-message.ts b/src/routes/chat/utils/handle-websocket-message.ts index 3636aef7..8d3563d4 100644 --- a/src/routes/chat/utils/handle-websocket-message.ts +++ b/src/routes/chat/utils/handle-websocket-message.ts @@ -1,5 +1,5 @@ import type { WebSocket } from 'partysocket'; -import type { WebSocketMessage, BlueprintType, ConversationMessage } from '@/api-types'; +import type { WebSocketMessage, BlueprintType, ConversationMessage, AgentState, PhasicState, BehaviorType } from '@/api-types'; import { deduplicateMessages, isAssistantMessageDuplicate } from './deduplicate-messages'; import { logger } from '@/utils/logger'; import { getFileType } from '@/utils/string'; @@ -11,7 +11,7 @@ import { setAllFilesCompleted, updatePhaseFileStatus, } from './file-state-helpers'; -import { +import { createAIMessage, handleRateLimitError, handleStreamingMessage, @@ -22,6 +22,10 @@ import { completeStages } from './project-stage-helpers'; import { sendWebSocketMessage } from './websocket-helpers'; import type { FileType, PhaseTimelineItem } from '../hooks/use-chat'; import { toast } from 'sonner'; +import { createRepairingJSONParser } from '@/utils/ndjson-parser/ndjson-parser'; + +const isPhasicState = (state: AgentState): state is PhasicState => + state.behaviorType === 'phasic'; export interface HandleMessageDeps { // State setters @@ -47,7 +51,8 @@ export interface HandleMessageDeps { setRuntimeErrorCount: React.Dispatch>; setStaticIssueCount: React.Dispatch>; setIsDebugging: React.Dispatch>; - + setBehaviorType: React.Dispatch>; + // Current state isInitialStateRestored: boolean; blueprint: BlueprintType | undefined; @@ -59,6 +64,7 @@ export interface HandleMessageDeps { projectStages: any[]; isGenerating: boolean; urlChatId: string | undefined; + behaviorType: BehaviorType; // Functions updateStage: (stageId: string, updates: any) => void; @@ -93,6 +99,10 @@ export function createWebSocketMessageHandler(deps: HandleMessageDeps) { } return ''; }; + + // Blueprint chunk parser (maintained across chunks) + let blueprintParser: ReturnType | null = null; + return (websocket: WebSocket, message: WebSocketMessage) => { const { setFiles, @@ -115,6 +125,7 @@ export function createWebSocketMessageHandler(deps: HandleMessageDeps) { setIsGenerating, setIsPhaseProgressActive, setIsDebugging, + setBehaviorType, isInitialStateRestored, blueprint, query, @@ -125,6 +136,7 @@ export function createWebSocketMessageHandler(deps: HandleMessageDeps) { projectStages, isGenerating, urlChatId, + behaviorType, updateStage, sendMessage, loadBootstrapFiles, @@ -159,7 +171,12 @@ export function createWebSocketMessageHandler(deps: HandleMessageDeps) { if (!isInitialStateRestored) { logger.debug('πŸ“₯ Performing initial state restoration'); - + + if (state.behaviorType && state.behaviorType !== behaviorType) { + setBehaviorType(state.behaviorType); + logger.debug('πŸ”„ Restored behaviorType from backend:', state.behaviorType); + } + if (state.blueprint && !blueprint) { setBlueprint(state.blueprint); updateStage('blueprint', { status: 'completed' }); @@ -191,12 +208,12 @@ export function createWebSocketMessageHandler(deps: HandleMessageDeps) { ); } - if (state.generatedPhases && state.generatedPhases.length > 0 && phaseTimeline.length === 0) { + if (isPhasicState(state) && 
state.generatedPhases.length > 0 && phaseTimeline.length === 0) { logger.debug('πŸ“‹ Restoring phase timeline:', state.generatedPhases); // If not actively generating, mark incomplete phases as cancelled (they were interrupted) const isActivelyGenerating = state.shouldBeGenerating === true; - const timeline = state.generatedPhases.map((phase: any, index: number) => { + const timeline = state.generatedPhases.map((phase, index: number) => { // Determine phase status: // - completed if explicitly marked complete // - cancelled if incomplete and not actively generating (interrupted) @@ -212,7 +229,7 @@ export function createWebSocketMessageHandler(deps: HandleMessageDeps) { name: phase.name, description: phase.description, status: phaseStatus, - files: phase.files.map((filesConcept: any) => { + files: phase.files.map(filesConcept => { const file = state.generatedFilesMap?.[filesConcept.path]; // File status: // - completed if it exists in generated files @@ -250,6 +267,20 @@ export function createWebSocketMessageHandler(deps: HandleMessageDeps) { } } + // Display queued user messages from state + const queuedInputs = state.pendingUserInputs || []; + if (queuedInputs.length > 0) { + logger.debug('πŸ“‹ Restoring queued user messages:', queuedInputs); + const queuedMessages: ChatMessage[] = queuedInputs.map((msg, idx) => ({ + role: 'user', + content: msg, + conversationId: `queued-${idx}`, + status: 'queued' as const, + queuePosition: idx + 1 + })); + setMessages(prev => [...prev, ...queuedMessages]); + } + setIsInitialStateRestored(true); if (state.shouldBeGenerating && !isGenerating) { @@ -835,6 +866,27 @@ export function createWebSocketMessageHandler(deps: HandleMessageDeps) { break; } + case 'blueprint_chunk': { + // Initialize parser on first chunk + if (!blueprintParser) { + blueprintParser = createRepairingJSONParser(); + logger.debug('Blueprint streaming started'); + } + + // Feed chunk to parser + blueprintParser.feed(message.chunk); + + // Try to parse partial blueprint + try { + const partial = blueprintParser.finalize(); + setBlueprint(partial); + logger.debug('Blueprint chunk processed, partial blueprint updated'); + } catch (e) { + logger.debug('Blueprint chunk accumulated, waiting for more data'); + } + break; + } + case 'terminal_output': { // Handle terminal output from server if (onTerminalMessage) { diff --git a/src/routes/chat/utils/message-helpers.ts b/src/routes/chat/utils/message-helpers.ts index eddba260..1f36ff1a 100644 --- a/src/routes/chat/utils/message-helpers.ts +++ b/src/routes/chat/utils/message-helpers.ts @@ -16,6 +16,8 @@ export type ChatMessage = Omit & { isThinking?: boolean; toolEvents?: ToolEvent[]; }; + status?: 'queued' | 'active'; + queuePosition?: number; }; /** diff --git a/src/routes/home.tsx b/src/routes/home.tsx index 7bf6c0ff..bfb5b4c8 100644 --- a/src/routes/home.tsx +++ b/src/routes/home.tsx @@ -3,9 +3,9 @@ import { ArrowRight, Info } from 'react-feather'; import { useNavigate } from 'react-router'; import { useAuth } from '@/contexts/auth-context'; import { - AgentModeToggle, - type AgentMode, -} from '../components/agent-mode-toggle'; + ProjectModeSelector, + type ProjectMode, +} from '../components/project-mode-selector'; import { useAuthGuard } from '../hooks/useAuthGuard'; import { usePaginatedApps } from '@/hooks/use-paginated-apps'; import { AnimatePresence, LayoutGroup, motion } from 'framer-motion'; @@ -21,7 +21,7 @@ export default function Home() { const navigate = useNavigate(); const { requireAuth } = useAuthGuard(); const textareaRef = 
useRef(null); - const [agentMode, setAgentMode] = useState('deterministic'); + const [projectMode, setProjectMode] = useState('app'); const [query, setQuery] = useState(''); const { user } = useAuth(); @@ -60,13 +60,13 @@ export default function Home() { // Discover section should appear only when enough apps are available and loading is done const discoverReady = useMemo(() => !loading && (apps?.length ?? 0) > 5, [loading, apps]); - const handleCreateApp = (query: string, mode: AgentMode) => { + const handleCreateApp = (query: string, mode: ProjectMode) => { const encodedQuery = encodeURIComponent(query); const encodedMode = encodeURIComponent(mode); - + // Encode images as JSON if present const imageParam = images.length > 0 ? `&images=${encodeURIComponent(JSON.stringify(images))}` : ''; - const intendedUrl = `/chat/new?query=${encodedQuery}&agentMode=${encodedMode}${imageParam}`; + const intendedUrl = `/chat/new?query=${encodedQuery}&projectType=${encodedMode}${imageParam}`; if ( !requireAuth({ @@ -179,7 +179,7 @@ export default function Home() { onSubmit={(e) => { e.preventDefault(); const query = textareaRef.current!.value; - handleCreateApp(query, agentMode); + handleCreateApp(query, projectMode); }} className="flex z-10 flex-col w-full min-h-[150px] bg-bg-4 border border-accent/30 dark:border-accent/50 dark:bg-bg-2 rounded-[18px] shadow-textarea p-5 transition-all duration-200" > @@ -210,7 +210,7 @@ export default function Home() { if (e.key === 'Enter' && !e.shiftKey) { e.preventDefault(); const query = textareaRef.current!.value; - handleCreateApp(query, agentMode); + handleCreateApp(query, projectMode); } }} /> @@ -224,15 +224,11 @@ export default function Home() { )}
- {import.meta.env.VITE_AGENT_MODE_ENABLED ? ( - - ) : ( -
- )} +
{ + const identity = `# Identity +You are an elite autonomous project builder with deep expertise in Cloudflare Workers, Durable Objects, TypeScript, React, Vite, and modern web applications. You operate with EXTREMELY HIGH reasoning capability.`; + + const comms = `# CRITICAL: Communication Mode +- Perform ALL analysis, planning, and reasoning INTERNALLY using your high reasoning capability +- Your output should be CONCISE: brief status updates and tool calls ONLY +- NO verbose explanations, NO step-by-step narrations in your output +- Think deeply internally β†’ Act externally with precise tool calls β†’ Report results briefly +- This is NOT negotiable - verbose output wastes tokens and degrades user experience`; + + const architecture = `# System Architecture (CRITICAL - Understand This) + +## How Your Environment Works + +**You operate in a Durable Object with TWO distinct layers:** + +### 1. Virtual Filesystem (Your Workspace) +- Lives in Durable Object storage (persistent) +- Managed by FileManager + Git (isomorphic-git with SQLite) +- ALL files you generate go here FIRST +- Files exist in DO storage, NOT in actual sandbox yet +- Full git history maintained (commits, diffs, log, show) +- This is YOUR primary working area + +### 2. Sandbox Environment (Execution Layer) +- A docker-like container that can run arbitary code +- Suitable for running bun + vite dev server +- Has its own filesystem (NOT directly accessible to you) +- Provisioned/deployed to when deploy_preview is called +- Runs 'bun run dev' and exposes preview URL when initialized +- THIS is where code actually executes + +## The Deploy Process (What deploy_preview Does) + +When you call deploy_preview: +1. Checks if sandbox instance exists +2. If NOT: Creates new sandbox instance + - Writes all virtual files to sandbox filesystem (including template files and then your generated files on top) + - Runs: bun install β†’ bun run dev + - Exposes port β†’ preview URL +3. If YES: Uses existing sandbox +4. Syncs any provided/freshly generated files to sandbox filesystem +5. Returns preview URL + +**KEY INSIGHT**: Your generate_files writes to VIRTUAL filesystem. deploy_preview syncs to SANDBOX. 
+ +## File Flow Diagram +\`\`\` +You (LLM) + β†’ generate_files / regenerate_file + β†’ Virtual Filesystem (FileManager + Git) + β†’ [Files stored in DO, committed to git] + +deploy_preview called + β†’ Syncs virtual files β†’ Sandbox filesystem + β†’ Returns preview URL +\`\`\` + +## When Things Break + +**Sandbox becomes unhealthy:** +- DeploymentManager auto-detects via health checks +- Will auto-redeploy after failures +- You may see retry messages - this is normal + +**Need fresh start:** +- Use force_redeploy=true in deploy_preview +- Destroys current sandbox, creates new one +- Expensive operation - only when truly stuck + +## Troubleshooting Workflow + +**Problem: "I generated files but preview shows old code"** +β†’ You forgot to deploy_preview after generating files +β†’ Solution: Call deploy_preview to sync virtual β†’ sandbox + +**Problem: "run_analysis says file doesn't exist"** +β†’ File is in virtual FS but not synced to sandbox yet +β†’ Solution: deploy_preview first, then run_analysis + +**Problem: "exec_commands fails with 'no instance'"** +β†’ Sandbox doesn't exist yet +β†’ Solution: deploy_preview first to create sandbox + +**Problem: "get_logs returns empty"** +β†’ User hasn't interacted with preview yet, OR logs were cleared +β†’ Solution: Wait for user interaction or check timestamps + +**Problem: "Same error keeps appearing after fix"** +β†’ Logs are cumulative - you're seeing old errors. +β†’ Solution: Clear logs with deploy_preview(clearLogs=true) and try again. + +**Problem: "Types look correct but still errors"** +β†’ You're reading from virtual FS, but sandbox has old versions +β†’ Solution: deploy_preview to sync latest changes`; + + const environment = `# Project Environment +- Runtime: Cloudflare Workers (NO Node.js fs/path/process APIs available) +- Fetch API standard (Request/Response), Web Streams API +- Frontend: React 19 + Vite + TypeScript + TailwindCSS +- Build tool: Bun (commands: bun run dev/build/lint/deploy) +- All projects MUST be Cloudflare Worker projects with wrangler.jsonc`; + + const constraints = `# Platform Constraints +- NO Node.js APIs (fs, path, process, etc.) - Workers runtime only +- Logs and errors are user-driven; check recency before fixing +- Paths are ALWAYS relative to project root +- Commands execute at project root - NEVER use cd +- NEVER modify wrangler.jsonc or package.json unless absolutely necessary`; + + const workflow = `# Your Workflow (Execute This Rigorously) + +## Step 1: Understand Requirements +- Read user request carefully +- Identify project type: app, presentation, documentation, tool, workflow +- Determine if clarifying questions are needed (rare - usually requirements are clear) + +## Step 2: Determine Approach +**Static Content** (documentation, guides, markdown): +- Generate files in docs/ directory structure +- NO sandbox needed +- Focus on content quality, organization, formatting + +**Interactive Projects** (apps, presentations, APIs, tools): +- Require sandbox with template +- Must have runtime environment +- Will use deploy_preview for testing + +## Step 3: Template Selection (Interactive Projects Only) +CRITICAL - This step is MANDATORY for interactive projects: + +**Use AI-Powered Template Selector:** +1. 
Call \`init_suitable_template\` - AI analyzes requirements and selects best template + - Automatically searches template library (rich collection of templates) + - Matches project type, complexity, style to available templates + - Returns: selection reasoning + automatically imports template files + - Trust the AI selector - it knows the template library well + +2. Review the selection reasoning + - AI explains why template was chosen + - Template files now in your virtual filesystem + - Ready for blueprint generation with template context + +**What if no suitable template?** +- Rare case: AI returns null if no template matches +- Fallback: Virtual-first mode (generate all config files yourself) +- Manual configs: package.json, wrangler.jsonc, vite.config.js +- Use this ONLY when AI couldn't find a match + +**Why template-first matters:** +- Templates have working configs and features +- Blueprint can leverage existing template structure +- Avoids recreating what template already provides +- Better architecture from day one + +**CRITICAL**: Do NOT skip template selection for interactive projects. Always call \`init_suitable_template\` first. + +## Step 4: Generate Blueprint +- Use generate_blueprint to create structured PRD (optionally with prompt parameter for additional context) +- Blueprint defines: title, description, features, architecture, plan +- Refine with alter_blueprint if needed +- NEVER start building without a plan + +## Step 5: Build Incrementally +- Use generate_files for new features/components (goes to virtual FS) +- Use regenerate_file for surgical fixes to existing files (goes to virtual FS) +- Commit frequently with clear messages (git operates on virtual FS) +- For interactive projects: + - After generating files: deploy_preview (syncs virtual β†’ sandbox) + - Then verify with run_analysis or runtime tools + - Fix issues β†’ iterate +- **Remember**: Files in virtual FS won't execute until you deploy_preview + +## Step 6: Verification & Polish +- run_analysis for type checking and linting +- get_runtime_errors / get_logs for runtime issues +- Fix all issues before completion +- Ensure professional quality and polish`; + + const tools = `# Available Tools (Detailed Reference) + +## Planning & Architecture + +**generate_blueprint** - Create structured project plan (Product Requirements Document) + +**What it is:** +- Your planning tool - creates a PRD defining WHAT to build before you start +- Becomes the source of truth for implementation +- Stored in agent state (persists across all requests) +- Accepts optional **prompt** parameter for providing additional context beyond user's initial request + +**What it generates:** +- title: Project name +- projectName: Technical identifier +- description: What the project does +- colorPalette: Brand colors for UI +- frameworks: Tech stack being used +- plan[]: Phased implementation roadmap with requirements per phase + +**When to call:** +- βœ… FIRST STEP when no blueprint exists +- βœ… User provides vague requirements (you need to design structure) +- βœ… Complex project needing phased approach + +**When NOT to call:** +- ❌ Blueprint already exists (use alter_blueprint to modify) +- ❌ Simple one-file tasks (just generate directly) + +**Optional prompt parameter:** +- Use to provide additional context, clarifications, or refined specifications +- If omitted, uses user's original request +- Useful when you've learned more through conversation + +**CRITICAL After-Effects:** +1. Blueprint stored in agent state +2. 
You now have clear plan to follow +3. Use plan phases to guide generate_files calls +4. **Do NOT start building without blueprint** (fundamental rule) + +**Example workflow:** +\`\`\` +User: "Build a todo app" + ↓ +You: generate_blueprint (creates PRD with phases) + ↓ +Review blueprint, refine with alter_blueprint if needed + ↓ +Follow phases: generate_files for phase-1, then phase-2, etc. +\`\`\` + +**alter_blueprint** +- Patch specific fields in existing blueprint +- Use to refine after generation or requirements change +- Surgical updates only - don't regenerate entire blueprint + +## Template Selection +**init_suitable_template** - AI-powered template selection and import + +**What it does:** +- Analyzes your requirements against entire template library +- Uses AI to match project type, complexity, style to available templates +- Automatically selects and imports best matching template +- Returns: selection reasoning + imported template files + +**How it works:** +\`\`\` +You call: init_suitable_template() + ↓ +AI fetches all available templates from library + ↓ +AI analyzes: project type, requirements, complexity, style + ↓ +AI selects best matching template + ↓ +Template automatically imported to virtual filesystem + ↓ +Returns: selection object + reasoning + imported files +\`\`\` + +**What you get back:** +- selection.selectedTemplateName: Chosen template name (or null if none suitable) +- selection.reasoning: Why this template was chosen +- selection.projectType: Detected/confirmed project type +- selection.complexity: simple/moderate/complex +- selection.styleSelection: UI style recommendation +- importedFiles[]: Array of important template files now in virtual FS + +**Template Library Coverage:** +The library includes templates for: +- React/Vue/Svelte apps with various configurations +- Game starters (canvas-based, WebGL) +- Presentation frameworks (Spectacle, Reveal.js) +- Dashboard/Admin templates +- Landing pages and marketing sites +- API/Worker templates +- And many more specialized templates + +**When to use:** +- βœ… ALWAYS for interactive projects (app/presentation/workflow) +- βœ… Before generate_blueprint (template context enriches blueprint) +- βœ… First step after understanding requirements + +**When NOT to use:** +- ❌ Static documentation projects (no runtime needed) +- ❌ After template already imported + +**CRITICAL Caveat:** +- If AI returns null (no suitable template), fall back to virtual-first mode +- This is RARE - trust the AI selector to find a match +- Template's 'bun run dev' MUST work or sandbox creation fails +- If using virtual-first fallback, YOU must ensure working dev script + +**Example workflow:** +\`\`\` +1. init_suitable_template() + β†’ AI: "Selected react-game-starter because: user wants 2D game, template has canvas setup and scoring system..." + β†’ Imported 15 important files +2. generate_blueprint(prompt: "Template has canvas and game loop. Build on this...") + β†’ Blueprint leverages existing template features +3. generate_files(...) + β†’ Build on top of template foundation +\`\`\` + +## File Operations (Understanding Your Two-Layer System) + +**CRITICAL: Where Your Files Live** + +You work with TWO separate filesystems: + +1. **Virtual Filesystem** (Your persistent workspace) + - Lives in Durable Object storage + - Managed by git (full commit history) + - Files here do NOT execute - just stored + - Persists across all requests/sessions + +2. 
**Sandbox Filesystem** (Where code runs) + - Separate container running Bun + Vite dev server + - Files here CAN execute and be tested + - Created when you call deploy_preview + - Destroyed/recreated on redeploy + +**The File Flow You Control:** +\`\`\` +You call: generate_files or regenerate_file + ↓ +Files written to VIRTUAL filesystem (Durable Object storage) + ↓ +Auto-committed to git (generate_files) or staged (regenerate_file) + ↓ +[Files NOT in sandbox yet - sandbox can't see them] + ↓ +You call: deploy_preview + ↓ +Files synced from virtual filesystem β†’ sandbox filesystem + ↓ +Now sandbox can execute your code +\`\`\` + +--- + +**virtual_filesystem** - List and read files from your persistent workspace + +Commands available: +- **"list"**: See all files in your virtual filesystem +- **"read"**: Read file contents by paths (requires paths parameter) + +**What it does:** +- Lists/reads from your persistent workspace (template files + generated files) +- Shows you what exists BEFORE deploying to sandbox +- Useful for: discovering files, verifying changes, understanding structure + +**Where it reads from (priority order):** +1. Your generated/modified files (highest priority) +2. Template files (if template selected) +3. Returns empty if file doesn't exist + +**When to use:** +- βœ… Before editing (understand what exists) +- βœ… After generate_files/regenerate_file (verify changes worked) +- βœ… Exploring template structure +- βœ… Checking if file exists before regenerating + +**CRITICAL Caveat:** +- Reads from VIRTUAL filesystem, not sandbox +- Sandbox may have older versions if you haven't called deploy_preview +- If sandbox behaving weird, check if virtual FS and sandbox are in sync + +--- + +**generate_files** - Create or completely rewrite files + +**What it does:** +- Generates complete file contents from scratch +- Can create multiple files in one call (batch operation) +- Automatically commits to git with descriptive message +- **Where files go**: Virtual filesystem only (not in sandbox yet) + +**When to use:** +- βœ… Creating brand new files that don't exist +- βœ… Scaffolding features requiring multiple coordinated files +- βœ… When regenerate_file failed 2+ times (file too broken to patch) +- βœ… Initial project structure + +**When NOT to use:** +- ❌ Small fixes to existing files (use regenerate_file - faster) +- ❌ Tweaking single functions (use regenerate_file) + +**CRITICAL After-Effects:** +1. Files now exist in virtual filesystem +2. Automatically committed to git +3. Sandbox does NOT see them yet +4. **You MUST call deploy_preview to sync virtual β†’ sandbox** +5. Only after deploy_preview can you test or run_analysis + +--- + +**regenerate_file** - Surgical fixes to single existing file + +**What it does:** +- Applies minimal, targeted changes to one file +- Uses smart pattern matching internally +- Makes multiple passes (up to 3) to fix issues +- Returns diff showing exactly what changed +- **Where files go**: Virtual filesystem only + +**When to use:** +- βœ… Fixing TypeScript/JavaScript errors +- βœ… Adding missing imports or exports +- βœ… Patching bugs or logic errors +- βœ… Small feature additions to existing components + +**When NOT to use:** +- ❌ File doesn't exist yet (use generate_files) +- ❌ File is too broken to patch (use generate_files to rewrite) +- ❌ Haven't read the file yet (read it first!) 
+ +**How to describe issues (CRITICAL for success):** +- BE SPECIFIC: Include exact error messages, line numbers +- ONE PROBLEM PER ISSUE: Don't combine unrelated problems +- PROVIDE CONTEXT: Explain what's broken and why +- SUGGEST SOLUTION: Share your best idea for fixing it + +**CRITICAL After-Effects:** +1. File updated in virtual filesystem +2. Changes are STAGED (git add) but NOT committed +3. **You MUST manually call git commit** (unlike generate_files) +4. Sandbox does NOT see changes yet +5. **You MUST call deploy_preview to sync virtual β†’ sandbox** + +**PARALLEL EXECUTION:** +- You can call regenerate_file on MULTIPLE different files simultaneously +- Much faster than sequential calls + +## Deployment & Testing +**deploy_preview** +- Deploy to sandbox and get preview URL +- Only for interactive projects (apps, presentations, APIs) +- NOT for static documentation +- Creates sandbox on first call if needed +- TWO MODES: + 1. **Template-based**: If you called init_suitable_template(), uses that selected template + 2. **Virtual-first**: If you generated package.json, wrangler.jsonc, vite.config.js directly, creates sandbox with fallback template + your files as overlay +- Syncs all files from virtual filesystem to sandbox + +**run_analysis** +- TypeScript checking + ESLint +- **Where**: Runs in sandbox on deployed files +- **Requires**: Sandbox must exist +- Run after changes to catch errors early +- Much faster than runtime testing +- Analyzes files you specify (or all generated files) + +**get_runtime_errors** +- Fetch runtime exceptions from sandbox +- **Where**: Sandbox environment +- **Requires**: Sandbox running, user has interacted with app +- Check recency - logs are cumulative +- Use after deploy_preview for verification +- Errors only appear when code actually executes + +**get_logs** +- Get console logs from sandbox +- **Where**: Sandbox environment +- **Requires**: Sandbox running +- Cumulative - check timestamps +- Useful for debugging runtime behavior +- Logs appear when user interacts with preview + +## Utilities +**exec_commands** +- Execute shell commands in sandbox +- **Where**: Sandbox environment (NOT virtual filesystem) +- **Requires**: Sandbox must exist (call deploy_preview first) +- Use sparingly - most needs covered by other tools +- Commands run at project root +- Examples: bun add package, custom build scripts + +**git** +- Operations: commit, log, show +- **Where**: Virtual filesystem (isomorphic-git on DO storage) +- Commit frequently with conventional messages +- Use for: saving progress, reviewing changes +- Full git history maintained +- **Note**: This is YOUR git, not sandbox git + +**generate_images** +- Future image generation capability +- Currently a stub - do NOT rely on this`; + + const staticVsSandbox = `# CRITICAL: Static vs Sandbox Detection + +**Static Content (NO Sandbox)**: +- Markdown files (.md, .mdx) +- Documentation in docs/ directory +- Plain text files +- Configuration without runtime +β†’ Generate files, NO deploy_preview needed +β†’ Focus on content quality and organization + +**Interactive Projects (Require Sandbox)**: +- React apps, presentations, APIs +- Anything with bun run dev +- UI with interactivity +- Backend endpoints +β†’ Must select template +β†’ Use deploy_preview for testing +β†’ Verify with run_analysis + runtime tools`; + + const quality = `# Quality Standards + +**Code Quality:** +- Type-safe TypeScript (no any, proper interfaces) +- Minimal dependencies - reuse what exists +- Clean architecture - separation of 
concerns +- Professional error handling + +**UI Quality (when applicable):** +- Responsive design (mobile, tablet, desktop) +- Proper spacing and visual hierarchy +- Interactive states (hover, focus, active, disabled) +- Accessibility basics (semantic HTML, ARIA when needed) +- TailwindCSS for styling (theme-consistent) + +**Testing & Verification:** +- All TypeScript errors resolved +- No lint warnings +- Runtime tested via preview +- Edge cases considered`; + + const reactSafety = `# React Safety & Common Pitfalls + +${PROMPT_UTILS.REACT_RENDER_LOOP_PREVENTION_LITE} + +${PROMPT_UTILS.COMMON_PITFALLS} + +**Additional Warnings:** +- NEVER modify state during render +- useEffect dependencies must be complete +- Memoize expensive computations +- Avoid inline object/function creation in JSX`; + + const completion = `# Completion Discipline + +When you're done: +**BUILD_COMPLETE: ** +- All requirements met +- All errors fixed +- Testing completed +- Ready for user + +If blocked: +**BUILD_STUCK: ** +- Clear explanation of blocker +- What you tried +- What you need to proceed + +STOP ALL TOOL CALLS IMMEDIATELY after either signal.`; + + const warnings = `# Critical Warnings + +1. TEMPLATE SELECTION IS CRITICAL - Use init_suitable_template() for interactive projects, trust AI selector +2. For template-based: Selected template MUST have working 'bun run dev' or sandbox fails +3. For virtual-first: You MUST generate package.json, wrangler.jsonc, vite.config.js before deploy_preview +4. Do NOT deploy static documentation - wastes resources +5. Check log timestamps - they're cumulative, may contain old data +6. NEVER create verbose step-by-step explanations - use tools directly +7. Template switching allowed but strongly discouraged +8. Virtual-first is advanced mode - default to template-based unless necessary`; + + return [ + identity, + comms, + architecture, + environment, + constraints, + workflow, + tools, + staticVsSandbox, + quality, + reactSafety, + completion, + warnings, + '# Dynamic Context-Specific Guidance', + dynamicHints, + ].join('\n\n'); +}; + +/** + * Build user prompt with all context + */ +const getUserPrompt = ( + inputs: BuildInputs, + fileSummaries: string, + templateInfo?: string +): string => { + const { query, projectName, blueprint } = inputs; + return `## Build Task +**Project Name**: ${projectName} +**User Request**: ${query} + +${blueprint ? `## Project Blueprint + +The following blueprint defines the structure, features, and requirements for this project: + +\`\`\`json +${JSON.stringify(blueprint, null, 2)} +\`\`\` + +**Use this blueprint to guide your implementation.** It outlines what needs to be built.` : `## Note + +No blueprint provided. Design the project structure based on the user request above.`} + +${templateInfo ? `## Template Context + +This project uses a preconfigured template: + +${templateInfo} + +**IMPORTANT:** Leverage existing components, utilities, and APIs from the template. Do not recreate what already exists.` : ''} + +${fileSummaries ? `## Current Codebase + +${fileSummaries}` : `## Starting Fresh + +This is a new project. Start from the template or scratch.`} + +## Your Mission + +Build a complete, production-ready solution that best fulfills the request. If it needs a full web experience, build it. If it’s a backend workflow, implement it. If it’s narrative content, write documents; if slides are appropriate, build a deck and verify via preview. + +**Approach (internal planning):** +1. 
Understand requirements and decide representation (UI, backend, slides, documents) +2. Generate PRD (if missing) and refine +3. Scaffold with generate_files, preferring regenerate_file for targeted edits +4. When a runtime exists: deploy_preview, then verify with run_analysis +5. Iterate and polish; commit meaningful checkpoints + +**Remember:** +- Write clean, type-safe, maintainable code +- Test thoroughly with deploy_preview and run_analysis +- Fix all issues before claiming completion +- Commit regularly with descriptive messages + +## Execution Reminder +- If no blueprint or plan is present: generate_blueprint FIRST (optionally with prompt parameter for additional context), then alter_blueprint if needed. Do not implement until a plan exists. +- Deploy only when a runtime exists; do not deploy for documents-only work. + +Begin building.`; +}; + +/** + * Summarize files for context + */ +function summarizeFiles(filesIndex: FileState[]): string { + if (!filesIndex || filesIndex.length === 0) { + return 'No files generated yet.'; + } + + const summary = filesIndex.map(f => { + const relativePath = f.filePath.startsWith('/') ? f.filePath.substring(1) : f.filePath; + const sizeKB = (f.fileContents.length / 1024).toFixed(1); + return `- ${relativePath} (${sizeKB} KB) - ${f.filePurpose}`; + }).join('\n'); + + return `Generated Files (${filesIndex.length} total):\n${summary}`; +} + +export class AgenticProjectBuilder extends Assistant { + logger = createObjectLogger(this, 'AgenticProjectBuilder'); + modelConfigOverride?: ModelConfig; + + constructor( + env: Env, + inferenceContext: InferenceContext, + modelConfigOverride?: ModelConfig, + ) { + super(env, inferenceContext); + this.modelConfigOverride = modelConfigOverride; + } + + async run( + inputs: BuildInputs, + session: BuildSession, + streamCb?: (chunk: string) => void, + toolRenderer?: RenderToolCall, + onToolComplete?: (message: Message) => Promise, + onAssistantMessage?: (message: Message) => Promise, + conversationHistory?: ConversationMessage[] + ): Promise { + this.logger.info('Starting project build', { + projectName: inputs.projectName, + projectType: session.projectType, + hasBlueprint: !!inputs.blueprint, + }); + + // Get file summaries + const fileSummaries = summarizeFiles(session.filesIndex); + + // Get template details from agent + const operationOptions = session.agent.getOperationOptions(); + const templateInfo = operationOptions.context.templateDetails + ? PROMPT_UTILS.serializeTemplate(operationOptions.context.templateDetails) + : undefined; + + // Build dynamic hints from current context + const hasFiles = (session.filesIndex || []).length > 0; + const isAgenticBlueprint = (bp?: Blueprint): bp is AgenticBlueprint => { + return !!bp && Array.isArray((bp as any).plan); + }; + const hasTSX = session.filesIndex?.some(f => /\.(t|j)sx$/i.test(f.filePath)) || false; + const hasMD = session.filesIndex?.some(f => /\.(md|mdx)$/i.test(f.filePath)) || false; + const hasPlan = isAgenticBlueprint(inputs.blueprint) && inputs.blueprint.plan.length > 0; + const hasTemplate = !!session.selectedTemplate; + const needsSandbox = hasTSX || session.projectType === 'presentation' || session.projectType === 'app'; + + const dynamicHints = [ + !hasPlan ? '- No plan detected: Start with generate_blueprint (optionally with prompt parameter) to establish PRD (title, projectName, description, colorPalette, frameworks, plan).' : '- Plan detected: proceed to implement milestones using generate_files/regenerate_file.', + needsSandbox && !hasTemplate ? 
'- Interactive project without template: Use init_suitable_template() to let AI select and import best matching template before first deploy.' : '', + hasTSX ? '- UI detected: Use deploy_preview to verify runtime; then run_analysis for quick feedback.' : '', + hasMD && !hasTSX ? '- Documents detected without UI: This is STATIC content - generate files in docs/, NO deploy_preview needed.' : '', + !hasFiles && hasPlan ? '- Plan ready, no files yet: Scaffold initial structure with generate_files.' : '', + ].filter(Boolean).join('\n'); + + let historyMessages: Message[] = []; + if (conversationHistory && conversationHistory.length > 0) { + const prepared = await prepareMessagesForInference(this.env, conversationHistory); + historyMessages = prepared as Message[]; + + this.logger.info('Loaded conversation history', { + messageCount: historyMessages.length + }); + } + + let systemPrompt = getSystemPrompt(dynamicHints); + + if (historyMessages.length > 0) { + systemPrompt += `\n\n# Conversation History\nYou are being provided with the full conversation history from your previous interactions. Review it to understand context and avoid repeating work.`; + } + + let userPrompt = getUserPrompt(inputs, fileSummaries, templateInfo); + + const system = createSystemMessage(systemPrompt); + const user = createUserMessage(userPrompt); + const messages: Message[] = this.save([system, user, ...historyMessages]); + + // Build tools with renderer and conversation sync callback + const tools = buildAgenticBuilderTools(session, this.logger, toolRenderer, onToolComplete); + + let output = ''; + + try { + const result = await executeInference({ + env: this.env, + context: this.inferenceContext, + agentActionName: 'agenticProjectBuilder', + modelConfig: this.modelConfigOverride || AGENT_CONFIG.agenticProjectBuilder, + messages, + tools, + stream: streamCb + ? { chunk_size: 64, onChunk: (c) => streamCb(c) } + : undefined, + onAssistantMessage, + }); + + output = result?.string || ''; + + this.logger.info('Project build completed', { + outputLength: output.length + }); + + } catch (error) { + this.logger.error('Project build failed', error); + throw error; + } + + return output; + } +} diff --git a/worker/agents/assistants/codeDebugger.ts b/worker/agents/assistants/codeDebugger.ts index f31f8b75..586f3541 100644 --- a/worker/agents/assistants/codeDebugger.ts +++ b/worker/agents/assistants/codeDebugger.ts @@ -10,7 +10,6 @@ import { executeInference } from '../inferutils/infer'; import { InferenceContext, ModelConfig } from '../inferutils/config.types'; import { createObjectLogger } from '../../logger'; import type { ToolDefinition } from '../tools/types'; -import { CodingAgentInterface } from '../services/implementations/CodingAgent'; import { AGENT_CONFIG } from '../inferutils/config'; import { buildDebugTools } from '../tools/customTools'; import { RenderToolCall } from '../operations/UserConversationProcessor'; @@ -19,6 +18,7 @@ import { PROMPT_UTILS } from '../prompts'; import { RuntimeError } from 'worker/services/sandbox/sandboxTypes'; import { FileState } from '../core/state'; import { InferError } from '../inferutils/core'; +import { ICodingAgent } from '../services/interfaces/ICodingAgent'; const SYSTEM_PROMPT = `You are an elite autonomous code debugging specialist with deep expertise in root-cause analysis, modern web frameworks (React, Vite, Cloudflare Workers), TypeScript/JavaScript, build tools, and runtime environments. 
@@ -360,6 +360,7 @@ deploy_preview({ clearLogs: true }) - Always check timestamps vs. your deploy times - Cross-reference with get_runtime_errors and actual code - Don't fix issues that were already resolved + - Ignore server restarts - the Vite dev server restarts on every source modification; this is normal. - **Before regenerate_file**: Read current code to confirm bug exists - **After regenerate_file**: Check diff to verify correctness @@ -396,7 +397,7 @@ deploy_preview({ clearLogs: true }) - **React**: render loops (state-in-render, missing deps, unstable Zustand selectors) - **Import/export**: named vs default inconsistency - **Type safety**: maintain strict TypeScript compliance -- **Configuration files**: Never try to edit wrangler.jsonc or package.json +- **Configuration files**: Never try to edit wrangler.jsonc, vite.config.ts, or package.json **⚠️ CRITICAL: Do NOT "Optimize" Zustand Selectors** If you see this pattern - **LEAVE IT ALONE** (it's already optimal): @@ -544,7 +545,7 @@ type LoopDetectionState = { export type DebugSession = { filesIndex: FileState[]; - agent: CodingAgentInterface; + agent: ICodingAgent; runtimeErrors?: RuntimeError[]; }; diff --git a/worker/agents/constants.ts b/worker/agents/constants.ts index 907f3a84..d34a0d3f 100644 --- a/worker/agents/constants.ts +++ b/worker/agents/constants.ts @@ -68,6 +68,7 @@ export const WebSocketMessageResponses: Record = { CONVERSATION_STATE: 'conversation_state', PROJECT_NAME_UPDATED: 'project_name_updated', BLUEPRINT_UPDATED: 'blueprint_updated', + BLUEPRINT_CHUNK: 'blueprint_chunk', // Model configuration info MODEL_CONFIGS_INFO: 'model_configs_info', @@ -114,6 +115,8 @@ export const getMaxToolCallingDepth = (agentActionKey: AgentActionKey | 'testMod switch (agentActionKey) { case 'deepDebugger': return 100; + case 'agenticProjectBuilder': + return 100; default: return MAX_TOOL_CALLING_DEPTH_DEFAULT; } diff --git a/worker/agents/core/AgentComponent.ts b/worker/agents/core/AgentComponent.ts new file mode 100644 index 00000000..1179c66f --- /dev/null +++ b/worker/agents/core/AgentComponent.ts @@ -0,0 +1,89 @@ +import { AgentInfrastructure } from './AgentCore'; +import { StructuredLogger } from '../../logger'; +import { WebSocketMessageType } from '../../api/websocketTypes'; +import { WebSocketMessageData } from '../../api/websocketTypes'; +import { FileManager } from '../services/implementations/FileManager'; +import { DeploymentManager } from '../services/implementations/DeploymentManager'; +import { GitVersionControl } from '../git'; +import { AgentState, BaseProjectState } from './state'; +import { WebSocketMessageResponses } from '../constants'; + +/** + * Base class for all agent components (behaviors and objectives) + * + * Provides common infrastructure access patterns via protected helpers.
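+ *
+ * Rough usage sketch (the subclass name and generic parameter below are
+ * hypothetical, shown only to illustrate the protected helpers):
+ *
+ *   class ExampleBehavior extends AgentComponent<BaseProjectState> {
+ *     reportProgress() {
+ *       // protected getters proxy the shared AgentInfrastructure
+ *       this.logger.info('Generated files', {
+ *         count: this.fileManager.getGeneratedFilePaths().length,
+ *       });
+ *       this.broadcast(WebSocketMessageResponses.GENERATION_STARTED, {
+ *         message: 'Starting generation', totalFiles: 1,
+ *       });
+ *     }
+ *   }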
+ * + * Both BaseCodingBehavior and ProjectObjective extend this class to access: + * - Core infrastructure (state, env, sql, logger) + * - Services (fileManager, deploymentManager, git) + */ +export abstract class AgentComponent { + constructor(protected readonly infrastructure: AgentInfrastructure) {} + + // ========================================== + // PROTECTED HELPERS (Infrastructure access) + // ========================================== + + protected get env(): Env { + return this.infrastructure.env; + } + + get logger(): StructuredLogger { + return this.infrastructure.logger(); + } + + protected getAgentId(): string { + return this.infrastructure.getAgentId(); + } + + public getWebSockets(): WebSocket[] { + return this.infrastructure.getWebSockets(); + } + + protected get state(): TState { + return this.infrastructure.state; + } + + setState(state: TState): void { + try { + this.infrastructure.setState(state); + } catch (error) { + this.broadcastError("Error setting state", error); + this.logger.error("State details:", { + originalState: JSON.stringify(this.state, null, 2), + newState: JSON.stringify(state, null, 2) + }); + } + } + + // ========================================== + // PROTECTED HELPERS (Service access) + // ========================================== + + protected get fileManager(): FileManager { + return this.infrastructure.fileManager; + } + + protected get deploymentManager(): DeploymentManager { + return this.infrastructure.deploymentManager; + } + + public get git(): GitVersionControl { + return this.infrastructure.git; + } + + protected broadcast( + type: T, + data?: WebSocketMessageData + ): void { + this.infrastructure.broadcast(type, data); + } + + protected broadcastError(context: string, error: unknown): void { + const errorMessage = error instanceof Error ? error.message : String(error); + this.logger.error(`${context}:`, error); + this.broadcast(WebSocketMessageResponses.ERROR, { + error: `${context}: ${errorMessage}` + }); + } +} diff --git a/worker/agents/core/AgentCore.ts b/worker/agents/core/AgentCore.ts new file mode 100644 index 00000000..1a6c98b9 --- /dev/null +++ b/worker/agents/core/AgentCore.ts @@ -0,0 +1,46 @@ +import { GitVersionControl } from "../git"; +import { DeploymentManager } from "../services/implementations/DeploymentManager"; +import { FileManager } from "../services/implementations/FileManager"; +import { StructuredLogger } from "../../logger"; +import { BaseProjectState } from "./state"; +import { WebSocketMessageType } from "../../api/websocketTypes"; +import { WebSocketMessageData } from "../../api/websocketTypes"; +import { ConversationMessage, ConversationState } from "../inferutils/common"; +import { TemplateDetails } from "worker/services/sandbox/sandboxTypes"; + +/** + * Infrastructure interface for agent implementations. 
+ * Provides access to: + * - Core infrastructure (state, env, sql, logger) + * - Services (fileManager, deploymentManager, git) + */ +export interface AgentInfrastructure { + readonly state: TState; + setState(state: TState): void; + getWebSockets(): WebSocket[]; + broadcast( + type: T, + data?: WebSocketMessageData + ): void; + getAgentId(): string; + logger(): StructuredLogger; + readonly env: Env; + + setConversationState(state: ConversationState): void; + getConversationState(): ConversationState; + addConversationMessage(message: ConversationMessage): void; + clearConversation(): void; + + // Services + readonly fileManager: FileManager; + readonly deploymentManager: DeploymentManager; + readonly git: GitVersionControl; + + // Git export infrastructure + exportGitObjects(): Promise<{ + gitObjects: Array<{ path: string; data: Uint8Array }>; + query: string; + hasCommits: boolean; + templateDetails: TemplateDetails | null; + }>; +} diff --git a/worker/agents/core/behaviors/agentic.ts b/worker/agents/core/behaviors/agentic.ts new file mode 100644 index 00000000..32a42277 --- /dev/null +++ b/worker/agents/core/behaviors/agentic.ts @@ -0,0 +1,370 @@ + +import { AgentInitArgs } from '../types'; +import { AgenticState } from '../state'; +import { WebSocketMessageResponses } from '../../constants'; +import { UserConversationProcessor } from '../../operations/UserConversationProcessor'; +import { GenerationContext, AgenticGenerationContext } from '../../domain/values/GenerationContext'; +import { PhaseImplementationOperation } from '../../operations/PhaseImplementation'; +import { FileRegenerationOperation } from '../../operations/FileRegeneration'; +import { AgenticProjectBuilder, BuildSession } from '../../assistants/agenticProjectBuilder'; +import { buildToolCallRenderer } from '../../operations/UserConversationProcessor'; +import { PhaseGenerationOperation } from '../../operations/PhaseGeneration'; +import { FastCodeFixerOperation } from '../../operations/PostPhaseCodeFixer'; +import { customizeTemplateFiles, generateProjectName } from '../../utils/templateCustomizer'; +import { IdGenerator } from '../../utils/idGenerator'; +import { generateNanoId } from '../../../utils/idGenerator'; +import { BaseCodingBehavior, BaseCodingOperations } from './base'; +import { ICodingAgent } from '../../services/interfaces/ICodingAgent'; +import { SimpleCodeGenerationOperation } from '../../operations/SimpleCodeGeneration'; +import { OperationOptions } from 'worker/agents/operations/common'; +import { compactifyContext } from '../../utils/conversationCompactifier'; +import { ConversationMessage, createMultiModalUserMessage, createUserMessage, Message } from '../../inferutils/common'; +import { AbortError } from 'worker/agents/inferutils/core'; + +interface AgenticOperations extends BaseCodingOperations { + generateNextPhase: PhaseGenerationOperation; + implementPhase: PhaseImplementationOperation; +} + +/** + * AgenticCodingBehavior + */ +export class AgenticCodingBehavior extends BaseCodingBehavior implements ICodingAgent { + protected static readonly PROJECT_NAME_PREFIX_MAX_LENGTH = 20; + + protected operations: AgenticOperations = { + regenerateFile: new FileRegenerationOperation(), + fastCodeFixer: new FastCodeFixerOperation(), + processUserMessage: new UserConversationProcessor(), + simpleGenerateFiles: new SimpleCodeGenerationOperation(), + generateNextPhase: new PhaseGenerationOperation(), + implementPhase: new PhaseImplementationOperation(), + }; + + // Conversation sync tracking + private 
toolCallCounter: number = 0; + private readonly COMPACTIFY_CHECK_INTERVAL = 9; // Check compactification every 9 tool calls + + /** + * Initialize the code generator with project blueprint and template + * Sets up services and begins deployment process + */ + async initialize( + initArgs: AgentInitArgs, + ..._args: unknown[] + ): Promise { + await super.initialize(initArgs); + + const { query, hostname, inferenceContext, templateInfo, sandboxSessionId } = initArgs; + + const packageJson = templateInfo?.templateDetails?.allFiles['package.json']; + + const baseName = (query || 'project').toString(); + const projectName = generateProjectName( + baseName, + generateNanoId(), + AgenticCodingBehavior.PROJECT_NAME_PREFIX_MAX_LENGTH + ); + + this.logger.info('Generated project name', { projectName }); + + this.setState({ + ...this.state, + projectName, + query, + blueprint: { + title: baseName, + projectName, + description: query, + colorPalette: ['#1e1e1e'], + frameworks: [], + plan: [] + }, + templateName: templateInfo?.templateDetails?.name || (this.projectType === 'general' ? 'scratch' : ''), + sandboxInstanceId: undefined, + commandsHistory: [], + lastPackageJson: packageJson, + sessionId: sandboxSessionId!, + hostname, + inferenceContext, + projectType: this.projectType, + behaviorType: 'agentic' + }); + + if (templateInfo && templateInfo.templateDetails.name !== 'scratch') { + // Customize template files (package.json, wrangler.jsonc, .bootstrap.js, .gitignore) + const customizedFiles = customizeTemplateFiles( + templateInfo.templateDetails.allFiles, + { + projectName, + commandsHistory: [] // Empty initially, will be updated later + } + ); + + this.logger.info('Customized template files', { + files: Object.keys(customizedFiles) + }); + + // Save customized files to git + const filesToSave = Object.entries(customizedFiles).map(([filePath, content]) => ({ + filePath, + fileContents: content, + filePurpose: 'Project configuration file' + })); + + await this.fileManager.saveGeneratedFiles( + filesToSave, + 'Initialize project configuration files' + ); + + this.logger.info('Committed customized template files to git'); + } + this.logger.info(`Agent ${this.getAgentId()} session: ${this.state.sessionId} initialized successfully`); + return this.state; + } + + async onStart(props?: Record | undefined): Promise { + await super.onStart(props); + } + + // /** + // * Override handleUserInput to just queue messages without AI processing + // * Messages will be injected into conversation after tool call completions + // */ + // async handleUserInput(userMessage: string, images?: ImageAttachment[]): Promise { + // let processedImages: ProcessedImageAttachment[] | undefined; + + // if (images && images.length > 0) { + // processedImages = await Promise.all(images.map(async (image) => { + // return await uploadImage(this.env, image, ImageType.UPLOADS); + // })); + + // this.logger.info('Uploaded images for queued request', { + // imageCount: processedImages.length + // }); + // } + + // await this.queueUserRequest(userMessage, processedImages); + + // this.logger.info('User message queued during agentic build', { + // message: userMessage, + // queueSize: this.state.pendingUserInputs.length, + // hasImages: !!processedImages && processedImages.length > 0 + // }); + // } + + /** + * Handle tool call completion - sync to conversation and check queue/compactification + */ + private async handleMessageCompletion(conversationMessage: ConversationMessage): Promise { + this.toolCallCounter++; + + 
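+ // Persist the tool/assistant message into the durable conversation history,
+ // then periodically (every COMPACTIFY_CHECK_INTERVAL calls) check whether the
+ // running history needs compaction.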
this.infrastructure.addConversationMessage(conversationMessage); + + this.logger.debug('Message synced to conversation', { + role: conversationMessage.role, + toolCallCount: this.toolCallCounter + }); + + if (this.toolCallCounter % this.COMPACTIFY_CHECK_INTERVAL === 0) { + await this.compactifyIfNeeded(); + } + } + + /** + * Compactify conversation state if needed + */ + private async compactifyIfNeeded(): Promise { + const conversationState = this.infrastructure.getConversationState(); + + const compactedHistory = await compactifyContext( + conversationState.runningHistory, + this.env, + this.getOperationOptions(), + (args) => { + this.broadcast(WebSocketMessageResponses.CONVERSATION_RESPONSE, { + message: '', + conversationId: IdGenerator.generateConversationId(), + isStreaming: false, + tool: args + }); + }, + this.logger + ); + + // Update if compactification occurred + if (compactedHistory.length !== conversationState.runningHistory.length) { + this.infrastructure.setConversationState({ + ...conversationState, + runningHistory: compactedHistory + }); + + this.logger.info('Conversation compactified', { + originalSize: conversationState.runningHistory.length, + compactedSize: compactedHistory.length + }); + } + } + + getOperationOptions(): OperationOptions { + return { + env: this.env, + agentId: this.getAgentId(), + context: GenerationContext.from(this.state, this.getTemplateDetails(), this.logger) as AgenticGenerationContext, + logger: this.logger, + inferenceContext: this.getInferenceContext(), + agent: this + }; + } + + async build(): Promise { + while (!this.state.mvpGenerated || this.state.pendingUserInputs.length > 0) { + await this.executeGeneration(); + } + } + + /** + * Execute the project generation + */ + private async executeGeneration(): Promise { + // Reset tool call counter for this build session + this.toolCallCounter = 0; + + this.logger.info('Starting project generation', { + query: this.state.query, + projectName: this.state.projectName + }); + + // Broadcast generation started + this.broadcast(WebSocketMessageResponses.GENERATION_STARTED, { + message: 'Starting project generation...', + totalFiles: 1 + }); + + const aiConversationId = IdGenerator.generateConversationId(); + + if (!this.state.mvpGenerated) { + this.broadcast(WebSocketMessageResponses.CONVERSATION_RESPONSE, { + message: 'Initializing project builder...', + conversationId: aiConversationId, + isStreaming: false + }); + } + + try { + const generator = new AgenticProjectBuilder( + this.env, + this.state.inferenceContext + ); + + const pendingUserInputs = this.fetchPendingUserRequests(); + if (pendingUserInputs.length > 0) { + this.logger.info('Processing user requests', { + requests: pendingUserInputs, + }); + let compiledMessage: Message; + const images = this.pendingUserImages; + if (images && images.length > 0) { + compiledMessage = createMultiModalUserMessage( + pendingUserInputs.join('\n'), + images.map(img => img.r2Key), + 'high' + ); + } else { + compiledMessage = createUserMessage(pendingUserInputs.join('\n')); + } + // Save the message to conversation history + this.infrastructure.addConversationMessage({ + ...compiledMessage, + conversationId: IdGenerator.generateConversationId(), + }); + } + + // Create build session for tools + const session: BuildSession = { + agent: this, + filesIndex: Object.values(this.state.generatedFilesMap), + projectType: this.state.projectType || 'app', + }; + + // Create tool renderer for UI feedback + const toolCallRenderer = buildToolCallRenderer( + (message: 
string, conversationId: string, isStreaming: boolean, tool?) => { + this.broadcast(WebSocketMessageResponses.CONVERSATION_RESPONSE, { + message, + conversationId, + isStreaming, + tool + }); + }, + aiConversationId + ); + + // Create conversation sync callback + const onToolComplete = async (toolMessage: Message) => { + await this.handleMessageCompletion({ + ...toolMessage, + conversationId: IdGenerator.generateConversationId() + }); + + // If user messages are queued, throw an AbortError to break the tool-call chain. + if (this.state.pendingUserInputs.length > 0) { + throw new AbortError('User messages are queued'); + } + }; + + const onAssistantMessage = async (message: Message) => { + const conversationMessage: ConversationMessage = { + ...message, + content: typeof message.content === 'string' ? message.content : JSON.stringify(message.content), + conversationId: IdGenerator.generateConversationId(), + }; + await this.handleMessageCompletion(conversationMessage); + }; + + const conversationState = this.infrastructure.getConversationState(); + + await generator.run( + { + query: this.state.query, + projectName: this.state.projectName, + blueprint: this.state.blueprint + }, + session, + (chunk: string) => { + this.broadcast(WebSocketMessageResponses.CONVERSATION_RESPONSE, { + message: chunk, + conversationId: aiConversationId, + isStreaming: true + }); + }, + toolCallRenderer, + onToolComplete, + onAssistantMessage, + conversationState.runningHistory + ); + // TODO: If user messages pending, start another execution run + + if (!this.state.mvpGenerated) { + // TODO: Should this be moved to a tool that the agent can call? + this.state.mvpGenerated = true; + this.logger.info('MVP generated'); + } + + // Final checks after generation completes + await this.compactifyIfNeeded(); + + this.logger.info('Project generation completed'); + + } catch (error) { + this.logger.error('Project generation failed', error); + this.broadcast(WebSocketMessageResponses.ERROR, { + error: error instanceof Error ?
error.message : 'Unknown error during generation' + }); + throw error; + } finally { + this.generationPromise = null; + this.clearAbortController(); + } + } +} diff --git a/worker/agents/core/behaviors/base.ts b/worker/agents/core/behaviors/base.ts new file mode 100644 index 00000000..5a0788f8 --- /dev/null +++ b/worker/agents/core/behaviors/base.ts @@ -0,0 +1,1652 @@ +import { Connection } from 'agents'; +import { + FileConceptType, + FileOutputType, + Blueprint, + AgenticBlueprint, + PhasicBlueprint, +} from '../../schemas'; +import { ExecuteCommandsResponse, PreviewType, RuntimeError, StaticAnalysisResponse, TemplateDetails, TemplateFile } from '../../../services/sandbox/sandboxTypes'; +import { BaseProjectState, AgenticState } from '../state'; +import { AllIssues, AgentSummary, AgentInitArgs, BehaviorType, DeploymentTarget, ProjectType } from '../types'; +import { ModelConfig } from '../../inferutils/config.types'; +import { PREVIEW_EXPIRED_ERROR, WebSocketMessageResponses } from '../../constants'; +import { ProjectSetupAssistant } from '../../assistants/projectsetup'; +import { UserConversationProcessor, RenderToolCall } from '../../operations/UserConversationProcessor'; +import { FileRegenerationOperation } from '../../operations/FileRegeneration'; +// Database schema imports removed - using zero-storage OAuth flow +import { BaseSandboxService } from '../../../services/sandbox/BaseSandboxService'; +import { getTemplateImportantFiles } from '../../../services/sandbox/utils'; +import { createScratchTemplateDetails } from '../../utils/templates'; +import { WebSocketMessageData, WebSocketMessageType } from '../../../api/websocketTypes'; +import { InferenceContext, AgentActionKey } from '../../inferutils/config.types'; +import { AGENT_CONFIG } from '../../inferutils/config'; +import { ModelConfigService } from '../../../database/services/ModelConfigService'; +import { fixProjectIssues } from '../../../services/code-fixer'; +import { FastCodeFixerOperation } from '../../operations/PostPhaseCodeFixer'; +import { looksLikeCommand, validateAndCleanBootstrapCommands } from '../../utils/common'; +import { customizeTemplateFiles, generateBootstrapScript } from '../../utils/templateCustomizer'; +import { AppService } from '../../../database'; +import { RateLimitExceededError } from 'shared/types/errors'; +import { ImageAttachment, type ProcessedImageAttachment } from '../../../types/image-attachment'; +import { OperationOptions } from '../../operations/common'; +import { ImageType, uploadImage } from 'worker/utils/images'; +import { DeepCodeDebugger } from '../../assistants/codeDebugger'; +import { DeepDebugResult } from '../types'; +import { updatePackageJson } from '../../utils/packageSyncer'; +import { ICodingAgent } from '../../services/interfaces/ICodingAgent'; +import { SimpleCodeGenerationOperation } from '../../operations/SimpleCodeGeneration'; +import { AgentComponent } from '../AgentComponent'; +import type { AgentInfrastructure } from '../AgentCore'; +import { GitVersionControl } from '../../git'; + +export interface BaseCodingOperations { + regenerateFile: FileRegenerationOperation; + fastCodeFixer: FastCodeFixerOperation; + processUserMessage: UserConversationProcessor; + simpleGenerateFiles: SimpleCodeGenerationOperation; +} + +/** + * Base class for all coding behaviors + */ +export abstract class BaseCodingBehavior + extends AgentComponent implements ICodingAgent { + protected static readonly MAX_COMMANDS_HISTORY = 10; + + protected projectSetupAssistant: ProjectSetupAssistant | 
undefined; + + protected previewUrlCache: string = ''; + protected templateDetailsCache: TemplateDetails | null = null; + + // In-memory storage for user-uploaded images (not persisted in DO state) + protected pendingUserImages: ProcessedImageAttachment[] = [] + protected generationPromise: Promise | null = null; + protected currentAbortController?: AbortController; + protected deepDebugPromise: Promise<{ transcript: string } | { error: string }> | null = null; + protected deepDebugConversationId: string | null = null; + protected operations: BaseCodingOperations = { + regenerateFile: new FileRegenerationOperation(), + fastCodeFixer: new FastCodeFixerOperation(), + processUserMessage: new UserConversationProcessor(), + simpleGenerateFiles: new SimpleCodeGenerationOperation(), + }; + + getBehavior(): BehaviorType { + return this.state.behaviorType; + } + + protected isAgenticState(state: BaseProjectState): state is AgenticState { + return state.behaviorType === 'agentic'; + } + + constructor(infrastructure: AgentInfrastructure, protected projectType: ProjectType) { + super(infrastructure); + + this.setState({ + ...this.state, + behaviorType: this.getBehavior(), + projectType: this.projectType, + }); + } + + public async initialize( + initArgs: AgentInitArgs, + ..._args: unknown[] + ): Promise { + this.logger.info("Initializing agent"); + const { templateInfo } = initArgs; + if (templateInfo) { + this.templateDetailsCache = templateInfo.templateDetails; + + await this.ensureTemplateDetails(); + } + + // Reset the logg + return this.state; + } + + onStart(_props?: Record | undefined): Promise { + return Promise.resolve(); + } + + protected async initializeAsync(): Promise { + try { + const [, setupCommands] = await Promise.all([ + this.deployToSandbox(), + this.getProjectSetupAssistant().generateSetupCommands(), + this.generateReadme() + ]); + this.logger.info("Deployment to sandbox service and initial commands predictions completed successfully"); + await this.executeCommands(setupCommands.commands); + this.logger.info("Initial commands executed successfully"); + } catch (error) { + this.logger.error("Error during async initialization:", error); + // throw error; + } + } + onStateUpdate(_state: TState, _source: "server" | Connection) {} + + async ensureTemplateDetails() { + // Skip fetching details for "scratch" baseline + if (!this.templateDetailsCache) { + if (this.state.templateName === 'scratch') { + this.logger.info('Skipping template details fetch for scratch baseline'); + return; + } + this.logger.info(`Loading template details for: ${this.state.templateName}`); + const results = await BaseSandboxService.getTemplateDetails(this.state.templateName); + if (!results.success || !results.templateDetails) { + throw new Error(`Failed to get template details for: ${this.state.templateName}`); + } + + const templateDetails = results.templateDetails; + + const customizedAllFiles = { ...templateDetails.allFiles }; + + this.logger.info('Customizing template files for older app'); + const customizedFiles = customizeTemplateFiles( + templateDetails.allFiles, + { + projectName: this.state.projectName, + commandsHistory: this.getBootstrapCommands() + } + ); + Object.assign(customizedAllFiles, customizedFiles); + + this.templateDetailsCache = { + ...templateDetails, + allFiles: customizedAllFiles + }; + this.logger.info('Template details loaded and customized'); + } + return this.templateDetailsCache; + } + + public getTemplateDetails(): TemplateDetails { + if (!this.templateDetailsCache) { + // 
Synthesize a minimal scratch template when starting from scratch + if (this.state.templateName === 'scratch') { + this.templateDetailsCache = createScratchTemplateDetails(); + return this.templateDetailsCache; + } + this.ensureTemplateDetails(); + throw new Error('Template details not loaded. Call ensureTemplateDetails() first.'); + } + return this.templateDetailsCache; + } + + /** + * Update bootstrap script when commands history changes + * Called after significant command executions + */ + private async updateBootstrapScript(commandsHistory: string[]): Promise { + if (!commandsHistory || commandsHistory.length === 0) { + return; + } + + // Use only validated commands + const bootstrapScript = generateBootstrapScript( + this.state.projectName, + commandsHistory + ); + + await this.fileManager.saveGeneratedFile( + { + filePath: '.bootstrap.js', + fileContents: bootstrapScript, + filePurpose: 'Updated bootstrap script for first-time clone setup' + }, + 'chore: Update bootstrap script with latest commands' + ); + + this.logger.info('Updated bootstrap script with commands', { + commandCount: commandsHistory.length, + commands: commandsHistory + }); + } + + getPreviewUrlCache() { + return this.previewUrlCache; + } + + getProjectSetupAssistant(): ProjectSetupAssistant { + if (this.projectSetupAssistant === undefined) { + this.projectSetupAssistant = new ProjectSetupAssistant({ + env: this.env, + agentId: this.getAgentId(), + query: this.state.query, + blueprint: this.state.blueprint, + template: this.getTemplateDetails(), + inferenceContext: this.state.inferenceContext + }); + } + return this.projectSetupAssistant; + } + + getSessionId() { + return this.deploymentManager.getSessionId(); + } + + getSandboxServiceClient(): BaseSandboxService { + return this.deploymentManager.getClient(); + } + + isCodeGenerating(): boolean { + return this.generationPromise !== null; + } + + abstract getOperationOptions(): OperationOptions; + + /** + * Gets or creates an abort controller for the current operation + * Reuses existing controller for nested operations (e.g., tool calling) + */ + protected getOrCreateAbortController(): AbortController { + // Don't reuse aborted controllers + if (this.currentAbortController && !this.currentAbortController.signal.aborted) { + return this.currentAbortController; + } + + // Create new controller in memory for new operation + this.currentAbortController = new AbortController(); + + return this.currentAbortController; + } + + /** + * Cancels the current inference operation if any + */ + public cancelCurrentInference(): boolean { + if (this.currentAbortController) { + this.logger.info('Cancelling current inference operation'); + this.currentAbortController.abort(); + this.currentAbortController = undefined; + return true; + } + return false; + } + + /** + * Clears abort controller after successful completion + */ + protected clearAbortController(): void { + this.currentAbortController = undefined; + } + + /** + * Gets inference context with abort signal + * Reuses existing abort controller for nested operations + */ + protected getInferenceContext(): InferenceContext { + const controller = this.getOrCreateAbortController(); + + return { + ...this.state.inferenceContext, + abortSignal: controller.signal, + }; + } + + async generateReadme() { + this.logger.info('Generating README.md'); + // Only generate if it doesn't exist + if (this.fileManager.fileExists('README.md')) { + this.logger.info('README.md already exists'); + return; + } + + 
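+ // Announce the file to connected clients, generate the README via the
+ // simple code-generation operation, then commit it through the file manager.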
this.broadcast(WebSocketMessageResponses.FILE_GENERATING, { + message: 'Generating README.md', + filePath: 'README.md', + filePurpose: 'Project documentation and setup instructions' + }); + + const readme = await this.operations.simpleGenerateFiles.generateReadme(this.getOperationOptions()); + + await this.fileManager.saveGeneratedFile(readme, "feat: README.md"); + + this.broadcast(WebSocketMessageResponses.FILE_GENERATED, { + message: 'README.md generated successfully', + file: readme + }); + this.logger.info('README.md generated successfully'); + } + + async setBlueprint(blueprint: Blueprint): Promise { + this.setState({ + ...this.state, + blueprint: blueprint as AgenticBlueprint | PhasicBlueprint, + }); + this.broadcast(WebSocketMessageResponses.BLUEPRINT_UPDATED, { + message: 'Blueprint updated', + updatedKeys: Object.keys(blueprint || {}) + }); + } + + getProjectType() { + return this.state.projectType; + } + + async queueUserRequest(request: string, images?: ProcessedImageAttachment[]): Promise { + this.setState({ + ...this.state, + pendingUserInputs: [...this.state.pendingUserInputs, request] + }); + if (images && images.length > 0) { + this.logger.info('Storing user images in-memory for phase generation', { + imageCount: images.length, + }); + this.pendingUserImages = [...this.pendingUserImages, ...images]; + } + } + + protected fetchPendingUserRequests(): string[] { + const inputs = this.state.pendingUserInputs; + if (inputs.length > 0) { + this.setState({ + ...this.state, + pendingUserInputs: [] + }); + } + return inputs; + } + + clearConversation(): void { + this.infrastructure.clearConversation(); + } + + getGit(): GitVersionControl { + return this.git; + } + + + /** + * State machine controller for code generation with user interaction support + * Executes phases sequentially with review cycles and proper state transitions + */ + async generateAllFiles(): Promise { + if (this.state.mvpGenerated && this.state.pendingUserInputs.length === 0) { + this.logger.info("Code generation already completed and no user inputs pending"); + return; + } + if (this.isCodeGenerating()) { + this.logger.info("Code generation already in progress"); + return; + } + this.generationPromise = this.buildWrapper(); + await this.generationPromise; + } + + private async buildWrapper() { + this.broadcast(WebSocketMessageResponses.GENERATION_STARTED, { + message: 'Starting code generation', + totalFiles: this.getTotalFiles() + }); + this.logger.info('Starting code generation', { + totalFiles: this.getTotalFiles() + }); + await this.ensureTemplateDetails(); + try { + await this.build(); + } catch (error) { + if (error instanceof RateLimitExceededError) { + this.logger.error("Error in state machine:", error); + this.broadcast(WebSocketMessageResponses.RATE_LIMIT_ERROR, { error }); + } else { + this.broadcastError("Error during generation", error); + } + } finally { + // Clear abort controller after generation completes + this.clearAbortController(); + + const appService = new AppService(this.env); + await appService.updateApp( + this.getAgentId(), + { + status: 'completed', + } + ); + this.generationPromise = null; + this.broadcast(WebSocketMessageResponses.GENERATION_COMPLETE, { + message: "Code generation and review process completed.", + instanceId: this.state.sandboxInstanceId, + }); + } + } + + /** + * Abstract method to be implemented by subclasses + * Contains the main logic for code generation and review process + */ + abstract build(): Promise + + async executeDeepDebug( + issue: string, + 
toolRenderer: RenderToolCall, + streamCb: (chunk: string) => void, + focusPaths?: string[], + ): Promise { + const debugPromise = (async () => { + try { + const previousTranscript = this.state.lastDeepDebugTranscript ?? undefined; + const operationOptions = this.getOperationOptions(); + const filesIndex = operationOptions.context.allFiles + .filter((f) => + !focusPaths?.length || + focusPaths.some((p) => f.filePath.includes(p)), + ); + + const runtimeErrors = await this.fetchRuntimeErrors(true); + + const dbg = new DeepCodeDebugger( + operationOptions.env, + operationOptions.inferenceContext, + ); + + const out = await dbg.run( + { issue, previousTranscript }, + { filesIndex, agent: this, runtimeErrors }, + streamCb, + toolRenderer, + ); + + // Save transcript for next session + this.setState({ + ...this.state, + lastDeepDebugTranscript: out, + }); + + return { success: true as const, transcript: out }; + } catch (e) { + this.logger.error('Deep debugger failed', e); + return { success: false as const, error: `Deep debugger failed: ${String(e)}` }; + } finally{ + this.deepDebugPromise = null; + this.deepDebugConversationId = null; + } + })(); + + // Store promise before awaiting + this.deepDebugPromise = debugPromise; + + return await debugPromise; + } + + /** + * Get current model configurations (defaults + user overrides) + * Used by WebSocket to provide configuration info to frontend + */ + async getModelConfigsInfo() { + const userId = this.state.inferenceContext.userId; + if (!userId) { + throw new Error('No user session available for model configurations'); + } + + try { + const modelConfigService = new ModelConfigService(this.env); + + // Get all user configs + const userConfigsRecord = await modelConfigService.getUserModelConfigs(userId); + + // Transform to match frontend interface + const agents = Object.entries(AGENT_CONFIG).map(([key, config]) => ({ + key, + name: config.name, + description: config.description + })); + + type ModelConfigInfo = ModelConfig & { isUserOverride?: boolean }; + const userConfigs: Record = {}; + const defaultConfigs: Record = {}; + + for (const [actionKey, mergedConfig] of Object.entries(userConfigsRecord)) { + if (mergedConfig.isUserOverride) { + userConfigs[actionKey] = { + name: mergedConfig.name, + max_tokens: mergedConfig.max_tokens, + temperature: mergedConfig.temperature, + reasoning_effort: mergedConfig.reasoning_effort, + fallbackModel: mergedConfig.fallbackModel, + isUserOverride: true + }; + } + + const defaultConfig = AGENT_CONFIG[actionKey as AgentActionKey]; + if (defaultConfig) { + defaultConfigs[actionKey] = { + name: defaultConfig.name, + max_tokens: defaultConfig.max_tokens, + temperature: defaultConfig.temperature, + reasoning_effort: defaultConfig.reasoning_effort, + fallbackModel: defaultConfig.fallbackModel + }; + } + } + + return { + agents, + userConfigs, + defaultConfigs + }; + } catch (error) { + this.logger.error('Error fetching model configs info:', error); + throw error; + } + } + + getTotalFiles(): number { + return this.fileManager.getGeneratedFilePaths().length + } + + getSummary(): Promise { + const summaryData = { + query: this.state.query, + generatedCode: this.fileManager.getGeneratedFiles(), + conversation: this.state.conversationMessages, + }; + return Promise.resolve(summaryData); + } + + async getFullState(): Promise { + return this.state; + } + + migrateStateIfNeeded(): void { + // no-op, only older phasic agents need this, for now. 
+ } + + getFileGenerated(filePath: string) { + return this.fileManager!.getGeneratedFile(filePath) || null; + } + + async fetchRuntimeErrors(clear: boolean = true, shouldWait: boolean = true): Promise { + if (shouldWait) { + await this.deploymentManager.waitForPreview(); + } + + try { + const errors = await this.deploymentManager.fetchRuntimeErrors(clear); + + if (errors.length > 0) { + this.broadcast(WebSocketMessageResponses.RUNTIME_ERROR_FOUND, { + errors, + message: "Runtime errors found", + count: errors.length + }); + } + + return errors; + } catch (error) { + this.logger.error("Exception fetching runtime errors:", error); + // If fetch fails, optionally redeploy in phasic mode only + if (this.state.behaviorType === 'phasic') { + this.deployToSandbox(); + } + const message = ""; + return [{ message, timestamp: new Date().toISOString(), level: 0, rawOutput: message }]; + } + } + + /** + * Perform static code analysis on the generated files + * This helps catch potential issues early in the development process + */ + async runStaticAnalysisCode(files?: string[]): Promise { + try { + const analysisResponse = await this.deploymentManager.runStaticAnalysis(files); + + const { lint, typecheck } = analysisResponse; + this.broadcast(WebSocketMessageResponses.STATIC_ANALYSIS_RESULTS, { + lint: { issues: lint.issues, summary: lint.summary }, + typecheck: { issues: typecheck.issues, summary: typecheck.summary } + }); + + return analysisResponse; + } catch (error) { + this.broadcastError("Failed to lint code", error); + return { success: false, lint: { issues: [], }, typecheck: { issues: [], } }; + } + } + + /** + * Apply deterministic code fixes for common TypeScript errors + */ + protected async applyDeterministicCodeFixes() : Promise { + try { + // Get static analysis and do deterministic fixes + const staticAnalysis = await this.runStaticAnalysisCode(); + if (staticAnalysis.typecheck.issues.length == 0) { + this.logger.info("No typecheck issues found, skipping deterministic fixes"); + return staticAnalysis; // So that static analysis is not repeated again + } + const typeCheckIssues = staticAnalysis.typecheck.issues; + this.broadcast(WebSocketMessageResponses.DETERMINISTIC_CODE_FIX_STARTED, { + message: `Attempting to fix ${typeCheckIssues.length} TypeScript issues using deterministic code fixer`, + issues: typeCheckIssues + }); + + this.logger.info(`Attempting to fix ${typeCheckIssues.length} TypeScript issues using deterministic code fixer`); + const allFiles = this.fileManager.getAllFiles(); + + const fixResult = fixProjectIssues( + allFiles.map(file => ({ + filePath: file.filePath, + fileContents: file.fileContents, + filePurpose: '' + })), + typeCheckIssues + ); + + this.broadcast(WebSocketMessageResponses.DETERMINISTIC_CODE_FIX_COMPLETED, { + message: `Fixed ${typeCheckIssues.length} TypeScript issues using deterministic code fixer`, + issues: typeCheckIssues, + fixResult + }); + + if (fixResult) { + // If there are unfixable issues but of type TS2307, extract external module names and install them + if (fixResult.unfixableIssues.length > 0) { + const modulesNotFound = fixResult.unfixableIssues.filter(issue => issue.issueCode === 'TS2307'); + // Reason is of the form: External package "xyz" should be handled by package manager + const moduleNames = modulesNotFound.flatMap(issue => { + const match = issue.reason.match(/External package ["'](.+?)["']/); + const name = match?.[1]; + return (typeof name === 'string' && name.trim().length > 0 && !name.startsWith('@shared')) ? 
[name] : []; + }); + if (moduleNames.length > 0) { + const installCommands = moduleNames.map(moduleName => `bun install ${moduleName}`); + await this.executeCommands(installCommands, false); + + this.logger.info(`Deterministic code fixer installed missing modules: ${moduleNames.join(', ')}`); + } else { + this.logger.info(`Deterministic code fixer detected no external modules to install from unfixable TS2307 issues`); + } + } + if (fixResult.modifiedFiles.length > 0) { + this.logger.info("Applying deterministic fixes to files, Fixes: ", JSON.stringify(fixResult, null, 2)); + const fixedFiles = fixResult.modifiedFiles.map(file => ({ + filePath: file.filePath, + filePurpose: allFiles.find(f => f.filePath === file.filePath)?.filePurpose || '', + fileContents: file.fileContents + })); + await this.fileManager.saveGeneratedFiles(fixedFiles, "fix: applied deterministic fixes"); + + await this.deployToSandbox(fixedFiles, false, "fix: applied deterministic fixes"); + this.logger.info("Deployed deterministic fixes to sandbox"); + } + } + this.logger.info(`Applied deterministic code fixes: ${JSON.stringify(fixResult, null, 2)}`); + } catch (error) { + this.broadcastError('Deterministic code fixer failed', error); + } + // return undefined; + } + + async fetchAllIssues(resetIssues: boolean = false): Promise { + const [runtimeErrors, staticAnalysis] = await Promise.all([ + this.fetchRuntimeErrors(resetIssues), + this.runStaticAnalysisCode() + ]); + this.logger.info("Fetched all issues:", JSON.stringify({ runtimeErrors, staticAnalysis })); + + return { runtimeErrors, staticAnalysis }; + } + + async updateProjectName(newName: string): Promise { + try { + const valid = /^[a-z0-9-_]{3,50}$/.test(newName); + if (!valid) return false; + const updatedBlueprint = { ...this.state.blueprint, projectName: newName }; + this.setState({ + ...this.state, + blueprint: updatedBlueprint + }); + let ok = true; + if (this.state.sandboxInstanceId) { + try { + ok = await this.getSandboxServiceClient().updateProjectName(this.state.sandboxInstanceId, newName); + } catch (_) { + ok = false; + } + } + try { + const appService = new AppService(this.env); + const dbOk = await appService.updateApp(this.getAgentId(), { title: newName }); + ok = ok && dbOk; + } catch (error) { + this.logger.error('Error updating project name in database:', error); + ok = false; + } + this.broadcast(WebSocketMessageResponses.PROJECT_NAME_UPDATED, { + message: 'Project name updated', + projectName: newName + }); + return ok; + } catch (error) { + this.logger.error('Error updating project name:', error); + return false; + } + } + + /** + * Update user-facing blueprint fields + * Only allows updating safe, cosmetic fields - not internal generation state + */ + async updateBlueprint(patch: Partial): Promise { + // Fields that are safe to update after generation starts + // Excludes: initialPhase (breaks phasic generation) + const safeUpdatableFields = new Set([ + 'title', + 'description', + 'detailedDescription', + 'colorPalette', + 'views', + 'userFlow', + 'dataFlow', + 'architecture', + 'pitfalls', + 'frameworks', + 'implementationRoadmap' + ]); + + // Filter to only safe fields + const filtered: Record = {}; + for (const [key, value] of Object.entries(patch)) { + if (safeUpdatableFields.has(key) && value !== undefined) { + filtered[key] = value; + } + } + + // Agentic: allow initializing plan if not set yet (first-time plan initialization only) + if (this.isAgenticState(this.state)) { + const currentPlan = this.state.blueprint?.plan; + const 
patchPlan = 'plan' in patch ? patch.plan : undefined; + if (Array.isArray(patchPlan) && (!Array.isArray(currentPlan) || currentPlan.length === 0)) { + filtered['plan'] = patchPlan; + } + } + + // projectName requires sandbox update, handle separately + if ('projectName' in patch && typeof patch.projectName === 'string') { + await this.updateProjectName(patch.projectName); + } + + // Merge and update state + const updated = { ...this.state.blueprint, ...filtered } as Blueprint; + this.setState({ + ...this.state, + blueprint: updated + }); + + this.broadcast(WebSocketMessageResponses.BLUEPRINT_UPDATED, { + message: 'Blueprint updated', + updatedKeys: Object.keys(filtered) + }); + + return updated; + } + + // ===== Debugging helpers for assistants ===== + listFiles(): FileOutputType[] { + return this.fileManager.getAllRelevantFiles(); + } + + async readFiles(paths: string[]): Promise<{ files: { path: string; content: string }[] }> { + const results: { path: string; content: string }[] = []; + const notFoundInFileManager: string[] = []; + + // First, try to read from FileManager (template + generated files) + for (const path of paths) { + const file = this.fileManager.getFile(path); + if (file) { + results.push({ path, content: file.fileContents }); + } else { + notFoundInFileManager.push(path); + } + } + + // If some files not found in FileManager and sandbox exists, try sandbox + if (notFoundInFileManager.length > 0 && this.state.sandboxInstanceId) { + const resp = await this.getSandboxServiceClient().getFiles( + this.state.sandboxInstanceId, + notFoundInFileManager + ); + if (resp.success) { + results.push(...resp.files.map(f => ({ + path: f.filePath, + content: f.fileContents + }))); + } + } + + return { files: results }; + } + + async execCommands(commands: string[], shouldSave: boolean, timeout?: number): Promise { + const { sandboxInstanceId } = this.state; + if (!sandboxInstanceId) { + return { success: false, results: [], error: 'No sandbox instance' }; + } + const result = await this.getSandboxServiceClient().executeCommands(sandboxInstanceId, commands, timeout); + if (shouldSave) { + this.saveExecutedCommands(commands); + } + return result; + } + + /** + * Regenerate a file to fix identified issues + * Retries up to 3 times before giving up + */ + async regenerateFile(file: FileOutputType, issues: string[], retryIndex: number = 0) { + this.broadcast(WebSocketMessageResponses.FILE_REGENERATING, { + message: `Regenerating file: ${file.filePath}`, + filePath: file.filePath, + original_issues: issues, + }); + + const result = await this.operations.regenerateFile.execute( + {file, issues, retryIndex}, + this.getOperationOptions() + ); + + const fileState = await this.fileManager.saveGeneratedFile(result); + + this.broadcast(WebSocketMessageResponses.FILE_REGENERATED, { + message: `Regenerated file: ${file.filePath}`, + file: fileState, + original_issues: issues, + }); + + return fileState; + } + + async regenerateFileByPath(path: string, issues: string[]): Promise<{ path: string; diff: string }> { + const { sandboxInstanceId } = this.state; + if (!sandboxInstanceId) { + throw new Error('No sandbox instance available'); + } + // Prefer local file manager; fallback to sandbox + let fileContents = ''; + let filePurpose = ''; + try { + const fmFile = this.fileManager.getFile(path); + if (fmFile) { + fileContents = fmFile.fileContents; + filePurpose = fmFile.filePurpose || ''; + } else { + const resp = await this.getSandboxServiceClient().getFiles(sandboxInstanceId, [path]); + const f = 
resp.success ? resp.files.find(f => f.filePath === path) : undefined; + if (!f) throw new Error(resp.error || `File not found: ${path}`); + fileContents = f.fileContents; + } + } catch (e) { + throw new Error(`Failed to read file for regeneration: ${String(e)}`); + } + + const regenerated = await this.regenerateFile({ filePath: path, fileContents, filePurpose }, issues, 0); + // Persist to sandbox instance + await this.getSandboxServiceClient().writeFiles(sandboxInstanceId, [{ filePath: regenerated.filePath, fileContents: regenerated.fileContents }], `Deep debugger fix: ${path}`); + return { path, diff: regenerated.lastDiff }; + } + + async generateFiles( + phaseName: string, + phaseDescription: string, + requirements: string[], + files: FileConceptType[] + ): Promise<{ files: Array<{ path: string; purpose: string; diff: string }> }> { + this.logger.info('Generating files for deep debugger', { + phaseName, + requirementsCount: requirements.length, + filesCount: files.length + }); + + // Broadcast file generation started + this.broadcast(WebSocketMessageResponses.PHASE_IMPLEMENTING, { + message: `Generating files: ${phaseName}`, + phaseName + }); + + const operation = new SimpleCodeGenerationOperation(); + const result = await operation.execute( + { + phaseName, + phaseDescription, + requirements, + files, + fileGeneratingCallback: (filePath: string, filePurpose: string) => { + this.broadcast(WebSocketMessageResponses.FILE_GENERATING, { + message: `Generating file: ${filePath}`, + filePath, + filePurpose + }); + }, + fileChunkGeneratedCallback: (filePath: string, chunk: string, format: 'full_content' | 'unified_diff') => { + this.broadcast(WebSocketMessageResponses.FILE_CHUNK_GENERATED, { + message: `Generating file: ${filePath}`, + filePath, + chunk, + format + }); + }, + fileClosedCallback: (file, message) => { + this.broadcast(WebSocketMessageResponses.FILE_GENERATED, { + message, + file + }); + } + }, + this.getOperationOptions() + ); + + await this.fileManager.saveGeneratedFiles( + result.files, + `feat: ${phaseName}\n\n${phaseDescription}` + ); + + this.logger.info('Files generated and saved', { + fileCount: result.files.length + }); + + const savedFiles = result.files.map(f => { + const fileState = this.state.generatedFilesMap[f.filePath]; + return { + path: f.filePath, + purpose: f.filePurpose || '', + diff: fileState?.lastDiff || '' + }; + }); + + return { files: savedFiles }; + } + + // A wrapper for LLM tool to deploy to sandbox + async deployPreview(clearLogs: boolean = true, forceRedeploy: boolean = false): Promise { + const response = await this.deployToSandbox([], forceRedeploy, undefined, clearLogs); + if (response && response.previewURL) { + this.broadcast(WebSocketMessageResponses.PREVIEW_FORCE_REFRESH, {}); + return `Deployment successful: ${response.previewURL}`; + } + return `Failed to deploy: ${response?.tunnelURL}`; + } + + async deployToSandbox(files: FileOutputType[] = [], redeploy: boolean = false, commitMessage?: string, clearLogs: boolean = false): Promise { + // Call deployment manager with callbacks for broadcasting at the right times + const result = await this.deploymentManager.deployToSandbox( + files, + redeploy, + commitMessage, + clearLogs, + { + onStarted: (data) => { + this.broadcast(WebSocketMessageResponses.DEPLOYMENT_STARTED, data); + }, + onCompleted: (data) => { + this.broadcast(WebSocketMessageResponses.DEPLOYMENT_COMPLETED, data); + }, + onError: (data) => { + this.broadcast(WebSocketMessageResponses.DEPLOYMENT_FAILED, data); + }, + 
onAfterSetupCommands: async () => { + // Sync package.json after setup commands (includes dependency installs) + await this.syncPackageJsonFromSandbox(); + } + } + ); + + return result; + } + + /** + * Deploy the generated code to Cloudflare Workers + */ + async deployToCloudflare(target: DeploymentTarget = 'platform'): Promise<{ deploymentUrl?: string; workersUrl?: string } | null> { + try { + // Ensure sandbox instance exists first + if (!this.state.sandboxInstanceId) { + this.logger.info('No sandbox instance, deploying to sandbox first'); + await this.deployToSandbox(); + + if (!this.state.sandboxInstanceId) { + this.logger.error('Failed to deploy to sandbox service'); + this.broadcast(WebSocketMessageResponses.CLOUDFLARE_DEPLOYMENT_ERROR, { + message: 'Deployment failed: Failed to deploy to sandbox service', + error: 'Sandbox service unavailable' + }); + return null; + } + } + + // Call service - handles orchestration, callbacks for broadcasting + const result = await this.deploymentManager.deployToCloudflare({ + target, + callbacks: { + onStarted: (data) => { + this.broadcast(WebSocketMessageResponses.CLOUDFLARE_DEPLOYMENT_STARTED, data); + }, + onCompleted: (data) => { + this.broadcast(WebSocketMessageResponses.CLOUDFLARE_DEPLOYMENT_COMPLETED, data); + }, + onError: (data) => { + this.broadcast(WebSocketMessageResponses.CLOUDFLARE_DEPLOYMENT_ERROR, data); + }, + onPreviewExpired: () => { + // Re-deploy sandbox and broadcast error + this.deployToSandbox(); + this.broadcast(WebSocketMessageResponses.CLOUDFLARE_DEPLOYMENT_ERROR, { + message: PREVIEW_EXPIRED_ERROR, + error: PREVIEW_EXPIRED_ERROR + }); + } + } + }); + + // Update database with deployment ID if successful + if (result.deploymentUrl && result.deploymentId) { + const appService = new AppService(this.env); + await appService.updateDeploymentId( + this.getAgentId(), + result.deploymentId + ); + } + + return result.deploymentUrl ? { deploymentUrl: result.deploymentUrl } : null; + + } catch (error) { + this.logger.error('Cloudflare deployment error:', error); + this.broadcast(WebSocketMessageResponses.CLOUDFLARE_DEPLOYMENT_ERROR, { + message: 'Deployment failed', + error: error instanceof Error ? 
error.message : String(error) + }); + return null; + } + } + + async importTemplate(templateName: string): Promise<{ templateName: string; filesImported: number; files: TemplateFile[] }> { + this.logger.info(`Importing template into project: ${templateName}`); + + // Update state + this.setState({ + ...this.state, + templateName: templateName, + }); + + this.templateDetailsCache = null; // Clear template details cache + const templateDetails = await this.ensureTemplateDetails(); + if (!templateDetails) { + throw new Error(`Failed to get template details for: ${templateName}`); + } + + this.setState({ + ...this.state, + lastPackageJson: templateDetails.allFiles['package.json'] || this.state.lastPackageJson, + }); + + // Get important files for return value + const importantFiles = getTemplateImportantFiles(templateDetails); + + return { + templateName: templateDetails.name, + filesImported: Object.keys(templateDetails.allFiles).length, + files: importantFiles + }; + } + + async waitForGeneration(): Promise { + if (this.generationPromise) { + try { + await this.generationPromise; + this.logger.info("Code generation completed successfully"); + } catch (error) { + this.logger.error("Error during code generation:", error); + } + } else { + this.logger.error("No generation process found"); + } + } + + isDeepDebugging(): boolean { + return this.deepDebugPromise !== null; + } + + getDeepDebugSessionState(): { conversationId: string } | null { + if (this.deepDebugConversationId && this.deepDebugPromise) { + return { conversationId: this.deepDebugConversationId }; + } + return null; + } + + async waitForDeepDebug(): Promise { + if (this.deepDebugPromise) { + try { + await this.deepDebugPromise; + this.logger.info("Deep debug session completed successfully"); + } catch (error) { + this.logger.error("Error during deep debug session:", error); + } finally { + // Clear promise after waiting completes + this.deepDebugPromise = null; + } + } + } + + protected async onProjectUpdate(message: string): Promise { + this.setState({ + ...this.state, + projectUpdatesAccumulator: [...this.state.projectUpdatesAccumulator, message] + }); + } + + protected async getAndResetProjectUpdates() { + const projectUpdates = this.state.projectUpdatesAccumulator || []; + this.setState({ + ...this.state, + projectUpdatesAccumulator: [] + }); + return projectUpdates; + } + + public broadcast(msg: T, data?: WebSocketMessageData): void { + if (this.operations.processUserMessage.isProjectUpdateType(msg)) { + let message = msg as string; + if (data && 'message' in data) { + message = (data as { message: string }).message; + } + this.onProjectUpdate(message); + } + super.broadcast(msg, data); + } + + protected getBootstrapCommands() { + const bootstrapCommands = this.state.commandsHistory || []; + // Validate, deduplicate, and clean + const { validCommands } = validateAndCleanBootstrapCommands(bootstrapCommands); + return validCommands; + } + + protected async saveExecutedCommands(commands: string[]) { + this.logger.info('Saving executed commands', { commands }); + + // Merge with existing history + const mergedCommands = [...(this.state.commandsHistory || []), ...commands]; + + // Validate, deduplicate, and clean + const { validCommands, invalidCommands, deduplicated } = validateAndCleanBootstrapCommands(mergedCommands); + + // Log what was filtered out + if (invalidCommands.length > 0 || deduplicated > 0) { + this.logger.warn('[commands] Bootstrap commands cleaned', { + invalidCommands, + invalidCount: invalidCommands.length, + 
deduplicatedCount: deduplicated, + finalCount: validCommands.length + }); + } + + // Update state with cleaned commands + this.setState({ + ...this.state, + commandsHistory: validCommands + }); + + // Update bootstrap script with validated commands + await this.updateBootstrapScript(validCommands); + + // Sync package.json if any dependency-modifying commands were executed + const hasDependencyCommands = commands.some(cmd => + cmd.includes('install') || + cmd.includes(' add ') || + cmd.includes('remove') || + cmd.includes('uninstall') + ); + + if (hasDependencyCommands) { + this.logger.info('Dependency commands executed, syncing package.json from sandbox'); + await this.syncPackageJsonFromSandbox(); + } + } + + /** + * Execute commands with retry logic + * Chunks commands and retries failed ones with AI assistance + */ + protected async executeCommands(commands: string[], shouldRetry: boolean = true, chunkSize: number = 5): Promise { + const state = this.state; + if (!state.sandboxInstanceId) { + this.logger.warn('No sandbox instance available for executing commands'); + return; + } + + // Sanitize and prepare commands + commands = commands.join('\n').split('\n').filter(cmd => cmd.trim() !== '').filter(cmd => looksLikeCommand(cmd) && !cmd.includes(' undefined')); + if (commands.length === 0) { + this.logger.warn("No commands to execute"); + return; + } + + commands = commands.map(cmd => cmd.trim().replace(/^\s*-\s*/, '').replace(/^npm/, 'bun')); + this.logger.info(`AI suggested ${commands.length} commands to run: ${commands.join(", ")}`); + + // Remove duplicate commands + commands = Array.from(new Set(commands)); + + // Execute in chunks + const commandChunks = []; + for (let i = 0; i < commands.length; i += chunkSize) { + commandChunks.push(commands.slice(i, i + chunkSize)); + } + + const successfulCommands: string[] = []; + + for (const chunk of commandChunks) { + // Retry failed commands up to 3 times + let currentChunk = chunk; + let retryCount = 0; + const maxRetries = shouldRetry ? 3 : 1; + + while (currentChunk.length > 0 && retryCount < maxRetries) { + try { + this.broadcast(WebSocketMessageResponses.COMMAND_EXECUTING, { + message: retryCount > 0 ? 
`Retrying commands (attempt ${retryCount + 1}/${maxRetries})` : "Executing commands", + commands: currentChunk + }); + + const resp = await this.getSandboxServiceClient().executeCommands( + state.sandboxInstanceId, + currentChunk + ); + if (!resp.results || !resp.success) { + this.logger.error('Failed to execute commands', { response: resp }); + // Check if instance is still running + const status = await this.getSandboxServiceClient().getInstanceStatus(state.sandboxInstanceId); + if (!status.success || !status.isHealthy) { + this.logger.error(`Instance ${state.sandboxInstanceId} is no longer running`); + return; + } + break; + } + + // Process results + const successful = resp.results.filter(r => r.success); + const failures = resp.results.filter(r => !r.success); + + // Track successful commands + if (successful.length > 0) { + const successfulCmds = successful.map(r => r.command); + this.logger.info(`Successfully executed ${successful.length} commands: ${successfulCmds.join(", ")}`); + successfulCommands.push(...successfulCmds); + } + + // If all succeeded, move to next chunk + if (failures.length === 0) { + this.logger.info(`All commands in chunk executed successfully`); + break; + } + + // Handle failures + const failedCommands = failures.map(r => r.command); + this.logger.warn(`${failures.length} commands failed: ${failedCommands.join(", ")}`); + + // Only retry if shouldRetry is true + if (!shouldRetry) { + break; + } + + retryCount++; + + // For install commands, try AI regeneration + const failedInstallCommands = failedCommands.filter(cmd => + cmd.startsWith("bun") || cmd.startsWith("npm") || cmd.includes("install") + ); + + if (failedInstallCommands.length > 0 && retryCount < maxRetries) { + // Use AI to suggest alternative commands + const newCommands = await this.getProjectSetupAssistant().generateSetupCommands( + `The following install commands failed: ${JSON.stringify(failures, null, 2)}. 
Please suggest alternative commands.` + ); + + if (newCommands?.commands && newCommands.commands.length > 0) { + this.logger.info(`AI suggested ${newCommands.commands.length} alternative commands`); + this.broadcast(WebSocketMessageResponses.COMMAND_EXECUTING, { + message: "Executing regenerated commands", + commands: newCommands.commands + }); + currentChunk = newCommands.commands.filter(looksLikeCommand); + } else { + this.logger.warn('AI could not generate alternative commands'); + currentChunk = []; + } + } else { + // No retry needed for non-install commands + currentChunk = []; + } + } catch (error) { + this.logger.error('Error executing commands:', error); + // Stop retrying on error + break; + } + } + } + + // Record command execution history + const failedCommands = commands.filter(cmd => !successfulCommands.includes(cmd)); + + if (failedCommands.length > 0) { + this.broadcastError('Failed to execute commands', new Error(failedCommands.join(", "))); + } else { + this.logger.info(`All commands executed successfully: ${successfulCommands.join(", ")}`); + } + + this.saveExecutedCommands(successfulCommands); + } + + /** + * Sync package.json from sandbox to agent's git repository + * Called after install/add/remove commands to keep dependencies in sync + */ + protected async syncPackageJsonFromSandbox(): Promise { + try { + this.logger.info('Fetching current package.json from sandbox'); + const results = await this.readFiles(['package.json']); + if (!results || !results.files || results.files.length === 0) { + this.logger.warn('Failed to fetch package.json from sandbox', { results }); + return; + } + const packageJsonContent = results.files[0].content; + + const { updated, packageJson } = updatePackageJson(this.state.lastPackageJson, packageJsonContent); + if (!updated) { + this.logger.info('package.json has not changed, skipping sync'); + return; + } + // Update state with latest package.json + this.setState({ + ...this.state, + lastPackageJson: packageJson + }); + + // Commit to git repository + const fileState = await this.fileManager.saveGeneratedFile( + { + filePath: 'package.json', + fileContents: packageJson, + filePurpose: 'Project dependencies and configuration' + }, + 'chore: sync package.json dependencies from sandbox' + ); + + this.logger.info('Successfully synced package.json to git', { + filePath: fileState.filePath, + }); + + // Broadcast update to clients + this.broadcast(WebSocketMessageResponses.FILE_GENERATED, { + message: 'Synced package.json from sandbox', + file: fileState + }); + + } catch (error) { + this.logger.error('Failed to sync package.json from sandbox', error); + // Non-critical error - don't throw, just log + } + } + + async getLogs(_reset?: boolean, durationSeconds?: number): Promise { + if (!this.state.sandboxInstanceId) { + throw new Error('Cannot get logs: No sandbox instance available'); + } + + const response = await this.getSandboxServiceClient().getLogs(this.state.sandboxInstanceId, _reset, durationSeconds); + if (response.success) { + return `STDOUT: ${response.logs.stdout}\nSTDERR: ${response.logs.stderr}`; + } else { + return `Failed to get logs, ${response.error}`; + } + } + + /** + * Delete files from the file manager + */ + async deleteFiles(filePaths: string[]) { + const deleteCommands: string[] = []; + for (const filePath of filePaths) { + deleteCommands.push(`rm -rf ${filePath}`); + } + // Remove the files from file manager + this.fileManager.deleteFiles(filePaths); + try { + await this.executeCommands(deleteCommands, false); + 
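+            // Sandbox deletion is best-effort: any failure is caught and logged by the catch block below and not rethrown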
this.logger.info(`Deleted ${filePaths.length} files: ${filePaths.join(", ")}`);
+        } catch (error) {
+            this.logger.error('Error deleting files:', error);
+        }
+    }
+
+    /**
+     * Handle user input during conversational code generation
+     * Processes user messages and updates pendingUserInputs state
+     */
+    async handleUserInput(userMessage: string, images?: ImageAttachment[]): Promise<void> {
+        try {
+            this.logger.info('Processing user input message', {
+                messageLength: userMessage.length,
+                pendingInputsCount: this.state.pendingUserInputs.length,
+                hasImages: !!images && images.length > 0,
+                imageCount: images?.length || 0
+            });
+
+            // Ensure template details are loaded before processing
+            await this.ensureTemplateDetails();
+
+            // Just fetch runtime errors
+            const errors = await this.fetchRuntimeErrors(false, false);
+            const projectUpdates = await this.getAndResetProjectUpdates();
+            this.logger.info('Passing context to user conversation processor', { errors, projectUpdates });
+
+            const conversationState = this.infrastructure.getConversationState();
+            // If there are images, upload them and pass the URLs to the conversation processor
+            let uploadedImages: ProcessedImageAttachment[] = [];
+            if (images) {
+                uploadedImages = await Promise.all(images.map(async (image) => {
+                    return await uploadImage(this.env, image, ImageType.UPLOADS);
+                }));
+
+                this.logger.info('Uploaded images', { uploadedImages });
+            }
+
+            // Process the user message using conversational assistant
+            const conversationalResponse = await this.operations.processUserMessage.execute(
+                {
+                    userMessage,
+                    conversationState,
+                    conversationResponseCallback: (
+                        message: string,
+                        conversationId: string,
+                        isStreaming: boolean,
+                        tool?: { name: string; status: 'start' | 'success' | 'error'; args?: Record<string, unknown> }
+                    ) => {
+                        // Track conversationId when deep_debug starts
+                        if (tool?.name === 'deep_debug' && tool.status === 'start') {
+                            this.deepDebugConversationId = conversationId;
+                        }
+
+                        this.broadcast(WebSocketMessageResponses.CONVERSATION_RESPONSE, {
+                            message,
+                            conversationId,
+                            isStreaming,
+                            tool,
+                        });
+                    },
+                    errors,
+                    projectUpdates,
+                    images: uploadedImages
+                },
+                this.getOperationOptions()
+            );
+
+            const { conversationResponse, conversationState: newConversationState } = conversationalResponse;
+            this.logger.info('User input processed successfully', {
+                responseLength: conversationResponse.userResponse.length,
+            });
+
+            this.infrastructure.setConversationState(newConversationState);
+        } catch (error) {
+            this.logger.error('Error processing user input', error);
+            throw error;
+        }
+    }
+
+    /**
+     * Capture screenshot of the given URL using Cloudflare Browser Rendering REST API
+     */
+    public async captureScreenshot(
+        url: string,
+        viewport: { width: number; height: number } = { width: 1280, height: 720 }
+    ): Promise<string> {
+        if (!this.env.DB || !this.getAgentId()) {
+            const error = 'Cannot capture screenshot: DB or agentId not available';
+            this.logger.warn(error);
+            this.broadcast(WebSocketMessageResponses.SCREENSHOT_CAPTURE_ERROR, {
+                error,
+                configurationError: true
+            });
+            throw new Error(error);
+        }
+
+        if (!url) {
+            const error = 'URL is required for screenshot capture';
+            this.broadcast(WebSocketMessageResponses.SCREENSHOT_CAPTURE_ERROR, {
+                error,
+                url,
+                viewport
+            });
+            throw new Error(error);
+        }
+
+        this.logger.info('Capturing screenshot via REST API', { url, viewport });
+
+        // Notify start of screenshot capture
+        this.broadcast(WebSocketMessageResponses.SCREENSHOT_CAPTURE_STARTED, {
+            message: `Capturing screenshot of ${url}`,
url, + viewport + }); + + try { + // Use Cloudflare Browser Rendering REST API + const apiUrl = `https://api.cloudflare.com/client/v4/accounts/${this.env.CLOUDFLARE_ACCOUNT_ID}/browser-rendering/snapshot`; + + const response = await fetch(apiUrl, { + method: 'POST', + headers: { + 'Authorization': `Bearer ${this.env.CLOUDFLARE_API_TOKEN}`, + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + url: url, + viewport: viewport, + gotoOptions: { + waitUntil: 'networkidle0', + timeout: 10000 + }, + screenshotOptions: { + fullPage: false, + type: 'png' + } + }), + }); + + if (!response.ok) { + const errorText = await response.text(); + const error = `Browser Rendering API failed: ${response.status} - ${errorText}`; + this.broadcast(WebSocketMessageResponses.SCREENSHOT_CAPTURE_ERROR, { + error, + url, + viewport, + statusCode: response.status, + statusText: response.statusText + }); + throw new Error(error); + } + + const result = await response.json() as { + success: boolean; + result: { + screenshot: string; // base64 encoded + content: string; // HTML content + }; + }; + + if (!result.success || !result.result.screenshot) { + const error = 'Browser Rendering API succeeded but no screenshot returned'; + this.broadcast(WebSocketMessageResponses.SCREENSHOT_CAPTURE_ERROR, { + error, + url, + viewport, + apiResponse: result + }); + throw new Error(error); + } + + // Get base64 screenshot data + const base64Screenshot = result.result.screenshot; + const screenshot: ImageAttachment = { + id: this.getAgentId(), + filename: 'latest.png', + mimeType: 'image/png', + base64Data: base64Screenshot + }; + const uploadedImage = await uploadImage(this.env, screenshot, ImageType.SCREENSHOTS); + + // Persist in database + try { + const appService = new AppService(this.env); + await appService.updateAppScreenshot(this.getAgentId(), uploadedImage.publicUrl); + } catch (dbError) { + const error = `Database update failed: ${dbError instanceof Error ? dbError.message : 'Unknown database error'}`; + this.broadcast(WebSocketMessageResponses.SCREENSHOT_CAPTURE_ERROR, { + error, + url, + viewport, + screenshotCaptured: true, + databaseError: true + }); + throw new Error(error); + } + + this.logger.info('Screenshot captured and stored successfully', { + url, + storage: uploadedImage.publicUrl.startsWith('data:') ? 'database' : (uploadedImage.publicUrl.includes('/api/screenshots/') ? 'r2' : 'images'), + length: base64Screenshot.length + }); + + // Notify successful screenshot capture + this.broadcast(WebSocketMessageResponses.SCREENSHOT_CAPTURE_SUCCESS, { + message: `Successfully captured screenshot of ${url}`, + url, + viewport, + screenshotSize: base64Screenshot.length, + timestamp: new Date().toISOString() + }); + + return uploadedImage.publicUrl; + + } catch (error) { + this.logger.error('Failed to capture screenshot via REST API:', error); + + // Only broadcast if error wasn't already broadcast above + const errorMessage = error instanceof Error ? error.message : 'Unknown error'; + if (!errorMessage.includes('Browser Rendering API') && !errorMessage.includes('Database update failed')) { + this.broadcast(WebSocketMessageResponses.SCREENSHOT_CAPTURE_ERROR, { + error: errorMessage, + url, + viewport + }); + } + + throw new Error(`Screenshot capture failed: ${error instanceof Error ? 
error.message : 'Unknown error'}`); + } + } +} diff --git a/worker/agents/core/behaviors/phasic.ts b/worker/agents/core/behaviors/phasic.ts new file mode 100644 index 00000000..14fd5e3a --- /dev/null +++ b/worker/agents/core/behaviors/phasic.ts @@ -0,0 +1,856 @@ +import { + PhaseConceptGenerationSchemaType, + PhaseConceptType, + FileConceptType, + FileOutputType, + PhaseImplementationSchemaType, +} from '../../schemas'; +import { StaticAnalysisResponse } from '../../../services/sandbox/sandboxTypes'; +import { CurrentDevState, MAX_PHASES, PhasicState } from '../state'; +import { AllIssues, AgentInitArgs, PhaseExecutionResult, UserContext } from '../types'; +import { ModelConfig } from '../../inferutils/config.types'; +import { WebSocketMessageResponses } from '../../constants'; +import { UserConversationProcessor } from '../../operations/UserConversationProcessor'; +// import { WebSocketBroadcaster } from '../services/implementations/WebSocketBroadcaster'; +import { GenerationContext, PhasicGenerationContext } from '../../domain/values/GenerationContext'; +import { IssueReport } from '../../domain/values/IssueReport'; +import { PhaseImplementationOperation } from '../../operations/PhaseImplementation'; +import { FileRegenerationOperation } from '../../operations/FileRegeneration'; +import { PhaseGenerationOperation } from '../../operations/PhaseGeneration'; +// Database schema imports removed - using zero-storage OAuth flow +import { AgentActionKey } from '../../inferutils/config.types'; +import { AGENT_CONFIG } from '../../inferutils/config'; +import { ModelConfigService } from '../../../database/services/ModelConfigService'; +import { FastCodeFixerOperation } from '../../operations/PostPhaseCodeFixer'; +import { customizePackageJson, customizeTemplateFiles, generateProjectName } from '../../utils/templateCustomizer'; +import { generateBlueprint } from '../../planning/blueprint'; +import { RateLimitExceededError } from 'shared/types/errors'; +import { ImageAttachment, type ProcessedImageAttachment } from '../../../types/image-attachment'; +import { OperationOptions } from '../../operations/common'; +import { ConversationMessage } from '../../inferutils/common'; +import { generateNanoId } from 'worker/utils/idGenerator'; +import { IdGenerator } from '../../utils/idGenerator'; +import { BaseCodingBehavior, BaseCodingOperations } from './base'; +import { ICodingAgent } from '../../services/interfaces/ICodingAgent'; +import { SimpleCodeGenerationOperation } from '../../operations/SimpleCodeGeneration'; +import { StateMigration } from '../stateMigration'; + +interface PhasicOperations extends BaseCodingOperations { + generateNextPhase: PhaseGenerationOperation; + implementPhase: PhaseImplementationOperation; +} + +/** + * PhasicCodingBehavior - Deterministically orchestrated agent + * + * Manages the lifecycle of code generation including: + * - Blueprint, phase generation, phase implementation, review cycles orchestrations + * - File streaming with WebSocket updates + * - Code validation and error correction + * - Deployment to sandbox service + */ +export class PhasicCodingBehavior extends BaseCodingBehavior implements ICodingAgent { + protected static readonly PROJECT_NAME_PREFIX_MAX_LENGTH = 20; + + protected operations: PhasicOperations = { + regenerateFile: new FileRegenerationOperation(), + fastCodeFixer: new FastCodeFixerOperation(), + processUserMessage: new UserConversationProcessor(), + simpleGenerateFiles: new SimpleCodeGenerationOperation(), + generateNextPhase: new 
PhaseGenerationOperation(), + implementPhase: new PhaseImplementationOperation(), + }; + + /** + * Initialize the code generator with project blueprint and template + * Sets up services and begins deployment process + */ + async initialize( + initArgs: AgentInitArgs, + ..._args: unknown[] + ): Promise { + await super.initialize(initArgs); + const { templateInfo } = initArgs; + if (!templateInfo || !templateInfo.templateDetails) { + throw new Error('Phasic initialization requires templateInfo.templateDetails'); + } + const { query, language, frameworks, hostname, inferenceContext, sandboxSessionId } = initArgs; + + // Generate a blueprint + this.logger.info('Generating blueprint', { query, queryLength: query.length, imagesCount: initArgs.images?.length || 0 }); + this.logger.info(`Using language: ${language}, frameworks: ${frameworks ? frameworks.join(", ") : "none"}`); + + const blueprint = await generateBlueprint({ + env: this.env, + inferenceContext, + query, + language: language!, + frameworks: frameworks!, + templateDetails: templateInfo?.templateDetails, + templateMetaInfo: templateInfo?.selection, + images: initArgs.images, + projectType: this.projectType, + stream: { + chunk_size: 256, + onChunk: (chunk) => { + initArgs.onBlueprintChunk(chunk); + } + } + }) + + const packageJson = templateInfo.templateDetails.allFiles['package.json']; + + const projectName = generateProjectName( + blueprint?.projectName || templateInfo?.templateDetails.name || '', + generateNanoId(), + PhasicCodingBehavior.PROJECT_NAME_PREFIX_MAX_LENGTH + ); + + this.logger.info('Generated project name', { projectName }); + + const nextState: PhasicState = { + ...this.state, + projectName, + query, + blueprint, + templateName: templateInfo.templateDetails.name, + sandboxInstanceId: undefined, + generatedPhases: [], + commandsHistory: [], + lastPackageJson: packageJson, + sessionId: sandboxSessionId!, + hostname, + inferenceContext, + projectType: this.projectType, + behaviorType: 'phasic' + }; + this.setState(nextState); + // Customize template files (package.json, wrangler.jsonc, .bootstrap.js, .gitignore) + const customizedFiles = customizeTemplateFiles( + templateInfo.templateDetails.allFiles, + { + projectName, + commandsHistory: [] + } + ); + + this.logger.info('Customized template files', { + files: Object.keys(customizedFiles) + }); + + // Save customized files to git + const filesToSave = Object.entries(customizedFiles).map(([filePath, content]) => ({ + filePath, + fileContents: content, + filePurpose: 'Project configuration file' + })); + + await this.fileManager.saveGeneratedFiles( + filesToSave, + 'Initialize project configuration files' + ); + + this.logger.info('Committed customized template files to git'); + + this.initializeAsync().catch((error: unknown) => { + this.broadcastError("Initialization failed", error); + }); + this.logger.info(`Agent ${this.getAgentId()} session: ${this.state.sessionId} initialized successfully`); + return this.state; + } + + async onStart(props?: Record | undefined): Promise { + await super.onStart(props); + } + + migrateStateIfNeeded(): void { + const migratedState = StateMigration.migrateIfNeeded(this.state, this.logger) as PhasicState | null; + if (migratedState) { + this.setState(migratedState); + } + + // migrate overwritten package.jsons + const oldPackageJson = this.fileManager.getFile('package.json')?.fileContents || this.state.lastPackageJson; + if (oldPackageJson) { + const packageJson = customizePackageJson(oldPackageJson, this.state.projectName); + 
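+            // Commit the re-customized package.json back through the file manager; not awaited since migrateStateIfNeeded() is synchronous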
this.fileManager.saveGeneratedFiles([ + { + filePath: 'package.json', + fileContents: packageJson, + filePurpose: 'Project configuration file' + } + ], 'chore: fix overwritten package.json'); + } + } + + rechargePhasesCounter(max_phases: number = MAX_PHASES): void { + if (this.getPhasesCounter() <= max_phases) { + this.setState({ + ...this.state, + phasesCounter: max_phases + }); + } + } + + decrementPhasesCounter(): number { + const counter = this.getPhasesCounter() - 1; + this.setState({ + ...this.state, + phasesCounter: counter + }); + return counter; + } + + getPhasesCounter(): number { + return this.state.phasesCounter; + } + + getOperationOptions(): OperationOptions { + return { + env: this.env, + agentId: this.getAgentId(), + context: GenerationContext.from(this.state, this.getTemplateDetails(), this.logger) as PhasicGenerationContext, + logger: this.logger, + inferenceContext: this.getInferenceContext(), + agent: this + }; + } + + private createNewIncompletePhase(phaseConcept: PhaseConceptType) { + this.setState({ + ...this.state, + generatedPhases: [...this.state.generatedPhases, { + ...phaseConcept, + completed: false + }] + }) + + this.logger.info("Created new incomplete phase:", JSON.stringify(this.state.generatedPhases, null, 2)); + } + + private markPhaseComplete(phaseName: string) { + // First find the phase + const phases = this.state.generatedPhases; + if (!phases.some(p => p.name === phaseName)) { + this.logger.warn(`Phase ${phaseName} not found in generatedPhases array, skipping save`); + return; + } + + // Update the phase + this.setState({ + ...this.state, + generatedPhases: phases.map(p => p.name === phaseName ? { ...p, completed: true } : p) + }); + + this.logger.info("Completed phases:", JSON.stringify(phases, null, 2)); + } + + async queueUserRequest(request: string, images?: ProcessedImageAttachment[]): Promise { + this.rechargePhasesCounter(3); + await super.queueUserRequest(request, images); + } + + async build(): Promise { + await this.launchStateMachine(); + } + + private async launchStateMachine() { + this.logger.info("Launching state machine"); + + let currentDevState = CurrentDevState.PHASE_IMPLEMENTING; + const generatedPhases = this.state.generatedPhases; + const incompletedPhases = generatedPhases.filter(phase => !phase.completed); + let phaseConcept : PhaseConceptType | undefined; + if (incompletedPhases.length > 0) { + phaseConcept = incompletedPhases[incompletedPhases.length - 1]; + this.logger.info('Resuming code generation from incompleted phase', { + phase: phaseConcept + }); + } else if (generatedPhases.length > 0) { + currentDevState = CurrentDevState.PHASE_GENERATING; + this.logger.info('Resuming code generation after generating all phases', { + phase: generatedPhases[generatedPhases.length - 1] + }); + } else { + phaseConcept = this.state.blueprint.initialPhase; + this.logger.info('Starting code generation from initial phase', { + phase: phaseConcept + }); + this.createNewIncompletePhase(phaseConcept); + } + + let staticAnalysisCache: StaticAnalysisResponse | undefined; + let userContext: UserContext | undefined; + + try { + let executionResults: PhaseExecutionResult; + // State machine loop - continues until IDLE state + while (currentDevState !== CurrentDevState.IDLE) { + this.logger.info(`[generateAllFiles] Executing state: ${currentDevState}`); + switch (currentDevState) { + case CurrentDevState.PHASE_GENERATING: + executionResults = await this.executePhaseGeneration(); + currentDevState = executionResults.currentDevState; + phaseConcept = 
executionResults.result; + staticAnalysisCache = executionResults.staticAnalysis; + userContext = executionResults.userContext; + break; + case CurrentDevState.PHASE_IMPLEMENTING: + executionResults = await this.executePhaseImplementation(phaseConcept, staticAnalysisCache, userContext); + currentDevState = executionResults.currentDevState; + staticAnalysisCache = executionResults.staticAnalysis; + userContext = undefined; + break; + case CurrentDevState.REVIEWING: + currentDevState = await this.executeReviewCycle(); + break; + case CurrentDevState.FINALIZING: + currentDevState = await this.executeFinalizing(); + break; + default: + break; + } + } + + this.logger.info("State machine completed successfully"); + } catch (error) { + this.logger.error("Error in state machine:", error); + } + } + + /** + * Execute phase generation state - generate next phase with user suggestions + */ + async executePhaseGeneration(): Promise { + this.logger.info("Executing PHASE_GENERATING state"); + try { + const currentIssues = await this.fetchAllIssues(); + + // Generate next phase with user suggestions if available + + // Get stored images if user suggestions are present + const pendingUserInputs = this.fetchPendingUserRequests(); + const userContext = (pendingUserInputs.length > 0) + ? { + suggestions: pendingUserInputs, + images: this.pendingUserImages + } as UserContext + : undefined; + + if (userContext && userContext?.suggestions && userContext.suggestions.length > 0) { + // Only reset pending user inputs if user suggestions were read + this.logger.info("Resetting pending user inputs", { + userSuggestions: userContext.suggestions, + hasImages: !!userContext.images, + imageCount: userContext.images?.length || 0 + }); + + // Clear images after they're passed to phase generation + if (userContext?.images && userContext.images.length > 0) { + this.logger.info('Clearing stored user images after passing to phase generation'); + this.pendingUserImages = []; + } + } + + const nextPhase = await this.generateNextPhase(currentIssues, userContext); + + if (!nextPhase) { + this.logger.info("No more phases to implement, transitioning to FINALIZING"); + return { + currentDevState: CurrentDevState.FINALIZING, + }; + } + + // Store current phase and transition to implementation + this.setState({ + ...this.state, + currentPhase: nextPhase + }); + + return { + currentDevState: CurrentDevState.PHASE_IMPLEMENTING, + result: nextPhase, + staticAnalysis: currentIssues.staticAnalysis, + userContext: userContext, + }; + } catch (error) { + if (error instanceof RateLimitExceededError) { + throw error; + } + this.broadcastError("Error generating phase", error); + return { + currentDevState: CurrentDevState.IDLE, + }; + } + } + + /** + * Execute phase implementation state - implement current phase + */ + async executePhaseImplementation(phaseConcept?: PhaseConceptType, staticAnalysis?: StaticAnalysisResponse, userContext?: UserContext): Promise<{currentDevState: CurrentDevState, staticAnalysis?: StaticAnalysisResponse}> { + try { + this.logger.info("Executing PHASE_IMPLEMENTING state"); + + if (phaseConcept === undefined) { + phaseConcept = this.state.currentPhase; + if (phaseConcept === undefined) { + this.logger.error("No phase concept provided to implement, will call phase generation"); + const results = await this.executePhaseGeneration(); + phaseConcept = results.result; + if (phaseConcept === undefined) { + this.logger.error("No phase concept provided to implement, will return"); + return {currentDevState: 
CurrentDevState.FINALIZING}; + } + } + } + + this.setState({ + ...this.state, + currentPhase: undefined // reset current phase + }); + + let currentIssues : AllIssues; + if (this.state.sandboxInstanceId) { + if (staticAnalysis) { + // If have cached static analysis, fetch everything else fresh + currentIssues = { + runtimeErrors: await this.fetchRuntimeErrors(true), + staticAnalysis: staticAnalysis, + }; + } else { + currentIssues = await this.fetchAllIssues(true) + } + } else { + currentIssues = { + runtimeErrors: [], + staticAnalysis: { success: true, lint: { issues: [] }, typecheck: { issues: [] } }, + } + } + // Implement the phase with user context (suggestions and images) + await this.implementPhase(phaseConcept, currentIssues, userContext); + + this.logger.info(`Phase ${phaseConcept.name} completed, generating next phase`); + + const phasesCounter = this.decrementPhasesCounter(); + + if ((phaseConcept.lastPhase || phasesCounter <= 0) && this.state.pendingUserInputs.length === 0) return {currentDevState: CurrentDevState.FINALIZING, staticAnalysis: staticAnalysis}; + return {currentDevState: CurrentDevState.PHASE_GENERATING, staticAnalysis: staticAnalysis}; + } catch (error) { + this.logger.error("Error implementing phase", error); + if (error instanceof RateLimitExceededError) { + throw error; + } + return {currentDevState: CurrentDevState.IDLE}; + } + } + + /** + * Execute review cycle state - review and cleanup + */ + async executeReviewCycle(): Promise { + this.logger.info("Executing REVIEWING state - review and cleanup"); + if (this.state.reviewingInitiated) { + this.logger.info("Reviewing already initiated, skipping"); + return CurrentDevState.IDLE; + } + this.setState({ + ...this.state, + reviewingInitiated: true + }); + + // If issues/errors found, prompt user if they want to review and cleanup + const issues = await this.fetchAllIssues(false); + if (issues.runtimeErrors.length > 0 || issues.staticAnalysis.typecheck.issues.length > 0) { + this.logger.info("Reviewing stage - issues found, prompting user to review and cleanup"); + const message : ConversationMessage = { + role: "assistant", + content: `If the user responds with yes, launch the 'deep_debug' tool with the prompt to fix all the issues in the app\nThere might be some bugs in the app. 
Do you want me to try to fix them?`, + conversationId: IdGenerator.generateConversationId(), + } + // Store the message in the conversation history so user's response can trigger the deep debug tool + this.infrastructure.addConversationMessage(message); + + this.broadcast(WebSocketMessageResponses.CONVERSATION_RESPONSE, { + message: message.content, + conversationId: message.conversationId, + isStreaming: false, + }); + } + + return CurrentDevState.IDLE; + } + + /** + * Execute finalizing state - final review and cleanup (runs only once) + */ + async executeFinalizing(): Promise { + this.logger.info("Executing FINALIZING state - final review and cleanup"); + + // Only do finalizing stage if it wasn't done before + if (this.state.mvpGenerated) { + this.logger.info("Finalizing stage already done"); + return CurrentDevState.REVIEWING; + } + this.setState({ + ...this.state, + mvpGenerated: true + }); + + const phaseConcept: PhaseConceptType = { + name: "Finalization and Review", + description: "Full polishing and final review of the application", + files: [], + lastPhase: true + } + + this.createNewIncompletePhase(phaseConcept); + + const currentIssues = await this.fetchAllIssues(true); + + // Run final review and cleanup phase + await this.implementPhase(phaseConcept, currentIssues); + + const numFilesGenerated = this.fileManager.getGeneratedFilePaths().length; + this.logger.info(`Finalization complete. Generated ${numFilesGenerated}/${this.getTotalFiles()} files.`); + + // Transition to IDLE - generation complete + return CurrentDevState.REVIEWING; + } + + /** + * Generate next phase with user context (suggestions and images) + */ + async generateNextPhase(currentIssues: AllIssues, userContext?: UserContext): Promise { + const issues = IssueReport.from(currentIssues); + + // Build notification message + let notificationMsg = "Generating next phase"; + if (userContext?.suggestions && userContext.suggestions.length > 0) { + notificationMsg = `Generating next phase incorporating ${userContext.suggestions.length} user suggestion(s)`; + } + if (userContext?.images && userContext.images.length > 0) { + notificationMsg += ` with ${userContext.images.length} image(s)`; + } + + // Notify phase generation start + this.broadcast(WebSocketMessageResponses.PHASE_GENERATING, { + message: notificationMsg, + issues: issues, + userSuggestions: userContext?.suggestions, + }); + + const result = await this.operations.generateNextPhase.execute( + { + issues, + userContext, + isUserSuggestedPhase: userContext?.suggestions && userContext.suggestions.length > 0 && this.state.mvpGenerated, + }, + this.getOperationOptions() + ) + // Execute install commands if any + if (result.installCommands && result.installCommands.length > 0) { + this.executeCommands(result.installCommands); + } + + // Execute delete commands if any + const filesToDelete = result.files.filter(f => f.changes?.toLowerCase().trim() === 'delete'); + if (filesToDelete.length > 0) { + this.logger.info(`Deleting ${filesToDelete.length} files: ${filesToDelete.map(f => f.path).join(", ")}`); + this.deleteFiles(filesToDelete.map(f => f.path)); + } + + if (result.files.length === 0) { + this.logger.info("No files generated for next phase"); + // Notify phase generation complete + this.broadcast(WebSocketMessageResponses.PHASE_GENERATED, { + message: `No files generated for next phase`, + phase: undefined + }); + return undefined; + } + + this.createNewIncompletePhase(result); + // Notify phase generation complete + 
this.broadcast(WebSocketMessageResponses.PHASE_GENERATED, { + message: `Generated next phase: ${result.name}`, + phase: result + }); + + return result; + } + + /** + * Implement a single phase of code generation + * Streams file generation with real-time updates and incorporates technical instructions + */ + async implementPhase(phase: PhaseConceptType, currentIssues: AllIssues, userContext?: UserContext, streamChunks: boolean = true, postPhaseFixing: boolean = true): Promise { + const issues = IssueReport.from(currentIssues); + + const implementationMsg = userContext?.suggestions && userContext.suggestions.length > 0 + ? `Implementing phase: ${phase.name} with ${userContext.suggestions.length} user suggestion(s)` + : `Implementing phase: ${phase.name}`; + const msgWithImages = userContext?.images && userContext.images.length > 0 + ? `${implementationMsg} and ${userContext.images.length} image(s)` + : implementationMsg; + + this.broadcast(WebSocketMessageResponses.PHASE_IMPLEMENTING, { + message: msgWithImages, + phase: phase, + issues: issues, + }); + + + const result = await this.operations.implementPhase.execute( + { + phase, + issues, + isFirstPhase: this.state.generatedPhases.filter(p => p.completed).length === 0, + fileGeneratingCallback: (filePath: string, filePurpose: string) => { + this.broadcast(WebSocketMessageResponses.FILE_GENERATING, { + message: `Generating file: ${filePath}`, + filePath: filePath, + filePurpose: filePurpose + }); + }, + userContext, + shouldAutoFix: this.state.inferenceContext.enableRealtimeCodeFix, + fileChunkGeneratedCallback: streamChunks ? (filePath: string, chunk: string, format: 'full_content' | 'unified_diff') => { + this.broadcast(WebSocketMessageResponses.FILE_CHUNK_GENERATED, { + message: `Generating file: ${filePath}`, + filePath: filePath, + chunk, + format, + }); + } : (_filePath: string, _chunk: string, _format: 'full_content' | 'unified_diff') => {}, + fileClosedCallback: (file: FileOutputType, message: string) => { + this.broadcast(WebSocketMessageResponses.FILE_GENERATED, { + message, + file, + }); + } + }, + this.getOperationOptions() + ); + + this.broadcast(WebSocketMessageResponses.PHASE_VALIDATING, { + message: `Validating files for phase: ${phase.name}`, + phase: phase, + }); + + // Await the already-created realtime code fixer promises + const finalFiles = await Promise.allSettled(result.fixedFilePromises).then((results: PromiseSettledResult[]) => { + return results.map((result) => { + if (result.status === 'fulfilled') { + return result.value; + } else { + return null; + } + }).filter((f): f is FileOutputType => f !== null); + }); + + // Update state with completed phase + await this.fileManager.saveGeneratedFiles(finalFiles, `feat: ${phase.name}\n\n${phase.description}`); + + this.logger.info("Files generated for phase:", phase.name, finalFiles.map(f => f.filePath)); + + // Execute commands if provided + if (result.commands && result.commands.length > 0) { + this.logger.info("Phase implementation suggested install commands:", result.commands); + await this.executeCommands(result.commands, false); + } + + // Deploy generated files + if (finalFiles.length > 0) { + await this.deployToSandbox(finalFiles, false, phase.name, true); + if (postPhaseFixing) { + await this.applyDeterministicCodeFixes(); + if (this.state.inferenceContext.enableFastSmartCodeFix) { + await this.applyFastSmartCodeFixes(); + } + } + } + + // Validation complete + this.broadcast(WebSocketMessageResponses.PHASE_VALIDATED, { + message: `Files validated for phase: 
${phase.name}`, + phase: phase + }); + + this.logger.info("Files generated for phase:", phase.name, finalFiles.map(f => f.filePath)); + + this.logger.info(`Validation complete for phase: ${phase.name}`); + + // Notify phase completion + this.broadcast(WebSocketMessageResponses.PHASE_IMPLEMENTED, { + phase: { + name: phase.name, + files: finalFiles.map(f => ({ + path: f.filePath, + purpose: f.filePurpose, + contents: f.fileContents + })), + description: phase.description + }, + message: "Files generated successfully for phase" + }); + + this.markPhaseComplete(phase.name); + + return { + files: finalFiles, + deploymentNeeded: result.deploymentNeeded, + commands: result.commands + }; + } + + /** + * Get current model configurations (defaults + user overrides) + * Used by WebSocket to provide configuration info to frontend + */ + async getModelConfigsInfo() { + const userId = this.state.inferenceContext.userId; + if (!userId) { + throw new Error('No user session available for model configurations'); + } + + try { + const modelConfigService = new ModelConfigService(this.env); + + // Get all user configs + const userConfigsRecord = await modelConfigService.getUserModelConfigs(userId); + + // Transform to match frontend interface + const agents = Object.entries(AGENT_CONFIG).map(([key, config]) => ({ + key, + name: config.name, + description: config.description + })); + + type ModelConfigInfo = ModelConfig & { isUserOverride?: boolean }; + const userConfigs: Record = {}; + const defaultConfigs: Record = {}; + + for (const [actionKey, mergedConfig] of Object.entries(userConfigsRecord)) { + if (mergedConfig.isUserOverride) { + userConfigs[actionKey] = { + name: mergedConfig.name, + max_tokens: mergedConfig.max_tokens, + temperature: mergedConfig.temperature, + reasoning_effort: mergedConfig.reasoning_effort, + fallbackModel: mergedConfig.fallbackModel, + isUserOverride: true + }; + } + + const defaultConfig = AGENT_CONFIG[actionKey as AgentActionKey]; + if (defaultConfig) { + defaultConfigs[actionKey] = { + name: defaultConfig.name, + max_tokens: defaultConfig.max_tokens, + temperature: defaultConfig.temperature, + reasoning_effort: defaultConfig.reasoning_effort, + fallbackModel: defaultConfig.fallbackModel + }; + } + } + + return { + agents, + userConfigs, + defaultConfigs + }; + } catch (error) { + this.logger.error('Error fetching model configs info:', error); + throw error; + } + } + + getTotalFiles(): number { + return this.fileManager.getGeneratedFilePaths().length + ((this.state.currentPhase || this.state.blueprint.initialPhase)?.files?.length || 0); + } + + private async applyFastSmartCodeFixes() : Promise { + try { + const startTime = Date.now(); + this.logger.info("Applying fast smart code fixes"); + // Get static analysis and do deterministic fixes + const staticAnalysis = await this.runStaticAnalysisCode(); + if (staticAnalysis.typecheck.issues.length + staticAnalysis.lint.issues.length == 0) { + this.logger.info("No issues found, skipping fast smart code fixes"); + return; + } + const issues = staticAnalysis.typecheck.issues.concat(staticAnalysis.lint.issues); + const allFiles = this.fileManager.getAllRelevantFiles(); + + const fastCodeFixer = await this.operations.fastCodeFixer.execute({ + query: this.state.query, + issues, + allFiles, + }, this.getOperationOptions()); + + if (fastCodeFixer.length > 0) { + await this.fileManager.saveGeneratedFiles(fastCodeFixer, "fix: Fast smart code fixes"); + await this.deployToSandbox(fastCodeFixer); + this.logger.info("Fast smart code fixes 
applied successfully"); + } + this.logger.info(`Fast smart code fixes applied in ${Date.now() - startTime}ms`); + } catch (error) { + this.broadcastError("Failed to apply fast smart code fixes", error); + return; + } + } + + async generateFiles( + phaseName: string, + phaseDescription: string, + requirements: string[], + files: FileConceptType[] + ): Promise<{ files: Array<{ path: string; purpose: string; diff: string }> }> { + this.logger.info('Generating files for deep debugger', { + phaseName, + requirementsCount: requirements.length, + filesCount: files.length + }); + + // Create phase structure with explicit files + const phase: PhaseConceptType = { + name: phaseName, + description: phaseDescription, + files: files, + lastPhase: true + }; + + // Call existing implementPhase with postPhaseFixing=false + // This skips deterministic fixes and fast smart fixes + const result = await this.implementPhase( + phase, + { + runtimeErrors: [], + staticAnalysis: { + success: true, + lint: { issues: [] }, + typecheck: { issues: [] } + }, + }, + { suggestions: requirements }, + true, // streamChunks + false // postPhaseFixing = false (skip auto-fixes) + ); + + const savedFiles = result.files.map(f => { + const fileState = this.state.generatedFilesMap[f.filePath]; + return { + path: f.filePath, + purpose: f.filePurpose || '', + diff: fileState?.lastDiff || '' + }; + }); + + return { files: savedFiles }; + } + + async handleUserInput(userMessage: string, images?: ImageAttachment[]): Promise { + const result = await super.handleUserInput(userMessage, images); + return result; + } +} diff --git a/worker/agents/core/codingAgent.ts b/worker/agents/core/codingAgent.ts new file mode 100644 index 00000000..e0a710f5 --- /dev/null +++ b/worker/agents/core/codingAgent.ts @@ -0,0 +1,624 @@ +import { Agent, AgentContext, ConnectionContext } from "agents"; +import { AgentInitArgs, AgentSummary, DeployOptions, DeployResult, ExportOptions, ExportResult, DeploymentTarget, BehaviorType } from "./types"; +import { AgenticState, AgentState, BaseProjectState, CurrentDevState, MAX_PHASES, PhasicState } from "./state"; +import { Blueprint } from "../schemas"; +import { BaseCodingBehavior } from "./behaviors/base"; +import { createObjectLogger, StructuredLogger } from '../../logger'; +import { InferenceContext } from "../inferutils/config.types"; +import { FileManager } from '../services/implementations/FileManager'; +import { DeploymentManager } from '../services/implementations/DeploymentManager'; +import { GitVersionControl } from '../git'; +import { StateManager } from '../services/implementations/StateManager'; +import { PhasicCodingBehavior } from './behaviors/phasic'; +import { AgenticCodingBehavior } from './behaviors/agentic'; +import { SqlExecutor } from '../git'; +import { AgentInfrastructure } from "./AgentCore"; +import { ProjectType } from './types'; +import { Connection } from 'agents'; +import { handleWebSocketMessage, handleWebSocketClose, broadcastToConnections, sendToConnection } from './websocket'; +import { WebSocketMessageData, WebSocketMessageType } from "worker/api/websocketTypes"; +import { PreviewType, TemplateDetails } from "worker/services/sandbox/sandboxTypes"; +import { WebSocketMessageResponses } from "../constants"; +import { AppService } from "worker/database"; +import { ConversationMessage, ConversationState } from "../inferutils/common"; +import { ImageAttachment } from "worker/types/image-attachment"; +import { RateLimitExceededError } from "shared/types/errors"; +import { 
ProjectObjective } from "./objectives/base"; +import { AppObjective } from "./objectives/app"; +import { WorkflowObjective } from "./objectives/workflow"; +import { PresentationObjective } from "./objectives/presentation"; +import { GeneralObjective } from "./objectives/general"; +import { FileOutputType } from "../schemas"; + +const DEFAULT_CONVERSATION_SESSION_ID = 'default'; + +interface AgentBootstrapProps { + behaviorType?: BehaviorType; + projectType?: ProjectType; +} + +export class CodeGeneratorAgent extends Agent implements AgentInfrastructure { + public _logger: StructuredLogger | undefined; + private behavior!: BaseCodingBehavior; + private objective!: ProjectObjective; + protected static readonly PROJECT_NAME_PREFIX_MAX_LENGTH = 20; + // Services + readonly fileManager: FileManager; + readonly deploymentManager: DeploymentManager; + readonly git: GitVersionControl; + + // Redeclare as public to satisfy AgentInfrastructure interface + declare public readonly env: Env; + declare public readonly sql: SqlExecutor; + + // ========================================== + // Initialization + // ========================================== + + initialState = { + behaviorType: 'unknown' as BehaviorType, + projectType: 'unknown' as ProjectType, + projectName: "", + query: "", + sessionId: '', + hostname: '', + blueprint: {} as unknown as Blueprint, + templateName: '', + generatedFilesMap: {}, + conversationMessages: [], + inferenceContext: {} as InferenceContext, + shouldBeGenerating: false, + sandboxInstanceId: undefined, + commandsHistory: [], + lastPackageJson: '', + pendingUserInputs: [], + projectUpdatesAccumulator: [], + lastDeepDebugTranscript: null, + mvpGenerated: false, + reviewingInitiated: false, + generatedPhases: [], + currentDevState: CurrentDevState.IDLE, + phasesCounter: MAX_PHASES, + } as AgentState; + + constructor(ctx: AgentContext, env: Env) { + super(ctx, env); + + this.sql`CREATE TABLE IF NOT EXISTS full_conversations (id TEXT PRIMARY KEY, messages TEXT)`; + this.sql`CREATE TABLE IF NOT EXISTS compact_conversations (id TEXT PRIMARY KEY, messages TEXT)`; + + // Create StateManager + const stateManager = new StateManager( + () => this.state, + (s) => this.setState(s) + ); + + this.git = new GitVersionControl(this.sql.bind(this)); + this.fileManager = new FileManager( + stateManager, + () => this.behavior?.getTemplateDetails?.() || null, + this.git + ); + this.deploymentManager = new DeploymentManager( + { + stateManager, + fileManager: this.fileManager, + getLogger: () => this.logger(), + env: this.env + }, + 10 // MAX_COMMANDS_HISTORY + ); + } + + onFirstInit(props?: AgentBootstrapProps): void { + this.logger().info('Bootstrapping CodeGeneratorAgent', { props }); + const behaviorType = props?.behaviorType ?? this.state.behaviorType ?? 'phasic'; + const projectType = props?.projectType ?? this.state.projectType ?? 
'app';
+
+        if (behaviorType === 'phasic') {
+            this.behavior = new PhasicCodingBehavior(this as AgentInfrastructure, projectType);
+        } else {
+            this.behavior = new AgenticCodingBehavior(this as AgentInfrastructure, projectType);
+        }
+
+        // Create objective based on project type
+        this.objective = this.createObjective(projectType);
+    }
+
+    /**
+     * Factory method to create the appropriate objective based on project type
+     */
+    private createObjective(projectType: ProjectType): ProjectObjective {
+        const infrastructure = this as AgentInfrastructure;
+
+        switch (projectType) {
+            case 'app':
+                return new AppObjective(infrastructure);
+            case 'workflow':
+                return new WorkflowObjective(infrastructure);
+            case 'presentation':
+                return new PresentationObjective(infrastructure);
+            case 'general':
+                return new GeneralObjective(infrastructure);
+            default:
+                // Default to app for backward compatibility
+                return new AppObjective(infrastructure);
+        }
+    }
+
+    /**
+     * Initialize the agent with project blueprint and template
+     * Only called once in an app's lifecycle
+     */
+    async initialize(
+        initArgs: AgentInitArgs,
+        ..._args: unknown[]
+    ): Promise<AgentState> {
+        const { inferenceContext } = initArgs;
+        const sandboxSessionId = DeploymentManager.generateNewSessionId();
+        this.initLogger(inferenceContext.agentId, inferenceContext.userId, sandboxSessionId);
+
+        // Infrastructure setup
+        await this.gitInit();
+
+        // Let behavior handle all state initialization (blueprint, projectName, etc.)
+        await this.behavior.initialize({
+            ...initArgs,
+            sandboxSessionId // Pass generated session ID to behavior
+        });
+
+        try {
+            await this.objective.onProjectCreated();
+        } catch (error) {
+            this.logger().error('Lifecycle hook onProjectCreated failed:', error);
+            // Don't fail initialization if hook fails
+        }
+        await this.saveToDatabase();
+
+        return this.state;
+    }
+
+    async isInitialized() {
+        return this.getAgentId() ? true : false;
+    }
+
+    /**
+     * Called every time the agent is started or re-started
+     * @param props - Optional props
+     */
+    async onStart(props?: Record<string, unknown> | undefined): Promise<void> {
+        this.logger().info(`Agent ${this.getAgentId()} session: ${this.state.sessionId} onStart`, { props });
+
+        if (!this.behavior) {
+            // First-time initialization
+            this.logger().info('First-time onStart initialization detected, invoking onFirstInit');
+            this.onFirstInit(props as AgentBootstrapProps);
+        }
+
+        this.behavior.onStart(props);
+
+        // Ignore if agent not initialized
+        if (!this.state.query) {
+            this.logger().warn(`Agent ${this.getAgentId()} session: ${this.state.sessionId} onStart ignored, agent not initialized`);
+            return;
+        }
+
+        // Ensure state is migrated for any previous versions
+        this.behavior.migrateStateIfNeeded();
+
+        // Check if this is a read-only operation
+        const readOnlyMode = props?.readOnlyMode === true;
+
+        if (readOnlyMode) {
+            this.logger().info(`Agent ${this.getAgentId()} starting in READ-ONLY mode - skipping expensive initialization`);
+            return;
+        }
+
+        // Just in case
+        await this.gitInit();
+
+        await this.behavior.ensureTemplateDetails();
+        this.logger().info(`Agent ${this.getAgentId()} session: ${this.state.sessionId} onStart processed successfully`);
+    }
+
+    onConnect(connection: Connection, ctx: ConnectionContext) {
+        this.logger().info(`Agent connected for agent ${this.getAgentId()}`, { connection, ctx });
+        sendToConnection(connection, 'agent_connected', {
+            state: this.state,
+            templateDetails: this.behavior.getTemplateDetails()
+        });
+    }
+
+    private initLogger(agentId: string, userId: string, sessionId?: string) {
+        this._logger = createObjectLogger(this, 'CodeGeneratorAgent');
+        this._logger.setObjectId(agentId);
+        this._logger.setFields({
+            agentId,
+            userId,
+            projectType: this.state.projectType,
+            behaviorType: this.state.behaviorType
+        });
+        if (sessionId) {
+            this._logger.setField('sessionId', sessionId);
+        }
+        return this._logger;
+    }
+
+    // ==========================================
+    // Utilities
+    // ==========================================
+
+    logger(): StructuredLogger {
+        if (!this._logger) {
+            this._logger = this.initLogger(this.getAgentId(), this.state.inferenceContext.userId, this.state.sessionId);
+        }
+        return this._logger;
+    }
+
+    getAgentId() {
+        return this.state.inferenceContext.agentId;
+    }
+
+    getWebSockets(): WebSocket[] {
+        return this.ctx.getWebSockets();
+    }
+
+    /**
+     * Get the project objective (defines what is being built)
+     */
+    getObjective(): ProjectObjective {
+        return this.objective;
+    }
+
+    /**
+     * Get the behavior (defines how code is generated)
+     */
+    getBehavior(): BaseCodingBehavior {
+        return this.behavior;
+    }
+
+    async getFullState(): Promise<AgentState> {
+        return await this.behavior.getFullState();
+    }
+
+    async getSummary(): Promise<AgentSummary> {
+        return this.behavior.getSummary();
+    }
+
+    getPreviewUrlCache(): string {
+        return this.behavior.getPreviewUrlCache();
+    }
+
+    deployToSandbox(
+        files: FileOutputType[] = [],
+        redeploy: boolean = false,
+        commitMessage?: string,
+        clearLogs: boolean = false
+    ): Promise<PreviewType | null> {
+        return this.behavior.deployToSandbox(files, redeploy, commitMessage, clearLogs);
+    }
+
+    deployToCloudflare(target?: DeploymentTarget): Promise<{ deploymentUrl?: string; workersUrl?: string } | null> {
+        return this.behavior.deployToCloudflare(target);
+    }
+
+    deployProject(options?: DeployOptions): Promise<DeployResult> {
+        return this.objective.deploy(options);
+    }
+
+    exportProject(options: ExportOptions): Promise<ExportResult> {
+        return
this.objective.export(options); + } + + importTemplate(templateName: string): Promise<{ templateName: string; filesImported: number }> { + return this.behavior.importTemplate(templateName); + } + + protected async saveToDatabase() { + this.logger().info(`Saving agent ${this.getAgentId()} to database`); + // Save the app to database (authenticated users only) + const appService = new AppService(this.env); + await appService.createApp({ + id: this.state.inferenceContext.agentId, + userId: this.state.inferenceContext.userId, + sessionToken: null, + title: this.state.blueprint.title || this.state.query.substring(0, 100), + description: this.state.blueprint.description, + originalPrompt: this.state.query, + finalPrompt: this.state.query, + framework: this.state.blueprint.frameworks.join(','), + visibility: 'private', + status: 'generating', + createdAt: new Date(), + updatedAt: new Date() + }); + this.logger().info(`App saved successfully to database for agent ${this.state.inferenceContext.agentId}`, { + agentId: this.state.inferenceContext.agentId, + userId: this.state.inferenceContext.userId, + visibility: 'private' + }); + this.logger().info(`Agent initialized successfully for agent ${this.state.inferenceContext.agentId}`); + } + + // ========================================== + // Conversation Management + // ========================================== + + /* + * Each DO has 10 gb of sqlite storage. However, the way agents sdk works, it stores the 'state' object of the agent as a single row + * in the cf_agents_state table. And row size has a much smaller limit in sqlite. Thus, we only keep current compactified conversation + * in the agent's core state and store the full conversation in a separate DO table. + */ + getConversationState(id: string = DEFAULT_CONVERSATION_SESSION_ID): ConversationState { + const currentConversation = this.state.conversationMessages; + const rows = this.sql<{ messages: string, id: string }>`SELECT * FROM full_conversations WHERE id = ${id}`; + let fullHistory: ConversationMessage[] = []; + if (rows.length > 0 && rows[0].messages) { + try { + const parsed = JSON.parse(rows[0].messages); + if (Array.isArray(parsed)) { + fullHistory = parsed as ConversationMessage[]; + } + } catch (_e) { + this.logger().warn('Failed to parse full conversation history', _e); + } + } + if (fullHistory.length === 0) { + fullHistory = currentConversation; + } + // Load compact (running) history from sqlite with fallback to in-memory state for migration + const compactRows = this.sql<{ messages: string, id: string }>`SELECT * FROM compact_conversations WHERE id = ${id}`; + let runningHistory: ConversationMessage[] = []; + if (compactRows.length > 0 && compactRows[0].messages) { + try { + const parsed = JSON.parse(compactRows[0].messages); + if (Array.isArray(parsed)) { + runningHistory = parsed as ConversationMessage[]; + } + } catch (_e) { + this.logger().warn('Failed to parse compact conversation history', _e); + } + } + if (runningHistory.length === 0) { + runningHistory = currentConversation; + } + + // Remove duplicates + const deduplicateMessages = (messages: ConversationMessage[]): ConversationMessage[] => { + const seen = new Set(); + return messages.filter(msg => { + const key = `${msg.conversationId}-${msg.role}-${msg.tool_call_id || ''}`; + if (seen.has(key)) { + return false; + } + seen.add(key); + return true; + }); + }; + + runningHistory = deduplicateMessages(runningHistory); + fullHistory = deduplicateMessages(fullHistory); + + this.logger().info(`Loaded conversation 
state ${id}, full_length: ${fullHistory.length}, compact_length: ${runningHistory.length}`, fullHistory); + + return { + id: id, + runningHistory, + fullHistory, + }; + } + + setConversationState(conversations: ConversationState) { + const serializedFull = JSON.stringify(conversations.fullHistory); + const serializedCompact = JSON.stringify(conversations.runningHistory); + try { + this.logger().info(`Saving conversation state ${conversations.id}, full_length: ${serializedFull.length}, compact_length: ${serializedCompact.length}`, serializedFull); + this.sql`INSERT OR REPLACE INTO compact_conversations (id, messages) VALUES (${conversations.id}, ${serializedCompact})`; + this.sql`INSERT OR REPLACE INTO full_conversations (id, messages) VALUES (${conversations.id}, ${serializedFull})`; + } catch (error) { + this.logger().error(`Failed to save conversation state ${conversations.id}`, error); + } + } + + addConversationMessage(message: ConversationMessage) { + const conversationState = this.getConversationState(); + if (!conversationState.runningHistory.find(msg => msg.conversationId === message.conversationId)) { + this.logger().info('Adding conversation message', { + message, + conversationId: message.conversationId, + runningHistoryLength: conversationState.runningHistory.length, + fullHistoryLength: conversationState.fullHistory.length + }); + conversationState.runningHistory.push(message); + } else { + conversationState.runningHistory = conversationState.runningHistory.map(msg => { + if (msg.conversationId === message.conversationId) { + return message; + } + return msg; + }); + } + if (!conversationState.fullHistory.find(msg => msg.conversationId === message.conversationId)) { + conversationState.fullHistory.push(message); + } else { + conversationState.fullHistory = conversationState.fullHistory.map(msg => { + if (msg.conversationId === message.conversationId) { + return message; + } + return msg; + }); + } + this.setConversationState(conversationState); + } + + /** + * Clear conversation history + */ + public clearConversation(): void { + const messageCount = this.state.conversationMessages.length; + + // Clear conversation messages only from agent's running history + this.setState({ + ...this.state, + conversationMessages: [] + }); + + // Send confirmation response + this.broadcast(WebSocketMessageResponses.CONVERSATION_CLEARED, { + message: 'Conversation history cleared', + clearedMessageCount: messageCount + }); + } + + /** + * Handle user input during conversational code generation + * Processes user messages and updates pendingUserInputs state + */ + async handleUserInput(userMessage: string, images?: ImageAttachment[]): Promise { + try { + this.logger().info('Processing user input message', { + messageLength: userMessage.length, + pendingInputsCount: this.state.pendingUserInputs.length, + hasImages: !!images && images.length > 0, + imageCount: images?.length || 0 + }); + + await this.behavior.handleUserInput(userMessage, images); + if (!this.behavior.isCodeGenerating()) { + // If idle, start generation process + this.logger().info('User input during IDLE state, starting generation'); + this.behavior.generateAllFiles().catch(error => { + this.logger().error('Error starting generation from user input:', error); + }); + } + + } catch (error) { + if (error instanceof RateLimitExceededError) { + this.logger().error('Rate limit exceeded:', error); + this.broadcast(WebSocketMessageResponses.RATE_LIMIT_ERROR, { + error + }); + return; + } + this.broadcastError('Error processing user 
input', error); + } + } + // ========================================== + // WebSocket Management + // ========================================== + + /** + * Handle WebSocket message - Agent owns WebSocket lifecycle + * Delegates to centralized handler which can access both behavior and objective + */ + async onMessage(connection: Connection, message: string): Promise { + handleWebSocketMessage(this, connection, message); + } + + /** + * Handle WebSocket close - Agent owns WebSocket lifecycle + */ + async onClose(connection: Connection): Promise { + handleWebSocketClose(connection); + } + + /** + * Broadcast message to all connected WebSocket clients + * Type-safe version using proper WebSocket message types + */ + public broadcast( + type: T, + data?: WebSocketMessageData + ): void { + broadcastToConnections(this, type, data || {} as WebSocketMessageData); + } + + protected broadcastError(context: string, error: unknown): void { + const errorMessage = error instanceof Error ? error.message : String(error); + this.logger().error(`${context}:`, error); + this.broadcast(WebSocketMessageResponses.ERROR, { + error: `${context}: ${errorMessage}` + }); + } + // ========================================== + // Git Management + // ========================================== + + protected async gitInit() { + try { + await this.git.init(); + this.logger().info("Git initialized successfully"); + // Check if there is any commit + const head = await this.git.getHead(); + + if (!head) { + this.logger().info("No commits found, creating initial commit"); + // get all generated files and commit them + const generatedFiles = this.fileManager.getGeneratedFiles(); + if (generatedFiles.length === 0) { + this.logger().info("No generated files found, skipping initial commit"); + return; + } + await this.git.commit(generatedFiles, "Initial commit"); + this.logger().info("Initial commit created successfully"); + } + } catch (error) { + this.logger().error("Error during git init:", error); + } + } + + /** + * Export git objects + * The route handler will build the repo with template rebasing + */ + async exportGitObjects(): Promise<{ + gitObjects: Array<{ path: string; data: Uint8Array }>; + query: string; + hasCommits: boolean; + templateDetails: TemplateDetails | null; + }> { + try { + // Export git objects efficiently (minimal DO memory usage) + const gitObjects = this.git.fs.exportGitObjects(); + + await this.gitInit(); + + // Ensure template details are available + await this.behavior.ensureTemplateDetails(); + + const templateDetails = this.behavior.getTemplateDetails(); + + return { + gitObjects, + query: this.state.query || 'N/A', + hasCommits: gitObjects.length > 0, + templateDetails + }; + } catch (error) { + this.logger().error('exportGitObjects failed', error); + throw error; + } + } + + /** + * Cache GitHub OAuth token in memory for subsequent exports + * Token is ephemeral - lost on DO eviction + */ + setGitHubToken(token: string, username: string, ttl: number = 3600000): void { + this.objective.setGitHubToken(token, username, ttl); + } + + /** + * Get cached GitHub token if available and not expired + */ + getGitHubToken(): { token: string; username: string } | null { + return this.objective.getGitHubToken(); + } + + /** + * Clear cached GitHub token + */ + clearGitHubToken(): void { + this.objective.clearGitHubToken(); + } +} diff --git a/worker/agents/core/objectives/app.ts b/worker/agents/core/objectives/app.ts new file mode 100644 index 00000000..04631afc --- /dev/null +++ 
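The generic parameters on the `broadcast` signature appear to have been lost in this rendering of the diff (the `T extends WebSocketMessageType` constraint and the `WebSocketMessageData` type argument). The following is a stand-in sketch of the typed-broadcast idea with illustrative message kinds; the real types live in worker/api/websocketTypes and are much larger.

```ts
// Stand-in types for illustration; the real ones cover many more message kinds.
interface WebSocketMessageData {
  error: { error: string };
  generation_complete: { message: string; instanceId?: string };
  conversation_response: { message: string; conversationId: string; isStreaming: boolean };
}
type WebSocketMessageType = keyof WebSocketMessageData;

interface ConnectionLike {
  send(raw: string): void;
}

// Presumed intent of the generic broadcast(): the payload type is tied to the message type,
// so a mismatched payload fails to compile.
function broadcastToAll<T extends WebSocketMessageType>(
  connections: ConnectionLike[],
  type: T,
  data: WebSocketMessageData[T],
): void {
  const raw = JSON.stringify({ type, ...data });
  for (const conn of connections) conn.send(raw);
}

// Usage:
const connections: ConnectionLike[] = [{ send: (raw) => console.log(raw) }];
broadcastToAll(connections, 'error', { error: 'Deployment failed' });
```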
b/worker/agents/core/objectives/app.ts @@ -0,0 +1,310 @@ +import { ProjectObjective } from './base'; +import { BaseProjectState } from '../state'; +import { ProjectType, RuntimeType, ExportResult, ExportOptions, DeployResult, DeployOptions } from '../types'; +import { WebSocketMessageResponses, PREVIEW_EXPIRED_ERROR } from '../../constants'; +import { AppService } from '../../../database/services/AppService'; +import type { AgentInfrastructure } from '../AgentCore'; +import { GitHubService } from '../../../services/github'; + +/** + * AppObjective - Full-Stack Web Applications + * + * Produces: React + Vite + Cloudflare Workers full-stack applications + * Runtime: Cloudflare Containers (sandbox) + * Template: R2-backed React templates + * Export: Deploy to Cloudflare Workers for platform (and soon User's personal Cloudflare account) + * + * This is the EXISTING, ORIGINAL project type. + * All current production apps are AppObjective. + */ +export class AppObjective + extends ProjectObjective { + + constructor(infrastructure: AgentInfrastructure) { + super(infrastructure); + } + + // ========================================== + // IDENTITY + // ========================================== + + getType(): ProjectType { + return 'app'; + } + + // ========================================== + // RUNTIME & INFRASTRUCTURE + // ========================================== + + getRuntime(): RuntimeType { + return 'sandbox'; + } + + needsTemplate(): boolean { + return true; + } + + getTemplateType(): string | null { + return this.state.templateName; + } + + // ========================================== + // LIFECYCLE HOOKS + // ========================================== + + /** + * After code generation, auto-deploy to sandbox for preview + */ + async onCodeGenerated(): Promise { + this.logger.info('AppObjective: Code generation complete, auto-deploying to sandbox'); + + try { + await this.deploymentManager.deployToSandbox(); + this.logger.info('AppObjective: Auto-deployment to sandbox successful'); + } catch (error) { + this.logger.error('AppObjective: Auto-deployment to sandbox failed', error); + // Don't throw - generation succeeded even if deployment failed + } + } + + // ========================================== + // DEPLOYMENT & EXPORT + // ========================================== + + async deploy(options?: DeployOptions): Promise { + const target = options?.target ?? 
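The objectives expose lifecycle hooks (`onProjectCreated`, `onCodeGenerated`) that the agent core presumably invokes at the matching points of the generation flow; the call site is not part of this diff, so the sketch below is hypothetical wiring, with `finishGeneration` and its `log` parameter being illustrative names only.

```ts
// Hypothetical wiring; only onCodeGenerated() is taken from the source.
interface ObjectiveLike {
  onCodeGenerated(): Promise<void>;
}

async function finishGeneration(objective: ObjectiveLike, log: (msg: string) => void): Promise<void> {
  // All files have been generated at this point.
  log('code generation complete, running objective post-generation hook');
  try {
    // For AppObjective this auto-deploys to the sandbox preview; GeneralObjective is a no-op.
    await objective.onCodeGenerated();
  } catch (err) {
    // Consistent with the objective's own stance: a failed post-hook should not fail the generation.
    log(`post-generation hook failed: ${String(err)}`);
  }
}
```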
'platform'; + if (target !== 'platform') { + const message = `Unsupported deployment target "${target}" for app projects`; + this.logger.error(message); + return { success: false, target, error: message }; + } + + try { + this.logger.info('Deploying app to Workers for Platforms'); + + // Ensure sandbox instance exists first + if (!this.state.sandboxInstanceId) { + this.logger.info('No sandbox instance, deploying to sandbox first'); + await this.deploymentManager.deployToSandbox(); + + if (!this.state.sandboxInstanceId) { + this.logger.error('Failed to deploy to sandbox service'); + this.broadcast(WebSocketMessageResponses.CLOUDFLARE_DEPLOYMENT_ERROR, { + message: 'Deployment failed: Failed to deploy to sandbox service', + error: 'Sandbox service unavailable' + }); + return { + success: false, + target, + error: 'Failed to deploy to sandbox service' + }; + } + } + + // Deploy to Cloudflare Workers for Platforms + const result = await this.deploymentManager.deployToCloudflare({ + target, + callbacks: { + onStarted: (data) => { + this.broadcast(WebSocketMessageResponses.CLOUDFLARE_DEPLOYMENT_STARTED, data); + }, + onCompleted: (data) => { + this.broadcast(WebSocketMessageResponses.CLOUDFLARE_DEPLOYMENT_COMPLETED, data); + }, + onError: (data) => { + this.broadcast(WebSocketMessageResponses.CLOUDFLARE_DEPLOYMENT_ERROR, data); + }, + onPreviewExpired: () => { + this.deploymentManager.deployToSandbox(); + this.broadcast(WebSocketMessageResponses.CLOUDFLARE_DEPLOYMENT_ERROR, { + message: PREVIEW_EXPIRED_ERROR, + error: PREVIEW_EXPIRED_ERROR + }); + } + } + }); + + // Update database with deployment ID if successful + if (result.deploymentUrl && result.deploymentId) { + const appService = new AppService(this.env); + await appService.updateDeploymentId( + this.getAgentId(), + result.deploymentId + ); + + this.logger.info('Updated app deployment ID in database', { + agentId: this.getAgentId(), + deploymentId: result.deploymentId + }); + } + + return { + success: !!result.deploymentUrl, + target, + url: result.deploymentUrl || undefined, + metadata: { + deploymentId: result.deploymentId, + workersUrl: result.deploymentUrl + } + }; + + } catch (error) { + this.logger.error('Cloudflare deployment error:', error); + this.broadcast(WebSocketMessageResponses.CLOUDFLARE_DEPLOYMENT_ERROR, { + message: 'Deployment failed', + error: error instanceof Error ? error.message : String(error) + }); + + return { + success: false, + target, + error: error instanceof Error ? 
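`DeployOptions` and `DeployResult` come from worker/agents/core/types, which is not included in this diff; judging from how the objectives construct their return values, the shapes are roughly as follows (field names inferred from usage, optionality is an assumption).

```ts
// Inferred from usage in AppObjective/PresentationObjective/WorkflowObjective.deploy();
// the canonical definitions live in worker/agents/core/types.
type DeployTarget = 'platform' | 'user';

interface DeployOptions {
  target?: DeployTarget;
}

interface DeployResult {
  success: boolean;
  target: DeployTarget;
  url?: string;
  deploymentId?: string;              // WorkflowObjective also sets this at the top level
  error?: string;
  metadata?: Record<string, unknown>; // e.g. { deploymentId, workersUrl }
}
```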
error.message : 'Unknown deployment error' + }; + } + } + + async export(options: ExportOptions): Promise { + if (options.kind !== 'github' || !options.github) { + const error = 'App export requires GitHub context'; + this.logger.error(error, { kind: options.kind }); + return { success: false, error }; + } + + const githubOptions = options.github; + + try { + this.logger.info('Starting GitHub export using DO git'); + + this.broadcast(WebSocketMessageResponses.GITHUB_EXPORT_STARTED, { + message: `Starting GitHub export to repository "${githubOptions.cloneUrl}"`, + repositoryName: githubOptions.repositoryHtmlUrl, + isPrivate: githubOptions.isPrivate + }); + + this.broadcast(WebSocketMessageResponses.GITHUB_EXPORT_PROGRESS, { + message: 'Preparing git repository...', + step: 'preparing', + progress: 20 + }); + + const { gitObjects, query, templateDetails } = await this.infrastructure.exportGitObjects(); + + this.logger.info('Git objects exported', { + objectCount: gitObjects.length, + hasTemplate: !!templateDetails + }); + + let appCreatedAt: Date | undefined = undefined; + try { + const agentId = this.getAgentId(); + if (agentId) { + const appService = new AppService(this.env); + const app = await appService.getAppDetails(agentId); + if (app && app.createdAt) { + appCreatedAt = new Date(app.createdAt); + this.logger.info('Using app createdAt for template base', { + createdAt: appCreatedAt.toISOString() + }); + } + } + } catch (error) { + this.logger.warn('Failed to get app createdAt, using current time', { error }); + appCreatedAt = new Date(); + } + + this.broadcast(WebSocketMessageResponses.GITHUB_EXPORT_PROGRESS, { + message: 'Uploading to GitHub repository...', + step: 'uploading_files', + progress: 40 + }); + + const result = await GitHubService.exportToGitHub({ + gitObjects, + templateDetails, + appQuery: query, + appCreatedAt, + token: githubOptions.token, + repositoryUrl: githubOptions.repositoryHtmlUrl, + username: githubOptions.username, + email: githubOptions.email + }); + + if (!result.success) { + throw new Error(result.error || 'Failed to export to GitHub'); + } + + this.logger.info('GitHub export completed', { + commitSha: result.commitSha + }); + + if (githubOptions.token && githubOptions.username) { + try { + this.setGitHubToken(githubOptions.token, githubOptions.username); + this.logger.info('GitHub token cached after successful export'); + } catch (cacheError) { + this.logger.warn('Failed to cache GitHub token', { error: cacheError }); + } + } + + this.broadcast(WebSocketMessageResponses.GITHUB_EXPORT_PROGRESS, { + message: 'Finalizing GitHub export...', + step: 'finalizing', + progress: 90 + }); + + const agentId = this.getAgentId(); + this.logger.info('[DB Update] Updating app with GitHub repository URL', { + agentId, + repositoryUrl: githubOptions.repositoryHtmlUrl, + visibility: githubOptions.isPrivate ? 'private' : 'public' + }); + + const appService = new AppService(this.env); + const updateResult = await appService.updateGitHubRepository( + agentId || '', + githubOptions.repositoryHtmlUrl || '', + githubOptions.isPrivate ? 
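Similarly, the GitHub branch of `ExportOptions` is only visible here through its usage; the fields read by `AppObjective.export()`, together with the kinds handled by the other objectives, suggest a shape along these lines (an inference, not the canonical definition).

```ts
// Inferred from reads in AppObjective/PresentationObjective/WorkflowObjective.export();
// the real ExportOptions is defined in worker/agents/core/types and may carry more fields.
interface GitHubExportContext {
  token: string;
  username: string;
  email?: string;
  repositoryHtmlUrl: string;
  cloneUrl: string;
  isPrivate: boolean;
}

interface ExportOptions {
  kind: 'github' | 'pdf' | 'pptx' | 'googleslides' | 'workflow';
  github?: GitHubExportContext; // required when kind === 'github'
  format?: string;              // used by presentation exports, defaults to kind
}

interface ExportResult {
  success: boolean;
  url?: string;
  error?: string;
  metadata?: Record<string, unknown>;
}
```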
'private' : 'public' + ); + + this.logger.info('[DB Update] Database update result', { + agentId, + success: updateResult, + repositoryUrl: githubOptions.repositoryHtmlUrl + }); + + this.broadcast(WebSocketMessageResponses.GITHUB_EXPORT_COMPLETED, { + message: `Successfully exported to GitHub repository: ${githubOptions.repositoryHtmlUrl}`, + repositoryUrl: githubOptions.repositoryHtmlUrl, + cloneUrl: githubOptions.cloneUrl, + commitSha: result.commitSha + }); + + this.logger.info('GitHub export completed successfully', { + repositoryUrl: githubOptions.repositoryHtmlUrl, + commitSha: result.commitSha + }); + + return { + success: true, + url: githubOptions.repositoryHtmlUrl, + metadata: { + repositoryUrl: githubOptions.repositoryHtmlUrl, + cloneUrl: githubOptions.cloneUrl, + commitSha: result.commitSha + } + }; + + } catch (error) { + this.logger.error('GitHub export failed', error); + this.broadcast(WebSocketMessageResponses.GITHUB_EXPORT_ERROR, { + message: `GitHub export failed: ${error instanceof Error ? error.message : 'Unknown error'}`, + error: error instanceof Error ? error.message : 'Unknown error' + }); + return { + success: false, + url: options.github.repositoryHtmlUrl, + error: error instanceof Error ? error.message : 'Unknown error' + }; + } + } +} diff --git a/worker/agents/core/objectives/base.ts b/worker/agents/core/objectives/base.ts new file mode 100644 index 00000000..4ee3f4db --- /dev/null +++ b/worker/agents/core/objectives/base.ts @@ -0,0 +1,141 @@ +import { BaseProjectState } from '../state'; +import { ProjectType, RuntimeType, ExportResult, ExportOptions, DeployResult, DeployOptions } from '../types'; +import { AgentComponent } from '../AgentComponent'; +import type { AgentInfrastructure } from '../AgentCore'; + +/** + * Abstract base class for project objectives + * + * Defines WHAT is being built (app, workflow, presentation, etc.) + * + * Design principles: + * - Defines project identity (type, name, description) + * - Defines runtime requirements (sandbox, worker, none) + * - Defines template needs + * - Implements export/deployment logic + * - Provides lifecycle hooks + */ +export abstract class ProjectObjective + extends AgentComponent { + + // GitHub token cache (ephemeral, lost on DO eviction) + protected githubTokenCache: { + token: string; + username: string; + expiresAt: number; + } | null = null; + + constructor(infrastructure: AgentInfrastructure) { + super(infrastructure); + } + + // ========================================== + // ABSTRACT METHODS (Must be implemented) + // ========================================== + + /** + * Get project type identifier + */ + abstract getType(): ProjectType; + + /** + * Get runtime type (where it runs during development) + */ + abstract getRuntime(): RuntimeType; + + /** + * Does this project need a template? + */ + abstract needsTemplate(): boolean; + + /** + * Get template type if needed + */ + abstract getTemplateType(): string | null; + + /** + * Deploy project to its runtime target + */ + abstract deploy(options?: DeployOptions): Promise; + + /** + * Export project artifacts (GitHub repo, PDF, etc.) 
+ */ + abstract export(options: ExportOptions): Promise; + + // ========================================== + // OPTIONAL LIFECYCLE HOOKS + // ========================================== + + /** + * Called after project is created and initialized + * Override for project-specific setup + */ + async onProjectCreated(): Promise { + // Default: no-op + } + + /** + * Called after code generation completes + * Override for project-specific post-generation actions + */ + async onCodeGenerated(): Promise { + // Default: no-op + } + + // ========================================== + // OPTIONAL VALIDATION + // ========================================== + + /** + * Validate project configuration and state + * Override for project-specific validation + */ + async validate(): Promise<{ valid: boolean; errors?: string[] }> { + return { valid: true }; + } + + /** + * Cache GitHub OAuth token in memory for subsequent exports + * Token is ephemeral - lost on DO eviction + */ + setGitHubToken(token: string, username: string, ttl: number = 3600000): void { + this.githubTokenCache = { + token, + username, + expiresAt: Date.now() + ttl + }; + this.logger.info('GitHub token cached', { + username, + expiresAt: new Date(this.githubTokenCache.expiresAt).toISOString() + }); + } + + /** + * Get cached GitHub token if available and not expired + */ + getGitHubToken(): { token: string; username: string } | null { + if (!this.githubTokenCache) { + return null; + } + + if (Date.now() >= this.githubTokenCache.expiresAt) { + this.logger.info('GitHub token expired, clearing cache'); + this.githubTokenCache = null; + return null; + } + + return { + token: this.githubTokenCache.token, + username: this.githubTokenCache.username + }; + } + + /** + * Clear cached GitHub token + */ + clearGitHubToken(): void { + this.githubTokenCache = null; + this.logger.info('GitHub token cleared'); + } +} diff --git a/worker/agents/core/objectives/general.ts b/worker/agents/core/objectives/general.ts new file mode 100644 index 00000000..858a7c96 --- /dev/null +++ b/worker/agents/core/objectives/general.ts @@ -0,0 +1,38 @@ +import { ProjectObjective } from './base'; +import { BaseProjectState } from '../state'; +import { ProjectType, RuntimeType, ExportResult, ExportOptions, DeployResult, DeployOptions } from '../types'; +import type { AgentInfrastructure } from '../AgentCore'; + +export class GeneralObjective + extends ProjectObjective { + + constructor(infrastructure: AgentInfrastructure) { + super(infrastructure); + } + + getType(): ProjectType { + return 'general'; + } + + getRuntime(): RuntimeType { + // No runtime assumed; agentic behavior will initialize slides/app runtime if needed + return 'none'; + } + + needsTemplate(): boolean { + return false; + } + + getTemplateType(): string | null { + return null; // scratch + } + + async deploy(_options?: DeployOptions): Promise { + return { success: false, target: 'platform', error: 'Deploy not applicable for general projects. Use tools to initialize a runtime first.' }; + } + + async export(_options: ExportOptions): Promise { + return { success: false, error: 'Export not applicable for general projects.' 
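The GitHub token cache on `ProjectObjective` is purely in-memory and time-bounded. A standalone restatement of that logic for illustration, plus a usage example; the one-hour default TTL matches the signature above, while the short TTL in the example is only for demonstration.

```ts
// Standalone restatement for illustration; the real methods live on ProjectObjective.
class GitHubTokenCache {
  private entry: { token: string; username: string; expiresAt: number } | null = null;

  set(token: string, username: string, ttl = 3_600_000): void {
    this.entry = { token, username, expiresAt: Date.now() + ttl };
  }

  get(): { token: string; username: string } | null {
    if (!this.entry) return null;
    if (Date.now() >= this.entry.expiresAt) {
      this.entry = null; // expired, cleared lazily on read as in getGitHubToken()
      return null;
    }
    return { token: this.entry.token, username: this.entry.username };
  }

  clear(): void {
    this.entry = null;
  }
}

// Usage: cache after a successful export, reuse on the next one;
// the cache is also lost whenever the Durable Object is evicted, since nothing is persisted.
const cache = new GitHubTokenCache();
cache.set('gho_example', 'octocat', 5_000);          // 5s TTL just for the example
console.log(cache.get());                             // { token: 'gho_example', username: 'octocat' }
setTimeout(() => console.log(cache.get()), 6_000);    // null
```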
}; + } +} + diff --git a/worker/agents/core/objectives/presentation.ts b/worker/agents/core/objectives/presentation.ts new file mode 100644 index 00000000..d0a91cfb --- /dev/null +++ b/worker/agents/core/objectives/presentation.ts @@ -0,0 +1,131 @@ +import { ProjectObjective } from './base'; +import { BaseProjectState } from '../state'; +import { ProjectType, RuntimeType, ExportResult, ExportOptions, DeployResult, DeployOptions } from '../types'; +import type { AgentInfrastructure } from '../AgentCore'; +import { WebSocketMessageResponses, PREVIEW_EXPIRED_ERROR } from '../../constants'; +import { AppService } from '../../../database/services/AppService'; + +/** + * WIP - PresentationObjective - Slides/Docs/Marketing Materials + * + * Produces: Spectacle-based presentations + * Runtime: Sandbox + * Template: Spectacle template (R2-backed) + * Export: PDF, Google Slides, PowerPoint + * + */ +export class PresentationObjective + extends ProjectObjective { + + constructor(infrastructure: AgentInfrastructure) { + super(infrastructure); + } + + // ========================================== + // IDENTITY + // ========================================== + + getType(): ProjectType { + return 'presentation'; + } + + // ========================================== + // RUNTIME & INFRASTRUCTURE + // ========================================== + + getRuntime(): RuntimeType { + return 'sandbox'; + } + + needsTemplate(): boolean { + return true; + } + + getTemplateType(): string | null { + return 'spectacle'; // New template to be created + } + + // ========================================== + // DEPLOYMENT & EXPORT + // ========================================== + + async deploy(options?: DeployOptions): Promise { + const target = options?.target ?? 'platform'; + if (target !== 'platform') { + const error = `Unsupported deployment target "${target}" for presentations`; + this.logger.error(error); + return { success: false, target, error }; + } + + try { + this.logger.info('Deploying presentation to Workers for Platforms'); + + if (!this.state.sandboxInstanceId) { + await this.deploymentManager.deployToSandbox(); + + if (!this.state.sandboxInstanceId) { + const error = 'Failed to deploy to sandbox service'; + this.logger.error(error); + return { success: false, target, error }; + } + } + + const result = await this.deploymentManager.deployToCloudflare({ + target, + callbacks: { + onStarted: (data) => this.broadcast(WebSocketMessageResponses.CLOUDFLARE_DEPLOYMENT_STARTED, data), + onCompleted: (data) => this.broadcast(WebSocketMessageResponses.CLOUDFLARE_DEPLOYMENT_COMPLETED, data), + onError: (data) => this.broadcast(WebSocketMessageResponses.CLOUDFLARE_DEPLOYMENT_ERROR, data), + onPreviewExpired: () => { + this.deploymentManager.deployToSandbox(); + this.broadcast(WebSocketMessageResponses.CLOUDFLARE_DEPLOYMENT_ERROR, { + message: PREVIEW_EXPIRED_ERROR, + error: PREVIEW_EXPIRED_ERROR + }); + } + } + }); + + if (result.deploymentUrl && result.deploymentId) { + const appService = new AppService(this.env); + await appService.updateDeploymentId(this.getAgentId(), result.deploymentId); + } + + return { + success: !!result.deploymentUrl, + target, + url: result.deploymentUrl || undefined, + metadata: { + deploymentId: result.deploymentId, + workersUrl: result.deploymentUrl + } + }; + } catch (error) { + const message = error instanceof Error ? 
error.message : 'Unknown presentation deployment error'; + this.logger.error('Presentation deployment error:', error); + this.broadcast(WebSocketMessageResponses.CLOUDFLARE_DEPLOYMENT_ERROR, { + message: 'Deployment failed', + error: message + }); + return { success: false, target, error: message }; + } + } + + async export(options: ExportOptions): Promise { + const allowedKinds: Array = ['pdf', 'pptx', 'googleslides']; + if (!allowedKinds.includes(options.kind)) { + const error = `Unsupported presentation export kind "${options.kind}"`; + this.logger.warn(error); + return { success: false, error }; + } + + const format = options.format || options.kind; + this.logger.info('Presentation export requested', { format }); + + return { + success: false, + error: 'Presentation export not yet implemented - coming in Phase 3', + metadata: { format } + }; + } +} diff --git a/worker/agents/core/objectives/workflow.ts b/worker/agents/core/objectives/workflow.ts new file mode 100644 index 00000000..e53def58 --- /dev/null +++ b/worker/agents/core/objectives/workflow.ts @@ -0,0 +1,113 @@ +import { ProjectObjective } from './base'; +import { BaseProjectState } from '../state'; +import { ProjectType, RuntimeType, ExportResult, ExportOptions, DeployResult, DeployOptions } from '../types'; +import type { AgentInfrastructure } from '../AgentCore'; +import { WebSocketMessageResponses } from '../../constants'; + +/** + * WIP! + * WorkflowObjective - Backend-Only Workflows + * + * Produces: Cloudflare Workers without UI (APIs, scheduled jobs, queues) + * Runtime: Sandbox for now, Dynamic Worker Loaders in the future + * Template: In-memory (no R2) + * Export: Deploy to Cloudflare Workers in user's account + */ +export class WorkflowObjective + extends ProjectObjective { + + constructor(infrastructure: AgentInfrastructure) { + super(infrastructure); + } + + // ========================================== + // IDENTITY + // ========================================== + + getType(): ProjectType { + return 'workflow'; + } + + // ========================================== + // RUNTIME & INFRASTRUCTURE + // ========================================== + + getRuntime(): RuntimeType { + return 'worker'; + } + + needsTemplate(): boolean { + return false; // In-memory templates + } + + getTemplateType(): string | null { + return null; + } + + // ========================================== + // DEPLOYMENT & EXPORT + // ========================================== + + async deploy(options?: DeployOptions): Promise { + const target = options?.target ?? 'user'; + + try { + this.logger.info('Deploying workflow to Cloudflare Workers (user account)', { target }); + + const result = await this.deploymentManager.deployToCloudflare({ + target, + callbacks: { + onStarted: (data) => { + this.broadcast(WebSocketMessageResponses.CLOUDFLARE_DEPLOYMENT_STARTED, data); + }, + onCompleted: (data) => { + this.broadcast(WebSocketMessageResponses.CLOUDFLARE_DEPLOYMENT_COMPLETED, data); + }, + onError: (data) => { + this.broadcast(WebSocketMessageResponses.CLOUDFLARE_DEPLOYMENT_ERROR, data); + } + } + }); + + return { + success: !!result.deploymentUrl, + target, + url: result.deploymentUrl || undefined, + deploymentId: result.deploymentId, + metadata: { + deploymentId: result.deploymentId, + workersUrl: result.deploymentUrl + } + }; + } catch (error) { + const message = error instanceof Error ? 
error.message : 'Unknown workflow deployment error'; + this.logger.error('Workflow deployment failed', error); + this.broadcast(WebSocketMessageResponses.CLOUDFLARE_DEPLOYMENT_ERROR, { + message: 'Workflow deployment failed', + error: message + }); + + return { + success: false, + target, + error: message + }; + } + } + + async export(options: ExportOptions): Promise { + if (options.kind !== 'workflow') { + const error = 'Workflow export must be invoked with kind="workflow"'; + this.logger.warn(error, { kind: options.kind }); + return { success: false, error }; + } + + const deployResult = await this.deploy(options); + return { + success: deployResult.success, + url: deployResult.url, + error: deployResult.error, + metadata: deployResult.metadata + }; + } +} diff --git a/worker/agents/core/simpleGeneratorAgent.ts b/worker/agents/core/simpleGeneratorAgent.ts deleted file mode 100644 index 3975d570..00000000 --- a/worker/agents/core/simpleGeneratorAgent.ts +++ /dev/null @@ -1,2707 +0,0 @@ -import { Agent, AgentContext, Connection, ConnectionContext } from 'agents'; -import { - Blueprint, - PhaseConceptGenerationSchemaType, - PhaseConceptType, - FileConceptType, - FileOutputType, - PhaseImplementationSchemaType, -} from '../schemas'; -import { ExecuteCommandsResponse, GitHubPushRequest, PreviewType, RuntimeError, StaticAnalysisResponse, TemplateDetails } from '../../services/sandbox/sandboxTypes'; -import { GitHubExportResult } from '../../services/github/types'; -import { GitHubService } from '../../services/github/GitHubService'; -import { CodeGenState, CurrentDevState, MAX_PHASES } from './state'; -import { AllIssues, AgentSummary, AgentInitArgs, PhaseExecutionResult, UserContext } from './types'; -import { PREVIEW_EXPIRED_ERROR, WebSocketMessageResponses } from '../constants'; -import { broadcastToConnections, handleWebSocketClose, handleWebSocketMessage, sendToConnection } from './websocket'; -import { createObjectLogger, StructuredLogger } from '../../logger'; -import { ProjectSetupAssistant } from '../assistants/projectsetup'; -import { UserConversationProcessor, RenderToolCall } from '../operations/UserConversationProcessor'; -import { FileManager } from '../services/implementations/FileManager'; -import { StateManager } from '../services/implementations/StateManager'; -import { DeploymentManager } from '../services/implementations/DeploymentManager'; -// import { WebSocketBroadcaster } from '../services/implementations/WebSocketBroadcaster'; -import { GenerationContext } from '../domain/values/GenerationContext'; -import { IssueReport } from '../domain/values/IssueReport'; -import { PhaseImplementationOperation } from '../operations/PhaseImplementation'; -import { FileRegenerationOperation } from '../operations/FileRegeneration'; -import { PhaseGenerationOperation } from '../operations/PhaseGeneration'; -import { ScreenshotAnalysisOperation } from '../operations/ScreenshotAnalysis'; -// Database schema imports removed - using zero-storage OAuth flow -import { BaseSandboxService } from '../../services/sandbox/BaseSandboxService'; -import { WebSocketMessageData, WebSocketMessageType } from '../../api/websocketTypes'; -import { InferenceContext, AgentActionKey } from '../inferutils/config.types'; -import { AGENT_CONFIG } from '../inferutils/config'; -import { ModelConfigService } from '../../database/services/ModelConfigService'; -import { fixProjectIssues } from '../../services/code-fixer'; -import { GitVersionControl } from '../git'; -import { FastCodeFixerOperation } from 
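The diff adds four concrete objectives, but the construction site is outside this excerpt; a plausible, purely hypothetical way the agent could pick one from the `ProjectType` in its state is sketched below. The module paths assume the factory lives next to AgentCore in worker/agents/core/, and the type parameters on `ProjectObjective` (elided in this rendering) are left to inference.

```ts
// Hypothetical factory; the actual selection logic is not shown in this diff.
import { AppObjective } from './objectives/app';
import { PresentationObjective } from './objectives/presentation';
import { WorkflowObjective } from './objectives/workflow';
import { GeneralObjective } from './objectives/general';
import type { ProjectType } from './types';
import type { AgentInfrastructure } from './AgentCore';

export function createObjective(type: ProjectType, infrastructure: AgentInfrastructure) {
  switch (type) {
    case 'app':          return new AppObjective(infrastructure);
    case 'presentation': return new PresentationObjective(infrastructure);
    case 'workflow':     return new WorkflowObjective(infrastructure);
    case 'general':
    default:             return new GeneralObjective(infrastructure);
  }
}
```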
'../operations/PostPhaseCodeFixer'; -import { looksLikeCommand, validateAndCleanBootstrapCommands } from '../utils/common'; -import { customizePackageJson, customizeTemplateFiles, generateBootstrapScript, generateProjectName } from '../utils/templateCustomizer'; -import { generateBlueprint } from '../planning/blueprint'; -import { AppService } from '../../database'; -import { RateLimitExceededError } from 'shared/types/errors'; -import { ImageAttachment, type ProcessedImageAttachment } from '../../types/image-attachment'; -import { OperationOptions } from '../operations/common'; -import { CodingAgentInterface } from '../services/implementations/CodingAgent'; -import { ImageType, uploadImage } from 'worker/utils/images'; -import { ConversationMessage, ConversationState } from '../inferutils/common'; -import { DeepCodeDebugger } from '../assistants/codeDebugger'; -import { DeepDebugResult } from './types'; -import { StateMigration } from './stateMigration'; -import { generateNanoId } from 'worker/utils/idGenerator'; -import { updatePackageJson } from '../utils/packageSyncer'; -import { IdGenerator } from '../utils/idGenerator'; - -interface Operations { - regenerateFile: FileRegenerationOperation; - generateNextPhase: PhaseGenerationOperation; - analyzeScreenshot: ScreenshotAnalysisOperation; - implementPhase: PhaseImplementationOperation; - fastCodeFixer: FastCodeFixerOperation; - processUserMessage: UserConversationProcessor; -} - -const DEFAULT_CONVERSATION_SESSION_ID = 'default'; - -/** - * SimpleCodeGeneratorAgent - Deterministically orchestrated agent - * - * Manages the lifecycle of code generation including: - * - Blueprint, phase generation, phase implementation, review cycles orchestrations - * - File streaming with WebSocket updates - * - Code validation and error correction - * - Deployment to sandbox service - */ -export class SimpleCodeGeneratorAgent extends Agent { - private static readonly MAX_COMMANDS_HISTORY = 10; - private static readonly PROJECT_NAME_PREFIX_MAX_LENGTH = 20; - - protected projectSetupAssistant: ProjectSetupAssistant | undefined; - protected stateManager!: StateManager; - protected fileManager!: FileManager; - protected codingAgent: CodingAgentInterface = new CodingAgentInterface(this); - - protected deploymentManager!: DeploymentManager; - protected git: GitVersionControl; - - private previewUrlCache: string = ''; - private templateDetailsCache: TemplateDetails | null = null; - - // In-memory storage for user-uploaded images (not persisted in DO state) - private pendingUserImages: ProcessedImageAttachment[] = [] - private generationPromise: Promise | null = null; - private currentAbortController?: AbortController; - private deepDebugPromise: Promise<{ transcript: string } | { error: string }> | null = null; - private deepDebugConversationId: string | null = null; - - // GitHub token cache (ephemeral, lost on DO eviction) - private githubTokenCache: { - token: string; - username: string; - expiresAt: number; - } | null = null; - - - protected operations: Operations = { - regenerateFile: new FileRegenerationOperation(), - generateNextPhase: new PhaseGenerationOperation(), - analyzeScreenshot: new ScreenshotAnalysisOperation(), - implementPhase: new PhaseImplementationOperation(), - fastCodeFixer: new FastCodeFixerOperation(), - processUserMessage: new UserConversationProcessor() - }; - - public _logger: StructuredLogger | undefined; - - private initLogger(agentId: string, sessionId: string, userId: string) { - this._logger = createObjectLogger(this, 
'CodeGeneratorAgent'); - this._logger.setObjectId(agentId); - this._logger.setFields({ - sessionId, - agentId, - userId, - }); - return this._logger; - } - - logger(): StructuredLogger { - if (!this._logger) { - this._logger = this.initLogger(this.getAgentId(), this.state.sessionId, this.state.inferenceContext.userId); - } - return this._logger; - } - - getAgentId() { - return this.state.inferenceContext.agentId; - } - - initialState: CodeGenState = { - blueprint: {} as Blueprint, - projectName: "", - query: "", - generatedPhases: [], - generatedFilesMap: {}, - agentMode: 'deterministic', - sandboxInstanceId: undefined, - templateName: '', - commandsHistory: [], - lastPackageJson: '', - pendingUserInputs: [], - inferenceContext: {} as InferenceContext, - sessionId: '', - hostname: '', - conversationMessages: [], - currentDevState: CurrentDevState.IDLE, - phasesCounter: MAX_PHASES, - mvpGenerated: false, - shouldBeGenerating: false, - reviewingInitiated: false, - projectUpdatesAccumulator: [], - lastDeepDebugTranscript: null, - }; - - constructor(ctx: AgentContext, env: Env) { - super(ctx, env); - this.sql`CREATE TABLE IF NOT EXISTS full_conversations (id TEXT PRIMARY KEY, messages TEXT)`; - this.sql`CREATE TABLE IF NOT EXISTS compact_conversations (id TEXT PRIMARY KEY, messages TEXT)`; - - // Initialize StateManager - this.stateManager = new StateManager( - () => this.state, - (s) => this.setState(s) - ); - - // Initialize GitVersionControl (bind sql to preserve 'this' context) - this.git = new GitVersionControl(this.sql.bind(this)); - - // Initialize FileManager - this.fileManager = new FileManager(this.stateManager, () => this.getTemplateDetails(), this.git); - - // Initialize DeploymentManager first (manages sandbox client caching) - // DeploymentManager will use its own getClient() override for caching - this.deploymentManager = new DeploymentManager( - { - stateManager: this.stateManager, - fileManager: this.fileManager, - getLogger: () => this.logger(), - env: this.env - }, - SimpleCodeGeneratorAgent.MAX_COMMANDS_HISTORY - ); - } - - /** - * Initialize the code generator with project blueprint and template - * Sets up services and begins deployment process - */ - async initialize( - initArgs: AgentInitArgs, - ..._args: unknown[] - ): Promise { - - const { query, language, frameworks, hostname, inferenceContext, templateInfo } = initArgs; - const sandboxSessionId = DeploymentManager.generateNewSessionId(); - this.initLogger(inferenceContext.agentId, sandboxSessionId, inferenceContext.userId); - - // Generate a blueprint - this.logger().info('Generating blueprint', { query, queryLength: query.length, imagesCount: initArgs.images?.length || 0 }); - this.logger().info(`Using language: ${language}, frameworks: ${frameworks ? 
frameworks.join(", ") : "none"}`); - - const blueprint = await generateBlueprint({ - env: this.env, - inferenceContext, - query, - language: language!, - frameworks: frameworks!, - templateDetails: templateInfo.templateDetails, - templateMetaInfo: templateInfo.selection, - images: initArgs.images, - stream: { - chunk_size: 256, - onChunk: (chunk) => { - // initArgs.writer.write({chunk}); - initArgs.onBlueprintChunk(chunk); - } - } - }) - - const packageJson = templateInfo.templateDetails?.allFiles['package.json']; - - this.templateDetailsCache = templateInfo.templateDetails; - - const projectName = generateProjectName( - blueprint?.projectName || templateInfo.templateDetails.name, - generateNanoId(), - SimpleCodeGeneratorAgent.PROJECT_NAME_PREFIX_MAX_LENGTH - ); - - this.logger().info('Generated project name', { projectName }); - - this.setState({ - ...this.initialState, - projectName, - query, - blueprint, - templateName: templateInfo.templateDetails.name, - sandboxInstanceId: undefined, - generatedPhases: [], - commandsHistory: [], - lastPackageJson: packageJson, - sessionId: sandboxSessionId, - hostname, - inferenceContext, - }); - - await this.gitInit(); - - // Customize template files (package.json, wrangler.jsonc, .bootstrap.js, .gitignore) - const customizedFiles = customizeTemplateFiles( - templateInfo.templateDetails.allFiles, - { - projectName, - commandsHistory: [] // Empty initially, will be updated later - } - ); - - this.logger().info('Customized template files', { - files: Object.keys(customizedFiles) - }); - - // Save customized files to git - const filesToSave = Object.entries(customizedFiles).map(([filePath, content]) => ({ - filePath, - fileContents: content, - filePurpose: 'Project configuration file' - })); - - await this.fileManager.saveGeneratedFiles( - filesToSave, - 'Initialize project configuration files' - ); - - this.logger().info('Committed customized template files to git'); - - this.initializeAsync().catch((error: unknown) => { - this.broadcastError("Initialization failed", error); - }); - this.logger().info(`Agent ${this.getAgentId()} session: ${this.state.sessionId} initialized successfully`); - await this.saveToDatabase(); - return this.state; - } - - private async initializeAsync(): Promise { - try { - const [, setupCommands] = await Promise.all([ - this.deployToSandbox(), - this.getProjectSetupAssistant().generateSetupCommands(), - this.generateReadme() - ]); - this.logger().info("Deployment to sandbox service and initial commands predictions completed successfully"); - await this.executeCommands(setupCommands.commands); - this.logger().info("Initial commands executed successfully"); - } catch (error) { - this.logger().error("Error during async initialization:", error); - // throw error; - } - } - - async isInitialized() { - return this.getAgentId() ? 
true : false - } - - async onStart(props?: Record | undefined): Promise { - this.logger().info(`Agent ${this.getAgentId()} session: ${this.state.sessionId} onStart`, { props }); - - // Ignore if agent not initialized - if (!this.state.query) { - this.logger().warn(`Agent ${this.getAgentId()} session: ${this.state.sessionId} onStart ignored, agent not initialized`); - return; - } - - // Ensure state is migrated for any previous versions - this.migrateStateIfNeeded(); - - // Check if this is a read-only operation - const readOnlyMode = props?.readOnlyMode === true; - - if (readOnlyMode) { - this.logger().info(`Agent ${this.getAgentId()} starting in READ-ONLY mode - skipping expensive initialization`); - return; - } - - // migrate overwritten package.jsons - const oldPackageJson = this.fileManager.getFile('package.json')?.fileContents || this.state.lastPackageJson; - if (oldPackageJson) { - const packageJson = customizePackageJson(oldPackageJson, this.state.projectName); - this.fileManager.saveGeneratedFiles([ - { - filePath: 'package.json', - fileContents: packageJson, - filePurpose: 'Project configuration file' - } - ], 'chore: fix overwritten package.json'); - } - - // Full initialization for read-write operations - await this.gitInit(); - this.logger().info(`Agent ${this.getAgentId()} session: ${this.state.sessionId} onStart being processed, template name: ${this.state.templateName}`); - // Fill the template cache - await this.ensureTemplateDetails(); - this.logger().info(`Agent ${this.getAgentId()} session: ${this.state.sessionId} onStart processed successfully`); - } - - private async gitInit() { - try { - await this.git.init(); - this.logger().info("Git initialized successfully"); - // Check if there is any commit - const head = await this.git.getHead(); - - if (!head) { - this.logger().info("No commits found, creating initial commit"); - // get all generated files and commit them - const generatedFiles = this.fileManager.getGeneratedFiles(); - if (generatedFiles.length === 0) { - this.logger().info("No generated files found, skipping initial commit"); - return; - } - await this.git.commit(generatedFiles, "Initial commit"); - this.logger().info("Initial commit created successfully"); - } - } catch (error) { - this.logger().error("Error during git init:", error); - } - } - - onStateUpdate(_state: CodeGenState, _source: "server" | Connection) {} - - setState(state: CodeGenState): void { - try { - super.setState(state); - } catch (error) { - this.broadcastError("Error setting state", error); - this.logger().error("State details:", { - originalState: JSON.stringify(this.state, null, 2), - newState: JSON.stringify(state, null, 2) - }); - } - } - - onConnect(connection: Connection, ctx: ConnectionContext) { - this.logger().info(`Agent connected for agent ${this.getAgentId()}`, { connection, ctx }); - sendToConnection(connection, 'agent_connected', { - state: this.state, - templateDetails: this.getTemplateDetails() - }); - } - - async ensureTemplateDetails() { - if (!this.templateDetailsCache) { - this.logger().info(`Loading template details for: ${this.state.templateName}`); - const results = await BaseSandboxService.getTemplateDetails(this.state.templateName); - if (!results.success || !results.templateDetails) { - throw new Error(`Failed to get template details for: ${this.state.templateName}`); - } - - const templateDetails = results.templateDetails; - - const customizedAllFiles = { ...templateDetails.allFiles }; - - this.logger().info('Customizing template files for older app'); - const 
customizedFiles = customizeTemplateFiles( - templateDetails.allFiles, - { - projectName: this.state.projectName, - commandsHistory: this.getBootstrapCommands() - } - ); - Object.assign(customizedAllFiles, customizedFiles); - - this.templateDetailsCache = { - ...templateDetails, - allFiles: customizedAllFiles - }; - this.logger().info('Template details loaded and customized'); - } - return this.templateDetailsCache; - } - - private getTemplateDetails(): TemplateDetails { - if (!this.templateDetailsCache) { - this.ensureTemplateDetails(); - throw new Error('Template details not loaded. Call ensureTemplateDetails() first.'); - } - return this.templateDetailsCache; - } - - /** - * Update bootstrap script when commands history changes - * Called after significant command executions - */ - private async updateBootstrapScript(commandsHistory: string[]): Promise { - if (!commandsHistory || commandsHistory.length === 0) { - return; - } - - // Use only validated commands - const bootstrapScript = generateBootstrapScript( - this.state.projectName, - commandsHistory - ); - - await this.fileManager.saveGeneratedFile( - { - filePath: '.bootstrap.js', - fileContents: bootstrapScript, - filePurpose: 'Updated bootstrap script for first-time clone setup' - }, - 'chore: Update bootstrap script with latest commands' - ); - - this.logger().info('Updated bootstrap script with commands', { - commandCount: commandsHistory.length, - commands: commandsHistory - }); - } - - /* - * Each DO has 10 gb of sqlite storage. However, the way agents sdk works, it stores the 'state' object of the agent as a single row - * in the cf_agents_state table. And row size has a much smaller limit in sqlite. Thus, we only keep current compactified conversation - * in the agent's core state and store the full conversation in a separate DO table. 
- */ - getConversationState(id: string = DEFAULT_CONVERSATION_SESSION_ID): ConversationState { - const currentConversation = this.state.conversationMessages; - const rows = this.sql<{ messages: string, id: string }>`SELECT * FROM full_conversations WHERE id = ${id}`; - let fullHistory: ConversationMessage[] = []; - if (rows.length > 0 && rows[0].messages) { - try { - const parsed = JSON.parse(rows[0].messages); - if (Array.isArray(parsed)) { - fullHistory = parsed as ConversationMessage[]; - } - } catch (_e) {} - } - if (fullHistory.length === 0) { - fullHistory = currentConversation; - } - // Load compact (running) history from sqlite with fallback to in-memory state for migration - const compactRows = this.sql<{ messages: string, id: string }>`SELECT * FROM compact_conversations WHERE id = ${id}`; - let runningHistory: ConversationMessage[] = []; - if (compactRows.length > 0 && compactRows[0].messages) { - try { - const parsed = JSON.parse(compactRows[0].messages); - if (Array.isArray(parsed)) { - runningHistory = parsed as ConversationMessage[]; - } - } catch (_e) {} - } - if (runningHistory.length === 0) { - runningHistory = currentConversation; - } - - // Remove duplicates - const deduplicateMessages = (messages: ConversationMessage[]): ConversationMessage[] => { - const seen = new Set(); - return messages.filter(msg => { - if (seen.has(msg.conversationId)) { - return false; - } - seen.add(msg.conversationId); - return true; - }); - }; - - runningHistory = deduplicateMessages(runningHistory); - fullHistory = deduplicateMessages(fullHistory); - - return { - id: id, - runningHistory, - fullHistory, - }; - } - - setConversationState(conversations: ConversationState) { - const serializedFull = JSON.stringify(conversations.fullHistory); - const serializedCompact = JSON.stringify(conversations.runningHistory); - try { - this.logger().info(`Saving conversation state ${conversations.id}, full_length: ${serializedFull.length}, compact_length: ${serializedCompact.length}`); - this.sql`INSERT OR REPLACE INTO compact_conversations (id, messages) VALUES (${conversations.id}, ${serializedCompact})`; - this.sql`INSERT OR REPLACE INTO full_conversations (id, messages) VALUES (${conversations.id}, ${serializedFull})`; - } catch (error) { - this.logger().error(`Failed to save conversation state ${conversations.id}`, error); - } - } - - addConversationMessage(message: ConversationMessage) { - const conversationState = this.getConversationState(); - if (!conversationState.runningHistory.find(msg => msg.conversationId === message.conversationId)) { - conversationState.runningHistory.push(message); - } else { - conversationState.runningHistory = conversationState.runningHistory.map(msg => { - if (msg.conversationId === message.conversationId) { - return message; - } - return msg; - }); - } - if (!conversationState.fullHistory.find(msg => msg.conversationId === message.conversationId)) { - conversationState.fullHistory.push(message); - } else { - conversationState.fullHistory = conversationState.fullHistory.map(msg => { - if (msg.conversationId === message.conversationId) { - return message; - } - return msg; - }); - } - this.setConversationState(conversationState); - } - - private async saveToDatabase() { - this.logger().info(`Blueprint generated successfully for agent ${this.getAgentId()}`); - // Save the app to database (authenticated users only) - const appService = new AppService(this.env); - await appService.createApp({ - id: this.state.inferenceContext.agentId, - userId: 
this.state.inferenceContext.userId, - sessionToken: null, - title: this.state.blueprint.title || this.state.query.substring(0, 100), - description: this.state.blueprint.description || null, - originalPrompt: this.state.query, - finalPrompt: this.state.query, - framework: this.state.blueprint.frameworks?.[0], - visibility: 'private', - status: 'generating', - createdAt: new Date(), - updatedAt: new Date() - }); - this.logger().info(`App saved successfully to database for agent ${this.state.inferenceContext.agentId}`, { - agentId: this.state.inferenceContext.agentId, - userId: this.state.inferenceContext.userId, - visibility: 'private' - }); - this.logger().info(`Agent initialized successfully for agent ${this.state.inferenceContext.agentId}`); - } - - getPreviewUrlCache() { - return this.previewUrlCache; - } - - getProjectSetupAssistant(): ProjectSetupAssistant { - if (this.projectSetupAssistant === undefined) { - this.projectSetupAssistant = new ProjectSetupAssistant({ - env: this.env, - agentId: this.getAgentId(), - query: this.state.query, - blueprint: this.state.blueprint, - template: this.getTemplateDetails(), - inferenceContext: this.state.inferenceContext - }); - } - return this.projectSetupAssistant; - } - - getSessionId() { - return this.deploymentManager.getSessionId(); - } - - getSandboxServiceClient(): BaseSandboxService { - return this.deploymentManager.getClient(); - } - - getGit(): GitVersionControl { - return this.git; - } - - isCodeGenerating(): boolean { - return this.generationPromise !== null; - } - - rechargePhasesCounter(max_phases: number = MAX_PHASES): void { - if (this.getPhasesCounter() <= max_phases) { - this.setState({ - ...this.state, - phasesCounter: max_phases - }); - } - } - - decrementPhasesCounter(): number { - const counter = this.getPhasesCounter() - 1; - this.setState({ - ...this.state, - phasesCounter: counter - }); - return counter; - } - - getPhasesCounter(): number { - return this.state.phasesCounter; - } - - getOperationOptions(): OperationOptions { - return { - env: this.env, - agentId: this.getAgentId(), - context: GenerationContext.from(this.state, this.getTemplateDetails(), this.logger()), - logger: this.logger(), - inferenceContext: this.getInferenceContext(), - agent: this.codingAgent - }; - } - - /** - * Gets or creates an abort controller for the current operation - * Reuses existing controller for nested operations (e.g., tool calling) - */ - protected getOrCreateAbortController(): AbortController { - // Don't reuse aborted controllers - if (this.currentAbortController && !this.currentAbortController.signal.aborted) { - return this.currentAbortController; - } - - // Create new controller in memory for new operation - this.currentAbortController = new AbortController(); - - return this.currentAbortController; - } - - /** - * Cancels the current inference operation if any - */ - public cancelCurrentInference(): boolean { - if (this.currentAbortController) { - this.logger().info('Cancelling current inference operation'); - this.currentAbortController.abort(); - this.currentAbortController = undefined; - return true; - } - return false; - } - - /** - * Clears abort controller after successful completion - */ - protected clearAbortController(): void { - this.currentAbortController = undefined; - } - - /** - * Gets inference context with abort signal - * Reuses existing abort controller for nested operations - */ - protected getInferenceContext(): InferenceContext { - const controller = this.getOrCreateAbortController(); - - return { - 
...this.state.inferenceContext, - abortSignal: controller.signal, - }; - } - - private createNewIncompletePhase(phaseConcept: PhaseConceptType) { - this.setState({ - ...this.state, - generatedPhases: [...this.state.generatedPhases, { - ...phaseConcept, - completed: false - }] - }) - - this.logger().info("Created new incomplete phase:", JSON.stringify(this.state.generatedPhases, null, 2)); - } - - private markPhaseComplete(phaseName: string) { - // First find the phase - const phases = this.state.generatedPhases; - if (!phases.some(p => p.name === phaseName)) { - this.logger().warn(`Phase ${phaseName} not found in generatedPhases array, skipping save`); - return; - } - - // Update the phase - this.setState({ - ...this.state, - generatedPhases: phases.map(p => p.name === phaseName ? { ...p, completed: true } : p) - }); - - this.logger().info("Completed phases:", JSON.stringify(phases, null, 2)); - } - - private broadcastError(context: string, error: unknown): void { - const errorMessage = error instanceof Error ? error.message : String(error); - this.logger().error(`${context}:`, error); - this.broadcast(WebSocketMessageResponses.ERROR, { - error: `${context}: ${errorMessage}` - }); - } - - async generateReadme() { - this.logger().info('Generating README.md'); - // Only generate if it doesn't exist - if (this.fileManager.fileExists('README.md')) { - this.logger().info('README.md already exists'); - return; - } - - this.broadcast(WebSocketMessageResponses.FILE_GENERATING, { - message: 'Generating README.md', - filePath: 'README.md', - filePurpose: 'Project documentation and setup instructions' - }); - - const readme = await this.operations.implementPhase.generateReadme(this.getOperationOptions()); - - await this.fileManager.saveGeneratedFile(readme, "feat: README.md"); - - this.broadcast(WebSocketMessageResponses.FILE_GENERATED, { - message: 'README.md generated successfully', - file: readme - }); - this.logger().info('README.md generated successfully'); - } - - async queueUserRequest(request: string, images?: ProcessedImageAttachment[]): Promise { - this.rechargePhasesCounter(3); - this.setState({ - ...this.state, - pendingUserInputs: [...this.state.pendingUserInputs, request] - }); - if (images && images.length > 0) { - this.logger().info('Storing user images in-memory for phase generation', { - imageCount: images.length, - }); - this.pendingUserImages = [...this.pendingUserImages, ...images]; - } - } - - private fetchPendingUserRequests(): string[] { - const inputs = this.state.pendingUserInputs; - if (inputs.length > 0) { - this.setState({ - ...this.state, - pendingUserInputs: [] - }); - } - return inputs; - } - - /** - * State machine controller for code generation with user interaction support - * Executes phases sequentially with review cycles and proper state transitions - */ - async generateAllFiles(reviewCycles: number = 5): Promise { - if (this.state.mvpGenerated && this.state.pendingUserInputs.length === 0) { - this.logger().info("Code generation already completed and no user inputs pending"); - return; - } - if (this.isCodeGenerating()) { - this.logger().info("Code generation already in progress"); - return; - } - this.generationPromise = this.launchStateMachine(reviewCycles); - await this.generationPromise; - } - - private async launchStateMachine(reviewCycles: number) { - this.broadcast(WebSocketMessageResponses.GENERATION_STARTED, { - message: 'Starting code generation', - totalFiles: this.getTotalFiles() - }); - this.logger().info('Starting code generation', { - totalFiles: 
this.getTotalFiles() - }); - await this.ensureTemplateDetails(); - - let currentDevState = CurrentDevState.PHASE_IMPLEMENTING; - const generatedPhases = this.state.generatedPhases; - const incompletedPhases = generatedPhases.filter(phase => !phase.completed); - let phaseConcept : PhaseConceptType | undefined; - if (incompletedPhases.length > 0) { - phaseConcept = incompletedPhases[incompletedPhases.length - 1]; - this.logger().info('Resuming code generation from incompleted phase', { - phase: phaseConcept - }); - } else if (generatedPhases.length > 0) { - currentDevState = CurrentDevState.PHASE_GENERATING; - this.logger().info('Resuming code generation after generating all phases', { - phase: generatedPhases[generatedPhases.length - 1] - }); - } else { - phaseConcept = this.state.blueprint.initialPhase; - this.logger().info('Starting code generation from initial phase', { - phase: phaseConcept - }); - this.createNewIncompletePhase(phaseConcept); - } - - let staticAnalysisCache: StaticAnalysisResponse | undefined; - let userContext: UserContext | undefined; - - // Store review cycles for later use - this.setState({ - ...this.state, - reviewCycles: reviewCycles - }); - - try { - let executionResults: PhaseExecutionResult; - // State machine loop - continues until IDLE state - while (currentDevState !== CurrentDevState.IDLE) { - this.logger().info(`[generateAllFiles] Executing state: ${currentDevState}`); - switch (currentDevState) { - case CurrentDevState.PHASE_GENERATING: - executionResults = await this.executePhaseGeneration(); - currentDevState = executionResults.currentDevState; - phaseConcept = executionResults.result; - staticAnalysisCache = executionResults.staticAnalysis; - userContext = executionResults.userContext; - break; - case CurrentDevState.PHASE_IMPLEMENTING: - executionResults = await this.executePhaseImplementation(phaseConcept, staticAnalysisCache, userContext); - currentDevState = executionResults.currentDevState; - staticAnalysisCache = executionResults.staticAnalysis; - userContext = undefined; - break; - case CurrentDevState.REVIEWING: - currentDevState = await this.executeReviewCycle(); - break; - case CurrentDevState.FINALIZING: - currentDevState = await this.executeFinalizing(); - break; - default: - break; - } - } - - this.logger().info("State machine completed successfully"); - } catch (error) { - if (error instanceof RateLimitExceededError) { - this.logger().error("Error in state machine:", error); - this.broadcast(WebSocketMessageResponses.RATE_LIMIT_ERROR, { error }); - } else { - this.broadcastError("Error during generation", error); - } - } finally { - // Clear abort controller after generation completes - this.clearAbortController(); - - const appService = new AppService(this.env); - await appService.updateApp( - this.getAgentId(), - { - status: 'completed', - } - ); - this.generationPromise = null; - this.broadcast(WebSocketMessageResponses.GENERATION_COMPLETE, { - message: "Code generation and review process completed.", - instanceId: this.state.sandboxInstanceId, - }); - } - } - - /** - * Execute phase generation state - generate next phase with user suggestions - */ - async executePhaseGeneration(): Promise { - this.logger().info("Executing PHASE_GENERATING state"); - try { - const currentIssues = await this.fetchAllIssues(); - - // Generate next phase with user suggestions if available - - // Get stored images if user suggestions are present - const pendingUserInputs = this.fetchPendingUserRequests(); - const userContext = (pendingUserInputs.length > 
0) - ? { - suggestions: pendingUserInputs, - images: this.pendingUserImages - } as UserContext - : undefined; - - if (userContext && userContext?.suggestions && userContext.suggestions.length > 0) { - // Only reset pending user inputs if user suggestions were read - this.logger().info("Resetting pending user inputs", { - userSuggestions: userContext.suggestions, - hasImages: !!userContext.images, - imageCount: userContext.images?.length || 0 - }); - - // Clear images after they're passed to phase generation - if (userContext?.images && userContext.images.length > 0) { - this.logger().info('Clearing stored user images after passing to phase generation'); - this.pendingUserImages = []; - } - } - - const nextPhase = await this.generateNextPhase(currentIssues, userContext); - - if (!nextPhase) { - this.logger().info("No more phases to implement, transitioning to FINALIZING"); - return { - currentDevState: CurrentDevState.FINALIZING, - }; - } - - // Store current phase and transition to implementation - this.setState({ - ...this.state, - currentPhase: nextPhase - }); - - return { - currentDevState: CurrentDevState.PHASE_IMPLEMENTING, - result: nextPhase, - staticAnalysis: currentIssues.staticAnalysis, - userContext: userContext, - }; - } catch (error) { - if (error instanceof RateLimitExceededError) { - throw error; - } - this.broadcastError("Error generating phase", error); - return { - currentDevState: CurrentDevState.IDLE, - }; - } - } - - /** - * Execute phase implementation state - implement current phase - */ - async executePhaseImplementation(phaseConcept?: PhaseConceptType, staticAnalysis?: StaticAnalysisResponse, userContext?: UserContext): Promise<{currentDevState: CurrentDevState, staticAnalysis?: StaticAnalysisResponse}> { - try { - this.logger().info("Executing PHASE_IMPLEMENTING state"); - - if (phaseConcept === undefined) { - phaseConcept = this.state.currentPhase; - if (phaseConcept === undefined) { - this.logger().error("No phase concept provided to implement, will call phase generation"); - const results = await this.executePhaseGeneration(); - phaseConcept = results.result; - if (phaseConcept === undefined) { - this.logger().error("No phase concept provided to implement, will return"); - return {currentDevState: CurrentDevState.FINALIZING}; - } - } - } - - this.setState({ - ...this.state, - currentPhase: undefined // reset current phase - }); - - let currentIssues : AllIssues; - if (this.state.sandboxInstanceId) { - if (staticAnalysis) { - // If have cached static analysis, fetch everything else fresh - currentIssues = { - runtimeErrors: await this.fetchRuntimeErrors(true), - staticAnalysis: staticAnalysis, - }; - } else { - currentIssues = await this.fetchAllIssues(true) - } - } else { - currentIssues = { - runtimeErrors: [], - staticAnalysis: { success: true, lint: { issues: [] }, typecheck: { issues: [] } }, - } - } - // Implement the phase with user context (suggestions and images) - await this.implementPhase(phaseConcept, currentIssues, userContext); - - this.logger().info(`Phase ${phaseConcept.name} completed, generating next phase`); - - const phasesCounter = this.decrementPhasesCounter(); - - if ((phaseConcept.lastPhase || phasesCounter <= 0) && this.state.pendingUserInputs.length === 0) return {currentDevState: CurrentDevState.FINALIZING, staticAnalysis: staticAnalysis}; - return {currentDevState: CurrentDevState.PHASE_GENERATING, staticAnalysis: staticAnalysis}; - } catch (error) { - this.logger().error("Error implementing phase", error); - if (error instanceof 
RateLimitExceededError) { - throw error; - } - return {currentDevState: CurrentDevState.IDLE}; - } - } - - /** - * Execute review cycle state - review and cleanup - */ - async executeReviewCycle(): Promise { - this.logger().info("Executing REVIEWING state - review and cleanup"); - if (this.state.reviewingInitiated) { - this.logger().info("Reviewing already initiated, skipping"); - return CurrentDevState.IDLE; - } - this.setState({ - ...this.state, - reviewingInitiated: true - }); - - // If issues/errors found, prompt user if they want to review and cleanup - const issues = await this.fetchAllIssues(false); - if (issues.runtimeErrors.length > 0 || issues.staticAnalysis.typecheck.issues.length > 0) { - this.logger().info("Reviewing stage - issues found, prompting user to review and cleanup"); - const message : ConversationMessage = { - role: "assistant", - content: `If the user responds with yes, launch the 'deep_debug' tool with the prompt to fix all the issues in the app\nThere might be some bugs in the app. Do you want me to try to fix them?`, - conversationId: IdGenerator.generateConversationId(), - } - // Store the message in the conversation history so user's response can trigger the deep debug tool - this.addConversationMessage(message); - - this.broadcast(WebSocketMessageResponses.CONVERSATION_RESPONSE, { - message: message.content, - conversationId: message.conversationId, - isStreaming: false, - }); - } - - return CurrentDevState.IDLE; - } - - /** - * Execute finalizing state - final review and cleanup (runs only once) - */ - async executeFinalizing(): Promise { - this.logger().info("Executing FINALIZING state - final review and cleanup"); - - // Only do finalizing stage if it wasn't done before - if (this.state.mvpGenerated) { - this.logger().info("Finalizing stage already done"); - return CurrentDevState.REVIEWING; - } - this.setState({ - ...this.state, - mvpGenerated: true - }); - - const phaseConcept: PhaseConceptType = { - name: "Finalization and Review", - description: "Full polishing and final review of the application", - files: [], - lastPhase: true - } - - this.createNewIncompletePhase(phaseConcept); - - const currentIssues = await this.fetchAllIssues(true); - - // Run final review and cleanup phase - await this.implementPhase(phaseConcept, currentIssues); - - const numFilesGenerated = this.fileManager.getGeneratedFilePaths().length; - this.logger().info(`Finalization complete. Generated ${numFilesGenerated}/${this.getTotalFiles()} files.`); - - // Transition to IDLE - generation complete - return CurrentDevState.REVIEWING; - } - - async executeDeepDebug( - issue: string, - toolRenderer: RenderToolCall, - streamCb: (chunk: string) => void, - focusPaths?: string[], - ): Promise { - - const debugPromise = (async () => { - try { - const previousTranscript = this.state.lastDeepDebugTranscript ?? 
undefined; - const operationOptions = this.getOperationOptions(); - const filesIndex = operationOptions.context.allFiles - .filter((f) => - !focusPaths?.length || - focusPaths.some((p) => f.filePath.includes(p)), - ); - - const runtimeErrors = await this.fetchRuntimeErrors(true); - - const dbg = new DeepCodeDebugger( - operationOptions.env, - operationOptions.inferenceContext, - ); - - const out = await dbg.run( - { issue, previousTranscript }, - { filesIndex, agent: this.codingAgent, runtimeErrors }, - streamCb, - toolRenderer, - ); - - // Save transcript for next session - this.setState({ - ...this.state, - lastDeepDebugTranscript: out, - }); - - return { success: true as const, transcript: out }; - } catch (e) { - this.logger().error('Deep debugger failed', e); - return { success: false as const, error: `Deep debugger failed: ${String(e)}` }; - } finally{ - this.deepDebugPromise = null; - this.deepDebugConversationId = null; - } - })(); - - // Store promise before awaiting - this.deepDebugPromise = debugPromise; - - return await debugPromise; - } - - /** - * Generate next phase with user context (suggestions and images) - */ - async generateNextPhase(currentIssues: AllIssues, userContext?: UserContext): Promise { - const issues = IssueReport.from(currentIssues); - - // Build notification message - let notificationMsg = "Generating next phase"; - if (userContext?.suggestions && userContext.suggestions.length > 0) { - notificationMsg = `Generating next phase incorporating ${userContext.suggestions.length} user suggestion(s)`; - } - if (userContext?.images && userContext.images.length > 0) { - notificationMsg += ` with ${userContext.images.length} image(s)`; - } - - // Notify phase generation start - this.broadcast(WebSocketMessageResponses.PHASE_GENERATING, { - message: notificationMsg, - issues: issues, - userSuggestions: userContext?.suggestions, - }); - - const result = await this.operations.generateNextPhase.execute( - { - issues, - userContext, - isUserSuggestedPhase: userContext?.suggestions && userContext.suggestions.length > 0 && this.state.mvpGenerated, - }, - this.getOperationOptions() - ) - // Execute install commands if any - if (result.installCommands && result.installCommands.length > 0) { - this.executeCommands(result.installCommands); - } - - // Execute delete commands if any - const filesToDelete = result.files.filter(f => f.changes?.toLowerCase().trim() === 'delete'); - if (filesToDelete.length > 0) { - this.logger().info(`Deleting ${filesToDelete.length} files: ${filesToDelete.map(f => f.path).join(", ")}`); - this.deleteFiles(filesToDelete.map(f => f.path)); - } - - if (result.files.length === 0) { - this.logger().info("No files generated for next phase"); - // Notify phase generation complete - this.broadcast(WebSocketMessageResponses.PHASE_GENERATED, { - message: `No files generated for next phase`, - phase: undefined - }); - return undefined; - } - - this.createNewIncompletePhase(result); - // Notify phase generation complete - this.broadcast(WebSocketMessageResponses.PHASE_GENERATED, { - message: `Generated next phase: ${result.name}`, - phase: result - }); - - return result; - } - - /** - * Implement a single phase of code generation - * Streams file generation with real-time updates and incorporates technical instructions - */ - async implementPhase(phase: PhaseConceptType, currentIssues: AllIssues, userContext?: UserContext, streamChunks: boolean = true, postPhaseFixing: boolean = true): Promise { - const issues = IssueReport.from(currentIssues); - - const 
implementationMsg = userContext?.suggestions && userContext.suggestions.length > 0 - ? `Implementing phase: ${phase.name} with ${userContext.suggestions.length} user suggestion(s)` - : `Implementing phase: ${phase.name}`; - const msgWithImages = userContext?.images && userContext.images.length > 0 - ? `${implementationMsg} and ${userContext.images.length} image(s)` - : implementationMsg; - - this.broadcast(WebSocketMessageResponses.PHASE_IMPLEMENTING, { - message: msgWithImages, - phase: phase, - issues: issues, - }); - - - const result = await this.operations.implementPhase.execute( - { - phase, - issues, - isFirstPhase: this.state.generatedPhases.filter(p => p.completed).length === 0, - fileGeneratingCallback: (filePath: string, filePurpose: string) => { - this.broadcast(WebSocketMessageResponses.FILE_GENERATING, { - message: `Generating file: ${filePath}`, - filePath: filePath, - filePurpose: filePurpose - }); - }, - userContext, - shouldAutoFix: this.state.inferenceContext.enableRealtimeCodeFix, - fileChunkGeneratedCallback: streamChunks ? (filePath: string, chunk: string, format: 'full_content' | 'unified_diff') => { - this.broadcast(WebSocketMessageResponses.FILE_CHUNK_GENERATED, { - message: `Generating file: ${filePath}`, - filePath: filePath, - chunk, - format, - }); - } : (_filePath: string, _chunk: string, _format: 'full_content' | 'unified_diff') => {}, - fileClosedCallback: (file: FileOutputType, message: string) => { - this.broadcast(WebSocketMessageResponses.FILE_GENERATED, { - message, - file, - }); - } - }, - this.getOperationOptions() - ); - - this.broadcast(WebSocketMessageResponses.PHASE_VALIDATING, { - message: `Validating files for phase: ${phase.name}`, - phase: phase, - }); - - // Await the already-created realtime code fixer promises - const finalFiles = await Promise.allSettled(result.fixedFilePromises).then((results: PromiseSettledResult[]) => { - return results.map((result) => { - if (result.status === 'fulfilled') { - return result.value; - } else { - return null; - } - }).filter((f): f is FileOutputType => f !== null); - }); - - // Update state with completed phase - await this.fileManager.saveGeneratedFiles(finalFiles, `feat: ${phase.name}\n\n${phase.description}`); - - this.logger().info("Files generated for phase:", phase.name, finalFiles.map(f => f.filePath)); - - // Execute commands if provided - if (result.commands && result.commands.length > 0) { - this.logger().info("Phase implementation suggested install commands:", result.commands); - await this.executeCommands(result.commands, false); - } - - // Deploy generated files - if (finalFiles.length > 0) { - await this.deployToSandbox(finalFiles, false, phase.name, true); - if (postPhaseFixing) { - await this.applyDeterministicCodeFixes(); - if (this.state.inferenceContext.enableFastSmartCodeFix) { - await this.applyFastSmartCodeFixes(); - } - } - } - - // Validation complete - this.broadcast(WebSocketMessageResponses.PHASE_VALIDATED, { - message: `Files validated for phase: ${phase.name}`, - phase: phase - }); - - this.logger().info("Files generated for phase:", phase.name, finalFiles.map(f => f.filePath)); - - this.logger().info(`Validation complete for phase: ${phase.name}`); - - // Notify phase completion - this.broadcast(WebSocketMessageResponses.PHASE_IMPLEMENTED, { - phase: { - name: phase.name, - files: finalFiles.map(f => ({ - path: f.filePath, - purpose: f.filePurpose, - contents: f.fileContents - })), - description: phase.description - }, - message: "Files generated successfully for phase" - }); 
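The `Promise.allSettled` narrowing used in `implementPhase` above (keep the files whose realtime-fix promises fulfilled, silently drop the rejected ones so a single fixer failure does not abort the phase) is a small reusable pattern. A minimal standalone sketch follows; `FileOutputType` is reduced here to an assumed illustrative shape and `collectFulfilledFiles` is a hypothetical helper name, not part of the agent's API:

```ts
// Illustrative only: trimmed-down stand-in for the agent's FileOutputType.
interface FileOutputType {
  filePath: string;
  fileContents: string;
  filePurpose: string;
}

/**
 * Await a batch of fixer promises and keep only the files that resolved.
 * Rejected promises are dropped rather than failing the whole phase,
 * mirroring how implementPhase tolerates individual realtime-fix failures.
 */
async function collectFulfilledFiles(
  fixedFilePromises: Promise<FileOutputType>[]
): Promise<FileOutputType[]> {
  const settled = await Promise.allSettled(fixedFilePromises);
  return settled
    .map((r) => (r.status === 'fulfilled' ? r.value : null))
    .filter((f): f is FileOutputType => f !== null);
}
```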
- - this.markPhaseComplete(phase.name); - - return { - files: finalFiles, - deploymentNeeded: result.deploymentNeeded, - commands: result.commands - }; - } - - /** - * Get current model configurations (defaults + user overrides) - * Used by WebSocket to provide configuration info to frontend - */ - async getModelConfigsInfo() { - const userId = this.state.inferenceContext.userId; - if (!userId) { - throw new Error('No user session available for model configurations'); - } - - try { - const modelConfigService = new ModelConfigService(this.env); - - // Get all user configs - const userConfigsRecord = await modelConfigService.getUserModelConfigs(userId); - - // Transform to match frontend interface - const agents = Object.entries(AGENT_CONFIG).map(([key, config]) => ({ - key, - name: config.name, - description: config.description - })); - - const userConfigs: Record = {}; - const defaultConfigs: Record = {}; - - for (const [actionKey, mergedConfig] of Object.entries(userConfigsRecord)) { - if (mergedConfig.isUserOverride) { - userConfigs[actionKey] = { - name: mergedConfig.name, - max_tokens: mergedConfig.max_tokens, - temperature: mergedConfig.temperature, - reasoning_effort: mergedConfig.reasoning_effort, - fallbackModel: mergedConfig.fallbackModel, - isUserOverride: true - }; - } - - // Always include default config - const defaultConfig = AGENT_CONFIG[actionKey as AgentActionKey]; - if (defaultConfig) { - defaultConfigs[actionKey] = { - name: defaultConfig.name, - max_tokens: defaultConfig.max_tokens, - temperature: defaultConfig.temperature, - reasoning_effort: defaultConfig.reasoning_effort, - fallbackModel: defaultConfig.fallbackModel - }; - } - } - - return { - agents, - userConfigs, - defaultConfigs - }; - } catch (error) { - this.logger().error('Error fetching model configs info:', error); - throw error; - } - } - - getTotalFiles(): number { - return this.fileManager.getGeneratedFilePaths().length + ((this.state.currentPhase || this.state.blueprint.initialPhase)?.files?.length || 0); - } - - getSummary(): Promise { - const summaryData = { - query: this.state.query, - generatedCode: this.fileManager.getGeneratedFiles(), - conversation: this.state.conversationMessages, - }; - return Promise.resolve(summaryData); - } - - async getFullState(): Promise { - return this.state; - } - - private migrateStateIfNeeded(): void { - const migratedState = StateMigration.migrateIfNeeded(this.state, this.logger()); - if (migratedState) { - this.setState(migratedState); - } - } - - getFileGenerated(filePath: string) { - return this.fileManager!.getGeneratedFile(filePath) || null; - } - - getWebSockets(): WebSocket[] { - return this.ctx.getWebSockets(); - } - - async fetchRuntimeErrors(clear: boolean = true, shouldWait: boolean = true): Promise { - if (shouldWait) { - await this.deploymentManager.waitForPreview(); - } - - try { - const errors = await this.deploymentManager.fetchRuntimeErrors(clear); - - if (errors.length > 0) { - this.broadcast(WebSocketMessageResponses.RUNTIME_ERROR_FOUND, { - errors, - message: "Runtime errors found", - count: errors.length - }); - } - - return errors; - } catch (error) { - this.logger().error("Exception fetching runtime errors:", error); - // If fetch fails, initiate redeploy - this.deployToSandbox(); - const message = ""; - return [{ message, timestamp: new Date().toISOString(), level: 0, rawOutput: message }]; - } - } - - /** - * Perform static code analysis on the generated files - * This helps catch potential issues early in the development process - */ - async 
runStaticAnalysisCode(files?: string[]): Promise { - try { - const analysisResponse = await this.deploymentManager.runStaticAnalysis(files); - - const { lint, typecheck } = analysisResponse; - this.broadcast(WebSocketMessageResponses.STATIC_ANALYSIS_RESULTS, { - lint: { issues: lint.issues, summary: lint.summary }, - typecheck: { issues: typecheck.issues, summary: typecheck.summary } - }); - - return analysisResponse; - } catch (error) { - this.broadcastError("Failed to lint code", error); - return { success: false, lint: { issues: [], }, typecheck: { issues: [], } }; - } - } - - private async applyFastSmartCodeFixes() : Promise { - try { - const startTime = Date.now(); - this.logger().info("Applying fast smart code fixes"); - // Get static analysis and do deterministic fixes - const staticAnalysis = await this.runStaticAnalysisCode(); - if (staticAnalysis.typecheck.issues.length + staticAnalysis.lint.issues.length == 0) { - this.logger().info("No issues found, skipping fast smart code fixes"); - return; - } - const issues = staticAnalysis.typecheck.issues.concat(staticAnalysis.lint.issues); - const allFiles = this.fileManager.getAllRelevantFiles(); - - const fastCodeFixer = await this.operations.fastCodeFixer.execute({ - query: this.state.query, - issues, - allFiles, - }, this.getOperationOptions()); - - if (fastCodeFixer.length > 0) { - await this.fileManager.saveGeneratedFiles(fastCodeFixer, "fix: Fast smart code fixes"); - await this.deployToSandbox(fastCodeFixer); - this.logger().info("Fast smart code fixes applied successfully"); - } - this.logger().info(`Fast smart code fixes applied in ${Date.now() - startTime}ms`); - } catch (error) { - this.broadcastError("Failed to apply fast smart code fixes", error); - return; - } - } - - /** - * Apply deterministic code fixes for common TypeScript errors - */ - private async applyDeterministicCodeFixes() : Promise { - try { - // Get static analysis and do deterministic fixes - const staticAnalysis = await this.runStaticAnalysisCode(); - if (staticAnalysis.typecheck.issues.length == 0) { - this.logger().info("No typecheck issues found, skipping deterministic fixes"); - return staticAnalysis; // So that static analysis is not repeated again - } - const typeCheckIssues = staticAnalysis.typecheck.issues; - this.broadcast(WebSocketMessageResponses.DETERMINISTIC_CODE_FIX_STARTED, { - message: `Attempting to fix ${typeCheckIssues.length} TypeScript issues using deterministic code fixer`, - issues: typeCheckIssues - }); - - this.logger().info(`Attempting to fix ${typeCheckIssues.length} TypeScript issues using deterministic code fixer`); - const allFiles = this.fileManager.getAllFiles(); - - const fixResult = fixProjectIssues( - allFiles.map(file => ({ - filePath: file.filePath, - fileContents: file.fileContents, - filePurpose: '' - })), - typeCheckIssues - ); - - this.broadcast(WebSocketMessageResponses.DETERMINISTIC_CODE_FIX_COMPLETED, { - message: `Fixed ${typeCheckIssues.length} TypeScript issues using deterministic code fixer`, - issues: typeCheckIssues, - fixResult - }); - - if (fixResult) { - // If there are unfixable issues but of type TS2307, extract external module names and install them - if (fixResult.unfixableIssues.length > 0) { - const modulesNotFound = fixResult.unfixableIssues.filter(issue => issue.issueCode === 'TS2307'); - // Reason is of the form: External package "xyz" should be handled by package manager - const moduleNames = modulesNotFound.flatMap(issue => { - const match = issue.reason.match(/External package ["'](.+?)["']/); 
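The TS2307 handling in `applyDeterministicCodeFixes` above reduces to parsing package names out of the fixer's `External package "…"` reasons and turning them into `bun install` commands. A minimal sketch of that extraction, assuming the same reason format and the same `@shared` alias exclusion; the `UnfixableIssue` shape and `buildInstallCommands` name are illustrative, not the project's actual types:

```ts
// Illustrative shape of an unfixable issue reported by the deterministic fixer.
interface UnfixableIssue {
  issueCode: string; // e.g. 'TS2307' ("Cannot find module")
  reason: string;    // e.g. 'External package "zod" should be handled by package manager'
}

/** Extract external package names from TS2307 reasons and build install commands. */
function buildInstallCommands(unfixableIssues: UnfixableIssue[]): string[] {
  const moduleNames = unfixableIssues
    .filter((issue) => issue.issueCode === 'TS2307')
    .flatMap((issue) => {
      const match = issue.reason.match(/External package ["'](.+?)["']/);
      const name = match?.[1];
      // Skip empty matches and internal '@shared' aliases, which are not npm packages.
      return typeof name === 'string' && name.trim().length > 0 && !name.startsWith('@shared')
        ? [name]
        : [];
    });
  return moduleNames.map((moduleName) => `bun install ${moduleName}`);
}
```

Deduplicating the resulting commands before execution (as the agent's `executeCommands` already does) avoids installing the same package twice when several files report the same missing module.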
- const name = match?.[1]; - return (typeof name === 'string' && name.trim().length > 0 && !name.startsWith('@shared')) ? [name] : []; - }); - if (moduleNames.length > 0) { - const installCommands = moduleNames.map(moduleName => `bun install ${moduleName}`); - await this.executeCommands(installCommands, false); - - this.logger().info(`Deterministic code fixer installed missing modules: ${moduleNames.join(', ')}`); - } else { - this.logger().info(`Deterministic code fixer detected no external modules to install from unfixable TS2307 issues`); - } - } - if (fixResult.modifiedFiles.length > 0) { - this.logger().info("Applying deterministic fixes to files, Fixes: ", JSON.stringify(fixResult, null, 2)); - const fixedFiles = fixResult.modifiedFiles.map(file => ({ - filePath: file.filePath, - filePurpose: allFiles.find(f => f.filePath === file.filePath)?.filePurpose || '', - fileContents: file.fileContents - })); - await this.fileManager.saveGeneratedFiles(fixedFiles, "fix: applied deterministic fixes"); - - await this.deployToSandbox(fixedFiles, false, "fix: applied deterministic fixes"); - this.logger().info("Deployed deterministic fixes to sandbox"); - } - } - this.logger().info(`Applied deterministic code fixes: ${JSON.stringify(fixResult, null, 2)}`); - } catch (error) { - this.broadcastError('Deterministic code fixer failed', error); - } - // return undefined; - } - - async fetchAllIssues(resetIssues: boolean = false): Promise { - const [runtimeErrors, staticAnalysis] = await Promise.all([ - this.fetchRuntimeErrors(resetIssues), - this.runStaticAnalysisCode() - ]); - this.logger().info("Fetched all issues:", JSON.stringify({ runtimeErrors, staticAnalysis })); - - return { runtimeErrors, staticAnalysis }; - } - - async updateProjectName(newName: string): Promise { - try { - const valid = /^[a-z0-9-_]{3,50}$/.test(newName); - if (!valid) return false; - const updatedBlueprint = { ...this.state.blueprint, projectName: newName } as Blueprint; - this.setState({ - ...this.state, - blueprint: updatedBlueprint - }); - let ok = true; - if (this.state.sandboxInstanceId) { - try { - ok = await this.getSandboxServiceClient().updateProjectName(this.state.sandboxInstanceId, newName); - } catch (_) { - ok = false; - } - } - try { - const appService = new AppService(this.env); - const dbOk = await appService.updateApp(this.getAgentId(), { title: newName }); - ok = ok && dbOk; - } catch (error) { - this.logger().error('Error updating project name in database:', error); - ok = false; - } - this.broadcast(WebSocketMessageResponses.PROJECT_NAME_UPDATED, { - message: 'Project name updated', - projectName: newName - }); - return ok; - } catch (error) { - this.logger().error('Error updating project name:', error); - return false; - } - } - - async updateBlueprint(patch: Partial): Promise { - const keys = Object.keys(patch) as (keyof Blueprint)[]; - const allowed = new Set([ - 'title', - 'projectName', - 'detailedDescription', - 'description', - 'colorPalette', - 'views', - 'userFlow', - 'dataFlow', - 'architecture', - 'pitfalls', - 'frameworks', - 'implementationRoadmap' - ]); - const filtered: Partial = {}; - for (const k of keys) { - if (allowed.has(k) && typeof (patch as any)[k] !== 'undefined') { - (filtered as any)[k] = (patch as any)[k]; - } - } - if (typeof filtered.projectName === 'string' && filtered.projectName) { - await this.updateProjectName(filtered.projectName); - delete (filtered as any).projectName; - } - const updated: Blueprint = { ...this.state.blueprint, ...(filtered as Blueprint) } as 
Blueprint; - this.setState({ - ...this.state, - blueprint: updated - }); - this.broadcast(WebSocketMessageResponses.BLUEPRINT_UPDATED, { - message: 'Blueprint updated', - updatedKeys: Object.keys(filtered) - }); - return updated; - } - - // ===== Debugging helpers for assistants ===== - async readFiles(paths: string[]): Promise<{ files: { path: string; content: string }[] }> { - const { sandboxInstanceId } = this.state; - if (!sandboxInstanceId) { - return { files: [] }; - } - const resp = await this.getSandboxServiceClient().getFiles(sandboxInstanceId, paths); - if (!resp.success) { - this.logger().warn('readFiles failed', { error: resp.error }); - return { files: [] }; - } - return { files: resp.files.map(f => ({ path: f.filePath, content: f.fileContents })) }; - } - - async execCommands(commands: string[], shouldSave: boolean, timeout?: number): Promise { - const { sandboxInstanceId } = this.state; - if (!sandboxInstanceId) { - return { success: false, results: [], error: 'No sandbox instance' } as any; - } - const result = await this.getSandboxServiceClient().executeCommands(sandboxInstanceId, commands, timeout); - if (shouldSave) { - this.saveExecutedCommands(commands); - } - return result; - } - - /** - * Regenerate a file to fix identified issues - * Retries up to 3 times before giving up - */ - async regenerateFile(file: FileOutputType, issues: string[], retryIndex: number = 0) { - this.broadcast(WebSocketMessageResponses.FILE_REGENERATING, { - message: `Regenerating file: ${file.filePath}`, - filePath: file.filePath, - original_issues: issues, - }); - - const result = await this.operations.regenerateFile.execute( - {file, issues, retryIndex}, - this.getOperationOptions() - ); - - const fileState = await this.fileManager.saveGeneratedFile(result); - - this.broadcast(WebSocketMessageResponses.FILE_REGENERATED, { - message: `Regenerated file: ${file.filePath}`, - file: fileState, - original_issues: issues, - }); - - return fileState; - } - - async regenerateFileByPath(path: string, issues: string[]): Promise<{ path: string; diff: string }> { - const { sandboxInstanceId } = this.state; - if (!sandboxInstanceId) { - throw new Error('No sandbox instance available'); - } - // Prefer local file manager; fallback to sandbox - let fileContents = ''; - let filePurpose = ''; - try { - const fmFile = this.fileManager.getFile(path); - if (fmFile) { - fileContents = fmFile.fileContents; - filePurpose = fmFile.filePurpose || ''; - } else { - const resp = await this.getSandboxServiceClient().getFiles(sandboxInstanceId, [path]); - const f = resp.success ? 
resp.files.find(f => f.filePath === path) : undefined; - if (!f) throw new Error(resp.error || `File not found: ${path}`); - fileContents = f.fileContents; - } - } catch (e) { - throw new Error(`Failed to read file for regeneration: ${String(e)}`); - } - - const regenerated = await this.regenerateFile({ filePath: path, fileContents, filePurpose }, issues, 0); - // Persist to sandbox instance - await this.getSandboxServiceClient().writeFiles(sandboxInstanceId, [{ filePath: regenerated.filePath, fileContents: regenerated.fileContents }], `Deep debugger fix: ${path}`); - return { path, diff: regenerated.lastDiff }; - } - - async generateFiles( - phaseName: string, - phaseDescription: string, - requirements: string[], - files: FileConceptType[] - ): Promise<{ files: Array<{ path: string; purpose: string; diff: string }> }> { - this.logger().info('Generating files for deep debugger', { - phaseName, - requirementsCount: requirements.length, - filesCount: files.length - }); - - // Create phase structure with explicit files - const phase: PhaseConceptType = { - name: phaseName, - description: phaseDescription, - files: files, - lastPhase: true - }; - - // Call existing implementPhase with postPhaseFixing=false - // This skips deterministic fixes and fast smart fixes - const result = await this.implementPhase( - phase, - { - runtimeErrors: [], - staticAnalysis: { - success: true, - lint: { issues: [] }, - typecheck: { issues: [] } - }, - }, - { suggestions: requirements }, - true, // streamChunks - false // postPhaseFixing = false (skip auto-fixes) - ); - - // Return files with diffs from FileState - return { - files: result.files.map(f => ({ - path: f.filePath, - purpose: f.filePurpose || '', - diff: (f as any).lastDiff || '' // FileState has lastDiff - })) - }; - } - - async deployToSandbox(files: FileOutputType[] = [], redeploy: boolean = false, commitMessage?: string, clearLogs: boolean = false): Promise { - // Call deployment manager with callbacks for broadcasting at the right times - const result = await this.deploymentManager.deployToSandbox( - files, - redeploy, - commitMessage, - clearLogs, - { - onStarted: (data) => { - this.broadcast(WebSocketMessageResponses.DEPLOYMENT_STARTED, data); - }, - onCompleted: (data) => { - this.broadcast(WebSocketMessageResponses.DEPLOYMENT_COMPLETED, data); - }, - onError: (data) => { - this.broadcast(WebSocketMessageResponses.DEPLOYMENT_FAILED, data); - }, - onAfterSetupCommands: async () => { - // Sync package.json after setup commands (includes dependency installs) - await this.syncPackageJsonFromSandbox(); - } - } - ); - - return result; - } - - /** - * Deploy the generated code to Cloudflare Workers - */ - async deployToCloudflare(): Promise<{ deploymentUrl?: string; workersUrl?: string } | null> { - try { - // Ensure sandbox instance exists first - if (!this.state.sandboxInstanceId) { - this.logger().info('No sandbox instance, deploying to sandbox first'); - await this.deployToSandbox(); - - if (!this.state.sandboxInstanceId) { - this.logger().error('Failed to deploy to sandbox service'); - this.broadcast(WebSocketMessageResponses.CLOUDFLARE_DEPLOYMENT_ERROR, { - message: 'Deployment failed: Failed to deploy to sandbox service', - error: 'Sandbox service unavailable' - }); - return null; - } - } - - // Call service - handles orchestration, callbacks for broadcasting - const result = await this.deploymentManager.deployToCloudflare({ - onStarted: (data) => { - this.broadcast(WebSocketMessageResponses.CLOUDFLARE_DEPLOYMENT_STARTED, data); - }, - 
onCompleted: (data) => { - this.broadcast(WebSocketMessageResponses.CLOUDFLARE_DEPLOYMENT_COMPLETED, data); - }, - onError: (data) => { - this.broadcast(WebSocketMessageResponses.CLOUDFLARE_DEPLOYMENT_ERROR, data); - }, - onPreviewExpired: () => { - // Re-deploy sandbox and broadcast error - this.deployToSandbox(); - this.broadcast(WebSocketMessageResponses.CLOUDFLARE_DEPLOYMENT_ERROR, { - message: PREVIEW_EXPIRED_ERROR, - error: PREVIEW_EXPIRED_ERROR - }); - } - }); - - // Update database with deployment ID if successful - if (result.deploymentUrl && result.deploymentId) { - const appService = new AppService(this.env); - await appService.updateDeploymentId( - this.getAgentId(), - result.deploymentId - ); - } - - return result.deploymentUrl ? { deploymentUrl: result.deploymentUrl } : null; - - } catch (error) { - this.logger().error('Cloudflare deployment error:', error); - this.broadcast(WebSocketMessageResponses.CLOUDFLARE_DEPLOYMENT_ERROR, { - message: 'Deployment failed', - error: error instanceof Error ? error.message : String(error) - }); - return null; - } - } - - async waitForGeneration(): Promise { - if (this.generationPromise) { - try { - await this.generationPromise; - this.logger().info("Code generation completed successfully"); - } catch (error) { - this.logger().error("Error during code generation:", error); - } - } else { - this.logger().error("No generation process found"); - } - } - - isDeepDebugging(): boolean { - return this.deepDebugPromise !== null; - } - - getDeepDebugSessionState(): { conversationId: string } | null { - if (this.deepDebugConversationId && this.deepDebugPromise) { - return { conversationId: this.deepDebugConversationId }; - } - return null; - } - - async waitForDeepDebug(): Promise { - if (this.deepDebugPromise) { - try { - await this.deepDebugPromise; - this.logger().info("Deep debug session completed successfully"); - } catch (error) { - this.logger().error("Error during deep debug session:", error); - } finally { - // Clear promise after waiting completes - this.deepDebugPromise = null; - } - } - } - - /** - * Cache GitHub OAuth token in memory for subsequent exports - * Token is ephemeral - lost on DO eviction - */ - setGitHubToken(token: string, username: string, ttl: number = 3600000): void { - this.githubTokenCache = { - token, - username, - expiresAt: Date.now() + ttl - }; - this.logger().info('GitHub token cached', { - username, - expiresAt: new Date(this.githubTokenCache.expiresAt).toISOString() - }); - } - - /** - * Get cached GitHub token if available and not expired - */ - getGitHubToken(): { token: string; username: string } | null { - if (!this.githubTokenCache) { - return null; - } - - if (Date.now() >= this.githubTokenCache.expiresAt) { - this.logger().info('GitHub token expired, clearing cache'); - this.githubTokenCache = null; - return null; - } - - return { - token: this.githubTokenCache.token, - username: this.githubTokenCache.username - }; - } - - /** - * Clear cached GitHub token - */ - clearGitHubToken(): void { - this.githubTokenCache = null; - this.logger().info('GitHub token cleared'); - } - - async onMessage(connection: Connection, message: string): Promise { - handleWebSocketMessage(this, connection, message); - } - - async onClose(connection: Connection): Promise { - handleWebSocketClose(connection); - } - - private async onProjectUpdate(message: string): Promise { - this.setState({ - ...this.state, - projectUpdatesAccumulator: [...this.state.projectUpdatesAccumulator, message] - }); - } - - private async 
getAndResetProjectUpdates() { - const projectUpdates = this.state.projectUpdatesAccumulator || []; - this.setState({ - ...this.state, - projectUpdatesAccumulator: [] - }); - return projectUpdates; - } - - public broadcast(msg: T, data?: WebSocketMessageData): void { - if (this.operations.processUserMessage.isProjectUpdateType(msg)) { - let message = msg as string; - if (data && 'message' in data) { - message = (data as { message: string }).message; - } - this.onProjectUpdate(message); - } - broadcastToConnections(this, msg, data || {} as WebSocketMessageData); - } - - private getBootstrapCommands() { - const bootstrapCommands = this.state.commandsHistory || []; - // Validate, deduplicate, and clean - const { validCommands } = validateAndCleanBootstrapCommands(bootstrapCommands); - return validCommands; - } - - private async saveExecutedCommands(commands: string[]) { - this.logger().info('Saving executed commands', { commands }); - - // Merge with existing history - const mergedCommands = [...(this.state.commandsHistory || []), ...commands]; - - // Validate, deduplicate, and clean - const { validCommands, invalidCommands, deduplicated } = validateAndCleanBootstrapCommands(mergedCommands); - - // Log what was filtered out - if (invalidCommands.length > 0 || deduplicated > 0) { - this.logger().warn('[commands] Bootstrap commands cleaned', { - invalidCommands, - invalidCount: invalidCommands.length, - deduplicatedCount: deduplicated, - finalCount: validCommands.length - }); - } - - // Update state with cleaned commands - this.setState({ - ...this.state, - commandsHistory: validCommands - }); - - // Update bootstrap script with validated commands - await this.updateBootstrapScript(validCommands); - - // Sync package.json if any dependency-modifying commands were executed - const hasDependencyCommands = commands.some(cmd => - cmd.includes('install') || - cmd.includes(' add ') || - cmd.includes('remove') || - cmd.includes('uninstall') - ); - - if (hasDependencyCommands) { - this.logger().info('Dependency commands executed, syncing package.json from sandbox'); - await this.syncPackageJsonFromSandbox(); - } - } - - /** - * Execute commands with retry logic - * Chunks commands and retries failed ones with AI assistance - */ - private async executeCommands(commands: string[], shouldRetry: boolean = true, chunkSize: number = 5): Promise { - const state = this.state; - if (!state.sandboxInstanceId) { - this.logger().warn('No sandbox instance available for executing commands'); - return; - } - - // Sanitize and prepare commands - commands = commands.join('\n').split('\n').filter(cmd => cmd.trim() !== '').filter(cmd => looksLikeCommand(cmd) && !cmd.includes(' undefined')); - if (commands.length === 0) { - this.logger().warn("No commands to execute"); - return; - } - - commands = commands.map(cmd => cmd.trim().replace(/^\s*-\s*/, '').replace(/^npm/, 'bun')); - this.logger().info(`AI suggested ${commands.length} commands to run: ${commands.join(", ")}`); - - // Remove duplicate commands - commands = Array.from(new Set(commands)); - - // Execute in chunks - const commandChunks = []; - for (let i = 0; i < commands.length; i += chunkSize) { - commandChunks.push(commands.slice(i, i + chunkSize)); - } - - const successfulCommands: string[] = []; - - for (const chunk of commandChunks) { - // Retry failed commands up to 3 times - let currentChunk = chunk; - let retryCount = 0; - const maxRetries = shouldRetry ? 
3 : 1; - - while (currentChunk.length > 0 && retryCount < maxRetries) { - try { - this.broadcast(WebSocketMessageResponses.COMMAND_EXECUTING, { - message: retryCount > 0 ? `Retrying commands (attempt ${retryCount + 1}/${maxRetries})` : "Executing commands", - commands: currentChunk - }); - - const resp = await this.getSandboxServiceClient().executeCommands( - state.sandboxInstanceId, - currentChunk - ); - if (!resp.results || !resp.success) { - this.logger().error('Failed to execute commands', { response: resp }); - // Check if instance is still running - const status = await this.getSandboxServiceClient().getInstanceStatus(state.sandboxInstanceId); - if (!status.success || !status.isHealthy) { - this.logger().error(`Instance ${state.sandboxInstanceId} is no longer running`); - return; - } - break; - } - - // Process results - const successful = resp.results.filter(r => r.success); - const failures = resp.results.filter(r => !r.success); - - // Track successful commands - if (successful.length > 0) { - const successfulCmds = successful.map(r => r.command); - this.logger().info(`Successfully executed ${successful.length} commands: ${successfulCmds.join(", ")}`); - successfulCommands.push(...successfulCmds); - } - - // If all succeeded, move to next chunk - if (failures.length === 0) { - this.logger().info(`All commands in chunk executed successfully`); - break; - } - - // Handle failures - const failedCommands = failures.map(r => r.command); - this.logger().warn(`${failures.length} commands failed: ${failedCommands.join(", ")}`); - - // Only retry if shouldRetry is true - if (!shouldRetry) { - break; - } - - retryCount++; - - // For install commands, try AI regeneration - const failedInstallCommands = failedCommands.filter(cmd => - cmd.startsWith("bun") || cmd.startsWith("npm") || cmd.includes("install") - ); - - if (failedInstallCommands.length > 0 && retryCount < maxRetries) { - // Use AI to suggest alternative commands - const newCommands = await this.getProjectSetupAssistant().generateSetupCommands( - `The following install commands failed: ${JSON.stringify(failures, null, 2)}. 
Please suggest alternative commands.` - ); - - if (newCommands?.commands && newCommands.commands.length > 0) { - this.logger().info(`AI suggested ${newCommands.commands.length} alternative commands`); - this.broadcast(WebSocketMessageResponses.COMMAND_EXECUTING, { - message: "Executing regenerated commands", - commands: newCommands.commands - }); - currentChunk = newCommands.commands.filter(looksLikeCommand); - } else { - this.logger().warn('AI could not generate alternative commands'); - currentChunk = []; - } - } else { - // No retry needed for non-install commands - currentChunk = []; - } - } catch (error) { - this.logger().error('Error executing commands:', error); - // Stop retrying on error - break; - } - } - } - - // Record command execution history - const failedCommands = commands.filter(cmd => !successfulCommands.includes(cmd)); - - if (failedCommands.length > 0) { - this.broadcastError('Failed to execute commands', new Error(failedCommands.join(", "))); - } else { - this.logger().info(`All commands executed successfully: ${successfulCommands.join(", ")}`); - } - - this.saveExecutedCommands(successfulCommands); - } - - /** - * Sync package.json from sandbox to agent's git repository - * Called after install/add/remove commands to keep dependencies in sync - */ - private async syncPackageJsonFromSandbox(): Promise { - try { - this.logger().info('Fetching current package.json from sandbox'); - const results = await this.readFiles(['package.json']); - if (!results || !results.files || results.files.length === 0) { - this.logger().warn('Failed to fetch package.json from sandbox', { results }); - return; - } - const packageJsonContent = results.files[0].content; - - const { updated, packageJson } = updatePackageJson(this.state.lastPackageJson, packageJsonContent); - if (!updated) { - this.logger().info('package.json has not changed, skipping sync'); - return; - } - // Update state with latest package.json - this.setState({ - ...this.state, - lastPackageJson: packageJson - }); - - // Commit to git repository - const fileState = await this.fileManager.saveGeneratedFile( - { - filePath: 'package.json', - fileContents: packageJson, - filePurpose: 'Project dependencies and configuration' - }, - 'chore: sync package.json dependencies from sandbox' - ); - - this.logger().info('Successfully synced package.json to git', { - filePath: fileState.filePath, - }); - - // Broadcast update to clients - this.broadcast(WebSocketMessageResponses.FILE_GENERATED, { - message: 'Synced package.json from sandbox', - file: fileState - }); - - } catch (error) { - this.logger().error('Failed to sync package.json from sandbox', error); - // Non-critical error - don't throw, just log - } - } - - async getLogs(_reset?: boolean, durationSeconds?: number): Promise { - if (!this.state.sandboxInstanceId) { - throw new Error('Cannot get logs: No sandbox instance available'); - } - - const response = await this.getSandboxServiceClient().getLogs(this.state.sandboxInstanceId, _reset, durationSeconds); - if (response.success) { - return `STDOUT: ${response.logs.stdout}\nSTDERR: ${response.logs.stderr}`; - } else { - return `Failed to get logs, ${response.error}`; - } - } - - /** - * Delete files from the file manager - */ - async deleteFiles(filePaths: string[]) { - const deleteCommands: string[] = []; - for (const filePath of filePaths) { - deleteCommands.push(`rm -rf ${filePath}`); - } - // Remove the files from file manager - this.fileManager.deleteFiles(filePaths); - try { - await this.executeCommands(deleteCommands, 
false); - this.logger().info(`Deleted ${filePaths.length} files: ${filePaths.join(", ")}`); - } catch (error) { - this.logger().error('Error deleting files:', error); - } - } - - /** - * Export generated code to a GitHub repository - */ - async pushToGitHub(options: GitHubPushRequest): Promise { - try { - this.logger().info('Starting GitHub export using DO git'); - - // Broadcast export started - this.broadcast(WebSocketMessageResponses.GITHUB_EXPORT_STARTED, { - message: `Starting GitHub export to repository "${options.cloneUrl}"`, - repositoryName: options.repositoryHtmlUrl, - isPrivate: options.isPrivate - }); - - // Export git objects from DO - this.broadcast(WebSocketMessageResponses.GITHUB_EXPORT_PROGRESS, { - message: 'Preparing git repository...', - step: 'preparing', - progress: 20 - }); - - const { gitObjects, query, templateDetails } = await this.exportGitObjects(); - - this.logger().info('Git objects exported', { - objectCount: gitObjects.length, - hasTemplate: !!templateDetails - }); - - // Get app createdAt timestamp for template base commit - let appCreatedAt: Date | undefined = undefined; - try { - const appId = this.getAgentId(); - if (appId) { - const appService = new AppService(this.env); - const app = await appService.getAppDetails(appId); - if (app && app.createdAt) { - appCreatedAt = new Date(app.createdAt); - this.logger().info('Using app createdAt for template base', { - createdAt: appCreatedAt.toISOString() - }); - } - } - } catch (error) { - this.logger().warn('Failed to get app createdAt, using current time', { error }); - appCreatedAt = new Date(); // Fallback to current time - } - - // Push to GitHub using new service - this.broadcast(WebSocketMessageResponses.GITHUB_EXPORT_PROGRESS, { - message: 'Uploading to GitHub repository...', - step: 'uploading_files', - progress: 40 - }); - - const result = await GitHubService.exportToGitHub({ - gitObjects, - templateDetails, - appQuery: query, - appCreatedAt, - token: options.token, - repositoryUrl: options.repositoryHtmlUrl, - username: options.username, - email: options.email - }); - - if (!result.success) { - throw new Error(result.error || 'Failed to export to GitHub'); - } - - this.logger().info('GitHub export completed', { - commitSha: result.commitSha - }); - - // Cache token for subsequent exports - if (options.token && options.username) { - try { - this.setGitHubToken(options.token, options.username); - this.logger().info('GitHub token cached after successful export'); - } catch (cacheError) { - // Non-fatal - continue with finalization - this.logger().warn('Failed to cache GitHub token', { error: cacheError }); - } - } - - // Update database - this.broadcast(WebSocketMessageResponses.GITHUB_EXPORT_PROGRESS, { - message: 'Finalizing GitHub export...', - step: 'finalizing', - progress: 90 - }); - - const agentId = this.getAgentId(); - this.logger().info('[DB Update] Updating app with GitHub repository URL', { - agentId, - repositoryUrl: options.repositoryHtmlUrl, - visibility: options.isPrivate ? 'private' : 'public' - }); - - const appService = new AppService(this.env); - const updateResult = await appService.updateGitHubRepository( - agentId || '', - options.repositoryHtmlUrl || '', - options.isPrivate ? 
'private' : 'public' - ); - - this.logger().info('[DB Update] Database update result', { - agentId, - success: updateResult, - repositoryUrl: options.repositoryHtmlUrl - }); - - // Broadcast success - this.broadcast(WebSocketMessageResponses.GITHUB_EXPORT_COMPLETED, { - message: `Successfully exported to GitHub repository: ${options.repositoryHtmlUrl}`, - repositoryUrl: options.repositoryHtmlUrl, - cloneUrl: options.cloneUrl, - commitSha: result.commitSha - }); - - this.logger().info('GitHub export completed successfully', { - repositoryUrl: options.repositoryHtmlUrl, - commitSha: result.commitSha - }); - - return { - success: true, - repositoryUrl: options.repositoryHtmlUrl, - cloneUrl: options.cloneUrl - }; - - } catch (error) { - this.logger().error('GitHub export failed', error); - this.broadcast(WebSocketMessageResponses.GITHUB_EXPORT_ERROR, { - message: `GitHub export failed: ${error instanceof Error ? error.message : 'Unknown error'}`, - error: error instanceof Error ? error.message : 'Unknown error' - }); - return { - success: false, - repositoryUrl: options.repositoryHtmlUrl, - cloneUrl: options.cloneUrl - }; - } - } - - /** - * Handle user input during conversational code generation - * Processes user messages and updates pendingUserInputs state - */ - async handleUserInput(userMessage: string, images?: ImageAttachment[]): Promise { - try { - this.logger().info('Processing user input message', { - messageLength: userMessage.length, - pendingInputsCount: this.state.pendingUserInputs.length, - hasImages: !!images && images.length > 0, - imageCount: images?.length || 0 - }); - - // Ensure template details are loaded before processing - await this.ensureTemplateDetails(); - - // Just fetch runtime errors - const errors = await this.fetchRuntimeErrors(false, false); - const projectUpdates = await this.getAndResetProjectUpdates(); - this.logger().info('Passing context to user conversation processor', { errors, projectUpdates }); - - // If there are images, upload them and pass the URLs to the conversation processor - let uploadedImages: ProcessedImageAttachment[] = []; - if (images) { - uploadedImages = await Promise.all(images.map(async (image) => { - return await uploadImage(this.env, image, ImageType.UPLOADS); - })); - - this.logger().info('Uploaded images', { uploadedImages }); - } - - // Process the user message using conversational assistant - const conversationalResponse = await this.operations.processUserMessage.execute( - { - userMessage, - conversationState: this.getConversationState(), - conversationResponseCallback: ( - message: string, - conversationId: string, - isStreaming: boolean, - tool?: { name: string; status: 'start' | 'success' | 'error'; args?: Record } - ) => { - // Track conversationId when deep_debug starts - if (tool?.name === 'deep_debug' && tool.status === 'start') { - this.deepDebugConversationId = conversationId; - } - - this.broadcast(WebSocketMessageResponses.CONVERSATION_RESPONSE, { - message, - conversationId, - isStreaming, - tool, - }); - }, - errors, - projectUpdates, - images: uploadedImages - }, - this.getOperationOptions() - ); - - const { conversationResponse, conversationState } = conversationalResponse; - this.setConversationState(conversationState); - - if (!this.generationPromise) { - // If idle, start generation process - this.logger().info('User input during IDLE state, starting generation'); - this.generateAllFiles().catch(error => { - this.logger().error('Error starting generation from user input:', error); - }); - } - - 
this.logger().info('User input processed successfully', { - responseLength: conversationResponse.userResponse.length, - }); - - } catch (error) { - if (error instanceof RateLimitExceededError) { - this.logger().error('Rate limit exceeded:', error); - this.broadcast(WebSocketMessageResponses.RATE_LIMIT_ERROR, { - error - }); - return; - } - this.broadcastError('Error processing user input', error); - } - } - - /** - * Clear conversation history - */ - public clearConversation(): void { - const messageCount = this.state.conversationMessages.length; - - // Clear conversation messages only from agent's running history - this.setState({ - ...this.state, - conversationMessages: [] - }); - - // Send confirmation response - this.broadcast(WebSocketMessageResponses.CONVERSATION_CLEARED, { - message: 'Conversation history cleared', - clearedMessageCount: messageCount - }); - } - - /** - * Capture screenshot of the given URL using Cloudflare Browser Rendering REST API - */ - public async captureScreenshot( - url: string, - viewport: { width: number; height: number } = { width: 1280, height: 720 } - ): Promise { - if (!this.env.DB || !this.getAgentId()) { - const error = 'Cannot capture screenshot: DB or agentId not available'; - this.logger().warn(error); - this.broadcast(WebSocketMessageResponses.SCREENSHOT_CAPTURE_ERROR, { - error, - configurationError: true - }); - throw new Error(error); - } - - if (!url) { - const error = 'URL is required for screenshot capture'; - this.broadcast(WebSocketMessageResponses.SCREENSHOT_CAPTURE_ERROR, { - error, - url, - viewport - }); - throw new Error(error); - } - - this.logger().info('Capturing screenshot via REST API', { url, viewport }); - - // Notify start of screenshot capture - this.broadcast(WebSocketMessageResponses.SCREENSHOT_CAPTURE_STARTED, { - message: `Capturing screenshot of ${url}`, - url, - viewport - }); - - try { - // Use Cloudflare Browser Rendering REST API - const apiUrl = `https://api.cloudflare.com/client/v4/accounts/${this.env.CLOUDFLARE_ACCOUNT_ID}/browser-rendering/snapshot`; - - const response = await fetch(apiUrl, { - method: 'POST', - headers: { - 'Authorization': `Bearer ${this.env.CLOUDFLARE_API_TOKEN}`, - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - url: url, - viewport: viewport, - gotoOptions: { - waitUntil: 'networkidle0', - timeout: 10000 - }, - screenshotOptions: { - fullPage: false, - type: 'png' - } - }), - }); - - if (!response.ok) { - const errorText = await response.text(); - const error = `Browser Rendering API failed: ${response.status} - ${errorText}`; - this.broadcast(WebSocketMessageResponses.SCREENSHOT_CAPTURE_ERROR, { - error, - url, - viewport, - statusCode: response.status, - statusText: response.statusText - }); - throw new Error(error); - } - - const result = await response.json() as { - success: boolean; - result: { - screenshot: string; // base64 encoded - content: string; // HTML content - }; - }; - - if (!result.success || !result.result.screenshot) { - const error = 'Browser Rendering API succeeded but no screenshot returned'; - this.broadcast(WebSocketMessageResponses.SCREENSHOT_CAPTURE_ERROR, { - error, - url, - viewport, - apiResponse: result - }); - throw new Error(error); - } - - // Get base64 screenshot data - const base64Screenshot = result.result.screenshot; - const screenshot: ImageAttachment = { - id: this.getAgentId(), - filename: 'latest.png', - mimeType: 'image/png', - base64Data: base64Screenshot - }; - const uploadedImage = await uploadImage(this.env, screenshot, 
ImageType.SCREENSHOTS); - - // Persist in database - try { - const appService = new AppService(this.env); - await appService.updateAppScreenshot(this.getAgentId(), uploadedImage.publicUrl); - } catch (dbError) { - const error = `Database update failed: ${dbError instanceof Error ? dbError.message : 'Unknown database error'}`; - this.broadcast(WebSocketMessageResponses.SCREENSHOT_CAPTURE_ERROR, { - error, - url, - viewport, - screenshotCaptured: true, - databaseError: true - }); - throw new Error(error); - } - - this.logger().info('Screenshot captured and stored successfully', { - url, - storage: uploadedImage.publicUrl.startsWith('data:') ? 'database' : (uploadedImage.publicUrl.includes('/api/screenshots/') ? 'r2' : 'images'), - length: base64Screenshot.length - }); - - // Notify successful screenshot capture - this.broadcast(WebSocketMessageResponses.SCREENSHOT_CAPTURE_SUCCESS, { - message: `Successfully captured screenshot of ${url}`, - url, - viewport, - screenshotSize: base64Screenshot.length, - timestamp: new Date().toISOString() - }); - - return uploadedImage.publicUrl; - - } catch (error) { - this.logger().error('Failed to capture screenshot via REST API:', error); - - // Only broadcast if error wasn't already broadcast above - const errorMessage = error instanceof Error ? error.message : 'Unknown error'; - if (!errorMessage.includes('Browser Rendering API') && !errorMessage.includes('Database update failed')) { - this.broadcast(WebSocketMessageResponses.SCREENSHOT_CAPTURE_ERROR, { - error: errorMessage, - url, - viewport - }); - } - - throw new Error(`Screenshot capture failed: ${error instanceof Error ? error.message : 'Unknown error'}`); - } - } - - /** - * Export git objects - * The route handler will build the repo with template rebasing - */ - async exportGitObjects(): Promise<{ - gitObjects: Array<{ path: string; data: Uint8Array }>; - query: string; - hasCommits: boolean; - templateDetails: TemplateDetails | null; - }> { - try { - // Export git objects efficiently (minimal DO memory usage) - const gitObjects = this.git.fs.exportGitObjects(); - - await this.gitInit(); - - // Ensure template details are available - await this.ensureTemplateDetails(); - - return { - gitObjects, - query: this.state.query || 'N/A', - hasCommits: gitObjects.length > 0, - templateDetails: this.templateDetailsCache - }; - } catch (error) { - this.logger().error('exportGitObjects failed', error); - throw error; - } - } -} diff --git a/worker/agents/core/smartGeneratorAgent.ts b/worker/agents/core/smartGeneratorAgent.ts deleted file mode 100644 index 88af36a1..00000000 --- a/worker/agents/core/smartGeneratorAgent.ts +++ /dev/null @@ -1,40 +0,0 @@ -import { SimpleCodeGeneratorAgent } from "./simpleGeneratorAgent"; -import { CodeGenState } from "./state"; -import { AgentInitArgs } from "./types"; - -/** - * SmartCodeGeneratorAgent - Smartly orchestrated AI-powered code generation - * using an LLM orchestrator instead of state machine based orchestrator. 
- * TODO: NOT YET IMPLEMENTED, CURRENTLY Just uses SimpleCodeGeneratorAgent - */ -export class SmartCodeGeneratorAgent extends SimpleCodeGeneratorAgent { - - /** - * Initialize the smart code generator with project blueprint and template - * Sets up services and begins deployment process - */ - async initialize( - initArgs: AgentInitArgs, - agentMode: 'deterministic' | 'smart' - ): Promise { - this.logger().info('🧠 Initializing SmartCodeGeneratorAgent with enhanced AI orchestration', { - queryLength: initArgs.query.length, - agentType: agentMode - }); - - // Call the parent initialization - return await super.initialize(initArgs); - } - - async generateAllFiles(reviewCycles: number = 10): Promise { - if (this.state.agentMode === 'deterministic') { - return super.generateAllFiles(reviewCycles); - } else { - return this.builderLoop(); - } - } - - async builderLoop() { - // TODO - } -} \ No newline at end of file diff --git a/worker/agents/core/state.ts b/worker/agents/core/state.ts index 2840747b..52ed110f 100644 --- a/worker/agents/core/state.ts +++ b/worker/agents/core/state.ts @@ -1,9 +1,11 @@ -import type { Blueprint, PhaseConceptType , +import type { PhasicBlueprint, AgenticBlueprint, PhaseConceptType , FileOutputType, + Blueprint, } from '../schemas'; // import type { ScreenshotData } from './types'; import type { ConversationMessage } from '../inferutils/common'; import type { InferenceContext } from '../inferutils/config.types'; +import { BehaviorType, Plan, ProjectType } from './types'; export interface FileState extends FileOutputType { lastDiff: string; @@ -24,33 +26,94 @@ export enum CurrentDevState { export const MAX_PHASES = 12; -export interface CodeGenState { - blueprint: Blueprint; - projectName: string, +/** Common state fields for all agent behaviors */ +export interface BaseProjectState { + behaviorType: BehaviorType; + projectType: ProjectType; + + // Identity + projectName: string; query: string; + sessionId: string; + hostname: string; + + blueprint: Blueprint; + + templateName: string | 'custom'; + + // Conversation + conversationMessages: ConversationMessage[]; + + // Inference context + inferenceContext: InferenceContext; + + // Generation control + shouldBeGenerating: boolean; + // agentMode: 'deterministic' | 'smart'; // Would be migrated and mapped to behaviorType + + // Common file storage generatedFilesMap: Record; - generatedPhases: PhaseState[]; - commandsHistory?: string[]; // History of commands run - lastPackageJson?: string; // Last package.json file contents - templateName: string; + + // Common infrastructure sandboxInstanceId?: string; + commandsHistory?: string[]; + lastPackageJson?: string; + pendingUserInputs: string[]; + projectUpdatesAccumulator: string[]; - shouldBeGenerating: boolean; // Persistent flag indicating generation should be active + // Deep debug + lastDeepDebugTranscript: string | null; + mvpGenerated: boolean; reviewingInitiated: boolean; - agentMode: 'deterministic' | 'smart'; - sessionId: string; - hostname: string; - phasesCounter: number; +} - pendingUserInputs: string[]; - currentDevState: CurrentDevState; - reviewCycles?: number; // Number of review cycles for code review phase - currentPhase?: PhaseConceptType; // Current phase being worked on +/** Phasic agent state */ +export interface PhasicState extends BaseProjectState { + behaviorType: 'phasic'; + blueprint: PhasicBlueprint; + generatedPhases: PhaseState[]; - conversationMessages: ConversationMessage[]; - projectUpdatesAccumulator: string[]; - inferenceContext: 
InferenceContext; + phasesCounter: number; + currentDevState: CurrentDevState; + reviewCycles?: number; + currentPhase?: PhaseConceptType; +} - lastDeepDebugTranscript: string | null; -} +export interface WorkflowMetadata { + name: string; + description: string; + params: Record; + bindings?: { + envVars?: Record; + secrets?: Record; + resources?: Record; + }; +} + +/** Agentic agent state */ +export interface AgenticState extends BaseProjectState { + behaviorType: 'agentic'; + blueprint: AgenticBlueprint; + currentPlan: Plan; +} + +export type AgentState = PhasicState | AgenticState; diff --git a/worker/agents/core/stateMigration.ts b/worker/agents/core/stateMigration.ts index b8228872..f77a13bb 100644 --- a/worker/agents/core/stateMigration.ts +++ b/worker/agents/core/stateMigration.ts @@ -1,27 +1,56 @@ -import { CodeGenState, FileState } from './state'; +import { AgentState, FileState } from './state'; import { StructuredLogger } from '../../logger'; import { TemplateDetails } from 'worker/services/sandbox/sandboxTypes'; import { generateNanoId } from '../../utils/idGenerator'; import { generateProjectName } from '../utils/templateCustomizer'; +// Type guards for legacy state detection +type LegacyFileFormat = { + file_path?: string; + file_contents?: string; + file_purpose?: string; +}; + +type StateWithDeprecatedFields = AgentState & { + latestScreenshot?: unknown; + templateDetails?: TemplateDetails; + agentMode?: string; +}; + +function hasLegacyFileFormat(file: unknown): file is LegacyFileFormat { + if (typeof file !== 'object' || file === null) return false; + return 'file_path' in file || 'file_contents' in file || 'file_purpose' in file; +} + +function hasField(state: AgentState, key: K): state is AgentState & Record { + return key in state; +} + +function isStateWithTemplateDetails(state: AgentState): state is StateWithDeprecatedFields & { templateDetails: TemplateDetails } { + return 'templateDetails' in state; +} + +function isStateWithAgentMode(state: AgentState): state is StateWithDeprecatedFields & { agentMode: string } { + return 'agentMode' in state; +} + export class StateMigration { - static migrateIfNeeded(state: CodeGenState, logger: StructuredLogger): CodeGenState | null { + static migrateIfNeeded(state: AgentState, logger: StructuredLogger): AgentState | null { let needsMigration = false; //------------------------------------------------------------------------------------ // Migrate files from old schema //------------------------------------------------------------------------------------ - const migrateFile = (file: any): any => { - const hasOldFormat = 'file_path' in file || 'file_contents' in file || 'file_purpose' in file; - - if (hasOldFormat) { + const migrateFile = (file: FileState | unknown): FileState => { + if (hasLegacyFileFormat(file)) { return { - filePath: file.filePath || file.file_path, - fileContents: file.fileContents || file.file_contents, - filePurpose: file.filePurpose || file.file_purpose, + filePath: (file as FileState).filePath || file.file_path || '', + fileContents: (file as FileState).fileContents || file.file_contents || '', + filePurpose: (file as FileState).filePurpose || file.file_purpose || '', + lastDiff: (file as FileState).lastDiff || '', }; } - return file; + return file as FileState; }; const migratedFilesMap: Record = {}; @@ -126,19 +155,21 @@ export class StateMigration { ...migratedInferenceContext }; - delete (migratedInferenceContext as any).userApiKeys; + // Remove the deprecated field using type assertion + const 
contextWithLegacyField = migratedInferenceContext as unknown as Record; + delete contextWithLegacyField.userApiKeys; needsMigration = true; } //------------------------------------------------------------------------------------ // Migrate deprecated props //------------------------------------------------------------------------------------ - const stateHasDeprecatedProps = 'latestScreenshot' in (state as any); + const stateHasDeprecatedProps = hasField(state, 'latestScreenshot'); if (stateHasDeprecatedProps) { needsMigration = true; } - const stateHasProjectUpdatesAccumulator = 'projectUpdatesAccumulator' in (state as any); + const stateHasProjectUpdatesAccumulator = hasField(state, 'projectUpdatesAccumulator'); if (!stateHasProjectUpdatesAccumulator) { needsMigration = true; } @@ -147,10 +178,9 @@ export class StateMigration { // Migrate templateDetails -> templateName //------------------------------------------------------------------------------------ let migratedTemplateName = state.templateName; - const hasTemplateDetails = 'templateDetails' in (state as any); + const hasTemplateDetails = isStateWithTemplateDetails(state); if (hasTemplateDetails) { - const templateDetails = (state as any).templateDetails; - migratedTemplateName = (templateDetails as TemplateDetails).name; + migratedTemplateName = state.templateDetails.name; needsMigration = true; logger.info('Migrating templateDetails to templateName', { templateName: migratedTemplateName }); } @@ -170,6 +200,13 @@ export class StateMigration { logger.info('Generating missing projectName', { projectName: migratedProjectName }); } + let migratedProjectType = state.projectType; + const hasProjectType = hasField(state, 'projectType'); + if (!hasProjectType || !migratedProjectType) { + migratedProjectType = 'app'; + needsMigration = true; + logger.info('Adding default projectType for legacy state', { projectType: migratedProjectType }); + } if (needsMigration) { logger.info('Migrating state: schema format, conversation cleanup, security fixes, and bootstrap setup', { generatedFilesCount: Object.keys(migratedFilesMap).length, @@ -177,22 +214,27 @@ export class StateMigration { removedUserApiKeys: state.inferenceContext && 'userApiKeys' in state.inferenceContext, }); - const newState = { + const newState: AgentState = { ...state, generatedFilesMap: migratedFilesMap, conversationMessages: migratedConversationMessages, inferenceContext: migratedInferenceContext, projectUpdatesAccumulator: [], templateName: migratedTemplateName, - projectName: migratedProjectName - }; + projectName: migratedProjectName, + projectType: migratedProjectType, + } as AgentState; // Remove deprecated fields + const stateWithDeprecated = newState as StateWithDeprecatedFields; if (stateHasDeprecatedProps) { - delete (newState as any).latestScreenshot; + delete stateWithDeprecated.latestScreenshot; } if (hasTemplateDetails) { - delete (newState as any).templateDetails; + delete stateWithDeprecated.templateDetails; + } + if (isStateWithAgentMode(state)) { + delete stateWithDeprecated.agentMode; } return newState; diff --git a/worker/agents/core/types.ts b/worker/agents/core/types.ts index af7ecc27..618db851 100644 --- a/worker/agents/core/types.ts +++ b/worker/agents/core/types.ts @@ -1,27 +1,61 @@ -import type { RuntimeError, StaticAnalysisResponse } from '../../services/sandbox/sandboxTypes'; +import type { RuntimeError, StaticAnalysisResponse, GitHubPushRequest } from '../../services/sandbox/sandboxTypes'; import type { FileOutputType, PhaseConceptType } from 
'../schemas'; import type { ConversationMessage } from '../inferutils/common'; import type { InferenceContext } from '../inferutils/config.types'; import type { TemplateDetails } from '../../services/sandbox/sandboxTypes'; import { TemplateSelection } from '../schemas'; -import { CurrentDevState } from './state'; +import { CurrentDevState, PhasicState, AgenticState } from './state'; import { ProcessedImageAttachment } from 'worker/types/image-attachment'; -export interface AgentInitArgs { +export type BehaviorType = 'phasic' | 'agentic'; + +export type ProjectType = 'app' | 'workflow' | 'presentation' | 'general'; + +/** + * Runtime type - WHERE it runs during dev + * - sandbox: Cloudflare Containers (full apps with UI) + * - worker: Dynamic Worker Loaders (backend only) + * - none: No runtime (static export only) + */ +export type RuntimeType = 'sandbox' | 'worker' | 'none'; + +/** Base initialization arguments shared by all agents */ +interface BaseAgentInitArgs { query: string; - language?: string; - frameworks?: string[]; hostname: string; inferenceContext: InferenceContext; + language?: string; + frameworks?: string[]; + images?: ProcessedImageAttachment[]; + onBlueprintChunk: (chunk: string) => void; + sandboxSessionId?: string; // Generated by CodeGeneratorAgent, passed to behavior +} + +/** Phasic agent initialization arguments */ +interface PhasicAgentInitArgs extends BaseAgentInitArgs { templateInfo: { templateDetails: TemplateDetails; selection: TemplateSelection; - } - images?: ProcessedImageAttachment[]; - onBlueprintChunk: (chunk: string) => void; + }; +} + +/** Agentic agent initialization arguments */ +interface AgenticAgentInitArgs extends BaseAgentInitArgs { + templateInfo?: { + templateDetails: TemplateDetails; + selection: TemplateSelection; + }; } +/** Generic initialization arguments based on state type */ +export type AgentInitArgs = + TState extends PhasicState ? PhasicAgentInitArgs : + TState extends AgenticState ? 
AgenticAgentInitArgs : + PhasicAgentInitArgs | AgenticAgentInitArgs; + +export type Plan = string; + export interface AllIssues { runtimeErrors: RuntimeError[]; staticAnalysis: StaticAnalysisResponse; @@ -62,4 +96,42 @@ export interface PhaseExecutionResult { */ export type DeepDebugResult = | { success: true; transcript: string } - | { success: false; error: string }; \ No newline at end of file + | { success: false; error: string }; + +export type DeploymentTarget = 'platform' | 'user'; + +export interface DeployResult { + success: boolean; + target: DeploymentTarget; + url?: string; + deploymentId?: string; + error?: string; + metadata?: Record; +} + +export interface DeployOptions { + target?: DeploymentTarget; + token?: string; + metadata?: Record; +} + +/** + * Result of project export/deployment operation + */ +export interface ExportResult { + success: boolean; + url?: string; + error?: string; + metadata?: Record; +} + +/** + * Options for project export/deployment + */ +export interface ExportOptions { + kind: 'github' | 'pdf' | 'pptx' | 'googleslides' | 'workflow'; + format?: string; + token?: string; + github?: GitHubPushRequest; + metadata?: Record; +} diff --git a/worker/agents/core/websocket.ts b/worker/agents/core/websocket.ts index 4428f690..9b18032d 100644 --- a/worker/agents/core/websocket.ts +++ b/worker/agents/core/websocket.ts @@ -1,13 +1,17 @@ import { Connection } from 'agents'; import { createLogger } from '../../logger'; import { WebSocketMessageRequests, WebSocketMessageResponses } from '../constants'; -import { SimpleCodeGeneratorAgent } from './simpleGeneratorAgent'; import { WebSocketMessage, WebSocketMessageData, WebSocketMessageType } from '../../api/websocketTypes'; import { MAX_IMAGES_PER_MESSAGE, MAX_IMAGE_SIZE_BYTES } from '../../types/image-attachment'; +import type { CodeGeneratorAgent } from './codingAgent'; const logger = createLogger('CodeGeneratorWebSocket'); -export function handleWebSocketMessage(agent: SimpleCodeGeneratorAgent, connection: Connection, message: string): void { +export function handleWebSocketMessage( + agent: CodeGeneratorAgent, + connection: Connection, + message: string +): void { try { logger.info(`Received WebSocket message from ${connection.id}: ${message}`); const parsedMessage = JSON.parse(message); @@ -21,7 +25,7 @@ export function handleWebSocketMessage(agent: SimpleCodeGeneratorAgent, connecti }); // Check if generation is already active to avoid duplicate processes - if (agent.isCodeGenerating()) { + if (agent.getBehavior().isCodeGenerating()) { logger.info('Generation already in progress, skipping duplicate request'); // sendToConnection(connection, WebSocketMessageResponses.GENERATION_STARTED, { // message: 'Code generation is already in progress' @@ -31,13 +35,13 @@ export function handleWebSocketMessage(agent: SimpleCodeGeneratorAgent, connecti // Start generation process logger.info('Starting code generation process'); - agent.generateAllFiles().catch(error => { + agent.getBehavior().generateAllFiles().catch(error => { logger.error('Error during code generation:', error); sendError(connection, `Error generating files: ${error instanceof Error ? 
error.message : String(error)}`); }).finally(() => { // Only clear shouldBeGenerating on successful completion // (errors might want to retry, so this could be handled differently) - if (!agent.isCodeGenerating()) { + if (!agent.getBehavior().isCodeGenerating()) { agent.setState({ ...agent.state, shouldBeGenerating: false @@ -46,12 +50,12 @@ export function handleWebSocketMessage(agent: SimpleCodeGeneratorAgent, connecti }); break; case WebSocketMessageRequests.DEPLOY: - agent.deployToCloudflare().then((deploymentResult) => { - if (!deploymentResult) { - logger.error('Failed to deploy to Cloudflare Workers'); + agent.deployProject().then((deploymentResult) => { + if (!deploymentResult.success) { + logger.error('Deployment failed', deploymentResult); return; } - logger.info('Successfully deployed to Cloudflare Workers!', deploymentResult); + logger.info('Deployment completed', deploymentResult); }).catch((error: unknown) => { logger.error('Error during deployment:', error); }); @@ -59,14 +63,14 @@ export function handleWebSocketMessage(agent: SimpleCodeGeneratorAgent, connecti case WebSocketMessageRequests.PREVIEW: // Deploy current state for preview logger.info('Deploying for preview'); - agent.deployToSandbox().then((deploymentResult) => { + agent.getBehavior().deployToSandbox().then((deploymentResult) => { logger.info(`Preview deployed successfully!, deploymentResult:`, deploymentResult); }).catch((error: unknown) => { logger.error('Error during preview deployment:', error); }); break; case WebSocketMessageRequests.CAPTURE_SCREENSHOT: - agent.captureScreenshot(parsedMessage.data.url, parsedMessage.data.viewport).then((screenshotResult) => { + agent.getBehavior().captureScreenshot(parsedMessage.data.url, parsedMessage.data.viewport).then((screenshotResult) => { if (!screenshotResult) { logger.error('Failed to capture screenshot'); return; @@ -80,7 +84,7 @@ export function handleWebSocketMessage(agent: SimpleCodeGeneratorAgent, connecti logger.info('User requested to stop generation'); // Cancel current inference operation - const wasCancelled = agent.cancelCurrentInference(); + const wasCancelled = agent.getBehavior().cancelCurrentInference(); // Clear shouldBeGenerating flag agent.setState({ @@ -102,11 +106,11 @@ export function handleWebSocketMessage(agent: SimpleCodeGeneratorAgent, connecti shouldBeGenerating: true }); - if (!agent.isCodeGenerating()) { + if (!agent.getBehavior().isCodeGenerating()) { sendToConnection(connection, WebSocketMessageResponses.GENERATION_RESUMED, { message: 'Code generation resumed' }); - agent.generateAllFiles().catch(error => { + agent.getBehavior().generateAllFiles().catch(error => { logger.error('Error resuming code generation:', error); sendError(connection, `Error resuming generation: ${error instanceof Error ? 
error.message : String(error)}`); }); @@ -160,7 +164,7 @@ export function handleWebSocketMessage(agent: SimpleCodeGeneratorAgent, connecti break; case WebSocketMessageRequests.GET_MODEL_CONFIGS: logger.info('Fetching model configurations'); - agent.getModelConfigsInfo().then(configsInfo => { + agent.getBehavior().getModelConfigsInfo().then(configsInfo => { sendToConnection(connection, WebSocketMessageResponses.MODEL_CONFIGS_INFO, { message: 'Model configurations retrieved', configs: configsInfo @@ -177,7 +181,7 @@ export function handleWebSocketMessage(agent: SimpleCodeGeneratorAgent, connecti case WebSocketMessageRequests.GET_CONVERSATION_STATE: try { const state = agent.getConversationState(); - const debugState = agent.getDeepDebugSessionState(); + const debugState = agent.getBehavior().getDeepDebugSessionState(); logger.info('Conversation state retrieved', state); sendToConnection(connection, WebSocketMessageResponses.CONVERSATION_STATE, { state, @@ -251,4 +255,4 @@ export function sendToConnection( export function sendError(connection: WebSocket, errorMessage: string): void { sendToConnection(connection, 'error', { error: errorMessage }); -} \ No newline at end of file +} diff --git a/worker/agents/domain/values/GenerationContext.ts b/worker/agents/domain/values/GenerationContext.ts index 39176ab9..788ea8d8 100644 --- a/worker/agents/domain/values/GenerationContext.ts +++ b/worker/agents/domain/values/GenerationContext.ts @@ -1,36 +1,47 @@ -import { Blueprint } from '../../schemas'; +import { PhasicBlueprint, AgenticBlueprint } from '../../schemas'; import { FileTreeNode, TemplateDetails } from '../../../services/sandbox/sandboxTypes'; -import { CodeGenState, FileState, PhaseState } from '../../core/state'; +import { FileState, PhaseState, PhasicState, AgenticState } from '../../core/state'; import { DependencyManagement } from '../pure/DependencyManagement'; import type { StructuredLogger } from '../../../logger'; import { FileProcessing } from '../pure/FileProcessing'; +import { Plan } from '../../core/types'; + +/** Common fields shared by all generation contexts */ +interface BaseGenerationContext { + readonly query: string; + readonly allFiles: FileState[]; + readonly templateDetails: TemplateDetails; + readonly dependencies: Record; + readonly commandsHistory: string[]; +} + +/** Phase-based generation context with detailed blueprint */ +export interface PhasicGenerationContext extends BaseGenerationContext { + readonly blueprint: PhasicBlueprint; + readonly generatedPhases: PhaseState[]; +} + +/** Plan-based generation context with simple blueprint */ +export interface AgenticGenerationContext extends BaseGenerationContext { + readonly blueprint: AgenticBlueprint; + readonly currentPlan: Plan; +} /** - * Immutable context for code generation operations - * Contains all necessary data for generating code + * Discriminated union of generation contexts + * + * Discriminate using: `'generatedPhases' in context` or `GenerationContext.isPhasic(context)` */ -export class GenerationContext { - constructor( - public readonly query: string, - public readonly blueprint: Blueprint, - public readonly templateDetails: TemplateDetails, - public readonly dependencies: Record, - public readonly allFiles: FileState[], - public readonly generatedPhases: PhaseState[], - public readonly commandsHistory: string[] - ) { - // Freeze to ensure immutability - Object.freeze(this); - Object.freeze(this.dependencies); - Object.freeze(this.allFiles); - Object.freeze(this.generatedPhases); - 
Object.freeze(this.commandsHistory); - } - - /** - * Create context from current state - */ - static from(state: CodeGenState, templateDetails: TemplateDetails, logger?: Pick): GenerationContext { +export type GenerationContext = PhasicGenerationContext | AgenticGenerationContext; + +/** Generation context utility functions */ +export namespace GenerationContext { + /** Create immutable context from agent state */ + export function from( + state: PhasicState | AgenticState, + templateDetails: TemplateDetails, + logger?: Pick + ): GenerationContext { const dependencies = DependencyManagement.mergeDependencies( templateDetails.deps || {}, state.lastPackageJson, @@ -42,36 +53,70 @@ export class GenerationContext { state.generatedFilesMap ); - return new GenerationContext( - state.query, - state.blueprint, + const base = { + query: state.query, + allFiles, templateDetails, dependencies, - allFiles, - state.generatedPhases, - state.commandsHistory || [] - ); + commandsHistory: state.commandsHistory || [], + }; + + return state.behaviorType === 'phasic' + ? Object.freeze({ ...base, blueprint: (state as PhasicState).blueprint, generatedPhases: (state as PhasicState).generatedPhases }) + : Object.freeze({ ...base, blueprint: (state as AgenticState).blueprint, currentPlan: (state as AgenticState).currentPlan }); } - /** - * Get formatted phases for prompt generation - */ - getCompletedPhases() { - return Object.values(this.generatedPhases.filter(phase => phase.completed)); + /** Type guard for phasic context */ + export function isPhasic(context: GenerationContext): context is PhasicGenerationContext { + return 'generatedPhases' in context; } - getFileTree(): FileTreeNode { - const builder = new FileTreeBuilder(this.templateDetails?.fileTree); + /** Type guard for agentic context */ + export function isAgentic(context: GenerationContext): context is AgenticGenerationContext { + return 'currentPlan' in context; + } + + /** Get completed phases (empty array for agentic contexts) */ + export function getCompletedPhases(context: GenerationContext): PhaseState[] { + return isPhasic(context) + ? context.generatedPhases.filter(phase => phase.completed) + : []; + } - for (const { filePath } of this.allFiles) { + /** Build file tree from context files */ + export function getFileTree(context: GenerationContext): FileTreeNode { + const builder = new FileTreeBuilder(context.templateDetails?.fileTree); + + for (const { filePath } of context.allFiles) { const normalized = FileTreeBuilder.normalizePath(filePath); if (normalized) { builder.addFile(normalized); } } - + return builder.build(); } + + /** Get phasic blueprint if available */ + export function getPhasicBlueprint(context: GenerationContext): PhasicBlueprint | undefined { + return isPhasic(context) ? context.blueprint : undefined; + } + + /** Get agentic blueprint if available */ + export function getAgenticBlueprint(context: GenerationContext): AgenticBlueprint | undefined { + return isAgentic(context) ? 
context.blueprint : undefined; + } + + /** Get common blueprint data */ + export function getCommonBlueprintData(context: GenerationContext) { + return { + title: context.blueprint.title, + projectName: context.blueprint.projectName, + description: context.blueprint.description, + frameworks: context.blueprint.frameworks, + colorPalette: context.blueprint.colorPalette, + }; + } } class FileTreeBuilder { diff --git a/worker/agents/git/git.ts b/worker/agents/git/git.ts index f9e79c9c..29a5acd9 100644 --- a/worker/agents/git/git.ts +++ b/worker/agents/git/git.ts @@ -93,7 +93,7 @@ export class GitVersionControl { } } - console.log(`[Git] Staged ${files.length} files`, files); + console.log(`[Git] Staged ${files.length} files: ${files.map(f => f.filePath).join(', ')}`); } private normalizePath(path: string): string { diff --git a/worker/agents/index.ts b/worker/agents/index.ts index 17101673..61474d69 100644 --- a/worker/agents/index.ts +++ b/worker/agents/index.ts @@ -1,52 +1,71 @@ - -import { SmartCodeGeneratorAgent } from './core/smartGeneratorAgent'; import { getAgentByName } from 'agents'; -import { CodeGenState } from './core/state'; import { generateId } from '../utils/idGenerator'; import { StructuredLogger } from '../logger'; import { InferenceContext } from './inferutils/config.types'; import { SandboxSdkClient } from '../services/sandbox/sandboxSdkClient'; import { selectTemplate } from './planning/templateSelector'; import { TemplateDetails } from '../services/sandbox/sandboxTypes'; +import { createScratchTemplateDetails } from './utils/templates'; import { TemplateSelection } from './schemas'; import type { ImageAttachment } from '../types/image-attachment'; import { BaseSandboxService } from 'worker/services/sandbox/BaseSandboxService'; +import { AgentState, CurrentDevState } from './core/state'; +import { CodeGeneratorAgent } from './core/codingAgent'; +import { BehaviorType, ProjectType } from './core/types'; + +type AgentStubProps = { + behaviorType?: BehaviorType; + projectType?: ProjectType; +}; -export async function getAgentStub(env: Env, agentId: string) : Promise> { - return getAgentByName(env.CodeGenObject, agentId); +export async function getAgentStub( + env: Env, + agentId: string, + props?: AgentStubProps +) : Promise> { + const options = props ? 
{ props } : undefined; + return getAgentByName(env.CodeGenObject, agentId, options); } -export async function getAgentStubLightweight(env: Env, agentId: string) : Promise> { - return getAgentByName(env.CodeGenObject, agentId, { +export async function getAgentStubLightweight(env: Env, agentId: string) : Promise> { + return getAgentByName(env.CodeGenObject, agentId, { // props: { readOnlyMode: true } }); } -export async function getAgentState(env: Env, agentId: string) : Promise { +export async function getAgentState(env: Env, agentId: string) : Promise { const agentInstance = await getAgentStub(env, agentId); - return await agentInstance.getFullState() as CodeGenState; + return await agentInstance.getFullState() as AgentState; } -export async function cloneAgent(env: Env, agentId: string) : Promise<{newAgentId: string, newAgent: DurableObjectStub}> { +export async function cloneAgent(env: Env, agentId: string) : Promise<{newAgentId: string, newAgent: DurableObjectStub}> { const agentInstance = await getAgentStub(env, agentId); if (!agentInstance || !await agentInstance.isInitialized()) { throw new Error(`Agent ${agentId} not found`); } const newAgentId = generateId(); - const newAgent = await getAgentStub(env, newAgentId); - const originalState = await agentInstance.getFullState() as CodeGenState; - const newState = { + const originalState = await agentInstance.getFullState(); + + const newState: AgentState = { ...originalState, sessionId: newAgentId, sandboxInstanceId: undefined, pendingUserInputs: [], - currentDevState: 0, - generationPromise: undefined, shouldBeGenerating: false, - // latestScreenshot: undefined, - clientReportedErrors: [], - }; + projectUpdatesAccumulator: [], + reviewingInitiated: false, + mvpGenerated: false, + ...(originalState.behaviorType === 'phasic' ? 
{ + generatedPhases: [], + currentDevState: CurrentDevState.IDLE, + } : {}), + } as AgentState; + + const newAgent = await getAgentStub(env, newAgentId, { + behaviorType: originalState.behaviorType, + projectType: originalState.projectType, + }); await newAgent.setState(newState); return {newAgentId, newAgent}; @@ -56,9 +75,23 @@ export async function getTemplateForQuery( env: Env, inferenceContext: InferenceContext, query: string, + projectType: ProjectType | 'auto', images: ImageAttachment[] | undefined, logger: StructuredLogger, -) : Promise<{templateDetails: TemplateDetails, selection: TemplateSelection}> { +) : Promise<{templateDetails: TemplateDetails, selection: TemplateSelection, projectType: ProjectType}> { + // In 'general' mode, we intentionally start from scratch without a real template + if (projectType === 'general') { + const scratch: TemplateDetails = createScratchTemplateDetails(); + const selection: TemplateSelection = { + selectedTemplateName: null, + reasoning: 'General (from-scratch) mode: no template selected', + useCase: 'General', + complexity: 'moderate', + styleSelection: 'Custom', + projectType: 'general', + } as TemplateSelection; // satisfies schema shape + return { templateDetails: scratch, selection, projectType: 'general' }; + } // Fetch available templates const templatesResponse = await SandboxSdkClient.listTemplates(); if (!templatesResponse || !templatesResponse.success) { @@ -69,6 +102,7 @@ export async function getTemplateForQuery( env, inferenceContext, query, + projectType, availableTemplates: templatesResponse.templates, images, }); @@ -76,8 +110,10 @@ export async function getTemplateForQuery( logger.info('Selected template', { selectedTemplate: analyzeQueryResponse }); if (!analyzeQueryResponse.selectedTemplateName) { - logger.error('No suitable template found for code generation'); - throw new Error('No suitable template found for code generation'); + // For non-general requests when no template is selected, fall back to scratch + logger.warn('No suitable template found; falling back to scratch'); + const scratch: TemplateDetails = createScratchTemplateDetails(); + return { templateDetails: scratch, selection: analyzeQueryResponse, projectType: analyzeQueryResponse.projectType }; } const selectedTemplate = templatesResponse.templates.find(template => template.name === analyzeQueryResponse.selectedTemplateName); @@ -92,5 +128,5 @@ export async function getTemplateForQuery( } const templateDetails = templateDetailsResponse.templateDetails; - return { templateDetails, selection: analyzeQueryResponse }; -} \ No newline at end of file + return { templateDetails, selection: analyzeQueryResponse, projectType: analyzeQueryResponse.projectType }; +} diff --git a/worker/agents/inferutils/common.ts b/worker/agents/inferutils/common.ts index f4e5c968..57df50c8 100644 --- a/worker/agents/inferutils/common.ts +++ b/worker/agents/inferutils/common.ts @@ -25,6 +25,7 @@ export type Message = { content: MessageContent; name?: string; // Optional name field required for function messages tool_calls?: ChatCompletionMessageToolCall[]; + tool_call_id?: string; // For role = tool }; export interface ConversationMessage extends Message { diff --git a/worker/agents/inferutils/config.ts b/worker/agents/inferutils/config.ts index 482eff75..85136dec 100644 --- a/worker/agents/inferutils/config.ts +++ b/worker/agents/inferutils/config.ts @@ -160,6 +160,13 @@ export const AGENT_CONFIG: AgentConfig = { temperature: 0.1, fallbackModel: AIModels.GEMINI_2_5_FLASH, }, + 
agenticProjectBuilder: { + name: AIModels.GEMINI_2_5_PRO, + reasoning_effort: 'high', + max_tokens: 8000, + temperature: 0.7, + fallbackModel: AIModels.GEMINI_2_5_FLASH, + }, }; diff --git a/worker/agents/inferutils/config.types.ts b/worker/agents/inferutils/config.types.ts index a3b12007..26f3be6c 100644 --- a/worker/agents/inferutils/config.types.ts +++ b/worker/agents/inferutils/config.types.ts @@ -67,6 +67,7 @@ export interface AgentConfig { fastCodeFixer: ModelConfig; conversationalResponse: ModelConfig; deepDebugger: ModelConfig; + agenticProjectBuilder: ModelConfig; } // Provider and reasoning effort types for validation diff --git a/worker/agents/inferutils/core.ts b/worker/agents/inferutils/core.ts index 7ad6991a..9a98aec6 100644 --- a/worker/agents/inferutils/core.ts +++ b/worker/agents/inferutils/core.ts @@ -320,6 +320,7 @@ type InferArgsBase = { providerOverride?: 'cloudflare' | 'direct'; userApiKeys?: Record; abortSignal?: AbortSignal; + onAssistantMessage?: (message: Message) => Promise; }; type InferArgsStructured = InferArgsBase & { @@ -417,7 +418,7 @@ async function executeToolCalls(openAiToolCalls: ChatCompletionMessageFunctionTo if (!td) { throw new Error(`Tool ${tc.function.name} not found`); } - const result = await executeToolWithDefinition(td, args); + const result = await executeToolWithDefinition(tc, td, args); console.log(`Tool execution result for ${tc.function.name}:`, result); return { id: tc.id, @@ -427,6 +428,11 @@ async function executeToolCalls(openAiToolCalls: ChatCompletionMessageFunctionTo }; } catch (error) { console.error(`Tool execution failed for ${tc.function.name}:`, error); + // Check if error is an abort error + if (error instanceof AbortError) { + console.warn(`Tool call was aborted while executing ${tc.function.name}, ending tool call chain with the latest tool call result`); + throw error; + } return { id: tc.id, name: tc.function.name, @@ -438,6 +444,28 @@ async function executeToolCalls(openAiToolCalls: ChatCompletionMessageFunctionTo ); } +function updateToolCallContext(toolCallContext: ToolCallContext | undefined, assistantMessage: Message, executedToolCalls: ToolCallResult[]) { + const newMessages = [ + ...(toolCallContext?.messages || []), + assistantMessage, + ...executedToolCalls + .filter(result => result.name && result.name.trim() !== '') + .map((result, _) => ({ + role: "tool" as MessageRole, + content: result.result ? JSON.stringify(result.result) : 'done', + name: result.name, + tool_call_id: result.id, + })), + ]; + + const newDepth = (toolCallContext?.depth ?? 
0) + 1; + const newToolCallContext = { + messages: newMessages, + depth: newDepth + }; + return newToolCallContext; +} + export function infer( args: InferArgsStructured, toolCallContext?: ToolCallContext, @@ -471,6 +499,7 @@ export async function infer({ reasoning_effort, temperature, abortSignal, + onAssistantMessage, }: InferArgsBase & { schema?: OutputSchema; schemaName?: string; @@ -527,14 +556,32 @@ export async function infer({ let messagesToPass = [...optimizedMessages]; if (toolCallContext && toolCallContext.messages) { - // Minimal core fix with logging: exclude prior tool messages that have empty name const ctxMessages = toolCallContext.messages; - const droppedToolMsgs = ctxMessages.filter(m => m.role === 'tool' && (!m.name || m.name.trim() === '')); - if (droppedToolMsgs.length) { - console.warn(`[TOOL_CALL_WARNING] Dropping ${droppedToolMsgs.length} prior tool message(s) with empty name to avoid provider error`, droppedToolMsgs); - } - const filteredCtx = ctxMessages.filter(m => m.role !== 'tool' || (m.name && m.name.trim() !== '')); - messagesToPass.push(...filteredCtx); + let validToolCallIds = new Set(); + + const filtered = ctxMessages.filter(msg => { + // Update valid IDs when we see assistant with tool_calls + if (msg.role === 'assistant' && msg.tool_calls) { + validToolCallIds = new Set(msg.tool_calls.map(tc => tc.id)); + return true; + } + + // Filter tool messages + if (msg.role === 'tool') { + if (!msg.name?.trim()) { + console.warn('[TOOL_ORPHAN] Dropping tool message with empty name:', msg.tool_call_id); + return false; + } + if (!msg.tool_call_id || !validToolCallIds.has(msg.tool_call_id)) { + console.warn('[TOOL_ORPHAN] Dropping orphaned tool message:', msg.name, msg.tool_call_id); + return false; + } + } + + return true; + }); + + messagesToPass.push(...filtered); } if (format) { @@ -622,6 +669,10 @@ export async function infer({ } let toolCalls: ChatCompletionMessageFunctionToolCall[] = []; + /* + * Handle LLM response + */ + let content = ''; if (stream) { // If streaming is enabled, handle the stream response @@ -715,6 +766,16 @@ export async function infer({ console.log(`Total tokens used in prompt: ${totalTokens}`); } + const assistantMessage = { role: "assistant" as MessageRole, content, tool_calls: toolCalls }; + + if (onAssistantMessage) { + await onAssistantMessage(assistantMessage); + } + + /* + * Handle tool calls + */ + if (!content && !stream && !toolCalls.length) { // // Only error if not streaming and no content // console.error('No content received from OpenAI', JSON.stringify(response, null, 2)); @@ -725,33 +786,32 @@ export async function infer({ let executedToolCalls: ToolCallResult[] = []; if (tools) { // console.log(`Tool calls:`, JSON.stringify(toolCalls, null, 2), 'definition:', JSON.stringify(tools, null, 2)); - executedToolCalls = await executeToolCalls(toolCalls, tools); + try { + executedToolCalls = await executeToolCalls(toolCalls, tools); + } catch (error) { + console.error(`Tool execution failed for ${toolCalls[0].function.name}:`, error); + // Check if error is an abort error + if (error instanceof AbortError) { + console.warn(`Tool call was aborted, ending tool call chain with the latest tool call result`); + + const newToolCallContext = updateToolCallContext(toolCallContext, assistantMessage, executedToolCalls); + return { string: content, toolCallContext: newToolCallContext }; + } + // Otherwise, continue + } } + /* + * Handle tool call results + */ + if (executedToolCalls.length) { console.log(`Tool calls executed:`, 
JSON.stringify(executedToolCalls, null, 2)); - // Generate a new response with the tool calls executed - const newMessages = [ - ...(toolCallContext?.messages || []), - { role: "assistant" as MessageRole, content, tool_calls: toolCalls }, - ...executedToolCalls - .filter(result => result.name && result.name.trim() !== '') - .map((result, _) => ({ - role: "tool" as MessageRole, - content: result.result ? JSON.stringify(result.result) : 'done', - name: result.name, - tool_call_id: result.id, - })), - ]; - - const newDepth = (toolCallContext?.depth ?? 0) + 1; - const newToolCallContext = { - messages: newMessages, - depth: newDepth - }; + + const newToolCallContext = updateToolCallContext(toolCallContext, assistantMessage, executedToolCalls); const executedCallsWithResults = executedToolCalls.filter(result => result.result); - console.log(`${actionKey}: Tool calling depth: ${newDepth}/${getMaxToolCallingDepth(actionKey)}`); + console.log(`${actionKey}: Tool calling depth: ${newToolCallContext.depth}/${getMaxToolCallingDepth(actionKey)}`); if (executedCallsWithResults.length) { if (schema && schemaName) { @@ -771,6 +831,7 @@ export async function infer({ reasoning_effort, temperature, abortSignal, + onAssistantMessage, }, newToolCallContext); return output; } else { @@ -786,6 +847,7 @@ export async function infer({ reasoning_effort, temperature, abortSignal, + onAssistantMessage, }, newToolCallContext); return output; } diff --git a/worker/agents/inferutils/infer.ts b/worker/agents/inferutils/infer.ts index a03530f3..db0907fd 100644 --- a/worker/agents/inferutils/infer.ts +++ b/worker/agents/inferutils/infer.ts @@ -39,6 +39,7 @@ interface InferenceParamsBase { reasoning_effort?: ReasoningEffort; modelConfig?: ModelConfig; context: InferenceContext; + onAssistantMessage?: (message: Message) => Promise; } interface InferenceParamsStructured extends InferenceParamsBase { @@ -60,7 +61,7 @@ export async function executeInference( { messages, temperature, maxTokens, - retryLimit = 5, // Increased retry limit for better reliability + retryLimit = 5, stream, tools, reasoning_effort, @@ -69,7 +70,8 @@ export async function executeInference( { format, modelName, modelConfig, - context + context, + onAssistantMessage }: InferenceParamsBase & { schema?: T; format?: SchemaFormat; @@ -124,6 +126,7 @@ export async function executeInference( { reasoning_effort: useCheaperModel ? undefined : reasoning_effort, temperature, abortSignal: context.abortSignal, + onAssistantMessage, }) : await infer({ env, metadata: context, @@ -136,6 +139,7 @@ export async function executeInference( { reasoning_effort: useCheaperModel ? 
undefined : reasoning_effort, temperature, abortSignal: context.abortSignal, + onAssistantMessage, }); logger.info(`Successfully completed ${agentActionName} operation`); // console.log(result); diff --git a/worker/agents/operations/FileRegeneration.ts b/worker/agents/operations/FileRegeneration.ts index ff05c24d..30edf3eb 100644 --- a/worker/agents/operations/FileRegeneration.ts +++ b/worker/agents/operations/FileRegeneration.ts @@ -2,6 +2,7 @@ import { FileGenerationOutputType } from '../schemas'; import { AgentOperation, OperationOptions } from '../operations/common'; import { RealtimeCodeFixer } from '../assistants/realtimeCodeFixer'; import { FileOutputType } from '../schemas'; +import { GenerationContext } from '../domain/values/GenerationContext'; export interface FileRegenerationInputs { file: FileOutputType; @@ -99,10 +100,10 @@ useEffect(() => { - If an issue cannot be fixed surgically, explain why instead of forcing a fix `; -export class FileRegenerationOperation extends AgentOperation { +export class FileRegenerationOperation extends AgentOperation { async execute( inputs: FileRegenerationInputs, - options: OperationOptions + options: OperationOptions ): Promise { try { // Use realtime code fixer to fix the file with enhanced surgical fix prompts diff --git a/worker/agents/operations/PhaseGeneration.ts b/worker/agents/operations/PhaseGeneration.ts index 4cca09c9..07855801 100644 --- a/worker/agents/operations/PhaseGeneration.ts +++ b/worker/agents/operations/PhaseGeneration.ts @@ -8,6 +8,7 @@ import { AgentOperation, getSystemPromptWithProjectContext, OperationOptions } f import { AGENT_CONFIG } from '../inferutils/config'; import type { UserContext } from '../core/types'; import { imagesToBase64 } from 'worker/utils/images'; +import { PhasicGenerationContext } from '../domain/values/GenerationContext'; export interface PhaseGenerationInputs { issues: IssueReport; @@ -186,10 +187,10 @@ const userPromptFormatter = (issues: IssueReport, userSuggestions?: string[], is return PROMPT_UTILS.verifyPrompt(prompt); } -export class PhaseGenerationOperation extends AgentOperation { +export class PhaseGenerationOperation extends AgentOperation { async execute( inputs: PhaseGenerationInputs, - options: OperationOptions + options: OperationOptions ): Promise { const { issues, userContext, isUserSuggestedPhase } = inputs; const { env, logger, context } = options; diff --git a/worker/agents/operations/PhaseImplementation.ts b/worker/agents/operations/PhaseImplementation.ts index 4de9f8ba..76611f58 100644 --- a/worker/agents/operations/PhaseImplementation.ts +++ b/worker/agents/operations/PhaseImplementation.ts @@ -13,6 +13,7 @@ import { IsRealtimeCodeFixerEnabled, RealtimeCodeFixer } from '../assistants/rea import { CodeSerializerType } from '../utils/codeSerializers'; import type { UserContext } from '../core/types'; import { imagesToBase64 } from 'worker/utils/images'; +import { PhasicGenerationContext } from '../domain/values/GenerationContext'; export interface PhaseImplementationInputs { phase: PhaseConceptType @@ -396,30 +397,6 @@ Goal: Thoroughly review the entire codebase generated in previous phases. Identi This phase prepares the code for final deployment.`; -const README_GENERATION_PROMPT = ` -Generate a comprehensive README.md file for this project based on the provided blueprint and template information. -The README should be professional, well-structured, and provide clear instructions for users and developers. 
- - - -- Create a professional README with proper markdown formatting -- Do not add any images or screenshots -- Include project title, description, and key features from the blueprint -- Add technology stack section based on the template dependencies -- Include setup/installation instructions using bun (not npm/yarn) -- Add usage examples and development instructions -- Include a deployment section with Cloudflare-specific instructions -- **IMPORTANT**: Add a \`[cloudflarebutton]\` placeholder near the top and another in the deployment section for the Cloudflare deploy button. Write the **EXACT** string except the backticks and DON'T enclose it in any other button or anything. We will replace it with https://deploy.workers.cloudflare.com/?url=\${repositoryUrl\} when the repository is created. -- Structure the content clearly with appropriate headers and sections -- Be concise but comprehensive - focus on essential information -- Use professional tone suitable for open source projects - - -Generate the complete README.md content in markdown format. -Do not provide any additional text or explanation. -All your output will be directly saved in the README.md file. -Do not provide and markdown fence \`\`\` \`\`\` around the content either! Just pure raw markdown content!`; - const formatUserSuggestions = (suggestions?: string[] | null): string => { if (!suggestions || suggestions.length === 0) { return ''; @@ -456,10 +433,10 @@ const userPromptFormatter = (phaseConcept: PhaseConceptType, issues: IssueReport return PROMPT_UTILS.verifyPrompt(prompt); } -export class PhaseImplementationOperation extends AgentOperation { +export class PhaseImplementationOperation extends AgentOperation { async execute( inputs: PhaseImplementationInputs, - options: OperationOptions + options: OperationOptions ): Promise { const { phase, issues, userContext } = inputs; const { env, logger, context } = options; @@ -580,37 +557,4 @@ export class PhaseImplementationOperation extends AgentOperation { - const { env, logger, context } = options; - logger.info("Generating README.md for the project"); - - try { - let readmePrompt = README_GENERATION_PROMPT; - const messages = [...getSystemPromptWithProjectContext(SYSTEM_PROMPT, context, CodeSerializerType.SCOF), createUserMessage(readmePrompt)]; - - const results = await executeInference({ - env: env, - messages, - agentActionName: "projectSetup", - context: options.inferenceContext, - }); - - if (!results || !results.string) { - logger.error('Failed to generate README.md content'); - throw new Error('Failed to generate README.md content'); - } - - logger.info('Generated README.md content successfully'); - - return { - filePath: 'README.md', - fileContents: results.string, - filePurpose: 'Project documentation and setup instructions' - }; - } catch (error) { - logger.error("Error generating README:", error); - throw error; - } - } } diff --git a/worker/agents/operations/PostPhaseCodeFixer.ts b/worker/agents/operations/PostPhaseCodeFixer.ts index bb3de4e2..242abd94 100644 --- a/worker/agents/operations/PostPhaseCodeFixer.ts +++ b/worker/agents/operations/PostPhaseCodeFixer.ts @@ -6,6 +6,7 @@ import { FileOutputType, PhaseConceptType } from '../schemas'; import { SCOFFormat } from '../output-formats/streaming-formats/scof'; import { CodeIssue } from '../../services/sandbox/sandboxTypes'; import { CodeSerializerType } from '../utils/codeSerializers'; +import { PhasicGenerationContext } from '../domain/values/GenerationContext'; export interface FastCodeFixerInputs { query: 
string; @@ -71,10 +72,10 @@ const userPromptFormatter = (query: string, issues: CodeIssue[], allFiles: FileO return PROMPT_UTILS.verifyPrompt(prompt); } -export class FastCodeFixerOperation extends AgentOperation { +export class FastCodeFixerOperation extends AgentOperation { async execute( inputs: FastCodeFixerInputs, - options: OperationOptions + options: OperationOptions ): Promise { const { query, issues, allFiles, allPhases } = inputs; const { env, logger } = options; diff --git a/worker/agents/operations/ScreenshotAnalysis.ts b/worker/agents/operations/ScreenshotAnalysis.ts deleted file mode 100644 index b562908b..00000000 --- a/worker/agents/operations/ScreenshotAnalysis.ts +++ /dev/null @@ -1,134 +0,0 @@ -import { Blueprint, ScreenshotAnalysisSchema, ScreenshotAnalysisType } from '../schemas'; -import { createSystemMessage, createMultiModalUserMessage } from '../inferutils/common'; -import { executeInference } from '../inferutils/infer'; -import { PROMPT_UTILS } from '../prompts'; -import { ScreenshotData } from '../core/types'; -import { AgentOperation, OperationOptions } from './common'; -import { OperationError } from '../utils/operationError'; - -export interface ScreenshotAnalysisInput { - screenshotData: ScreenshotData, -} - -const SYSTEM_PROMPT = `You are a UI/UX Quality Assurance Specialist at Cloudflare. Your task is to analyze application screenshots against blueprint specifications and identify visual issues. - -## ANALYSIS PRIORITIES: -1. **Missing Elements** - Blueprint components not visible -2. **Layout Issues** - Misaligned, overlapping, or broken layouts -3. **Responsive Problems** - Mobile/desktop rendering issues -4. **Visual Bugs** - Broken styling, incorrect colors, missing images - -## EXAMPLE ANALYSES: - -**Example 1 - Game UI:** -Blueprint: "Score display in top-right, game board centered, control buttons below" -Screenshot: Shows score in top-left, buttons missing -Analysis: -- hasIssues: true -- issues: ["Score positioned incorrectly", "Control buttons not visible"] -- matchesBlueprint: false -- deviations: ["Score placement", "Missing controls"] - -**Example 2 - Dashboard:** -Blueprint: "3-column layout with sidebar, main content, and metrics panel" -Screenshot: Shows proper 3-column layout, all elements visible -Analysis: -- hasIssues: false -- issues: [] -- matchesBlueprint: true -- deviations: [] - -## OUTPUT FORMAT: -Return JSON with exactly these fields: -- hasIssues: boolean -- issues: string[] (specific problems found) -- uiCompliance: { matchesBlueprint: boolean, deviations: string[] } -- suggestions: string[] (improvement recommendations)`; - -const USER_PROMPT = `Analyze this screenshot against the blueprint requirements. 
- -**Blueprint Context:** -{{blueprint}} - -**Viewport:** {{viewport}} - -**Analysis Required:** -- Compare visible elements against blueprint specifications -- Check layout, spacing, and component positioning -- Identify any missing or broken UI elements -- Assess responsive design for the given viewport size -- Note any visual bugs or rendering issues - -Provide specific, actionable feedback focused on blueprint compliance.` - -const userPromptFormatter = (screenshotData: { viewport: { width: number; height: number }; }, blueprint: Blueprint) => { - const prompt = PROMPT_UTILS.replaceTemplateVariables(USER_PROMPT, { - blueprint: JSON.stringify(blueprint, null, 2), - viewport: `${screenshotData.viewport.width}x${screenshotData.viewport.height}` - }); - return PROMPT_UTILS.verifyPrompt(prompt); -} - -export class ScreenshotAnalysisOperation extends AgentOperation { - async execute( - input: ScreenshotAnalysisInput, - options: OperationOptions - ): Promise { - const { screenshotData } = input; - const { env, context, logger } = options; - try { - logger.info('Analyzing screenshot from preview', { - url: screenshotData.url, - viewport: screenshotData.viewport, - hasScreenshotData: !!screenshotData.screenshot, - screenshotDataLength: screenshotData.screenshot?.length || 0 - }); - - if (!screenshotData.screenshot) { - throw new Error('No screenshot data available for analysis'); - } - - // Create multi-modal messages - const messages = [ - createSystemMessage(SYSTEM_PROMPT), - createMultiModalUserMessage( - userPromptFormatter(screenshotData, context.blueprint), - screenshotData.screenshot, // The base64 data URL or image URL - 'high' // Use high detail for better analysis - ) - ]; - - const { object: analysisResult } = await executeInference({ - env: env, - messages, - schema: ScreenshotAnalysisSchema, - agentActionName: 'screenshotAnalysis', - context: options.inferenceContext, - retryLimit: 3 - }); - - if (!analysisResult) { - logger.warn('Screenshot analysis returned no result'); - throw new Error('No analysis result'); - } - - logger.info('Screenshot analysis completed', { - hasIssues: analysisResult.hasIssues, - issueCount: analysisResult.issues.length, - matchesBlueprint: analysisResult.uiCompliance.matchesBlueprint - }); - - // Log detected UI issues - if (analysisResult.hasIssues) { - logger.warn('UI issues detected in screenshot', { - issues: analysisResult.issues, - deviations: analysisResult.uiCompliance.deviations - }); - } - - return analysisResult; - } catch (error) { - OperationError.logAndThrow(logger, "screenshot analysis", error); - } - } -} \ No newline at end of file diff --git a/worker/agents/operations/SimpleCodeGeneration.ts b/worker/agents/operations/SimpleCodeGeneration.ts new file mode 100644 index 00000000..fb572ea5 --- /dev/null +++ b/worker/agents/operations/SimpleCodeGeneration.ts @@ -0,0 +1,280 @@ +import { FileConceptType, FileOutputType } from '../schemas'; +import { createUserMessage, createSystemMessage } from '../inferutils/common'; +import { executeInference } from '../inferutils/infer'; +import { PROMPT_UTILS } from '../prompts'; +import { AgentOperation, getSystemPromptWithProjectContext, OperationOptions } from './common'; +import { SCOFFormat, SCOFParsingState } from '../output-formats/streaming-formats/scof'; +import { CodeGenerationStreamingState } from '../output-formats/streaming-formats/base'; +import { FileProcessing } from '../domain/pure/FileProcessing'; +import { CodeSerializerType } from '../utils/codeSerializers'; +import { GenerationContext 
} from '../domain/values/GenerationContext'; +import { FileState } from '../core/state'; + +export interface SimpleCodeGenerationInputs { + phaseName: string; + phaseDescription: string; + requirements: string[]; + files: FileConceptType[]; + fileGeneratingCallback?: (filePath: string, filePurpose: string) => void; + fileChunkGeneratedCallback?: (filePath: string, chunk: string, format: 'full_content' | 'unified_diff') => void; + fileClosedCallback?: (file: FileOutputType, message: string) => void; +} + +export interface SimpleCodeGenerationOutputs { + files: FileOutputType[]; +} + +const SYSTEM_PROMPT = `You are an expert Cloudflare developer specializing in Cloudflare Workers and Workflows. + +Your task is to generate production-ready code based on the provided specifications. + +## Original User Request +{{userQuery}} + +## Project Context +{{projectContext}} + +## Template Information +{{template}} + +## Previously Generated Files +{{existingFiles}} + +## Critical Guidelines +- Write clean, type-safe TypeScript code +- Follow best practices for the specific project type +- For Workflows: use WorkflowEntrypoint, step.do(), step.sleep() patterns +- For Workers: use standard Worker patterns with Request/Response +- Ensure all imports are correct +- Add proper error handling +- Include JSDoc comments where helpful +- Consider the context of existing files when generating new code +- Ensure new code integrates well with previously generated files`; + +const USER_PROMPT = `Generate code for the following phase: + +**Phase Name:** {{phaseName}} +**Description:** {{phaseDescription}} + +**Requirements:** +{{requirements}} + +**Files to Generate:** +{{files}} + +Generate complete, production-ready code for all specified files.`; + +const README_GENERATION_PROMPT = ` +Generate a comprehensive README.md file for this project based on the provided blueprint and template information. +The README should be professional, well-structured, and provide clear instructions for users and developers. + + + +- Create a professional README with proper markdown formatting +- Do not add any images or screenshots +- Include project title, description, and key features from the blueprint +- Add technology stack section based on the template dependencies +- Include setup/installation instructions using bun (not npm/yarn) +- Add usage examples and development instructions +- Include a deployment section with Cloudflare-specific instructions +- **IMPORTANT**: Add a \`[cloudflarebutton]\` placeholder near the top and another in the deployment section for the Cloudflare deploy button. Write the **EXACT** string except the backticks and DON'T enclose it in any other button or anything. We will replace it with https://deploy.workers.cloudflare.com/?url=\${repositoryUrl\} when the repository is created. +- Structure the content clearly with appropriate headers and sections +- Be concise but comprehensive - focus on essential information +- Use professional tone suitable for open source projects + + +Generate the complete README.md content in markdown format. +Do not provide any additional text or explanation. +All your output will be directly saved in the README.md file. +Do not provide and markdown fence \`\`\` \`\`\` around the content either! Just pure raw markdown content!`; + +const formatRequirements = (requirements: string[]): string => { + return requirements.map((req, index) => `${index + 1}. 
${req}`).join('\n'); +}; + +const formatFiles = (files: FileConceptType[]): string => { + return files.map((file, index) => { + return `${index + 1}. **${file.path}** + Purpose: ${file.purpose} + ${file.changes ? `Changes needed: ${file.changes}` : 'Create new file'}`; + }).join('\n\n'); +}; + +const formatExistingFiles = (allFiles: FileState[]): string => { + if (!allFiles || allFiles.length === 0) { + return 'No files generated yet. This is the first generation phase.'; + } + + // Convert FileState[] to FileOutputType[] format for serializer + const filesForSerializer: FileOutputType[] = allFiles.map(file => ({ + filePath: file.filePath, + fileContents: file.fileContents, + filePurpose: file.filePurpose || 'Previously generated file' + })); + + // Use existing serializer from PROMPT_UTILS + return PROMPT_UTILS.serializeFiles(filesForSerializer, CodeSerializerType.SIMPLE); +}; + +export class SimpleCodeGenerationOperation extends AgentOperation< + GenerationContext, + SimpleCodeGenerationInputs, + SimpleCodeGenerationOutputs +> { + async execute( + inputs: SimpleCodeGenerationInputs, + options: OperationOptions + ): Promise { + const { phaseName, phaseDescription, requirements, files } = inputs; + const { env, logger, context, inferenceContext } = options; + + logger.info('Generating code via simple code generation', { + phaseName, + phaseDescription, + fileCount: files.length, + requirementCount: requirements.length, + existingFilesCount: context.allFiles.length, + hasUserQuery: !!context.query, + hasTemplateDetails: !!context.templateDetails + }); + + // Build project context + const projectContext = context.templateDetails + ? PROMPT_UTILS.serializeTemplate(context.templateDetails) + : 'No template context available'; + + // Format existing files for context + const existingFilesContext = formatExistingFiles(context.allFiles); + + // Build system message with full context + const systemPrompt = PROMPT_UTILS.replaceTemplateVariables(SYSTEM_PROMPT, { + userQuery: context.query || 'No specific user query available', + projectContext, + template: context.templateDetails ? 
PROMPT_UTILS.serializeTemplate(context.templateDetails) : 'No template information', + existingFiles: existingFilesContext + }); + + // Build user message with requirements + const userPrompt = PROMPT_UTILS.replaceTemplateVariables(USER_PROMPT, { + phaseName, + phaseDescription, + requirements: formatRequirements(requirements), + files: formatFiles(files) + }); + + const codeGenerationFormat = new SCOFFormat(); + const messages = [ + createSystemMessage(systemPrompt), + createUserMessage(userPrompt + codeGenerationFormat.formatInstructions()) + ]; + + // Initialize streaming state + const streamingState: CodeGenerationStreamingState = { + accumulator: '', + completedFiles: new Map(), + parsingState: {} as SCOFParsingState + }; + + const generatedFiles: FileOutputType[] = []; + + // Execute inference with streaming + await executeInference({ + env, + context: inferenceContext, + agentActionName: 'phaseImplementation', // Use existing phase implementation config + messages, + stream: { + chunk_size: 256, + onChunk: (chunk: string) => { + codeGenerationFormat.parseStreamingChunks( + chunk, + streamingState, + // File generation started + (filePath: string) => { + logger.info(`Starting generation of file: ${filePath}`); + if (inputs.fileGeneratingCallback) { + const purpose = files.find(f => f.path === filePath)?.purpose || 'Generated file'; + inputs.fileGeneratingCallback(filePath, purpose); + } + }, + // Stream file content chunks + (filePath: string, fileChunk: string, format: 'full_content' | 'unified_diff') => { + if (inputs.fileChunkGeneratedCallback) { + inputs.fileChunkGeneratedCallback(filePath, fileChunk, format); + } + }, + // onFileClose callback + (filePath: string) => { + logger.info(`Completed generation of file: ${filePath}`); + const completedFile = streamingState.completedFiles.get(filePath); + if (!completedFile) { + logger.error(`Completed file not found: ${filePath}`); + return; + } + + // Process the file contents + const originalContents = context.allFiles.find(f => f.filePath === filePath)?.fileContents || ''; + completedFile.fileContents = FileProcessing.processGeneratedFileContents( + completedFile, + originalContents, + logger + ); + + const generatedFile: FileOutputType = { + ...completedFile, + filePurpose: files.find(f => f.path === filePath)?.purpose || 'Generated file' + }; + + generatedFiles.push(generatedFile); + + if (inputs.fileClosedCallback) { + inputs.fileClosedCallback(generatedFile, `Completed generation of ${filePath}`); + } + } + ); + } + } + }); + + logger.info('Code generation completed', { + fileCount: generatedFiles.length + }); + + return { + files: generatedFiles + }; + } + + async generateReadme(options: OperationOptions): Promise { + const { env, logger, context } = options; + logger.info("Generating README.md for the project"); + + try { + let readmePrompt = README_GENERATION_PROMPT; + const messages = [...getSystemPromptWithProjectContext(SYSTEM_PROMPT, context, CodeSerializerType.SCOF), createUserMessage(readmePrompt)]; + + const results = await executeInference({ + env: env, + messages, + agentActionName: "projectSetup", + context: options.inferenceContext, + }); + + if (!results || !results.string) { + logger.error('Failed to generate README.md content'); + throw new Error('Failed to generate README.md content'); + } + + logger.info('Generated README.md content successfully'); + + return { + filePath: 'README.md', + fileContents: results.string, + filePurpose: 'Project documentation and setup instructions' + }; + } catch (error) { + 
logger.error("Error generating README:", error); + throw error; + } + } +} diff --git a/worker/agents/operations/UserConversationProcessor.ts b/worker/agents/operations/UserConversationProcessor.ts index 43ebaafe..4cc000e8 100644 --- a/worker/agents/operations/UserConversationProcessor.ts +++ b/worker/agents/operations/UserConversationProcessor.ts @@ -1,7 +1,6 @@ import { ConversationalResponseType } from "../schemas"; -import { createAssistantMessage, createUserMessage, createMultiModalUserMessage, MessageRole, mapImagesInMultiModalMessage } from "../inferutils/common"; +import { createAssistantMessage, createUserMessage, createMultiModalUserMessage } from "../inferutils/common"; import { executeInference } from "../inferutils/infer"; -import type { ChatCompletionMessageFunctionToolCall } from 'openai/resources'; import { WebSocketMessageResponses } from "../constants"; import { WebSocketMessageData } from "../../api/websocketTypes"; import { AgentOperation, OperationOptions, getSystemPromptWithProjectContext } from "../operations/common"; @@ -15,21 +14,17 @@ import { PROMPT_UTILS } from "../prompts"; import { RuntimeError } from "worker/services/sandbox/sandboxTypes"; import { CodeSerializerType } from "../utils/codeSerializers"; import { ConversationState } from "../inferutils/common"; -import { downloadR2Image, imagesToBase64, imageToBase64 } from "worker/utils/images"; +import { imagesToBase64 } from "worker/utils/images"; import { ProcessedImageAttachment } from "worker/types/image-attachment"; import { AbortError, InferResponseString } from "../inferutils/core"; +import { GenerationContext } from "../domain/values/GenerationContext"; +import { compactifyContext } from "../utils/conversationCompactifier"; +import { ChatCompletionMessageFunctionToolCall } from "openai/resources"; +import { prepareMessagesForInference } from "../utils/common"; // Constants const CHUNK_SIZE = 64; -// Compactification thresholds -const COMPACTIFICATION_CONFIG = { - MAX_TURNS: 40, // Trigger after 50 conversation turns - MAX_ESTIMATED_TOKENS: 100000, - PRESERVE_RECENT_MESSAGES: 10, // Always keep last 10 messages uncompacted - CHARS_PER_TOKEN: 4, // Rough estimation: 1 token β‰ˆ 4 characters -} as const; - export interface ToolCallStatusArgs { name: string; status: 'start' | 'success' | 'error'; @@ -304,36 +299,9 @@ function buildUserMessageWithContext(userMessage: string, errors: RuntimeError[] } } -async function prepareMessagesForInference(env: Env, messages: ConversationMessage[]) : Promise { - // For each multimodal image, convert the image to base64 data url - const processedMessages = await Promise.all(messages.map(m => { - return mapImagesInMultiModalMessage(structuredClone(m), async (c) => { - let url = c.image_url.url; - if (url.includes('base64,')) { - return c; - } - const image = await downloadR2Image(env, url); - return { - ...c, - image_url: { - ...c.image_url, - url: await imageToBase64(env, image) - }, - }; - }); - })); - return processedMessages; -} - -export class UserConversationProcessor extends AgentOperation { - /** - * Remove system context tags from message content - */ - private stripSystemContext(text: string): string { - return text.replace(/[\s\S]*?<\/system_context>\n?/gi, '').trim(); - } +export class UserConversationProcessor extends AgentOperation { - async execute(inputs: UserConversationInputs, options: OperationOptions): Promise { + async execute(inputs: UserConversationInputs, options: OperationOptions): Promise { const { env, logger, context, agent } = options; const 
{ userMessage, conversationState, errors, images, projectUpdates } = inputs; logger.info("Processing user message", { @@ -372,18 +340,18 @@ export class UserConversationProcessor extends AgentOperation inputs.conversationResponseCallback(chunk, aiConversationId, true) ).map(td => ({ ...td, - onStart: (args: Record) => toolCallRenderer({ name: td.function.name, status: 'start', args }), - onComplete: (args: Record, result: unknown) => toolCallRenderer({ + onStart: (_tc: ChatCompletionMessageFunctionToolCall, args: Record) => Promise.resolve(toolCallRenderer({ name: td.function.name, status: 'start', args })), + onComplete: (_tc: ChatCompletionMessageFunctionToolCall, args: Record, result: unknown) => Promise.resolve(toolCallRenderer({ name: td.function.name, status: 'success', args, result: typeof result === 'string' ? result : JSON.stringify(result) - }) + })) })); const runningHistory = await prepareMessagesForInference(env, conversationState.runningHistory); - const compactHistory = await this.compactifyContext(runningHistory, env, options, toolCallRenderer, logger); + const compactHistory = await compactifyContext(runningHistory, env, options, toolCallRenderer, logger); if (compactHistory.length !== runningHistory.length) { logger.info("Conversation history compactified", { fullHistoryLength: conversationState.fullHistory.length, @@ -520,290 +488,6 @@ export class UserConversationProcessor extends AgentOperation m.role === 'user').length; - } - - /** - * Convert character count to estimated token count - */ - private tokensFromChars(chars: number): number { - return Math.ceil(chars / COMPACTIFICATION_CONFIG.CHARS_PER_TOKEN); - } - - /** - * Estimate token count for messages (4 chars β‰ˆ 1 token) - */ - private estimateTokens(messages: ConversationMessage[]): number { - let totalChars = 0; - - for (const msg of messages) { - if (typeof msg.content === 'string') { - totalChars += msg.content.length; - } else if (Array.isArray(msg.content)) { - // Multi-modal content - for (const part of msg.content) { - if (part.type === 'text') { - totalChars += part.text.length; - } else if (part.type === 'image_url') { - // Images use ~1000 tokens each (approximate) - totalChars += 4000; - } - } - } - - // Account for tool calls - if (msg.tool_calls && Array.isArray(msg.tool_calls)) { - for (const tc of msg.tool_calls as ChatCompletionMessageFunctionToolCall[]) { - // Function name - if (tc.function?.name) { - totalChars += tc.function.name.length; - } - // Function arguments (JSON string) - if (tc.function?.arguments) { - totalChars += tc.function.arguments.length; - } - // Tool call structure overhead (id, type, etc.) 
- rough estimate - totalChars += 50; - } - } - } - - return this.tokensFromChars(totalChars); - } - - /** - * Check if compactification should be triggered - */ - private shouldCompactify(messages: ConversationMessage[]): { - should: boolean; - reason?: 'turns' | 'tokens'; - turns: number; - estimatedTokens: number; - } { - const turns = this.countTurns(messages); - const estimatedTokens = this.estimateTokens(messages); - - console.log(`[UserConversationProcessor] shouldCompactify: turns=${turns}, estimatedTokens=${estimatedTokens}`); - - if (turns >= COMPACTIFICATION_CONFIG.MAX_TURNS) { - return { should: true, reason: 'turns', turns, estimatedTokens }; - } - - if (estimatedTokens >= COMPACTIFICATION_CONFIG.MAX_ESTIMATED_TOKENS) { - return { should: true, reason: 'tokens', turns, estimatedTokens }; - } - - return { should: false, turns, estimatedTokens }; - } - - /** - * Find the last valid turn boundary before the preserve threshold - * A turn boundary is right before a user message - */ - private findTurnBoundary(messages: ConversationMessage[], preserveCount: number): number { - // Start from the point where we want to split - const targetSplitIndex = messages.length - preserveCount; - - if (targetSplitIndex <= 0) { - return 0; - } - - // Walk backwards to find the nearest user message boundary - for (let i = targetSplitIndex; i >= 0; i--) { - if (messages[i].role === 'user') { - // Split right before this user message to preserve turn integrity - return i; - } - } - - // If no user message found, don't split - return 0; - } - - /** - * Generate LLM-powered conversation summary - * Sends the full conversation history as-is to the LLM with a summarization instruction - */ - private async generateConversationSummary( - messages: ConversationMessage[], - env: Env, - options: OperationOptions, - logger: StructuredLogger - ): Promise { - try { - // Prepare summarization instruction - const summarizationInstruction = createUserMessage( - `Please provide a comprehensive summary of the entire conversation above. Your summary should: - -1. Capture the key features, changes, and fixes discussed -2. Note any recurring issues or important bugs mentioned -3. Highlight the current state of the project -4. Preserve critical technical details and decisions made -5. Maintain chronological flow of major changes and developments - -Format your summary as a cohesive, well-structured narrative. Focus on what matters for understanding the project's evolution and current state. - -Provide the summary now:` - ); - - logger.info('Generating conversation summary via LLM', { - messageCount: messages.length, - estimatedInputTokens: this.estimateTokens(messages) - }); - - // Send full conversation history + summarization request - const summaryResult = await executeInference({ - env, - messages: [...messages, summarizationInstruction], - agentActionName: 'conversationalResponse', - context: options.inferenceContext, - }); - - const summary = summaryResult.string.trim(); - - logger.info('Generated conversation summary', { - summaryLength: summary.length, - summaryTokens: this.tokensFromChars(summary.length) - }); - - return summary; - } catch (error) { - logger.error('Failed to generate conversation summary', { error }); - // Fallback to simple concatenation - return messages - .map(m => { - const content = typeof m.content === 'string' ? 
m.content : '[complex content]'; - return `${m.role}: ${this.stripSystemContext(content).substring(0, 200)}`; - }) - .join('\n') - .substring(0, 2000); - } - } - - /** - * Intelligent conversation compactification system - * - * Strategy: - * - Monitors turns (user message to user message) and token count - * - Triggers at 50 turns OR ~100k tokens - * - Uses LLM to generate intelligent summary - * - Preserves last 10 messages in full - * - Respects turn boundaries to avoid tool call fragmentation - */ - async compactifyContext( - runningHistory: ConversationMessage[], - env: Env, - options: OperationOptions, - toolCallRenderer: RenderToolCall, - logger: StructuredLogger - ): Promise { - try { - // Check if compactification is needed on the running history - const analysis = this.shouldCompactify(runningHistory); - - if (!analysis.should) { - // No compactification needed - return runningHistory; - } - - logger.info('Compactification triggered', { - reason: analysis.reason, - turns: analysis.turns, - estimatedTokens: analysis.estimatedTokens, - totalRunningMessages: runningHistory.length, - }); - - // Currently compactification would be done on the running history, but should we consider doing it on the full history? - - // Find turn boundary for splitting - const splitIndex = this.findTurnBoundary( - runningHistory, - COMPACTIFICATION_CONFIG.PRESERVE_RECENT_MESSAGES - ); - - // Safety check: ensure we have something to compactify - if (splitIndex <= 0) { - logger.warn('Cannot find valid turn boundary for compactification, preserving all messages'); - return runningHistory; - } - - // Split messages - const messagesToSummarize = runningHistory.slice(0, splitIndex); - const recentMessages = runningHistory.slice(splitIndex); - - logger.info('Compactification split determined', { - summarizeCount: messagesToSummarize.length, - preserveCount: recentMessages.length, - splitIndex - }); - - toolCallRenderer({ - name: 'summarize_history', - status: 'start', - args: { - messageCount: messagesToSummarize.length, - recentCount: recentMessages.length - } - }); - - // Generate LLM-powered summary - const summary = await this.generateConversationSummary( - messagesToSummarize, - env, - options, - logger - ); - - // Create summary message - its conversationId will be the archive ID - const summarizedTurns = this.countTurns(messagesToSummarize); - const archiveId = `archive-${Date.now()}-${IdGenerator.generateConversationId()}`; - - const summaryMessage: ConversationMessage = { - role: 'assistant' as MessageRole, - content: `[Conversation History Summary: ${messagesToSummarize.length} messages, ${summarizedTurns} turns]\n[Archive ID: ${archiveId}]\n\n${summary}`, - conversationId: archiveId - }; - - toolCallRenderer({ - name: 'summarize_history', - status: 'success', - args: { - summary: summary.substring(0, 200) + '...', - archiveId - } - }); - - // Return summary + recent messages - const compactifiedHistory = [summaryMessage, ...recentMessages]; - - logger.info('Compactification completed with archival', { - originalMessageCount: runningHistory.length, - newMessageCount: compactifiedHistory.length, - compressionRatio: (compactifiedHistory.length / runningHistory.length).toFixed(2), - estimatedTokenSavings: analysis.estimatedTokens - this.estimateTokens(compactifiedHistory), - archivedMessageCount: messagesToSummarize.length, - archiveId - }); - - return compactifiedHistory; - - } catch (error) { - logger.error('Compactification failed, preserving original messages', { error }); - - // Safe fallback: if 
we have too many messages, keep recent ones
-            if (runningHistory.length > COMPACTIFICATION_CONFIG.PRESERVE_RECENT_MESSAGES * 3) {
-                const fallbackCount = COMPACTIFICATION_CONFIG.PRESERVE_RECENT_MESSAGES * 2;
-                logger.warn(`Applying emergency fallback: keeping last ${fallbackCount} messages`);
-                return runningHistory.slice(-fallbackCount);
-            }
-
-            return runningHistory;
-        }
-    }
 
     processProjectUpdates<T extends WebSocketMessageType>(updateType: T, _data: WebSocketMessageData<T>, logger: StructuredLogger) : ConversationMessage[] {
diff --git a/worker/agents/operations/common.ts b/worker/agents/operations/common.ts
index 1ed88286..baae2fc2 100644
--- a/worker/agents/operations/common.ts
+++ b/worker/agents/operations/common.ts
@@ -5,7 +5,7 @@ import { InferenceContext } from "../inferutils/config.types";
 import { createUserMessage, createSystemMessage, createAssistantMessage } from "../inferutils/common";
 import { generalSystemPromptBuilder, USER_PROMPT_FORMATTER } from "../prompts";
 import { CodeSerializerType } from "../utils/codeSerializers";
-import { CodingAgentInterface } from "../services/implementations/CodingAgent";
+import { ICodingAgent } from "../services/interfaces/ICodingAgent";
 
 export function getSystemPromptWithProjectContext(
     systemPrompt: string,
@@ -23,9 +23,9 @@ export function getSystemPromptWithProjectContext(
         })),
         createUserMessage(
             USER_PROMPT_FORMATTER.PROJECT_CONTEXT(
-                context.getCompletedPhases(),
+                GenerationContext.getCompletedPhases(context),
                 allFiles,
-                context.getFileTree(),
+                GenerationContext.getFileTree(context),
                 commandsHistory,
                 serializerType
             )
@@ -35,18 +35,32 @@
     return messages;
 }
 
-export interface OperationOptions {
+/**
+ * Operation options with context type constraint
+ * @template TContext - Context type (defaults to GenerationContext for universal operations)
+ */
+export interface OperationOptions<TContext extends GenerationContext = GenerationContext> {
     env: Env;
     agentId: string;
-    context: GenerationContext;
+    context: TContext;
     logger: StructuredLogger;
     inferenceContext: InferenceContext;
-    agent: CodingAgentInterface;
+    agent: ICodingAgent;
 }
 
-export abstract class AgentOperation<InputType, OutputType> {
+/**
+ * Base class for agent operations with type-safe context enforcement
+ * @template TContext - Required context type (defaults to GenerationContext)
+ * @template TInput - Operation input type
+ * @template TOutput - Operation output type
+ */
+export abstract class AgentOperation<
+    TContext extends GenerationContext = GenerationContext,
+    TInput = unknown,
+    TOutput = unknown
+> {
     abstract execute(
-        inputs: InputType,
-        options: OperationOptions
-    ): Promise<OutputType>;
+        inputs: TInput,
+        options: OperationOptions<TContext>
+    ): Promise<TOutput>;
 }
\ No newline at end of file
diff --git a/worker/agents/planning/blueprint.ts b/worker/agents/planning/blueprint.ts
index 5f3148f9..0b7cacb8 100644
--- a/worker/agents/planning/blueprint.ts
+++ b/worker/agents/planning/blueprint.ts
@@ -1,7 +1,7 @@
 import { TemplateDetails, TemplateFileSchema } from '../../services/sandbox/sandboxTypes'; // Import the type
 import { STRATEGIES, PROMPT_UTILS, generalSystemPromptBuilder } from '../prompts';
 import { executeInference } from '../inferutils/infer';
-import { Blueprint, BlueprintSchema, TemplateSelection } from '../schemas';
+import { PhasicBlueprint, AgenticBlueprint, PhasicBlueprintSchema, AgenticBlueprintSchema, TemplateSelection, Blueprint } from '../schemas';
 import { createLogger } from '../../logger';
 import { createSystemMessage, createUserMessage, createMultiModalUserMessage } from '../inferutils/common';
 import { InferenceContext
} from '../inferutils/config.types'; @@ -10,10 +10,78 @@ import z from 'zod'; import { imagesToBase64 } from 'worker/utils/images'; import { ProcessedImageAttachment } from 'worker/types/image-attachment'; import { getTemplateImportantFiles } from 'worker/services/sandbox/utils'; +import { ProjectType } from '../core/types'; const logger = createLogger('Blueprint'); -const SYSTEM_PROMPT = ` +const SIMPLE_SYSTEM_PROMPT = ` + You are a Senior Software Architect at Cloudflare with expertise in rapid prototyping and modern web development. + Your expertise lies in creating concise, actionable blueprints for building web applications quickly and efficiently. + + + + Create a high-level blueprint for a web application based on the client's request. + The project will be built on Cloudflare Workers and will start from a provided template. + Focus on a clear, concise design that captures the core requirements without over-engineering. + Enhance the user's request thoughtfully - be creative but practical. + + + + Design the product described by the client and provide: + - A professional, memorable project name + - A brief but clear description of what the application does + - A simple color palette (2-3 base colors) for visual identity + - Essential frameworks and libraries needed (beyond the template) + - A high-level step-by-step implementation plan + + Keep it concise - this is a simplified blueprint focused on rapid development. + Build upon the provided template's existing structure and components. + + + + ## Core Principles + β€’ **Simplicity First:** Keep the design straightforward and achievable + β€’ **Template-Aware:** Leverage existing components and patterns from the template + β€’ **Essential Only:** Include only the frameworks/libraries that are truly needed + β€’ **Clear Plan:** Provide a logical step-by-step implementation sequence + + ## Color Palette + β€’ Choose 2-3 base RGB colors that work well together + β€’ Consider the application's purpose and mood + β€’ Ensure good contrast for accessibility + β€’ Only specify base colors, not shades + + ## Frameworks & Dependencies + β€’ Build on the template's existing dependencies + β€’ Only add libraries that are essential for the requested features + β€’ Prefer batteries-included libraries that work out-of-the-box + β€’ No libraries requiring API keys or complex configuration + + ## Implementation Plan + β€’ Break down the work into 5-8 logical steps + β€’ Each step should be a clear, achievable milestone + β€’ Order steps by dependency and priority + β€’ Keep descriptions brief but actionable + + + +{{template}} + + +**SHADCN COMPONENTS, Error boundary components and use-toast hook ARE PRESENT AND INSTALLED BUT EXCLUDED FROM THESE FILES DUE TO CONTEXT SPAM** +{{filesText}} + + + +**Use these files as a reference for the file structure, components and hooks that are present** +{{fileTreeText}} + + +Preinstalled dependencies: +{{dependencies}} +`; + +const PHASIC_SYSTEM_PROMPT = ` You are a meticulous and forward-thinking Senior Software Architect and Product Manager at Cloudflare with extensive expertise in modern UI/UX design and visual excellence. Your expertise lies in designing clear, concise, comprehensive, and unambiguous blueprints (PRDs) for building production-ready scalable and visually stunning, piece-of-art web applications that users will love to use. 
@@ -158,15 +226,36 @@ Preinstalled dependencies:
 {{dependencies}}
 `;
 
-export interface BlueprintGenerationArgs {
+const PROJECT_TYPE_BLUEPRINT_GUIDANCE: Record<ProjectType, string> = {
+    app: '',
+    workflow: `## Workflow Project Context
+- Focus entirely on backend flows running on Cloudflare Workers (no UI/screens)
+- Describe REST endpoints, scheduled jobs, queue consumers, Durable Objects, and data storage bindings in detail
+- User flow should outline request/response shapes and operational safeguards
+- Implementation roadmap must mention testing strategies (unit tests, integration tests) and deployment validation steps.`,
+    presentation: `## Presentation Project Context
+- Design a Spectacle-based slide deck with a cohesive narrative arc (intro, problem, solution, showcase, CTA)
+- Produce visually rich slides with precise layout, typography, imagery, and animation guidance
+- User flow should actually be a \"story flow\" describing slide order, transitions, interactions, and speaker cues
+- Implementation roadmap must reference Spectacle features (themes, deck index, slide components, animations, print/external export mode)
+- Prioritize static data and storytelling polish; avoid backend complexity entirely.`,
+    general: `## Objective Context
+- Start from scratch; choose the most suitable representation for the request.
+- If the outcome is documentation/specs/notes, prefer Markdown/MDX and do not assume any runtime.
+- If a slide deck is helpful, outline the deck structure and content. Avoid assuming a specific file layout; keep the plan flexible.
+- Keep dependencies minimal; introduce runtime only when clearly needed.`,
+};
+
+const getProjectTypeGuidance = (projectType: ProjectType): string =>
+    PROJECT_TYPE_BLUEPRINT_GUIDANCE[projectType] || '';
+
+interface BaseBlueprintGenerationArgs {
     env: Env;
     inferenceContext: InferenceContext;
     query: string;
     language: string;
     frameworks: string[];
-    // Add optional template info
-    templateDetails: TemplateDetails;
-    templateMetaInfo: TemplateSelection;
+    projectType: ProjectType;
     images?: ProcessedImageAttachment[];
     stream?: {
         chunk_size: number;
@@ -174,26 +263,50 @@
     };
 }
 
+export interface PhasicBlueprintGenerationArgs extends BaseBlueprintGenerationArgs {
+    templateDetails: TemplateDetails;
+    templateMetaInfo: TemplateSelection;
+}
+
+export interface AgenticBlueprintGenerationArgs extends BaseBlueprintGenerationArgs {
+    templateDetails?: TemplateDetails;
+    templateMetaInfo?: TemplateSelection;
+}
+
 /**
  * Generate a blueprint for the application based on user prompt
  */
-// Update function signature and system prompt
-export async function generateBlueprint({ env, inferenceContext, query, language, frameworks, templateDetails, templateMetaInfo, images, stream }: BlueprintGenerationArgs): Promise<Blueprint> {
+export async function generateBlueprint(args: PhasicBlueprintGenerationArgs): Promise<PhasicBlueprint>;
+export async function generateBlueprint(args: AgenticBlueprintGenerationArgs): Promise<AgenticBlueprint>;
+export async function generateBlueprint(
+    args: PhasicBlueprintGenerationArgs | AgenticBlueprintGenerationArgs
+): Promise<PhasicBlueprint | AgenticBlueprint> {
+    const { env, inferenceContext, query, language, frameworks, templateDetails, templateMetaInfo, images, stream, projectType } = args;
+    const isAgentic = !templateDetails || !templateMetaInfo;
+
     try {
-        logger.info("Generating application blueprint", { query, queryLength: query.length, imagesCount: images?.length || 0 });
-        logger.info(templateDetails ?
`Using template: ${templateDetails.name}` : "Not using a template."); - - // --------------------------------------------------------------------------- - // Build the SYSTEM prompt for blueprint generation - // --------------------------------------------------------------------------- + logger.info(`Generating ${isAgentic ? 'agentic' : 'phasic'} blueprint`, { query, queryLength: query.length, imagesCount: images?.length || 0 }); + if (templateDetails) logger.info(`Using template: ${templateDetails.name}`); - const filesText = TemplateRegistry.markdown.serialize( - { files: getTemplateImportantFiles(templateDetails).filter(f => !f.filePath.includes('package.json')) }, - z.object({ files: z.array(TemplateFileSchema) }) - ); - - const fileTreeText = PROMPT_UTILS.serializeTreeNodes(templateDetails.fileTree); - const systemPrompt = SYSTEM_PROMPT.replace('{{filesText}}', filesText).replace('{{fileTreeText}}', fileTreeText); + // Select prompt and schema based on behavior type + const systemPromptTemplate = isAgentic ? SIMPLE_SYSTEM_PROMPT : PHASIC_SYSTEM_PROMPT; + const schema = isAgentic ? AgenticBlueprintSchema : PhasicBlueprintSchema; + + // Build system prompt with template context (if provided) + let systemPrompt = systemPromptTemplate; + if (templateDetails) { + const filesText = TemplateRegistry.markdown.serialize( + { files: getTemplateImportantFiles(templateDetails).filter(f => !f.filePath.includes('package.json')) }, + z.object({ files: z.array(TemplateFileSchema) }) + ); + const fileTreeText = PROMPT_UTILS.serializeTreeNodes(templateDetails.fileTree); + systemPrompt = systemPrompt.replace('{{filesText}}', filesText).replace('{{fileTreeText}}', fileTreeText); + } + const projectGuidance = getProjectTypeGuidance(projectType); + if (projectGuidance) { + systemPrompt = `${systemPrompt}\n\n${projectGuidance}`; + } + const systemPromptMessage = createSystemMessage(generalSystemPromptBuilder(systemPrompt, { query, templateDetails, @@ -201,7 +314,7 @@ export async function generateBlueprint({ env, inferenceContext, query, language templateMetaInfo, blueprint: undefined, language, - dependencies: templateDetails.deps, + dependencies: templateDetails?.deps, })); const userMessage = images && images.length > 0 @@ -231,21 +344,18 @@ export async function generateBlueprint({ env, inferenceContext, query, language env, messages, agentActionName: "blueprint", - schema: BlueprintSchema, + schema, context: inferenceContext, - stream: stream, + stream, }); - if (results) { - // Filter and remove any pdf files - results.initialPhase.files = results.initialPhase.files.filter(f => !f.path.endsWith('.pdf')); + // Filter out PDF files from phasic blueprints + if (results && !isAgentic) { + const phasicResults = results as PhasicBlueprint; + phasicResults.initialPhase.files = phasicResults.initialPhase.files.filter(f => !f.path.endsWith('.pdf')); } - // // A hack - // if (results?.initialPhase) { - // results.initialPhase.lastPhase = false; - // } - return results as Blueprint; + return results as PhasicBlueprint | AgenticBlueprint; } catch (error) { logger.error("Error generating blueprint:", error); throw error; diff --git a/worker/agents/planning/templateSelector.ts b/worker/agents/planning/templateSelector.ts index c2bea73f..bd202316 100644 --- a/worker/agents/planning/templateSelector.ts +++ b/worker/agents/planning/templateSelector.ts @@ -1,47 +1,122 @@ import { createSystemMessage, createUserMessage, createMultiModalUserMessage } from '../inferutils/common'; -import { TemplateListResponse} from 
'../../services/sandbox/sandboxTypes'; +import { TemplateInfo } from '../../services/sandbox/sandboxTypes'; import { createLogger } from '../../logger'; import { executeInference } from '../inferutils/infer'; import { InferenceContext } from '../inferutils/config.types'; import { RateLimitExceededError, SecurityError } from 'shared/types/errors'; -import { TemplateSelection, TemplateSelectionSchema } from '../../agents/schemas'; +import { TemplateSelection, TemplateSelectionSchema, ProjectTypePredictionSchema } from '../../agents/schemas'; import { generateSecureToken } from 'worker/utils/cryptoUtils'; import type { ImageAttachment } from '../../types/image-attachment'; +import { ProjectType } from '../core/types'; const logger = createLogger('TemplateSelector'); interface SelectTemplateArgs { env: Env; query: string; - availableTemplates: TemplateListResponse['templates']; + projectType?: ProjectType | 'auto'; + availableTemplates: TemplateInfo[]; inferenceContext: InferenceContext; images?: ImageAttachment[]; } /** - * Uses AI to select the most suitable template for a given query. + * Predicts the project type from the user query */ -export async function selectTemplate({ env, query, availableTemplates, inferenceContext, images }: SelectTemplateArgs, retryCount: number = 3): Promise { - if (availableTemplates.length === 0) { - logger.info("No templates available for selection."); - return { selectedTemplateName: null, reasoning: "No templates were available to choose from.", useCase: null, complexity: null, styleSelection: null, projectName: '' }; - } - +async function predictProjectType( + env: Env, + query: string, + inferenceContext: InferenceContext, + images?: ImageAttachment[] +): Promise { try { - logger.info(`Asking AI to select a template for the ${retryCount} time`, { - query, - queryLength: query.length, - imagesCount: images?.length || 0, - availableTemplates: availableTemplates.map(t => t.name), - templateCount: availableTemplates.length + logger.info('Predicting project type from query', { queryLength: query.length }); + + const systemPrompt = `You are an Expert Project Type Classifier at Cloudflare. Your task is to analyze user requests and determine what type of project they want to build. 
+ +## PROJECT TYPES: + +**app** - Full-stack web applications +- Interactive websites with frontend and backend +- Dashboards, games, social platforms, e-commerce sites +- Any application requiring user interface and interactivity +- Examples: "Build a todo app", "Create a gaming dashboard", "Make a blog platform" + +**workflow** - Backend workflows and APIs +- Server-side logic without UI +- API endpoints, cron jobs, webhooks +- Data processing, automation tasks +- Examples: "Create an API to process payments", "Build a webhook handler", "Automate data sync" + +**presentation** - Slides and presentation decks +- Slide-based content for presentations +- Marketing decks, pitch decks, educational slides +- Visual storytelling with slides +- Examples: "Create slides about AI", "Make a product pitch deck", "Build a presentation on climate change" + +**general** - From-scratch content or mixed artifacts +- Docs/notes/specs in Markdown/MDX, or a slide deck initialized later +- Start with docs when users ask for write-ups; initialize slides if explicitly requested or clearly appropriate +- No sandbox/runtime unless slides/app are initialized by the builder +- Examples: "Write a spec", "Draft an outline and slides if helpful", "Create teaching materials" + +## RULES: +- Default to 'app' when uncertain +- Choose 'workflow' only when explicitly about APIs, automation, or backend-only tasks +- Choose 'presentation' only when explicitly about slides, decks, or presentations +- Choose 'general' for docs/notes/specs or when the user asks to start from scratch without a specific runtime template +- Consider the presence of UI/visual requirements as indicator for 'app' +- High confidence when keywords are explicit, medium/low when inferring`; + + const userPrompt = `**User Request:** "${query}" + +**Task:** Determine the project type and provide: +1. Project type (app, workflow, presentation, or general) +2. Reasoning for your classification +3. Confidence level (high, medium, low) + +Analyze the request carefully and classify accordingly.`; + + const userMessage = images && images.length > 0 + ? createMultiModalUserMessage( + userPrompt, + images.map(img => `data:${img.mimeType};base64,${img.base64Data}`), + 'high' + ) + : createUserMessage(userPrompt); + + const messages = [ + createSystemMessage(systemPrompt), + userMessage + ]; + + const { object: prediction } = await executeInference({ + env, + messages, + agentActionName: "templateSelection", // Reuse existing agent action + schema: ProjectTypePredictionSchema, + context: inferenceContext, + maxTokens: 500, + }); + + logger.info(`Predicted project type: ${prediction.projectType} (${prediction.confidence} confidence)`, { + reasoning: prediction.reasoning }); - const validTemplateNames = availableTemplates.map(t => t.name); + return prediction.projectType; - const templateDescriptions = availableTemplates.map((t, index) => - `### Template #${index + 1} \n Name - ${t.name} \n Language: ${t.language}, Frameworks: ${t.frameworks?.join(', ') || 'None'}\n Description: \`\`\`${t.description.selection}\`\`\`` - ).join('\n\n'); + } catch (error) { + logger.error("Error predicting project type, defaulting to 'app':", error); + return 'app'; + } +} - const systemPrompt = `You are an Expert Software Architect at Cloudflare specializing in template selection for rapid development. Your task is to select the most suitable starting template based on user requirements. 
+/** + * Generates appropriate system prompt based on project type + */ +function getSystemPromptForProjectType(projectType: ProjectType): string { + if (projectType === 'app') { + // Keep the detailed, original prompt for apps + return `You are an Expert Software Architect at Cloudflare specializing in template selection for rapid development. Your task is to select the most suitable starting template based on user requirements. ## SELECTION EXAMPLES: @@ -85,7 +160,83 @@ Reasoning: "Social template provides user interactions, content sharing, and com - Ignore misleading template names - analyze actual features - **ONLY** Choose from the list of available templates - Focus on functionality over naming conventions -- Provide clear, specific reasoning for selection` +- Provide clear, specific reasoning for selection`; + } + + // Simpler, more general prompts for workflow and presentation + return `You are an Expert Template Selector at Cloudflare. Your task is to select the most suitable ${projectType} template based on user requirements. + +## PROJECT TYPE: ${projectType.toUpperCase()} + +## SELECTION CRITERIA: +1. **Best Match** - Template that best fits the user's requirements +2. **Feature Alignment** - Templates with relevant functionality +3. **Minimal Modification** - Template requiring least customization + +## RULES: +- ALWAYS select a template from the available list +- Analyze template descriptions carefully +- **ONLY** Choose from the provided templates +- Provide clear reasoning for your selection`; +} + +/** + * Uses AI to select the most suitable template for a given query. + */ +export async function selectTemplate({ env, query, projectType, availableTemplates, inferenceContext, images }: SelectTemplateArgs, retryCount: number = 3): Promise { + // Step 1: Predict project type if 'auto' + const actualProjectType: ProjectType = projectType === 'auto' + ? await predictProjectType(env, query, inferenceContext, images) + : (projectType || 'app') as ProjectType; + + logger.info(`Using project type: ${actualProjectType}${projectType === 'auto' ? ' (auto-detected)' : ''}`); + + // Step 2: Filter templates by project type + const filteredTemplates = projectType === 'general' ? 
availableTemplates : availableTemplates.filter(t => t.projectType === actualProjectType);
+
+    if (filteredTemplates.length === 0) {
+        logger.warn(`No templates available for project type: ${actualProjectType}`);
+        return {
+            selectedTemplateName: null,
+            reasoning: `No templates were available for project type: ${actualProjectType}`,
+            useCase: null,
+            complexity: null,
+            styleSelection: null,
+            projectType: actualProjectType
+        };
+    }
+
+    // Step 3: Skip template selection if only 1 template for workflow/presentation
+    if ((actualProjectType === 'workflow' || actualProjectType === 'presentation') && filteredTemplates.length === 1) {
+        logger.info(`Only one ${actualProjectType} template available, auto-selecting: ${filteredTemplates[0].name}`);
+        return {
+            selectedTemplateName: filteredTemplates[0].name,
+            reasoning: `Auto-selected the only available ${actualProjectType} template`,
+            useCase: 'General',
+            complexity: 'simple',
+            styleSelection: null,
+            projectType: actualProjectType
+        };
+    }
+
+    try {
+        logger.info(`Asking AI to select a template (${retryCount} attempts remaining)`, {
+            query,
+            projectType: actualProjectType,
+            queryLength: query.length,
+            imagesCount: images?.length || 0,
+            availableTemplates: filteredTemplates.map(t => t.name),
+            templateCount: filteredTemplates.length
+        });
+
+        const validTemplateNames = filteredTemplates.map(t => t.name);
+
+        const templateDescriptions = filteredTemplates.map((t, index) =>
+            `### Template #${index + 1} \n Name - ${t.name} \n Language: ${t.language}, Frameworks: ${t.frameworks?.join(', ') || 'None'}\n Description: \`\`\`${t.description.selection}\`\`\``
+        ).join('\n\n');
+
+        // Step 4: Perform AI-based template selection
+        const systemPrompt = getSystemPromptForProjectType(actualProjectType as ProjectType);
 
         const userPrompt = `**User Request:** "${query}"
@@ -97,8 +248,7 @@ Template detail: ${templateDescriptions}
 
 **Task:** Select the most suitable template and provide:
 1. Template name (exact match from list)
 2. Clear reasoning for why it fits the user's needs
-3. Appropriate style for the project type. Try to come up with unique styles that might look nice and unique. Be creative about your choices. But don't pick brutalist all the time.
-4. Descriptive project name
+${actualProjectType === 'app' ? '3. Appropriate style for the project type. Try to come up with unique styles that might look nice and unique. Be creative about your choices. But don\'t pick brutalist all the time.' : ''}
 
 Analyze each template's features, frameworks, and architecture to make the best match. ${images && images.length > 0 ?
`\n**Note:** User provided ${images.length} image(s) - consider visual requirements and UI style from the images.` : ''} @@ -128,7 +278,12 @@ ENTROPY SEED: ${generateSecureToken(64)} - for unique results`; }); logger.info(`AI template selection result: ${selection.selectedTemplateName || 'None'}, Reasoning: ${selection.reasoning}`); - return selection; + + // Ensure projectType is set correctly + return { + ...selection, + projectType: actualProjectType + }; } catch (error) { logger.error("Error during AI template selection:", error); @@ -137,9 +292,9 @@ ENTROPY SEED: ${generateSecureToken(64)} - for unique results`; } if (retryCount > 0) { - return selectTemplate({ env, query, availableTemplates, inferenceContext, images }, retryCount - 1); + return selectTemplate({ env, query, projectType, availableTemplates, inferenceContext, images }, retryCount - 1); } // Fallback to no template selection in case of error - return { selectedTemplateName: null, reasoning: "An error occurred during the template selection process.", useCase: null, complexity: null, styleSelection: null, projectName: '' }; + return { selectedTemplateName: null, reasoning: "An error occurred during the template selection process.", useCase: null, complexity: null, styleSelection: null, projectType: actualProjectType }; } -} \ No newline at end of file +} diff --git a/worker/agents/prompts.ts b/worker/agents/prompts.ts index ce292c87..3410b6ae 100644 --- a/worker/agents/prompts.ts +++ b/worker/agents/prompts.ts @@ -1,7 +1,7 @@ import { FileTreeNode, RuntimeError, StaticAnalysisResponse, TemplateDetails } from "../services/sandbox/sandboxTypes"; import { TemplateRegistry } from "./inferutils/schemaFormatters"; import z from 'zod'; -import { Blueprint, BlueprintSchemaLite, FileOutputType, PhaseConceptLiteSchema, PhaseConceptSchema, PhaseConceptType, TemplateSelection } from "./schemas"; +import { PhasicBlueprint, AgenticBlueprint, BlueprintSchemaLite, AgenticBlueprintSchema, FileOutputType, PhaseConceptLiteSchema, PhaseConceptSchema, PhaseConceptType, TemplateSelection, Blueprint } from "./schemas"; import { IssueReport } from "./domain/values/IssueReport"; import { FileState, MAX_PHASES } from "./core/state"; import { CODE_SERIALIZERS, CodeSerializerType } from "./utils/codeSerializers"; @@ -1312,8 +1312,8 @@ FRONTEND_FIRST_CODING: ` export interface GeneralSystemPromptBuilderParams { query: string, - templateDetails: TemplateDetails, - dependencies: Record, + templateDetails?: TemplateDetails, + dependencies?: Record, blueprint?: Blueprint, language?: string, frameworks?: string[], @@ -1327,19 +1327,29 @@ export function generalSystemPromptBuilder( // Base variables always present const variables: Record = { query: params.query, - template: PROMPT_UTILS.serializeTemplate(params.templateDetails), - dependencies: JSON.stringify(params.dependencies ?? {}) }; + + // Template context (optional) + if (params.templateDetails) { + variables.template = PROMPT_UTILS.serializeTemplate(params.templateDetails); + variables.dependencies = JSON.stringify(params.dependencies ?? 
{}); + } - // Optional blueprint variables + // Blueprint variables - discriminate by type if (params.blueprint) { - // Redact the initial phase information from blueprint - const blueprint = { - ...params.blueprint, - initialPhase: undefined, + if ('implementationRoadmap' in params.blueprint) { + // Phasic blueprint + const phasicBlueprint = params.blueprint as PhasicBlueprint; + const blueprintForPrompt = { ...phasicBlueprint, initialPhase: undefined }; + variables.blueprint = TemplateRegistry.markdown.serialize(blueprintForPrompt, BlueprintSchemaLite); + variables.blueprintDependencies = phasicBlueprint.frameworks?.join(', ') ?? ''; + } else { + // Agentic blueprint + const agenticBlueprint = params.blueprint as AgenticBlueprint; + variables.blueprint = TemplateRegistry.markdown.serialize(agenticBlueprint, AgenticBlueprintSchema); + variables.blueprintDependencies = agenticBlueprint.frameworks?.join(', ') ?? ''; + variables.agenticPlan = agenticBlueprint.plan.map((step, i) => `${i + 1}. ${step}`).join('\n'); } - variables.blueprint = TemplateRegistry.markdown.serialize(blueprint, BlueprintSchemaLite); - variables.blueprintDependencies = params.blueprint.frameworks?.join(', ') ?? ''; } // Optional language and frameworks diff --git a/worker/agents/schemas.ts b/worker/agents/schemas.ts index 55344e32..f37eb618 100644 --- a/worker/agents/schemas.ts +++ b/worker/agents/schemas.ts @@ -1,5 +1,12 @@ import z from 'zod'; +// Schema for AI project type prediction +export const ProjectTypePredictionSchema = z.object({ + projectType: z.enum(['app', 'workflow', 'presentation', 'general']).describe('The predicted type of project based on the user query'), + reasoning: z.string().describe('Brief explanation for why this project type was selected'), + confidence: z.enum(['high', 'medium', 'low']).describe('Confidence level in the prediction'), +}); + // Schema for AI template selection output export const TemplateSelectionSchema = z.object({ selectedTemplateName: z.string().nullable().describe('The name of the most suitable template, or null if none are suitable.'), @@ -7,7 +14,7 @@ export const TemplateSelectionSchema = z.object({ useCase: z.enum(['SaaS Product Website', 'Dashboard', 'Blog', 'Portfolio', 'E-Commerce', 'General', 'Other']).describe('The use case for which the template is selected, if applicable.').nullable(), complexity: z.enum(['simple', 'moderate', 'complex']).describe('The complexity of developing the project based on the the user query').nullable(), styleSelection: z.enum(['Minimalist Design', 'Brutalism', 'Retro', 'Illustrative', 'Kid_Playful', 'Custom']).describe('Pick a style relevant to the user query').nullable(), - projectName: z.string().describe('The name of the project based on the user query'), + projectType: z.enum(['app', 'workflow', 'presentation', 'general']).default('app').describe('The type of project based on the user query'), }); export const FileOutputSchema = z.object({ @@ -75,12 +82,16 @@ export const CodeReviewOutput = z.object({ commands: z.array(z.string()).describe('Commands that might be needed to run for fixing an issue. Empty array if no commands are needed'), }); -export const BlueprintSchema = z.object({ - title: z.string().describe('Title of the application'), - projectName: z.string().describe('Name of the project, in small case, no special characters, no spaces, no dots. 
Only letters, numbers, hyphens, underscores are allowed.'), +export const SimpleBlueprintSchema = z.object({ + title: z.string().describe('Title for the project'), + projectName: z.string().describe('Name for the project, in small case, no special characters, no spaces, no dots. Only letters, numbers, hyphens, underscores are allowed.'), + description: z.string().describe('Short, brief, concise description of the project in a single sentence'), + colorPalette: z.array(z.string()).describe('Color palette RGB codes to be used in the project, only base colors and not their shades, max 3 colors'), + frameworks: z.array(z.string()).describe('Essential Frameworks, libraries and dependencies to be used in the project, with only major versions optionally specified'), +}); + +export const PhasicBlueprintSchema = SimpleBlueprintSchema.extend({ detailedDescription: z.string().describe('Enhanced and detailed description of what the application does and how its supposed to work. Break down the project into smaller components and describe each component in detail.'), - description: z.string().describe('Short, brief, concise description of the application in a single sentence'), - colorPalette: z.array(z.string()).describe('Color palette RGB codes to be used in the application, only base colors and not their shades, max 3 colors'), views: z.array(z.object({ name: z.string().describe('Name of the view'), description: z.string().describe('Description of the view'), @@ -101,10 +112,13 @@ export const BlueprintSchema = z.object({ description: z.string().describe('Description of the phase'), })).describe('Phases of the implementation roadmap'), initialPhase: PhaseConceptSchema.describe('The first phase to be implemented, in **STRICT** accordance with '), - // commands: z.array(z.string()).describe('Commands to set up the development environment and install all dependencies not already in the template. 
These will run before code generation starts.'), }); -export const BlueprintSchemaLite = BlueprintSchema.omit({ +export const AgenticBlueprintSchema = SimpleBlueprintSchema.extend({ + plan: z.array(z.string()).describe('Step by step plan for implementing the project'), +}); + +export const BlueprintSchemaLite = PhasicBlueprintSchema.omit({ initialPhase: true, }); @@ -124,7 +138,8 @@ export const ScreenshotAnalysisSchema = z.object({ }); export type TemplateSelection = z.infer; -export type Blueprint = z.infer; +export type PhasicBlueprint = z.infer; +export type AgenticBlueprint = z.infer; export type FileConceptType = z.infer; export type PhaseConceptType = z.infer; export type PhaseConceptLiteType = z.infer; @@ -145,4 +160,4 @@ export const ConversationalResponseSchema = z.object({ export type ConversationalResponseType = z.infer; - +export type Blueprint = z.infer | z.infer; diff --git a/worker/agents/services/implementations/BaseAgentService.ts b/worker/agents/services/implementations/BaseAgentService.ts index 38de6b7b..5a512998 100644 --- a/worker/agents/services/implementations/BaseAgentService.ts +++ b/worker/agents/services/implementations/BaseAgentService.ts @@ -2,18 +2,19 @@ import { IStateManager } from '../interfaces/IStateManager'; import { IFileManager } from '../interfaces/IFileManager'; import { StructuredLogger } from '../../../logger'; import { ServiceOptions } from '../interfaces/IServiceOptions'; +import { BaseProjectState } from '../../core/state'; /** * Base class for all agent services * Provides common dependencies and DO-compatible access patterns */ -export abstract class BaseAgentService { - protected readonly stateManager: IStateManager; +export abstract class BaseAgentService { + protected readonly stateManager: IStateManager; protected readonly fileManager: IFileManager; protected readonly getLogger: () => StructuredLogger; protected readonly env: Env; - constructor(options: ServiceOptions) { + constructor(options: ServiceOptions) { this.stateManager = options.stateManager; this.fileManager = options.fileManager; this.getLogger = options.getLogger; @@ -23,14 +24,14 @@ export abstract class BaseAgentService { /** * Get current agent state */ - protected getState() { + protected getState(): Readonly { return this.stateManager.getState(); } /** * Update agent state */ - protected setState(newState: ReturnType) { + protected setState(newState: TState) { this.stateManager.setState(newState); } diff --git a/worker/agents/services/implementations/CodingAgent.ts b/worker/agents/services/implementations/CodingAgent.ts deleted file mode 100644 index d13e1a5c..00000000 --- a/worker/agents/services/implementations/CodingAgent.ts +++ /dev/null @@ -1,123 +0,0 @@ -import { ProcessedImageAttachment } from "worker/types/image-attachment"; -import { Blueprint, FileConceptType } from "worker/agents/schemas"; -import { ExecuteCommandsResponse, StaticAnalysisResponse, RuntimeError } from "worker/services/sandbox/sandboxTypes"; -import { ICodingAgent } from "../interfaces/ICodingAgent"; -import { OperationOptions } from "worker/agents/operations/common"; -import { DeepDebugResult } from "worker/agents/core/types"; -import { RenderToolCall } from "worker/agents/operations/UserConversationProcessor"; -import { WebSocketMessageResponses } from "worker/agents/constants"; - -/* -* CodingAgentInterface - stub for passing to tool calls -*/ -export class CodingAgentInterface { - agentStub: ICodingAgent; - constructor (agentStub: ICodingAgent) { - this.agentStub = agentStub; - } - - 
getLogs(reset?: boolean, durationSeconds?: number): Promise { - return this.agentStub.getLogs(reset, durationSeconds); - } - - fetchRuntimeErrors(clear?: boolean): Promise { - return this.agentStub.fetchRuntimeErrors(clear); - } - - async deployPreview(clearLogs: boolean = true, forceRedeploy: boolean = false): Promise { - const response = await this.agentStub.deployToSandbox([], forceRedeploy, undefined, clearLogs); - // Send a message to refresh the preview - if (response && response.previewURL) { - this.agentStub.broadcast(WebSocketMessageResponses.PREVIEW_FORCE_REFRESH, {}); - return `Deployment successful: ${response.previewURL}`; - } else { - return `Failed to deploy: ${response?.tunnelURL}`; - } - } - - async deployToCloudflare(): Promise { - const response = await this.agentStub.deployToCloudflare(); - if (response && response.deploymentUrl) { - return `Deployment successful: ${response.deploymentUrl}`; - } else { - return `Failed to deploy: ${response?.workersUrl}`; - } - } - - queueRequest(request: string, images?: ProcessedImageAttachment[]): void { - this.agentStub.queueUserRequest(request, images); - } - - clearConversation(): void { - this.agentStub.clearConversation(); - } - - getOperationOptions(): OperationOptions { - return this.agentStub.getOperationOptions(); - } - - getGit() { - return this.agentStub.getGit(); - } - - updateProjectName(newName: string): Promise { - return this.agentStub.updateProjectName(newName); - } - - updateBlueprint(patch: Partial): Promise { - return this.agentStub.updateBlueprint(patch); - } - - // Generic debugging helpers β€” delegate to underlying agent - readFiles(paths: string[]): Promise<{ files: { path: string; content: string }[] }> { - return this.agentStub.readFiles(paths); - } - - runStaticAnalysisCode(files?: string[]): Promise { - return this.agentStub.runStaticAnalysisCode(files); - } - - execCommands(commands: string[], shouldSave: boolean, timeout?: number): Promise { - return this.agentStub.execCommands(commands, shouldSave, timeout); - } - - // Exposes a simplified regenerate API for tools - regenerateFile(path: string, issues: string[]): Promise<{ path: string; diff: string }> { - return this.agentStub.regenerateFileByPath(path, issues); - } - - // Exposes file generation via phase implementation - generateFiles( - phaseName: string, - phaseDescription: string, - requirements: string[], - files: FileConceptType[] - ): Promise<{ files: Array<{ path: string; purpose: string; diff: string }> }> { - return this.agentStub.generateFiles(phaseName, phaseDescription, requirements, files); - } - - isCodeGenerating(): boolean { - return this.agentStub.isCodeGenerating(); - } - - waitForGeneration(): Promise { - return this.agentStub.waitForGeneration(); - } - - isDeepDebugging(): boolean { - return this.agentStub.isDeepDebugging(); - } - - waitForDeepDebug(): Promise { - return this.agentStub.waitForDeepDebug(); - } - - executeDeepDebug( - issue: string, - toolRenderer: RenderToolCall, - streamCb: (chunk: string) => void, - focusPaths?: string[] - ): Promise { - return this.agentStub.executeDeepDebug(issue, toolRenderer, streamCb, focusPaths); - } -} diff --git a/worker/agents/services/implementations/DeploymentManager.ts b/worker/agents/services/implementations/DeploymentManager.ts index 52fe6183..2b81a7fa 100644 --- a/worker/agents/services/implementations/DeploymentManager.ts +++ b/worker/agents/services/implementations/DeploymentManager.ts @@ -14,6 +14,8 @@ import { ServiceOptions } from '../interfaces/IServiceOptions'; import { 
BaseSandboxService } from 'worker/services/sandbox/BaseSandboxService'; import { getSandboxService } from '../../../services/sandbox/factory'; import { validateAndCleanBootstrapCommands } from 'worker/agents/utils/common'; +import { DeploymentTarget } from '../../core/types'; +import { BaseProjectState } from '../../core/state'; const PER_ATTEMPT_TIMEOUT_MS = 60000; // 60 seconds per individual attempt const MASTER_DEPLOYMENT_TIMEOUT_MS = 300000; // 5 minutes total @@ -24,13 +26,13 @@ const HEALTH_CHECK_INTERVAL_MS = 30000; * Handles instance creation, file deployment, analysis, and GitHub/Cloudflare export * Also manages sessionId and health check intervals */ -export class DeploymentManager extends BaseAgentService implements IDeploymentManager { +export class DeploymentManager extends BaseAgentService implements IDeploymentManager { private healthCheckInterval: ReturnType | null = null; private currentDeploymentPromise: Promise | null = null; private cachedSandboxClient: BaseSandboxService | null = null; constructor( - options: ServiceOptions, + options: ServiceOptions, private maxCommandsHistory: number ) { super(options); @@ -552,7 +554,6 @@ export class DeploymentManager extends BaseAgentService implements IDeploymentMa */ private async createNewInstance(): Promise { const state = this.getState(); - const templateName = state.templateName; const projectName = state.projectName; // Add AI proxy vars if AI template @@ -570,18 +571,25 @@ export class DeploymentManager extends BaseAgentService implements IDeploymentMa }; } } - + + // Get latest files + const files = this.fileManager.getAllFiles(); + + this.getLog().info('Files to deploy', { + files: files.map(f => f.filePath) + }); + // Create instance const client = this.getClient(); const logger = this.getLog(); - - const createResponse = await client.createInstance( - templateName, - `v1-${projectName}`, - undefined, - localEnvVars - ); - + + const createResponse = await client.createInstance({ + files, + projectName, + initCommand: 'bun run dev', + envVars: localEnvVars + }); + if (!createResponse || !createResponse.success || !createResponse.runId) { throw new Error(`Failed to create sandbox instance: ${createResponse?.error || 'Unknown error'}`); } @@ -622,10 +630,15 @@ export class DeploymentManager extends BaseAgentService implements IDeploymentMa * Deploy to Cloudflare Workers * Returns deployment URL and deployment ID for database updates */ - async deployToCloudflare(callbacks?: CloudflareDeploymentCallbacks): Promise<{ deploymentUrl: string | null; deploymentId?: string }> { + async deployToCloudflare(request?: { + target?: DeploymentTarget; + callbacks?: CloudflareDeploymentCallbacks; + }): Promise<{ deploymentUrl: string | null; deploymentId?: string }> { const state = this.getState(); const logger = this.getLog(); const client = this.getClient(); + const target = request?.target ?? 'platform'; + const callbacks = request?.callbacks; await this.waitForPreview(); @@ -634,7 +647,7 @@ export class DeploymentManager extends BaseAgentService implements IDeploymentMa instanceId: state.sandboxInstanceId ?? 
'' }); - logger.info('Starting Cloudflare deployment'); + logger.info('Starting Cloudflare deployment', { target }); // Check if we have generated files if (!state.generatedFilesMap || Object.keys(state.generatedFilesMap).length === 0) { @@ -660,7 +673,8 @@ export class DeploymentManager extends BaseAgentService implements IDeploymentMa // Deploy to Cloudflare const deploymentResult = await client.deployToCloudflareWorkers( - state.sandboxInstanceId + state.sandboxInstanceId, + target ); logger.info('Deployment result:', deploymentResult); diff --git a/worker/agents/services/implementations/FileManager.ts b/worker/agents/services/implementations/FileManager.ts index 3ad856d4..5959d923 100644 --- a/worker/agents/services/implementations/FileManager.ts +++ b/worker/agents/services/implementations/FileManager.ts @@ -3,7 +3,7 @@ import { IFileManager } from '../interfaces/IFileManager'; import { IStateManager } from '../interfaces/IStateManager'; import { FileOutputType } from '../../schemas'; import { FileProcessing } from '../../domain/pure/FileProcessing'; -import { FileState } from 'worker/agents/core/state'; +import { BaseProjectState, FileState } from 'worker/agents/core/state'; import { TemplateDetails } from '../../../services/sandbox/sandboxTypes'; import { GitVersionControl } from 'worker/agents/git'; @@ -13,7 +13,7 @@ import { GitVersionControl } from 'worker/agents/git'; */ export class FileManager implements IFileManager { constructor( - private stateManager: IStateManager, + private stateManager: IStateManager, private getTemplateDetailsFunc: () => TemplateDetails, private git: GitVersionControl ) { diff --git a/worker/agents/services/implementations/StateManager.ts b/worker/agents/services/implementations/StateManager.ts index 388d6c61..25d56261 100644 --- a/worker/agents/services/implementations/StateManager.ts +++ b/worker/agents/services/implementations/StateManager.ts @@ -1,37 +1,29 @@ +import { BaseProjectState } from 'worker/agents/core/state'; import { IStateManager } from '../interfaces/IStateManager'; -import { CodeGenState } from '../../core/state'; /** * State manager implementation for Durable Objects * Works with the Agent's state management */ -export class StateManager implements IStateManager { +export class StateManager implements IStateManager { constructor( - private getStateFunc: () => CodeGenState, - private setStateFunc: (state: CodeGenState) => void + private getStateFunc: () => TState, + private setStateFunc: (state: TState) => void ) {} - getState(): Readonly { + getState(): Readonly { return this.getStateFunc(); } - setState(newState: CodeGenState): void { + setState(newState: TState): void { this.setStateFunc(newState); } - updateField(field: K, value: CodeGenState[K]): void { + updateField(field: K, value: TState[K]): void { const currentState = this.getState(); this.setState({ ...currentState, [field]: value }); } - - batchUpdate(updates: Partial): void { - const currentState = this.getState(); - this.setState({ - ...currentState, - ...updates - }); - } } \ No newline at end of file diff --git a/worker/agents/services/interfaces/ICodingAgent.ts b/worker/agents/services/interfaces/ICodingAgent.ts index 48db5f4d..cbb34c27 100644 --- a/worker/agents/services/interfaces/ICodingAgent.ts +++ b/worker/agents/services/interfaces/ICodingAgent.ts @@ -1,65 +1,78 @@ -import { FileOutputType, Blueprint, FileConceptType } from "worker/agents/schemas"; +import { FileOutputType, FileConceptType, Blueprint } from "worker/agents/schemas"; import { BaseSandboxService } 
from "worker/services/sandbox/BaseSandboxService"; import { ExecuteCommandsResponse, PreviewType, StaticAnalysisResponse, RuntimeError } from "worker/services/sandbox/sandboxTypes"; import { ProcessedImageAttachment } from "worker/types/image-attachment"; -import { OperationOptions } from "worker/agents/operations/common"; -import { DeepDebugResult } from "worker/agents/core/types"; +import { BehaviorType, DeepDebugResult, DeploymentTarget, ProjectType } from "worker/agents/core/types"; import { RenderToolCall } from "worker/agents/operations/UserConversationProcessor"; import { WebSocketMessageType, WebSocketMessageData } from "worker/api/websocketTypes"; import { GitVersionControl } from "worker/agents/git/git"; +import { OperationOptions } from "worker/agents/operations/common"; +import { TemplateFile } from "worker/services/sandbox/sandboxTypes"; -export abstract class ICodingAgent { - abstract getSandboxServiceClient(): BaseSandboxService; - - abstract getGit(): GitVersionControl; - - abstract deployToSandbox(files: FileOutputType[], redeploy: boolean, commitMessage?: string, clearLogs?: boolean): Promise; - - abstract deployToCloudflare(): Promise<{ deploymentUrl?: string; workersUrl?: string } | null>; - - abstract getLogs(reset?: boolean, durationSeconds?: number): Promise; - - abstract queueUserRequest(request: string, images?: ProcessedImageAttachment[]): void; - - abstract clearConversation(): void; +export interface ICodingAgent { + getBehavior(): BehaviorType; + + getLogs(reset?: boolean, durationSeconds?: number): Promise; + + fetchRuntimeErrors(clear?: boolean): Promise; + + deployToSandbox(files?: FileOutputType[], redeploy?: boolean, commitMessage?: string, clearLogs?: boolean): Promise; + + broadcast(msg: T, data?: WebSocketMessageData): void; + + deployToCloudflare(target?: DeploymentTarget): Promise<{ deploymentUrl?: string; workersUrl?: string } | null>; + + queueUserRequest(request: string, images?: ProcessedImageAttachment[]): void; + + clearConversation(): void; + + deployPreview(clearLogs?: boolean, forceRedeploy?: boolean): Promise; + + updateProjectName(newName: string): Promise; - abstract updateProjectName(newName: string): Promise; + setBlueprint(blueprint: Blueprint): Promise; - abstract updateBlueprint(patch: Partial): Promise; + getProjectType(): ProjectType; - abstract getOperationOptions(): OperationOptions; + importTemplate(templateName: string): Promise<{ templateName: string; filesImported: number; files: TemplateFile[] }>; - abstract readFiles(paths: string[]): Promise<{ files: { path: string; content: string }[] }>; + getOperationOptions(): OperationOptions; - abstract runStaticAnalysisCode(files?: string[]): Promise; + listFiles(): FileOutputType[]; - abstract execCommands(commands: string[], shouldSave: boolean, timeout?: number): Promise; + readFiles(paths: string[]): Promise<{ files: { path: string; content: string }[] }>; + + runStaticAnalysisCode(files?: string[]): Promise; - abstract regenerateFileByPath(path: string, issues: string[]): Promise<{ path: string; diff: string }>; + execCommands(commands: string[], shouldSave: boolean, timeout?: number): Promise; - abstract generateFiles( + updateBlueprint(patch: Partial): Promise; + + generateFiles( phaseName: string, phaseDescription: string, requirements: string[], files: FileConceptType[] ): Promise<{ files: Array<{ path: string; purpose: string; diff: string }> }>; - abstract fetchRuntimeErrors(clear?: boolean): Promise; - - abstract isCodeGenerating(): boolean; - - abstract 
waitForGeneration(): Promise; - - abstract isDeepDebugging(): boolean; - - abstract waitForDeepDebug(): Promise; - - abstract broadcast(message: T, data?: WebSocketMessageData): void; - - abstract executeDeepDebug( + regenerateFileByPath(path: string, issues: string[]): Promise<{ path: string; diff: string }>; + + isCodeGenerating(): boolean; + + waitForGeneration(): Promise; + + isDeepDebugging(): boolean; + + waitForDeepDebug(): Promise; + + executeDeepDebug( issue: string, toolRenderer: RenderToolCall, streamCb: (chunk: string) => void, focusPaths?: string[], ): Promise; + + get git(): GitVersionControl; + + getSandboxServiceClient(): BaseSandboxService; } diff --git a/worker/agents/services/interfaces/IDeploymentManager.ts b/worker/agents/services/interfaces/IDeploymentManager.ts index eab92211..32dc0b93 100644 --- a/worker/agents/services/interfaces/IDeploymentManager.ts +++ b/worker/agents/services/interfaces/IDeploymentManager.ts @@ -2,6 +2,7 @@ import { FileOutputType } from '../../schemas'; import { StaticAnalysisResponse, RuntimeError, PreviewType } from '../../../services/sandbox/sandboxTypes'; import { DeploymentStartedMessage, DeploymentCompletedMessage, DeploymentFailedMessage } from '../../../api/websocketTypes'; import { CloudflareDeploymentStartedMessage, CloudflareDeploymentCompletedMessage, CloudflareDeploymentErrorMessage } from '../../../api/websocketTypes'; +import { DeploymentTarget } from '../../core/types'; /** * Callbacks for sandbox deployment events @@ -97,6 +98,9 @@ export interface IDeploymentManager { * Deploy to Cloudflare Workers * Returns deployment URL and deployment ID for database updates */ - deployToCloudflare(callbacks?: CloudflareDeploymentCallbacks): Promise<{ deploymentUrl: string | null; deploymentId?: string }>; + deployToCloudflare(request?: { + target?: DeploymentTarget; + callbacks?: CloudflareDeploymentCallbacks; + }): Promise<{ deploymentUrl: string | null; deploymentId?: string }>; } diff --git a/worker/agents/services/interfaces/IServiceOptions.ts b/worker/agents/services/interfaces/IServiceOptions.ts index aa5275f7..596fef9c 100644 --- a/worker/agents/services/interfaces/IServiceOptions.ts +++ b/worker/agents/services/interfaces/IServiceOptions.ts @@ -1,13 +1,14 @@ import { IStateManager } from './IStateManager'; import { IFileManager } from './IFileManager'; import { StructuredLogger } from '../../../logger'; +import { BaseProjectState } from '../../core/state'; /** * Common options for all agent services */ -export interface ServiceOptions { +export interface ServiceOptions { env: Env, - stateManager: IStateManager; + stateManager: IStateManager; fileManager: IFileManager; getLogger: () => StructuredLogger; } diff --git a/worker/agents/services/interfaces/IStateManager.ts b/worker/agents/services/interfaces/IStateManager.ts index 935e3be2..8c51767d 100644 --- a/worker/agents/services/interfaces/IStateManager.ts +++ b/worker/agents/services/interfaces/IStateManager.ts @@ -1,27 +1,22 @@ -import { CodeGenState } from '../../core/state'; +import { BaseProjectState } from "worker/agents/core/state"; /** * Interface for state management * Abstracts state persistence and updates */ -export interface IStateManager { +export interface IStateManager { /** * Get current state */ - getState(): Readonly; + getState(): Readonly; /** * Update state immutably */ - setState(newState: CodeGenState): void; + setState(newState: TState): void; /** * Update specific field */ - updateField(field: K, value: CodeGenState[K]): void; - - /** - * Batch update 
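// Illustrative usage sketch (not part of the diff): wiring the now-generic state
// manager for a state type that extends BaseProjectState. The
// <TState extends BaseProjectState> parameter and the exact alias import paths are
// assumptions inferred from the surrounding hunks; batchUpdate is gone, so
// multi-field writes go through setState (or repeated updateField calls).
import { StateManager } from 'worker/agents/services/implementations/StateManager';
import type { BaseProjectState } from 'worker/agents/core/state';

interface DemoState extends BaseProjectState {
    lastDeployedAt?: number; // hypothetical extra field, for illustration only
}

declare function readAgentState(): DemoState;          // assumed host accessor
declare function writeAgentState(next: DemoState): void; // assumed host accessor

const stateManager = new StateManager<DemoState>(
    () => readAgentState(),
    (next) => writeAgentState(next),
);

// Single-field update:
stateManager.updateField('lastDeployedAt', Date.now());

// Former batchUpdate callers now spread through setState explicitly:
stateManager.setState({ ...stateManager.getState(), lastDeployedAt: Date.now() });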
multiple fields - */ - batchUpdate(updates: Partial): void; + updateField(field: K, value: TState[K]): void; } \ No newline at end of file diff --git a/worker/agents/tools/customTools.ts b/worker/agents/tools/customTools.ts index 51c96d4c..500719b8 100644 --- a/worker/agents/tools/customTools.ts +++ b/worker/agents/tools/customTools.ts @@ -6,10 +6,10 @@ import { toolFeedbackDefinition } from './toolkit/feedback'; import { createQueueRequestTool } from './toolkit/queue-request'; import { createGetLogsTool } from './toolkit/get-logs'; import { createDeployPreviewTool } from './toolkit/deploy-preview'; -import { CodingAgentInterface } from 'worker/agents/services/implementations/CodingAgent'; import { createDeepDebuggerTool } from "./toolkit/deep-debugger"; import { createRenameProjectTool } from './toolkit/rename-project'; import { createAlterBlueprintTool } from './toolkit/alter-blueprint'; +import { createGenerateBlueprintTool } from './toolkit/generate-blueprint'; import { DebugSession } from '../assistants/codeDebugger'; import { createReadFilesTool } from './toolkit/read-files'; import { createExecCommandsTool } from './toolkit/exec-commands'; @@ -21,14 +21,21 @@ import { createGetRuntimeErrorsTool } from './toolkit/get-runtime-errors'; import { createWaitForGenerationTool } from './toolkit/wait-for-generation'; import { createWaitForDebugTool } from './toolkit/wait-for-debug'; import { createGitTool } from './toolkit/git'; +import { ICodingAgent } from '../services/interfaces/ICodingAgent'; +import { createInitSuitableTemplateTool } from './toolkit/init-suitable-template'; +import { createVirtualFilesystemTool } from './toolkit/virtual-filesystem'; +import { createGenerateImagesTool } from './toolkit/generate-images'; +import { Message } from '../inferutils/common'; +import { ChatCompletionMessageFunctionToolCall } from 'openai/resources'; export async function executeToolWithDefinition( + toolCall: ChatCompletionMessageFunctionToolCall, toolDef: ToolDefinition, args: TArgs ): Promise { - toolDef.onStart?.(args); + await toolDef.onStart?.(toolCall, args); const result = await toolDef.implementation(args); - toolDef.onComplete?.(args, result); + await toolDef.onComplete?.(toolCall, args, result); return result; } @@ -37,7 +44,7 @@ export async function executeToolWithDefinition( * Add new tools here - they're automatically included in the conversation */ export function buildTools( - agent: CodingAgentInterface, + agent: ICodingAgent, logger: StructuredLogger, toolRenderer: RenderToolCall, streamCb: (chunk: string) => void, @@ -60,32 +67,95 @@ export function buildTools( } export function buildDebugTools(session: DebugSession, logger: StructuredLogger, toolRenderer?: RenderToolCall): ToolDefinition[] { - const tools = [ - createGetLogsTool(session.agent, logger), - createGetRuntimeErrorsTool(session.agent, logger), - createReadFilesTool(session.agent, logger), - createRunAnalysisTool(session.agent, logger), - createExecCommandsTool(session.agent, logger), - createRegenerateFileTool(session.agent, logger), - createGenerateFilesTool(session.agent, logger), - createDeployPreviewTool(session.agent, logger), - createWaitTool(logger), - createGitTool(session.agent, logger), - ]; + const tools = [ + createGetLogsTool(session.agent, logger), + createGetRuntimeErrorsTool(session.agent, logger), + createReadFilesTool(session.agent, logger), + createRunAnalysisTool(session.agent, logger), + createExecCommandsTool(session.agent, logger), + createRegenerateFileTool(session.agent, logger), + 
createGenerateFilesTool(session.agent, logger), + createDeployPreviewTool(session.agent, logger), + createWaitTool(logger), + createGitTool(session.agent, logger), + ]; + return withRenderer(tools, toolRenderer); +} + +/** + * Toolset for the Agentic Project Builder (autonomous build assistant) + */ +export function buildAgenticBuilderTools( + session: DebugSession, + logger: StructuredLogger, + toolRenderer?: RenderToolCall, + onToolComplete?: (message: Message) => Promise +): ToolDefinition[] { + const tools = [ + // PRD generation + refinement + createGenerateBlueprintTool(session.agent, logger), + createAlterBlueprintTool(session.agent, logger), + // Template selection + createInitSuitableTemplateTool(session.agent, logger), + // Virtual filesystem operations (list + read from Durable Object storage) + createVirtualFilesystemTool(session.agent, logger), + // Build + analysis toolchain + createGenerateFilesTool(session.agent, logger), + createRegenerateFileTool(session.agent, logger), + createRunAnalysisTool(session.agent, logger), + // Runtime + deploy + createDeployPreviewTool(session.agent, logger), + createGetRuntimeErrorsTool(session.agent, logger), + createGetLogsTool(session.agent, logger), + // Utilities + createExecCommandsTool(session.agent, logger), + createWaitTool(logger), + createGitTool(session.agent, logger), + // WIP: images + createGenerateImagesTool(session.agent, logger), + ]; + + return withRenderer(tools, toolRenderer, onToolComplete); +} + +/** + * Decorate tool definitions with a renderer for UI visualization and conversation sync + */ +function withRenderer( + tools: ToolDefinition[], + toolRenderer?: RenderToolCall, + onComplete?: (message: Message) => Promise +): ToolDefinition[] { + if (!toolRenderer) return tools; - // Attach tool renderer for UI visualization if provided - if (toolRenderer) { return tools.map(td => ({ - ...td, - onStart: (args: Record) => toolRenderer({ name: td.function.name, status: 'start', args }), - onComplete: (args: Record, result: unknown) => toolRenderer({ - name: td.function.name, - status: 'success', - args, - result: typeof result === 'string' ? result : JSON.stringify(result) - }) - })); - } + ...td, + onStart: async (_tc: ChatCompletionMessageFunctionToolCall, args: Record) => { + if (toolRenderer) { + toolRenderer({ name: td.function.name, status: 'start', args }); + } + }, + onComplete: async (tc: ChatCompletionMessageFunctionToolCall, args: Record, result: unknown) => { + // UI rendering + if (toolRenderer) { + toolRenderer({ + name: td.function.name, + status: 'success', + args, + result: typeof result === 'string' ? result : JSON.stringify(result) + }); + } - return tools; + // Conversation sync callback + if (onComplete) { + const toolMessage: Message = { + role: 'tool', + content: typeof result === 'string' ? 
result : JSON.stringify(result), + name: td.function.name, + tool_call_id: tc.id, + }; + await onComplete(toolMessage); + } + } + })); } diff --git a/worker/agents/tools/toolkit/alter-blueprint.ts b/worker/agents/tools/toolkit/alter-blueprint.ts index d4e5faa5..e11eaaaa 100644 --- a/worker/agents/tools/toolkit/alter-blueprint.ts +++ b/worker/agents/tools/toolkit/alter-blueprint.ts @@ -1,53 +1,72 @@ import { ToolDefinition } from '../types'; import { StructuredLogger } from '../../../logger'; -import { CodingAgentInterface } from 'worker/agents/services/implementations/CodingAgent'; +import { ICodingAgent } from 'worker/agents/services/interfaces/ICodingAgent'; import { Blueprint } from 'worker/agents/schemas'; type AlterBlueprintArgs = { - patch: Partial & { - projectName?: string; - }; + patch: Record; }; export function createAlterBlueprintTool( - agent: CodingAgentInterface, - logger: StructuredLogger + agent: ICodingAgent, + logger: StructuredLogger ): ToolDefinition { - return { - type: 'function' as const, - function: { - name: 'alter_blueprint', - description: 'Apply a validated patch to the current blueprint. Only allowed keys are accepted.', - parameters: { - type: 'object', - additionalProperties: false, - properties: { - patch: { - type: 'object', - additionalProperties: false, - properties: { - title: { type: 'string' }, - projectName: { type: 'string', minLength: 3, maxLength: 50, pattern: '^[a-z0-9-_]+$' }, - detailedDescription: { type: 'string' }, - description: { type: 'string' }, - colorPalette: { type: 'array', items: { type: 'string' } }, - views: { type: 'array', items: { type: 'object', additionalProperties: false, properties: { name: { type: 'string' }, description: { type: 'string' } }, required: ['name', 'description'] } }, - userFlow: { type: 'object', additionalProperties: false, properties: { uiLayout: { type: 'string' }, uiDesign: { type: 'string' }, userJourney: { type: 'string' } } }, - dataFlow: { type: 'string' }, - architecture: { type: 'object', additionalProperties: false, properties: { dataFlow: { type: 'string' } } }, - pitfalls: { type: 'array', items: { type: 'string' } }, - frameworks: { type: 'array', items: { type: 'string' } }, - implementationRoadmap: { type: 'array', items: { type: 'object', additionalProperties: false, properties: { phase: { type: 'string' }, description: { type: 'string' } }, required: ['phase', 'description'] } }, + // Build behavior-aware schema at tool creation time (tools are created per-agent) + const isAgentic = agent.getBehavior() === 'agentic'; + + const agenticProperties = { + title: { type: 'string' }, + projectName: { type: 'string', minLength: 3, maxLength: 50, pattern: '^[a-z0-9-_]+$' }, + description: { type: 'string' }, + detailedDescription: { type: 'string' }, + colorPalette: { type: 'array', items: { type: 'string' } }, + frameworks: { type: 'array', items: { type: 'string' } }, + // Agentic-only: plan + plan: { type: 'array', items: { type: 'string' } }, + } as const; + + const phasicProperties = { + title: { type: 'string' }, + projectName: { type: 'string', minLength: 3, maxLength: 50, pattern: '^[a-z0-9-_]+$' }, + description: { type: 'string' }, + detailedDescription: { type: 'string' }, + colorPalette: { type: 'array', items: { type: 'string' } }, + frameworks: { type: 'array', items: { type: 'string' } }, + views: { type: 'array', items: { type: 'object', additionalProperties: false, properties: { name: { type: 'string' }, description: { type: 'string' } }, required: ['name', 'description'] } }, + 
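// Illustrative sketch: the tool hooks are now async and receive the originating
// OpenAI tool call, which is what lets withRenderer build a role:'tool' message
// keyed by tool_call_id. The echo tool, its generic arguments, and the alias
// import paths below are assumptions for demonstration only.
import { executeToolWithDefinition } from 'worker/agents/tools/customTools';
import type { ToolDefinition } from 'worker/agents/tools/types';
import type { ChatCompletionMessageFunctionToolCall } from 'openai/resources';

type EchoArgs = { text: string };

const echoTool: ToolDefinition<EchoArgs, string> = {
    type: 'function',
    function: {
        name: 'echo',
        description: 'Echo the input back (demo only).',
        parameters: {
            type: 'object',
            properties: { text: { type: 'string' } },
            required: ['text'],
        },
    },
    implementation: async ({ text }) => text,
    onStart: async (toolCall, args) => console.log('start', toolCall.id, args.text),
    onComplete: async (toolCall, _args, result) => console.log('done', toolCall.id, result),
};

// A minimal tool call as the model would emit it:
const call: ChatCompletionMessageFunctionToolCall = {
    id: 'call_demo',
    type: 'function',
    function: { name: 'echo', arguments: JSON.stringify({ text: 'hi' }) },
};

await executeToolWithDefinition(call, echoTool, { text: 'hi' });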
userFlow: { type: 'object', additionalProperties: false, properties: { uiLayout: { type: 'string' }, uiDesign: { type: 'string' }, userJourney: { type: 'string' } } }, + dataFlow: { type: 'string' }, + architecture: { type: 'object', additionalProperties: false, properties: { dataFlow: { type: 'string' } } }, + pitfalls: { type: 'array', items: { type: 'string' } }, + implementationRoadmap: { type: 'array', items: { type: 'object', additionalProperties: false, properties: { phase: { type: 'string' }, description: { type: 'string' } }, required: ['phase', 'description'] } }, + // No plan here; phasic handles phases separately + } as const; + + const dynamicPatchSchema = isAgentic ? agenticProperties : phasicProperties; + + return { + type: 'function' as const, + function: { + name: 'alter_blueprint', + description: isAgentic + ? 'Apply a patch to the agentic blueprint (title, description, colorPalette, frameworks, plan, projectName).' + : 'Apply a patch to the phasic blueprint (title, description, colorPalette, frameworks, views, userFlow, architecture, dataFlow, pitfalls, implementationRoadmap, projectName).', + parameters: { + type: 'object', + additionalProperties: false, + properties: { + patch: { + type: 'object', + additionalProperties: false, + properties: dynamicPatchSchema as Record, + }, + }, + required: ['patch'], }, - }, }, - required: ['patch'], - }, - }, - implementation: async (args) => { - logger.info('Altering blueprint', { keys: Object.keys(args.patch) }); - const updated = await agent.updateBlueprint(args.patch); - return updated; - }, - }; + implementation: async ({ patch }) => { + logger.info('Altering blueprint', { keys: Object.keys(patch || {}) }); + const updated = await agent.updateBlueprint(patch as Partial); + return updated; + }, + }; } diff --git a/worker/agents/tools/toolkit/deep-debugger.ts b/worker/agents/tools/toolkit/deep-debugger.ts index bf15de96..f9e3e154 100644 --- a/worker/agents/tools/toolkit/deep-debugger.ts +++ b/worker/agents/tools/toolkit/deep-debugger.ts @@ -1,10 +1,10 @@ import { ToolDefinition } from '../types'; import { StructuredLogger } from '../../../logger'; -import { CodingAgentInterface } from 'worker/agents/services/implementations/CodingAgent'; +import { ICodingAgent } from 'worker/agents/services/interfaces/ICodingAgent'; import { RenderToolCall } from 'worker/agents/operations/UserConversationProcessor'; export function createDeepDebuggerTool( - agent: CodingAgentInterface, + agent: ICodingAgent, logger: StructuredLogger, toolRenderer: RenderToolCall, streamCb: (chunk: string) => void, diff --git a/worker/agents/tools/toolkit/deploy-preview.ts b/worker/agents/tools/toolkit/deploy-preview.ts index c6be2788..36995c79 100644 --- a/worker/agents/tools/toolkit/deploy-preview.ts +++ b/worker/agents/tools/toolkit/deploy-preview.ts @@ -1,13 +1,13 @@ import { ErrorResult, ToolDefinition } from '../types'; import { StructuredLogger } from '../../../logger'; -import { CodingAgentInterface } from 'worker/agents/services/implementations/CodingAgent'; +import { ICodingAgent } from 'worker/agents/services/interfaces/ICodingAgent'; type DeployPreviewArgs = Record; type DeployPreviewResult = { message: string } | ErrorResult; export function createDeployPreviewTool( - agent: CodingAgentInterface, + agent: ICodingAgent, logger: StructuredLogger ): ToolDefinition { return { diff --git a/worker/agents/tools/toolkit/exec-commands.ts b/worker/agents/tools/toolkit/exec-commands.ts index c9548d05..3e947455 100644 --- 
a/worker/agents/tools/toolkit/exec-commands.ts +++ b/worker/agents/tools/toolkit/exec-commands.ts @@ -1,6 +1,6 @@ import { ToolDefinition, ErrorResult } from '../types'; import { StructuredLogger } from '../../../logger'; -import { CodingAgentInterface } from 'worker/agents/services/implementations/CodingAgent'; +import { ICodingAgent } from 'worker/agents/services/interfaces/ICodingAgent'; import { ExecuteCommandsResponse } from 'worker/services/sandbox/sandboxTypes'; export type ExecCommandsArgs = { @@ -12,7 +12,7 @@ export type ExecCommandsArgs = { export type ExecCommandsResult = ExecuteCommandsResponse | ErrorResult; export function createExecCommandsTool( - agent: CodingAgentInterface, + agent: ICodingAgent, logger: StructuredLogger, ): ToolDefinition { return { diff --git a/worker/agents/tools/toolkit/generate-blueprint.ts b/worker/agents/tools/toolkit/generate-blueprint.ts new file mode 100644 index 00000000..4ef0a914 --- /dev/null +++ b/worker/agents/tools/toolkit/generate-blueprint.ts @@ -0,0 +1,74 @@ +import { ToolDefinition } from '../types'; +import { StructuredLogger } from '../../../logger'; +import { ICodingAgent } from 'worker/agents/services/interfaces/ICodingAgent'; +import { generateBlueprint, type AgenticBlueprintGenerationArgs } from 'worker/agents/planning/blueprint'; +import type { Blueprint } from 'worker/agents/schemas'; +import { WebSocketMessageResponses } from '../../constants'; + +type GenerateBlueprintArgs = { + prompt: string; +}; +type GenerateBlueprintResult = { message: string; blueprint: Blueprint }; + +/** + * Generates a blueprint + */ +export function createGenerateBlueprintTool( + agent: ICodingAgent, + logger: StructuredLogger +): ToolDefinition { + return { + type: 'function' as const, + function: { + name: 'generate_blueprint', + description: + 'Generate a blueprint using the backend blueprint generator. Produces a plan-based blueprint for agentic behavior and a detailed PRD for phasic. Provide a description/prompt for the project to generate a blueprint.', + parameters: { + type: 'object', + properties: { + prompt: { + type: 'string', + description: 'Prompt/user query for building the project. Use this to provide clarifications, additional requirements, or refined specifications based on conversation context.' + } + }, + required: ['prompt'], + }, + }, + implementation: async ({ prompt }: GenerateBlueprintArgs) => { + const { env, inferenceContext, context } = agent.getOperationOptions(); + + const isAgentic = agent.getBehavior() === 'agentic'; + + // Language/frameworks are optional; provide sensible defaults + const language = 'typescript'; + const frameworks: string[] = []; + + const args: AgenticBlueprintGenerationArgs = { + env, + inferenceContext, + query: prompt, + language, + frameworks, + templateDetails: context.templateDetails, + projectType: agent.getProjectType(), + stream: { + chunk_size: 256, + onChunk: (chunk: string) => { + agent.broadcast(WebSocketMessageResponses.BLUEPRINT_CHUNK, { chunk }); + } + } + }; + const blueprint = await generateBlueprint(args); + + // Persist in state for subsequent steps + await agent.setBlueprint(blueprint); + + logger.info('Blueprint generated via tool', { + behavior: isAgentic ? 
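// Illustrative sketch: invoking the new generate_blueprint tool directly (e.g.
// from a test) rather than via a model tool call. `agent` and `logger` are
// assumed to exist and the alias import paths are assumptions. The tool streams
// BLUEPRINT_CHUNK messages over the agent's broadcast channel and persists the
// result via agent.setBlueprint() before returning it.
import { createGenerateBlueprintTool } from 'worker/agents/tools/toolkit/generate-blueprint';
import type { ICodingAgent } from 'worker/agents/services/interfaces/ICodingAgent';

declare const agent: ICodingAgent;
declare const logger: Parameters<typeof createGenerateBlueprintTool>[1];

const tool = createGenerateBlueprintTool(agent, logger);
const { message, blueprint } = await tool.implementation({
    prompt: 'A kanban board with drag-and-drop and local persistence',
});
logger.info(message, { title: blueprint.title });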
'agentic' : 'phasic', + title: blueprint.title, + }); + + return { message: 'Blueprint generated successfully', blueprint }; + }, + }; +} diff --git a/worker/agents/tools/toolkit/generate-files.ts b/worker/agents/tools/toolkit/generate-files.ts index 7091a818..db513d78 100644 --- a/worker/agents/tools/toolkit/generate-files.ts +++ b/worker/agents/tools/toolkit/generate-files.ts @@ -1,6 +1,6 @@ import { ToolDefinition, ErrorResult } from '../types'; import { StructuredLogger } from '../../../logger'; -import { CodingAgentInterface } from 'worker/agents/services/implementations/CodingAgent'; +import { ICodingAgent } from 'worker/agents/services/interfaces/ICodingAgent'; import { FileConceptType } from 'worker/agents/schemas'; export type GenerateFilesArgs = { @@ -18,7 +18,7 @@ export type GenerateFilesResult = | ErrorResult; export function createGenerateFilesTool( - agent: CodingAgentInterface, + agent: ICodingAgent, logger: StructuredLogger, ): ToolDefinition { return { diff --git a/worker/agents/tools/toolkit/generate-images.ts b/worker/agents/tools/toolkit/generate-images.ts new file mode 100644 index 00000000..7a971185 --- /dev/null +++ b/worker/agents/tools/toolkit/generate-images.ts @@ -0,0 +1,35 @@ +import { ToolDefinition } from '../types'; +import { StructuredLogger } from '../../../logger'; +import { ICodingAgent } from 'worker/agents/services/interfaces/ICodingAgent'; + +type GenerateImagesArgs = { + prompts: string[]; + style?: string; +}; + +type GenerateImagesResult = { message: string }; + +export function createGenerateImagesTool( + _agent: ICodingAgent, + _logger: StructuredLogger, +): ToolDefinition { + return { + type: 'function' as const, + function: { + name: 'generate_images', + description: 'Generate images for the project (stub). Use later when the image generation pipeline is available.', + parameters: { + type: 'object', + properties: { + prompts: { type: 'array', items: { type: 'string' } }, + style: { type: 'string' }, + }, + required: ['prompts'], + }, + }, + implementation: async ({ prompts, style }: GenerateImagesArgs) => { + return { message: `Image generation not implemented yet. Requested ${prompts.length} prompt(s)${style ? 
` with style ${style}` : ''}.` }; + }, + }; +} + diff --git a/worker/agents/tools/toolkit/get-logs.ts b/worker/agents/tools/toolkit/get-logs.ts index a5401058..b2214201 100644 --- a/worker/agents/tools/toolkit/get-logs.ts +++ b/worker/agents/tools/toolkit/get-logs.ts @@ -1,6 +1,6 @@ import { ErrorResult, ToolDefinition } from '../types'; import { StructuredLogger } from '../../../logger'; -import { CodingAgentInterface } from 'worker/agents/services/implementations/CodingAgent'; +import { ICodingAgent } from 'worker/agents/services/interfaces/ICodingAgent'; type GetLogsArgs = { reset?: boolean; @@ -11,7 +11,7 @@ type GetLogsArgs = { type GetLogsResult = { logs: string } | ErrorResult; export function createGetLogsTool( - agent: CodingAgentInterface, + agent: ICodingAgent, logger: StructuredLogger ): ToolDefinition { return { diff --git a/worker/agents/tools/toolkit/get-runtime-errors.ts b/worker/agents/tools/toolkit/get-runtime-errors.ts index 5c75374d..c3e35277 100644 --- a/worker/agents/tools/toolkit/get-runtime-errors.ts +++ b/worker/agents/tools/toolkit/get-runtime-errors.ts @@ -1,6 +1,6 @@ import { ErrorResult, ToolDefinition } from '../types'; import { StructuredLogger } from '../../../logger'; -import { CodingAgentInterface } from 'worker/agents/services/implementations/CodingAgent'; +import { ICodingAgent } from 'worker/agents/services/interfaces/ICodingAgent'; import { RuntimeError } from 'worker/services/sandbox/sandboxTypes'; type GetRuntimeErrorsArgs = Record; @@ -8,7 +8,7 @@ type GetRuntimeErrorsArgs = Record; type GetRuntimeErrorsResult = { errors: RuntimeError[] } | ErrorResult; export function createGetRuntimeErrorsTool( - agent: CodingAgentInterface, + agent: ICodingAgent, logger: StructuredLogger ): ToolDefinition { return { diff --git a/worker/agents/tools/toolkit/git.ts b/worker/agents/tools/toolkit/git.ts index 7ca91246..7aeba29e 100644 --- a/worker/agents/tools/toolkit/git.ts +++ b/worker/agents/tools/toolkit/git.ts @@ -1,6 +1,6 @@ import { ToolDefinition } from '../types'; import { StructuredLogger } from '../../../logger'; -import { CodingAgentInterface } from 'worker/agents/services/implementations/CodingAgent'; +import { ICodingAgent } from 'worker/agents/services/interfaces/ICodingAgent'; type GitCommand = 'commit' | 'log' | 'show' | 'reset'; @@ -13,7 +13,7 @@ interface GitToolArgs { } export function createGitTool( - agent: CodingAgentInterface, + agent: ICodingAgent, logger: StructuredLogger, options?: { excludeCommands?: GitCommand[] } ): ToolDefinition { @@ -65,7 +65,7 @@ export function createGitTool( }, implementation: async ({ command, message, limit, oid, includeDiff }: GitToolArgs) => { try { - const gitInstance = agent.getGit(); + const gitInstance = agent.git; switch (command) { case 'commit': { diff --git a/worker/agents/tools/toolkit/init-suitable-template.ts b/worker/agents/tools/toolkit/init-suitable-template.ts new file mode 100644 index 00000000..e970bdb6 --- /dev/null +++ b/worker/agents/tools/toolkit/init-suitable-template.ts @@ -0,0 +1,132 @@ +import { ToolDefinition, ErrorResult } from '../types'; +import { StructuredLogger } from '../../../logger'; +import { ICodingAgent } from 'worker/agents/services/interfaces/ICodingAgent'; +import { BaseSandboxService } from 'worker/services/sandbox/BaseSandboxService'; +import { selectTemplate } from '../../planning/templateSelector'; +import { TemplateSelection } from '../../schemas'; +import { TemplateFile } from 'worker/services/sandbox/sandboxTypes'; + +export type InitSuitableTemplateArgs = { + 
query: string; +}; + +export type InitSuitableTemplateResult = + | { + selection: TemplateSelection; + importedFiles: TemplateFile[]; + reasoning: string; + message: string; + } + | ErrorResult; + +/** + * template selection and import. + * Analyzes user requirements, selects best matching template from library, + * and automatically imports it to the virtual filesystem. + */ +export function createInitSuitableTemplateTool( + agent: ICodingAgent, + logger: StructuredLogger +): ToolDefinition { + return { + type: 'function' as const, + function: { + name: 'init_suitable_template', + description: 'Analyze user requirements and automatically select + import the most suitable template from library. Uses AI to match requirements against available templates. Returns selection with reasoning and imported files. For interactive projects (app/presentation/workflow) only. Call this BEFORE generate_blueprint.', + parameters: { + type: 'object', + properties: { + query: { + type: 'string', + description: 'User requirements and project description. Provide clear description of what needs to be built.', + }, + }, + required: ['query'], + }, + }, + implementation: async ({ query }: InitSuitableTemplateArgs) => { + try { + const projectType = agent.getProjectType(); + const operationOptions = agent.getOperationOptions(); + + logger.info('Analyzing template suitability and importing', { + projectType, + queryLength: query.length + }); + + // Fetch available templates + const templatesResponse = await BaseSandboxService.listTemplates(); + if (!templatesResponse.success || !templatesResponse.templates) { + return { + error: `Failed to fetch templates: ${templatesResponse.error || 'Unknown error'}` + }; + } + + logger.info('Templates fetched', { count: templatesResponse.templates.length }); + + // Use AI selector to find best match + const selection = await selectTemplate({ + env: operationOptions.env, + query, + projectType, + availableTemplates: templatesResponse.templates, + inferenceContext: operationOptions.inferenceContext, + }); + + logger.info('Template selection completed', { + selected: selection.selectedTemplateName, + projectType: selection.projectType + }); + + // If no suitable template found, return error suggesting scratch mode + if (!selection.selectedTemplateName) { + return { + error: `No suitable template found for this project. Reasoning: ${selection.reasoning}. Consider using virtual-first mode (generate all config files yourself) or refine requirements.` + }; + } + + // Import the selected template + const importResult = await agent.importTemplate( + selection.selectedTemplateName + ); + + logger.info('Template imported successfully', { + templateName: importResult.templateName, + filesCount: importResult.files.length + }); + + // Build detailed reasoning message + const reasoningMessage = ` +**AI Template Selection Complete** + +**Selected Template**: ${selection.selectedTemplateName} +**Project Type**: ${selection.projectType} +**Complexity**: ${selection.complexity || 'N/A'} +**Style**: ${selection.styleSelection || 'N/A'} +**Use Case**: ${selection.useCase || 'N/A'} + +**Why This Template**: +${selection.reasoning} + +**Template Files Imported**: ${importResult.files.length} important files +**Ready for**: Blueprint generation with template context + +**Next Step**: Use generate_blueprint() to create project plan that leverages this template's features. 
+`.trim(); + + return { + selection, + importedFiles: importResult.files, + reasoning: reasoningMessage, + message: `Template "${selection.selectedTemplateName}" selected and imported successfully.` + }; + + } catch (error) { + logger.error('Error in init_suitable_template', error); + return { + error: `Error selecting/importing template: ${error instanceof Error ? error.message : 'Unknown error'}` + }; + } + }, + }; +} diff --git a/worker/agents/tools/toolkit/initialize-slides.ts b/worker/agents/tools/toolkit/initialize-slides.ts new file mode 100644 index 00000000..7cb529b2 --- /dev/null +++ b/worker/agents/tools/toolkit/initialize-slides.ts @@ -0,0 +1,46 @@ +import { ToolDefinition } from '../types'; +import { StructuredLogger } from '../../../logger'; +import { ICodingAgent } from 'worker/agents/services/interfaces/ICodingAgent'; + +type InitializeSlidesArgs = { + theme?: string; + force_preview?: boolean; +}; + +type InitializeSlidesResult = { message: string }; + +/** + * Initializes a Spectacle-based slides runtime in from-scratch projects. + * - Imports the Spectacle template files into the repository + * - Commits them + * - Deploys a preview (agent policy will allow because slides exist) + */ +export function createInitializeSlidesTool( + agent: ICodingAgent, + logger: StructuredLogger, +): ToolDefinition { + return { + type: 'function' as const, + function: { + name: 'initialize_slides', + description: 'Initialize a Spectacle slides project inside the current workspace and deploy a live preview. Use only if the user wants a slide deck.', + parameters: { + type: 'object', + properties: { + theme: { type: 'string', description: 'Optional theme preset name' }, + force_preview: { type: 'boolean', description: 'Force redeploy sandbox after import' }, + }, + required: [], + }, + }, + implementation: async ({ theme, force_preview }: InitializeSlidesArgs) => { + logger.info('Initializing slides via Spectacle template', { theme }); + const { templateName, filesImported } = await agent.importTemplate('spectacle'); + logger.info('Imported template', { templateName, filesImported }); + + const deployMsg = await agent.deployPreview(true, !!force_preview); + return { message: `Slides initialized with template '${templateName}', files: ${filesImported}. 
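// Illustrative sketch: driving template selection and import directly. The tool
// wraps BaseSandboxService.listTemplates() + selectTemplate() and then calls
// agent.importTemplate(); per its description it should run before
// generate_blueprint. `agent`, `logger`, the query text and the alias import
// paths are assumptions.
import { createInitSuitableTemplateTool } from 'worker/agents/tools/toolkit/init-suitable-template';
import type { ICodingAgent } from 'worker/agents/services/interfaces/ICodingAgent';

declare const agent: ICodingAgent;
declare const logger: Parameters<typeof createInitSuitableTemplateTool>[1];

const result = await createInitSuitableTemplateTool(agent, logger).implementation({
    query: 'Interactive slide deck summarising quarterly results',
});

if ('error' in result) {
    // No suitable template: fall back to virtual-first mode and scaffold config files by hand.
    logger.warn(result.error);
} else {
    logger.info(result.message, {
        template: result.selection.selectedTemplateName,
        imported: result.importedFiles.length,
    });
}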
${deployMsg}` }; + }, + }; +} + diff --git a/worker/agents/tools/toolkit/queue-request.ts b/worker/agents/tools/toolkit/queue-request.ts index 486bd809..4fd90f5e 100644 --- a/worker/agents/tools/toolkit/queue-request.ts +++ b/worker/agents/tools/toolkit/queue-request.ts @@ -1,13 +1,13 @@ import { ToolDefinition } from '../types'; import { StructuredLogger } from '../../../logger'; -import { CodingAgentInterface } from 'worker/agents/services/implementations/CodingAgent'; +import { ICodingAgent } from 'worker/agents/services/interfaces/ICodingAgent'; type QueueRequestArgs = { modificationRequest: string; }; export function createQueueRequestTool( - agent: CodingAgentInterface, + agent: ICodingAgent, logger: StructuredLogger ): ToolDefinition { return { @@ -34,7 +34,7 @@ export function createQueueRequestTool( logger.info('Received app edit request', { modificationRequest: args.modificationRequest, }); - agent.queueRequest(args.modificationRequest); + agent.queueUserRequest(args.modificationRequest); return null; }, }; diff --git a/worker/agents/tools/toolkit/read-files.ts b/worker/agents/tools/toolkit/read-files.ts index fa8dc0f3..15cb28c4 100644 --- a/worker/agents/tools/toolkit/read-files.ts +++ b/worker/agents/tools/toolkit/read-files.ts @@ -1,6 +1,6 @@ import { ToolDefinition, ErrorResult } from '../types'; import { StructuredLogger } from '../../../logger'; -import { CodingAgentInterface } from 'worker/agents/services/implementations/CodingAgent'; +import { ICodingAgent } from 'worker/agents/services/interfaces/ICodingAgent'; export type ReadFilesArgs = { paths: string[]; @@ -12,7 +12,7 @@ export type ReadFilesResult = | ErrorResult; export function createReadFilesTool( - agent: CodingAgentInterface, + agent: ICodingAgent, logger: StructuredLogger, ): ToolDefinition { return { diff --git a/worker/agents/tools/toolkit/regenerate-file.ts b/worker/agents/tools/toolkit/regenerate-file.ts index 7afca870..5be23f99 100644 --- a/worker/agents/tools/toolkit/regenerate-file.ts +++ b/worker/agents/tools/toolkit/regenerate-file.ts @@ -1,6 +1,6 @@ import { ToolDefinition, ErrorResult } from '../types'; import { StructuredLogger } from '../../../logger'; -import { CodingAgentInterface } from 'worker/agents/services/implementations/CodingAgent'; +import { ICodingAgent } from 'worker/agents/services/interfaces/ICodingAgent'; export type RegenerateFileArgs = { path: string; @@ -12,7 +12,7 @@ export type RegenerateFileResult = | ErrorResult; export function createRegenerateFileTool( - agent: CodingAgentInterface, + agent: ICodingAgent, logger: StructuredLogger, ): ToolDefinition { return { @@ -38,7 +38,7 @@ CRITICAL: Provide detailed, specific issues - not vague descriptions. 
See system path, issuesCount: issues.length, }); - return await agent.regenerateFile(path, issues); + return await agent.regenerateFileByPath(path, issues); } catch (error) { return { error: diff --git a/worker/agents/tools/toolkit/rename-project.ts b/worker/agents/tools/toolkit/rename-project.ts index be7ab416..850161b5 100644 --- a/worker/agents/tools/toolkit/rename-project.ts +++ b/worker/agents/tools/toolkit/rename-project.ts @@ -1,6 +1,6 @@ import { ToolDefinition } from '../types'; import { StructuredLogger } from '../../../logger'; -import { CodingAgentInterface } from 'worker/agents/services/implementations/CodingAgent'; +import { ICodingAgent } from 'worker/agents/services/interfaces/ICodingAgent'; type RenameArgs = { newName: string; @@ -9,7 +9,7 @@ type RenameArgs = { type RenameResult = { projectName: string }; export function createRenameProjectTool( - agent: CodingAgentInterface, + agent: ICodingAgent, logger: StructuredLogger ): ToolDefinition { return { diff --git a/worker/agents/tools/toolkit/run-analysis.ts b/worker/agents/tools/toolkit/run-analysis.ts index 52e67c78..b95a38c8 100644 --- a/worker/agents/tools/toolkit/run-analysis.ts +++ b/worker/agents/tools/toolkit/run-analysis.ts @@ -1,6 +1,6 @@ import { ToolDefinition } from '../types'; import { StructuredLogger } from '../../../logger'; -import { CodingAgentInterface } from 'worker/agents/services/implementations/CodingAgent'; +import { ICodingAgent } from 'worker/agents/services/interfaces/ICodingAgent'; import { StaticAnalysisResponse } from 'worker/services/sandbox/sandboxTypes'; export type RunAnalysisArgs = { @@ -10,7 +10,7 @@ export type RunAnalysisArgs = { export type RunAnalysisResult = StaticAnalysisResponse; export function createRunAnalysisTool( - agent: CodingAgentInterface, + agent: ICodingAgent, logger: StructuredLogger, ): ToolDefinition { return { diff --git a/worker/agents/tools/toolkit/virtual-filesystem.ts b/worker/agents/tools/toolkit/virtual-filesystem.ts new file mode 100644 index 00000000..0b1e0d7f --- /dev/null +++ b/worker/agents/tools/toolkit/virtual-filesystem.ts @@ -0,0 +1,81 @@ +import { ToolDefinition, ErrorResult } from '../types'; +import { StructuredLogger } from '../../../logger'; +import { ICodingAgent } from 'worker/agents/services/interfaces/ICodingAgent'; + +export type VirtualFilesystemArgs = { + command: 'list' | 'read'; + paths?: string[]; +}; + +export type VirtualFilesystemResult = + | { files: Array<{ path: string; purpose?: string; size: number }> } + | { files: Array<{ path: string; content: string }> } + | ErrorResult; + +export function createVirtualFilesystemTool( + agent: ICodingAgent, + logger: StructuredLogger +): ToolDefinition { + return { + type: 'function' as const, + function: { + name: 'virtual_filesystem', + description: `Interact with the virtual persistent workspace. +IMPORTANT: This reads from the VIRTUAL filesystem, NOT the sandbox. Files appear here immediately after generation and may not be deployed to sandbox yet.`, + parameters: { + type: 'object', + properties: { + command: { + type: 'string', + enum: ['list', 'read'], + description: 'Action to perform: "list" shows all files, "read" returns file contents', + }, + paths: { + type: 'array', + items: { type: 'string' }, + description: 'File paths to read (required when command="read"). 
Use relative paths from project root.', + }, + }, + required: ['command'], + }, + }, + implementation: async ({ command, paths }: VirtualFilesystemArgs) => { + try { + if (command === 'list') { + logger.info('Listing virtual filesystem files'); + + const files = agent.listFiles(); + + const fileList = files.map(file => ({ + path: file.filePath, + purpose: file.filePurpose, + size: file.fileContents.length + })); + + return { + files: fileList + }; + } else if (command === 'read') { + if (!paths || paths.length === 0) { + return { + error: 'paths array is required when command is "read"' + }; + } + + logger.info('Reading files from virtual filesystem', { count: paths.length }); + + return await agent.readFiles(paths); + } else { + return { + error: `Invalid command: ${command}. Must be "list" or "read"` + }; + } + } catch (error) { + logger.error('Error in virtual_filesystem', error); + return { + error: `Error accessing virtual filesystem: ${error instanceof Error ? error.message : 'Unknown error'}` + }; + } + }, + }; +} diff --git a/worker/agents/tools/toolkit/wait-for-debug.ts b/worker/agents/tools/toolkit/wait-for-debug.ts index 2852ea5a..59e31185 100644 --- a/worker/agents/tools/toolkit/wait-for-debug.ts +++ b/worker/agents/tools/toolkit/wait-for-debug.ts @@ -1,9 +1,9 @@ import { ToolDefinition } from '../types'; import { StructuredLogger } from '../../../logger'; -import { CodingAgentInterface } from 'worker/agents/services/implementations/CodingAgent'; +import { ICodingAgent } from 'worker/agents/services/interfaces/ICodingAgent'; export function createWaitForDebugTool( - agent: CodingAgentInterface, + agent: ICodingAgent, logger: StructuredLogger ): ToolDefinition, { status: string } | { error: string }> { return { diff --git a/worker/agents/tools/toolkit/wait-for-generation.ts b/worker/agents/tools/toolkit/wait-for-generation.ts index d34224c7..a599f8ff 100644 --- a/worker/agents/tools/toolkit/wait-for-generation.ts +++ b/worker/agents/tools/toolkit/wait-for-generation.ts @@ -1,9 +1,9 @@ import { ToolDefinition } from '../types'; import { StructuredLogger } from '../../../logger'; -import { CodingAgentInterface } from 'worker/agents/services/implementations/CodingAgent'; +import { ICodingAgent } from 'worker/agents/services/interfaces/ICodingAgent'; export function createWaitForGenerationTool( - agent: CodingAgentInterface, + agent: ICodingAgent, logger: StructuredLogger ): ToolDefinition, { status: string } | { error: string }> { return { diff --git a/worker/agents/tools/types.ts b/worker/agents/tools/types.ts index 3683227f..37f80502 100644 --- a/worker/agents/tools/types.ts +++ b/worker/agents/tools/types.ts @@ -1,4 +1,4 @@ -import { ChatCompletionFunctionTool } from 'openai/resources'; +import { ChatCompletionFunctionTool, ChatCompletionMessageFunctionToolCall } from 'openai/resources'; export interface MCPServerConfig { name: string; sseUrl: string; @@ -26,8 +26,8 @@ export type ToolDefinition< TResult = unknown > = ChatCompletionFunctionTool & { implementation: ToolImplementation; - onStart?: (args: TArgs) => void; - onComplete?: (args: TArgs, result: TResult) => void; + onStart?: (toolCall: ChatCompletionMessageFunctionToolCall, args: TArgs) => Promise; + onComplete?: (toolCall: ChatCompletionMessageFunctionToolCall, args: TArgs, result: TResult) => Promise; }; export type ExtractToolArgs = T extends ToolImplementation ? 
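// Illustrative sketch: the virtual_filesystem tool reads the agent's Durable
// Object file map (agent.listFiles() / agent.readFiles()) rather than the
// sandbox, so freshly generated files are visible before any deploy. The
// 'src/index.ts' path and the alias import paths are assumptions.
import { createVirtualFilesystemTool } from 'worker/agents/tools/toolkit/virtual-filesystem';
import type { ICodingAgent } from 'worker/agents/services/interfaces/ICodingAgent';

declare const agent: ICodingAgent;
declare const logger: Parameters<typeof createVirtualFilesystemTool>[1];

const vfs = createVirtualFilesystemTool(agent, logger);

const listing = await vfs.implementation({ command: 'list' });
if (!('error' in listing)) {
    console.log(`${listing.files.length} files in the virtual workspace`);
}

const contents = await vfs.implementation({ command: 'read', paths: ['src/index.ts'] });
console.log(contents);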
A : never; diff --git a/worker/agents/utils/common.ts b/worker/agents/utils/common.ts index 0b1625f5..a85dee62 100644 --- a/worker/agents/utils/common.ts +++ b/worker/agents/utils/common.ts @@ -1,3 +1,6 @@ +import { downloadR2Image, imageToBase64 } from "../../utils/images"; +import { ConversationMessage, mapImagesInMultiModalMessage } from "../inferutils/common"; + export function extractCommands(rawOutput: string, onlyInstallCommands: boolean = false): string[] { const commands: string[] = []; @@ -215,4 +218,25 @@ export function looksLikeCommand(text: string): boolean { ]; return commandIndicators.some((pattern) => pattern.test(text)); +} + +export async function prepareMessagesForInference(env: Env, messages: ConversationMessage[]) : Promise { + // For each multimodal image, convert the image to base64 data url + const processedMessages = await Promise.all(messages.map(m => { + return mapImagesInMultiModalMessage(structuredClone(m), async (c) => { + let url = c.image_url.url; + if (url.includes('base64,')) { + return c; + } + const image = await downloadR2Image(env, url); + return { + ...c, + image_url: { + ...c.image_url, + url: await imageToBase64(env, image) + }, + }; + }); + })); + return processedMessages; } \ No newline at end of file diff --git a/worker/agents/utils/conversationCompactifier.ts b/worker/agents/utils/conversationCompactifier.ts new file mode 100644 index 00000000..3804c9d4 --- /dev/null +++ b/worker/agents/utils/conversationCompactifier.ts @@ -0,0 +1,317 @@ +import { ConversationMessage, MessageRole, createUserMessage } from "../inferutils/common"; +import { executeInference } from "../inferutils/infer"; +import { StructuredLogger } from "../../logger"; +import { IdGenerator } from './idGenerator'; +import { OperationOptions } from "../operations/common"; +import type { ChatCompletionMessageFunctionToolCall } from 'openai/resources'; + +/** + * Compactification configuration constants + */ +export const COMPACTIFICATION_CONFIG = { + MAX_TURNS: 40, // Trigger after 40 conversation turns + MAX_ESTIMATED_TOKENS: 100000, + PRESERVE_RECENT_MESSAGES: 10, // Always keep last 10 messages uncompacted + CHARS_PER_TOKEN: 4, // Rough estimation: 1 token β‰ˆ 4 characters +} as const; + +/** + * Tool call renderer type for UI feedback during compactification + * Compatible with RenderToolCall from UserConversationProcessor + */ +export type CompactificationRenderer = (args: { + name: string; + status: 'start' | 'success' | 'error'; + args?: Record; + result?: string; +}) => void; + +/** + * Count conversation turns (user message to next user message) + */ +function countTurns(messages: ConversationMessage[]): number { + return messages.filter(m => m.role === 'user').length; +} + +/** + * Convert character count to estimated token count + */ +function tokensFromChars(chars: number): number { + return Math.ceil(chars / COMPACTIFICATION_CONFIG.CHARS_PER_TOKEN); +} + +/** + * Remove system context tags from message content + */ +function stripSystemContext(text: string): string { + return text.replace(/[\s\S]*?<\/system_context>\n?/gi, '').trim(); +} + +/** + * Estimate token count for messages (4 chars β‰ˆ 1 token) + */ +function estimateTokens(messages: ConversationMessage[]): number { + let totalChars = 0; + + for (const msg of messages) { + if (typeof msg.content === 'string') { + totalChars += msg.content.length; + } else if (Array.isArray(msg.content)) { + // Multi-modal content + for (const part of msg.content) { + if (part.type === 'text') { + totalChars += 
part.text.length; + } else if (part.type === 'image_url') { + // Images use ~1000 tokens each (approximate) + totalChars += 4000; + } + } + } + + // Account for tool calls + if (msg.tool_calls && Array.isArray(msg.tool_calls)) { + for (const tc of msg.tool_calls as ChatCompletionMessageFunctionToolCall[]) { + // Function name + if (tc.function?.name) { + totalChars += tc.function.name.length; + } + // Function arguments (JSON string) + if (tc.function?.arguments) { + totalChars += tc.function.arguments.length; + } + // Tool call structure overhead (id, type, etc.) - rough estimate + totalChars += 50; + } + } + } + + return tokensFromChars(totalChars); +} + +/** + * Check if compactification should be triggered + */ +export function shouldCompactify(messages: ConversationMessage[]): { + should: boolean; + reason?: 'turns' | 'tokens'; + turns: number; + estimatedTokens: number; +} { + const turns = countTurns(messages); + const estimatedTokens = estimateTokens(messages); + + console.log(`[ConversationCompactifier] shouldCompactify: turns=${turns}, estimatedTokens=${estimatedTokens}`); + + if (turns >= COMPACTIFICATION_CONFIG.MAX_TURNS) { + return { should: true, reason: 'turns', turns, estimatedTokens }; + } + + if (estimatedTokens >= COMPACTIFICATION_CONFIG.MAX_ESTIMATED_TOKENS) { + return { should: true, reason: 'tokens', turns, estimatedTokens }; + } + + return { should: false, turns, estimatedTokens }; +} + +/** + * Find the last valid turn boundary before the preserve threshold + * A turn boundary is right before a user message + */ +function findTurnBoundary(messages: ConversationMessage[], preserveCount: number): number { + // Start from the point where we want to split + const targetSplitIndex = messages.length - preserveCount; + + if (targetSplitIndex <= 0) { + return 0; + } + + // Walk backwards to find the nearest user message boundary + for (let i = targetSplitIndex; i >= 0; i--) { + if (messages[i].role === 'user') { + // Split right before this user message to preserve turn integrity + return i; + } + } + + // If no user message found, don't split + return 0; +} + +/** + * Generate LLM-powered conversation summary + * Sends the full conversation history as-is to the LLM with a summarization instruction + */ +async function generateConversationSummary( + messages: ConversationMessage[], + env: Env, + options: OperationOptions, + logger: StructuredLogger +): Promise { + try { + // Prepare summarization instruction + const summarizationInstruction = createUserMessage( + `Please provide a comprehensive summary of the entire conversation above. Your summary should: + +1. Capture the key features, changes, and fixes discussed +2. Note any recurring issues or important bugs mentioned +3. Highlight the current state of the project +4. Preserve critical technical details and decisions made +5. Maintain chronological flow of major changes and developments + +Format your summary as a cohesive, well-structured narrative. Focus on what matters for understanding the project's evolution and current state. 
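// Worked example of the trigger maths, as a sketch: at roughly 4 characters per
// token, the 100k-token ceiling corresponds to ~400k characters of history, and
// the turn counter only counts user messages. Alias import paths are assumptions.
import { shouldCompactify, COMPACTIFICATION_CONFIG } from 'worker/agents/utils/conversationCompactifier';
import type { ConversationMessage } from 'worker/agents/inferutils/common';

declare const history: ConversationMessage[];

const { should, reason, turns, estimatedTokens } = shouldCompactify(history);
if (should) {
    // reason is 'turns' once MAX_TURNS (40) user messages accumulate, or
    // 'tokens' once the estimate crosses MAX_ESTIMATED_TOKENS (100000).
    console.log(
        `compactifying after ${turns}/${COMPACTIFICATION_CONFIG.MAX_TURNS} turns, ~${estimatedTokens} tokens (${reason})`,
    );
}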
+ +Provide the summary now:` + ); + + logger.info('Generating conversation summary via LLM', { + messageCount: messages.length, + estimatedInputTokens: estimateTokens(messages) + }); + + // Send full conversation history + summarization request + const summaryResult = await executeInference({ + env, + messages: [...messages, summarizationInstruction], + agentActionName: 'conversationalResponse', + context: options.inferenceContext, + }); + + const summary = summaryResult.string.trim(); + + logger.info('Generated conversation summary', { + summaryLength: summary.length, + summaryTokens: tokensFromChars(summary.length) + }); + + return summary; + } catch (error) { + logger.error('Failed to generate conversation summary', { error }); + // Fallback to simple concatenation + return messages + .map(m => { + const content = typeof m.content === 'string' ? m.content : '[complex content]'; + return `${m.role}: ${stripSystemContext(content).substring(0, 200)}`; + }) + .join('\n') + .substring(0, 2000); + } +} + +/** + * Intelligent conversation compactification system + * + * Strategy: + * - Monitors turns (user message to user message) and token count + * - Triggers at 40 turns OR ~100k tokens + * - Uses LLM to generate intelligent summary + * - Preserves last 10 messages in full + * - Respects turn boundaries to avoid tool call fragmentation + */ +export async function compactifyContext( + runningHistory: ConversationMessage[], + env: Env, + options: OperationOptions, + toolCallRenderer: CompactificationRenderer, + logger: StructuredLogger +): Promise { + try { + // Check if compactification is needed on the running history + const analysis = shouldCompactify(runningHistory); + + if (!analysis.should) { + // No compactification needed + return runningHistory; + } + + logger.info('Compactification triggered', { + reason: analysis.reason, + turns: analysis.turns, + estimatedTokens: analysis.estimatedTokens, + totalRunningMessages: runningHistory.length, + }); + + // Find turn boundary for splitting + const splitIndex = findTurnBoundary( + runningHistory, + COMPACTIFICATION_CONFIG.PRESERVE_RECENT_MESSAGES + ); + + // Safety check: ensure we have something to compactify + if (splitIndex <= 0) { + logger.warn('Cannot find valid turn boundary for compactification, preserving all messages'); + return runningHistory; + } + + // Split messages + const messagesToSummarize = runningHistory.slice(0, splitIndex); + const recentMessages = runningHistory.slice(splitIndex); + + logger.info('Compactification split determined', { + summarizeCount: messagesToSummarize.length, + preserveCount: recentMessages.length, + splitIndex + }); + + toolCallRenderer({ + name: 'summarize_history', + status: 'start', + args: { + messageCount: messagesToSummarize.length, + recentCount: recentMessages.length + } + }); + + // Generate LLM-powered summary + const summary = await generateConversationSummary( + messagesToSummarize, + env, + options, + logger + ); + + // Create summary message - its conversationId will be the archive ID + const summarizedTurns = countTurns(messagesToSummarize); + const archiveId = `archive-${Date.now()}-${IdGenerator.generateConversationId()}`; + + const summaryMessage: ConversationMessage = { + role: 'assistant' as MessageRole, + content: `[Conversation History Summary: ${messagesToSummarize.length} messages, ${summarizedTurns} turns]\n[Archive ID: ${archiveId}]\n\n${summary}`, + conversationId: archiveId + }; + + toolCallRenderer({ + name: 'summarize_history', + status: 'success', + args: { + summary: 
summary.substring(0, 200) + '...', + archiveId + } + }); + + // Return summary + recent messages + const compactifiedHistory = [summaryMessage, ...recentMessages]; + + logger.info('Compactification completed with archival', { + originalMessageCount: runningHistory.length, + newMessageCount: compactifiedHistory.length, + compressionRatio: (compactifiedHistory.length / runningHistory.length).toFixed(2), + estimatedTokenSavings: analysis.estimatedTokens - estimateTokens(compactifiedHistory), + archivedMessageCount: messagesToSummarize.length, + archiveId + }); + + return compactifiedHistory; + + } catch (error) { + logger.error('Compactification failed, preserving original messages', { error }); + + // Safe fallback: if we have too many messages, keep recent ones + if (runningHistory.length > COMPACTIFICATION_CONFIG.PRESERVE_RECENT_MESSAGES * 3) { + const fallbackCount = COMPACTIFICATION_CONFIG.PRESERVE_RECENT_MESSAGES * 2; + logger.warn(`Applying emergency fallback: keeping last ${fallbackCount} messages`); + return runningHistory.slice(-fallbackCount); + } + + return runningHistory; + } +} diff --git a/worker/agents/utils/templates.ts b/worker/agents/utils/templates.ts new file mode 100644 index 00000000..b3c5aede --- /dev/null +++ b/worker/agents/utils/templates.ts @@ -0,0 +1,21 @@ +import type { TemplateDetails } from '../../services/sandbox/sandboxTypes'; + +/** + * Single source of truth for an in-memory "scratch" template. + * Used when starting from-scratch (general mode) or when no template fits. + */ +export function createScratchTemplateDetails(): TemplateDetails { + return { + name: 'scratch', + description: { selection: 'from-scratch baseline', usage: 'No template. Agent will scaffold as needed.' }, + fileTree: { path: '/', type: 'directory', children: [] }, + allFiles: {}, + language: 'typescript', + deps: {}, + frameworks: [], + importantFiles: [], + dontTouchFiles: [], + redactedFiles: [], + }; +} + diff --git a/worker/api/controllers/agent/controller.ts b/worker/api/controllers/agent/controller.ts index b40454f0..12cceda4 100644 --- a/worker/api/controllers/agent/controller.ts +++ b/worker/api/controllers/agent/controller.ts @@ -1,7 +1,8 @@ import { WebSocketMessageResponses } from '../../../agents/constants'; import { BaseController } from '../baseController'; import { generateId } from '../../../utils/idGenerator'; -import { CodeGenState } from '../../../agents/core/state'; +import { AgentState } from '../../../agents/core/state'; +import { BehaviorType, ProjectType } from '../../../agents/core/types'; import { getAgentStub, getTemplateForQuery } from '../../../agents'; import { AgentConnectionData, AgentPreviewResponse, CodeGenArgs } from './types'; import { ApiResponse, ControllerResponse } from '../types'; @@ -16,12 +17,22 @@ import { ImageType, uploadImage } from 'worker/utils/images'; import { ProcessedImageAttachment } from 'worker/types/image-attachment'; import { getTemplateImportantFiles } from 'worker/services/sandbox/utils'; -const defaultCodeGenArgs: CodeGenArgs = { - query: '', +const defaultCodeGenArgs: Partial = { language: 'typescript', frameworks: ['react', 'vite'], selectedTemplate: 'auto', - agentMode: 'deterministic', +}; + +const resolveBehaviorType = (body: CodeGenArgs): BehaviorType => { + if (body.behaviorType) return body.behaviorType; + const pt = body.projectType; + if (pt === 'presentation' || pt === 'workflow' || pt === 'general') return 'agentic'; + // default (including 'app' and when projectType omitted) + return 'phasic'; +}; + +const 
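// Illustrative sketch: where compactifyContext would sit in a conversation turn.
// It returns the history unchanged until shouldCompactify fires, then replaces
// the older turns with a single summary message while keeping the most recent
// messages verbatim. The runTurn wrapper and alias import paths are assumptions.
import { compactifyContext, type CompactificationRenderer } from 'worker/agents/utils/conversationCompactifier';
import type { ConversationMessage } from 'worker/agents/inferutils/common';
import type { OperationOptions } from 'worker/agents/operations/common';

async function compactBeforeInference(
    history: ConversationMessage[],
    env: Env,
    options: OperationOptions,
    logger: Parameters<typeof compactifyContext>[4],
): Promise<ConversationMessage[]> {
    const renderHook: CompactificationRenderer = ({ name, status }) => {
        console.log(`[${name}] ${status}`);
    };
    // Compact (if needed) before handing the history to the next inference call.
    return compactifyContext(history, env, options, renderHook, logger);
}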
resolveProjectType = (body: CodeGenArgs): ProjectType | 'auto' => { + return body.projectType || 'auto'; }; @@ -77,12 +88,13 @@ export class CodingAgentController extends BaseController { const agentId = generateId(); const modelConfigService = new ModelConfigService(env); + const projectType = resolveProjectType(body); + const behaviorType = resolveBehaviorType(body); + + this.logger.info(`Resolved behaviorType: ${behaviorType}, projectType: ${projectType} for agent ${agentId}`); // Fetch all user model configs, api keys and agent instance at once - const [userConfigsRecord, agentInstance] = await Promise.all([ - modelConfigService.getUserModelConfigs(user.id), - getAgentStub(env, agentId) - ]); + const userConfigsRecord = await modelConfigService.getUserModelConfigs(user.id); // Convert Record to Map and extract only ModelConfig properties const userModelConfigs = new Map(); @@ -110,8 +122,9 @@ export class CodingAgentController extends BaseController { this.logger.info(`Initialized inference context for user ${user.id}`, { modelConfigsCount: Object.keys(userModelConfigs).length, }); + this.logger.info(`Creating project of type: ${projectType}`); - const { templateDetails, selection } = await getTemplateForQuery(env, inferenceContext, query, body.images, this.logger); + const { templateDetails, selection, projectType: finalProjectType } = await getTemplateForQuery(env, inferenceContext, query, projectType, body.images, this.logger); const websocketUrl = `${url.protocol === 'https:' ? 'wss:' : 'ws:'}//${url.host}/api/agent/${agentId}/ws`; const httpStatusUrl = `${url.origin}/api/agent/${agentId}`; @@ -122,19 +135,22 @@ export class CodingAgentController extends BaseController { return uploadImage(env, image, ImageType.UPLOADS); })); } - + writer.write({ message: 'Code generation started', agentId: agentId, websocketUrl, httpStatusUrl, + behaviorType, + projectType: finalProjectType, template: { name: templateDetails.name, files: getTemplateImportantFiles(templateDetails), } }); + const agentInstance = await getAgentStub(env, agentId, { behaviorType, projectType: finalProjectType }); - const agentPromise = agentInstance.initialize({ + const baseInitArgs = { query, language: body.language || defaultCodeGenArgs.language, frameworks: body.frameworks || defaultCodeGenArgs.frameworks, @@ -144,9 +160,12 @@ export class CodingAgentController extends BaseController { onBlueprintChunk: (chunk: string) => { writer.write({chunk}); }, - templateInfo: { templateDetails, selection }, - }, body.agentMode || defaultCodeGenArgs.agentMode) as Promise; - agentPromise.then(async (_state: CodeGenState) => { + } as const; + + const initArgs = { ...baseInitArgs, templateInfo: { templateDetails, selection } } + + const agentPromise = agentInstance.initialize(initArgs) as Promise; + agentPromise.then(async (_state: AgentState) => { writer.write("terminate"); writer.close(); this.logger.info(`Agent ${agentId} terminated successfully`); @@ -335,4 +354,4 @@ export class CodingAgentController extends BaseController { return appError; } } -} \ No newline at end of file +} diff --git a/worker/api/controllers/agent/types.ts b/worker/api/controllers/agent/types.ts index 42b8ae6e..4c411d54 100644 --- a/worker/api/controllers/agent/types.ts +++ b/worker/api/controllers/agent/types.ts @@ -1,12 +1,14 @@ -import { PreviewType } from "../../../services/sandbox/sandboxTypes"; +import type { PreviewType } from "../../../services/sandbox/sandboxTypes"; import type { ImageAttachment } from '../../../types/image-attachment'; +import 
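// Illustrative mapping of request bodies to the behavior the controller now
// resolves (a sketch of the rules above, not exported code): an explicit
// behaviorType always wins; otherwise presentation/workflow/general projects run
// the agentic behavior, and everything else (including plain 'app' or an omitted
// projectType) stays phasic. The alias import path is an assumption.
import type { CodeGenArgs } from 'worker/api/controllers/agent/types';

const behaviorExamples: Array<{ body: Partial<CodeGenArgs>; resolvesTo: 'agentic' | 'phasic' }> = [
    { body: { query: 'build a todo app' },                                      resolvesTo: 'phasic' },
    { body: { query: 'quarterly review deck', projectType: 'presentation' },    resolvesTo: 'agentic' },
    { body: { query: 'general helper', projectType: 'general' },                resolvesTo: 'agentic' },
    { body: { query: 'todo app', projectType: 'app', behaviorType: 'agentic' }, resolvesTo: 'agentic' },
];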
type { BehaviorType, ProjectType } from '../../../agents/core/types'; export interface CodeGenArgs { query: string; language?: string; frameworks?: string[]; selectedTemplate?: string; - agentMode: 'deterministic' | 'smart'; + behaviorType?: BehaviorType; + projectType?: ProjectType; images?: ImageAttachment[]; } @@ -18,6 +20,5 @@ export interface AgentConnectionData { agentId: string; } -export interface AgentPreviewResponse extends PreviewType { -} - \ No newline at end of file +export type AgentPreviewResponse = PreviewType; + diff --git a/worker/api/controllers/githubExporter/controller.ts b/worker/api/controllers/githubExporter/controller.ts index ae8057f1..3825325e 100644 --- a/worker/api/controllers/githubExporter/controller.ts +++ b/worker/api/controllers/githubExporter/controller.ts @@ -5,6 +5,7 @@ import { GitHubExporterOAuthProvider } from '../../../services/oauth/github-expo import { getAgentStub } from '../../../agents'; import { createLogger } from '../../../logger'; import { AppService } from '../../../database/services/AppService'; +import { ExportResult } from 'worker/agents/core/types'; export interface GitHubExportData { success: boolean; @@ -164,13 +165,16 @@ export class GitHubExporterController extends BaseController { this.logger.info('Pushing files to repository', { agentId, repositoryUrl }); const agentStub = await getAgentStub(env, agentId); - const pushResult = await agentStub.pushToGitHub({ - cloneUrl, - repositoryHtmlUrl: repositoryUrl, - isPrivate, - token, - email: 'vibesdk-bot@cloudflare.com', - username + const pushResult: ExportResult = await agentStub.exportProject({ + kind: 'github', + github: { + cloneUrl, + repositoryHtmlUrl: repositoryUrl, + isPrivate, + token, + email: 'vibesdk-bot@cloudflare.com', + username + } }); if (!pushResult?.success) { diff --git a/worker/api/websocketTypes.ts b/worker/api/websocketTypes.ts index 6831ab55..450a6fc9 100644 --- a/worker/api/websocketTypes.ts +++ b/worker/api/websocketTypes.ts @@ -1,5 +1,5 @@ import type { CodeReviewOutputType, FileConceptType, FileOutputType } from "../agents/schemas"; -import type { CodeGenState } from "../agents/core/state"; +import type { AgentState } from "../agents/core/state"; import type { ConversationState } from "../agents/inferutils/common"; import type { CodeIssue, RuntimeError, StaticAnalysisResponse, TemplateDetails } from "../services/sandbox/sandboxTypes"; import type { CodeFixResult } from "../services/code-fixer"; @@ -13,12 +13,12 @@ type ErrorMessage = { type StateMessage = { type: 'cf_agent_state'; - state: CodeGenState; + state: AgentState; }; type AgentConnectedMessage = { type: 'agent_connected'; - state: CodeGenState; + state: AgentState; templateDetails: TemplateDetails; }; @@ -478,4 +478,4 @@ type WebSocketMessagePayload = Extract = Omit, 'type'>; \ No newline at end of file +export type WebSocketMessageData = Omit, 'type'>; diff --git a/worker/index.ts b/worker/index.ts index 449b5a05..d0a0bbe4 100644 --- a/worker/index.ts +++ b/worker/index.ts @@ -1,5 +1,4 @@ import { createLogger } from './logger'; -import { SmartCodeGeneratorAgent } from './agents/core/smartGeneratorAgent'; import { isDispatcherAvailable } from './utils/dispatcherUtils'; import { createApp } from './app'; // import * as Sentry from '@sentry/cloudflare'; @@ -13,10 +12,10 @@ import { handleGitProtocolRequest, isGitProtocolRequest } from './api/handlers/g // Durable Object and Service exports export { UserAppSandboxService, DeployerService } from './services/sandbox/sandboxSdkClient'; +export { 
CodeGeneratorAgent } from './agents/core/codingAgent'; -// export const CodeGeneratorAgent = Sentry.instrumentDurableObjectWithSentry(sentryOptions, SmartCodeGeneratorAgent); +// export const CodeGeneratorAgent = Sentry.instrumentDurableObjectWithSentry(sentryOptions, CodeGeneratorAgent); // export const DORateLimitStore = Sentry.instrumentDurableObjectWithSentry(sentryOptions, BaseDORateLimitStore); -export const CodeGeneratorAgent = SmartCodeGeneratorAgent; export const DORateLimitStore = BaseDORateLimitStore; // Logger for the main application and handlers diff --git a/worker/services/sandbox/BaseSandboxService.ts b/worker/services/sandbox/BaseSandboxService.ts index f1056391..cef1a8cb 100644 --- a/worker/services/sandbox/BaseSandboxService.ts +++ b/worker/services/sandbox/BaseSandboxService.ts @@ -28,12 +28,15 @@ import { GetLogsResponse, ListInstancesResponse, TemplateDetails, + TemplateInfo, + InstanceCreationRequest, } from './sandboxTypes'; import { createObjectLogger, StructuredLogger } from '../../logger'; import { env } from 'cloudflare:workers' import { ZipExtractor } from './zipExtractor'; import { FileTreeBuilder } from './fileTreeBuilder'; +import { DeploymentTarget } from 'worker/agents/core/types'; /** * Streaming event for enhanced command execution @@ -45,16 +48,6 @@ export interface StreamEvent { error?: string; timestamp: Date; } - -export interface TemplateInfo { - name: string; - language?: string; - frameworks?: string[]; - description: { - selection: string; - usage: string; - }; -} const templateDetailsCache: Record = {}; @@ -100,7 +93,8 @@ export abstract class BaseSandboxService { name: t.name, language: t.language, frameworks: t.frameworks || [], - description: t.description + description: t.description, + projectType: t.projectType || 'app' })), count: filteredTemplates.length }; @@ -214,8 +208,11 @@ export abstract class BaseSandboxService { /** * Create a new instance from a template * Returns: { success: boolean, instanceId?: string, error?: string } + * @param options - Instance creation options */ - abstract createInstance(templateName: string, projectName: string, webhookUrl?: string, localEnvVars?: Record): Promise; + abstract createInstance( + options: InstanceCreationRequest + ): Promise; /** * List all instances across all sessions @@ -305,10 +302,10 @@ export abstract class BaseSandboxService { * Deploy instance to Cloudflare Workers * Returns: { success: boolean, message: string, deployedUrl?: string, deploymentId?: string, error?: string } */ - abstract deployToCloudflareWorkers(instanceId: string): Promise; + abstract deployToCloudflareWorkers(instanceId: string, target?: DeploymentTarget): Promise; // ========================================== // GITHUB INTEGRATION (Required) // ========================================== -} \ No newline at end of file +} diff --git a/worker/services/sandbox/remoteSandboxService.ts b/worker/services/sandbox/remoteSandboxService.ts index 42f06085..3d33622e 100644 --- a/worker/services/sandbox/remoteSandboxService.ts +++ b/worker/services/sandbox/remoteSandboxService.ts @@ -14,7 +14,6 @@ import { GetLogsResponse, ListInstancesResponse, BootstrapResponseSchema, - BootstrapRequest, GetInstanceResponseSchema, BootstrapStatusResponseSchema, WriteFilesResponseSchema, @@ -29,8 +28,10 @@ import { GitHubPushRequest, GitHubPushResponse, GitHubPushResponseSchema, + InstanceCreationRequest, } from './sandboxTypes'; import { BaseSandboxService } from "./BaseSandboxService"; +import { DeploymentTarget } from 
'worker/agents/core/types'; import { env } from 'cloudflare:workers' import z from 'zod'; import { FileOutputType } from 'worker/agents/schemas'; @@ -117,14 +118,10 @@ export class RemoteSandboxServiceClient extends BaseSandboxService{ /** * Create a new runner instance. */ - async createInstance(templateName: string, projectName: string, webhookUrl?: string, localEnvVars?: Record): Promise { - const requestBody: BootstrapRequest = { - templateName, - projectName, - ...(webhookUrl && { webhookUrl }), - ...(localEnvVars && { envVars: localEnvVars }) - }; - return this.makeRequest('/instances', 'POST', BootstrapResponseSchema, requestBody); + async createInstance( + options: InstanceCreationRequest + ): Promise { + return this.makeRequest('/instances', 'POST', BootstrapResponseSchema, options); } /** @@ -193,7 +190,14 @@ export class RemoteSandboxServiceClient extends BaseSandboxService{ * @param instanceId The ID of the runner instance to deploy * @param credentials Optional Cloudflare deployment credentials */ - async deployToCloudflareWorkers(instanceId: string): Promise { + async deployToCloudflareWorkers(instanceId: string, target: DeploymentTarget = 'platform'): Promise { + if (target === 'user') { + return { + success: false, + message: 'User-targeted deployments are not available with remote sandbox runner', + error: 'unsupported_target' + }; + } return this.makeRequest(`/instances/${instanceId}/deploy`, 'POST', DeploymentResultSchema); } diff --git a/worker/services/sandbox/sandboxSdkClient.ts b/worker/services/sandbox/sandboxSdkClient.ts index e987e209..858ea534 100644 --- a/worker/services/sandbox/sandboxSdkClient.ts +++ b/worker/services/sandbox/sandboxSdkClient.ts @@ -22,6 +22,8 @@ import { GetLogsResponse, ListInstancesResponse, StoredError, + TemplateFile, + InstanceCreationRequest, } from './sandboxTypes'; import { createObjectLogger } from '../../logger'; @@ -32,6 +34,7 @@ import { buildDeploymentConfig, parseWranglerConfig, deployToDispatch, + deployWorker, } from '../deployer/deploy'; import { createAssetManifest @@ -43,12 +46,12 @@ import { ResourceProvisioningResult } from './types'; import { getPreviewDomain } from '../../utils/urls'; import { isDev } from 'worker/utils/envs' import { FileTreeBuilder } from './fileTreeBuilder'; +import { DeploymentTarget } from 'worker/agents/core/types'; // Export the Sandbox class in your Worker export { Sandbox as UserAppSandboxService, Sandbox as DeployerService} from "@cloudflare/sandbox"; interface InstanceMetadata { - templateName: string; projectName: string; startTime: string; webhookUrl?: string; @@ -230,24 +233,24 @@ export class SandboxSdkClient extends BaseSandboxService { } } - /** Write a binary file to the sandbox using small base64 chunks to avoid large control messages. */ - private async writeBinaryFileViaBase64(targetPath: string, data: ArrayBuffer, bytesPerChunk: number = 16 * 1024): Promise { - const dir = targetPath.includes('/') ? 
targetPath.slice(0, targetPath.lastIndexOf('/')) : '.'; - // Ensure directory and clean target file - await this.safeSandboxExec(`mkdir -p '${dir}'`); - await this.safeSandboxExec(`rm -f '${targetPath}'`); - - const buffer = new Uint8Array(data); - for (let i = 0; i < buffer.length; i += bytesPerChunk) { - const chunk = buffer.subarray(i, Math.min(i + bytesPerChunk, buffer.length)); - const base64Chunk = btoa(String.fromCharCode(...chunk)); - // Append decoded bytes into the target file inside the sandbox - const appendResult = await this.safeSandboxExec(`printf '%s' '${base64Chunk}' | base64 -d >> '${targetPath}'`); - if (appendResult.exitCode !== 0) { - throw new Error(`Failed to append to ${targetPath}: ${appendResult.stderr}`); - } - } - } + // /** Write a binary file to the sandbox using small base64 chunks to avoid large control messages. */ + // private async writeBinaryFileViaBase64(targetPath: string, data: ArrayBuffer, bytesPerChunk: number = 16 * 1024): Promise { + // const dir = targetPath.includes('/') ? targetPath.slice(0, targetPath.lastIndexOf('/')) : '.'; + // // Ensure directory and clean target file + // await this.safeSandboxExec(`mkdir -p '${dir}'`); + // await this.safeSandboxExec(`rm -f '${targetPath}'`); + + // const buffer = new Uint8Array(data); + // for (let i = 0; i < buffer.length; i += bytesPerChunk) { + // const chunk = buffer.subarray(i, Math.min(i + bytesPerChunk, buffer.length)); + // const base64Chunk = btoa(String.fromCharCode(...chunk)); + // // Append decoded bytes into the target file inside the sandbox + // const appendResult = await this.safeSandboxExec(`printf '%s' '${base64Chunk}' | base64 -d >> '${targetPath}'`); + // if (appendResult.exitCode !== 0) { + // throw new Error(`Failed to append to ${targetPath}: ${appendResult.stderr}`); + // } + // } + // } /** * Write multiple files efficiently using a single shell script @@ -255,7 +258,7 @@ export class SandboxSdkClient extends BaseSandboxService { * Uses base64 encoding to handle all content safely */ private async writeFilesViaScript( - files: Array<{path: string, content: string}>, + files: TemplateFile[], session: ExecutionSession ): Promise> { if (files.length === 0) return []; @@ -265,8 +268,8 @@ export class SandboxSdkClient extends BaseSandboxService { // Generate shell script const scriptLines = ['#!/bin/bash']; - for (const { path, content } of files) { - const utf8Bytes = new TextEncoder().encode(content); + for (const { filePath, fileContents } of files) { + const utf8Bytes = new TextEncoder().encode(fileContents); // Convert bytes to base64 in chunks to avoid stack overflow const chunkSize = 8192; @@ -286,7 +289,7 @@ export class SandboxSdkClient extends BaseSandboxService { const base64 = base64Chunks.join(''); scriptLines.push( - `mkdir -p "$(dirname "${path}")" && echo '${base64}' | base64 -d > "${path}" && echo "OK:${path}" || echo "FAIL:${path}"` + `mkdir -p "$(dirname "${filePath}")" && echo '${base64}' | base64 -d > "${filePath}" && echo "OK:${filePath}" || echo "FAIL:${filePath}"` ); } @@ -295,7 +298,7 @@ export class SandboxSdkClient extends BaseSandboxService { try { // Write script (1 request) - const writeResult = await session.writeFile(scriptPath, script); + const writeResult = await session.writeFile(scriptPath, script); // TODO: Checksum integrity verification if (!writeResult.success) { throw new Error('Failed to write batch script'); } @@ -311,10 +314,10 @@ export class SandboxSdkClient extends BaseSandboxService { if (match[1]) successPaths.add(match[1]); } - const 
results = files.map(({ path }) => ({ - file: path, - success: successPaths.has(path), - error: successPaths.has(path) ? undefined : 'Write failed' + const results = files.map(({ filePath }) => ({ + file: filePath, + success: successPaths.has(filePath), + error: successPaths.has(filePath) ? undefined : 'Write failed' })); const successCount = successPaths.size; @@ -337,14 +340,50 @@ export class SandboxSdkClient extends BaseSandboxService { } catch (error) { this.logger.error('Batch write failed', error); - return files.map(({ path }) => ({ - file: path, + return files.map(({ filePath }) => ({ + file: filePath, success: false, error: error instanceof Error ? error.message : 'Unknown error' })); } } + async writeFilesBulk(instanceId: string, files: TemplateFile[]): Promise { + try { + const session = await this.getInstanceSession(instanceId); + // Use batch script for efficient writing (3 requests for any number of files) + const filesToWrite = files.map(file => ({ + filePath: `/workspace/${instanceId}/${file.filePath}`, + fileContents: file.fileContents + })); + + const writeResults = await this.writeFilesViaScript(filesToWrite, session); + + // Map results back to original format + const results: WriteFilesResponse['results'] = []; + for (const writeResult of writeResults) { + results.push({ + file: writeResult.file.replace(`/workspace/${instanceId}/`, ''), + success: writeResult.success, + error: writeResult.error + }); + } + + return { + success: true, + results, + message: 'Files written successfully' + }; + } catch (error) { + this.logger.error('writeFiles', error, { instanceId }); + return { + success: false, + results: files.map(f => ({ file: f.filePath, success: false, error: 'Instance error' })), + message: 'Failed to write files' + }; + } + } + async updateProjectName(instanceId: string, projectName: string): Promise { try { await this.updateProjectConfiguration(instanceId, projectName); @@ -431,48 +470,6 @@ export class SandboxSdkClient extends BaseSandboxService { throw new Error('No available ports found in range 8001-8999'); } - - private async checkTemplateExists(templateName: string): Promise { - // Single command to check if template directory and package.json both exist - const checkResult = await this.safeSandboxExec(`test -f ${templateName}/package.json && echo "exists" || echo "missing"`); - return checkResult.exitCode === 0 && checkResult.stdout.trim() === "exists"; - } - - async downloadTemplate(templateName: string, downloadDir?: string) : Promise { - // Fetch the zip file from R2 - const downloadUrl = downloadDir ? 
`${downloadDir}/${templateName}.zip` : `${templateName}.zip`; - this.logger.info(`Fetching object: ${downloadUrl} from R2 bucket`); - const r2Object = await env.TEMPLATES_BUCKET.get(downloadUrl); - - if (!r2Object) { - throw new Error(`Object '${downloadUrl}' not found in bucket`); - } - - const zipData = await r2Object.arrayBuffer(); - - this.logger.info(`Downloaded zip file (${zipData.byteLength} bytes)`); - return zipData; - } - - private async ensureTemplateExists(templateName: string, downloadDir?: string, isInstance: boolean = false) { - if (!await this.checkTemplateExists(templateName)) { - // Download and extract template - this.logger.info(`Template doesnt exist, Downloading template from: ${templateName}`); - - const zipData = await this.downloadTemplate(templateName, downloadDir); - // Stream zip to sandbox in safe base64 chunks and write directly as binary - await this.writeBinaryFileViaBase64(`${templateName}.zip`, zipData); - this.logger.info(`Wrote zip file to sandbox in chunks: ${templateName}.zip`); - - const setupResult = await this.safeSandboxExec(`unzip -o -q ${templateName}.zip -d ${isInstance ? '.' : templateName}`); - - if (setupResult.exitCode !== 0) { - throw new Error(`Failed to download/extract template: ${setupResult.stderr}`); - } - } else { - this.logger.info(`Template already exists`); - } - } private async buildFileTree(instanceId: string): Promise { try { @@ -537,7 +534,6 @@ export class SandboxSdkClient extends BaseSandboxService { // Create lightweight instance details from metadata const instanceDetails: InstanceDetails = { runId: instanceId, - templateName: metadata.templateName, startTime: new Date(metadata.startTime), uptime: Math.floor((Date.now() - new Date(metadata.startTime).getTime()) / 1000), directory: instanceId, @@ -631,7 +627,7 @@ export class SandboxSdkClient extends BaseSandboxService { return false; } - private async startDevServer(instanceId: string, port: number): Promise { + private async startDevServer(instanceId: string, initCommand: string, port: number): Promise { try { // Use session-based process management // Note: Environment variables should already be set via setLocalEnvVars @@ -639,7 +635,7 @@ export class SandboxSdkClient extends BaseSandboxService { // Start process with env vars inline for those not in .dev.vars const process = await session.startProcess( - `VITE_LOGGER_TYPE=json PORT=${port} monitor-cli process start --instance-id ${instanceId} --port ${port} -- bun run dev` + `VITE_LOGGER_TYPE=json PORT=${port} monitor-cli process start --instance-id ${instanceId} --port ${port} -- ${initCommand}` ); this.logger.info('Development server started', { instanceId, processId: process.id }); @@ -898,7 +894,12 @@ export class SandboxSdkClient extends BaseSandboxService { } } - private async setupInstance(instanceId: string, projectName: string, localEnvVars?: Record): Promise<{previewURL: string, tunnelURL: string, processId: string, allocatedPort: number} | undefined> { + private async setupInstance( + instanceId: string, + projectName: string, + initCommand: string, + localEnvVars?: Record, + ): Promise<{previewURL: string, tunnelURL: string, processId: string, allocatedPort: number} | undefined> { try { const sandbox = this.getSandbox(); // Update project configuration with the specified project name @@ -924,12 +925,11 @@ export class SandboxSdkClient extends BaseSandboxService { this.logger.warn('Failed to store wrangler config in KV', { instanceId, error: error instanceof Error ? 
error.message : 'Unknown error' }); // Non-blocking - continue with setup } - + // If on local development, start cloudflared tunnel + let tunnelUrlPromise = Promise.resolve(''); // Allocate single port for both dev server and tunnel const allocatedPort = await this.allocateAvailablePort(); - // If on local development, start cloudflared tunnel - let tunnelUrlPromise = Promise.resolve(''); if (isDev(env) || env.USE_TUNNEL_FOR_PREVIEW) { this.logger.info('Starting cloudflared tunnel for local development', { instanceId }); tunnelUrlPromise = this.startCloudflaredTunnel(instanceId, allocatedPort); @@ -949,7 +949,7 @@ export class SandboxSdkClient extends BaseSandboxService { await this.setLocalEnvVars(instanceId, localEnvVars); } // Start dev server on allocated port - const processId = await this.startDevServer(instanceId, allocatedPort); + const processId = await this.startDevServer(instanceId, initCommand, allocatedPort); this.logger.info('Instance created successfully', { instanceId, processId, port: allocatedPort }); // Expose the same port for preview URL @@ -984,46 +984,15 @@ export class SandboxSdkClient extends BaseSandboxService { return undefined; } - - private async fetchDontTouchFiles(templateName: string): Promise { - let donttouchFiles: string[] = []; - try { - // Read .donttouch_files.json using default session with full path - const session = await this.getDefaultSession(); - const donttouchFile = await session.readFile(`${templateName}/.donttouch_files.json`); - if (!donttouchFile.success) { - this.logger.warn('Failed to read .donttouch_files.json'); - return donttouchFiles; - } - donttouchFiles = JSON.parse(donttouchFile.content) as string[]; - } catch (error) { - this.logger.warn(`Failed to read .donttouch_files.json: ${error instanceof Error ? error.message : 'Unknown error'}`); - } - return donttouchFiles; - } - - private async fetchRedactedFiles(templateName: string): Promise { - let redactedFiles: string[] = []; - try { - // Read .redacted_files.json using default session with full path - const session = await this.getDefaultSession(); - const redactedFile = await session.readFile(`${templateName}/.redacted_files.json`); - if (!redactedFile.success) { - this.logger.warn('Failed to read .redacted_files.json'); - return redactedFiles; - } - redactedFiles = JSON.parse(redactedFile.content) as string[]; - } catch (error) { - this.logger.warn(`Failed to read .redacted_files.json: ${error instanceof Error ? 
error.message : 'Unknown error'}`); - } - return redactedFiles; - } - - async createInstance(templateName: string, projectName: string, webhookUrl?: string, localEnvVars?: Record): Promise { + + async createInstance( + options: InstanceCreationRequest + ): Promise { + const { files, projectName, webhookUrl, envVars, initCommand } = options; try { // Environment variables will be set via session creation on first use - if (localEnvVars && Object.keys(localEnvVars).length > 0) { - this.logger.info('Environment variables will be configured via session', { envVars: Object.keys(localEnvVars) }); + if (envVars && Object.keys(envVars).length > 0) { + this.logger.info('Environment variables will be configured via session', { envVars: Object.keys(envVars) }); } let instanceId: string; if (env.ALLOCATION_STRATEGY === 'one_to_one') { @@ -1057,22 +1026,27 @@ export class SandboxSdkClient extends BaseSandboxService { } else { instanceId = `i-${generateId()}`; } - this.logger.info('Creating sandbox instance', { instanceId, templateName, projectName }); - - let results: {previewURL: string, tunnelURL: string, processId: string, allocatedPort: number} | undefined; - await this.ensureTemplateExists(templateName); + this.logger.info('Creating sandbox instance', { instanceId, projectName }); - const [donttouchFiles, redactedFiles] = await Promise.all([ - this.fetchDontTouchFiles(templateName), - this.fetchRedactedFiles(templateName) - ]); + const dontTouchFile = files.find(f => f.filePath === '.donttouch_files.json'); + const dontTouchFiles = dontTouchFile ? JSON.parse(dontTouchFile.fileContents) : []; - const moveTemplateResult = await this.safeSandboxExec(`mv ${templateName} ${instanceId}`); - if (moveTemplateResult.exitCode !== 0) { - throw new Error(`Failed to move template: ${moveTemplateResult.stderr}`); + const redactedFile = files.find(f => f.filePath === '.redacted_files.json'); + const redactedFiles = redactedFile ? 
JSON.parse(redactedFile.fileContents) : []; + + // Create directory for instance + await this.sandbox.exec(`mkdir -p /workspace/${instanceId}`); + + // Write files in bulk to sandbox + const rawResults = await this.writeFilesBulk(instanceId, files); + if (!rawResults.success) { + return { + success: false, + error: 'Failed to write files to sandbox' + }; } - - const setupPromise = () => this.setupInstance(instanceId, projectName, localEnvVars); + + const setupPromise = () => this.setupInstance(instanceId, projectName, initCommand, envVars); const setupResult = await setupPromise(); if (!setupResult) { return { @@ -1080,18 +1054,16 @@ export class SandboxSdkClient extends BaseSandboxService { error: 'Failed to setup instance' }; } - results = setupResult; // Store instance metadata const metadata = { - templateName: templateName, projectName: projectName, startTime: new Date().toISOString(), webhookUrl: webhookUrl, - previewURL: results?.previewURL, - processId: results?.processId, - tunnelURL: results?.tunnelURL, - allocatedPort: results?.allocatedPort, - donttouch_files: donttouchFiles, + previewURL: setupResult?.previewURL, + processId: setupResult?.processId, + tunnelURL: setupResult?.tunnelURL, + allocatedPort: setupResult?.allocatedPort, + donttouch_files: dontTouchFiles, redacted_files: redactedFiles, }; await this.storeInstanceMetadata(instanceId, metadata); @@ -1099,13 +1071,13 @@ export class SandboxSdkClient extends BaseSandboxService { return { success: true, runId: instanceId, - message: `Successfully created instance from template ${templateName}`, - previewURL: results?.previewURL, - tunnelURL: results?.tunnelURL, - processId: results?.processId, + message: `Successfully created instance ${instanceId}`, + previewURL: setupResult?.previewURL, + tunnelURL: setupResult?.tunnelURL, + processId: setupResult?.processId, }; } catch (error) { - this.logger.error('createInstance', error, { templateName: templateName, projectName: projectName }); + this.logger.error(`Failed to create instance for project ${projectName}`, error); return { success: false, error: `Failed to create instance: ${error instanceof Error ? 
error.message : 'Unknown error'}` @@ -1135,7 +1107,6 @@ export class SandboxSdkClient extends BaseSandboxService { const instanceDetails: InstanceDetails = { runId: instanceId, - templateName: metadata.templateName, startTime, uptime, directory: instanceId, @@ -1276,31 +1247,13 @@ export class SandboxSdkClient extends BaseSandboxService { async writeFiles(instanceId: string, files: WriteFilesRequest['files']): Promise { try { const session = await this.getInstanceSession(instanceId); - - const results = []; - // Filter out donttouch files const metadata = await this.getInstanceMetadata(instanceId); const donttouchFiles = new Set(metadata.donttouch_files); const filteredFiles = files.filter(file => !donttouchFiles.has(file.filePath)); - - // Use batch script for efficient writing (3 requests for any number of files) - const filesToWrite = filteredFiles.map(file => ({ - path: `/workspace/${instanceId}/${file.filePath}`, - content: file.fileContents - })); - - const writeResults = await this.writeFilesViaScript(filesToWrite, session); - - // Map results back to original format - for (const writeResult of writeResults) { - results.push({ - file: writeResult.file.replace(`/workspace/${instanceId}/`, ''), - success: writeResult.success, - error: writeResult.error - }); - } + const rawResults = await this.writeFilesBulk(instanceId, filteredFiles); + const results = rawResults.results; // Add files that were not written to results const wereDontTouchFiles = files.filter(file => donttouchFiles.has(file.filePath)); @@ -1574,8 +1527,6 @@ export class SandboxSdkClient extends BaseSandboxService { async clearInstanceErrors(instanceId: string): Promise { try { - let clearedCount = 0; - // Try enhanced error system first - clear ALL errors try { const cmd = `timeout 10s monitor-cli errors clear -i ${instanceId} --confirm`; @@ -1600,11 +1551,11 @@ export class SandboxSdkClient extends BaseSandboxService { this.logger.warn('Error clearing unavailable, falling back to legacy', enhancedError); } - this.logger.info(`Cleared ${clearedCount} errors for instance ${instanceId}`); + this.logger.info(`Cleared errors for instance ${instanceId}`); return { success: true, - message: `Cleared ${clearedCount} errors` + message: `Cleared errors` }; } catch (error) { this.logger.error('clearInstanceErrors', error, { instanceId }); @@ -1785,7 +1736,7 @@ export class SandboxSdkClient extends BaseSandboxService { // ========================================== // DEPLOYMENT // ========================================== - async deployToCloudflareWorkers(instanceId: string): Promise { + async deployToCloudflareWorkers(instanceId: string, target: DeploymentTarget = 'platform'): Promise { try { this.logger.info('Starting deployment', { instanceId }); @@ -1819,7 +1770,7 @@ export class SandboxSdkClient extends BaseSandboxService { // Step 2: Parse wrangler config from KV this.logger.info('Reading wrangler configuration from KV'); - let wranglerConfigContent = await env.VibecoderStore.get(this.getWranglerKVKey(instanceId)); + const wranglerConfigContent = await env.VibecoderStore.get(this.getWranglerKVKey(instanceId)); if (!wranglerConfigContent) { // This should never happen unless KV itself has some issues @@ -1925,8 +1876,14 @@ export class SandboxSdkClient extends BaseSandboxService { ); // Step 7: Deploy using pure function - this.logger.info('Deploying to Cloudflare'); - if ('DISPATCH_NAMESPACE' in env) { + const useDispatch = target === 'platform'; + this.logger.info('Deploying to Cloudflare', { target }); + + if 
(useDispatch) { + if (!('DISPATCH_NAMESPACE' in env)) { + throw new Error('DISPATCH_NAMESPACE not found in environment variables, cannot deploy without dispatch namespace'); + } + this.logger.info('Using dispatch namespace', { dispatchNamespace: env.DISPATCH_NAMESPACE }); await deployToDispatch( { @@ -1939,7 +1896,13 @@ export class SandboxSdkClient extends BaseSandboxService { config.assets ); } else { - throw new Error('DISPATCH_NAMESPACE not found in environment variables, cannot deploy without dispatch namespace'); + await deployWorker( + deployConfig, + fileContents, + additionalModules, + config.migrations, + config.assets + ); } // Step 8: Determine deployment URL @@ -1950,7 +1913,7 @@ export class SandboxSdkClient extends BaseSandboxService { instanceId, deployedUrl, deploymentId, - mode: 'dispatch-namespace' + mode: useDispatch ? 'dispatch-namespace' : 'user-worker' }); return { @@ -2041,4 +2004,4 @@ export class SandboxSdkClient extends BaseSandboxService { } return 'https'; } -} \ No newline at end of file +} diff --git a/worker/services/sandbox/sandboxTypes.ts b/worker/services/sandbox/sandboxTypes.ts index a5ab96ab..8ad56c96 100644 --- a/worker/services/sandbox/sandboxTypes.ts +++ b/worker/services/sandbox/sandboxTypes.ts @@ -66,11 +66,21 @@ export type StoredError = z.infer; export const RuntimeErrorSchema = SimpleErrorSchema export type RuntimeError = z.infer +// -- Instance creation options -- + +export const InstanceCreationRequestSchema = z.object({ + files: z.array(TemplateFileSchema), + projectName: z.string(), + webhookUrl: z.string().url().optional(), + envVars: z.record(z.string(), z.string()).optional(), + initCommand: z.string().default('bun run dev'), +}) +export type InstanceCreationRequest = z.infer + // --- Instance Details --- export const InstanceDetailsSchema = z.object({ runId: z.string(), - templateName: z.string(), startTime: z.union([z.string(), z.date()]), uptime: z.number(), previewURL: z.string().optional(), @@ -102,6 +112,7 @@ export const TemplateInfoSchema = z.object({ name: z.string(), language: z.string().optional(), frameworks: z.array(z.string()).optional(), + projectType: z.enum(['app', 'workflow', 'presentation']).default('app'), description: z.object({ selection: z.string(), usage: z.string(), @@ -139,12 +150,7 @@ export const GetTemplateFilesResponseSchema = z.object({ }) export type GetTemplateFilesResponse = z.infer -export const BootstrapRequestSchema = z.object({ - templateName: z.string(), - projectName: z.string(), - webhookUrl: z.string().url().optional(), - envVars: z.record(z.string(), z.string()).optional(), -}) +export const BootstrapRequestSchema = InstanceCreationRequestSchema export type BootstrapRequest = z.infer export const PreviewSchema = z.object({ @@ -290,44 +296,6 @@ export const ShutdownResponseSchema = z.object({ }) export type ShutdownResponse = z.infer -// /templates/from-instance (POST) -export const PromoteToTemplateRequestSchema = z.object({ - instanceId: z.string(), - templateName: z.string().optional(), -}) -export type PromoteToTemplateRequest = z.infer - -export const PromoteToTemplateResponseSchema = z.object({ - success: z.boolean(), - message: z.string().optional(), - templateName: z.string().optional(), - error: z.string().optional(), -}) -export type PromoteToTemplateResponse = z.infer - -// /templates (POST) - AI template generation -export const GenerateTemplateRequestSchema = z.object({ - prompt: z.string(), - templateName: z.string(), - options: z.object({ - framework: z.string().optional(), - 
language: z.enum(['javascript', 'typescript']).optional(),
-    styling: z.enum(['tailwind', 'css', 'scss']).optional(),
-    features: z.array(z.string()).optional(),
-  }).optional(),
-})
-export type GenerateTemplateRequest = z.infer<typeof GenerateTemplateRequestSchema>
-
-export const GenerateTemplateResponseSchema = z.object({
-  success: z.boolean(),
-  templateName: z.string(),
-  summary: z.string().optional(),
-  fileCount: z.number().optional(),
-  fileTree: FileTreeNodeSchema.optional(),
-  error: z.string().optional(),
-})
-export type GenerateTemplateResponse = z.infer<typeof GenerateTemplateResponseSchema>
-
 // /instances/:id/lint (GET)
 export const LintSeveritySchema = z.enum(['error', 'warning', 'info'])
 export type LintSeverity = z.infer<typeof LintSeveritySchema>
@@ -399,7 +367,7 @@ export const WebhookRuntimeErrorEventSchema = WebhookEventBaseSchema.extend({
     runId: z.string(),
     error: RuntimeErrorSchema,
     instanceInfo: z.object({
-      templateName: z.string().optional(),
+      instanceId: z.string(),
       serviceDirectory: z.string().optional(),
     }),
   }),
@@ -476,18 +444,6 @@ export const WebhookPayloadSchema = z.object({
   event: WebhookEventSchema,
 })
 export type WebhookPayload = z.infer<typeof WebhookPayloadSchema>
-
-// Current runner service payload (direct payload without wrapper)
-export const RunnerServiceWebhookPayloadSchema = z.object({
-  runId: z.string(),
-  error: RuntimeErrorSchema,
-  instanceInfo: z.object({
-    templateName: z.string().optional(),
-    serviceDirectory: z.string().optional(),
-  }),
-})
-export type RunnerServiceWebhookPayload = z.infer<typeof RunnerServiceWebhookPayloadSchema>
-
 /**
  * GitHub integration types for exporting generated applications
  */
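
The reworked `createInstance` no longer takes a template name: the caller ships the template files with the request and picks the dev-server command via `initCommand`, and deployments accept an explicit target. A minimal caller sketch, assuming the `InstanceCreationRequest`/`TemplateFile` shapes from `sandboxTypes.ts` and the `worker/*` path alias used elsewhere in the patch; `loadTemplateFiles` is a hypothetical helper and the concrete values are illustrative only.

```ts
import { SandboxSdkClient } from 'worker/services/sandbox/sandboxSdkClient';
import type { InstanceCreationRequest, TemplateFile } from 'worker/services/sandbox/sandboxTypes';

// Hypothetical helper: however the caller obtains template files (R2, KV, bundled assets, ...).
declare function loadTemplateFiles(templateName: string): Promise<TemplateFile[]>;

async function bootstrapInstance(sandbox: SandboxSdkClient, projectName: string) {
  // Files travel with the request as { filePath, fileContents } pairs;
  // .donttouch_files.json / .redacted_files.json are read from this list when present.
  const files = await loadTemplateFiles('vite-react-template');

  const request: InstanceCreationRequest = {
    files,
    projectName,
    initCommand: 'bun run dev',            // schema default; other project types can override it
    envVars: { VITE_LOGGER_TYPE: 'json' }, // illustrative
  };

  const created = await sandbox.createInstance(request);
  if (!created.success || !created.runId) {
    throw new Error(created.error ?? 'Instance creation failed');
  }

  // 'platform' deploys through the dispatch namespace; 'user' falls back to a direct
  // worker deploy in the SDK client and is rejected by the remote runner client.
  const deployment = await sandbox.deployToCloudflareWorkers(created.runId, 'platform');
  return { previewURL: created.previewURL, deployedUrl: deployment.deployedUrl };
}
```

Because the sandbox never resolves templates itself any more, the same request shape works for both the local SDK client and the remote runner client.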
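GitHub pushes now go through the single `exportProject` entrypoint with a discriminated `kind`, as in the exporter controller change above. A sketch of the call, assuming `getAgentStub` resolves the agent stub the same way the controller does and that the `worker/agents` alias maps to the relative import used there; the repository fields are placeholders.

```ts
import { getAgentStub } from 'worker/agents';
import type { ExportResult } from 'worker/agents/core/types';

async function pushToRepository(
  env: Parameters<typeof getAgentStub>[0],
  agentId: string,
  repo: { cloneUrl: string; htmlUrl: string; isPrivate: boolean; token: string; username: string },
): Promise<ExportResult> {
  const agentStub = await getAgentStub(env, agentId);

  // The old pushToGitHub(...) call becomes one exportProject request whose `kind`
  // selects the export backend.
  const result: ExportResult = await agentStub.exportProject({
    kind: 'github',
    github: {
      cloneUrl: repo.cloneUrl,
      repositoryHtmlUrl: repo.htmlUrl,
      isPrivate: repo.isPrivate,
      token: repo.token,
      email: 'vibesdk-bot@cloudflare.com',
      username: repo.username,
    },
  });

  if (!result.success) {
    throw new Error('GitHub export failed');
  }
  return result;
}
```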
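`CodeGenArgs` drops the old `agentMode` flag in favour of optional `behaviorType` and `projectType` fields; when `projectType` is omitted the controller resolves it to `'auto'` and lets template selection report the final type. A small sketch of that fallback, reusing the `resolveProjectType` helper shown in the controller diff; the query string is illustrative.

```ts
import type { CodeGenArgs } from 'worker/api/controllers/agent/types';
import type { ProjectType } from 'worker/agents/core/types';

// Same fallback the controller applies before calling getTemplateForQuery.
const resolveProjectType = (body: CodeGenArgs): ProjectType | 'auto' =>
  body.projectType || 'auto';

// behaviorType and projectType are both optional on the wire.
const body: CodeGenArgs = { query: 'Build a kanban board with drag-and-drop' };

console.log(resolveProjectType(body)); // 'auto'
```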